diff --git a/CHANGELOG.md b/CHANGELOG.md
index f151fdb6e8..b44a74c9f0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,6 +10,21 @@ Current

 ### Added:

+- [Major refactor for availability and schemas and tables](https://github.com/yahoo/fili/pull/165)
+    * `ImmutableAvailability` - provides an immutable, typed replacement for maps of column availabilities
+    * New `Table` implementations:
+        * `BasePhysicalTable` core implementation
+        * `ConcretePhysicalTable` creates an `ImmutableAvailability`
+    * `Schema` implementations:
+        * `BaseSchema` has `Columns`, `Granularity`
+        * `PhysicalTableSchema` has base plus `ZonedTimeGrain`, name mappings
+        * `LogicalTableSchema` base with builder from table group
+        * `ResultSetSchema` base with transforming with-ers
+
+    * `ApiName`, `TableName`: Added static factory from String to Name
+
+    * `ErrorMessageFormat` for errors during the `ResultSetMapper` cycle
+
 - [Added default base class for all dimension types](https://github.com/yahoo/fili/pull/177)
   * Added base classes `DefaultKeyValueStoreDimensionConfig`, `DefaultLookupDimensionConfig` and
     `DefaultRegisteredLookupDimensionConfig` to create default dimensions.

@@ -29,7 +44,31 @@ Current

 ### Changed:

-- [Request IDS now supports underscores.](https://github.com/yahoo/fili/pull/176)
+- [Major refactor for availability and schemas and tables](https://github.com/yahoo/fili/pull/165)
+    * `Schema` and `Table` became interfaces
+    * `Table` has-a `Schema`
+    * `PhysicalTable` extends `Table`; the interface only supports read-only operations
+    * `Schema` constructed as immutable; `Column`s no longer bind to `Schema`
+        * Removed the addNew...Column methods
+    * `Schema` implementations now: `BaseSchema`, `PhysicalTableSchema`, `LogicalTableSchema`, `ResultSetSchema`
+    * `DimensionLoader` uses `ConcretePhysicalTable`
+
+    * `PhysicalTableDefinition` made some fields private, accepts iterables, returns immutable dimensions
+
+    * `ResultSet` constructor parameter order swapped
+    * `ResultSetMapper` now depends on `ResultSetSchema`
+
+    * `TableDataSource` constructor arg narrows: PhysicalTable -> ConcretePhysicalTable
+
+    * `DataApiRequest` constructor arg narrows: Table -> LogicalTable
+
+    * `DruidQueryBuilder` now polymorphic on building data source models from the new physical tables
+
+    * `ApiFilter` schema validation moved to `DataApiRequest`
+
+    * Guava version bumped to 21.0
+
+- [Request IDs now support underscores.](https://github.com/yahoo/fili/pull/176)

 - Added support for extensions defining new Query types
   * TestDruidWebService assumes unknown query types behave like GroupBy, TimeSeries, and TopN

@@ -55,6 +94,11 @@ Current

 ### Fixed:

+- [Major refactor for availability and schemas and tables](https://github.com/yahoo/fili/pull/165)
+    * Ordering of fields on serialization could be inconsistent if intermediate stages used `HashSet` or `HashMap`.
+    * Several constructors switched to accept `Iterable` and return `LinkedHashSet`, to emphasize the importance of ordering and to prevent `HashSet` intermediates that disrupt ordering (see the sketch below).
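Reviewer sketch (not part of the diff) illustrating the ordering bullet above: `HashSet` iterates in hash order, so a column set routed through one can serialize in an order unrelated to insertion, while `LinkedHashSet` preserves insertion order. This is plain-JDK code with made-up column names:

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class ColumnOrderDemo {
    public static void main(String[] args) {
        List<String> columns = Arrays.asList("dateTime", "gender", "age", "pageViews");

        // Iterates in hash order: serialized column order may differ from insertion order.
        Set<String> hashed = new HashSet<>(columns);

        // Iterates in insertion order: serialized column order stays stable.
        Set<String> linked = new LinkedHashSet<>(columns);

        System.out.println("HashSet:       " + hashed);
        System.out.println("LinkedHashSet: " + linked);
    }
}
```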
+ + -[Fix Lookup Dimension Serialization](https://github.com/yahoo/fili/pull/187) * Fix a bug where lookup dimension is serialized as dimension spec in both outer and inner query @@ -68,6 +112,18 @@ Current ### Removed: +- [Major refactor for availability and schemas and tables](https://github.com/yahoo/fili/pull/165) + * Removed `ZonedSchema` (all methods moved to child class ResultSetSchema) + + * `PhysicalTable` no longer supports mutable availability + * Removed addColumn, removeColumn, getWorkingIntervals, resetColumns, commit + * resetColumns moved to BasePhysicalTable + * Other mutators no longer exist, availability is immutable + * getAvailableIntervals removed (availability.getAvailableIntervals replaces) + + * `DruidResponseParser` buildSchema removed, work moved to ResultSetSchema constructor + + * `BaseTableLoader` removed redundant buildLogicalTable methods v0.7.36 - 2017/01/30 diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/application/AbstractBinderFactory.java b/fili-core/src/main/java/com/yahoo/bard/webservice/application/AbstractBinderFactory.java index b42312c1ea..6f49db6605 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/application/AbstractBinderFactory.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/application/AbstractBinderFactory.java @@ -167,6 +167,7 @@ public abstract class AbstractBinderFactory implements BinderFactory { SYSTEM_CONFIG.getPackageVariableName("loader_scheduler_thread_pool_size"), LOADER_SCHEDULER_THREAD_POOL_SIZE_DEFAULT ); + public static final String SYSTEM_CONFIG_TIMEZONE_KEY = "timezone"; private ObjectMappersSuite objectMappers; @@ -355,7 +356,10 @@ protected Class buildDruidResponseParser() { */ protected Clock getClock() { return Clock.system( - ZoneId.of(SYSTEM_CONFIG.getStringProperty(SYSTEM_CONFIG.getPackageVariableName("timezone"), "UTC")) + ZoneId.of(SYSTEM_CONFIG.getStringProperty( + SYSTEM_CONFIG.getPackageVariableName(SYSTEM_CONFIG_TIMEZONE_KEY), + "UTC" + )) ); } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/application/DruidDimensionsLoader.java b/fili-core/src/main/java/com/yahoo/bard/webservice/application/DruidDimensionsLoader.java index 06ad1409ae..e31d7c964a 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/application/DruidDimensionsLoader.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/application/DruidDimensionsLoader.java @@ -17,6 +17,7 @@ import com.yahoo.bard.webservice.druid.model.query.DruidSearchQuery; import com.yahoo.bard.webservice.druid.model.query.RegexSearchQuerySpec; import com.yahoo.bard.webservice.druid.model.query.SearchQuerySpec; +import com.yahoo.bard.webservice.table.ConcretePhysicalTable; import com.yahoo.bard.webservice.table.PhysicalTableDictionary; import com.yahoo.bard.webservice.web.handlers.RequestContext; @@ -110,6 +111,8 @@ public DruidDimensionsLoader( .collect(Collectors.toList()); dataSources = physicalTableDictionary.values().stream() + .filter(physicalTable -> physicalTable instanceof ConcretePhysicalTable) + .map(physicalTable -> (ConcretePhysicalTable) physicalTable) .map(TableDataSource::new) .collect(Collectors.toList()); } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/async/AsyncUtils.java b/fili-core/src/main/java/com/yahoo/bard/webservice/async/AsyncUtils.java index 40e553bb8c..ca3fe29f51 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/async/AsyncUtils.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/async/AsyncUtils.java @@ -3,8 +3,8 @@ package 
com.yahoo.bard.webservice.async; import com.yahoo.bard.webservice.data.ResultSet; +import com.yahoo.bard.webservice.data.ResultSetSchema; import com.yahoo.bard.webservice.druid.model.query.AllGranularity; -import com.yahoo.bard.webservice.table.Schema; import com.yahoo.bard.webservice.web.PreResponse; import com.yahoo.bard.webservice.web.responseprocessors.ResponseContext; import com.yahoo.bard.webservice.web.responseprocessors.ResponseContextKeys; @@ -61,7 +61,11 @@ public static PreResponse buildErrorPreResponse(Throwable throwable) { } return new PreResponse( - new ResultSet(Collections.emptyList(), new Schema(AllGranularity.INSTANCE)), + new ResultSet(new ResultSetSchema( + AllGranularity.INSTANCE, + Collections.emptySet()), + Collections.emptyList() + ), responseContext ); } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/async/ResponseException.java b/fili-core/src/main/java/com/yahoo/bard/webservice/async/ResponseException.java index cbdfebced3..94d3160101 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/async/ResponseException.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/async/ResponseException.java @@ -84,6 +84,7 @@ public ResponseException( * @param statusType Status type of the response * @param druidQuery The druid query being processed * @param error Exception object with error details + * * @deprecated In order to ensure correct serialization of the Druid Query, an ObjectWriter with all appropriate * configuration should be passed in to the constructor */ diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/DruidQueryBuilder.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/DruidQueryBuilder.java index 39cda87ead..93d79ce44a 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/DruidQueryBuilder.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/DruidQueryBuilder.java @@ -11,6 +11,7 @@ import com.yahoo.bard.webservice.druid.model.datasource.DataSource; import com.yahoo.bard.webservice.druid.model.datasource.QueryDataSource; import com.yahoo.bard.webservice.druid.model.datasource.TableDataSource; +import com.yahoo.bard.webservice.druid.model.datasource.UnionDataSource; import com.yahoo.bard.webservice.druid.model.filter.Filter; import com.yahoo.bard.webservice.druid.model.having.Having; import com.yahoo.bard.webservice.druid.model.orderby.LimitSpec; @@ -21,6 +22,7 @@ import com.yahoo.bard.webservice.druid.model.query.GroupByQuery; import com.yahoo.bard.webservice.druid.model.query.TimeSeriesQuery; import com.yahoo.bard.webservice.druid.model.query.TopNQuery; +import com.yahoo.bard.webservice.table.ConcretePhysicalTable; import com.yahoo.bard.webservice.table.LogicalTable; import com.yahoo.bard.webservice.table.LogicalTableDictionary; import com.yahoo.bard.webservice.table.PhysicalTable; @@ -30,6 +32,8 @@ import com.yahoo.bard.webservice.table.resolver.PhysicalTableResolver; import com.yahoo.bard.webservice.web.DataApiRequest; +import com.google.common.collect.Sets; + import org.joda.time.DateTimeZone; import org.joda.time.Interval; import org.slf4j.Logger; @@ -199,51 +203,36 @@ protected GroupByQuery buildGroupByQuery( granularity = template.getTimeGrain().buildZonedTimeGrain(timeZone); } + DataSource dataSource; if (!template.isNested()) { LOG.trace("Building a single pass druid groupBy query"); - - // The data source is the table directly, since there is no nested query below us - DataSource dataSource = new TableDataSource(table); - - // Filters must be applied at the lowest 
level as they exclude data from aggregates - return new GroupByQuery( - dataSource, + dataSource = buildTableDataSource(table); + } else { + LOG.trace("Building a multi pass druid groupBy query"); + // Build the inner query without an order by, since we only want to do that at the top level + // Sorts don't apply to inner queries and Filters only apply to the innermost query + GroupByQuery query = buildGroupByQuery( + template.getInnerQuery(), + table, granularity, + timeZone, groupByDimensions, filter, having, - template.getAggregations(), - template.getPostAggregations(), intervals, - druidOrderBy + (LimitSpec) null ); + dataSource = new QueryDataSource(query); + // Filters have been handled by the inner query, are not needed/allowed on the outer query + filter = null; } - LOG.trace("Building a multi pass druid groupBy query"); - - // Build the inner query without an order by, since we only want to do that at the top level - TemplateDruidQuery nestedQuery = template.getInnerQuery(); - GroupByQuery query = buildGroupByQuery( - nestedQuery, - table, - granularity, - timeZone, - groupByDimensions, - filter, - having, - intervals, - (LimitSpec) null - ); - - // The data source is the inner query we just built - DataSource dataSource = new QueryDataSource(query); - - // Build the wrapping query without filters + // Filters must be applied at the lowest level as they exclude data from aggregates return new GroupByQuery( dataSource, granularity, groupByDimensions, - (Filter) null, + filter, having, template.getAggregations(), template.getPostAggregations(), @@ -252,6 +241,21 @@ protected GroupByQuery buildGroupByQuery( ); } + /** + * Build a data source from a table. + * + * @param table A fact table or fact table view + * + * @return A table datasource for a fact table or a union data source for a fact table view + */ + private DataSource buildTableDataSource(PhysicalTable table) { + if (table instanceof ConcretePhysicalTable) { + return new TableDataSource((ConcretePhysicalTable) table); + } else { + return new UnionDataSource(Sets.newHashSet(table)); + } + } + /** * Builds a druid topN query. * @@ -308,10 +312,8 @@ protected TopNQuery buildTopNQuery( LOG.trace("Building a single pass druid topN query"); // The data source is the table directly, since there is no nested query below us - DataSource dataSource = new TableDataSource(table); - return new TopNQuery( - dataSource, + buildTableDataSource(table), // The check that the set of dimensions has exactly one element is currently done above granularity, groupByDimension.iterator().next(), @@ -325,7 +327,7 @@ protected TopNQuery buildTopNQuery( } /** - * Builds a druid timeseries query. + * Builds a druid TimeSeries query. * * @param template The query template. 
Not nested since nesting is not supported in druid timeseries queries * @param table The physical table that underlies the lowest-level datasource @@ -367,11 +369,8 @@ protected TimeSeriesQuery buildTimeSeriesQuery( LOG.trace("Building a single pass druid timeseries query"); - // The data source is the table directly, since there is no nested query below us - DataSource dataSource = new TableDataSource(table); - return new TimeSeriesQuery( - dataSource, + buildTableDataSource(table), granularity, filter, template.getAggregations(), diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/DruidResponseParser.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/DruidResponseParser.java index 1069131044..137ec6f5a2 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/DruidResponseParser.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/DruidResponseParser.java @@ -4,17 +4,11 @@ import static com.yahoo.bard.webservice.web.ErrorMessageFormat.RESULT_SET_ERROR; -import com.yahoo.bard.webservice.data.dimension.Dimension; import com.yahoo.bard.webservice.data.dimension.DimensionColumn; import com.yahoo.bard.webservice.data.dimension.DimensionRow; import com.yahoo.bard.webservice.data.metric.MetricColumn; -import com.yahoo.bard.webservice.druid.model.QueryType; -import com.yahoo.bard.webservice.druid.model.aggregation.Aggregation; -import com.yahoo.bard.webservice.druid.model.postaggregation.PostAggregation; -import com.yahoo.bard.webservice.druid.model.query.DruidAggregationQuery; -import com.yahoo.bard.webservice.druid.model.query.Granularity; import com.yahoo.bard.webservice.druid.model.DefaultQueryType; -import com.yahoo.bard.webservice.table.ZonedSchema; +import com.yahoo.bard.webservice.druid.model.QueryType; import com.fasterxml.jackson.databind.JsonNode; @@ -39,37 +33,6 @@ public class DruidResponseParser { private static final Logger LOG = LoggerFactory.getLogger(DruidResponseParser.class); - /** - * Build the schema that should be expected for the specified query. - * - * @param druidQuery The query - * @param granularity The granularity for the schema - * @param dateTimeZone The timezone for the schema - * - * @return The schema for the query - */ - public ZonedSchema buildSchema( - DruidAggregationQuery druidQuery, - Granularity granularity, - DateTimeZone dateTimeZone - ) { - ZonedSchema resultSetSchema = new ZonedSchema(granularity, dateTimeZone); - - for (Aggregation aggregation : druidQuery.getAggregations()) { - MetricColumn.addNewMetricColumn(resultSetSchema, aggregation.getName()); - } - - for (PostAggregation postAggregation : druidQuery.getPostAggregations()) { - MetricColumn.addNewMetricColumn(resultSetSchema, postAggregation.getName()); - } - - for (Dimension dimension : druidQuery.getDimensions()) { - DimensionColumn.addNewDimensionColumn(resultSetSchema, dimension); - } - - return resultSetSchema; - } - /** * Parse Druid GroupBy result into ResultSet. 
     *
@@ -77,10 +40,16 @@ public ZonedSchema buildSchema(
      * @param schema Schema for results
      * @param queryType the type of query, note that this implementation only supports instances of
      * {@link DefaultQueryType}
+     * @param dateTimeZone the time zone used to format the results
      *
      * @return the set of results
      */
-    public ResultSet parse(JsonNode jsonResult, ZonedSchema schema, QueryType queryType) {
+    public ResultSet parse(
+            JsonNode jsonResult,
+            ResultSetSchema schema,
+            QueryType queryType,
+            DateTimeZone dateTimeZone
+    ) {
         LOG.trace("Parsing druid query {} by json result: {} using schema: {}", queryType, jsonResult, schema);
@@ -97,16 +66,16 @@ public ResultSet parse(JsonNode jsonResult, ZonedSchema schema, QueryType queryT
         List<Result> results = null;
         switch (defaultQueryType) {
             case GROUP_BY:
-                results = makeGroupByResults(jsonResult, dimensionColumns, metricColumns, schema.getDateTimeZone());
+                results = makeGroupByResults(jsonResult, dimensionColumns, metricColumns, dateTimeZone);
                 break;
             case TOP_N:
-                results = makeTopNResults(jsonResult, dimensionColumns, metricColumns, schema.getDateTimeZone());
+                results = makeTopNResults(jsonResult, dimensionColumns, metricColumns, dateTimeZone);
                 break;
             case TIMESERIES:
-                results = makeTimeSeriesResults(jsonResult, metricColumns, schema.getDateTimeZone());
+                results = makeTimeSeriesResults(jsonResult, metricColumns, dateTimeZone);
                 break;
             case LOOKBACK:
-                results = makeLookbackResults(jsonResult, dimensionColumns, metricColumns, schema.getDateTimeZone());
+                results = makeLookbackResults(jsonResult, dimensionColumns, metricColumns, dateTimeZone);
                 break;
             default:
                 // Throw an exception for unsupported query types
@@ -114,7 +83,7 @@ public ResultSet parse(JsonNode jsonResult, ZonedSchema schema, QueryType queryT
         LOG.trace("Parsed druid query {} results: {}", queryType, results);
-        return new ResultSet(results, schema);
+        return new ResultSet(schema, results);
     }

     /**
diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/PartialDataHandler.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/PartialDataHandler.java
index 55a6ff2437..f2ecf6e341 100644
--- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/PartialDataHandler.java
+++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/PartialDataHandler.java
@@ -8,15 +8,19 @@
 import com.yahoo.bard.webservice.druid.model.query.DruidAggregationQuery;
 import com.yahoo.bard.webservice.druid.model.query.Granularity;
 import com.yahoo.bard.webservice.table.PhysicalTable;
+import com.yahoo.bard.webservice.table.availability.Availability;
 import com.yahoo.bard.webservice.util.IntervalUtils;
 import com.yahoo.bard.webservice.util.SimplifiedIntervalList;
 import com.yahoo.bard.webservice.util.TableUtils;
 import com.yahoo.bard.webservice.web.DataApiRequest;

+import org.joda.time.Interval;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
 import java.util.Set;

 import javax.inject.Singleton;
@@ -139,11 +143,15 @@ public SimplifiedIntervalList getIntersectSubintervalsForColumns(
             Collection<String> columnNames,
             PhysicalTable physicalTable
     ) {
+        Availability availability = physicalTable.getAvailability();
         return columnNames.isEmpty() ?
                new SimplifiedIntervalList() :
-                new SimplifiedIntervalList(columnNames.stream()
-                        .map(physicalTable::getIntervalsByColumnName)
-                        .reduce(null, IntervalUtils::getOverlappingSubintervals));
+                new SimplifiedIntervalList(
+                        columnNames.stream()
+                                .map(availability::getIntervalsByColumnName)
+                                .map(it -> (Set<Interval>) new HashSet<>(it))
+                                .reduce(null, IntervalUtils::getOverlappingSubintervals)
+                );
     }

     /**
@@ -159,11 +167,12 @@ public SimplifiedIntervalList getUnionSubintervalsForColumns(
             Collection<String> columnNames,
             PhysicalTable physicalTable
     ) {
+        Availability availability = physicalTable.getAvailability();
         return columnNames.isEmpty() ?
                 new SimplifiedIntervalList() :
                 columnNames.stream()
-                        .map(physicalTable::getIntervalsByColumnName)
-                        .flatMap(Set::stream)
+                        .map(availability::getIntervalsByColumnName)
+                        .flatMap(List::stream)
                         .collect(SimplifiedIntervalList.getCollector());
     }
 }
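Reviewer sketch (not part of the diff): the intersect path above reduces per-column availability down to the intervals that every requested column can cover. A fili-free illustration of that reduction, with a made-up integer `Range` standing in for a Joda `Interval` and a plain loop standing in for `IntervalUtils::getOverlappingSubintervals`:

```java
import java.util.Arrays;
import java.util.List;

public class IntersectDemo {
    /** Stand-in for an availability interval [start, end) on a time axis. */
    static final class Range {
        final int start;
        final int end;
        Range(int start, int end) { this.start = start; this.end = end; }
        @Override public String toString() { return "[" + start + ", " + end + ")"; }
    }

    /** Overlap of two ranges, or null when they are disjoint. */
    static Range overlap(Range a, Range b) {
        int start = Math.max(a.start, b.start);
        int end = Math.min(a.end, b.end);
        return start < end ? new Range(start, end) : null;
    }

    public static void main(String[] args) {
        // One availability range per requested column; a query is only complete
        // where all of the columns overlap.
        List<Range> perColumn = Arrays.asList(new Range(0, 10), new Range(5, 12), new Range(3, 8));

        Range common = perColumn.get(0);
        for (Range range : perColumn.subList(1, perColumn.size())) {
            common = overlap(common, range); // reduce across columns
            if (common == null) {
                break; // no shared availability at all
            }
        }
        System.out.println("Common availability: " + common); // [5, 8)
    }
}
```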
diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/PreResponseDeserializer.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/PreResponseDeserializer.java
index bc00803d04..a2f658642c 100644
--- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/PreResponseDeserializer.java
+++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/PreResponseDeserializer.java
@@ -24,7 +24,7 @@
 import com.yahoo.bard.webservice.data.metric.MetricColumnWithValueType;
 import com.yahoo.bard.webservice.data.time.GranularityParser;
 import com.yahoo.bard.webservice.druid.model.query.Granularity;
-import com.yahoo.bard.webservice.table.ZonedSchema;
+import com.yahoo.bard.webservice.table.Column;
 import com.yahoo.bard.webservice.util.GranularityParseException;
 import com.yahoo.bard.webservice.web.ErrorMessageFormat;
 import com.yahoo.bard.webservice.web.PreResponse;
@@ -35,6 +35,7 @@
 import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Streams;

 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
@@ -43,12 +44,13 @@
 import java.io.IOException;
 import java.util.HashMap;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.function.Function;
 import java.util.stream.Collectors;
-import java.util.stream.StreamSupport;
+import java.util.stream.Stream;

 /**
  * Class to de-serialize and prepare the PreResponse object from JSON. The advantages of custom deserialization are:
@@ -203,13 +205,12 @@ private ResponseContext getResponseContext(JsonNode serializedResponseContext) t
      * @return ResultSet object generated from JsonNode
      */
     private ResultSet getResultSet(JsonNode serializedResultSet) {
-        ZonedSchema zonedSchema = getZonedSchema(serializedResultSet.get(SCHEMA_KEY));
-        List<Result> results = StreamSupport
-                .stream(serializedResultSet.get(RESULTS_KEY).spliterator(), false)
-                .map(serializedResult -> getResult(serializedResult, zonedSchema))
+        ResultSetSchema resultSetSchema = getResultSetSchema(serializedResultSet.get(SCHEMA_KEY));
+        List<Result> results = Streams.stream(serializedResultSet.get(RESULTS_KEY))
+                .map(serializedResult -> getResult(serializedResult, resultSetSchema))
                 .collect(Collectors.toList());

-        return new ResultSet(results, zonedSchema);
+        return new ResultSet(resultSetSchema, results);
     }

     /**
@@ -217,10 +218,9 @@ private ResultSet getResultSet(JsonNode serializedResultSet) {
      *
      * @param schemaNode JsonNode which contains all the columns, timezone and granularity
      *
-     * @return ZonedSchema object generated from the JsonNode
+     * @return ResultSetSchema object generated from the JsonNode
      */
-    private ZonedSchema getZonedSchema(JsonNode schemaNode) {
-
+    private ResultSetSchema getResultSetSchema(JsonNode schemaNode) {
         DateTimeZone timezone = generateTimezone(
                 schemaNode.get(SCHEMA_TIMEZONE).asText(),
                 DateTimeZone.forID(
@@ -228,22 +228,17 @@ private ZonedSchema getZonedSchema(JsonNode schemaNode) {
                 )
         );

-        //Recreate ZonedSchema from granularity and timezone values
-        ZonedSchema zonedSchema = new ZonedSchema(
-                generateGranularity(schemaNode.get(SCHEMA_GRANULARITY).asText(), timezone),
-                timezone
-        );
-
-        StreamSupport.stream(schemaNode.get(SCHEMA_DIM_COLUMNS).spliterator(), false)
-                .map(JsonNode::asText)
-                .map(this::resolveDimensionName)
-                .forEach(dimension -> DimensionColumn.addNewDimensionColumn(zonedSchema, dimension));
-
-        schemaNode.get(SCHEMA_METRIC_COLUMNS_TYPE).fields().forEachRemaining(
-                field -> zonedSchema.addColumn(new MetricColumnWithValueType(field.getKey(), field.getValue().asText()))
-        );
+        //Recreate ResultSetSchema
+        LinkedHashSet<Column> columns = Stream.concat(
+                Streams.stream(schemaNode.get(SCHEMA_DIM_COLUMNS))
+                        .map(JsonNode::asText)
+                        .map(this::resolveDimensionName)
+                        .map(DimensionColumn::new),
+                Streams.stream(() -> schemaNode.get(SCHEMA_METRIC_COLUMNS_TYPE).fields())
+                        .map(entry -> new MetricColumnWithValueType(entry.getKey(), entry.getValue().asText()))
+        ).collect(Collectors.toCollection(LinkedHashSet::new));

-        return zonedSchema;
+        return new ResultSetSchema(generateGranularity(schemaNode.get(SCHEMA_GRANULARITY).asText(), timezone), columns);
     }

     /**
@@ -268,19 +263,19 @@ private Dimension resolveDimensionName(String dimensionName) {
      * Creates new Result object from JsonNode.
     *
     * @param serializedResult JsonNode which contains all the serialized details to generate Result object
-     * @param zonedSchema Schema of the result to generate the Result object
+     * @param resultSetSchema Schema of the result to generate the Result object
     *
     * @return Result object generated from given JsonNode
     */
-    private Result getResult(JsonNode serializedResult, ZonedSchema zonedSchema) {
+    private Result getResult(JsonNode serializedResult, ResultSetSchema resultSetSchema) {
        return new Result(
                extractDimensionValues(
                        serializedResult.get(DIMENSION_VALUES_KEY),
-                        zonedSchema.getColumns(DimensionColumn.class)
+                        resultSetSchema.getColumns(DimensionColumn.class)
                ),
                extractMetricValues(
                        serializedResult.get(METRIC_VALUES_KEY),
-                        zonedSchema.getColumns(MetricColumnWithValueType.class)
+                        resultSetSchema.getColumns(MetricColumnWithValueType.class)
                ),
                DateTime.parse(serializedResult.get(TIMESTAMP_KEY).asText())
        );
diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/ResultSet.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/ResultSet.java
index ff22991801..5d2bee1cc7 100644
--- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/ResultSet.java
+++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/ResultSet.java
@@ -2,8 +2,6 @@
 // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms.
 package com.yahoo.bard.webservice.data;

-import com.yahoo.bard.webservice.table.Schema;
-
 import java.util.ArrayList;
 import java.util.List;

@@ -12,15 +10,15 @@
  */
 public class ResultSet extends ArrayList<Result> {

-    private final Schema schema;
+    private final ResultSetSchema schema;

     /**
      * Constructor.
      *
-     * @param results The list of results
      * @param schema The associated schema
+     * @param results The list of results
      */
-    public ResultSet(List<Result> results, Schema schema) {
+    public ResultSet(ResultSetSchema schema, List<Result> results) {
         super(results);
         this.schema = schema;
     }
@@ -30,7 +28,7 @@ public ResultSet(List<Result> results, Schema schema) {
      *
      * @return The schema associated with this result set
      */
-    public Schema getSchema() {
+    public ResultSetSchema getSchema() {
         return this.schema;
     }
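Reviewer sketch (not part of the diff): with the swap above, the schema now leads the constructor. This assumes the `ResultSetSchema` introduced in the next file, the public `MetricColumn(String)` constructor from this PR, and that `DefaultTimeGrain.DAY` satisfies `Granularity`; the grain and column name are arbitrary:

```java
import com.yahoo.bard.webservice.data.ResultSet;
import com.yahoo.bard.webservice.data.ResultSetSchema;
import com.yahoo.bard.webservice.data.metric.MetricColumn;
import com.yahoo.bard.webservice.data.time.DefaultTimeGrain;

import java.util.Collections;

public class ResultSetConstructionSketch {
    public static void main(String[] args) {
        // Schema first, results second under the new parameter order.
        ResultSetSchema schema = new ResultSetSchema(
                DefaultTimeGrain.DAY,
                Collections.singleton(new MetricColumn("pageViews"))
        );
        ResultSet empty = new ResultSet(schema, Collections.emptyList());
        System.out.println(empty.getSchema().getGranularity());
    }
}
```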
diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/ResultSetSchema.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/ResultSetSchema.java
new file mode 100644
index 0000000000..f199b24155
--- /dev/null
+++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/ResultSetSchema.java
@@ -0,0 +1,42 @@
+// Copyright 2017 Yahoo Inc.
+// Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms.
+package com.yahoo.bard.webservice.data;
+
+import com.yahoo.bard.webservice.druid.model.query.Granularity;
+import com.yahoo.bard.webservice.table.BaseSchema;
+import com.yahoo.bard.webservice.table.Column;
+
+import java.util.LinkedHashSet;
+
+import javax.validation.constraints.NotNull;
+
+/**
+ * The schema for a result set.
+ * The result set describes the set of data returned from a fact source. Its schema includes dimension and metric
+ * columns as well as a granularity describing how time is bucketed for the result set.
+ */
+public class ResultSetSchema extends BaseSchema {
+
+    /**
+     * Constructor.
+     *
+     * @param granularity The bucketing time grain for this schema
+     * @param columns The columns in this schema
+     */
+    public ResultSetSchema(@NotNull Granularity granularity, Iterable<Column> columns) {
+        super(granularity, columns);
+    }
+
+    /**
+     * Create a new result set schema with an additional final column.
+     *
+     * @param column the column being added
+     *
+     * @return the new schema with the column appended
+     */
+    public ResultSetSchema withAddColumn(Column column) {
+        LinkedHashSet<Column> columns = new LinkedHashSet<>(this.getColumns());
+        columns.add(column);
+        return new ResultSetSchema(this.getGranularity(), columns);
+    }
+}
diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/ResultSetSerializationProxy.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/ResultSetSerializationProxy.java
index b2f273fb87..5353049871 100644
--- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/ResultSetSerializationProxy.java
+++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/ResultSetSerializationProxy.java
@@ -8,7 +8,7 @@
 import com.yahoo.bard.webservice.data.metric.MetricColumn;
 import com.yahoo.bard.webservice.table.Column;
 import com.yahoo.bard.webservice.table.Schema;
-import com.yahoo.bard.webservice.table.ZonedSchema;
+import com.yahoo.bard.webservice.util.DateTimeUtils;

 import com.fasterxml.jackson.annotation.JsonProperty;

@@ -96,16 +96,11 @@ public Map<String, Object> getSerializedSchema() {
      *
      * @return Schema components.
      */
-    private Map<String, Object> getSchemaComponents(Schema schema) {
+    private Map<String, Object> getSchemaComponents(ResultSetSchema schema) {
         Map<String, Object> schemaComponents = new HashMap<>();

-        String timeId = (schema instanceof ZonedSchema) ?
-                ((ZonedSchema) schema).getDateTimeZone().getID() :
-                SYSTEM_CONFIG.getStringProperty(SYSTEM_CONFIG.getPackageVariableName("timezone"), "UTC");
-
-        schemaComponents.put(SCHEMA_TIMEZONE, timeId);
+        schemaComponents.put(SCHEMA_TIMEZONE, DateTimeUtils.getTimeZone(schema.getGranularity()).getID());
         schemaComponents.put(SCHEMA_GRANULARITY, schema.getGranularity().getName());
-
         schemaComponents.put(
                 SCHEMA_DIM_COLUMNS,
                 schema.getColumns(DimensionColumn.class).stream().map(Column::getName).collect(Collectors.toSet())
@@ -114,6 +109,7 @@ private Map<String, Object> getSchemaComponents(Schema schema) {
                 SCHEMA_METRIC_COLUMNS,
                 getMetricColumnNames(schema)
         );
+
         return schemaComponents;
     }
diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/names/ApiMetricName.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/names/ApiMetricName.java
index 0b5ad61c70..564c19cf72 100644
--- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/names/ApiMetricName.java
+++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/names/ApiMetricName.java
@@ -6,6 +6,8 @@
 import com.yahoo.bard.webservice.druid.model.query.AllGranularity;
 import com.yahoo.bard.webservice.druid.model.query.Granularity;

+import java.util.Objects;
+
 /**
  * Interface to mark metric names.
  *

@@ -49,4 +51,44 @@ default boolean isValidFor(Granularity granularity) {
      * @return User facing name for this metric
      */
     String getApiName();
+
+    /**
+     * Wrap a string in an anonymous instance of ApiMetricName.
+     *
+     * @param name the name being wrapped
+     *
+     * @return an anonymous subclass instance of ApiMetricName
+     */
+    static ApiMetricName of(String name) {
+        return new ApiMetricName() {
+            @Override
+            public boolean isValidFor(TimeGrain grain) {
+                return true;
+            }
+
+            @Override
+            public String getApiName() {
+                return name;
+            }
+
+            @Override
+            public String asName() {
+                return name;
+            }
+
+            @Override
+            public boolean equals(Object o) {
+                if (o == null || ! (o instanceof ApiMetricName)) {
+                    return false;
+                }
+                return Objects.equals(name, ((ApiMetricName) o).asName()) &&
+                        Objects.equals(name, ((ApiMetricName) o).getApiName());
+            }
+
+            @Override
+            public int hashCode() {
+                return name.hashCode();
+            }
+        };
+    }
 }
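Reviewer sketch (not part of the diff) of the new static factories; `TableName.of` arrives in the next file, and the string values here are made up. Wrappers over the same string compare equal and share a hash code, so they can stand in as map and set keys:

```java
import com.yahoo.bard.webservice.data.config.names.ApiMetricName;
import com.yahoo.bard.webservice.data.config.names.TableName;

public class NameFactorySketch {
    public static void main(String[] args) {
        ApiMetricName pageViews = ApiMetricName.of("pageViews");
        System.out.println(pageViews.getApiName());                          // pageViews
        System.out.println(pageViews.equals(ApiMetricName.of("pageViews"))); // true

        TableName shapes = TableName.of("shapes");
        System.out.println(shapes.asName());                                 // shapes
    }
}
```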
diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/names/TableName.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/names/TableName.java
index 6d60214e84..760e753fe5 100644
--- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/names/TableName.java
+++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/names/TableName.java
@@ -2,6 +2,8 @@
 // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms.
 package com.yahoo.bard.webservice.data.config.names;

+import java.util.Comparator;
+
 /**
  * Marker interface for objects that can be treated as a table name in druid or web services.
  */
@@ -14,4 +16,35 @@
      */
     String asName();

+    /**
+     * Wrap a string in an anonymous instance of TableName.
+     * Rather than make heavy use of this, instead make a class.
+     *
+     * @param name the name being wrapped
+     *
+     * @return an anonymous subclass instance of TableName
+     */
+    static TableName of(String name) {
+        return new TableName() {
+            @Override
+            public String asName() {
+                return name;
+            }
+
+            @Override
+            public int hashCode() {
+                return name.hashCode();
+            }
+
+            @Override
+            public boolean equals(Object o) {
+                if (o != null && o instanceof TableName) {
+                    return name.equals(((TableName) o).asName());
+                }
+                return false;
+            }
+        };
+    }
+
+    Comparator<TableName> COMPARATOR = Comparator.comparing(TableName::asName);
 }
diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/table/BaseTableLoader.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/table/BaseTableLoader.java
index ba557ed93b..f427990612 100644
--- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/table/BaseTableLoader.java
+++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/table/BaseTableLoader.java
@@ -9,14 +9,15 @@
 import com.yahoo.bard.webservice.data.dimension.Dimension;
 import com.yahoo.bard.webservice.data.dimension.DimensionColumn;
 import com.yahoo.bard.webservice.data.dimension.DimensionDictionary;
-import com.yahoo.bard.webservice.data.metric.LogicalMetric;
-import com.yahoo.bard.webservice.data.metric.LogicalMetricColumn;
 import com.yahoo.bard.webservice.data.metric.MetricColumn;
 import com.yahoo.bard.webservice.data.metric.MetricDictionary;
 import com.yahoo.bard.webservice.druid.model.query.Granularity;
+import com.yahoo.bard.webservice.table.Column;
+import com.yahoo.bard.webservice.table.ConcretePhysicalTable;
 import com.yahoo.bard.webservice.table.LogicalTable;
 import com.yahoo.bard.webservice.table.LogicalTableDictionary;
 import com.yahoo.bard.webservice.table.PhysicalTable;
+import com.yahoo.bard.webservice.table.Schema;
 import com.yahoo.bard.webservice.table.TableGroup;
 import com.yahoo.bard.webservice.table.TableIdentifier;

@@ -29,6 +30,7 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;

 /**
  * Provides commonly-needed methods for loading tables.
@@ -36,6 +38,7 @@
 public abstract class BaseTableLoader implements TableLoader {

     private static final Logger LOG = LoggerFactory.getLogger(BaseTableLoader.class);
+
     protected final DateTimeZone defaultTimeZone;

     /**
@@ -69,13 +72,37 @@ protected BaseTableLoader() {
      * @param dictionaries The container for all the data dictionaries
      *
      * @return A table group binding all the tables for a logical table view together.
+     *
+     * @deprecated logicalTableName is not used in TableGroup
      */
+    @Deprecated
     public TableGroup buildTableGroup(
             String logicalTableName,
             Set<ApiMetricName> apiMetrics,
             Set<FieldName> druidMetrics,
             Set<PhysicalTableDefinition> tableDefinitions,
             ResourceDictionaries dictionaries
+    ) {
+        return buildTableGroup(apiMetrics, druidMetrics, tableDefinitions, dictionaries);
+    }
+
+    /**
+     * Builds a table group.
+     *

+ * Builds and loads the physical tables for the physical table definitions as well. + * + * @param apiMetrics The set of metric names surfaced to the api + * @param druidMetrics Names of druid datasource metric columns + * @param tableDefinitions A list of config objects for physical tables + * @param dictionaries The container for all the data dictionaries + * + * @return A table group binding all the tables for a logical table view together. + */ + public TableGroup buildTableGroup( + Set apiMetrics, + Set druidMetrics, + Set tableDefinitions, + ResourceDictionaries dictionaries ) { // Load a physical table for each of the table definitions LinkedHashSet physicalTables = new LinkedHashSet<>(); @@ -86,12 +113,14 @@ public TableGroup buildTableGroup( } //Derive the logical dimensions by taking the union of all the physical dimensions - LinkedHashSet dimensions = physicalTables.stream() - .flatMap(table -> table.getColumns(DimensionColumn.class).stream()) + Set dimensions = physicalTables.stream() + .map(PhysicalTable::getSchema) + .map(Schema::getColumns) + .flatMap(Set::stream) + .filter(column -> column instanceof DimensionColumn) + .map(column -> (DimensionColumn) column) .map(DimensionColumn::getDimension) .collect(Collectors.toCollection(LinkedHashSet::new)); - - // Put all of the table definitions into a new table group return new TableGroup(physicalTables, apiMetrics, dimensions); } @@ -140,7 +169,7 @@ public void loadLogicalTableWithGranularities( // For every legal grain for (Granularity grain : validGrains) { // Build the logical table - LogicalTable logicalTable = buildLogicalTable(logicalTableName, grain, nameGroup, metricDictionary); + LogicalTable logicalTable = new LogicalTable(logicalTableName, grain, nameGroup, metricDictionary); // Load it into the dictionary logicalDictionary.put(new TableIdentifier(logicalTable), logicalTable); @@ -159,7 +188,10 @@ public void loadLogicalTableWithGranularities( * @param metrics The dictionary of all metrics * * @return The logical table built + * + * @deprecated use new LogicalTable(...) 
by preference
+     */
+    @Deprecated
     public LogicalTable buildLogicalTable(
             String name,
             Granularity granularity,
@@ -193,7 +225,10 @@ public LogicalTable buildLogicalTable(
      * @param metrics The dictionary of all metrics
      *
      * @return The logical table built
+     *
+     * @deprecated The LogicalTable constructor is being mirrored here, can be referenced directly
      */
+    @Deprecated
     public LogicalTable buildLogicalTable(
             String name,
             Granularity granularity,
@@ -204,39 +239,16 @@ public LogicalTable buildLogicalTable(
             TableGroup group,
             MetricDictionary metrics
     ) {
-        LogicalTable logicalTable = new LogicalTable(
+        return new LogicalTable(
                 name,
                 category,
                 longName,
                 granularity,
                 retention,
                 description,
-                group
+                group,
+                metrics
         );
-
-        // All Logical tables support the dimension set for their table group
-        PhysicalTable firstPhysicalTable = group.getPhysicalTables().iterator().next();
-        Set<PhysicalTable> tables = group.getPhysicalTables();
-        for (Dimension dim : group.getDimensions()) {
-            // Select the first table with a non-default logical mapping to this dimension name
-            // otherwise, use the defaulting behavior from the first table in the list
-            PhysicalTable physicalTable = tables.stream()
-                    .filter(table -> table.hasLogicalMapping(dim.getApiName()))
-                    .findFirst()
-                    .orElse(firstPhysicalTable);
-            DimensionColumn.addNewDimensionColumn(logicalTable, dim);
-        }
-
-        // All metrics that are available for a particular logical table grain are added
-        for (ApiMetricName metricName : group.getApiMetricNames()) {
-            // Skip metric if not valid for this table grain
-            if (!metricName.isValidFor(granularity)) {
-                continue;
-            }
-            LogicalMetric metric = metrics.get(metricName.getApiName());
-            LogicalMetricColumn.addNewLogicalMetricColumn(logicalTable, metric.getName(), metric);
-        }
-        return logicalTable;
     }

     /**
@@ -282,27 +294,23 @@ protected PhysicalTable buildPhysicalTable(
             Set<FieldName> metricNames,
             DimensionDictionary dimensionDictionary
     ) {
-        // Create the physical table
-        PhysicalTable physicalTable = new PhysicalTable(
+        LinkedHashSet<Column> columns = Stream.concat(
+                // Load the dimension columns
+                definition.getDimensions().stream()
+                        .map(DimensionConfig::getApiName)
+                        .map(dimensionDictionary::findByApiName)
+                        .map(DimensionColumn::new),
+                // And the metric columns
+                metricNames.stream()
+                        .map(FieldName::asName)
+                        .map(MetricColumn::new)
+        ).collect(Collectors.toCollection(LinkedHashSet::new));
+
+        return new ConcretePhysicalTable(
                 definition.getName().asName(),
                 definition.getGrain(),
+                columns,
                 definition.getLogicalToPhysicalNames()
         );
-
-        // Load the dimension columns
-        for (DimensionConfig dimensionConfig : definition.getDimensions()) {
-            String apiName = dimensionConfig.getApiName();
-            Dimension dimension = dimensionDictionary.findByApiName(apiName);
-            DimensionColumn.addNewDimensionColumn(physicalTable, dimension);
-        }
-
-        // Load the metric columns
-        for (FieldName druidMetricName : metricNames) {
-            MetricColumn.addNewMetricColumn(physicalTable, druidMetricName.asName());
-        }
-
-        // Build initial cache
-        physicalTable.commit();
-        return physicalTable;
     }
 }
diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/table/PhysicalTableDefinition.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/table/PhysicalTableDefinition.java
index b8a9d5c2ae..e53af0a830 100644
--- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/table/PhysicalTableDefinition.java
+++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/config/table/PhysicalTableDefinition.java
@@ -7,21 +7,22 @@
 import
com.yahoo.bard.webservice.data.time.ZonedTimeGrain; import com.yahoo.bard.webservice.data.time.ZonelessTimeGrain; +import com.google.common.collect.ImmutableSet; + import org.joda.time.DateTimeZone; import java.util.Collections; import java.util.Map; -import java.util.Set; import java.util.stream.Collectors; /** * Holds the fields needed to define a Physical Table. */ public class PhysicalTableDefinition { - final TableName name; - final ZonedTimeGrain grain; - final Set dimensions; - final Map logicalToPhysicalNames; + private final TableName name; + private final ZonedTimeGrain grain; + private final ImmutableSet dimensions; + private final Map logicalToPhysicalNames; /** * Define a physical table using a zoned time grain. @@ -33,14 +34,13 @@ public class PhysicalTableDefinition { public PhysicalTableDefinition( TableName name, ZonedTimeGrain grain, - Set dimensions + Iterable dimensions ) { this.name = name; this.grain = grain; - this.dimensions = Collections.unmodifiableSet(dimensions); + this.dimensions = ImmutableSet.copyOf(dimensions); this.logicalToPhysicalNames = Collections.unmodifiableMap( - dimensions - .stream() + this.dimensions.stream() .collect( Collectors.toMap( DimensionConfig::getApiName, @@ -67,7 +67,11 @@ public PhysicalTableDefinition( * @deprecated The time zone of a physical table should be set explicitly rather than rely on defaulting to UTC */ @Deprecated - public PhysicalTableDefinition(TableName name, ZonelessTimeGrain grain, Set dimensions) { + public PhysicalTableDefinition( + TableName name, + ZonelessTimeGrain grain, + Iterable dimensions + ) { this(name, grain.buildZonedTimeGrain(DateTimeZone.UTC), dimensions); } @@ -79,7 +83,7 @@ public ZonedTimeGrain getGrain() { return grain; } - public Set getDimensions() { + public ImmutableSet getDimensions() { return dimensions; } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/dimension/DimensionColumn.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/dimension/DimensionColumn.java index 5818d63f0b..8ddf4e1129 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/dimension/DimensionColumn.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/dimension/DimensionColumn.java @@ -1,10 +1,8 @@ -// Copyright 2016 Yahoo Inc. +// Copyright 2017 Yahoo Inc. // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.data.dimension; import com.yahoo.bard.webservice.table.Column; -import com.yahoo.bard.webservice.table.PhysicalTable; -import com.yahoo.bard.webservice.table.Schema; import javax.validation.constraints.NotNull; @@ -17,12 +15,13 @@ public class DimensionColumn extends Column { /** * Constructor. - * Uses the given dimension's api name for column name. + * Uses the given dimension's name for column name. * * @param dimension The column's corresponding dimension */ - protected DimensionColumn(@NotNull Dimension dimension) { - this(dimension, dimension.getApiName()); + public DimensionColumn(@NotNull Dimension dimension) { + super(dimension.getApiName()); + this.dimension = dimension; } /** @@ -41,38 +40,6 @@ public Dimension getDimension() { return this.dimension; } - /** - * Method to create a DimensionColumn tied to a schema. 
- * - * @param schema The schema to which the column needs to be added - * @param d The dimension the column encapsulates - * - * @return The dimension column created - */ - public static DimensionColumn addNewDimensionColumn(Schema schema, Dimension d) { - DimensionColumn col = new DimensionColumn(d); - schema.addColumn(col); - return col; - } - - /** - * Method to create a DimensionColumn tied to a schema. - * - * @param schema The schema to which the column needs to be added - * @param d The dimension the column encapsulates - * @param physicalTable Physical table associated with dimension column - * - * @return The dimension column created - * - * @deprecated in favor of addNewDimensionColumn(Schema, Dimension) which stores api name instead of physical name - */ - @Deprecated - public static DimensionColumn addNewDimensionColumn(Schema schema, Dimension d, PhysicalTable physicalTable) { - DimensionColumn col = new DimensionColumn(d, physicalTable.getPhysicalColumnName(d.getApiName())); - schema.addColumn(col); - return col; - } - @Override public String toString() { return "{dim:'" + getName() + "'}"; diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/LogicalMetricColumn.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/LogicalMetricColumn.java index f052a8fc0a..7c206be0f8 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/LogicalMetricColumn.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/LogicalMetricColumn.java @@ -2,8 +2,6 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.data.metric; -import com.yahoo.bard.webservice.table.Schema; - /** * LogicalMetricColumn. */ @@ -23,27 +21,21 @@ public LogicalMetricColumn(String name, LogicalMetric metric) { } /** - * Getter for a logical metric. + * Constructor. * - * @return logical metric + * @param metric The logical metric */ - public LogicalMetric getLogicalMetric() { - return this.metric; + public LogicalMetricColumn(LogicalMetric metric) { + this(metric.getName(), metric); } /** - * Method to create a LogicalMetricColumn tied to a schema. - * - * @param schema The associated schema - * @param name The metric name - * @param metric The logical metric + * Getter for a logical metric. * - * @return DimensionColumn created + * @return logical metric */ - public static LogicalMetricColumn addNewLogicalMetricColumn(Schema schema, String name, LogicalMetric metric) { - LogicalMetricColumn col = new LogicalMetricColumn(name, metric); - schema.addColumn(col); - return col; + public LogicalMetric getLogicalMetric() { + return this.metric; } @Override diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/MetricColumn.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/MetricColumn.java index 98d3c6eeaa..533dcf9792 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/MetricColumn.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/MetricColumn.java @@ -1,37 +1,23 @@ -// Copyright 2016 Yahoo Inc. +// Copyright 2017 Yahoo Inc. // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.data.metric; import com.yahoo.bard.webservice.table.Column; -import com.yahoo.bard.webservice.table.Schema; /** * Metric. */ public class MetricColumn extends Column { + /** * Constructor. 
* * @param name The column name */ - protected MetricColumn(String name) { + public MetricColumn(String name) { super(name); } - /** - * Method to create a MetricColumn tied to a schema. - * - * @param schema - The schema for this column to be added - * @param name - The name for this metric column - * - * @return the new column - */ - public static MetricColumn addNewMetricColumn(Schema schema, String name) { - MetricColumn col = new MetricColumn(name); - schema.addColumn(col); - return col; - } - @Override public String toString() { return "{metric:'" + getName() + "'}"; diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/DateTimeSortMapper.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/DateTimeSortMapper.java index f8aaf21daa..762d22a22c 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/DateTimeSortMapper.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/DateTimeSortMapper.java @@ -4,9 +4,9 @@ import com.yahoo.bard.webservice.data.Result; import com.yahoo.bard.webservice.data.ResultSet; +import com.yahoo.bard.webservice.data.ResultSetSchema; import com.yahoo.bard.webservice.druid.model.orderby.SortDirection; import com.yahoo.bard.webservice.logging.RequestLog; -import com.yahoo.bard.webservice.table.Schema; import org.joda.time.DateTime; @@ -59,11 +59,11 @@ public ResultSet map(ResultSet resultSet) { Comparator.reverseOrder()); return new ResultSet( + resultSet.getSchema(), dateTimeList.stream() .map(bucketizedResultsMap::get) .flatMap(List::stream) - .collect(Collectors.toList()), - resultSet.getSchema() + .collect(Collectors.toList()) ); } finally { RequestLog.stopTiming("sortResultSet"); @@ -71,14 +71,14 @@ public ResultSet map(ResultSet resultSet) { } @Override - protected Result map(Result result, Schema schema) { + protected Result map(Result result, ResultSetSchema schema) { //Not needed, because this mapper overrides map(ResultSet). So it is just a no-op. return result; } @Override - protected Schema map(Schema schema) { + protected ResultSetSchema map(ResultSetSchema schema) { //Because this method is not necessary, it just returns the schema unchanged. return schema; } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/NoOpResultSetMapper.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/NoOpResultSetMapper.java index e661e21388..6ea17af3eb 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/NoOpResultSetMapper.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/NoOpResultSetMapper.java @@ -4,7 +4,7 @@ import com.yahoo.bard.webservice.data.Result; import com.yahoo.bard.webservice.data.ResultSet; -import com.yahoo.bard.webservice.table.Schema; +import com.yahoo.bard.webservice.data.ResultSetSchema; /** * NoOp Result set mapper. 
@@ -16,12 +16,12 @@ public ResultSet map(ResultSet resultSet) { } @Override - protected Result map(Result result, Schema schema) { + protected Result map(Result result, ResultSetSchema schema) { return result; } @Override - protected Schema map(Schema schema) { + protected ResultSetSchema map(ResultSetSchema schema) { return schema; } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/PaginationMapper.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/PaginationMapper.java index 072ba04715..9a84dfae6a 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/PaginationMapper.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/PaginationMapper.java @@ -4,7 +4,7 @@ import com.yahoo.bard.webservice.data.Result; import com.yahoo.bard.webservice.data.ResultSet; -import com.yahoo.bard.webservice.table.Schema; +import com.yahoo.bard.webservice.data.ResultSetSchema; import com.yahoo.bard.webservice.util.AllPagesPagination; import com.yahoo.bard.webservice.util.Pagination; import com.yahoo.bard.webservice.web.AbstractResponse; @@ -54,18 +54,18 @@ public ResultSet map(ResultSet resultSet) { Pagination pages = new AllPagesPagination<>(resultSet, paginationParameters); AbstractResponse.addLinks(pages, uriBuilder, responseProcessor); //uses map for additional flexibility and robustness, even though it is currently a no-op. - return new ResultSet(pages.getPageOfData(), map(resultSet.getSchema())); + return new ResultSet(map(resultSet.getSchema()), pages.getPageOfData()); } @Override - protected Result map(Result result, Schema schema) { + protected Result map(Result result, ResultSetSchema schema) { //Not needed, because this mapper overrides map(ResultSet). So it is just a no-op. return result; } @Override - protected Schema map(Schema schema) { + protected ResultSetSchema map(ResultSetSchema schema) { //Because this method is not necessary, it just returns the schema unchanged. 
return schema; } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/PartialDataResultSetMapper.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/PartialDataResultSetMapper.java index 9f0c8e793c..a5e8a846fb 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/PartialDataResultSetMapper.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/PartialDataResultSetMapper.java @@ -3,10 +3,10 @@ package com.yahoo.bard.webservice.data.metric.mappers; import com.yahoo.bard.webservice.data.Result; +import com.yahoo.bard.webservice.data.ResultSetSchema; import com.yahoo.bard.webservice.data.time.TimeGrain; import com.yahoo.bard.webservice.druid.model.query.AllGranularity; import com.yahoo.bard.webservice.druid.model.query.Granularity; -import com.yahoo.bard.webservice.table.Schema; import com.yahoo.bard.webservice.util.SimplifiedIntervalList; import org.joda.time.Interval; @@ -49,7 +49,7 @@ public PartialDataResultSetMapper( * @return Null if the bucket this result falls in is missing but not volatile */ @Override - public Result map(Result result, Schema schema) { + public Result map(Result result, ResultSetSchema schema) { Granularity grain = schema.getGranularity(); if (grain.equals(AllGranularity.INSTANCE)) { @@ -67,7 +67,7 @@ public Result map(Result result, Schema schema) { } @Override - protected Schema map(Schema schema) { + protected ResultSetSchema map(ResultSetSchema schema) { return schema; } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/ResultSetMapper.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/ResultSetMapper.java index c568f188a0..374c28282b 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/ResultSetMapper.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/ResultSetMapper.java @@ -2,9 +2,9 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.data.metric.mappers; +import com.yahoo.bard.webservice.data.ResultSetSchema; import com.yahoo.bard.webservice.data.Result; import com.yahoo.bard.webservice.data.ResultSet; -import com.yahoo.bard.webservice.table.Schema; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -38,8 +38,8 @@ public ResultSet map(ResultSet resultSet) { } } - Schema newSchema = map(resultSet.getSchema()); - ResultSet newResultSet = new ResultSet(newResults, newSchema); + ResultSetSchema newSchema = map(resultSet.getSchema()); + ResultSet newResultSet = new ResultSet(newSchema, newResults); LOG.trace("Mapped resultSet: {} to new resultSet {}", resultSet, newResultSet); return newResultSet; @@ -53,7 +53,7 @@ public ResultSet map(ResultSet resultSet) { * * @return The result row, a modified copy, or null (if row is eliminated) */ - abstract protected Result map(Result result, Schema schema); + abstract protected Result map(Result result, ResultSetSchema schema); /** * Returns a transformed schema. 
@@ -62,7 +62,7 @@ public ResultSet map(ResultSet resultSet) {
      *
      * @return The same schema or a new (altered) one
      */
-    abstract protected Schema map(Schema schema);
+    abstract protected ResultSetSchema map(ResultSetSchema schema);

     /**
      * Since a ResultSetMapper has no state associated with it, we consider two ResultSetMappers to be the same iff
diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/RowNumMapper.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/RowNumMapper.java
index 7c17a419f4..4fc771a5de 100644
--- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/RowNumMapper.java
+++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/RowNumMapper.java
@@ -4,8 +4,8 @@
 import com.yahoo.bard.webservice.data.Result;
 import com.yahoo.bard.webservice.data.ResultSet;
+import com.yahoo.bard.webservice.data.ResultSetSchema;
 import com.yahoo.bard.webservice.data.metric.MetricColumn;
-import com.yahoo.bard.webservice.table.Schema;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -27,35 +27,29 @@ public class RowNumMapper extends ResultSetMapper {

     @Override
     public ResultSet map(ResultSet resultSet) {
-        Schema modifiedSchema = map(resultSet.getSchema());
-        MetricColumn metricColumn = modifiedSchema.getColumn(ROW_NUM_COLUMN_NAME, MetricColumn.class);
-
-        List<Result> newResults = new ArrayList<>();
-        Result newResult;
+        ResultSetSchema schema = map(resultSet.getSchema());
+        MetricColumn column = schema.getColumn(ROW_NUM_COLUMN_NAME, MetricColumn.class).get();

         int resultSetSize = resultSet.size();
+        List<Result> newResults = new ArrayList<>(resultSetSize);
         for (int i = 0; i < resultSetSize; i++) {
-            newResult = rowNumMap(resultSet.get(i), metricColumn, i);
-            if (newResult != null) {
-                newResults.add(newResult);
-            }
+            newResults.add(rowNumMap(resultSet.get(i), column, i));
         }

-        ResultSet newResultSet = new ResultSet(newResults, modifiedSchema);
+        ResultSet newResultSet = new ResultSet(schema, newResults);
         LOG.trace("Mapped resultSet: {} to new resultSet {}", resultSet, newResultSet);
-
         return newResultSet;
     }

     @Override
-    protected Result map(Result result, Schema schema) {
-        return result;
+    protected Result map(Result result, ResultSetSchema schema) {
+        // Row-level mapping is not used by this mapper; it overrides map(ResultSet) directly.
+        throw new UnsupportedOperationException("This code should never be reached.");
     }

     @Override
-    protected Schema map(Schema schema) {
-        MetricColumn.addNewMetricColumn(schema, ROW_NUM_COLUMN_NAME);
-        return schema;
+    protected ResultSetSchema map(ResultSetSchema schema) {
+        return schema.withAddColumn(new MetricColumn(ROW_NUM_COLUMN_NAME));
     }

     /**
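Reviewer sketch (not part of the diff): the schema/row split above is the pattern this PR pushes mappers toward; schemas are immutable, so a column is added via `withAddColumn` rather than mutation. This assumes the `ResultSetMapper` hooks shown in the diff, and the class and column names are hypothetical:

```java
import com.yahoo.bard.webservice.data.Result;
import com.yahoo.bard.webservice.data.ResultSetSchema;
import com.yahoo.bard.webservice.data.metric.MetricColumn;
import com.yahoo.bard.webservice.data.metric.mappers.ResultSetMapper;

public class MarkerColumnMapper extends ResultSetMapper {

    private static final String MARKER_COLUMN_NAME = "marker";

    @Override
    protected ResultSetSchema map(ResultSetSchema schema) {
        // Schemas are immutable: transform by copying instead of calling addColumn().
        return schema.withAddColumn(new MetricColumn(MARKER_COLUMN_NAME));
    }

    @Override
    protected Result map(Result result, ResultSetSchema schema) {
        // Row values are left untouched in this sketch; a real mapper would
        // build a new Result carrying a value for the added column.
        return result;
    }
}
```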
package com.yahoo.bard.webservice.data.metric.mappers; +import com.yahoo.bard.webservice.data.metric.MetricColumn; +import com.yahoo.bard.webservice.data.ResultSetSchema; import com.yahoo.bard.webservice.data.Result; import com.yahoo.bard.webservice.data.config.names.FieldName; -import com.yahoo.bard.webservice.data.metric.MetricColumn; -import com.yahoo.bard.webservice.table.Schema; import java.math.BigDecimal; import java.math.RoundingMode; @@ -38,12 +38,15 @@ public SketchRoundUpMapper(String columnName) { } @Override - protected Result map(Result result, Schema schema) { + protected Result map(Result result, ResultSetSchema schema) { if (columnName == null) { throw new IllegalStateException("Cannot map results without a column name"); } - MetricColumn metricColumn = (MetricColumn) schema.getColumn(columnName); + MetricColumn metricColumn = schema.getColumn(columnName, MetricColumn.class).orElseThrow( + () -> new IllegalStateException("Unexpected missing column: " + columnName) + ); + BigDecimal value = result.getMetricValueAsNumber(metricColumn); if (value == null) { return result; @@ -53,7 +56,7 @@ protected Result map(Result result, Schema schema) { } @Override - protected Schema map(Schema schema) { + protected ResultSetSchema map(ResultSetSchema schema) { return schema; } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/TopNResultSetMapper.java b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/TopNResultSetMapper.java index 751d2fe590..8c317dc3df 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/TopNResultSetMapper.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/data/metric/mappers/TopNResultSetMapper.java @@ -2,9 +2,9 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.data.metric.mappers; +import com.yahoo.bard.webservice.data.ResultSetSchema; import com.yahoo.bard.webservice.data.Result; import com.yahoo.bard.webservice.data.ResultSet; -import com.yahoo.bard.webservice.table.Schema; import org.joda.time.DateTime; @@ -32,16 +32,16 @@ public ResultSet map(ResultSet resultSet) { // TODO: Use only native stream operations in RxJava: GroupByTime -> Sort -> Take N -> Concat streams by time TopNAccumulator acc = new TopNAccumulator(); resultSet.stream().forEachOrdered(acc); - return new ResultSet(acc.data, resultSet.getSchema()); + return new ResultSet(resultSet.getSchema(), acc.data); } @Override - protected Result map(Result result, Schema schema) { + protected Result map(Result result, ResultSetSchema schema) { return result; } @Override - protected Schema map(Schema schema) { + protected ResultSetSchema map(ResultSetSchema schema) { return schema; } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/datasource/DataSource.java b/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/datasource/DataSource.java index b4a5a9ebdc..64d0871a2f 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/datasource/DataSource.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/datasource/DataSource.java @@ -2,6 +2,7 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. 
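SketchRoundUpMapper above adopts the Optional-returning column lookup. A hedged sketch of the resulting call pattern (the helper name is hypothetical; only Schema.getColumn(String, Class) as defined later in this change is assumed):

import com.yahoo.bard.webservice.data.ResultSetSchema;
import com.yahoo.bard.webservice.data.metric.MetricColumn;

// Hypothetical helper: resolve a metric column by name, failing loudly when
// the schema lacks it, mirroring the orElseThrow usage above.
final class SchemaLookups {
    static MetricColumn requireMetricColumn(ResultSetSchema schema, String name) {
        return schema.getColumn(name, MetricColumn.class)
                .orElseThrow(() -> new IllegalStateException("Unexpected missing column: " + name));
    }
}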
package com.yahoo.bard.webservice.druid.model.datasource; +import com.yahoo.bard.webservice.data.config.names.TableName; import com.yahoo.bard.webservice.druid.model.query.DruidQuery; import com.yahoo.bard.webservice.table.PhysicalTable; @@ -10,6 +11,7 @@ import java.util.Collections; import java.util.Set; +import java.util.function.Function; import java.util.stream.Collectors; /** @@ -51,11 +53,13 @@ public Set getPhysicalTables() { */ @JsonInclude(JsonInclude.Include.NON_NULL) public Set getNames() { - return Collections.unmodifiableSet( - getPhysicalTables() - .stream() - .map(PhysicalTable::getName) - .collect(Collectors.toSet()) + return Collections.unmodifiableSet(getPhysicalTables() + .stream() + .map(PhysicalTable::getAvailability) + .map(it -> it.getDataSourceNames().stream()) + .flatMap(Function.identity()) + .map(TableName::asName) + .collect(Collectors.toSet()) ); } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/datasource/TableDataSource.java b/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/datasource/TableDataSource.java index 8d9686d1e8..c135aa35fb 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/datasource/TableDataSource.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/datasource/TableDataSource.java @@ -3,7 +3,7 @@ package com.yahoo.bard.webservice.druid.model.datasource; import com.yahoo.bard.webservice.druid.model.query.DruidQuery; -import com.yahoo.bard.webservice.table.PhysicalTable; +import com.yahoo.bard.webservice.table.ConcretePhysicalTable; import com.fasterxml.jackson.annotation.JsonIgnore; @@ -22,7 +22,7 @@ public class TableDataSource extends DataSource { * * @param physicalTable The physical table of the data source */ - public TableDataSource(PhysicalTable physicalTable) { + public TableDataSource(ConcretePhysicalTable physicalTable) { super(DefaultDataSourceType.TABLE, Collections.singleton(physicalTable)); this.name = physicalTable.getFactTableName(); diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/dimension/extractionfunction/MapLookup.java b/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/dimension/extractionfunction/MapLookup.java index 7dde943f20..ff73679692 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/dimension/extractionfunction/MapLookup.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/dimension/extractionfunction/MapLookup.java @@ -3,8 +3,8 @@ package com.yahoo.bard.webservice.druid.model.dimension.extractionfunction; import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.collect.ImmutableMap; -import java.util.Collections; import java.util.Map; import java.util.Objects; @@ -21,7 +21,7 @@ public class MapLookup extends Lookup { */ public MapLookup(Map mapping) { super("map"); - this.mapping = Collections.unmodifiableMap(mapping); + this.mapping = ImmutableMap.copyOf(mapping); } @JsonProperty(value = "map") diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/query/QueryContext.java b/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/query/QueryContext.java index def0acf6ef..5082cc5055 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/query/QueryContext.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/druid/model/query/QueryContext.java @@ -12,9 +12,9 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; +import 
com.google.common.collect.ImmutableMap; import java.util.AbstractMap.SimpleImmutableEntry; -import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -56,7 +56,7 @@ String getName() { // ACCEPTING_FIELDS holds the list of legitimate parameters to context and their expected types @SuppressWarnings("rawtypes") - public static final Map ACCEPTING_FIELDS = Collections.unmodifiableMap(Stream.of( + public static final Map ACCEPTING_FIELDS = ImmutableMap.copyOf(Stream.of( new SimpleImmutableEntry<>(TIMEOUT, Number.class), new SimpleImmutableEntry<>(PRIORITY, Number.class), new SimpleImmutableEntry<>(QUERY_ID, String.class), @@ -88,7 +88,7 @@ public QueryContext(Map contextMap, AtomicLong totalQueries) { } // Store an immutable copy - this.contextMap = Collections.unmodifiableMap(contextMap); + this.contextMap = ImmutableMap.copyOf(contextMap); this.totalQueries = totalQueries == null ? new AtomicLong(0) : totalQueries; this.sequenceNumber = this.totalQueries.incrementAndGet(); } @@ -115,7 +115,7 @@ protected QueryContext(QueryContext copy, Map contextMap) { } // Store an immutable copy - this.contextMap = Collections.unmodifiableMap(contextMap); + this.contextMap = ImmutableMap.copyOf(contextMap); this.totalQueries = copy.totalQueries; this.sequenceNumber = copy.sequenceNumber; } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/druid/serializers/LookupDimensionToDimensionSpec.java b/fili-core/src/main/java/com/yahoo/bard/webservice/druid/serializers/LookupDimensionToDimensionSpec.java index 7f32f1355a..0f866f786a 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/druid/serializers/LookupDimensionToDimensionSpec.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/druid/serializers/LookupDimensionToDimensionSpec.java @@ -6,8 +6,8 @@ import com.yahoo.bard.webservice.data.dimension.impl.KeyValueStoreDimension; import com.yahoo.bard.webservice.data.dimension.impl.LookupDimension; import com.yahoo.bard.webservice.data.dimension.impl.RegisteredLookupDimension; -import com.yahoo.bard.webservice.druid.model.dimension.extractionfunction.ExtractionFunction; import com.yahoo.bard.webservice.druid.model.dimension.ExtractionDimensionSpec; +import com.yahoo.bard.webservice.druid.model.dimension.extractionfunction.ExtractionFunction; import com.yahoo.bard.webservice.druid.model.util.ModelUtil; import com.yahoo.bard.webservice.web.ErrorMessageFormat; diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/logging/blocks/DataRequest.java b/fili-core/src/main/java/com/yahoo/bard/webservice/logging/blocks/DataRequest.java index fc5a3b4507..483500c557 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/logging/blocks/DataRequest.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/logging/blocks/DataRequest.java @@ -5,7 +5,7 @@ import com.yahoo.bard.webservice.data.dimension.Dimension; import com.yahoo.bard.webservice.data.metric.LogicalMetric; import com.yahoo.bard.webservice.logging.LogInfo; -import com.yahoo.bard.webservice.table.Table; +import com.yahoo.bard.webservice.table.LogicalTable; import com.yahoo.bard.webservice.web.ApiFilter; import com.fasterxml.jackson.annotation.JsonAutoDetect; @@ -52,7 +52,7 @@ public class DataRequest implements LogInfo { * @param format In which format the request asked for a response */ public DataRequest( - Table table, + LogicalTable table, Set intervals, Collection> filterSuperSet, Set metricSet, diff --git 
a/fili-core/src/main/java/com/yahoo/bard/webservice/metadata/DataSourceMetadata.java b/fili-core/src/main/java/com/yahoo/bard/webservice/metadata/DataSourceMetadata.java index c599f828d9..86555600c7 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/metadata/DataSourceMetadata.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/metadata/DataSourceMetadata.java @@ -10,6 +10,8 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.Range; import com.google.common.collect.RangeSet; import com.google.common.collect.TreeRangeSet; @@ -19,7 +21,6 @@ import io.druid.timeline.DataSegment; -import java.util.Collections; import java.util.EnumMap; import java.util.HashMap; import java.util.LinkedHashSet; @@ -51,8 +52,8 @@ public DataSourceMetadata( @JsonProperty("segments") List segments ) { this.name = name; - this.properties = Collections.unmodifiableMap(properties); - this.segments = Collections.unmodifiableList(segments); + this.properties = ImmutableMap.copyOf(properties); + this.segments = ImmutableList.copyOf(segments); } /** diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/metadata/DataSourceMetadataLoader.java b/fili-core/src/main/java/com/yahoo/bard/webservice/metadata/DataSourceMetadataLoader.java index 240f118368..6382d0ed62 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/metadata/DataSourceMetadataLoader.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/metadata/DataSourceMetadataLoader.java @@ -12,7 +12,7 @@ import com.yahoo.bard.webservice.druid.client.FailureCallback; import com.yahoo.bard.webservice.druid.client.HttpErrorCallback; import com.yahoo.bard.webservice.druid.client.SuccessCallback; -import com.yahoo.bard.webservice.table.PhysicalTable; +import com.yahoo.bard.webservice.table.ConcretePhysicalTable; import com.yahoo.bard.webservice.table.PhysicalTableDictionary; import com.fasterxml.jackson.databind.JsonNode; @@ -99,6 +99,8 @@ public DataSourceMetadataLoader( @Override public void run() { physicalTableDictionary.values().stream() + .filter(table -> table instanceof ConcretePhysicalTable) + .map(table -> (ConcretePhysicalTable) table) .peek(table -> LOG.trace("Querying metadata for datasource: {}", table)) .forEach(this::queryDataSourceMetadata); lastRunTimestamp.set(DateTime.now()); @@ -109,7 +111,7 @@ public void run() { * * @param table The physical table to be updated. */ - protected void queryDataSourceMetadata(PhysicalTable table) { + protected void queryDataSourceMetadata(ConcretePhysicalTable table) { String resourcePath = String.format(DATASOURCE_METADATA_QUERY_FORMAT, table.getFactTableName()); // Success callback will update datasource metadata on success @@ -174,7 +176,7 @@ protected void queryDataSourceMetadata(PhysicalTable table) { * * @return The callback itself. */ - protected final SuccessCallback buildDataSourceMetadataSuccessCallback(PhysicalTable table) { + protected final SuccessCallback buildDataSourceMetadataSuccessCallback(ConcretePhysicalTable table) { return new SuccessCallback() { @Override public void invoke(JsonNode rootNode) { @@ -207,7 +209,7 @@ public DateTime getLastRunTimestamp() { * * @return A newly created http error callback object. 
*/ - protected HttpErrorCallback getErrorCallback(PhysicalTable table) { + protected HttpErrorCallback getErrorCallback(ConcretePhysicalTable table) { return new TaskHttpErrorCallback(table); } @@ -215,14 +217,14 @@ protected HttpErrorCallback getErrorCallback(PhysicalTable table) { * Defines the callback for http errors. */ private final class TaskHttpErrorCallback extends Loader.TaskHttpErrorCallback { - private final PhysicalTable table; + private final ConcretePhysicalTable table; /** * Constructor. * * @param table PhysicalTable that this error callback is tied to */ - TaskHttpErrorCallback(PhysicalTable table) { + TaskHttpErrorCallback(ConcretePhysicalTable table) { this.table = table; } @@ -242,7 +244,7 @@ public void invoke(int statusCode, String reason, String responseBody) { LOG.warn(msg); metadataService.update( table, - new DataSourceMetadata(table.getFactTableName(), Collections.emptyMap(), Collections.emptyList()) + new DataSourceMetadata(table.getName(), Collections.emptyMap(), Collections.emptyList()) ); } else { LOG.error(msg); diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/metadata/SegmentMetadataLoader.java b/fili-core/src/main/java/com/yahoo/bard/webservice/metadata/SegmentMetadataLoader.java index 798fb29fbc..79113b1ced 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/metadata/SegmentMetadataLoader.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/metadata/SegmentMetadataLoader.java @@ -10,7 +10,7 @@ import com.yahoo.bard.webservice.druid.client.FailureCallback; import com.yahoo.bard.webservice.druid.client.HttpErrorCallback; import com.yahoo.bard.webservice.druid.client.SuccessCallback; -import com.yahoo.bard.webservice.table.PhysicalTable; +import com.yahoo.bard.webservice.table.ConcretePhysicalTable; import com.yahoo.bard.webservice.table.PhysicalTableDictionary; import com.fasterxml.jackson.core.type.TypeReference; @@ -98,6 +98,8 @@ public SegmentMetadataLoader( public void run() { physicalTableDictionary.values().stream() .peek(table -> LOG.trace("Querying segment metadata for table: {}", table)) + .filter(table -> table instanceof ConcretePhysicalTable) + .map(table -> (ConcretePhysicalTable) table) .forEach(this::querySegmentMetadata); lastRunTimestamp.set(DateTime.now()); } @@ -107,7 +109,7 @@ public void run() { * * @param table The physical table to be updated. */ - protected void querySegmentMetadata(PhysicalTable table) { + protected void querySegmentMetadata(ConcretePhysicalTable table) { String resourcePath = String.format(SEGMENT_METADATA_QUERY_FORMAT, table.getFactTableName()); // Success callback will update segment metadata on success @@ -143,7 +145,7 @@ protected void querySegmentMetadata(PhysicalTable table) { * * @return The callback itself. */ - protected final SuccessCallback buildSegmentMetadataSuccessCallback(PhysicalTable table) { + protected final SuccessCallback buildSegmentMetadataSuccessCallback(ConcretePhysicalTable table) { return new SuccessCallback() { @Override public void invoke(JsonNode rootNode) { diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/BasePhysicalTable.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/BasePhysicalTable.java new file mode 100644 index 0000000000..1ff1c2a255 --- /dev/null +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/BasePhysicalTable.java @@ -0,0 +1,107 @@ +// Copyright 2017 Yahoo Inc. +// Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. 
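Both metadata loaders above narrow the physical table dictionary with the same filter-then-cast idiom; a sketch of that idiom factored out (the helper name is hypothetical):

import com.yahoo.bard.webservice.table.ConcretePhysicalTable;
import com.yahoo.bard.webservice.table.PhysicalTable;

import java.util.Collection;
import java.util.stream.Stream;

// Hypothetical helper: keep only tables backed by a single Druid fact table,
// as DataSourceMetadataLoader and SegmentMetadataLoader now do.
final class ConcreteTables {
    static Stream<ConcretePhysicalTable> concreteTables(Collection<? extends PhysicalTable> tables) {
        return tables.stream()
                .filter(table -> table instanceof ConcretePhysicalTable)
                .map(table -> (ConcretePhysicalTable) table);
    }
}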
+package com.yahoo.bard.webservice.table; + +import com.yahoo.bard.webservice.data.dimension.DimensionDictionary; +import com.yahoo.bard.webservice.data.time.ZonedTimeGrain; +import com.yahoo.bard.webservice.metadata.SegmentMetadata; +import com.yahoo.bard.webservice.table.availability.Availability; +import com.yahoo.bard.webservice.table.availability.ImmutableAvailability; +import com.yahoo.bard.webservice.util.IntervalUtils; + +import org.joda.time.DateTime; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Map; + +import javax.validation.constraints.NotNull; + +/** + * Base Physical Table implements common PhysicalTable capabilities. + */ +public abstract class BasePhysicalTable implements PhysicalTable { + private static final Logger LOG = LoggerFactory.getLogger(BasePhysicalTable.class); + + private final String name; + private final PhysicalTableSchema schema; + private volatile Availability availability; + + /** + * Create a physical table. + * + * @param name Fili name of the physical table + * @param timeGrain time grain of the table + * @param columns The columns for this physical table + * @param logicalToPhysicalColumnNames Mappings from logical to physical names + * @param availability The availability of columns in this table + */ + public BasePhysicalTable( + @NotNull String name, + @NotNull ZonedTimeGrain timeGrain, + @NotNull Iterable columns, + @NotNull Map logicalToPhysicalColumnNames, + @NotNull Availability availability + ) { + this.name = name; + this.availability = availability; + this.schema = new PhysicalTableSchema(timeGrain, columns, logicalToPhysicalColumnNames); + } + + + @Override + public Availability getAvailability() { + return availability; + } + + + @Override + public DateTime getTableAlignment() { + return schema.getTimeGrain().roundFloor( + IntervalUtils.firstMoment(getAvailability().getAvailableIntervals().values()).orElse(new DateTime()) + ); + } + + @Override + public String getName() { + return name; + } + + @Override + public PhysicalTableSchema getSchema() { + return schema; + } + + @Override + public String getPhysicalColumnName(String logicalName) { + if (!schema.containsLogicalName(logicalName)) { + LOG.warn( + "No mapping found for logical name '{}' to physical name on table '{}'. Will use logical name as " + + "physical name. This is unexpected and should not happen for properly configured " + + "dimensions.", + logicalName, + getName() + ); + } + return schema.getPhysicalColumnName(logicalName); + } + + /** + * Update the working intervals with values from a map. + * + * @param segmentMetadata A map of names of metrics and sets of intervals over which they are valid + * @param dimensionDictionary The dimension dictionary from which to look up dimensions by name + */ + public void resetColumns(SegmentMetadata segmentMetadata, DimensionDictionary dimensionDictionary) { + setAvailability(new ImmutableAvailability( + name, + segmentMetadata.getDimensionIntervals(), + segmentMetadata.getMetricIntervals(), + dimensionDictionary + )); + } + + protected void setAvailability(Availability availability) { + this.availability = availability; + } +} diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/BaseSchema.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/BaseSchema.java new file mode 100644 index 0000000000..f6a6950cb1 --- /dev/null +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/BaseSchema.java @@ -0,0 +1,61 @@ +// Copyright 2017 Yahoo Inc. 
+// Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. +package com.yahoo.bard.webservice.table; + +import com.yahoo.bard.webservice.druid.model.query.Granularity; + +import com.google.common.collect.Sets; + +import java.util.LinkedHashSet; +import java.util.Objects; + +/** + * A parent class for most schema implementations. + */ +public class BaseSchema implements Schema { + + private final LinkedHashSet columns; + private final Granularity granularity; + + /** + * Constructor. + * + * @param granularity The granularity for this schema. + * @param columns The columns for this schema. + */ + protected BaseSchema(Granularity granularity, Iterable columns) { + this.granularity = granularity; + this.columns = Sets.newLinkedHashSet(columns); + } + + @Override + public LinkedHashSet getColumns() { + return columns; + } + + @Override + public Granularity getGranularity() { + return granularity; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof BaseSchema)) { + return false; + } + + BaseSchema that = (BaseSchema) o; + return this.getClass() == o.getClass() + && Objects.equals(columns, that.columns) + && Objects.equals(granularity, that.getGranularity() + ); + } + + @Override + public int hashCode() { + return Objects.hash(granularity, columns); + } +} diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/Column.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/Column.java index c3c93a35cc..b3bcad0558 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/table/Column.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/Column.java @@ -8,6 +8,7 @@ * Column. */ public class Column { + private final String name; /** @@ -25,7 +26,7 @@ public Column(String name) { * @return name */ public String getName() { - return this.name; + return name; } /** diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/ConcretePhysicalTable.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/ConcretePhysicalTable.java new file mode 100644 index 0000000000..f685bc9cbb --- /dev/null +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/ConcretePhysicalTable.java @@ -0,0 +1,71 @@ +// Copyright 2017 Yahoo Inc. +// Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. +package com.yahoo.bard.webservice.table; + +import com.yahoo.bard.webservice.data.time.ZonedTimeGrain; +import com.yahoo.bard.webservice.table.availability.ImmutableAvailability; + +import java.util.Collections; +import java.util.Map; + +import javax.validation.constraints.NotNull; + +/** + * An implementation of Physical table that is backed by a single fact table. + */ +public class ConcretePhysicalTable extends BasePhysicalTable { + + /** + * Create a concrete physical table. + * The availability on this table is initialized to empty intervals. 
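BaseSchema, introduced above, pins ordered columns and granularity at construction time; a minimal subclass sketch (class name hypothetical) shows what PhysicalTableSchema and LogicalTableSchema build on, with the ConcretePhysicalTable constructor parameters continuing below:

import com.yahoo.bard.webservice.druid.model.query.Granularity;
import com.yahoo.bard.webservice.table.BaseSchema;
import com.yahoo.bard.webservice.table.Column;

// Hypothetical schema: BaseSchema already provides ordered columns,
// granularity, equals, and hashCode; subclasses only add their specifics.
public class SimpleSchema extends BaseSchema {
    public SimpleSchema(Granularity granularity, Iterable<Column> columns) {
        super(granularity, columns);
    }
}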
+ * + * @param name Fili name of the physical table + * @param factTableName Name of the associated table in Druid + * @param timeGrain time grain of the table + * @param columns The columns for this table + * @param logicalToPhysicalColumnNames Mappings from logical to physical names + */ + public ConcretePhysicalTable( + @NotNull String name, + @NotNull String factTableName, + @NotNull ZonedTimeGrain timeGrain, + @NotNull Iterable columns, + @NotNull Map logicalToPhysicalColumnNames + ) { + super( + name, + timeGrain, + columns, + logicalToPhysicalColumnNames, + new ImmutableAvailability(factTableName, Collections.emptyMap()) + ); + } + + /** + * Create a concrete physical table. + * The fact table name will be defaulted to the name and the availability initialized to empty intervals. + * + * @param name Fili name of the physical table + * @param timeGrain time grain of the table + * @param columns The columns for this table + * @param logicalToPhysicalColumnNames Mappings from logical to physical names + */ + public ConcretePhysicalTable( + @NotNull String name, + @NotNull ZonedTimeGrain timeGrain, + @NotNull Iterable columns, + @NotNull Map logicalToPhysicalColumnNames + ) { + this(name, name, timeGrain, columns, logicalToPhysicalColumnNames); + } + + public String getFactTableName() { + return getAvailability().getDataSourceNames().stream().findFirst().get().asName(); + } + + @Override + public String toString() { + return super.toString() + " factTableName: " + getAvailability().getDataSourceNames() + " alignment: " + + getTableAlignment(); + } +} diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/LogicalTable.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/LogicalTable.java index f005f6e23f..238fffdf8a 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/table/LogicalTable.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/LogicalTable.java @@ -4,6 +4,7 @@ import com.yahoo.bard.webservice.data.metric.LogicalMetric; import com.yahoo.bard.webservice.data.metric.LogicalMetricColumn; +import com.yahoo.bard.webservice.data.metric.MetricDictionary; import com.yahoo.bard.webservice.druid.model.query.Granularity; import org.joda.time.ReadablePeriod; @@ -18,19 +19,23 @@ /** * A LogicalTable has a grain and a tablegroup of physical tables that satisfy the logical table. */ -public class LogicalTable extends Table implements Comparable { +public class LogicalTable implements Table, Comparable { public static final String DEFAULT_CATEGORY = "General"; public static final ReadablePeriod DEFAULT_RETENTION = Years.ONE; + private String name; private TableGroup tableGroup; - // parameter used by the compare to method - private String comparableParam; + private LogicalTableSchema schema; + private String category; private String longName; private ReadablePeriod retention; private String description; + // parameter used by the compare to method + private String comparableParam; + /** * Constructor *

@@ -39,9 +44,15 @@ public class LogicalTable extends Table implements Comparable { * @param name The logical table name * @param granularity The logical table granularity * @param tableGroup The tablegroup for the logical table + * @param metricDictionary The metric dictionary to bind tableGroup's metrics */ - public LogicalTable(@NotNull String name, @NotNull Granularity granularity, TableGroup tableGroup) { - this(name, DEFAULT_CATEGORY, name, granularity, DEFAULT_RETENTION, name, tableGroup); + public LogicalTable( + @NotNull String name, + @NotNull Granularity granularity, + TableGroup tableGroup, + MetricDictionary metricDictionary + ) { + this(name, DEFAULT_CATEGORY, name, granularity, DEFAULT_RETENTION, name, tableGroup, metricDictionary); } /** @@ -54,6 +65,7 @@ public LogicalTable(@NotNull String name, @NotNull Granularity granularity, Tabl * @param retention The period the data in the logical table is retained for * @param description The description for this logical table * @param tableGroup The tablegroup for the logical table + * @param metricDictionary The metric dictionary to bind tableGroup's metrics */ public LogicalTable( @NotNull String name, @@ -62,33 +74,27 @@ public LogicalTable( @NotNull Granularity granularity, ReadablePeriod retention, String description, - TableGroup tableGroup + TableGroup tableGroup, + MetricDictionary metricDictionary ) { - super(name, granularity); + this.name = name; this.tableGroup = tableGroup; this.category = category; this.longName = longName; this.retention = retention; this.description = description; this.comparableParam = name + granularity.toString(); + + schema = new LogicalTableSchema(tableGroup, granularity, metricDictionary); + } - /** - * Getter for table group. - * - * @return tableGroup - */ public TableGroup getTableGroup() { return this.tableGroup; } - /** - * Getter for logical metrics. - * - * @return set of LogicalMetric - */ public Set getLogicalMetrics() { - return getColumns(LogicalMetricColumn.class).stream() + return schema.getColumns(LogicalMetricColumn.class).stream() .map(LogicalMetricColumn::getLogicalMetric) .collect(Collectors.toCollection(LinkedHashSet::new)); } @@ -113,4 +119,18 @@ public ReadablePeriod getRetention() { public String getDescription() { return description; } + + public Granularity getGranularity() { + return schema.getGranularity(); + } + + @Override + public String getName() { + return name; + } + + @Override + public LogicalTableSchema getSchema() { + return schema; + } } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/LogicalTableSchema.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/LogicalTableSchema.java new file mode 100644 index 0000000000..2a2eb6d8d0 --- /dev/null +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/LogicalTableSchema.java @@ -0,0 +1,54 @@ +// Copyright 2017 Yahoo Inc. +// Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. +package com.yahoo.bard.webservice.table; + +import com.yahoo.bard.webservice.data.config.names.ApiMetricName; +import com.yahoo.bard.webservice.data.dimension.DimensionColumn; +import com.yahoo.bard.webservice.data.metric.LogicalMetricColumn; +import com.yahoo.bard.webservice.data.metric.MetricDictionary; +import com.yahoo.bard.webservice.druid.model.query.Granularity; + +import java.util.LinkedHashSet; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * The schema for a logical table. 
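Constructing a LogicalTable now requires the MetricDictionary that backs this schema; a hedged construction sketch (the factory and its names are hypothetical; the constructor signature is as shown above):

import com.yahoo.bard.webservice.data.metric.MetricDictionary;
import com.yahoo.bard.webservice.druid.model.query.Granularity;
import com.yahoo.bard.webservice.table.LogicalTable;
import com.yahoo.bard.webservice.table.TableGroup;

// Hypothetical factory: the added MetricDictionary argument lets the table
// build its LogicalTableSchema instead of binding columns to itself.
final class LogicalTables {
    static LogicalTable defaultTable(String name, Granularity grain, TableGroup group, MetricDictionary metrics) {
        return new LogicalTable(name, grain, group, metrics);
    }
}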
+ */
+public class LogicalTableSchema extends BaseSchema {
+
+    /**
+     * Constructor.
+     *
+     * @param tableGroup The table group used to initialize this logical table
+     * @param granularity The granularity for this schema
+     * @param metricDictionary The dictionary to resolve metric names from the table group against
+     */
+    public LogicalTableSchema(TableGroup tableGroup, Granularity granularity, MetricDictionary metricDictionary) {
+        super(granularity, buildLogicalColumns(tableGroup, granularity, metricDictionary));
+    }
+
+    /**
+     * Convert the tables in the table group to a set of dimension and metric columns.
+     *
+     * @param tableGroup The table group whose physical tables supply the dimension columns
+     * @param granularity The granularity for this schema
+     * @param metricDictionary The dictionary to build logical metrics from names
+     *
+     * @return The union of all columns from the table group
+     */
+    private static LinkedHashSet<Column> buildLogicalColumns(
+            TableGroup tableGroup,
+            Granularity granularity,
+            MetricDictionary metricDictionary
+    ) {
+        return Stream.concat(
+                tableGroup.getDimensions().stream()
+                        .map(DimensionColumn::new),
+                tableGroup.getApiMetricNames().stream()
+                        .filter(apiMetricName -> apiMetricName.isValidFor(granularity))
+                        .map(ApiMetricName::getApiName)
+                        .map(name -> new LogicalMetricColumn(name, metricDictionary.get(name)))
+        ).collect(Collectors.toCollection(LinkedHashSet::new));
+    }
+}
diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/PhysicalTable.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/PhysicalTable.java
index 262e0ee897..c2fdbda71e 100644
--- a/fili-core/src/main/java/com/yahoo/bard/webservice/table/PhysicalTable.java
+++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/PhysicalTable.java
@@ -1,210 +1,26 @@
-// Copyright 2016 Yahoo Inc.
+// Copyright 2017 Yahoo Inc.
 // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms.
 package com.yahoo.bard.webservice.table;
 
-import com.yahoo.bard.webservice.data.dimension.DimensionColumn;
-import com.yahoo.bard.webservice.data.dimension.DimensionDictionary;
-import com.yahoo.bard.webservice.data.metric.MetricColumn;
 import com.yahoo.bard.webservice.data.time.ZonedTimeGrain;
-import com.yahoo.bard.webservice.metadata.SegmentMetadata;
-import com.yahoo.bard.webservice.util.IntervalUtils;
-import com.yahoo.bard.webservice.util.Utils;
+import com.yahoo.bard.webservice.table.availability.Availability;
 
 import org.joda.time.DateTime;
-import org.joda.time.Interval;
-import org.joda.time.ReadablePeriod;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
-import java.util.Map;
-import java.util.Objects;
 import java.util.Set;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.stream.Collectors;
-
-import javax.validation.constraints.NotNull;
 
 /**
- * Physical Table represents a druid table.
+ * An interface describing the Fili model for a fact data source (e.g. a table of dimensions and metrics).
+ * It may be backed by a single concrete fact data source or by more than one with underlying joins.
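buildLogicalColumns above unions dimension and metric columns with Stream.concat; the same order-preserving idiom in isolation (inputs hypothetical):

import com.yahoo.bard.webservice.table.Column;

import java.util.LinkedHashSet;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

// Hypothetical illustration of the union in LogicalTableSchema.buildLogicalColumns:
// concatenation keeps dimension columns first and drops duplicates while
// preserving encounter order via LinkedHashSet.
final class ColumnUnions {
    static LinkedHashSet<Column> union(List<Column> dimensionColumns, List<Column> metricColumns) {
        return Stream.concat(dimensionColumns.stream(), metricColumns.stream())
                .collect(Collectors.toCollection(LinkedHashSet::new));
    }
}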
*/ -public class PhysicalTable extends Table { - private static final Logger LOG = LoggerFactory.getLogger(PhysicalTable.class); - - private final AtomicReference>> availableIntervalsRef; - private Map> workingIntervals; - private final Object mutex = new Object(); - private final Map logicalToPhysicalColumnNames; - private final Map> physicalToLogicalColumnNames; - private final String factTableName; - - /** - * Create a physical table. - * - * @param name Fili name of the physical table - * @param factTableName Name of the associated table in Druid - * @param timeGrain time grain of the table - * @param logicalToPhysicalColumnNames Mappings from logical to physical names - */ - public PhysicalTable( - @NotNull String name, - @NotNull String factTableName, - @NotNull ZonedTimeGrain timeGrain, - @NotNull Map logicalToPhysicalColumnNames - ) { - super(name, timeGrain); - this.factTableName = factTableName; - this.availableIntervalsRef = new AtomicReference<>(); - availableIntervalsRef.set(new LinkedHashMap<>()); - this.workingIntervals = Collections.synchronizedMap(new LinkedHashMap<>()); - this.logicalToPhysicalColumnNames = Collections.unmodifiableMap(logicalToPhysicalColumnNames); - this.physicalToLogicalColumnNames = Collections.unmodifiableMap( - this.logicalToPhysicalColumnNames.entrySet().stream().collect( - Collectors.groupingBy( - Map.Entry::getValue, - Collectors.mapping(Map.Entry::getKey, Collectors.toSet()) - ) - ) - ); - } - - /** - * Creates a physical table whose Fili and Druid names are the same. - * - * @param name Fili name of the physical table - * @param timeGrain time grain of the table - * @param logicalToPhysicalColumnNames Mappings from logical to physical names - */ - public PhysicalTable( - @NotNull String name, - @NotNull ZonedTimeGrain timeGrain, - @NotNull Map logicalToPhysicalColumnNames - ) { - this(name, name, timeGrain, logicalToPhysicalColumnNames); - } - - @Override - public Set getColumns() { - return getAvailableIntervals().keySet(); - } - - @Override - public Boolean addColumn(Column columnToAdd) { - synchronized (mutex) { - return addColumn(columnToAdd, Collections.emptySet()); - } - } - - /** - * Get the time grain from granularity. - * - * @return The time grain of this physical table - */ - public ZonedTimeGrain getTimeGrain() { - return (ZonedTimeGrain) getGranularity(); - } - - /** - * Add a column to the working intervals. - * - * @param columnToAdd The column instance to add - * @param intervals The interval set to add - * - * @return True if the workingIntervals had this column already - */ - private Boolean addColumn(Column columnToAdd, Set intervals) { - return workingIntervals.put(columnToAdd, intervals) == null; - } - - @Override - public Boolean removeColumn(Column columnToRemove) { - synchronized (mutex) { - return workingIntervals.remove(columnToRemove) == null; - } - } - - /** - * Getter for active column intervals. - * - * @return tableEntries map of column to set of available intervals - */ - public Map> getAvailableIntervals() { - return availableIntervalsRef.get(); - } - - /** - * Getter for working copy of the column intervals. - * - * @return tableEntries map of column to set of available intervals - */ - public Map> getWorkingIntervals() { - return Utils.makeImmutable(workingIntervals); - } +public interface PhysicalTable extends Table { /** - * Update the working intervals with values from a map. + * Get the value of the actual availability for this physical table. 
* - * @param segmentMetadata A map of names of metrics and sets of intervals over which they are valid - * @param dimensionDictionary The dimension dictionary from which to look up dimensions by name + * @return The current actual physical availability or a runtime exception if there isn't one yet. */ - public synchronized void resetColumns(SegmentMetadata segmentMetadata, DimensionDictionary dimensionDictionary) { - synchronized (mutex) { - Map> dimensionIntervals = segmentMetadata.getDimensionIntervals(); - Map> metricIntervals = segmentMetadata.getMetricIntervals(); - - workingIntervals.clear(); - for (Map.Entry> nameIntervals : dimensionIntervals.entrySet()) { - String physicalName = nameIntervals.getKey(); - - getLogicalColumnNames(physicalName).stream() - .map(dimensionDictionary::findByApiName) - .filter(Objects::nonNull) - .forEach( - dimension -> { - DimensionColumn dimensionColumn = DimensionColumn.addNewDimensionColumn( - this, - dimension - ); - workingIntervals.put(dimensionColumn, nameIntervals.getValue()); - } - ); - } - for (Map.Entry> nameIntervals : metricIntervals.entrySet()) { - MetricColumn metricColumn = MetricColumn.addNewMetricColumn(this, nameIntervals.getKey()); - workingIntervals.put(metricColumn, nameIntervals.getValue()); - } - commit(); - } - } - - /** - * Swaps the actual cache with the built-up temporary cache and creates a fresh, empty temporary cache. - */ - public synchronized void commit() { - synchronized (mutex) { - Map> temp = workingIntervals; - workingIntervals = Collections.synchronizedMap(new LinkedHashMap<>()); - availableIntervalsRef.set(Collections.unmodifiableMap(new LinkedHashMap<>(temp))); - super.columns = new LinkedHashSet<>(temp.keySet()); - } - } - - /** - * Fetch a set of intervals given a column name. - * - * @param columnName Name of the column - * - * @return Set of intervals associated with a column, empty if column is missing - */ - public Set getIntervalsByColumnName(String columnName) { - Set result = getAvailableIntervals().get(new Column(columnName)); - if (result != null) { - return result; - } - return Collections.emptySet(); - } + Availability getAvailability(); /** * Get a date time that the table will align to based on grain and available intervals. @@ -212,21 +28,15 @@ public Set getIntervalsByColumnName(String columnName) { * @return The time of either the first available interval of any columns in this table or now, floored to the * table's time grain. */ - public DateTime getTableAlignment() { - return getTimeGrain().roundFloor( - IntervalUtils.firstMoment(getAvailableIntervals().values()).orElse(new DateTime()) - ); - } + DateTime getTableAlignment(); /** - * Determine whether or not this PhysicalTable has a mapping for a specific logical name. + * Get the schema for this physical table. + * Schemas contain granularity and column definitions. * - * @param logicalName Logical name to check - * @return True if contains a non-default mapping for the logical name, false otherwise + * @return A physical table schema. */ - public boolean hasLogicalMapping(String logicalName) { - return logicalToPhysicalColumnNames.containsKey(logicalName); - } + PhysicalTableSchema getSchema(); /** * Translate a logical name into a physical column name. If no translation exists (i.e. 
they are the same), @@ -242,45 +52,44 @@ public boolean hasLogicalMapping(String logicalName) { * @param logicalName Logical name to lookup in physical table * @return Translated logicalName if applicable */ - public String getPhysicalColumnName(String logicalName) { - if (!logicalToPhysicalColumnNames.containsKey(logicalName)) { - LOG.warn( - "No mapping found for logical name '{}' to physical name on table '{}'. Will use logical name as " + - "physical name. This is unexpected and should not happen for properly configured " + - "dimensions.", - logicalName, - getName() - ); - } - return logicalToPhysicalColumnNames.getOrDefault(logicalName, logicalName); - } + String getPhysicalColumnName(String logicalName); /** - * Translate a physical name into a logical column name. If no translation exists (i.e. they are the same), - * then the physical name is returned. + * Determine whether or not this PhysicalTable has a mapping for a specific logical name. + * + * @param logicalName Logical name to check + * + * @return True if contains a non-default mapping for the logical name, false otherwise * - * @param physicalName Physical name to lookup in physical table - * @return Translated physicalName if applicable + * @deprecated This may no longer be needed */ - private Set getLogicalColumnNames(String physicalName) { - return physicalToLogicalColumnNames.getOrDefault(physicalName, Collections.singleton(physicalName)); + @Deprecated + default boolean hasLogicalMapping(String logicalName) { + return getSchema().containsLogicalName(logicalName); } /** - * Get the table bucketing as a period. + * Get the columns from the schema for this physical table. * - * @return The table bucketing as a period + * @return The columns of this physical table + * + * @deprecated In favor of getting the columns directly from the schema */ - public ReadablePeriod getTablePeriod() { - return getTimeGrain().getPeriod(); - } - - public String getFactTableName() { - return factTableName; + @Deprecated + default Set getColumns() { + return getSchema().getColumns(); } - @Override - public String toString() { - return super.toString() + " factTableName: " + factTableName + " alignment: " + getTableAlignment(); + /** + * Get the time grain from granularity. + * Physical tables must have time zone associated time grains. + * + * @return The time grain of this physical table + * + * @deprecated use getSchema().getTimeGrain() + */ + @Deprecated + default ZonedTimeGrain getTimeGrain() { + return getSchema().getTimeGrain(); } } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/PhysicalTableSchema.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/PhysicalTableSchema.java new file mode 100644 index 0000000000..131c9525ed --- /dev/null +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/PhysicalTableSchema.java @@ -0,0 +1,99 @@ +// Copyright 2017 Yahoo Inc. +// Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. +package com.yahoo.bard.webservice.table; + +import com.yahoo.bard.webservice.data.time.ZonedTimeGrain; + +import java.util.Collections; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import javax.validation.constraints.NotNull; + +/** + * The schema for a physical table. 
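The class below owns the logical-to-physical name maps that previously lived on PhysicalTable; a usage sketch under an assumed one-entry mapping (all names hypothetical):

import com.yahoo.bard.webservice.data.time.ZonedTimeGrain;
import com.yahoo.bard.webservice.table.Column;
import com.yahoo.bard.webservice.table.PhysicalTableSchema;

import java.util.Collections;

// Hypothetical mapping: "userCountry" is stored physically as "user_country";
// names without a mapping fall back to themselves, per the note below.
final class NameMappings {
    static String physicalName(ZonedTimeGrain grain, Iterable<Column> columns) {
        PhysicalTableSchema schema = new PhysicalTableSchema(
                grain,
                columns,
                Collections.singletonMap("userCountry", "user_country")
        );
        return schema.getPhysicalColumnName("userCountry"); // returns "user_country"
    }
}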
+ */ +public class PhysicalTableSchema extends BaseSchema implements Schema { + + private final ZonedTimeGrain timeGrain; + private final Map logicalToPhysicalColumnNames; + private final Map> physicalToLogicalColumnNames; + + /** + * Constructor. + * + * @param timeGrain The time grain for this table + * @param columns The columns for this table + * @param logicalToPhysicalColumnNames The mapping of logical column names to physical names + */ + public PhysicalTableSchema( + @NotNull ZonedTimeGrain timeGrain, + Iterable columns, + @NotNull Map logicalToPhysicalColumnNames + ) { + super(timeGrain, columns); + this.timeGrain = timeGrain; + + this.logicalToPhysicalColumnNames = Collections.unmodifiableMap(logicalToPhysicalColumnNames); + this.physicalToLogicalColumnNames = Collections.unmodifiableMap( + this.logicalToPhysicalColumnNames.entrySet().stream().collect( + Collectors.groupingBy( + Map.Entry::getValue, + Collectors.mapping(Map.Entry::getKey, Collectors.toSet()) + ) + ) + ); + } + + /** + * Translate a logical name into a physical column name. If no translation exists (i.e. they are the same), + * then the logical name is returned. + *

* <p> + * NOTE: This defaulting behavior WILL BE REMOVED in future releases. + * <p>

+     * The defaulting behavior shouldn't be hit for Dimensions that are serialized via the default serializer and are
+     * not properly configured with a logical-to-physical name mapping. Dimensions that are not "normal" dimensions,
+     * such as dimensions used for DimensionSpecs in queries to do mapping from fact-level dimensions to something
+     * else, should likely use their own serialization strategy so as to not hit this defaulting behavior.
+     *
+     * @param logicalName Logical name to lookup in physical table
+     *
+     * @return Translated logicalName if applicable
+     */
+    public String getPhysicalColumnName(String logicalName) {
+        return logicalToPhysicalColumnNames.getOrDefault(logicalName, logicalName);
+    }
+
+    /**
+     * Look up all the logical column names corresponding to a physical name.
+     * If no translation exists (i.e. they are the same), then the physical name is returned.
+     *
+     * @param physicalName Physical name to lookup in physical table
+     *
+     * @return Translated physicalName if applicable
+     */
+    public Set<String> getLogicalColumnNames(String physicalName) {
+        return physicalToLogicalColumnNames.getOrDefault(physicalName, Collections.singleton(physicalName));
+    }
+
+    /**
+     * Returns true if the mapping of names is populated for this logical name.
+     *
+     * @param logicalName the name of a metric or dimension column
+     *
+     * @return true if this table supports this column explicitly
+     */
+    public boolean containsLogicalName(String logicalName) {
+        return logicalToPhysicalColumnNames.containsKey(logicalName);
+    }
+
+    /**
+     * The time grain of this schema.
+     *
+     * @return the granularity for this schema
+     */
+    public ZonedTimeGrain getTimeGrain() {
+        return timeGrain;
+    }
+}
diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/Schema.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/Schema.java
index 8fc96a7c4b..b0a0fabe34 100644
--- a/fili-core/src/main/java/com/yahoo/bard/webservice/table/Schema.java
+++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/Schema.java
@@ -1,4 +1,4 @@
-// Copyright 2016 Yahoo Inc.
+// Copyright 2017 Yahoo Inc.
 // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms.
 package com.yahoo.bard.webservice.table;
 
@@ -6,42 +6,27 @@
 import com.yahoo.bard.webservice.util.Utils;
 
 import java.util.LinkedHashSet;
-import java.util.Objects;
+import java.util.Optional;
 import java.util.Set;
 
-import javax.validation.constraints.NotNull;
-
 /**
- * Schema.
+ * An interface describing a table or table-like entity composed of sets of columns.
 */
-public class Schema {
-
-    private final Granularity granularity;
-
-    protected LinkedHashSet<Column> columns;
+public interface Schema {
 
     /**
-     * Constructor.
+     * Get all the columns underlying this Schema.
      *
-     * @param granularity The granularity of the table
+     * @return The columns of this schema
      */
-    public Schema(@NotNull Granularity granularity) {
-        this.granularity = granularity;
-        this.columns = new LinkedHashSet<>();
-    }
+    Set<Column> getColumns();
 
     /**
-     * Getter for timeGrain.
+     * Get the time granularity for this Schema.
      *
-     * @return timeGrain
+     * @return The granularity of this schema
      */
-    public Granularity getGranularity() {
-        return this.granularity;
-    }
-
-    public Set<Column> getColumns() {
-        return columns;
-    }
+    Granularity getGranularity();
 
     /**
      * Getter for set of columns by sub-type.
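Schema's default methods, continued in the hunk that follows, do the subtype filtering and Optional lookup; a sketch of the resulting call pattern (helper name hypothetical, schema instance assumed):

import com.yahoo.bard.webservice.data.dimension.DimensionColumn;
import com.yahoo.bard.webservice.table.Schema;

import java.util.LinkedHashSet;

// Hypothetical call site: getColumns(Class) narrows by subtype while keeping
// the schema's column ordering intact.
final class SubtypeFilters {
    static LinkedHashSet<DimensionColumn> dimensionColumns(Schema schema) {
        return schema.getColumns(DimensionColumn.class);
    }
}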
@@ -51,83 +36,22 @@ public Set getColumns() { * * @return Set of Columns */ - public Set getColumns(Class columnClass) { + default LinkedHashSet getColumns(Class columnClass) { return Utils.getSubsetByType(getColumns(), columnClass); } /** - * Getter for column by name. - * - * @param columnName Name of the column - * - * @return Column having name columnName - */ - public Column getColumn(String columnName) { - for (Column column : getColumns()) { - if (columnName.equals(column.getName())) { - return column; - } - } - return null; - } - - /** - * Get a column by its name. + * Given a column type and name, return the column of the expected type. * - * @param columnName Name of the column - * @param columnClass sub class - * @param sub class type + * @param name The name on the column + * @param columnClass The class of the column being retrieved + * @param The type of the subclass of the column being retrieved * - * @return Column having name columnName + * @return The an optional containing the column of the name and type specified, if any */ - public T getColumn(String columnName, Class columnClass) { - for (T column: Utils.getSubsetByType(getColumns(), columnClass)) { - if (column.getName().equals(columnName)) { - return column; - } - } - return null; - } - - /** - * Method to add a column. - * - * @param columnToAdd The column to add - * - * @return set of columns which has the newly added column - */ - public Boolean addColumn(Column columnToAdd) { - return this.columns.add(columnToAdd); - } - - /** - * Method to remove a column. - * - * @param columnToRemove The column to remove - * - * @return set of columns after removing the specified column - */ - public Boolean removeColumn(Column columnToRemove) { - return this.columns.remove(columnToRemove); - } - - @Override - public boolean equals(Object o) { - if (this == o) { return true; } - if (o == null || getClass() != o.getClass()) { return false; } - Schema schema = (Schema) o; - return - Objects.equals(columns, schema.columns) && - Objects.equals(granularity, schema.granularity); - } - - @Override - public int hashCode() { - return Objects.hash(granularity, columns); - } - - @Override - public String toString() { - return columns.toString(); + default Optional getColumn(String name, Class columnClass) { + return getColumns(columnClass).stream() + .filter(column -> column.getName().equals(name)) + .findFirst(); } } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/Table.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/Table.java index 9f5a5bdf16..086e43fdfd 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/table/Table.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/Table.java @@ -1,63 +1,40 @@ -// Copyright 2016 Yahoo Inc. +// Copyright 2017 Yahoo Inc. // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.table; import com.yahoo.bard.webservice.data.dimension.Dimension; import com.yahoo.bard.webservice.data.dimension.DimensionColumn; -import com.yahoo.bard.webservice.druid.model.query.Granularity; import java.util.LinkedHashSet; -import java.util.Objects; -import java.util.Set; import java.util.stream.Collectors; -import javax.validation.constraints.NotNull; - /** - * Table is a schema with dimension columns. + * Table has a schema and a name. */ -public class Table extends Schema { +public interface Table { - private final String name; + /** + * The schema for this table. 
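Table reduces to a name plus a schema, with getDimensions derived by default; a minimal hedged implementation (class name hypothetical), the interface details continuing below:

import com.yahoo.bard.webservice.table.Schema;
import com.yahoo.bard.webservice.table.Table;

// Hypothetical minimal Table: once getName and getSchema are supplied, the
// default getDimensions reads dimension columns off the schema.
public class SimpleTable implements Table {
    private final String name;
    private final Schema schema;

    public SimpleTable(String name, Schema schema) {
        this.name = name;
        this.schema = schema;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public Schema getSchema() {
        return schema;
    }
}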
+ * + * @return a schema + */ + Schema getSchema(); /** - * Constructor. + * The name for this table. * - * @param name The name of the table - * @param granularity The granularity of the table + * @return The table name */ - public Table(@NotNull String name, @NotNull Granularity granularity) { - super(granularity); - this.name = name; - } + String getName(); /** * Getter for set of dimensions. * * @return Set of Dimension */ - public Set getDimensions() { - return this.getColumns(DimensionColumn.class).stream() + default LinkedHashSet getDimensions() { + return getSchema().getColumns(DimensionColumn.class).stream() .map(DimensionColumn::getDimension) .collect(Collectors.toCollection(LinkedHashSet::new)); } - - public String getName() { - return name; - } - - @Override - public String toString() { - return "{Table:{ name:'" + getName() + "', grain:'" + getGranularity() + "', cols:'" + getColumns() + "'} }"; - } - - @Override - public boolean equals(final Object o) { - return super.equals(o) && name.equals(((Table) o).name); - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), getName()); - } } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/TableGroup.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/TableGroup.java index a52c358bcf..14fa6a41bc 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/table/TableGroup.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/TableGroup.java @@ -32,11 +32,11 @@ public class TableGroup { public TableGroup( LinkedHashSet tables, Set apiMetricNames, - LinkedHashSet dimensions + Set dimensions ) { this.tables = tables; this.apiMetricNames = apiMetricNames; - this.dimensions = dimensions; + this.dimensions = new LinkedHashSet<>(dimensions); } /** @@ -59,7 +59,7 @@ public TableGroup(LinkedHashSet tables, Set apiMet tables, apiMetricNames, tables.stream() - .flatMap(table -> table.getColumns(DimensionColumn.class).stream()) + .flatMap(table -> table.getSchema().getColumns(DimensionColumn.class).stream()) .map(DimensionColumn::getDimension) .collect(Collectors.toCollection(LinkedHashSet::new)) ); diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/TableIdentifier.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/TableIdentifier.java index ec3401f186..d3f5d559b0 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/table/TableIdentifier.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/TableIdentifier.java @@ -41,7 +41,7 @@ public TableIdentifier(String logicalTableName, Optional period) * * @param table Logical table for the table identifier */ - public TableIdentifier(Table table) { + public TableIdentifier(LogicalTable table) { this(table.getName(), table.getGranularity()); } @@ -64,7 +64,6 @@ public static TableIdentifier create(DataApiRequest request) { * @return an Optional with the period if the granularity had one, or empty otherwise. */ private static Optional getGranularityPeriod(Granularity granularity) { - //TODO: Short lived code, remove once TableIdentifiers no longer need periods return Optional.ofNullable( granularity instanceof TimeGrain ? 
((TimeGrain) granularity).getPeriod() : null ); diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/ZonedSchema.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/ZonedSchema.java index 5fd4c6cfa5..1993b80d02 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/table/ZonedSchema.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/ZonedSchema.java @@ -10,8 +10,12 @@ /** * A schema anchored to a particular time zone. + * + * @deprecated This class is no longer used as a subclass for {@link com.yahoo.bard.webservice.data.ResultSetSchema}. + * Use that class directly now. */ -public class ZonedSchema extends Schema { +@Deprecated +public class ZonedSchema extends BaseSchema implements Schema { private final DateTimeZone dateTimeZone; @@ -20,12 +24,25 @@ public class ZonedSchema extends Schema { * * @param granularity Granularity of the schema * @param dateTimeZone TimeZone of the schema + * @param columns The columns for this schema */ - public ZonedSchema(@NotNull Granularity granularity, @NotNull DateTimeZone dateTimeZone) { - super(granularity); + public ZonedSchema( + @NotNull Granularity granularity, + @NotNull DateTimeZone dateTimeZone, + @NotNull Iterable columns + ) { + super(granularity, columns); this.dateTimeZone = dateTimeZone; } + /** + * Constructor. + * + * @param schema schema to copy construct + */ + public ZonedSchema(ZonedSchema schema) { + this(schema.getGranularity(), schema.getDateTimeZone(), schema.getColumns()); + } public DateTimeZone getDateTimeZone() { return dateTimeZone; } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/availability/Availability.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/availability/Availability.java new file mode 100644 index 0000000000..a9520827e7 --- /dev/null +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/availability/Availability.java @@ -0,0 +1,54 @@ +// Copyright 2017 Yahoo Inc. +// Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. +package com.yahoo.bard.webservice.table.availability; + +import com.yahoo.bard.webservice.data.config.names.TableName; +import com.yahoo.bard.webservice.table.Column; + +import org.joda.time.Interval; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.SortedSet; + +/** + * Availability describes the intervals available by column for a table. + */ +public interface Availability { + + /** + * The names of the data sources backing this availability. + * + * @return A set of names for datasources backing this table + */ + SortedSet getDataSourceNames(); + + /** + * The availability for a given column. + * + * @param c A column + * + * @return The list of intervals that column is available for. + */ + List get(Column c); + + /** + * The availability of all columns. + * + * @return The intervals, by column, available. + */ + Map> getAvailableIntervals(); + + /** + * Fetch a set of intervals given a column name. + * + * @param columnName Name of the column + * + * @return Set of intervals associated with a column, empty if column is missing + */ + default List getIntervalsByColumnName(String columnName) { + List result = get(new Column(columnName)); + return result == null ? 
Collections.emptyList() : result; + } +} diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/availability/ImmutableAvailability.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/availability/ImmutableAvailability.java new file mode 100644 index 0000000000..b598398585 --- /dev/null +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/availability/ImmutableAvailability.java @@ -0,0 +1,145 @@ +// Copyright 2017 Yahoo Inc. +// Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. +package com.yahoo.bard.webservice.table.availability; + +import com.yahoo.bard.webservice.data.config.names.TableName; +import com.yahoo.bard.webservice.data.dimension.DimensionColumn; +import com.yahoo.bard.webservice.data.dimension.DimensionDictionary; +import com.yahoo.bard.webservice.data.metric.MetricColumn; +import com.yahoo.bard.webservice.table.Column; +import com.yahoo.bard.webservice.util.SimplifiedIntervalList; + +import com.google.common.collect.ImmutableMap; + +import org.joda.time.Interval; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.function.Function; +import java.util.stream.Collectors; + +/** + * An availability which guarantees the immutability of its contents. + */ +public class ImmutableAvailability implements Availability { + + private final TableName name; + private final Map> columnIntervals; + private final SortedSet dataSourceNames; + /** + * Constructor. + * + * @param tableName The name of the data source associated with this ImmutableAvailability + * @param map A map of columns to lists of available intervals + */ + public ImmutableAvailability(TableName tableName, Map> map) { + this.name = tableName; + columnIntervals = ImmutableMap.copyOf(map); + SortedSet names = new TreeSet<>(TableName.COMPARATOR); + names.add(tableName); + dataSourceNames = Collections.unmodifiableSortedSet(names); + } + + /** + * Constructor. + * + * @param tableName The name of the data source associated with this ImmutableAvailability + * @param map A map of columns to lists of available intervals + */ + public ImmutableAvailability(String tableName, Map> map) { + this(TableName.of(tableName), map); + } + + /** + * Constructor. + * + * @param tableName The name of the data source associated with this ImmutableAvailability + * @param dimensionIntervals The dimension availability map by dimension name + * @param metricIntervals The metric availability map + * @param dimensionDictionary The dictionary to resolve dimension names against + */ + public ImmutableAvailability( + String tableName, + Map> dimensionIntervals, + Map> metricIntervals, + DimensionDictionary dimensionDictionary + ) { + this( + TableName.of(tableName), + buildAvailabilityMap(dimensionIntervals, metricIntervals, dimensionDictionary) + ); + } + + /** + * Build an availability map from unbound dimension and metric name maps and a dimension dictionary. 
+ * + * @param dimensionIntervals The dimension availability map by dimension name + * @param metricIntervals The metric availability map + * @param dimensionDictionary The dictionary to resolve dimension names against + * + * @return A map of available intervals by columns + */ + private static Map> buildAvailabilityMap( + Map> dimensionIntervals, + Map> metricIntervals, + DimensionDictionary dimensionDictionary + ) { + Function>, Column> dimensionKeyMapper = + entry -> new DimensionColumn(dimensionDictionary.findByApiName(entry.getKey())); + Function>, Column> metricKeyMapper = + entry -> new MetricColumn(entry.getKey()); + Function>, List> valueMapper = + entry -> new SimplifiedIntervalList(entry.getValue()); + + Map> map = dimensionIntervals.entrySet().stream() + .collect(Collectors.toMap(dimensionKeyMapper, valueMapper)); + map.putAll( + metricIntervals.entrySet().stream() + .collect(Collectors.toMap(metricKeyMapper, valueMapper)) + ); + return map; + } + + @Override + public SortedSet getDataSourceNames() { + return dataSourceNames; + } + + @Override + public List get(final Column c) { + return columnIntervals.get(c); + } + + @Override + public Map> getAvailableIntervals() { + return columnIntervals; + } + + @Override + public int hashCode() { + return columnIntervals.hashCode(); + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (obj instanceof ImmutableAvailability) { + ImmutableAvailability that = (ImmutableAvailability) obj; + return Objects.equals(columnIntervals, that.columnIntervals) && + Objects.equals(name, that.name); + } + if (obj instanceof Availability) { + Availability that = (Availability) obj; + return Objects.equals(columnIntervals, that.getAvailableIntervals()) && + Objects.equals(getDataSourceNames(), that.getDataSourceNames()); + } + return false; + } +} diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/AggregatableDimensionsMatcher.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/AggregatableDimensionsMatcher.java index 677c4121c5..11b863e7f9 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/AggregatableDimensionsMatcher.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/AggregatableDimensionsMatcher.java @@ -4,9 +4,9 @@ import static com.yahoo.bard.webservice.web.ErrorMessageFormat.NO_TABLE_FOR_NON_AGGREGATABLE; +import com.yahoo.bard.webservice.table.PhysicalTable; import com.yahoo.bard.webservice.data.dimension.Dimension; import com.yahoo.bard.webservice.data.metric.TemplateDruidQuery; -import com.yahoo.bard.webservice.table.PhysicalTable; import com.yahoo.bard.webservice.util.StreamUtils; import com.yahoo.bard.webservice.util.TableUtils; import com.yahoo.bard.webservice.web.DataApiRequest; diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/BasePhysicalTableResolver.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/BasePhysicalTableResolver.java index 8d435c5257..7072d21fc2 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/BasePhysicalTableResolver.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/BasePhysicalTableResolver.java @@ -2,12 +2,12 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. 
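The `Availability` contract and its `ImmutableAvailability` implementation introduced above can be exercised with a short sketch. This is illustrative only, not code from this PR; it assumes the generic signatures elided in the diff are `Map<Column, List<Interval>>` and `List<Interval>`, and it uses the `TableName.of` factory noted in the changelog.

```java
import com.yahoo.bard.webservice.table.Column;
import com.yahoo.bard.webservice.table.availability.Availability;
import com.yahoo.bard.webservice.table.availability.ImmutableAvailability;

import org.joda.time.Interval;

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class AvailabilitySketch {
    public static void main(String[] args) {
        // One column with a single available interval.
        Map<Column, List<Interval>> intervals = new LinkedHashMap<>();
        intervals.put(
                new Column("page_views"),
                Collections.singletonList(Interval.parse("2017-01-01/2017-02-01"))
        );

        // The String overload resolves the data source name via TableName.of.
        Availability availability = new ImmutableAvailability("basefact_network", intervals);

        // The default method substitutes an empty list for unknown columns instead of null.
        List<Interval> known = availability.getIntervalsByColumnName("page_views");
        List<Interval> missing = availability.getIntervalsByColumnName("no_such_column");
        System.out.println(known.size() + " interval(s) known, " + missing.size() + " for the missing column");
    }
}
```

Because the constructor copies the backing map into an `ImmutableMap`, the caller's `intervals` map can be reused or mutated afterward without affecting the availability.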
package com.yahoo.bard.webservice.table.resolver; +import com.yahoo.bard.webservice.table.PhysicalTable; import com.yahoo.bard.webservice.application.MetricRegistryFactory; import com.yahoo.bard.webservice.data.dimension.Dimension; import com.yahoo.bard.webservice.data.metric.LogicalMetric; import com.yahoo.bard.webservice.data.metric.TemplateDruidQuery; import com.yahoo.bard.webservice.druid.model.query.Granularity; -import com.yahoo.bard.webservice.table.PhysicalTable; import com.yahoo.bard.webservice.util.TableUtils; import com.yahoo.bard.webservice.web.DataApiRequest; import com.yahoo.bard.webservice.web.ErrorMessageFormat; diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/DefaultPhysicalTableResolver.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/DefaultPhysicalTableResolver.java index af9c00e86e..16a9599f15 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/DefaultPhysicalTableResolver.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/DefaultPhysicalTableResolver.java @@ -22,12 +22,12 @@ /** * A physical table resolver which implements core bard filters and comparator functions *

- * {@link com.yahoo.bard.webservice.table.PhysicalTable}) based on the optimum (lowest query cost) table, considering + * {@link PhysicalTable}) based on the optimum (lowest query cost) table, considering * completeness of data, granularity, time alignment, aggregatability constraints and cardinality for a particular * query. */ @Singleton -public class DefaultPhysicalTableResolver extends BasePhysicalTableResolver implements PhysicalTableResolver { +public class DefaultPhysicalTableResolver extends BasePhysicalTableResolver { protected static final GranularityComparator COMPARE_GRANULARITY = new GranularityComparator(); protected static final DimensionCardinalityComparator CARDINALITY_COMPARATOR = new DimensionCardinalityComparator(); diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/SchemaPhysicalTableMatcher.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/SchemaPhysicalTableMatcher.java index 21afd573da..f8f42796bf 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/SchemaPhysicalTableMatcher.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/SchemaPhysicalTableMatcher.java @@ -49,11 +49,11 @@ public SchemaPhysicalTableMatcher(DataApiRequest request, TemplateDruidQuery que @Override public boolean test(PhysicalTable table) { - if (!granularity.satisfiedBy(table.getGranularity())) { + if (!granularity.satisfiedBy(table.getSchema().getTimeGrain())) { return false; } - Set supplyNames = table.getColumns().stream() + Set supplyNames = table.getSchema().getColumns().stream() .map(Column::getName) .collect(Collectors.toCollection(LinkedHashSet::new)); diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/VolatileTimeComparator.java b/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/VolatileTimeComparator.java index 1108baa7c6..e957d7b7db 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/VolatileTimeComparator.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/table/resolver/VolatileTimeComparator.java @@ -2,11 +2,11 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.table.resolver; +import com.yahoo.bard.webservice.table.PhysicalTable; import com.yahoo.bard.webservice.data.PartialDataHandler; import com.yahoo.bard.webservice.data.volatility.VolatileIntervalsService; import com.yahoo.bard.webservice.druid.model.query.DruidAggregationQuery; import com.yahoo.bard.webservice.druid.model.query.Granularity; -import com.yahoo.bard.webservice.table.PhysicalTable; import com.yahoo.bard.webservice.util.IntervalUtils; import com.yahoo.bard.webservice.util.SimplifiedIntervalList; import com.yahoo.bard.webservice.util.TableUtils; @@ -78,8 +78,8 @@ public int compare(PhysicalTable left, PhysicalTable right) { * [12-2015/01-2016], while t2 is missing the bucket[12-31-2015/01-01-2016]. Suppose we make a request for * [01-2015/01-2016] at the monthly grain. Both tables have equal volatility and partiality at the request grain, * and t2 has more data available in the partial-but-volatile range. However, a comparator that looked just at - * volatility would favor t1, because it has 11 months of volatile-but-complete data, while t2 only has 30 days of - * volatile-but-complete data, even though t2 actually has more complete data. 
+ * volatility would favor t1, because it has 11 months of volatile-but-present data, while t2 only has 30 days of + * volatile-but-present data, even though t2 actually has more present data. * * @param table The table of interest * diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/util/DateTimeUtils.java b/fili-core/src/main/java/com/yahoo/bard/webservice/util/DateTimeUtils.java index f39fcef13a..1203e1fc3b 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/util/DateTimeUtils.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/util/DateTimeUtils.java @@ -3,8 +3,11 @@ package com.yahoo.bard.webservice.util; import com.yahoo.bard.webservice.data.time.TimeGrain; +import com.yahoo.bard.webservice.data.time.ZonedTimeGrain; +import com.yahoo.bard.webservice.druid.model.query.Granularity; import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; import org.joda.time.Interval; import org.joda.time.format.DateTimeFormatter; import org.slf4j.Logger; @@ -226,4 +229,17 @@ public static DateTime quarterlyRound(DateTime from) { // an integer multiple of three months from the start of the year, then round to the start of that month. return property.addToCopy(-1 * ((property.get() - 1) % 3)).monthOfYear().roundFloorCopy(); } + + /** + * Given a granularity, produce a time zone. + * + * @param granularity The granularity from which to extract a time zone + * + * @return The granularity's time zone, or the default time zone if the granularity does not have one + */ + public static DateTimeZone getTimeZone(Granularity granularity) { + return (granularity instanceof ZonedTimeGrain) ? + ((ZonedTimeGrain) granularity).getTimeZone() : + DateTimeZone.getDefault(); + } } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/util/IntervalUtils.java b/fili-core/src/main/java/com/yahoo/bard/webservice/util/IntervalUtils.java index f727eea864..844f6c0285 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/util/IntervalUtils.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/util/IntervalUtils.java @@ -72,6 +72,20 @@ public static Set getOverlappingSubintervals(Collection left .collect(Collectors.toSet()); } + /** + * Find all the interval overlaps between two collections of intervals. + *

+ * If the left set is null, return the right. This makes this usable in a reduce function. + * + * @param left The intervals being streamed over + * @param right The intervals being tested against + * + * @return A set of intervals describing the time common to both sets + */ + public static Set getOverlappingSubintervals(Set left, Set right) { + return getOverlappingSubintervals((Collection) left, (Collection) right); + } + /** * Simplify raw intervals and return a map of intervals (dividing them by the grain) to the ordinal of the interval. * diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/web/ApiFilter.java b/fili-core/src/main/java/com/yahoo/bard/webservice/web/ApiFilter.java index e900e859ee..dc63cf64d2 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/web/ApiFilter.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/web/ApiFilter.java @@ -2,7 +2,6 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.web; -import static com.yahoo.bard.webservice.web.ErrorMessageFormat.FILTER_DIMENSION_NOT_IN_TABLE; import static com.yahoo.bard.webservice.web.ErrorMessageFormat.FILTER_DIMENSION_UNDEFINED; import static com.yahoo.bard.webservice.web.ErrorMessageFormat.FILTER_ERROR; import static com.yahoo.bard.webservice.web.ErrorMessageFormat.FILTER_FIELD_NOT_IN_DIMENSIONS; @@ -12,7 +11,6 @@ import com.yahoo.bard.webservice.data.dimension.Dimension; import com.yahoo.bard.webservice.data.dimension.DimensionDictionary; import com.yahoo.bard.webservice.data.dimension.DimensionField; -import com.yahoo.bard.webservice.table.LogicalTable; import com.yahoo.bard.webservice.util.FilterTokenizer; import org.slf4j.Logger; @@ -78,21 +76,6 @@ public ApiFilter withValues(@NotNull Set values) { return new ApiFilter(dimension, dimensionField, operation, values); } - /** - * Parses the URL filter Query and generates the ApiFilter object. - * - * @param filterQuery Expects a URL filter query String in the format: - *
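The `Set`-typed overload of `getOverlappingSubintervals` added just above documents that a null left argument returns the right set, which lets the method reference act as a `Stream.reduce` accumulator with a `null` identity; the narrower parameter types also let the reference resolve without casts. A minimal sketch of that reduce usage, assuming the generics elided in the diff are `Set<Interval>`:

```java
import com.yahoo.bard.webservice.util.IntervalUtils;

import org.joda.time.Interval;

import java.util.Collection;
import java.util.Set;

public class OverlapSketch {
    /**
     * Intersect many interval sets in one pass. The null identity is safe
     * because, per the javadoc above, a null left argument yields the right set.
     * Note that an empty input reduces to the identity, that is, to null.
     */
    public static Set<Interval> intersectAll(Collection<Set<Interval>> intervalSets) {
        return intervalSets.stream().reduce(null, IntervalUtils::getOverlappingSubintervals);
    }
}
```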

- * (dimension name)|(field name)-(operation)[?(value or comma separated values)]? - * @param dimensionDictionary cache containing all the valid dimension objects. - * - * @throws BadFilterException Exception when filter pattern is not matched or when any of its properties are not - * valid. - */ - public ApiFilter(@NotNull String filterQuery, DimensionDictionary dimensionDictionary) throws BadFilterException { - this(filterQuery, (LogicalTable) null, dimensionDictionary); - } - /** * Parses the URL filter Query and generates the ApiFilter object. * @@ -100,7 +83,6 @@ public ApiFilter(@NotNull String filterQuery, DimensionDictionary dimensionDicti *

* (dimension name)|(field name)-(operation)[?(value or comma separated values)]? * - * @param table The logical table for a data request (if any) * @param dimensionDictionary cache containing all the valid dimension objects. * * @throws BadFilterException Exception when filter pattern is not matched or when any of its properties are not * valid. */ public ApiFilter( @NotNull String filterQuery, - LogicalTable table, DimensionDictionary dimensionDictionary ) throws BadFilterException { LOG.trace("Filter query: {}\n\n DimensionDictionary: {}", filterQuery, dimensionDictionary); @@ -143,14 +124,6 @@ public ApiFilter( throw new BadFilterException(FILTER_DIMENSION_UNDEFINED.format(filterDimensionName)); } - // If there is a logical table and the filter is not part of it, throw exception. - if (table != null && !table.getDimensions().contains(dimension)) { - LOG.debug(FILTER_DIMENSION_NOT_IN_TABLE.logFormat(filterDimensionName, table)); - throw new BadFilterException( - FILTER_DIMENSION_NOT_IN_TABLE.format(filterDimensionName, table.getName()) - ); - } - String dimensionFieldName = matcher.group(2); try { this.dimensionField = this.dimension.getFieldByName(dimensionFieldName); diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/web/DataApiRequest.java b/fili-core/src/main/java/com/yahoo/bard/webservice/web/DataApiRequest.java index 4f7739ce18..338033d27a 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/web/DataApiRequest.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/web/DataApiRequest.java @@ -7,6 +7,7 @@ import static com.yahoo.bard.webservice.web.ErrorMessageFormat.DIMENSIONS_NOT_IN_TABLE; import static com.yahoo.bard.webservice.web.ErrorMessageFormat.DIMENSIONS_UNDEFINED; import static com.yahoo.bard.webservice.web.ErrorMessageFormat.DIMENSION_FIELDS_UNDEFINED; +import static com.yahoo.bard.webservice.web.ErrorMessageFormat.FILTER_DIMENSION_NOT_IN_TABLE; import static com.yahoo.bard.webservice.web.ErrorMessageFormat.HAVING_METRICS_NOT_IN_QUERY_FORMAT; import static com.yahoo.bard.webservice.web.ErrorMessageFormat.INCORRECT_METRIC_FILTER_FORMAT; import static com.yahoo.bard.webservice.web.ErrorMessageFormat.INTEGER_INVALID; @@ -1075,7 +1076,17 @@ protected Map> generateFilters( for (String apiFilter : apiFilters) { ApiFilter newFilter; try { - newFilter = new ApiFilter(apiFilter, table, dimensionDictionary); + newFilter = new ApiFilter(apiFilter, dimensionDictionary); + + // If the filter dimension is not in the logical table, throw an exception. + if (! 
table.getDimensions().contains(newFilter.getDimension())) { + String filterDimensionName = newFilter.getDimension().getApiName(); + LOG.debug(FILTER_DIMENSION_NOT_IN_TABLE.logFormat(filterDimensionName, table)); + throw new BadFilterException( + FILTER_DIMENSION_NOT_IN_TABLE.format(filterDimensionName, table.getName()) + ); + } + } catch (BadFilterException filterException) { throw new BadApiRequestException(filterException.getMessage(), filterException); } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/web/DimensionsApiRequest.java b/fili-core/src/main/java/com/yahoo/bard/webservice/web/DimensionsApiRequest.java index 91a970d400..833d8411d5 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/web/DimensionsApiRequest.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/web/DimensionsApiRequest.java @@ -9,6 +9,8 @@ import com.yahoo.bard.webservice.data.dimension.DimensionDictionary; import com.yahoo.bard.webservice.web.util.PaginationParameters; +import com.google.common.collect.Sets; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,8 +33,8 @@ public class DimensionsApiRequest extends ApiRequest { private static final Logger LOG = LoggerFactory.getLogger(DimensionsApiRequest.class); public static final String REQUEST_MAPPER_NAMESPACE = "dimensionApiRequestMapper"; - private final Set dimensions; - private final Set filters; + private final LinkedHashSet dimensions; + private final LinkedHashSet filters; /** * Parses the API request URL and generates the Api Request object. @@ -109,12 +111,12 @@ private DimensionsApiRequest( Optional paginationParameters, UriInfo uriInfo, Response.ResponseBuilder builder, - Set dimensions, - Set filters + Iterable dimensions, + Iterable filters ) { super(format, SYNCHRONOUS_ASYNC_AFTER_VALUE, paginationParameters, uriInfo, builder); - this.dimensions = dimensions; - this.filters = filters; + this.dimensions = Sets.newLinkedHashSet(dimensions); + this.filters = Sets.newLinkedHashSet(filters); } /** @@ -126,12 +128,12 @@ private DimensionsApiRequest( * @return Set of dimension objects. * @throws BadApiRequestException if an invalid dimension is requested or the dimension dictionary is empty. */ - protected Set generateDimensions( + protected LinkedHashSet generateDimensions( String apiDimension, DimensionDictionary dimensionDictionary ) throws BadApiRequestException { // Dimension is optional hence check if dimension is requested. - Set generated = dimensionDictionary.findAll().stream() + LinkedHashSet generated = dimensionDictionary.findAll().stream() .filter(dimension -> apiDimension == null || apiDimension.equals(dimension.getApiName())) .collect(Collectors.toCollection(LinkedHashSet::new)); @@ -160,13 +162,13 @@ protected Set generateDimensions( * @return Set of filter objects. * @throws BadApiRequestException if the filter query string does not match required syntax. */ - protected Set generateFilters( + protected LinkedHashSet generateFilters( String filterQuery, DimensionDictionary dimensionDictionary ) throws BadApiRequestException { LOG.trace("Dimension Dictionary: {}", dimensionDictionary); // Set of filter objects - Set generated = new LinkedHashSet<>(); + LinkedHashSet generated = new LinkedHashSet<>(); // Filters are optional hence check if filters are requested. 
if (filterQuery == null || "".equals(filterQuery)) { @@ -209,7 +211,7 @@ public DimensionsApiRequest withBuilder(Response.ResponseBuilder builder) { return new DimensionsApiRequest(format, paginationParameters, uriInfo, builder, dimensions, filters); } - public DimensionsApiRequest withDimensions(Set dimensions) { + public DimensionsApiRequest withDimensions(LinkedHashSet dimensions) { return new DimensionsApiRequest(format, paginationParameters, uriInfo, builder, dimensions, filters); } @@ -218,7 +220,7 @@ public DimensionsApiRequest withFilters(Set filters) { } // CHECKSTYLE:ON - public Set getDimensions() { + public LinkedHashSet getDimensions() { return this.dimensions; } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/web/ErrorMessageFormat.java b/fili-core/src/main/java/com/yahoo/bard/webservice/web/ErrorMessageFormat.java index 06c0aa9fa1..fccfb20e8d 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/web/ErrorMessageFormat.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/web/ErrorMessageFormat.java @@ -212,6 +212,10 @@ public enum ErrorMessageFormat implements MessageFormatter { "Failed to retrieve data.", "Received %s with status code %s for reason %s when sending %s to Druid" ), + + RESULT_MAPPING_FAILURE( + "Error occurred while processing response data: %s" + ) ; private final String messageFormat; diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/web/SlicesApiRequest.java b/fili-core/src/main/java/com/yahoo/bard/webservice/web/SlicesApiRequest.java index ef92deeb08..d7139f384a 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/web/SlicesApiRequest.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/web/SlicesApiRequest.java @@ -23,6 +23,7 @@ import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedHashSet; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; @@ -164,7 +165,7 @@ protected Map generateSlice( throw new BadApiRequestException(msg); } - Map> columnCache = table.getAvailableIntervals(); + Map> columnCache = table.getAvailability().getAvailableIntervals(); Set> dimensionsResult = new LinkedHashSet<>(); Set> metricsResult = new LinkedHashSet<>(); diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/web/endpoints/DataServlet.java b/fili-core/src/main/java/com/yahoo/bard/webservice/web/endpoints/DataServlet.java index a0987dd4b2..7e1baa9b06 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/web/endpoints/DataServlet.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/web/endpoints/DataServlet.java @@ -37,7 +37,7 @@ import com.yahoo.bard.webservice.logging.blocks.BardQueryInfo; import com.yahoo.bard.webservice.logging.blocks.DataRequest; import com.yahoo.bard.webservice.logging.blocks.DruidFilterInfo; -import com.yahoo.bard.webservice.table.Table; +import com.yahoo.bard.webservice.table.LogicalTable; import com.yahoo.bard.webservice.table.resolver.NoMatchFoundException; import com.yahoo.bard.webservice.util.Either; import com.yahoo.bard.webservice.web.DataApiRequest; @@ -205,7 +205,7 @@ private void logRequestMetrics(DataApiRequest request, Boolean readCache, DruidQ } // Log table metric - Table table = request.getTable(); + LogicalTable table = request.getTable(); REGISTRY.meter("request.logical.table." + table.getName() + "." 
+ table.getGranularity()).mark(); RequestLog.record(new BardQueryInfo(druidQuery.getQueryType().toJson(), false)); diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/web/endpoints/DimensionsServlet.java b/fili-core/src/main/java/com/yahoo/bard/webservice/web/endpoints/DimensionsServlet.java index 91049df3d3..5055223b5b 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/web/endpoints/DimensionsServlet.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/web/endpoints/DimensionsServlet.java @@ -25,6 +25,7 @@ import com.codahale.metrics.annotation.Timed; import com.fasterxml.jackson.core.JsonProcessingException; +import com.google.common.collect.Streams; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -102,7 +103,7 @@ public DimensionsServlet( * "dimensions": {@literal } * } * - * @see DimensionsServlet#getDimensionListSummaryView(Collection, UriInfo) + * @see DimensionsServlet#getDimensionListSummaryView(Iterable, UriInfo) */ @GET @Timed @@ -339,11 +340,11 @@ public Response getDimensionRows( * * @return Summary list view of the dimensions */ - public static Set> getDimensionListSummaryView( - Collection dimensions, + public static LinkedHashSet> getDimensionListSummaryView( + Iterable dimensions, final UriInfo uriInfo ) { - return dimensions.stream() + return Streams.stream(dimensions) .map(dimension -> getDimensionSummaryView(dimension, uriInfo)) .collect(Collectors.toCollection(LinkedHashSet::new)); } diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/web/endpoints/JobsServlet.java b/fili-core/src/main/java/com/yahoo/bard/webservice/web/endpoints/JobsServlet.java index 62368df035..45c4819c47 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/web/endpoints/JobsServlet.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/web/endpoints/JobsServlet.java @@ -502,7 +502,7 @@ protected Observable handlePreResponseWithError( return paginationParameters .map(pageParams -> new AllPagesPagination<>(preResponse.getResultSet(), pageParams)) .map(page -> new PreResponse( - new ResultSet(page.getPageOfData(), preResponse.getResultSet().getSchema()), + new ResultSet(preResponse.getResultSet().getSchema(), page.getPageOfData()), addPaginationInfoToResponseContext(responseContext, uriInfo, page) )) .map(Observable::just) diff --git a/fili-core/src/main/java/com/yahoo/bard/webservice/web/responseprocessors/ResultSetResponseProcessor.java b/fili-core/src/main/java/com/yahoo/bard/webservice/web/responseprocessors/ResultSetResponseProcessor.java index 247ee76819..da0c306e40 100644 --- a/fili-core/src/main/java/com/yahoo/bard/webservice/web/responseprocessors/ResultSetResponseProcessor.java +++ b/fili-core/src/main/java/com/yahoo/bard/webservice/web/responseprocessors/ResultSetResponseProcessor.java @@ -2,29 +2,36 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. 
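A change threaded through several hunks here, and called out in the changelog, is the swapped `ResultSet` constructor order: schema first, result rows second. A minimal, hypothetical sketch of the new call shape, borrowing the `ResultSetSchema` construction used in the test specs further down; the class and method names are illustrative:

```java
import com.yahoo.bard.webservice.data.Result;
import com.yahoo.bard.webservice.data.ResultSet;
import com.yahoo.bard.webservice.data.ResultSetSchema;

import java.util.Collections;
import java.util.List;

import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY;

public class ResultSetSketch {
    public static ResultSet emptyDailyResultSet() {
        // Schema first (granularity plus columns), then the list of result rows.
        ResultSetSchema schema = new ResultSetSchema(DAY, Collections.emptySet());
        List<Result> rows = Collections.emptyList();
        return new ResultSet(schema, rows);
    }
}
```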
package com.yahoo.bard.webservice.web.responseprocessors; +import static com.yahoo.bard.webservice.web.ErrorMessageFormat.RESULT_MAPPING_FAILURE; import static com.yahoo.bard.webservice.web.responseprocessors.ResponseContextKeys.API_METRIC_COLUMN_NAMES; import static com.yahoo.bard.webservice.web.responseprocessors.ResponseContextKeys.HEADERS; import static com.yahoo.bard.webservice.web.responseprocessors.ResponseContextKeys.REQUESTED_API_DIMENSION_FIELDS; import com.yahoo.bard.webservice.application.ObjectMappersSuite; +import com.yahoo.bard.webservice.async.ResponseException; import com.yahoo.bard.webservice.data.DruidResponseParser; import com.yahoo.bard.webservice.data.HttpResponseMaker; import com.yahoo.bard.webservice.data.ResultSet; +import com.yahoo.bard.webservice.data.ResultSetSchema; +import com.yahoo.bard.webservice.data.dimension.DimensionColumn; import com.yahoo.bard.webservice.data.dimension.DimensionField; import com.yahoo.bard.webservice.data.metric.LogicalMetric; +import com.yahoo.bard.webservice.data.metric.MetricColumn; import com.yahoo.bard.webservice.druid.client.FailureCallback; import com.yahoo.bard.webservice.druid.client.HttpErrorCallback; +import com.yahoo.bard.webservice.druid.model.aggregation.Aggregation; +import com.yahoo.bard.webservice.druid.model.postaggregation.PostAggregation; import com.yahoo.bard.webservice.druid.model.query.DruidAggregationQuery; import com.yahoo.bard.webservice.druid.model.query.Granularity; -import com.yahoo.bard.webservice.async.ResponseException; import com.yahoo.bard.webservice.logging.RequestLog; -import com.yahoo.bard.webservice.table.ZonedSchema; +import com.yahoo.bard.webservice.table.Column; import com.yahoo.bard.webservice.web.DataApiRequest; import com.yahoo.bard.webservice.web.PageNotFoundException; import com.yahoo.bard.webservice.web.PreResponse; import com.fasterxml.jackson.databind.JsonNode; +import org.joda.time.DateTimeZone; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -34,7 +41,9 @@ import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.Map; +import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.ws.rs.core.Response.Status; @@ -87,10 +96,7 @@ public HttpErrorCallback getErrorCallback(DruidAggregationQuery druidQuery) { public void processResponse(JsonNode json, DruidAggregationQuery druidQuery, LoggingContext metadata) { try { RequestLog.restore(metadata.getRequestLog()); - ZonedSchema resultSetSchema = druidResponseParser.buildSchema( - druidQuery, granularity, apiRequest.getTimeZone()); - ResultSet resultSet = druidResponseParser.parse(json, resultSetSchema, druidQuery.getQueryType()); - + ResultSet resultSet = buildResultSet(json, druidQuery, apiRequest.getTimeZone()); resultSet = mapResultSet(resultSet); LinkedHashSet apiMetricColumnNames = apiRequest.getLogicalMetrics().stream() @@ -120,6 +126,14 @@ public void processResponse(JsonNode json, DruidAggregationQuery druidQuery, invalidPage, getObjectMappers().getMapper().writer() )); + } catch (IllegalStateException ise) { + LOG.error(RESULT_MAPPING_FAILURE.logFormat(ise.getMessage())); + responseEmitter.onError(new ResponseException( + Status.INTERNAL_SERVER_ERROR, + druidQuery, + new Exception(RESULT_MAPPING_FAILURE.format(ise.getMessage())), + getObjectMappers().getMapper().writer() + )); } catch (Exception exception) { LOG.error("Exception processing druid call in success", exception); responseEmitter.onError(new ResponseException( @@ -130,4 +144,33 @@ 
public void processResponse(JsonNode json, DruidAggregationQuery druidQuery, )); } } + + /** + * Build a result set using the api request time grain. + * + * @param json The json representing the druid response. + * @param druidQuery The druid query being processed + * @param dateTimeZone The date time zone for parsing result rows + * + * @return The initial result set from the json node. + */ + public ResultSet buildResultSet(JsonNode json, DruidAggregationQuery druidQuery, DateTimeZone dateTimeZone) { + + LinkedHashSet columns = Stream.of( + druidQuery.getDimensions().stream() + .map(DimensionColumn::new), + druidQuery.getAggregations().stream() + .map(Aggregation::getName) + .map(MetricColumn::new), + druidQuery.getPostAggregations().stream() + .map(PostAggregation::getName) + .map(MetricColumn::new) + + ).flatMap(Function.identity()).collect(Collectors.toCollection(LinkedHashSet::new)); + + + ResultSetSchema resultSetSchema = new ResultSetSchema(granularity, columns); + + return druidResponseParser.parse(json, resultSetSchema, druidQuery.getQueryType(), dateTimeZone); + } } diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/application/ConfigurationLoaderSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/application/ConfigurationLoaderSpec.groovy index 1b4147b744..d5c932bc08 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/application/ConfigurationLoaderSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/application/ConfigurationLoaderSpec.groovy @@ -9,8 +9,8 @@ import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.WEEK import com.yahoo.bard.webservice.data.config.ConfigurationLoader import com.yahoo.bard.webservice.data.config.dimension.DimensionConfig -import com.yahoo.bard.webservice.data.config.dimension.KeyValueStoreDimensionLoader import com.yahoo.bard.webservice.data.config.dimension.TestDimensions +import com.yahoo.bard.webservice.data.config.dimension.TypeAwareDimensionLoader import com.yahoo.bard.webservice.data.config.metric.TestMetricLoader import com.yahoo.bard.webservice.data.config.names.TestApiMetricName import com.yahoo.bard.webservice.data.config.names.TestDruidMetricName @@ -54,7 +54,7 @@ class ConfigurationLoaderSpec extends Specification { FieldConverterSupplier.sketchConverter = new SketchFieldConverter() LinkedHashSet dimensions = new TestDimensions().getAllDimensionConfigurations() loader = new ConfigurationLoader( - new KeyValueStoreDimensionLoader(dimensions), + new TypeAwareDimensionLoader(dimensions), new TestMetricLoader(), new TestTableLoader() ) diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/async/preresponses/stores/PreResponseTestingUtils.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/async/preresponses/stores/PreResponseTestingUtils.groovy index e46e616cce..eba3d56a03 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/async/preresponses/stores/PreResponseTestingUtils.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/async/preresponses/stores/PreResponseTestingUtils.groovy @@ -6,12 +6,12 @@ import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import com.yahoo.bard.webservice.data.Result import com.yahoo.bard.webservice.data.ResultSet +import com.yahoo.bard.webservice.data.ResultSetSchema import com.yahoo.bard.webservice.data.dimension.Dimension import com.yahoo.bard.webservice.data.dimension.DimensionColumn import com.yahoo.bard.webservice.data.dimension.DimensionField import 
com.yahoo.bard.webservice.data.dimension.DimensionRow import com.yahoo.bard.webservice.data.metric.MetricColumn -import com.yahoo.bard.webservice.table.Schema import com.yahoo.bard.webservice.web.PreResponse import com.yahoo.bard.webservice.web.responseprocessors.ResponseContext @@ -38,8 +38,8 @@ class PreResponseTestingUtils { Map> dimensionToDimensionFieldMap = new HashMap<>() ResponseContext responseContext = new ResponseContext(dimensionToDimensionFieldMap) - Schema schema = new Schema(DAY) + ResultSetSchema schema = new ResultSetSchema(DAY, Collections.emptySet()) - return new PreResponse(new ResultSet([rs], schema), responseContext) + return new PreResponse(new ResultSet(schema, [rs]), responseContext) } } diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/async/workflows/DefaultAsynchronousWorkflowsBuilderSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/async/workflows/DefaultAsynchronousWorkflowsBuilderSpec.groovy index ea054b7cb7..db9d2f1195 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/async/workflows/DefaultAsynchronousWorkflowsBuilderSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/async/workflows/DefaultAsynchronousWorkflowsBuilderSpec.groovy @@ -12,9 +12,9 @@ import com.yahoo.bard.webservice.async.jobs.jobrows.JobRow import com.yahoo.bard.webservice.async.jobs.stores.ApiJobStore import com.yahoo.bard.webservice.async.preresponses.stores.PreResponseStore import com.yahoo.bard.webservice.data.ResultSet +import com.yahoo.bard.webservice.data.ResultSetSchema import com.yahoo.bard.webservice.druid.model.query.AllGranularity import com.yahoo.bard.webservice.druid.model.query.DruidAggregationQuery -import com.yahoo.bard.webservice.table.Schema import com.yahoo.bard.webservice.util.Either import com.yahoo.bard.webservice.web.PreResponse import com.yahoo.bard.webservice.web.responseprocessors.ResponseContext @@ -175,7 +175,10 @@ class DefaultAsynchronousWorkflowsBuilderSpec extends Specification { then: "The error message is stored in the PreResponseStore" 1 * preResponseStore.save( "greg0", - new PreResponse(new ResultSet([], new Schema(AllGranularity.INSTANCE)), expectedResponseContext) + new PreResponse( + new ResultSet(new ResultSetSchema(AllGranularity.INSTANCE, [] as Set), []), + expectedResponseContext + ) ) >> Observable.just("greg0") and: "The JobRow's status is updated with with error" diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/DruidQueryBuilderSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/DruidQueryBuilderSpec.groovy index c9fa96f3e5..65cd912182 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/DruidQueryBuilderSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/DruidQueryBuilderSpec.groovy @@ -28,6 +28,7 @@ import com.yahoo.bard.webservice.druid.model.query.DruidAggregationQuery import com.yahoo.bard.webservice.druid.model.query.GroupByQuery import com.yahoo.bard.webservice.druid.model.query.TimeSeriesQuery import com.yahoo.bard.webservice.druid.model.query.TopNQuery +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.table.PhysicalTable import com.yahoo.bard.webservice.table.resolver.DefaultPhysicalTableResolver import com.yahoo.bard.webservice.web.ApiFilter @@ -129,7 +130,7 @@ class DruidQueryBuilderSpec extends Specification { def "Test recursive buildQueryMethods"() { setup: Set apiSet = (["abie1234", "abde1129"].collect() { apiFilters.get(it) }) as Set - 
PhysicalTable tab = new PhysicalTable("tab1", DAY.buildZonedTimeGrain(UTC), [:]) + PhysicalTable tab = new ConcretePhysicalTable("tab1", DAY.buildZonedTimeGrain(UTC), [] as Set, [:]) Filter filter = FILTER_BUILDER.buildFilters([(resources.d3): apiSet]) ZonedTimeGrain granularity = WEEK.buildZonedTimeGrain(UTC) Set dimension = [resources.d1] as Set @@ -224,7 +225,12 @@ class DruidQueryBuilderSpec extends Specification { when: Set apiSet = (["abie1234", "abde1129"].collect() { apiFilters.get(it) }) as Set - PhysicalTable tab = new PhysicalTable("tab1", DAY.buildZonedTimeGrain(UTC), [:]) + PhysicalTable tab = new ConcretePhysicalTable( + "tab1", + DAY.buildZonedTimeGrain(UTC), + [] as Set, + [:] + ) Filter filter = FILTER_BUILDER.buildFilters([(resources.d3): apiSet]) ZonedTimeGrain granularity = YEAR.buildZonedTimeGrain(UTC) TemplateDruidQuery simpleQuery = resources.simpleTemplateWithGrainQuery diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/DruidResponseParserSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/DruidResponseParserSpec.groovy index 83fc1a3ecc..e0aaa1170b 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/DruidResponseParserSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/DruidResponseParserSpec.groovy @@ -2,13 +2,9 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.data -import com.yahoo.bard.webservice.druid.model.QueryType -import com.yahoo.bard.webservice.table.PhysicalTable - import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import com.yahoo.bard.webservice.data.dimension.BardDimensionField -import com.yahoo.bard.webservice.data.dimension.Dimension import com.yahoo.bard.webservice.data.dimension.DimensionColumn import com.yahoo.bard.webservice.data.dimension.DimensionDictionary import com.yahoo.bard.webservice.data.dimension.DimensionField @@ -17,12 +13,9 @@ import com.yahoo.bard.webservice.data.dimension.impl.KeyValueStoreDimension import com.yahoo.bard.webservice.data.dimension.impl.ScanSearchProviderManager import com.yahoo.bard.webservice.data.metric.MetricColumn import com.yahoo.bard.webservice.druid.model.DefaultQueryType -import com.yahoo.bard.webservice.druid.model.aggregation.Aggregation -import com.yahoo.bard.webservice.druid.model.postaggregation.PostAggregation -import com.yahoo.bard.webservice.druid.model.query.DruidAggregationQuery -import com.yahoo.bard.webservice.druid.model.query.Granularity +import com.yahoo.bard.webservice.druid.model.QueryType +import com.yahoo.bard.webservice.table.Column import com.yahoo.bard.webservice.table.Schema -import com.yahoo.bard.webservice.table.ZonedSchema import com.fasterxml.jackson.core.JsonFactory import com.fasterxml.jackson.core.JsonParser @@ -33,7 +26,6 @@ import com.fasterxml.jackson.datatype.jdk8.Jdk8Module import org.joda.time.DateTime import org.joda.time.DateTimeZone -import spock.lang.Shared import spock.lang.Specification import spock.lang.Unroll @@ -41,15 +33,22 @@ class DruidResponseParserSpec extends Specification { private static final ObjectMapper MAPPER = new ObjectMapper() .registerModule(new Jdk8Module().configureAbsentsAsNulls(false)) - @Shared DimensionDictionary dimensionDictionary + DimensionDictionary dimensionDictionary + + Set dimensionColumns + DimensionColumn ageColumn + DimensionColumn genderColumn + DimensionColumn unknownColumn + DruidResponseParser responseParser def setup() { + 
responseParser = new DruidResponseParser() + def dimensionNames = [ "ageBracket", "gender", "unknown" ] - LinkedHashSet dimensionFields = new LinkedHashSet<>() dimensionFields.add(BardDimensionField.ID) dimensionFields.add(BardDimensionField.DESC) @@ -78,10 +77,16 @@ class DruidResponseParserSpec extends Specification { addDimensionRow(BardDimensionField.makeDimensionRow(it, "u", "u")) addDimensionRow(BardDimensionField.makeDimensionRow(it, "f", "u")) } + + ageColumn = new DimensionColumn(dimensionDictionary.findByApiName("ageBracket")) + genderColumn = new DimensionColumn(dimensionDictionary.findByApiName("gender")) + unknownColumn = new DimensionColumn(dimensionDictionary.findByApiName("unknown")) + dimensionColumns = [ ageColumn, genderColumn, unknownColumn] } def "parse group by with numeric metrics only into a ResultSet"() { given: + DimensionField description = BardDimensionField.DESC String jsonText = """ [ { @@ -125,7 +130,10 @@ class DruidResponseParserSpec extends Specification { JsonNode jsonResult = MAPPER.readTree(parser) Schema schema = buildSchema(["pageViews", "time_spent"]) - ResultSet resultSet = new DruidResponseParser().parse(jsonResult, schema, DefaultQueryType.GROUP_BY) + Column pageViewsColumn = schema.getColumn("pageViews", MetricColumn.class).get() + Column timeSpentColumn = schema.getColumn("time_spent", MetricColumn.class).get() + + ResultSet resultSet = responseParser.parse(jsonResult, schema, DefaultQueryType.GROUP_BY, DateTimeZone.UTC) expect: resultSet != null @@ -133,19 +141,18 @@ class DruidResponseParserSpec extends Specification { resultSet.getSchema() == schema and: - Result result = resultSet.get(0) - result.getDimensionRow(schema.columns.toArray()[0])?.get(BardDimensionField.DESC) == "4" - result.getDimensionRow(schema.columns.toArray()[1])?.get(BardDimensionField.DESC) == "u" - result.getDimensionRow(schema.columns.toArray()[2])?.get(BardDimensionField.DESC) == "" - result.getDimensionRow(schema.columns.toArray()[2])?.get(BardDimensionField.ID) == "foo" - resultSet[0].getMetricValueAsNumber(schema.getColumn("pageViews")) == 1 as BigDecimal - resultSet[0].getMetricValueAsNumber(schema.getColumn("time_spent")) == 2 as BigDecimal + Result firstResult = resultSet.get(0) + firstResult.getDimensionRow(genderColumn)?.get(description) == "u" + firstResult.getDimensionRow(ageColumn)?.get(description) == "4" + firstResult.getDimensionRow(unknownColumn)?.get(description) == "" + firstResult.getDimensionRow(unknownColumn)?.get(BardDimensionField.ID) == "foo" + firstResult.getMetricValueAsNumber(pageViewsColumn) == 1 as BigDecimal + firstResult.getMetricValueAsNumber(timeSpentColumn) == 2 as BigDecimal and: Result resultWithNullDimensionKey = resultSet.get(2) - resultWithNullDimensionKey.getDimensionRow(schema.columns.toArray()[0])?.get(BardDimensionField.ID) == "" - resultWithNullDimensionKey.getDimensionRow(schema.columns.toArray()[0])?.get(BardDimensionField.DESC) == - "unknown" + resultWithNullDimensionKey.getDimensionRow(ageColumn)?.get(BardDimensionField.ID) == "" + resultWithNullDimensionKey.getDimensionRow(ageColumn)?.get(BardDimensionField.DESC) == "unknown" } def "parse sample top N ResultSet with only numeric metrics"() { @@ -178,10 +185,8 @@ class DruidResponseParserSpec extends Specification { JsonNode jsonResult = MAPPER.readTree(parser) /* build Schema */ - ZonedSchema schema = new ZonedSchema(DAY, DateTimeZone.UTC) - DimensionColumn.addNewDimensionColumn(schema, dimensionDictionary.findByApiName("ageBracket")) - 
MetricColumn.addNewMetricColumn(schema, "pageViews") - ResultSet resultSet = new DruidResponseParser().parse(jsonResult, schema, DefaultQueryType.TOP_N) + ResultSetSchema schema = new ResultSetSchema(DAY, [new DimensionColumn(dimensionDictionary.findByApiName("ageBracket")), new MetricColumn("pageViews")].toSet()) + ResultSet resultSet = responseParser.parse(jsonResult, schema, DefaultQueryType.TOP_N, DateTimeZone.UTC) expect: resultSet != null @@ -190,12 +195,12 @@ class DruidResponseParserSpec extends Specification { and: Result result = resultSet.get(0) - result.getDimensionRow(schema.columns.toArray()[0])?.get(BardDimensionField.DESC) == "4" + result.getDimensionRow((DimensionColumn) schema.columns.toArray()[1])?.get(BardDimensionField.DESC) == "4" and: Result resultWithNullDimensionKey = resultSet.get(2) - resultWithNullDimensionKey.getDimensionRow(schema.columns.toArray()[0])?.get(BardDimensionField.ID) == "" - resultWithNullDimensionKey.getDimensionRow(schema.columns.toArray()[0])?.get(BardDimensionField.DESC) == + resultWithNullDimensionKey.getDimensionRow((DimensionColumn) schema.columns.toArray()[1])?.get(BardDimensionField.ID) == "" + resultWithNullDimensionKey.getDimensionRow((DimensionColumn) schema.columns.toArray()[1])?.get(BardDimensionField.DESC) == "unknown" } @@ -224,11 +229,8 @@ class DruidResponseParserSpec extends Specification { JsonNode jsonResult = MAPPER.readTree(parser) /* build Schema */ - ZonedSchema schema = new ZonedSchema(DAY, DateTimeZone.UTC) - MetricColumn.addNewMetricColumn(schema, "pageViews") - MetricColumn.addNewMetricColumn(schema, "lookback_pageViews") - MetricColumn.addNewMetricColumn(schema, "retentionPageViews") - ResultSet resultSet = new DruidResponseParser().parse(jsonResult, schema, DefaultQueryType.LOOKBACK) + ResultSetSchema schema = new ResultSetSchema(DAY, [new MetricColumn("pageViews"), new MetricColumn("lookback_pageViews"), new MetricColumn("retentionPageViews")].toSet()) + ResultSet resultSet = responseParser.parse(jsonResult, schema, DefaultQueryType.LOOKBACK, DateTimeZone.UTC) expect: resultSet != null @@ -236,9 +238,9 @@ class DruidResponseParserSpec extends Specification { resultSet.getSchema() == schema and: - resultSet[0].getMetricValueAsNumber(schema.getColumn("retentionPageViews")) == 1 as BigDecimal - resultSet[0].getMetricValueAsNumber(schema.getColumn("lookback_pageViews")) == 2 as BigDecimal - resultSet[0].getMetricValueAsNumber(schema.getColumn("pageViews")) == 1 as BigDecimal + resultSet[0].getMetricValueAsNumber(schema.getColumn("retentionPageViews", MetricColumn.class).get()) == 1 as BigDecimal + resultSet[0].getMetricValueAsNumber(schema.getColumn("lookback_pageViews", MetricColumn.class).get()) == 2 as BigDecimal + resultSet[0].getMetricValueAsNumber(schema.getColumn("pageViews", MetricColumn.class).get()) == 1 as BigDecimal } def "parse lookback with groupBy datasource into a Resultset"() { @@ -274,7 +276,10 @@ class DruidResponseParserSpec extends Specification { JsonNode jsonResult = MAPPER.readTree(parser) Schema schema = buildSchema(["pageViews", "lookback_pageViews", "retentionPageViews"]) - ResultSet resultSet = new DruidResponseParser().parse(jsonResult, schema, DefaultQueryType.GROUP_BY) + ResultSet resultSet = responseParser.parse(jsonResult, schema, DefaultQueryType.GROUP_BY, DateTimeZone.UTC) + Column pageViewsColumn = schema.getColumn("pageViews", MetricColumn.class).get() + Column lookbackPageviewsColumn = schema.getColumn("lookback_pageViews", MetricColumn.class).get() + Column retentionPageviewsColumn 
= schema.getColumn("retentionPageViews", MetricColumn.class).get() expect: resultSet != null @@ -282,14 +287,14 @@ class DruidResponseParserSpec extends Specification { resultSet.getSchema() == schema and: - Result result = resultSet.get(0) - result.getDimensionRow(schema.columns.toArray()[0])?.get(BardDimensionField.DESC) == "4" - result.getDimensionRow(schema.columns.toArray()[1])?.get(BardDimensionField.DESC) == "u" - result.getDimensionRow(schema.columns.toArray()[2])?.get(BardDimensionField.DESC) == "" - result.getDimensionRow(schema.columns.toArray()[2])?.get(BardDimensionField.ID) == "foo" - resultSet[0].getMetricValueAsNumber(schema.getColumn("pageViews")) == 1 as BigDecimal - resultSet[0].getMetricValueAsNumber(schema.getColumn("lookback_pageViews")) == 2 as BigDecimal - resultSet[0].getMetricValueAsNumber(schema.getColumn("retentionPageViews")) == 1 as BigDecimal + Result firstResult = resultSet.get(0) + firstResult.getDimensionRow(genderColumn)?.get(BardDimensionField.DESC) == "u" + firstResult.getDimensionRow(ageColumn)?.get(BardDimensionField.DESC) == "4" + firstResult.getDimensionRow(unknownColumn)?.get(BardDimensionField.DESC) == "" + firstResult.getDimensionRow(unknownColumn)?.get(BardDimensionField.ID) == "foo" + firstResult.getMetricValueAsNumber(pageViewsColumn) == 1 as BigDecimal + firstResult.getMetricValueAsNumber(lookbackPageviewsColumn) == 2 as BigDecimal + firstResult.getMetricValueAsNumber(retentionPageviewsColumn) == 1 as BigDecimal } @Unroll @@ -298,7 +303,7 @@ class DruidResponseParserSpec extends Specification { String druidResponse = buildResponse(queryType, ['"luckyNumbers"':'"1, 3, 7"', '"unluckyNumbers"': '"2"']) when: "We build a result set from the Druid response" - ZonedSchema schema = buildSchema(["luckyNumbers", "unluckyNumbers"]) + ResultSetSchema schema = buildSchema(["luckyNumbers", "unluckyNumbers"]) ResultSet resultSet = buildResultSet(druidResponse, schema, queryType) then: "The result set was built correctly" @@ -307,8 +312,8 @@ class DruidResponseParserSpec extends Specification { resultSet.getSchema() == schema and: "The metrics were parsed correctly from the Druid response" - resultSet.get(0).getMetricValueAsString(schema.getColumn("luckyNumbers") as MetricColumn) == "1, 3, 7" - resultSet.get(0).getMetricValueAsString(schema.getColumn("unluckyNumbers") as MetricColumn) == "2" + resultSet.get(0).getMetricValueAsString(schema.getColumn("luckyNumbers", MetricColumn.class).get() as MetricColumn) == "1, 3, 7" + resultSet.get(0).getMetricValueAsString(schema.getColumn("unluckyNumbers", MetricColumn.class).get() as MetricColumn) == "2" where: queryType << [DefaultQueryType.GROUP_BY, DefaultQueryType.TOP_N, DefaultQueryType.TIMESERIES] @@ -321,7 +326,7 @@ class DruidResponseParserSpec extends Specification { String druidResponse = buildResponse(queryType, ['"true"': true, '"false"': false]) when: "We build a result set from the Druid response" - ZonedSchema schema = buildSchema(["true", "false"]) + ResultSetSchema schema = buildSchema(["true", "false"]) ResultSet resultSet = buildResultSet(druidResponse, schema, queryType) then: "The result set was built correctly" @@ -330,8 +335,8 @@ class DruidResponseParserSpec extends Specification { resultSet.getSchema() == schema and: "The metrics were parsed correctly from the Druid response" - resultSet.get(0).getMetricValueAsBoolean(schema.getColumn("true", MetricColumn.class)) - !resultSet.get(0).getMetricValueAsBoolean(schema.getColumn("false", MetricColumn.class)) + 
resultSet.get(0).getMetricValueAsBoolean(schema.getColumn("true", MetricColumn.class).get()) + !resultSet.get(0).getMetricValueAsBoolean(schema.getColumn("false", MetricColumn.class).get()) where: queryType << [DefaultQueryType.GROUP_BY, DefaultQueryType.TOP_N, DefaultQueryType.TIMESERIES] @@ -343,7 +348,7 @@ class DruidResponseParserSpec extends Specification { String druidResponse = buildResponse(queryType, ['"null"': null]) when: "We try to build a result set from the Druid response" - ZonedSchema schema = buildSchema(["null"]) + ResultSetSchema schema = buildSchema(["null"]) ResultSet resultSet = buildResultSet(druidResponse, schema, queryType) then: "The result set was built correctly" @@ -352,7 +357,7 @@ class DruidResponseParserSpec extends Specification { resultSet.getSchema() == schema and: "The metrics were parsed correctly from the Druid response" - resultSet.get(0).getMetricValue(schema.getColumn("null", MetricColumn.class)) == null + resultSet.get(0).getMetricValue(schema.getColumn("null", MetricColumn.class).get()) == null where: queryType << [DefaultQueryType.GROUP_BY, DefaultQueryType.TOP_N, DefaultQueryType.TIMESERIES] @@ -372,7 +377,7 @@ class DruidResponseParserSpec extends Specification { ) when: "We try to build a result set from the Druid response" - ZonedSchema schema = buildSchema(["luckyNumbers", "unluckyNumbers"]) + ResultSetSchema schema = buildSchema(["luckyNumbers", "unluckyNumbers"]) ResultSet resultSet = buildResultSet(druidResponse, schema, queryType) then: "The result set was built correctly" @@ -381,9 +386,9 @@ class DruidResponseParserSpec extends Specification { resultSet.getSchema() == schema and: "The metrics were parsed correctly from the Druid response" - resultSet.get(0).getMetricValueAsJsonNode(schema.getColumn("luckyNumbers", MetricColumn.class)) == + resultSet.get(0).getMetricValueAsJsonNode(schema.getColumn("luckyNumbers", MetricColumn.class).get()) == MAPPER.readTree(luckyNumberNode) - resultSet.get(0).getMetricValueAsJsonNode(schema.getColumn("unluckyNumbers", MetricColumn.class)) == + resultSet.get(0).getMetricValueAsJsonNode(schema.getColumn("unluckyNumbers", MetricColumn.class).get()) == MAPPER.readTree(unluckyNumberNode) where: @@ -392,54 +397,15 @@ class DruidResponseParserSpec extends Specification { def "Attempting to parse an unknown query type throws an UnsupportedOperationException"() { given: - DruidResponseParser responseParser = new DruidResponseParser() QueryType mysteryType = Mock(QueryType) when: - responseParser.parse(MAPPER.readTree("[]"), Mock(ZonedSchema), mysteryType) + responseParser.parse(MAPPER.readTree("[]"), Mock(ResultSetSchema), mysteryType, DateTimeZone.UTC) then: thrown(UnsupportedOperationException) } - - def "Build the schema from the query"() { - setup: - DruidResponseParser responseParser = new DruidResponseParser() - DruidAggregationQuery query = Mock(DruidAggregationQuery) - Granularity granularity = Mock(Granularity) - DateTimeZone dateTimeZone = Mock(DateTimeZone) - Dimension dim = Mock(Dimension) { getApiName() >> "dimension1" } - Aggregation agg = Mock(Aggregation) { getName() >> "agg1" } - PostAggregation postAgg = Mock(PostAggregation) { getName() >> "postAgg1" } - - query.getAggregations() >> { - [ - agg - ] - } - query.getPostAggregations() >> { - [ - postAgg - ] - } - query.getDimensions() >> { - [ - dim - ] - } - - when: - ZonedSchema schema = responseParser.buildSchema(query, granularity, dateTimeZone) - - then: - schema.dateTimeZone == dateTimeZone - schema.granularity == granularity - 
schema.columns.size() == 3 - schema.getColumn("dimension1").dimension == dim - schema.getColumn("agg1") != null - schema.getColumn("postAgg1") != null - } String buildResponse(DefaultQueryType queryType, Map complexMetrics) { //Strip off the brackets from the String representation of the Map. @@ -478,19 +444,15 @@ class DruidResponseParserSpec extends Specification { } } - ResultSet buildResultSet(String druidResponse, ZonedSchema schema, DefaultQueryType queryType) { + ResultSet buildResultSet(String druidResponse, ResultSetSchema schema, DefaultQueryType queryType) { JsonNode jsonResult = MAPPER.readTree(new JsonFactory().createParser(druidResponse)) - return new DruidResponseParser().parse(jsonResult, schema, queryType) + return responseParser.parse(jsonResult, schema, queryType, DateTimeZone.UTC) } - ZonedSchema buildSchema(List metricNames) { - Schema schema = new ZonedSchema(DAY, DateTimeZone.UTC) - DimensionColumn.addNewDimensionColumn(schema, dimensionDictionary.findByApiName("ageBracket")) - DimensionColumn.addNewDimensionColumn(schema, dimensionDictionary.findByApiName("gender")) - DimensionColumn.addNewDimensionColumn(schema, dimensionDictionary.findByApiName("unknown")) + ResultSetSchema buildSchema(List metricNames) { metricNames.each { - MetricColumn.addNewMetricColumn(schema, it) + dimensionColumns.add(new MetricColumn(it)) } - return schema + new ResultSetSchema(DAY, dimensionColumns) } } diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/HttpResponseMakerSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/HttpResponseMakerSpec.groovy index db5a2c72a8..a036737e16 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/HttpResponseMakerSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/HttpResponseMakerSpec.groovy @@ -14,7 +14,6 @@ import com.yahoo.bard.webservice.data.metric.LogicalMetric import com.yahoo.bard.webservice.data.metric.MetricColumn import com.yahoo.bard.webservice.data.metric.mappers.ResultSetMapper import com.yahoo.bard.webservice.druid.model.query.DruidAggregationQuery -import com.yahoo.bard.webservice.table.Schema import com.yahoo.bard.webservice.web.DataApiRequest import com.yahoo.bard.webservice.web.PreResponse import com.yahoo.bard.webservice.web.ResponseFormatType @@ -79,8 +78,7 @@ class HttpResponseMakerSpec extends Specification { apiRequest.getFormat() >> ResponseFormatType.JSON apiRequest.getUriInfo() >> uriInfo - Schema schema = new Schema(DAY) - MetricColumn.addNewMetricColumn(schema, "lm1") + ResultSetSchema schema = new ResultSetSchema(DAY, [new MetricColumn("lm1")] as Set) resultSet = Mock(ResultSet) resultSet.getSchema() >> schema diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/PartialDataHandlerSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/PartialDataHandlerSpec.groovy index 0c22242bca..ec171eb6b2 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/PartialDataHandlerSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/PartialDataHandlerSpec.groovy @@ -16,6 +16,7 @@ import com.yahoo.bard.webservice.druid.model.query.AllGranularity import com.yahoo.bard.webservice.druid.model.query.Granularity import com.yahoo.bard.webservice.druid.model.query.GroupByQuery import com.yahoo.bard.webservice.metadata.SegmentMetadata +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.table.PhysicalTable import 
com.yahoo.bard.webservice.util.SimplifiedIntervalList import com.yahoo.bard.webservice.web.DataApiRequest @@ -34,7 +35,12 @@ class PartialDataHandlerSpec extends Specification { static boolean originalConfig = PERMISSIVE_COLUMN_AVAILABILITY.isOn() Dimension dim1, dim2, dim3 - Set tables = [new PhysicalTable("basefact_network", DAY.buildZonedTimeGrain(UTC), ["userDeviceType" : "user_device_type"])] as Set + Set tables = [new ConcretePhysicalTable( + "basefact_network", + DAY.buildZonedTimeGrain(UTC), + [] as Set, + ["userDeviceType": "user_device_type"] + )] as Set Set columnNames GroupByQuery groupByQuery = Mock(GroupByQuery.class) @@ -68,7 +74,15 @@ class PartialDataHandlerSpec extends Specification { // setup mock inner query GroupByQuery innerQuery = Mock(GroupByQuery.class) - innerQuery.getDataSource() >> { new TableDataSource(new PhysicalTable("basefact_network", DAY.buildZonedTimeGrain(UTC), ["userDeviceType":"user_device_type"])) } + innerQuery.getDataSource() >> { + new TableDataSource( + new ConcretePhysicalTable( + "basefact_network", DAY.buildZonedTimeGrain(UTC), + [] as Set, + ["userDeviceType": "user_device_type"] + ) + ) + } innerQuery.getDependentFieldNames() >> { ["page_views"] as Set } groupByQuery.getGranularity() >> WEEK @@ -83,7 +97,7 @@ class PartialDataHandlerSpec extends Specification { * starts inside the dim1 hole and goes to the end of the period. */ SegmentMetadata segments = new SegmentMetadata( - [("user_device_type"): buildIntervals(["2014-07-01/2014-07-09","2014-07-11/2014-07-29"]) as LinkedHashSet, + [("userDeviceType"): buildIntervals(["2014-07-01/2014-07-09","2014-07-11/2014-07-29"]) as LinkedHashSet, ("property"): buildIntervals(["2014-07-01/2014-07-29"]) as LinkedHashSet, ("os"): buildIntervals(["2014-07-01/2014-07-29"]) as LinkedHashSet], [("page_views"): buildIntervals(["2014-07-04/2014-07-29"]) as Set] diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/PreResponseDeSerializationSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/PreResponseDeSerializationSpec.groovy index f39c3f9c71..ce46e090c5 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/PreResponseDeSerializationSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/PreResponseDeSerializationSpec.groovy @@ -1,12 +1,12 @@ // Copyright 2016 Yahoo Inc. // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. 
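The PartialDataHandlerSpec hunks above replace the old map-only PhysicalTable constructor with the four-argument ConcretePhysicalTable form: table name, zoned time grain, an explicit column set, and the logical-to-physical column name mapping. A minimal sketch of that call shape, assuming the Fili test classpath (all class and method names are taken from the hunks above, none are invented):

```groovy
import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY
import static org.joda.time.DateTimeZone.UTC

import com.yahoo.bard.webservice.table.ConcretePhysicalTable

// Four arguments: fact table name, grain, column set, apiName -> druidName map.
def table = new ConcretePhysicalTable(
        "basefact_network",                      // physical (Druid) table name
        DAY.buildZonedTimeGrain(UTC),            // grain the facts are stored at
        [] as Set,                               // columns; empty here for brevity
        ["userDeviceType": "user_device_type"]   // logical-to-physical column names
)
assert table.getName() == "basefact_network"
```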
package com.yahoo.bard.webservice.data + import com.yahoo.bard.webservice.application.ObjectMappersSuite import com.yahoo.bard.webservice.data.dimension.DimensionColumn import com.yahoo.bard.webservice.data.dimension.DimensionRow import com.yahoo.bard.webservice.data.metric.MetricColumn import com.yahoo.bard.webservice.data.time.StandardGranularityParser -import com.yahoo.bard.webservice.table.ZonedSchema import com.yahoo.bard.webservice.util.GroovyTestUtils import com.yahoo.bard.webservice.web.PreResponse import com.yahoo.bard.webservice.web.responseprocessors.ResponseContext @@ -77,7 +77,7 @@ class PreResponseDeSerializationSpec extends Specification { def "ZonedSchema de-Serialization from serialized ZonedSchema object validation"() { setup: - ZonedSchema zonedSchema = preResponseDeSerializer.getZonedSchema( + ResultSetSchema zonedSchema = preResponseDeSerializer.getResultSetSchema( objectMappers.getMapper().readTree(getSerializedZonedSchema()) ) diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/QueryBuildingTestingResources.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/QueryBuildingTestingResources.groovy index eb9e64a569..49004be4ef 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/QueryBuildingTestingResources.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/QueryBuildingTestingResources.groovy @@ -1,4 +1,4 @@ -// Copyright 2016 Yahoo Inc. +// Copyright 2017 Yahoo Inc. // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.data @@ -17,18 +17,19 @@ import static org.joda.time.DateTimeZone.UTC import com.yahoo.bard.webservice.data.config.dimension.DimensionConfig import com.yahoo.bard.webservice.data.config.dimension.TestLookupDimensions import com.yahoo.bard.webservice.data.config.dimension.TestRegisteredLookupDimensions +import com.yahoo.bard.webservice.data.config.names.ApiMetricName import com.yahoo.bard.webservice.data.dimension.BardDimensionField import com.yahoo.bard.webservice.data.dimension.Dimension import com.yahoo.bard.webservice.data.dimension.DimensionColumn import com.yahoo.bard.webservice.data.dimension.DimensionDictionary import com.yahoo.bard.webservice.data.dimension.DimensionField -import com.yahoo.bard.webservice.data.dimension.DimensionRow import com.yahoo.bard.webservice.data.dimension.MapStoreManager import com.yahoo.bard.webservice.data.dimension.impl.KeyValueStoreDimension import com.yahoo.bard.webservice.data.dimension.impl.LookupDimension import com.yahoo.bard.webservice.data.dimension.impl.RegisteredLookupDimension import com.yahoo.bard.webservice.data.dimension.impl.ScanSearchProviderManager import com.yahoo.bard.webservice.data.metric.LogicalMetric +import com.yahoo.bard.webservice.data.metric.LogicalMetricColumn import com.yahoo.bard.webservice.data.metric.MetricColumn import com.yahoo.bard.webservice.data.metric.MetricDictionary import com.yahoo.bard.webservice.data.metric.TemplateDruidQuery @@ -37,11 +38,14 @@ import com.yahoo.bard.webservice.data.volatility.DefaultingVolatileIntervalsServ import com.yahoo.bard.webservice.data.volatility.VolatileIntervalsFunction import com.yahoo.bard.webservice.data.volatility.VolatileIntervalsService import com.yahoo.bard.webservice.druid.model.query.AllGranularity +import com.yahoo.bard.webservice.table.Column +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.table.LogicalTable import 
com.yahoo.bard.webservice.table.LogicalTableDictionary import com.yahoo.bard.webservice.table.PhysicalTable import com.yahoo.bard.webservice.table.TableGroup import com.yahoo.bard.webservice.table.TableIdentifier +import com.yahoo.bard.webservice.table.availability.ImmutableAvailability import com.yahoo.bard.webservice.util.SimplifiedIntervalList import org.joda.time.DateTimeZone @@ -49,56 +53,55 @@ import org.joda.time.Interval public class QueryBuildingTestingResources { - // Aggregatable dimensions + // Aggregatable dimensions, numbered for identification public Dimension d1, d2, d3, d4, d5 - // Non-aggregatable dimensions + + // Non-aggregatable dimensions, numbered for identification public Dimension d6, d7, d8, d9, d10, d11, d12, d13 + + // Logical metrics, numbered for identification public LogicalMetric m1, m2, m3, m4, m5, m6 - public Interval interval1 - public Interval interval2 - public Interval interval3 + // Test intervals + public Interval interval1, interval2, interval3 + + // Tables are enumerated by dimension set number, with a d or h suffix for day or hour grain + public ConcretePhysicalTable t1h, t1d, t1hShort, t2h, t3d, t4h1, t4h2, t4d1, t4d2, t5h - public PhysicalTable emptyFirst, partialSecond, wholeThird, emptyLast - public PhysicalTable t1h, t1d, t1hShort, t2h, t3d, t4h1, t4h2, t4d1, t4d2, t5h + // Tables used to test ordering: empty has no availability, partial some, whole the largest availability + public ConcretePhysicalTable emptyFirst, partialSecond, wholeThird, emptyLast - //Volatility testing - public PhysicalTable volatileHourTable = new PhysicalTable("hour", HOUR.buildZonedTimeGrain(UTC), [:]) - public PhysicalTable volatileDayTable = new PhysicalTable("day", DAY.buildZonedTimeGrain(UTC), [:]) + // Tables with non-aggregatable dimensions; numbers indicate the dimension set + public ConcretePhysicalTable tna1236d, tna1237d, tna167d, tna267d + + // Tables with volatile hour and volatile day grains + public ConcretePhysicalTable volatileHourTable, volatileDayTable public VolatileIntervalsService volatileIntervalsService - public TableGroup tg1h, tg1d, tg1Short, tg2h, tg3d, tg4h, tg5h, tg6h, tg1All - LogicalTable lt12, lt13, lt14, lt1All - LogicalTableDictionary logicalDictionary - TableIdentifier ti2h, ti2d, ti3d, ti4d, ti1All + // Table groups, h/d for hour/day; numbers indicate which concrete tables comprise them, 'na' for non-aggregatable + public TableGroup tg1h, tg1d, tg1Short, tg2h, tg3d, tg4h, tg5h, tg6h, tg1All, tgna + + // Logical tables for the table groups, na for 'non-aggregatable' + public LogicalTable lt12, lt13, lt14, lt1All, ltna - // Tables with non-aggregatable dimensions - public PhysicalTable tna1236d, tna1237d, tna167d, tna267d - public TableGroup tgna - LogicalTable ltna - TableIdentifier tina + public LogicalTableDictionary logicalDictionary - TemplateDruidQuery simpleTemplateQuery - TemplateDruidQuery simpleNestedTemplateQuery - TemplateDruidQuery complexTemplateQuery - TemplateDruidQuery simpleTemplateWithGrainQuery - TemplateDruidQuery complexTemplateWithInnerGrainQuery - TemplateDruidQuery complexTemplateWithDoubleGrainQuery + public TableIdentifier ti2h, ti2d, ti3d, ti4d, ti1All, tina - DimensionDictionary dimensionDictionary - MetricDictionary metricDictionary + public TemplateDruidQuery simpleTemplateQuery, simpleNestedTemplateQuery, complexTemplateQuery, simpleTemplateWithGrainQuery, complexTemplateWithInnerGrainQuery, complexTemplateWithDoubleGrainQuery - Set dimensionRows2 - Set dimensionRows3 + public DimensionDictionary dimensionDictionary + + public
MetricDictionary metricDictionary public QueryBuildingTestingResources() { + init() + } - DateTimeZone.setDefault(UTC) - interval1 = new Interval("2014-06-23/2014-07-14") - interval2 = new Interval("2014-07-07/2014-07-21") - interval3 = new Interval("2014-07-01/2014-08-01") + public QueryBuildingTestingResources init() { + DateTimeZone.setDefault(UTC) def ages = ["1": "0-10", "2": "11-14", "3": "14-29", "4": "30-40", "5": "41-59", "6": "60+"] LinkedHashSet dimensionFields = new LinkedHashSet<>() @@ -123,10 +126,9 @@ public class QueryBuildingTestingResources { MapStoreManager.getInstance("dim2"), ScanSearchProviderManager.getInstance("dim2") ) - dimensionRows2 = ages.collect() { - BardDimensionField.makeDimensionRow(d2, it.key, it.value) - } - d2.addAllDimensionRows(dimensionRows2) + + d2.addAllDimensionRows(ages.collect { BardDimensionField.makeDimensionRow(d2, it.key, it.value) } as Set) + d3 = new KeyValueStoreDimension( "ageBracket", "age_bracket", @@ -134,10 +136,9 @@ public class QueryBuildingTestingResources { MapStoreManager.getInstance("ageBracket"), ScanSearchProviderManager.getInstance("ageBracket") ) - dimensionRows3 = ages.collect() { - BardDimensionField.makeDimensionRow(d3, it.key, it.value) - } - d3.addAllDimensionRows(dimensionRows3) + + d3.addAllDimensionRows(ages.collect { BardDimensionField.makeDimensionRow(d3, it.key, it.value) } as Set) + d4 = new KeyValueStoreDimension( "dim4", "dim4", @@ -145,6 +146,7 @@ public class QueryBuildingTestingResources { MapStoreManager.getInstance("dim4"), ScanSearchProviderManager.getInstance("dim4") ) + d5 = new KeyValueStoreDimension( "dim5", "dim5", @@ -152,6 +154,7 @@ public class QueryBuildingTestingResources { MapStoreManager.getInstance("dim5"), ScanSearchProviderManager.getInstance("dim5") ) + d6 = new KeyValueStoreDimension( "dim6", "dim6", @@ -160,6 +163,7 @@ public class QueryBuildingTestingResources { ScanSearchProviderManager.getInstance("dim6"), false ) + d7 = new KeyValueStoreDimension( "dim7", "dim_7", @@ -168,6 +172,9 @@ public class QueryBuildingTestingResources { ScanSearchProviderManager.getInstance("dim7"), false ) + + LinkedHashSet dimConfig = new TestLookupDimensions().getDimensionConfigurationsByApiName(SIZE, SHAPE, COLOR) + // lookup dimensions with multiple, one, and none lookups d8 = new LookupDimension(lookupDimConfig.getAt(0)) d9 = new LookupDimension(lookupDimConfig.getAt(1)) @@ -193,148 +200,72 @@ public class QueryBuildingTestingResources { metricDictionary.add(it) } + interval1 = new Interval("2014-06-23/2014-07-14") + interval2 = new Interval("2014-07-07/2014-07-21") + interval3 = new Interval("2014-07-01/2014-08-01") + TimeGrain utcHour = HOUR.buildZonedTimeGrain(UTC) TimeGrain utcDay = DAY.buildZonedTimeGrain(UTC) - t1h = new PhysicalTable("table1h", utcHour, ["ageBracket":"age_bracket"]) - t1d = new PhysicalTable("table1d", utcDay, ["ageBracket":"age_bracket"]) - t1hShort = new PhysicalTable("table1Short", utcHour, new HashMap<>()) - - t2h = new PhysicalTable("table2", utcHour, new HashMap<>()) - t3d = new PhysicalTable("table3", utcDay, new HashMap<>()) - - tna1236d = new PhysicalTable("tableNA1236", utcDay, ["ageBracket":"age_bracket"]) - tna1237d = new PhysicalTable("tableNA1237", utcDay, ["ageBracket":"age_bracket"]) - tna167d = new PhysicalTable("tableNA167", utcDay, ["ageBracket":"age_bracket", "dim7":"dim_7"]) - tna267d = new PhysicalTable("tableNA267", utcDay, ["dim7":"dim_7"]) - - t4h1 = new PhysicalTable("table4h1", utcHour, new HashMap<>()) - t4h2 = new PhysicalTable("table4h2", utcHour, 
new HashMap<>()) - t4d1 = new PhysicalTable("table4d1", utcDay, new HashMap<>()) - t4d2 = new PhysicalTable("table4d2", utcDay, new HashMap<>()) - - t5h = new PhysicalTable("table5h", utcHour, new HashMap<>()) - - [d1, d2, d3].each { - t1h.addColumn(DimensionColumn.addNewDimensionColumn(t1h, it)) - t1d.addColumn(DimensionColumn.addNewDimensionColumn(t1d, it)) - } - - [d1, d2].each { - t1hShort.addColumn(DimensionColumn.addNewDimensionColumn(t1hShort, it)) - } - - [d1, d2, d4].each { - t2h.addColumn(DimensionColumn.addNewDimensionColumn(t2h, it)) - } - - [d1, d2, d5].each { - t3d.addColumn(DimensionColumn.addNewDimensionColumn(t3d, it)) - } - - [d1, d2].each { - t4h1.addColumn(DimensionColumn.addNewDimensionColumn(t4h1, it), [interval1] as Set) - t4h2.addColumn(DimensionColumn.addNewDimensionColumn(t4h2, it), [interval2] as Set) - t4d1.addColumn(DimensionColumn.addNewDimensionColumn(t4d1, it), [interval1] as Set) - t4d2.addColumn(DimensionColumn.addNewDimensionColumn(t4d2, it), [interval2] as Set) - } - - [d8, d9, d10, d11, d12, d13].each { - t5h.addColumn(DimensionColumn.addNewDimensionColumn(t5h, it)) - } - - MetricColumn.addNewMetricColumn(t1h, m1.name) - MetricColumn.addNewMetricColumn(t1d, m1.name) - MetricColumn.addNewMetricColumn(t1hShort, m1.name) - MetricColumn.addNewMetricColumn(t2h, m1.name) - MetricColumn.addNewMetricColumn(t5h, m1.name) + volatileHourTable = new ConcretePhysicalTable("hour", HOUR.buildZonedTimeGrain(UTC), [d1, m1].collect{toColumn(it)}.toSet(), [:]) + volatileDayTable = new ConcretePhysicalTable("day", DAY.buildZonedTimeGrain(UTC), [d1, m1].collect{toColumn(it)}.toSet(), [:]) - [m2, m3].each { - MetricColumn.addNewMetricColumn(t1h, it.name) - MetricColumn.addNewMetricColumn(t1d, it.name) - MetricColumn.addNewMetricColumn(t1hShort, it.name) - } + t1h = new ConcretePhysicalTable("table1h", utcHour, [d1, d2, d3, m1, m2, m3].collect{toColumn(it)}.toSet(), ["ageBracket":"age_bracket"]) + t1d = new ConcretePhysicalTable("table1d", utcDay, [d1, d2, d3, m1, m2, m3].collect{toColumn(it)}.toSet(), ["ageBracket":"age_bracket"]) + t1hShort = new ConcretePhysicalTable("table1Short", utcHour, [d1, d2, m1, m2, m3].collect{toColumn(it)}.toSet(), [:]) - [m4, m5].each { - MetricColumn.addNewMetricColumn(t2h, it.name) - } + t2h = new ConcretePhysicalTable("table2", utcHour, [d1, d2, d4, m1, m4, m5].collect{toColumn(it)}.toSet(), [:]) + t3d = new ConcretePhysicalTable("table3", utcDay, [d1, d2, d5, m6].collect{toColumn(it)}.toSet(), [:]) - MetricColumn.addNewMetricColumn(t3d, m6.name) + // Tables with non aggregatable dimensions + tna1236d = new ConcretePhysicalTable("tableNA1236", utcDay, [d1, d2, d3, d6].collect{toColumn(it)}.toSet(),["ageBracket":"age_bracket"]) + tna1237d = new ConcretePhysicalTable("tableNA1237", utcDay, [d1, d2, d3, d7].collect{toColumn(it)}.toSet(), ["ageBracket":"age_bracket"]) + tna167d = new ConcretePhysicalTable("tableNA167", utcDay, [d1, d6, d7].collect{toColumn(it)}.toSet(), ["ageBracket":"age_bracket", "dim7":"dim_7"]) + tna267d = new ConcretePhysicalTable("tableNA267", utcDay, [d2, d6, d7].collect{toColumn(it)}.toSet(), ["dim7":"dim_7"]) - [d1, d2, d3, d6].each { - tna1236d.addColumn(DimensionColumn.addNewDimensionColumn(tna1236d, it)) - } + t4h1 = new ConcretePhysicalTable("table4h1", utcHour, [d1, d2, m1, m2, m3].collect{toColumn(it)}.toSet(), [:]) + t4h2 = new ConcretePhysicalTable("table4h2", utcHour, [d1, d2, m1, m2, m3].collect{toColumn(it)}.toSet(), [:]) + t4d1 = new ConcretePhysicalTable("table4d1", utcDay, [d1, d2, m1, m2, 
m3].collect{toColumn(it)}.toSet(), [:]) + t4d2 = new ConcretePhysicalTable("table4d2", utcDay, [d1, d2, m1, m2, m3].collect{toColumn(it)}.toSet(), [:]) - [d1, d2, d3, d7].each { - tna1237d.addColumn(DimensionColumn.addNewDimensionColumn(tna1237d, it)) - } + Map> availabilityMap1 = [:] + Map> availabilityMap2 = [:] - [d1, d6, d7].each { - tna167d.addColumn(DimensionColumn.addNewDimensionColumn(tna167d, it)) - } + [d1, d2, m1, m2, m3].each { + availabilityMap1.put(toColumn(it), [interval1]) + availabilityMap2.put(toColumn(it), [interval2]) + } - [d2, d6, d7].each { - tna267d.addColumn(DimensionColumn.addNewDimensionColumn(tna267d, it)) - } - - [d1, d2].each { - t4h1.addColumn(DimensionColumn.addNewDimensionColumn(t4h1, it), [interval1] as Set) - t4h2.addColumn(DimensionColumn.addNewDimensionColumn(t4h2, it), [interval2] as Set) - t4d1.addColumn(DimensionColumn.addNewDimensionColumn(t4d1, it), [interval1] as Set) - t4d2.addColumn(DimensionColumn.addNewDimensionColumn(t4d2, it), [interval2] as Set) - } + t4h1.setAvailability(new ImmutableAvailability(t4h1.getName(), availabilityMap1)) + t4d1.setAvailability(new ImmutableAvailability(t4d1.getName(), availabilityMap1)) + t5h = new ConcretePhysicalTable("table5h", utcHour, [d8, d9, d10, d11, d12, d13, m1].collect{toColumn(it)}.toSet(), [:]) - MetricColumn.addNewMetricColumn(t1h, m1.name) - MetricColumn.addNewMetricColumn(t1d, m1.name) - MetricColumn.addNewMetricColumn(t1hShort, m1.name) - MetricColumn.addNewMetricColumn(t2h, m1.name) - - [m2, m3].each { - MetricColumn.addNewMetricColumn(t1h, it.name) - MetricColumn.addNewMetricColumn(t1d, it.name) - MetricColumn.addNewMetricColumn(t1hShort, it.name) - } + t4h2.setAvailability(new ImmutableAvailability(t4h2.getName(), availabilityMap2)) + t4d2.setAvailability(new ImmutableAvailability(t4d2.getName(), availabilityMap2)) - [m4, m5].each { - MetricColumn.addNewMetricColumn(t2h, it.name) - } - - MetricColumn.addNewMetricColumn(t3d, m6.name) - - [m1, m2, m3].each { - t4h1.addColumn(MetricColumn.addNewMetricColumn(t4h1, it.name), [interval1] as Set) - t4h2.addColumn(MetricColumn.addNewMetricColumn(t4h2, it.name), [interval2] as Set) - t4d1.addColumn(MetricColumn.addNewMetricColumn(t4d1, it.name), [interval1] as Set) - t4d2.addColumn(MetricColumn.addNewMetricColumn(t4d2, it.name), [interval2] as Set) - } - - [t1h, t1d, t1hShort, t2h, t5h, t3d, t4h1, t4h2, t4d1, t4d2, tna1236d, tna1237d, tna167d, tna267d].each { - it.commit() - } - - tg1h = new TableGroup([t1h, t1d, t1hShort] as LinkedHashSet, [m1, m2, m3] as Set) - tg1d = new TableGroup([t1d] as LinkedHashSet, [m1, m2, m3] as Set) - tg1Short = new TableGroup([t1hShort] as LinkedHashSet, [m1, m2, m3] as Set) - tg2h = new TableGroup([t2h] as LinkedHashSet, [m1, m2, m3] as Set) - tg3d = new TableGroup([t3d] as LinkedHashSet, [m1, m2, m3] as Set) - tg4h = new TableGroup([t1h, t2h] as LinkedHashSet, [m1, m2, m3] as Set) - tg5h = new TableGroup([t2h, t1h] as LinkedHashSet, [m1, m2, m3] as Set) - tg6h = new TableGroup([t5h] as LinkedHashSet, [] as Set) + setupPartialData() - lt12 = new LogicalTable("base12", HOUR, tg1h) - lt13 = new LogicalTable("base13", DAY, tg1d) - lt14 = new LogicalTable("base14", HOUR, tg6h) - lt1All = new LogicalTable("baseAll", AllGranularity.INSTANCE, tg1All) + tg1h = new TableGroup([t1h, t1d, t1hShort] as LinkedHashSet, [m1, m2, m3].collect {buildMockName(it.getName())}.toSet(), [d1].toSet()) + tg1d = new TableGroup([t1d] as LinkedHashSet, [m1, m2, m3].collect {buildMockName(it.getName())}.toSet(), [d1].toSet()) + tg1Short = new
TableGroup([t1hShort] as LinkedHashSet, [m1, m2, m3].collect {buildMockName(it.getName())}.toSet(), [].toSet()) + tg2h = new TableGroup([t2h] as LinkedHashSet, [m1, m2, m3].collect {buildMockName(it.getName())}.toSet(), [].toSet()) + tg3d = new TableGroup([t3d] as LinkedHashSet, [m1, m2, m3].collect {buildMockName(it.getName())}.toSet(), [].toSet()) + tg4h = new TableGroup([t1h, t2h] as LinkedHashSet, [m1, m2, m3].collect {buildMockName(it.getName())}.toSet(), [].toSet()) + tg5h = new TableGroup([t2h, t1h] as LinkedHashSet, [m1, m2, m3].collect {buildMockName(it.getName())}.toSet(), [].toSet()) + tg6h = new TableGroup([t5h] as LinkedHashSet, [].toSet(), [].toSet()) + tgna = new TableGroup([tna1236d, tna1237d, tna167d, tna267d] as LinkedHashSet, [m1, m2, m3].collect {buildMockName(it.getName())}.toSet(), [].toSet()) + + lt12 = new LogicalTable("base12", HOUR, tg1h, metricDictionary) + lt13 = new LogicalTable("base13", DAY, tg1d, metricDictionary) + lt14 = new LogicalTable("base14", HOUR, tg6h, metricDictionary) + lt1All = new LogicalTable("baseAll", AllGranularity.INSTANCE, tg1All, metricDictionary) + ltna = new LogicalTable("baseNA", AllGranularity.INSTANCE, tgna, metricDictionary) ti2h = new TableIdentifier("base12", HOUR) ti2d = new TableIdentifier("base12", DAY) ti3d = new TableIdentifier("base13", DAY) ti4d = new TableIdentifier("base14", HOUR) - - // Tables with non-agg dimensions - tgna = new TableGroup([tna1236d, tna1237d, tna167d, tna267d] as LinkedHashSet, [m1, m2, m3] as Set) - ltna = new LogicalTable("baseNA", AllGranularity.INSTANCE, tgna) tina = new TableIdentifier("baseNA", DAY) Map baseMap = [ @@ -371,37 +302,33 @@ public class QueryBuildingTestingResources { simpleTemplateWithGrainQuery, WEEK ) - setupPartialData() + + return this } def setupPartialData() { // In the event of partiality on all data, the coarsest table will be selected and the leftmost of the // coarsest tables should be selected - emptyFirst = new PhysicalTable("emptyFirst", MONTH.buildZonedTimeGrain(UTC), [:]) - emptyLast = new PhysicalTable("emptyLast", MONTH.buildZonedTimeGrain(UTC), [:]) - partialSecond = new PhysicalTable("partialSecond", MONTH.buildZonedTimeGrain(UTC), [:]) - wholeThird = new PhysicalTable("wholeThird", MONTH.buildZonedTimeGrain(UTC), [:]) - - Interval emptyInterval = new Interval("2015/2015"); - emptyFirst.addColumn(DimensionColumn.addNewDimensionColumn(emptyFirst, d1), [emptyInterval] as Set) - emptyLast.addColumn(DimensionColumn.addNewDimensionColumn(emptyLast, d1), [emptyInterval] as Set) - Interval oneYear = new Interval("2015/2016"); - partialSecond.addColumn(DimensionColumn.addNewDimensionColumn(partialSecond, d1), [oneYear] as Set) - Interval fiveYears = new Interval("2011/2016") - wholeThird.addColumn(DimensionColumn.addNewDimensionColumn(wholeThird, d1), [fiveYears] as Set) - - [m1, m2, m3].each { - emptyFirst.addColumn(MetricColumn.addNewMetricColumn(emptyFirst, it.name), [emptyInterval] as Set) - emptyLast.addColumn(MetricColumn.addNewMetricColumn(emptyLast, it.name), [emptyInterval] as Set) - partialSecond.addColumn(MetricColumn.addNewMetricColumn(partialSecond, it.name), [oneYear] as Set) - wholeThird.addColumn(MetricColumn.addNewMetricColumn(wholeThird, it.name), [fiveYears] as Set) + emptyFirst = new ConcretePhysicalTable("emptyFirst", MONTH.buildZonedTimeGrain(UTC), [d1, m1, m2, m3].collect{toColumn(it)}.toSet(), [:]) + emptyLast = new ConcretePhysicalTable("emptyLast", MONTH.buildZonedTimeGrain(UTC), [d1, m1, m2, m3].collect{toColumn(it)}.toSet(), [:]) + 
partialSecond = new ConcretePhysicalTable("partialSecond", MONTH.buildZonedTimeGrain(UTC), [d1, m1, m2, m3].collect{toColumn(it)}.toSet(), [:]) + wholeThird = new ConcretePhysicalTable("wholeThird", MONTH.buildZonedTimeGrain(UTC), [d1, m1, m2, m3].collect{toColumn(it)}.toSet(), [:]) + + Map> availabilityMapLengthZero = [:] + Map> availabilityMapOneYear = [:] + Map> availabilityMapLongTime = [:] + + [d1, d2, m1, m2, m3].each { + availabilityMapLengthZero.put(toColumn(it), [new Interval("2015/2015")]) + availabilityMapOneYear.put(toColumn(it), [new Interval("2015/2016")]) + availabilityMapLongTime.put(toColumn(it), [new Interval("2011/2016")]) } - emptyFirst.commit() - emptyLast.commit() - partialSecond.commit() - wholeThird.commit() + emptyFirst.setAvailability(new ImmutableAvailability(emptyFirst.name, availabilityMapLengthZero)) + emptyLast.setAvailability(new ImmutableAvailability(emptyLast.name, availabilityMapLengthZero)) + partialSecond.setAvailability(new ImmutableAvailability(partialSecond.name, availabilityMapOneYear)) + wholeThird.setAvailability(new ImmutableAvailability(wholeThird.name, availabilityMapLongTime)) - tg1All = new TableGroup([emptyFirst, partialSecond, wholeThird, emptyLast] as LinkedHashSet, [m1, m2, m3] as Set) + tg1All = new TableGroup([emptyFirst, partialSecond, wholeThird, emptyLast] as LinkedHashSet, [].toSet(), [].toSet()) ti1All = new TableIdentifier("base1All", AllGranularity.INSTANCE) } @@ -414,16 +341,14 @@ public class QueryBuildingTestingResources { * availability, and volatility information */ def setupVolatileTables(Collection physicalTableAvailabilityVolatilityTriples) { - physicalTableAvailabilityVolatilityTriples.each { PhysicalTable table, Interval availability, _ -> - table.addColumn( - DimensionColumn.addNewDimensionColumn(table, d1), - [availability] as Set + physicalTableAvailabilityVolatilityTriples.each { ConcretePhysicalTable table, Interval availability, _ -> + table.setAvailability( + new ImmutableAvailability( table.name, + [new DimensionColumn(d1), new LogicalMetricColumn(m1)].collectEntries() { + [(it): [availability]] + } + ) ) - table.addColumn( - MetricColumn.addNewMetricColumn(table, m1.getName()), - [availability] as Set - ) - table.commit() } volatileIntervalsService = new DefaultingVolatileIntervalsService( @@ -434,4 +359,16 @@ public class QueryBuildingTestingResources { } ) } + + ApiMetricName buildMockName(String name) { + ApiMetricName.of(name) + } + + Column toColumn(Object item) { + if (item instanceof Dimension) + return new DimensionColumn(item) + + if (item instanceof LogicalMetric) + return new MetricColumn(item.getName()) + } } diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/SerializationResources.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/SerializationResources.groovy index c61fe859f7..256a85b5b8 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/SerializationResources.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/SerializationResources.groovy @@ -1,6 +1,7 @@ // Copyright 2016 Yahoo Inc. // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. 
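The QueryBuildingTestingResources hunks above capture the new availability model: rather than the removed addColumn(...)/commit() mutators, tests now build a column-to-intervals map up front and attach it once as an ImmutableAvailability. A sketch under the same assumptions (Fili test classpath; a name-only MetricColumn stands in for any Column):

```groovy
import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.MONTH
import static org.joda.time.DateTimeZone.UTC

import com.yahoo.bard.webservice.data.metric.MetricColumn
import com.yahoo.bard.webservice.table.Column
import com.yahoo.bard.webservice.table.ConcretePhysicalTable
import com.yahoo.bard.webservice.table.availability.ImmutableAvailability

import org.joda.time.Interval

Column column = new MetricColumn("pageViews")
def table = new ConcretePhysicalTable("example", MONTH.buildZonedTimeGrain(UTC), [column] as Set, [:])

// Availability is built up front as a column -> intervals map...
Map<Column, List<Interval>> availability = [(column): [new Interval("2015/2016")]]

// ...and attached once; there are no per-column mutators or commit() anymore.
table.setAvailability(new ImmutableAvailability(table.getName(), availability))
```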
package com.yahoo.bard.webservice.data + import com.yahoo.bard.webservice.data.dimension.BardDimensionField import com.yahoo.bard.webservice.data.dimension.Dimension import com.yahoo.bard.webservice.data.dimension.DimensionColumn @@ -14,8 +15,6 @@ import com.yahoo.bard.webservice.data.metric.MetricColumn import com.yahoo.bard.webservice.data.metric.MetricColumnWithValueType import com.yahoo.bard.webservice.data.time.StandardGranularityParser import com.yahoo.bard.webservice.druid.model.query.Granularity -import com.yahoo.bard.webservice.table.Schema -import com.yahoo.bard.webservice.table.ZonedSchema import com.yahoo.bard.webservice.util.SimplifiedIntervalList import com.yahoo.bard.webservice.web.PreResponse import com.yahoo.bard.webservice.web.responseprocessors.ResponseContext @@ -33,11 +32,11 @@ class SerializationResources extends Specification { DimensionDictionary dimensionDictionary PreResponse preResponse ResultSet resultSet - Result result1, result2, result3, result4, result5 + Result result1, result2, result3, result4 ResponseContext responseContext, responseContext1 - Schema schema, schema2, schema3 + ResultSetSchema schema, schema3 HashMap dimensionRows1 - Map metricValues1, metricValues2, metricValues3, metricValues4, metricValues5 + Map metricValues1, metricValues2, metricValues3, metricValues4 Granularity granularity Interval interval BigDecimal bigDecimal @@ -131,7 +130,7 @@ class SerializationResources extends Specification { result4 = new Result(dimensionRows2, metricValues4, DateTime.parse(("2016-01-12T00:00:00.000Z"))) StandardGranularityParser granularityParser = new StandardGranularityParser() - granularity = granularityParser.parseGranularity("day", DateTimeZone.UTC); + granularity = granularityParser.parseGranularity("day", DateTimeZone.UTC) Map baseSchemaTypeMap = [ "simplePageViews": "java.math.BigDecimal", @@ -144,36 +143,42 @@ class SerializationResources extends Specification { schema3 = buildSchema(baseSchemaTypeMap) List results = new ArrayList<>([result1, result2]) - resultSet = new ResultSet(results, schema) + resultSet = new ResultSet(schema, results) - DateTime ny = new DateTime(2011, 2, 2, 7, 0, 0, 0, DateTimeZone.forID("UTC")); - DateTime la = new DateTime(2011, 2, 3, 10, 15, 0, 0, DateTimeZone.forID("UTC")); + DateTime ny = new DateTime(2011, 2, 2, 7, 0, 0, 0, DateTimeZone.forID("UTC")) + DateTime la = new DateTime(2011, 2, 3, 10, 15, 0, 0, DateTimeZone.forID("UTC")) interval = new Interval(ny, la) bigDecimal = new BigDecimal("100") responseContext = new ResponseContext([:]) responseContext.put("randomHeader", "someHeader") - responseContext.put("missingIntervals", ["a","b","c", new SimplifiedIntervalList([interval]), bigDecimal]) + responseContext.put( + "missingIntervals", + (["a", "b", "c", new SimplifiedIntervalList([interval]), bigDecimal] as ArrayList) + ) responseContext1 = new ResponseContext([:]) responseContext1.put("randomHeader", "someHeader") - responseContext1.put("apiMetricColumnNames", ["metric1, metric2"] as Set) - responseContext1.put("requestedApiDimensionFields", [(ageBracketDim.getApiName()) : [BardDimensionField.ID] as Set]) + responseContext1.put("apiMetricColumnNames", ["metric1, metric2"] as LinkedHashSet) + responseContext1.put( + "requestedApiDimensionFields", + [(ageBracketDim.getApiName()): [BardDimensionField.ID] as Set] + ) preResponse = new PreResponse(resultSet, responseContext) return this } - ZonedSchema buildSchema(Map metricNameClassNames) { - Schema schema = new ZonedSchema(granularity, DateTimeZone.UTC) - 
schema.addColumn(new DimensionColumn(dimensionDictionary.findByApiName("ageBracket"))) - schema.addColumn(new DimensionColumn(dimensionDictionary.findByApiName("gender"))) - schema.addColumn(new DimensionColumn(dimensionDictionary.findByApiName("country"))) + ResultSetSchema buildSchema(Map metricNameClassNames) { + List columns = [] + columns.add(new DimensionColumn(dimensionDictionary.findByApiName("ageBracket"))) + columns.add(new DimensionColumn(dimensionDictionary.findByApiName("gender"))) + columns.add(new DimensionColumn(dimensionDictionary.findByApiName("country"))) metricNameClassNames.each { - schema.addColumn(new MetricColumnWithValueType(it.key, it.value)) + columns.add(new MetricColumnWithValueType(it.key, it.value)) } - return schema + new ResultSetSchema(granularity, columns) } String getSerializedResultSet(){ diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/config/table/BaseTableLoaderSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/config/table/BaseTableLoaderSpec.groovy index e0f06b8410..acb5396db5 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/config/table/BaseTableLoaderSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/config/table/BaseTableLoaderSpec.groovy @@ -2,6 +2,8 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.data.config.table +import static com.yahoo.bard.webservice.data.config.names.TestLogicalTableName.SHAPES + import com.yahoo.bard.webservice.data.config.ResourceDictionaries import com.yahoo.bard.webservice.data.config.dimension.TestDimensions import com.yahoo.bard.webservice.data.config.names.ApiMetricName @@ -9,13 +11,12 @@ import com.yahoo.bard.webservice.data.config.names.FieldName import com.yahoo.bard.webservice.data.config.names.TestApiDimensionName import com.yahoo.bard.webservice.data.config.names.TestApiMetricName import com.yahoo.bard.webservice.data.config.names.TestDruidMetricName -import com.yahoo.bard.webservice.data.dimension.Dimension; +import com.yahoo.bard.webservice.data.dimension.Dimension import com.yahoo.bard.webservice.data.dimension.impl.KeyValueStoreDimension -import com.yahoo.bard.webservice.table.TableGroup import com.yahoo.bard.webservice.table.PhysicalTable -import spock.lang.Specification +import com.yahoo.bard.webservice.table.TableGroup -import static com.yahoo.bard.webservice.data.config.names.TestLogicalTableName.SHAPES; +import spock.lang.Specification /** * Testing basic table loader functionality @@ -53,7 +54,6 @@ class BaseTableLoaderSpec extends Specification { def "table group has correct contents after being build"() { when: TableGroup group = loader.buildTableGroup( - SHAPES.asName(), apiNames, metricNames, physDefs, diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/dimension/impl/SearchProviderSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/dimension/impl/SearchProviderSpec.groovy index 7459f9e9ea..2ab6d80cdf 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/dimension/impl/SearchProviderSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/dimension/impl/SearchProviderSpec.groovy @@ -509,7 +509,7 @@ abstract class SearchProviderSpec extends Specificatio abstract boolean indicesHaveBeenCleared(); ApiFilter buildFilter(String filterQuery) { - new ApiFilter(filterQuery, animalTable, spaceIdDictionary) + new ApiFilter(filterQuery, 
spaceIdDictionary) } def "findAllDimensionRowsPaged and findFilteredDimensionRowsPaged paginates results correctly"() { diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/NoOpResultSetMapperSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/NoOpResultSetMapperSpec.groovy index 4af2aaa3f3..38cb520abd 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/NoOpResultSetMapperSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/NoOpResultSetMapperSpec.groovy @@ -6,6 +6,7 @@ import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import com.yahoo.bard.webservice.data.Result import com.yahoo.bard.webservice.data.ResultSet +import com.yahoo.bard.webservice.data.ResultSetSchema import com.yahoo.bard.webservice.data.dimension.BardDimensionField import com.yahoo.bard.webservice.data.dimension.DimensionColumn import com.yahoo.bard.webservice.data.dimension.DimensionField @@ -14,7 +15,6 @@ import com.yahoo.bard.webservice.data.dimension.MapStoreManager import com.yahoo.bard.webservice.data.dimension.impl.KeyValueStoreDimension import com.yahoo.bard.webservice.data.dimension.impl.ScanSearchProviderManager import com.yahoo.bard.webservice.data.metric.MetricColumn -import com.yahoo.bard.webservice.table.Schema import org.joda.time.DateTime @@ -54,9 +54,9 @@ class NoOpResultSetMapperSpec extends Specification { Result rs2 = new Result(dimensionRows2, metricValues2, DateTime.now()) - Schema schema = new Schema(DAY) + ResultSetSchema schema = new ResultSetSchema(DAY, Collections.emptySet()) - ResultSet resultSet = new ResultSet([rs1, rs2], schema) + ResultSet resultSet = new ResultSet(schema, [rs1, rs2]) ResultSetMapper resultSetMapper = new NoOpResultSetMapper() diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/PaginationMapperSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/PaginationMapperSpec.groovy index 930ff42a20..ed6f313ff3 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/PaginationMapperSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/PaginationMapperSpec.groovy @@ -2,14 +2,12 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. 
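The NoOpResultSetMapperSpec hunk above (and the mapper specs that follow) repeat two small adjustments: columns are constructed directly, e.g. new DimensionColumn(dimension) or new MetricColumn("m1"), now that the addNew...Column helpers are gone, and ResultSet takes its schema first. A sketch of the swapped constructor order, assuming the Fili test classpath:

```groovy
import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY

import com.yahoo.bard.webservice.data.Result
import com.yahoo.bard.webservice.data.ResultSet
import com.yahoo.bard.webservice.data.ResultSetSchema
import com.yahoo.bard.webservice.data.metric.MetricColumn

import org.joda.time.DateTime

MetricColumn column = new MetricColumn("m1")
ResultSetSchema schema = new ResultSetSchema(DAY, [column] as Set)
Result row = new Result([:], [(column): 42 as BigDecimal] as Map, DateTime.now())

// Pre-refactor this read `new ResultSet([row], schema)`; the order is now schema-first.
ResultSet resultSet = new ResultSet(schema, [row])
assert resultSet.getSchema() == schema
```

The ApiFilter construction just above follows the same decoupling: it now takes only the filter clause and a dimension dictionary, with no table argument.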
package com.yahoo.bard.webservice.data.metric.mappers -import com.yahoo.bard.webservice.table.PhysicalTable -import org.joda.time.DateTimeZone - import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import com.yahoo.bard.webservice.application.ObjectMappersSuite import com.yahoo.bard.webservice.data.Result import com.yahoo.bard.webservice.data.ResultSet +import com.yahoo.bard.webservice.data.ResultSetSchema import com.yahoo.bard.webservice.data.dimension.BardDimensionField import com.yahoo.bard.webservice.data.dimension.Dimension import com.yahoo.bard.webservice.data.dimension.DimensionColumn @@ -24,12 +22,11 @@ import com.yahoo.bard.webservice.data.metric.MetricColumn import com.yahoo.bard.webservice.druid.client.FailureCallback import com.yahoo.bard.webservice.druid.client.HttpErrorCallback import com.yahoo.bard.webservice.druid.model.query.DruidAggregationQuery -import com.yahoo.bard.webservice.table.Schema import com.yahoo.bard.webservice.util.GroovyTestUtils import com.yahoo.bard.webservice.web.DataApiRequest import com.yahoo.bard.webservice.web.PageNotFoundException -import com.yahoo.bard.webservice.web.responseprocessors.MappingResponseProcessor import com.yahoo.bard.webservice.web.responseprocessors.LoggingContext +import com.yahoo.bard.webservice.web.responseprocessors.MappingResponseProcessor import com.yahoo.bard.webservice.web.responseprocessors.ResponseContextKeys import com.yahoo.bard.webservice.web.util.PaginationLink import com.yahoo.bard.webservice.web.util.PaginationParameters @@ -182,7 +179,10 @@ class PaginationMapperSpec extends Specification { uriBuilder ) and: "The expected data" - ResultSet expectedPage = new ResultSet(testResults[-numLastRows..-1], testResults.getSchema()) + ResultSet expectedPage = new ResultSet( + testResults.getSchema(), + testResults[-numLastRows..-1] + ) expect: "The computed subset of results is as expected" mapper.map(testResults) == expectedPage @@ -246,7 +246,7 @@ class PaginationMapperSpec extends Specification { new Result(dimensionData, metricValues, new DateTime()) } - new ResultSet(resultList, new Schema(DAY)) + new ResultSet(new ResultSetSchema(DAY, [].toSet()), resultList) } @@ -258,7 +258,10 @@ class PaginationMapperSpec extends Specification { * @return The desired page of results */ ResultSet buildExpectedPage(ResultSet results, int page, int perPage) { - return new ResultSet(results.subList((page - 1) * perPage, page * perPage), results.getSchema()) + return new ResultSet( + results.getSchema(), + results.subList((page - 1) * perPage, page * perPage) + ) } /** @@ -275,7 +278,7 @@ class PaginationMapperSpec extends Specification { store, searchProvider ) - DimensionColumn column = DimensionColumn.addNewDimensionColumn(new PhysicalTable("", DAY.buildZonedTimeGrain(DateTimeZone.UTC), [(dimension.getApiName()): dimension.getApiName()]), dimension) + DimensionColumn column = new DimensionColumn(dimension) DimensionField idField = BardDimensionField.ID DimensionField descField = BardDimensionField.DESC Map fieldValueMap = [ diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/RowNumMapperSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/RowNumMapperSpec.groovy index 3023bd4455..1cff65b130 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/RowNumMapperSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/RowNumMapperSpec.groovy @@ -2,13 +2,11 @@ // Licensed under the terms of the 
Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.data.metric.mappers -import com.yahoo.bard.webservice.table.PhysicalTable -import org.joda.time.DateTimeZone - import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import com.yahoo.bard.webservice.data.Result import com.yahoo.bard.webservice.data.ResultSet +import com.yahoo.bard.webservice.data.ResultSetSchema import com.yahoo.bard.webservice.data.config.names.TestApiMetricName import com.yahoo.bard.webservice.data.dimension.BardDimensionField import com.yahoo.bard.webservice.data.dimension.Dimension @@ -19,7 +17,6 @@ import com.yahoo.bard.webservice.data.dimension.MapStoreManager import com.yahoo.bard.webservice.data.dimension.impl.KeyValueStoreDimension import com.yahoo.bard.webservice.data.dimension.impl.ScanSearchProviderManager import com.yahoo.bard.webservice.data.metric.MetricColumn -import com.yahoo.bard.webservice.table.Schema import org.joda.time.DateTime @@ -34,9 +31,8 @@ class RowNumMapperSpec extends Specification { /** * Create a dummy schema and add dummy metric columns to it */ - Schema schema = new Schema(DAY) - MetricColumn mc1 = MetricColumn.addNewMetricColumn(schema, "m1") - MetricColumn mc2 = MetricColumn.addNewMetricColumn(schema, "m2") + MetricColumn mc1 = new MetricColumn("m1") + MetricColumn mc2 = new MetricColumn("m2") // Initialize dummy metric values as a map of metric column and its value LinkedHashMap mv1 = new LinkedHashMap<>() @@ -53,8 +49,8 @@ class RowNumMapperSpec extends Specification { Dimension d2 = new KeyValueStoreDimension("d2", "d2-desc", dimensionFields, MapStoreManager.getInstance("d2"), ScanSearchProviderManager.getInstance("d2")) // Add dimension columns to the dummy schema created earlier - DimensionColumn dc1 = DimensionColumn.addNewDimensionColumn(schema, d1) - DimensionColumn dc2 = DimensionColumn.addNewDimensionColumn(schema, d2) + DimensionColumn dc1 = new DimensionColumn(d1) + DimensionColumn dc2 = new DimensionColumn(d2) // Create dummy DimensionRow's DimensionRow dr1 = BardDimensionField.makeDimensionRow(d1, "id1", "desc1") @@ -74,12 +70,15 @@ class RowNumMapperSpec extends Specification { Result r2 = new Result(drow, mv2, dateTime) + // Create a dummy column with name rowNum + MetricColumn rowNum = new MetricColumn(TestApiMetricName.A_ROW_NUM.getApiName()) + + ResultSetSchema schema = new ResultSetSchema(DAY, [mc1, mc2, dc1, dc2, rowNum] as Set) + // From the dummy result's created above, create a ResultSet // This is the resultSet which we pass to the mapper - ResultSet resultSet = new ResultSet(Arrays.asList(r1, r2), schema) + ResultSet resultSet = new ResultSet(schema, Arrays.asList(r1, r2)) - // Create a dummy column with name rowNum - MetricColumn rowNum = MetricColumn.addNewMetricColumn(schema, TestApiMetricName.A_ROW_NUM.getApiName()) // Add the new column with respective values, which we expect the rowNumMapper would do LinkedHashMap mappedMv1 = new LinkedHashMap<>() @@ -98,7 +97,7 @@ class RowNumMapperSpec extends Specification { ResultSetMapper rowNumMapper = new RowNumMapper() // Create an expected mapped ResultSet to be compared with - ResultSet mappedResultSet = new ResultSet(Arrays.asList(mappedR1, mappedR2), schema) + ResultSet mappedResultSet = new ResultSet(schema, Arrays.asList(mappedR1, mappedR2)) expect: mappedResultSet == rowNumMapper.map(resultSet) diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/SketchRoundUpMapperSpec.groovy 
b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/SketchRoundUpMapperSpec.groovy index 4512ee68a9..5bb78a6506 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/SketchRoundUpMapperSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/data/metric/mappers/SketchRoundUpMapperSpec.groovy @@ -3,9 +3,9 @@ package com.yahoo.bard.webservice.data.metric.mappers import com.yahoo.bard.webservice.data.Result +import com.yahoo.bard.webservice.data.ResultSetSchema import com.yahoo.bard.webservice.data.metric.MetricColumn import com.yahoo.bard.webservice.data.time.DefaultTimeGrain -import com.yahoo.bard.webservice.table.Schema import org.joda.time.DateTime @@ -14,14 +14,19 @@ import spock.lang.Unroll class SketchRoundUpMapperSpec extends Specification { - Schema schema = new Schema(DefaultTimeGrain.DAY) - MetricColumn column = MetricColumn.addNewMetricColumn(schema, "Row row row your boat") + MetricColumn column = new MetricColumn("Row row row your boat") + + ResultSetSchema schema = new ResultSetSchema(DefaultTimeGrain.DAY, [column].toSet()) SketchRoundUpMapper mapper = new SketchRoundUpMapper(column.name) @Unroll def "The mapper rounds #floatingPoint to #integer"() { given: "A result containing the floating point value" - Result result = new Result([:], [(column): floatingPoint as BigDecimal], new DateTime()) + Result result = new Result( + [:], + [(column): floatingPoint as BigDecimal] as Map, + new DateTime() + ) expect: "The sketch round up mapper returns a new result with the rounded value" mapper.map(result, schema).getMetricValueAsNumber(column) == integer as BigDecimal diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/aggregation/CardinalityAggregationSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/aggregation/CardinalityAggregationSpec.groovy index a49e3c640f..c6eac76b48 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/aggregation/CardinalityAggregationSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/aggregation/CardinalityAggregationSpec.groovy @@ -6,13 +6,14 @@ import com.yahoo.bard.webservice.data.dimension.Dimension import com.yahoo.bard.webservice.data.time.DefaultTimeGrain import com.yahoo.bard.webservice.druid.model.datasource.DataSource import com.yahoo.bard.webservice.druid.model.query.DruidAggregationQuery -import com.yahoo.bard.webservice.table.PhysicalTable +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.util.GroovyTestUtils import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.datatype.jdk8.Jdk8Module import org.joda.time.DateTimeZone + import spock.lang.Specification class CardinalityAggregationSpec extends Specification { @@ -68,7 +69,12 @@ class CardinalityAggregationSpec extends Specification { // Consequently, query's also need this to be serialized. 
DruidAggregationQuery query = Mock(DruidAggregationQuery) DataSource ds = Mock(DataSource) - ds.getPhysicalTables() >> [new PhysicalTable("table", DefaultTimeGrain.DAY.buildZonedTimeGrain(DateTimeZone.UTC), ["d1ApiName":"d1DruidName","d2ApiName":"d2DruidName"])] + ds.getPhysicalTables() >> [new ConcretePhysicalTable( + "table", + DefaultTimeGrain.DAY.buildZonedTimeGrain(DateTimeZone.UTC), + [] as Set, + ["d1ApiName": "d1DruidName", "d2ApiName": "d2DruidName"] + )] query.dataSource >> ds query.aggregations >> [a1] diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/aggregation/FilteredAggregationSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/aggregation/FilteredAggregationSpec.groovy index 3d949a75a2..7f7d64c077 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/aggregation/FilteredAggregationSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/aggregation/FilteredAggregationSpec.groovy @@ -6,9 +6,10 @@ import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import static org.joda.time.DateTimeZone.UTC import com.yahoo.bard.webservice.data.config.metric.MetricInstance -import com.yahoo.bard.webservice.data.config.metric.makers.SketchCountMaker +import com.yahoo.bard.webservice.data.config.metric.makers.ThetaSketchMaker import com.yahoo.bard.webservice.data.config.names.ApiMetricName import com.yahoo.bard.webservice.data.dimension.BardDimensionField +import com.yahoo.bard.webservice.data.dimension.Dimension import com.yahoo.bard.webservice.data.dimension.DimensionColumn import com.yahoo.bard.webservice.data.dimension.DimensionDictionary import com.yahoo.bard.webservice.data.dimension.MapStoreManager @@ -17,15 +18,14 @@ import com.yahoo.bard.webservice.data.dimension.impl.ScanSearchProviderManager import com.yahoo.bard.webservice.data.filterbuilders.DefaultDruidFilterBuilder import com.yahoo.bard.webservice.data.filterbuilders.DruidFilterBuilder import com.yahoo.bard.webservice.data.metric.LogicalMetric -import com.yahoo.bard.webservice.data.metric.MetricColumn import com.yahoo.bard.webservice.data.metric.MetricDictionary import com.yahoo.bard.webservice.druid.model.filter.Filter import com.yahoo.bard.webservice.druid.util.FieldConverterSupplier -import com.yahoo.bard.webservice.table.LogicalTable +import com.yahoo.bard.webservice.table.Column +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.table.PhysicalTable -import com.yahoo.bard.webservice.table.TableGroup import com.yahoo.bard.webservice.web.ApiFilter -import com.yahoo.bard.webservice.web.FilteredSketchMetricsHelper +import com.yahoo.bard.webservice.web.FilteredThetaSketchMetricsHelper import com.yahoo.bard.webservice.web.MetricsFilterSetBuilder import spock.lang.Specification @@ -43,50 +43,36 @@ class FilteredAggregationSpec extends Specification{ static MetricsFilterSetBuilder oldBuilder = FieldConverterSupplier.metricsFilterSetBuilder def setupSpec() { - FieldConverterSupplier.metricsFilterSetBuilder = new FilteredSketchMetricsHelper() + FieldConverterSupplier.metricsFilterSetBuilder = new FilteredThetaSketchMetricsHelper() } def setup() { - Set metricNames = ["FOO","FOO_NO_BAR"] + MetricDictionary metricDictionary = new MetricDictionary() - ageDimension = new KeyValueStoreDimension( - "age", - null, - [BardDimensionField.ID] as LinkedHashSet, - MapStoreManager.getInstance("age"), - ScanSearchProviderManager.getInstance("age") - ) - - genderDimension = new 
KeyValueStoreDimension( - "gender", - null, - [BardDimensionField.ID] as LinkedHashSet, - MapStoreManager.getInstance("gender"), - ScanSearchProviderManager.getInstance("gender") - ) + def filtered_metric_name = "FOO_NO_BAR" + Set metricNames = (["FOO", filtered_metric_name].collect { ApiMetricName.of(it)}) as Set + ageDimension = buildSimpleDimension("age") + genderDimension = buildSimpleDimension("gender") DimensionDictionary dimensionDictionary = new DimensionDictionary([ageDimension] as Set) ageDimension.addDimensionRow(BardDimensionField.makeDimensionRow(ageDimension, "114")) ageDimension.addDimensionRow(BardDimensionField.makeDimensionRow(ageDimension, "125")) - PhysicalTable physicalTable = new PhysicalTable("NETWORK", DAY.buildZonedTimeGrain(UTC), [:]) - - physicalTable.addColumn(new DimensionColumn(ageDimension)) - physicalTable.addColumn(new DimensionColumn(ageDimension)) - metricNames.each {physicalTable.addColumn(new MetricColumn(it))} - physicalTable.commit() + Set columns = [new DimensionColumn(ageDimension)] as Set - TableGroup tableGroup = new TableGroup([physicalTable] as LinkedHashSet, metricNames) - LogicalTable table = new LogicalTable("NETWORK_DAY", DAY, tableGroup) - tableGroup.dimensions.each { - DimensionColumn.addNewDimensionColumn(table, it) - } + PhysicalTable physicalTable = new ConcretePhysicalTable( + "NETWORK", + DAY.buildZonedTimeGrain(UTC), + columns, + [:] + ) - SketchCountMaker sketchCountMaker = new SketchCountMaker(new MetricDictionary(), 16384) - MetricInstance fooNoBarSketchPm = new MetricInstance("FOO_NO_BAR",sketchCountMaker,"FOO_NO_BAR_SKETCH") + ThetaSketchMaker sketchCountMaker = new ThetaSketchMaker(new MetricDictionary(), 16384) + MetricInstance fooNoBarSketchPm = new MetricInstance(filtered_metric_name,sketchCountMaker,"FOO_NO_BAR_SKETCH") LogicalMetric fooNoBarSketch = fooNoBarSketchPm.make() + metricDictionary.put(filtered_metric_name, fooNoBarSketch) metricAgg = fooNoBarSketch.getTemplateDruidQuery().getAggregations().first() genderDependentMetricAgg = Mock(Aggregation) @@ -94,13 +80,15 @@ class FilteredAggregationSpec extends Specification{ genderDependentMetricAgg.withName(_) >> genderDependentMetricAgg genderDependentMetricAgg.withFieldName(_) >> genderDependentMetricAgg - Set filterSet = [new ApiFilter("age|id-in[114,125]", table, dimensionDictionary)] as Set + LogicalMetric logicalMetric = new LogicalMetric(null, null, filtered_metric_name) + + Set filterSet = [new ApiFilter("age|id-in[114,125]", dimensionDictionary)] as Set DruidFilterBuilder filterBuilder = new DefaultDruidFilterBuilder() filter1 = filterBuilder.buildFilters([(ageDimension): filterSet]) filter2 = filterBuilder.buildFilters( - [(ageDimension): [new ApiFilter("age|id-in[114]", table, dimensionDictionary)] as Set] + [(ageDimension): [new ApiFilter("age|id-in[114]", dimensionDictionary)] as Set] ) filteredAgg = new FilteredAggregation("FOO_NO_BAR-114_127", metricAgg, filter1) @@ -172,4 +160,15 @@ class FilteredAggregationSpec extends Specification{ filteredAgg.getAggregation().getFieldName() == "FOO_NO_BAR_SKETCH" filteredAgg.getAggregation().getName() == "FOO_NO_BAR-114_127" } + + def Dimension buildSimpleDimension(String name) { + return new KeyValueStoreDimension( + name, + null, + [BardDimensionField.ID] as LinkedHashSet, + MapStoreManager.getInstance(name), + ScanSearchProviderManager.getInstance(name) + ) + + } } diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/DruidSearchQuerySpec.groovy 
b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/DruidSearchQuerySpec.groovy index 4ae1e42b18..a7b512261e 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/DruidSearchQuerySpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/DruidSearchQuerySpec.groovy @@ -2,10 +2,10 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.druid.model.query -import com.yahoo.bard.webservice.table.PhysicalTable import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.data.dimension.Dimension import com.yahoo.bard.webservice.data.dimension.DimensionField import com.yahoo.bard.webservice.data.dimension.MapStore @@ -44,7 +44,14 @@ class DruidSearchQuerySpec extends Specification { DruidSearchQuery defaultQuery(Map vars) { vars.queryType = DefaultQueryType.SEARCH - vars.dataSource = vars.dataSource ?: new TableDataSource(new PhysicalTable("table_name", DAY.buildZonedTimeGrain(DateTimeZone.UTC), [:])) + vars.dataSource = vars.dataSource ?: new TableDataSource( + new ConcretePhysicalTable( + "table_name", + DAY.buildZonedTimeGrain(DateTimeZone.UTC), + [] as Set, + [:] + ) + ) vars.granularity = vars.granularity ?: DAY vars.filter = vars.filter ?: null vars.intervals = vars.intervals ?: [] diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/GroupByQuerySpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/GroupByQuerySpec.groovy index 61186d010f..79046bb15e 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/GroupByQuerySpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/GroupByQuerySpec.groovy @@ -2,8 +2,6 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. 
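DruidSearchQuerySpec above, and the query specs that follow, wrap a ConcretePhysicalTable when building a TableDataSource, since the data source no longer accepts the bare PhysicalTable type. A sketch of that wrapping, assuming the Fili classpath:

```groovy
import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY

import com.yahoo.bard.webservice.druid.model.datasource.TableDataSource
import com.yahoo.bard.webservice.table.ConcretePhysicalTable

import org.joda.time.DateTimeZone

// Only the concrete table type can back a Druid table data source now.
def table = new ConcretePhysicalTable(
        "table_name",
        DAY.buildZonedTimeGrain(DateTimeZone.UTC),
        [] as Set,   // no columns needed for a serialization-only test
        [:]          // no logical-to-physical renames
)
TableDataSource dataSource = new TableDataSource(table)
```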
package com.yahoo.bard.webservice.druid.model.query -import com.yahoo.bard.webservice.table.PhysicalTable - import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import com.yahoo.bard.webservice.data.dimension.BardDimensionField @@ -12,6 +10,7 @@ import com.yahoo.bard.webservice.data.dimension.DimensionField import com.yahoo.bard.webservice.data.dimension.MapStoreManager import com.yahoo.bard.webservice.data.dimension.impl.KeyValueStoreDimension import com.yahoo.bard.webservice.data.dimension.impl.ScanSearchProviderManager +import com.yahoo.bard.webservice.data.time.TimeGrain import com.yahoo.bard.webservice.druid.model.aggregation.Aggregation import com.yahoo.bard.webservice.druid.model.aggregation.LongSumAggregation import com.yahoo.bard.webservice.druid.model.datasource.DataSource @@ -25,6 +24,7 @@ import com.yahoo.bard.webservice.druid.model.filter.SelectorFilter import com.yahoo.bard.webservice.druid.model.postaggregation.ArithmeticPostAggregation import com.yahoo.bard.webservice.druid.model.postaggregation.FieldAccessorPostAggregation import com.yahoo.bard.webservice.druid.model.postaggregation.PostAggregation +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.util.GroovyTestUtils import com.fasterxml.jackson.databind.ObjectMapper @@ -42,6 +42,7 @@ class GroupByQuerySpec extends Specification { @Shared DateTimeZone currentTZ + TimeGrain day = DAY.buildZonedTimeGrain(DateTimeZone.UTC) def setupSpec() { currentTZ = DateTimeZone.getDefault() @@ -54,7 +55,14 @@ class GroupByQuerySpec extends Specification { GroupByQuery defaultQuery(Map vars) { - vars.dataSource = vars.dataSource ?: new TableDataSource(new PhysicalTable("table_name", DAY.buildZonedTimeGrain(DateTimeZone.UTC), ["apiLocale":"locale", "apiPlatform":"platform", "apiProduct":"product"])) + vars.dataSource = vars.dataSource ?: new TableDataSource( + new ConcretePhysicalTable( + "table_name", + day, + [] as Set, + ["apiLocale": "locale", "apiPlatform": "platform", "apiProduct": "product"] + ) + ) vars.granularity = vars.granularity ?: DAY vars.dimensions = vars.dimensions ?: new ArrayList() vars.filter = vars.filter ?: null @@ -160,7 +168,7 @@ class GroupByQuerySpec extends Specification { def "check dataSource serialization"() { //non nested query - DataSource ds1 = new TableDataSource(new PhysicalTable("table_name", DAY.buildZonedTimeGrain(DateTimeZone.UTC), [:])) + DataSource ds1 = new TableDataSource(new ConcretePhysicalTable("table_name", day, [] as Set, [:])) GroupByQuery dq1 = defaultQuery(dataSource: ds1) //nested query @@ -457,8 +465,8 @@ class GroupByQuerySpec extends Specification { def "Check innermost query injection"() { setup: - TableDataSource inner1 = new TableDataSource(new PhysicalTable("inner1", DAY.buildZonedTimeGrain(DateTimeZone.UTC), [:])) - TableDataSource inner2 = new TableDataSource(new PhysicalTable("inner2", DAY.buildZonedTimeGrain(DateTimeZone.UTC), [:])) + TableDataSource inner1 = new TableDataSource(new ConcretePhysicalTable("inner1", day, [] as Set, [:])) + TableDataSource inner2 = new TableDataSource(new ConcretePhysicalTable("inner2", day, [] as Set, [:])) GroupByQuery dq1 = defaultQuery(dataSource: inner1) DataSource outer1 = new QueryDataSource(dq1) GroupByQuery dq2 = defaultQuery(dataSource: outer1) @@ -479,7 +487,7 @@ class GroupByQuerySpec extends Specification { List endingIntervals = [Interval.parse("2016/2017")] and: "A nested query" - TableDataSource table = new TableDataSource(new PhysicalTable("inner1", 
DAY.buildZonedTimeGrain(DateTimeZone.UTC), [:])) + TableDataSource table = new TableDataSource(new ConcretePhysicalTable("inner1", day, [] as Set, [:])) GroupByQuery inner = defaultQuery(dataSource: table, intervals: startingIntervals) GroupByQuery middle = defaultQuery(dataSource: new QueryDataSource<>(inner), intervals: startingIntervals) GroupByQuery outer = defaultQuery(dataSource: new QueryDataSource<>(middle), intervals: startingIntervals) diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/SegmentMetadataQuerySpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/SegmentMetadataQuerySpec.groovy index ea635df12e..bb5090a9bf 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/SegmentMetadataQuerySpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/SegmentMetadataQuerySpec.groovy @@ -5,7 +5,7 @@ package com.yahoo.bard.webservice.druid.model.query import com.yahoo.bard.webservice.data.time.DefaultTimeGrain import com.yahoo.bard.webservice.druid.model.datasource.DataSource import com.yahoo.bard.webservice.druid.model.datasource.TableDataSource -import com.yahoo.bard.webservice.table.PhysicalTable +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.util.GroovyTestUtils import com.fasterxml.jackson.databind.ObjectMapper @@ -33,7 +33,12 @@ class SegmentMetadataQuerySpec extends Specification { def "SegmentMetadataQuery serializes to JSON correctly with one interval"() { given: "A Table data source and interval" String tableName = "basefact_network" - DataSource dataSource = new TableDataSource(new PhysicalTable(tableName, DefaultTimeGrain.DAY.buildZonedTimeGrain(DateTimeZone.UTC), [:])) + DataSource dataSource = new TableDataSource(new ConcretePhysicalTable( + tableName, + DefaultTimeGrain.DAY.buildZonedTimeGrain(DateTimeZone.UTC), + [] as Set, + [:] + )) Collection intervals = [new Interval("2014-07-01/2014-07-15")] when: "We create and serialize a SegmentMetadataQuery" @@ -56,7 +61,12 @@ class SegmentMetadataQuerySpec extends Specification { def "SegmentMetadataQuery serializes to JSON correctly with multiple intervals"() { given: "A Table data source and interval" String tableName = "basefact_network" - DataSource dataSource = new TableDataSource(new PhysicalTable(tableName, DefaultTimeGrain.DAY.buildZonedTimeGrain(DateTimeZone.UTC), [:])) + DataSource dataSource = new TableDataSource(new ConcretePhysicalTable( + tableName, + DefaultTimeGrain.DAY.buildZonedTimeGrain(DateTimeZone.UTC), + [] as Set, + [:] + )) Collection intervals = [new Interval("2014-07-01/2014-07-15"), new Interval("2014-08-01/2014-08-15")] when: "We create and serialize a SegmentMetadataQuery" diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/TimeBoundaryQuerySpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/TimeBoundaryQuerySpec.groovy index 0ce6d0bdfb..3d550dee3b 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/TimeBoundaryQuerySpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/TimeBoundaryQuerySpec.groovy @@ -4,7 +4,7 @@ package com.yahoo.bard.webservice.druid.model.query import com.yahoo.bard.webservice.data.time.DefaultTimeGrain import com.yahoo.bard.webservice.druid.model.datasource.TableDataSource -import com.yahoo.bard.webservice.table.PhysicalTable +import com.yahoo.bard.webservice.table.ConcretePhysicalTable 
import com.yahoo.bard.webservice.util.GroovyTestUtils import com.fasterxml.jackson.databind.ObjectMapper @@ -21,7 +21,14 @@ class TimeBoundaryQuerySpec extends Specification { def "TimeBoundaryQuery serializes to JSON correctly"() { given: "A Table data source" String tableName = "basefact_network" - TableDataSource dataSource = new TableDataSource(new PhysicalTable(tableName, DefaultTimeGrain.DAY.buildZonedTimeGrain(DateTimeZone.UTC), [:])) + TableDataSource dataSource = new TableDataSource( + new ConcretePhysicalTable( + tableName, + DefaultTimeGrain.DAY.buildZonedTimeGrain(DateTimeZone.UTC), + [] as Set, + [:] + ) + ) when: "We create and serialize a TimeBoundaryQuery" String timeBoundaryQueryStr = MAPPER.writeValueAsString(new TimeBoundaryQuery(dataSource)) diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/TimeSeriesQuerySpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/TimeSeriesQuerySpec.groovy index 4b1fe20a19..50f35aac26 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/TimeSeriesQuerySpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/TimeSeriesQuerySpec.groovy @@ -2,13 +2,12 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.druid.model.query -import com.yahoo.bard.webservice.table.PhysicalTable - import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import com.yahoo.bard.webservice.druid.model.aggregation.Aggregation import com.yahoo.bard.webservice.druid.model.datasource.TableDataSource import com.yahoo.bard.webservice.druid.model.postaggregation.PostAggregation +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.util.GroovyTestUtils import com.fasterxml.jackson.databind.ObjectMapper @@ -38,7 +37,12 @@ class TimeSeriesQuerySpec extends Specification { TimeSeriesQuery defaultQuery(Map vars) { - vars.dataSource = vars.dataSource ?: new TableDataSource(new PhysicalTable("table_name", DAY.buildZonedTimeGrain(DateTimeZone.UTC), [:])) + vars.dataSource = vars.dataSource ?: new TableDataSource(new ConcretePhysicalTable( + "table_name", + DAY.buildZonedTimeGrain(DateTimeZone.UTC), + [] as Set, + [:] + )) vars.granularity = vars.granularity ?: DAY vars.filter = vars.filter ?: null vars.having = vars.having ?: null diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/TopNQuerySpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/TopNQuerySpec.groovy index 84e55ea1b9..20746d8cb8 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/TopNQuerySpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/model/query/TopNQuerySpec.groovy @@ -2,8 +2,6 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. 
package com.yahoo.bard.webservice.druid.model.query -import com.yahoo.bard.webservice.table.PhysicalTable - import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import com.yahoo.bard.webservice.data.dimension.BardDimensionField @@ -15,6 +13,7 @@ import com.yahoo.bard.webservice.data.dimension.impl.ScanSearchProviderManager import com.yahoo.bard.webservice.druid.model.aggregation.Aggregation import com.yahoo.bard.webservice.druid.model.datasource.TableDataSource import com.yahoo.bard.webservice.druid.model.postaggregation.PostAggregation +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.util.GroovyTestUtils import com.fasterxml.jackson.databind.ObjectMapper @@ -44,7 +43,12 @@ class TopNQuerySpec extends Specification { TopNQuery defaultQuery(Map vars) { - vars.dataSource = vars.dataSource ?: new TableDataSource(new PhysicalTable("table_name", DAY.buildZonedTimeGrain(DateTimeZone.UTC), ["apiLocale": "locale"])) + vars.dataSource = vars.dataSource ?: new TableDataSource(new ConcretePhysicalTable( + "table_name", + DAY.buildZonedTimeGrain(DateTimeZone.UTC), + [] as Set, + ["apiLocale": "locale"] + )) vars.dimension = vars.dimension ?: "" vars.threshold = vars.threshold ?: 5 vars.granularity = vars.granularity ?: DAY diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/serializers/DimensionToDefaultDimensionSpecSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/serializers/DimensionToDefaultDimensionSpecSpec.groovy index 8247d83994..5a78a22102 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/serializers/DimensionToDefaultDimensionSpecSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/serializers/DimensionToDefaultDimensionSpecSpec.groovy @@ -8,7 +8,6 @@ import static org.joda.time.DateTimeZone.UTC import com.yahoo.bard.webservice.data.DruidQueryBuilder import com.yahoo.bard.webservice.data.PartialDataHandler import com.yahoo.bard.webservice.data.QueryBuildingTestingResources -import com.yahoo.bard.webservice.data.dimension.Dimension import com.yahoo.bard.webservice.data.metric.LogicalMetric import com.yahoo.bard.webservice.data.metric.mappers.NoOpResultSetMapper import com.yahoo.bard.webservice.data.volatility.DefaultingVolatileIntervalsService diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/serializers/LookupDimensionToDimensionSpecSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/serializers/LookupDimensionToDimensionSpecSpec.groovy index 3bf7f8ef14..6d982d23f0 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/serializers/LookupDimensionToDimensionSpecSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/druid/serializers/LookupDimensionToDimensionSpecSpec.groovy @@ -8,7 +8,6 @@ import static org.joda.time.DateTimeZone.UTC import com.yahoo.bard.webservice.data.DruidQueryBuilder import com.yahoo.bard.webservice.data.PartialDataHandler import com.yahoo.bard.webservice.data.QueryBuildingTestingResources -import com.yahoo.bard.webservice.data.dimension.Dimension import com.yahoo.bard.webservice.data.metric.LogicalMetric import com.yahoo.bard.webservice.data.metric.mappers.NoOpResultSetMapper import com.yahoo.bard.webservice.data.volatility.DefaultingVolatileIntervalsService diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/metadata/DataSourceMetadataLoaderSpec.groovy 
b/fili-core/src/test/groovy/com/yahoo/bard/webservice/metadata/DataSourceMetadataLoaderSpec.groovy index 94d2ae8202..70e657cd16 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/metadata/DataSourceMetadataLoaderSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/metadata/DataSourceMetadataLoaderSpec.groovy @@ -19,7 +19,7 @@ import com.yahoo.bard.webservice.druid.model.datasource.DataSource import com.yahoo.bard.webservice.druid.model.query.DruidAggregationQuery import com.yahoo.bard.webservice.models.druid.client.impl.TestDruidWebService import com.yahoo.bard.webservice.table.Column -import com.yahoo.bard.webservice.table.PhysicalTable +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.table.PhysicalTableDictionary import org.joda.time.DateTime @@ -209,7 +209,8 @@ class DataSourceMetadataLoaderSpec extends Specification { MAPPERS.getMapper() ) druidWS.jsonResponse = {gappyDataSourceMetadataJson} - PhysicalTable table = Mock(PhysicalTable) + ConcretePhysicalTable table = Mock(ConcretePhysicalTable) + table.getFactTableName() >> "test" DataSourceMetadata capture when: "JSON metadata return successfully" @@ -240,7 +241,8 @@ class DataSourceMetadataLoaderSpec extends Specification { testWs, MAPPERS.getMapper() ) - PhysicalTable table = Mock(PhysicalTable) + ConcretePhysicalTable table = Mock(ConcretePhysicalTable) + table.getFactTableName() >> "test" when: "loader issues a metadata query" loader.queryDataSourceMetadata(table) diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/metadata/SegmentIntervalsHashIdGeneratorSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/metadata/SegmentIntervalsHashIdGeneratorSpec.groovy index ab689f6c2c..a5e084b5bd 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/metadata/SegmentIntervalsHashIdGeneratorSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/metadata/SegmentIntervalsHashIdGeneratorSpec.groovy @@ -12,7 +12,7 @@ import com.yahoo.bard.webservice.druid.model.query.LookbackQuery import com.yahoo.bard.webservice.druid.model.query.LookbackQuerySpec import com.yahoo.bard.webservice.druid.model.query.TimeSeriesQuery import com.yahoo.bard.webservice.druid.model.query.TimeSeriesQuerySpec -import com.yahoo.bard.webservice.table.PhysicalTable +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.table.PhysicalTableDictionary import com.yahoo.bard.webservice.util.DefaultingDictionary import com.yahoo.bard.webservice.util.SimplifiedIntervalList @@ -25,7 +25,6 @@ import spock.lang.Unroll import java.util.concurrent.ConcurrentSkipListMap import java.util.concurrent.atomic.AtomicReference -import java.util.function.Function import static org.joda.time.DateTimeZone.UTC @@ -102,7 +101,13 @@ class SegmentIntervalsHashIdGeneratorSpec extends BaseDataSourceMetadataSpec { TimeSeriesQuerySpec timeSeriesQuerySpec = new TimeSeriesQuerySpec() timeSeriesQuery = timeSeriesQuerySpec.defaultQuery( intervals: [interval2], - dataSource: new TableDataSource(new PhysicalTable(tableName, DefaultTimeGrain.DAY.buildZonedTimeGrain(UTC), [:])) + dataSource: new TableDataSource( + new ConcretePhysicalTable( + tableName, DefaultTimeGrain.DAY.buildZonedTimeGrain(UTC), + [] as Set, + [:] + ) + ) ) LookbackQuerySpec lookbackQuerySpec = new LookbackQuerySpec() diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/metadata/SegmentMetadataLoaderSpec.groovy 
b/fili-core/src/test/groovy/com/yahoo/bard/webservice/metadata/SegmentMetadataLoaderSpec.groovy index 83455a7c54..661bc10ccb 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/metadata/SegmentMetadataLoaderSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/metadata/SegmentMetadataLoaderSpec.groovy @@ -10,8 +10,10 @@ import com.yahoo.bard.webservice.data.metric.MetricColumn import com.yahoo.bard.webservice.druid.client.DruidWebService import com.yahoo.bard.webservice.druid.client.SuccessCallback import com.yahoo.bard.webservice.models.druid.client.impl.TestDruidWebService +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.table.PhysicalTable import com.yahoo.bard.webservice.table.PhysicalTableDictionary +import com.yahoo.bard.webservice.table.availability.ImmutableAvailability import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.datatype.jdk8.Jdk8Module @@ -33,18 +35,13 @@ class SegmentMetadataLoaderSpec extends Specification { DateTimeZone.setDefault(UTC) } TestDruidWebService druidWS = new TestDruidWebService() - def metric1 = new MetricColumn("abe") - def metric2 = new MetricColumn("lincoln") + MetricColumn metric1 = new MetricColumn("abe") + MetricColumn metric2 = new MetricColumn("lincoln") - def segmentMetaDataintervals + String intervalString + List<Interval> intervalList - def intervalString = "2014-09-04T00:00:00.000Z/2014-09-06T00:00:00.000Z" - def intervalSet = [new Interval(intervalString)] as Set - - def expectedColumnCache = [ - (metric1): intervalSet, - (metric2): intervalSet - ] + Map<MetricColumn, List<Interval>> expectedColumnCache String gappySegmentMetadataJson = """ { @@ -93,18 +90,19 @@ class SegmentMetadataLoaderSpec extends Specification { DateTimeZone.setDefault(UTC) intervalString = "2014-09-04T00:00:00.000Z/2014-09-06T00:00:00.000Z" - intervalSet = [new Interval(intervalString)] as Set - + intervalList = [new Interval(intervalString)] expectedColumnCache = [ (metric1): intervalList, (metric2): intervalList ] ["tablename"].each { - PhysicalTable table = new PhysicalTable(it, WEEK.buildZonedTimeGrain(UTC), [:]) - table.addColumn(metric1) - table.addColumn(metric2) - table.commit() + + PhysicalTable table = new ConcretePhysicalTable( + it, WEEK.buildZonedTimeGrain(UTC), + [metric1, metric2] as Set, + [:] + ) tableDict.put(it, table) } String json = """ @@ -128,7 +126,7 @@ class SegmentMetadataLoaderSpec extends Specification { loader.run() expect: "cache gets loaded as expected" - tableDict.get("tablename").getAvailableIntervals() == expectedColumnCache + tableDict.get("tablename").getAvailability() == new ImmutableAvailability("tablename", expectedColumnCache) } @@ -136,7 +134,7 @@ class SegmentMetadataLoaderSpec extends Specification { setup: SegmentMetadataLoader loader = new SegmentMetadataLoader(tableDict, dimensionDict, druidWS, MAPPER) druidWS.jsonResponse = {gappySegmentMetadataJson} - PhysicalTable table = Mock(PhysicalTable) + ConcretePhysicalTable table = Mock(ConcretePhysicalTable) SegmentMetadata capture when: @@ -157,7 +155,7 @@ class SegmentMetadataLoaderSpec extends Specification { setup: DruidWebService testWs = Mock(DruidWebService) SegmentMetadataLoader loader = new SegmentMetadataLoader(tableDict, dimensionDict, testWs, MAPPER) - PhysicalTable table = Mock(PhysicalTable) + ConcretePhysicalTable table = Mock(ConcretePhysicalTable) when: loader.querySegmentMetadata(table) diff --git
a/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/PhysicalTableSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/PhysicalTableSpec.groovy index e0504e89c2..ea83a3d3f0 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/PhysicalTableSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/PhysicalTableSpec.groovy @@ -1,4 +1,4 @@ -// Copyright 2016 Yahoo Inc. +// Copyright 2017 Yahoo Inc. // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.table @@ -15,6 +15,8 @@ import com.yahoo.bard.webservice.data.dimension.impl.KeyValueStoreDimension import com.yahoo.bard.webservice.data.dimension.impl.ScanSearchProviderManager import com.yahoo.bard.webservice.data.metric.MetricColumn import com.yahoo.bard.webservice.metadata.SegmentMetadata +import com.yahoo.bard.webservice.table.availability.Availability +import com.yahoo.bard.webservice.util.SimplifiedIntervalList import org.joda.time.Interval @@ -24,7 +26,7 @@ import spock.lang.Unroll class PhysicalTableSpec extends Specification { - @Shared PhysicalTable physicalTable + @Shared ConcretePhysicalTable physicalTable @Shared DimensionDictionary dimensionDictionary @Shared DimensionColumn dimensionColumn @@ -40,9 +42,15 @@ class PhysicalTableSpec extends Specification { @Shared Dimension dimension def setupSpec() { - physicalTable = new PhysicalTable("test table", DAY.buildZonedTimeGrain(UTC), ['dimension':'druidDim']) dimension = new KeyValueStoreDimension("dimension", null, [BardDimensionField.ID] as LinkedHashSet, MapStoreManager.getInstance("dimension"), ScanSearchProviderManager.getInstance("apiProduct")) - dimensionDictionary = new DimensionDictionary([dimension] as Set) + + physicalTable = new ConcretePhysicalTable( + "test table", + DAY.buildZonedTimeGrain(UTC), + [new DimensionColumn(dimension)] as Set, + ['dimension': 'druidDim'] + ) + dimensionDictionary = new DimensionDictionary([dimension].toSet()) dimensionColumn = new DimensionColumn(dimension) @@ -79,7 +87,7 @@ class PhysicalTableSpec extends Specification { @Unroll def "Physical table getColumnAvailability returns #expected for column #column"() { expect: - physicalTable.getIntervalsByColumnName(column) == expected + physicalTable.availability.getIntervalsByColumnName(column).toList() == new SimplifiedIntervalList(expected) as List where: column | expected @@ -98,52 +106,45 @@ class PhysicalTableSpec extends Specification { [:] ) when: - table = new PhysicalTable(name, YEAR.buildZonedTimeGrain(UTC), ["dimension":"druidDim"]) + table = new ConcretePhysicalTable( + name, + YEAR.buildZonedTimeGrain(UTC), + [new DimensionColumn(dimension)] as Set, + ["dimension": "druidDim"] + ) then: - table.availableIntervalsRef.get() != null - table.availableIntervals.isEmpty() - table.workingIntervals.isEmpty() + table.getAvailability() != null + table.getAvailability().getAvailableIntervals().isEmpty() when: table.resetColumns(noMetricMetadata, dimensionDictionary) + Availability availability = table.availability then: - table.availableIntervals.containsKey(dimensionColumn) - table.availableIntervals.get(dimensionColumn) == intervalSet3 + availability.availableIntervals.containsKey(dimensionColumn) + availability.availableIntervals.get(dimensionColumn).toList() == new SimplifiedIntervalList(intervalSet3) as List table.getDimensions() == [dimension] as Set - table.getColumns(MetricColumn.class) == [] as Set - 
table.workingIntervals.isEmpty() + table.getSchema().getColumns(MetricColumn.class) == [] as Set when: table.resetColumns(segmentMetadata, dimensionDictionary) then: table.getDimensions() == [dimension] as Set - table.getIntervalsByColumnName(metricColumn1.name) == intervalSet2 - } - - def "test the addition of columns to the table cache"() { - setup: - physicalTable.addColumn(dimensionColumn) - physicalTable.addColumn(metricColumn1) - physicalTable.addColumn(metricColumn2) - physicalTable.commit() - - def expectedCache = [(dimensionColumn): [] as Set, (metricColumn1): [] as Set, (metricColumn2) :[] as Set] - - expect: - physicalTable.getAvailableIntervals() == expectedCache + table.availability.getIntervalsByColumnName(metricColumn1.name).toList() == new SimplifiedIntervalList(intervalSet2) as List } def "test the setColumnCache() method"() { + def cacheValues = cache.collectEntries {Map.Entry<Column, Set<Interval>> it -> [(it.key) : (new SimplifiedIntervalList(it.value))]} + expect: - physicalTable.getAvailableIntervals() == cache + physicalTable.availability.getAvailableIntervals() == cacheValues } def "test the getIntervalsByColumnName() method"() { expect: - physicalTable.getIntervalsByColumnName("metric2") == intervalSet3 + physicalTable.availability.getIntervalsByColumnName("metric2").toList() == new SimplifiedIntervalList(intervalSet3).toList() } def "test the fetching of all dimensions from the table"() { @@ -153,11 +154,19 @@ class PhysicalTableSpec extends Specification { def "test physical to logical mapping is constructed correctly"() { setup: - PhysicalTable oneDimPhysicalTable = new PhysicalTable("test table", DAY.buildZonedTimeGrain(UTC), ['dimension':'druidDim']) - PhysicalTable twoDimPhysicalTable = new PhysicalTable("test table", DAY.buildZonedTimeGrain(UTC), ['dimension1':'druidDim', 'dimension2':'druidDim']) + PhysicalTable oneDimPhysicalTable = new ConcretePhysicalTable( + "test table", DAY.buildZonedTimeGrain(UTC), + [new DimensionColumn(dimension)] as Set, + ['dimension': 'druidDim'] + ) + PhysicalTable twoDimPhysicalTable = new ConcretePhysicalTable( + "test table", DAY.buildZonedTimeGrain(UTC), + [new DimensionColumn(dimension)] as Set, + ['dimension1': 'druidDim', 'dimension2': 'druidDim'] + ) expect: - oneDimPhysicalTable.getLogicalColumnNames('druidDim') == ['dimension'] as Set - twoDimPhysicalTable.getLogicalColumnNames('druidDim') == ['dimension1', 'dimension2'] as Set + oneDimPhysicalTable.getSchema().getLogicalColumnNames('druidDim') == ['dimension'] as Set + twoDimPhysicalTable.getSchema().getLogicalColumnNames('druidDim') == ['dimension1', 'dimension2'] as Set } } diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/availability/AvailabilityTestingUtils.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/availability/AvailabilityTestingUtils.groovy index 7ea0552de3..ce0140265b 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/availability/AvailabilityTestingUtils.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/availability/AvailabilityTestingUtils.groovy @@ -6,7 +6,7 @@ import com.yahoo.bard.webservice.application.JerseyTestBinder import com.yahoo.bard.webservice.data.dimension.DimensionColumn import com.yahoo.bard.webservice.data.metric.MetricColumn import com.yahoo.bard.webservice.metadata.SegmentMetadata -import com.yahoo.bard.webservice.table.PhysicalTable +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.table.PhysicalTableDictionary import
org.joda.time.Interval @@ -39,14 +39,14 @@ class AvailabilityTestingUtils extends Specification { physicalTableDictionary .findAll { tableName, _ -> tableName in tableNames} - .each { _, PhysicalTable table -> - Map<String, Set<Interval>> metricIntervals = table.getColumns(MetricColumn.class) + .each { _, ConcretePhysicalTable table -> + Map<String, Set<Interval>> metricIntervals = table.getSchema().getColumns(MetricColumn.class) .collectEntries { [(it.name): intervalSet] } - Map<String, Set<Interval>> dimensionIntervals = table.getColumns(DimensionColumn.class) + Map<String, Set<Interval>> dimensionIntervals = table.getSchema().getColumns(DimensionColumn.class) .collectEntries { - [(table.getPhysicalColumnName(it.getDimension().getApiName())): intervalSet] + [(it.getDimension().getApiName()): intervalSet] } // set new cache table.resetColumns( diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/resolver/BasePhysicalTableResolverSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/resolver/BasePhysicalTableResolverSpec.groovy index f62e9bc63d..2d75dc9b44 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/resolver/BasePhysicalTableResolverSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/resolver/BasePhysicalTableResolverSpec.groovy @@ -2,16 +2,19 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.table.resolver -import com.google.common.collect.Sets +import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY +import static org.joda.time.DateTimeZone.UTC + import com.yahoo.bard.webservice.data.metric.TemplateDruidQuery -import com.yahoo.bard.webservice.data.time.DefaultTimeGrain import com.yahoo.bard.webservice.druid.model.query.AllGranularity +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.table.LogicalTable import com.yahoo.bard.webservice.table.PhysicalTable import com.yahoo.bard.webservice.table.TableGroup import com.yahoo.bard.webservice.web.DataApiRequest -import org.joda.time.DateTimeZone +import com.google.common.collect.Sets + import spock.lang.Shared import spock.lang.Specification import spock.lang.Unroll @@ -62,8 +65,8 @@ class BasePhysicalTableResolverSpec extends Specification { three.getName() >> "three" three.toString() >> "three" - pickFirst = { PhysicalTable table1, PhysicalTable table2 -> table1 } - pickLast = { PhysicalTable table1, PhysicalTable table2 -> table2 } + pickFirst = { PhysicalTable table1, PhysicalTable table2 -> table1 } as BinaryOperator + pickLast = { PhysicalTable table1, PhysicalTable table2 -> table2 } as BinaryOperator matchAll = new PhysicalTableMatcher() { @Override boolean test(PhysicalTable table) { @@ -128,7 +131,8 @@ class BasePhysicalTableResolverSpec extends Specification { LogicalTable logical = Mock(LogicalTable.class) TableGroup group = Mock(TableGroup.class) logical.getTableGroup() >> group - group.getPhysicalTables() >> Sets.newHashSet(new PhysicalTable("table_name", DefaultTimeGrain.DAY.buildZonedTimeGrain(DateTimeZone.UTC), [:])) + PhysicalTable table = new ConcretePhysicalTable("table_name", DAY.buildZonedTimeGrain(UTC), [] as Set, [:]) + group.getPhysicalTables() >> Sets.newHashSet(table) request.getTable() >> logical } @Unroll diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/resolver/DefaultPhysicalTableResolverSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/resolver/DefaultPhysicalTableResolverSpec.groovy index 92ba57e95c..183a73328c
100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/resolver/DefaultPhysicalTableResolverSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/resolver/DefaultPhysicalTableResolverSpec.groovy @@ -17,6 +17,7 @@ import com.yahoo.bard.webservice.data.time.ZonelessTimeGrain import com.yahoo.bard.webservice.data.volatility.DefaultingVolatileIntervalsService import com.yahoo.bard.webservice.druid.model.query.AllGranularity import com.yahoo.bard.webservice.druid.model.query.Granularity +import com.yahoo.bard.webservice.table.PhysicalTable import com.yahoo.bard.webservice.web.DataApiRequest import org.joda.time.Interval @@ -429,8 +430,8 @@ class DefaultPhysicalTableResolverSpec extends Specification { BinaryOperator betterTable = resolver.getBetterTableOperator(apiRequest, query) expect: - [table1, table2].stream().reduce(betterTable).get() == [table1, table1, table2].get(which) - [table2, table1].stream().reduce(betterTable).get() == [table2, table1, table2].get(which) + [(PhysicalTable) table1, (PhysicalTable) table2].stream().reduce(betterTable).get() == [table1, table1, table2].get(which) + [(PhysicalTable) table2, (PhysicalTable) table1].stream().reduce(betterTable).get() == [table2, table1, table2].get(which) where: interval | table1 | table2 | which | grain diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/resolver/SchemaPhysicalTableMatcherSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/resolver/SchemaPhysicalTableMatcherSpec.groovy index a6ea6ffdcd..1f20f19dd2 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/resolver/SchemaPhysicalTableMatcherSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/table/resolver/SchemaPhysicalTableMatcherSpec.groovy @@ -5,15 +5,16 @@ package com.yahoo.bard.webservice.table.resolver import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import static org.joda.time.DateTimeZone.UTC -import com.yahoo.bard.webservice.data.dimension.DimensionField -import com.yahoo.bard.webservice.data.dimension.DimensionDictionary +import com.yahoo.bard.webservice.data.dimension.BardDimensionField import com.yahoo.bard.webservice.data.dimension.Dimension import com.yahoo.bard.webservice.data.dimension.DimensionColumn -import com.yahoo.bard.webservice.data.dimension.BardDimensionField +import com.yahoo.bard.webservice.data.dimension.DimensionDictionary +import com.yahoo.bard.webservice.data.dimension.DimensionField import com.yahoo.bard.webservice.data.dimension.MapStoreManager import com.yahoo.bard.webservice.data.dimension.impl.KeyValueStoreDimension import com.yahoo.bard.webservice.data.dimension.impl.ScanSearchProviderManager import com.yahoo.bard.webservice.data.metric.TemplateDruidQuery +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.table.PhysicalTable import com.yahoo.bard.webservice.web.DataApiRequest @@ -34,12 +35,6 @@ class SchemaPhysicalTableMatcherSpec extends Specification { def setup() { // table containing a logical name for a dimension same as physical name for other dimension - physicalTable = new PhysicalTable( - "test table", - DAY.buildZonedTimeGrain(UTC), - ['dimA':'druidDimA', 'dimCommon': 'druidDimC', 'dimB': 'dimCommon'] - ) - dimensionFields = [BardDimensionField.ID, BardDimensionField.DESC] as LinkedHashSet dimSet = [ @@ -69,11 +64,14 @@ class SchemaPhysicalTableMatcherSpec extends Specification { ), ] as Set - dimSet.each { - 
physicalTable.addColumn(DimensionColumn.addNewDimensionColumn(physicalTable, it)) - } + physicalTable = new ConcretePhysicalTable( + "test table", + DAY.buildZonedTimeGrain(UTC), + dimSet.collect {new DimensionColumn(it)}.toSet(), + ['dimA':'druidDimA', 'dimCommon': 'druidDimC', 'dimB': 'dimCommon'] + ) + - physicalTable.commit() dimensionDictionary = new DimensionDictionary(dimSet) schemaPhysicalTableMatcher = new SchemaPhysicalTableMatcher( request, diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/util/TableUtilsSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/util/TableUtilsSpec.groovy index be73c01a17..c00b909611 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/util/TableUtilsSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/util/TableUtilsSpec.groovy @@ -3,13 +3,9 @@ package com.yahoo.bard.webservice.util import com.yahoo.bard.webservice.data.dimension.Dimension -import com.yahoo.bard.webservice.data.time.DefaultTimeGrain -import com.yahoo.bard.webservice.druid.model.datasource.TableDataSource import com.yahoo.bard.webservice.druid.model.query.AbstractDruidAggregationQuery -import com.yahoo.bard.webservice.table.PhysicalTable import com.yahoo.bard.webservice.web.DataApiRequest -import org.joda.time.DateTimeZone import spock.lang.Shared import spock.lang.Specification import spock.lang.Unroll diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/AggregatabilityValidationSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/AggregatabilityValidationSpec.groovy index 0a569f66d4..fc280f1e48 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/AggregatabilityValidationSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/AggregatabilityValidationSpec.groovy @@ -2,19 +2,16 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. 
package com.yahoo.bard.webservice.web -import com.yahoo.bard.webservice.table.PhysicalTable -import org.joda.time.DateTimeZone - import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import com.yahoo.bard.webservice.data.dimension.BardDimensionField import com.yahoo.bard.webservice.data.dimension.Dimension -import com.yahoo.bard.webservice.data.dimension.DimensionColumn import com.yahoo.bard.webservice.data.dimension.DimensionDictionary import com.yahoo.bard.webservice.data.dimension.DimensionField import com.yahoo.bard.webservice.data.dimension.MapStoreManager import com.yahoo.bard.webservice.data.dimension.impl.KeyValueStoreDimension import com.yahoo.bard.webservice.data.dimension.impl.ScanSearchProviderManager +import com.yahoo.bard.webservice.data.metric.MetricDictionary import com.yahoo.bard.webservice.table.LogicalTable import com.yahoo.bard.webservice.table.TableGroup @@ -70,11 +67,9 @@ class AggregatabilityValidationSpec extends Specification { dimensionDict.add(keyValueStoreDimension) } TableGroup tg = Mock(TableGroup) + tg.getApiMetricNames() >> ([] as Set) tg.getDimensions() >> dimensionDict.apiNameToDimension.values() - table = new LogicalTable("name", DAY, tg) - dimensionDict.apiNameToDimension.values().each { - DimensionColumn.addNewDimensionColumn(table, it) - } + table = new LogicalTable("name", DAY, tg, new MetricDictionary()) } @Unroll diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/DataApiRequestFilterSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/DataApiRequestFilterSpec.groovy index c33ac22e19..d1658ddcce 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/DataApiRequestFilterSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/DataApiRequestFilterSpec.groovy @@ -2,14 +2,11 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. 
package com.yahoo.bard.webservice.web -import com.yahoo.bard.webservice.table.PhysicalTable - import static com.yahoo.bard.webservice.config.BardFeatureFlag.DATA_FILTER_SUBSTRING_OPERATIONS import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import com.yahoo.bard.webservice.data.dimension.BardDimensionField import com.yahoo.bard.webservice.data.dimension.Dimension -import com.yahoo.bard.webservice.data.dimension.DimensionColumn import com.yahoo.bard.webservice.data.dimension.DimensionDictionary import com.yahoo.bard.webservice.data.dimension.MapStoreManager import com.yahoo.bard.webservice.data.dimension.impl.KeyValueStoreDimension @@ -59,11 +56,9 @@ class DataApiRequestFilterSpec extends Specification { metricDict.put(name, new LogicalMetric(null, null, name)) } TableGroup tg = Mock(TableGroup) + tg.getApiMetricNames() >> ([] as Set) tg.getDimensions() >> dimensionDict.apiNameToDimension.values() - table = new LogicalTable("name", DAY, tg) - dimensionDict.apiNameToDimension.values().each { - DimensionColumn.addNewDimensionColumn(table, it) - } + table = new LogicalTable("name", DAY, tg, metricDict) } def cleanupSpec() { @@ -121,7 +116,7 @@ class DataApiRequestFilterSpec extends Specification { setup: String expectedMessage = ErrorMessageFormat.FILTER_FIELD_NOT_IN_DIMENSIONS.format('unknown', 'locale') when: - new DataApiRequest().generateFilters("locale|unknown-in:[US,India],locale.id-eq:[5]", table, dimensionDict) + new DataApiRequest().generateFilters("locale|unknown-in[US,India],locale.id-eq[5]", table, dimensionDict) then: Exception e = thrown(BadApiRequestException) @@ -132,11 +127,12 @@ class DataApiRequestFilterSpec extends Specification { setup: TableGroup tg = Mock(TableGroup) tg.getDimensions() >> ([] as Set) - table = new LogicalTable("name", DAY, tg) + tg.getApiMetricNames() >> ([] as Set) + table = new LogicalTable("name", DAY, tg, metricDict) String expectedMessage = ErrorMessageFormat.FILTER_DIMENSION_NOT_IN_TABLE.format('locale', 'name') when: - new DataApiRequest().generateFilters("locale|id-in:[US,India],locale.id-eq:[5]", table, dimensionDict) + new DataApiRequest().generateFilters("locale|id-in[US,India],locale.id-eq[5]", table, dimensionDict) then: Exception e = thrown(BadApiRequestException) @@ -147,7 +143,7 @@ class DataApiRequestFilterSpec extends Specification { setup: String expectedMessage = ErrorMessageFormat.FILTER_DIMENSION_UNDEFINED.format('undefined') when: - new DataApiRequest().generateFilters("undefined|id-in:[US,India],locale.id-eq:[5]", table, dimensionDict) + new DataApiRequest().generateFilters("undefined|id-in[US,India],locale.id-eq[5]", table, dimensionDict) then: Exception e = thrown(BadApiRequestException) diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/DataApiRequestIntervalsSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/DataApiRequestIntervalsSpec.groovy index 01e56a724c..1521a75709 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/DataApiRequestIntervalsSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/DataApiRequestIntervalsSpec.groovy @@ -2,8 +2,6 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. 
package com.yahoo.bard.webservice.web -import com.yahoo.bard.webservice.table.PhysicalTable - import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.MONTH import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.QUARTER @@ -11,7 +9,6 @@ import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.WEEK import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.YEAR import com.yahoo.bard.webservice.data.dimension.BardDimensionField -import com.yahoo.bard.webservice.data.dimension.DimensionColumn import com.yahoo.bard.webservice.data.dimension.DimensionDictionary import com.yahoo.bard.webservice.data.dimension.DimensionField import com.yahoo.bard.webservice.data.dimension.MapStoreManager @@ -76,10 +73,8 @@ class DataApiRequestIntervalsSpec extends Specification { } TableGroup tg = Mock(TableGroup) tg.getDimensions() >> dimensionDict.apiNameToDimension.values() - table = new LogicalTable("name", DAY, tg) - dimensionDict.apiNameToDimension.values().each { - DimensionColumn.addNewDimensionColumn(table, it) - } + tg.getApiMetricNames() >> ([] as Set) + table = new LogicalTable("name", DAY, tg, metricDict) } def cleanupSpec() { diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/DataApiRequestSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/DataApiRequestSpec.groovy index 8c8012f04b..5a888dbd87 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/DataApiRequestSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/DataApiRequestSpec.groovy @@ -2,15 +2,12 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.web -import com.yahoo.bard.webservice.table.PhysicalTable - import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import static com.yahoo.bard.webservice.util.DateTimeFormatterFactory.FULLY_OPTIONAL_DATETIME_FORMATTER import static com.yahoo.bard.webservice.web.ErrorMessageFormat.TIME_ALIGNMENT import com.yahoo.bard.webservice.data.dimension.BardDimensionField import com.yahoo.bard.webservice.data.dimension.Dimension -import com.yahoo.bard.webservice.data.dimension.DimensionColumn import com.yahoo.bard.webservice.data.dimension.DimensionDictionary import com.yahoo.bard.webservice.data.dimension.DimensionField import com.yahoo.bard.webservice.data.dimension.MapStoreManager @@ -82,11 +79,9 @@ class DataApiRequestSpec extends Specification { metricDict.put(name, new LogicalMetric(null, null, name)) } TableGroup tg = Mock(TableGroup) + tg.getApiMetricNames() >> ([] as Set) tg.getDimensions() >> dimensionDict.apiNameToDimension.values() - table = new LogicalTable("name", DAY, tg) - dimensionDict.apiNameToDimension.values().each { - DimensionColumn.addNewDimensionColumn(table, it) - } + table = new LogicalTable("name", DAY, tg, metricDict) } def cleanupSpec() { diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/IntersectionReportingFlagOffSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/IntersectionReportingFlagOffSpec.groovy index fa68ac0770..af2a70700d 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/IntersectionReportingFlagOffSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/IntersectionReportingFlagOffSpec.groovy @@ -2,14 +2,10 @@ // Licensed under the terms of the Apache license. 
Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.web -import com.yahoo.bard.webservice.table.PhysicalTable -import org.joda.time.DateTimeZone - import static com.yahoo.bard.webservice.config.BardFeatureFlag.INTERSECTION_REPORTING import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import com.yahoo.bard.webservice.data.dimension.BardDimensionField -import com.yahoo.bard.webservice.data.dimension.DimensionColumn import com.yahoo.bard.webservice.data.dimension.DimensionDictionary import com.yahoo.bard.webservice.data.dimension.DimensionField import com.yahoo.bard.webservice.data.dimension.MapStoreManager @@ -48,12 +44,9 @@ class IntersectionReportingFlagOffSpec extends Specification { metricDict.put(name, new LogicalMetric(null, null, name)) } TableGroup tg = Mock(TableGroup) + tg.getApiMetricNames() >> ([] as Set) tg.getDimensions() >> dimensionDict.apiNameToDimension.values() - table = new LogicalTable("name", DAY, tg) - dimensionDict.apiNameToDimension.values().each { - DimensionColumn.addNewDimensionColumn(table, it) - } - + table = new LogicalTable("name", DAY, tg, metricDict) } def cleanup() { diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/RequestUtils.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/RequestUtils.groovy index 0fb35610a5..07d908a2b0 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/RequestUtils.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/RequestUtils.groovy @@ -2,9 +2,6 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.web -import com.yahoo.bard.webservice.table.PhysicalTable -import org.joda.time.DateTimeZone - import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import com.yahoo.bard.webservice.data.dimension.Dimension @@ -18,6 +15,9 @@ import com.yahoo.bard.webservice.druid.model.orderby.TopNMetric import com.yahoo.bard.webservice.druid.model.query.GroupByQuery import com.yahoo.bard.webservice.druid.model.query.TimeSeriesQuery import com.yahoo.bard.webservice.druid.model.query.TopNQuery +import com.yahoo.bard.webservice.table.ConcretePhysicalTable + +import org.joda.time.DateTimeZone class RequestUtils { @@ -27,7 +27,12 @@ class RequestUtils { List aggregations = [], List postAggregations = [] ) { - DataSource dataSource = new TableDataSource(new PhysicalTable(dataSourceName, DAY.buildZonedTimeGrain(DateTimeZone.UTC), [:])) + DataSource dataSource = new TableDataSource(new ConcretePhysicalTable( + dataSourceName, + DAY.buildZonedTimeGrain(DateTimeZone.UTC), + [] as Set, + [:] + )) List dimensions = [] List intervals = [] new GroupByQuery( @@ -50,7 +55,12 @@ class RequestUtils { List aggregations = [], List postAggregations = [] ) { - DataSource dataSource = new TableDataSource(new PhysicalTable(dataSourceName, DAY.buildZonedTimeGrain(DateTimeZone.UTC), [:])) + DataSource dataSource = new TableDataSource(new ConcretePhysicalTable( + dataSourceName, + DAY.buildZonedTimeGrain(DateTimeZone.UTC), + [] as Set, + [:] + )) List intervals = [] new TopNQuery( dataSource, @@ -71,7 +81,12 @@ class RequestUtils { List aggregations = [], List postAggregations = [] ) { - DataSource dataSource = new TableDataSource(new PhysicalTable(dataSourceName, DAY.buildZonedTimeGrain(DateTimeZone.UTC), [:])) + DataSource dataSource = new TableDataSource(new ConcretePhysicalTable( + dataSourceName, + 
DAY.buildZonedTimeGrain(DateTimeZone.UTC), + [] as Set, + [:] + )) List intervals = [] new TimeSeriesQuery( dataSource, diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/ResponseSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/ResponseSpec.groovy index dd096de5e0..a7e6206261 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/ResponseSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/ResponseSpec.groovy @@ -2,10 +2,8 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. package com.yahoo.bard.webservice.web -import com.yahoo.bard.webservice.data.time.DefaultTimeGrain -import com.yahoo.bard.webservice.table.PhysicalTable - import static com.yahoo.bard.webservice.config.BardFeatureFlag.PARTIAL_DATA +import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import static com.yahoo.bard.webservice.util.SimplifiedIntervalList.NO_INTERVALS import com.yahoo.bard.webservice.application.ObjectMappersSuite @@ -13,6 +11,7 @@ import com.yahoo.bard.webservice.config.SystemConfig import com.yahoo.bard.webservice.config.SystemConfigProvider import com.yahoo.bard.webservice.data.Result import com.yahoo.bard.webservice.data.ResultSet +import com.yahoo.bard.webservice.data.ResultSetSchema import com.yahoo.bard.webservice.data.dimension.BardDimensionField import com.yahoo.bard.webservice.data.dimension.Dimension import com.yahoo.bard.webservice.data.dimension.DimensionColumn @@ -23,7 +22,7 @@ import com.yahoo.bard.webservice.data.dimension.impl.KeyValueStoreDimension import com.yahoo.bard.webservice.data.dimension.impl.ScanSearchProviderManager import com.yahoo.bard.webservice.data.metric.LogicalMetric import com.yahoo.bard.webservice.data.metric.MetricColumn -import com.yahoo.bard.webservice.table.Schema +import com.yahoo.bard.webservice.table.Column import com.yahoo.bard.webservice.util.DateTimeFormatterFactory import com.yahoo.bard.webservice.util.GroovyTestUtils import com.yahoo.bard.webservice.util.JsonSlurper @@ -107,6 +106,7 @@ class ResponseSpec extends Specification { "last": "$LAST_PAGE"/ ] + Set<Column> columns Set testLogicalMetrics Response response DateTime dateTime = new DateTime(1000L * 60 * 60 * 24 * 365 * 45) @@ -243,13 +243,19 @@ class ResponseSpec extends Specification { dim3.setLastUpdated(null) dimensionColumns << new DimensionColumn(dim3) - def schema = Mock(Schema) + ResultSetSchema schema = Mock(ResultSetSchema) + columns = [] as Set + columns.addAll(metricColumnsMap.keySet()) + columns.addAll(dimensionColumns) + + schema.getColumns() >> columns schema.getColumns(_) >> { Class cls -> if (cls == MetricColumn.class) { return metricColumnsMap.keySet() as LinkedHashSet } return dimensionColumns as LinkedHashSet } + schema.getGranularity() >> DAY // Map same dimensionColumns as different DimensionRows @@ -276,7 +282,7 @@ class ResponseSpec extends Specification { Result result1 = new Result(dimensionRows1, metricValues, dateTime) Result result2 = new Result(dimensionRows2, metricValues, dateTime) - resultSet = new ResultSet([result1, result2], schema) + resultSet = new ResultSet(schema, [result1, result2]) //response without pagination response = new Response(resultSet, apiRequest, NO_INTERVALS, volatileIntervals, [:], (Pagination) null, MAPPERS) @@ -446,7 +452,7 @@ class ResponseSpec extends Specification { } dimension.setLastUpdated(null) - dimensionColumns << DimensionColumn.addNewDimensionColumn(new PhysicalTable("",
DefaultTimeGrain.DAY.buildZonedTimeGrain(DateTimeZone.UTC), [(dimension.getApiName()): dimension.getApiName()]), dimension) + dimensionColumns << new DimensionColumn(dimension) } apiRequest1.getDimensionFields() >> defaultDimensionFieldsToShow @@ -470,7 +476,9 @@ class ResponseSpec extends Specification { Map metricValues = metricColumns.collectEntries { [(it): 10] } - def schema = Mock(Schema) + ResultSetSchema schema = Mock(ResultSetSchema) + schema.getColumns() >> columns + schema.getColumns(_) >> { Class cls -> if (cls == MetricColumn.class) { return metricColumns @@ -491,7 +499,7 @@ class ResponseSpec extends Specification { Result result = new Result(dimensionRows, metricValues, dateTime) - ResultSet resultSet = new ResultSet([result, result], schema) + ResultSet resultSet = new ResultSet(schema, [result, result]) Response response = new Response( resultSet, @@ -689,7 +697,7 @@ class ResponseSpec extends Specification { */ String withMetaObject(GString jsonString, GString metaBlock) { JsonSlurper jsonSlurper = new JsonSlurper(JsonSortStrategy.SORT_NONE) - Map baseJson = jsonSlurper.parseText(jsonString) + Map baseJson = (Map) jsonSlurper.parseText(jsonString) def metaJson = jsonSlurper.parseText(metaBlock) baseJson.put("meta", metaJson) MAPPERS.getMapper().writeValueAsString(baseJson) diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/SketchIntersectionReportingResources.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/SketchIntersectionReportingResources.groovy index f825a080f3..722a4b593c 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/SketchIntersectionReportingResources.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/SketchIntersectionReportingResources.groovy @@ -35,6 +35,7 @@ import com.yahoo.bard.webservice.druid.model.postaggregation.SketchSetOperationP import com.yahoo.bard.webservice.druid.model.postaggregation.SketchSetOperationPostAggregation import com.yahoo.bard.webservice.druid.util.FieldConverterSupplier import com.yahoo.bard.webservice.druid.util.SketchFieldConverter +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.table.LogicalTable import com.yahoo.bard.webservice.table.PhysicalTable import com.yahoo.bard.webservice.table.TableGroup @@ -43,7 +44,6 @@ import org.json.JSONArray import org.json.JSONObject import spock.lang.Specification - /** * This class is a resource container for intersection report tests. 
* @ @@ -103,27 +103,14 @@ class SketchIntersectionReportingResources extends Specification { dimensionDict = new DimensionDictionary() dimensionDict.addAll([propertyDim, countryDim]) - PhysicalTable physicalTable = new PhysicalTable("NETWORK", DAY.buildZonedTimeGrain(UTC), [:]) //added dimensions to the physical table - [propertyDim, countryDim].each { - physicalTable.addColumn(DimensionColumn.addNewDimensionColumn(physicalTable, it)) - } + Set columns = [propertyDim, countryDim].collect() { new DimensionColumn(it)} - Set metrics = [buildMockName("foos"), buildMockName("fooNoBar"), buildMockName("regFoos"), buildMockName("pageViews"), buildMockName("bar"), buildMockName("wiz"), buildMockName("waz"), buildMockName("viz"), buildMockName("unregFoos"), buildMockName("ratioMetric")] + // regFoos deliberately omitted + Set metrics = [buildMockName("foos"), buildMockName("fooNoBar"), buildMockName("pageViews"), buildMockName("bar"), buildMockName("wiz"), buildMockName("waz"), buildMockName("viz"), buildMockName("unregFoos"), buildMockName("ratioMetric")] //added metrics to the physical table - metrics.each { - physicalTable.addColumn(MetricColumn.addNewMetricColumn(physicalTable, it.apiName)) - } - - physicalTable.commit() - - TableGroup tableGroup = new TableGroup([physicalTable] as LinkedHashSet, metrics) - table = new LogicalTable("NETWORK", DAY, tableGroup) - - for (Dimension dim : tableGroup.getDimensions()) { - DimensionColumn.addNewDimensionColumn(table, dim) - } + columns.addAll( metrics.collect() { new MetricColumn(it.apiName)}) metricDict = new MetricDictionary() @@ -160,11 +147,24 @@ class SketchIntersectionReportingResources extends Specification { metricDict.add(unregFoos.make()) metricDict.add(viz.make()) - LogicalMetric ratioMetric = new LogicalMetric(metricDict.get("foos").templateDruidQuery, metricDict.get("foos").calculation, "ratioMetric", "ratioMetric Long Name", "Ratios", "Dummy metric Ratio Metric description") + LogicalMetric foosMetric = metricDict.get("foos") + + LogicalMetric ratioMetric = new LogicalMetric(foosMetric.templateDruidQuery, foosMetric.calculation, "ratioMetric", "ratioMetric Long Name", "Ratios", "Dummy metric Ratio Metric description") metricDict.add(ratioMetric) - LogicalMetricColumn lmc = new LogicalMetricColumn("foos", foos.make()); - table.addColumn(lmc) + LogicalMetricColumn lmc = new LogicalMetricColumn(foosMetric); + + columns.add(lmc) + + PhysicalTable physicalTable = new ConcretePhysicalTable( + "NETWORK", + DAY.buildZonedTimeGrain(UTC), + columns, + [:] + ) + + TableGroup tableGroup = new TableGroup([physicalTable] as LinkedHashSet, metrics) + table = new LogicalTable("NETWORK", DAY, tableGroup, metricDict) JSONArray metricJsonObjArray = new JSONArray("[{\"filter\":{\"AND\":\"country|id-in[US,IN],property|id-in[114,125]\"},\"name\":\"foo\"},{\"filter\":{},\"name\":\"pageviews\"}]") JSONObject jsonobject = metricJsonObjArray.getJSONObject(0) diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/SlicesApiRequestSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/SlicesApiRequestSpec.groovy index 897e177765..739e43f7b4 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/SlicesApiRequestSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/SlicesApiRequestSpec.groovy @@ -92,7 +92,7 @@ class SlicesApiRequestSpec extends BaseDataSourceMetadataSpec { Set<Map<String, Object>> dimensionsResult = new LinkedHashSet<>() Set<Map<String, Object>> metricsResult = new LinkedHashSet<>() - table.getAvailableIntervals().each { +
table.getAvailability().getAvailableIntervals().each { Map row = new LinkedHashMap<>() row.put("intervals", it.getValue()) diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/TablesApiRequestSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/TablesApiRequestSpec.groovy index 26d74b6573..dfd2f43295 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/TablesApiRequestSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/TablesApiRequestSpec.groovy @@ -5,6 +5,7 @@ package com.yahoo.bard.webservice.web import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY import com.yahoo.bard.webservice.application.JerseyTestBinder +import com.yahoo.bard.webservice.data.metric.MetricDictionary import com.yahoo.bard.webservice.data.time.StandardGranularityParser import com.yahoo.bard.webservice.table.LogicalTable import com.yahoo.bard.webservice.table.LogicalTableDictionary @@ -81,7 +82,9 @@ class TablesApiRequestSpec extends Specification { def "check api request construction for a given table name and a given granularity"() { setup: TableGroup tg = Mock(TableGroup) - LogicalTable table = new LogicalTable("pets", DAY, tg) + tg.getApiMetricNames() >> ([] as Set) + tg.getDimensions() >> ([] as Set) + LogicalTable table = new LogicalTable("pets", DAY, tg, new MetricDictionary()) String name = "pets" tablesServlet.getLogicalTableDictionary() >> fullDictionary diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/ThetaSketchIntersectionReportingResources.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/ThetaSketchIntersectionReportingResources.groovy index e3ba13bac6..a1dffc38f4 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/ThetaSketchIntersectionReportingResources.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/ThetaSketchIntersectionReportingResources.groovy @@ -35,6 +35,8 @@ import com.yahoo.bard.webservice.druid.model.postaggregation.SketchSetOperationP import com.yahoo.bard.webservice.druid.model.postaggregation.ThetaSketchSetOperationPostAggregation import com.yahoo.bard.webservice.druid.util.FieldConverterSupplier import com.yahoo.bard.webservice.druid.util.ThetaSketchFieldConverter +import com.yahoo.bard.webservice.table.Column +import com.yahoo.bard.webservice.table.ConcretePhysicalTable import com.yahoo.bard.webservice.table.LogicalTable import com.yahoo.bard.webservice.table.PhysicalTable import com.yahoo.bard.webservice.table.TableGroup @@ -98,28 +100,16 @@ class ThetaSketchIntersectionReportingResources extends Specification { dimensionDict = new DimensionDictionary() dimensionDict.addAll([propertyDim, countryDim]) + // regFoos deliberately omitted so that it is invalid on the table + Set metrics = [buildMockName("foos"), buildMockName("fooNoBar"), buildMockName("pageViews"), buildMockName("foo"), buildMockName("wiz"), buildMockName("waz"), buildMockName("viz"), buildMockName("unregFoos"), buildMockName("ratioMetric")] - PhysicalTable physicalTable = new PhysicalTable("NETWORK", DAY.buildZonedTimeGrain(UTC), ["property":"property", "country":"country"]) + Set<Column> columns = (Set<Column>) (metrics.collect { + new MetricColumn(it.apiName) + }.toSet()) - //added dimensions to the physical table - [propertyDim, countryDim].each { - physicalTable.addColumn(DimensionColumn.addNewDimensionColumn(physicalTable, it)) - } - - Set metrics = [buildMockName("foos"), buildMockName("fooNoBar"), buildMockName("regFoos"), buildMockName("pageViews"), buildMockName("foo"), buildMockName("wiz"),
buildMockName("waz"), buildMockName("viz"), buildMockName("unregFoos"), buildMockName("ratioMetric")] - //added metrics to the physical table - metrics.each { - physicalTable.addColumn(MetricColumn.addNewMetricColumn(physicalTable, it.apiName)) - } - - physicalTable.commit() + columns.add(new DimensionColumn(propertyDim)) + columns.add(new DimensionColumn(countryDim)) - TableGroup tableGroup = new TableGroup([physicalTable] as LinkedHashSet, metrics) - table = new LogicalTable("NETWORK", DAY, tableGroup) - - for (Dimension dim : tableGroup.getDimensions()) { - DimensionColumn.addNewDimensionColumn(table, dim) - } metricDict = new MetricDictionary() @@ -156,11 +146,24 @@ class ThetaSketchIntersectionReportingResources extends Specification { metricDict.add(unregFoos.make()) metricDict.add(viz.make()) - LogicalMetric ratioMetric = new LogicalMetric(metricDict.get("foos").templateDruidQuery, metricDict.get("foos").calculation, "ratioMetric", "ratioMetric Long Name", "Ratios", "Dummy metric Ratio Metric description") + LogicalMetric foosMetric = metricDict.get("foos") + LogicalMetric ratioMetric = new LogicalMetric(foosMetric.templateDruidQuery, foosMetric.calculation, "ratioMetric", "ratioMetric Long Name", "Ratios", "Dummy metric Ratio Metric description") metricDict.add(ratioMetric) - LogicalMetricColumn lmc = new LogicalMetricColumn("foos", foos.make()); - table.addColumn(lmc) + LogicalMetricColumn lmc = new LogicalMetricColumn(foosMetric); + + columns.add(lmc) + + PhysicalTable physicalTable = new ConcretePhysicalTable( + "NETWORK", + DAY.buildZonedTimeGrain(UTC), + columns, + ["property": "property", "country": "country"] + ) + + TableGroup tableGroup = new TableGroup([physicalTable] as LinkedHashSet, metrics) + + table = new LogicalTable("NETWORK", DAY, tableGroup, metricDict) JSONArray metricJsonObjArray = new JSONArray("[{\"filter\":{\"AND\":\"country|id-in[US,IN],property|id-in[114,125]\"},\"name\":\"foo\"},{\"filter\":{},\"name\":\"pageviews\"}]") JSONObject jsonobject = metricJsonObjArray.getJSONObject(0) diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/UIJsonResponseSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/UIJsonResponseSpec.groovy index 46c2d9240f..b8d7aa92a0 100644 --- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/UIJsonResponseSpec.groovy +++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/UIJsonResponseSpec.groovy @@ -2,14 +2,13 @@ // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms. 
 package com.yahoo.bard.webservice.web
 
-import com.yahoo.bard.webservice.table.PhysicalTable
-
 import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY
 import static com.yahoo.bard.webservice.util.SimplifiedIntervalList.NO_INTERVALS
 
 import com.yahoo.bard.webservice.application.ObjectMappersSuite
 import com.yahoo.bard.webservice.data.Result
 import com.yahoo.bard.webservice.data.ResultSet
+import com.yahoo.bard.webservice.data.ResultSetSchema
 import com.yahoo.bard.webservice.data.dimension.BardDimensionField
 import com.yahoo.bard.webservice.data.dimension.Dimension
 import com.yahoo.bard.webservice.data.dimension.DimensionColumn
@@ -18,10 +17,7 @@ import com.yahoo.bard.webservice.data.dimension.DimensionRow
 import com.yahoo.bard.webservice.data.dimension.MapStoreManager
 import com.yahoo.bard.webservice.data.dimension.impl.KeyValueStoreDimension
 import com.yahoo.bard.webservice.data.dimension.impl.ScanSearchProviderManager
-import com.yahoo.bard.webservice.data.metric.LogicalMetric
 import com.yahoo.bard.webservice.data.metric.MetricColumn
-import com.yahoo.bard.webservice.data.metric.mappers.NoOpResultSetMapper
-import com.yahoo.bard.webservice.table.Schema
 import com.yahoo.bard.webservice.util.GroovyTestUtils
 import com.yahoo.bard.webservice.util.Pagination
 import com.yahoo.bard.webservice.util.SimplifiedIntervalList
@@ -34,11 +30,11 @@ import spock.lang.Specification
 class UIJsonResponseSpec extends Specification {
     private static final ObjectMappersSuite MAPPERS = new ObjectMappersSuite()
 
-    Schema newSchema
+    ResultSetSchema newSchema
     Map dimensionRows
     Map metricValues
     DateTime timeStamp
-    Map<Dimension, LinkedHashSet<DimensionField>> defaultDimensionFieldsToShow
+    LinkedHashMap<Dimension, LinkedHashSet<DimensionField>> defaultDimensionFieldsToShow
     SimplifiedIntervalList volatileIntervals = []
 
@@ -49,8 +45,6 @@ class UIJsonResponseSpec extends Specification {
         // Build a default timestamp
         timeStamp = new DateTime(10000)
 
-        // Build a default schema
-        newSchema = new Schema(DAY)
 
         LinkedHashSet dimensionFields = new LinkedHashSet<>()
         dimensionFields.add(BardDimensionField.ID)
@@ -66,9 +60,9 @@ class UIJsonResponseSpec extends Specification {
                 [] as Set
         )
         newDimension.setLastUpdated(timeStamp)
-        DimensionColumn dimensionColumn = DimensionColumn.addNewDimensionColumn(newSchema, newDimension)
-        MetricColumn metricColumn1 = MetricColumn.addNewMetricColumn(newSchema, "metricColumn1Name")
-        MetricColumn metricColumn2 = MetricColumn.addNewMetricColumn(newSchema, "metricColumn2Name")
+        DimensionColumn dimensionColumn = new DimensionColumn(newDimension)
+        MetricColumn metricColumn1 = new MetricColumn("metricColumn1Name")
+        MetricColumn metricColumn2 = new MetricColumn("metricColumn2Name")
 
         // Build a default dimension row
         DimensionRow dimensionRow = BardDimensionField.makeDimensionRow(
@@ -87,16 +81,20 @@ class UIJsonResponseSpec extends Specification {
         defaultDimensionFieldsToShow = [
                 (newDimension): dimensionFields
         ]
+
+        // Build a default schema
+        newSchema = new ResultSetSchema(DAY, [dimensionColumn, metricColumn1, metricColumn2] as Set)
+
     }
 
     def "Get single row response"() {
         given: "A Result Set with one row"
         Result r1 = new Result(dimensionRows, metricValues, timeStamp)
-        ResultSet resultSet = new ResultSet([r1], newSchema)
+        ResultSet resultSet = new ResultSet(newSchema, [r1])
 
         and: "An API Request"
-        Set apiMetricColumnNames = getApiMetricColumnNames()
+        LinkedHashSet apiMetricColumnNames = getApiMetricColumnNames()
 
         and: "An expected json serialization"
@@ -136,11 +134,11 @@ class UIJsonResponseSpec extends Specification {
         given: "A Result Set with multiple rows"
         Result r1 = new Result(dimensionRows, metricValues, timeStamp)
-        ResultSet resultSet = new ResultSet([r1, r1, r1], newSchema)
+        ResultSet resultSet = new ResultSet(newSchema, [r1, r1, r1])
 
         and: "An API Request"
         DataApiRequest apiRequest = Mock(DataApiRequest)
-        Set apiMetricColumnNames = getApiMetricColumnNames()
+        LinkedHashSet apiMetricColumnNames = getApiMetricColumnNames()
 
         apiRequest.getDimensionFields() >> defaultDimensionFieldsToShow
diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/endpoints/DruidLimitSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/endpoints/DruidLimitSpec.groovy
index 38a8035547..9bd248597c 100644
--- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/endpoints/DruidLimitSpec.groovy
+++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/endpoints/DruidLimitSpec.groovy
@@ -58,8 +58,7 @@ class DruidLimitSpec extends BaseDataServletComponentSpec {
                     "type": "longSum"
                 }
             ],
-            "postAggregations": []
-            ,
+            "postAggregations": [],
             "intervals": ["2014-06-02T00:00:00.000Z/2014-06-30T00:00:00.000Z"],
             "limitSpec": {
                 "type": "default",
diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/handlers/PartialDataResultSetMapperSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/handlers/PartialDataResultSetMapperSpec.groovy
index f38d7bccf3..8761b674fd 100644
--- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/handlers/PartialDataResultSetMapperSpec.groovy
+++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/handlers/PartialDataResultSetMapperSpec.groovy
@@ -7,10 +7,10 @@ import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.YEAR
 
 import com.yahoo.bard.webservice.data.PartialDataHandler
 import com.yahoo.bard.webservice.data.Result
+import com.yahoo.bard.webservice.data.ResultSetSchema
 import com.yahoo.bard.webservice.data.metric.mappers.PartialDataResultSetMapper
 import com.yahoo.bard.webservice.druid.model.query.AllGranularity
 import com.yahoo.bard.webservice.table.PhysicalTableDictionary
-import com.yahoo.bard.webservice.table.Schema
 import com.yahoo.bard.webservice.util.SimplifiedIntervalList
 
 import org.joda.time.DateTime
@@ -26,7 +26,7 @@ public class PartialDataResultSetMapperSpec extends Specification {
     PhysicalTableDictionary physicalTableDictionary = Mock(PhysicalTableDictionary)
     SimplifiedIntervalList intervals = SimplifiedIntervalList.NO_INTERVALS
 
-    Schema schema = Mock(Schema)
+    ResultSetSchema schema = Mock(ResultSetSchema)
     Result result = Mock(Result)
     PartialDataHandler handler = new PartialDataHandler()
 
@@ -65,7 +65,7 @@ public class PartialDataResultSetMapperSpec extends Specification {
     @Unroll
     def "If #gapIntervals gap overlaps a fixed request, data is filtered: #filtered"() {
         setup: "Given a request for a fixed duration and a year"
-        Schema schema = Mock(Schema)
+        ResultSetSchema schema = Mock(ResultSetSchema)
         schema.getGranularity() >> YEAR
 
         DateTime start = new DateTime("2014-01-01")
@@ -130,8 +130,8 @@ public class PartialDataResultSetMapperSpec extends Specification {
     @Unroll
     def "Under all time grain, missing data #missingIntervals and volatileData #missingIntervals is filtered: #filtered"() {
         setup: "Given an all time grain request"
-        Schema schema1 = Mock(Schema)
-        schema1.getGranularity() >> AllGranularity.INSTANCE
+        ResultSetSchema schema = Mock(ResultSetSchema)
+        schema.getGranularity() >> AllGranularity.INSTANCE
 
         and: "some possibly missing or volatile intervals"
         SimplifiedIntervalList missingData = buildIntervalList(missingIntervals)
@@ -140,7 +140,7 @@ public class PartialDataResultSetMapperSpec extends Specification {
         Result result = new Result([:], [:], new DateTime("2014"))
 
         expect:
-        mapper.map(result, schema1) == filtered ? null : result
+        mapper.map(result, schema) == (filtered ? null : result)
 
         where:
         missingIntervals | volatileIntervals | filtered
diff --git a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/responseprocessors/ResultSetResponseProcessorSpec.groovy b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/responseprocessors/ResultSetResponseProcessorSpec.groovy
index eeec865ca5..afe2e6eef9 100644
--- a/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/responseprocessors/ResultSetResponseProcessorSpec.groovy
+++ b/fili-core/src/test/groovy/com/yahoo/bard/webservice/web/responseprocessors/ResultSetResponseProcessorSpec.groovy
@@ -3,12 +3,14 @@ package com.yahoo.bard.webservice.web.responseprocessors
 
 import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY
+import static com.yahoo.bard.webservice.druid.model.DefaultQueryType.GROUP_BY
 
 import com.yahoo.bard.webservice.application.ObjectMappersSuite
 import com.yahoo.bard.webservice.data.DruidResponseParser
 import com.yahoo.bard.webservice.data.HttpResponseChannel
 import com.yahoo.bard.webservice.data.HttpResponseMaker
 import com.yahoo.bard.webservice.data.ResultSet
+import com.yahoo.bard.webservice.data.ResultSetSchema
 import com.yahoo.bard.webservice.data.dimension.BardDimensionField
 import com.yahoo.bard.webservice.data.dimension.Dimension
 import com.yahoo.bard.webservice.data.dimension.DimensionColumn
@@ -26,13 +28,15 @@ import com.yahoo.bard.webservice.druid.model.query.DruidAggregationQuery
 import com.yahoo.bard.webservice.druid.model.query.Granularity
 import com.yahoo.bard.webservice.druid.model.query.GroupByQuery
 import com.yahoo.bard.webservice.logging.RequestLog
-import com.yahoo.bard.webservice.table.PhysicalTable
+import com.yahoo.bard.webservice.table.ConcretePhysicalTable
 import com.yahoo.bard.webservice.table.Schema
-import com.yahoo.bard.webservice.table.ZonedSchema
 import com.yahoo.bard.webservice.web.DataApiRequest
 import com.yahoo.bard.webservice.web.ResponseFormatType
 
 import com.fasterxml.jackson.databind.JsonNode
+import com.fasterxml.jackson.databind.ObjectMapper
+import com.fasterxml.jackson.datatype.jdk8.Jdk8Module
+import com.google.common.collect.Sets
 
 import org.joda.time.DateTimeZone
@@ -47,8 +51,12 @@ import javax.ws.rs.core.Response
 import javax.ws.rs.core.UriInfo
 
 class ResultSetResponseProcessorSpec extends Specification {
+    private static final ObjectMappersSuite MAPPERS = new ObjectMappersSuite()
+    private static final ObjectMapper MAPPER = new ObjectMapper()
+            .registerModule(new Jdk8Module().configureAbsentsAsNulls(false))
+
     HttpResponseMaker httpResponseMaker
     GroupByQuery groupByQuery
     DataApiRequest apiRequest
@@ -59,7 +67,12 @@ class ResultSetResponseProcessorSpec extends Specification {
     PathSegment pathSegment
     MultivaluedMap paramMap
 
+    String dimension1Name = "dimension1"
     Dimension d1
+    Dimension d2
+
+    String metric1Name = "agg1"
+    String metric2Name = "postagg1"
 
     ResultSetMapper rsm1
     LogicalMetric lm1
@@ -93,38 +106,43 @@ class ResultSetResponseProcessorSpec extends Specification {
         apiRequest.getGranularity() >> DAY
         apiRequest.getFormat() >> ResponseFormatType.JSON
         apiRequest.getUriInfo() >> uriInfo
-        apiRequest.getTimeZone() >> DateTimeZone.UTC
 
         List dimensions = new ArrayList()
         List aggregations = new ArrayList()
         List postAggs = new ArrayList()
 
         d1 = Mock(Dimension)
         dimensions.add(d1)
-        d1.getApiName() >> "dimension1"
+        d1.getApiName() >> dimension1Name
+        Dimension d2 = Mock(Dimension)
+        dimensions.add(d2)
+        d2.getApiName() >> "dimension2"
+
         Aggregation agg1 = Mock(Aggregation)
         aggregations.add(agg1)
-        agg1.getName() >> "agg1"
+        agg1.getName() >> metric1Name
+        Aggregation agg2 = Mock(Aggregation)
+        agg2.getName() >> "otherAgg"
+        aggregations.add(agg2)
 
         PostAggregation postAgg1 = Mock(PostAggregation)
         postAggs.add(postAgg1)
-        postAgg1.getName() >> "postAgg1"
+        postAgg1.getName() >> metric2Name
+        PostAggregation postAgg2 = Mock(PostAggregation)
+        postAggs.add(postAgg2)
+        postAgg2.getName() >> "otherPostAgg"
 
-        DefaultQueryType queryType = DefaultQueryType.GROUP_BY
+
+        DefaultQueryType queryType = GROUP_BY
         groupByQuery.getQueryType() >> queryType
         groupByQuery.getDimensions() >> dimensions
         groupByQuery.getAggregations() >> aggregations
         groupByQuery.getPostAggregations() >> postAggs
-        groupByQuery.getDataSource() >> new TableDataSource(new PhysicalTable("table_name", DAY.buildZonedTimeGrain(DateTimeZone.UTC), ["dimension1":"dimension1"]))
+        groupByQuery.getDataSource() >> new TableDataSource(new ConcretePhysicalTable("table_name", DAY.buildZonedTimeGrain(DateTimeZone.UTC), [] as Set, ["dimension1":"dimension1"]))
+
+
 
-        Dimension d = Mock(Dimension)
-        dimensions.add(d)
-        Aggregation agg = Mock(Aggregation)
-        aggregations.add(agg)
-        PostAggregation postAgg = Mock(PostAggregation)
-        postAggs.add(postAgg)
+        ResultSetSchema schema = new ResultSetSchema(DAY, Sets.newHashSet(new MetricColumn("lm1")))
 
-        Schema schema = new Schema(DAY)
-        MetricColumn.addNewMetricColumn(schema, "lm1")
         rs1 = Mock(ResultSet)
         rs1.getSchema() >> schema
@@ -150,14 +168,51 @@ class ResultSetResponseProcessorSpec extends Specification {
         resultSetResponseProcessor.druidResponseParser == druidResponseParser
     }
 
+    def "Test buildResultSet"() {
+        setup:
+        def resultSetResponseProcessor = new ResultSetResponseProcessor(
+                apiRequest,
+                responseEmitter,
+                druidResponseParser,
+                MAPPERS,
+                httpResponseMaker
+        )
+        JsonNode jsonMock = Mock(JsonNode)
+        ResultSetSchema captureSchema = null
+        JsonNode captureJson
+        ResultSet rs = Mock(ResultSet)
+
+        1 * druidResponseParser.parse(_, _, _, _) >> {
+            JsonNode json, Schema schema, DefaultQueryType type, DateTimeZone dateTimeZone
+                ->
+                captureSchema = schema;
+                captureJson = json;
+                rs
+        }
+        ResultSet actual
+
+        when:
+        actual = resultSetResponseProcessor.buildResultSet(jsonMock, groupByQuery, DateTimeZone.UTC)
+        DimensionColumn dimCol = captureSchema.getColumn(dimension1Name, DimensionColumn.class).get()
+        MetricColumn m1 = captureSchema.getColumn(metric1Name, MetricColumn.class).get()
+        MetricColumn m2 = captureSchema.getColumn(metric2Name, MetricColumn.class).get()
+        Optional m3 = captureSchema.getColumn("fakeMetric", MetricColumn.class)
+
+        then:
+        captureSchema.granularity == DAY
+        dimCol.dimension == d1
+        m1 != null
+        m2 != null
+        m3 == Optional.empty()
+        actual == rs
+
+    }
+
     def "Test processResponse"() {
         setup:
         JsonNode jsonMock = Mock(JsonNode)
         ResultSet resultSetMock = Mock(ResultSet)
-        Schema captureSchema = null
-        JsonNode captureJson = null
-        ResultSet actual = null
-        ZonedSchema zonedSchema
+
         ResultSetResponseProcessor resultSetResponseProcessor = new ResultSetResponseProcessor(
                 apiRequest,
                 responseEmitter,
@@ -166,10 +221,17 @@ class ResultSetResponseProcessorSpec extends Specification {
                 httpResponseMaker
         ) {
             @Override
-            protected ResultSet mapResultSet(ResultSet resultSet) {
-                actual = resultSet
-                resultSet
+            public ResultSet buildResultSet(
+                    JsonNode json,
+                    DruidAggregationQuery groupByQuery,
+                    DateTimeZone dateTimeZone
+            ) {
+                json.clone();
+                return resultSetMock
             }
+
+            @Override
+            protected ResultSet mapResultSet(ResultSet resultSet) { resultSet.getSchema(); return resultSet }
         }
 
         when:
@@ -180,32 +242,9 @@ class ResultSetResponseProcessorSpec extends Specification {
         )
 
         then:
-        1 * druidResponseParser.buildSchema(_, _, _) >> { DruidAggregationQuery q, Granularity g, DateTimeZone tz ->
-            zonedSchema = new ZonedSchema(g, tz)
-            for (Aggregation aggregation : q.getAggregations()) {
-                MetricColumn.addNewMetricColumn(zonedSchema, aggregation.getName())
-            }
-
-            for (PostAggregation postAggregation : q.getPostAggregations()) {
-                MetricColumn.addNewMetricColumn(zonedSchema, postAggregation.getName())
-            }
-
-            for (Dimension dimension : q.getDimensions()) {
-                DimensionColumn.addNewDimensionColumn(zonedSchema, dimension)
-            }
-            zonedSchema
-        }
-        1 * druidResponseParser.parse(_,_,_) >> { JsonNode json, Schema schema, DefaultQueryType type ->
-            captureSchema = schema
-            captureJson = json
-            resultSetMock
-        }
-        1 * resultSetMock.getSchema() >> { zonedSchema }
-        captureSchema.granularity == DAY
-        captureSchema.getColumn("dimension1").dimension == d1
-        captureSchema.getColumn("agg1") != null
-        captureSchema.getColumn("postAgg1") != null
-        actual == resultSetMock
+        1 * jsonMock.clone()
+        2 * resultSetMock.getSchema()
+        1 == 1
     }
 
     def "Test failure callback"() {
@@ -221,11 +260,9 @@ class ResultSetResponseProcessorSpec extends Specification {
         Throwable t = new Throwable("message1234")
         Response responseCaptor = null
         1 * httpResponseChannel.asyncResponse.resume(_) >> { javax.ws.rs.core.Response r -> responseCaptor = r }
-
         when:
         fbc.invoke(t)
         String entity = responseCaptor.entity
-
        then:
         responseCaptor.getStatus() == 500
         entity.contains("message1234")
@@ -253,4 +290,39 @@ class ResultSetResponseProcessorSpec extends Specification {
         entity.contains("myreason")
         entity.contains("body123")
     }
+
+    def "Build the schema from the query"() {
+        setup:
+        ResultSetResponseProcessor processor = new ResultSetResponseProcessor(
+                apiRequest,
+                responseEmitter,
+                druidResponseParser,
+                MAPPERS,
+                httpResponseMaker
+        )
+
+        Granularity granularity = apiRequest.granularity
+        DateTimeZone dateTimeZone = Mock(DateTimeZone)
+        Dimension dim = Mock(Dimension) { getApiName() >> dimension1Name }
+        Aggregation agg = Mock(Aggregation) { getName() >> metric1Name }
+        PostAggregation postAgg = Mock(PostAggregation) { getName() >> metric2Name }
+        DruidAggregationQuery query = Mock(DruidAggregationQuery){
+            getAggregations() >> [agg]
+            getPostAggregations() >> [postAgg]
+            getDimensions() >> [dim]
+            getQueryType() >> GROUP_BY
+        }
+
+        druidResponseParser.parse(_, _, _, _) >> {
+            JsonNode json, Schema schema, DefaultQueryType type, DateTimeZone tz ->
+                new ResultSet(schema, [])
+        }
+        ResultSet actual = processor.buildResultSet(MAPPER.readTree("[]"), query, dateTimeZone)
+
+        expect:
+        actual.schema == new ResultSetSchema(
+                granularity,
+                [new DimensionColumn(dim), new MetricColumn(metric1Name), new MetricColumn(metric2Name)]
+        )
+    }
 }
diff --git a/fili-core/src/test/java/com/yahoo/bard/webservice/application/TestBinderFactory.java b/fili-core/src/test/java/com/yahoo/bard/webservice/application/TestBinderFactory.java
index e06694989b..73add5a623 100644
--- a/fili-core/src/test/java/com/yahoo/bard/webservice/application/TestBinderFactory.java
+++ b/fili-core/src/test/java/com/yahoo/bard/webservice/application/TestBinderFactory.java
@@ -8,7 +8,13 @@ import static com.yahoo.bard.webservice.data.config.names.TestDruidTableName.HOURLY;
 import static com.yahoo.bard.webservice.data.config.names.TestDruidTableName.MONTHLY;
 
+import com.yahoo.bard.webservice.async.jobs.jobrows.DefaultJobField;
+import com.yahoo.bard.webservice.async.jobs.jobrows.DefaultJobRowBuilder;
+import com.yahoo.bard.webservice.async.jobs.jobrows.JobRowBuilder;
+import com.yahoo.bard.webservice.async.jobs.stores.ApiJobStore;
+import com.yahoo.bard.webservice.async.preresponses.stores.PreResponseStore;
 import com.yahoo.bard.webservice.async.workflows.AsynchronousWorkflowsBuilder;
+import com.yahoo.bard.webservice.async.workflows.TestAsynchronousWorkflowsBuilder;
 import com.yahoo.bard.webservice.config.BardFeatureFlag;
 import com.yahoo.bard.webservice.data.cache.DataCache;
 import com.yahoo.bard.webservice.data.cache.HashDataCache;
@@ -28,11 +34,6 @@ import com.yahoo.bard.webservice.data.volatility.VolatileIntervalsFunction;
 import com.yahoo.bard.webservice.data.volatility.VolatileIntervalsService;
 import com.yahoo.bard.webservice.druid.client.DruidWebService;
-import com.yahoo.bard.webservice.async.jobs.stores.ApiJobStore;
-import com.yahoo.bard.webservice.async.jobs.jobrows.DefaultJobField;
-import com.yahoo.bard.webservice.async.preresponses.stores.PreResponseStore;
-import com.yahoo.bard.webservice.async.jobs.jobrows.DefaultJobRowBuilder;
-import com.yahoo.bard.webservice.async.jobs.jobrows.JobRowBuilder;
 import com.yahoo.bard.webservice.metadata.DataSourceMetadataService;
 import com.yahoo.bard.webservice.metadata.QuerySigningService;
 import com.yahoo.bard.webservice.metadata.SegmentIntervalsHashIdGenerator;
@@ -43,7 +44,6 @@ import com.yahoo.bard.webservice.web.endpoints.JobsEndpointResources;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.yahoo.bard.webservice.async.workflows.TestAsynchronousWorkflowsBuilder;
 
 import org.glassfish.hk2.utilities.binding.AbstractBinder;
 import org.joda.time.DateTime;
@@ -51,7 +51,6 @@ import java.time.Clock;
 import java.util.Collections;
-
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.Map;
diff --git a/fili-core/src/test/java/com/yahoo/bard/webservice/data/config/metric/NonNumericMetrics.java b/fili-core/src/test/java/com/yahoo/bard/webservice/data/config/metric/NonNumericMetrics.java
index aa60b91047..247812e21d 100644
--- a/fili-core/src/test/java/com/yahoo/bard/webservice/data/config/metric/NonNumericMetrics.java
+++ b/fili-core/src/test/java/com/yahoo/bard/webservice/data/config/metric/NonNumericMetrics.java
@@ -10,12 +10,12 @@ import static com.yahoo.bard.webservice.data.config.names.TestApiMetricName.A_STRING_METRIC;
 
 import com.yahoo.bard.webservice.data.Result;
+import com.yahoo.bard.webservice.data.ResultSetSchema;
 import com.yahoo.bard.webservice.data.metric.LogicalMetric;
 import com.yahoo.bard.webservice.data.metric.MetricColumn;
 import com.yahoo.bard.webservice.data.metric.TemplateDruidQuery;
 import com.yahoo.bard.webservice.data.metric.mappers.ResultSetMapper;
 import com.yahoo.bard.webservice.druid.model.aggregation.MinAggregation;
-import com.yahoo.bard.webservice.table.Schema;
 
 import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -29,6 +29,8 @@
  */
 public class NonNumericMetrics {
 
+    public static final String UNEXPECTED_MISSING_COLUMN = "Unexpected missing column ";
+
     /**
     * Returns a list of LogicalMetrics whose names are those intended to represent non-numeric metrics.
     *
@@ -95,14 +97,17 @@ private static class StringMetricMapper extends ResultSetMapper {
         }
 
         @Override
-        protected Result map(Result result, Schema schema) {
-            MetricColumn stringColumn = schema.getColumn(A_STRING_METRIC.asName(), MetricColumn.class);
+        protected Result map(Result result, ResultSetSchema schema) {
+            MetricColumn stringColumn = schema.getColumn(A_STRING_METRIC.asName(), MetricColumn.class)
+                    .orElseThrow(
+                            () -> new IllegalStateException(UNEXPECTED_MISSING_COLUMN + A_STRING_METRIC.asName())
+                    );
             String stringValue = result.getMetricValueAsString(stringColumn);
             return result.withMetricValue(stringColumn, stringValue + stringValue);
         }
 
         @Override
-        protected Schema map(Schema schema) {
+        protected ResultSetSchema map(ResultSetSchema schema) {
             return schema;
         }
     }
@@ -120,14 +125,15 @@ private static class BooleanMetricMapper extends ResultSetMapper {
         }
 
         @Override
-        protected Result map(Result result, Schema schema) {
-            return result.getMetricValueAsBoolean(schema.getColumn(A_BOOLEAN_METRIC.asName(), MetricColumn.class)) ?
-                    result :
-                    null;
+        protected Result map(Result result, ResultSetSchema schema) {
+            MetricColumn column = schema.getColumn(A_BOOLEAN_METRIC.asName(), MetricColumn.class).orElseThrow(
+                    () -> new IllegalStateException(UNEXPECTED_MISSING_COLUMN + A_BOOLEAN_METRIC.asName())
+            );
+            return result.getMetricValueAsBoolean(column) ? result : null;
         }
 
         @Override
-        protected Schema map(Schema schema) {
+        protected ResultSetSchema map(ResultSetSchema schema) {
             return schema;
         }
     }
@@ -146,15 +152,17 @@ private static class JsonNodeMetricMapper extends ResultSetMapper {
         }
 
         @Override
-        protected Result map(Result result, Schema schema) {
-            MetricColumn column = schema.getColumn(A_JSON_NODE_METRIC.asName(), MetricColumn.class);
+        protected Result map(Result result, ResultSetSchema schema) {
+            MetricColumn column = schema.getColumn(A_JSON_NODE_METRIC.asName(), MetricColumn.class).orElseThrow(
+                    () -> new IllegalStateException(UNEXPECTED_MISSING_COLUMN + A_JSON_NODE_METRIC.asName())
+            );
             ObjectNode node = (ObjectNode) result.getMetricValueAsJsonNode(column);
             node.put("length", node.get("clarification").textValue().length());
             return result;
         }
 
         @Override
-        protected Schema map(Schema schema) {
+        protected ResultSetSchema map(ResultSetSchema schema) {
             return schema;
         }
     }
@@ -172,8 +180,12 @@ private static class NullMetricMapper extends ResultSetMapper {
         }
 
         @Override
-        protected Result map(Result result, Schema schema) {
-            Object nullMetric = result.getMetricValue(schema.getColumn(A_NULL_METRIC.asName(), MetricColumn.class));
+        protected Result map(Result result, ResultSetSchema schema) {
+            MetricColumn column = schema.getColumn(A_NULL_METRIC.asName(), MetricColumn.class).orElseThrow(
+                    () -> new IllegalStateException(UNEXPECTED_MISSING_COLUMN + A_NULL_METRIC.asName())
+            );
+
+            Object nullMetric = result.getMetricValue(column);
             if (nullMetric != null) {
                 throw new IllegalStateException(
                         String.format("Metric 'nullMetric' should be null but is: %s", nullMetric)
@@ -183,7 +195,7 @@ protected Result map(Result result, Schema schema) {
         }
 
         @Override
-        protected Schema map(Schema schema) {
+        protected ResultSetSchema map(ResultSetSchema schema) {
             return schema;
         }
     }
diff --git a/fili-core/src/test/java/com/yahoo/bard/webservice/data/config/table/TestTableLoader.java b/fili-core/src/test/java/com/yahoo/bard/webservice/data/config/table/TestTableLoader.java
index 3a3343f33e..9b7644f4d9 100644
--- a/fili-core/src/test/java/com/yahoo/bard/webservice/data/config/table/TestTableLoader.java
+++ b/fili-core/src/test/java/com/yahoo/bard/webservice/data/config/table/TestTableLoader.java
@@ -20,6 +20,7 @@
 import org.joda.time.DateTimeZone;
 
 import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
 
@@ -67,10 +68,9 @@ public TestTableLoader(TestDimensions testDimensions, DateTimeZone defaultTimeZo
     @Override
     public void loadTableDictionary(ResourceDictionaries dictionaries) {
-        Map logicalTableTableGroup = new HashMap<>();
+        Map logicalTableTableGroup = new LinkedHashMap<>();
         for (TestLogicalTableName logicalTableName : TestLogicalTableName.values()) {
             TableGroup tableGroup = buildTableGroup(
-                    logicalTableName.asName(),
                     TestApiMetricName.getByLogicalTable(logicalTableName),
                     TestDruidMetricName.getByLogicalTable(logicalTableName),
                     logicalTableTableDefinitions.get(logicalTableName),
diff --git a/fili-core/src/test/java/com/yahoo/bard/webservice/models/druid/client/impl/TestDruidWebService.java b/fili-core/src/test/java/com/yahoo/bard/webservice/models/druid/client/impl/TestDruidWebService.java
index d36d865b2b..402d800bd7 100644
--- a/fili-core/src/test/java/com/yahoo/bard/webservice/models/druid/client/impl/TestDruidWebService.java
+++ b/fili-core/src/test/java/com/yahoo/bard/webservice/models/druid/client/impl/TestDruidWebService.java
@@ -115,27 +115,26 @@ public void postDruidQuery(
         }
 
         // Set the response to use based on the type of the query we're processing
-        if (lastQuery.getQueryType() instanceof DefaultQueryType) {
-            DefaultQueryType defaultQueryType = (DefaultQueryType) lastQuery.getQueryType();
-            switch (defaultQueryType) {
-                case GROUP_BY:
-                case TOP_N:
-                case TIMESERIES:
-                case LOOKBACK:
-                    // default response is groupBy response
-                    break;
-                case SEGMENT_METADATA:
-                    jsonResponse = () -> segmentMetadataResponse;
-                    break;
-                case TIME_BOUNDARY:
-                    jsonResponse = () -> timeBoundaryResponse;
-                    break;
-                default:
-                    throw new IllegalArgumentException("Illegal query type : " + lastQuery.getQueryType());
-            }
-        } else {
-            // not a default query type. Assume the response is the default groupBy response
-            // initialized above. Don't need to do anything else here.
+        if (!(lastQuery.getQueryType() instanceof DefaultQueryType)) {
+            throw new IllegalArgumentException("Illegal query type : " + lastQuery.getQueryType());
+        }
+
+        DefaultQueryType defaultQueryType = (DefaultQueryType) lastQuery.getQueryType();
+        switch (defaultQueryType) {
+            case GROUP_BY:
+            case TOP_N:
+            case TIMESERIES:
+            case LOOKBACK:
+                // default response is groupBy response
+                break;
+            case SEGMENT_METADATA:
+                jsonResponse = () -> segmentMetadataResponse;
+                break;
+            case TIME_BOUNDARY:
+                jsonResponse = () -> timeBoundaryResponse;
+                break;
+            default:
+                throw new IllegalArgumentException("Illegal query type : " + lastQuery.getQueryType());
         }
 
         try {
diff --git a/fili-core/src/test/java/com/yahoo/bard/webservice/web/endpoints/JobsEndpointResources.java b/fili-core/src/test/java/com/yahoo/bard/webservice/web/endpoints/JobsEndpointResources.java
index 5c71134810..b3d7089f9e 100644
--- a/fili-core/src/test/java/com/yahoo/bard/webservice/web/endpoints/JobsEndpointResources.java
+++ b/fili-core/src/test/java/com/yahoo/bard/webservice/web/endpoints/JobsEndpointResources.java
@@ -2,29 +2,28 @@
 // Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms.
 package com.yahoo.bard.webservice.web.endpoints;
 
+import com.yahoo.bard.webservice.async.jobs.jobrows.DefaultJobField;
+import com.yahoo.bard.webservice.async.jobs.jobrows.JobField;
+import com.yahoo.bard.webservice.async.jobs.jobrows.JobRow;
+import com.yahoo.bard.webservice.async.jobs.stores.ApiJobStore;
+import com.yahoo.bard.webservice.async.jobs.stores.HashJobStore;
+import com.yahoo.bard.webservice.async.preresponses.stores.HashPreResponseStore;
+import com.yahoo.bard.webservice.async.preresponses.stores.PreResponseStore;
 import com.yahoo.bard.webservice.data.Result;
 import com.yahoo.bard.webservice.data.ResultSet;
+import com.yahoo.bard.webservice.data.ResultSetSchema;
 import com.yahoo.bard.webservice.data.metric.MetricColumn;
 import com.yahoo.bard.webservice.druid.model.query.AllGranularity;
 import com.yahoo.bard.webservice.druid.model.query.Granularity;
-import com.yahoo.bard.webservice.async.jobs.stores.ApiJobStore;
-import com.yahoo.bard.webservice.async.jobs.jobrows.DefaultJobField;
-import com.yahoo.bard.webservice.async.jobs.stores.HashJobStore;
-import com.yahoo.bard.webservice.async.preresponses.stores.HashPreResponseStore;
-import com.yahoo.bard.webservice.async.jobs.jobrows.JobField;
-import com.yahoo.bard.webservice.async.jobs.jobrows.JobRow;
-import com.yahoo.bard.webservice.async.preresponses.stores.PreResponseStore;
-import com.yahoo.bard.webservice.table.Schema;
-import com.yahoo.bard.webservice.table.ZonedSchema;
 import com.yahoo.bard.webservice.web.PreResponse;
 import com.yahoo.bard.webservice.web.responseprocessors.ResponseContext;
 import com.yahoo.bard.webservice.web.responseprocessors.ResponseContextKeys;
 
 import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
 
 import java.math.BigDecimal;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
@@ -90,15 +89,16 @@ public static ApiJobStore getApiJobStore() {
     public static PreResponseStore getPreResponseStore() {
         PreResponseStore preResponseStore = new HashPreResponseStore();
         Granularity granularity = AllGranularity.INSTANCE;
-        Schema schema = new ZonedSchema(granularity, DateTimeZone.UTC);
 
         Map metricValues = new HashMap<>();
-        metricValues.put(MetricColumn.addNewMetricColumn(schema, "pageViews"), new BigDecimal(111));
+        MetricColumn pageViewColumn = new MetricColumn("pageViews");
+        metricValues.put(pageViewColumn, new BigDecimal(111));
+        ResultSetSchema schema = new ResultSetSchema(granularity, Collections.singleton(pageViewColumn));
 
         Result result = new Result(new HashMap<>(), metricValues, DateTime.parse("2016-01-12T00:00:00.000Z"));
         List results = new ArrayList<>();
         results.add(result);
-        ResultSet resultSet = new ResultSet(results, schema);
+        ResultSet resultSet = new ResultSet(schema, results);
 
         LinkedHashSet apiMetricColumnNames = new LinkedHashSet<>();
         apiMetricColumnNames.add("pageViews");
@@ -131,7 +131,7 @@ public static PreResponseStore getPreResponseStore() {
         results1.add(result1);
         results1.add(result2);
         results1.add(result3);
-        ResultSet resultSet1 = new ResultSet(results1, schema);
+        ResultSet resultSet1 = new ResultSet(schema, results1);
 
         PreResponse preResponse1 = new PreResponse(resultSet1, responseContext);
         preResponseStore.save("ticket3p", preResponse1);
diff --git a/fili-wikipedia-example/src/main/java/com/yahoo/wiki/webservice/data/config/table/WikiTableLoader.java b/fili-wikipedia-example/src/main/java/com/yahoo/wiki/webservice/data/config/table/WikiTableLoader.java
index 18e2d17503..c8aa23c3fb 100644
--- a/fili-wikipedia-example/src/main/java/com/yahoo/wiki/webservice/data/config/table/WikiTableLoader.java
+++ b/fili-wikipedia-example/src/main/java/com/yahoo/wiki/webservice/data/config/table/WikiTableLoader.java
@@ -93,7 +93,6 @@ private void configureSample(WikiDimensions wikiDimensions) {
     public void loadTableDictionary(ResourceDictionaries dictionaries) {
         for (WikiLogicalTableName table : WikiLogicalTableName.values()) {
             TableGroup tableGroup = buildTableGroup(
-                    table.asName(),
                     apiMetricNames.get(table),
                     druidMetricNames.get(table),
                     tableDefinitions.get(table),
diff --git a/pom.xml b/pom.xml
index 84885ac468..78135aa6e0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -81,7 +81,7 @@
         5.3.0
         2.6.2
         2.4.5
-        20.0
+        21.0
         0.3.8
         test
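
The recurring shape of these test changes is visible in the call sites above: columns are constructed free-standing instead of through `addNew...Column` mutators, `ResultSetSchema` is built immutably from a granularity plus its full column set, `ResultSet` takes the schema before the results, and `getColumn` returns an `Optional`. A minimal Java sketch consolidating that pattern, assuming only the constructor and `getColumn` signatures that appear in the hunks above (the class name and the generic value types are illustrative, not part of this PR):

    // Illustrative sketch, not part of the PR: shows the post-refactor
    // construction pattern used throughout the updated tests.
    import static com.yahoo.bard.webservice.data.time.DefaultTimeGrain.DAY;

    import com.yahoo.bard.webservice.data.Result;
    import com.yahoo.bard.webservice.data.ResultSet;
    import com.yahoo.bard.webservice.data.ResultSetSchema;
    import com.yahoo.bard.webservice.data.metric.MetricColumn;

    import org.joda.time.DateTime;

    import java.math.BigDecimal;
    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ResultSetSchemaSketch {

        public static ResultSet buildExampleResultSet() {
            // Columns no longer bind to a schema at construction time.
            MetricColumn pageViews = new MetricColumn("pageViews");

            // The schema is immutable: granularity and the full column set up front.
            ResultSetSchema schema = new ResultSetSchema(DAY, Collections.singleton(pageViews));

            // Assumed value types; the diff above uses raw Map declarations.
            Map<MetricColumn, Object> metricValues = new HashMap<>();
            metricValues.put(pageViews, new BigDecimal(111));
            Result row = new Result(new HashMap<>(), metricValues, DateTime.parse("2016-01-12T00:00:00.000Z"));

            List<Result> results = new ArrayList<>();
            results.add(row);

            // Constructor parameter order swapped: (schema, results), not (results, schema).
            return new ResultSet(schema, results);
        }

        public static MetricColumn lookUpColumn(ResultSetSchema schema, String name) {
            // getColumn now returns an Optional, so a missing column must be
            // handled explicitly rather than surfacing later as a null.
            return schema.getColumn(name, MetricColumn.class)
                    .orElseThrow(() -> new IllegalStateException("Unexpected missing column " + name));
        }
    }

The `Optional`-returning `getColumn` is what drives the `orElseThrow` handling added to each mapper in `NonNumericMetrics`, and the schema-first `ResultSet` constructor accounts for the argument swaps in `UIJsonResponseSpec` and `JobsEndpointResources`.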