From 4aedd7c82afeb7edd8bb0e7b59bfe2e658acaec1 Mon Sep 17 00:00:00 2001
From: David Heryanto
Date: Tue, 21 Jan 2020 18:50:20 +0800
Subject: [PATCH 01/31] Add metric for feature_value, cleanup tags

- Ignore StatsD exception since it will not be thrown when using a
  non-blocking StatsD client:
  https://github.com/DataDog/java-dogstatsd-client#unix-domain-socket-support
- Reuse tags variables
- Add validation for correct feature set reference in FeatureRow
---
 .../metrics/WriteRowMetricsDoFn.java          | 158 ++++++++++--------
 1 file changed, 90 insertions(+), 68 deletions(-)

diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java
index db2d1acd6d..1ec7d0314c 100644
--- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java
+++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java
@@ -17,27 +17,28 @@
 package feast.ingestion.transform.metrics;
 
 import com.google.auto.value.AutoValue;
+import com.google.protobuf.util.Timestamps;
 import com.timgroup.statsd.NonBlockingStatsDClient;
 import com.timgroup.statsd.StatsDClient;
 import com.timgroup.statsd.StatsDClientException;
 import feast.types.FeatureRowProto.FeatureRow;
 import feast.types.FieldProto.Field;
-import feast.types.ValueProto.Value.ValCase;
+import feast.types.ValueProto.Value;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.slf4j.Logger;
 
 @AutoValue
 public abstract class WriteRowMetricsDoFn extends DoFn {
 
-  private static final Logger log = org.slf4j.LoggerFactory.getLogger(WriteRowMetricsDoFn.class);
+  private static final Logger LOG = org.slf4j.LoggerFactory.getLogger(WriteRowMetricsDoFn.class);
 
-  private final String METRIC_PREFIX = "feast_ingestion";
-  private final String STORE_TAG_KEY = "feast_store";
-  private final String FEATURE_SET_PROJECT_TAG_KEY = "feast_project_name";
-  private final String FEATURE_SET_NAME_TAG_KEY = "feast_featureSet_name";
-  private final String FEATURE_SET_VERSION_TAG_KEY = "feast_featureSet_version";
-  private final String FEATURE_TAG_KEY = "feast_feature_name";
-  private final String INGESTION_JOB_NAME_KEY = "ingestion_job_name";
+  private static final String METRIC_PREFIX = "feast_ingestion";
+  private static final String STORE_TAG_KEY = "feast_store";
+  private static final String FEATURE_SET_PROJECT_TAG_KEY = "feast_project_name";
+  private static final String FEATURE_SET_NAME_TAG_KEY = "feast_featureSet_name";
+  private static final String FEATURE_SET_VERSION_TAG_KEY = "feast_featureSet_version";
+  private static final String FEATURE_TAG_KEY = "feast_feature_name";
+  private static final String INGESTION_JOB_NAME_KEY = "ingestion_job_name";
 
   public abstract String getStoreName();
 
@@ -45,8 +46,8 @@ public abstract class WriteRowMetricsDoFn extends DoFn {
 
   public abstract int getStatsdPort();
 
-  public static WriteRowMetricsDoFn create(
-      String newStoreName, String newStatsdHost, int newStatsdPort) {
+  public static WriteRowMetricsDoFn create(String newStoreName, String newStatsdHost,
+      int newStatsdPort) {
     return newBuilder()
         .setStoreName(newStoreName)
         .setStatsdHost(newStatsdHost)
@@ -74,74 +75,95 @@ public abstract static class Builder {
 
   @Setup
   public void setup() {
-    statsd = new NonBlockingStatsDClient(METRIC_PREFIX, getStatsdHost(), getStatsdPort());
+    try {
+      statsd = new NonBlockingStatsDClient(METRIC_PREFIX, getStatsdHost(), getStatsdPort());
+    } catch (StatsDClientException e) {
+      LOG.warn("Failed to create 
StatsD client"); + } } @ProcessElement public void processElement(ProcessContext c) { + if (statsd == null) { + LOG.warn( + "No StatsD client available (maybe it failed to initialize). No FeatureRow metrics will be sent."); + return; + } - try { - FeatureRow row = c.element(); - long eventTimestamp = com.google.protobuf.util.Timestamps.toMillis(row.getEventTimestamp()); - - String[] split = row.getFeatureSet().split(":"); - String featureSetProject = split[0].split("/")[0]; - String featureSetName = split[0].split("/")[1]; - String featureSetVersion = split[1]; - - statsd.histogram( - "feature_row_lag_ms", - System.currentTimeMillis() - eventTimestamp, - STORE_TAG_KEY + ":" + getStoreName(), - FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, - FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, - FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, - INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); + FeatureRow row = c.element(); + String[] colonSplits = row.getFeatureSet().split(":"); + if (colonSplits.length < 1) { + LOG.warn( + "FeatureRow an invalid FeatureSet reference: " + row.getFeatureSet() + + ". Expected format: PROJECT/FEATURE_SET:VERSION"); + return; + } + String[] slashSplits = colonSplits[0].split("/"); + if (slashSplits.length < 2) { + LOG.warn( + "FeatureRow an invalid FeatureSet reference: " + row.getFeatureSet() + + ". Expected format: PROJECT/FEATURE_SET:VERSION"); + return; + } - statsd.histogram( - "feature_row_event_time_epoch_ms", - eventTimestamp, + String featureSetProject = slashSplits[0]; + String featureSetName = slashSplits[1]; + String featureSetVersion = colonSplits[1]; + String[] tags = new String[]{ + STORE_TAG_KEY + ":" + getStoreName(), + FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, + FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, + FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, + INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName() + }; + long eventTimestamp = Timestamps.toMillis(row.getEventTimestamp()); + + statsd.histogram("feature_row_lag_ms", System.currentTimeMillis() - eventTimestamp, tags); + statsd.histogram("feature_row_event_time_epoch_ms", eventTimestamp, tags); + statsd.count("feature_row_ingested_count", 1, tags); + + // Feature value metrics will be used for validation + for (Field field : row.getFieldsList()) { + tags = new String[]{ STORE_TAG_KEY + ":" + getStoreName(), FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, - INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); - - for (Field field : row.getFieldsList()) { - if (!field.getValue().getValCase().equals(ValCase.VAL_NOT_SET)) { - statsd.histogram( - "feature_value_lag_ms", - System.currentTimeMillis() - eventTimestamp, - STORE_TAG_KEY + ":" + getStoreName(), - FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, - FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, - FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, - FEATURE_TAG_KEY + ":" + field.getName(), - INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); - } else { - statsd.count( - "feature_value_missing_count", - 1, - STORE_TAG_KEY + ":" + getStoreName(), - FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, - FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, - FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, - FEATURE_TAG_KEY + ":" + field.getName(), - INGESTION_JOB_NAME_KEY + ":" + 
c.getPipelineOptions().getJobName()); - } + FEATURE_TAG_KEY + ":" + field.getName(), + INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName() + }; + + Value val = field.getValue(); + switch (val.getValCase()) { + case INT32_VAL: + statsd.histogram("feature_value", val.getInt32Val(), tags); + break; + case INT64_VAL: + statsd.histogram("feature_value", val.getInt64Val(), tags); + break; + case DOUBLE_VAL: + statsd.histogram("feature_value", val.getDoubleVal(), tags); + break; + case FLOAT_VAL: + statsd.histogram("feature_value", val.getFloatVal(), tags); + break; + case BOOL_VAL: + statsd.histogram("feature_value", val.getBoolVal() ? 1 : 0, tags); + break; + case BYTES_VAL: + case STRING_VAL: + case BYTES_LIST_VAL: + case FLOAT_LIST_VAL: + case STRING_LIST_VAL: + case INT32_LIST_VAL: + case INT64_LIST_VAL: + case DOUBLE_LIST_VAL: + case BOOL_LIST_VAL: + break; + case VAL_NOT_SET: + statsd.count("feature_value_missing_count", 1, tags); + break; } - - statsd.count( - "feature_row_ingested_count", - 1, - STORE_TAG_KEY + ":" + getStoreName(), - FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, - FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, - FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, - INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); - - } catch (StatsDClientException e) { - log.warn("Unable to push metrics to server", e); } } } From d25f77debe531fa9ad8883bbb8c449fa9bae2d27 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Wed, 22 Jan 2020 03:33:56 +0800 Subject: [PATCH 02/31] Refactor map variable for feature set ref to feature set object --- .../main/java/feast/ingestion/ImportJob.java | 25 ++++++++++--------- 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/ingestion/src/main/java/feast/ingestion/ImportJob.java b/ingestion/src/main/java/feast/ingestion/ImportJob.java index 41af5f9bb4..0a22b8f42f 100644 --- a/ingestion/src/main/java/feast/ingestion/ImportJob.java +++ b/ingestion/src/main/java/feast/ingestion/ImportJob.java @@ -47,10 +47,12 @@ public class ImportJob { // Tag for main output containing Feature Row that has been successfully processed. - private static final TupleTag FEATURE_ROW_OUT = new TupleTag() {}; + private static final TupleTag FEATURE_ROW_OUT = new TupleTag() { + }; // Tag for deadletter output containing elements and error messages from invalid input/transform. - private static final TupleTag DEADLETTER_OUT = new TupleTag() {}; + private static final TupleTag DEADLETTER_OUT = new TupleTag() { + }; private static final Logger log = org.slf4j.LoggerFactory.getLogger(ImportJob.class); /** @@ -88,14 +90,13 @@ public static PipelineResult runPipeline(ImportOptions options) List subscribedFeatureSets = SpecUtil.getSubscribedFeatureSets(store.getSubscriptionsList(), featureSets); - // Generate tags by key - Map featureSetsByKey = new HashMap<>(); - subscribedFeatureSets.stream() - .forEach( - fs -> { - String ref = getFeatureSetReference(fs); - featureSetsByKey.put(ref, fs); - }); + // featureSetsByRef is a map of FeatureSet string reference to FeatureSet object. 
+ // FeatureSet reference follows this format: PROJECT/FEATURE_SET_NAME:VERSION + Map featureSetsByRef = new HashMap<>(); + subscribedFeatureSets.forEach(fs -> { + String ref = getFeatureSetReference(fs); + featureSetsByRef.put(ref, fs); + }); // TODO: make the source part of the job initialisation options Source source = subscribedFeatureSets.get(0).getSpec().getSource(); @@ -121,7 +122,7 @@ public static PipelineResult runPipeline(ImportOptions options) .get(FEATURE_ROW_OUT) .apply( ValidateFeatureRows.newBuilder() - .setFeatureSets(featureSetsByKey) + .setFeatureSets(featureSetsByRef) .setSuccessTag(FEATURE_ROW_OUT) .setFailureTag(DEADLETTER_OUT) .build()); @@ -131,7 +132,7 @@ public static PipelineResult runPipeline(ImportOptions options) .get(FEATURE_ROW_OUT) .apply( "WriteFeatureRowToStore", - WriteToStore.newBuilder().setFeatureSets(featureSetsByKey).setStore(store).build()); + WriteToStore.newBuilder().setFeatureSets(featureSetsByRef).setStore(store).build()); // Step 4. Write FailedElements to a dead letter table in BigQuery. if (options.getDeadLetterTableSpec() != null) { From cbb720b30bb6e5d087152d90455b92c6e6252d44 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Wed, 22 Jan 2020 03:42:19 +0800 Subject: [PATCH 03/31] Update documentation for FeatureRow.proto --- protos/feast/types/FeatureRow.proto | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/protos/feast/types/FeatureRow.proto b/protos/feast/types/FeatureRow.proto index 24293c6faa..d878ea111d 100644 --- a/protos/feast/types/FeatureRow.proto +++ b/protos/feast/types/FeatureRow.proto @@ -35,8 +35,10 @@ message FeatureRow { // will use to perform joins, determine latest values, and coalesce rows. google.protobuf.Timestamp event_timestamp = 3; - // Complete reference to the featureSet this featureRow belongs to, in the form of - // featureSetName:version. This value will be used by the feast ingestion job to filter - // rows, and write the values to the correct tables. + // Complete reference to the featureSet this featureRow belongs to, in the form of: + // [project]/[feature_set_name]:[version] + // + // FeatureSet reference will be used by the Feast ingestion job to filter + // rows and write the values to the correct tables. string feature_set = 6; } \ No newline at end of file From 3059bc48617b4c8ef00845b4a74c7e568e780dbf Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Wed, 22 Jan 2020 03:46:26 +0800 Subject: [PATCH 04/31] Add docs to ImportOptions for metrics exporter type. And make the checking of exporter type in ingestion more robust. --- .../src/main/java/feast/ingestion/options/ImportOptions.java | 2 +- .../ingestion/transform/metrics/WriteMetricsTransform.java | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java b/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java index b299bb47e5..4aea4519e3 100644 --- a/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java +++ b/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java @@ -65,7 +65,7 @@ public interface ImportOptions extends PipelineOptions, DataflowPipelineOptions, void setDeadLetterTableSpec(String deadLetterTableSpec); // TODO: expound - @Description("MetricsAccumulator exporter type to instantiate.") + @Description("MetricsAccumulator exporter type to instantiate. 
Currently supported type: statsd.") @Default.String("none") String getMetricsExporterType(); diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java index 43f314aa86..92bea22db5 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java @@ -55,7 +55,9 @@ public abstract static class Builder { @Override public PDone expand(PCollectionTuple input) { ImportOptions options = input.getPipeline().getOptions().as(ImportOptions.class); - switch (options.getMetricsExporterType()) { + assert options.getMetricsExporterType() != null; + + switch (options.getMetricsExporterType().trim().toLowerCase()) { case "statsd": input .get(getFailureTag()) From 13bc909faaee6c7b89081ea06d8ac3f04308c8af Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Tue, 28 Jan 2020 03:04:50 +0700 Subject: [PATCH 05/31] Write value and constraint metrics for each feature --- .../main/java/feast/ingestion/ImportJob.java | 1 + .../metrics/WriteMetricsTransform.java | 37 ++- .../metrics/WriteRowMetricsDoFn.java | 290 ++++++++++++++---- 3 files changed, 270 insertions(+), 58 deletions(-) diff --git a/ingestion/src/main/java/feast/ingestion/ImportJob.java b/ingestion/src/main/java/feast/ingestion/ImportJob.java index 0a22b8f42f..8ac82c71f3 100644 --- a/ingestion/src/main/java/feast/ingestion/ImportJob.java +++ b/ingestion/src/main/java/feast/ingestion/ImportJob.java @@ -162,6 +162,7 @@ public static PipelineResult runPipeline(ImportOptions options) .setStoreName(store.getName()) .setSuccessTag(FEATURE_ROW_OUT) .setFailureTag(DEADLETTER_OUT) + .setFeatureSetByRef(featureSetsByRef) .build()); } diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java index 92bea22db5..6593470011 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java @@ -17,25 +17,43 @@ package feast.ingestion.transform.metrics; import com.google.auto.value.AutoValue; +import feast.core.FeatureSetProto; +import feast.core.FeatureSetProto.FeatureSet; import feast.ingestion.options.ImportOptions; import feast.ingestion.values.FailedElement; import feast.types.FeatureRowProto.FeatureRow; +import java.util.Map; import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.transforms.GroupByKey; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; +import org.apache.beam.sdk.transforms.windowing.FixedWindows; +import org.apache.beam.sdk.transforms.windowing.Window; +import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollectionTuple; import org.apache.beam.sdk.values.PDone; import org.apache.beam.sdk.values.TupleTag; +import org.joda.time.Duration; @AutoValue public abstract class WriteMetricsTransform extends PTransform { + // FIXED_WINDOW_DURATION_IN_SEC_FOR_ROW_METRICS is the interval at which ingestion metrics + // are collected and aggregated. + // + // Metrics in Feast are sent via StatsD and are later scraped by Prometheus at a regular interval. 
+ // The duration here should be higher than Prometheus scrope interval, otherwise some metrics may + // not be scraped because Prometheus scrape frequency is too slow. + private static final long FIXED_WINDOW_DURATION_IN_SEC_FOR_ROW_METRICS = 20; + public abstract String getStoreName(); public abstract TupleTag getSuccessTag(); public abstract TupleTag getFailureTag(); + public abstract Map getFeatureSetByRef(); + public static Builder newBuilder() { return new AutoValue_WriteMetricsTransform.Builder(); } @@ -49,6 +67,9 @@ public abstract static class Builder { public abstract Builder setFailureTag(TupleTag failureTag); + public abstract Builder setFeatureSetByRef( + Map featureSetByRef); + public abstract WriteMetricsTransform build(); } @@ -72,13 +93,24 @@ public PDone expand(PCollectionTuple input) { input .get(getSuccessTag()) + .apply("FixedWindowForMetricsCollection", + Window.into(FixedWindows + .of(Duration.standardSeconds(FIXED_WINDOW_DURATION_IN_SEC_FOR_ROW_METRICS)))) + .apply("MapToFeatureRowByRef", ParDo.of(new DoFn>() { + @ProcessElement + public void processElement(ProcessContext c) { + c.output(KV.of(c.element().getFeatureSet(), c.element())); + } + })) + .apply("GroupByFeatureRef", GroupByKey.create()) .apply( - "WriteRowMetrics", + "WriteFeatureRowMetrics", ParDo.of( WriteRowMetricsDoFn.newBuilder() .setStatsdHost(options.getStatsdHost()) .setStatsdPort(options.getStatsdPort()) .setStoreName(getStoreName()) + .setFeatureSetByRef(getFeatureSetByRef()) .build())); return PDone.in(input.getPipeline()); @@ -91,7 +123,8 @@ public PDone expand(PCollectionTuple input) { ParDo.of( new DoFn() { @ProcessElement - public void processElement(ProcessContext c) {} + public void processElement(ProcessContext c) { + } })); return PDone.in(input.getPipeline()); } diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java index 1ec7d0314c..1499382c93 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java @@ -21,14 +21,29 @@ import com.timgroup.statsd.NonBlockingStatsDClient; import com.timgroup.statsd.StatsDClient; import com.timgroup.statsd.StatsDClientException; +import feast.core.FeatureSetProto; +import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.EntitySpec.DomainInfoCase; +import feast.core.FeatureSetProto.EntitySpec.PresenceConstraintsCase; +import feast.core.FeatureSetProto.FeatureSet; +import feast.core.FeatureSetProto.FeatureSpec; import feast.types.FeatureRowProto.FeatureRow; import feast.types.FieldProto.Field; import feast.types.ValueProto.Value; +import feast.types.ValueProto.Value.ValCase; +import java.util.DoubleSummaryStatistics; +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.values.KV; import org.slf4j.Logger; +import org.tensorflow.metadata.v0.FeaturePresence; +import org.tensorflow.metadata.v0.FloatDomain; +import org.tensorflow.metadata.v0.IntDomain; @AutoValue -public abstract class WriteRowMetricsDoFn extends DoFn { +public abstract class WriteRowMetricsDoFn extends DoFn>, Void> { private static final Logger LOG = org.slf4j.LoggerFactory.getLogger(WriteRowMetricsDoFn.class); @@ -37,8 +52,8 @@ public abstract class WriteRowMetricsDoFn extends DoFn { private static final String 
FEATURE_SET_PROJECT_TAG_KEY = "feast_project_name"; private static final String FEATURE_SET_NAME_TAG_KEY = "feast_featureSet_name"; private static final String FEATURE_SET_VERSION_TAG_KEY = "feast_featureSet_version"; - private static final String FEATURE_TAG_KEY = "feast_feature_name"; - private static final String INGESTION_JOB_NAME_KEY = "ingestion_job_name"; + private static final String FEATURE_NAME_TAG_KEY = "feast_feature_name"; + private static final String INGESTION_JOB_NAME_KEY = "feast_job_name"; public abstract String getStoreName(); @@ -46,16 +61,9 @@ public abstract class WriteRowMetricsDoFn extends DoFn { public abstract int getStatsdPort(); - public static WriteRowMetricsDoFn create(String newStoreName, String newStatsdHost, - int newStatsdPort) { - return newBuilder() - .setStoreName(newStoreName) - .setStatsdHost(newStatsdHost) - .setStatsdPort(newStatsdPort) - .build(); - } + public abstract Map getFeatureSetByRef(); - public StatsDClient statsd; + private StatsDClient statsd; public static Builder newBuilder() { return new AutoValue_WriteRowMetricsDoFn.Builder(); @@ -70,6 +78,9 @@ public abstract static class Builder { public abstract Builder setStatsdPort(int statsdPort); + public abstract Builder setFeatureSetByRef( + Map featureSetByRef); + public abstract WriteRowMetricsDoFn build(); } @@ -86,22 +97,32 @@ public void setup() { public void processElement(ProcessContext c) { if (statsd == null) { LOG.warn( - "No StatsD client available (maybe it failed to initialize). No FeatureRow metrics will be sent."); + "No StatsD client available, maybe it failed to initialize. No FeatureRow metrics will be sent."); + return; + } + + String featureSetRef = c.element().getKey(); + if (featureSetRef == null) { return; } - FeatureRow row = c.element(); - String[] colonSplits = row.getFeatureSet().split(":"); + if (!getFeatureSetByRef().containsKey(featureSetRef)) { + // FeatureRow has a reference not known by the ImportJob. Skip sending metrics. + return; + } + + String[] colonSplits = featureSetRef.split(":"); if (colonSplits.length < 1) { LOG.warn( - "FeatureRow an invalid FeatureSet reference: " + row.getFeatureSet() + "FeatureRow has an invalid FeatureSet reference: " + featureSetRef + ". Expected format: PROJECT/FEATURE_SET:VERSION"); return; } + String[] slashSplits = colonSplits[0].split("/"); if (slashSplits.length < 2) { LOG.warn( - "FeatureRow an invalid FeatureSet reference: " + row.getFeatureSet() + "FeatureRow has an invalid FeatureSet reference: " + featureSetRef + ". 
Expected format: PROJECT/FEATURE_SET:VERSION"); return; } @@ -109,6 +130,13 @@ public void processElement(ProcessContext c) { String featureSetProject = slashSplits[0]; String featureSetName = slashSplits[1]; String featureSetVersion = colonSplits[1]; + + FeatureSet featureSet = getFeatureSetByRef().get(featureSetRef); + Map entityNameToSpec = createEntityNameToSpecMap(featureSet); + Map featureNameToSpec = createFeatureNameToSpecMap(featureSet); + Map fieldNameToMissingCount = new HashMap<>(); + Map fieldNameToValueStat = new HashMap<>(); + String[] tags = new String[]{ STORE_TAG_KEY + ":" + getStoreName(), FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, @@ -116,54 +144,204 @@ public void processElement(ProcessContext c) { FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName() }; - long eventTimestamp = Timestamps.toMillis(row.getEventTimestamp()); - statsd.histogram("feature_row_lag_ms", System.currentTimeMillis() - eventTimestamp, tags); - statsd.histogram("feature_row_event_time_epoch_ms", eventTimestamp, tags); - statsd.count("feature_row_ingested_count", 1, tags); + for (FeatureRow row : c.element().getValue()) { + // All features in a FeatueRow have the same timestamp so lag metrics are recorded per row basis. + long eventTimestamp = Timestamps.toMillis(row.getEventTimestamp()); + statsd.histogram("feature_row_lag_ms", System.currentTimeMillis() - eventTimestamp, tags); + statsd.histogram("feature_row_event_time_epoch_ms", eventTimestamp, tags); + statsd.count("feature_row_ingested_count", 1, tags); + + // Feature value, count and constraint metrics for each feature/entity in a FeatureRow. + for (Field field : row.getFieldsList()) { + String fieldName = field.getName(); + + // Ensure the map objects have properly initialized value for every key. + if (!fieldNameToMissingCount.containsKey(fieldName)) { + fieldNameToMissingCount.put(fieldName, 0); + } + if (!fieldNameToValueStat.containsKey(fieldName)) { + fieldNameToValueStat.put(fieldName, new DoubleSummaryStatistics()); + } + + tags = new String[]{ + STORE_TAG_KEY + ":" + getStoreName(), + FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, + FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, + FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, + INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName(), + FEATURE_NAME_TAG_KEY + ":" + fieldName, + }; + + Value val = field.getValue(); + ValCase valCase = val.getValCase(); + DoubleSummaryStatistics valueStat = fieldNameToValueStat.get(fieldName); + + switch (valCase) { + case INT32_VAL: + valueStat.accept(val.getInt32Val()); + fieldNameToValueStat.put(fieldName, valueStat); + writeConstraintMetrics(entityNameToSpec, featureNameToSpec, fieldName, valCase, tags); + break; + case INT64_VAL: + valueStat.accept(val.getInt64Val()); + fieldNameToValueStat.put(fieldName, valueStat); + writeConstraintMetrics(entityNameToSpec, featureNameToSpec, fieldName, valCase, tags); + break; + case DOUBLE_VAL: + valueStat.accept(val.getDoubleVal()); + fieldNameToValueStat.put(fieldName, valueStat); + writeConstraintMetrics(entityNameToSpec, featureNameToSpec, fieldName, valCase, tags); + break; + case FLOAT_VAL: + valueStat.accept(val.getFloatVal()); + fieldNameToValueStat.put(fieldName, valueStat); + writeConstraintMetrics(entityNameToSpec, featureNameToSpec, fieldName, valCase, tags); + break; + case BOOL_VAL: + valueStat.accept(val.getBoolVal() ? 
1 : 0); + fieldNameToValueStat.put(fieldName, valueStat); + break; + case BYTES_VAL: + case STRING_VAL: + case BYTES_LIST_VAL: + case FLOAT_LIST_VAL: + case STRING_LIST_VAL: + case INT32_LIST_VAL: + case INT64_LIST_VAL: + case DOUBLE_LIST_VAL: + case BOOL_LIST_VAL: + break; + case VAL_NOT_SET: + Integer oldCount = fieldNameToMissingCount.get(fieldName); + fieldNameToMissingCount.put(fieldName, oldCount + 1); + break; + } + } + } + + for (Entry entry : fieldNameToMissingCount.entrySet()) { + String fieldName = entry.getKey(); + Integer missingCount = entry.getValue(); + tags = new String[]{ + STORE_TAG_KEY + ":" + getStoreName(), + FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, + FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, + FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, + INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName(), + FEATURE_NAME_TAG_KEY + ":" + fieldName, + }; + statsd.count("feature_value_missing_count", missingCount, tags); + } - // Feature value metrics will be used for validation - for (Field field : row.getFieldsList()) { + for (Entry entry : fieldNameToValueStat.entrySet()) { + String fieldName = entry.getKey(); + DoubleSummaryStatistics valueStat = entry.getValue(); tags = new String[]{ STORE_TAG_KEY + ":" + getStoreName(), FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, - FEATURE_TAG_KEY + ":" + field.getName(), - INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName() + INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName(), + FEATURE_NAME_TAG_KEY + ":" + fieldName, }; + statsd.gauge("feature_value_min", valueStat.getMin(), tags); + statsd.gauge("feature_value_max", valueStat.getMax(), tags); + statsd.count("feature_value_count", valueStat.getCount(), tags); + } + } - Value val = field.getValue(); - switch (val.getValCase()) { - case INT32_VAL: - statsd.histogram("feature_value", val.getInt32Val(), tags); - break; - case INT64_VAL: - statsd.histogram("feature_value", val.getInt64Val(), tags); - break; - case DOUBLE_VAL: - statsd.histogram("feature_value", val.getDoubleVal(), tags); - break; - case FLOAT_VAL: - statsd.histogram("feature_value", val.getFloatVal(), tags); - break; - case BOOL_VAL: - statsd.histogram("feature_value", val.getBoolVal() ? 1 : 0, tags); - break; - case BYTES_VAL: - case STRING_VAL: - case BYTES_LIST_VAL: - case FLOAT_LIST_VAL: - case STRING_LIST_VAL: - case INT32_LIST_VAL: - case INT64_LIST_VAL: - case DOUBLE_LIST_VAL: - case BOOL_LIST_VAL: - break; - case VAL_NOT_SET: - statsd.count("feature_value_missing_count", 1, tags); - break; - } + // Record the acceptable value and count for each feature according to the spec. + // These can be used to compare against the actual values in the dashboarding / alerting tool. 
+ private void writeConstraintMetrics(Map entityNameToSpec, + Map featureNameToSpec, String fieldName, ValCase valCase, + String[] tags) { + switch (valCase) { + case INT32_VAL: + case INT64_VAL: + if (entityNameToSpec.containsKey(fieldName)) { + EntitySpec entitySpec = entityNameToSpec.get(fieldName); + if (entitySpec.getDomainInfoCase().equals(DomainInfoCase.INT_DOMAIN)) { + IntDomain intDomain = entitySpec.getIntDomain(); + statsd.gauge("feature_value_domain_min", intDomain.getMin(), tags); + statsd.gauge("feature_value_domain_max", intDomain.getMax(), tags); + } + if (entitySpec.getPresenceConstraintsCase().equals(PresenceConstraintsCase.PRESENCE)) { + FeaturePresence presence = entitySpec.getPresence(); + statsd.gauge("feature_presence_min_fraction", presence.getMinFraction(), tags); + statsd.gauge("feature_presence_min_count", presence.getMinCount(), tags); + } + } else if (featureNameToSpec.containsKey(fieldName)) { + FeatureSpec featureSpec = featureNameToSpec.get(fieldName); + if (featureSpec.getDomainInfoCase().equals(FeatureSpec.DomainInfoCase.INT_DOMAIN)) { + IntDomain intDomain = featureSpec.getIntDomain(); + statsd.gauge("feature_value_domain_min", intDomain.getMin(), tags); + statsd.gauge("feature_value_domain_max", intDomain.getMax(), tags); + } + if (featureSpec.getPresenceConstraintsCase() + .equals(FeatureSpec.PresenceConstraintsCase.PRESENCE)) { + FeaturePresence presence = featureSpec.getPresence(); + statsd.gauge("feature_presence_min_fraction", presence.getMinFraction(), tags); + statsd.gauge("feature_presence_min_count", presence.getMinCount(), tags); + } + } + break; + case DOUBLE_VAL: + case FLOAT_VAL: + if (entityNameToSpec.containsKey(fieldName)) { + EntitySpec entitySpec = entityNameToSpec.get(fieldName); + if (entitySpec.getDomainInfoCase().equals(DomainInfoCase.FLOAT_DOMAIN)) { + FloatDomain floatDomain = entitySpec.getFloatDomain(); + statsd.gauge("feature_value_domain_min", floatDomain.getMin(), tags); + statsd.gauge("feature_value_domain_max", floatDomain.getMax(), tags); + } + if (entitySpec.getPresenceConstraintsCase().equals(PresenceConstraintsCase.PRESENCE)) { + FeaturePresence presence = entitySpec.getPresence(); + statsd.gauge("feature_presence_min_fraction", presence.getMinFraction(), tags); + statsd.gauge("feature_presence_min_count", presence.getMinCount(), tags); + } + } else if (featureNameToSpec.containsKey(fieldName)) { + FeatureSpec featureSpec = featureNameToSpec.get(fieldName); + if (featureSpec.getDomainInfoCase().equals(FeatureSpec.DomainInfoCase.FLOAT_DOMAIN)) { + FloatDomain floatDomain = featureSpec.getFloatDomain(); + statsd.gauge("feature_value_domain_min", floatDomain.getMin(), tags); + statsd.gauge("feature_value_domain_max", floatDomain.getMax(), tags); + } + if (featureSpec.getPresenceConstraintsCase() + .equals(FeatureSpec.PresenceConstraintsCase.PRESENCE)) { + FeaturePresence presence = featureSpec.getPresence(); + statsd.gauge("feature_presence_min_fraction", presence.getMinFraction(), tags); + statsd.gauge("feature_presence_min_count", presence.getMinCount(), tags); + } + } + break; + default: + break; } + + } + + private Map createFeatureNameToSpecMap(FeatureSet featureSet) { + Map featureSpecByName = new HashMap<>(); + if (featureSet == null) { + return featureSpecByName; + } + + featureSet.getSpec().getFeaturesList().forEach(featureSpec -> { + featureSpecByName.put(featureSpec.getName(), featureSpec); + }); + return featureSpecByName; + } + + private Map createEntityNameToSpecMap(FeatureSet featureSet) { + Map 
entitySpecByName = new HashMap<>(); + if (featureSet == null) { + return entitySpecByName; + } + + featureSet.getSpec().getEntitiesList().forEach(entitySpec -> { + entitySpecByName.put(entitySpec.getName(), entitySpec); + }); + return entitySpecByName; } } From 2e441971fb4ff8d752ef987d91de19b160b9a3c4 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Wed, 29 Jan 2020 07:33:21 +0800 Subject: [PATCH 06/31] Add grafana-dashboard.json for features validation --- .../metrics/WriteRowMetricsDoFn.java | 2 +- .../src/main/resources/grafana-dashboard.json | 1469 +++++++++++++++++ 2 files changed, 1470 insertions(+), 1 deletion(-) create mode 100644 ingestion/src/main/resources/grafana-dashboard.json diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java index 1499382c93..d889e16f19 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java @@ -247,7 +247,7 @@ public void processElement(ProcessContext c) { }; statsd.gauge("feature_value_min", valueStat.getMin(), tags); statsd.gauge("feature_value_max", valueStat.getMax(), tags); - statsd.count("feature_value_count", valueStat.getCount(), tags); + statsd.count("feature_value_presence_count", valueStat.getCount(), tags); } } diff --git a/ingestion/src/main/resources/grafana-dashboard.json b/ingestion/src/main/resources/grafana-dashboard.json new file mode 100644 index 0000000000..762295aa3e --- /dev/null +++ b/ingestion/src/main/resources/grafana-dashboard.json @@ -0,0 +1,1469 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "id": 1, + "iteration": 1580253818906, + "links": [], + "panels": [ + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 16, + "panels": [], + "repeat": null, + "title": "Constraint Violation", + "type": "row" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 0, + "y": 1 + }, + "hiddenSeries": false, + "id": 12, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "(feast_ingestion_feature_value_max - on(feast_project_name,feast_feature_name) feast_ingestion_feature_value_domain_max) > 0", + "hide": false, + "legendFormat": "", + "refId": "A" + }, + { + "expr": "(feast_ingestion_feature_value_domain_min - on(feast_project_name,feast_feature_name) feast_ingestion_feature_value_min) > 0", + "hide": false, + "legendFormat": "", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Domain Value", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + 
"buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 8, + "y": 1 + }, + "hiddenSeries": false, + "id": 22, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "feast_ingestion_feature_presence_min_count - on (feast_feature_name, feast_project_name) increase(feast_ingestion_feature_value_presence_count[5m]) > 0", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Count", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 16, + "y": 1 + }, + "hiddenSeries": false, + "id": 24, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "feast_ingestion_feature_presence_min_fraction - on (feast_feature_name, feast_project_name) increase(feast_ingestion_feature_value_presence_count[5m]) / (\nincrease(feast_ingestion_feature_value_presence_count[5m]) +\nincrease(feast_ingestion_feature_value_missing_count[5m])\n) > 0", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Fraction", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 7 + }, + "id": 14, + 
"panels": [], + "repeat": "feature", + "scopedVars": { + "feature": { + "selected": true, + "text": "entity1", + "value": "entity1" + } + }, + "title": "Feature Stats", + "type": "row" + }, + { + "aliasColors": { + "max": "super-light-red", + "max_domain": "light-red", + "max_val": "dark-orange", + "min": "super-light-green", + "min_domain": "light-green", + "min_val": "dark-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 0, + "y": 8 + }, + "hiddenSeries": false, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "scopedVars": { + "feature": { + "selected": true, + "text": "entity1", + "value": "entity1" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "feast_ingestion_feature_value_min{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\"}", + "legendFormat": "min_val", + "refId": "A" + }, + { + "expr": "feast_ingestion_feature_value_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "legendFormat": "max_val", + "refId": "B" + }, + { + "expr": "feast_ingestion_feature_value_domain_min{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "legendFormat": "min_domain", + "refId": "C" + }, + { + "expr": "feast_ingestion_feature_value_domain_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "legendFormat": "max_domain", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Feature Value - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": { + "count": "dark-green", + "count over selected range": "dark-yellow", + "feature_presence_min_count": "light-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "description": "", + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 8, + "y": 8 + }, + "hiddenSeries": false, + "id": 5, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "scopedVars": { + "feature": { + "selected": true, + "text": "entity1", + "value": "entity1" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "increase(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\", 
feast_feature_name=~\"[[feature]]\"}[$__range])", + "instant": false, + "interval": "", + "intervalFactor": 1, + "legendFormat": "count", + "refId": "A" + }, + { + "expr": "feast_ingestion_feature_presence_min_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\"}", + "legendFormat": "feature_presence_min_count", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Count - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": { + "fraction": "dark-green", + "min_fraction": "light-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 16, + "y": 8 + }, + "hiddenSeries": false, + "id": 7, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "scopedVars": { + "feature": { + "selected": true, + "text": "entity1", + "value": "entity1" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "increase(feast_ingestion_feature_value_presence_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range]) / (\nincrease(feast_ingestion_feature_value_presence_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range]) + \nincrease(feast_ingestion_feature_value_missing_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range])\n)", + "legendFormat": "fraction", + "refId": "B" + }, + { + "expr": "feast_ingestion_feature_presence_min_fraction{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}", + "legendFormat": "min_fraction", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Fraction - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 14 + }, + "id": 25, + "panels": [], + "repeat": null, + "repeatIteration": 1580253818906, + "repeatPanelId": 14, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature1", + "value": "feature1" + } + }, + "title": "Feature Stats", + "type": "row" + }, + { + "aliasColors": { 
+ "max": "super-light-red", + "max_domain": "light-red", + "max_val": "dark-orange", + "min": "super-light-green", + "min_domain": "light-green", + "min_val": "dark-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 0, + "y": 15 + }, + "hiddenSeries": false, + "id": 26, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "repeatIteration": 1580253818906, + "repeatPanelId": 2, + "repeatedByRow": true, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature1", + "value": "feature1" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "feast_ingestion_feature_value_min{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\"}", + "legendFormat": "min_val", + "refId": "A" + }, + { + "expr": "feast_ingestion_feature_value_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "legendFormat": "max_val", + "refId": "B" + }, + { + "expr": "feast_ingestion_feature_value_domain_min{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "legendFormat": "min_domain", + "refId": "C" + }, + { + "expr": "feast_ingestion_feature_value_domain_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "legendFormat": "max_domain", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Feature Value - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": { + "count": "dark-green", + "count over selected range": "dark-yellow", + "feature_presence_min_count": "light-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "description": "", + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 8, + "y": 15 + }, + "hiddenSeries": false, + "id": 27, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "repeatIteration": 1580253818906, + "repeatPanelId": 5, + "repeatedByRow": true, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature1", + "value": "feature1" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "increase(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\", 
feast_feature_name=~\"[[feature]]\"}[$__range])", + "instant": false, + "interval": "", + "intervalFactor": 1, + "legendFormat": "count", + "refId": "A" + }, + { + "expr": "feast_ingestion_feature_presence_min_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\"}", + "legendFormat": "feature_presence_min_count", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Count - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": { + "fraction": "dark-green", + "min_fraction": "light-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 16, + "y": 15 + }, + "hiddenSeries": false, + "id": 28, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "repeatIteration": 1580253818906, + "repeatPanelId": 7, + "repeatedByRow": true, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature1", + "value": "feature1" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "increase(feast_ingestion_feature_value_presence_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range]) / (\nincrease(feast_ingestion_feature_value_presence_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range]) + \nincrease(feast_ingestion_feature_value_missing_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range])\n)", + "legendFormat": "fraction", + "refId": "B" + }, + { + "expr": "feast_ingestion_feature_presence_min_fraction{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}", + "legendFormat": "min_fraction", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Fraction - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 21 + }, + "id": 29, + "panels": [], + "repeat": null, + "repeatIteration": 1580253818906, + "repeatPanelId": 14, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature2", + "value": 
"feature2" + } + }, + "title": "Feature Stats", + "type": "row" + }, + { + "aliasColors": { + "max": "super-light-red", + "max_domain": "light-red", + "max_val": "dark-orange", + "min": "super-light-green", + "min_domain": "light-green", + "min_val": "dark-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 0, + "y": 22 + }, + "hiddenSeries": false, + "id": 30, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "repeatIteration": 1580253818906, + "repeatPanelId": 2, + "repeatedByRow": true, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature2", + "value": "feature2" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "feast_ingestion_feature_value_min{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\"}", + "legendFormat": "min_val", + "refId": "A" + }, + { + "expr": "feast_ingestion_feature_value_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "legendFormat": "max_val", + "refId": "B" + }, + { + "expr": "feast_ingestion_feature_value_domain_min{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "legendFormat": "min_domain", + "refId": "C" + }, + { + "expr": "feast_ingestion_feature_value_domain_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "legendFormat": "max_domain", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Feature Value - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": { + "count": "dark-green", + "count over selected range": "dark-yellow", + "feature_presence_min_count": "light-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "description": "", + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 8, + "y": 22 + }, + "hiddenSeries": false, + "id": 31, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "repeatIteration": 1580253818906, + "repeatPanelId": 5, + "repeatedByRow": true, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature2", + "value": "feature2" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": 
"increase(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\"}[$__range])", + "instant": false, + "interval": "", + "intervalFactor": 1, + "legendFormat": "count", + "refId": "A" + }, + { + "expr": "feast_ingestion_feature_presence_min_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\"}", + "legendFormat": "feature_presence_min_count", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Count - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": { + "fraction": "dark-green", + "min_fraction": "light-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 16, + "y": 22 + }, + "hiddenSeries": false, + "id": 32, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "repeatIteration": 1580253818906, + "repeatPanelId": 7, + "repeatedByRow": true, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature2", + "value": "feature2" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "increase(feast_ingestion_feature_value_presence_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range]) / (\nincrease(feast_ingestion_feature_value_presence_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range]) + \nincrease(feast_ingestion_feature_value_missing_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range])\n)", + "legendFormat": "fraction", + "refId": "B" + }, + { + "expr": "feast_ingestion_feature_presence_min_fraction{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}", + "legendFormat": "min_fraction", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Fraction - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "refresh": "", + "schemaVersion": 21, + "style": "dark", + "tags": [], + "templating": { + "list": [ + { + "allValue": null, + "current": { + "text": "project1", + "value": "project1" + }, + "datasource": 
"Prometheus", + "definition": "label_values(feast_project_name)", + "hide": 0, + "includeAll": false, + "label": null, + "multi": false, + "name": "project", + "options": [], + "query": "label_values(feast_project_name)", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 5, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "text": "entity1 + feature1 + feature2", + "value": [ + "entity1", + "feature1", + "feature2" + ] + }, + "datasource": "Prometheus", + "definition": "label_values(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\"},feast_feature_name)", + "hide": 0, + "includeAll": false, + "label": null, + "multi": true, + "name": "feature", + "options": [], + "query": "label_values(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\"},feast_feature_name)", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 5, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-5m", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ] + }, + "timezone": "", + "title": "Feast Features Dashboard", + "uid": "ywufPPyWz", + "version": 45 +} \ No newline at end of file From 6fb87258805dd58b6ea0f6cb0c8e13d3263e21ec Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Wed, 29 Jan 2020 07:42:05 +0800 Subject: [PATCH 07/31] Make fixed window size a part of pipeline options --- .../feast/ingestion/options/ImportOptions.java | 17 +++++++++++++++-- .../metrics/WriteMetricsTransform.java | 10 +--------- 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java b/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java index 4aea4519e3..fca98532d1 100644 --- a/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java +++ b/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java @@ -24,8 +24,11 @@ import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.Validation.Required; -/** Options passed to Beam to influence the job's execution environment */ +/** + * Options passed to Beam to influence the job's execution environment + */ public interface ImportOptions extends PipelineOptions, DataflowPipelineOptions, DirectOptions { + @Required @Description( "JSON string representation of the FeatureSet that the import job will process." @@ -60,7 +63,7 @@ public interface ImportOptions extends PipelineOptions, DataflowPipelineOptions, /** * @param deadLetterTableSpec (Optional) BigQuery table for storing elements that failed to be - * processed. Table spec must follow this format PROJECT_ID:DATASET_ID.PROJECT_ID + * processed. Table spec must follow this format PROJECT_ID:DATASET_ID.PROJECT_ID */ void setDeadLetterTableSpec(String deadLetterTableSpec); @@ -83,4 +86,14 @@ public interface ImportOptions extends PipelineOptions, DataflowPipelineOptions, int getStatsdPort(); void setStatsdPort(int StatsdPort); + + @Description( + "The fixed window size in seconds at which ingestion metrics are collected and aggregated. " + + "Metrics in Feast are sent via StatsD and are later scraped by Prometheus at a regular interval. 
" + + "The window size here should be higher than Prometheus scrope interval, otherwise gauge metrics may " + + "miss scraping because Prometheus scrape frequency is too slow.") + @Default.Integer(20) + int getWindowSizeForMetrics(); + + void setWindowSizeForMetrics(int durationInSeconds); } diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java index 6593470011..aad7e8b37e 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteMetricsTransform.java @@ -38,14 +38,6 @@ @AutoValue public abstract class WriteMetricsTransform extends PTransform { - // FIXED_WINDOW_DURATION_IN_SEC_FOR_ROW_METRICS is the interval at which ingestion metrics - // are collected and aggregated. - // - // Metrics in Feast are sent via StatsD and are later scraped by Prometheus at a regular interval. - // The duration here should be higher than Prometheus scrope interval, otherwise some metrics may - // not be scraped because Prometheus scrape frequency is too slow. - private static final long FIXED_WINDOW_DURATION_IN_SEC_FOR_ROW_METRICS = 20; - public abstract String getStoreName(); public abstract TupleTag getSuccessTag(); @@ -95,7 +87,7 @@ public PDone expand(PCollectionTuple input) { .get(getSuccessTag()) .apply("FixedWindowForMetricsCollection", Window.into(FixedWindows - .of(Duration.standardSeconds(FIXED_WINDOW_DURATION_IN_SEC_FOR_ROW_METRICS)))) + .of(Duration.standardSeconds(options.getWindowSizeForMetrics())))) .apply("MapToFeatureRowByRef", ParDo.of(new DoFn>() { @ProcessElement public void processElement(ProcessContext c) { From ec2e6c843b2b7258f98482ea66ae80557158f6dd Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Wed, 29 Jan 2020 09:09:40 +0800 Subject: [PATCH 08/31] Update generated protobuf code for Python to include Tensorflow metadata schema --- sdk/python/feast/core/CoreService_pb2.pyi | 123 +++--- sdk/python/feast/core/FeatureSet_pb2.py | 397 +++++++++++++++++- sdk/python/feast/core/FeatureSet_pb2.pyi | 224 ++++++++-- sdk/python/feast/core/Source_pb2.pyi | 35 +- sdk/python/feast/core/Store_pb2.py | 36 +- sdk/python/feast/core/Store_pb2.pyi | 65 +-- .../feast/serving/ServingService_pb2.pyi | 179 ++++---- sdk/python/feast/storage/Redis_pb2.pyi | 8 +- .../feast/types/FeatureRowExtended_pb2.pyi | 24 +- sdk/python/feast/types/FeatureRow_pb2.pyi | 12 +- sdk/python/feast/types/Field_pb2.pyi | 12 +- sdk/python/feast/types/Value_pb2.pyi | 149 +++---- 12 files changed, 938 insertions(+), 326 deletions(-) diff --git a/sdk/python/feast/core/CoreService_pb2.pyi b/sdk/python/feast/core/CoreService_pb2.pyi index 645226982a..24130ee870 100644 --- a/sdk/python/feast/core/CoreService_pb2.pyi +++ b/sdk/python/feast/core/CoreService_pb2.pyi @@ -36,20 +36,27 @@ from typing_extensions import ( ) +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int +builtin___str = str + + class GetFeatureSetRequest(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... project = ... # type: typing___Text name = ... # type: typing___Text - version = ... # type: int + version = ... 
# type: builtin___int def __init__(self, *, project : typing___Optional[typing___Text] = None, name : typing___Optional[typing___Text] = None, - version : typing___Optional[int] = None, + version : typing___Optional[builtin___int] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> GetFeatureSetRequest: ... + def FromString(cls, s: builtin___bytes) -> GetFeatureSetRequest: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -68,14 +75,14 @@ class GetFeatureSetResponse(google___protobuf___message___Message): feature_set : typing___Optional[feast___core___FeatureSet_pb2___FeatureSet] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> GetFeatureSetResponse: ... + def FromString(cls, s: builtin___bytes) -> GetFeatureSetResponse: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"feature_set"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"feature_set"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"feature_set"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"feature_set",b"feature_set"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"feature_set",b"feature_set"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"feature_set",b"feature_set"]) -> None: ... class ListFeatureSetsRequest(google___protobuf___message___Message): @@ -93,7 +100,7 @@ class ListFeatureSetsRequest(google___protobuf___message___Message): feature_set_version : typing___Optional[typing___Text] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> ListFeatureSetsRequest.Filter: ... + def FromString(cls, s: builtin___bytes) -> ListFeatureSetsRequest.Filter: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -110,14 +117,14 @@ class ListFeatureSetsRequest(google___protobuf___message___Message): filter : typing___Optional[ListFeatureSetsRequest.Filter] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> ListFeatureSetsRequest: ... + def FromString(cls, s: builtin___bytes) -> ListFeatureSetsRequest: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"filter"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"filter"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"filter"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"filter",b"filter"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"filter",b"filter"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"filter",b"filter"]) -> None: ... 
class ListFeatureSetsResponse(google___protobuf___message___Message): @@ -131,7 +138,7 @@ class ListFeatureSetsResponse(google___protobuf___message___Message): feature_sets : typing___Optional[typing___Iterable[feast___core___FeatureSet_pb2___FeatureSet]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> ListFeatureSetsResponse: ... + def FromString(cls, s: builtin___bytes) -> ListFeatureSetsResponse: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -150,7 +157,7 @@ class ListStoresRequest(google___protobuf___message___Message): name : typing___Optional[typing___Text] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> ListStoresRequest.Filter: ... + def FromString(cls, s: builtin___bytes) -> ListStoresRequest.Filter: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -167,14 +174,14 @@ class ListStoresRequest(google___protobuf___message___Message): filter : typing___Optional[ListStoresRequest.Filter] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> ListStoresRequest: ... + def FromString(cls, s: builtin___bytes) -> ListStoresRequest: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"filter"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"filter"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"filter"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"filter",b"filter"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"filter",b"filter"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"filter",b"filter"]) -> None: ... class ListStoresResponse(google___protobuf___message___Message): @@ -188,7 +195,7 @@ class ListStoresResponse(google___protobuf___message___Message): store : typing___Optional[typing___Iterable[feast___core___Store_pb2___Store]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> ListStoresResponse: ... + def FromString(cls, s: builtin___bytes) -> ListStoresResponse: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -207,36 +214,36 @@ class ApplyFeatureSetRequest(google___protobuf___message___Message): feature_set : typing___Optional[feast___core___FeatureSet_pb2___FeatureSet] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> ApplyFeatureSetRequest: ... + def FromString(cls, s: builtin___bytes) -> ApplyFeatureSetRequest: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"feature_set"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"feature_set"]) -> builtin___bool: ... 
def ClearField(self, field_name: typing_extensions___Literal[u"feature_set"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"feature_set",b"feature_set"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"feature_set",b"feature_set"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"feature_set",b"feature_set"]) -> None: ... class ApplyFeatureSetResponse(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - class Status(int): + class Status(builtin___int): DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... @classmethod - def Name(cls, number: int) -> str: ... + def Name(cls, number: builtin___int) -> builtin___str: ... @classmethod - def Value(cls, name: str) -> ApplyFeatureSetResponse.Status: ... + def Value(cls, name: builtin___str) -> 'ApplyFeatureSetResponse.Status': ... @classmethod - def keys(cls) -> typing___List[str]: ... + def keys(cls) -> typing___List[builtin___str]: ... @classmethod - def values(cls) -> typing___List[ApplyFeatureSetResponse.Status]: ... + def values(cls) -> typing___List['ApplyFeatureSetResponse.Status']: ... @classmethod - def items(cls) -> typing___List[typing___Tuple[str, ApplyFeatureSetResponse.Status]]: ... - NO_CHANGE = typing___cast(ApplyFeatureSetResponse.Status, 0) - CREATED = typing___cast(ApplyFeatureSetResponse.Status, 1) - ERROR = typing___cast(ApplyFeatureSetResponse.Status, 2) - NO_CHANGE = typing___cast(ApplyFeatureSetResponse.Status, 0) - CREATED = typing___cast(ApplyFeatureSetResponse.Status, 1) - ERROR = typing___cast(ApplyFeatureSetResponse.Status, 2) + def items(cls) -> typing___List[typing___Tuple[builtin___str, 'ApplyFeatureSetResponse.Status']]: ... + NO_CHANGE = typing___cast('ApplyFeatureSetResponse.Status', 0) + CREATED = typing___cast('ApplyFeatureSetResponse.Status', 1) + ERROR = typing___cast('ApplyFeatureSetResponse.Status', 2) + NO_CHANGE = typing___cast('ApplyFeatureSetResponse.Status', 0) + CREATED = typing___cast('ApplyFeatureSetResponse.Status', 1) + ERROR = typing___cast('ApplyFeatureSetResponse.Status', 2) status = ... # type: ApplyFeatureSetResponse.Status @@ -249,14 +256,14 @@ class ApplyFeatureSetResponse(google___protobuf___message___Message): status : typing___Optional[ApplyFeatureSetResponse.Status] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> ApplyFeatureSetResponse: ... + def FromString(cls, s: builtin___bytes) -> ApplyFeatureSetResponse: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"feature_set"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"feature_set"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"feature_set",u"status"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"feature_set",b"feature_set"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"feature_set",b"feature_set"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"feature_set",b"feature_set",u"status",b"status"]) -> None: ... 
class GetFeastCoreVersionRequest(google___protobuf___message___Message): @@ -265,7 +272,7 @@ class GetFeastCoreVersionRequest(google___protobuf___message___Message): def __init__(self, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> GetFeastCoreVersionRequest: ... + def FromString(cls, s: builtin___bytes) -> GetFeastCoreVersionRequest: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... @@ -278,7 +285,7 @@ class GetFeastCoreVersionResponse(google___protobuf___message___Message): version : typing___Optional[typing___Text] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> GetFeastCoreVersionResponse: ... + def FromString(cls, s: builtin___bytes) -> GetFeastCoreVersionResponse: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -297,34 +304,34 @@ class UpdateStoreRequest(google___protobuf___message___Message): store : typing___Optional[feast___core___Store_pb2___Store] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> UpdateStoreRequest: ... + def FromString(cls, s: builtin___bytes) -> UpdateStoreRequest: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"store"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"store"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"store"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"store",b"store"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"store",b"store"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"store",b"store"]) -> None: ... class UpdateStoreResponse(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - class Status(int): + class Status(builtin___int): DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... @classmethod - def Name(cls, number: int) -> str: ... + def Name(cls, number: builtin___int) -> builtin___str: ... @classmethod - def Value(cls, name: str) -> UpdateStoreResponse.Status: ... + def Value(cls, name: builtin___str) -> 'UpdateStoreResponse.Status': ... @classmethod - def keys(cls) -> typing___List[str]: ... + def keys(cls) -> typing___List[builtin___str]: ... @classmethod - def values(cls) -> typing___List[UpdateStoreResponse.Status]: ... + def values(cls) -> typing___List['UpdateStoreResponse.Status']: ... @classmethod - def items(cls) -> typing___List[typing___Tuple[str, UpdateStoreResponse.Status]]: ... - NO_CHANGE = typing___cast(UpdateStoreResponse.Status, 0) - UPDATED = typing___cast(UpdateStoreResponse.Status, 1) - NO_CHANGE = typing___cast(UpdateStoreResponse.Status, 0) - UPDATED = typing___cast(UpdateStoreResponse.Status, 1) + def items(cls) -> typing___List[typing___Tuple[builtin___str, 'UpdateStoreResponse.Status']]: ... 
+ NO_CHANGE = typing___cast('UpdateStoreResponse.Status', 0) + UPDATED = typing___cast('UpdateStoreResponse.Status', 1) + NO_CHANGE = typing___cast('UpdateStoreResponse.Status', 0) + UPDATED = typing___cast('UpdateStoreResponse.Status', 1) status = ... # type: UpdateStoreResponse.Status @@ -337,14 +344,14 @@ class UpdateStoreResponse(google___protobuf___message___Message): status : typing___Optional[UpdateStoreResponse.Status] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> UpdateStoreResponse: ... + def FromString(cls, s: builtin___bytes) -> UpdateStoreResponse: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"store"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"store"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"status",u"store"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"store",b"store"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"store",b"store"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"status",b"status",u"store",b"store"]) -> None: ... class CreateProjectRequest(google___protobuf___message___Message): @@ -356,7 +363,7 @@ class CreateProjectRequest(google___protobuf___message___Message): name : typing___Optional[typing___Text] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> CreateProjectRequest: ... + def FromString(cls, s: builtin___bytes) -> CreateProjectRequest: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -370,7 +377,7 @@ class CreateProjectResponse(google___protobuf___message___Message): def __init__(self, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> CreateProjectResponse: ... + def FromString(cls, s: builtin___bytes) -> CreateProjectResponse: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... @@ -383,7 +390,7 @@ class ArchiveProjectRequest(google___protobuf___message___Message): name : typing___Optional[typing___Text] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> ArchiveProjectRequest: ... + def FromString(cls, s: builtin___bytes) -> ArchiveProjectRequest: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -397,7 +404,7 @@ class ArchiveProjectResponse(google___protobuf___message___Message): def __init__(self, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> ArchiveProjectResponse: ... + def FromString(cls, s: builtin___bytes) -> ArchiveProjectResponse: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... @@ -407,7 +414,7 @@ class ListProjectsRequest(google___protobuf___message___Message): def __init__(self, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> ListProjectsRequest: ... 
+ def FromString(cls, s: builtin___bytes) -> ListProjectsRequest: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... @@ -420,7 +427,7 @@ class ListProjectsResponse(google___protobuf___message___Message): projects : typing___Optional[typing___Iterable[typing___Text]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> ListProjectsResponse: ... + def FromString(cls, s: builtin___bytes) -> ListProjectsResponse: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): diff --git a/sdk/python/feast/core/FeatureSet_pb2.py b/sdk/python/feast/core/FeatureSet_pb2.py index 991220ccae..f265061002 100644 --- a/sdk/python/feast/core/FeatureSet_pb2.py +++ b/sdk/python/feast/core/FeatureSet_pb2.py @@ -18,6 +18,7 @@ from feast.core import Source_pb2 as feast_dot_core_dot_Source__pb2 from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from tensorflow_metadata.proto.v0 import schema_pb2 as tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -25,9 +26,9 @@ package='feast.core', syntax='proto3', serialized_options=_b('\n\nfeast.coreB\017FeatureSetProtoZ/github.com/gojek/feast/sdk/go/protos/feast/core'), - serialized_pb=_b('\n\x1b\x66\x65\x61st/core/FeatureSet.proto\x12\nfeast.core\x1a\x17\x66\x65\x61st/types/Value.proto\x1a\x17\x66\x65\x61st/core/Source.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"`\n\nFeatureSet\x12(\n\x04spec\x18\x01 \x01(\x0b\x32\x1a.feast.core.FeatureSetSpec\x12(\n\x04meta\x18\x02 \x01(\x0b\x32\x1a.feast.core.FeatureSetMeta\"\xe5\x01\n\x0e\x46\x65\x61tureSetSpec\x12\x0f\n\x07project\x18\x07 \x01(\t\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12(\n\x08\x65ntities\x18\x03 \x03(\x0b\x32\x16.feast.core.EntitySpec\x12)\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x17.feast.core.FeatureSpec\x12*\n\x07max_age\x18\x05 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\"\n\x06source\x18\x06 \x01(\x0b\x32\x12.feast.core.Source\"K\n\nEntitySpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\nvalue_type\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\"L\n\x0b\x46\x65\x61tureSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\nvalue_type\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\"u\n\x0e\x46\x65\x61tureSetMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x06status\x18\x02 \x01(\x0e\x32\x1c.feast.core.FeatureSetStatus*L\n\x10\x46\x65\x61tureSetStatus\x12\x12\n\x0eSTATUS_INVALID\x10\x00\x12\x12\n\x0eSTATUS_PENDING\x10\x01\x12\x10\n\x0cSTATUS_READY\x10\x02\x42N\n\nfeast.coreB\x0f\x46\x65\x61tureSetProtoZ/github.com/gojek/feast/sdk/go/protos/feast/coreb\x06proto3') + serialized_pb=_b('\n\x1b\x66\x65\x61st/core/FeatureSet.proto\x12\nfeast.core\x1a\x17\x66\x65\x61st/types/Value.proto\x1a\x17\x66\x65\x61st/core/Source.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a)tensorflow_metadata/proto/v0/schema.proto\"`\n\nFeatureSet\x12(\n\x04spec\x18\x01 \x01(\x0b\x32\x1a.feast.core.FeatureSetSpec\x12(\n\x04meta\x18\x02 \x01(\x0b\x32\x1a.feast.core.FeatureSetMeta\"\xe5\x01\n\x0e\x46\x65\x61tureSetSpec\x12\x0f\n\x07project\x18\x07 
\x01(\t\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12(\n\x08\x65ntities\x18\x03 \x03(\x0b\x32\x16.feast.core.EntitySpec\x12)\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x17.feast.core.FeatureSpec\x12*\n\x07max_age\x18\x05 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\"\n\x06source\x18\x06 \x01(\x0b\x32\x12.feast.core.Source\"\xbf\x08\n\nEntitySpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\nvalue_type\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\x12;\n\x08presence\x18\x03 \x01(\x0b\x32\'.tensorflow.metadata.v0.FeaturePresenceH\x00\x12L\n\x0egroup_presence\x18\x04 \x01(\x0b\x32\x32.tensorflow.metadata.v0.FeaturePresenceWithinGroupH\x00\x12\x33\n\x05shape\x18\x05 \x01(\x0b\x32\".tensorflow.metadata.v0.FixedShapeH\x01\x12\x39\n\x0bvalue_count\x18\x06 \x01(\x0b\x32\".tensorflow.metadata.v0.ValueCountH\x01\x12\x10\n\x06\x64omain\x18\x07 \x01(\tH\x02\x12\x37\n\nint_domain\x18\x08 \x01(\x0b\x32!.tensorflow.metadata.v0.IntDomainH\x02\x12;\n\x0c\x66loat_domain\x18\t \x01(\x0b\x32#.tensorflow.metadata.v0.FloatDomainH\x02\x12=\n\rstring_domain\x18\n \x01(\x0b\x32$.tensorflow.metadata.v0.StringDomainH\x02\x12\x39\n\x0b\x62ool_domain\x18\x0b \x01(\x0b\x32\".tensorflow.metadata.v0.BoolDomainH\x02\x12=\n\rstruct_domain\x18\x0c \x01(\x0b\x32$.tensorflow.metadata.v0.StructDomainH\x02\x12P\n\x17natural_language_domain\x18\r \x01(\x0b\x32-.tensorflow.metadata.v0.NaturalLanguageDomainH\x02\x12;\n\x0cimage_domain\x18\x0e \x01(\x0b\x32#.tensorflow.metadata.v0.ImageDomainH\x02\x12\x37\n\nmid_domain\x18\x0f \x01(\x0b\x32!.tensorflow.metadata.v0.MIDDomainH\x02\x12\x37\n\nurl_domain\x18\x10 \x01(\x0b\x32!.tensorflow.metadata.v0.URLDomainH\x02\x12\x39\n\x0btime_domain\x18\x11 \x01(\x0b\x32\".tensorflow.metadata.v0.TimeDomainH\x02\x12\x45\n\x12time_of_day_domain\x18\x12 \x01(\x0b\x32\'.tensorflow.metadata.v0.TimeOfDayDomainH\x02\x42\x16\n\x14presence_constraintsB\x0c\n\nshape_typeB\r\n\x0b\x64omain_info\"\xc0\x08\n\x0b\x46\x65\x61tureSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\nvalue_type\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\x12;\n\x08presence\x18\x03 \x01(\x0b\x32\'.tensorflow.metadata.v0.FeaturePresenceH\x00\x12L\n\x0egroup_presence\x18\x04 \x01(\x0b\x32\x32.tensorflow.metadata.v0.FeaturePresenceWithinGroupH\x00\x12\x33\n\x05shape\x18\x05 \x01(\x0b\x32\".tensorflow.metadata.v0.FixedShapeH\x01\x12\x39\n\x0bvalue_count\x18\x06 \x01(\x0b\x32\".tensorflow.metadata.v0.ValueCountH\x01\x12\x10\n\x06\x64omain\x18\x07 \x01(\tH\x02\x12\x37\n\nint_domain\x18\x08 \x01(\x0b\x32!.tensorflow.metadata.v0.IntDomainH\x02\x12;\n\x0c\x66loat_domain\x18\t \x01(\x0b\x32#.tensorflow.metadata.v0.FloatDomainH\x02\x12=\n\rstring_domain\x18\n \x01(\x0b\x32$.tensorflow.metadata.v0.StringDomainH\x02\x12\x39\n\x0b\x62ool_domain\x18\x0b \x01(\x0b\x32\".tensorflow.metadata.v0.BoolDomainH\x02\x12=\n\rstruct_domain\x18\x0c \x01(\x0b\x32$.tensorflow.metadata.v0.StructDomainH\x02\x12P\n\x17natural_language_domain\x18\r \x01(\x0b\x32-.tensorflow.metadata.v0.NaturalLanguageDomainH\x02\x12;\n\x0cimage_domain\x18\x0e \x01(\x0b\x32#.tensorflow.metadata.v0.ImageDomainH\x02\x12\x37\n\nmid_domain\x18\x0f \x01(\x0b\x32!.tensorflow.metadata.v0.MIDDomainH\x02\x12\x37\n\nurl_domain\x18\x10 \x01(\x0b\x32!.tensorflow.metadata.v0.URLDomainH\x02\x12\x39\n\x0btime_domain\x18\x11 \x01(\x0b\x32\".tensorflow.metadata.v0.TimeDomainH\x02\x12\x45\n\x12time_of_day_domain\x18\x12 
\x01(\x0b\x32\'.tensorflow.metadata.v0.TimeOfDayDomainH\x02\x42\x16\n\x14presence_constraintsB\x0c\n\nshape_typeB\r\n\x0b\x64omain_info\"u\n\x0e\x46\x65\x61tureSetMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x06status\x18\x02 \x01(\x0e\x32\x1c.feast.core.FeatureSetStatus*L\n\x10\x46\x65\x61tureSetStatus\x12\x12\n\x0eSTATUS_INVALID\x10\x00\x12\x12\n\x0eSTATUS_PENDING\x10\x01\x12\x10\n\x0cSTATUS_READY\x10\x02\x42N\n\nfeast.coreB\x0f\x46\x65\x61tureSetProtoZ/github.com/gojek/feast/sdk/go/protos/feast/coreb\x06proto3') , - dependencies=[feast_dot_types_dot_Value__pb2.DESCRIPTOR,feast_dot_core_dot_Source__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) + dependencies=[feast_dot_types_dot_Value__pb2.DESCRIPTOR,feast_dot_core_dot_Source__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2.DESCRIPTOR,]) _FEATURESETSTATUS = _descriptor.EnumDescriptor( name='FeatureSetStatus', @@ -50,8 +51,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=762, - serialized_end=838, + serialized_start=2831, + serialized_end=2907, ) _sym_db.RegisterEnumDescriptor(_FEATURESETSTATUS) @@ -95,8 +96,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=158, - serialized_end=254, + serialized_start=201, + serialized_end=297, ) @@ -168,8 +169,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=257, - serialized_end=486, + serialized_start=300, + serialized_end=529, ) @@ -194,6 +195,118 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='presence', full_name='feast.core.EntitySpec.presence', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='group_presence', full_name='feast.core.EntitySpec.group_presence', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='shape', full_name='feast.core.EntitySpec.shape', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value_count', full_name='feast.core.EntitySpec.value_count', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='domain', full_name='feast.core.EntitySpec.domain', index=6, + number=7, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + 
name='int_domain', full_name='feast.core.EntitySpec.int_domain', index=7, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='float_domain', full_name='feast.core.EntitySpec.float_domain', index=8, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='string_domain', full_name='feast.core.EntitySpec.string_domain', index=9, + number=10, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bool_domain', full_name='feast.core.EntitySpec.bool_domain', index=10, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='struct_domain', full_name='feast.core.EntitySpec.struct_domain', index=11, + number=12, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='natural_language_domain', full_name='feast.core.EntitySpec.natural_language_domain', index=12, + number=13, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='image_domain', full_name='feast.core.EntitySpec.image_domain', index=13, + number=14, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mid_domain', full_name='feast.core.EntitySpec.mid_domain', index=14, + number=15, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='url_domain', full_name='feast.core.EntitySpec.url_domain', index=15, + number=16, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='time_domain', full_name='feast.core.EntitySpec.time_domain', index=16, + number=17, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='time_of_day_domain', 
full_name='feast.core.EntitySpec.time_of_day_domain', index=17, + number=18, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -205,9 +318,18 @@ syntax='proto3', extension_ranges=[], oneofs=[ + _descriptor.OneofDescriptor( + name='presence_constraints', full_name='feast.core.EntitySpec.presence_constraints', + index=0, containing_type=None, fields=[]), + _descriptor.OneofDescriptor( + name='shape_type', full_name='feast.core.EntitySpec.shape_type', + index=1, containing_type=None, fields=[]), + _descriptor.OneofDescriptor( + name='domain_info', full_name='feast.core.EntitySpec.domain_info', + index=2, containing_type=None, fields=[]), ], - serialized_start=488, - serialized_end=563, + serialized_start=532, + serialized_end=1619, ) @@ -232,6 +354,118 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='presence', full_name='feast.core.FeatureSpec.presence', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='group_presence', full_name='feast.core.FeatureSpec.group_presence', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='shape', full_name='feast.core.FeatureSpec.shape', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value_count', full_name='feast.core.FeatureSpec.value_count', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='domain', full_name='feast.core.FeatureSpec.domain', index=6, + number=7, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='int_domain', full_name='feast.core.FeatureSpec.int_domain', index=7, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='float_domain', full_name='feast.core.FeatureSpec.float_domain', index=8, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + 
_descriptor.FieldDescriptor( + name='string_domain', full_name='feast.core.FeatureSpec.string_domain', index=9, + number=10, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bool_domain', full_name='feast.core.FeatureSpec.bool_domain', index=10, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='struct_domain', full_name='feast.core.FeatureSpec.struct_domain', index=11, + number=12, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='natural_language_domain', full_name='feast.core.FeatureSpec.natural_language_domain', index=12, + number=13, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='image_domain', full_name='feast.core.FeatureSpec.image_domain', index=13, + number=14, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mid_domain', full_name='feast.core.FeatureSpec.mid_domain', index=14, + number=15, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='url_domain', full_name='feast.core.FeatureSpec.url_domain', index=15, + number=16, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='time_domain', full_name='feast.core.FeatureSpec.time_domain', index=16, + number=17, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='time_of_day_domain', full_name='feast.core.FeatureSpec.time_of_day_domain', index=17, + number=18, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -243,9 +477,18 @@ syntax='proto3', extension_ranges=[], oneofs=[ + _descriptor.OneofDescriptor( + name='presence_constraints', full_name='feast.core.FeatureSpec.presence_constraints', + index=0, containing_type=None, fields=[]), + _descriptor.OneofDescriptor( + name='shape_type', full_name='feast.core.FeatureSpec.shape_type', + index=1, 
containing_type=None, fields=[]), + _descriptor.OneofDescriptor( + name='domain_info', full_name='feast.core.FeatureSpec.domain_info', + index=2, containing_type=None, fields=[]), ], - serialized_start=565, - serialized_end=641, + serialized_start=1622, + serialized_end=2710, ) @@ -282,8 +525,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=643, - serialized_end=760, + serialized_start=2712, + serialized_end=2829, ) _FEATURESET.fields_by_name['spec'].message_type = _FEATURESETSPEC @@ -293,7 +536,133 @@ _FEATURESETSPEC.fields_by_name['max_age'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION _FEATURESETSPEC.fields_by_name['source'].message_type = feast_dot_core_dot_Source__pb2._SOURCE _ENTITYSPEC.fields_by_name['value_type'].enum_type = feast_dot_types_dot_Value__pb2._VALUETYPE_ENUM +_ENTITYSPEC.fields_by_name['presence'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._FEATUREPRESENCE +_ENTITYSPEC.fields_by_name['group_presence'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._FEATUREPRESENCEWITHINGROUP +_ENTITYSPEC.fields_by_name['shape'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._FIXEDSHAPE +_ENTITYSPEC.fields_by_name['value_count'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._VALUECOUNT +_ENTITYSPEC.fields_by_name['int_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._INTDOMAIN +_ENTITYSPEC.fields_by_name['float_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._FLOATDOMAIN +_ENTITYSPEC.fields_by_name['string_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._STRINGDOMAIN +_ENTITYSPEC.fields_by_name['bool_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._BOOLDOMAIN +_ENTITYSPEC.fields_by_name['struct_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._STRUCTDOMAIN +_ENTITYSPEC.fields_by_name['natural_language_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._NATURALLANGUAGEDOMAIN +_ENTITYSPEC.fields_by_name['image_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._IMAGEDOMAIN +_ENTITYSPEC.fields_by_name['mid_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._MIDDOMAIN +_ENTITYSPEC.fields_by_name['url_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._URLDOMAIN +_ENTITYSPEC.fields_by_name['time_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._TIMEDOMAIN +_ENTITYSPEC.fields_by_name['time_of_day_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._TIMEOFDAYDOMAIN +_ENTITYSPEC.oneofs_by_name['presence_constraints'].fields.append( + _ENTITYSPEC.fields_by_name['presence']) +_ENTITYSPEC.fields_by_name['presence'].containing_oneof = _ENTITYSPEC.oneofs_by_name['presence_constraints'] +_ENTITYSPEC.oneofs_by_name['presence_constraints'].fields.append( + _ENTITYSPEC.fields_by_name['group_presence']) +_ENTITYSPEC.fields_by_name['group_presence'].containing_oneof = _ENTITYSPEC.oneofs_by_name['presence_constraints'] +_ENTITYSPEC.oneofs_by_name['shape_type'].fields.append( + _ENTITYSPEC.fields_by_name['shape']) +_ENTITYSPEC.fields_by_name['shape'].containing_oneof = _ENTITYSPEC.oneofs_by_name['shape_type'] +_ENTITYSPEC.oneofs_by_name['shape_type'].fields.append( + _ENTITYSPEC.fields_by_name['value_count']) +_ENTITYSPEC.fields_by_name['value_count'].containing_oneof = 
_ENTITYSPEC.oneofs_by_name['shape_type'] +_ENTITYSPEC.oneofs_by_name['domain_info'].fields.append( + _ENTITYSPEC.fields_by_name['domain']) +_ENTITYSPEC.fields_by_name['domain'].containing_oneof = _ENTITYSPEC.oneofs_by_name['domain_info'] +_ENTITYSPEC.oneofs_by_name['domain_info'].fields.append( + _ENTITYSPEC.fields_by_name['int_domain']) +_ENTITYSPEC.fields_by_name['int_domain'].containing_oneof = _ENTITYSPEC.oneofs_by_name['domain_info'] +_ENTITYSPEC.oneofs_by_name['domain_info'].fields.append( + _ENTITYSPEC.fields_by_name['float_domain']) +_ENTITYSPEC.fields_by_name['float_domain'].containing_oneof = _ENTITYSPEC.oneofs_by_name['domain_info'] +_ENTITYSPEC.oneofs_by_name['domain_info'].fields.append( + _ENTITYSPEC.fields_by_name['string_domain']) +_ENTITYSPEC.fields_by_name['string_domain'].containing_oneof = _ENTITYSPEC.oneofs_by_name['domain_info'] +_ENTITYSPEC.oneofs_by_name['domain_info'].fields.append( + _ENTITYSPEC.fields_by_name['bool_domain']) +_ENTITYSPEC.fields_by_name['bool_domain'].containing_oneof = _ENTITYSPEC.oneofs_by_name['domain_info'] +_ENTITYSPEC.oneofs_by_name['domain_info'].fields.append( + _ENTITYSPEC.fields_by_name['struct_domain']) +_ENTITYSPEC.fields_by_name['struct_domain'].containing_oneof = _ENTITYSPEC.oneofs_by_name['domain_info'] +_ENTITYSPEC.oneofs_by_name['domain_info'].fields.append( + _ENTITYSPEC.fields_by_name['natural_language_domain']) +_ENTITYSPEC.fields_by_name['natural_language_domain'].containing_oneof = _ENTITYSPEC.oneofs_by_name['domain_info'] +_ENTITYSPEC.oneofs_by_name['domain_info'].fields.append( + _ENTITYSPEC.fields_by_name['image_domain']) +_ENTITYSPEC.fields_by_name['image_domain'].containing_oneof = _ENTITYSPEC.oneofs_by_name['domain_info'] +_ENTITYSPEC.oneofs_by_name['domain_info'].fields.append( + _ENTITYSPEC.fields_by_name['mid_domain']) +_ENTITYSPEC.fields_by_name['mid_domain'].containing_oneof = _ENTITYSPEC.oneofs_by_name['domain_info'] +_ENTITYSPEC.oneofs_by_name['domain_info'].fields.append( + _ENTITYSPEC.fields_by_name['url_domain']) +_ENTITYSPEC.fields_by_name['url_domain'].containing_oneof = _ENTITYSPEC.oneofs_by_name['domain_info'] +_ENTITYSPEC.oneofs_by_name['domain_info'].fields.append( + _ENTITYSPEC.fields_by_name['time_domain']) +_ENTITYSPEC.fields_by_name['time_domain'].containing_oneof = _ENTITYSPEC.oneofs_by_name['domain_info'] +_ENTITYSPEC.oneofs_by_name['domain_info'].fields.append( + _ENTITYSPEC.fields_by_name['time_of_day_domain']) +_ENTITYSPEC.fields_by_name['time_of_day_domain'].containing_oneof = _ENTITYSPEC.oneofs_by_name['domain_info'] _FEATURESPEC.fields_by_name['value_type'].enum_type = feast_dot_types_dot_Value__pb2._VALUETYPE_ENUM +_FEATURESPEC.fields_by_name['presence'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._FEATUREPRESENCE +_FEATURESPEC.fields_by_name['group_presence'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._FEATUREPRESENCEWITHINGROUP +_FEATURESPEC.fields_by_name['shape'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._FIXEDSHAPE +_FEATURESPEC.fields_by_name['value_count'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._VALUECOUNT +_FEATURESPEC.fields_by_name['int_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._INTDOMAIN +_FEATURESPEC.fields_by_name['float_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._FLOATDOMAIN +_FEATURESPEC.fields_by_name['string_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._STRINGDOMAIN 
+_FEATURESPEC.fields_by_name['bool_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._BOOLDOMAIN +_FEATURESPEC.fields_by_name['struct_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._STRUCTDOMAIN +_FEATURESPEC.fields_by_name['natural_language_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._NATURALLANGUAGEDOMAIN +_FEATURESPEC.fields_by_name['image_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._IMAGEDOMAIN +_FEATURESPEC.fields_by_name['mid_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._MIDDOMAIN +_FEATURESPEC.fields_by_name['url_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._URLDOMAIN +_FEATURESPEC.fields_by_name['time_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._TIMEDOMAIN +_FEATURESPEC.fields_by_name['time_of_day_domain'].message_type = tensorflow__metadata_dot_proto_dot_v0_dot_schema__pb2._TIMEOFDAYDOMAIN +_FEATURESPEC.oneofs_by_name['presence_constraints'].fields.append( + _FEATURESPEC.fields_by_name['presence']) +_FEATURESPEC.fields_by_name['presence'].containing_oneof = _FEATURESPEC.oneofs_by_name['presence_constraints'] +_FEATURESPEC.oneofs_by_name['presence_constraints'].fields.append( + _FEATURESPEC.fields_by_name['group_presence']) +_FEATURESPEC.fields_by_name['group_presence'].containing_oneof = _FEATURESPEC.oneofs_by_name['presence_constraints'] +_FEATURESPEC.oneofs_by_name['shape_type'].fields.append( + _FEATURESPEC.fields_by_name['shape']) +_FEATURESPEC.fields_by_name['shape'].containing_oneof = _FEATURESPEC.oneofs_by_name['shape_type'] +_FEATURESPEC.oneofs_by_name['shape_type'].fields.append( + _FEATURESPEC.fields_by_name['value_count']) +_FEATURESPEC.fields_by_name['value_count'].containing_oneof = _FEATURESPEC.oneofs_by_name['shape_type'] +_FEATURESPEC.oneofs_by_name['domain_info'].fields.append( + _FEATURESPEC.fields_by_name['domain']) +_FEATURESPEC.fields_by_name['domain'].containing_oneof = _FEATURESPEC.oneofs_by_name['domain_info'] +_FEATURESPEC.oneofs_by_name['domain_info'].fields.append( + _FEATURESPEC.fields_by_name['int_domain']) +_FEATURESPEC.fields_by_name['int_domain'].containing_oneof = _FEATURESPEC.oneofs_by_name['domain_info'] +_FEATURESPEC.oneofs_by_name['domain_info'].fields.append( + _FEATURESPEC.fields_by_name['float_domain']) +_FEATURESPEC.fields_by_name['float_domain'].containing_oneof = _FEATURESPEC.oneofs_by_name['domain_info'] +_FEATURESPEC.oneofs_by_name['domain_info'].fields.append( + _FEATURESPEC.fields_by_name['string_domain']) +_FEATURESPEC.fields_by_name['string_domain'].containing_oneof = _FEATURESPEC.oneofs_by_name['domain_info'] +_FEATURESPEC.oneofs_by_name['domain_info'].fields.append( + _FEATURESPEC.fields_by_name['bool_domain']) +_FEATURESPEC.fields_by_name['bool_domain'].containing_oneof = _FEATURESPEC.oneofs_by_name['domain_info'] +_FEATURESPEC.oneofs_by_name['domain_info'].fields.append( + _FEATURESPEC.fields_by_name['struct_domain']) +_FEATURESPEC.fields_by_name['struct_domain'].containing_oneof = _FEATURESPEC.oneofs_by_name['domain_info'] +_FEATURESPEC.oneofs_by_name['domain_info'].fields.append( + _FEATURESPEC.fields_by_name['natural_language_domain']) +_FEATURESPEC.fields_by_name['natural_language_domain'].containing_oneof = _FEATURESPEC.oneofs_by_name['domain_info'] +_FEATURESPEC.oneofs_by_name['domain_info'].fields.append( + _FEATURESPEC.fields_by_name['image_domain']) 
+_FEATURESPEC.fields_by_name['image_domain'].containing_oneof = _FEATURESPEC.oneofs_by_name['domain_info'] +_FEATURESPEC.oneofs_by_name['domain_info'].fields.append( + _FEATURESPEC.fields_by_name['mid_domain']) +_FEATURESPEC.fields_by_name['mid_domain'].containing_oneof = _FEATURESPEC.oneofs_by_name['domain_info'] +_FEATURESPEC.oneofs_by_name['domain_info'].fields.append( + _FEATURESPEC.fields_by_name['url_domain']) +_FEATURESPEC.fields_by_name['url_domain'].containing_oneof = _FEATURESPEC.oneofs_by_name['domain_info'] +_FEATURESPEC.oneofs_by_name['domain_info'].fields.append( + _FEATURESPEC.fields_by_name['time_domain']) +_FEATURESPEC.fields_by_name['time_domain'].containing_oneof = _FEATURESPEC.oneofs_by_name['domain_info'] +_FEATURESPEC.oneofs_by_name['domain_info'].fields.append( + _FEATURESPEC.fields_by_name['time_of_day_domain']) +_FEATURESPEC.fields_by_name['time_of_day_domain'].containing_oneof = _FEATURESPEC.oneofs_by_name['domain_info'] _FEATURESETMETA.fields_by_name['created_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _FEATURESETMETA.fields_by_name['status'].enum_type = _FEATURESETSTATUS DESCRIPTOR.message_types_by_name['FeatureSet'] = _FEATURESET diff --git a/sdk/python/feast/core/FeatureSet_pb2.pyi b/sdk/python/feast/core/FeatureSet_pb2.pyi index c663c70c68..fa3c626a41 100644 --- a/sdk/python/feast/core/FeatureSet_pb2.pyi +++ b/sdk/python/feast/core/FeatureSet_pb2.pyi @@ -29,6 +29,24 @@ from google.protobuf.timestamp_pb2 import ( Timestamp as google___protobuf___timestamp_pb2___Timestamp, ) +from tensorflow_metadata.proto.v0.schema_pb2 import ( + BoolDomain as tensorflow_metadata___proto___v0___schema_pb2___BoolDomain, + FeaturePresence as tensorflow_metadata___proto___v0___schema_pb2___FeaturePresence, + FeaturePresenceWithinGroup as tensorflow_metadata___proto___v0___schema_pb2___FeaturePresenceWithinGroup, + FixedShape as tensorflow_metadata___proto___v0___schema_pb2___FixedShape, + FloatDomain as tensorflow_metadata___proto___v0___schema_pb2___FloatDomain, + ImageDomain as tensorflow_metadata___proto___v0___schema_pb2___ImageDomain, + IntDomain as tensorflow_metadata___proto___v0___schema_pb2___IntDomain, + MIDDomain as tensorflow_metadata___proto___v0___schema_pb2___MIDDomain, + NaturalLanguageDomain as tensorflow_metadata___proto___v0___schema_pb2___NaturalLanguageDomain, + StringDomain as tensorflow_metadata___proto___v0___schema_pb2___StringDomain, + StructDomain as tensorflow_metadata___proto___v0___schema_pb2___StructDomain, + TimeDomain as tensorflow_metadata___proto___v0___schema_pb2___TimeDomain, + TimeOfDayDomain as tensorflow_metadata___proto___v0___schema_pb2___TimeOfDayDomain, + URLDomain as tensorflow_metadata___proto___v0___schema_pb2___URLDomain, + ValueCount as tensorflow_metadata___proto___v0___schema_pb2___ValueCount, +) + from typing import ( Iterable as typing___Iterable, List as typing___List, @@ -36,6 +54,7 @@ from typing import ( Text as typing___Text, Tuple as typing___Tuple, cast as typing___cast, + overload as typing___overload, ) from typing_extensions import ( @@ -43,24 +62,31 @@ from typing_extensions import ( ) -class FeatureSetStatus(int): +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int +builtin___str = str + + +class FeatureSetStatus(builtin___int): DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... @classmethod - def Name(cls, number: int) -> str: ... + def Name(cls, number: builtin___int) -> builtin___str: ... 
@classmethod - def Value(cls, name: str) -> FeatureSetStatus: ... + def Value(cls, name: builtin___str) -> 'FeatureSetStatus': ... @classmethod - def keys(cls) -> typing___List[str]: ... + def keys(cls) -> typing___List[builtin___str]: ... @classmethod - def values(cls) -> typing___List[FeatureSetStatus]: ... + def values(cls) -> typing___List['FeatureSetStatus']: ... @classmethod - def items(cls) -> typing___List[typing___Tuple[str, FeatureSetStatus]]: ... - STATUS_INVALID = typing___cast(FeatureSetStatus, 0) - STATUS_PENDING = typing___cast(FeatureSetStatus, 1) - STATUS_READY = typing___cast(FeatureSetStatus, 2) -STATUS_INVALID = typing___cast(FeatureSetStatus, 0) -STATUS_PENDING = typing___cast(FeatureSetStatus, 1) -STATUS_READY = typing___cast(FeatureSetStatus, 2) + def items(cls) -> typing___List[typing___Tuple[builtin___str, 'FeatureSetStatus']]: ... + STATUS_INVALID = typing___cast('FeatureSetStatus', 0) + STATUS_PENDING = typing___cast('FeatureSetStatus', 1) + STATUS_READY = typing___cast('FeatureSetStatus', 2) +STATUS_INVALID = typing___cast('FeatureSetStatus', 0) +STATUS_PENDING = typing___cast('FeatureSetStatus', 1) +STATUS_READY = typing___cast('FeatureSetStatus', 2) class FeatureSet(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... @@ -77,21 +103,21 @@ class FeatureSet(google___protobuf___message___Message): meta : typing___Optional[FeatureSetMeta] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> FeatureSet: ... + def FromString(cls, s: builtin___bytes) -> FeatureSet: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"meta",u"spec"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"meta",u"spec"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"meta",u"spec"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"meta",b"meta",u"spec",b"spec"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"meta",b"meta",u"spec",b"spec"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"meta",b"meta",u"spec",b"spec"]) -> None: ... class FeatureSetSpec(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... project = ... # type: typing___Text name = ... # type: typing___Text - version = ... # type: int + version = ... # type: builtin___int @property def entities(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[EntitySpec]: ... @@ -109,60 +135,200 @@ class FeatureSetSpec(google___protobuf___message___Message): *, project : typing___Optional[typing___Text] = None, name : typing___Optional[typing___Text] = None, - version : typing___Optional[int] = None, + version : typing___Optional[builtin___int] = None, entities : typing___Optional[typing___Iterable[EntitySpec]] = None, features : typing___Optional[typing___Iterable[FeatureSpec]] = None, max_age : typing___Optional[google___protobuf___duration_pb2___Duration] = None, source : typing___Optional[feast___core___Source_pb2___Source] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> FeatureSetSpec: ... + def FromString(cls, s: builtin___bytes) -> FeatureSetSpec: ... 
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"max_age",u"source"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"max_age",u"source"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"entities",u"features",u"max_age",u"name",u"project",u"source",u"version"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"max_age",b"max_age",u"source",b"source"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"max_age",b"max_age",u"source",b"source"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"entities",b"entities",u"features",b"features",u"max_age",b"max_age",u"name",b"name",u"project",b"project",u"source",b"source",u"version",b"version"]) -> None: ... class EntitySpec(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... name = ... # type: typing___Text value_type = ... # type: feast___types___Value_pb2___ValueType.Enum + domain = ... # type: typing___Text + + @property + def presence(self) -> tensorflow_metadata___proto___v0___schema_pb2___FeaturePresence: ... + + @property + def group_presence(self) -> tensorflow_metadata___proto___v0___schema_pb2___FeaturePresenceWithinGroup: ... + + @property + def shape(self) -> tensorflow_metadata___proto___v0___schema_pb2___FixedShape: ... + + @property + def value_count(self) -> tensorflow_metadata___proto___v0___schema_pb2___ValueCount: ... + + @property + def int_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___IntDomain: ... + + @property + def float_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___FloatDomain: ... + + @property + def string_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___StringDomain: ... + + @property + def bool_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___BoolDomain: ... + + @property + def struct_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___StructDomain: ... + + @property + def natural_language_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___NaturalLanguageDomain: ... + + @property + def image_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___ImageDomain: ... + + @property + def mid_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___MIDDomain: ... + + @property + def url_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___URLDomain: ... + + @property + def time_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___TimeDomain: ... + + @property + def time_of_day_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___TimeOfDayDomain: ... 
def __init__(self, *, name : typing___Optional[typing___Text] = None, value_type : typing___Optional[feast___types___Value_pb2___ValueType.Enum] = None, + presence : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___FeaturePresence] = None, + group_presence : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___FeaturePresenceWithinGroup] = None, + shape : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___FixedShape] = None, + value_count : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___ValueCount] = None, + domain : typing___Optional[typing___Text] = None, + int_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___IntDomain] = None, + float_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___FloatDomain] = None, + string_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___StringDomain] = None, + bool_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___BoolDomain] = None, + struct_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___StructDomain] = None, + natural_language_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___NaturalLanguageDomain] = None, + image_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___ImageDomain] = None, + mid_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___MIDDomain] = None, + url_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___URLDomain] = None, + time_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___TimeDomain] = None, + time_of_day_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___TimeOfDayDomain] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> EntitySpec: ... + def FromString(cls, s: builtin___bytes) -> EntitySpec: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"name",u"value_type"]) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"bool_domain",u"domain",u"domain_info",u"float_domain",u"group_presence",u"image_domain",u"int_domain",u"mid_domain",u"natural_language_domain",u"presence",u"presence_constraints",u"shape",u"shape_type",u"string_domain",u"struct_domain",u"time_domain",u"time_of_day_domain",u"url_domain",u"value_count"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"bool_domain",u"domain",u"domain_info",u"float_domain",u"group_presence",u"image_domain",u"int_domain",u"mid_domain",u"name",u"natural_language_domain",u"presence",u"presence_constraints",u"shape",u"shape_type",u"string_domain",u"struct_domain",u"time_domain",u"time_of_day_domain",u"url_domain",u"value_count",u"value_type"]) -> None: ... else: - def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name",u"value_type",b"value_type"]) -> None: ... 
+ def HasField(self, field_name: typing_extensions___Literal[u"bool_domain",b"bool_domain",u"domain",b"domain",u"domain_info",b"domain_info",u"float_domain",b"float_domain",u"group_presence",b"group_presence",u"image_domain",b"image_domain",u"int_domain",b"int_domain",u"mid_domain",b"mid_domain",u"natural_language_domain",b"natural_language_domain",u"presence",b"presence",u"presence_constraints",b"presence_constraints",u"shape",b"shape",u"shape_type",b"shape_type",u"string_domain",b"string_domain",u"struct_domain",b"struct_domain",u"time_domain",b"time_domain",u"time_of_day_domain",b"time_of_day_domain",u"url_domain",b"url_domain",u"value_count",b"value_count"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"bool_domain",b"bool_domain",u"domain",b"domain",u"domain_info",b"domain_info",u"float_domain",b"float_domain",u"group_presence",b"group_presence",u"image_domain",b"image_domain",u"int_domain",b"int_domain",u"mid_domain",b"mid_domain",u"name",b"name",u"natural_language_domain",b"natural_language_domain",u"presence",b"presence",u"presence_constraints",b"presence_constraints",u"shape",b"shape",u"shape_type",b"shape_type",u"string_domain",b"string_domain",u"struct_domain",b"struct_domain",u"time_domain",b"time_domain",u"time_of_day_domain",b"time_of_day_domain",u"url_domain",b"url_domain",u"value_count",b"value_count",u"value_type",b"value_type"]) -> None: ... + @typing___overload + def WhichOneof(self, oneof_group: typing_extensions___Literal[u"domain_info",b"domain_info"]) -> typing_extensions___Literal["domain","int_domain","float_domain","string_domain","bool_domain","struct_domain","natural_language_domain","image_domain","mid_domain","url_domain","time_domain","time_of_day_domain"]: ... + @typing___overload + def WhichOneof(self, oneof_group: typing_extensions___Literal[u"presence_constraints",b"presence_constraints"]) -> typing_extensions___Literal["presence","group_presence"]: ... + @typing___overload + def WhichOneof(self, oneof_group: typing_extensions___Literal[u"shape_type",b"shape_type"]) -> typing_extensions___Literal["shape","value_count"]: ... class FeatureSpec(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... name = ... # type: typing___Text value_type = ... # type: feast___types___Value_pb2___ValueType.Enum + domain = ... # type: typing___Text + + @property + def presence(self) -> tensorflow_metadata___proto___v0___schema_pb2___FeaturePresence: ... + + @property + def group_presence(self) -> tensorflow_metadata___proto___v0___schema_pb2___FeaturePresenceWithinGroup: ... + + @property + def shape(self) -> tensorflow_metadata___proto___v0___schema_pb2___FixedShape: ... + + @property + def value_count(self) -> tensorflow_metadata___proto___v0___schema_pb2___ValueCount: ... + + @property + def int_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___IntDomain: ... + + @property + def float_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___FloatDomain: ... + + @property + def string_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___StringDomain: ... + + @property + def bool_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___BoolDomain: ... + + @property + def struct_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___StructDomain: ... + + @property + def natural_language_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___NaturalLanguageDomain: ... 
+ + @property + def image_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___ImageDomain: ... + + @property + def mid_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___MIDDomain: ... + + @property + def url_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___URLDomain: ... + + @property + def time_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___TimeDomain: ... + + @property + def time_of_day_domain(self) -> tensorflow_metadata___proto___v0___schema_pb2___TimeOfDayDomain: ... def __init__(self, *, name : typing___Optional[typing___Text] = None, value_type : typing___Optional[feast___types___Value_pb2___ValueType.Enum] = None, + presence : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___FeaturePresence] = None, + group_presence : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___FeaturePresenceWithinGroup] = None, + shape : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___FixedShape] = None, + value_count : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___ValueCount] = None, + domain : typing___Optional[typing___Text] = None, + int_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___IntDomain] = None, + float_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___FloatDomain] = None, + string_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___StringDomain] = None, + bool_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___BoolDomain] = None, + struct_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___StructDomain] = None, + natural_language_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___NaturalLanguageDomain] = None, + image_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___ImageDomain] = None, + mid_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___MIDDomain] = None, + url_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___URLDomain] = None, + time_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___TimeDomain] = None, + time_of_day_domain : typing___Optional[tensorflow_metadata___proto___v0___schema_pb2___TimeOfDayDomain] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> FeatureSpec: ... + def FromString(cls, s: builtin___bytes) -> FeatureSpec: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"name",u"value_type"]) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"bool_domain",u"domain",u"domain_info",u"float_domain",u"group_presence",u"image_domain",u"int_domain",u"mid_domain",u"natural_language_domain",u"presence",u"presence_constraints",u"shape",u"shape_type",u"string_domain",u"struct_domain",u"time_domain",u"time_of_day_domain",u"url_domain",u"value_count"]) -> builtin___bool: ... 
+ def ClearField(self, field_name: typing_extensions___Literal[u"bool_domain",u"domain",u"domain_info",u"float_domain",u"group_presence",u"image_domain",u"int_domain",u"mid_domain",u"name",u"natural_language_domain",u"presence",u"presence_constraints",u"shape",u"shape_type",u"string_domain",u"struct_domain",u"time_domain",u"time_of_day_domain",u"url_domain",u"value_count",u"value_type"]) -> None: ... else: - def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name",u"value_type",b"value_type"]) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"bool_domain",b"bool_domain",u"domain",b"domain",u"domain_info",b"domain_info",u"float_domain",b"float_domain",u"group_presence",b"group_presence",u"image_domain",b"image_domain",u"int_domain",b"int_domain",u"mid_domain",b"mid_domain",u"natural_language_domain",b"natural_language_domain",u"presence",b"presence",u"presence_constraints",b"presence_constraints",u"shape",b"shape",u"shape_type",b"shape_type",u"string_domain",b"string_domain",u"struct_domain",b"struct_domain",u"time_domain",b"time_domain",u"time_of_day_domain",b"time_of_day_domain",u"url_domain",b"url_domain",u"value_count",b"value_count"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"bool_domain",b"bool_domain",u"domain",b"domain",u"domain_info",b"domain_info",u"float_domain",b"float_domain",u"group_presence",b"group_presence",u"image_domain",b"image_domain",u"int_domain",b"int_domain",u"mid_domain",b"mid_domain",u"name",b"name",u"natural_language_domain",b"natural_language_domain",u"presence",b"presence",u"presence_constraints",b"presence_constraints",u"shape",b"shape",u"shape_type",b"shape_type",u"string_domain",b"string_domain",u"struct_domain",b"struct_domain",u"time_domain",b"time_domain",u"time_of_day_domain",b"time_of_day_domain",u"url_domain",b"url_domain",u"value_count",b"value_count",u"value_type",b"value_type"]) -> None: ... + @typing___overload + def WhichOneof(self, oneof_group: typing_extensions___Literal[u"domain_info",b"domain_info"]) -> typing_extensions___Literal["domain","int_domain","float_domain","string_domain","bool_domain","struct_domain","natural_language_domain","image_domain","mid_domain","url_domain","time_domain","time_of_day_domain"]: ... + @typing___overload + def WhichOneof(self, oneof_group: typing_extensions___Literal[u"presence_constraints",b"presence_constraints"]) -> typing_extensions___Literal["presence","group_presence"]: ... + @typing___overload + def WhichOneof(self, oneof_group: typing_extensions___Literal[u"shape_type",b"shape_type"]) -> typing_extensions___Literal["shape","value_count"]: ... class FeatureSetMeta(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... @@ -177,12 +343,12 @@ class FeatureSetMeta(google___protobuf___message___Message): status : typing___Optional[FeatureSetStatus] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> FeatureSetMeta: ... + def FromString(cls, s: builtin___bytes) -> FeatureSetMeta: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"created_timestamp"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"created_timestamp"]) -> builtin___bool: ... 
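# --- Illustrative sketch (annotation, not part of the generated stubs) ---
# With the tensorflow_metadata fields added to FeatureSpec above, a feature
# can now carry presence and domain constraints directly on the proto.
# Assumes the regenerated feast.core.FeatureSet_pb2, feast.types.Value_pb2
# and tensorflow_metadata packages are importable; the feature name and the
# constraint values below are hypothetical.
from feast.core.FeatureSet_pb2 import FeatureSpec
from feast.types.Value_pb2 import ValueType
from tensorflow_metadata.proto.v0.schema_pb2 import FeaturePresence, IntDomain

feature = FeatureSpec(
    name="total_rides",
    value_type=ValueType.INT64,
    # Require the feature to be present in at least 90% of rows.
    presence=FeaturePresence(min_fraction=0.9),
    # Constrain accepted values to a non-negative integer range.
    int_domain=IntDomain(min=0, max=10000),
)
# Only one member of the 'domain_info' oneof can be set at a time.
assert feature.WhichOneof("domain_info") == "int_domain"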
def ClearField(self, field_name: typing_extensions___Literal[u"created_timestamp",u"status"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"created_timestamp",b"created_timestamp"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"created_timestamp",b"created_timestamp"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"created_timestamp",b"created_timestamp",u"status",b"status"]) -> None: ... diff --git a/sdk/python/feast/core/Source_pb2.pyi b/sdk/python/feast/core/Source_pb2.pyi index 0521ac34f8..fec428a07a 100644 --- a/sdk/python/feast/core/Source_pb2.pyi +++ b/sdk/python/feast/core/Source_pb2.pyi @@ -22,22 +22,29 @@ from typing_extensions import ( ) -class SourceType(int): +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int +builtin___str = str + + +class SourceType(builtin___int): DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... @classmethod - def Name(cls, number: int) -> str: ... + def Name(cls, number: builtin___int) -> builtin___str: ... @classmethod - def Value(cls, name: str) -> SourceType: ... + def Value(cls, name: builtin___str) -> 'SourceType': ... @classmethod - def keys(cls) -> typing___List[str]: ... + def keys(cls) -> typing___List[builtin___str]: ... @classmethod - def values(cls) -> typing___List[SourceType]: ... + def values(cls) -> typing___List['SourceType']: ... @classmethod - def items(cls) -> typing___List[typing___Tuple[str, SourceType]]: ... - INVALID = typing___cast(SourceType, 0) - KAFKA = typing___cast(SourceType, 1) -INVALID = typing___cast(SourceType, 0) -KAFKA = typing___cast(SourceType, 1) + def items(cls) -> typing___List[typing___Tuple[builtin___str, 'SourceType']]: ... + INVALID = typing___cast('SourceType', 0) + KAFKA = typing___cast('SourceType', 1) +INVALID = typing___cast('SourceType', 0) +KAFKA = typing___cast('SourceType', 1) class Source(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... @@ -52,14 +59,14 @@ class Source(google___protobuf___message___Message): kafka_source_config : typing___Optional[KafkaSourceConfig] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> Source: ... + def FromString(cls, s: builtin___bytes) -> Source: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"kafka_source_config",u"source_config"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"kafka_source_config",u"source_config"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"kafka_source_config",u"source_config",u"type"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"kafka_source_config",b"kafka_source_config",u"source_config",b"source_config"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"kafka_source_config",b"kafka_source_config",u"source_config",b"source_config"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"kafka_source_config",b"kafka_source_config",u"source_config",b"source_config",u"type",b"type"]) -> None: ... 
def WhichOneof(self, oneof_group: typing_extensions___Literal[u"source_config",b"source_config"]) -> typing_extensions___Literal["kafka_source_config"]: ... @@ -74,7 +81,7 @@ class KafkaSourceConfig(google___protobuf___message___Message): topic : typing___Optional[typing___Text] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> KafkaSourceConfig: ... + def FromString(cls, s: builtin___bytes) -> KafkaSourceConfig: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): diff --git a/sdk/python/feast/core/Store_pb2.py b/sdk/python/feast/core/Store_pb2.py index 716a597b9a..9adf197b0a 100644 --- a/sdk/python/feast/core/Store_pb2.py +++ b/sdk/python/feast/core/Store_pb2.py @@ -20,7 +20,7 @@ package='feast.core', syntax='proto3', serialized_options=_b('\n\nfeast.coreB\nStoreProtoZ/github.com/gojek/feast/sdk/go/protos/feast/core'), - serialized_pb=_b('\n\x16\x66\x65\x61st/core/Store.proto\x12\nfeast.core\"\xca\x04\n\x05Store\x12\x0c\n\x04name\x18\x01 \x01(\t\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.feast.core.Store.StoreType\x12\x35\n\rsubscriptions\x18\x04 \x03(\x0b\x32\x1e.feast.core.Store.Subscription\x12\x35\n\x0credis_config\x18\x0b \x01(\x0b\x32\x1d.feast.core.Store.RedisConfigH\x00\x12;\n\x0f\x62igquery_config\x18\x0c \x01(\x0b\x32 .feast.core.Store.BigQueryConfigH\x00\x12=\n\x10\x63\x61ssandra_config\x18\r \x01(\x0b\x32!.feast.core.Store.CassandraConfigH\x00\x1a)\n\x0bRedisConfig\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\x1a\x38\n\x0e\x42igQueryConfig\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x1a-\n\x0f\x43\x61ssandraConfig\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\x1a>\n\x0cSubscription\x12\x0f\n\x07project\x18\x03 \x01(\t\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\"@\n\tStoreType\x12\x0b\n\x07INVALID\x10\x00\x12\t\n\x05REDIS\x10\x01\x12\x0c\n\x08\x42IGQUERY\x10\x02\x12\r\n\tCASSANDRA\x10\x03\x42\x08\n\x06\x63onfigBI\n\nfeast.coreB\nStoreProtoZ/github.com/gojek/feast/sdk/go/protos/feast/coreb\x06proto3') + serialized_pb=_b('\n\x16\x66\x65\x61st/core/Store.proto\x12\nfeast.core\"\xfb\x04\n\x05Store\x12\x0c\n\x04name\x18\x01 \x01(\t\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.feast.core.Store.StoreType\x12\x35\n\rsubscriptions\x18\x04 \x03(\x0b\x32\x1e.feast.core.Store.Subscription\x12\x35\n\x0credis_config\x18\x0b \x01(\x0b\x32\x1d.feast.core.Store.RedisConfigH\x00\x12;\n\x0f\x62igquery_config\x18\x0c \x01(\x0b\x32 .feast.core.Store.BigQueryConfigH\x00\x12=\n\x10\x63\x61ssandra_config\x18\r \x01(\x0b\x32!.feast.core.Store.CassandraConfigH\x00\x1aZ\n\x0bRedisConfig\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\x12\x1a\n\x12initial_backoff_ms\x18\x03 \x01(\x05\x12\x13\n\x0bmax_retries\x18\x04 \x01(\x05\x1a\x38\n\x0e\x42igQueryConfig\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x1a-\n\x0f\x43\x61ssandraConfig\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\x1a>\n\x0cSubscription\x12\x0f\n\x07project\x18\x03 \x01(\t\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\"@\n\tStoreType\x12\x0b\n\x07INVALID\x10\x00\x12\t\n\x05REDIS\x10\x01\x12\x0c\n\x08\x42IGQUERY\x10\x02\x12\r\n\tCASSANDRA\x10\x03\x42\x08\n\x06\x63onfigBI\n\nfeast.coreB\nStoreProtoZ/github.com/gojek/feast/sdk/go/protos/feast/coreb\x06proto3') ) @@ -50,8 +50,8 @@ 
], containing_type=None, serialized_options=None, - serialized_start=551, - serialized_end=615, + serialized_start=600, + serialized_end=664, ) _sym_db.RegisterEnumDescriptor(_STORE_STORETYPE) @@ -77,6 +77,20 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='initial_backoff_ms', full_name='feast.core.Store.RedisConfig.initial_backoff_ms', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_retries', full_name='feast.core.Store.RedisConfig.max_retries', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -90,7 +104,7 @@ oneofs=[ ], serialized_start=339, - serialized_end=380, + serialized_end=429, ) _STORE_BIGQUERYCONFIG = _descriptor.Descriptor( @@ -126,8 +140,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=382, - serialized_end=438, + serialized_start=431, + serialized_end=487, ) _STORE_CASSANDRACONFIG = _descriptor.Descriptor( @@ -163,8 +177,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=440, - serialized_end=485, + serialized_start=489, + serialized_end=534, ) _STORE_SUBSCRIPTION = _descriptor.Descriptor( @@ -207,8 +221,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=487, - serialized_end=549, + serialized_start=536, + serialized_end=598, ) _STORE = _descriptor.Descriptor( @@ -277,7 +291,7 @@ index=0, containing_type=None, fields=[]), ], serialized_start=39, - serialized_end=625, + serialized_end=674, ) _STORE_REDISCONFIG.containing_type = _STORE diff --git a/sdk/python/feast/core/Store_pb2.pyi b/sdk/python/feast/core/Store_pb2.pyi index 541bcd329b..049969de4a 100644 --- a/sdk/python/feast/core/Store_pb2.pyi +++ b/sdk/python/feast/core/Store_pb2.pyi @@ -27,47 +27,58 @@ from typing_extensions import ( ) +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int +builtin___str = str + + class Store(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - class StoreType(int): + class StoreType(builtin___int): DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... @classmethod - def Name(cls, number: int) -> str: ... + def Name(cls, number: builtin___int) -> builtin___str: ... @classmethod - def Value(cls, name: str) -> Store.StoreType: ... + def Value(cls, name: builtin___str) -> 'Store.StoreType': ... @classmethod - def keys(cls) -> typing___List[str]: ... + def keys(cls) -> typing___List[builtin___str]: ... @classmethod - def values(cls) -> typing___List[Store.StoreType]: ... + def values(cls) -> typing___List['Store.StoreType']: ... @classmethod - def items(cls) -> typing___List[typing___Tuple[str, Store.StoreType]]: ... 
- INVALID = typing___cast(Store.StoreType, 0) - REDIS = typing___cast(Store.StoreType, 1) - BIGQUERY = typing___cast(Store.StoreType, 2) - CASSANDRA = typing___cast(Store.StoreType, 3) - INVALID = typing___cast(Store.StoreType, 0) - REDIS = typing___cast(Store.StoreType, 1) - BIGQUERY = typing___cast(Store.StoreType, 2) - CASSANDRA = typing___cast(Store.StoreType, 3) + def items(cls) -> typing___List[typing___Tuple[builtin___str, 'Store.StoreType']]: ... + INVALID = typing___cast('Store.StoreType', 0) + REDIS = typing___cast('Store.StoreType', 1) + BIGQUERY = typing___cast('Store.StoreType', 2) + CASSANDRA = typing___cast('Store.StoreType', 3) + INVALID = typing___cast('Store.StoreType', 0) + REDIS = typing___cast('Store.StoreType', 1) + BIGQUERY = typing___cast('Store.StoreType', 2) + CASSANDRA = typing___cast('Store.StoreType', 3) class RedisConfig(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... host = ... # type: typing___Text - port = ... # type: int + port = ... # type: builtin___int + initial_backoff_ms = ... # type: builtin___int + max_retries = ... # type: builtin___int def __init__(self, *, host : typing___Optional[typing___Text] = None, - port : typing___Optional[int] = None, + port : typing___Optional[builtin___int] = None, + initial_backoff_ms : typing___Optional[builtin___int] = None, + max_retries : typing___Optional[builtin___int] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> Store.RedisConfig: ... + def FromString(cls, s: builtin___bytes) -> Store.RedisConfig: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"host",u"port"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"host",u"initial_backoff_ms",u"max_retries",u"port"]) -> None: ... else: - def ClearField(self, field_name: typing_extensions___Literal[u"host",b"host",u"port",b"port"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"host",b"host",u"initial_backoff_ms",b"initial_backoff_ms",u"max_retries",b"max_retries",u"port",b"port"]) -> None: ... class BigQueryConfig(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... @@ -80,7 +91,7 @@ class Store(google___protobuf___message___Message): dataset_id : typing___Optional[typing___Text] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> Store.BigQueryConfig: ... + def FromString(cls, s: builtin___bytes) -> Store.BigQueryConfig: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -91,15 +102,15 @@ class Store(google___protobuf___message___Message): class CassandraConfig(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... host = ... # type: typing___Text - port = ... # type: int + port = ... # type: builtin___int def __init__(self, *, host : typing___Optional[typing___Text] = None, - port : typing___Optional[int] = None, + port : typing___Optional[builtin___int] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> Store.CassandraConfig: ... + def FromString(cls, s: builtin___bytes) -> Store.CassandraConfig: ... 
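# --- Illustrative sketch (annotation, not part of the generated stubs) ---
# The regenerated Store proto above adds retry tuning to RedisConfig.
# Assumes the regenerated feast.core.Store_pb2 module is importable; the
# store name, host/port and retry values below are hypothetical.
from feast.core.Store_pb2 import Store

store = Store(
    name="online-store",
    type=Store.REDIS,
    redis_config=Store.RedisConfig(
        host="localhost",
        port=6379,
        # Fields introduced by this change: initial retry backoff (ms)
        # and the maximum number of retries.
        initial_backoff_ms=100,
        max_retries=3,
    ),
)
assert store.WhichOneof("config") == "redis_config"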
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -120,7 +131,7 @@ class Store(google___protobuf___message___Message): version : typing___Optional[typing___Text] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> Store.Subscription: ... + def FromString(cls, s: builtin___bytes) -> Store.Subscription: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -153,13 +164,13 @@ class Store(google___protobuf___message___Message): cassandra_config : typing___Optional[Store.CassandraConfig] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> Store: ... + def FromString(cls, s: builtin___bytes) -> Store: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"bigquery_config",u"cassandra_config",u"config",u"redis_config"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"bigquery_config",u"cassandra_config",u"config",u"redis_config"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"bigquery_config",u"cassandra_config",u"config",u"name",u"redis_config",u"subscriptions",u"type"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"bigquery_config",b"bigquery_config",u"cassandra_config",b"cassandra_config",u"config",b"config",u"redis_config",b"redis_config"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"bigquery_config",b"bigquery_config",u"cassandra_config",b"cassandra_config",u"config",b"config",u"redis_config",b"redis_config"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"bigquery_config",b"bigquery_config",u"cassandra_config",b"cassandra_config",u"config",b"config",u"name",b"name",u"redis_config",b"redis_config",u"subscriptions",b"subscriptions",u"type",b"type"]) -> None: ... def WhichOneof(self, oneof_group: typing_extensions___Literal[u"config",b"config"]) -> typing_extensions___Literal["redis_config","bigquery_config","cassandra_config"]: ... diff --git a/sdk/python/feast/serving/ServingService_pb2.pyi b/sdk/python/feast/serving/ServingService_pb2.pyi index e10245d6c7..6fab5e46c9 100644 --- a/sdk/python/feast/serving/ServingService_pb2.pyi +++ b/sdk/python/feast/serving/ServingService_pb2.pyi @@ -42,79 +42,86 @@ from typing_extensions import ( ) -class FeastServingType(int): +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int +builtin___str = str + + +class FeastServingType(builtin___int): DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... @classmethod - def Name(cls, number: int) -> str: ... + def Name(cls, number: builtin___int) -> builtin___str: ... @classmethod - def Value(cls, name: str) -> FeastServingType: ... + def Value(cls, name: builtin___str) -> 'FeastServingType': ... @classmethod - def keys(cls) -> typing___List[str]: ... + def keys(cls) -> typing___List[builtin___str]: ... @classmethod - def values(cls) -> typing___List[FeastServingType]: ... + def values(cls) -> typing___List['FeastServingType']: ... 
@classmethod - def items(cls) -> typing___List[typing___Tuple[str, FeastServingType]]: ... - FEAST_SERVING_TYPE_INVALID = typing___cast(FeastServingType, 0) - FEAST_SERVING_TYPE_ONLINE = typing___cast(FeastServingType, 1) - FEAST_SERVING_TYPE_BATCH = typing___cast(FeastServingType, 2) -FEAST_SERVING_TYPE_INVALID = typing___cast(FeastServingType, 0) -FEAST_SERVING_TYPE_ONLINE = typing___cast(FeastServingType, 1) -FEAST_SERVING_TYPE_BATCH = typing___cast(FeastServingType, 2) - -class JobType(int): + def items(cls) -> typing___List[typing___Tuple[builtin___str, 'FeastServingType']]: ... + FEAST_SERVING_TYPE_INVALID = typing___cast('FeastServingType', 0) + FEAST_SERVING_TYPE_ONLINE = typing___cast('FeastServingType', 1) + FEAST_SERVING_TYPE_BATCH = typing___cast('FeastServingType', 2) +FEAST_SERVING_TYPE_INVALID = typing___cast('FeastServingType', 0) +FEAST_SERVING_TYPE_ONLINE = typing___cast('FeastServingType', 1) +FEAST_SERVING_TYPE_BATCH = typing___cast('FeastServingType', 2) + +class JobType(builtin___int): DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... @classmethod - def Name(cls, number: int) -> str: ... + def Name(cls, number: builtin___int) -> builtin___str: ... @classmethod - def Value(cls, name: str) -> JobType: ... + def Value(cls, name: builtin___str) -> 'JobType': ... @classmethod - def keys(cls) -> typing___List[str]: ... + def keys(cls) -> typing___List[builtin___str]: ... @classmethod - def values(cls) -> typing___List[JobType]: ... + def values(cls) -> typing___List['JobType']: ... @classmethod - def items(cls) -> typing___List[typing___Tuple[str, JobType]]: ... - JOB_TYPE_INVALID = typing___cast(JobType, 0) - JOB_TYPE_DOWNLOAD = typing___cast(JobType, 1) -JOB_TYPE_INVALID = typing___cast(JobType, 0) -JOB_TYPE_DOWNLOAD = typing___cast(JobType, 1) + def items(cls) -> typing___List[typing___Tuple[builtin___str, 'JobType']]: ... + JOB_TYPE_INVALID = typing___cast('JobType', 0) + JOB_TYPE_DOWNLOAD = typing___cast('JobType', 1) +JOB_TYPE_INVALID = typing___cast('JobType', 0) +JOB_TYPE_DOWNLOAD = typing___cast('JobType', 1) -class JobStatus(int): +class JobStatus(builtin___int): DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... @classmethod - def Name(cls, number: int) -> str: ... + def Name(cls, number: builtin___int) -> builtin___str: ... @classmethod - def Value(cls, name: str) -> JobStatus: ... + def Value(cls, name: builtin___str) -> 'JobStatus': ... @classmethod - def keys(cls) -> typing___List[str]: ... + def keys(cls) -> typing___List[builtin___str]: ... @classmethod - def values(cls) -> typing___List[JobStatus]: ... + def values(cls) -> typing___List['JobStatus']: ... @classmethod - def items(cls) -> typing___List[typing___Tuple[str, JobStatus]]: ... - JOB_STATUS_INVALID = typing___cast(JobStatus, 0) - JOB_STATUS_PENDING = typing___cast(JobStatus, 1) - JOB_STATUS_RUNNING = typing___cast(JobStatus, 2) - JOB_STATUS_DONE = typing___cast(JobStatus, 3) -JOB_STATUS_INVALID = typing___cast(JobStatus, 0) -JOB_STATUS_PENDING = typing___cast(JobStatus, 1) -JOB_STATUS_RUNNING = typing___cast(JobStatus, 2) -JOB_STATUS_DONE = typing___cast(JobStatus, 3) - -class DataFormat(int): + def items(cls) -> typing___List[typing___Tuple[builtin___str, 'JobStatus']]: ... 
+ JOB_STATUS_INVALID = typing___cast('JobStatus', 0) + JOB_STATUS_PENDING = typing___cast('JobStatus', 1) + JOB_STATUS_RUNNING = typing___cast('JobStatus', 2) + JOB_STATUS_DONE = typing___cast('JobStatus', 3) +JOB_STATUS_INVALID = typing___cast('JobStatus', 0) +JOB_STATUS_PENDING = typing___cast('JobStatus', 1) +JOB_STATUS_RUNNING = typing___cast('JobStatus', 2) +JOB_STATUS_DONE = typing___cast('JobStatus', 3) + +class DataFormat(builtin___int): DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... @classmethod - def Name(cls, number: int) -> str: ... + def Name(cls, number: builtin___int) -> builtin___str: ... @classmethod - def Value(cls, name: str) -> DataFormat: ... + def Value(cls, name: builtin___str) -> 'DataFormat': ... @classmethod - def keys(cls) -> typing___List[str]: ... + def keys(cls) -> typing___List[builtin___str]: ... @classmethod - def values(cls) -> typing___List[DataFormat]: ... + def values(cls) -> typing___List['DataFormat']: ... @classmethod - def items(cls) -> typing___List[typing___Tuple[str, DataFormat]]: ... - DATA_FORMAT_INVALID = typing___cast(DataFormat, 0) - DATA_FORMAT_AVRO = typing___cast(DataFormat, 1) -DATA_FORMAT_INVALID = typing___cast(DataFormat, 0) -DATA_FORMAT_AVRO = typing___cast(DataFormat, 1) + def items(cls) -> typing___List[typing___Tuple[builtin___str, 'DataFormat']]: ... + DATA_FORMAT_INVALID = typing___cast('DataFormat', 0) + DATA_FORMAT_AVRO = typing___cast('DataFormat', 1) +DATA_FORMAT_INVALID = typing___cast('DataFormat', 0) +DATA_FORMAT_AVRO = typing___cast('DataFormat', 1) class GetFeastServingInfoRequest(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... @@ -122,7 +129,7 @@ class GetFeastServingInfoRequest(google___protobuf___message___Message): def __init__(self, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> GetFeastServingInfoRequest: ... + def FromString(cls, s: builtin___bytes) -> GetFeastServingInfoRequest: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... @@ -139,7 +146,7 @@ class GetFeastServingInfoResponse(google___protobuf___message___Message): job_staging_location : typing___Optional[typing___Text] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> GetFeastServingInfoResponse: ... + def FromString(cls, s: builtin___bytes) -> GetFeastServingInfoResponse: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -151,7 +158,7 @@ class FeatureReference(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... project = ... # type: typing___Text name = ... # type: typing___Text - version = ... # type: int + version = ... # type: builtin___int @property def max_age(self) -> google___protobuf___duration_pb2___Duration: ... @@ -160,18 +167,18 @@ class FeatureReference(google___protobuf___message___Message): *, project : typing___Optional[typing___Text] = None, name : typing___Optional[typing___Text] = None, - version : typing___Optional[int] = None, + version : typing___Optional[builtin___int] = None, max_age : typing___Optional[google___protobuf___duration_pb2___Duration] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> FeatureReference: ... + def FromString(cls, s: builtin___bytes) -> FeatureReference: ... 
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"max_age"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"max_age"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"max_age",u"name",u"project",u"version"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"max_age",b"max_age"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"max_age",b"max_age"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"max_age",b"max_age",u"name",b"name",u"project",b"project",u"version",b"version"]) -> None: ... class GetOnlineFeaturesRequest(google___protobuf___message___Message): @@ -191,14 +198,14 @@ class GetOnlineFeaturesRequest(google___protobuf___message___Message): value : typing___Optional[feast___types___Value_pb2___Value] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> GetOnlineFeaturesRequest.EntityRow.FieldsEntry: ... + def FromString(cls, s: builtin___bytes) -> GetOnlineFeaturesRequest.EntityRow.FieldsEntry: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"value"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"value"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"key",b"key",u"value",b"value"]) -> None: ... @@ -214,17 +221,17 @@ class GetOnlineFeaturesRequest(google___protobuf___message___Message): fields : typing___Optional[typing___Mapping[typing___Text, feast___types___Value_pb2___Value]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> GetOnlineFeaturesRequest.EntityRow: ... + def FromString(cls, s: builtin___bytes) -> GetOnlineFeaturesRequest.EntityRow: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"entity_timestamp"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"entity_timestamp"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"entity_timestamp",u"fields"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"entity_timestamp",b"entity_timestamp"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"entity_timestamp",b"entity_timestamp"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"entity_timestamp",b"entity_timestamp",u"fields",b"fields"]) -> None: ... - omit_entities_in_response = ... # type: bool + omit_entities_in_response = ... 
# type: builtin___bool @property def features(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[FeatureReference]: ... @@ -236,10 +243,10 @@ class GetOnlineFeaturesRequest(google___protobuf___message___Message): *, features : typing___Optional[typing___Iterable[FeatureReference]] = None, entity_rows : typing___Optional[typing___Iterable[GetOnlineFeaturesRequest.EntityRow]] = None, - omit_entities_in_response : typing___Optional[bool] = None, + omit_entities_in_response : typing___Optional[builtin___bool] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> GetOnlineFeaturesRequest: ... + def FromString(cls, s: builtin___bytes) -> GetOnlineFeaturesRequest: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -262,14 +269,14 @@ class GetBatchFeaturesRequest(google___protobuf___message___Message): dataset_source : typing___Optional[DatasetSource] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> GetBatchFeaturesRequest: ... + def FromString(cls, s: builtin___bytes) -> GetBatchFeaturesRequest: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"dataset_source"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"dataset_source"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"dataset_source",u"features"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"dataset_source",b"dataset_source"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"dataset_source",b"dataset_source"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"dataset_source",b"dataset_source",u"features",b"features"]) -> None: ... class GetOnlineFeaturesResponse(google___protobuf___message___Message): @@ -289,14 +296,14 @@ class GetOnlineFeaturesResponse(google___protobuf___message___Message): value : typing___Optional[feast___types___Value_pb2___Value] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> GetOnlineFeaturesResponse.FieldValues.FieldsEntry: ... + def FromString(cls, s: builtin___bytes) -> GetOnlineFeaturesResponse.FieldValues.FieldsEntry: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"value"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"value"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"key",b"key",u"value",b"value"]) -> None: ... @@ -308,7 +315,7 @@ class GetOnlineFeaturesResponse(google___protobuf___message___Message): fields : typing___Optional[typing___Mapping[typing___Text, feast___types___Value_pb2___Value]] = None, ) -> None: ... 
@classmethod - def FromString(cls, s: bytes) -> GetOnlineFeaturesResponse.FieldValues: ... + def FromString(cls, s: builtin___bytes) -> GetOnlineFeaturesResponse.FieldValues: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -325,7 +332,7 @@ class GetOnlineFeaturesResponse(google___protobuf___message___Message): field_values : typing___Optional[typing___Iterable[GetOnlineFeaturesResponse.FieldValues]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> GetOnlineFeaturesResponse: ... + def FromString(cls, s: builtin___bytes) -> GetOnlineFeaturesResponse: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -344,14 +351,14 @@ class GetBatchFeaturesResponse(google___protobuf___message___Message): job : typing___Optional[Job] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> GetBatchFeaturesResponse: ... + def FromString(cls, s: builtin___bytes) -> GetBatchFeaturesResponse: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"job"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"job"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"job"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> None: ... class GetJobRequest(google___protobuf___message___Message): @@ -365,14 +372,14 @@ class GetJobRequest(google___protobuf___message___Message): job : typing___Optional[Job] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> GetJobRequest: ... + def FromString(cls, s: builtin___bytes) -> GetJobRequest: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"job"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"job"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"job"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> None: ... class GetJobResponse(google___protobuf___message___Message): @@ -386,14 +393,14 @@ class GetJobResponse(google___protobuf___message___Message): job : typing___Optional[Job] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> GetJobResponse: ... + def FromString(cls, s: builtin___bytes) -> GetJobResponse: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"job"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"job"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"job"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> None: ... class Job(google___protobuf___message___Message): @@ -415,7 +422,7 @@ class Job(google___protobuf___message___Message): data_format : typing___Optional[DataFormat] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> Job: ... + def FromString(cls, s: builtin___bytes) -> Job: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -436,7 +443,7 @@ class DatasetSource(google___protobuf___message___Message): data_format : typing___Optional[DataFormat] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> DatasetSource.FileSource: ... + def FromString(cls, s: builtin___bytes) -> DatasetSource.FileSource: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -453,13 +460,13 @@ class DatasetSource(google___protobuf___message___Message): file_source : typing___Optional[DatasetSource.FileSource] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> DatasetSource: ... + def FromString(cls, s: builtin___bytes) -> DatasetSource: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"dataset_source",u"file_source"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"dataset_source",u"file_source"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"dataset_source",u"file_source"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"dataset_source",b"dataset_source",u"file_source",b"file_source"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"dataset_source",b"dataset_source",u"file_source",b"file_source"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"dataset_source",b"dataset_source",u"file_source",b"file_source"]) -> None: ... def WhichOneof(self, oneof_group: typing_extensions___Literal[u"dataset_source",b"dataset_source"]) -> typing_extensions___Literal["file_source"]: ... diff --git a/sdk/python/feast/storage/Redis_pb2.pyi b/sdk/python/feast/storage/Redis_pb2.pyi index 717aae79db..9bea087885 100644 --- a/sdk/python/feast/storage/Redis_pb2.pyi +++ b/sdk/python/feast/storage/Redis_pb2.pyi @@ -27,6 +27,12 @@ from typing_extensions import ( ) +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + class RedisKey(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... feature_set = ... 
# type: typing___Text @@ -40,7 +46,7 @@ class RedisKey(google___protobuf___message___Message): entities : typing___Optional[typing___Iterable[feast___types___Field_pb2___Field]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> RedisKey: ... + def FromString(cls, s: builtin___bytes) -> RedisKey: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): diff --git a/sdk/python/feast/types/FeatureRowExtended_pb2.pyi b/sdk/python/feast/types/FeatureRowExtended_pb2.pyi index 4f3d02c8ee..8c4109a75f 100644 --- a/sdk/python/feast/types/FeatureRowExtended_pb2.pyi +++ b/sdk/python/feast/types/FeatureRowExtended_pb2.pyi @@ -26,6 +26,12 @@ from typing_extensions import ( ) +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + class Error(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... cause = ... # type: typing___Text @@ -41,7 +47,7 @@ class Error(google___protobuf___message___Message): stack_trace : typing___Optional[typing___Text] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> Error: ... + def FromString(cls, s: builtin___bytes) -> Error: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -51,25 +57,25 @@ class Error(google___protobuf___message___Message): class Attempt(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - attempts = ... # type: int + attempts = ... # type: builtin___int @property def error(self) -> Error: ... def __init__(self, *, - attempts : typing___Optional[int] = None, + attempts : typing___Optional[builtin___int] = None, error : typing___Optional[Error] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> Attempt: ... + def FromString(cls, s: builtin___bytes) -> Attempt: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"error"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"error"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"attempts",u"error"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"error",b"error"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"error",b"error"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"attempts",b"attempts",u"error",b"error"]) -> None: ... class FeatureRowExtended(google___protobuf___message___Message): @@ -91,12 +97,12 @@ class FeatureRowExtended(google___protobuf___message___Message): first_seen : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> FeatureRowExtended: ... + def FromString(cls, s: builtin___bytes) -> FeatureRowExtended: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"first_seen",u"last_attempt",u"row"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"first_seen",u"last_attempt",u"row"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"first_seen",u"last_attempt",u"row"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"first_seen",b"first_seen",u"last_attempt",b"last_attempt",u"row",b"row"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"first_seen",b"first_seen",u"last_attempt",b"last_attempt",u"row",b"row"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"first_seen",b"first_seen",u"last_attempt",b"last_attempt",u"row",b"row"]) -> None: ... diff --git a/sdk/python/feast/types/FeatureRow_pb2.pyi b/sdk/python/feast/types/FeatureRow_pb2.pyi index 9bf745f913..e634f46486 100644 --- a/sdk/python/feast/types/FeatureRow_pb2.pyi +++ b/sdk/python/feast/types/FeatureRow_pb2.pyi @@ -31,6 +31,12 @@ from typing_extensions import ( ) +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + class FeatureRow(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... feature_set = ... # type: typing___Text @@ -48,12 +54,12 @@ class FeatureRow(google___protobuf___message___Message): feature_set : typing___Optional[typing___Text] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> FeatureRow: ... + def FromString(cls, s: builtin___bytes) -> FeatureRow: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"event_timestamp"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"event_timestamp"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"event_timestamp",u"feature_set",u"fields"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"event_timestamp",b"event_timestamp"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"event_timestamp",b"event_timestamp"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"event_timestamp",b"event_timestamp",u"feature_set",b"feature_set",u"fields",b"fields"]) -> None: ... diff --git a/sdk/python/feast/types/Field_pb2.pyi b/sdk/python/feast/types/Field_pb2.pyi index 1305503fab..b5e6c1f609 100644 --- a/sdk/python/feast/types/Field_pb2.pyi +++ b/sdk/python/feast/types/Field_pb2.pyi @@ -22,6 +22,12 @@ from typing_extensions import ( ) +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + class Field(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... name = ... # type: typing___Text @@ -35,12 +41,12 @@ class Field(google___protobuf___message___Message): value : typing___Optional[feast___types___Value_pb2___Value] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> Field: ... + def FromString(cls, s: builtin___bytes) -> Field: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"value"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"value"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"name",u"value"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name",u"value",b"value"]) -> None: ... diff --git a/sdk/python/feast/types/Value_pb2.pyi b/sdk/python/feast/types/Value_pb2.pyi index d8b8a73dd3..5ead403ad9 100644 --- a/sdk/python/feast/types/Value_pb2.pyi +++ b/sdk/python/feast/types/Value_pb2.pyi @@ -27,68 +27,75 @@ from typing_extensions import ( ) +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int +builtin___str = str + + class ValueType(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - class Enum(int): + class Enum(builtin___int): DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... @classmethod - def Name(cls, number: int) -> str: ... + def Name(cls, number: builtin___int) -> builtin___str: ... @classmethod - def Value(cls, name: str) -> ValueType.Enum: ... + def Value(cls, name: builtin___str) -> 'ValueType.Enum': ... @classmethod - def keys(cls) -> typing___List[str]: ... + def keys(cls) -> typing___List[builtin___str]: ... @classmethod - def values(cls) -> typing___List[ValueType.Enum]: ... + def values(cls) -> typing___List['ValueType.Enum']: ... @classmethod - def items(cls) -> typing___List[typing___Tuple[str, ValueType.Enum]]: ... - INVALID = typing___cast(ValueType.Enum, 0) - BYTES = typing___cast(ValueType.Enum, 1) - STRING = typing___cast(ValueType.Enum, 2) - INT32 = typing___cast(ValueType.Enum, 3) - INT64 = typing___cast(ValueType.Enum, 4) - DOUBLE = typing___cast(ValueType.Enum, 5) - FLOAT = typing___cast(ValueType.Enum, 6) - BOOL = typing___cast(ValueType.Enum, 7) - BYTES_LIST = typing___cast(ValueType.Enum, 11) - STRING_LIST = typing___cast(ValueType.Enum, 12) - INT32_LIST = typing___cast(ValueType.Enum, 13) - INT64_LIST = typing___cast(ValueType.Enum, 14) - DOUBLE_LIST = typing___cast(ValueType.Enum, 15) - FLOAT_LIST = typing___cast(ValueType.Enum, 16) - BOOL_LIST = typing___cast(ValueType.Enum, 17) - INVALID = typing___cast(ValueType.Enum, 0) - BYTES = typing___cast(ValueType.Enum, 1) - STRING = typing___cast(ValueType.Enum, 2) - INT32 = typing___cast(ValueType.Enum, 3) - INT64 = typing___cast(ValueType.Enum, 4) - DOUBLE = typing___cast(ValueType.Enum, 5) - FLOAT = typing___cast(ValueType.Enum, 6) - BOOL = typing___cast(ValueType.Enum, 7) - BYTES_LIST = typing___cast(ValueType.Enum, 11) - STRING_LIST = typing___cast(ValueType.Enum, 12) - INT32_LIST = typing___cast(ValueType.Enum, 13) - INT64_LIST = typing___cast(ValueType.Enum, 14) - DOUBLE_LIST = typing___cast(ValueType.Enum, 15) - FLOAT_LIST = typing___cast(ValueType.Enum, 16) - BOOL_LIST = typing___cast(ValueType.Enum, 17) + def items(cls) -> typing___List[typing___Tuple[builtin___str, 'ValueType.Enum']]: ... 
+ INVALID = typing___cast('ValueType.Enum', 0) + BYTES = typing___cast('ValueType.Enum', 1) + STRING = typing___cast('ValueType.Enum', 2) + INT32 = typing___cast('ValueType.Enum', 3) + INT64 = typing___cast('ValueType.Enum', 4) + DOUBLE = typing___cast('ValueType.Enum', 5) + FLOAT = typing___cast('ValueType.Enum', 6) + BOOL = typing___cast('ValueType.Enum', 7) + BYTES_LIST = typing___cast('ValueType.Enum', 11) + STRING_LIST = typing___cast('ValueType.Enum', 12) + INT32_LIST = typing___cast('ValueType.Enum', 13) + INT64_LIST = typing___cast('ValueType.Enum', 14) + DOUBLE_LIST = typing___cast('ValueType.Enum', 15) + FLOAT_LIST = typing___cast('ValueType.Enum', 16) + BOOL_LIST = typing___cast('ValueType.Enum', 17) + INVALID = typing___cast('ValueType.Enum', 0) + BYTES = typing___cast('ValueType.Enum', 1) + STRING = typing___cast('ValueType.Enum', 2) + INT32 = typing___cast('ValueType.Enum', 3) + INT64 = typing___cast('ValueType.Enum', 4) + DOUBLE = typing___cast('ValueType.Enum', 5) + FLOAT = typing___cast('ValueType.Enum', 6) + BOOL = typing___cast('ValueType.Enum', 7) + BYTES_LIST = typing___cast('ValueType.Enum', 11) + STRING_LIST = typing___cast('ValueType.Enum', 12) + INT32_LIST = typing___cast('ValueType.Enum', 13) + INT64_LIST = typing___cast('ValueType.Enum', 14) + DOUBLE_LIST = typing___cast('ValueType.Enum', 15) + FLOAT_LIST = typing___cast('ValueType.Enum', 16) + BOOL_LIST = typing___cast('ValueType.Enum', 17) def __init__(self, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> ValueType: ... + def FromString(cls, s: builtin___bytes) -> ValueType: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... class Value(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - bytes_val = ... # type: bytes + bytes_val = ... # type: builtin___bytes string_val = ... # type: typing___Text - int32_val = ... # type: int - int64_val = ... # type: int - double_val = ... # type: float - float_val = ... # type: float - bool_val = ... # type: bool + int32_val = ... # type: builtin___int + int64_val = ... # type: builtin___int + double_val = ... # type: builtin___float + float_val = ... # type: builtin___float + bool_val = ... # type: builtin___bool @property def bytes_list_val(self) -> BytesList: ... @@ -113,13 +120,13 @@ class Value(google___protobuf___message___Message): def __init__(self, *, - bytes_val : typing___Optional[bytes] = None, + bytes_val : typing___Optional[builtin___bytes] = None, string_val : typing___Optional[typing___Text] = None, - int32_val : typing___Optional[int] = None, - int64_val : typing___Optional[int] = None, - double_val : typing___Optional[float] = None, - float_val : typing___Optional[float] = None, - bool_val : typing___Optional[bool] = None, + int32_val : typing___Optional[builtin___int] = None, + int64_val : typing___Optional[builtin___int] = None, + double_val : typing___Optional[builtin___float] = None, + float_val : typing___Optional[builtin___float] = None, + bool_val : typing___Optional[builtin___bool] = None, bytes_list_val : typing___Optional[BytesList] = None, string_list_val : typing___Optional[StringList] = None, int32_list_val : typing___Optional[Int32List] = None, @@ -129,27 +136,27 @@ class Value(google___protobuf___message___Message): bool_list_val : typing___Optional[BoolList] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> Value: ... 
+ def FromString(cls, s: builtin___bytes) -> Value: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"bool_list_val",u"bool_val",u"bytes_list_val",u"bytes_val",u"double_list_val",u"double_val",u"float_list_val",u"float_val",u"int32_list_val",u"int32_val",u"int64_list_val",u"int64_val",u"string_list_val",u"string_val",u"val"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"bool_list_val",u"bool_val",u"bytes_list_val",u"bytes_val",u"double_list_val",u"double_val",u"float_list_val",u"float_val",u"int32_list_val",u"int32_val",u"int64_list_val",u"int64_val",u"string_list_val",u"string_val",u"val"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"bool_list_val",u"bool_val",u"bytes_list_val",u"bytes_val",u"double_list_val",u"double_val",u"float_list_val",u"float_val",u"int32_list_val",u"int32_val",u"int64_list_val",u"int64_val",u"string_list_val",u"string_val",u"val"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"bool_list_val",b"bool_list_val",u"bool_val",b"bool_val",u"bytes_list_val",b"bytes_list_val",u"bytes_val",b"bytes_val",u"double_list_val",b"double_list_val",u"double_val",b"double_val",u"float_list_val",b"float_list_val",u"float_val",b"float_val",u"int32_list_val",b"int32_list_val",u"int32_val",b"int32_val",u"int64_list_val",b"int64_list_val",u"int64_val",b"int64_val",u"string_list_val",b"string_list_val",u"string_val",b"string_val",u"val",b"val"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"bool_list_val",b"bool_list_val",u"bool_val",b"bool_val",u"bytes_list_val",b"bytes_list_val",u"bytes_val",b"bytes_val",u"double_list_val",b"double_list_val",u"double_val",b"double_val",u"float_list_val",b"float_list_val",u"float_val",b"float_val",u"int32_list_val",b"int32_list_val",u"int32_val",b"int32_val",u"int64_list_val",b"int64_list_val",u"int64_val",b"int64_val",u"string_list_val",b"string_list_val",u"string_val",b"string_val",u"val",b"val"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"bool_list_val",b"bool_list_val",u"bool_val",b"bool_val",u"bytes_list_val",b"bytes_list_val",u"bytes_val",b"bytes_val",u"double_list_val",b"double_list_val",u"double_val",b"double_val",u"float_list_val",b"float_list_val",u"float_val",b"float_val",u"int32_list_val",b"int32_list_val",u"int32_val",b"int32_val",u"int64_list_val",b"int64_list_val",u"int64_val",b"int64_val",u"string_list_val",b"string_list_val",u"string_val",b"string_val",u"val",b"val"]) -> None: ... def WhichOneof(self, oneof_group: typing_extensions___Literal[u"val",b"val"]) -> typing_extensions___Literal["bytes_val","string_val","int32_val","int64_val","double_val","float_val","bool_val","bytes_list_val","string_list_val","int32_list_val","int64_list_val","double_list_val","float_list_val","bool_list_val"]: ... class BytesList(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes] + val = ... 
# type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___bytes] def __init__(self, *, - val : typing___Optional[typing___Iterable[bytes]] = None, + val : typing___Optional[typing___Iterable[builtin___bytes]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> BytesList: ... + def FromString(cls, s: builtin___bytes) -> BytesList: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -166,7 +173,7 @@ class StringList(google___protobuf___message___Message): val : typing___Optional[typing___Iterable[typing___Text]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> StringList: ... + def FromString(cls, s: builtin___bytes) -> StringList: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -176,14 +183,14 @@ class StringList(google___protobuf___message___Message): class Int32List(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[int] + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___int] def __init__(self, *, - val : typing___Optional[typing___Iterable[int]] = None, + val : typing___Optional[typing___Iterable[builtin___int]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> Int32List: ... + def FromString(cls, s: builtin___bytes) -> Int32List: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -193,14 +200,14 @@ class Int32List(google___protobuf___message___Message): class Int64List(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[int] + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___int] def __init__(self, *, - val : typing___Optional[typing___Iterable[int]] = None, + val : typing___Optional[typing___Iterable[builtin___int]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> Int64List: ... + def FromString(cls, s: builtin___bytes) -> Int64List: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -210,14 +217,14 @@ class Int64List(google___protobuf___message___Message): class DoubleList(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[float] + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___float] def __init__(self, *, - val : typing___Optional[typing___Iterable[float]] = None, + val : typing___Optional[typing___Iterable[builtin___float]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> DoubleList: ... + def FromString(cls, s: builtin___bytes) -> DoubleList: ... 
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -227,14 +234,14 @@ class DoubleList(google___protobuf___message___Message): class FloatList(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[float] + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___float] def __init__(self, *, - val : typing___Optional[typing___Iterable[float]] = None, + val : typing___Optional[typing___Iterable[builtin___float]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> FloatList: ... + def FromString(cls, s: builtin___bytes) -> FloatList: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): @@ -244,14 +251,14 @@ class FloatList(google___protobuf___message___Message): class BoolList(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bool] + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___bool] def __init__(self, *, - val : typing___Optional[typing___Iterable[bool]] = None, + val : typing___Optional[typing___Iterable[builtin___bool]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> BoolList: ... + def FromString(cls, s: builtin___bytes) -> BoolList: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
if sys.version_info >= (3,): From 539cd1de418ed8d5bbf94e00f735fdb2ce8f2ff0 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Wed, 29 Jan 2020 09:19:03 +0800 Subject: [PATCH 09/31] Add skeleton for update/get schema in FeatureSet --- sdk/python/feast/feature_set.py | 7 +++++++ sdk/python/tests/test_feature_set.py | 6 ++++++ 2 files changed, 13 insertions(+) diff --git a/sdk/python/feast/feature_set.py b/sdk/python/feast/feature_set.py index c47c51e5a2..f0a81df964 100644 --- a/sdk/python/feast/feature_set.py +++ b/sdk/python/feast/feature_set.py @@ -16,6 +16,7 @@ from collections import OrderedDict from typing import Dict from typing import List, Optional +from tensorflow_metadata.proto.v0.schema_pb2 import Schema import pandas as pd import pyarrow as pa @@ -663,6 +664,12 @@ def is_valid(self): if len(self.entities) == 0: raise ValueError(f"No entities found in feature set {self.name}") + def update_schema(self): + pass + + def get_schema(self) -> Schema: + pass + @classmethod def from_yaml(cls, yml: str): """ diff --git a/sdk/python/tests/test_feature_set.py b/sdk/python/tests/test_feature_set.py index 57d7a8f810..bdb58be683 100644 --- a/sdk/python/tests/test_feature_set.py +++ b/sdk/python/tests/test_feature_set.py @@ -167,3 +167,9 @@ def test_add_features_from_df_success( ) assert len(my_feature_set.features) == feature_count assert len(my_feature_set.entities) == entity_count + + def test_update_schema(self): + pass + + def test_get_schema(self): + pass From dcbf9c115b7bbf1005a36eac375f3993d39cd014 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Wed, 29 Jan 2020 09:26:50 +0800 Subject: [PATCH 10/31] Update tag name for feast ingestion job Previously missing the word 'ingestion' --- .../feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java index d889e16f19..dfba833c34 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java @@ -53,7 +53,7 @@ public abstract class WriteRowMetricsDoFn extends DoFn Date: Thu, 30 Jan 2020 09:43:08 +0800 Subject: [PATCH 11/31] Add update_schema method to FeatureSet - Update Field, Feature and Entity class with fields from presence_constraints, shape_type and domain_info --- sdk/python/feast/entity.py | 36 ++- sdk/python/feast/feature.py | 47 ++- sdk/python/feast/feature_set.py | 57 +++- sdk/python/feast/field.py | 285 ++++++++++++++++++ sdk/python/setup.py | 1 + .../tensorflow_metadata/schema_bikeshare.json | 136 +++++++++ sdk/python/tests/test_feature_set.py | 58 +++- 7 files changed, 583 insertions(+), 37 deletions(-) create mode 100644 sdk/python/tests/data/tensorflow_metadata/schema_bikeshare.json diff --git a/sdk/python/feast/entity.py b/sdk/python/feast/entity.py index 795758bc41..2e50ccb3d4 100644 --- a/sdk/python/feast/entity.py +++ b/sdk/python/feast/entity.py @@ -11,11 +11,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
+from tensorflow_metadata.proto.v0 import schema_pb2 -from feast.value_type import ValueType from feast.core.FeatureSet_pb2 import EntitySpec as EntityProto -from feast.types import Value_pb2 as ValueTypeProto from feast.field import Field +from feast.types import Value_pb2 as ValueTypeProto +from feast.value_type import ValueType class Entity(Field): @@ -29,17 +30,42 @@ def to_proto(self) -> EntityProto: Returns EntitySpec object """ value_type = ValueTypeProto.ValueType.Enum.Value(self.dtype.name) - return EntityProto(name=self.name, value_type=value_type) + return EntityProto( + name=self.name, + value_type=value_type, + presence=self.presence, + group_presence=self.group_presence, + shape=self.shape, + value_count=self.value_count, + domain=self.domain, + int_domain=self.int_domain, + float_domain=self.float_domain, + string_domain=self.string_domain, + bool_domain=self.bool_domain, + struct_domain=self.struct_domain, + natural_language_domain=self.natural_language_domain, + image_domain=self.image_domain, + mid_domain=self.mid_domain, + url_domain=self.url_domain, + time_domain=self.time_domain, + time_of_day_domain=self.time_of_day_domain, + ) @classmethod - def from_proto(cls, entity_proto: EntityProto): + def from_proto(cls, entity_proto: EntityProto, schema: schema_pb2.Schema = None): """ Creates a Feast Entity object from its Protocol Buffer representation Args: entity_proto: EntitySpec protobuf object + schema: Schema from Tensorflow metadata, will be used to reference domain + defined at the schema level Returns: Entity object """ - return cls(name=entity_proto.name, dtype=ValueType(entity_proto.value_type)) + entity = cls(name=entity_proto.name, dtype=ValueType(entity_proto.value_type)) + entity.update_presence_constraints(entity_proto) + entity.update_shape_type(entity_proto) + entity.update_domain_info(entity_proto, schema) + return entity diff --git a/sdk/python/feast/feature.py b/sdk/python/feast/feature.py index c7e3d7af8b..de63ff68d6 100644 --- a/sdk/python/feast/feature.py +++ b/sdk/python/feast/feature.py @@ -11,11 +11,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from tensorflow_metadata.proto.v0 import schema_pb2 -from feast.value_type import ValueType from feast.core.FeatureSet_pb2 import FeatureSpec as FeatureProto -from feast.types import Value_pb2 as ValueTypeProto from feast.field import Field +from feast.types import Value_pb2 as ValueTypeProto +from feast.value_type import ValueType class Feature(Field): @@ -24,9 +25,43 @@ class Feature(Field): def to_proto(self) -> FeatureProto: """Converts Feature object to its Protocol Buffer representation""" value_type = ValueTypeProto.ValueType.Enum.Value(self.dtype.name) - return FeatureProto(name=self.name, value_type=value_type) + return FeatureProto( + name=self.name, + value_type=value_type, + presence=self.presence, + group_presence=self.group_presence, + shape=self.shape, + value_count=self.value_count, + domain=self.domain, + int_domain=self.int_domain, + float_domain=self.float_domain, + string_domain=self.string_domain, + bool_domain=self.bool_domain, + struct_domain=self.struct_domain, + natural_language_domain=self.natural_language_domain, + image_domain=self.image_domain, + mid_domain=self.mid_domain, + url_domain=self.url_domain, + time_domain=self.time_domain, + time_of_day_domain=self.time_of_day_domain, + ) @classmethod - def from_proto(cls, feature_proto: FeatureProto): - """Converts Protobuf Feature to its SDK equivalent""" - return cls(name=feature_proto.name, dtype=ValueType(feature_proto.value_type)) + def from_proto(cls, feature_proto: FeatureProto, schema: schema_pb2.Schema = None): + """ + + Args: + feature_proto: FeatureSpec protobuf object + schema: Schema from Tensorflow metadata, will be used to reference domain + defined at the schema level + + Returns: + Feature object + """ + feature = cls( + name=feature_proto.name, dtype=ValueType(feature_proto.value_type) + ) + feature.update_presence_constraints(feature_proto) + feature.update_shape_type(feature_proto) + feature.update_domain_info(feature_proto, schema) + return feature diff --git a/sdk/python/feast/feature_set.py b/sdk/python/feast/feature_set.py index f0a81df964..b7b00fea17 100644 --- a/sdk/python/feast/feature_set.py +++ b/sdk/python/feast/feature_set.py @@ -11,15 +11,20 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- - +import warnings from collections import OrderedDict from typing import Dict from typing import List, Optional -from tensorflow_metadata.proto.v0.schema_pb2 import Schema import pandas as pd import pyarrow as pa +from google.protobuf import json_format +from google.protobuf.duration_pb2 import Duration +from google.protobuf.json_format import MessageToJson +from pandas.api.types import is_datetime64_ns_dtype +from pyarrow.lib import TimestampType +from tensorflow_metadata.proto.v0.schema_pb2 import Schema + from feast.core.FeatureSet_pb2 import FeatureSet as FeatureSetProto from feast.core.FeatureSet_pb2 import FeatureSetMeta as FeatureSetMetaProto from feast.core.FeatureSet_pb2 import FeatureSetSpec as FeatureSetSpecProto @@ -30,15 +35,6 @@ from feast.type_map import DATETIME_COLUMN from feast.type_map import pa_to_feast_value_type from feast.type_map import python_type_to_feast_value_type -from google.protobuf import json_format -from feast.core.FeatureSet_pb2 import FeatureSetSpec as FeatureSetSpecProto -from feast.core.FeatureSet_pb2 import FeatureSetMeta as FeatureSetMetaProto -from feast.core.FeatureSet_pb2 import FeatureSet as FeatureSetProto -from google.protobuf.duration_pb2 import Duration -from feast.type_map import python_type_to_feast_value_type -from google.protobuf.json_format import MessageToJson -from pandas.api.types import is_datetime64_ns_dtype -from pyarrow.lib import TimestampType class FeatureSet: @@ -664,11 +660,40 @@ def is_valid(self): if len(self.entities) == 0: raise ValueError(f"No entities found in feature set {self.name}") - def update_schema(self): - pass + def update_schema(self, schema: Schema): + """ + Updates presence_constraints, shape_type and domain_info for all entities + and features in the FeatureSet from schema in Tensorflow metadata. + + Args: + schema: schema from Tensorflow metadata + + Returns: + None - def get_schema(self) -> Schema: - pass + """ + name_to_feature = {f.name: f for f in self.features} + name_to_entity = {e.name: e for e in self.entities} + + for feature_from_new_schema in schema.feature: + + if feature_from_new_schema.name in name_to_feature: + feature = name_to_feature[feature_from_new_schema.name] + feature.update_presence_constraints(feature_from_new_schema) + feature.update_shape_type(feature_from_new_schema) + feature.update_domain_info(feature_from_new_schema, schema) + + elif feature_from_new_schema.name in name_to_entity: + entity = name_to_entity[feature_from_new_schema.name] + entity.update_presence_constraints(feature_from_new_schema) + entity.update_shape_type(feature_from_new_schema) + entity.update_domain_info(feature_from_new_schema, schema) + + else: + warnings.warn( + f"The provided schema contains feature name '{feature_from_new_schema.name}' " + f"that does not exist in the FeatureSet '{self.name}' in Feast" + ) @classmethod def from_yaml(cls, yml: str): diff --git a/sdk/python/feast/field.py b/sdk/python/feast/field.py index 2efd4587ff..660cf4acd5 100644 --- a/sdk/python/feast/field.py +++ b/sdk/python/feast/field.py @@ -11,6 +11,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import warnings + +from tensorflow_metadata.proto.v0 import schema_pb2 from feast.value_type import ValueType @@ -26,6 +29,22 @@ def __init__(self, name: str, dtype: ValueType): if not isinstance(dtype, ValueType): raise ValueError("dtype is not a valid ValueType") self._dtype = dtype + self._presence = None + self._group_presence = None + self._shape = None + self._value_count = None + self._domain = None + self._int_domain = None + self._float_domain = None + self._string_domain = None + self._bool_domain = None + self._struct_domain = None + self._natural_language_domain = None + self._image_domain = None + self._mid_domain = None + self._url_domain = None + self._time_domain = None + self._time_of_day_domain = None def __eq__(self, other): if self.name != other.name or self.dtype != other.dtype: @@ -46,6 +65,250 @@ def dtype(self) -> ValueType: """ return self._dtype + @property + def presence(self) -> schema_pb2.FeaturePresence: + return self._presence + + @presence.setter + def presence(self, presence: schema_pb2.FeaturePresence): + if not isinstance(presence, schema_pb2.FeaturePresence): + raise TypeError("presence must be of FeaturePresence type") + self._clear_presence_constraints() + self._presence = presence + + @property + def group_presence(self) -> schema_pb2.FeaturePresenceWithinGroup: + return self._group_presence + + @group_presence.setter + def group_presence(self, group_presence: schema_pb2.FeaturePresenceWithinGroup): + if not isinstance(group_presence, schema_pb2.FeaturePresenceWithinGroup): + raise TypeError("group_presence must be of FeaturePresenceWithinGroup type") + self._clear_presence_constraints() + self._group_presence = group_presence + + @property + def shape(self) -> schema_pb2.FixedShape: + return self._shape + + @shape.setter + def shape(self, shape: schema_pb2.FixedShape): + if not isinstance(shape, schema_pb2.FixedShape): + raise TypeError("shape must be of FixedShape type") + self._clear_shape_type() + self._shape = shape + + @property + def value_count(self) -> schema_pb2.ValueCount: + return self._value_count + + @value_count.setter + def value_count(self, value_count: schema_pb2.ValueCount): + if not isinstance(value_count, schema_pb2.ValueCount): + raise TypeError("value_count must be of ValueCount type") + self._clear_shape_type() + self._value_count = value_count + + @property + def domain(self) -> str: + return self._domain + + @domain.setter + def domain(self, domain: str): + if not isinstance(domain, str): + raise TypeError("domain must be of str type") + self._clear_domain_info() + self._domain = domain + + @property + def int_domain(self) -> schema_pb2.IntDomain: + return self._int_domain + + @int_domain.setter + def int_domain(self, int_domain: schema_pb2.IntDomain): + if not isinstance(int_domain, schema_pb2.IntDomain): + raise TypeError("int_domain must be of IntDomain type") + self._clear_domain_info() + self._int_domain = int_domain + + @property + def float_domain(self) -> schema_pb2.FloatDomain: + return self._float_domain + + @float_domain.setter + def float_domain(self, float_domain: schema_pb2.FloatDomain): + if not isinstance(float_domain, schema_pb2.FloatDomain): + raise TypeError("float_domain must be of FloatDomain type") + self._clear_domain_info() + self._float_domain = float_domain + + @property + def string_domain(self) -> schema_pb2.StringDomain: + return self._string_domain + + @string_domain.setter + def string_domain(self, string_domain: schema_pb2.StringDomain): + if not isinstance(string_domain, 
schema_pb2.StringDomain): + raise TypeError("string_domain must be of StringDomain type") + self._clear_domain_info() + self._string_domain = string_domain + + @property + def bool_domain(self) -> schema_pb2.BoolDomain: + return self._bool_domain + + @bool_domain.setter + def bool_domain(self, bool_domain: schema_pb2.BoolDomain): + if not isinstance(bool_domain, schema_pb2.BoolDomain): + raise TypeError("bool_domain must be of BoolDomain type") + self._clear_domain_info() + self._bool_domain = bool_domain + + @property + def struct_domain(self) -> schema_pb2.StructDomain: + return self._struct_domain + + @struct_domain.setter + def struct_domain(self, struct_domain: schema_pb2.StructDomain): + if not isinstance(struct_domain, schema_pb2.StructDomain): + raise TypeError("struct_domain must be of StructDomain type") + self._clear_domain_info() + self._struct_domain = struct_domain + + @property + def natural_language_domain(self) -> schema_pb2.NaturalLanguageDomain: + return self._natural_language_domain + + @natural_language_domain.setter + def natural_language_domain( + self, natural_language_domain: schema_pb2.NaturalLanguageDomain + ): + if not isinstance(natural_language_domain, schema_pb2.NaturalLanguageDomain): + raise TypeError( + "natural_language_domain must be of NaturalLanguageDomain type" + ) + self._clear_domain_info() + self._natural_language_domain = natural_language_domain + + @property + def image_domain(self) -> schema_pb2.ImageDomain: + return self._image_domain + + @image_domain.setter + def image_domain(self, image_domain: schema_pb2.ImageDomain): + if not isinstance(image_domain, schema_pb2.ImageDomain): + raise TypeError("image_domain must be of ImageDomain type") + self._clear_domain_info() + self._image_domain = image_domain + + @property + def mid_domain(self) -> schema_pb2.MIDDomain: + return self._mid_domain + + @mid_domain.setter + def mid_domain(self, mid_domain: schema_pb2.MIDDomain): + if not isinstance(mid_domain, schema_pb2.MIDDomain): + raise TypeError("mid_domain must be of MIDDomain type") + self._clear_domain_info() + self._mid_domain = mid_domain + + @property + def url_domain(self) -> schema_pb2.URLDomain: + return self._url_domain + + @url_domain.setter + def url_domain(self, url_domain: schema_pb2.URLDomain): + if not isinstance(url_domain, schema_pb2.URLDomain): + raise TypeError("url_domain must be of URLDomain type") + self._clear_domain_info() + self._url_domain = url_domain + + @property + def time_domain(self) -> schema_pb2.TimeDomain: + return self._time_domain + + @time_domain.setter + def time_domain(self, time_domain: schema_pb2.TimeDomain): + if not isinstance(time_domain, schema_pb2.TimeDomain): + raise TypeError("time_domain must be of TimeDomain type") + self._clear_domain_info() + self._time_domain = time_domain + + @property + def time_of_day_domain(self) -> schema_pb2.TimeOfDayDomain: + return self._time_of_day_domain + + @time_of_day_domain.setter + def time_of_day_domain(self, time_of_day_domain: schema_pb2.TimeOfDayDomain): + if not isinstance(time_of_day_domain, schema_pb2.TimeOfDayDomain): + raise TypeError("time_of_day_domain must be of TimeOfDayDomain type") + self._clear_domain_info() + self._time_of_day_domain = time_of_day_domain + + def update_presence_constraints(self, feature: schema_pb2.Feature): + presence_constraints_case = feature.WhichOneof("presence_constraints") + if presence_constraints_case == "presence": + self.presence = feature.presence + elif presence_constraints_case == "group_presence": + 
self.group_presence = feature.group_presence + + def update_shape_type(self, feature: schema_pb2.Feature): + shape_type_case = feature.WhichOneof("shape_type") + if shape_type_case == "shape": + self.shape = feature.shape + elif shape_type_case == "value_count": + self.value_count = feature.value_count + + def update_domain_info( + self, feature: schema_pb2.Feature, schema: schema_pb2.Schema = None + ): + domain_info_case = feature.WhichOneof("domain_info") + if domain_info_case == "domain": + domain_ref = feature.domain + if schema is None: + warnings.warn( + f"Schema is not provided so domain '{domain_ref}' cannot be " + f"referenced and domain for field '{self.name}' will not be updated." + ) + else: + domain_ref_to_string_domain = {d.name: d for d in schema.string_domain} + domain_ref_to_float_domain = {d.name: d for d in schema.float_domain} + domain_ref_to_int_domain = {d.name: d for d in schema.int_domain} + + if domain_ref in domain_ref_to_string_domain: + self.string_domain = domain_ref_to_string_domain[domain_ref] + elif domain_ref in domain_ref_to_float_domain: + self.float_domain = domain_ref_to_float_domain[domain_ref] + elif domain_ref in domain_ref_to_int_domain: + self.int_domain = domain_ref_to_int_domain[domain_ref] + else: + raise ValueError( + f"Reference to a domain '{domain_ref}' is missing in the schema. " + f"Please validate the schema in your Tensorflow metadata." + ) + elif domain_info_case == "int_domain": + self.int_domain = feature.int_domain + elif domain_info_case == "float_domain": + self.float_domain = feature.float_domain + elif domain_info_case == "string_domain": + self.string_domain = feature.string_domain + elif domain_info_case == "bool_domain": + self.bool_domain = feature.bool_domain + elif domain_info_case == "struct_domain": + self.struct_domain = feature.struct_domain + elif domain_info_case == "natural_language_domain": + self.natural_language_domain = feature.natural_language_domain + elif domain_info_case == "image_domain": + self.image_domain = feature.image_domain + elif domain_info_case == "mid_domain": + self.mid_domain = feature.mid_domain + elif domain_info_case == "url_domain": + self.url_domain = feature.url_domain + elif domain_info_case == "time_domain": + self.time_domain = feature.time_domain + elif domain_info_case == "time_of_day_domain": + self.time_of_day_domain = feature.time_of_day_domain + def to_proto(self): """ Unimplemented to_proto method for a field. This should be extended. @@ -57,3 +320,25 @@ def from_proto(self, proto): Unimplemented from_proto method for a field. This should be extended. 
""" pass + + def _clear_presence_constraints(self): + self._presence = None + self._group_presence = None + + def _clear_shape_type(self): + self._shape = None + self._value_count = None + + def _clear_domain_info(self): + self._domain = None + self._int_domain = None + self._float_domain = None + self._string_domain = None + self._bool_domain = None + self._struct_domain = None + self._natural_language_domain = None + self._image_domain = None + self._mid_domain = None + self._url_domain = None + self._time_domain = None + self._time_of_day_domain = None diff --git a/sdk/python/setup.py b/sdk/python/setup.py index 1617f83852..0a3b515bef 100644 --- a/sdk/python/setup.py +++ b/sdk/python/setup.py @@ -45,6 +45,7 @@ "numpy", "google", "confluent_kafka", + "tensorflow_data_validation==0.15.*", ] # README file from Feast repo root directory diff --git a/sdk/python/tests/data/tensorflow_metadata/schema_bikeshare.json b/sdk/python/tests/data/tensorflow_metadata/schema_bikeshare.json new file mode 100644 index 0000000000..e7a886053c --- /dev/null +++ b/sdk/python/tests/data/tensorflow_metadata/schema_bikeshare.json @@ -0,0 +1,136 @@ +{ + "feature": [ + { + "name": "location", + "type": "BYTES", + "domain": "location", + "presence": { + "minFraction": 1.0, + "minCount": "1" + }, + "shape": { + "dim": [ + { + "size": "1" + } + ] + } + }, + { + "name": "name", + "type": "BYTES", + "domain": "name", + "presence": { + "minFraction": 1.0, + "minCount": "1" + }, + "shape": { + "dim": [ + { + "size": "1" + } + ] + } + }, + { + "name": "status", + "type": "BYTES", + "domain": "status", + "presence": { + "minFraction": 1.0, + "minCount": "1" + }, + "shape": { + "dim": [ + { + "size": "1" + } + ] + } + }, + { + "name": "latitude", + "type": "FLOAT", + "float_domain": { + "min": 100.0, + "max": 105.0 + }, + "presence": { + "minFraction": 1.0, + "minCount": "1" + }, + "shape": { + "dim": [ + { + "size": "1" + } + ] + } + }, + { + "name": "longitude", + "type": "FLOAT", + "presence": { + "minFraction": 1.0, + "minCount": "1" + }, + "float_domain": { + "min": 102.0, + "max": 105.0 + }, + "shape": { + "dim": [ + { + "size": "1" + } + ] + } + }, + { + "name": "station_id", + "type": "INT", + "presence": { + "minFraction": 1.0, + "minCount": "1" + }, + "int_domain": { + "min": 1, + "max": 5000 + }, + "shape": { + "dim": [ + { + "size": "1" + } + ] + } + } + ], + "stringDomain": [ + { + "name": "location", + "value": [ + "(30.24258, -97.71726)", + "(30.24472, -97.72336)", + "(30.24891, -97.75019)" + ] + }, + { + "name": "name", + "value": [ + "10th & Red River", + "11th & Salina", + "11th & San Jacinto", + "13th & San Antonio", + "17th & Guadalupe" + ] + }, + { + "name": "status", + "value": [ + "active", + "closed" + ] + } + ] +} \ No newline at end of file diff --git a/sdk/python/tests/test_feature_set.py b/sdk/python/tests/test_feature_set.py index bdb58be683..776c8a4e50 100644 --- a/sdk/python/tests/test_feature_set.py +++ b/sdk/python/tests/test_feature_set.py @@ -11,21 +11,24 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import pathlib +from concurrent import futures from datetime import datetime +import grpc +import pandas as pd +import pytest import pytz +from google.protobuf import json_format +from tensorflow_metadata.proto.v0 import schema_pb2 +import dataframes +import feast.core.CoreService_pb2_grpc as Core +from feast.client import Client from feast.entity import Entity from feast.feature_set import FeatureSet, Feature from feast.value_type import ValueType -from feast.client import Client -import pandas as pd -import pytest -from concurrent import futures -import grpc from feast_core_server import CoreServicer -import feast.core.CoreService_pb2_grpc as Core -import dataframes CORE_URL = "core.feast.local" SERVING_URL = "serving.feast.local" @@ -169,7 +172,42 @@ def test_add_features_from_df_success( assert len(my_feature_set.features) == feature_count assert len(my_feature_set.entities) == entity_count def test_update_schema(self): - pass + test_data_folder = ( + pathlib.Path(__file__).parent / "data" / "tensorflow_metadata" + ) + schema_bikeshare = schema_pb2.Schema() + json_format.Parse( + open(test_data_folder / "schema_bikeshare.json").read(), schema_bikeshare + ) + feature_set_bikeshare = FeatureSet( + name="bikeshare", + entities=[Entity(name="station_id", dtype=ValueType.INT64),], + features=[ + Feature(name="name", dtype=ValueType.STRING), + Feature(name="status", dtype=ValueType.STRING), + Feature(name="latitude", dtype=ValueType.FLOAT), + Feature(name="longitude", dtype=ValueType.FLOAT), + Feature(name="location", dtype=ValueType.STRING), + ], + ) + # Before update + for entity in feature_set_bikeshare.entities: + assert entity.presence is None + for feature in feature_set_bikeshare.features: + assert feature.presence is None + + feature_set_bikeshare.update_schema(schema_bikeshare) - def test_get_schema(self): - pass + # After update + for entity in feature_set_bikeshare.entities: + assert entity.presence is not None + assert entity.shape is not None + for feature in feature_set_bikeshare.features: + assert feature.presence is not None + assert feature.shape is not None + if feature.name in ["location", "name", "status"]: + assert feature.string_domain is not None + elif feature.name in ["latitude", "longitude"]: + assert feature.float_domain is not None + elif feature.name in ["station_id"]: + assert feature.int_domain is not None From 08654c44f0e12ff154b2b27ca20a70a9a45d6262 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Thu, 30 Jan 2020 13:24:58 +0800 Subject: [PATCH 12/31] Update error message when domain ref is missing from top level schema --- sdk/python/feast/field.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/field.py b/sdk/python/feast/field.py index 660cf4acd5..b1e6f1448b 100644 --- a/sdk/python/feast/field.py +++ b/sdk/python/feast/field.py @@ -284,7 +284,9 @@ def update_domain_info( else: raise ValueError( f"Reference to a domain '{domain_ref}' is missing in the schema. " - f"Please validate the schema in your Tensorflow metadata." + f"Please check the string_domain, float_domain and int_domain " + f"fields in the schema of your Tensorflow metadata, making sure " + f"that the domain referenced exists." 
) elif domain_info_case == "int_domain": self.int_domain = feature.int_domain From c3b68f7038c7baf865fe69b57adf2cadd3973d4a Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Thu, 30 Jan 2020 13:29:12 +0800 Subject: [PATCH 13/31] Add more assertion in test_update_schema before updating schema --- sdk/python/tests/test_feature_set.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/sdk/python/tests/test_feature_set.py b/sdk/python/tests/test_feature_set.py index 776c8a4e50..687e08724e 100644 --- a/sdk/python/tests/test_feature_set.py +++ b/sdk/python/tests/test_feature_set.py @@ -193,8 +193,13 @@ def test_update_schema(self): # Before update for entity in feature_set_bikeshare.entities: assert entity.presence is None + assert entity.shape is None for feature in feature_set_bikeshare.features: assert feature.presence is None + assert feature.shape is None + assert feature.string_domain is None + assert feature.float_domain is None + assert feature.int_domain is None feature_set_bikeshare.update_schema(schema_bikeshare) From a831a8ce4030b307fca7e61d40102ebdbd6ec5bd Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Thu, 30 Jan 2020 14:00:51 +0800 Subject: [PATCH 14/31] Fix conflicting versions in package requirements --- sdk/python/requirements-ci.txt | 40 ++++++++++++++++----------------- sdk/python/setup.py | 41 +++++++++++++++++----------------- 2 files changed, 40 insertions(+), 41 deletions(-) diff --git a/sdk/python/requirements-ci.txt b/sdk/python/requirements-ci.txt index d0fdd76e49..c6890bb3f7 100644 --- a/sdk/python/requirements-ci.txt +++ b/sdk/python/requirements-ci.txt @@ -1,30 +1,30 @@ -Click==7.* -google-api-core==1.* -google-auth==1.* -google-cloud-bigquery==1.* -google-cloud-bigquery-storage==0.* -google-cloud-storage==1.* +Click>=7.* +google-api-core>=1.* +google-auth>=1.* +google-cloud-bigquery>=1.* +google-cloud-bigquery-storage>=0.* +google-cloud-storage>=1.* google-resumable-media>=0.5 -googleapis-common-protos==1.* -grpcio==1.* +googleapis-common-protos>=1.* +grpcio>=1.* numpy -mock==2.0.0 -pandas==0.* -protobuf==3.* +mock>=2.0.0 +pandas>=0.* +protobuf>=3.* pytest pytest-mock pytest-timeout -PyYAML==5.1.* -fastavro==0.* -grpcio-testing==1.* -pytest-ordering==0.6.* +PyYAML>=5.1.* +fastavro>=0.* +grpcio-testing>=1.26* +pytest-ordering>=0.6.* pyarrow Sphinx sphinx-rtd-theme -toml==0.10.* -tqdm==4.* +toml>=0.10.* +tqdm>=4.* confluent_kafka google -pandavro==1.5.* -kafka-python==1.* -tabulate==0.8.* \ No newline at end of file +pandavro>=1.5.* +kafka-python>=1.* +tabulate>=0.8.* \ No newline at end of file diff --git a/sdk/python/setup.py b/sdk/python/setup.py index 0a3b515bef..d87f655320 100644 --- a/sdk/python/setup.py +++ b/sdk/python/setup.py @@ -23,29 +23,28 @@ REQUIRES_PYTHON = ">=3.6.0" REQUIRED = [ - "Click==7.*", - "google-api-core==1.14.*", - "google-auth==1.6.*", - "google-cloud-bigquery==1.18.*", - "google-cloud-storage==1.20.*", - "google-cloud-core==1.0.*", - "googleapis-common-protos==1.*", - "google-cloud-bigquery-storage==0.7.*", - "grpcio==1.*", - "pandas==0.*", - "pandavro==1.5.*", + "Click>=7.*", + "google-api-core>=1.14.*", + "google-auth>=1.6.*", + "google-cloud-bigquery>=1.18.*", + "google-cloud-storage>=1.20.*", + "google-cloud-core>=1.0.*", + "googleapis-common-protos>=1.*", + "google-cloud-bigquery-storage>=0.7.*", + "grpcio>=1.*", + "pandas>=0.*", + "pandavro>=1.5.*", "protobuf>=3.10", - "PyYAML==5.1.*", - "fastavro==0.*", - "kafka-python==1.*", - "tabulate==0.8.*", - "toml==0.10.*", - "tqdm==4.*", + "PyYAML>=5.1.*", + "fastavro>=0.*", + 
"kafka-python>=1.*", + "tabulate>=0.8.*", + "toml>=0.10.*", + "tqdm>=4.*", "pyarrow>=0.15.1", - "numpy", - "google", - "confluent_kafka", - "tensorflow_data_validation==0.15.*", + "numpy>=1.15.0", + "confluent_kafka>=1.3.0", + "tensorflow_metadata>=0.21.0", ] # README file from Feast repo root directory From 7ef9ed6a5c06a5ebf65273d31eef46d6661f2ed9 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Thu, 30 Jan 2020 17:12:17 +0800 Subject: [PATCH 15/31] Check against NaN value in stats, count the occurence of NaN feature values --- .../transform/metrics/WriteRowMetricsDoFn.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java index dfba833c34..5339042699 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java @@ -211,6 +211,8 @@ public void processElement(ProcessContext c) { case INT64_LIST_VAL: case DOUBLE_LIST_VAL: case BOOL_LIST_VAL: + valueStat.accept(Double.NaN); + fieldNameToValueStat.put(fieldName, valueStat); break; case VAL_NOT_SET: Integer oldCount = fieldNameToMissingCount.get(fieldName); @@ -245,8 +247,15 @@ public void processElement(ProcessContext c) { INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName(), FEATURE_NAME_TAG_KEY + ":" + fieldName, }; - statsd.gauge("feature_value_min", valueStat.getMin(), tags); - statsd.gauge("feature_value_max", valueStat.getMax(), tags); + + // valueStat.getMin() or getMax() can return non finite values when there is no element + // or there is an element that is not a number. No metric should be sent in such case. 
+ if (Double.isFinite(valueStat.getMin())) { + statsd.gauge("feature_value_min", valueStat.getMin(), tags); + } + if (Double.isFinite(valueStat.getMax())) { + statsd.gauge("feature_value_max", valueStat.getMax(), tags); + } statsd.count("feature_value_presence_count", valueStat.getCount(), tags); } } From 9a1f24aba6abf50f5a91cd123d1d1a939cc0137c Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Fri, 31 Jan 2020 14:33:57 +0800 Subject: [PATCH 16/31] Add export_schema method to export schema from FeatureSet --- sdk/python/feast/feature_set.py | 47 ++++++++++++++++++++++++++++ sdk/python/tests/test_feature_set.py | 12 +++---- 2 files changed, 53 insertions(+), 6 deletions(-) diff --git a/sdk/python/feast/feature_set.py b/sdk/python/feast/feature_set.py index b7b00fea17..13a0428911 100644 --- a/sdk/python/feast/feature_set.py +++ b/sdk/python/feast/feature_set.py @@ -21,8 +21,10 @@ from google.protobuf import json_format from google.protobuf.duration_pb2 import Duration from google.protobuf.json_format import MessageToJson +from google.protobuf.message import Message from pandas.api.types import is_datetime64_ns_dtype from pyarrow.lib import TimestampType +from tensorflow_metadata.proto.v0 import schema_pb2 from tensorflow_metadata.proto.v0.schema_pb2 import Schema from feast.core.FeatureSet_pb2 import FeatureSet as FeatureSetProto @@ -695,6 +697,51 @@ def update_schema(self, schema: Schema): f"that does not exist in the FeatureSet '{self.name}' in Feast" ) + def export_schema(self) -> Schema: + schema = Schema() + for _, field in self._fields.items(): + # TODO: export type as well + feature = schema_pb2.Feature() + attributes_to_copy_from_field_to_feature = [ + "name", + "presence", + "group_presence", + "shape", + "value_count", + "domain", + "int_domain", + "float_domain", + "string_domain", + "bool_domain", + "struct_domain", + "_natural_language_domain", + "image_domain", + "mid_domain", + "url_domain", + "time_domain", + "time_of_day_domain", + ] + for attr in attributes_to_copy_from_field_to_feature: + if getattr(field, attr) is None: + continue + + if issubclass(type(getattr(feature, attr)), Message): + # Proto message field to copy is an embedded field, so MergeFrom() method must be used + getattr(feature, attr).MergeFrom(getattr(field, attr)) + elif issubclass(type(getattr(feature, attr)), (int, str, bool)): + # Proto message field is a simple Python type, so setattr() can be used + setattr(feature, attr, getattr(field, attr)) + else: + warnings.warn( + f"Attribute '{attr}' cannot be copied from Field " + f"'{field.name}' in FeatureSet '{self.name}' to a " + f"Feature in the Schema in Tensorflow metadata, because" + f"the type is neither a Protobuf message or Python " + f"int, str and bool" + ) + schema.feature.append(feature) + return schema + @classmethod def from_yaml(cls, yml: str): """ diff --git a/sdk/python/tests/test_feature_set.py b/sdk/python/tests/test_feature_set.py index 687e08724e..c328eb6c98 100644 --- a/sdk/python/tests/test_feature_set.py +++ b/sdk/python/tests/test_feature_set.py @@ -179,7 +179,7 @@ def test_update_schema(self): json_format.Parse( open(test_data_folder / "schema_bikeshare.json").read(), schema_bikeshare ) - feature_set_bikeshare = FeatureSet( + feature_set = FeatureSet( name="bikeshare", entities=[Entity(name="station_id", dtype=ValueType.INT64),], features=[ @@ -191,23 +191,23 @@ def test_update_schema(self): ], ) # Before update - for entity in feature_set_bikeshare.entities: + for entity in feature_set.entities: assert entity.presence is None 
assert entity.shape is None - for feature in feature_set_bikeshare.features: + for feature in feature_set.features: assert feature.presence is None assert feature.shape is None assert feature.string_domain is None assert feature.float_domain is None assert feature.int_domain is None - feature_set_bikeshare.update_schema(schema_bikeshare) + feature_set.update_schema(schema_bikeshare) # After update - for entity in feature_set_bikeshare.entities: + for entity in feature_set.entities: assert entity.presence is not None assert entity.shape is not None - for feature in feature_set_bikeshare.features: + for feature in feature_set.features: assert feature.presence is not None assert feature.shape is not None if feature.name in ["location", "name", "status"]: From 7d63a2d27bcf2a7d06f07d1a08347cfb1e5676db Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Fri, 31 Jan 2020 17:49:46 +0800 Subject: [PATCH 17/31] Fix statsd gauge argument when the value is negative --- .../metrics/WriteRowMetricsDoFn.java | 39 ++++++++++++++----- 1 file changed, 29 insertions(+), 10 deletions(-) diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java index 5339042699..00079125e1 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java @@ -248,12 +248,19 @@ public void processElement(ProcessContext c) { FEATURE_NAME_TAG_KEY + ":" + fieldName, }; - // valueStat.getMin() or getMax() can return non finite values when there is no element - // or there is an element that is not a number. No metric should be sent in such case. + // valueStat.getMin() or getMax() should only return finite values. + // (non-finite values can be returned if there is no element or there is an element that is + // not a number. No metric should be sent in such case) if (Double.isFinite(valueStat.getMin())) { + // Statsd gauge will asssign a delta instead of the actual value, if there is a sign in + // the value. For e.g. if the value is negative, a delta will be assigned. For this reason, + // the gauge value is set to zero beforehand. 
+ // https://github.com/statsd/statsd/blob/master/docs/metric_types.md#gauges + statsd.gauge("feature_value_min", 0, tags); statsd.gauge("feature_value_min", valueStat.getMin(), tags); } if (Double.isFinite(valueStat.getMax())) { + statsd.gauge("feature_value_max", 0, tags); statsd.gauge("feature_value_max", valueStat.getMax(), tags); } statsd.count("feature_value_presence_count", valueStat.getCount(), tags); @@ -272,26 +279,32 @@ private void writeConstraintMetrics(Map entityNameToSpec, EntitySpec entitySpec = entityNameToSpec.get(fieldName); if (entitySpec.getDomainInfoCase().equals(DomainInfoCase.INT_DOMAIN)) { IntDomain intDomain = entitySpec.getIntDomain(); + statsd.gauge("feature_value_domain_min", 0, tags); statsd.gauge("feature_value_domain_min", intDomain.getMin(), tags); + statsd.gauge("feature_value_domain_max", 0, tags); statsd.gauge("feature_value_domain_max", intDomain.getMax(), tags); } if (entitySpec.getPresenceConstraintsCase().equals(PresenceConstraintsCase.PRESENCE)) { FeaturePresence presence = entitySpec.getPresence(); - statsd.gauge("feature_presence_min_fraction", presence.getMinFraction(), tags); - statsd.gauge("feature_presence_min_count", presence.getMinCount(), tags); + statsd.gauge("feature_presence_min_fraction", Math.max(presence.getMinFraction(), 0), + tags); + statsd.gauge("feature_presence_min_count", Math.max(presence.getMinCount(), 0), tags); } } else if (featureNameToSpec.containsKey(fieldName)) { FeatureSpec featureSpec = featureNameToSpec.get(fieldName); if (featureSpec.getDomainInfoCase().equals(FeatureSpec.DomainInfoCase.INT_DOMAIN)) { IntDomain intDomain = featureSpec.getIntDomain(); + statsd.gauge("feature_value_domain_min", 0, tags); statsd.gauge("feature_value_domain_min", intDomain.getMin(), tags); + statsd.gauge("feature_value_domain_max", 0, tags); statsd.gauge("feature_value_domain_max", intDomain.getMax(), tags); } if (featureSpec.getPresenceConstraintsCase() .equals(FeatureSpec.PresenceConstraintsCase.PRESENCE)) { FeaturePresence presence = featureSpec.getPresence(); - statsd.gauge("feature_presence_min_fraction", presence.getMinFraction(), tags); - statsd.gauge("feature_presence_min_count", presence.getMinCount(), tags); + statsd.gauge("feature_presence_min_fraction", Math.max(presence.getMinFraction(), 0), + tags); + statsd.gauge("feature_presence_min_count", Math.max(presence.getMinCount(), 0), tags); } } break; @@ -301,26 +314,32 @@ private void writeConstraintMetrics(Map entityNameToSpec, EntitySpec entitySpec = entityNameToSpec.get(fieldName); if (entitySpec.getDomainInfoCase().equals(DomainInfoCase.FLOAT_DOMAIN)) { FloatDomain floatDomain = entitySpec.getFloatDomain(); + statsd.gauge("feature_value_domain_min", 0, tags); statsd.gauge("feature_value_domain_min", floatDomain.getMin(), tags); + statsd.gauge("feature_value_domain_max", 0, tags); statsd.gauge("feature_value_domain_max", floatDomain.getMax(), tags); } if (entitySpec.getPresenceConstraintsCase().equals(PresenceConstraintsCase.PRESENCE)) { FeaturePresence presence = entitySpec.getPresence(); - statsd.gauge("feature_presence_min_fraction", presence.getMinFraction(), tags); - statsd.gauge("feature_presence_min_count", presence.getMinCount(), tags); + statsd.gauge("feature_presence_min_fraction", Math.max(presence.getMinFraction(), 0), + tags); + statsd.gauge("feature_presence_min_count", Math.max(presence.getMinCount(), 0), tags); } } else if (featureNameToSpec.containsKey(fieldName)) { FeatureSpec featureSpec = featureNameToSpec.get(fieldName); if 
(featureSpec.getDomainInfoCase().equals(FeatureSpec.DomainInfoCase.FLOAT_DOMAIN)) { FloatDomain floatDomain = featureSpec.getFloatDomain(); + statsd.gauge("feature_value_domain_min", 0, tags); statsd.gauge("feature_value_domain_min", floatDomain.getMin(), tags); + statsd.gauge("feature_value_domain_max", 0, tags); statsd.gauge("feature_value_domain_max", floatDomain.getMax(), tags); } if (featureSpec.getPresenceConstraintsCase() .equals(FeatureSpec.PresenceConstraintsCase.PRESENCE)) { FeaturePresence presence = featureSpec.getPresence(); - statsd.gauge("feature_presence_min_fraction", presence.getMinFraction(), tags); - statsd.gauge("feature_presence_min_count", presence.getMinCount(), tags); + statsd.gauge("feature_presence_min_fraction", Math.max(presence.getMinFraction(), 0), + tags); + statsd.gauge("feature_presence_min_count", Math.max(presence.getMinCount(), 0), tags); } } break; From 571fa81c6710cf4c0b4b942e1519ccd87f7514ce Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Sun, 2 Feb 2020 11:47:19 +0800 Subject: [PATCH 18/31] Add exporting of Tensorflow metadata schema from FeatureSet. - Update documentation for properties in Field - Deduplication refactoring in FeatureSet --- sdk/python/feast/feature_set.py | 104 +++++++++--------- sdk/python/feast/field.py | 48 ++++++++ sdk/python/feast/loaders/yaml.py | 3 +- sdk/python/feast/value_type.py | 23 ++++ .../bikeshare_feature_set.yaml | 81 ++++++++++++++ ...a_bikeshare.json => bikeshare_schema.json} | 0 sdk/python/tests/test_feature_set.py | 63 +++++++++-- 7 files changed, 263 insertions(+), 59 deletions(-) create mode 100644 sdk/python/tests/data/tensorflow_metadata/bikeshare_feature_set.yaml rename sdk/python/tests/data/tensorflow_metadata/{schema_bikeshare.json => bikeshare_schema.json} (100%) diff --git a/sdk/python/feast/feature_set.py b/sdk/python/feast/feature_set.py index 13a0428911..9f0e7ec418 100644 --- a/sdk/python/feast/feature_set.py +++ b/sdk/python/feast/feature_set.py @@ -25,8 +25,8 @@ from pandas.api.types import is_datetime64_ns_dtype from pyarrow.lib import TimestampType from tensorflow_metadata.proto.v0 import schema_pb2 -from tensorflow_metadata.proto.v0.schema_pb2 import Schema +from feast.value_type import ValueType from feast.core.FeatureSet_pb2 import FeatureSet as FeatureSetProto from feast.core.FeatureSet_pb2 import FeatureSetMeta as FeatureSetMetaProto from feast.core.FeatureSet_pb2 import FeatureSetSpec as FeatureSetSpecProto @@ -662,84 +662,90 @@ def is_valid(self): if len(self.entities) == 0: raise ValueError(f"No entities found in feature set {self.name}") - def update_schema(self, schema: Schema): + def import_tfx_schema(self, schema: schema_pb2.Schema): """ - Updates presence_constraints, shape_type and domain_info for all entities - and features in the FeatureSet from schema in Tensorflow metadata. + Updates presence_constraints, shape_type and domain_info for all fields + (features and entities) in the FeatureSet from schema in the Tensorflow metadata. 
Args: - schema: schema from Tensorflow metadata + schema: Schema from Tensorflow metadata Returns: None """ - name_to_feature = {f.name: f for f in self.features} - name_to_entity = {e.name: e for e in self.entities} - - for feature_from_new_schema in schema.feature: - - if feature_from_new_schema.name in name_to_feature: - feature = name_to_feature[feature_from_new_schema.name] - feature.update_presence_constraints(feature_from_new_schema) - feature.update_shape_type(feature_from_new_schema) - feature.update_domain_info(feature_from_new_schema, schema) - - elif feature_from_new_schema.name in name_to_entity: - entity = name_to_entity[feature_from_new_schema.name] - entity.update_presence_constraints(feature_from_new_schema) - entity.update_shape_type(feature_from_new_schema) - entity.update_domain_info(feature_from_new_schema, schema) - + for feature_from_tfx_schema in schema.feature: + if feature_from_tfx_schema.name in self._fields.keys(): + field = self._fields[feature_from_tfx_schema.name] + field.update_presence_constraints(feature_from_tfx_schema) + field.update_shape_type(feature_from_tfx_schema) + field.update_domain_info(feature_from_tfx_schema, schema) else: warnings.warn( - f"The provided schema contains feature name '{feature_from_new_schema.name}' " + f"The provided schema contains feature name '{feature_from_tfx_schema.name}' " f"that does not exist in the FeatureSet '{self.name}' in Feast" ) - def export_schema(self) -> Schema: - schema = Schema() + def export_tfx_schema(self) -> schema_pb2.Schema: + """ + Create a Tensorflow metadata schema from a FeatureSet. + + Returns: + Tensorflow metadata schema. + + """ + schema = schema_pb2.Schema() + + # List of attributes to copy from fields in the FeatureSet to feature in + # Tensorflow metadata schema where the attribute name is the same. + attributes_to_copy_from_field_to_feature = [ + "name", + "presence", + "group_presence", + "shape", + "value_count", + "domain", + "int_domain", + "float_domain", + "string_domain", + "bool_domain", + "struct_domain", + "_natural_language_domain", + "image_domain", + "mid_domain", + "url_domain", + "time_domain", + "time_of_day_domain", + ] + for _, field in self._fields.items(): - # TODO: export type as well feature = schema_pb2.Feature() - attributes_to_copy_from_field_to_feature = [ - "name", - "presence", - "group_presence", - "shape", - "value_count", - "domain", - "int_domain", - "float_domain", - "string_domain", - "bool_domain", - "struct_domain", - "_natural_language_domain", - "image_domain", - "mid_domain", - "url_domain", - "time_domain", - "time_of_day_domain", - ] for attr in attributes_to_copy_from_field_to_feature: if getattr(field, attr) is None: + # This corresponds to an unset member in the proto Oneof field. continue - if issubclass(type(getattr(feature, attr)), Message): - # Proto message field to copy is an embedded field, so MergeFrom() method must be used + # Proto message field to copy is an "embedded" field, so MergeFrom() + # method must be used. getattr(feature, attr).MergeFrom(getattr(field, attr)) elif issubclass(type(getattr(feature, attr)), (int, str, bool)): - # Proto message field is a simple Python type, so setattr() can be used + # Proto message field is a simple Python type, so setattr() + # can be used. 
setattr(feature, attr, getattr(field, attr)) else: warnings.warn( f"Attribute '{attr}' cannot be copied from Field " f"'{field.name}' in FeatureSet '{self.name}' to a " - f"Feature in the Schema in Tensorflow metadata, because" + f"Feature in the Tensorflow metadata schema, because" f"the type is neither a Protobuf message or Python " f"int, str and bool" ) + # "type" attr is handled separately because the attribute name is different + # ("dtype" in field and "type" in Feature) and "type" in Feature is only + # a subset of "dtype". + feature.type = field.dtype.to_tfx_schema_feature_type() schema.feature.append(feature) + return schema @classmethod diff --git a/sdk/python/feast/field.py b/sdk/python/feast/field.py index b1e6f1448b..19a1d0d6a2 100644 --- a/sdk/python/feast/field.py +++ b/sdk/python/feast/field.py @@ -67,6 +67,9 @@ def dtype(self) -> ValueType: @property def presence(self) -> schema_pb2.FeaturePresence: + """ + Getter for presence of this field + """ return self._presence @presence.setter @@ -78,6 +81,9 @@ def presence(self, presence: schema_pb2.FeaturePresence): @property def group_presence(self) -> schema_pb2.FeaturePresenceWithinGroup: + """ + Getter for group_presence of this field + """ return self._group_presence @group_presence.setter @@ -89,6 +95,9 @@ def group_presence(self, group_presence: schema_pb2.FeaturePresenceWithinGroup): @property def shape(self) -> schema_pb2.FixedShape: + """ + Getter for shape of this field + """ return self._shape @shape.setter @@ -100,6 +109,9 @@ def shape(self, shape: schema_pb2.FixedShape): @property def value_count(self) -> schema_pb2.ValueCount: + """ + Getter for value_count of this field + """ return self._value_count @value_count.setter @@ -111,6 +123,9 @@ def value_count(self, value_count: schema_pb2.ValueCount): @property def domain(self) -> str: + """ + Getter for domain of this field + """ return self._domain @domain.setter @@ -122,6 +137,9 @@ def domain(self, domain: str): @property def int_domain(self) -> schema_pb2.IntDomain: + """ + Getter for int_domain of this field + """ return self._int_domain @int_domain.setter @@ -133,6 +151,9 @@ def int_domain(self, int_domain: schema_pb2.IntDomain): @property def float_domain(self) -> schema_pb2.FloatDomain: + """ + Getter for float_domain of this field + """ return self._float_domain @float_domain.setter @@ -144,6 +165,9 @@ def float_domain(self, float_domain: schema_pb2.FloatDomain): @property def string_domain(self) -> schema_pb2.StringDomain: + """ + Getter for string_domain of this field + """ return self._string_domain @string_domain.setter @@ -155,6 +179,9 @@ def string_domain(self, string_domain: schema_pb2.StringDomain): @property def bool_domain(self) -> schema_pb2.BoolDomain: + """ + Getter for bool_domain of this field + """ return self._bool_domain @bool_domain.setter @@ -166,6 +193,9 @@ def bool_domain(self, bool_domain: schema_pb2.BoolDomain): @property def struct_domain(self) -> schema_pb2.StructDomain: + """ + Getter for struct_domain of this field + """ return self._struct_domain @struct_domain.setter @@ -177,6 +207,9 @@ def struct_domain(self, struct_domain: schema_pb2.StructDomain): @property def natural_language_domain(self) -> schema_pb2.NaturalLanguageDomain: + """ + Getter for natural_language_domain of this field + """ return self._natural_language_domain @natural_language_domain.setter @@ -192,6 +225,9 @@ def natural_language_domain( @property def image_domain(self) -> schema_pb2.ImageDomain: + """ + Getter for image_domain of this field + """ return 
self._image_domain @image_domain.setter @@ -203,6 +239,9 @@ def image_domain(self, image_domain: schema_pb2.ImageDomain): @property def mid_domain(self) -> schema_pb2.MIDDomain: + """ + Getter for mid_domain of this field + """ return self._mid_domain @mid_domain.setter @@ -214,6 +253,9 @@ def mid_domain(self, mid_domain: schema_pb2.MIDDomain): @property def url_domain(self) -> schema_pb2.URLDomain: + """ + Getter for url_domain of this field + """ return self._url_domain @url_domain.setter @@ -225,6 +267,9 @@ def url_domain(self, url_domain: schema_pb2.URLDomain): @property def time_domain(self) -> schema_pb2.TimeDomain: + """ + Getter for time_domain of this field + """ return self._time_domain @time_domain.setter @@ -236,6 +281,9 @@ def time_domain(self, time_domain: schema_pb2.TimeDomain): @property def time_of_day_domain(self) -> schema_pb2.TimeOfDayDomain: + """ + Getter for time_of_day_domain of this field + """ return self._time_of_day_domain @time_of_day_domain.setter diff --git a/sdk/python/feast/loaders/yaml.py b/sdk/python/feast/loaders/yaml.py index 130a71a3d0..624bc47d49 100644 --- a/sdk/python/feast/loaders/yaml.py +++ b/sdk/python/feast/loaders/yaml.py @@ -57,7 +57,8 @@ def _get_yaml_contents(yml: str) -> str: yml_content = yml else: raise Exception( - f"Invalid YAML provided. Please provide either a file path or YAML string: ${yml}" + f"Invalid YAML provided. Please provide either a file path or YAML string.\n" + f"Provided YAML: {yml}" ) return yml_content diff --git a/sdk/python/feast/value_type.py b/sdk/python/feast/value_type.py index df315480ce..687dccc7b7 100644 --- a/sdk/python/feast/value_type.py +++ b/sdk/python/feast/value_type.py @@ -14,6 +14,8 @@ import enum +from tensorflow_metadata.proto.v0 import schema_pb2 + class ValueType(enum.Enum): """ @@ -35,3 +37,24 @@ class ValueType(enum.Enum): DOUBLE_LIST = 15 FLOAT_LIST = 16 BOOL_LIST = 17 + + def to_tfx_schema_feature_type(self) -> schema_pb2.FeatureType: + if self.value in [ + ValueType.BYTES.value, + ValueType.STRING.value, + ValueType.BOOL.value, + ValueType.BYTES_LIST.value, + ValueType.STRING_LIST.value, + ValueType.INT32_LIST.value, + ValueType.INT64_LIST.value, + ValueType.DOUBLE_LIST.value, + ValueType.FLOAT_LIST.value, + ValueType.BOOL_LIST.value, + ]: + return schema_pb2.FeatureType.BYTES + elif self.value in [ValueType.INT32.value, ValueType.INT64.value]: + return schema_pb2.FeatureType.INT + elif self.value in [ValueType.DOUBLE.value, ValueType.FLOAT.value]: + return schema_pb2.FeatureType.FLOAT + else: + return schema_pb2.FeatureType.TYPE_UNKNOWN diff --git a/sdk/python/tests/data/tensorflow_metadata/bikeshare_feature_set.yaml b/sdk/python/tests/data/tensorflow_metadata/bikeshare_feature_set.yaml new file mode 100644 index 0000000000..daa0a35f0a --- /dev/null +++ b/sdk/python/tests/data/tensorflow_metadata/bikeshare_feature_set.yaml @@ -0,0 +1,81 @@ +spec: + name: bikeshare + entities: + - name: station_id + valueType: INT64 + intDomain: + min: 1 + max: 5000 + presence: + minFraction: 1.0 + minCount: 1 + shape: + dim: + - size: 1 + features: + - name: location + valueType: STRING + stringDomain: + name: location + value: + - (30.24258, -97.71726) + - (30.24472, -97.72336) + - (30.24891, -97.75019) + presence: + minFraction: 1.0 + minCount: 1 + shape: + dim: + - size: 1 + - name: name + valueType: STRING + stringDomain: + name: name + value: + - 10th & Red River + - 11th & Salina + - 11th & San Jacinto + - 13th & San Antonio + - 17th & Guadalupe + presence: + minFraction: 1.0 + minCount: 1 + shape: + 
dim: + - size: 1 + - name: status + valueType: STRING + stringDomain: + name: status + value: + - "active" + - "closed" + presence: + minFraction: 1.0 + minCount: 1 + shape: + dim: + - size: 1 + - name: latitude + valueType: DOUBLE + floatDomain: + min: 100.0 + max: 105.0 + presence: + minFraction: 1.0 + minCount: 1 + shape: + dim: + - size: 1 + - name: longitude + valueType: DOUBLE + floatDomain: + min: 102.0 + max: 105.0 + presence: + minFraction: 1.0 + minCount: 1 + shape: + dim: + - size: 1 + maxAge: 3600s diff --git a/sdk/python/tests/data/tensorflow_metadata/schema_bikeshare.json b/sdk/python/tests/data/tensorflow_metadata/bikeshare_schema.json similarity index 100% rename from sdk/python/tests/data/tensorflow_metadata/schema_bikeshare.json rename to sdk/python/tests/data/tensorflow_metadata/bikeshare_schema.json diff --git a/sdk/python/tests/test_feature_set.py b/sdk/python/tests/test_feature_set.py index c328eb6c98..0aaac8fc81 100644 --- a/sdk/python/tests/test_feature_set.py +++ b/sdk/python/tests/test_feature_set.py @@ -171,14 +171,14 @@ def test_add_features_from_df_success( assert len(my_feature_set.features) == feature_count assert len(my_feature_set.entities) == entity_count - def test_update_schema(self): - test_data_folder = ( - pathlib.Path(__file__).parent / "data" / "tensorflow_metadata" - ) - schema_bikeshare = schema_pb2.Schema() - json_format.Parse( - open(test_data_folder / "schema_bikeshare.json").read(), schema_bikeshare - ) + def test_import_tfx_schema(self): + tests_folder = pathlib.Path(__file__).parent + test_input_schema_json = open( + tests_folder / "data" / "tensorflow_metadata" / "bikeshare_schema.json" + ).read() + test_input_schema = schema_pb2.Schema() + json_format.Parse(test_input_schema_json, test_input_schema) + feature_set = FeatureSet( name="bikeshare", entities=[Entity(name="station_id", dtype=ValueType.INT64),], @@ -190,6 +190,7 @@ def test_update_schema(self): Feature(name="location", dtype=ValueType.STRING), ], ) + # Before update for entity in feature_set.entities: assert entity.presence is None @@ -201,7 +202,7 @@ def test_update_schema(self): assert feature.float_domain is None assert feature.int_domain is None - feature_set.update_schema(schema_bikeshare) + feature_set.import_tfx_schema(test_input_schema) # After update for entity in feature_set.entities: @@ -216,3 +217,47 @@ def test_update_schema(self): assert feature.float_domain is not None elif feature.name in ["station_id"]: assert feature.int_domain is not None + + def test_export_tfx_schema(self): + tests_folder = pathlib.Path(__file__).parent + test_input_feature_set = FeatureSet.from_yaml( + str( + tests_folder + / "data" + / "tensorflow_metadata" + / "bikeshare_feature_set.yaml" + ) + ) + + expected_schema_json = open( + tests_folder / "data" / "tensorflow_metadata" / "bikeshare_schema.json" + ).read() + expected_schema = schema_pb2.Schema() + json_format.Parse(expected_schema_json, expected_schema) + make_tfx_schema_domain_info_inline(expected_schema) + + actual_schema = test_input_feature_set.export_tfx_schema() + + assert len(actual_schema.feature) == len(expected_schema.feature) + for actual, expected in zip(actual_schema.feature, expected_schema.feature): + assert actual.SerializeToString() == expected.SerializeToString() + + +def make_tfx_schema_domain_info_inline(schema): + # Copy top-level domain info defined in the schema to inline definition. + # One use case is in FeatureSet which does not have access to the top-level domain + # info. 
+ domain_ref_to_string_domain = {d.name: d for d in schema.string_domain} + domain_ref_to_float_domain = {d.name: d for d in schema.float_domain} + domain_ref_to_int_domain = {d.name: d for d in schema.int_domain} + + for feature in schema.feature: + domain_info_case = feature.WhichOneof("domain_info") + if domain_info_case == "domain": + domain_ref = feature.domain + if domain_ref in domain_ref_to_string_domain: + feature.string_domain.MergeFrom(domain_ref_to_string_domain[domain_ref]) + elif domain_ref in domain_ref_to_float_domain: + feature.float_domain.MergeFrom(domain_ref_to_float_domain[domain_ref]) + elif domain_ref in domain_ref_to_int_domain: + feature.int_domain.MergeFrom(domain_ref_to_int_domain[domain_ref]) From e8e02d4fb375588d0be475d1df515d5a63112217 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Mon, 3 Feb 2020 08:04:47 +0800 Subject: [PATCH 19/31] Add telegraf and prometheus installation to e2e test --- .prow/scripts/test-end-to-end.sh | 64 +++++++++++++++++++++++++++++--- 1 file changed, 58 insertions(+), 6 deletions(-) diff --git a/.prow/scripts/test-end-to-end.sh b/.prow/scripts/test-end-to-end.sh index 7e785fbfa9..daab56901e 100755 --- a/.prow/scripts/test-end-to-end.sh +++ b/.prow/scripts/test-end-to-end.sh @@ -34,7 +34,7 @@ echo " Installing Redis at localhost:6379 ============================================================ " -# Allow starting serving in this Maven Docker image. Default set to not allowed. +# Allow starting Linux services in this Maven Docker image. Default set to not allowed. echo "exit 0" > /usr/sbin/policy-rc.d apt-get -y install redis-server > /var/log/redis.install.log redis-server --daemonize yes @@ -63,15 +63,65 @@ Installing Kafka at localhost:9092 ============================================================ " wget -qO- https://www-eu.apache.org/dist/kafka/2.3.0/kafka_2.12-2.3.0.tgz | tar xz -mv kafka_2.12-2.3.0/ /tmp/kafka -nohup /tmp/kafka/bin/zookeeper-server-start.sh /tmp/kafka/config/zookeeper.properties &> /var/log/zookeeper.log 2>&1 & +mv kafka_2.12-2.3.0/ /opt/kafka +nohup /opt/kafka/bin/zookeeper-server-start.sh /opt/kafka/config/zookeeper.properties &> /var/log/zookeeper.log 2>&1 & sleep 5 tail -n10 /var/log/zookeeper.log -nohup /tmp/kafka/bin/kafka-server-start.sh /tmp/kafka/config/server.properties &> /var/log/kafka.log 2>&1 & +nohup /opt/kafka/bin/kafka-server-start.sh /opt/kafka/config/server.properties &> /var/log/kafka.log 2>&1 & sleep 20 tail -n10 /var/log/kafka.log kafkacat -b localhost:9092 -L +echo " +============================================================ +Installing Telegraf with StatsD input and Prometheus output +Installing Prometheus that scrapes metrics from Telegraf +============================================================ +" + +echo "Downloading Telegraf ..." +wget -O- https://dl.influxdata.com/telegraf/releases/telegraf-1.13.2_linux_amd64.tar.gz | tar xz && \ +mv telegraf /opt/telegraf + +cat < /tmp/telegraf.conf +[agent] + interval = "10s" + round_interval = true + +[[inputs.statsd]] + protocol = "udp" + service_address = ":8125" + datadog_extensions = true + delete_counters = false + +[[outputs.prometheus_client]] + listen = ":9273" + collectors_exclude = ["gocollector", "process"] +EOF +nohup /opt/telegraf/usr/bin/telegraf --config /tmp/telegraf.conf &> /var/log/telegraf.log & + +echo "Downloading Prometheus ..." 
+wget -qO- https://github.com/prometheus/prometheus/releases/download/v2.15.2/prometheus-2.15.2.linux-amd64.tar.gz | tar xz +mv prometheus-2.15.2.linux-amd64 /opt/prometheus + +cat < /tmp/prometheus.yml +global: + scrape_interval: 10s + evaluation_interval: 10s + +scrape_configs: +- job_name: "prometheus" + static_configs: + - targets: ["localhost:9273"] +EOF +nohup /opt/prometheus/prometheus --config.file=/tmp/prometheus.yml &> /var/log/prometheus.log & + +sleep 3 +echo -e "\nTelegraf logs:" +tail -n5 /var/log/telegraf.log +echo -e "\nPrometheus logs:" +tail -n5 /var/log/prometheus.log + if [[ ${SKIP_BUILD_JARS} != "true" ]]; then echo " ============================================================ @@ -111,7 +161,10 @@ feast: updates: timeoutSeconds: 240 metrics: - enabled: false + enabled: true + type: statsd + host: localhost + port: 8125 stream: type: kafka @@ -190,7 +243,6 @@ grpc: spring: main: web-environment: false - EOF nohup java -jar serving/target/feast-serving-*${JAR_VERSION_SUFFIX}.jar \ From a79847b54056503369ce4ff1444de89c66e98f70 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Mon, 3 Feb 2020 18:52:01 +0700 Subject: [PATCH 20/31] Add e2e test for metrics for ingestion of basic DataFrame --- ...rving.py => basic_ingest_redis_serving.py} | 257 ++++++++++++------ tests/e2e/conftest.py | 3 + 2 files changed, 171 insertions(+), 89 deletions(-) rename tests/e2e/{basic-ingest-redis-serving.py => basic_ingest_redis_serving.py} (69%) diff --git a/tests/e2e/basic-ingest-redis-serving.py b/tests/e2e/basic_ingest_redis_serving.py similarity index 69% rename from tests/e2e/basic-ingest-redis-serving.py rename to tests/e2e/basic_ingest_redis_serving.py index 1aeccfa5a3..ce1ae75ab8 100644 --- a/tests/e2e/basic-ingest-redis-serving.py +++ b/tests/e2e/basic_ingest_redis_serving.py @@ -1,47 +1,48 @@ -import pytest -import math +import os import random +import tempfile +import uuid +from datetime import datetime, timedelta + +import math +import numpy as np +import pandas as pd +import pytest +import pytz +import requests import time +from feast.client import Client from feast.entity import Entity +from feast.feature import Feature +from feast.feature_set import FeatureSet from feast.serving.ServingService_pb2 import ( GetOnlineFeaturesRequest, GetOnlineFeaturesResponse, ) -from feast.types.Value_pb2 import Value as Value -from feast.client import Client -from feast.feature_set import FeatureSet from feast.type_map import ValueType +from feast.types.Value_pb2 import Value as Value from google.protobuf.duration_pb2 import Duration -from datetime import datetime -import pytz - -import pandas as pd -import numpy as np -import tempfile -import os -from feast.feature import Feature -import uuid FLOAT_TOLERANCE = 0.00001 -PROJECT_NAME = 'basic_' + uuid.uuid4().hex.upper()[0:6] +PROJECT_NAME = "basic_" + uuid.uuid4().hex.upper()[0:6] + -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def core_url(pytestconfig): return pytestconfig.getoption("core_url") -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def serving_url(pytestconfig): return pytestconfig.getoption("serving_url") -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def allow_dirty(pytestconfig): - return True if pytestconfig.getoption( - "allow_dirty").lower() == "true" else False + return True if pytestconfig.getoption("allow_dirty").lower() == "true" else False -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def client(core_url, serving_url, allow_dirty): # Get client 
for core and serving client = Client(core_url=core_url, serving_url=serving_url) @@ -59,13 +60,12 @@ def client(core_url, serving_url, allow_dirty): return client -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def basic_dataframe(): offset = random.randint(1000, 100000) # ensure a unique key space is used return pd.DataFrame( { - "datetime": [datetime.utcnow().replace(tzinfo=pytz.utc) for _ in - range(5)], + "datetime": [datetime.utcnow().replace(tzinfo=pytz.utc) for _ in range(5)], "customer_id": [offset + inc for inc in range(5)], "daily_transactions": [np.random.rand() for _ in range(5)], "total_transactions": [512 for _ in range(5)], @@ -128,10 +128,7 @@ def test_basic_retrieve_online_success(client, basic_dataframe): } ) ], - feature_refs=[ - "daily_transactions", - "total_transactions", - ], + feature_refs=["daily_transactions", "total_transactions",], ) # type: GetOnlineFeaturesResponse if response is None: @@ -139,11 +136,10 @@ def test_basic_retrieve_online_success(client, basic_dataframe): returned_daily_transactions = float( response.field_values[0] - .fields[PROJECT_NAME + "/daily_transactions"] - .float_val + .fields[PROJECT_NAME + "/daily_transactions"] + .float_val ) - sent_daily_transactions = float( - basic_dataframe.iloc[0]["daily_transactions"]) + sent_daily_transactions = float(basic_dataframe.iloc[0]["daily_transactions"]) if math.isclose( sent_daily_transactions, @@ -153,18 +149,16 @@ def test_basic_retrieve_online_success(client, basic_dataframe): break -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def all_types_dataframe(): return pd.DataFrame( { - "datetime": [datetime.utcnow().replace(tzinfo=pytz.utc) for _ in - range(3)], + "datetime": [datetime.utcnow().replace(tzinfo=pytz.utc) for _ in range(3)], "user_id": [1001, 1002, 1003], "int32_feature": [np.int32(1), np.int32(2), np.int32(3)], "int64_feature": [np.int64(1), np.int64(2), np.int64(3)], "float_feature": [np.float(0.1), np.float(0.2), np.float(0.3)], - "double_feature": [np.float64(0.1), np.float64(0.2), - np.float64(0.3)], + "double_feature": [np.float64(0.1), np.float64(0.2), np.float64(0.3)], "string_feature": ["one", "two", "three"], "bytes_feature": [b"one", b"two", b"three"], "bool_feature": [True, False, False], @@ -226,8 +220,7 @@ def test_all_types_register_feature_set_success(client): Feature(name="float_list_feature", dtype=ValueType.FLOAT_LIST), Feature(name="int64_list_feature", dtype=ValueType.INT64_LIST), Feature(name="int32_list_feature", dtype=ValueType.INT32_LIST), - Feature(name="string_list_feature", - dtype=ValueType.STRING_LIST), + Feature(name="string_list_feature", dtype=ValueType.STRING_LIST), Feature(name="bytes_list_feature", dtype=ValueType.BYTES_LIST), ], max_age=Duration(seconds=3600), @@ -273,8 +266,11 @@ def test_all_types_retrieve_online_success(client, all_types_dataframe): response = client.get_online_features( entity_rows=[ GetOnlineFeaturesRequest.EntityRow( - fields={"user_id": Value( - int64_val=all_types_dataframe.iloc[0]["user_id"])} + fields={ + "user_id": Value( + int64_val=all_types_dataframe.iloc[0]["user_id"] + ) + } ) ], feature_refs=[ @@ -297,11 +293,10 @@ def test_all_types_retrieve_online_success(client, all_types_dataframe): if response is None: continue - returned_float_list = ( response.field_values[0] - .fields[PROJECT_NAME+"/float_list_feature"] - .float_list_val.val + .fields[PROJECT_NAME + "/float_list_feature"] + .float_list_val.val ) sent_float_list = all_types_dataframe.iloc[0]["float_list_feature"] @@ -312,15 
+307,14 @@ def test_all_types_retrieve_online_success(client, all_types_dataframe): break -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def large_volume_dataframe(): ROW_COUNT = 100000 offset = random.randint(1000000, 10000000) # ensure a unique key space customer_data = pd.DataFrame( { "datetime": [ - datetime.utcnow().replace(tzinfo=pytz.utc) for _ in - range(ROW_COUNT) + datetime.utcnow().replace(tzinfo=pytz.utc) for _ in range(ROW_COUNT) ], "customer_id": [offset + inc for inc in range(ROW_COUNT)], "daily_transactions_large": [np.random.rand() for _ in range(ROW_COUNT)], @@ -334,7 +328,8 @@ def large_volume_dataframe(): @pytest.mark.run(order=30) def test_large_volume_register_feature_set_success(client): cust_trans_fs_expected = FeatureSet.from_yaml( - "large_volume/cust_trans_large_fs.yaml") + "large_volume/cust_trans_large_fs.yaml" + ) # Register feature set client.apply(cust_trans_fs_expected) @@ -342,8 +337,7 @@ def test_large_volume_register_feature_set_success(client): # Feast Core needs some time to fully commit the FeatureSet applied # when there is no existing job yet for the Featureset time.sleep(10) - cust_trans_fs_actual = client.get_feature_set( - name="customer_transactions_large") + cust_trans_fs_actual = client.get_feature_set(name="customer_transactions_large") assert cust_trans_fs_actual == cust_trans_fs_expected @@ -378,16 +372,12 @@ def test_large_volume_retrieve_online_success(client, large_volume_dataframe): GetOnlineFeaturesRequest.EntityRow( fields={ "customer_id": Value( - int64_val=large_volume_dataframe.iloc[0][ - "customer_id"] + int64_val=large_volume_dataframe.iloc[0]["customer_id"] ) } ) ], - feature_refs=[ - "daily_transactions_large", - "total_transactions_large", - ], + feature_refs=["daily_transactions_large", "total_transactions_large",], ) # type: GetOnlineFeaturesResponse if response is None: @@ -395,11 +385,12 @@ def test_large_volume_retrieve_online_success(client, large_volume_dataframe): returned_daily_transactions = float( response.field_values[0] - .fields[PROJECT_NAME + "/daily_transactions_large"] - .float_val + .fields[PROJECT_NAME + "/daily_transactions_large"] + .float_val ) sent_daily_transactions = float( - large_volume_dataframe.iloc[0]["daily_transactions_large"]) + large_volume_dataframe.iloc[0]["daily_transactions_large"] + ) if math.isclose( sent_daily_transactions, @@ -409,49 +400,47 @@ def test_large_volume_retrieve_online_success(client, large_volume_dataframe): break -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def all_types_parquet_file(): COUNT = 20000 df = pd.DataFrame( { "datetime": [datetime.utcnow() for _ in range(COUNT)], - "customer_id": [np.int32(random.randint(0, 10000)) for _ in - range(COUNT)], - "int32_feature_parquet": [np.int32(random.randint(0, 10000)) for _ in - range(COUNT)], - "int64_feature_parquet": [np.int64(random.randint(0, 10000)) for _ in - range(COUNT)], + "customer_id": [np.int32(random.randint(0, 10000)) for _ in range(COUNT)], + "int32_feature_parquet": [ + np.int32(random.randint(0, 10000)) for _ in range(COUNT) + ], + "int64_feature_parquet": [ + np.int64(random.randint(0, 10000)) for _ in range(COUNT) + ], "float_feature_parquet": [np.float(random.random()) for _ in range(COUNT)], - "double_feature_parquet": [np.float64(random.random()) for _ in - range(COUNT)], - "string_feature_parquet": ["one" + str(random.random()) for _ in - range(COUNT)], + "double_feature_parquet": [ + np.float64(random.random()) for _ in range(COUNT) + ], + 
"string_feature_parquet": [ + "one" + str(random.random()) for _ in range(COUNT) + ], "bytes_feature_parquet": [b"one" for _ in range(COUNT)], "int32_list_feature_parquet": [ np.array([1, 2, 3, random.randint(0, 10000)], dtype=np.int32) - for _ - in range(COUNT) + for _ in range(COUNT) ], "int64_list_feature_parquet": [ np.array([1, random.randint(0, 10000), 3, 4], dtype=np.int64) - for _ - in range(COUNT) + for _ in range(COUNT) ], "float_list_feature_parquet": [ - np.array([1.1, 1.2, 1.3, random.random()], dtype=np.float32) for - _ - in range(COUNT) + np.array([1.1, 1.2, 1.3, random.random()], dtype=np.float32) + for _ in range(COUNT) ], "double_list_feature_parquet": [ - np.array([1.1, 1.2, 1.3, random.random()], dtype=np.float64) for - _ - in range(COUNT) + np.array([1.1, 1.2, 1.3, random.random()], dtype=np.float64) + for _ in range(COUNT) ], "string_list_feature_parquet": [ - np.array(["one", "two" + str(random.random()), "three"]) for _ - in - range(COUNT) + np.array(["one", "two" + str(random.random()), "three"]) + for _ in range(COUNT) ], "bytes_list_feature_parquet": [ np.array([b"one", b"two", b"three"]) for _ in range(COUNT) @@ -462,7 +451,7 @@ def all_types_parquet_file(): # TODO: Boolean list is not being tested. # https://github.com/gojek/feast/issues/341 - file_path = os.path.join(tempfile.mkdtemp(), 'all_types.parquet') + file_path = os.path.join(tempfile.mkdtemp(), "all_types.parquet") df.to_parquet(file_path, allow_truncated_timestamps=True) return file_path @@ -472,7 +461,8 @@ def all_types_parquet_file(): def test_all_types_parquet_register_feature_set_success(client): # Load feature set from file all_types_parquet_expected = FeatureSet.from_yaml( - "all_types_parquet/all_types_parquet.yaml") + "all_types_parquet/all_types_parquet.yaml" + ) # Register feature set client.apply(all_types_parquet_expected) @@ -496,11 +486,100 @@ def test_all_types_parquet_register_feature_set_success(client): @pytest.mark.timeout(600) @pytest.mark.run(order=41) -def test_all_types_infer_register_ingest_file_success(client, - all_types_parquet_file): +def test_all_types_infer_register_ingest_file_success(client, all_types_parquet_file): # Get feature set all_types_fs = client.get_feature_set(name="all_types_parquet") # Ingest user embedding data - client.ingest(feature_set=all_types_fs, source=all_types_parquet_file, - force_update=True) + client.ingest( + feature_set=all_types_fs, source=all_types_parquet_file, force_update=True + ) + + +@pytest.mark.run(order=42) +def test_basic_metrics(pytestconfig, basic_dataframe): + if not pytestconfig.getoption("prometheus_server_url"): + return + + project_name = PROJECT_NAME + feature_set_name = "customer_transactions" + + range_query_endpoint = ( + f"{pytestconfig.getoption('prometheus_server_url')}/api/v1/query_range" + ) + promql_queries = [ + "feast_ingestion_feature_value_min", + "feast_ingestion_feature_value_max", + "feast_ingestion_feature_value_domain_min", + "feast_ingestion_feature_value_domain_max", + "feast_ingestion_feature_value_presence_count", + "feast_ingestion_feature_value_missing_count", + "feast_ingestion_feature_presence_min_fraction", + "feast_ingestion_feature_presence_min_count", + ] + # "datetime" is the timestamp for the FeatureRow, not a feature in the DataFrame + feature_names = list(c for c in basic_dataframe.columns if c != "datetime") + + for query in promql_queries: + for feature_name in feature_names: + query_with_label_filter = f'{query}{{feast_feature_name="{feature_name}"}}' + resp = requests.post( + 
range_query_endpoint, + data={ + "query": query_with_label_filter, + "start": int((datetime.now() - timedelta(minutes=30)).timestamp()), + "end": int(datetime.now().timestamp()), + "step": "15s", + }, + ) + assert resp.status_code == 200 + for item in resp.json()["data"]["result"]: + metric = item["metric"] + values = item["values"] + + if ( + metric.get("feast_project_name", "") != project_name + or metric.get("feast_featureSet_name", "") != feature_set_name + ): + continue + + assert len(values) > 0 + # Values item in Prometheus is a tuple of (timestamp, value). + # Only last_value is tested here because the assertions are checking + # for the min, max and count and using the last values make the test + # more deterministic. + last_value_tuple = values[len(values) - 1] + assert len(last_value_tuple) == 2 + last_value = last_value_tuple[1] + + if query == "feast_ingestion_feature_value_min": + assert math.isclose( + float(last_value), + basic_dataframe[feature_name].min(), + abs_tol=FLOAT_TOLERANCE, + ) + elif query == "feast_ingestion_feature_value_max": + assert math.isclose( + float(last_value), + basic_dataframe[feature_name].max(), + abs_tol=FLOAT_TOLERANCE, + ) + elif query == "feast_ingestion_feature_value_domain_min": + # TODO: get from FeatureSetSpec + pass + elif query == "feast_ingestion_feature_value_domain_max": + # TODO: get from FeatureSetSpec + pass + # basic_dataframe has not UNSET values, hence the assertions + # for "feast_ingestion_feature_value_presence_count" and + # "feast_ingestion_feature_value_missing_count" + elif query == "feast_ingestion_feature_value_presence_count": + assert int(last_value) == basic_dataframe[feature_name].size + elif query == "feast_ingestion_feature_value_missing_count": + assert int(last_value) == 0 + elif query == "feast_ingestion_feature_presence_min_fraction": + # TODO: get from FeatureSetSpec + pass + elif query == "feast_ingestion_feature_presence_min_count": + # TODO: get from FeatureSetSpec + pass diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index 8ea472b662..96a9c377d6 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -3,3 +3,6 @@ def pytest_addoption(parser): parser.addoption("--serving_url", action="store", default="localhost:6566") parser.addoption("--allow_dirty", action="store", default="False") parser.addoption("--gcs_path", action="store", default="gs://feast-templocation-kf-feast/") + # If prometheus_server_url is not empty, then the e2e test will validate the + # prometheus metrics written by Feast. 
Example value: http://localhost:9090 + parser.addoption("--prometheus_server_url", action="store", default="") From 39f9c59130012caf11a54fb5075ff9c4d45de942 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Tue, 4 Feb 2020 15:45:31 +0700 Subject: [PATCH 21/31] Add tests for feature constraints metrics for basic dataframe --- tests/e2e/basic/cust_trans_fs.yaml | 27 +++++++++++++++++ tests/e2e/basic_ingest_redis_serving.py | 40 ++++++++++++++++++++----- 2 files changed, 59 insertions(+), 8 deletions(-) diff --git a/tests/e2e/basic/cust_trans_fs.yaml b/tests/e2e/basic/cust_trans_fs.yaml index 14d46794a6..f0041d3d6b 100644 --- a/tests/e2e/basic/cust_trans_fs.yaml +++ b/tests/e2e/basic/cust_trans_fs.yaml @@ -4,9 +4,36 @@ spec: entities: - name: customer_id valueType: INT64 + intDomain: + min: 1000 + max: 100005 + presence: + minFraction: 1.0 + minCount: 1 + shape: + dim: + - size: 1 features: - name: daily_transactions valueType: FLOAT + floatDomain: + min: 0 + max: 1 + presence: + minFraction: 1.0 + minCount: 1 + shape: + dim: + - size: 1 - name: total_transactions valueType: FLOAT + floatDomain: + min: 512 + max: 512 + presence: + minFraction: 1.0 + minCount: 1 + shape: + dim: + - size: 1 maxAge: 3600s diff --git a/tests/e2e/basic_ingest_redis_serving.py b/tests/e2e/basic_ingest_redis_serving.py index ce1ae75ab8..04badff728 100644 --- a/tests/e2e/basic_ingest_redis_serving.py +++ b/tests/e2e/basic_ingest_redis_serving.py @@ -501,6 +501,7 @@ def test_basic_metrics(pytestconfig, basic_dataframe): if not pytestconfig.getoption("prometheus_server_url"): return + feature_set = FeatureSet.from_yaml("basic/cust_trans_fs.yaml") project_name = PROJECT_NAME feature_set_name = "customer_transactions" @@ -565,11 +566,29 @@ def test_basic_metrics(pytestconfig, basic_dataframe): abs_tol=FLOAT_TOLERANCE, ) elif query == "feast_ingestion_feature_value_domain_min": - # TODO: get from FeatureSetSpec - pass + if feature_name == "customer_id": + assert ( + int(last_value) + == feature_set.fields[feature_name].int_domain.min + ) + elif feature_name in ["daily_transactions", "total_transactions"]: + assert math.isclose( + float(last_value), + feature_set.fields[feature_name].float_domain.min, + abs_tol=FLOAT_TOLERANCE, + ) elif query == "feast_ingestion_feature_value_domain_max": - # TODO: get from FeatureSetSpec - pass + if feature_name == "customer_id": + assert ( + int(last_value) + == feature_set.fields[feature_name].int_domain.max + ) + elif feature_name in ["daily_transactions", "total_transactions"]: + assert math.isclose( + float(last_value), + feature_set.fields[feature_name].float_domain.max, + abs_tol=FLOAT_TOLERANCE, + ) # basic_dataframe has not UNSET values, hence the assertions # for "feast_ingestion_feature_value_presence_count" and # "feast_ingestion_feature_value_missing_count" @@ -578,8 +597,13 @@ def test_basic_metrics(pytestconfig, basic_dataframe): elif query == "feast_ingestion_feature_value_missing_count": assert int(last_value) == 0 elif query == "feast_ingestion_feature_presence_min_fraction": - # TODO: get from FeatureSetSpec - pass + assert math.isclose( + float(last_value), + feature_set.fields[feature_name].presence.min_fraction, + abs_tol=FLOAT_TOLERANCE, + ) elif query == "feast_ingestion_feature_presence_min_count": - # TODO: get from FeatureSetSpec - pass + assert ( + int(last_value) + == feature_set.fields[feature_name].presence.min_count + ) From 67cf4e5f4dda73d218bb06f2abf9c893dc4c9648 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Tue, 4 Feb 2020 16:14:30 +0700 Subject: 
[PATCH 22/31] Fix incorrect name of test files --- .prow/scripts/test-end-to-end-batch.sh | 2 +- .prow/scripts/test-end-to-end.sh | 5 +++-- tests/e2e/basic_ingest_redis_serving.py | 7 ++++++- .../{bq-batch-retrieval.py => bigquery_batch_retrieval.py} | 0 4 files changed, 10 insertions(+), 4 deletions(-) rename tests/e2e/{bq-batch-retrieval.py => bigquery_batch_retrieval.py} (100%) diff --git a/.prow/scripts/test-end-to-end-batch.sh b/.prow/scripts/test-end-to-end-batch.sh index 2fb0bdb706..fe4ca56db6 100755 --- a/.prow/scripts/test-end-to-end-batch.sh +++ b/.prow/scripts/test-end-to-end-batch.sh @@ -254,7 +254,7 @@ ORIGINAL_DIR=$(pwd) cd tests/e2e set +e -pytest bq-batch-retrieval.py --gcs_path "gs://${TEMP_BUCKET}/" --junitxml=${LOGS_ARTIFACT_PATH}/python-sdk-test-report.xml +pytest bigquery_batch_retrieval.py --gcs_path "gs://${TEMP_BUCKET}/" --junitxml=${LOGS_ARTIFACT_PATH}/python-sdk-test-report.xml TEST_EXIT_CODE=$? if [[ ${TEST_EXIT_CODE} != 0 ]]; then diff --git a/.prow/scripts/test-end-to-end.sh b/.prow/scripts/test-end-to-end.sh index daab56901e..a1bc0a4119 100755 --- a/.prow/scripts/test-end-to-end.sh +++ b/.prow/scripts/test-end-to-end.sh @@ -259,7 +259,7 @@ Installing Python 3.7 with Miniconda and Feast SDK " # Install Python 3.7 with Miniconda wget -q https://repo.continuum.io/miniconda/Miniconda3-4.7.12-Linux-x86_64.sh \ - -O /tmp/miniconda.sh + -O /tmp/miniconda.sh bash /tmp/miniconda.sh -b -p /root/miniconda -f /root/miniconda/bin/conda init source ~/.bashrc @@ -280,7 +280,8 @@ ORIGINAL_DIR=$(pwd) cd tests/e2e set +e -pytest basic-ingest-redis-serving.py --junitxml=${LOGS_ARTIFACT_PATH}/python-sdk-test-report.xml +pytest basic_ingest_redis_serving.py --prometheus_server_url=http://localhost:9090 \ + --junitxml=${LOGS_ARTIFACT_PATH}/python-sdk-test-report.xml TEST_EXIT_CODE=$? 
if [[ ${TEST_EXIT_CODE} != 0 ]]; then diff --git a/tests/e2e/basic_ingest_redis_serving.py b/tests/e2e/basic_ingest_redis_serving.py index 04badff728..4dcb130f3c 100644 --- a/tests/e2e/basic_ingest_redis_serving.py +++ b/tests/e2e/basic_ingest_redis_serving.py @@ -22,6 +22,7 @@ from feast.type_map import ValueType from feast.types.Value_pb2 import Value as Value from google.protobuf.duration_pb2 import Duration +import warnings FLOAT_TOLERANCE = 0.00001 PROJECT_NAME = "basic_" + uuid.uuid4().hex.upper()[0:6] @@ -499,6 +500,10 @@ def test_all_types_infer_register_ingest_file_success(client, all_types_parquet_ @pytest.mark.run(order=42) def test_basic_metrics(pytestconfig, basic_dataframe): if not pytestconfig.getoption("prometheus_server_url"): + warnings.warn( + "Skipping 'test_basic_metrics' because 'prometheus_server_url' argument" + "is not provided" + ) return feature_set = FeatureSet.from_yaml("basic/cust_trans_fs.yaml") @@ -589,7 +594,7 @@ def test_basic_metrics(pytestconfig, basic_dataframe): feature_set.fields[feature_name].float_domain.max, abs_tol=FLOAT_TOLERANCE, ) - # basic_dataframe has not UNSET values, hence the assertions + # basic_dataframe has no UNSET values, hence the assertions # for "feast_ingestion_feature_value_presence_count" and # "feast_ingestion_feature_value_missing_count" elif query == "feast_ingestion_feature_value_presence_count": diff --git a/tests/e2e/bq-batch-retrieval.py b/tests/e2e/bigquery_batch_retrieval.py similarity index 100% rename from tests/e2e/bq-batch-retrieval.py rename to tests/e2e/bigquery_batch_retrieval.py From 305171f168de40b6499094ca59d4488aebc23f1a Mon Sep 17 00:00:00 2001 From: Iain Rauch Date: Mon, 3 Feb 2020 13:32:45 +0000 Subject: [PATCH 23/31] Helm Chart Upgrades Move prometheus-statsd-exporter to toggleable core dependency (default false). Add ingresses for gRPC and HTTP for both core and serving. Refactor ConfigMaps to user Spring profiles rather than manipulating the base application.yaml. Add ability to define and enable arbitrary Spring profiles. Add toggle to enable prometheus scraping in core. Add parameters to change LOG_LEVEL and LOG_TYPE (#430). Add parameter to specify GOOGLE_CLOUD_PROJECT. Allow jar path to be specified (e.g. if using non-standard image). Add missing documentation for Helm parameters. 
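For example, a release values file could toggle the new options roughly as follows.
This is a sketch only: the project ID, the profile name and the profile contents are
placeholders, and the authoritative list of keys is the README parameter table updated
in this patch.

    feast-core:
      prometheus:
        enabled: true              # add Prometheus scrape annotations
      prometheus-statsd-exporter:
        enabled: true              # now a toggleable core dependency (default false)
      logLevel: info
      logType: Console
      gcpProjectId: my-gcp-project # placeholder, sets GOOGLE_CLOUD_PROJECT
      springConfigProfilesActive: "extra-config"
      springConfigProfiles:
        extra-config: |
          # arbitrary Spring profile content goes here (placeholder)

    feast-serving-online:
      prometheus:
        enabled: true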
--- infra/charts/feast/README.md | 80 +++++++++++++++- .../prometheus-statsd-exporter/.helmignore | 0 .../prometheus-statsd-exporter/Chart.yaml | 0 .../prometheus-statsd-exporter/README.md | 0 .../templates/NOTES.txt | 0 .../templates/_helpers.tpl | 0 .../templates/config.yaml | 0 .../templates/deployment.yaml | 0 .../templates/pvc.yaml | 0 .../templates/service.yaml | 0 .../templates/serviceaccount.yaml | 0 .../prometheus-statsd-exporter/values.yaml | 0 .../feast/charts/feast-core/requirements.yaml | 8 +- .../charts/feast-core/templates/_ingress.yaml | 68 +++++++++++++ .../feast-core/templates/configmap.yaml | 45 ++++++--- .../feast-core/templates/deployment.yaml | 42 ++++++-- .../charts/feast-core/templates/ingress.yaml | 33 ++----- .../feast/charts/feast-core/values.yaml | 95 +++++++++++++++---- .../charts/feast-serving/requirements.yaml | 3 + .../feast-serving/templates/_helpers.tpl | 7 ++ .../feast-serving/templates/_ingress.yaml | 68 +++++++++++++ .../feast-serving/templates/configmap.yaml | 36 ++++--- .../feast-serving/templates/deployment.yaml | 30 ++++-- .../feast-serving/templates/ingress.yaml | 31 +----- .../feast/charts/feast-serving/values.yaml | 77 +++++++++++---- infra/charts/feast/requirements.lock | 16 +--- infra/charts/feast/requirements.yaml | 2 +- infra/charts/feast/values-demo.yaml | 17 +++- infra/charts/feast/values.yaml | 12 ++- 29 files changed, 510 insertions(+), 160 deletions(-) rename infra/charts/feast/charts/{ => feast-core/charts}/prometheus-statsd-exporter/.helmignore (100%) rename infra/charts/feast/charts/{ => feast-core/charts}/prometheus-statsd-exporter/Chart.yaml (100%) rename infra/charts/feast/charts/{ => feast-core/charts}/prometheus-statsd-exporter/README.md (100%) rename infra/charts/feast/charts/{ => feast-core/charts}/prometheus-statsd-exporter/templates/NOTES.txt (100%) rename infra/charts/feast/charts/{ => feast-core/charts}/prometheus-statsd-exporter/templates/_helpers.tpl (100%) rename infra/charts/feast/charts/{ => feast-core/charts}/prometheus-statsd-exporter/templates/config.yaml (100%) rename infra/charts/feast/charts/{ => feast-core/charts}/prometheus-statsd-exporter/templates/deployment.yaml (100%) rename infra/charts/feast/charts/{ => feast-core/charts}/prometheus-statsd-exporter/templates/pvc.yaml (100%) rename infra/charts/feast/charts/{ => feast-core/charts}/prometheus-statsd-exporter/templates/service.yaml (100%) rename infra/charts/feast/charts/{ => feast-core/charts}/prometheus-statsd-exporter/templates/serviceaccount.yaml (100%) rename infra/charts/feast/charts/{ => feast-core/charts}/prometheus-statsd-exporter/values.yaml (100%) create mode 100644 infra/charts/feast/charts/feast-core/templates/_ingress.yaml create mode 100644 infra/charts/feast/charts/feast-serving/templates/_ingress.yaml diff --git a/infra/charts/feast/README.md b/infra/charts/feast/README.md index ab5321ca86..e93b687f19 100644 --- a/infra/charts/feast/README.md +++ b/infra/charts/feast/README.md @@ -81,17 +81,26 @@ The following table lists the configurable parameters of the Feast chart and the | `feast-core.kafka.topics[0].name` | Default topic name in Kafka| `feast` | `feast-core.kafka.topics[0].replicationFactor` | No of replication factor for the topic| `1` | `feast-core.kafka.topics[0].partitions` | No of partitions for the topic | `1` +| `feast-core.prometheus-statsd-exporter.enabled` | Flag to install Prometheus StatsD Exporter | `false` +| `feast-core.prometheus-statsd-exporter.*` | Refer to this 
[link](charts/feast-core/charts/prometheus-statsd-exporter/values.yaml | | `feast-core.replicaCount` | No of pods to create | `1` | `feast-core.image.repository` | Repository for Feast Core Docker image | `gcr.io/kf-feast/feast-core` -| `feast-core.image.tag` | Tag for Feast Core Docker image | `0.3.2` +| `feast-core.image.tag` | Tag for Feast Core Docker image | `0.4.4` | `feast-core.image.pullPolicy` | Image pull policy for Feast Core Docker image | `IfNotPresent` +| `feast-core.prometheus.enabled` | Add annotations to enable Prometheus scraping | `false` | `feast-core.application.yaml` | Configuration for Feast Core application | Refer to this [link](charts/feast-core/values.yaml) | `feast-core.springConfigMountPath` | Directory to mount application.yaml | `/etc/feast/feast-core` | `feast-core.gcpServiceAccount.useExistingSecret` | Flag to use existing secret for GCP service account | `false` | `feast-core.gcpServiceAccount.existingSecret.name` | Secret name for the service account | `feast-gcp-service-account` | `feast-core.gcpServiceAccount.existingSecret.key` | Secret key for the service account | `key.json` | `feast-core.gcpServiceAccount.mountPath` | Directory to mount the JSON key file | `/etc/gcloud/service-accounts` +| `feast-core.gcpProjectId` | Project ID to set `GOOGLE_CLOUD_PROJECT` to change default project used by SDKs | `""` +| `feast-core.jarPath` | Path to Jar file in the Docker image | `/opt/feast/feast-core.jar` | `feast-core.jvmOptions` | Options for the JVM | `[]` +| `feast-core.logLevel` | Application logging level | `warn` +| `feast-core.logType` | Application logging type (`JSON` or `Console`) | `JSON` +| `feast-core.springConfigProfiles` | Map of profile name to file content for additional Spring profiles | `{}` +| `feast-core.springConfigProfilesActive` | CSV of profiles to enable from `springConfigProfiles` | `""` | `feast-core.livenessProbe.enabled` | Flag to enable liveness probe | `true` | `feast-core.livenessProbe.initialDelaySeconds` | Delay before liveness probe is initiated | `60` | `feast-core.livenessProbe.periodSeconds` | How often to perform the probe | `10` @@ -109,6 +118,7 @@ The following table lists the configurable parameters of the Feast chart and the | `feast-core.grpc.port` | Kubernetes Service port for GRPC request| `6565` | `feast-core.grpc.targetPort` | Container port for GRPC request| `6565` | `feast-core.resources` | CPU and memory allocation for the pod | `{}` +| `feast-core.ingress` | See *Ingress Parameters* [below](#ingress-parameters) | `{}` | `feast-serving-online.enabled` | Flag to install Feast Online Serving | `true` | `feast-serving-online.redis.enabled` | Flag to install Redis in Feast Serving | `false` | `feast-serving-online.redis.usePassword` | Flag to use password to access Redis | `false` @@ -116,8 +126,9 @@ The following table lists the configurable parameters of the Feast chart and the | `feast-serving-online.core.enabled` | Flag for Feast Serving to use Feast Core in the same Helm release | `true` | `feast-serving-online.replicaCount` | No of pods to create | `1` | `feast-serving-online.image.repository` | Repository for Feast Serving Docker image | `gcr.io/kf-feast/feast-serving` -| `feast-serving-online.image.tag` | Tag for Feast Serving Docker image | `0.3.2` +| `feast-serving-online.image.tag` | Tag for Feast Serving Docker image | `0.4.4` | `feast-serving-online.image.pullPolicy` | Image pull policy for Feast Serving Docker image | `IfNotPresent` +| `feast-serving-online.prometheus.enabled` | Add annotations to 
enable Prometheus scraping | `true` | `feast-serving-online.application.yaml` | Application configuration for Feast Serving | Refer to this [link](charts/feast-serving/values.yaml) | `feast-serving-online.store.yaml` | Store configuration for Feast Serving | Refer to this [link](charts/feast-serving/values.yaml) | `feast-serving-online.springConfigMountPath` | Directory to mount application.yaml and store.yaml | `/etc/feast/feast-serving` @@ -125,7 +136,13 @@ The following table lists the configurable parameters of the Feast chart and the | `feast-serving-online.gcpServiceAccount.existingSecret.name` | Secret name for the service account | `feast-gcp-service-account` | `feast-serving-online.gcpServiceAccount.existingSecret.key` | Secret key for the service account | `key.json` | `feast-serving-online.gcpServiceAccount.mountPath` | Directory to mount the JSON key file | `/etc/gcloud/service-accounts` +| `feast-serving-online.gcpProjectId` | Project ID to set `GOOGLE_CLOUD_PROJECT` to change default project used by SDKs | `""` +| `feast-serving-online.jarPath` | Path to Jar file in the Docker image | `/opt/feast/feast-serving.jar` | `feast-serving-online.jvmOptions` | Options for the JVM | `[]` +| `feast-serving-online.logLevel` | Application logging level | `warn` +| `feast-serving-online.logType` | Application logging type (`JSON` or `Console`) | `JSON` +| `feast-serving-online.springConfigProfiles` | Map of profile name to file content for additional Spring profiles | `{}` +| `feast-serving-online.springConfigProfilesActive` | CSV of profiles to enable from `springConfigProfiles` | `""` | `feast-serving-online.livenessProbe.enabled` | Flag to enable liveness probe | `true` | `feast-serving-online.livenessProbe.initialDelaySeconds` | Delay before liveness probe is initiated | `60` | `feast-serving-online.livenessProbe.periodSeconds` | How often to perform the probe | `10` @@ -143,6 +160,7 @@ The following table lists the configurable parameters of the Feast chart and the | `feast-serving-online.grpc.port` | Kubernetes Service port for GRPC request| `6566` | `feast-serving-online.grpc.targetPort` | Container port for GRPC request| `6566` | `feast-serving-online.resources` | CPU and memory allocation for the pod | `{}` +| `feast-serving-online.ingress` | See *Ingress Parameters* [below](#ingress-parameters) | `{}` | `feast-serving-batch.enabled` | Flag to install Feast Batch Serving | `true` | `feast-serving-batch.redis.enabled` | Flag to install Redis in Feast Serving | `false` | `feast-serving-batch.redis.usePassword` | Flag to use password to access Redis | `false` @@ -150,8 +168,9 @@ The following table lists the configurable parameters of the Feast chart and the | `feast-serving-batch.core.enabled` | Flag for Feast Serving to use Feast Core in the same Helm release | `true` | `feast-serving-batch.replicaCount` | No of pods to create | `1` | `feast-serving-batch.image.repository` | Repository for Feast Serving Docker image | `gcr.io/kf-feast/feast-serving` -| `feast-serving-batch.image.tag` | Tag for Feast Serving Docker image | `0.3.2` +| `feast-serving-batch.image.tag` | Tag for Feast Serving Docker image | `0.4.4` | `feast-serving-batch.image.pullPolicy` | Image pull policy for Feast Serving Docker image | `IfNotPresent` +| `feast-serving-batch.prometheus.enabled` | Add annotations to enable Prometheus scraping | `true` | `feast-serving-batch.application.yaml` | Application configuration for Feast Serving | Refer to this [link](charts/feast-serving/values.yaml) | 
`feast-serving-batch.store.yaml` | Store configuration for Feast Serving | Refer to this [link](charts/feast-serving/values.yaml) | `feast-serving-batch.springConfigMountPath` | Directory to mount application.yaml and store.yaml | `/etc/feast/feast-serving` @@ -159,7 +178,13 @@ The following table lists the configurable parameters of the Feast chart and the | `feast-serving-batch.gcpServiceAccount.existingSecret.name` | Secret name for the service account | `feast-gcp-service-account` | `feast-serving-batch.gcpServiceAccount.existingSecret.key` | Secret key for the service account | `key.json` | `feast-serving-batch.gcpServiceAccount.mountPath` | Directory to mount the JSON key file | `/etc/gcloud/service-accounts` +| `feast-serving-batch.gcpProjectId` | Project ID to set `GOOGLE_CLOUD_PROJECT` to change default project used by SDKs | `""` +| `feast-serving-batch.jarPath` | Path to Jar file in the Docker image | `/opt/feast/feast-serving.jar` | `feast-serving-batch.jvmOptions` | Options for the JVM | `[]` +| `feast-serving-batch.logLevel` | Application logging level | `warn` +| `feast-serving-batch.logType` | Application logging type (`JSON` or `Console`) | `JSON` +| `feast-serving-batch.springConfigProfiles` | Map of profile name to file content for additional Spring profiles | `{}` +| `feast-serving-batch.springConfigProfilesActive` | CSV of profiles to enable from `springConfigProfiles` | `""` | `feast-serving-batch.livenessProbe.enabled` | Flag to enable liveness probe | `true` | `feast-serving-batch.livenessProbe.initialDelaySeconds` | Delay before liveness probe is initiated | `60` | `feast-serving-batch.livenessProbe.periodSeconds` | How often to perform the probe | `10` @@ -176,4 +201,51 @@ The following table lists the configurable parameters of the Feast chart and the | `feast-serving-batch.http.targetPort` | Container port for HTTP request | `8080` | `feast-serving-batch.grpc.port` | Kubernetes Service port for GRPC request| `6566` | `feast-serving-batch.grpc.targetPort` | Container port for GRPC request| `6566` -| `feast-serving-batch.resources` | CPU and memory allocation for the pod | `{}` \ No newline at end of file +| `feast-serving-batch.resources` | CPU and memory allocation for the pod | `{}` +| `feast-serving-batch.ingress` | See *Ingress Parameters* [below](#ingress-parameters) | `{}` + +## Ingress Parameters + +The following table lists the configurable parameters of the ingress section for each Feast module. + +Note, there are two ingresses available for each module - `grpc` and `http`. + +| Parameter | Description | Default +| ----------------------------- | ----------- | ------- +| `ingress.grpc.enabled` | Enables an ingress (endpoint) for the gRPC server | `false` +| `ingress.grpc.*` | See below | +| `ingress.http.enabled` | Enables an ingress (endpoint) for the HTTP server | `false` +| `ingress.http.*` | See below | +| `ingress.*.class` | Value for `kubernetes.io/ingress.class` | `nginx` +| `ingress.*.hosts` | List of host-names for the ingress | `[]` +| `ingress.*.annotations` | Additional ingress annotations | `{}` +| `ingress.*.https.enabled` | Add a tls section to the ingress | `true` +| `ingress.*.https.secretNames` | Map of hostname to TLS secret name | `{}` If not specified, defaults to `domain-tld-tls` e.g.
`feast.example.com` uses secret `example-com-tls` +| `ingress.*.auth.enabled` | Enable auth on the ingress (only applicable for `nginx` type) | `false` +| `ingress.*.auth.signinHost` | External hostname of the OAuth2 proxy to use | First item in `ingress.hosts`, replacing the sub-domain with 'auth' e.g. `feast.example.com` uses `auth.example.com` +| `ingress.*.auth.authUrl` | Internal URI to internal auth endpoint | `http://auth-server.auth-ns.svc.cluster.local/auth` +| `ingress.*.whitelist` | Subnet masks to whitelist (i.e. value for `nginx.ingress.kubernetes.io/whitelist-source-range`) | `""` + +To enable all the ingresses, use a config like the following (while also adding the hosts etc.): + +```yaml +feast-core: + ingress: + grpc: + enabled: true + http: + enabled: true +feast-serving-online: + ingress: + grpc: + enabled: true + http: + enabled: true +feast-serving-batch: + ingress: + grpc: + enabled: true + http: + enabled: true +``` + diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/.helmignore b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/.helmignore similarity index 100% rename from infra/charts/feast/charts/prometheus-statsd-exporter/.helmignore rename to infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/.helmignore diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/Chart.yaml b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/Chart.yaml similarity index 100% rename from infra/charts/feast/charts/prometheus-statsd-exporter/Chart.yaml rename to infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/Chart.yaml diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/README.md b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/README.md similarity index 100% rename from infra/charts/feast/charts/prometheus-statsd-exporter/README.md rename to infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/README.md diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/templates/NOTES.txt b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/NOTES.txt similarity index 100% rename from infra/charts/feast/charts/prometheus-statsd-exporter/templates/NOTES.txt rename to infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/NOTES.txt diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/templates/_helpers.tpl b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/_helpers.tpl similarity index 100% rename from infra/charts/feast/charts/prometheus-statsd-exporter/templates/_helpers.tpl rename to infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/_helpers.tpl diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/templates/config.yaml b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/config.yaml similarity index 100% rename from infra/charts/feast/charts/prometheus-statsd-exporter/templates/config.yaml rename to infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/config.yaml diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/templates/deployment.yaml b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/deployment.yaml similarity index 100% rename from infra/charts/feast/charts/prometheus-statsd-exporter/templates/deployment.yaml rename to
infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/deployment.yaml diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/templates/pvc.yaml b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/pvc.yaml similarity index 100% rename from infra/charts/feast/charts/prometheus-statsd-exporter/templates/pvc.yaml rename to infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/pvc.yaml diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/templates/service.yaml b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/service.yaml similarity index 100% rename from infra/charts/feast/charts/prometheus-statsd-exporter/templates/service.yaml rename to infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/service.yaml diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/templates/serviceaccount.yaml b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/serviceaccount.yaml similarity index 100% rename from infra/charts/feast/charts/prometheus-statsd-exporter/templates/serviceaccount.yaml rename to infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/serviceaccount.yaml diff --git a/infra/charts/feast/charts/prometheus-statsd-exporter/values.yaml b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/values.yaml similarity index 100% rename from infra/charts/feast/charts/prometheus-statsd-exporter/values.yaml rename to infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/values.yaml diff --git a/infra/charts/feast/charts/feast-core/requirements.yaml b/infra/charts/feast/charts/feast-core/requirements.yaml index efe9fec508..ef1e39a7d0 100644 --- a/infra/charts/feast/charts/feast-core/requirements.yaml +++ b/infra/charts/feast/charts/feast-core/requirements.yaml @@ -6,4 +6,10 @@ dependencies: - name: kafka version: 0.20.1 repository: "@incubator" - condition: kafka.enabled \ No newline at end of file + condition: kafka.enabled +- name: common + version: 0.0.5 + repository: "@incubator" +- name: prometheus-statsd-exporter + version: 0.1.2 + condition: prometheus-statsd-exporter.enabled \ No newline at end of file diff --git a/infra/charts/feast/charts/feast-core/templates/_ingress.yaml b/infra/charts/feast/charts/feast-core/templates/_ingress.yaml new file mode 100644 index 0000000000..5bed6df047 --- /dev/null +++ b/infra/charts/feast/charts/feast-core/templates/_ingress.yaml @@ -0,0 +1,68 @@ +{{- /* +This takes an array of three values: +- the top context +- the feast component +- the service protocol +- the ingress context +*/ -}} +{{- define "feast.ingress" -}} +{{- $top := (index . 0) -}} +{{- $component := (index . 1) -}} +{{- $protocol := (index . 2) -}} +{{- $ingressValues := (index . 3) -}} +apiVersion: extensions/v1beta1 +kind: Ingress +{{ include "feast.ingress.metadata" . }} +spec: + rules: + {{- range $host := $ingressValues.hosts }} + - host: {{ $host }} + http: + paths: + - path: / + backend: + serviceName: {{ include (printf "feast-%s.fullname" $component) $top }} + servicePort: {{ index $top.Values "service" $protocol "port" }} + {{- end }} +{{- if $ingressValues.https.enabled }} + tls: + {{- range $host := $ingressValues.hosts }} + - secretName: {{ index $ingressValues.https.secretNames $host | default (splitList "." 
$host | rest | join "-" | printf "%s-tls") }} + hosts: + - {{ $host }} + {{- end }} +{{- end -}} +{{- end -}} + +{{- define "feast.ingress.metadata" -}} +{{- $commonMetadata := fromYaml (include "common.metadata" (first .)) }} +{{- $overrides := fromYaml (include "feast.ingress.metadata-overrides" .) -}} +{{- toYaml (merge $overrides $commonMetadata) -}} +{{- end -}} + +{{- define "feast.ingress.metadata-overrides" -}} +{{- $top := (index . 0) -}} +{{- $component := (index . 1) -}} +{{- $protocol := (index . 2) -}} +{{- $ingressValues := (index . 3) -}} +{{- $commonFullname := include "common.fullname" $top }} +metadata: + name: {{ $commonFullname }}-{{ $component }}-{{ $protocol }} + annotations: + kubernetes.io/ingress.class: {{ $ingressValues.class | quote }} + {{- if (and (eq $ingressValues.class "nginx") $ingressValues.auth.enabled) }} + nginx.ingress.kubernetes.io/auth-url: {{ $ingressValues.auth.authUrl | quote }} + nginx.ingress.kubernetes.io/auth-response-headers: "x-auth-request-email, x-auth-request-user" + nginx.ingress.kubernetes.io/auth-signin: "https://{{ $ingressValues.auth.signinHost | default (splitList "." (index $ingressValues.hosts 0) | rest | join "." | printf "auth.%s")}}/oauth2/start?rd=/r/$host/$request_uri" + {{- end }} + {{- if (and (eq $ingressValues.class "nginx") $ingressValues.whitelist) }} + nginx.ingress.kubernetes.io/whitelist-source-range: {{ $ingressValues.whitelist | quote -}} + {{- end }} + {{- if (and (eq $ingressValues.class "nginx") (eq $protocol "grpc") ) }} + # TODO: Allow choice of GRPC/GRPCS + nginx.ingress.kubernetes.io/backend-protocol: "GRPC" + {{- end }} + {{- if $ingressValues.annotations -}} + {{ include "common.annote" $ingressValues.annotations | indent 4 }} + {{- end }} +{{- end -}} diff --git a/infra/charts/feast/charts/feast-core/templates/configmap.yaml b/infra/charts/feast/charts/feast-core/templates/configmap.yaml index 68dc45c057..da45cad5bd 100644 --- a/infra/charts/feast/charts/feast-core/templates/configmap.yaml +++ b/infra/charts/feast/charts/feast-core/templates/configmap.yaml @@ -11,22 +11,43 @@ metadata: heritage: {{ .Release.Service }} data: application.yaml: | -{{- $config := index .Values "application.yaml"}} +{{- toYaml (index .Values "application.yaml") | nindent 4 }} {{- if .Values.postgresql.enabled }} -{{- $datasource := dict "url" (printf "jdbc:postgresql://%s:%s/%s" (printf "%s-postgresql" .Release.Name) (.Values.postgresql.service.port | toString) (.Values.postgresql.postgresqlDatabase)) "driverClassName" "org.postgresql.Driver" }} -{{- $newConfig := dict "spring" (dict "datasource" $datasource) }} -{{- $config := mergeOverwrite $config $newConfig }} + application-bundled-postgresql.yaml: | + spring: + datasource: + url: {{ printf "jdbc:postgresql://%s:%s/%s" (printf "%s-postgresql" .Release.Name) (.Values.postgresql.service.port | toString) (.Values.postgresql.postgresqlDatabase) }} + driverClassName: org.postgresql.Driver {{- end }} -{{- if .Values.kafka.enabled }} -{{- $topic := index .Values.kafka.topics 0 }} -{{- $options := dict "topic" $topic.name "replicationFactor" $topic.replicationFactor "partitions" $topic.partitions }} -{{- if not .Values.kafka.external.enabled }} -{{- $_ := set $options "bootstrapServers" (printf "%s:9092" (printf "%s-kafka" .Release.Name)) }} +{{ if .Values.kafka.enabled }} + {{- $topic := index .Values.kafka.topics 0 }} + application-bundled-kafka.yaml: | + feast: + stream: + type: kafka + options: + topic: {{ $topic.name | quote }} + replicationFactor: {{ $topic.replicationFactor 
}} + partitions: {{ $topic.partitions }} + {{- if not .Values.kafka.external.enabled }} + bootstrapServers: {{ printf "%s:9092" (printf "%s-kafka" .Release.Name) }} + {{- end }} {{- end }} -{{- $newConfig := dict "feast" (dict "stream" (dict "type" "kafka" "options" $options))}} -{{- $config := mergeOverwrite $config $newConfig }} + +{{- if (index .Values "prometheus-statsd-exporter" "enabled" )}} + application-bundled-statsd.yaml: | + feast: + jobs: + metrics: + enabled: true + type: statsd + host: prometheus-statsd-exporter + port: 9125 {{- end }} -{{- toYaml $config | nindent 4 }} +{{- range $name, $content := .Values.springConfigProfiles }} + application-{{ $name }}.yaml: | +{{- toYaml $content | nindent 4 }} +{{- end }} diff --git a/infra/charts/feast/charts/feast-core/templates/deployment.yaml b/infra/charts/feast/charts/feast-core/templates/deployment.yaml index 0671d9574b..df834b6749 100644 --- a/infra/charts/feast/charts/feast-core/templates/deployment.yaml +++ b/infra/charts/feast/charts/feast-core/templates/deployment.yaml @@ -18,6 +18,13 @@ spec: release: {{ .Release.Name }} template: metadata: + {{- if .Values.prometheus.enabled }} + annotations: + {{ $config := index .Values "application.yaml" }} + prometheus.io/path: /metrics + prometheus.io/port: "{{ $config.server.port }}" + prometheus.io/scrape: "true" + {{- end }} labels: app: {{ template "feast-core.name" . }} component: core @@ -42,7 +49,7 @@ spec: - name: {{ .Chart.Name }} image: '{{ .Values.image.repository }}:{{ required "No .image.tag found. This must be provided as input." .Values.image.tag }}' imagePullPolicy: {{ .Values.image.pullPolicy }} - + volumeMounts: - name: {{ template "feast-core.fullname" . }}-config mountPath: "{{ .Values.springConfigMountPath }}" @@ -53,31 +60,48 @@ spec: {{- end }} env: + - name: LOG_TYPE + value: {{ .Values.logType | quote }} + - name: LOG_LEVEL + value: {{ .Values.logLevel | quote }} + {{- if .Values.postgresql.enabled }} - name: SPRING_DATASOURCE_USERNAME - value: {{ .Values.postgresql.postgresqlUsername }} + value: {{ .Values.postgresql.postgresqlUsername | quote }} - name: SPRING_DATASOURCE_PASSWORD - value: {{ .Values.postgresql.postgresqlPassword }} + value: {{ .Values.postgresql.postgresqlPassword | quote }} {{- end }} {{- if .Values.gcpServiceAccount.useExistingSecret }} - name: GOOGLE_APPLICATION_CREDENTIALS value: {{ .Values.gcpServiceAccount.mountPath }}/{{ .Values.gcpServiceAccount.existingSecret.key }} {{- end }} + {{- if .Values.gcpProjectId }} + - name: GOOGLE_CLOUD_PROJECT + value: {{ .Values.gcpProjectId | quote }} + {{- end }} command: - java {{- range .Values.jvmOptions }} - - {{ . }} + - {{ . 
| quote }} + {{- end }} + - -jar + - {{ .Values.jarPath | quote }} + - "--spring.config.location=file:{{ .Values.springConfigMountPath }}/" + {{- $profilesArray := splitList "," .Values.springConfigProfilesActive -}} + {{- $profilesArray = append $profilesArray (.Values.postgresql.enabled | ternary "bundled-postgresql" "") -}} + {{- $profilesArray = append $profilesArray (.Values.kafka.enabled | ternary "bundled-kafka" "") -}} + {{- $profilesArray = append $profilesArray (index .Values "prometheus-statsd-exporter" "enabled" | ternary "bundled-statsd" "") -}} + {{- $profilesArray = compact $profilesArray -}} + {{- if $profilesArray }} + - "--spring.profiles.active={{ join "," $profilesArray }}" {{- end }} - - -jar - - /opt/feast/feast-core.jar - - "--spring.config.location=file:{{ .Values.springConfigMountPath }}/application.yaml" ports: - name: http containerPort: {{ .Values.service.http.targetPort }} - - name: grpc + - name: grpc containerPort: {{ .Values.service.grpc.targetPort }} {{- if .Values.livenessProbe.enabled }} @@ -103,6 +127,6 @@ spec: timeoutSeconds: {{ .Values.readinessProbe.timeoutSeconds }} failureThreshold: {{ .Values.readinessProbe.failureThreshold }} {{- end }} - + resources: {{- toYaml .Values.resources | nindent 10 }} diff --git a/infra/charts/feast/charts/feast-core/templates/ingress.yaml b/infra/charts/feast/charts/feast-core/templates/ingress.yaml index 86fc2d3f17..7f453e1a75 100644 --- a/infra/charts/feast/charts/feast-core/templates/ingress.yaml +++ b/infra/charts/feast/charts/feast-core/templates/ingress.yaml @@ -1,28 +1,7 @@ -{{- if .Values.ingress.enabled -}} -{{- $fullName := include "feast-core.fullname" . -}} -apiVersion: extensions/v1beta1 -kind: Ingress -metadata: - name: {{ $fullName }} - labels: - app: {{ template "feast-core.name" . }} - chart: {{ .Chart.Name }}-{{ .Chart.Version }} - component: core - heritage: {{ .Release.Service }} - release: {{ .Release.Name }} - annotations: -{{- with .Values.ingress.annotations }} -{{ toYaml . | indent 4 }} +{{- if .Values.ingress.http.enabled -}} +{{ template "feast.ingress" (list . "core" "http" .Values.ingress.http) }} +{{- end }} +--- +{{ if .Values.ingress.grpc.enabled -}} +{{ template "feast.ingress" (list . "core" "grpc" .Values.ingress.grpc) }} {{- end }} -spec: - rules: - {{- range .Values.ingress.hosts }} - - host: {{ .host | quote }} - http: - paths: - - path: / - backend: - serviceName: {{ $fullName }} - servicePort: {{ .port | quote }} - {{- end }} -{{- end }} \ No newline at end of file diff --git a/infra/charts/feast/charts/feast-core/values.yaml b/infra/charts/feast/charts/feast-core/values.yaml index f746bc96ea..077906dc35 100644 --- a/infra/charts/feast/charts/feast-core/values.yaml +++ b/infra/charts/feast/charts/feast-core/values.yaml @@ -1,12 +1,15 @@ -# postgresql configures Postgresql that is installed as part of Feast Core. +# ============================================================ +# Bundled PostgreSQL +# ============================================================ + # Refer to https://github.com/helm/charts/tree/c42002a21abf8eff839ff1d2382152bde2bbe596/stable/postgresql # for additional configuration. postgresql: # enabled specifies whether Postgresql should be installed as part of Feast Core. # - # Feast Core requires a database to store data such as the created FeatureSets + # Feast Core requires a database to store data such as the created FeatureSets # and job statuses. 
If enabled, the database and service port specified below - # will override "spring.datasource.url" value in application.yaml. The + # will override "spring.datasource.url" value in application.yaml. The # username and password will also be set as environment variables that will # override "spring.datasource.username/password" in application.yaml. enabled: true @@ -20,12 +23,15 @@ postgresql: # port is the TCP port that Postgresql will listen to port: 5432 -# kafka configures Kafka that is installed as part of Feast Core. +# ============================================================ +# Bundled Kafka +# ============================================================ + # Refer to https://github.com/helm/charts/tree/c42002a21abf8eff839ff1d2382152bde2bbe596/incubator/kafka # for additional configuration. kafka: # enabled specifies whether Kafka should be installed as part of Feast Core. - # + # # Feast Core requires a Kafka instance to be set as the default source for # FeatureRows. If enabled, "feast.stream" option in application.yaml will # be overridden by this installed Kafka configuration. @@ -36,6 +42,18 @@ kafka: replicationFactor: 1 partitions: 1 + +# ============================================================ +# Bundled Prometheus StatsD Exporter +# ============================================================ + +prometheus-statsd-exporter: + enabled: false + +# ============================================================ +# Feast Core +# ============================================================ + # replicaCount is the number of pods that will be created. replicaCount: 1 @@ -44,13 +62,18 @@ image: repository: gcr.io/kf-feast/feast-core pullPolicy: IfNotPresent +# Add prometheus scraping annotations to the Pod metadata. +# If enabled, you must also ensure server.port is specified under application.yaml +prometheus: + enabled: false + # application.yaml is the main configuration for Feast Core application. -# +# # Feast Core is a Spring Boot app which uses this yaml configuration file. # Refer to https://github.com/gojek/feast/blob/79eb4ab5fa3d37102c1dca9968162a98690526ba/core/src/main/resources/application.yml # for a complete list and description of the configuration. # -# Note that some properties defined in application.yaml may be overriden by +# Note that some properties defined in application.yaml may be overriden by # Helm under certain conditions. For example, if postgresql and kafka dependencies # are enabled. application.yaml: @@ -96,7 +119,14 @@ application.yaml: host: localhost port: 8125 -# springConfigMountPath is the directory path where application.yaml will be +springConfigProfiles: {} +# db: | +# spring: +# datasource: +# driverClassName: org.postgresql.Driver +# url: jdbc:postgresql://${DB_HOST:127.0.0.1}:${DB_PORT:5432}/${DB_DATABASE:postgres} +springConfigProfilesActive: "" +# springConfigMountPath is the directory path where application.yaml will be # mounted in the container. springConfigMountPath: /etc/feast/feast-core @@ -107,7 +137,7 @@ gcpServiceAccount: useExistingSecret: false existingSecret: # name is the secret name of the existing secret for the service account. - name: feast-gcp-service-account + name: feast-gcp-service-account # key is the secret key of the existing secret for the service account. # key is normally derived from the file name of the JSON key file. key: key.json @@ -115,19 +145,29 @@ gcpServiceAccount: # the value of "existingSecret.key" is file name of the service account file. 
mountPath: /etc/gcloud/service-accounts -# jvmOptions are options that will be passed to the Java Virtual Machine (JVM) +# Project ID picked up by the Cloud SDK (e.g. BigQuery run against this project) +gcpProjectId: "" + +# Path to Jar file in the Docker image. +# If you are using gcr.io/kf-feast/feast-core this should not need to be changed +jarPath: /opt/feast/feast-core.jar + +# jvmOptions are options that will be passed to the Java Virtual Machine (JVM) # running Feast Core. -# +# # For example, it is good practice to set min and max heap size in JVM. # https://stackoverflow.com/questions/6902135/side-effect-for-increasing-maxpermsize-and-max-heap-size # # Refer to https://docs.oracle.com/cd/E22289_01/html/821-1274/configuring-the-default-jvm-and-java-arguments.html # to see other JVM options that can be set. # -# jvmOptions: -# - -Xms1024m +jvmOptions: [] +# - -Xms1024m # - -Xmx1024m +logType: JSON +logLevel: warn + livenessProbe: enabled: true initialDelaySeconds: 60 @@ -162,12 +202,29 @@ service: # nodePort: ingress: - enabled: false - annotations: {} - # kubernetes.io/ingress.class: nginx - hosts: - # - host: chart-example.local - # port: http + grpc: + enabled: false + class: nginx + hosts: [] + annotations: {} + https: + enabled: true + secretNames: {} + whitelist: "" + auth: + enabled: false + http: + enabled: false + class: nginx + hosts: [] + annotations: {} + https: + enabled: true + secretNames: {} + whitelist: "" + auth: + enabled: false + authUrl: http://auth-server.auth-ns.svc.cluster.local/auth resources: {} # We usually recommend not to specify default resources and to leave this as a conscious diff --git a/infra/charts/feast/charts/feast-serving/requirements.yaml b/infra/charts/feast/charts/feast-serving/requirements.yaml index fa4c1df4c1..2cee3f8149 100644 --- a/infra/charts/feast/charts/feast-serving/requirements.yaml +++ b/infra/charts/feast/charts/feast-serving/requirements.yaml @@ -3,3 +3,6 @@ dependencies: version: 9.5.0 repository: "@stable" condition: redis.enabled +- name: common + version: 0.0.5 + repository: "@incubator" diff --git a/infra/charts/feast/charts/feast-serving/templates/_helpers.tpl b/infra/charts/feast/charts/feast-serving/templates/_helpers.tpl index 49abb6b8e5..ab670cc8cc 100644 --- a/infra/charts/feast/charts/feast-serving/templates/_helpers.tpl +++ b/infra/charts/feast/charts/feast-serving/templates/_helpers.tpl @@ -43,3 +43,10 @@ app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} {{- end }} app.kubernetes.io/managed-by: {{ .Release.Service }} {{- end -}} + +{{/* +Helpers +*/}} +{{- define "bq_store_and_no_job_options" -}} +{{ and (eq (index .Values "store.yaml" "type") "BIGQUERY") (empty (index .Values "application.yaml" "feast" "jobs" "store-options")) }} +{{- end -}} diff --git a/infra/charts/feast/charts/feast-serving/templates/_ingress.yaml b/infra/charts/feast/charts/feast-serving/templates/_ingress.yaml new file mode 100644 index 0000000000..5bed6df047 --- /dev/null +++ b/infra/charts/feast/charts/feast-serving/templates/_ingress.yaml @@ -0,0 +1,68 @@ +{{- /* +This takes an array of three values: +- the top context +- the feast component +- the service protocol +- the ingress context +*/ -}} +{{- define "feast.ingress" -}} +{{- $top := (index . 0) -}} +{{- $component := (index . 1) -}} +{{- $protocol := (index . 2) -}} +{{- $ingressValues := (index . 3) -}} +apiVersion: extensions/v1beta1 +kind: Ingress +{{ include "feast.ingress.metadata" . 
}} +spec: + rules: + {{- range $host := $ingressValues.hosts }} + - host: {{ $host }} + http: + paths: + - path: / + backend: + serviceName: {{ include (printf "feast-%s.fullname" $component) $top }} + servicePort: {{ index $top.Values "service" $protocol "port" }} + {{- end }} +{{- if $ingressValues.https.enabled }} + tls: + {{- range $host := $ingressValues.hosts }} + - secretName: {{ index $ingressValues.https.secretNames $host | default (splitList "." $host | rest | join "-" | printf "%s-tls") }} + hosts: + - {{ $host }} + {{- end }} +{{- end -}} +{{- end -}} + +{{- define "feast.ingress.metadata" -}} +{{- $commonMetadata := fromYaml (include "common.metadata" (first .)) }} +{{- $overrides := fromYaml (include "feast.ingress.metadata-overrides" .) -}} +{{- toYaml (merge $overrides $commonMetadata) -}} +{{- end -}} + +{{- define "feast.ingress.metadata-overrides" -}} +{{- $top := (index . 0) -}} +{{- $component := (index . 1) -}} +{{- $protocol := (index . 2) -}} +{{- $ingressValues := (index . 3) -}} +{{- $commonFullname := include "common.fullname" $top }} +metadata: + name: {{ $commonFullname }}-{{ $component }}-{{ $protocol }} + annotations: + kubernetes.io/ingress.class: {{ $ingressValues.class | quote }} + {{- if (and (eq $ingressValues.class "nginx") $ingressValues.auth.enabled) }} + nginx.ingress.kubernetes.io/auth-url: {{ $ingressValues.auth.authUrl | quote }} + nginx.ingress.kubernetes.io/auth-response-headers: "x-auth-request-email, x-auth-request-user" + nginx.ingress.kubernetes.io/auth-signin: "https://{{ $ingressValues.auth.signinHost | default (splitList "." (index $ingressValues.hosts 0) | rest | join "." | printf "auth.%s")}}/oauth2/start?rd=/r/$host/$request_uri" + {{- end }} + {{- if (and (eq $ingressValues.class "nginx") $ingressValues.whitelist) }} + nginx.ingress.kubernetes.io/whitelist-source-range: {{ $ingressValues.whitelist | quote -}} + {{- end }} + {{- if (and (eq $ingressValues.class "nginx") (eq $protocol "grpc") ) }} + # TODO: Allow choice of GRPC/GRPCS + nginx.ingress.kubernetes.io/backend-protocol: "GRPC" + {{- end }} + {{- if $ingressValues.annotations -}} + {{ include "common.annote" $ingressValues.annotations | indent 4 }} + {{- end }} +{{- end -}} diff --git a/infra/charts/feast/charts/feast-serving/templates/configmap.yaml b/infra/charts/feast/charts/feast-serving/templates/configmap.yaml index 0ec80252c1..934216a9d5 100644 --- a/infra/charts/feast/charts/feast-serving/templates/configmap.yaml +++ b/infra/charts/feast/charts/feast-serving/templates/configmap.yaml @@ -11,37 +11,43 @@ metadata: heritage: {{ .Release.Service }} data: application.yaml: | -{{- $config := index .Values "application.yaml" }} +{{- toYaml (index .Values "application.yaml") | nindent 4 }} {{- if .Values.core.enabled }} -{{- $newConfig := dict "feast" (dict "core-host" (printf "%s-feast-core" .Release.Name)) }} -{{- $config := mergeOverwrite $config $newConfig }} + application-bundled-core.yaml: | + feast: + core-host: {{ printf "%s-feast-core" .Release.Name }} {{- end }} -{{- $store := index .Values "store.yaml" }} -{{- if and (eq $store.type "BIGQUERY") (not (hasKey $config.feast.jobs "store-options")) }} -{{- $jobStore := dict "host" (printf "%s-redis-headless" .Release.Name) "port" 6379 }} -{{- $newConfig := dict "feast" (dict "jobs" (dict "store-options" $jobStore)) }} -{{- $config := mergeOverwrite $config $newConfig }} +{{- if eq (include "bq_store_and_no_job_options" .) 
"true" }} + application-bundled-redis.yaml: | + feast: + jobs: + store-options: + host: {{ printf "%s-redis-headless" .Release.Name }} + port: 6379 {{- end }} -{{- toYaml $config | nindent 4 }} - store.yaml: | -{{- $config := index .Values "store.yaml"}} +{{- $store := index .Values "store.yaml"}} -{{- if and .Values.redis.enabled (eq $config.type "REDIS") }} +{{- if and .Values.redis.enabled (eq $store.type "REDIS") }} {{- if eq .Values.redis.master.service.type "ClusterIP" }} {{- $newConfig := dict "redis_config" (dict "host" (printf "%s-redis-headless" .Release.Name) "port" .Values.redis.redisPort) }} -{{- $config := mergeOverwrite $config $newConfig }} +{{- $config := mergeOverwrite $store $newConfig }} {{- end }} {{- if and (eq .Values.redis.master.service.type "LoadBalancer") (not (empty .Values.redis.master.service.loadBalancerIP)) }} {{- $newConfig := dict "redis_config" (dict "host" .Values.redis.master.service.loadBalancerIP "port" .Values.redis.redisPort) }} -{{- $config := mergeOverwrite $config $newConfig }} +{{- $config := mergeOverwrite $store $newConfig }} {{- end }} {{- end }} -{{- toYaml $config | nindent 4 }} +{{- toYaml $store | nindent 4 }} + +{{- range $name, $content := .Values.springConfigProfiles }} + application-{{ $name }}.yaml: | +{{- toYaml $content | nindent 4 }} +{{- end }} diff --git a/infra/charts/feast/charts/feast-serving/templates/deployment.yaml b/infra/charts/feast/charts/feast-serving/templates/deployment.yaml index e6824a2346..64dd3955d0 100644 --- a/infra/charts/feast/charts/feast-serving/templates/deployment.yaml +++ b/infra/charts/feast/charts/feast-serving/templates/deployment.yaml @@ -49,7 +49,7 @@ spec: - name: {{ .Chart.Name }} image: '{{ .Values.image.repository }}:{{ required "No .image.tag found. This must be provided as input." .Values.image.tag }}' imagePullPolicy: {{ .Values.image.pullPolicy }} - + volumeMounts: - name: {{ template "feast-serving.fullname" . }}-config mountPath: "{{ .Values.springConfigMountPath }}" @@ -60,24 +60,40 @@ spec: {{- end }} env: + - name: LOG_TYPE + value: {{ .Values.logType | quote }} + - name: LOG_LEVEL + value: {{ .Values.logLevel | quote }} + {{- if .Values.gcpServiceAccount.useExistingSecret }} - name: GOOGLE_APPLICATION_CREDENTIALS value: {{ .Values.gcpServiceAccount.mountPath }}/{{ .Values.gcpServiceAccount.existingSecret.key }} {{- end }} + {{- if .Values.gcpProjectId }} + - name: GOOGLE_CLOUD_PROJECT + value: {{ .Values.gcpProjectId | quote }} + {{- end }} command: - java {{- range .Values.jvmOptions }} - - {{ . }} + - {{ . | quote }} + {{- end }} + - -jar + - {{ .Values.jarPath | quote }} + - "--spring.config.location=file:{{ .Values.springConfigMountPath }}/" + {{- $profilesArray := splitList "," .Values.springConfigProfilesActive -}} + {{- $profilesArray = append $profilesArray (.Values.core.enabled | ternary "bundled-core" "") -}} + {{- $profilesArray = append $profilesArray (eq (include "bq_store_and_no_job_options" .) 
"true" | ternary "bundled-redis" "") -}} + {{- $profilesArray = compact $profilesArray -}} + {{- if $profilesArray }} + - "--spring.profiles.active={{ join "," $profilesArray }}" {{- end }} - - -jar - - /opt/feast/feast-serving.jar - - "--spring.config.location=file:{{ .Values.springConfigMountPath }}/application.yaml" ports: - name: http containerPort: {{ .Values.service.http.targetPort }} - - name: grpc + - name: grpc containerPort: {{ .Values.service.grpc.targetPort }} {{- if .Values.livenessProbe.enabled }} @@ -101,6 +117,6 @@ spec: timeoutSeconds: {{ .Values.readinessProbe.timeoutSeconds }} failureThreshold: {{ .Values.readinessProbe.failureThreshold }} {{- end }} - + resources: {{- toYaml .Values.resources | nindent 10 }} diff --git a/infra/charts/feast/charts/feast-serving/templates/ingress.yaml b/infra/charts/feast/charts/feast-serving/templates/ingress.yaml index c6b4cb07a8..1bcd176147 100644 --- a/infra/charts/feast/charts/feast-serving/templates/ingress.yaml +++ b/infra/charts/feast/charts/feast-serving/templates/ingress.yaml @@ -1,28 +1,7 @@ -{{- if .Values.ingress.enabled -}} -{{- $fullName := include "feast-serving.fullname" . -}} -apiVersion: extensions/v1beta1 -kind: Ingress -metadata: - name: {{ $fullName }} - labels: - app: {{ template "feast-serving.name" . }} - chart: {{ .Chart.Name }}-{{ .Chart.Version }} - component: serving - heritage: {{ .Release.Service }} - release: {{ .Release.Name }} - annotations: -{{- with .Values.ingress.annotations }} -{{ toYaml . | indent 4 }} +{{- if .Values.ingress.http.enabled -}} +{{ template "feast.ingress" (list . "serving" "http" .Values.ingress.http) }} {{- end }} -spec: - rules: - {{- range .Values.ingress.hosts }} - - host: {{ .host | quote }} - http: - paths: - - path: / - backend: - serviceName: {{ $fullName }} - servicePort: {{ .port | quote }} - {{- end }} +--- +{{ if .Values.ingress.grpc.enabled -}} +{{ template "feast.ingress" (list . "serving" "grpc" .Values.ingress.grpc) }} {{- end }} diff --git a/infra/charts/feast/charts/feast-serving/values.yaml b/infra/charts/feast/charts/feast-serving/values.yaml index d2b3c59947..52d10cd744 100644 --- a/infra/charts/feast/charts/feast-serving/values.yaml +++ b/infra/charts/feast/charts/feast-serving/values.yaml @@ -3,23 +3,23 @@ # for additional configuration redis: # enabled specifies whether Redis should be installed as part of Feast Serving. - # + # # If enabled, "redis_config" in store.yaml will be overwritten by Helm # to the configuration in this Redis installation. enabled: false # usePassword specifies if password is required to access Redis. Note that # Feast 0.3 does not support Redis with password. - usePassword: false + usePassword: false # cluster configuration for Redis. cluster: # enabled specifies if Redis should be installed in cluster mode. enabled: false -# core configures Feast Core in the same parent feast chart that this Feast +# core configures Feast Core in the same parent feast chart that this Feast # Serving connects to. core: # enabled specifies that Feast Serving will use Feast Core installed - # in the same parent feast chart. If enabled, Helm will overwrite + # in the same parent feast chart. If enabled, Helm will overwrite # "feast.core-host" in application.yaml with the correct value. enabled: true @@ -37,7 +37,7 @@ image: # Refer to https://github.com/gojek/feast/blob/79eb4ab5fa3d37102c1dca9968162a98690526ba/serving/src/main/resources/application.yml # for a complete list and description of the configuration. 
# -# Note that some properties defined in application.yaml may be overridden by +# Note that some properties defined in application.yaml may be overridden by # Helm under certain conditions. For example, if core is enabled, then # "feast.core-host" will be overridden. Also, if "type: BIGQUERY" is specified # in store.yaml, "feast.jobs.store-options" will be overridden as well with @@ -66,19 +66,19 @@ application.yaml: port: 8080 # store.yaml is the configuration for Feast Store. -# +# # Refer to this link for description: # https://github.com/gojek/feast/blob/79eb4ab5fa3d37102c1dca9968162a98690526ba/protos/feast/core/Store.proto # # Use the correct store configuration depending on whether the installed # Feast Serving is "online" or "batch", by uncommenting the correct store.yaml. # -# Note that if "redis.enabled: true" and "type: REDIS" in store.yaml, +# Note that if "redis.enabled: true" and "type: REDIS" in store.yaml, # Helm will override "redis_config" with configuration of Redis installed # in this chart. -# +# # Note that if "type: BIGQUERY" in store.yaml, Helm assumes Feast Online serving -# is also installed with Redis store. Helm will then override "feast.jobs.store-options" +# is also installed with Redis store. Helm will then override "feast.jobs.store-options" # in application.yaml with the installed Redis store configuration. This is # because in Feast 0.3, Redis job store is required. # @@ -104,7 +104,14 @@ application.yaml: # name: "*" # version: "*" -# springConfigMountPath is the directory path where application.yaml and +springConfigProfiles: {} +# db: | +# spring: +# datasource: +# driverClassName: org.postgresql.Driver +# url: jdbc:postgresql://${DB_HOST:127.0.0.1}:${DB_PORT:5432}/${DB_DATABASE:postgres} +springConfigProfilesActive: "" +# springConfigMountPath is the directory path where application.yaml and # store.yaml will be mounted in the container. springConfigMountPath: /etc/feast/feast-serving @@ -115,7 +122,7 @@ gcpServiceAccount: useExistingSecret: false existingSecret: # name is the secret name of the existing secret for the service account. - name: feast-gcp-service-account + name: feast-gcp-service-account # key is the secret key of the existing secret for the service account. # key is normally derived from the file name of the JSON key file. key: key.json @@ -123,19 +130,29 @@ gcpServiceAccount: # the value of "existingSecret.key" is file name of the service account file. mountPath: /etc/gcloud/service-accounts -# jvmOptions are options that will be passed to the Java Virtual Machine (JVM) +# Project ID picked up by the Cloud SDK (e.g. BigQuery run against this project) +gcpProjectId: "" + +# Path to Jar file in the Docker image. +# If using gcr.io/kf-feast/feast-serving this should not need to be changed. +jarPath: /opt/feast/feast-serving.jar + +# jvmOptions are options that will be passed to the Java Virtual Machine (JVM) # running Feast Core. -# +# # For example, it is good practice to set min and max heap size in JVM. # https://stackoverflow.com/questions/6902135/side-effect-for-increasing-maxpermsize-and-max-heap-size # # Refer to https://docs.oracle.com/cd/E22289_01/html/821-1274/configuring-the-default-jvm-and-java-arguments.html # to see other JVM options that can be set. 
# -# jvmOptions: -# - -Xms768m +jvmOptions: [] +# - -Xms768m # - -Xmx768m +logType: JSON +logLevel: warn + livenessProbe: enabled: false initialDelaySeconds: 60 @@ -170,12 +187,29 @@ service: # nodePort: ingress: - enabled: false - annotations: {} - # kubernetes.io/ingress.class: nginx - hosts: - # - host: chart-example.local - # port: http + grpc: + enabled: false + class: nginx + hosts: [] + annotations: {} + https: + enabled: true + secretNames: {} + whitelist: "" + auth: + enabled: false + http: + enabled: false + class: nginx + hosts: [] + annotations: {} + https: + enabled: true + secretNames: {} + whitelist: "" + auth: + enabled: false + authUrl: http://auth-server.auth-ns.svc.cluster.local/auth prometheus: enabled: true @@ -185,6 +219,7 @@ resources: {} # choice for the user. This also increases chances charts run on environments with little # resources, such as Minikube. If you do want to specify resources, uncomment the following # lines, adjust them as necessary, and remove the curly braces after 'resources:'. + # # limits: # cpu: 100m # memory: 128Mi diff --git a/infra/charts/feast/requirements.lock b/infra/charts/feast/requirements.lock index 8afd952157..e441790dc7 100644 --- a/infra/charts/feast/requirements.lock +++ b/infra/charts/feast/requirements.lock @@ -1,12 +1,6 @@ dependencies: -- name: feast-core - repository: "" - version: 0.3.2 -- name: feast-serving - repository: "" - version: 0.3.2 -- name: feast-serving - repository: "" - version: 0.3.2 -digest: sha256:7ee4cd271cbd4ace44817dd12ba65f490a8e3529adf199604a2c2bdad9c2fac3 -generated: "2019-11-27T13:35:41.334054+08:00" +- name: common + repository: https://kubernetes-charts-incubator.storage.googleapis.com + version: 0.0.5 +digest: sha256:935bfb09e9ed90ff800826a7df21adaabe3225511c3ad78df44e1a5a60e93f14 +generated: 2019-12-10T14:47:49.57569Z diff --git a/infra/charts/feast/requirements.yaml b/infra/charts/feast/requirements.yaml index 5416ded3fe..1fa1826965 100644 --- a/infra/charts/feast/requirements.yaml +++ b/infra/charts/feast/requirements.yaml @@ -9,4 +9,4 @@ dependencies: - name: feast-serving alias: feast-serving-online version: 0.4.4 - condition: feast-serving-online.enabled + condition: feast-serving-online.enabled \ No newline at end of file diff --git a/infra/charts/feast/values-demo.yaml b/infra/charts/feast/values-demo.yaml index fad4bc0afb..2cb5ccbe74 100644 --- a/infra/charts/feast/values-demo.yaml +++ b/infra/charts/feast/values-demo.yaml @@ -1,7 +1,7 @@ # The following are values for installing Feast for demonstration purpose: # - Persistence is disabled since for demo purpose data is not expected # to be durable -# - Only online serving (no batch serving) is installed to remove dependency +# - Only online serving (no batch serving) is installed to remove dependency # on Google Cloud services. Batch serving requires BigQuery dependency. 
# - Replace all occurrences of "feast.example.com" with the domain name or # external IP pointing to your cluster @@ -68,4 +68,17 @@ feast-serving-online: version: "*" feast-serving-batch: - enabled: false +# enabled: false + enabled: true + store.yaml: + name: bigquery + type: BIGQUERY + bigquery_config: + project_id: PROJECT_ID + dataset_id: DATASET_ID + subscriptions: + - project: "*" + name: "*" + version: "*" + redis: + enabled: false \ No newline at end of file diff --git a/infra/charts/feast/values.yaml b/infra/charts/feast/values.yaml index f9a0a76dc1..fde03f9ad7 100644 --- a/infra/charts/feast/values.yaml +++ b/infra/charts/feast/values.yaml @@ -2,10 +2,12 @@ # - Feast Core # - Feast Serving Online # - Feast Serving Batch +# - Prometheus StatsD Exporter # # The configuration for different components can be referenced from: # - charts/feast-core/values.yaml # - charts/feast-serving/values.yaml +# - charts/prometheus-statsd-exporter/values.yaml # # Note that "feast-serving-online" and "feast-serving-batch" are # aliases to "feast-serving" chart since in typical scenario two instances @@ -235,11 +237,11 @@ feast-serving-batch: # enabled as well. So Feast Serving Batch will share the same # Redis instance to store job statuses. store-type: REDIS - store-options: - # Use the externally exposed redis instance deployed by Online service - # Please set EXTERNAL_IP to your cluster's external IP - host: EXTERNAL_IP - port: 32101 + # Default to use the internal hostname of the redis instance deployed by Online service, + # otherwise use externally exposed by setting EXTERNAL_IP to your cluster's external IP + # store-options: + # host: EXTERNAL_IP + # port: 32101 # store.yaml is the configuration for Feast Store. # # Refer to this link for more description: From d13b588bbc7f6ec5462519c70da61febe0f69085 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Sun, 9 Feb 2020 12:40:03 +0800 Subject: [PATCH 24/31] Update templates for prometheus statsd exporter - Create 2 distinct services, one for metrics, one for statsd because they use different protocols (TCP and UDP) respectively and exposing this via Kube LoadBalancer won't work by default because most LoadBalancer only supports either TCP or UDP - Allow users to define statsd_mapping from helm values --- .../templates/NOTES.txt | 17 ---- .../templates/config.yaml | 4 +- .../templates/service.yaml | 82 ++++++++++++++----- .../prometheus-statsd-exporter/values.yaml | 26 ++++-- 4 files changed, 79 insertions(+), 50 deletions(-) delete mode 100644 infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/NOTES.txt diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/NOTES.txt b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/NOTES.txt deleted file mode 100644 index bbd06f118a..0000000000 --- a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/NOTES.txt +++ /dev/null @@ -1,17 +0,0 @@ - -To verify that prometheus-statsd-exporter has started, run: - -{{- if contains "NodePort" .Values.service.type }} - export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ template "prometheus-statsd-exporter.fullname" . 
}}) - export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}") - echo http://$NODE_IP:$NODE_PORT -{{- else if contains "LoadBalancer" .Values.service.type }} - NOTE: It may take a few minutes for the LoadBalancer IP to be available. - You can watch the status of by running 'kubectl get svc --namespace {{ .Release.Namespace }} -w {{ template "prometheus-statsd-exporter.fullname" . }}' - - export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ template "prometheus-statsd-exporter.fullname" . }} -o jsonpath='{.status.loadBalancer.ingress[0].ip}') - echo http://$SERVICE_IP:{{ .Values.service.servicePort }} -{{- else if contains "ClusterIP" .Values.service.type }} - export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app={{ template "prometheus-statsd-exporter.name" . }},component={{ .Chart.Name }}" -o jsonpath="{.items[0].metadata.name}") - kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 9090 -{{- end }} \ No newline at end of file diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/config.yaml b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/config.yaml index 0f9de1e953..fc27a752bb 100644 --- a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/config.yaml +++ b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/config.yaml @@ -9,6 +9,4 @@ metadata: heritage: {{ .Release.Service }} data: statsd_mappings.yaml: | -# -# defaults: -# ttl: "45s" \ No newline at end of file +{{- toYaml (index .Values "statsd_mappings.yaml") | nindent 4 }} diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/service.yaml b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/service.yaml index 88d01b24a6..bb45428b9c 100644 --- a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/service.yaml +++ b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/service.yaml @@ -1,9 +1,9 @@ apiVersion: v1 kind: Service metadata: -{{- if .Values.service.annotations }} +{{- if .Values.metricsService.annotations }} annotations: -{{ toYaml .Values.service.annotations | indent 4 }} +{{ toYaml .Values.metricsService.annotations | indent 4 }} {{- end }} labels: app: {{ template "prometheus-statsd-exporter.fullname" . }} @@ -11,41 +11,79 @@ metadata: component: "{{ .Chart.Name }}" heritage: {{ .Release.Service }} release: {{ .Release.Name }} -{{- if .Values.service.labels }} -{{ toYaml .Values.service.labels | indent 4 }} +{{- if .Values.metricsService.labels }} +{{ toYaml .Values.metricsService.labels | indent 4 }} {{- end }} - name: {{ template "prometheus-statsd-exporter.fullname" . }} + name: {{ template "prometheus-statsd-exporter.fullname" . 
}}-metrics spec: -{{- if .Values.service.clusterIP }} - clusterIP: {{ .Values.service.clusterIP }} +{{- if .Values.metricsService.clusterIP }} + clusterIP: {{ .Values.metricsService.clusterIP }} {{- end }} -{{- if .Values.service.externalIPs }} +{{- if .Values.metricsService.externalIPs }} externalIPs: -{{ toYaml .Values.service.externalIPs | indent 4 }} +{{ toYaml .Values.metricsService.externalIPs | indent 4 }} {{- end }} -{{- if .Values.service.loadBalancerIP }} - loadBalancerIP: {{ .Values.service.loadBalancerIP }} +{{- if .Values.metricsService.loadBalancerIP }} + loadBalancerIP: {{ .Values.metricsService.loadBalancerIP }} {{- end }} -{{- if .Values.service.loadBalancerSourceRanges }} +{{- if .Values.metricsService.loadBalancerSourceRanges }} loadBalancerSourceRanges: - {{- range $cidr := .Values.service.loadBalancerSourceRanges }} + {{- range $cidr := .Values.metricsService.loadBalancerSourceRanges }} - {{ $cidr }} {{- end }} {{- end }} ports: - name: metrics - port: {{ .Values.service.metricsPort }} + port: {{ .Values.metricsService.port }} protocol: TCP targetPort: 9102 - - name: statsd-tcp - port: {{ .Values.service.statsdPort }} - protocol: TCP - targetPort: 9125 + selector: + app: {{ template "prometheus-statsd-exporter.name" . }} + release: {{ .Release.Name }} + type: "{{ .Values.metricsService.type }}" + +--- + +apiVersion: v1 +kind: Service +metadata: +{{- if .Values.statsdService.annotations }} + annotations: +{{ toYaml .Values.statsdService.annotations | indent 4 }} +{{- end }} + labels: + app: {{ template "prometheus-statsd-exporter.fullname" . }} + chart: {{ .Chart.Name }}-{{ .Chart.Version }} + component: "{{ .Chart.Name }}" + heritage: {{ .Release.Service }} + release: {{ .Release.Name }} +{{- if .Values.statsdService.labels }} +{{ toYaml .Values.statsdService.labels | indent 4 }} +{{- end }} + name: {{ template "prometheus-statsd-exporter.fullname" . }}-statsd +spec: +{{- if .Values.statsdService.clusterIP }} + clusterIP: {{ .Values.statsdService.clusterIP }} +{{- end }} +{{- if .Values.statsdService.externalIPs }} + externalIPs: +{{ toYaml .Values.statsdService.externalIPs | indent 4 }} +{{- end }} +{{- if .Values.statsdService.loadBalancerIP }} + loadBalancerIP: {{ .Values.statsdService.loadBalancerIP }} +{{- end }} +{{- if .Values.statsdService.loadBalancerSourceRanges }} + loadBalancerSourceRanges: + {{- range $cidr := .Values.statsdService.loadBalancerSourceRanges }} + - {{ $cidr }} + {{- end }} +{{- end }} + ports: - name: statsd-udp - port: {{ .Values.service.statsdPort }} - protocol: UDP - targetPort: 9125 + port: {{ .Values.statsdService.port }} + protocol: {{ .Values.statsdService.protocol }} + targetPort: {{ .Values.statsdService.port }} selector: app: {{ template "prometheus-statsd-exporter.name" . 
}} release: {{ .Release.Name }} - type: "{{ .Values.service.type }}" \ No newline at end of file + type: "{{ .Values.statsdService.type }}" \ No newline at end of file diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/values.yaml b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/values.yaml index f2d523771e..954f3dcba6 100644 --- a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/values.yaml +++ b/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/values.yaml @@ -3,20 +3,30 @@ image: tag: v0.12.1 pullPolicy: IfNotPresent -service: +metricsService: + type: ClusterIP + port: 9102 annotations: {} labels: {} clusterIP: "" - ## List of IP addresses at which the alertmanager service is available - ## Ref: https://kubernetes.io/docs/user-guide/services/#external-ips - ## externalIPs: [] loadBalancerIP: "" loadBalancerSourceRanges: [] - servicePort: 80 - type: ClusterIP - metricsPort: 9102 - statsdPort: 9125 + +statsdService: + type: ClusterIP + port: 9125 + protocol: UDP + annotations: {} + labels: {} + clusterIP: "" + externalIPs: [] + loadBalancerIP: "" + loadBalancerSourceRanges: [] + +statsd_mappings.yaml: + defaults: + ttl: 60s statsdexporter: podAnnotations: From 0b72ddd583112cacb195dd8ad210add1f5d5bddd Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Sun, 9 Feb 2020 21:11:17 +0800 Subject: [PATCH 25/31] Update grafana-dashboard for ingestion - Include more filters for constraint validation (so no many to many results for subtrations) - Use datasource: null so dashboard will use "default" datasource - Unset uid so it will be autogenerated during import --- .../src/main/resources/grafana-dashboard.json | 102 +++++++++++------- 1 file changed, 65 insertions(+), 37 deletions(-) diff --git a/ingestion/src/main/resources/grafana-dashboard.json b/ingestion/src/main/resources/grafana-dashboard.json index 762295aa3e..3758818c86 100644 --- a/ingestion/src/main/resources/grafana-dashboard.json +++ b/ingestion/src/main/resources/grafana-dashboard.json @@ -75,13 +75,13 @@ "steppedLine": false, "targets": [ { - "expr": "(feast_ingestion_feature_value_max - on(feast_project_name,feast_feature_name) feast_ingestion_feature_value_domain_max) > 0", + "expr": "(feast_ingestion_feature_value_max - on(feast_project_name,feast_feature_name,feast_store,feast_ingestion_job_name) feast_ingestion_feature_value_domain_max) > 0", "hide": false, "legendFormat": "", "refId": "A" }, { - "expr": "(feast_ingestion_feature_value_domain_min - on(feast_project_name,feast_feature_name) feast_ingestion_feature_value_min) > 0", + "expr": "(feast_ingestion_feature_value_domain_min - on(feast_project_name,feast_feature_name,feast_store,feast_ingestion_job_name) feast_ingestion_feature_value_min) > 0", "hide": false, "legendFormat": "", "refId": "B" @@ -169,7 +169,7 @@ "steppedLine": false, "targets": [ { - "expr": "feast_ingestion_feature_presence_min_count - on (feast_feature_name, feast_project_name) increase(feast_ingestion_feature_value_presence_count[5m]) > 0", + "expr": "feast_ingestion_feature_presence_min_count - on(feast_project_name,feast_feature_name,feast_store,feast_ingestion_job_name) increase(feast_ingestion_feature_value_presence_count[5m]) > 0", "refId": "B" } ], @@ -198,6 +198,7 @@ "logBase": 1, "max": null, "min": null, + "decimals": 0, "show": true }, { @@ -255,7 +256,7 @@ "steppedLine": false, "targets": [ { - "expr": "feast_ingestion_feature_presence_min_fraction - on (feast_feature_name, feast_project_name) 
increase(feast_ingestion_feature_value_presence_count[5m]) / (\nincrease(feast_ingestion_feature_value_presence_count[5m]) +\nincrease(feast_ingestion_feature_value_missing_count[5m])\n) > 0", + "expr": "feast_ingestion_feature_presence_min_fraction - on(feast_project_name,feast_feature_name,feast_store,feast_ingestion_job_name) increase(feast_ingestion_feature_value_presence_count[5m]) / (\nincrease(feast_ingestion_feature_value_presence_count[5m]) +\nincrease(feast_ingestion_feature_value_missing_count[5m])\n) > 0", "refId": "A" } ], @@ -282,8 +283,8 @@ "format": "short", "label": null, "logBase": 1, - "max": null, - "min": null, + "max": 1, + "min": -1, "show": true }, { @@ -379,22 +380,22 @@ "steppedLine": false, "targets": [ { - "expr": "feast_ingestion_feature_value_min{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\"}", + "expr": "feast_ingestion_feature_value_min{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", "legendFormat": "min_val", "refId": "A" }, { - "expr": "feast_ingestion_feature_value_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "expr": "feast_ingestion_feature_value_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", "legendFormat": "max_val", "refId": "B" }, { - "expr": "feast_ingestion_feature_value_domain_min{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "expr": "feast_ingestion_feature_value_domain_min{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", "legendFormat": "min_domain", "refId": "C" }, { - "expr": "feast_ingestion_feature_value_domain_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "expr": "feast_ingestion_feature_value_domain_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", "legendFormat": "max_domain", "refId": "D" } @@ -495,7 +496,7 @@ "steppedLine": false, "targets": [ { - "expr": "increase(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\"}[$__range])", + "expr": "increase(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}[$__range])", "instant": false, "interval": "", "intervalFactor": 1, @@ -503,7 +504,7 @@ "refId": "A" }, { - "expr": "feast_ingestion_feature_presence_min_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\"}", + "expr": "feast_ingestion_feature_presence_min_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", "legendFormat": "feature_presence_min_count", "refId": "B" } @@ -532,7 +533,8 @@ "label": null, "logBase": 1, "max": null, - "min": null, + "min": 0, + "decimals": 0, "show": true }, { @@ -635,8 +637,8 @@ "format": "short", "label": null, "logBase": 1, - "max": null, - "min": null, + "max": 1, + "min": 0, "show": true }, { @@ -737,22 +739,22 @@ "steppedLine": false, "targets": [ { - "expr": "feast_ingestion_feature_value_min{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\"}", + "expr": "feast_ingestion_feature_value_min{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", "legendFormat": "min_val", "refId": "A" }, { - "expr": 
"feast_ingestion_feature_value_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "expr": "feast_ingestion_feature_value_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", "legendFormat": "max_val", "refId": "B" }, { - "expr": "feast_ingestion_feature_value_domain_min{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "expr": "feast_ingestion_feature_value_domain_min{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", "legendFormat": "min_domain", "refId": "C" }, { - "expr": "feast_ingestion_feature_value_domain_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "expr": "feast_ingestion_feature_value_domain_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", "legendFormat": "max_domain", "refId": "D" } @@ -856,7 +858,7 @@ "steppedLine": false, "targets": [ { - "expr": "increase(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\"}[$__range])", + "expr": "increase(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}[$__range])", "instant": false, "interval": "", "intervalFactor": 1, @@ -864,7 +866,7 @@ "refId": "A" }, { - "expr": "feast_ingestion_feature_presence_min_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\"}", + "expr": "feast_ingestion_feature_presence_min_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", "legendFormat": "feature_presence_min_count", "refId": "B" } @@ -893,7 +895,8 @@ "label": null, "logBase": 1, "max": null, - "min": null, + "min": 0, + "decimals": 0, "show": true }, { @@ -999,8 +1002,8 @@ "format": "short", "label": null, "logBase": 1, - "max": null, - "min": null, + "max": 1, + "min": 0, "show": true }, { @@ -1101,22 +1104,22 @@ "steppedLine": false, "targets": [ { - "expr": "feast_ingestion_feature_value_min{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\"}", + "expr": "feast_ingestion_feature_value_min{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", "legendFormat": "min_val", "refId": "A" }, { - "expr": "feast_ingestion_feature_value_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "expr": "feast_ingestion_feature_value_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", "legendFormat": "max_val", "refId": "B" }, { - "expr": "feast_ingestion_feature_value_domain_min{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "expr": "feast_ingestion_feature_value_domain_min{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", "legendFormat": "min_domain", "refId": "C" }, { - "expr": "feast_ingestion_feature_value_domain_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\"}", + "expr": "feast_ingestion_feature_value_domain_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", "legendFormat": "max_domain", "refId": "D" } @@ -1220,7 +1223,7 @@ "steppedLine": false, "targets": [ { - "expr": "increase(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\", 
feast_feature_name=~\"[[feature]]\"}[$__range])", + "expr": "increase(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}[$__range])", "instant": false, "interval": "", "intervalFactor": 1, @@ -1228,7 +1231,7 @@ "refId": "A" }, { - "expr": "feast_ingestion_feature_presence_min_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\"}", + "expr": "feast_ingestion_feature_presence_min_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", "legendFormat": "feature_presence_min_count", "refId": "B" } @@ -1257,7 +1260,8 @@ "label": null, "logBase": 1, "max": null, - "min": null, + "min": 0, + "decimals": 0, "show": true }, { @@ -1363,8 +1367,8 @@ "format": "short", "label": null, "logBase": 1, - "max": null, - "min": null, + "max": 1, + "min": 0, "show": true }, { @@ -1394,7 +1398,7 @@ "text": "project1", "value": "project1" }, - "datasource": "Prometheus", + "datasource": null, "definition": "label_values(feast_project_name)", "hide": 0, "includeAll": false, @@ -1423,7 +1427,7 @@ "feature2" ] }, - "datasource": "Prometheus", + "datasource": null, "definition": "label_values(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\"},feast_feature_name)", "hide": 0, "includeAll": false, @@ -1441,6 +1445,31 @@ "tagsQuery": "", "type": "query", "useTags": false + }, + { + "allValue": null, + "current": { + "text": "redis", + "value": "redis" + }, + "datasource": null, + "definition": "label_values(feast_store)", + "hide": 0, + "includeAll": false, + "label": null, + "multi": false, + "name": "store", + "options": [], + "query": "label_values(feast_store)", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 5, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false } ] }, @@ -1464,6 +1493,5 @@ }, "timezone": "", "title": "Feast Features Dashboard", - "uid": "ywufPPyWz", - "version": 45 + "version": 1 } \ No newline at end of file From 1fe767a0efd6b4ea66698fc9569831def97e46f1 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Mon, 10 Feb 2020 02:54:08 +0800 Subject: [PATCH 26/31] Add sample csv for data validation --- .../data_validation/bikeshare_stations.csv | 97 +++++++++++++++++++ 1 file changed, 97 insertions(+) create mode 100644 examples/data_validation/bikeshare_stations.csv diff --git a/examples/data_validation/bikeshare_stations.csv b/examples/data_validation/bikeshare_stations.csv new file mode 100644 index 0000000000..ef9f056518 --- /dev/null +++ b/examples/data_validation/bikeshare_stations.csv @@ -0,0 +1,97 @@ +station_id,name,status,latitude,longitude,location +3793,Rio Grande & 28th,active,30.29333,-97.74412,"(30.29333, -97.74412)" +3291,11th & San Jacinto,active,30.27193,-97.73854,"(30.27193, -97.73854)" +4058,Hollow Creek & Barton Hills,active,30.26139,-97.77234,"(30.26139, -97.77234)" +3797,21st & University,active,30.28354,-97.73953,"(30.28354, -97.73953)" +3838,Nueces & 26th,active,30.29068,-97.74292,"(30.29068, -97.74292)" +2544,East 6th & Pedernales St.,active,30.25895,-97.71475,"(30.25895, -97.71475)" +3390,Brazos & 6th,active,30.26754,-97.74154,"(30.26754, -97.74154)" +2823,Capital Metro HQ - East 5th at Broadway,active,30.2563,-97.71007,"(30.2563, -97.71007)" +2711,Barton Springs @ Kinney Ave,active,30.262,-97.76118,"(30.262, -97.76118)" +3685,Henderson & 9th,active,30.27217,-97.75246,"(30.27217, -97.75246)" +4052,Rosewood & 
Angelina,active,30.26888,-97.72431,"(30.26888, -97.72431)" +3621,Nueces & 3rd,active,30.26697,-97.74929,"(30.26697, -97.74929)" +3686,Sterzing at Barton Springs,active,30.26406,-97.76385,"(30.26406, -97.76385)" +3684,Congress & Cesar Chavez,active,30.26332,-97.74508,"(30.26332, -97.74508)" +4055,11th & Salina,active,30.26638,-97.7214,"(30.26638, -97.7214)" +2566,Pfluger Bridge @ W 2nd Street,active,30.26717,-97.75484,"(30.26717, -97.75484)" +4059,Nash Hernandez @ RBJ South,active,30.25189,-97.73323,"(30.25189, -97.73323)" +3635,13th & San Antonio,active,30.27616,-97.74488,"(30.27616, -97.74488)" +3513,South Congress & Barton Springs at the Austin American-Statesman,active,30.25839,-97.74592,"(30.25839, -97.74592)" +4050,5th & Campbell,active,30.27489,-97.76483,"(30.27489, -97.76483)" +2499,City Hall / Lavaca & 2nd,active,30.26476,-97.74678,"(30.26476, -97.74678)" +2568,East 11th St. at Victory Grill,active,30.26896,-97.72843,"(30.26896, -97.72843)" +2575,Riverside @ S. Lamar,active,30.26446,-97.75665,"(30.26446, -97.75665)" +2502,Barton Springs & Riverside,active,30.2587,-97.74872,"(30.2587, -97.74872)" +2563,Davis at Rainey Street,active,30.26019,-97.73845,"(30.26019, -97.73845)" +4061,Lakeshore @ Austin Hostel,active,30.24472,-97.72336,"(30.24472, -97.72336)" +2707,Rainey St @ Cummings,active,30.25579,-97.73982,"(30.25579, -97.73982)" +2549,Long Center @ South 1st & Riverside,active,30.25941,-97.74971,"(30.25941, -97.74971)" +2561,State Capitol Visitors Garage @ San Jacinto & 12th,active,30.27336,-97.73805,"(30.27336, -97.73805)" +2494,2nd & Congress,active,30.26408,-97.74355,"(30.26408, -97.74355)" +2570,South Congress & Academy,active,30.25226,-97.74854,"(30.25226, -97.74854)" +2540,17th & Guadalupe,active,30.27974,-97.74254,"(30.27974, -97.74254)" +2498,Convention Center / 4th St. 
@ MetroRail,active,30.26483,-97.739,"(30.26483, -97.739)" +3794,Dean Keeton & Speedway,active,30.28953,-97.73695,"(30.28953, -97.73695)" +2547,Guadalupe & 21st,active,30.28395,-97.74198,"(30.28395, -97.74198)" +2501,5th & Bowie,active,30.2696,-97.75332,"(30.2696, -97.75332)" +2572,Barton Springs Pool,active,30.26452,-97.7712,"(30.26452, -97.7712)" +2567,Palmer Auditorium,active,30.25971,-97.75346,"(30.25971, -97.75346)" +3799,23rd & San Jacinto @ DKR Stadium,active,30.2856,-97.7335,"(30.2856, -97.7335)" +3660,Medina & East 6th,active,30.26455,-97.73165,"(30.26455, -97.73165)" +3791,Lake Austin & Enfield,active,30.29439,-97.78375,"(30.29439, -97.78375)" +4057,6th & Chalmers,active,30.26269,-97.72438,"(30.26269, -97.72438)" +2537,West & 6th St.,active,30.27041,-97.75046,"(30.27041, -97.75046)" +2503,South Congress & James,active,30.25103,-97.74926,"(30.25103, -97.74926)" +3792,22nd & Pearl,active,30.2853,-97.7467,"(30.2853, -97.7467)" +3293,East 2nd & Pedernales,active,30.25542,-97.71665,"(30.25542, -97.71665)" +4062,Lakeshore & Pleasant Valley,active,30.24258,-97.71726,"(30.24258, -97.71726)" +2571,Red River & 8th Street,active,30.26854,-97.73646,"(30.26854, -97.73646)" +3798,21st & Speedway @PCL,active,30.283,-97.7375,"(30.283, -97.7375)" +2548,UT West Mall @ Guadalupe,active,30.28576,-97.74181,"(30.28576, -97.74181)" +3795,Dean Keeton & Whitis,active,30.2898,-97.74041,"(30.2898, -97.74041)" +2542,Plaza Saltillo,active,30.26217,-97.72743,"(30.26217, -97.72743)" +3619,6th & Congress,active,30.26822,-97.74285,"(30.26822, -97.74285)" +4048,South Congress @ Bouldin Creek,active,30.25495,-97.74755,"(30.25495, -97.74755)" +3790,Lake Austin Blvd @ Deep Eddy,active,30.27807,-97.77272,"(30.27807, -97.77272)" +2495,4th & Congress,active,30.26634,-97.74378,"(30.26634, -97.74378)" +3841,23rd & Rio Grande,active,30.28728,-97.74495,"(30.28728, -97.74495)" +2562,San Jacinto & 8th Street,active,30.26912,-97.73986,"(30.26912, -97.73986)" +2822,East 6th at Robert Martinez,active,30.26032,-97.71899,"(30.26032, -97.71899)" +4047,8th & Lavaca,active,30.27059,-97.74441,"(30.27059, -97.74441)" +3292,East 4th & Chicon,active,30.25987,-97.72373,"(30.25987, -97.72373)" +2497,Capitol Station / Congress & 11th,active,30.2726,-97.74127,"(30.2726, -97.74127)" +3377,MoPac Pedestrian Bridge @ Veterans Drive,active,30.27466,-97.77028,"(30.27466, -97.77028)" +3687,Boardwalk West,active,30.25457,-97.74258,"(30.25457, -97.74258)" +2504,South Congress & Elizabeth,active,30.24891,-97.75019,"(30.24891, -97.75019)" +1001,OFFICE/Main/Shop/Repair,active,30.27186,-97.73997,"(30.27186, -97.73997)" +2574,Zilker Park,active,30.2659,-97.76822,"(30.2659, -97.76822)" +2569,East 11th St. 
& San Marcos,active,30.26968,-97.73074,"(30.26968, -97.73074)" +2539,Convention Center / 3rd & Trinity,active,30.26426,-97.74023,"(30.26426, -97.74023)" +3294,Lavaca & 6th,active,30.26911,-97.7462,"(30.26911, -97.7462)" +2496,8th & Congress,active,30.2698,-97.74186,"(30.2698, -97.74186)" +2565,Trinity & 6th Street,active,30.26735,-97.73933,"(30.26735, -97.73933)" +4054,Rosewood & Chicon,active,30.26969,-97.71873,"(30.26969, -97.71873)" +2552,3rd & West,active,30.2678,-97.75189,"(30.2678, -97.75189)" +3455,Republic Square @ 5th & Guadalupe,active,30.26753,-97.74805,"(30.26753, -97.74805)" +4060,Red River/Cesar Chavez @ The Fairmont,active,30.26212,-97.73815,"(30.26212, -97.73815)" +4051,10th & Red River,active,30.27024,-97.73578,"(30.27024, -97.73578)" +2546,ACC - West & 12th Street,closed,30.27624,-97.74831,"(30.27624, -97.74831)" +1008,Nueces @ 3rd,closed,30.26694,-97.74939,"(30.26694, -97.74939)" +2550,Republic Square @ Guadalupe & 4th St.,closed,30.26774,-97.74692,"(30.26774, -97.74692)" +2538,Bullock Museum @ Congress & MLK,closed,30.28039,-97.73809,"(30.28039, -97.73809)" +1002,6th & Navasota St.,closed,30.26383,-97.72864,"(30.26383, -97.72864)" +2541,State Capitol @ 14th & Colorado,closed,30.27654,-97.74155,"(30.27654, -97.74155)" +2545,ACC - Rio Grande & 12th,closed,30.27595,-97.74739,"(30.27595, -97.74739)" +3464,Pease Park,closed,30.28118,-97.75219,"(30.28118, -97.75219)" +2712,Toomey Rd @ South Lamar,closed,30.26304,-97.75824,"(30.26304, -97.75824)" +2576,Rainey @ River St,closed,30.25802,-97.7391,"(30.25802, -97.7391)" +3381,East 7th & Pleasant Valley,closed,30.26025,-97.71002,"(30.26025, -97.71002)" +1006,Zilker Park West,closed,30.26587,-97.76826,"(30.26587, -97.76826)" +2500,Republic Square,closed,30.26751,-97.74802,"(30.26751, -97.74802)" +2536,Waller & 6th St.,closed,30.26461,-97.73049,"(30.26461, -97.73049)" +1007,Lavaca & 6th,closed,30.26889,-97.74525,"(30.26889, -97.74525)" +1003,8th & Guadalupe,closed,30.27106,-97.74563,"(30.27106, -97.74563)" +2564,5th & San Marcos,closed,30.26416,-97.73289,"(30.26416, -97.73289)" +1004,Red River & LBJ Library,closed,30.2848,-97.72756,"(30.2848, -97.72756)" +1005,State Parking Garage @ Brazos & 18th,closed,30.27907,-97.73715,"(30.27907, -97.73715)" From 55930400cca9d0b88441c92c30e07a81ffabe7b3 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Mon, 10 Feb 2020 03:16:18 +0800 Subject: [PATCH 27/31] Add sample notebook for working with schema for data validation --- .../data_validation/working_with_schema.ipynb | 761 ++++++++++++++++++ 1 file changed, 761 insertions(+) create mode 100644 examples/data_validation/working_with_schema.ipynb diff --git a/examples/data_validation/working_with_schema.ipynb b/examples/data_validation/working_with_schema.ipynb new file mode 100644 index 0000000000..3bba92c35d --- /dev/null +++ b/examples/data_validation/working_with_schema.ipynb @@ -0,0 +1,761 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Data Validation in Feast\n", + "\n", + "Feast allows users to specify a **schema** that specifies the value, shape and presence constraints \n", + "of the features they are ingesting. This schema is compatible with the schema defined in Tensorflow\n", + "metadata.\n", + "\n", + "cp https://github.com/tensorflow/metadata/blob/master/tensorflow_metadata/proto/v0/schema.proto.\n", + "\n", + "This means that you can import an existing Tensorflow metadata schema into Feast and Feast can\n", + "check that the features ingested fulfill the schema provided. 
In Feast v0.5, however, only feature\n", + "value domains and presence will be validated during ingestion.\n", + "\n", + "For more information regarding Tensorflow data validation, please check these documentations:\n", + "- https://www.tensorflow.org/tfx/data_validation/get_started\n", + "- https://colab.research.google.com/github/tensorflow/tfx/blob/master/docs/tutorials/data_validation/tfdv_basic.ipynb" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1. Importing Tensorflow metadata schema to Feast" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from feast import Client, FeatureSet\n", + "import tensorflow_data_validation as tfdv\n", + "from google.protobuf import text_format\n", + "import pandas as pd" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "bikeshare_stations.csv\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "--2020-02-10 03:11:51-- https://raw.githubusercontent.com/davidheryanto/feast/update-ingestion-metrics-for-validation/examples/data_validation/bikeshare_stations.csv\n", + "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 151.101.64.133, 151.101.128.133, 151.101.192.133, ...\n", + "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|151.101.64.133|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 7492 (7.3K) [text/plain]\n", + "Saving to: ‘bikeshare_stations.csv.5’\n", + "\n", + " 0K ....... 100% 12.9M=0.001s\n", + "\n", + "2020-02-10 03:11:51 (12.9 MB/s) - ‘bikeshare_stations.csv.5’ saved [7492/7492]\n", + "\n" + ] + } + ], + "source": [ + "%%bash\n", + "# Sample data from BigQuery public dataset: bikeshare stations\n", + "# https://cloud.google.com/bigquery/public-data\n", + "wget https://raw.githubusercontent.com/davidheryanto/feast/update-ingestion-metrics-for-validation/examples/data_validation/bikeshare_stations.csv\n", + "ls *.csv" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
station_idnamestatuslatitudelongitudelocation
03793Rio Grande & 28thactive30.29333-97.74412(30.29333, -97.74412)
1329111th & San Jacintoactive30.27193-97.73854(30.27193, -97.73854)
24058Hollow Creek & Barton Hillsactive30.26139-97.77234(30.26139, -97.77234)
\n", + "
" + ], + "text/plain": [ + " station_id name status latitude longitude \\\n", + "0 3793 Rio Grande & 28th active 30.29333 -97.74412 \n", + "1 3291 11th & San Jacinto active 30.27193 -97.73854 \n", + "2 4058 Hollow Creek & Barton Hills active 30.26139 -97.77234 \n", + "\n", + " location \n", + "0 (30.29333, -97.74412) \n", + "1 (30.27193, -97.73854) \n", + "2 (30.26139, -97.77234) " + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pd.read_csv(\"bikeshare_stations.csv\").head(3)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "%%bash\n", + "cat < bikeshare_stations_feature_set.yaml\n", + "\n", + "spec:\n", + " name: bikeshare_stations\n", + " entities:\n", + " - name: station_id\n", + " valueType: INT64\n", + " features:\n", + " - name: name\n", + " valueType: STRING\n", + " - name: status\n", + " valueType: STRING\n", + " - name: latitude\n", + " valueType: FLOAT\n", + " - name: longitude\n", + " valueType: FLOAT\n", + " - name: location\n", + " valueType: STRING\n", + " maxAge: 3600s\n", + "\n", + "EOF" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\n", + " \"spec\": {\n", + " \"name\": \"bikeshare_stations\",\n", + " \"entities\": [\n", + " {\n", + " \"name\": \"station_id\",\n", + " \"valueType\": \"INT64\"\n", + " }\n", + " ],\n", + " \"features\": [\n", + " {\n", + " \"name\": \"name\",\n", + " \"valueType\": \"STRING\"\n", + " },\n", + " {\n", + " \"name\": \"status\",\n", + " \"valueType\": \"STRING\"\n", + " },\n", + " {\n", + " \"name\": \"latitude\",\n", + " \"valueType\": \"FLOAT\"\n", + " },\n", + " {\n", + " \"name\": \"longitude\",\n", + " \"valueType\": \"FLOAT\"\n", + " },\n", + " {\n", + " \"name\": \"location\",\n", + " \"valueType\": \"STRING\"\n", + " }\n", + " ],\n", + " \"maxAge\": \"3600s\"\n", + " },\n", + " \"meta\": {\n", + " \"createdTimestamp\": \"1970-01-01T00:00:00Z\"\n", + " }\n", + "}\n" + ] + } + ], + "source": [ + "# Create a FeatureSet bikeshare_stations\n", + "bikeshare_stations_feature_set = FeatureSet.from_yaml(\"bikeshare_stations_feature_set.yaml\")\n", + "print(bikeshare_stations_feature_set)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "WARNING:apache_beam.runners.interactive.interactive_environment:Dependencies required for Interactive Beam PCollection visualization are not available, please use: `pip install apache-beam[interactive]` to install necessary dependencies to enable all data visualization features.\n", + "/home/dheryanto/miniconda3/envs/feast/lib/python3.7/site-packages/tensorflow_data_validation/arrow/arrow_util.py:214: FutureWarning: Calling .data on ChunkedArray is provided for compatibility after Column was removed, simply drop this attribute\n", + " types.FeaturePath([column_name]), column.data.chunk(0), weights):\n", + "WARNING:apache_beam.io.tfrecordio:Couldn't find python-snappy so the implementation of _TFRecordUtil._masked_crc32c is not as fast as it could be.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "WARNING:tensorflow:From /home/dheryanto/miniconda3/envs/feast/lib/python3.7/site-packages/tensorflow_data_validation/utils/stats_gen_lib.py:366: tf_record_iterator (from tensorflow.python.lib.io.tf_record) is deprecated and will be removed in a 
future version.\n", + "Instructions for updating:\n", + "Use eager execution and: \n", + "`tf.data.TFRecordDataset(path)`\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "WARNING:tensorflow:From /home/dheryanto/miniconda3/envs/feast/lib/python3.7/site-packages/tensorflow_data_validation/utils/stats_gen_lib.py:366: tf_record_iterator (from tensorflow.python.lib.io.tf_record) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "Use eager execution and: \n", + "`tf.data.TFRecordDataset(path)`\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
TypePresenceValencyDomain
Feature name
'station_id'INTrequired-
'name'BYTESrequired-
'status'STRINGrequired'status'
'latitude'FLOATrequired-
'longitude'FLOATrequired-
'location'BYTESrequired-
\n", + "
" + ], + "text/plain": [ + " Type Presence Valency Domain\n", + "Feature name \n", + "'station_id' INT required -\n", + "'name' BYTES required -\n", + "'status' STRING required 'status'\n", + "'latitude' FLOAT required -\n", + "'longitude' FLOAT required -\n", + "'location' BYTES required -" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Values
Domain
'status''active', 'closed'
\n", + "
" + ], + "text/plain": [ + " Values\n", + "Domain \n", + "'status' 'active', 'closed'" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Use Tensorflow Data Validation (tfdv) to create a schema from the csv\n", + "train_stats = tfdv.generate_statistics_from_csv(data_location=\"bikeshare_stations.csv\")\n", + "schema = tfdv.infer_schema(statistics=train_stats, max_string_domain_size=10)\n", + "tfdv.display_schema(schema=schema)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\n", + " \"spec\": {\n", + " \"name\": \"bikeshare_stations\",\n", + " \"entities\": [\n", + " {\n", + " \"name\": \"station_id\",\n", + " \"valueType\": \"INT64\",\n", + " \"presence\": {\n", + " \"minFraction\": 1.0,\n", + " \"minCount\": \"1\"\n", + " },\n", + " \"shape\": {\n", + " \"dim\": [\n", + " {\n", + " \"size\": \"1\"\n", + " }\n", + " ]\n", + " }\n", + " }\n", + " ],\n", + " \"features\": [\n", + " {\n", + " \"name\": \"name\",\n", + " \"valueType\": \"STRING\",\n", + " \"presence\": {\n", + " \"minFraction\": 1.0,\n", + " \"minCount\": \"1\"\n", + " },\n", + " \"shape\": {\n", + " \"dim\": [\n", + " {\n", + " \"size\": \"1\"\n", + " }\n", + " ]\n", + " }\n", + " },\n", + " {\n", + " \"name\": \"status\",\n", + " \"valueType\": \"STRING\",\n", + " \"presence\": {\n", + " \"minFraction\": 1.0,\n", + " \"minCount\": \"1\"\n", + " },\n", + " \"shape\": {\n", + " \"dim\": [\n", + " {\n", + " \"size\": \"1\"\n", + " }\n", + " ]\n", + " },\n", + " \"stringDomain\": {\n", + " \"name\": \"status\",\n", + " \"value\": [\n", + " \"active\",\n", + " \"closed\"\n", + " ]\n", + " }\n", + " },\n", + " {\n", + " \"name\": \"latitude\",\n", + " \"valueType\": \"FLOAT\",\n", + " \"presence\": {\n", + " \"minFraction\": 1.0,\n", + " \"minCount\": \"1\"\n", + " },\n", + " \"shape\": {\n", + " \"dim\": [\n", + " {\n", + " \"size\": \"1\"\n", + " }\n", + " ]\n", + " }\n", + " },\n", + " {\n", + " \"name\": \"longitude\",\n", + " \"valueType\": \"FLOAT\",\n", + " \"presence\": {\n", + " \"minFraction\": 1.0,\n", + " \"minCount\": \"1\"\n", + " },\n", + " \"shape\": {\n", + " \"dim\": [\n", + " {\n", + " \"size\": \"1\"\n", + " }\n", + " ]\n", + " }\n", + " },\n", + " {\n", + " \"name\": \"location\",\n", + " \"valueType\": \"STRING\",\n", + " \"presence\": {\n", + " \"minFraction\": 1.0,\n", + " \"minCount\": \"1\"\n", + " },\n", + " \"shape\": {\n", + " \"dim\": [\n", + " {\n", + " \"size\": \"1\"\n", + " }\n", + " ]\n", + " }\n", + " }\n", + " ],\n", + " \"maxAge\": \"3600s\"\n", + " },\n", + " \"meta\": {\n", + " \"createdTimestamp\": \"1970-01-01T00:00:00Z\"\n", + " }\n", + "}\n" + ] + } + ], + "source": [ + "# Import the schema into the FeatureSet\n", + "bikeshare_stations_feature_set.import_tfx_schema(schema)\n", + "print(bikeshare_stations_feature_set)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now that the FeatureSet has imported the schema, Prometheus metrics will be exported during ingestion, which\n", + "can be used to check if the features ingested fulfill the requirements." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2. Exporting Tensorflow metadata schema from Feast\n", + "\n", + "The following scenario is for users who have created a FeatureSet and used Feast to ingest features. During training,\n", + "they want to run batch validation using Tensorflow data validation utility. 
Rather than attempting to recreate the\n", + "schema from scratch, users can export the existing one from the FeatureSet.\n", + "\n", + "This ensures that the schema that is currently applied for Feast ingestion will be consistent to the one used in\n", + "batch validation with Tensorflow data validation." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
TypePresenceValencyDomain
Feature name
'name'BYTESrequired-
'status'STRINGrequired'status'
'latitude'FLOATrequired-
'longitude'FLOATrequired-
'location'BYTESrequired-
'station_id'INTrequired-
\n", + "
" + ], + "text/plain": [ + " Type Presence Valency Domain\n", + "Feature name \n", + "'name' BYTES required - \n", + "'status' STRING required 'status'\n", + "'latitude' FLOAT required - \n", + "'longitude' FLOAT required - \n", + "'location' BYTES required - \n", + "'station_id' INT required - " + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Values
Domain
'status''active', 'closed'
\n", + "
" + ], + "text/plain": [ + " Values\n", + "Domain \n", + "'status' 'active', 'closed'" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "exported_tfx_schema = bikeshare_stations_feature_set.export_tfx_schema()\n", + "tfdv.display_schema(exported_tfx_schema)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} From 17083738e1c82db2d49cdb55452c22b32373c80a Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Mon, 10 Feb 2020 09:38:12 +0800 Subject: [PATCH 28/31] Update validation schema in test dataset --- tests/e2e/basic/cust_trans_fs.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/e2e/basic/cust_trans_fs.yaml b/tests/e2e/basic/cust_trans_fs.yaml index f0041d3d6b..bcc2bad090 100644 --- a/tests/e2e/basic/cust_trans_fs.yaml +++ b/tests/e2e/basic/cust_trans_fs.yaml @@ -8,7 +8,7 @@ spec: min: 1000 max: 100005 presence: - minFraction: 1.0 + minFraction: 0.99 minCount: 1 shape: dim: @@ -20,7 +20,7 @@ spec: min: 0 max: 1 presence: - minFraction: 1.0 + minFraction: 0.90 minCount: 1 shape: dim: @@ -28,10 +28,10 @@ spec: - name: total_transactions valueType: FLOAT floatDomain: - min: 512 - max: 512 + min: 500 + max: 520 presence: - minFraction: 1.0 + minFraction: 0.90 minCount: 1 shape: dim: From 861f21d252df41da28483e26fb2ef27a27fcf5f2 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Mon, 10 Feb 2020 09:39:23 +0800 Subject: [PATCH 29/31] Add grafana and prometheus dependency in feast-core --- .../feast-core/charts/grafana-4.6.3.tgz | Bin 0 -> 19220 bytes .../feast-core/charts/prometheus-10.4.0.tgz | Bin 0 -> 27158 bytes .../templates/service.yaml | 8 ++- .../prometheus-statsd-exporter/values.yaml | 4 +- .../feast/charts/feast-core/requirements.lock | 18 +++++++ .../feast/charts/feast-core/requirements.yaml | 13 +++-- .../feast-core/templates/configmap.yaml | 11 ---- .../feast-core/templates/deployment.yaml | 1 - .../feast/charts/feast-core/values.yaml | 48 ++++++++++++++++++ 9 files changed, 84 insertions(+), 19 deletions(-) create mode 100644 infra/charts/feast/charts/feast-core/charts/grafana-4.6.3.tgz create mode 100644 infra/charts/feast/charts/feast-core/charts/prometheus-10.4.0.tgz create mode 100644 infra/charts/feast/charts/feast-core/requirements.lock diff --git a/infra/charts/feast/charts/feast-core/charts/grafana-4.6.3.tgz b/infra/charts/feast/charts/feast-core/charts/grafana-4.6.3.tgz new file mode 100644 index 0000000000000000000000000000000000000000..279d7f509bcd363e9966eff8b618301cfd79869e GIT binary patch literal 19220 zcmV)lK%c)KiwG0|00000|0w_~VMtOiV@ORlOnEsqVl!4SWK%V1T2nbTPgYhoO;>Dc zVQyr3R8em|NM&qo0PMYcR~tFfFus5LQ`BR!3(Sk#h7dB3pUs?QfMj_lA^d>MoITmR zAa~2QmUgR0Qk!@Jzt8?2U0Z##&1J}3=si2Z-6g41lIo^XX~fVFC8)bMMoa`VG>QN6 z;7_mD>%DyOLjBw8^~(Ql_O_nezxDggt?d`Rzre#|QuAb7AoiDD@4oevJNG+zP)dHp zj1!vlVY=BuY3hFN1Uo^m6=5DSk_z?eY>XksC?d%S1f?+yDTALg%x3ZpN+OsNo}rlh zq9Diw1ILVDuD(sMU?k*03nwFz;699~NaNO+PH-Q_LZrOk?V1T0Bv`aaC>yn37`h5Z zM2xdR#xVMJK4$R^# zyd?M^A>kn{z%rpf<**Y(SukV-M-!C%gW+h5M4^ZT7_yfBVY=DkG-DxF2s-L1`qUAn zB}g1&=DNE2-K(u&b1Uerwx;&ocCZsXZ+-14`Sd*EV`s?}#Tn+mjsTd?|DSJdJ>Mzw z|Cd`&{QprNHb7y&1rRcf1nz@i8EyfjJUvLzAjXmZ>I28SX}ZfMl)2ZECGix65^{)Q zF5ltLf}s-{g 
zr!gW4q%?{MXPJ_ogDe_h5%3g;7fNnUD8sTS!^2$2$tqZa1?B;v-H3*~D-gfrU6v&s zl&VH$ZQYP2Lo&)3?xfM6+e&G)@0$8vHPweNHxls)Np`1*#Bw+#y7CqJ{2@X9%f>!L zIF0FSB1Nr4J>Y^Nfk(63^&zB5h*QD4SCn1G6h-QlU{uN^-pw27uGj`z`O8!DN{)$` zrMM3#G>%C!`jAFQU^yC6C&8Q_GdfU1Cvka$)!+i8NR0c?MJefyF^a`lzoks{;d!su zYqerB#rG%=KoTMdiuZAhW~Vr$NyPiGBfm&3h-SjQd@f%O5s5Q~&&CY%F^!`>Z1&U` zeBc;R0w@-kB}ic4;TT6*jM)a@U=%=`3(79r!SXW)MH7+`xqLdIUPj*JnPjmbX^cCr z9o}uNth840hL4oe(T?yCWwFQs+pQLvpphaM!ztxNP&RYwiBmiP1R7a&5+Kdu_?X5d zob};wctpi9!yG4~CD-W+yjL?A#qkW}d?Uk|q6cGW26PCj#p5iFPjSew;DIh>e!(Q; z0tOf+D9KbEdSypI5)fmYb1PGBISP7fjYb_gag}P=m0js5j5;YJQzUSQ;Su42&AO_7 z#d^{l+rHsN9hl5cG~^F>rawwDw^{;o@tUCtmJ3CK;}RRzB!_ksWpV$u9rEG3&$j%Pl!Kx#Fk#XRD?8ucE*NrSBxm z^x3agNkhy8?}Qf0baNtskM-igB6KP#=W*A7@NA#~WDR5}xf|v|$ONpw8Z~oa-2m(Q z-=VarLCFW+C-KZs%OHO+NyTiRum;>yMyEt7z$6)U5-PF4j4$YdU#OcGQfqjZb23T* z!7(FLQ9DMQ%kc$4rBL8NhQP6Ctw34^>9<-Dy-FmH+(&#opom5Mux_29G-Whp1PR;? zGZxz^5K9t@xht*Of}UzqJxf+r;Fw0Rn@c>2DUG}r1;DpxfMWxr|JI*V8XZ$6jtW+b zEVS=fyDwH-D+M8ITR8ZfQjXQ~HFbcI;F6pJ3@8jS=Qx5PqZ6Q+;3U%UhTp;3TZ*FZ zQH+uhGh2FH@leW4c_2bpa7Dz}unSk-=pTp2d0{xkLIF8sW+_&7u6!xirbJJE#6U!m zU)M7mY1Vc|8HsS$)Tw!~`H@RMhfT{$p>LI>L2QJ@u`ib7B=%o#Z+^Q8XNqc>q?qh0 zAh`rS&%my9&D~m184Z9qA6?zF9$SN9t3{F#!<-iwm(>cZwMp!XbTH+EP%ei)BqNf1 zt_2h^@1Rf#Cb`tx72Ncam6Dut&3Gl-4abOy4*rZLX^exIhA7s8;6V0WmEvlVGMr*L zloaq3vze*|tQ=6C2;gi?I1nx=Im}|X8sh{c+a}4#5Z3h=21t_I4r^fbTFF^_j%3jO z#&Dkj3NLXIS+N1O0z-8kcNmTc!=Wfc=;6*d?&L_wDH3}saYxDsBYFBo(sV>@Ibcpe z-9Q5sTb|CX_0E_~h%Bgzg=r>roZdwLoZyM#VVhguz9aG#!#`)3F9$A{(h-fY8F8hE zrZfVi_^#wC%P2?QYD;K@I~>P2RH}7rr6u2-su$AKF^uSp##oR2UE6eOaA78O<$@C( zltdljnoui0|EY&fNtfYJDx0&1ML}1h^c>?g}!^D6~)_NXNdBh}pk2ZPZjgOo(vI zBS%-8ky3{{HR_zxpQKV&J3d$`Xi;%p~nVM`-~O zX0Np0&Atv}f)mjp(W}xMz24kgU8+Ob=+)ofpS(Fg+I@HMo&Bg>gPn*@5J_Hr<0_)` zlHhVha=beziX`uRWduwwQ2*;omT%#*lnaK@q?57O!fkH7RFIo}B{NoxWn))jk!YPO zJTUJeAIMy`_(Psz#--Vjgi_eZ(L1(p1pG+jY=U7gMr6WoCmUV8qBB*sXesG6mrKfY z8rF%T6_8dG8sS{+0%PAuL8bJ8cQnFIZk%8g{ltjC?~_ngz{xMtZ0@}wsRZcl}?48tT5KH-b)PmM zLIMH3l9PQ*Qn(^<3<*`nkktAFAOTz;CbCo)RSQimF0M5eMkiQ|amG7JX<-k)T}UTsrVKnI=A4CB6nsidTG3}@W-iUli=sSJFLMF7T%lHL1$K>P z&yb)@n!1r>j40+>7m>rS7amI7Y%($*);Pv6z%jjAZz&IfHUZV2QW2N`9wW}LD2w{Q z1*6F*HNR!s#!*qORthCKn^buuBqfV?K(0>oYb#zG9`>ET?M9y5y>|s z_d!B4tN~U74>S?f^E@h>x#gRuZM9$Vn$bzSwo=(Y%%m(RRgWS$>NT}5K@(SJcZ3tE zm*L0*>!6f$r<=j%%kBzJrc=Z;Tj)$TJE3utp5P?HjLU(}St3ty2uR9Xxy)rR^F@UPu>Z@-fy{Lz(gYOV2$0UZLe>)05; zT)xlynETJ*YD~hh?S;mJTYKEnSk~Z;PRmN25GBR}m3EV48B(bOK`(K_Hvo%JVk(jJ zIED65C!EOPU1B2ny-)C!?HC=%WqM0g^tf4M4Bd94h-m**!7rg445}&RJK`mZ^tQ(VobMEH_x_`GgC~AGD7mN9NY|+ctjQQdaU$Ar>TxlMx zngLk#hU?HD${~#-sjjcC7AJ{O7u6FHF2&-$B)e)5kzy28O9T$IyjYo10 ziLBwgXfy={GD#^@!uRO?*+C!9sFb`pjApNDTu{uZu`F|)6RNbYC~kIINX5J|@?J|R zW{&9YeR^`C7uO^5zkFLyF#qYq!Zk6V7asOKU@<2I(MGJvy}6v+42juI^7+}Uz(7}kr!f0r}y0^8P~m4~+uMaQ-! 
z;k^al&CU6)z+Cv4mMZHE5XU>)x39A^*xB~$1sLpXo2Wof0GwE*UHS4qE88eG^Yk4` z3)3J_HdNg-P>ustNS3i5l-8?M^xeFaH^nT2vG$#2(9$HWBEo?xho~2bwV^C{lE8q9 zagZ-BiGp(X80z%hzTWvdwalKaUmC`m})Z2lO(o1#e_`w~cf zkLifp?{wtFEC4t~u2R~3X*m{dM9rK%VzvK;;+Y_!!7Lo(@bY{}*?Fd-Sh&+mV>;3x z6B^;Zq#38N{9r0RPi$aHVXgR)vnwm`c7OL6c8kt(W9=A+owh!tayz7POs_~XvcpE5 zCqmLRA5}B_dOU}S#A=UIASQqeRaM&bl!jCsp)?S55(AO*V2WZAX>VyJ#)1557kxR# zLHXGwM(3Q2lJhJre9^;yuGwRO;I75eR8^#?%+oVGNn>SJ4Q2{;VP42ddp^c-Dh0Sm zxca-%l0t5&J5go#R&8Gc)5-fx^%YrLN7i<`T4agtN|6AT{x!bwi=v%kij=joV+yLdooj_WQ>-!mC=;RuRG6o%!`cUbIxO1tx+Z9YC0D&hgaU10B|0k^GlMRPcb9I zS?TM5B++@4$i_1gz4|NPpxLX8W5ze4@YQ5S*+~8o(n*<*pT0Y*SsbPLzOmF~=M(WT z^ITTclK<&H+=P_X&7Y8sbI-Y`+w!k=**)F1zl@WLL^wpuxT8WE$2b&r#_Dh->-bSS zq69~JCtE2QYR2jbPLeRrA{;4AP`#P}NsM2OVOd&`DK^Fc*V@j{8Ik3YfEXh#++uqn zsdPozrD`Qa$*OQsAcBst(o7UqDPm}X1!lZP-QMuceN~mGV{_f}ejcWFNv_CWNen%`)q1E~2cF7| z7={UAmxiTi9Zi;YzQ}V2Z7SE(-}3lW5>7V6$25y07}Gd1jjDx+{TT~uSrTK;;lk*- zuT@<~Xo3}!;aIGfgHs;ty*lbHQ-<2x`j zO?EFKKbuK!Pi`4v*k#emU2!9#T*sae&G-LW&h4W;c(znN*K=}v(Q<}bRCXp8=_)}qu&-cK=OHX_&C zc6Ajfzq$Qkp_S_OwNFR=@gHkCTicg+rRIMch|l7|+E{S@`^ENF`TXDZ=FW?!^M8-= zYzFWK3z*T2!RnVU5d5h9k1iw#Zf;hU0px5R7|bj$=}S`>?2)k%7S{IabaH<5o^%ON+Oq1I9YI7_?l(x;U)exoU2^Qzd zv0(<_)&Ifis)gugoOSttB;7nnL5=9&{$@e=*a6!>sT}KlED(vgb|XkS)NUZrkk9p0 zsK{G6FO(#G&m3W|lqQDaD>UN}GQ`J$>h zDx<3c^Uc_T^|Q!nE8odtIEnP1fMbpg5R&_hkBVn;mo zsA(6s9DI`AtgaUp_p==3ev^Facq_gz4cw`*Hq=jo()v7pzpqXtl!&9wFW!E9VrZi@ zebor8;Wijnl1Rb@C;IO|ot(V6X*aH%2^L6c1F|BwRxHxD##NcHL&~lYi{P0hgFaGM zV5{!W*7E?~(h*7WEj6btDV2Z=oWiC%9VqoF?H*`Qxmv1rGE_Uj7Qj2<;7j(Ehki_r z0Pxf9$YR#^j?(gzZbn1&!6nS$9TSc>3#!K zGU)@yLizcmD0nrIWC_Q(58reZtLuK#I{2Jg{^k2YRXl6ZGH@Le>{4d#o&2kElLtEC zoMP>kzez~vEV>54vLu9^=ju-~In9Pc@)_ElyfV33JDOQ`ziI7RC%bdSXK_4(pEDHe zO*WM3(4C<56V|mWcp=-6z2GpwA<8&b;au=TG3*_N1jlBFLTy9Y7gsfvr@7}F z2j=+HWT0Xy)e_MF1<$s|z2xkch|_HcfHX&$bo+#tX!-n0m!;+o)mL})M6%;q-9Vm9PM{s*R|Km9-NUtolrt`T!^>L znC6kKSeF6_^5X??bJJe{EYL_z9ap~}#kQbqp2EO--0C+_syirl#{@pMRUg|M(C(bb z{}!Bqg~|d~OQ~D);AEBMsQ30Klc>RkRa9qwvAYg5UjaL|%Txbkr|GXdv8r=(I481q zs882s&TU@9v{?_0F;06(r0Bz!XQ zJ5zrz?AaxrZNM{q+MxexPTvBYO!FCF$43r;tiuru>}h)QP1jr!4~u55-W;r-xJz`k zI!o&TfAj0bYQn{R%gR~x>afUGtg%(o9r@Y)^kMR|-4ew9@=;m1lIO{}!qQ%_h0)<* z$3I@(ueEuowou}@vR2gsDN^&@!P!6G@0X|d+r!hd65!k2?+@PAErWT)(ZO^1 z`mvS0e*bp=;G|MpbNWoL(sErV2dD2poa`M`Y8z1D*2@wDS&B}pPQ0VtcL%4(yEQE> zphC4~A1!Y^G~!u|Ij>-L`orPz`Ptjk^B)gR4qyLk8T`sMHefVYl-s)QEW`_|AWze< zfHO8v41;`i;npahChDQor8>Z6b@Nvb9lqa7M z56c~Mh2nvk;M0njao|=E3-OY*WdS-TSNx zNQSUR8LW*F{{hb|Z2Lc)Chhgg>miA8dwso1+`QtBw1?e!s`{Bwp|@}Om&$i@+gqij zsF~313P`gJSNgNYab1_-qDa=YOFZNA;je*HCU)a=j22QWxFeN ztu<-e-E#J?I@d%AWDc$_<gwwjvuo90Sp-;Ssk)=i4@TD916H3Os+PF5kW0BQ z>J0*T4EM)1esv3}?ehQlG_DwBxA7HKdiLvB!gECXSN5~b!OTzU z_?y%?mg`3GR+)$CNU3F$hrL>FP2BfUsi{(O_U6+}u0+bHpdpR>aJF~c+^Bb{%bfPy z{Uh}?dmezAKxdpxgEz0wPY?D!oE)D0>wI_r-Qm&shtq?TDw*ryik6^GkPGh7htz(+ zvlBe@-$S*xDkwWBW(D!4)q+4Ak3RCb#Iu^pO?DA~vgWC<+jG=lLpf;`tt}?s@$TvA zPw!9m*KY%Se*XIK?Lm#c(_G~DM0zicOO^aCtIOgcaBIUPiLE2o;nC^Y?%TKL$8SHp zIXpU@PbPQM)m%y~I&V^?fSt)Nn-mZ!5u&z=IS$k& z$7#h?Ub@D|?(3v61n_MB=1rj<|2`)PP`Ka8NAMPP+B=ZsM|U669qaNgdXnG$d!Lr8 z(cFa7Z`E3>`NfH?TTZc>9j#{1VUz0H+X}@N z4K>y|rWbUu=%mt>z%xdvQ2euo?|be2HGk~S z^Qt#UTK^UUkC)FaKb}enEo7<-}cIsY)Y7nZAgMXjGt+aJV{CoG13ncI`i#Zm$rz_IB z9Ft_!Dvoen|FJRaRm#CRKILUM;G2AHm3D6DI$P{EyF1S}*XsJAiw_$?)$9?QOZ}z1 zTum~l$VtlFA^)8(S~GRusU*x2$a_&kh^0JTP1xOL)fJc6Qi#;~s+v%lI|P5a^U;6w zr+WU!yXl|27t!|K8l$+AN>{e);0*{LiC2H7W%gpwJ#cEVv5Q2X_)KQlj;e zB$2z2`h>=~q=Taz!pRTrvFH(NGqM?b&2Sur4#rVHcNw1gWY9*QFJ_ z-l3j3>d0CxSo2{XHE7z%%ZK$Oi?Ke_!{lH6+o74lPVE8z_(w7V?{l9+ZE?gp2m4(q>;_ 
zq?;)M#X!@1qNI+%CLnw)H>v-6|NmaknY)RBJ8?rZOaHu5glFnbOXK|0BXU|eIon!b|23YowdG7s-(}Kgwd9x> zk9Ji%xU0kiCmQBVr{Bt<`rpx0!~d-Wt#K^eO%g2N|69H7mqq@+wYmBHN&kP8=T=@V zpQleZE$_E}rYD8|mI{5v%XWKSYsp<$pymRj@{N;Fyl~Rrc3qtx>gQ@Veju+?}Vft?R1^mMmRJa9OlVnJk~Qu{y&iSZ=w9(e*WTl+5h)^XY)z^KgM$lk$#)7v)jtC--W`U2{A9^ z&s`b%FJ2fih^`*{f8Oo)&k<7tpXhB*!gisq$TZ$62%cd!SXPNVW z^Sk`43hankcQR1l5MFaOFk`WIG%(Cq3>|(sgnw(}L0T{g4n;?h38tBVou2zNL7&y@ z{9Pf^-%vTfvD-G%`6sk@!w{#UFU3G?ZdvTALx8sSe?ogt7weoUBcKfVKPsIs7Oy^> zz3zP5_Asc4*)+JQ-bwY>e__;Nli{)UiV&2N@>xe+UXxX7*7m}6`PFaIzAv`x-WD6* z^cJ5>AmqK%r>{&M-hE;>H`l;%1gre|-|hVUzuWzf|99ilH|3Y`?>x@7Sk1rN*P~TO z!D3Imfs}O0FX4%N{$a-9ySZti7918WEv(LhptGl-h51ia|M$WHCA~ZMw+9yO|GeBP z<$vGmZEyFU^#8|r=0@4drKj&6mXhc-aT}g@m0J&OZPWzbmf}}$VTe4ty&>}K7KX^P zIz!~yVoT&%ohhQ6SQYoYhW80zxpZ+DaziHn@S0{xBu7r&j|q>Kbf69keIL4+fjZ;0 zIicc3zbk3r%(1$5EupSEtz4wZFEytQiV=-<=oYv7*UZWY-e9NA$QQ8gDbNdZ733`{ zrKm&(Ez6n;b~>Gk8FqU~SHu5ZqV0!r04?DE&$o77mi<3Fy(jov`^Q}|Ckia8zo}Zgv<-Q804k$EkLdhRz+jKa_;U&)|#?7x#utzh@V;5k}T+^4%~6q(LV9@ulQ8?zr9`jp`t$)$A50VsNVm%^A!L8C{M}q zw1aQ?zPssBve_e(T43H$?!1IiAC4&#zC*a?uIdgvGwmv^)C0EYsC`xktWM%cP+kY? zeeINF4E`)|qBr8En*%J6l{tsHY`x6M`~IhKKv-JNb3~M2mUl?lf)6IS8ju&UsaM1e zmlfr3bNm42yjxu!KwHC@@;n1z2jtH_6w|<}3)~98Z;9ltvNZEK?yagDe{1`)&j* zvj4U#@t-eWY;HZt|HpXB(Tp_OcZ2`E8vL?HKUEq1O9J$`T5eeb?#a7<3-5ldz^fUw zU_z4JDI&2{<20iFn{qCfllfSX| z=r7CSmk<<{?KqCZkWNx}5o>_rbc}+_Y=BvU1y%>-)fq7y9Wy#4u~N=?A66q8USei% z)LdP@I#a@{t`GIqHB5{T9Pg&-E}Kx7AP}V~lE3qoyzD|Y>|JR zp6~th)Qqjw(N&xl?nSBOQs$A@{Lx}<3Gt2D5Wu(+b^G9MlJ^6+@C_~Q6*-y1)7pR!)B znbbp8PvqO(WVWi*ink=mJ~w~Q(lo{soCp+~JT*0xqRk>M9 zsq2gW+dL7u9)+JX%=2qiOzzjlgtnagf{XQ=5_>V?d~Pn>G9Z*r(0%Q-|Cd_+|A2`f z&9lh<+uAAHf7{QW@;^S#Q>W&e++XG0vw2y(e0xykdHUV(?Z-0#e~r+MFO@8eWea|- zlG-iL;MR~D@&PkQ=BXpsYm!IK@j4VzL8|BDYQBBD`l*=r(EyZPR|IQFllsv;A6GMu z3w0QSRGpt+G)q6#48Q{rc?klqS19w9l4RCS=bBM(j0(L?ddxqA8}_R`_45CL5`UU! 
zq5b!~SIPg}dy@Z;^88Bj-(~@QQUe}E4e(NKE|VtHCg!|tMour&Xk(tyyJZ=@?ReKs zTTkTHWg)$FXK$Bs^fzeRE%Q|Qzg7o2IGH}u`On_Vt(`Le-+uXY{_|0uFAMLjYY5Vp zpYIr>uWzmULwe|RX#4vV{hVR%n0@&eed!*=>Q#Y$jwW+<9j;-pvyGFmc;)sz?`LUR z{}1T_TBQGNZm8_X_;j z+$C$Yo5C%})yf@oa|8KRYBiNHltk+4#iMKj+r4?!lr9MLXIbNgeeU+W0_F-_`!^Oh z_~ccn<^KeCQ)G zD$k3zMQ_{D+w<;3W_umhR0vNkpv?$bKE_$SKHd~^=Sp=s@0h8d(!3t{%a@Xem<*w@ z18*sczDF@iLU-kRHA%c*qE1q}LGvoJ*c9K@)>~JsPR7kEcdA_VC(>x()rS4Ab?2HE z*0?aktCQ*rsn;AAZXxk!fmwp$!(-DG@5Maq{c>|oXQh;h(q#rU=t`*KQKrwkn2wA*NyY`h(_w{RNH9S4T~=^VC(B%@(=q` z;?h(=VHUkX&*ujT->Mf`+vUTc1(!`zxJg-54H$oUiP$t}gJZEwu2 zvE?OfU5o{Bqh~Z- z^(@-|-RxD)e{b)+c+&qK<0;vHa@b!pG{IMtUEcBP*X092WNW{oqE^ueowC+m&ctk& zbh386Qr0f&cI_I?pzXGES(fNqu=EwJ|By6*_L=L%z==tg+jO|m$K&LMQJp#9e`vWasEdDDhx8H?9V;g8UM zCSSMV)hlRk^?Gg4m&!fuoc>{+x%~fuQh_Yu|IfF3<@>*%Z#|v=eUzuh{;!t-zkv)P zzqY*i4R|Bn$=JqZ6oO^AA%n#o(<0PlDzJ$P>Qf^4U*{%7@BnLbFL>X*oDv;y2_@7o zFExbvZIer=A1<+k`lZdWC+W2OGnfBA&{5#U`u}ES|7YhZ{_|0ud-MO>oD#bC0pUM^ z1TZIL>(0Mw+4@z_zN!KE^u%EQFwa8%{{Tlp7xDkC=au_Eds|QbpGSGV7XQE30ntB$ z7|x90zUz6h~D zH1nIc{vYf9-xr(Z{QoarJjH)L&U3H*zhfHRPl5X#?&H;j_q4}2o&c&VE$9@Cg#OCESqmtT@3L`XnGoxuPJ4uXQ9H0BT~$FylDh0Y0dGtRm(4N=^U$pC-GVHdgzxGRfx4Y>Az zzmr4AbId04_p!RxN0)Jx8+w^{?o*?nF=l>DrT5|%^@a^|yoRq>hWKmFcKF!# zr=9#ydjr}XmFUfgeI1?9{xoM!R%WFv=mlQo>)3y5(_Om_ti=<&JjDxJMCDf8mQgW) z?H01AQ)&*}MpGZYm_)JyF`;)b`*4J(nEmdpRjdC6QihRHLK%MWHWt}`QrRo&e_J~{ z&!6kDD;N>cRsYrKw}QbhtgHTOUyMgx`5*g}Pm^w5 z(Ex>)SqeiEW8V5E;8$tun_z%0Ti*m?lD5A2|642YBVvSR91iyncq^FlkVd!_n9b)_ zKq8E~8lKVrwA57&r@sq^VAuc2!S4RMgJ2T<3T@0^|2vz#o$~p=t=?1o&*MBR@J7!7 z_$Q7hpe(0W>l-*sxIl5Nv&mlJ0k{Bs$rAKs74XrN{j_GH6s?4%Y}8$EwN_T(?Ct;T zpZrg&b#ZYK(uC6(x1NENH9&L=z=bq2ZBYrR3K|4tYZ3*6*co-@RGM4)Rx=R!)0l2b zki*4f)-f+G`tuOzbQF9J-h|SJgft$X*{dSX!Fg&4C%)xg32X`Hg1W9Rv6Tty-PD6r(Wve-nWF5L0?$avJJxfOnsgXqq*kcl4 zbW|xA5G42tCg?wu!BpGTt=6u2p&Kx-98l6CiR=jyWDsK*FpMrG7N))gV};-ZB^inVg-RP&=&!&Tg>6|qtnDTdJP$Uv zg3WbFKf)be=Jp=0C;E4 z*@6N$u=8q%i!aa?vlO@c&_1Dxm3~MgBybyU=3%VxipdlwnDb*s2Y4<=Y`5amMi3m|;Gqan$as8=Wt}EF#H;m+0X&oLJDMdU)3k-drfW zO8&iO#EKK5I#? zqhuTBWRyTkBktpM$0elj(jGL1Q_6{;tZDhkb`B*d`hYPE*n}3(Z(Zfri(V|io zFM&p|cJVIb0tQ$L%@t`@`TBE?U#qKODkL&Szh;b|?%d4sGS~icOh~a{) z7F1=#gMldvYh-BMk~mtw3wn)s`Kj|fV<9G7NIr3R410(BCmhTTx8`iTLUB-o*T+wH z+B$rq&%0^6?_cNLFkE1?K66I$o5+p)RthcDPW-lkTcd7`&F? z6BeIG;S(0G-x{B=cuAXJf!ucV3PYR<9BDWI9K5>sFhnh0W6GO#uLiti85LU5fY*b} z`_F=*V`o8&ZZ-oJ8_ZJjHCL&^5c5#tjv~2abnXthZzACz6cdW2h&G zae`@A-|kPRu>1CV*Pyhz7>#4hw0oBKyWRW_?G7g+9*l?>X9IOV_Z9D;c+fFD>x8a! 
zca)I`cg=@xzD~L;rv3Hcq39{*E{Xy@&)%LE$<+XVFTC6_o5l9h-tmX-J3NsVoGs<@ zl5}G-Aq9irp6k^QomjA5k7$G~7a39}&0Hfzm5pA3zZYI!oW!DWJaeGSV0X`Pv_TLPlEWqnN^ioXWun-Q_ET@R@B1)7p6e<28trO!6PgIcMmSv zj^79m$2iJj%;uU`6$|U~TMn)$tWhz>48#~EV6O1LlP>2`xF(rh6NNu;4EdGwCfr1J zoD0e@L`a|l{Jk`ka&o*m7S@?G*XaoFZJo(SaI^pJgiR1 zFGZ{z#ljeoNd^95v-gHP$SC?52g?=eQS>*lj(^-!@>bVSGm7`IW9DV7x+vmR8O25P zdMLcSJY3cKuJdcxrLC+@mr3)kjO4>77)3uZBJlep#7i^fDqiz+^XvNS;2X8{d-CD> z3(jlp(R#f6ThU8+eS~(dc1cw$oray1QGu2A)P{NzR_9kXIBrzWuR#Ur0Ro;4AR^4U zuqxBd;&m_owiH=odX?-Wf#&hmYd9S0L=zhj(PULfT_!}4%{LYNxwpz|^to$zmd-{!vGiiXo z*LuXsbh)zEaG>l^oJ)Fk)`|7vJR0Sx%X0#%z%XrmdG-T4u z%}HtQ!-i9=Lu_`B4!T5gU3#X_s9MkF52xJY5yX^xpo3h zxQfzQY965^(mJ%Xp={rlnpwI1qH)rZ=*>>--6$ebsG`_eDjc(^@@A`6RkS&G@b@w* z6<$Aj5%PDz%LwHM$7|1~?wo_#JiJ^yeHp_$A(!KIU}Ea$QEVPwHYUG9xVz((#@UE0 za#(1*jy0%RRWU_!S)rIFBkhB;v6L46UgIdm*;4MOYZtzj8|YqPShMSK%xE?mgGl4` z`+J}S8Irl~gl6Pi;d@}wfR_tXJvM2gn?192L^FlKCYdfogxrT-c}ndCbzH-~163jU zljdFC2gS=Jb8TW4IRH2Ts)IO<*x@+=?=!LrFB?frk_AqKYrE)HIi!qC5Sz{Q3*Qqj zeI4bZ?JjS}3*PSU9s|n67$<^+jjrl@`jcv|$!ub-XJZo~Ewv8MQ({DLDwwk zB)(8bBsM^d2?wQ_jL}r9VV8KO0o8?^7ePU<@UP^A`N~i}l!|H#{QIVnJ3QJcyl8VU zZ^sL2TVkK^zEf<`=3p&eOKcS0*GW=HSU-33@|Z?TZm2#CUT>EonZ|3$9n}ZH>v&PT zk!iZdb&IpUw!p*=OF9$lm_In2R7`$?!+aQ?o;0tve#KiYp zkq86Fh=pTi(0a048f++vcsEm_*}Ur=9GOVl)6UVd&}zc#bkC||7qxgPvsUtWIs{YP zsbB?q36|lDhDO}QAc?=?uHi#Me$H}{S6E|}HXyIN4&}QHio=Vt5piR44Tq6Mhq4h$ z2v;Q7kV@axn1uT9jyEcNOtU!3&G}^yMl8f@NaMVl*YK8(NQh$4ujO!SbS%+EDl274 z*SmutUWivjxSSsQYVuu3voI87iiAFqG^HfcB$D6F12n!sF+`ceJzIXsXNu&Cw-Lt)aYIm1m4Ob+NA)MA--&4Yn)7U4*ZaaT;zZpkWd>Q+rFl^&icPNWa^ z8O|vs&W4+ZCR3W)p)&wkqL!z$vgQy|O5-#WK1nnk8CGXlF^ZPmKWOTpRs)W#BiV_9 z!=bwNi3|yjHmXaoPJ1wuu%jj+NhG|s7@eCsfcFylE5fz$=sx74#&3ymU|FKjXpdC@IPG3QjPSGAVav~u6v@~YNc zO)JN%mQ5_;5;;Cgu?Y*ll>5F8$=dmLyXlsr{D+!wnq(94>o{{x-<=&xMik*h5ES!Z zY1J*qE902`faizOX}m7+%yN8(m%^YPIa#snibhKQS^}{guM}~9MOictw>#lwE7o|~ zUkh;49;TXW2OFHLw;M-Q@tp?J!)u9wY^ImSV3~nbkex-t>EX4+K;8|n;y`M=77eF= zETra2=!&IXOn7<=C)h2YI=r-oQLmfsY3;+F4lyZxok?9=U()Eg@{U)TiqzcID*c8I zSXVoBj)QWo8{7SBTZ{}Fd!h9>z_B(GE{t*|&8>@Oe9vlQVdmni1d701JRagIk$3ZO z)eM}}6C|!D-|y~u?xo`RscN0bnR^^ZSIbqxa8>z9p6lOfmBu|`)mn2h)D_Xcy=?9} zR@!&4sH`YSi2ePI9MwFH{9;tAmq@{y(q0P0+>k9N0vJ_s2$)R>=W6vK4p%tV8=qy8 zr9Az>@ot*#vPn}wy;SXnl!X{99B3A-rbP{MF5gI{uj^2UgxVgL#5~F3CLd#AXI$MB zQi{(3vLIi!%@XM>?Xsl=}^UMj#J;j=Gj#* zDxr=+xj+Y|InkrJ|#`MZ+NEn4<(@sC9 zYTk?_rqL2~{#DsPKI3do*8gyt^u2`2U%XteZBJEh_gmhVAPBr)Ds5w`(xu7jR7?U$ zij+$DRSP0wm2v1)_S>zm_J$9Xkw}7|K^SdrJ>M>VG)ctTeb{ubO(t@66tmh_lPi;O z(5F6i7Bz*fb5azxcOt^ndA^zdU8h!apG@7dmcCcETSg*~q*1^rqjKDE#yeb~Jd5uN zoZw*}Y>=!Cw+ZRQbBcDi4D=|$;xOvNmb-&p?{DO9a)(SGj$NDTqe^{UB~ZrVynTkQ z%wbe%C`$H(rAAFBUHQA4OQ`MyNgcvRveaw4a=za^J=obUtQ;@0DztlP=Gcx3sdZ6? 
literal 0
HcmV?d00001

diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-10.4.0.tgz b/infra/charts/feast/charts/feast-core/charts/prometheus-10.4.0.tgz
new file mode 100644
index 0000000000000000000000000000000000000000..23712b6c8c253f166cc0743bc50c3abf2d290a8e
GIT binary patch
literal 27158
zvx-SDv_9ukD6aAa6)9tFo2((t4h3hi1W7|45Pza4uF702vQOM)B^zE?;0V)*)@?D#miD<6AxBlxnIW$+E3BukBkrJ;#%&Yj-x7B@^ z^4p|_V@2UO**@w{_=cyL@9!rY#(gQw;Jzs@9&?1Im(+~ThJ`peu7|sxlgnx|l>imn zm>_|^h`$Jd#t7$uE$C5>#gdw1?T+m`dV$buhSCZNhn~1kMQZioN@s#xQRM8E{W)$w z{&m<;AIkBbt&PIJRf_D|5f^As507w1*T>YeYA|H#hq5c6eS9pymcev4GP*9`uN>dk zW*jvuLNU1^w^{UL4}!Zk>I;y_x|{bGgY(32OpaGfVL(GR@D_W5-@_a6zx5#cqqwK~ z>lV*A{WCaN^oGA1Q=@wJcseTb`B3Jr6EJHm_PTsL(-P4-IZ%!xu2vFV^A}-q(ie%O zGEM2?q9e!ZOtpX=4lip=&AV=RspvW{F_iz6w7zX)*!0b<0U*UgUvt`n2?~?r37|Xz zK|LW-s{_ybEo8zU>&la5`C%f#y+z1bV9bV3qG6KU8^IKyzN{Z7b!0#Y;~}vOEp??X zm1w8|JEz2+VnfWbBBBy!&_kwhy1J1(goHwKlQU?Ta#BrFN0ko)ze8O@5`H#`n!&QG z7+t5p_-t{ppQmel(s9OQ2}l?A7PE*hunWlUQk^Ul3evJ&7&V)v!6?Eb!R^DlL#UK3I6gwNJ7vWp_XbnX;`a+ynV{SAXuaN7Ofs2@9r3gOj~ z3-w-w(A)jcdNgM>ET`yiX!FSJuR=zP(d~z-zshOGBS1(e+a(H|iM#L#j8jTlbp3BLei;38-vg-w2MP=$u=S~tCXI~39$ z)4bm9kz&8i6;rD!-MTtr_K_A1w&rSAU%0Pr)Xe9W{HNj9QpJLBj5HVVm=rIhh0zJd zqkH3|gG{H3)4GW)%!wLX*3SffIzj^NsK42_W8@M;#L+ z=U0_+OlAQu15QEuMm+G<>W7!Q=k=l#+)&K()OkKCpEs>U%ln?XvFCp7pXlGefCA2< L Date: Mon, 10 Feb 2020 16:24:22 +0800 Subject: [PATCH 30/31] Add statsdexporter, prometheus and grafana to docker-compose --- examples/basic/basic.ipynb | 2 +- .../data_validation/working_with_schema.ipynb | 35 +- infra/docker-compose/core/direct-runner.yml | 7 + infra/docker-compose/docker-compose.yml | 29 +- .../grafana/dashboards/dashboard.yaml | 11 + .../grafana/dashboards/feast-ingestion.json | 1497 +++++++++++++++++ .../grafana/datasources/prometheus.yaml | 9 + .../docker-compose/prometheus/prometheus.yml | 30 + 8 files changed, 1587 insertions(+), 33 deletions(-) create mode 100644 infra/docker-compose/grafana/dashboards/dashboard.yaml create mode 100644 infra/docker-compose/grafana/dashboards/feast-ingestion.json create mode 100644 infra/docker-compose/grafana/datasources/prometheus.yaml create mode 100644 infra/docker-compose/prometheus/prometheus.yml diff --git a/examples/basic/basic.ipynb b/examples/basic/basic.ipynb index 94fc82f2ce..0360fe05a3 100644 --- a/examples/basic/basic.ipynb +++ b/examples/basic/basic.ipynb @@ -500,7 +500,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.4" + "version": "3.7.6" }, "pycharm": { "stem_cell": { diff --git a/examples/data_validation/working_with_schema.ipynb b/examples/data_validation/working_with_schema.ipynb index 3bba92c35d..bc436824bc 100644 --- a/examples/data_validation/working_with_schema.ipynb +++ b/examples/data_validation/working_with_schema.ipynb @@ -10,10 +10,10 @@ "of the features they are ingesting. This schema is compatible with the schema defined in Tensorflow\n", "metadata.\n", "\n", - "cp https://github.com/tensorflow/metadata/blob/master/tensorflow_metadata/proto/v0/schema.proto.\n", + "https://github.com/tensorflow/metadata/blob/master/tensorflow_metadata/proto/v0/schema.proto.\n", "\n", - "This means that you can import an existing Tensorflow metadata schema into Feast and Feast can\n", - "check that the features ingested fulfill the schema provided. In Feast v0.5, however, only feature\n", + "An existing Tensorflow metadata schema can be imported into Feast. Subsequently, Feast can\n", + "check that the features ingested follow the schema provided. 
In Feast v0.5, feature\n", "value domains and presence will be validated during ingestion.\n", "\n", "For more information regarding Tensorflow data validation, please check these documentations:\n", @@ -42,34 +42,9 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "bikeshare_stations.csv\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "--2020-02-10 03:11:51-- https://raw.githubusercontent.com/davidheryanto/feast/update-ingestion-metrics-for-validation/examples/data_validation/bikeshare_stations.csv\n", - "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 151.101.64.133, 151.101.128.133, 151.101.192.133, ...\n", - "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|151.101.64.133|:443... connected.\n", - "HTTP request sent, awaiting response... 200 OK\n", - "Length: 7492 (7.3K) [text/plain]\n", - "Saving to: ‘bikeshare_stations.csv.5’\n", - "\n", - " 0K ....... 100% 12.9M=0.001s\n", - "\n", - "2020-02-10 03:11:51 (12.9 MB/s) - ‘bikeshare_stations.csv.5’ saved [7492/7492]\n", - "\n" - ] - } - ], + "outputs": [], "source": [ "%%bash\n", "# Sample data from BigQuery public dataset: bikeshare stations\n", diff --git a/infra/docker-compose/core/direct-runner.yml b/infra/docker-compose/core/direct-runner.yml index f9123480bb..1504a94c70 100644 --- a/infra/docker-compose/core/direct-runner.yml +++ b/infra/docker-compose/core/direct-runner.yml @@ -1,3 +1,10 @@ feast: jobs: runner: DirectRunner + updates: + timeoutSeconds: 20 + metrics: + enabled: true + type: statsd + host: statsd-exporter + port: 9125 diff --git a/infra/docker-compose/docker-compose.yml b/infra/docker-compose/docker-compose.yml index 27d82efc3c..bcd509a10d 100644 --- a/infra/docker-compose/docker-compose.yml +++ b/infra/docker-compose/docker-compose.yml @@ -19,7 +19,7 @@ services: - java - -jar - /opt/feast/feast-core.jar - - --spring.config.location=classpath:/application.yml,file:/etc/feast/application.yaml + - --spring.config.location=classpath:/application.yml,file:/etc/feast/application.yml online-serving: image: ${FEAST_SERVING_IMAGE}:${FEAST_VERSION} @@ -106,4 +106,29 @@ services: ZOOKEEPER_CLIENT_PORT: 2181 db: - image: postgres:12-alpine \ No newline at end of file + image: postgres:12-alpine + + statsd-exporter: + image: prom/statsd-exporter:v0.14.1 + ports: + - "9125:9125/udp" + - "9102:9102" + + prometheus: + image: prom/prometheus:v2.15.2 + volumes: + - ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml + depends_on: + - statsd-exporter + ports: + - "9090:9090" + + grafana: + image: grafana/grafana:6.5.3 + volumes: + - ./grafana/datasources:/etc/grafana/provisioning/datasources + - ./grafana/dashboards:/etc/grafana/provisioning/dashboards + depends_on: + - prometheus + ports: + - "3000:3000" diff --git a/infra/docker-compose/grafana/dashboards/dashboard.yaml b/infra/docker-compose/grafana/dashboards/dashboard.yaml new file mode 100644 index 0000000000..2e2dee6c1a --- /dev/null +++ b/infra/docker-compose/grafana/dashboards/dashboard.yaml @@ -0,0 +1,11 @@ +apiVersion: 1 + +providers: +- name: 'Prometheus' + orgId: 1 + folder: '' + type: file + disableDeletion: false + editable: true + options: + path: /etc/grafana/provisioning/dashboards \ No newline at end of file diff --git a/infra/docker-compose/grafana/dashboards/feast-ingestion.json b/infra/docker-compose/grafana/dashboards/feast-ingestion.json new file mode 100644 index 
0000000000..3d7630f2bf --- /dev/null +++ b/infra/docker-compose/grafana/dashboards/feast-ingestion.json @@ -0,0 +1,1497 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "id": 1, + "iteration": 1580253818906, + "links": [], + "panels": [ + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 16, + "panels": [], + "repeat": null, + "title": "Constraint Violation", + "type": "row" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 0, + "y": 1 + }, + "hiddenSeries": false, + "id": 12, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "(feast_ingestion_feature_value_max - on(feast_project_name,feast_feature_name,feast_store,feast_ingestion_job_name) feast_ingestion_feature_value_domain_max) > 0", + "hide": false, + "legendFormat": "", + "refId": "A" + }, + { + "expr": "(feast_ingestion_feature_value_domain_min - on(feast_project_name,feast_feature_name,feast_store,feast_ingestion_job_name) feast_ingestion_feature_value_min) > 0", + "hide": false, + "legendFormat": "", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Domain Value", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 8, + "y": 1 + }, + "hiddenSeries": false, + "id": 22, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "feast_ingestion_feature_presence_min_count - on(feast_project_name,feast_feature_name,feast_store,feast_ingestion_job_name) increase(feast_ingestion_feature_value_presence_count[5m]) > 0", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Count", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + 
"buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "decimals": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 16, + "y": 1 + }, + "hiddenSeries": false, + "id": 24, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "feast_ingestion_feature_presence_min_fraction - on(feast_project_name,feast_feature_name,feast_store,feast_ingestion_job_name) increase(feast_ingestion_feature_value_presence_count[5m]) / (\nincrease(feast_ingestion_feature_value_presence_count[5m]) +\nincrease(feast_ingestion_feature_value_missing_count[5m])\n) > 0", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Fraction", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": 1, + "min": -1, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 7 + }, + "id": 14, + "panels": [], + "repeat": "feature", + "scopedVars": { + "feature": { + "selected": true, + "text": "entity1", + "value": "entity1" + } + }, + "title": "Feature Stats", + "type": "row" + }, + { + "aliasColors": { + "max": "super-light-red", + "max_domain": "light-red", + "max_val": "dark-orange", + "min": "super-light-green", + "min_domain": "light-green", + "min_val": "dark-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 0, + "y": 8 + }, + "hiddenSeries": false, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "scopedVars": { + "feature": { + "selected": true, + "text": "entity1", + "value": "entity1" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "feast_ingestion_feature_value_min{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", + "legendFormat": "min_val", + "refId": "A" + }, + { + "expr": 
"feast_ingestion_feature_value_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", + "legendFormat": "max_val", + "refId": "B" + }, + { + "expr": "feast_ingestion_feature_value_domain_min{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", + "legendFormat": "min_domain", + "refId": "C" + }, + { + "expr": "feast_ingestion_feature_value_domain_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", + "legendFormat": "max_domain", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Feature Value - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": { + "count": "dark-green", + "count over selected range": "dark-yellow", + "feature_presence_min_count": "light-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "description": "", + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 8, + "y": 8 + }, + "hiddenSeries": false, + "id": 5, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "scopedVars": { + "feature": { + "selected": true, + "text": "entity1", + "value": "entity1" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "increase(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}[$__range])", + "instant": false, + "interval": "", + "intervalFactor": 1, + "legendFormat": "count", + "refId": "A" + }, + { + "expr": "feast_ingestion_feature_presence_min_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", + "legendFormat": "feature_presence_min_count", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Count - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "decimals": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": { + "fraction": "dark-green", + "min_fraction": "light-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + 
"w": 8, + "x": 16, + "y": 8 + }, + "hiddenSeries": false, + "id": 7, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "scopedVars": { + "feature": { + "selected": true, + "text": "entity1", + "value": "entity1" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "increase(feast_ingestion_feature_value_presence_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range]) / (\nincrease(feast_ingestion_feature_value_presence_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range]) + \nincrease(feast_ingestion_feature_value_missing_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range])\n)", + "legendFormat": "fraction", + "refId": "B" + }, + { + "expr": "feast_ingestion_feature_presence_min_fraction{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}", + "legendFormat": "min_fraction", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Fraction - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": 1, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 14 + }, + "id": 25, + "panels": [], + "repeat": null, + "repeatIteration": 1580253818906, + "repeatPanelId": 14, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature1", + "value": "feature1" + } + }, + "title": "Feature Stats", + "type": "row" + }, + { + "aliasColors": { + "max": "super-light-red", + "max_domain": "light-red", + "max_val": "dark-orange", + "min": "super-light-green", + "min_domain": "light-green", + "min_val": "dark-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 0, + "y": 15 + }, + "hiddenSeries": false, + "id": 26, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "repeatIteration": 1580253818906, + "repeatPanelId": 2, + "repeatedByRow": true, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature1", + "value": "feature1" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "feast_ingestion_feature_value_min{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", + "legendFormat": 
"min_val", + "refId": "A" + }, + { + "expr": "feast_ingestion_feature_value_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", + "legendFormat": "max_val", + "refId": "B" + }, + { + "expr": "feast_ingestion_feature_value_domain_min{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", + "legendFormat": "min_domain", + "refId": "C" + }, + { + "expr": "feast_ingestion_feature_value_domain_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", + "legendFormat": "max_domain", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Feature Value - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": { + "count": "dark-green", + "count over selected range": "dark-yellow", + "feature_presence_min_count": "light-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "description": "", + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 8, + "y": 15 + }, + "hiddenSeries": false, + "id": 27, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "repeatIteration": 1580253818906, + "repeatPanelId": 5, + "repeatedByRow": true, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature1", + "value": "feature1" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "increase(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}[$__range])", + "instant": false, + "interval": "", + "intervalFactor": 1, + "legendFormat": "count", + "refId": "A" + }, + { + "expr": "feast_ingestion_feature_presence_min_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", + "legendFormat": "feature_presence_min_count", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Count - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "decimals": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": { + "fraction": "dark-green", + "min_fraction": "light-green" + }, + "bars": 
false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 16, + "y": 15 + }, + "hiddenSeries": false, + "id": 28, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "repeatIteration": 1580253818906, + "repeatPanelId": 7, + "repeatedByRow": true, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature1", + "value": "feature1" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "increase(feast_ingestion_feature_value_presence_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range]) / (\nincrease(feast_ingestion_feature_value_presence_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range]) + \nincrease(feast_ingestion_feature_value_missing_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range])\n)", + "legendFormat": "fraction", + "refId": "B" + }, + { + "expr": "feast_ingestion_feature_presence_min_fraction{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}", + "legendFormat": "min_fraction", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Fraction - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": 1, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": false, + "datasource": null, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 21 + }, + "id": 29, + "panels": [], + "repeat": null, + "repeatIteration": 1580253818906, + "repeatPanelId": 14, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature2", + "value": "feature2" + } + }, + "title": "Feature Stats", + "type": "row" + }, + { + "aliasColors": { + "max": "super-light-red", + "max_domain": "light-red", + "max_val": "dark-orange", + "min": "super-light-green", + "min_domain": "light-green", + "min_val": "dark-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 0, + "y": 22 + }, + "hiddenSeries": false, + "id": 30, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "repeatIteration": 1580253818906, + "repeatPanelId": 2, + "repeatedByRow": true, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature2", + "value": "feature2" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": 
false, + "steppedLine": false, + "targets": [ + { + "expr": "feast_ingestion_feature_value_min{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", + "legendFormat": "min_val", + "refId": "A" + }, + { + "expr": "feast_ingestion_feature_value_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", + "legendFormat": "max_val", + "refId": "B" + }, + { + "expr": "feast_ingestion_feature_value_domain_min{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", + "legendFormat": "min_domain", + "refId": "C" + }, + { + "expr": "feast_ingestion_feature_value_domain_max{feast_project_name=\"[[project]]\",feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", + "legendFormat": "max_domain", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Feature Value - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": { + "count": "dark-green", + "count over selected range": "dark-yellow", + "feature_presence_min_count": "light-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "description": "", + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 8, + "y": 22 + }, + "hiddenSeries": false, + "id": 31, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "repeatIteration": 1580253818906, + "repeatPanelId": 5, + "repeatedByRow": true, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature2", + "value": "feature2" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "increase(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}[$__range])", + "instant": false, + "interval": "", + "intervalFactor": 1, + "legendFormat": "count", + "refId": "A" + }, + { + "expr": "feast_ingestion_feature_presence_min_count{feast_project_name=\"[[project]]\", feast_feature_name=~\"[[feature]]\",feast_store=\"[[store]]\"}", + "legendFormat": "feature_presence_min_count", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Count - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": 0, + "decimals": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, 
+ "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": { + "fraction": "dark-green", + "min_fraction": "light-green" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "fill": 1, + "fillGradient": 0, + "gridPos": { + "h": 6, + "w": 8, + "x": 16, + "y": 22 + }, + "hiddenSeries": false, + "id": 32, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "options": { + "dataLinks": [] + }, + "percentage": false, + "pointradius": 2, + "points": false, + "renderer": "flot", + "repeat": null, + "repeatDirection": "v", + "repeatIteration": 1580253818906, + "repeatPanelId": 7, + "repeatedByRow": true, + "scopedVars": { + "feature": { + "selected": true, + "text": "feature2", + "value": "feature2" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "increase(feast_ingestion_feature_value_presence_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range]) / (\nincrease(feast_ingestion_feature_value_presence_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range]) + \nincrease(feast_ingestion_feature_value_missing_count{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}[$__range])\n)", + "legendFormat": "fraction", + "refId": "B" + }, + { + "expr": "feast_ingestion_feature_presence_min_fraction{feast_feature_name=~\"[[feature]]\", feast_project_name=\"[[project]]\"}", + "legendFormat": "min_fraction", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeRegions": [], + "timeShift": null, + "title": "Presence Fraction - [[feature]]", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": 1, + "min": 0, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "refresh": "", + "schemaVersion": 21, + "style": "dark", + "tags": [], + "templating": { + "list": [ + { + "allValue": null, + "current": { + "text": "project1", + "value": "project1" + }, + "datasource": null, + "definition": "label_values(feast_project_name)", + "hide": 0, + "includeAll": false, + "label": null, + "multi": false, + "name": "project", + "options": [], + "query": "label_values(feast_project_name)", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 5, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "text": "entity1 + feature1 + feature2", + "value": [ + "entity1", + "feature1", + "feature2" + ] + }, + "datasource": null, + "definition": "label_values(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\"},feast_feature_name)", + "hide": 0, + "includeAll": false, + "label": null, + "multi": true, + "name": "feature", + "options": [], + "query": "label_values(feast_ingestion_feature_value_presence_count{feast_project_name=\"[[project]]\"},feast_feature_name)", + "refresh": 1, + "regex": "", + "skipUrlSync": 
false, + "sort": 5, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "text": "redis", + "value": "redis" + }, + "datasource": null, + "definition": "label_values(feast_store)", + "hide": 0, + "includeAll": false, + "label": null, + "multi": false, + "name": "store", + "options": [], + "query": "label_values(feast_store)", + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 5, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-5m", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ] + }, + "timezone": "", + "title": "Feast Features Dashboard", + "version": 1 +} diff --git a/infra/docker-compose/grafana/datasources/prometheus.yaml b/infra/docker-compose/grafana/datasources/prometheus.yaml new file mode 100644 index 0000000000..a833442116 --- /dev/null +++ b/infra/docker-compose/grafana/datasources/prometheus.yaml @@ -0,0 +1,9 @@ +apiVersion: 1 + +datasources: +- name: Prometheus + type: prometheus + access: proxy + orgId: 1 + url: http://prometheus:9090 + isDefault: true \ No newline at end of file diff --git a/infra/docker-compose/prometheus/prometheus.yml b/infra/docker-compose/prometheus/prometheus.yml new file mode 100644 index 0000000000..dd90102d39 --- /dev/null +++ b/infra/docker-compose/prometheus/prometheus.yml @@ -0,0 +1,30 @@ +global: + scrape_interval: 15s # Set the scrape interval to every 15 seconds. Default is every 1 minute. + evaluation_interval: 15s # Evaluate rules every 15 seconds. The default is every 1 minute. + # scrape_timeout is set to the global default (10s). + +# Alertmanager configuration +alerting: + alertmanagers: + - static_configs: + - targets: + # - alertmanager:9093 + +# Load rules once and periodically evaluate them according to the global 'evaluation_interval'. +rule_files: + # - "first_rules.yml" + # - "second_rules.yml" + +# A scrape configuration containing exactly one endpoint to scrape: +# Here it's Prometheus itself. +scrape_configs: + # The job name is added as a label `job=` to any timeseries scraped from this config. + - job_name: 'prometheus' + + # metrics_path defaults to '/metrics' + # scheme defaults to 'http'. + + static_configs: + - targets: + - localhost:9090 + - statsd-exporter:9102 \ No newline at end of file From 63809f5234d0e77cfc71fb8d94e8f8ca931a6cdc Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Mon, 10 Feb 2020 20:05:19 +0800 Subject: [PATCH 31/31] ApplyFeatureSet should update FeatureSet when constraints are updated --- .../feast/core/http/HealthController.java | 6 ++-- .../src/main/java/feast/core/model/Field.java | 34 ++++++++++++++++--- .../feast/core/service/SpecServiceTest.java | 28 ++++++++++++++- 3 files changed, 60 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/feast/core/http/HealthController.java b/core/src/main/java/feast/core/http/HealthController.java index 2451ed793e..3efe6e6e8c 100644 --- a/core/src/main/java/feast/core/http/HealthController.java +++ b/core/src/main/java/feast/core/http/HealthController.java @@ -29,7 +29,9 @@ import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; -/** Web http for pod health-check endpoints. */ +/** + * Web http for pod health-check endpoints. 
+ */ @Slf4j @RestController public class HealthController { @@ -64,7 +66,7 @@ public ResponseEntity healthz() { return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) .body("Unable to establish connection with DB"); } catch (SQLException e) { - log.error("Unable to reach DB: {}", e); + log.error(String.format("Unable to reach DB: %s", e.getMessage())); return ResponseEntity.status(INTERNAL_SERVER_ERROR).body(e.getMessage()); } } diff --git a/core/src/main/java/feast/core/model/Field.java b/core/src/main/java/feast/core/model/Field.java index edb0a73acb..0067b48e3a 100644 --- a/core/src/main/java/feast/core/model/Field.java +++ b/core/src/main/java/feast/core/model/Field.java @@ -19,6 +19,7 @@ import feast.core.FeatureSetProto.EntitySpec; import feast.core.FeatureSetProto.FeatureSpec; import feast.types.ValueProto.ValueType; +import java.util.Arrays; import java.util.Objects; import javax.persistence.Column; import javax.persistence.Embeddable; @@ -216,15 +217,38 @@ public Field(EntitySpec entitySpec) { } @Override - public boolean equals(Object o) { - if (this == o) { + public boolean equals(Object thatObject) { + if (this == thatObject) { return true; } - if (o == null || getClass() != o.getClass()) { + + if (thatObject == null || this.getClass() != thatObject.getClass()) { return false; } - Field field = (Field) o; - return name.equals(field.getName()) && type.equals(field.getType()); + + Field that = (Field) thatObject; + + // "this" field is equal to "that" field if all the properties (except version) have the same values. + // Note that Objects.equals(a,b) handles "null" String as well. + return Objects.equals(this.name, that.name) && + Objects.equals(this.type, that.type) && + Objects.equals(this.project, that.project) && + Arrays.equals(this.presence, that.presence) && + Arrays.equals(this.groupPresence, that.groupPresence) && + Arrays.equals(this.shape, that.shape) && + Arrays.equals(this.valueCount, that.valueCount) && + Objects.equals(this.domain, that.domain) && + Arrays.equals(this.intDomain, that.intDomain) && + Arrays.equals(this.floatDomain, that.floatDomain) && + Arrays.equals(this.stringDomain, that.stringDomain) && + Arrays.equals(this.boolDomain, that.boolDomain) && + Arrays.equals(this.structDomain, that.structDomain) && + Arrays.equals(this.naturalLanguageDomain, that.naturalLanguageDomain) && + Arrays.equals(this.imageDomain, that.imageDomain) && + Arrays.equals(this.midDomain, that.midDomain) && + Arrays.equals(this.urlDomain, that.urlDomain) && + Arrays.equals(this.timeDomain, that.timeDomain) && + Arrays.equals(this.timeOfDayDomain, that.timeOfDayDomain); } @Override diff --git a/core/src/test/java/feast/core/service/SpecServiceTest.java b/core/src/test/java/feast/core/service/SpecServiceTest.java index c533f593e3..1f52ef5529 100644 --- a/core/src/test/java/feast/core/service/SpecServiceTest.java +++ b/core/src/test/java/feast/core/service/SpecServiceTest.java @@ -596,7 +596,8 @@ public void applyFeatureSetShouldAcceptPresenceShapeAndDomainConstraints() // appliedFeatureSpecs needs to be sorted because the list returned by specService may not // follow the order in the request - List appliedFeatureSpecs = new ArrayList<>(appliedFeatureSetSpec.getFeaturesList()); + List appliedFeatureSpecs = new ArrayList<>( + appliedFeatureSetSpec.getFeaturesList()); appliedFeatureSpecs.sort(Comparator.comparing(FeatureSpec::getName)); assertEquals(appliedEntitySpecs.size(), entitySpecs.size()); @@ -611,6 +612,31 @@ public void 
applyFeatureSetShouldAcceptPresenceShapeAndDomainConstraints() } } + @Test + public void applyFeatureSetShouldUpdateExistingFeatureSetWhenConstraintsAreUpdated() + throws InvalidProtocolBufferException { + FeatureSetProto.FeatureSet existingFeatureSet = featureSets.get(2).toProto(); + assertThat("Existing feature set has version 3", + existingFeatureSet.getSpec().getVersion() == 3); + assertThat("Existing feature set has at least 1 feature", + existingFeatureSet.getSpec().getFeaturesList().size() > 0); + + // New FeatureSetSpec with IntDomain + FeatureSpec newFeatureSpec = existingFeatureSet.getSpec().getFeatures(0).toBuilder() + .setIntDomain(IntDomain.newBuilder().setMin(5)).build(); + FeatureSetSpec newFeatureSetSpec = existingFeatureSet.getSpec().toBuilder() + .setFeatures(0, newFeatureSpec).build(); + FeatureSetProto.FeatureSet newFeatureSet = existingFeatureSet.toBuilder() + .setSpec(newFeatureSetSpec).build(); + + ApplyFeatureSetResponse response = specService.applyFeatureSet(newFeatureSet); + assertEquals("Response should have CREATED status", Status.CREATED, response.getStatus()); + assertEquals("Response FeatureSet should have new version", + 4, response.getFeatureSet().getSpec().getVersion()); + assertEquals("Response should have IntDomain value set", + 5, response.getFeatureSet().getSpec().getFeatures(0).getIntDomain().getMin()); + } + @Test public void shouldUpdateStoreIfConfigChanges() throws InvalidProtocolBufferException { when(storeRepository.findById("SERVING")).thenReturn(Optional.of(stores.get(0)));