From 5215a2139a9d824dc2d8f45181bd177a1e8e9561 Mon Sep 17 00:00:00 2001 From: Artur Kolakowski <36094018+arturkolakowski@users.noreply.github.com> Date: Tue, 13 Aug 2024 05:41:46 -0400 Subject: [PATCH 01/96] feat: Allow feast snowflake to read in byte string for private-key authentication (#4384) * allow feast snowflake to read in byte string for private-key authentication Signed-off-by: Artur * Update type hint for to use Union instead of | syntax Signed-off-by: Artur * Update type hint for private_key to use Union instead of | syntax Signed-off-by: Artur * Update type hint in parse_private_key_path Signed-off-by: Artur * added private_key_content in Snowflake configs to support key-pair auth by reading in byte string Signed-off-by: Artur * fix incompatible linting types Signed-off-by: Artur * remove unused Union import Signed-off-by: Artur * fix formating Signed-off-by: Artur --------- Signed-off-by: Artur Co-authored-by: Artur --- .../infra/materialization/snowflake_engine.py | 3 +++ .../feast/infra/offline_stores/snowflake.py | 3 +++ .../feast/infra/online_stores/snowflake.py | 3 +++ sdk/python/feast/infra/registry/snowflake.py | 3 +++ .../infra/utils/snowflake/snowflake_utils.py | 26 +++++++++++++++---- 5 files changed, 33 insertions(+), 5 deletions(-) diff --git a/sdk/python/feast/infra/materialization/snowflake_engine.py b/sdk/python/feast/infra/materialization/snowflake_engine.py index f77239398e..5d0f08c2f5 100644 --- a/sdk/python/feast/infra/materialization/snowflake_engine.py +++ b/sdk/python/feast/infra/materialization/snowflake_engine.py @@ -70,6 +70,9 @@ class SnowflakeMaterializationEngineConfig(FeastConfigBaseModel): private_key: Optional[str] = None """ Snowflake private key file path""" + private_key_content: Optional[bytes] = None + """ Snowflake private key stored as bytes""" + private_key_passphrase: Optional[str] = None """ Snowflake private key file passphrase""" diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py 
b/sdk/python/feast/infra/offline_stores/snowflake.py index 96552ff87e..ada6c99c98 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -107,6 +107,9 @@ class SnowflakeOfflineStoreConfig(FeastConfigBaseModel): private_key: Optional[str] = None """ Snowflake private key file path""" + private_key_content: Optional[bytes] = None + """ Snowflake private key stored as bytes""" + private_key_passphrase: Optional[str] = None """ Snowflake private key file passphrase""" diff --git a/sdk/python/feast/infra/online_stores/snowflake.py b/sdk/python/feast/infra/online_stores/snowflake.py index fef804a377..6f39bdd0f6 100644 --- a/sdk/python/feast/infra/online_stores/snowflake.py +++ b/sdk/python/feast/infra/online_stores/snowflake.py @@ -53,6 +53,9 @@ class SnowflakeOnlineStoreConfig(FeastConfigBaseModel): private_key: Optional[str] = None """ Snowflake private key file path""" + private_key_content: Optional[bytes] = None + """ Snowflake private key stored as bytes""" + private_key_passphrase: Optional[str] = None """ Snowflake private key file passphrase""" diff --git a/sdk/python/feast/infra/registry/snowflake.py b/sdk/python/feast/infra/registry/snowflake.py index f2bc09e7e4..ac4f52dc06 100644 --- a/sdk/python/feast/infra/registry/snowflake.py +++ b/sdk/python/feast/infra/registry/snowflake.py @@ -96,6 +96,9 @@ class SnowflakeRegistryConfig(RegistryConfig): private_key: Optional[str] = None """ Snowflake private key file path""" + private_key_content: Optional[bytes] = None + """ Snowflake private key stored as bytes""" + private_key_passphrase: Optional[str] = None """ Snowflake private key file passphrase""" diff --git a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py index dd965c4bed..b9035b40db 100644 --- a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py +++ b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py @@ 
-84,9 +84,11 @@ def __enter__(self): # https://docs.snowflake.com/en/user-guide/python-connector-example.html#using-key-pair-authentication-key-pair-rotation # https://docs.snowflake.com/en/user-guide/key-pair-auth.html#configuring-key-pair-authentication - if "private_key" in kwargs: + if "private_key" in kwargs or "private_key_content" in kwargs: kwargs["private_key"] = parse_private_key_path( - kwargs["private_key"], kwargs["private_key_passphrase"] + kwargs.get("private_key_passphrase"), + kwargs.get("private_key"), + kwargs.get("private_key_content"), ) try: @@ -510,13 +512,27 @@ def chunk_helper(lst: pd.DataFrame, n: int) -> Iterator[Tuple[int, pd.DataFrame] yield int(i / n), lst[i : i + n] -def parse_private_key_path(key_path: str, private_key_passphrase: str) -> bytes: - with open(key_path, "rb") as key: +def parse_private_key_path( + private_key_passphrase: str, + key_path: Optional[str] = None, + private_key_content: Optional[bytes] = None, +) -> bytes: + """Returns snowflake pkb by parsing and reading either from key path or private_key_content as byte string.""" + if private_key_content: p_key = serialization.load_pem_private_key( - key.read(), + private_key_content, password=private_key_passphrase.encode(), backend=default_backend(), ) + elif key_path: + with open(key_path, "rb") as key: + p_key = serialization.load_pem_private_key( + key.read(), + password=private_key_passphrase.encode(), + backend=default_backend(), + ) + else: + raise ValueError("Please provide key_path or private_key_content.") pkb = p_key.private_bytes( encoding=serialization.Encoding.DER, From a8a98c7401ac06212783868adc5e702d6667bc82 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Wed, 14 Aug 2024 10:02:00 -0400 Subject: [PATCH 02/96] Update pull_request_template.md --- .github/pull_request_template.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index b7d630e8bc..7849c24976 100644 --- 
a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -5,7 +5,7 @@ 3. If your change introduces any API changes, make sure to update the integration tests here: https://github.com/feast-dev/feast/tree/master/sdk/python/tests 4. Make sure documentation is updated for your PR! 5. Make sure your commits are signed: https://github.com/feast-dev/feast/blob/master/CONTRIBUTING.md#signing-off-commits -6. Make sure your PR title follows conventional commits (e.g. fix: [description] vs feat: [description]) +6. Make sure your PR title follows conventional commits (e.g. fix: [Description of ...], feat: [Description of ...], chore: [Description of ...], refactor: [Description of ...]) --> From 409e6f62943ee69a13087618b4592e35c799e34d Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Wed, 14 Aug 2024 14:17:31 -0400 Subject: [PATCH 03/96] chore: Update Feast documentation to add information about write patterns and feature transformations (#4400) * merging changes Signed-off-by: Francisco Javier Arceo * merging Signed-off-by: Francisco Javier Arceo * updated Signed-off-by: Francisco Javier Arceo * Update write-patterns.md Signed-off-by: Francisco Javier Arceo * Update write-patterns.md Signed-off-by: Francisco Javier Arceo * Update write-patterns.md Signed-off-by: Francisco Javier Arceo * Update write-patterns.md Adding some clarity. 
Signed-off-by: Francisco Javier Arceo * Update write-patterns.md Signed-off-by: Francisco Javier Arceo * chore: Update feature-transformetion.md (#4405) Signed-off-by: Francisco Javier Arceo * updated readme Signed-off-by: Francisco Javier Arceo * updated summary and readme Signed-off-by: Francisco Javier Arceo * updated docs Signed-off-by: Francisco Javier Arceo * Updated readme Signed-off-by: Francisco Javier Arceo * updated more Signed-off-by: Francisco Javier Arceo * updated transformation Signed-off-by: Francisco Javier Arceo * updated urls Signed-off-by: Francisco Javier Arceo * refactoring and renaming architecture-and-components to components and architecture Signed-off-by: Francisco Javier Arceo * updated urls Signed-off-by: Francisco Javier Arceo * moving files from architecture-and-components to components/ Signed-off-by: Francisco Javier Arceo * had a typo in components Signed-off-by: Francisco Javier Arceo * Cleaned everything up Signed-off-by: Francisco Javier Arceo --------- Signed-off-by: Francisco Javier Arceo --- docs/README.md | 32 ++++----- docs/SUMMARY.md | 22 +++--- docs/getting-started/architecture/README.md | 21 ++++++ .../architecture/feature-transformation.md | 20 ++++++ .../language.md | 0 docs/getting-started/architecture/overview.md | 18 +++++ .../push-vs-pull-model.md | 16 +++-- .../architecture/write-patterns.md | 67 +++++++++++++++++++ .../README.md | 14 +--- .../batch-materialization-engine.md | 0 .../offline-store.md | 0 .../online-store.md | 0 .../overview.md | 8 --- .../provider.md | 0 .../registry.md | 0 .../stream-processor.md | 0 docs/getting-started/concepts/dataset.md | 2 +- docs/getting-started/faq.md | 2 +- docs/getting-started/quickstart.md | 2 +- docs/how-to-guides/scaling-feast.md | 2 +- .../reference/batch-materialization/README.md | 2 +- docs/reference/codebase-structure.md | 2 +- .../feature-servers/python-feature-server.md | 2 +- docs/reference/offline-stores/README.md | 2 +- docs/reference/online-stores/README.md | 
2 +- docs/reference/providers/README.md | 2 +- sdk/python/feast/templates/gcp/README.md | 2 +- sdk/python/feast/templates/local/README.md | 2 +- 28 files changed, 181 insertions(+), 61 deletions(-) create mode 100644 docs/getting-started/architecture/README.md create mode 100644 docs/getting-started/architecture/feature-transformation.md rename docs/getting-started/{architecture-and-components => architecture}/language.md (100%) create mode 100644 docs/getting-started/architecture/overview.md rename docs/getting-started/{architecture-and-components => architecture}/push-vs-pull-model.md (66%) create mode 100644 docs/getting-started/architecture/write-patterns.md rename docs/getting-started/{architecture-and-components => components}/README.md (63%) rename docs/getting-started/{architecture-and-components => components}/batch-materialization-engine.md (100%) rename docs/getting-started/{architecture-and-components => components}/offline-store.md (100%) rename docs/getting-started/{architecture-and-components => components}/online-store.md (100%) rename docs/getting-started/{architecture-and-components => components}/overview.md (85%) rename docs/getting-started/{architecture-and-components => components}/provider.md (100%) rename docs/getting-started/{architecture-and-components => components}/registry.md (100%) rename docs/getting-started/{architecture-and-components => components}/stream-processor.md (100%) diff --git a/docs/README.md b/docs/README.md index d391069429..6652eaddc8 100644 --- a/docs/README.md +++ b/docs/README.md @@ -2,7 +2,16 @@ ## What is Feast? -Feast (**Fea**ture **St**ore) is a customizable operational data system that re-uses existing infrastructure to manage and serve machine learning features to realtime models. 
+Feast (**Fea**ture **St**ore) is an [open-source](https://github.com/feast-dev/feast) feature store that helps teams +operate production ML systems at scale by allowing them to define, manage, validate, and serve features for production +AI/ML. + +Feast's feature store is composed of two foundational components: (1) an [offline store](getting-started/components/offline-store.md) +for historical feature extraction used in model training and an (2) [online store](getting-started/components/online-store.md) +for serving features at low-latency in production systems and applications. + +Feast is a configurable operational data system that re-uses existing infrastructure to manage and serve machine learning +features to realtime models. For more details please review our [architecture](getting-started/architecture/overview.md). Feast allows ML platform teams to: @@ -20,38 +29,31 @@ Feast allows ML platform teams to: **Note:** Feast uses a push model for online serving. This means that the feature store pushes feature values to the online store, which reduces the latency of feature retrieval. This is more efficient than a pull model, where the model serving system must make a request to the feature store to retrieve feature values. See -[this document](getting-started/architecture-and-components/push-vs-pull-model.md) for a more detailed discussion. -{% endhint %} - -{% hint style="info" %} +[this document](getting-started/architecture/push-vs-pull-model.md) for a more detailed discussion. {% endhint %} ## Who is Feast for? -Feast helps ML platform teams with DevOps experience productionize real-time models. Feast can also help these teams build towards a feature platform that improves collaboration between engineers and data scientists. +Feast helps ML platform/MLOps teams with DevOps experience productionize real-time models. 
Feast also helps these teams +build a feature platform that improves collaboration between data engineers, software engineers, machine learning +engineers, and data scientists. Feast is likely **not** the right tool if you - * are in an organization that’s just getting started with ML and is not yet sure what the business impact of ML is -* rely primarily on unstructured data -* need very low latency feature retrieval (e.g. p99 feature retrieval << 10ms) -* have a small team to support a large number of use cases ## What Feast is not? ### Feast is not -* **an** [**ETL**](https://en.wikipedia.org/wiki/Extract,\_transform,\_load) / [**ELT**](https://en.wikipedia.org/wiki/Extract,\_load,\_transform) **system:** Feast is not (and does not plan to become) a general purpose data transformation or pipelining system. Users often leverage tools like [dbt](https://www.getdbt.com) to manage upstream data transformations. +* **an** [**ETL**](https://en.wikipedia.org/wiki/Extract,\_transform,\_load) / [**ELT**](https://en.wikipedia.org/wiki/Extract,\_load,\_transform) **system.** Feast is not a general purpose data pipelining system. Users often leverage tools like [dbt](https://www.getdbt.com) to manage upstream data transformations. Feast does support some [transformations](getting-started/architecture/feature-transformetion.md). * **a data orchestration tool:** Feast does not manage or orchestrate complex workflow DAGs. It relies on upstream data pipelines to produce feature values and integrations with tools like [Airflow](https://airflow.apache.org) to make features consistently available. * **a data warehouse:** Feast is not a replacement for your data warehouse or the source of truth for all transformed data in your organization. Rather, Feast is a light-weight downstream layer that can serve data from an existing data warehouse (or other data sources) to models in production. * **a database:** Feast is not a database, but helps manage data stored in other systems (e.g. 
BigQuery, Snowflake, DynamoDB, Redis) to make features consistently available at training / serving time ### Feast does not _fully_ solve - * **reproducible model training / model backtesting / experiment management**: Feast captures feature and model metadata, but does not version-control datasets / labels or manage train / test splits. Other tools like [DVC](https://dvc.org/), [MLflow](https://www.mlflow.org/), and [Kubeflow](https://www.kubeflow.org/) are better suited for this. -* **batch + streaming feature engineering**: Feast primarily processes already transformed feature values but is investing in supporting batch and streaming transformations. +* **batch feature engineering**: Feast supports on demand and streaming transformations. Feast is also investing in supporting batch transformations. * **native streaming feature integration:** Feast enables users to push streaming features, but does not pull from streaming sources or manage streaming pipelines. -* **feature sharing**: Feast has experimental functionality to enable discovery and cataloguing of feature metadata with a [Feast web UI (alpha)](https://docs.feast.dev/reference/alpha-web-ui). Feast also has community contributed plugins with [DataHub](https://datahubproject.io/docs/generated/ingestion/sources/feast/) and [Amundsen](https://github.com/amundsen-io/amundsen/blob/4a9d60176767c4d68d1cad5b093320ea22e26a49/databuilder/databuilder/extractor/feast\_extractor.py). * **lineage:** Feast helps tie feature values to model versions, but is not a complete solution for capturing end-to-end lineage from raw data sources to model versions. Feast also has community contributed plugins with [DataHub](https://datahubproject.io/docs/generated/ingestion/sources/feast/) and [Amundsen](https://github.com/amundsen-io/amundsen/blob/4a9d60176767c4d68d1cad5b093320ea22e26a49/databuilder/databuilder/extractor/feast\_extractor.py). 
* **data quality / drift detection**: Feast has experimental integrations with [Great Expectations](https://greatexpectations.io/), but is not purpose built to solve data drift / data quality issues. This requires more sophisticated monitoring across data pipelines, served feature values, labels, and model versions. @@ -74,7 +76,7 @@ Explore the following resources to get started with Feast: * [Quickstart](getting-started/quickstart.md) is the fastest way to get started with Feast * [Concepts](getting-started/concepts/) describes all important Feast API concepts -* [Architecture](getting-started/architecture-and-components/) describes Feast's overall architecture. +* [Architecture](getting-started/architecture/) describes Feast's overall architecture. * [Tutorials](tutorials/tutorials-overview/) shows full examples of using Feast in machine learning applications. * [Running Feast with Snowflake/GCP/AWS](how-to-guides/feast-snowflake-gcp-aws/) provides a more in-depth guide to using Feast. * [Reference](reference/feast-cli-commands.md) contains detailed API and design documents. 
diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index 87c3626254..a6a40fc91d 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -17,15 +17,19 @@ * [Point-in-time joins](getting-started/concepts/point-in-time-joins.md) * [Registry](getting-started/concepts/registry.md) * [\[Alpha\] Saved dataset](getting-started/concepts/dataset.md) -* [Architecture](getting-started/architecture-and-components/README.md) - * [Overview](getting-started/architecture-and-components/overview.md) - * [Language](getting-started/architecture-and-components/language.md) - * [Push vs Pull Model](getting-started/architecture-and-components/push-vs-pull-model.md) - * [Registry](getting-started/architecture-and-components/registry.md) - * [Offline store](getting-started/architecture-and-components/offline-store.md) - * [Online store](getting-started/architecture-and-components/online-store.md) - * [Batch Materialization Engine](getting-started/architecture-and-components/batch-materialization-engine.md) - * [Provider](getting-started/architecture-and-components/provider.md) +* [Architecture](getting-started/architecture/README.md) + * [Overview](getting-started/architecture/overview.md) + * [Language](getting-started/architecture/language.md) + * [Push vs Pull Model](getting-started/architecture/push-vs-pull-model.md) + * [Write Patterns](getting-started/architecture/write-patterns.md) + * [Feature Transformation](getting-started/architecture/feature-transformation.md) +* [Components](getting-started/components/README.md) + * [Overview](getting-started/components/overview.md) + * [Registry](getting-started/components/registry.md) + * [Offline store](getting-started/components/offline-store.md) + * [Online store](getting-started/components/online-store.md) + * [Batch Materialization Engine](getting-started/components/batch-materialization-engine.md) + * [Provider](getting-started/components/provider.md) * [Third party integrations](getting-started/third-party-integrations.md) * 
[FAQ](getting-started/faq.md) diff --git a/docs/getting-started/architecture/README.md b/docs/getting-started/architecture/README.md new file mode 100644 index 0000000000..a45f4ed6ec --- /dev/null +++ b/docs/getting-started/architecture/README.md @@ -0,0 +1,21 @@ +# Architecture + +{% content-ref url="overview.md" %} +[overview.md](overview.md) +{% endcontent-ref %} + +{% content-ref url="language.md" %} +[language.md](language.md) +{% endcontent-ref %} + +{% content-ref url="push-vs-pull-model.md" %} +[push-vs-pull-model.md](push-vs-pull-model.md) +{% endcontent-ref %} + +{% content-ref url="write-patterns.md" %} +[write-patterns.md](write-patterns.md) +{% endcontent-ref %} + +{% content-ref url="feature-transformation.md" %} +[feature-transformation.md](feature-transformation.md) +{% endcontent-ref %} diff --git a/docs/getting-started/architecture/feature-transformation.md b/docs/getting-started/architecture/feature-transformation.md new file mode 100644 index 0000000000..457e71d85e --- /dev/null +++ b/docs/getting-started/architecture/feature-transformation.md @@ -0,0 +1,20 @@ +# Feature Transformation + +A *feature transformation* is a function that takes some set of input data and +returns some set of output data. Feature transformations can happen on either raw data or derived data. + +Feature transformations can be executed by three types of "transformation engines": + +1. The Feast Feature Server +2. An Offline Store (e.g., Snowflake, BigQuery, DuckDB, Spark, etc.) +3. A Stream processor (e.g., Flink or Spark Streaming) + +The three transformation engines are coupled with the [communication pattern used for writes](write-patterns.md). + +Importantly, this implies that different feature transformation code may be +used under different transformation engines, so understanding the tradeoffs of +when to use which transformation engine/communication pattern is extremely critical to +the success of your implementation.
+ +In general, we recommend transformation engines and network calls to be chosen by aligning it with what is most +appropriate for the data producer, feature/model usage, and overall product. \ No newline at end of file diff --git a/docs/getting-started/architecture-and-components/language.md b/docs/getting-started/architecture/language.md similarity index 100% rename from docs/getting-started/architecture-and-components/language.md rename to docs/getting-started/architecture/language.md diff --git a/docs/getting-started/architecture/overview.md b/docs/getting-started/architecture/overview.md new file mode 100644 index 0000000000..7d1180bfd1 --- /dev/null +++ b/docs/getting-started/architecture/overview.md @@ -0,0 +1,18 @@ +# Overview + +![Feast Architecture Diagram](<../../assets/feast_marchitecture.png>) + +Feast's architecture is designed to be flexible and scalable. It is composed of several components that work together to provide a feature store that can be used to serve features for training and inference. + +* Feast uses a [Push Model](push-vs-pull-model.md) to ingest data from different sources and store feature values in the +online store. +This allows Feast to serve features in real-time with low latency. + +* Feast supports On Demand and Streaming Transformations for [feature computation](feature-transformation.md) and + will support Batch transformations in the future. For Streaming and Batch, Feast requires a separate Feature Transformation + Engine (in the batch case, this is typically your Offline Store). We are exploring adding a default streaming engine to Feast. + +* Domain expertise is recommended when integrating a data source with Feast to understand the [tradeoffs from different + write patterns](write-patterns.md) to your application + +* We recommend [using Python](language.md) for your Feature Store microservice. As mentioned in the document, precomputing features is the recommended optimal path to ensure low latency performance.
Reducing feature serving to a lightweight database lookup is the ideal pattern, which means the marginal overhead of Python should be tolerable. Because of this we believe the pros of Python outweigh the costs, as reimplementing feature logic is undesirable. Java and Go Clients are also available for online feature retrieval. diff --git a/docs/getting-started/architecture-and-components/push-vs-pull-model.md b/docs/getting-started/architecture/push-vs-pull-model.md similarity index 66% rename from docs/getting-started/architecture-and-components/push-vs-pull-model.md rename to docs/getting-started/architecture/push-vs-pull-model.md index a1f404221b..b205e97fc5 100644 --- a/docs/getting-started/architecture-and-components/push-vs-pull-model.md +++ b/docs/getting-started/architecture/push-vs-pull-model.md @@ -6,15 +6,23 @@ in the online store, to serve features in real-time. In a [Pull Model](https://en.wikipedia.org/wiki/Pull_technology), Feast would pull data from the data producers at request time and store the feature values in -the online store before serving them (storing them would actually be unneccessary). +the online store before serving them (storing them would actually be unnecessary). This approach would incur additional network latency as Feast would need to orchestrate a request to each data producer, which would mean the latency would be at least as long as your slowest call. So, in order to serve features as fast as possible, we push data to Feast and store the feature values in the online store. -The trade-off with the Push Model is that strong consistency is not gauranteed out -of the box. Instead, stong consistency has to be explicitly designed for in orchestrating +The trade-off with the Push Model is that strong consistency is not guaranteed out +of the box. Instead, strong consistency has to be explicitly designed for in orchestrating the updates to Feast and the client usage. 
The significant advantage with this approach is that Feast is read-optimized for low-latency -feature retrieval. \ No newline at end of file +feature retrieval. + +# How to Push + +Implicit in the Push model are decisions about _how_ and _when_ to push feature values to the online store. + +From a developer's perspective, there are three ways to push feature values to the online store with different tradeoffs. + +They are discussed further in the [Write Patterns](getting-started/architecture/write-patterns.md) section. diff --git a/docs/getting-started/architecture/write-patterns.md b/docs/getting-started/architecture/write-patterns.md new file mode 100644 index 0000000000..4674b5504d --- /dev/null +++ b/docs/getting-started/architecture/write-patterns.md @@ -0,0 +1,67 @@ +# Writing Data to Feast + +Feast uses a [Push Model](getting-started/architecture/push-vs-pull-model.md) to push features to the online store. + +This has two important consequences: (1) communication patterns between the Data Producer (i.e., the client) and Feast (i.e., the server) and (2) feature computation and +_feature value_ write patterns to Feast's online store. + +Data Producers (i.e., services that generate data) send data to Feast so that Feast can write feature values to the online store. That data can +be either raw data where Feast computes and stores the feature values or precomputed feature values. + +## Communication Patterns + +There are two ways a client (or Data Producer) can *_send_* data to the online store: + +1. Synchronously + - Using a synchronous API call for a small number of entities or a single entity (e.g., using the [`push` or `write_to_online_store` methods](../../reference/data-sources/push.md#pushing-data)) or the Feature Server's [`push` endpoint](../../reference/feature-servers/python-feature-server.md#pushing-features-to-the-online-and-offline-stores)) +2.
Asynchronously + - Using an asynchronous API call for a small number of entities or a single entity (e.g., using the [`push` or `write_to_online_store` methods](../../reference/data-sources/push.md#pushing-data)) or the Feature Server's [`push` endpoint](../../reference/feature-servers/python-feature-server.md#pushing-features-to-the-online-and-offline-stores)) + - Using a "batch job" for a large number of entities (e.g., using a [batch materialization engine](../components/batch-materialization-engine.md)) + +Note, in some contexts, developers may "batch" a group of entities together and write them to the online store in a +single API call. This is a common pattern when writing data to the online store to reduce write loads but we would +not qualify this as a batch job. + +## Feature Value Write Patterns + +Writing feature values to the online store (i.e., the server) can be done in two ways: Precomputing the transformations client-side or Computing the transformations On Demand server-side. + +### Combining Approaches + +In some scenarios, a combination of Precomputed and On Demand transformations may be optimal. + +When selecting feature value write patterns, one must consider the specific requirements of your application, the acceptable correctness of the data, the latency tolerance, and the computational resources available. Making deliberate choices can help the performance and reliability of your service. + +There are three ways the client can write *feature values* to the online store: + +1. Precomputing transformations +2. Computing transformations On Demand +3. Hybrid (Precomputed + On Demand) + +### 1. Precomputing Transformations
Precomputed transformations can happen outside of Feast (e.g., via some batch job or streaming application) or inside of the Feast feature server when writing to the online store via the `push` or `write-to-online-store` api. + +### 2.
Computing Transformations On Demand +On Demand transformations can only happen inside of Feast at either (1) the time of the client's request or (2) when the data producer writes to the online store. + +### 3. Hybrid (Precomputed + On Demand) +The hybrid approach allows for precomputed transformations to happen inside or outside of Feast and have the On Demand transformations happen at client request time. This is particularly convenient for "Time Since Last" types of features (e.g., time since purchase). + +## Tradeoffs + +When deciding between synchronous and asynchronous data writes, several tradeoffs should be considered: + +- **Data Consistency**: Asynchronous writes allow Data Producers to send data without waiting for the write operation to complete, which can lead to situations where the data in the online store is stale. This might be acceptable in scenarios where absolute freshness is not critical. However, for critical operations, such as calculating loan amounts in financial applications, stale data can lead to incorrect decisions, making synchronous writes essential. +- **Correctness**: The risk of data being out-of-date must be weighed against the operational requirements. For instance, in a lending application, having up-to-date feature data can be crucial for correctness (depending upon the features and raw data), thus favoring synchronous writes. In less sensitive contexts, the eventual consistency offered by asynchronous writes might be sufficient. +- **Service Coupling**: Synchronous writes result in tighter coupling between services. If a write operation fails, it can cause the dependent service operation to fail as well, which might be a significant drawback in systems requiring high reliability and independence between services. +- **Application Latency**: Asynchronous writes typically reduce the perceived latency from the client's perspective because the client does not wait for the write operation to complete. 
This can enhance the user experience and efficiency in environments where operations are not critically dependent on immediate data freshness. + +The table below can help guide the most appropriate data write and feature computation strategies based on specific application needs and data sensitivity. + +| Data Write Type | Feature Computation | Scenario | Recommended Approach | +|----------|-----------------|---------------------|----------------------| +| Asynchronous | On Demand | Data-intensive applications tolerant to staleness | Opt for asynchronous writes with on-demand computation to balance load and manage resource usage efficiently. | +| Asynchronous | Precomputed | High volume, non-critical data processing | Use asynchronous batch jobs with precomputed transformations for efficiency and scalability. | +| Synchronous | On Demand | High-stakes decision making | Use synchronous writes with on-demand feature computation to ensure data freshness and correctness. | +| Synchronous | Precomputed | User-facing applications requiring quick feedback | Use synchronous writes with precomputed features to reduce latency and improve user experience. | +| Synchronous | Hybrid (Precomputed + On Demand) | High-stakes decision making that want to optimize for latency under constraints| Use synchronous writes with precomputed features where possible and a select set of on demand computations to reduce latency and improve user experience. 
| diff --git a/docs/getting-started/architecture-and-components/README.md b/docs/getting-started/components/README.md similarity index 63% rename from docs/getting-started/architecture-and-components/README.md rename to docs/getting-started/components/README.md index 050a430c97..d468714bd4 100644 --- a/docs/getting-started/architecture-and-components/README.md +++ b/docs/getting-started/components/README.md @@ -1,16 +1,4 @@ -# Architecture - -{% content-ref url="language.md" %} -[language.md](language.md) -{% endcontent-ref %} - -{% content-ref url="overview.md" %} -[overview.md](overview.md) -{% endcontent-ref %} - -{% content-ref url="push-vs-pull-model.md" %} -[push-vs-pull-model.md](push-vs-pull-model.md) -{% endcontent-ref %} +# Components {% content-ref url="registry.md" %} [registry.md](registry.md) diff --git a/docs/getting-started/architecture-and-components/batch-materialization-engine.md b/docs/getting-started/components/batch-materialization-engine.md similarity index 100% rename from docs/getting-started/architecture-and-components/batch-materialization-engine.md rename to docs/getting-started/components/batch-materialization-engine.md diff --git a/docs/getting-started/architecture-and-components/offline-store.md b/docs/getting-started/components/offline-store.md similarity index 100% rename from docs/getting-started/architecture-and-components/offline-store.md rename to docs/getting-started/components/offline-store.md diff --git a/docs/getting-started/architecture-and-components/online-store.md b/docs/getting-started/components/online-store.md similarity index 100% rename from docs/getting-started/architecture-and-components/online-store.md rename to docs/getting-started/components/online-store.md diff --git a/docs/getting-started/architecture-and-components/overview.md b/docs/getting-started/components/overview.md similarity index 85% rename from docs/getting-started/architecture-and-components/overview.md rename to 
docs/getting-started/components/overview.md index f4d543cd5a..393f436e5b 100644 --- a/docs/getting-started/architecture-and-components/overview.md +++ b/docs/getting-started/components/overview.md @@ -28,11 +28,3 @@ A complete Feast deployment contains the following components: * **Batch Materialization Engine:** The [Batch Materialization Engine](batch-materialization-engine.md) component launches a process which loads data into the online store from the offline store. By default, Feast uses a local in-process engine implementation to materialize data. However, additional infrastructure can be used for a more scalable materialization process. * **Online Store:** The online store is a database that stores only the latest feature values for each entity. The online store is either populated through materialization jobs or through [stream ingestion](../../reference/data-sources/push.md). * **Offline Store:** The offline store persists batch data that has been ingested into Feast. This data is used for producing training datasets. For feature retrieval and materialization, Feast does not manage the offline store directly, but runs queries against it. However, offline stores can be configured to support writes if Feast configures logging functionality of served features. - -{% hint style="info" %} -Java and Go Clients are also available for online feature retrieval. - -In general, we recommend [using Python](language.md) for your Feature Store microservice. - -As mentioned in the document, precomputing features is the recommended optimal path to ensure low latency performance. Reducing feature serving to a lightweight database lookup is the ideal pattern, which means the marginal overhead of Python should be tolerable. Because of this we believe the pros of Python outweigh the costs, as reimplementing feature logic is undesirable. 
-{% endhint %} diff --git a/docs/getting-started/architecture-and-components/provider.md b/docs/getting-started/components/provider.md similarity index 100% rename from docs/getting-started/architecture-and-components/provider.md rename to docs/getting-started/components/provider.md diff --git a/docs/getting-started/architecture-and-components/registry.md b/docs/getting-started/components/registry.md similarity index 100% rename from docs/getting-started/architecture-and-components/registry.md rename to docs/getting-started/components/registry.md diff --git a/docs/getting-started/architecture-and-components/stream-processor.md b/docs/getting-started/components/stream-processor.md similarity index 100% rename from docs/getting-started/architecture-and-components/stream-processor.md rename to docs/getting-started/components/stream-processor.md diff --git a/docs/getting-started/concepts/dataset.md b/docs/getting-started/concepts/dataset.md index d55adb4703..829ad4284e 100644 --- a/docs/getting-started/concepts/dataset.md +++ b/docs/getting-started/concepts/dataset.md @@ -2,7 +2,7 @@ Feast datasets allow for conveniently saving dataframes that include both features and entities to be subsequently used for data analysis and model training. [Data Quality Monitoring](https://docs.google.com/document/d/110F72d4NTv80p35wDSONxhhPBqWRwbZXG4f9mNEMd98) was the primary motivation for creating dataset concept. -Dataset's metadata is stored in the Feast registry and raw data (features, entities, additional input keys and timestamp) is stored in the [offline store](../architecture-and-components/offline-store.md). +Dataset's metadata is stored in the Feast registry and raw data (features, entities, additional input keys and timestamp) is stored in the [offline store](../components/offline-store.md). 
Dataset can be created from: diff --git a/docs/getting-started/faq.md b/docs/getting-started/faq.md index d603e12ab6..6567ae181d 100644 --- a/docs/getting-started/faq.md +++ b/docs/getting-started/faq.md @@ -29,7 +29,7 @@ Feature views once they are used by a feature service are intended to be immutab ### What is the difference between data sources and the offline store? -The data source itself defines the underlying data warehouse table in which the features are stored. The offline store interface defines the APIs required to make an arbitrary compute layer work for Feast (e.g. pulling features given a set of feature views from their sources, exporting the data set results to different formats). Please see [data sources](concepts/data-ingestion.md) and [offline store](architecture-and-components/offline-store.md) for more details. +The data source itself defines the underlying data warehouse table in which the features are stored. The offline store interface defines the APIs required to make an arbitrary compute layer work for Feast (e.g. pulling features given a set of feature views from their sources, exporting the data set results to different formats). Please see [data sources](concepts/data-ingestion.md) and [offline store](components/offline-store.md) for more details. ### Is it possible to have offline and online stores from different providers? diff --git a/docs/getting-started/quickstart.md b/docs/getting-started/quickstart.md index 01c039e9c5..ffc01c9d6e 100644 --- a/docs/getting-started/quickstart.md +++ b/docs/getting-started/quickstart.md @@ -623,6 +623,6 @@ show up in the upcoming concepts + architecture + tutorial pages as well. ## Next steps * Read the [Concepts](concepts/) page to understand the Feast data model. -* Read the [Architecture](architecture-and-components/) page. +* Read the [Architecture](architecture/) page. * Check out our [Tutorials](../tutorials/tutorials-overview/) section for more examples on how to use Feast. 
* Follow our [Running Feast with Snowflake/GCP/AWS](../how-to-guides/feast-snowflake-gcp-aws/) guide for a more in-depth tutorial on using Feast. diff --git a/docs/how-to-guides/scaling-feast.md b/docs/how-to-guides/scaling-feast.md index ce63f027c9..7e4f27b1dd 100644 --- a/docs/how-to-guides/scaling-feast.md +++ b/docs/how-to-guides/scaling-feast.md @@ -20,7 +20,7 @@ The recommended solution in this case is to use the [SQL based registry](../tuto The default Feast materialization process is an in-memory process, which pulls data from the offline store before writing it to the online store. However, this process does not scale for large data sets, since it's executed on a single-process. -Feast supports pluggable [Materialization Engines](../getting-started/architecture-and-components/batch-materialization-engine.md), that allow the materialization process to be scaled up. +Feast supports pluggable [Materialization Engines](../getting-started/components/batch-materialization-engine.md), that allow the materialization process to be scaled up. Aside from the local process, Feast supports a [Lambda-based materialization engine](https://rtd.feast.dev/en/master/#alpha-lambda-based-engine), and a [Bytewax-based materialization engine](https://rtd.feast.dev/en/master/#bytewax-engine). Users may also be able to build an engine to scale up materialization using existing infrastructure in their organizations. \ No newline at end of file diff --git a/docs/reference/batch-materialization/README.md b/docs/reference/batch-materialization/README.md index 8511fd81d0..a05d6d75e5 100644 --- a/docs/reference/batch-materialization/README.md +++ b/docs/reference/batch-materialization/README.md @@ -1,6 +1,6 @@ # Batch materialization -Please see [Batch Materialization Engine](../../getting-started/architecture-and-components/batch-materialization-engine.md) for an explanation of batch materialization engines. 
+Please see [Batch Materialization Engine](../../getting-started/components/batch-materialization-engine.md) for an explanation of batch materialization engines. {% page-ref page="snowflake.md" %} diff --git a/docs/reference/codebase-structure.md b/docs/reference/codebase-structure.md index 8eb5572679..7077e48fef 100644 --- a/docs/reference/codebase-structure.md +++ b/docs/reference/codebase-structure.md @@ -34,7 +34,7 @@ There are also several important submodules: * `ui/` contains the embedded Web UI, to be launched on the `feast ui` command. Of these submodules, `infra/` is the most important. -It contains the interfaces for the [provider](getting-started/architecture-and-components/provider.md), [offline store](getting-started/architecture-and-components/offline-store.md), [online store](getting-started/architecture-and-components/online-store.md), [batch materialization engine](getting-started/architecture-and-components/batch-materialization-engine.md), and [registry](getting-started/architecture-and-components/registry.md), as well as all of their individual implementations. +It contains the interfaces for the [provider](getting-started/components/provider.md), [offline store](getting-started/components/offline-store.md), [online store](getting-started/components/online-store.md), [batch materialization engine](getting-started/components/batch-materialization-engine.md), and [registry](getting-started/components/registry.md), as well as all of their individual implementations. 
``` $ tree --dirsfirst -L 1 infra diff --git a/docs/reference/feature-servers/python-feature-server.md b/docs/reference/feature-servers/python-feature-server.md index 0d8a0aef75..33dfe77ae1 100644 --- a/docs/reference/feature-servers/python-feature-server.md +++ b/docs/reference/feature-servers/python-feature-server.md @@ -153,7 +153,7 @@ curl -X POST \ ### Pushing features to the online and offline stores -The Python feature server also exposes an endpoint for [push sources](../../data-sources/push.md). This endpoint allows you to push data to the online and/or offline store. +The Python feature server also exposes an endpoint for [push sources](../data-sources/push.md). This endpoint allows you to push data to the online and/or offline store. The request definition for `PushMode` is a string parameter `to` where the options are: \[`"online"`, `"offline"`, `"online_and_offline"`]. diff --git a/docs/reference/offline-stores/README.md b/docs/reference/offline-stores/README.md index 33eca6d426..87c92bfcf8 100644 --- a/docs/reference/offline-stores/README.md +++ b/docs/reference/offline-stores/README.md @@ -1,6 +1,6 @@ # Offline stores -Please see [Offline Store](../../getting-started/architecture-and-components/offline-store.md) for a conceptual explanation of offline stores. +Please see [Offline Store](../../getting-started/components/offline-store.md) for a conceptual explanation of offline stores. {% content-ref url="overview.md" %} [overview.md](overview.md) diff --git a/docs/reference/online-stores/README.md b/docs/reference/online-stores/README.md index 0acf6701f9..bf5419b249 100644 --- a/docs/reference/online-stores/README.md +++ b/docs/reference/online-stores/README.md @@ -1,6 +1,6 @@ # Online stores -Please see [Online Store](../../getting-started/architecture-and-components/online-store.md) for an explanation of online stores. +Please see [Online Store](../../getting-started/components/online-store.md) for an explanation of online stores. 
{% content-ref url="overview.md" %} [overview.md](overview.md) diff --git a/docs/reference/providers/README.md b/docs/reference/providers/README.md index 20686a1e14..925ae8ebc1 100644 --- a/docs/reference/providers/README.md +++ b/docs/reference/providers/README.md @@ -1,6 +1,6 @@ # Providers -Please see [Provider](../../getting-started/architecture-and-components/provider.md) for an explanation of providers. +Please see [Provider](../../getting-started/components/provider.md) for an explanation of providers. {% page-ref page="local.md" %} diff --git a/sdk/python/feast/templates/gcp/README.md b/sdk/python/feast/templates/gcp/README.md index 7929dc2bdf..bc9e51769c 100644 --- a/sdk/python/feast/templates/gcp/README.md +++ b/sdk/python/feast/templates/gcp/README.md @@ -11,7 +11,7 @@ You can run the overall workflow with `python test_workflow.py`. ## To move from this into a more production ready workflow: 1. `feature_store.yaml` points to a local file as a registry. You'll want to setup a remote file (e.g. in S3/GCS) or a SQL registry. See [registry docs](https://docs.feast.dev/getting-started/concepts/registry) for more details. -2. This example uses an already setup BigQuery Feast data warehouse as the [offline store](https://docs.feast.dev/getting-started/architecture-and-components/offline-store) +2. This example uses an already setup BigQuery Feast data warehouse as the [offline store](https://docs.feast.dev/getting-started/components/offline-store) to generate training data. You'll need to connect your own BigQuery instance to make this work. 3. Setup CI/CD + dev vs staging vs prod environments to automatically update the registry as you change Feast feature definitions. See [docs](https://docs.feast.dev/how-to-guides/running-feast-in-production#1.-automatically-deploying-changes-to-your-feature-definitions). 4. (optional) Regularly scheduled materialization to power low latency feature retrieval (e.g. via Airflow). 
See [Batch data ingestion](https://docs.feast.dev/getting-started/concepts/data-ingestion#batch-data-ingestion) diff --git a/sdk/python/feast/templates/local/README.md b/sdk/python/feast/templates/local/README.md index daf3a686fb..1e617cc442 100644 --- a/sdk/python/feast/templates/local/README.md +++ b/sdk/python/feast/templates/local/README.md @@ -18,7 +18,7 @@ You can run the overall workflow with `python test_workflow.py`. - You can see your options if you run `feast init --help`. 2. `feature_store.yaml` points to a local file as a registry. You'll want to setup a remote file (e.g. in S3/GCS) or a SQL registry. See [registry docs](https://docs.feast.dev/getting-started/concepts/registry) for more details. -3. This example uses a file [offline store](https://docs.feast.dev/getting-started/architecture-and-components/offline-store) +3. This example uses a file [offline store](https://docs.feast.dev/getting-started/components/offline-store) to generate training data. It does not scale. We recommend instead using a data warehouse such as BigQuery, Snowflake, Redshift. There is experimental support for Spark as well. 4. Setup CI/CD + dev vs staging vs prod environments to automatically update the registry as you change Feast feature definitions. See [docs](https://docs.feast.dev/how-to-guides/running-feast-in-production#1.-automatically-deploying-changes-to-your-feature-definitions). 
From c42d9fd6da85f098914d9113536bd826f7e17501 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Wed, 14 Aug 2024 16:56:40 -0400 Subject: [PATCH 04/96] chore: Update arch/README.md (#4411) Update README.md --- docs/getting-started/architecture/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/getting-started/architecture/README.md b/docs/getting-started/architecture/README.md index a45f4ed6ec..35990373d7 100644 --- a/docs/getting-started/architecture/README.md +++ b/docs/getting-started/architecture/README.md @@ -16,6 +16,6 @@ [write-patterns.md](write-patterns.md) {% endcontent-ref %} -{% content-ref url="feature-transformation-model.md" %} +{% content-ref url="feature-transformation.md" %} [feature-transformation.md](feature-transformation.md) {% endcontent-ref %} From 8851722000549a7639ecff88de9b29f4f00c3b48 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Thu, 15 Aug 2024 12:28:08 -0400 Subject: [PATCH 05/96] chore: Update language.md (#4412) Update language.md --- docs/getting-started/architecture/language.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/getting-started/architecture/language.md b/docs/getting-started/architecture/language.md index 916dff28d7..cff0fc467b 100644 --- a/docs/getting-started/architecture/language.md +++ b/docs/getting-started/architecture/language.md @@ -1,10 +1,10 @@ # Python: The Language of Production Machine Learning -Use Python to serve your features online. +Use Python to serve your features. ## Why should you use Python to Serve features for Machine Learning? -Python has emerged as the primary language for machine learning, and this extends to feature serving and there are five main reasons Feast recommends using a microservice in Feast. +Python has emerged as the primary language for machine learning, and this extends to feature serving and there are five main reasons Feast recommends using a microservice written in Python. ## 1. 
Python is the language of Machine Learning From 721ec74f17ee95e375054f21135e54e0687104a7 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Fri, 16 Aug 2024 03:00:03 -0400 Subject: [PATCH 06/96] feat: Create ADOPTERS.md (#4410) * Create ADOPTERS.md * Update ADOPTERS.md --- community/ADOPTERS.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 community/ADOPTERS.md diff --git a/community/ADOPTERS.md b/community/ADOPTERS.md new file mode 100644 index 0000000000..a16fbef379 --- /dev/null +++ b/community/ADOPTERS.md @@ -0,0 +1,15 @@ +# Adopters of Feast + +Below are the adopters of Feast. If you are using Feast please add +yourself into the following list by a pull request. Please keep the list in +alphabetical order. + +| Organization | Contact | GitHub Username | +| ------------ | ------- | ------- | +| Affirm | Francisco Javier Arceo | franciscojavierarceo | +| Bank of Georgia | Tornike Gurgenidze | tokoko | +| Get Ground | Zhiling Chen | zhilingc | +| Gojek | Pradithya Aria Pura | pradithya | +| Twitter | David Liu | mavysavydav| +| Shopify | Matt Delacour | MattDelac | +| Snowflake | Miles Adkins | sfc-gh-madkins | From 0baeeb5ec524c1e6209edab9605ca8a098a2ec88 Mon Sep 17 00:00:00 2001 From: Dan Baron <84331438+danbaron63@users.noreply.github.com> Date: Fri, 16 Aug 2024 08:01:29 +0100 Subject: [PATCH 07/96] fix: Using repo_config parameter in teardown to allow for feature-store-yaml overrides (#4413) * fix: using repo_config parameter in teardown to allow for feature-store-yaml overrides Signed-off-by: Dan Baron * fix: fixing linting and formatting issues in tests Signed-off-by: Dan Baron * fix: removing unnecessary Path object construction Signed-off-by: Dan Baron --------- Signed-off-by: Dan Baron --- sdk/python/feast/repo_operations.py | 2 +- sdk/python/feast/transformation/pandas_transformation.py | 9 ++++----- .../feature_repos/universal/data_sources/file.py | 7 ++++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git 
a/sdk/python/feast/repo_operations.py b/sdk/python/feast/repo_operations.py index a3100ca9d7..0a89ab72ca 100644 --- a/sdk/python/feast/repo_operations.py +++ b/sdk/python/feast/repo_operations.py @@ -359,7 +359,7 @@ def apply_total(repo_config: RepoConfig, repo_path: Path, skip_source_validation def teardown(repo_config: RepoConfig, repo_path: Optional[str]): # Cannot pass in both repo_path and repo_config to FeatureStore. - feature_store = FeatureStore(repo_path=repo_path, config=None) + feature_store = FeatureStore(repo_path=repo_path, config=repo_config) feature_store.teardown() diff --git a/sdk/python/feast/transformation/pandas_transformation.py b/sdk/python/feast/transformation/pandas_transformation.py index 41e437fb6b..ac31a4fa20 100644 --- a/sdk/python/feast/transformation/pandas_transformation.py +++ b/sdk/python/feast/transformation/pandas_transformation.py @@ -1,5 +1,4 @@ -from types import FunctionType -from typing import Any +from typing import Any, Callable import dill import pandas as pd @@ -15,7 +14,7 @@ class PandasTransformation: - def __init__(self, udf: FunctionType, udf_string: str = ""): + def __init__(self, udf: Callable[[Any], Any], udf_string: str = ""): """ Creates an PandasTransformation object. 
@@ -30,11 +29,11 @@ def __init__(self, udf: FunctionType, udf_string: str = ""): def transform_arrow( self, pa_table: pyarrow.Table, features: list[Field] ) -> pyarrow.Table: - output_df_pandas = self.udf.__call__(pa_table.to_pandas()) + output_df_pandas = self.udf(pa_table.to_pandas()) return pyarrow.Table.from_pandas(output_df_pandas) def transform(self, input_df: pd.DataFrame) -> pd.DataFrame: - return self.udf.__call__(input_df) + return self.udf(input_df) def infer_features(self, random_input: dict[str, list[Any]]) -> list[Field]: df = pd.DataFrame.from_dict(random_input) diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py index 5174e16046..e505986350 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py @@ -5,6 +5,7 @@ import tempfile import uuid from pathlib import Path +from subprocess import Popen from typing import Any, Dict, List, Optional import pandas as pd @@ -367,7 +368,7 @@ class RemoteOfflineStoreDataSourceCreator(FileDataSourceCreator): def __init__(self, project_name: str, *args, **kwargs): super().__init__(project_name) self.server_port: int = 0 - self.proc = None + self.proc: Optional[Popen[bytes]] = None def setup(self, registry: RegistryConfig): parent_offline_config = super().create_offline_store_config() @@ -382,13 +383,13 @@ def setup(self, registry: RegistryConfig): repo_path = Path(tempfile.mkdtemp()) with open(repo_path / "feature_store.yaml", "w") as outfile: yaml.dump(config.model_dump(by_alias=True), outfile) - repo_path = str(repo_path.resolve()) + repo_path = repo_path.resolve() self.server_port = free_port() host = "0.0.0.0" cmd = [ "feast", - "-c" + repo_path, + "-c" + str(repo_path), "serve_offline", "--host", host, From cebbe045597b85e1ae4394a8c14741e88347a6b8 Mon Sep 17 00:00:00 2001 From: Francisco Arceo 
Date: Fri, 16 Aug 2024 03:30:15 -0400 Subject: [PATCH 08/96] feat: Updating docs to include model inference guidelines (#4416) Signed-off-by: Francisco Javier Arceo --- docs/SUMMARY.md | 1 + docs/getting-started/architecture/README.md | 4 + .../architecture/feature-transformation.md | 1 + .../architecture/model-inference.md | 88 +++++++++++++++++++ docs/getting-started/architecture/overview.md | 7 +- 5 files changed, 98 insertions(+), 3 deletions(-) create mode 100644 docs/getting-started/architecture/model-inference.md diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index a6a40fc91d..0060ae729e 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -23,6 +23,7 @@ * [Push vs Pull Model](getting-started/architecture/push-vs-pull-model.md) * [Write Patterns](getting-started/architecture/write-patterns.md) * [Feature Transformation](getting-started/architecture/feature-transformation.md) + * [Feature Serving and Model Inference](getting-started/architecture/model-inference.md) * [Components](getting-started/components/README.md) * [Overview](getting-started/components/overview.md) * [Registry](getting-started/components/registry.md) diff --git a/docs/getting-started/architecture/README.md b/docs/getting-started/architecture/README.md index 35990373d7..f824164339 100644 --- a/docs/getting-started/architecture/README.md +++ b/docs/getting-started/architecture/README.md @@ -19,3 +19,7 @@ {% content-ref url="feature-transformation.md" %} [feature-transformation.md](feature-transformation.md) {% endcontent-ref %} + +{% content-ref url="model-inference.md" %} +[model-inference.md](model-inference.md) +{% endcontent-ref %} diff --git a/docs/getting-started/architecture/feature-transformation.md b/docs/getting-started/architecture/feature-transformation.md index 457e71d85e..1a15d4c3a5 100644 --- a/docs/getting-started/architecture/feature-transformation.md +++ b/docs/getting-started/architecture/feature-transformation.md @@ -3,6 +3,7 @@ A *feature transformation* is a function 
that takes some set of input data and returns some set of output data. Feature transformations can happen on either raw data or derived data. +## Feature Transformation Engines Feature transformations can be executed by three types of "transformation engines": 1. The Feast Feature Server diff --git a/docs/getting-started/architecture/model-inference.md b/docs/getting-started/architecture/model-inference.md new file mode 100644 index 0000000000..4fe2859c55 --- /dev/null +++ b/docs/getting-started/architecture/model-inference.md @@ -0,0 +1,88 @@ +# Feature Serving and Model Inference + +Production machine learning systems can choose from four approaches to serving machine learning predictions (the output +of model inference): +1. Online model inference with online features +2. Precomputed (batch) model predictions without online features +3. Online model inference with online features and cached predictions +4. Online model inference without features + +*Note: online features can be sourced from batch, streaming, or request data sources.* + +These four approaches have different tradeoffs but, in general, have significant implementation differences. + +## 1. Online Model Inference with Online Features +Online model inference with online features is a powerful approach to serving data-driven machine learning applications. +This requires a feature store to serve online features and a model server to serve model predictions (e.g., KServe). +This approach is particularly useful for applications where request-time data is required to run inference. +```python +features = store.get_online_features( + feature_refs=[ + "user_data:click_through_rate", + "user_data:number_of_clicks", + "user_data:average_page_duration", + ], + entity_rows=[{"user_id": 1}], +) +model_predictions = model_server.predict(features) +``` + +## 2.
Precomputed (Batch) Model Predictions without Online Features +Typically, Machine Learning teams find serving precomputed model predictions to be the most straightforward to implement. +This approach simply treats the model predictions as a feature and serves them from the feature store using the standard +Feast SDK. +```python +model_predictions = store.get_online_features( + feature_refs=[ + "user_data:model_predictions", + ], + entity_rows=[{"user_id": 1}], +) +``` +Notice that the model server is not involved in this approach. Instead, the model predictions are precomputed and +materialized to the online store. + +While this approach can lead to quick impact for different business use cases, it suffers from stale data as well +as only serving users/entities that were available at the time of the batch computation. In some cases, this tradeoff +may be tolerable. + +## 3. Online Model Inference with Online Features and Cached Predictions +This approach is the most sophisticated where inference is optimized for low-latency by caching predictions and running +model inference when data producers write features to the online store. This approach is particularly useful for +applications where features are coming from multiple data sources, the model is computationally expensive to run, or +latency is a significant constraint. + +```python +# Client Reads +features = store.get_online_features( + feature_refs=[ + "user_data:click_through_rate", + "user_data:number_of_clicks", + "user_data:average_page_duration", + "user_data:model_predictions", + ], + entity_rows=[{"user_id": 1}], +) +if features.to_dict().get('user_data:model_predictions') is None: + model_predictions = model_server.predict(features) + store.write_to_online_store(feature_view_name="user_data", df=pd.DataFrame(model_predictions)) +``` +Note that in this case a separate call to `write_to_online_store` is required when the underlying data changes and +predictions change along with it.
+ +```python +# Client Writes from the Data Producer +user_data = request.POST.get('user_data') +model_predictions = model_server.predict(user_data) # assume this includes `user_data` in the Data Frame +store.write_to_online_store(feature_view_name="user_data", df=pd.DataFrame(model_predictions)) +``` +While this requires additional writes for every data producer, this approach will result in the lowest latency for +model inference. + +## 4. Online Model Inference without Features +This approach does not require Feast. The model server can directly serve predictions without any features. This +approach is common in Large Language Models (LLMs) and other models that do not require features to make predictions. + +Note that generative models using Retrieval Augmented Generation (RAG) do require features where the +[document embeddings](../../reference/alpha-vector-database.md) are treated as features, which Feast supports +(this would fall under "Online Model Inference with Online Features"). \ No newline at end of file diff --git a/docs/getting-started/architecture/overview.md b/docs/getting-started/architecture/overview.md index 7d1180bfd1..44fa5ac260 100644 --- a/docs/getting-started/architecture/overview.md +++ b/docs/getting-started/architecture/overview.md @@ -8,9 +8,10 @@ Feast's architecture is designed to be flexible and scalable. It is composed of online store. This allows Feast to serve features in real-time with low latency. -* Feast supports On Demand and Streaming Transformations for [feature computation](feature-transformation.md) and - will support Batch transformations in the future. For Streaming and Batch, Feast requires a separate Feature Transformation - Engine (in the batch case, this is typically your Offline Store). We are exploring adding a default streaming engine to Feast. +* Feast supports [feature transformation](feature-transformation.md) for On Demand and Streaming data sources and + will support Batch transformations in the future. 
For Streaming and Batch data sources, Feast requires a separate +[Feature Transformation Engine](feature-transformation.md#feature-transformation-engines) (in the batch case, this is +typically your Offline Store). We are exploring adding a default streaming engine to Feast. * Domain expertise is recommended when integrating a data source with Feast understand the [tradeoffs from different write patterns](write-patterns.md) to your application From 23c6c862e1da4e9523530eb48c7ce79319dc442d Mon Sep 17 00:00:00 2001 From: Shuchu Han Date: Fri, 16 Aug 2024 03:31:24 -0400 Subject: [PATCH 09/96] fix: Retire pytz library (#4406) * fix: Remove pytz. Signed-off-by: Shuchu Han * fix: Keep the pytz.UTC part in dask.py Signed-off-by: Shuchu Han --------- Signed-off-by: Shuchu Han --- sdk/python/feast/driver_test_data.py | 13 +++++++++---- sdk/python/feast/embedded_go/type_map.py | 4 ++-- sdk/python/feast/feature_logging.py | 4 ++-- .../feast/infra/materialization/snowflake_engine.py | 8 +++++--- .../contrib/athena_offline_store/athena.py | 9 ++++----- .../contrib/postgres_offline_store/postgres.py | 7 +++---- .../contrib/spark_offline_store/spark.py | 12 ++++-------- .../trino_offline_store/connectors/upload.py | 5 ++--- sdk/python/feast/infra/offline_stores/dask.py | 12 ++++++++---- sdk/python/feast/infra/offline_stores/ibis.py | 11 +++++------ sdk/python/feast/infra/offline_stores/redshift.py | 11 +++++------ sdk/python/feast/infra/offline_stores/snowflake.py | 11 +++++------ .../infra/online_stores/contrib/elasticsearch.py | 13 +++---------- .../hazelcast_online_store.py | 5 ++--- .../online_stores/contrib/ikv_online_store/ikv.py | 5 ++--- .../contrib/mysql_online_store/mysql.py | 13 +++---------- .../feast/infra/online_stores/contrib/postgres.py | 11 +++-------- .../contrib/singlestore_online_store/singlestore.py | 5 ++--- sdk/python/feast/infra/online_stores/redis.py | 5 ++--- sdk/python/feast/registry_server.py | 8 ++++---- .../templates/aws/feature_repo/test_workflow.py 
| 9 +++++---- .../feast/templates/snowflake/test_workflow.py | 9 +++++---- sdk/python/feast/utils.py | 5 ++--- sdk/python/tests/data/data_creator.py | 12 ++++++------ .../integration/materialization/test_snowflake.py | 9 ++++----- .../registration/test_universal_registry.py | 11 +++++------ sdk/python/tests/utils/e2e_test_validation.py | 8 ++++---- sdk/python/tests/utils/feature_records.py | 3 +-- sdk/python/tests/utils/test_log_creator.py | 4 ++-- 29 files changed, 109 insertions(+), 133 deletions(-) diff --git a/sdk/python/feast/driver_test_data.py b/sdk/python/feast/driver_test_data.py index defeb404a3..23f1f12477 100644 --- a/sdk/python/feast/driver_test_data.py +++ b/sdk/python/feast/driver_test_data.py @@ -1,10 +1,11 @@ # This module generates dummy data to be used for tests and examples. import itertools +from datetime import timedelta, timezone from enum import Enum import numpy as np import pandas as pd -from pytz import FixedOffset, timezone, utc +from zoneinfo import ZoneInfo from feast.infra.offline_stores.offline_utils import ( DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL, @@ -22,11 +23,15 @@ def _convert_event_timestamp(event_timestamp: pd.Timestamp, t: EventTimestampTyp if t == EventTimestampType.TZ_NAIVE: return event_timestamp elif t == EventTimestampType.TZ_AWARE_UTC: - return event_timestamp.replace(tzinfo=utc) + return event_timestamp.replace(tzinfo=timezone.utc) elif t == EventTimestampType.TZ_AWARE_FIXED_OFFSET: - return event_timestamp.replace(tzinfo=utc).astimezone(FixedOffset(60)) + return event_timestamp.replace(tzinfo=timezone.utc).astimezone( + tz=timezone(timedelta(minutes=60)) + ) elif t == EventTimestampType.TZ_AWARE_US_PACIFIC: - return event_timestamp.replace(tzinfo=utc).astimezone(timezone("US/Pacific")) + return event_timestamp.replace(tzinfo=timezone.utc).astimezone( + tz=ZoneInfo("US/Pacific") + ) def create_orders_df( diff --git a/sdk/python/feast/embedded_go/type_map.py b/sdk/python/feast/embedded_go/type_map.py index 
e70dc3be86..8f467c57ca 100644 --- a/sdk/python/feast/embedded_go/type_map.py +++ b/sdk/python/feast/embedded_go/type_map.py @@ -1,12 +1,12 @@ +from datetime import timezone from typing import List import pyarrow as pa -import pytz from feast.protos.feast.types import Value_pb2 from feast.types import Array, PrimitiveFeastType -PA_TIMESTAMP_TYPE = pa.timestamp("s", tz=pytz.UTC) +PA_TIMESTAMP_TYPE = pa.timestamp("s", tz=timezone.utc) ARROW_TYPE_TO_PROTO_FIELD = { pa.int32(): "int32_val", diff --git a/sdk/python/feast/feature_logging.py b/sdk/python/feast/feature_logging.py index 2843f87121..9bd5d8a91c 100644 --- a/sdk/python/feast/feature_logging.py +++ b/sdk/python/feast/feature_logging.py @@ -1,8 +1,8 @@ import abc +from datetime import timezone from typing import TYPE_CHECKING, Dict, Optional, Type, cast import pyarrow as pa -from pytz import UTC from feast.data_source import DataSource from feast.embedded_go.type_map import FEAST_TYPE_TO_ARROW_TYPE, PA_TIMESTAMP_TYPE @@ -97,7 +97,7 @@ def get_schema(self, registry: "BaseRegistry") -> pa.Schema: ) # system columns - fields[LOG_TIMESTAMP_FIELD] = pa.timestamp("us", tz=UTC) + fields[LOG_TIMESTAMP_FIELD] = pa.timestamp("us", tz=timezone.utc) fields[LOG_DATE_FIELD] = pa.date32() fields[REQUEST_ID_FIELD] = pa.string() diff --git a/sdk/python/feast/infra/materialization/snowflake_engine.py b/sdk/python/feast/infra/materialization/snowflake_engine.py index 5d0f08c2f5..9f9f41c83d 100644 --- a/sdk/python/feast/infra/materialization/snowflake_engine.py +++ b/sdk/python/feast/infra/materialization/snowflake_engine.py @@ -1,14 +1,13 @@ import os import shutil from dataclasses import dataclass -from datetime import datetime +from datetime import datetime, timezone from typing import Callable, List, Literal, Optional, Sequence, Union import click import pandas as pd from colorama import Fore, Style from pydantic import ConfigDict, Field, StrictStr -from pytz import utc from tqdm import tqdm import feast @@ -276,7 +275,10 @@ def 
_materialize_one( execute_snowflake_statement(conn, query).fetchall()[0][0] / 1_000_000_000 ) - if last_commit_change_time < start_date.astimezone(tz=utc).timestamp(): + if ( + last_commit_change_time + < start_date.astimezone(tz=timezone.utc).timestamp() + ): return SnowflakeMaterializationJob( job_id=job_id, status=MaterializationJobStatus.SUCCEEDED ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py index ce731f0198..ea0d6386cb 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py +++ b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py @@ -1,6 +1,6 @@ import contextlib import uuid -from datetime import datetime +from datetime import datetime, timezone from pathlib import Path from typing import ( Callable, @@ -19,7 +19,6 @@ import pyarrow import pyarrow as pa from pydantic import StrictStr -from pytz import utc from feast import OnDemandFeatureView from feast.data_source import DataSource @@ -100,8 +99,8 @@ def pull_latest_from_table_or_query( athena_client = aws_utils.get_athena_data_client(config.offline_store.region) s3_resource = aws_utils.get_s3_resource(config.offline_store.region) - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) query = f""" SELECT @@ -151,7 +150,7 @@ def pull_all_from_table_or_query( query = f""" SELECT {field_string} FROM {from_expression} - WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date.astimezone(tz=utc).strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]}' AND TIMESTAMP '{end_date.astimezone(tz=utc).strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]}' + WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date.astimezone(tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]}' AND TIMESTAMP 
'{end_date.astimezone(tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]}' {"AND "+date_partition_column+" >= '"+start_date.strftime('%Y-%m-%d')+"' AND "+date_partition_column+" <= '"+end_date.strftime('%Y-%m-%d')+"' " if date_partition_column != "" and date_partition_column is not None else ''} """ diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py index c4740a960e..5239cfb474 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py @@ -1,6 +1,6 @@ import contextlib from dataclasses import asdict -from datetime import datetime +from datetime import datetime, timezone from typing import ( Any, Callable, @@ -20,7 +20,6 @@ import pyarrow as pa from jinja2 import BaseLoader, Environment from psycopg import sql -from pytz import utc from feast.data_source import DataSource from feast.errors import InvalidEntityType, ZeroColumnQueryResult, ZeroRowsQueryResult @@ -214,8 +213,8 @@ def pull_all_from_table_or_query( join_key_columns + feature_name_columns + [timestamp_field] ) - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) query = f""" SELECT {field_string} diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py index 2d5a00c296..2896d565d3 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py @@ -2,7 +2,7 @@ import tempfile import uuid import warnings -from datetime import datetime +from datetime import datetime, timezone from typing import Any, Callable, Dict, 
List, Optional, Tuple, Union import numpy as np @@ -14,7 +14,6 @@ from pydantic import StrictStr from pyspark import SparkConf from pyspark.sql import SparkSession -from pytz import utc from feast import FeatureView, OnDemandFeatureView from feast.data_source import DataSource @@ -284,8 +283,8 @@ def pull_all_from_table_or_query( fields = ", ".join(join_key_columns + feature_name_columns + [timestamp_field]) from_expression = data_source.get_table_query_string() - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) query = f""" SELECT {fields} @@ -520,13 +519,10 @@ def _upload_entity_df( entity_df[event_timestamp_col], utc=True ) spark_session.createDataFrame(entity_df).createOrReplaceTempView(table_name) - return elif isinstance(entity_df, str): spark_session.sql(entity_df).createOrReplaceTempView(table_name) - return elif isinstance(entity_df, pyspark.sql.DataFrame): entity_df.createOrReplaceTempView(table_name) - return else: raise InvalidEntityType(type(entity_df)) @@ -534,7 +530,7 @@ def _upload_entity_df( def _format_datetime(t: datetime) -> str: # Since Hive does not support timezone, need to transform to utc. 
if t.tzinfo: - t = t.astimezone(tz=utc) + t = t.astimezone(tz=timezone.utc) dt = t.strftime("%Y-%m-%d %H:%M:%S.%f") return dt diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/upload.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/upload.py index 9e2ea3708d..1b55199193 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/upload.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/upload.py @@ -18,13 +18,12 @@ ``` """ -from datetime import datetime +from datetime import datetime, timezone from typing import Any, Dict, Iterator, Optional, Set import numpy as np import pandas as pd import pyarrow -from pytz import utc from feast.infra.offline_stores.contrib.trino_offline_store.trino_queries import Trino from feast.infra.offline_stores.contrib.trino_offline_store.trino_type_map import ( @@ -141,7 +140,7 @@ def _format_value(row: pd.Series, schema: Dict[str, Any]) -> str: def format_datetime(t: datetime) -> str: if t.tzinfo: - t = t.astimezone(tz=utc) + t = t.astimezone(tz=timezone.utc) return t.strftime("%Y-%m-%d %H:%M:%S.%f") diff --git a/sdk/python/feast/infra/offline_stores/dask.py b/sdk/python/feast/infra/offline_stores/dask.py index 4a63baf646..52ad88d299 100644 --- a/sdk/python/feast/infra/offline_stores/dask.py +++ b/sdk/python/feast/infra/offline_stores/dask.py @@ -1,6 +1,6 @@ import os import uuid -from datetime import datetime +from datetime import datetime, timezone from pathlib import Path from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Union @@ -178,6 +178,8 @@ def evaluate_historical_retrieval(): entity_df_event_timestamp_col_type = entity_df_with_features.dtypes[ entity_df_event_timestamp_col ] + + # TODO: need to figure out why the value of entity_df_event_timestamp_col_type.tz is pytz.UTC if ( not hasattr(entity_df_event_timestamp_col_type, "tz") or entity_df_event_timestamp_col_type.tz 
!= pytz.UTC @@ -189,7 +191,7 @@ def evaluate_historical_retrieval(): ].apply( lambda x: x if x.tzinfo is not None - else x.replace(tzinfo=pytz.utc) + else x.replace(tzinfo=timezone.utc) ) ) @@ -616,6 +618,7 @@ def _normalize_timestamp( if created_timestamp_column: created_timestamp_column_type = df_to_join_types[created_timestamp_column] + # TODO: need to figure out why the value of timestamp_field_type.tz is pytz.UTC if not hasattr(timestamp_field_type, "tz") or timestamp_field_type.tz != pytz.UTC: # if you are querying for the event timestamp field, we have to deduplicate if len(df_to_join[timestamp_field].shape) > 1: @@ -624,10 +627,11 @@ def _normalize_timestamp( # Make sure all timestamp fields are tz-aware. We default tz-naive fields to UTC df_to_join[timestamp_field] = df_to_join[timestamp_field].apply( - lambda x: x if x.tzinfo is not None else x.replace(tzinfo=pytz.utc), + lambda x: x if x.tzinfo else x.replace(tzinfo=timezone.utc), meta=(timestamp_field, "datetime64[ns, UTC]"), ) + # TODO: need to figure out why the value of created_timestamp_column_type.tz is pytz.UTC if created_timestamp_column and ( not hasattr(created_timestamp_column_type, "tz") or created_timestamp_column_type.tz != pytz.UTC @@ -640,7 +644,7 @@ def _normalize_timestamp( df_to_join[created_timestamp_column] = df_to_join[ created_timestamp_column ].apply( - lambda x: x if x.tzinfo is not None else x.replace(tzinfo=pytz.utc), + lambda x: x if x.tzinfo else x.replace(tzinfo=timezone.utc), meta=(timestamp_field, "datetime64[ns, UTC]"), ) diff --git a/sdk/python/feast/infra/offline_stores/ibis.py b/sdk/python/feast/infra/offline_stores/ibis.py index 4de16cbda3..61c477baec 100644 --- a/sdk/python/feast/infra/offline_stores/ibis.py +++ b/sdk/python/feast/infra/offline_stores/ibis.py @@ -1,7 +1,7 @@ import random import string import uuid -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from pathlib import Path from typing import Any, Callable, 
Dict, List, Optional, Tuple, Union @@ -12,7 +12,6 @@ import pyarrow from ibis.expr import datatypes as dt from ibis.expr.types import Table -from pytz import utc from feast.data_source import DataSource from feast.feature_logging import LoggingConfig, LoggingSource @@ -55,8 +54,8 @@ def pull_latest_from_table_or_query_ibis( fields = join_key_columns + feature_name_columns + [timestamp_field] if created_timestamp_column: fields.append(created_timestamp_column) - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) table = data_source_reader(data_source) @@ -265,8 +264,8 @@ def pull_all_from_table_or_query_ibis( staging_location_endpoint_override: Optional[str] = None, ) -> RetrievalJob: fields = join_key_columns + feature_name_columns + [timestamp_field] - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) table = data_source_reader(data_source) diff --git a/sdk/python/feast/infra/offline_stores/redshift.py b/sdk/python/feast/infra/offline_stores/redshift.py index cec21c35c1..ed76f830f3 100644 --- a/sdk/python/feast/infra/offline_stores/redshift.py +++ b/sdk/python/feast/infra/offline_stores/redshift.py @@ -1,6 +1,6 @@ import contextlib import uuid -from datetime import datetime +from datetime import datetime, timezone from pathlib import Path from typing import ( Any, @@ -21,7 +21,6 @@ import pyarrow as pa from dateutil import parser from pydantic import StrictStr, model_validator -from pytz import utc from feast import OnDemandFeatureView, RedshiftSource from feast.data_source import DataSource @@ -127,8 +126,8 @@ def pull_latest_from_table_or_query( ) s3_resource = aws_utils.get_s3_resource(config.offline_store.region) - start_date = start_date.astimezone(tz=utc) - end_date = 
end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) query = f""" SELECT @@ -174,8 +173,8 @@ def pull_all_from_table_or_query( ) s3_resource = aws_utils.get_s3_resource(config.offline_store.region) - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) query = f""" SELECT {field_string} diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index ada6c99c98..9418171a96 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -3,7 +3,7 @@ import os import uuid import warnings -from datetime import datetime +from datetime import datetime, timezone from functools import reduce from pathlib import Path from typing import ( @@ -25,7 +25,6 @@ import pandas as pd import pyarrow from pydantic import ConfigDict, Field, StrictStr -from pytz import utc from feast import OnDemandFeatureView from feast.data_source import DataSource @@ -196,8 +195,8 @@ def pull_latest_from_table_or_query( with GetSnowflakeConnection(config.offline_store) as conn: snowflake_conn = conn - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) query = f""" SELECT @@ -248,8 +247,8 @@ def pull_all_from_table_or_query( with GetSnowflakeConnection(config.offline_store) as conn: snowflake_conn = conn - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) query = f""" SELECT {field_string} diff --git a/sdk/python/feast/infra/online_stores/contrib/elasticsearch.py 
b/sdk/python/feast/infra/online_stores/contrib/elasticsearch.py index 429327e651..c26b4199ae 100644 --- a/sdk/python/feast/infra/online_stores/contrib/elasticsearch.py +++ b/sdk/python/feast/infra/online_stores/contrib/elasticsearch.py @@ -6,7 +6,6 @@ from datetime import datetime from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple -import pytz from elasticsearch import Elasticsearch, helpers from feast import Entity, FeatureView, RepoConfig @@ -15,6 +14,7 @@ from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import FeastConfigBaseModel +from feast.utils import to_naive_utc class ElasticSearchOnlineStoreConfig(FeastConfigBaseModel): @@ -96,9 +96,9 @@ def online_write_batch( entity_key_serialization_version=config.entity_key_serialization_version, ) encoded_entity_key = base64.b64encode(entity_key_bin).decode("utf-8") - timestamp = _to_naive_utc(timestamp) + timestamp = to_naive_utc(timestamp) if created_ts is not None: - created_ts = _to_naive_utc(created_ts) + created_ts = to_naive_utc(created_ts) for feature_name, value in values.items(): encoded_value = base64.b64encode(value.SerializeToString()).decode( "utf-8" @@ -267,10 +267,3 @@ def retrieve_online_documents( ) ) return result - - -def _to_naive_utc(ts: datetime): - if ts.tzinfo is None: - return ts - else: - return ts.astimezone(pytz.utc).replace(tzinfo=None) diff --git a/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py index 497d8909af..c56d394c21 100644 --- a/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py +++ b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py @@ -23,7 +23,6 @@ from datetime import datetime, timezone from typing import Any, 
Callable, Dict, List, Literal, Optional, Sequence, Tuple -import pytz from hazelcast.client import HazelcastClient from hazelcast.core import HazelcastJsonValue from hazelcast.discovery import HazelcastCloudDiscovery @@ -167,10 +166,10 @@ def online_write_batch( entity_key_serialization_version=2, ) ).decode("utf-8") - event_ts_utc = pytz.utc.localize(event_ts, is_dst=None).timestamp() + event_ts_utc = event_ts.astimezone(tz=timezone.utc).timestamp() created_ts_utc = 0.0 if created_ts is not None: - created_ts_utc = pytz.utc.localize(created_ts, is_dst=None).timestamp() + created_ts_utc = created_ts.astimezone(tz=timezone.utc).timestamp() for feature_name, value in values.items(): feature_value = base64.b64encode(value.SerializeToString()).decode( "utf-8" diff --git a/sdk/python/feast/infra/online_stores/contrib/ikv_online_store/ikv.py b/sdk/python/feast/infra/online_stores/contrib/ikv_online_store/ikv.py index 6b721bddf8..c8f0ad65c9 100644 --- a/sdk/python/feast/infra/online_stores/contrib/ikv_online_store/ikv.py +++ b/sdk/python/feast/infra/online_stores/contrib/ikv_online_store/ikv.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timezone from typing import ( Any, Callable, @@ -11,7 +11,6 @@ Tuple, ) -import pytz from google.protobuf.timestamp_pb2 import Timestamp from ikvpy.client import IKVReader, IKVWriter from ikvpy.clientoptions import ClientOptions, ClientOptionsBuilder @@ -163,7 +162,7 @@ def _decode_fields_for_primary_key( if dt_bytes: proto_timestamp = Timestamp() proto_timestamp.ParseFromString(dt_bytes) - dt = datetime.fromtimestamp(proto_timestamp.seconds, tz=pytz.utc) + dt = datetime.fromtimestamp(proto_timestamp.seconds, tz=timezone.utc) # decode other features features = {} diff --git a/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py b/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py index 26916a9fcb..64111ca42c 100644 --- 
a/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py +++ b/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py @@ -4,7 +4,6 @@ from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple import pymysql -import pytz from pydantic import StrictStr from pymysql.connections import Connection from pymysql.cursors import Cursor @@ -15,6 +14,7 @@ from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import FeastConfigBaseModel +from feast.utils import to_naive_utc class MySQLOnlineStoreConfig(FeastConfigBaseModel): @@ -74,9 +74,9 @@ def online_write_batch( entity_key, entity_key_serialization_version=2, ).hex() - timestamp = _to_naive_utc(timestamp) + timestamp = to_naive_utc(timestamp) if created_ts is not None: - created_ts = _to_naive_utc(created_ts) + created_ts = to_naive_utc(created_ts) for feature_name, val in values.items(): self.write_to_table( @@ -223,10 +223,3 @@ def _drop_table_and_index(cur: Cursor, project: str, table: FeatureView) -> None def _table_id(project: str, table: FeatureView) -> str: return f"{project}_{table.name}" - - -def _to_naive_utc(ts: datetime) -> datetime: - if ts.tzinfo is None: - return ts - else: - return ts.astimezone(pytz.utc).replace(tzinfo=None) diff --git a/sdk/python/feast/infra/online_stores/contrib/postgres.py b/sdk/python/feast/infra/online_stores/contrib/postgres.py index ff73a4a347..8c6d3e0b99 100644 --- a/sdk/python/feast/infra/online_stores/contrib/postgres.py +++ b/sdk/python/feast/infra/online_stores/contrib/postgres.py @@ -16,7 +16,6 @@ Union, ) -import pytz from psycopg import AsyncConnection, sql from psycopg.connection import Connection from psycopg_pool import AsyncConnectionPool, ConnectionPool @@ -24,6 +23,9 @@ from feast import Entity from feast.feature_view import FeatureView from feast.infra.key_encoding_utils import get_list_val_str, 
serialize_entity_key +from feast.infra.online_stores.contrib.singlestore_online_store.singlestore import ( + _to_naive_utc, +) from feast.infra.online_stores.online_store import OnlineStore from feast.infra.utils.postgres.connection_utils import ( _get_conn, @@ -472,10 +474,3 @@ def _drop_table_and_index(table_name): sql.Identifier(table_name), sql.Identifier(f"{table_name}_ek"), ) - - -def _to_naive_utc(ts: datetime): - if ts.tzinfo is None: - return ts - else: - return ts.astimezone(pytz.utc).replace(tzinfo=None) diff --git a/sdk/python/feast/infra/online_stores/contrib/singlestore_online_store/singlestore.py b/sdk/python/feast/infra/online_stores/contrib/singlestore_online_store/singlestore.py index e17a059c1a..3e921afcea 100644 --- a/sdk/python/feast/infra/online_stores/contrib/singlestore_online_store/singlestore.py +++ b/sdk/python/feast/infra/online_stores/contrib/singlestore_online_store/singlestore.py @@ -1,10 +1,9 @@ from __future__ import absolute_import from collections import defaultdict -from datetime import datetime +from datetime import datetime, timezone from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple -import pytz import singlestoredb from pydantic import StrictStr from singlestoredb.connection import Connection, Cursor @@ -232,4 +231,4 @@ def _to_naive_utc(ts: datetime) -> datetime: if ts.tzinfo is None: return ts else: - return ts.astimezone(pytz.utc).replace(tzinfo=None) + return ts.astimezone(tz=timezone.utc).replace(tzinfo=None) diff --git a/sdk/python/feast/infra/online_stores/redis.py b/sdk/python/feast/infra/online_stores/redis.py index 5f0156f620..59892fcbe0 100644 --- a/sdk/python/feast/infra/online_stores/redis.py +++ b/sdk/python/feast/infra/online_stores/redis.py @@ -13,7 +13,7 @@ # limitations under the License. 
import json import logging -from datetime import datetime +from datetime import datetime, timezone from enum import Enum from typing import ( Any, @@ -28,7 +28,6 @@ Union, ) -import pytz from google.protobuf.timestamp_pb2 import Timestamp from pydantic import StrictStr @@ -457,5 +456,5 @@ def _get_features_for_entity( if not res: return None, None else: - timestamp = datetime.fromtimestamp(res_ts.seconds, tz=pytz.utc) + timestamp = datetime.fromtimestamp(res_ts.seconds, tz=timezone.utc) return timestamp, res diff --git a/sdk/python/feast/registry_server.py b/sdk/python/feast/registry_server.py index 4a96ba76a8..53acb9f625 100644 --- a/sdk/python/feast/registry_server.py +++ b/sdk/python/feast/registry_server.py @@ -1,9 +1,8 @@ from concurrent import futures -from datetime import datetime +from datetime import datetime, timezone import grpc from google.protobuf.empty_pb2 import Empty -from pytz import utc from feast import FeatureStore from feast.data_source import DataSource @@ -314,10 +313,11 @@ def ApplyMaterialization( feature_view=FeatureView.from_proto(request.feature_view), project=request.project, start_date=datetime.fromtimestamp( - request.start_date.seconds + request.start_date.nanos / 1e9, tz=utc + request.start_date.seconds + request.start_date.nanos / 1e9, + tz=timezone.utc, ), end_date=datetime.fromtimestamp( - request.end_date.seconds + request.end_date.nanos / 1e9, tz=utc + request.end_date.seconds + request.end_date.nanos / 1e9, tz=timezone.utc ), commit=request.commit, ) diff --git a/sdk/python/feast/templates/aws/feature_repo/test_workflow.py b/sdk/python/feast/templates/aws/feature_repo/test_workflow.py index 59ac1f0ee7..092399e03c 100644 --- a/sdk/python/feast/templates/aws/feature_repo/test_workflow.py +++ b/sdk/python/feast/templates/aws/feature_repo/test_workflow.py @@ -1,9 +1,8 @@ import random import subprocess -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import pandas as pd -from pytz 
import utc from feast import FeatureStore from feast.data_source import PushMode @@ -71,9 +70,11 @@ def run_demo(): def fetch_historical_features_entity_sql(store: FeatureStore, for_batch_scoring): end_date = ( - datetime.now().replace(microsecond=0, second=0, minute=0).astimezone(tz=utc) + datetime.now() + .replace(microsecond=0, second=0, minute=0) + .astimezone(tz=timezone.utc) ) - start_date = (end_date - timedelta(days=60)).astimezone(tz=utc) + start_date = (end_date - timedelta(days=60)).astimezone(tz=timezone.utc) # For batch scoring, we want the latest timestamps if for_batch_scoring: print( diff --git a/sdk/python/feast/templates/snowflake/test_workflow.py b/sdk/python/feast/templates/snowflake/test_workflow.py index 3c44342881..f60b014874 100644 --- a/sdk/python/feast/templates/snowflake/test_workflow.py +++ b/sdk/python/feast/templates/snowflake/test_workflow.py @@ -1,10 +1,9 @@ import random import subprocess -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import pandas as pd import yaml -from pytz import utc from feast import FeatureStore from feast.data_source import PushMode @@ -75,9 +74,11 @@ def run_demo(): def fetch_historical_features_entity_sql(store: FeatureStore, for_batch_scoring): end_date = ( - datetime.now().replace(microsecond=0, second=0, minute=0).astimezone(tz=utc) + datetime.now() + .replace(microsecond=0, second=0, minute=0) + .astimezone(tz=timezone.utc) ) - start_date = (end_date - timedelta(days=60)).astimezone(tz=utc) + start_date = (end_date - timedelta(days=60)).astimezone(tz=timezone.utc) project_name = yaml.safe_load(open("feature_repo/feature_store.yaml"))["project"] table_name = f"{project_name}_feast_driver_hourly_stats" diff --git a/sdk/python/feast/utils.py b/sdk/python/feast/utils.py index 0467393aa2..5862cd4630 100644 --- a/sdk/python/feast/utils.py +++ b/sdk/python/feast/utils.py @@ -25,7 +25,6 @@ import pyarrow from dateutil.tz import tzlocal from 
google.protobuf.timestamp_pb2 import Timestamp -from pytz import utc from feast.constants import FEAST_FS_YAML_FILE_PATH_ENV_NAME from feast.entity import Entity @@ -63,7 +62,7 @@ def get_user_agent(): def make_tzaware(t: datetime) -> datetime: """We assume tz-naive datetimes are UTC""" if t.tzinfo is None: - return t.replace(tzinfo=utc) + return t.replace(tzinfo=timezone.utc) else: return t @@ -81,7 +80,7 @@ def to_naive_utc(ts: datetime) -> datetime: if ts.tzinfo is None: return ts else: - return ts.astimezone(utc).replace(tzinfo=None) + return ts.astimezone(timezone.utc).replace(tzinfo=None) def maybe_local_tz(t: datetime) -> datetime: diff --git a/sdk/python/tests/data/data_creator.py b/sdk/python/tests/data/data_creator.py index 15d09c5a40..5d6cffeb9d 100644 --- a/sdk/python/tests/data/data_creator.py +++ b/sdk/python/tests/data/data_creator.py @@ -1,8 +1,8 @@ -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from typing import Dict, List, Optional import pandas as pd -from pytz import timezone, utc +from zoneinfo import ZoneInfo from feast.types import FeastType, Float32, Int32, Int64, String from feast.utils import _utc_now @@ -27,11 +27,11 @@ def create_basic_driver_dataset( ts - timedelta(hours=3), # Use different time zones to test tz-naive -> tz-aware conversion (ts - timedelta(hours=4)) - .replace(tzinfo=utc) - .astimezone(tz=timezone("Europe/Berlin")), + .replace(tzinfo=timezone.utc) + .astimezone(tz=ZoneInfo("Europe/Berlin")), (ts - timedelta(hours=1)) - .replace(tzinfo=utc) - .astimezone(tz=timezone("US/Pacific")), + .replace(tzinfo=timezone.utc) + .astimezone(tz=ZoneInfo("US/Pacific")), ], "created_ts": [ts, ts, ts, ts, ts], } diff --git a/sdk/python/tests/integration/materialization/test_snowflake.py b/sdk/python/tests/integration/materialization/test_snowflake.py index f12191363b..f53c3ca753 100644 --- a/sdk/python/tests/integration/materialization/test_snowflake.py +++ 
b/sdk/python/tests/integration/materialization/test_snowflake.py @@ -1,8 +1,7 @@ import os -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import pytest -from pytz import utc from feast import Field from feast.entity import Entity @@ -150,7 +149,7 @@ def test_snowflake_materialization_consistency_internal_with_lists( now = _utc_now() full_feature_names = True - start_date = (now - timedelta(hours=5)).replace(tzinfo=utc) + start_date = (now - timedelta(hours=5)).replace(tzinfo=timezone.utc) end_date = split_dt fs.materialize( feature_views=[driver_stats_fv.name], @@ -165,7 +164,7 @@ def test_snowflake_materialization_consistency_internal_with_lists( "string": ["3"] * 2, "bytes": [b"3"] * 2, "bool": [False] * 2, - "datetime": [datetime(1981, 1, 1, tzinfo=utc)] * 2, + "datetime": [datetime(1981, 1, 1, tzinfo=timezone.utc)] * 2, } expected_value = [] if feature_is_empty_list else expected_values[feature_dtype] @@ -234,7 +233,7 @@ def test_snowflake_materialization_entityless_fv(): now = _utc_now() - start_date = (now - timedelta(hours=5)).replace(tzinfo=utc) + start_date = (now - timedelta(hours=5)).replace(tzinfo=timezone.utc) end_date = split_dt fs.materialize( feature_views=[overall_stats_fv.name], diff --git a/sdk/python/tests/integration/registration/test_universal_registry.py b/sdk/python/tests/integration/registration/test_universal_registry.py index b0738c8419..9dcd1b5b91 100644 --- a/sdk/python/tests/integration/registration/test_universal_registry.py +++ b/sdk/python/tests/integration/registration/test_universal_registry.py @@ -14,7 +14,7 @@ import logging import os import time -from datetime import timedelta +from datetime import timedelta, timezone from tempfile import mkstemp from unittest import mock @@ -22,7 +22,6 @@ import pandas as pd import pytest from pytest_lazyfixture import lazy_fixture -from pytz import utc from testcontainers.core.container import DockerContainer from 
testcontainers.core.waiting_utils import wait_for_logs from testcontainers.minio import MinioContainer @@ -802,8 +801,8 @@ def odfv1(feature_df: pd.DataFrame) -> pd.DataFrame: # Simulate materialization current_date = _utc_now() - end_date = current_date.replace(tzinfo=utc) - start_date = (current_date - timedelta(days=1)).replace(tzinfo=utc) + end_date = current_date.replace(tzinfo=timezone.utc) + start_date = (current_date - timedelta(days=1)).replace(tzinfo=timezone.utc) test_registry.apply_materialization(feature_view, project, start_date, end_date) materialized_feature_view = test_registry.get_feature_view( "my_feature_view_1", project @@ -871,8 +870,8 @@ def odfv1(feature_df: pd.DataFrame) -> pd.DataFrame: # Simulate materialization a second time current_date = _utc_now() - end_date_1 = current_date.replace(tzinfo=utc) - start_date_1 = (current_date - timedelta(days=1)).replace(tzinfo=utc) + end_date_1 = current_date.replace(tzinfo=timezone.utc) + start_date_1 = (current_date - timedelta(days=1)).replace(tzinfo=timezone.utc) test_registry.apply_materialization( updated_feature_view, project, start_date_1, end_date_1 ) diff --git a/sdk/python/tests/utils/e2e_test_validation.py b/sdk/python/tests/utils/e2e_test_validation.py index 1a8bedc796..a08e8fef42 100644 --- a/sdk/python/tests/utils/e2e_test_validation.py +++ b/sdk/python/tests/utils/e2e_test_validation.py @@ -1,13 +1,12 @@ import math import os import time -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from pathlib import Path from typing import Dict, List, Optional, Union import pandas as pd import yaml -from pytz import utc from feast import FeatureStore, FeatureView, RepoConfig from feast.utils import _utc_now @@ -39,7 +38,7 @@ def validate_offline_online_store_consistency( # Run materialize() # use both tz-naive & tz-aware timestamps to test that they're both correctly handled - start_date = (now - timedelta(hours=5)).replace(tzinfo=utc) + start_date = 
(now - timedelta(hours=5)).replace(tzinfo=timezone.utc) end_date = split_dt fs.materialize(feature_views=[fv.name], start_date=start_date, end_date=end_date) @@ -87,7 +86,8 @@ def validate_offline_online_store_consistency( and updated_fv.materialization_intervals[0][0] == start_date and updated_fv.materialization_intervals[0][1] == end_date and updated_fv.materialization_intervals[1][0] == end_date - and updated_fv.materialization_intervals[1][1] == now.replace(tzinfo=utc) + and updated_fv.materialization_intervals[1][1] + == now.replace(tzinfo=timezone.utc) ) # check result of materialize_incremental() diff --git a/sdk/python/tests/utils/feature_records.py b/sdk/python/tests/utils/feature_records.py index bd3567c9ee..e81666eaa5 100644 --- a/sdk/python/tests/utils/feature_records.py +++ b/sdk/python/tests/utils/feature_records.py @@ -5,7 +5,6 @@ import pandas as pd import pytest from pandas.testing import assert_frame_equal as pd_assert_frame_equal -from pytz import utc from feast import FeatureService, FeatureStore, utils from feast.errors import FeatureNameCollisionError @@ -16,7 +15,7 @@ def convert_timestamp_records_to_utc( records: List[Dict[str, Any]], column: str ) -> List[Dict[str, Any]]: for record in records: - record[column] = utils.make_tzaware(record[column]).astimezone(utc) + record[column] = utils.make_tzaware(record[column]).astimezone(timezone.utc) return records diff --git a/sdk/python/tests/utils/test_log_creator.py b/sdk/python/tests/utils/test_log_creator.py index 987c8d77ef..3e432e11bf 100644 --- a/sdk/python/tests/utils/test_log_creator.py +++ b/sdk/python/tests/utils/test_log_creator.py @@ -1,7 +1,7 @@ import contextlib -import datetime import tempfile import uuid +from datetime import timedelta from pathlib import Path from typing import Iterator, List, Union @@ -80,7 +80,7 @@ def prepare_logs( logs_df[REQUEST_ID_FIELD] = [str(uuid.uuid4()) for _ in range(num_rows)] logs_df[LOG_TIMESTAMP_FIELD] = pd.Series( np.random.randint(0, 7 * 24 * 
3600, num_rows) - ).map(lambda secs: pd.Timestamp.utcnow() - datetime.timedelta(seconds=secs)) + ).map(lambda secs: pd.Timestamp.utcnow() - timedelta(seconds=secs)) logs_df[LOG_DATE_FIELD] = logs_df[LOG_TIMESTAMP_FIELD].dt.date for projection in feature_service.feature_view_projections: From 160cd36542e37202bf4d836ad4e6b1d0fefb6779 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Fri, 16 Aug 2024 23:25:35 -0400 Subject: [PATCH 10/96] Update model-inference.md --- .../architecture/model-inference.md | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/docs/getting-started/architecture/model-inference.md b/docs/getting-started/architecture/model-inference.md index 4fe2859c55..3a061603c1 100644 --- a/docs/getting-started/architecture/model-inference.md +++ b/docs/getting-started/architecture/model-inference.md @@ -3,7 +3,7 @@ Production machine learning systems can choose from four approaches to serving machine learning predictions (the output of model inference): 1. Online model inference with online features -2. Precomputed (batch) model predictions without online features +2. Offline mode inference without online features 3. Online model inference with online features and cached predictions 4. Online model inference without features @@ -27,10 +27,13 @@ features = store.get_online_features( model_predictions = model_server.predict(features) ``` -## 2. Precomputed (Batch) Model Predictions without Online Features +## 2. Offline Model Inference without Online Features Typically, Machine Learning teams find serving precomputed model predictions to be the most straightforward to implement. This approach simply treats the model predictions as a feature and serves them from the feature store using the standard -Feast sdk. +Feast sdk. These model predictions are typically generated through some batch process where the model scores are precomputed. 
+As a concrete example, the batch process can be as simple as a script that runs model inference locally for a set of users that +can output a CSV. This output file could be used for materialization so that the model could be served online as shown in the +code below. ```python model_predictions = store.get_online_features( feature_refs=[ @@ -85,4 +88,10 @@ approach is common in Large Language Models (LLMs) and other models that do not Note that generative models using Retrieval Augmented Generation (RAG) do require features where the [document embeddings](../../reference/alpha-vector-database.md) are treated as features, which Feast supports -(this would fall under "Online Model Inference with Online Features"). \ No newline at end of file +(this would fall under "Online Model Inference with Online Features"). + +### Client Orchestration +Implicit in the code examples above is a design choice about how clients orchestrate calls to get features and run model inference. +The examples had a Feast-centric pattern because they are inputs to the model, so the sequencing is fairly obvious. +An alternative approach can be Inference-centric where a client would call an inference endpoint and the inference +service would be responsible for orchestration. 
From 09b026bf365c29246c0c4a1ed538c5ccfe21a9c8 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Sat, 17 Aug 2024 23:25:48 +0400 Subject: [PATCH 11/96] chore: Auto-detect python version in Makefile (#4419) --- Makefile | 13 +++++++------ docs/project/development-guide.md | 20 ++++++++++++-------- 2 files changed, 19 insertions(+), 14 deletions(-) diff --git a/Makefile b/Makefile index 5e3bd0d913..7119bad856 100644 --- a/Makefile +++ b/Makefile @@ -21,6 +21,7 @@ ifeq ($(shell uname -s), Darwin) OS = osx endif TRINO_VERSION ?= 376 +PYTHON_VERSION = ${shell python --version | grep -Eo '[0-9]\.[0-9]+'} # General @@ -37,22 +38,22 @@ build: protos build-java build-docker # Python SDK install-python-ci-dependencies: - python -m piptools sync sdk/python/requirements/py$(PYTHON)-ci-requirements.txt + python -m piptools sync sdk/python/requirements/py$(PYTHON_VERSION)-ci-requirements.txt pip install --no-deps -e . python setup.py build_python_protos --inplace install-python-ci-dependencies-uv: - uv pip sync --system sdk/python/requirements/py$(PYTHON)-ci-requirements.txt + uv pip sync --system sdk/python/requirements/py$(PYTHON_VERSION)-ci-requirements.txt uv pip install --system --no-deps -e . python setup.py build_python_protos --inplace install-python-ci-dependencies-uv-venv: - uv pip sync sdk/python/requirements/py$(PYTHON)-ci-requirements.txt + uv pip sync sdk/python/requirements/py$(PYTHON_VERSION)-ci-requirements.txt uv pip install --no-deps -e . 
python setup.py build_python_protos --inplace lock-python-ci-dependencies: - uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py$(PYTHON)-ci-requirements.txt + uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py$(PYTHON_VERSION)-ci-requirements.txt package-protos: cp -r ${ROOT_DIR}/protos ${ROOT_DIR}/sdk/python/feast/protos @@ -61,11 +62,11 @@ compile-protos-python: python setup.py build_python_protos --inplace install-python: - python -m piptools sync sdk/python/requirements/py$(PYTHON)-requirements.txt + python -m piptools sync sdk/python/requirements/py$(PYTHON_VERSION)-requirements.txt python setup.py develop lock-python-dependencies: - uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py$(PYTHON)-requirements.txt + uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py$(PYTHON_VERSION)-requirements.txt lock-python-dependencies-all: pixi run --environment py39 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.9-requirements.txt" diff --git a/docs/project/development-guide.md b/docs/project/development-guide.md index e3b09294bc..cec4f68daf 100644 --- a/docs/project/development-guide.md +++ b/docs/project/development-guide.md @@ -132,8 +132,7 @@ Setting up your development environment for Feast Python SDK / CLI: source venv/bin/activate ``` 4. (M1 Mac only): Follow the [dev guide](https://github.com/feast-dev/feast/issues/2105) -5. Install uv -It is recommended to use uv for managing python dependencies. +5. Install uv. It is recommended to use uv for managing python dependencies. ```sh curl -LsSf https://astral.sh/uv/install.sh | sh ``` @@ -145,21 +144,26 @@ pip install uv ``` make build-ui ``` -7. (Optional) install pixi -pixi is necessary to run step 8 for all python versions at once. +7. 
(Optional) install pixi. pixi is necessary to run step 8 for all python versions at once. ```sh curl -fsSL https://pixi.sh/install.sh | bash ``` -8. (Optional): Recompile python lock files -If you make changes to requirements or simply want to update python lock files to reflect latest versioons. +8. (Optional): Recompile python lock files. Only when you make changes to requirements or simply want to update python lock files to reflect latest versioons. ```sh make lock-python-dependencies-all ``` -9. Install development dependencies for Feast Python SDK / CLI -This will install package versions from the lock file, install editable version of feast and compile protobufs. +9. Install development dependencies for Feast Python SDK / CLI. This will install package versions from the lock file, install editable version of feast and compile protobufs. + +If running inside a virtual environment: +```sh +make install-python-ci-dependencies-uv-venv +``` + +Otherwise: ```sh make install-python-ci-dependencies-uv ``` + 10. Spin up Docker Image ```sh docker build -t docker-whale -f ./sdk/python/feast/infra/feature_servers/multicloud/Dockerfile . 
From d235832b78027b98df8e8a9e434a51a0c78b3092 Mon Sep 17 00:00:00 2001 From: "bdodla@expedia.com" <13788369+EXPEbdodla@users.noreply.github.com> Date: Sun, 18 Aug 2024 09:46:17 -0700 Subject: [PATCH 12/96] fix: Default to pandas mode if not specified in ODFV proto in database (#4420) --- sdk/python/feast/on_demand_feature_view.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index aeb1cc207a..47fcf29926 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -346,7 +346,7 @@ def from_proto( ], sources=sources, feature_transformation=transformation, - mode=on_demand_feature_view_proto.spec.mode, + mode=on_demand_feature_view_proto.spec.mode or "pandas", description=on_demand_feature_view_proto.spec.description, tags=dict(on_demand_feature_view_proto.spec.tags), owner=on_demand_feature_view_proto.spec.owner, From 8181007dfb7c672f9984815f0813b181076f6926 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Mon, 19 Aug 2024 12:30:36 -0400 Subject: [PATCH 13/96] chore: Update SUMMARY.md (#4422) Update SUMMARY.md --- docs/SUMMARY.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index 0060ae729e..3cc2511288 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -8,6 +8,13 @@ ## Getting started * [Quickstart](getting-started/quickstart.md) +* [Architecture](getting-started/architecture/README.md) + * [Overview](getting-started/architecture/overview.md) + * [Language](getting-started/architecture/language.md) + * [Push vs Pull Model](getting-started/architecture/push-vs-pull-model.md) + * [Write Patterns](getting-started/architecture/write-patterns.md) + * [Feature Transformation](getting-started/architecture/feature-transformation.md) + * [Feature Serving and Model Inference](getting-started/architecture/model-inference.md) * 
[Concepts](getting-started/concepts/README.md) * [Overview](getting-started/concepts/overview.md) * [Data ingestion](getting-started/concepts/data-ingestion.md) @@ -17,13 +24,6 @@ * [Point-in-time joins](getting-started/concepts/point-in-time-joins.md) * [Registry](getting-started/concepts/registry.md) * [\[Alpha\] Saved dataset](getting-started/concepts/dataset.md) -* [Architecture](getting-started/architecture/README.md) - * [Overview](getting-started/architecture/overview.md) - * [Language](getting-started/architecture/language.md) - * [Push vs Pull Model](getting-started/architecture/push-vs-pull-model.md) - * [Write Patterns](getting-started/architecture/write-patterns.md) - * [Feature Transformation](getting-started/architecture/feature-transformation.md) - * [Feature Serving and Model Inference](getting-started/architecture/model-inference.md) * [Components](getting-started/components/README.md) * [Overview](getting-started/components/overview.md) * [Registry](getting-started/components/registry.md) From 66a0a38e72b9ff67dde971e0e0b239e9cfa7f27b Mon Sep 17 00:00:00 2001 From: Daniel Dowler <12484302+dandawg@users.noreply.github.com> Date: Mon, 19 Aug 2024 14:44:03 -0600 Subject: [PATCH 14/96] docs: Updated README template and fixed links to be consistent for HTML (#4423) * chore: fixed README template to be consistent with current README Signed-off-by: dandawg <12484302+dandawg@users.noreply.github.com> * docs: markdown links consistency with html Signed-off-by: dandawg <12484302+dandawg@users.noreply.github.com> --------- Signed-off-by: dandawg <12484302+dandawg@users.noreply.github.com> --- .github/pull_request_template.md | 2 +- docs/project/development-guide.md | 20 ++++++++++---------- infra/templates/README.md.jinja2 | 3 +++ 3 files changed, 14 insertions(+), 11 deletions(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 7849c24976..40986a87db 100644 --- a/.github/pull_request_template.md +++ 
b/.github/pull_request_template.md @@ -1,6 +1,6 @@ %BQ`n{qZQmCu0MT8gYHDMy_GLh?6ED zD4d-HJTSEg-W{GH;_~MQtI-6y3{(221XoRt;r=!mfkK{(N+tG+C{=g{YmiBDCeftt zVuEitxFD3oR9c3K6mJP-_iirkQmw{0r&8~fWMa*Hmmo73xB9-se8_{-Y%WS#TdG*@ z@8lMHTo_2j<+VU4u66Vv#zUyjd=)=Sy{Bz@=G&^kE|?Xk@rg!{gteopkh)hvEKMa3oXeh z3jV29xR=JV_~ay&bmhZ5EjVXBAFEU*It&)Jtq%fR3NS+<@NA{jUl0x!h#QW@^e;`N z{877{N2O#3lW4%k4-(6+q;;SDHW}J$rsEKDr|n({!Qi1B6-7dg}1PtsA|Ir>K+kZ*eXmi1}RziTnfHHlWCH`#Z5&FJ{uJ zA9S@b$Et7W?}x2sUNZjLNdHulWI~o;rH$M6tySozFSQHe@yc9V)S3+NHIbu)eo_Lv zvTgmy4E--Qo)GDo!8gz-D=9xSo&cP$-(cv0U?yV5cz6^oGRL5JS1KWa+s$9%djKu@ zQ1z;vuvmzpZ|q7m;5i7dee1N&E~oH0NFqOreEm=bbx(H4?lS+)LCoJW-;TNn`aA8zOb`{ z{|+bey7JTBOmT0U`-~9MqxEQI)iNeiJJvzjie?q0O0$UL>0xZyjPzHs7ui>;m{>0- zF-R`fDsW#-nZ8BJ2iHtkL5jP3+|WptR^VwrCuj}sn&WJ!B8OT2s=0c^G(t#7G8Rz% z8l?sE2}5HLDipvmP+8mfHwWIvoU46Et27}Yw}{s+9Pc~!QoY8ay}VXlB8}L;Yzv!y z0K6UA_-h=J^tZA}nM_ktf&Em(+YSLO`3Ft8F=h$8GfMDiut?PU@@(FdWsLT;I=V{5U9ZYVKE&Zrf{7^d7G}du$ zxSCK8K$BlwYNd-E7O-<9V+~+73R+zuleK$q^a}P?bCWz-TDYwLdry3NdY2X)^ z3GUt6wx;>L>2!c41Sx7S@8|F7trR=&3uet-ajJYn+<5PLv|?e?J4|NCq;iXp+AIP7 zLot{ynPRDA)KLcZ5t3%8-{q3qgx&^ADr-*%o3zjZ)HsiTf3`VIh${TDJaMdwze z3t=Hi;jDWl7UYcq801;-1pXy@Ph=|Cb_!`)+SbY#;wUvh+YzHJkE)Wb*F}@2RpnL?)(9bSb4YSbAt}j8%dMEw0Sq;2 zmuj5uvg_{Izf#hGvR05LTy5L1Mn?_~xj#(3(a7P5EYGEwCqXa67yMN&*}NS4P-P_{ z5_fYU8<{$wf}Qqun*w|Wn16cdP#?|g`w{nQFD6TKt}`pb3v()l7jOt)^MUp)?Aol@ ze66CPPXfoE4X&(mCz>d+*v&o9ui@6zf~y%t*E|e5*Q4zj+5*T}{FiGQMDwk*i8}%| zN!EVfXQLs3*`LU1f}bW?YJ}x(^}}Anq7?Qvg(H393vH+}(@~cw=5^kZax;ZQPx`FV zy29}c`~E@2rHm13P>6G4A~v)*L1T7B9Yz>Fv8@a(Z*h%7(ooa*jW?o}Jz^qQ zEktdC3h9FwZdw&q)be&WhNc^Zv=MD_GWdz2&_I#a{=VKlU++aHs$r3U8g7+eSh;(o&%8 z?7>hjE)HU}tMN(0(p#ro<;xQsr;VIE<6m5=`ZsK%ezJ7Jd-M!hv4GEdE=5S?I@WB+x<<>pTrA7+)9^vVMWv!|iqOu_ zYlpVDR_0P)k0}^MCZrV2WxGWT01jf>Q2*wLzt}#Kq4W6SIJxfTJ|Al|7Lh+ks1em9 zBdvh(=5Lj{kiXEy_glqSzV)}f>;KU??3{IXVn@JNH*CD|YB98D=FXNMuvQK-2Mgy# zu9w16*iEWfQ8aPq>F85ht1Zyx1~s`F&5f3gu~RLdWH0qD`b!e6=#;Det})$Ux1##- zW{=`Zgpen=MVk{|KW9rmzR0^`hA8DSQ8Q}$TByeJ=Vq2b;~L%XJIfiHf^dCS^lXZf 
zuQxiM0mNgF*4Wr)@_NA8UR-345=@5?%x*$4TSmwNk1@ew$Q=l?+A-`KTM00!2XET# zy&wrP%U0J?AKd$~_VD5=an2XTG|t(C-}?#51a|&+KIeAMZCm%nY-qNmTWJ4-fjIf) z@9#+-^Q)s>|6(hbzr2qA^5_3n{Zr~2um|AWR5kEubH`97#WwMYERct*cZ&&ewpX`N zvs1VxgclDTE$&3Q^d9TFCQ*hMeU%9wj8AGaCsk6M#--E3aL*LTcbWzC~i>)m<6#m3iF6- zJc{l1{?!f^d^916@71jt5g)YWx^S+<kdZ|P3 z(uXEo6f;lBN`dx1R1mg005{=dq4;VK>ky<~H{E3-(()_|ZtRuo+w)maI97nS;gko; z!G^Cg0hpOX%9oil`5e%?{DOg}LBY^JkSq6tNd$E2vYx@aUMN_f&~mpGl)j2=wa%j=u7Yu|IdWL?q^z$M{o7_Qm5 zsBsh=x~Nk12x=hmK4FiDV=X*cxl3qjCCmzyCvfDL*}-)J?w+|9dNuv>t!f7vD#OZc z3z`^wDoG!np(Q8T6N^x#ewkO=l0rQc0#bU=7|4$I3Ez2Rju5>ux#fG!elOmt+(sLM zxeIIH(*(W2)`jG&v$^MrjOu~)p5@_yO%6;I9=403fDNU#z`zsc+0d5 zpYfUZtA`N!wGp83D`{p)xA{}--{oSd`H-fOSD zv(L_6Yu$H`=1D*O64~6BFFc@{20*6RPKdF`?61Bek+aLMk&E!pVRJT6Vt^1bVMt!w z0g{_woL`;>4Z(xb+Z*hV{Z)2dJR&(+zPq4T!kP|(QN%%CZGl%)bJij{KTUTSzGh_Q zuIgAP!wT{GafTckp`vv|cX8U^A=v@C?Q?zDZl^^7YzgX>eZuSvU#uKoQ=c_^a%j6 zx9l%tAmtf4%tdsVqg>TJcuRIvKcF2tvgUO~Ntv=(@=zUSbltCC!CMA#;9MDV_ zqzlHhE|ncw`u?Vzk1U8Zg>5NY7z1w{KRQ*TD{G}3=g@*?MpT_7v{^?(+RUKZ7{_#N z_#J5j*@-COPS|=5p2!?7h@8?LO_G-jh@xImriv5|X6wB>7#~fuUIy6^i(AW$sc*jU zoNBa?^Mon803&C-ao$|C8t4G_<7IK58&XbbCF$@PO&ju_YoZC7ly<>}ke4j+tg#^T1;*+a}Z!r6&9z-MkmU%7UGg| zm<+N7~#SKWfV_4N7~@81G`|ja#h+ zE)bTN@%%``P2DNTK*!Rj2iI!DBDYHBlguRrvVZOJW7y1lxD%B_*xK^WdZ+m&vy&FN z=tL#FcujTL$?(Uv83mCzbh*7wq<}#jYGaVMF8P^RK?RiFg`3@a)rI>Zb>S)PYGUf= zY*ugu1+_df!nRO+|8|`3e8&cf^HP9(x-)I%5n_^9RG3tp3p6l;iznv5Ds0K0#eCX( zKgl%R*L4TN&s4%D2h58RD%RIb-_8A6ad$+?p*FX7!U}7jjZva-A5=-yAfH+zaSs2xE^dJ>Ljw#GHjP@NzuyzHAs3L}b^El-<+xh?q0Ibd>qiL&4OU)C5Lwzme8B0;T7OkHZ&mV@GCJV4T6I zl4tUE*D!5rV=O}n_6_xO<&*N|!!jQ$Zdt41%LLt_RtbcXcSHb}wR?Vhl|LEU)NuOA zBb&2)=K~K;ZMPjjumy5O1%A3oOQ}nl(UU6`Hp0qP7aB9NA?PfU`#}-3bV!O7(<)}^ zgk{8pj1U?1a~BI*_zuA$t&{umcgCudeo0Ft1*b&+8jdX}$bV5j%};|{0^z|LYBP=po61s6om(k9`Hjk%YO;<`X8jNBWQ3np6K#Vrz1 z;#U2hy%x3(-swrOD;vgi6H#{h<&0I4y2Tx+&MnZXYn>EgURGwJ_QjSyjiJw~ZcE** zZ0{j0FLovQ%ARzp8f`8DcW(nB1LmeMc0-Je?ibQDtz2G3U!U-jK+1&{|4i)3NkQwu z^f~)j`|PxAyrYJiM?Ubmw?Fw3lBD^Z;N^myt`NFuajs27u*@J|43Mws^I@q{Yx(6! 
z<*m>kJD@!j&#uz!6WwhvJ&VV8>+Iz{=BEuB%u6@cT5c4gu*Kw?3eChZz0qO|g=e9B zrC041Lh;Sb`)iEA=tbjF2of zwVw<1W&Ih&;7#V#Cj4I)A!A@Nd_{)c}rFsia7)HC!3@b$E`I> z{p&le?V<+z#0gw}BnR2F=&Az5eSY7}d*gOlrpiN)Z9O-4CZ`@@m56^kE=x|A&>~M|jgNZkZb+?nb0>4QLQ(m^-*9`rU z=QQl+f86!gzoQ{VhxO%Mj90r5y%9@I&UXQ(b{%m#YR%=tMWTesU@>~PWM`K9_Gn4i z!%Io(9RB9sfdGYx54&0yS9eFAL{`?Ei ze>f=k+W&z`^xzq{%wvDLCzNUzcn{u%IsU}Mb2&I0mhJY=g}CXy70dl>xp}kGL}|aN zq1rxFvR?l$r^Tx`!}QH7Gn=D6aPk-rPjm0TQ}XZ6eSaA9g-3DIaUaV$X94<}ZI=%s;PuV>f2kmWDrc-BpSC{;QCnw^l!MC60a_*<0Kf4%=e~Qq{F6JzOk&9OW8!kDT zreAd^I(KP9MejQE9p(&|A&xw3!g2}hy=&gGBv<~*L;~mV%6od!!g49)9lH65aOo$D z7FMlrrVxH{2Bj#Q_?B4n!1T9oxFnv*#Zml7Rd6vs*G(Kbsb}%+Vzr8x7?78ut7~4b zyDIwr*i|C&MS{`2{Al%7X7hCMA7+4jO8uF~k-Meufad>^W;K$|g~ie!Z*`mgc9y%< zOc(6c)iuL&iSoilgnj&Nw}7n;)7&623YUS}LfU~8#<#(f|0cH%IHSP|4ly`0&~RDy znI0yBfM_K>x!rQLN5e+KaJoSnRk2#+0q)%afz(;a7uj&L1kp#>L^(fE1KxA6-q? zLQM1p*Zd7!>Q%!ATteDaD-spP?J7XgW9*&(fI;T@Hlo{NlCr)sqwuZ&62)^< z!l|eIhofh<5hM-wt2sUo3Ml!ZXq?$FvFilcrbDQ2@u$I?kNd~1O2{;;pG=Fy3otWR z5~RghRO6`9(|B!L^y({gH?v9j6m$tHE&YB&)4L@ODi^ByXPzO)KNbGt z{b$VRk3}7-)NOBC_a>SB?VS)>T}NAwZGnq3Qe2}_-oWfTPd@@-t)eOYM5~*BtWHbn zkXSOnW~w@C?7UxIkN$w8!J76BbMJjXU=sgH;hX|5dawZzE_*aI1bn)~L*&;cq;&95 zXrG?{{L=o8h6{G{#~pQl(#6P(!s(A2TM0e`F}=R|WBcl=0S?h+u2l{^X9efK*6`mQ z|EDsplS1Z>Q0EhtQ)*KQxQ@M6yL8>>`;6WYEQ~Ze2Z;6lsx&jGMXd}5!x4? zsM~u#FQhQKHHTzVJp z2sCc6`(qu}5-Ib0NpAIJvf#><8zG0`nR%%X>nHy-pWVk2IK2juSN z^c=CR8WhAIjHFDbbVrm?vqlZHLv40{%8OzdFC_(8^+KToTJYlIPy>lS+Hr}iS=d)? z-T!A>QQ|9r@iiQO_2SSz5dk7Wb#jYjXc$y z`O02@3is?DA?4tQ6nCDfb35zL23-D1xccY+3$*y$A}$u!Q-$^5wIxsLia>>N1L5@q z?Z0U_IUn&j21OO}?s`oc04$b6lH6SrF4WJw@~f6s6z>+!To)PBmG~Wlw(!-g(lwXL z{d)shaH+P->{qY)7>f9ByP*uWgYDD4s@DDf12(96Ddn@VP|tDOgMapY{8z&EKVcw# zTHGxEa}i(lP0m-+Db4?E&);_>cfkBVrp43&=zW#*OV$lW*$kJreRy_m5xUk3GT~Rb z1m$xdp+#x(lRGEU6o|xRi zGE3Y0HU4EC^W0|f1$UnKa{+S&ch)L?nQ#3P_uP8#AKYJjz<&^daraD9h=+dG@gq9& zYx{EwV!*GKHH}k6#IrZ4`@vQA-}4;P|D(YF!t3ju;QyERh5xf!|98uDJj=|wPAT2R zz=thwpM!3tfFscF;qT>juH?b7vmT6na2_`s9=vaC>;#j5_wz7plb=FKN? 
znziQO4balxid|a_&aNX5NZS}hYm>j@-~OwBYa3gvE>g?kRn8Y47m;}I4kkom!EM!m z&Sf(G`qE9ABE+vW&GRXe)`GqM>9fX546zj&3FE_E@vPyRh@Um95-_EKIfJij?Sn(y zXV^|(c+PV}(o4=-c-`15%pQ*$-MaT5x|)~8pAUg?sgJ+9@|6v=F6V!m-!kg!9*;-W z9u58KiuP9~aO^!4v1|FW)t4w(k4HE6p4Vao_Qh&)JlsDG#mQc-9441;;_rM*@SK9q zsja}vF}k*E?^nM`uLH`HSZ*w1BS$^l7$!lhp(C19K$-tlTPDZPi=mBj@j-bvEa`T+ z66gW>yt+;K0S(4(FG$rL88{kq2umzEhu9>o}(6ke{8G_Zr;N-fu|Nn@TIQ;J&~@*L)L>v7VdP z)FQZbhrgow+t+pPRrGJF1fact97a;{({*@z1%cQIs4qYm7!NbYrW zOr^grccaj+xVgW0Zm=K7f8)E~zqw`QH_SAp#=XTb@-%*OR4Z{ACi0(VM^5QCz?kAP z9@r^PbY;R8Lvj;Dd#}FT#-Auv%C=HA0iGTluo2aR>LJZrK4xZH?u~FGo zAhd;f9+#fg+y3K;IP(_IvuNwG7*|-Ys~CgSf4gl;rMq(oD|zx&ft}rUs>l(jI#kym zWb5Gbqae6_XS${zY$Q}+F9J=d{(%>(x2b#^H{@*2DZ9=;3xv(^+hjXp6>Z3m?-qY! zw-@fN*bhEDLpr#-aoA%|&HpR{-OCYmuKAell+OiomHP=^ojqu>$xXn0M5=*k-z-g8 z;HJl({7E6+Q^Wn8@d(!2v#I@wV@O-x~!3BLi`91$i{#L>P z=H|!wBKFz8#6^<7@H`Anz&!JR{QHS**1k~~Bl{BuKL6w|=`(HNwK^?aWY~A_j)u$~ zysAN3>}ZlV?|dcv4}CM8g~#rFTlH5vUVbG^8~tQKGv}hmzJBoU8~t*?XGeNK+S=f{ z)z3wtU$ry%FB2`6f96|E{13CZ*I#~GhOYUyhrK25Sj$e`J~XcBD43E4*$hs z|7WtcELGxWdSk)C)X3o-rTxF!L^xVBIGT6C>-*jiXpID#4t*Xso^=d!B6;0#PXp1* zanvIFtGGEB`I%?uj&<6j-=$tSF2qx95+>3+TlSW^*{WCrC^Yw} zyNIb^^xyz#DC*><AoV*ouO(W$@?hh%|8Aua)@y7y?+uD|SsWdO zjF~pd+q~Z3)iZ`I%o?z<+uJg^OT}Lja(av>Tk4^(Bt?Zo{!a(`j=&rIjJ$OJsNY?q zwC@!*($!DDhoiXJM(0Y5oNPw)o+1jQDa@`TwzF3vBqJ`t)O8GpXIfeY1_rZo_cF!V z<&K&YaUZSpiLRPakEVX@WwG~UOZ#)3an$P^2}?CpE$<;)lEi@w)!D4V(*M9b2%y(!tUC4m-pG zH@N?GNGJ|qaO?CF;ruL2mMZ{YA#Xa{uVh0bC`D2r0X|#;wW!u^q=-Fvbn!DgmyoD4 z{s>gEdjYnVA^Ux;;@WuW`Rf$08)-q7h$n+?tXJUt%2Stg9i#zyBYN=Q9UpAC5hZOQ zFNj)TPiwvJ^=46EkTJlUaEo70c!d+|6WF2=tQs=dI}@V8R!7D?W*KAREjBH#wp|Zu zd)?w*@`FRgsBcDozTwA02u$AgY`$JdusN5~(JTqE%w7Q%kMP}V3wy3`1+Z?iS+xnZ%TP?yg_%sMf_5PW*#6K)gE+Eva6x;eY@)Nd~!19dJnr@%Tmfyk&@GDx8s&W zLNW}K6A0hsP8#@&gG2`y-sF#(F}A8i#^d-7yCuWU;*n_^6n6q`Dw5UXw_6+y8jQ0G z2h+UD9aCCim!^|f-3veb&VUl3f(8Z-Z)C+Q>?o_S-o8RJKSgcr8x&99j*%@A(}@#= zUDUiK2k~l36!geNKnU|!XKtn*h)PS%Pko|}+>|-rgr;EML`4%bzwmUaYS=%DJMHBi 
z>)shUS4Dg+>wv3y^V-R{&2kN*5zL=IxtZ^`IPEhSO{{UhI^iQyJ*1j1#vXQrs1*7w z;B4+W14Ys{_7In#C`9zG1Il}bWK3+wnKza)R z$2-$a&S@;v7+8pj^M%V+8d|5Boq643ol9WPd#VAVouUE!jq|8n#|aNLzqUSchn#ZV zp%nC|jD;o_K2VCwo{eiQ?f8(2EiJE{KlQ1Y>`XC4x5{@-8h1UN@r7rn34kSdX6Iab z?DmDn$hI4@x*bs1d0sIHM;KV6c(f{BFc>QF819+CGEzt1zemUn&%S`^lG}tP2RefZ z(rO(6v8hgoTj21%SXX#zfyU8O*O$KQHGd86pr3YVPLMVd5BS852T$KpI1mmyV>gfo zj7Rv&1rNknx!ag|m4fQ-7@Y{bbjnm;Nqh1(KYjqijRY-@=>(<>xTE{h9=h05RKVh7 z04hmh^1ja`mmT`vkZXyz0XHD&$1)|M4_jsA)cksW*WbYlU>Nsgi`tDV8Vn&omyQmj}`i$kd+mPKRh#uz?J^u)JpZG-V6H!<&w0YZ=E z8C{t9UWEq*O-gjbX^l;bB+3X}dkE0H;oTgWTTb0Fb8cC+UiFfCa>{B&mA`Diu!7$V zVjh*y80+|ARy=HB)R5d!{($K-=)U7qM{hGP*0Rf<_rC@oSY0p`*vIHaP-%!%fN@?lm*-9Sh#?X0tIIt`QFn(ORXSfKuE`0LTyEz#Lzj3v_rhX22 zUfh#ZM9g~#zjNNp&JLMIg+lcpqsbO^Y7{$Jy1GP9WJyN6R)a%n*}6lsPhW7F=B#kM zr~lo~-IpUjs)B(oI?r^4f|GOVDT8r1jc4FPvMFqlV2f zxIypbRmc>l$f(v$xB)zJYGo_TTa&fHmQBuZgN3O7C>Y-l1aKdt>Y-8FxMYc3G+j&c zSaUzxGUu0}+@)ZD@91=?zOpN1YyGG>eFkGU!FFw`XJKQb%$T`kLPF)@-0mR)T~msC z#GR9Pksp{`CLJ*^X5DqC&Q*@W`_E!!EB8XNX-*3vOr!Jba^TtyNabrleKa?#l5y$7 zh67fsytcBeyKZPpH4Mbc_k`FBTj?Z23zK&wa>ti;T|M zowj2>gQ{eS*3ACa@0Q0L+!#T^xTS%R&1iI~`oLhcmKk6`qvszW8t~V2(+A zcCrTo4Y>T|iWMci7T%sI3ZJz9INCB=9=4x40uHHfr1x4bILGLsfF}+fIhxTlzwm@H zC${FJ$(aT>;-du(k*6#f-*HIWMlBx)H1Hu8JaU4q8x)=l$_rZHhD!%m9u0a7?|9P} zY(!d0&p{a0j#gHgf*mEJt(C(>hEH`p3tsB4&YamoqPh!{*{&UnTf1_sy1>V4Hq^5I zDyRANjqrCW$;hmJmGbZlzePq3)GI@gDp(ZBU+5FD6b#ENUdYTgD;6>Dpz|GXnI(bq zTfO)TDaCznn+)p_6?5Zr4oL;m1b_yp!w2OvirIy^C)Grqh|MA#6smvb(#x_ntK=Vv zEM7lW3Vj=wZ^~-7DHlL-AoBbbiW2pe?^V*3BAB|-7JoNHgErjZMc>li`0HI$hc(1%VIxHOWK4`@(Z4R z%-1u?SCh(Dhu>Dct4aZif9_gy72y5LJ^b72KW;s9_3e%q-DNBIb6ydQe!E{j?ACpM zqP(^29P!E&142J{9nX!COpDoK!yB;`cV@%az6me)869W4*r+yut8%!+bxdDXbM+VD z#{4lub=K9_jndmo;AqG4&Qa8y`g_tD(#;*dZO^`;+m?%mJZ z$L{FQRS#`(buwInS6aTTekOaNJfh_OuZh!V|GMe%#%+acOeNHdi3N=FE7vY3t+&V6DHzTG{fp+>X;?J#j2>m z;Q!I>??sR|aO{uVL7!|LJ;kWi2)>y9!bk zJL^2kVPKQTcCa?Mq=B^Dx`bCSLkDWNT|HFpynn<2XB20)8{1$>{ku-$sv?0UkI3Au z`I`}Vgq)8bEDFrMeelJxB0or|4CHP4>T*K)e=5^ 
zaHJMeoJ*uz3x>cpiY6dUy{EQZI?IE^ECavgnLsx5zVlmXrCNv}EOdAJ<~D8|p`WoD zEGVs7-^_F67w*Jxd|DUp=y!X%Ob@kI(J`%Se~#pxp8tV!CbV%Qix6+;XG76({?)cD zQLssNUeS+k*IM3T6X2zLHg8&AQf1&fQ%^I<bZuIF4jN6QkZkA+wTO95oR@ccffd9BDZyYsl} zen+E@O^Do2f9ZA9qkeSViydDXveGOiN1%`U?3T?pjc9LA@_qCAI~6$4{$h6qARpLj zNpee-K$kv4`^wRQY(Ir=@Vl-#?S+r4Vq&B%v7{tvzFn+VND0`(<&oXSVH3WOHtJPn zgvopJbIar?!P(~C3NdLuU22~I*dmE(fWuIcLD61Nh|87tJo%g9_6z8`yGrsJCGHfK zJY)JiodweKA>pLCf!?SbksdLnLOU;xcvi0?f?QI0Odi<3zidDRB5o1)Y^)zT8+qGQ zeWy^-;G0|Y%3w{SyGsa>(E&C4K&*Mvtd?_KlT?O%8hVfP(Our~P+8ydu>NGx>tZ-`p>2&)S$5r`gEUoiTSNqU^TY00glXiFmk< z!D5`bs|MUj_mdd^SrJHS$9v<{A&Yay<@*XN`qJvFJC&}tdRtT>Yi$D40C$p)HivyAS#UtY4wEAetD$$9FEeAnp2N zk4K-Ta?%1)`{xDZpOgvX#%YD+C!$-UiGm3gwp&ds?Ur)@(%$dp4UkV*0g%4^o8P;< zHOg*9%vMLRXUytJ)^~#5bUd{jB#x7;x);7f0yKIm1i2Y)D6!?}>Z6c}DpfuI=lT6L zC)@d#wF^@FxSP)dwwfA2Wlp_53CA4as1GvPU|IeGvWT(l^J0aCHZg)vYgIY`u%0DJ z9yYoeWxJ>5rUwlL%VA$73MXFAGn6k}VV5WysOR{#GM8g@Aw%V%&1nRIybh>q#L5k= z>-?Mhu#YT`L<$6ht^aIid|lJH4FdHn!Y9gQ;BN`z32N{vXk@x)I+G6Kg;4ApyzQg6 zwXXDC7F9ka`U9i)j>VZf+C5oCB)KX)qb%*_m<)nTK?V1rk;JyPejr^zc){UkXL7QM z-M>&VIUs+I+G{DJ2zLh;G^CUbO)-$%5(@6VbCP#<(nyetqOGwnG?WG4VEh?1UApv!Gr!W?>&wz;?dxJs;E9spd(dE6e4&AyxBkjN<&v)X_jD*^ z$>6N1rttYKg2%zTy6j-EdsqhCb@$nM8NZ^Oh-$zI8;n-cIbZ=XiJ#)X%_-FNPvSx+ znr&(7QSYM6Mz$YMT47kHI+RM|wq^Mn^^GSV>aGB;qyb@O=cr55K5<%LZk{nHLj(yj z?-Te+DK8{&S{UtjJ_9vefM?lkD z#H62p17A3NQ~P2|*n3|`^4I3XpIY2j`+96f4`)%GEyyk*4R~%a+D1)hJJmL-{h_PS zWL_#2>lpq1=rL z4*J%FD!YES((!%gx2v(rGilSir~Dvyt-RJ~{KkRX(*;>UTFnmA=60@u@VA-KYA?e` zmGxZw#cY;HcwuWv&_U(eR5aX*Z6%d`ssM{9mxgekEnQXr-4ZK8(Fj_86J<3p&~7D`OfpEq-85Cp}J<>oOG#$ zPAt-H+-w*$iI@5v(#uvkmos_^>udVdQon@CHpAO=%Vsg@h~DXe0@5op&`B=tVM$#; zmPp2l04Bz0Jgs~;Vt!Q_J&oCmuw zcJANn{iz73b+=^kra(^1CoehET3mhtwVJ>Bjt}Cxjo{7nAViha0kH%e)?ZR+AQ zim;geA}UG`HlL>=^Ak@J7Z#f1wk6VexaFNK`LwL~esq~bG=dAWWVsKO-e+1+fo z@i*@$`jZg~m>t{pQ$>VXwJZmMuv^l}l`nPBfeWs08G-wD%6E4TWFbc+lpSifS6)sLFXcK|mXz#)buBeE2b{j|%u5bCL9zQW&g<^Dq~FlXnYbttStJ zz=e>l5l|dy2q|#DZ>nH%WT?I7L2G8#`Hu6;nH?vnP1dFz)^Wo-6EO)M#f-ueD-LZt 
z-eaRSN?Hd;TaR4E15yWVVMdJfb}7eNqGOVvKKz~&YA zjbE^yphJf9NclOzhV=nICu(#$Xca>%ThsGh8qX+fsGInDV2Yq}7wZbv(s3BKP_tV4 zCV(cl=|)=-dS%ruQu9EF7Fc#e)7`+o!Z+8ofYg^V|M*>3L|^slueo^-PllGiN~J_( zT_x_O3iWf@b?j!C#qhUiJI35;f$}-F%MalDL|W(WN6O0zu#%{xBt%(!0xw9)XkmB~ z849gj`i)jR@AC|P+w=gL)U4Uow*ks@P;}%HlOK%M+c)S!oH~29(Tkn9>iR;k2-7|Q z1#8#o&>>J;8Qv;cj_|69g(e282K4Tj-O@$*OK*US(KV;~S{C|F)@ zM^ty2yR%2C_$x?Ack>bx8i4Ym`PZDRly}qd^64e|-TxtpT7uGE>$+XlO^$z=^T5H7 zup();8^W)k2#fqeH!v3%2&Uh$Fbb}UEG*s zgqdb%Dfru75i^@S=zWACYN@Apm~=0?U)ay*6y#HbxK&1FB$W%>whRYQxhx2plt(Bv zFI?>}wVTG-eCVfb7JgYF-vvk(My;-S#-WQhOy*P2&`$>swd$5*QTj26tE;ky#b9Tv z4$^2ZrEhBj^xnvP8SU?6-4i6dp}$_91TIg#5Z2k-i24}qZKjqdM%i6|+2117U9Ic| zZ!+9+M09&DLt*n(!s-mEl_UN2BMFkTgHeN2|CCmz_rP>GunBbi5fQ&OKv~=SzP=k8 zBw}K^jABIWWF%P3+h#KTOM85$@b@kWq}Jta#YQKPEuEVwUX5O5d(#2>X_K^)WT?l5 zS5fCKiH?$_h8LGOcdIwz&AkvtdC!+Ktnp+EhnKk29v~CV3MM*3)~XG(7q)C)>4wS$ zNRbHgdf>)@;GCMVv&V?DxZ{OTs}2_rGaI?*nH}Xe;V=^VY9bdqFl>ZKqWIuLwD{GL zGYvk4f{xGUfp(IhsQHG8lP))da1-_-Ra6M$XSUfKH%VX-nWVMs641sJp$Xgy9IsW` za8TL*&h;*O^d#Ff)Yo&XImHIi*_!phHCc4*QjKf{!5=xm*x6c=sQ0+*nOgOQM|55= zJfk*>6O6sd;6q&U^U+UVS9uxoA(NRO+A&Z*a4tH)e69{32p>x{$K0{-0V1BK+<3_H zX<6>u2PT^&nV!dfaLnw0i#y=Sa#Upht6&mAoL6m%6Y$x@p3I$7zJISiVcjuv{AJ^4 zSj9jwTnx$q(KOR8Iwi5{mm_o_{@tRC^>Q?Hprg3-!jmO%H=>?E>PMIy0_8DYtu)rG zxu%hHLBX_tM$!*=x0TN687#FD2iiWw49N6ppN;{0!*0|Gw|ifA@rczlpR%77bLw=m zlY^|R* zld^DY9uQjE=#lWo0HKQed9J$4iP$Q2{WX$jmT@-ZnvDo|>*Sll;x@nBc3*i|!bxCX z@&xEqDX2OxLRwALv75r7pXGqm7hQdVIZPq5(sGz*kVlEg-F`C8MqEb3z|1{|e#fJ% zu^%A&DnUSFL5@k+up>qHa#?g`E;YY_c;Cun@3eEJYlVbN4TXQpn|b1WtVvWAFtItZ(=Vw>br-!tO?&ht8=fip+|ha!)#0Dn zCJmtO7bz@rpiqwvE`!A_zl>2UO3LEVB*=#?M@hzD=WL(Ki+n??WwRlGEOQ31ywqR* z4r87$=K{|f&wFIlcLSfWd=GJm#uP&?ZnjU9U59lEMM6~6_ z#BuSeu{R^>bC=Ir;8`}x>6KVgpJPG%8^(ayIgQ?DD+&5*E#g>Rzp4u^TJ#W*hg)&$ z@DcfjwJhQ5A$|8X%Y-UVHOnZtwNT1EJci>6=F;{ClBDzr8Z@!M0O-;HxB#f7C(kJP z%|_`)_LVnHL9`Y#q)&4}*+W|L)Jd#ynRo~{Q^JpYLr4Rp2GKK|md{C>5`el@p``iX zTAS1+<*w7TfqVIMN&hm4`I{{bj;tDVMAZX~HjrwKqPeFg#FVo1wKcdfL^H@=j)>p= 
zmqq9|`C}y?dP-Khl>U)3)B_k1(vFkW`MfT=-{dPVemk&AuZDQ}H=f%cxwm~|H*hoK zjQe(a?ESJWF5ZXJ;jbF`Bi>4DLmveG>&xL2TP>eI`dd^EJ5H6;-pLM0uRrL}S@(@k z9%=h(i}4%8s`}?4jN{7X{k0!ru2KTi54{-0+wq~h-@Uc0w3`?ZF;{s!e@yEaQcYf_ zi@@i+Y8Dr!g01?icn_2tIMgA_J3D=l&!1PdfIZqEbzI)}w~^{E?g3qBpVA#l#SUTCsLffcRjO0d z1|WNXmOy_0kGWyGL$x@s@e{3etpN9Z_-aJFbEMnwI&8Aa^RyVjGr{b z5U8Xlrt9CY_}>K^z#$*zq~oqQMHlq?H8!(NB*n&)xC^}=s7>nv&otovR6~ji%5$B z2pU6MO9%XG=T5u!4vWG9&3lLQEs6@6gVv4}pv}BH5}xS7m}>ovI8+5+s@9}gO1bl* zBGDl_kaMcL%#fQ(N9E!oHy7k@;%QQ9*Pr((5ljBSZPR(jThi27Y8wk?-VdwrFCzm* z-Xa_LdBaE6Qo|SVPe-nNifNcJMBChTbN53qUC#D;_*`>LvUoTla=z5lao&Bn!JrTi z5ZfkKcLqFAhb+98%E(J+nM;XRyAF}A0RL_eJmqvt1^Sj>kpAHI1i`YO9k*b7xP+zHDYZkw_PB=rU5YIxg+W5zqE z)>*RVuW33ngXy}j#nY=-4h4@hF{9ngQ&bO+h>`gvy^9nfzdKSNW?b5S?RdJ~y!<~6q0RS;}o0Zs%Xe6XKM3DMW>`IdWo z6G<)S48##roPrr3DV<9UBt|00iQ8UJU~$5GLU1@Yj&_?=hrgPhmviP8BT~cziYH6Z zavv6yjkYzzh*Rk z;fY^6Usf}(mhHA2-RGdu=g80RHF~j#KG^p%L1S~>zz|Ozrr7NwCBj6QcJ4ch><;yI zwEx++#j&JhZe$zI5pBk}+t?zw=j$47wO%>b za`16zZ>Dk!Q2Hvld??isdncg}+S?5SSL-I-%S+DH$nLI+BylmPYS0{!ik^5!Nyi*~ zT!?Tq&La^GSp$gn(J3fk!5^?x(hq)&%;8I#uK|K_pBgFm`Lhe z&TJ{yd5D+7$pq#2sJ2qgVJos)f;hi?@DB_0GCrhHnSTx|e7AFfJ`_J~Rn zElOiQTs3&Ya#M?d4Evc(8=+qyS{Wf2y&G6TH`FE7B)&G4i}*LPkz36$y}bWqPk>x`%Xlwq0%`!EpSI>QtPh=vGb`6G3Lj*OiKxbTK%o>} zxWWQ05CRa(YfUG_Wvn0+>O(Ka4IM1ol)9)Y}qOEewafd!nbs!VC#T z^4)WmFp)*f1!?fl7i4g0G|V;xo6ut}>F_np!V(bXq@h2=Q=ZH~%*)E-M<#G;Ig)Xp z1iia``9U>YYIIV%h8FyFP8?o|3x3BX4d-Hsad9f^Bf}*hUK)(c5-^M`m^7gC^Mc=2 zRV%9n@CJ(o=+DObZmxlZf10wMwtnISOx3xCZC>3AnSAY#GE{XOQdlv>8wYFMl0{H` zlq-b92(o3!Znc3cySSWj#+631etGtZip9V!v)?U2Hp^>0ZIa^iFNq@Em~TyasTl}&V2KTWEMQu_vo&ww+t=V z134cro|EIGe7oGr4V96oi*N1?Cah^AGMG$!C388w?}p{7YtCS1ot;h0z0Gn``@?En zCV=AMLUih@B8mC;Q92zA_z=Xl8f8KSNT{QF#>wef|9nR9gi*AujQnleIQGVLJTzQv}M3yz@eu;&Vk5Q^Pqn=~a zD=iCmUP&JtdUxx(Ae*D!s_Ob9%jOu=1fFQ+X(Zn`+-R5prqk9cpwI&L3YT#fu=h&S z_@ZNe)YIGbp1$;pjrmdBjoh2g#TspAI9hz>+T5I~Xkfh1>$xtz3WXQ*)zE^#Mxjg_ zuO&iraXJMluPuUA$Pm1?X)rD{JvG zu^`ArFleMv&?%aKb=nUoaO!+hu`oA#UC$5T!yvMJ9Gy_bO9sFe8X<3%IC;v^s@G94 
z&I9bk%~1a_F2fw2zOFh^b8&p!bbR-n7gM`!*>Zk4&#es3k(ptFZKfzMOrFh7nU@VZ zM;p#~GX13iVigy-SoZ;n)#v@>4O-C;lUIlsemcgdi0oX(sE(2WkSTGAC{)x-U2}t~ zs_HX+Yt)761)r8v$`!a|DQ!nP7cQ0t#U2oJ{vh%ExL@>g17vYxcai5P7og(*2fME4 zb)J0T`RT$w_TW$+W`A6DEQpe5uBx_~IhkPC<{UkN@9W+0nhts1MnSC6)y;;avHjb~+Z-e6i5Oj+* zC*D3L9dGmbEP2T53r|i>Y#~%jBN;^jUm$Q);&#SU-Od!cO^sB`*%+6?CFi2gq1(C%f$E=iJ>qH{ zhC^0HYS&&#tDK**%8RBhc7@j|)B8T#BvNbI=q zt?``{e-+IJ`k>DaMela)>DrbKetBJ>_MU9_<+uYv}D;5}a_2{$I)bS(fS25X<+E#5Pvfhz4 z=+1}i$MyhEJ{1{KVL~;HkO`8&i{%f!h-xLp!TkYiJ2$Q-CrP&1F7Mg&gD*8GE!~3q zN%DuH*~#_#9j!AALr~Gcs(4mk-aOkM)XJ3Mg~3U^tOjjovm1#<7Q(9xD-c=2%TM;d zvG?9_O=WApICjU5h$uyrIugo&1Ox6heo9NbkK1{xY5^%>B%H&&-_j-uvF)HGk~v&)#c2tL*ix@~riIzfU9jo#}6| zQ?!8e3-3%+lOO>(u2W++juC2JZhH7FE`DLrdfn-vl!Mrs zRFfvP1=cw|p)@p2>uFd&FY4+Wxpw8XyNTU-J(u}byyKvQ2%*5YF22=>ajBHgQ_Eh**tm)4n97+Z zw1r*a&oWaR@ug>94tlFdzx+dDn2xezP6^{yIf>YI(nF&m^uUipE{=MAL<0)4nS5{p z-=GdGKXh38()Racd2X1zw!+@(+x58+GP_6oMLfehF?kvk?TtM*52|oXw{EM;HCzUBI5)oFd+c46~Ly?(b)TU{TjF>IWZcuYx!1H2 zDI~iklNhJwqnuSlFQ=;F&Pj%+`G~P~aW{^2lFKGO4dfr`5lWJzN!)`nr&s5aCXc1R z&LO0AkA@jLGs)abS53uQUSA=ZB44QCVt_>K+VxDkF?@#$6-k?}%X3z{9f8la^^DS1 z)}PNE*^0G}6U3Y3Dt+v+t+fq{N|vvR>LR1}lbU@FLtG2mNI=^mK24ky##58S9Yzkgw>s4>gNsaj&%G#m zu|%2`%?%A{pTVv!$X4kYohG?LbG>s-va^h4Vtl!BJt8wmFf)PxRbHm7aVXCF3(H+J zba)CuvFWE+)2l(yKFa-DG8ZZr#meEl z!iQj7+9%`cmMAitB!6?!40$RJ8vt!Dw?M~5aI|V~LoJLtnf`b-%*Mm<%00!E*sBKT zG$9{0xUDU8WqGQMd& zlI2haZhg=D1c9`*biROswy&gzOhCNJl*0N7F;Ku3ts3KS^R1DeZCjrg`@ zRJSOXT(xj!wzGkOz>8WJXI0G^h`){-UQre@KTC=O;4gM5Tui;aj|8-q0J%{@@u1 z8S8PI34}}@9hV1pWQ;S3?NPI+epj;O!7x2@gU}Gv2Yyb!@wm8sOC4smE&;bTea~q= z%p8-^(Y2giUmfYM{3tNoX(xG^W!F*VqbXIfsds!Lneb|HQHFC}Q*h(eQKlZ0q_G%WjUJY?c4qPWbBU8R(zF{P`cOSa)+cMjSAUaos3Wvu zq=&%kz1sg++ZeR-Ea9G;Eb_;{|1Idhx(l9Ee=*~=TIjJZ8bl5c6e21Q{Sa6P*Znk&%q=Z~=vCptimFDn{1jbzc$7#@^#4;HyY|Xr0x-U9Q-j~2YU1Jv|mFmm1%FG)G zXf7is>zk@(NfLHjVISo?pD!t|vsCm@X6F%wz^4rj((*srC0=|wl0opCh2>ElWnwGe zqWn&0KzdF$@()uAL|q$3JhWgPKBcEmF)TuhH?t)kR7eDzrLV#*o>(P%i5ZC4z36_H 
zp5m}N6m5fCkfW_E5ZdanYsEr$J432g&(WeFR{V#A?=`qzcvoWfrr__g`5Q zfxH`5Q+vnUs0A_6fBE8n;8Ip+`sOYt+owt#jgY%Q>KTZQu%!x{Kn zf9kT+O^3^HM7H)L4Y`I%YjNF>k(4YE-{7(-OgV=_yl0mz6D$^F{`lp+OJ~CYAX9db zUT?m1wlUn(PV2rNyw4L;=@P*q?-W-v=_lm3PKgk=7b$CC`qF`85@;ebhW@G*g0IQ- zT0nfv{u(JK-?v)c#TI98rI}EvI)!W}FZ!NdDNU-O<=;^PT#x7({ggC90SCOt@ zALeFrRZVel5&P1DyK5Z2&#_e0j&L?tNWx%3@}O#DCT~n<2;q~&3sw3Px);;r^u~Q| zi@UZmF3fPG&DSUA27J(+Y9^u?l7E?65$70mRNKih`H*-G>D>r_Gc8;8!JWpCzNZ6; z?9mgz;4A$1U#3;^9ATiCgLq)EQfs|gv)8-qEY!kF0k(CeeIjp|-G{mxX>I;42zie7}m7B4M@Y#6U5 zCEB#EN8mn-P*1yz7;@(6KTB@BK{4c;J~fR`i^Ytay#pE6&?p7$^`v<9Lo!Fzs-^G8 zJ<#t*Ia^Ooj8gO&6V%E89&(Edc%LdZUm9cohzV)kC5O@qVP&qd%V5R0&g#X6VN2EZ z(|6}D;Ma9DGJVQ6&$aIFLO9EEBR@=r>1&#R(ARApIlO!G+!kgBhWlS-&@3fic0h`H z4z%{}Vb2W{dOPcJ79*tKrJ9w4vvKm$QsMJzjIecFnagWJ)|6$g?SJpuK8sG1t={I2 zA;<+gRwBiY1rKrxrdUhIV}m${R{gWOdN&+B1=vVlBw=?s``CImJ>0tA z;xA(#J=7iOP&xU-k)E@}D$%%Zk$hFdi^QoE!`C%K_gZ`>fY#ooPKYEAnRn6Vp&}guv4B31Fj}+UU$m87&PN+CyDf>15Nf_5n|$ zv*5EgK1+T&BgU3risXNbd8=iG^K-xrm1e6LyJdot@}m>X@*db{)i7sfPD~czP{~%J zUfrDk;J!*Hc|E_{F{!%M=3Zp04kWmq6Tp^e9XR@B+`-r?VdX3{oBO+?Kh?(TP z1-{wKes7DSX*Y&z=gysSF%lF^iE2u#m4M_uON0-BF!LbI)m(HXrfHk)^#C}g0G7?t z6+Y}!0ckE&inuiA7WdubQEpL7scGdJ4?keNWLU9^aX{7VndCw$pcyJ>QA8FUaZLB) z#=7TQ$NFCnA(Rc+iOy3Pb`5Tg(q)5C*{N7@>C^QOOlpNA3BWVqu_(+0jZcJJ$AtZ5 z_hZ+2y!+xCkuowH&A$X7RRBN7q({eRf2=nw5YtU2RqIS4`}M#_pn3i91mnG!EdTGf znWspx-b@>}LWq+vbmrxL$StqB!*SpgFY$UFk5ebgNm;(ZhID!JXleIMcasi45_Yz3 z)caWWUME)*PqsvRZebAsCdvIm-I{xis5cos=v~ri?C9^|aVj5|JZe+m`9k-s(*x9>LcI<=mGPd0H>KaVgg2 z1)GMSmeoKQW#uN?;3xfS-UXdBk7bnQbd?W3sw|d!JXBJ3KsTZ@k3+*@`OsTa{W=l* zrZM=@5Fw}xc$HvmhP}K+n<`33Yp*o z)1dG`S9n3kdQxhPaOKQa&w0l?ad&vPa0QVwV!T?)$G2OJs#i}(R5bV}C`owDbnP+y zEuO=M@mk|GcQ0=^51*(R#KQonSC`5=_Z-%M2ynp+Jg?w6at1>-*kyl7J zjImjKwS7zUv+|~%HE*wp(ywXp1U_At^0$~}kp4*~9VS0l*T#HWy)*q7Sn+GdB6lQa zIijSU%0XLox<~viV*$UW#oSkuT(DUy>!;Zqb=i@z9aYk?9{H^nn*T4Vg|Y1-zhk3; zezLmJHKs<@3kdqQ%7IOp$8z3Q12Yc&N4gcmG`KN_Wf7DDy_)hSFXJ<^4rF3~so2PBJ`h`B5*)(_hGHy?hk 
zQu*afH1jCP=vQE6A;eddp%W(Wf7S>>Ji_ip1rJlcRwBd=X3TyzW@94Mfpd_ub=WtbyjLS6L010Rp!Hg z{y~LTsQ81nj<$g!1g|-f6l8_QXrN%=)pK`!HnxP5KgVwE$#3!LueJPe;Muu5Kfjz@ zO7U8NE$tdRkoxmW`G16E@!d80^UGu31bwTc|L5A#j?U}doj*3LA0lr+D=&v zPWR=1VWC|ChV;&LUrp2@x*qmp@J<4Ch1@tpi)mY-pUz_lpY0sZqS->PdV; z6Pb}6<5y;f^2d1Y#58SdZr@Advl2!ZZD;k;UEkH4@~`W&e0shAdw z`F(`@?%#K2NB@Ng_Y!0-4iU@aFc-eCpcjr~D1qJTEzKFq^A^|Ui)S}AY?e2471jY8 z2P`udE21l&*@4kM0GG`4>w%wMJ{WZFUx`{Of*=13d+hH_+io#d{5ju0=jv>XRf=ZM zxI+u?TK@U99k`*G#-r*))kl;7fx8zT-5+1t;)63tl~@SXrR z@qf|m<5#b2{a$FFQeUHffP z@&Aj7*R`EE4SdBU!7N3T^X-eN2$Sn?E-umZd%Ke?KDK{ktvgo1lNK zjsG{)(Sfh4m+$g^UQV2wxR{>E?vG^1#me^FRQze3^GDG6&HaIxOB+R>Cmg}Ct7~*+ zf^Jh*(NBT@dw#oiy)BC+AOGCEV9)kx&m%tof_FWAsA;$V-%swUQ~7%)*nhC>LQk{O zc^OOl-0E2su65_J@8Uq7|{bZg8yP^^sY<6oBbYlQt9 zwV%O(xt0yV5BrUkL-Uuz)*m0e%Y1$0w(q;Z+o?}$epzf@Wnq!_>L1kJCMwG((rp8_ zrQc7`7juYvM$5|rPUbc}(0BBxf8@`1e`zzoEN&InZqNu5^q7LGXG)(aCDcVA(&V7M zHE3K2_M*sub@&|tptAABr3X+C-}}9*&0tF?q+6a~vm`xF>99e2240Vdvc|BoCgWvLEtVR%1&jNxEG|XIHd(vjE)?PLDI*kGd z;t^9clff5JGz)Y+O0+1_$S4BsR%F5%;*^eEO&)`lrd?z`c6%Zv4uRxPdQW^@uMV>0 z@!W`G2tox&>1?Ubt(#%l)_qL6t*vuYFo{QYIIlG&=tPS8#kgO_!~H!J(qE}wqAG#{^7k`EIo>_%ic%{VoA{NRaxM4*gJ#|fpad|?5f z7}*jSprg0Nmg?S%8fOA&`(KC_p|bjr>_^Z!TbKH!nHH;aq*4LDD4RQd$JFu~AP!zG zoNY;vYvI;UUjEPmRm#eu8ur&FxYcD&;>eZ<9EaDJCR9j6%jidx}WQ)~9ucDmVRG45n9Hv#4Ri|a2cY1D& zs(>7_3aoyp)`F}6d8Npo852`^YOMwAfkRH^d)${G#4gMu*=lnrZaLPQv68&?zUpj? 
zR7&rY+@LT3J=v6h$UhR|B_hI)Vxabvd;4~1DYJ5VGt^&LcPJ_E-L@IGj7@NbH4t#yJ!R8eDPClM{-hX|A z|1%ho6+dACxChsPTUy7L9xH)mav2RL)3tL$^F|OWrs>h>>a<5}(*drBXi@`c=Jww0FYP#;jurW8cr z?FWny98-WNNdYIAO3o(MhQD)WDRI~wy!T{Fv)NgfSgXWQSKP3n0iYX4;L^WNXObWK zB{x=xIPq#-o5zOmh<6~55~VNi-6#};#w$G=M~pic*41E{JP$Q1{FdE}lo65ZeLvy7 z=&^Z--4Sv%OFi^aRz)JJZ>AY0^<0n;E0U>U&iG!C&<*PM93FP`^XCaU(0cy1xZk@n zwbX!PNt{^>ub5^XTU(^uHCaufH3Lvhnq00I%8x0unf5J-P zb4zVX!x9|cIUs>2H8|DqGaB3-BI57ec>z%hhVm#ky!*@n`gwo5=WpB?f%M$$5NDrH zI*E1NN-v5{0GJ;k5HoxlqLlA`f}s61&I&u(bi~S{h$Jg5Uu)|Kk3|#Ug_wFqQ-1v6 zxJ!>zw9G+@HhHmd*5)Rrmr)=7Wzvg+UhGsG1Eq6Y>E51C$PmV5Y=`!y;9Kcb;Sz)1 zO9IhZi-sHPeCX(yta08(5#(4ptSsbZXv0iZng}4VLVHbofX+8S(a!VEyX8$Wmw=hJ zc3hlr5RAS~1u{t}t>o1td$QF=#m5JHS8yC_zYYz5@8;Ve`ez5xSn&$S2<5l!yT-$w zn22+5Ie9KE8Gux33W0)99aMT75!K_(Ub|9{*!&DnZOb0j=Sm*mKAY3orgI_H`iU1! z(Il;23)0xrkdoQR*V755Y%IuclfdK|y9t#Abu56A70+vXKu1AJx7wRAv=d6={`ji?bDoD!;y+!#;~a z=L}*D-h`1iksIgC_)U@EQuWL*`Kl%V+pT?o%piQ&pmkQ)_zF)$*pU3OX8WSSl{!Z; zbe9H_(yaEFuCM=N=kuV@kB($utGg{G@3@Zu>pLY8Fs(!NY$##KvfE9{Z8SHg5}bqZ{$*D&@h7U zPKgI_;>($_;8VK6sEz?HJ(fF+Uq_nVHMVP01*WvDJIVcqR z`DI->yS{bCOXabZU;_w!9C=*b#uLGhuy=~q)sy1Gr<+UXd7znoyK0M0DFSIb6((2lR-MlY%bZon&TomgoD-F{H!lQ4 zoXw;pPQGiW1$rO=r|)a}+Sp(3TxZKtV-E5)<>}5Z@9-<--OhaGJLnOyh3gIko1-G6 zL&Z-mv*|+=LrHv!b7f_Gd_uMF8%f?WGMv}mGKnaEY}3l@1SzIvQKivYIc;nZT>z_| zkD*4&D8HAg1q1@72i#qEe;bGY=nJn3S~T!X8~~D1Qpz{^TE#@k$AYQ$HI*Yy&hDFf z3ltB~w3`bFnH>XY5*R#;!5)1h|J`7rMSlCH=TXt=*+KG;D~ocqJ>4S$Bs>y_UIwp# z-)El9-gam-QanJp*$$f|GcMx*ufyqx=-IbIA)0lk`bK&+n`O;`#~pfEI!76xq{ zAWb+6z37%+i3WIrd z_|!@Ks#KSOSE5Y56*UVIirspmX+OPcX|wTN`ejuU=Zwxf2Ton&%0-!I%0;mu;aE0b zm_(OH(u^euujY9m$b4^`cHdFZ`#EWv&8fzFQBb9EQ!bNEm^VEw!mbL3#?t_PUgoP} z#sd*)XD<$e3^m)Vi`~d5NEZq+ty_H8KY1V%X08-MMtO`1g7=Ay%svM8@#K6`uhwE7 zhplxV-@Nns^Tzia)fV2#MV;dGdM&4-PTb8=NA_M~9NZ?(FdKt%6aK;yrm7SImpCkc z^m+V5ZmQO8p)O=>5%O8eb0joYWQ={vx8S6Y1qS5dWZul`*UDM@X5`T%!&$Jy;C8VY zQu>EwZ@?{lVj`-NN=t{Sjx3Vdn;_~gq*A6g^B(Io6M9B>VFH?|K>)sb|<0QGHI_wLJIIwl#f}rV5z#YA0>1u+Yz>{ 
z9ijBZPM+|V_rpRl=)Tch*swHZEE$E4>&?TPd_bgGY)X)Au$5{I+QAu z^C};P&%y+|7aJHEVF?wl=n|1@G7<>yl=UmwR~a);Pzqq@i2-4CEtl+Qyw{etE7@## zHA>ewzh-_@$2ih#-gHZ(wMRnQ-E{JnhULlny}5?hn@}QhG;$r5tY>hi60~PH|3eqq zaKe(k>vT@v1*W8DCFVmhZ|jYXrPPbRu-HboqPBwg*z8W53vcjqh(@ZE^>TvDEjr@qR!>SwCm%6Vo-4cd}6}NR{q`U^=eUY6o1s`S@+oYj!C296ZmYt`KYC9FSI*!NIYlh z3P<}KgSuvldkJ~c&6P*5h{eFU*z{)HhoDS?m^40q3J3~*Ea&y({Ef3S_D38^SE16XOw@=NuC@g=8xR;#;zb0OppBGV~T$0ggJw%G9FQL z==|%CQ;uv%)&4>+;1vh3(j(oYLf*LK_nFn*C z#yiV$9b-^D;3(_3Bh*@B%zEtTOmag-0HAC1~#nkGtHA~XbHa_{}x(90u!vAslHK0ff-C4~}I+iL1}nUN56 z{+0t4g!WVBtILYAh=3sTm!?j}>t4qL8+ z44xJLF;Ow}^@H)opkhBkX%d)63ZkX-KB{iC)Dn=i%p?mK?kQj^`0o1Oo5KATYxi$f z5aD+Rj;$EBDle+nChh$(%F&&&5l+ zAv?_@uYN1TU%flJ9J7}Ag~g~M;plheq(C;yNsh(yTt7+qqXXOyvlUedTPAlrfmwAw z&(jP{b|$%(m*RgOsZho%2e6I>~mG~pEciu zBR{pv9|n;yFP|)VNKf5A6I0VJ$;#FzbN(lhyG)GM96oTn2$|J2W;|_E9Je=#YbUZ< z@(V#3Huf@O)kr*z%&H~2Gx>kx{`_SxQt|UIEQw0mpZu@hp0#ymZrVxvC%n?V`?sb3 zQybPUrN1n5*DlhgqMfqunf`hg)wSQXtG_Jkw+O#l!9HLoh3A3W1%Xn#UNB$p`i0W- zSHl>SxIE(5PwvEozxzi!c=nsD|McC#V?0wtT;ND#%;nS*+zMT$H@>hm@(SJl8>;h5 z)^_i)UGZ(Wvm{v&*B3g!qeid(jjy~zoquobO#LOfzws~IuKUSx%WB(3TId&B`Npg8~Hc_p?SVB|u>H z!=nkG@iYJMgJQsJKvSx{`A*BeJ@MV&yP0}U9<9njdNdnebIBz^7$O%U)Y21c?IMcz z1De_8N==9iC@Yk!VchT;)03H`JB5C~BEcE(`92fwg7p0tmd+PP54d_x_6YAb|zR?psft>)zg`+4zWGk09J=@1WHyd(y0lY6 z{`~znUHYa=-^QgsuP^^q$ZxvzO_%E zKzr*_0gCfquZswlHZQ5{y7&J7xOU$Knm6Pu6!!Ccy~;d0%7nRL#;yK`1l+gC-zv=i z|3o9Xd#iXNs0{-`0AyXG{@E)|;GRTMt-PzA4bRPUHW{ zj`C1mJrZZ`-;h9))mG-TJRVoPyz$)^mak()P3%qw>ivY$SS3Th!LQem)?N4+Tiw`{ zwXfH8G7Ey1U@&Z7uJHBhe3dhA1hQ^C8}apuz40yb|M?1AFD75z@U^wUx-=2GRvvGE zb<{m?%bczL(bmF&l&_FL3HYDzfp*}j|5uaSzw_?Dq^JE!pgNhOb5nS`iui@)=6tqA zb!YVokYjWIQP7G~?H85;LSW|=ct^pCfY z7(>SGZ_fvAh|3(-^4ve5@uc?0aE<;0uG8)s&1tpew#Fy_6KJs-BcTnEGLB7Nh8r;qJEngL*jt+g_UE)@;&z|Z7$w`YX$r}q~o@To0M_fS9;A;;4^y7IV; zJqZPR!^Q9!$WgzEvBCUMI-QITS&o+Wy2OfS9#;x2-pY>TXl2MY0T9le7S9G@8%*rL zhSRhncKvIlGzkT3UxFX>iju)6COJvTG0)2PC(nXOwNZwHOL8$Y9A;U?=Q>DTVy5=d zg5VXma%d0^J6(a8X@wZ?Pq}F@Ceh4Z?a`s@CPI67+Kd{(Xdmi|uGavZZ8sJcw!*6m 
zF$d9pjJQvGF5S+M-&#GZw*@Hcwj$@%d#qGR&($`0lrwqhniv%vX2;Qe z*hUs0B(p|bJ^Kt}vTPH3+_0bqOSX?<_g7F~Ik8c)N|KRhf-hLJnvIG~g@g`3l~m?b zs~zK;_t6mky^98PNWWBBQgTSG#n>&K{@p-Cde;@RkOS?68I_xr#OOYf<8XPi%L8Ow zUU}w|x$24s39}Hy9d@8J6`R%8x#?ENbFB(5iu6_ONrEA~S^#abY8f@#;EvkTg|N^g z6vaWm@Q@;vGql=cijFVA*5qnZc}j}>9h=+|MI0_#WQoaE;sLcbddH*+$X%=XaB3h> z|7P=)DUfC<8c%R$--3_MLJ@KkH|;gJ6spT!k``#r&4nV~Z4jk47{V^els_|Z7c+dKhH0g zm2C>2dQmB~_(^qEC+dNo!Bwn@WP)qX5F$`B>Pa1=qi#4Fx>{@BeBzTHl6p4FS0TbM zH`XT^-4Wf_0%RCQSy|^ix0JX-tEB|VpY~2V(83+~!#lTk$*yKu%|4nI7OADn%@7Z> zo`ppwbgkDgs%H&lc>!19xIfy8V)J5{Gc%-X1~CIk%P*rFG!cq7JzRVh0?J=6X6rl7 z_U@i@35S4l0l_$$P0|O-%$4#mYm%KVm!;zMpk)0-hCN zWQ$OeJ2T{N(FShTGbqqAsB(Y|rmN%(cJGZxv`VFEf3$9s_EdOx@;c6$ge0>DHq@>I)C`?zi~G7YwrI!Xys0Fn_q+f z$(vm}cq&gw-DSr51qw3wm$?RWo8-O01bzC9cAFAWsrli-?dMie_TI;|9yF~;T?u|~ zjPOgrl5?&|ghKCB-|=@J2((zx*#k@!srCfy&&OpG?A;}#B}G66M_!Z}UK|+hB1!7K z?;*1`MsY_>saldBYF%y-1GH9#c0bC#+^!*TWNaXA+%Y)oI<|aMau!hWr_&-DZkGh;&)#qiAB>EWiPkd5AF?k~ z9+ty^yao&w!Rg%^>T%+J8kr>z&tA$7(Cy2;&lv2z)0uS=a zD}~EA#Tuui!W%A$2$H&>J}J@`l_sJ3-?!bV6O_gVw5)3#k6^IX4G5+W7&<1S z2c5~?ISJhG>+6UiO4UfQ5IJPnHv`Lu5?O;ew@-N;;g&5QqIiwTu#k~n@--#X!j6=F zYU6(zZSvy5Hw*rLA0jkDjFIhvYQk=l#CctfLU#7lS7rF7Wj%HU1$MTZmk*37 z4ZOJVNj@y6&Dl{OPmOCl)zo!ThW@S)+%sfRa45g`g6GRC2sQ0Rly<_PJ zAhad~%|v4kL3`ic5^Nz}A*2l4g{NE$Z-c5mey^C)2nGohq|Mfhs#X!ORKBS)?7aoY z$RteMwqkbasNn=P{7$7}(AHjUo#?dQnwyDs)eX1~k+Qa<;5Ig_2Ne8d!{=>0>*Yu` zBOvnGEg8Q`Dnr9Fj^50$4vi_DuRMjGG3r}M6VVQxpiJp&C6BE%w zVW1dS@6Us=;>owiaS0yo8JAkDmtFO12onf9dzC|9SeBxH_>})_`XS+FtFwB_sjk^H zl721afsH>sd)OcqB&BW1*^o;<(`OmcBN3Nau2XQ$Eh7S-h*VGSdCwg}L^U~@VY98s z9B&?BPrUvCqf}gP_v%H@IpDP`lLlg9DU74s5(y^lxe+#CM`&HUB-3}c3&-?M7(5q> zAaWe+z1hEDu2(Hiu61DYK22I>V8S5PHLHAJE*p2w=@s2^KEOFVx(qvg(cN$M%u9bO zSbbZH?;$|!Lf?jh)Aa1KLOLHL9ac!TJj}~SBeUB=bylq>??BT`MofB-pzJ+ zbGA!S2koFkR)xUddUHl?>{X&aFod-L7mDjQ>7Jz^nBw@XEB?7nYUj&|^0@ zc;&D8ep0Z2120{{I7)8GJS(nB=7*&xro|;cS=sOaO+l>BYw%F8V0s0&+I&<_lItp$ zN>RF&qk{vUkd6yD0#roe2QC@diAJbNK7Lc_o9j}-n%IWm*)H?Iz5Aso=DDBjT(yu 
ziih&KQ{05r@F6s}6<*VsVk~rpp=V##14o`>kIIFfBHt)Bva?zPbDjL6?pNDswn;D8+6ja^Y*PI89rdn3hRmEU%>_c!BpoGUDnB|7rl~reLYL>9h6}j~TMr;s&Wto`qZUP=_C7+!Pg~sxHZzqI zc65#Mo<%(a4~U-x!0+TKoz(yeOk}~62bf83YS3w2uP|)n)QX+`NV^9r4DLH;gpT;| zaz9M2$vq>LJ0nj0CwH*-{)l1oj;id*ahHL?ZVsJtz>RHO%X+orYo!s3w=m$)A=#a8 z0>nOF@%wSSqY}Z}c0397l5Ixes9DG3@B1?=9z{Bs%dZhtHl8n0=H{mlXIrx40wGGS z-E+DnQ$Tl%tLb;*w$CX(_n7=dK0fiNrT=Ypk$AGd;q$EmSMQ&2C_->UaIM1=>l*`WI z|E|cFr@e)HQJkgu=k+Rf7Juhg@E;_9Om*!ny431w@hprx5)s$2mQx0>T5W^#Whqg8lA$|9$EIy9<0K>GGI>esNlV`dVxrifj_o zJ?(l!8)HtV13{GL)w|U301eRNPgemHVo%Fg|>X)%eQ84iUW4Z#^yn8x$3tW7}8ZtL3->UMOEjxi&nmh(UP?^vkqaa zH0Ce+;Z#?di=GZ6rvZpa!wa*koM)-S{hc1@U&h(J6YBN_`gc^O|1$DF5AfA^oMKJa zoeX?<-+#MbS@z=^u3vd7)c3Y!l# z`&v!fx>aFbwTCb0H1>wsq$p?k>JZ3HDXm*)21mD&6~$97VyGo|_q?ZE{gWlyjmx)F zdAnTaRDa5(J}#+dqUc88-FR+e;D`>E%Z?LxO9t|6x|f2ZkY@-uzJzfB1epilh1Z* zZE8bpEqM@ezE0Sg)@c`la~i~isMk)8)Dm^^K-8E9lmjF0)J0zd^N1)Cd&qi}@3@#Q z%!>Xv$BCInEPHQ+5T z_ty_JuF;;es4?)skD)E)n!*_^S1Tb9P|E2>9Xu|l zUV$sKR)Cb)#$Ge-Kc|Q8y7g}e{yJmTmHcY@+W*sm|37l0`%m+r zh-lBQzx`Gdli|~X}H$i1Micc{gt9c-tL*w z(%34z^M!?jARyou+;3%~JB?jkD>ELylC-In65`-kqF;FTK(71UzasZfhTVy~XZsLV zEWT;E2&0gR?_8g@8N77ja$s_JK{jyEp6Ww#{!z%NxviDIVz#Bxj1V@G7D1{+d%3mfbuA-~3RAtfQtl zpO>*tMK?5#xQmG@iYB$xboLZ_fj37lNXo^JMFdLsrhg`NE!2c>n0oY8fT$(f@BCUz zA_{n%mrRL2PRLmy+INsZ!M(R{WRbP&ecV4;x)- z3Z3$mTDbF-3hiUhXt&Z$-DohCW^)I`nLI@41!!aj2IUxAhQ*V;4DnP-dsr22JWe5? 
z-m9h-KGnf==~#_EvLpW=(Y^rU`o~t`EFX zx@>~1YHC%XT&tmuzLR-Yot}6p-k2nF)38x(Lf@uT)o_N|!*7=rp`#{Qg9}-eS5=pj zRgHX;j5m@uzKgih8h^ME8&)^f!Sm9@Hj4rgB=zf0$aesIL?3%E!_yl&g zxDwFK^5&{e{{iQsK9(2u$f>gNTm?!A}5yhrdxj8D%0v| zz>@t8JY^U0S8o?r^`1fSZrWHfjT^kr_zx6v3S~&=6ccK zLF-VIU>)72$mk;edmP-d=a5E=#Fe@cr|BeGwoFg{Jz2Mc`7Kt&!G1udyT>D2zbIC- zverPvR0uY|6CB>0nHGvq#D_i&9Z061!_^hK+3{IPmU9^H!OJ+S(1wd0p((2>;=JAg zad}!nyed4}M1)A~9*Tu`$DK;^_*XP196?GhQU^FGQ1ro-Bi)=zg)AsM_zTstQAvm7jc?(H@{#4mYFr=v9rdJ2a%K3r=A7*u*tu)nI8|E`Hq0 z3ATQm7zSh0Y{v4C`{^n64#*%YS9-ZugFRJwE`rJ@TYm3Br9m zvQRsXuC910T;pQ4$KzIkqDYvqtm=508X=JwD1s8X_`r-UREKk}F2FPQ6KB3u1l>|C zesdo*IxZ27Pl20qVGr=Rg9=kZ!pb+rC(5v57eBxZ`4Axq2_cxz)?NaQW8n>IW1i%< zd@s6@sCr=2tcI4QZ`^g~)|NUCLz!YW zEdR83enB4tKCvX-G{g7yYF}(Ke|MP(6FP^4B!aqVd>7h{!02U+)sl?%BhlR2yi-J9 zT~EUO%XT?Xza;dCYRJ1TcCOBp{n_D_zLhe>W;194{u1W%h_b|x7}ygV;iqwc_a=K{vl`ly<7hIE44fP~|OZ#T*Meri{e2kxr&YL#=UYX64 zErCu<6#R+z8bhPqW=kq6=5OK)KNu1>QGw|m{g-_YwN)VBFCnp`r%xzaC*=q1L-bSZ zH)O$!kDd`tSEjzOsEwDq-eLjtUo)XhcWqKbEz-Y>Az4 zhmktQJXmnR2d0&O8jU%kU@QPd1xlkr6 zsyUT!T^6MKqI~xo7SIpoN?C6*X}wuB;;WOD8_#29xiEExfFVQ4vs`Dn!8UqoRauRW zXHwW<)|$@cV$>g@2aYy(iopCquNyy(&m;<6FoBsLnA`61B{#2ncxIczQp;2k3%|~QdbD7i%#{b?#{xE zeVZ#@mZ&02oak&yo1mb6VVnkn)yF6#e8g>KDxwSJCc*>ASu^v+WS>>rC8%j21hOX- z3rzGOgXpRwGT@i~_#ssP&6qtr)9R*f;_s{il^hM!8Wk?Rt}y}S`$nakc-b_UsSKn; zkj6>??fNV1XSR|7V&u6WlOEUArFbBO`G7nFU4H(E_C{We&Eov93fA{5mGp*c+-mOT zDUb|})pB*;pf*y3wd!8>VrMDH9-fm&Vi7rsczmKHP<7OQm2i5LmF)_5e6~C=+V1on z;TQh*n{O9XCfUMdx(NVWM#RMg0Ynr;y&<%vDstGuZYGTK@F9G7^AufFq$4}A^3@#` z_xgejqiGVXb5351DAHkl2Kg4I;Bk6B-U9{$XXU3HI z>VB2-=Bpw7UTgJfuCr(U{Nc&{<;PR@wf~B#l*)06xMcs(>;DSz$68E3U*`53f|nw@;rH8yd>yhA**xbc9GfGmh$8$j`FatuA`| z^FrT;f4bFA{k4rU%az}9SBiyI?_XNT6v>4Y*G(XbmQ|79>@MaKKDs~Eve)s8RGDgb zRsGqT(vb*FrRL|?x4Wp2%bP(LWqJ&RhP8ec>;awxQ}wkt&@F9}F*?FWgdbWBA3il3 ze)H7c!pjhVcwu-!HU^>iIG}LQ$704h?wmp`!d(#SmuO}3^O)JWHY`5Om&vW(lVZLz zQT@4Y4NBEqkOq)QxcsynIZmcSh(Mx#M3qiyiJB74C8F$M@931kv9^AIEs^iF< z0{m-o^^hTD9h%h8)e>+(aTozb_q~~WSFTPikf>g2jiOB%*V^I5 
z&CY=#uNd;?Vce;w7o<(wd?{>U#vH0+t;S9tJ?{?4zT%Mzm`?#n*A8#hCTeY|^;b6;RyhD0$7TGr>ZQsXuOpURgRO=;7J5$_ z6V@GL)$}J(Vu7+*qiZj!gZXWO4W2~_m2?4(Mt2?3C(3S&D|N~8N-$nPEHtfg@r!%f zEL^1r35Q>dV~oN4M@z6?8F@wE9+#$&t+3%Xk5?MM#6p+bWN(+Kt_kGGMqQj({o?c+ z^nJofsB3~yZS-M59hROIed%czFamv!3#FcgeEC^+T_m(_h^44BDzp3>v9d9`t*hN| z&Zq!^+o1WDaPVB~SdplPW%-h%NWLuN(qYoKl$A$}dqYO36#W59D#9Z&l}93K3p=d8 zhDD<(fz~mXCWq085?)izr=?}RCpwO_E*_FO_Q!mQei^bYb%jqFAO5Y!YRkQ!4U|ti zdOp-^UdE{dwJocQ{AClqt)B zK_4u5R9U;LCIl*?4l(IP#zJ)JpafFfR%#uy7Ea}WVM_Wd#gv2NXJ~aTWf#*;(TDjL z-ust|H3wk_{3snV2mr!Q`F*{StQAl%={Q@}q3&fZ$EhnMk~C}D(xay@tJSaQ;9Q%knn65TNj61 z21IYlZI!L5(+TmHC)DXDKNtvpGokPRHXj1GRSLHHU|o8SK!g0n^-B`R$B?QXmM0FU zK*n{i{KSxZ3HbQ@nY>h6xzM@e^K`|Um2+KaJ@Cq)#pNwyK}MoUE2<@fEAm6naw2MNa$dB0ne3$OxVR<{PAt|P zsEHROLu|zzdOwb7j!3cN7C>;#S3}f{PdtemEqi!{A?4@R`KRV_3pMhJ?-d0fzLYc` zUkzG;K$rzalCvPa4LB*Q3=!HFQ-eIot+AErw(?r}zRh>>XAY7Ttq(e6NC5W+*qyBI zu4IzL1N_?Bm_sFQcb)7Vs6O0{uYLe2sdPhKmm71JqGrtB1)1+eo+UrB8R(CPK*B_t zoB8*dZ!$wJ(2VZ4lAu*r9)0e9Iwxhg*3_(CWd_i17_3bd zY^zX@*rP%qYIih+M=;N|HgXrUrO9zK^FgEP&0I`OPz&&2DqmxH6~4)5=0Tv1`k}5% z;vp&LhHa$c*PVWb&#|>cHjs6xS7O&n;h#zb>FHkS7uH)#)3SdWu$nB}JMbbw@r`F=Vm9Oo-GWt&P!rpfviDq`wTGzeR6H}ro{5ckHePiG`jb;M{@FG9 zJD}NddWfwDS^|>17?&k($sGZ)epiV~%{#9#l8ylSdKAO+-D+5YWUV(K5Nh2ncNy@; z9hqZceL**p5w3K!Od^Q^VZOf5_g^`2hp6uo!4`494&kXbuD(AfYDd7!he$dU{e8>9 z5>6l;Z6N(hAM1%d=wENI)zYr(f4!oN$>mg8q(yQeZvNAT|DP7sf6T`E4_j1Vjpy(C zQV^2NfH!1n1|E*4BuKUa9DLqTSB|80bao-1Q5_=+owfJ)b*&yRJ zE+!W9RRglO?523bt;`b0!b;uhF<+Yyb9oXDK`W_8etW;&EY74HtMYcbPzbM|y@Gi^ zciS|HRh*JbP0>)9s<5vp7$7wA2jshWeDn9x_b;eTv^yWW^MGNWGGyug_@W!G2>C}@ zu4Mc`XG0`dJ!e*55u;eAR6XQl8|C)X~)ht(4S@w4guq{*oX9NufS)76I zC)a;fU0c2D)ha)9X}an6wS7tg_28lJ`Px;N^p-SGB%e_eKnEHW6ta_Mr zke@1h@BY~7IAUe-{jag7WHJY=n}tn5Q>hqzJf~pZsbU9}USLnu^~~%A)OV%lY-JDi z#Py5^$I(~w-BQdDHSilt#dHUfSRuOa#UyPaklw*`8Olp0F&+2AnDm57;;j+(@6Sx$ z%)VDHA?ME6I@kV@Dl$&UFN`u-+x8=2b*kr;c)NupBd^ctvAMfB7PyOQZcdd(ez|@M zT{Uok*hAcfy?2$@Sj28Af!n0in7LZDz6Ow6ZHwD?F#&64t#8Ul)+EaptC_GBzT;si 
z%NJ9&23jhw5`^I8zdujLz_gc{X345+7U%otcE+n#L!R_q+ffHbAxcN_MI+WurB_Zu z({QOfzY3+`um<`gvZ6@=C6?B{(2Y?v`{2uU+|z~^EgdWxr57Q_iFeQ+dEO^roO7iN z+e6{IINbN+GUJQwu&QmPM2{HZgXo`vyA^8vmDvg6dT@WMd5aY9^trBuhWX~S!oHdS}pOMT}(lr6(x zUAZ8h5`4n|MAv{@pzmw@xvyJ&AtA{Kmq_!!xR@PS5#J`aAp5)i=N`1Ltk&e{KTByg zaX8&<2|Ooqd7!drpzNW9ksxtTffWRX;DcgJD&@}(i0ICoF|>U^v%mH_Gq6OZN_KF; z(>7JO9M7J9Kbk>^pLSLcnGMig`b|+p9PBQZGnCkl6&KhI*_uqkNeg|A44vzMnNA5! z*TaV=@*9{jPV!X9_f95QlaZ13!|zRg4RGR_O{--+8Ldbc(n*K z3Nk17JRT14BE0rfNwumJEWRlvCbqty&P~V)1uuVjF`Z=^C~sacWcYyP9sIlj7ShdtUKu4w1bb0N%Rw!1TfA@0%QotAvKI{3`n8FtHMR5ou0}?Ni127 zS-cq7+TCM|&`$WVc9TfnmZ}EEqvOw`C;If(P?P6I+M?0=lm!3FtYU55elptg!?XM&vF@X@(yWCLiyBHlnBO>ysA-AGp3^mSNu5qCBavLqwC zF&uMGE7tBHE9l6u*V;@ed;OpyXLN?$&LHzG=PK7L>+JPv*_pqw8u-?9^KIz6chdO{ zAt(y1`30k>laY>eMJHhdmOTQtYCQsd(&(zJwUWrbNVMPW4?kJ_2k7oC>} zJ9%AwN^(`6eKi~;LYDhrIN!Km|deYO?i<1QsFjD+a1 zRdCwr-{!c7es5;!B=z+NwTlL((#ZS$ih)?5#s0SE^WF1zzl2;q4=UEuSbBC5`|>Kk zfQSiMJ!;1!TQ;CKaK-aBxLh@mig$9-DzuK< zGJ0Gl0=`e>Hr1Q)5Gv+r^H9>Bh+HJ~4gbZZ6E*7Z&}4bbZczRd^PKr7Xg5&)j`h&k zOsO|yv8($D>FZL81j%7)pwU2kf{+tOFBuJTI!CkVy)qzdVUc%9wI=pyl|6(%8ZnmY zMsWD%nWEimu`iDZO^``+^){Swfe+(5FlNaYzi0R%!pGe+`u2Nz`rJdD)uwOx?UF(C z%nmM~Ef*Irb?bE7&Kbv5=-KyZyQei-7iq%=##C$2C5sVH+d)~EKud?NIkSl{3)A?zqn#Z zN1Q70L7FRkx*I!AWO=Tj8kyO5nVch!iNdoe=k}xTB|F9QS+OFgaSy8*p_gym>D_lh z(}-l>720aNROfh6;Te#U#VO^RrHyIy%LiQ+PY{1`aq7nZE!)0+C97Ce2GXf~`AM*8 z`1#*rn&+|9UX}D%5%qGjR|~J8^~Z5J9i6fY{G>QB14SZ2w#aOvQ3}7@C?PKBcKaQ) za%333-T29~11Y~5v~$DCekE+?d8SYwIdA|{Pm)?18-LdCgyV00(r}02B0%Nie&M`r zLw9TSNSd9UrDS&VT;W)z+IMd1O|=e|y;_du`jRBfBiAuIB!9AFd!E9?Y^U2XcbBv( zAl5hCWtp9_%tWx`k$oIP4`gx7`yqofDURqUH|h7l81mJ>MfJyW2lBMr?p?CvjJE8# zE0VD+-Mf|%#XP=vprkk=8 z?3d@s;x(bkvKHyz6sy-!v13v=Cc`Z}93bCA+V+d#zu-;(#aKe}t(r zodp?PnG*ZbLMMaSqPh1p?k*t*EJ_Q%>X+>l{4Exr_$<>duP=>uforDxp&n}y*yu`k z*gwsX`%r;caN!HporQ^B3-lbm14m2=Uwu&ReZ+)tRJE?EGk;88zfP`M!j7peSg-``u!dOZYrVjM_mUpwY3br@Q~%As1Sp6>i)(&o8rDoxTDb`mN)p zbX^TgyFzG?^TUCX*8}!2^8r9r1NA4`&VZ6Rhm5bRO7A3Sz{VI2fdBO1NIqO<=T+V*=J)ETzs3Ecc225kbshW&>xR9? 
z2=D~e{`e>$h)eVHJX9@7Z^V+pS=syLx} zGN`D|!Jbi^T%jY7N~o9WqD!kTH=ZEyl?{&I@@$PhW1LnD{ua$$7*}y41p@f{wn@{W zphLMD7T(6A!K(lA4U@{%vWFtHj$S6W@VZ_|LWL!C@r{38F|WOPZ%g!X1kDXf^ZHms zs2-%2mf;C0`brozRxZg!}HD>C2t1Sw^2| z88?M92-rDx|ZsvU%T99yDx>B`eaTEAHg7N5qxpJcc! zODc{HX_+@zh7)oG>+Bl{!z90P0-Dd`Zv}tUHDX{2mrzK1nPd0ByXq|c(Ro=jyL_^y^e_}#r9Jn7J+ahU~f$ zT`V59iWG+(4E!w)k_L3NDzt+-w-TU|#p-8PzXEBb1qceW*^_Fix8rRZE49~`;!a(6 z30czz_%*T^z!-fV6R|fH46JrWw|h{@RfiNr=p5Lrc3dKW3UizCec3sF()Qn9aV0x3 z&!cV*os)aJ6Fu|?ZBAA0yOHf}Ms`QhQx}U*g1b!zJ~7I?1bPwz|NUkW;Xolg@Ob4X zpEi~QbJE&jF8>3&lQUyO7Jdb#wkUFjNC2&Cs!4<&|p*ZRJ44){6yaZ`y~&MV4^AvDqUv= zOE*7V#m^~qHf>dJ6S^2+vzkHXrWZMM{VJ$*KkvNuUDFkf)A0bWvVs2K!u-sRWQdVj zM;w7LuP6qP4XcM}i3Py`6ffK@bG07*@Ae@a7oAsJd^+@4+(_>A{DH1s4HKy=mKoLv z;i`9ftf)IT!>$hKRd@2(XH$%vR6I$fY%;`g>L*-!yqVkEKvUx(3QgoTW@eu{EGv1g zZ!E?qd;r?LVrkBB7TZ!d+Cu5+1>nDto_Y;TQZh*5N&bz>w`SsICNk?y%|))C*Jj9D zPb;J+j-S=M{LY73&9KRFgV2Ul4a==ZhIEPqs8s-cvMZsyz)q0b#ei(ogIDBG_&wYE zwUF7g4*3x;qH_u@>DPnMr3EOiQ^<8peSE|>ZtZ1|*{*Da1qACwa#w2$#bc+58A
%NIwF-+sr1~`Bj(o3 zNn3Li>`(Q(r>3jow%@TJ<1Dy;d6@-l4gyhpr zS9|>WZnaE%JTX6-tOkusN4&n>t>y>-PNttR^L+g&f6%MhD=Q$V%9Z=(YR#Npz;LiD zqQI`ReqTK`3YE}K?Dy+zT@_1b`AaTG4Dw(|e-Fdvij{2l;(-VjmN zy?Ewz(Z4z)?d5-;Gbo!;&?#B-*q`7!pJo4#oqpQlU+jx{FKBdRwsnLMZ+Mg3a#QXv zuHUIYpJ_>U3T>ESR=@HuNG?%Hif4gnyVAOZwbhM!qw`)xG&jO}VRZh|u+l4y$>+u( zI$UN2nsc?ge_*>OL|RQegypku9dqo`108P9NlW*Z>cd+@W_PB~s12FthG!@U(9bK5 z*al5XpbZNlX}~1koq-YweJVG1SS>_vSyKmO`+nrZR%!1xk3ybu?R=($xp94)PFHY+ zM1_QP>8md~HnZRq?5zO^R42@`vnQ9cEp=`S=>^(t;1Zh+Z?M=?Ran|57d#>7P^zeV$syixs1dZtGWw$oZG|5bE zONlv~-FdwLoZ?e%2v2$5z<{M4;dQOu(lvY`KEC|+FG>aC98^%R3q0o^+zw*W&SF1p z!4`x3lM6I_3U{Npdt?<})oJ@0FC$Ln=LZddcj{cc&6k8mdvoVN^DI|RMZpgDFe21Z z_&Yjk!!lS5Tbb58Bs)t$pER5;CQ?;ZP1|sO;+7oKQ{gaxt_i8XYA6#L_~Eo%d}gp{ z{KpTD1v!8MO42*yu}lrkOi5&N&dPp$*vr~z_MTc;QmR*MK4TE-F&>;an|k?bt6M$% zkCI4{`qkEC64~q!W#0RCKmXl)*dL4y93*Y|gu?POxMI1Z>~*#!zkxU|KsmK*if=J% zZP)N5_r&ju`lXxoPnH|h&aZkM)yZ?S*)U{!>OiyRl(NImW4+>G_Bq+4gpX@FHRD0W zCZ$(Y#>fd~6-dq`1i>)|rv=LVIKT-B{WAc+pBu(TbciCqnpg9`B}dKR^B zX2~DNjoj(ukeXsMUu}O@j@q{s;^xqrU=`e_f`^{5Pc^z)xz&;d-pg)SH5H{a;@_Fb zeKBhMwjPl|T0n&tv7Lz>a1G8C70F3Hga-!G`! 
zRZZ-yIh`hkr}A@KOdF>QRjb7s%Q@RsLS=o=(s%X79Wh~Hn%0f;bKj1i9BD-V#r4gs zaO$X*bEG_`xNVZz?6U=Y=Pj5;Q0L>%7#iq@+>M~ZhZ}RznXZ1)@OX-MNkd|ITLqww zG(y7nAzn@Wid`z@qsaM3DKzg)eky(Qv^)1St!o_K^ihTvy=2cM`&pdSuc+w4gm4fB zIV55&Lo4f8Gw{TK1(M-?vpM;lp!jg^ieD^6%BU7Wt)zA&?B3h@cwmj!@DLtYn#^?x z?E>Q@R=jHN8ZRYl39n-aWvo@w!*?8rG_>D8k9$aH8(bEAj<;>WNUMW6=UN5&xd2i2 zkhk*}QeHq&Q@pa3byXa+Nhcdm$lEr9}x^5IPeXMD3#{U6o=nW zn1Q4!$?<^4*X~)HNf^%tv@Hp6%9Cf39UXSrJXPfvQX{4PuN6k#@@%7cey{qZcKu6z z>5irR<(fGd6gL#~>s_7Y1b&IQ9YD~*aT31!MvfiHT z_|64vbwQPb%jf1Dxisy2HO>r?Hz7izM zs|u~7fDFX;dAYuJ2bzgZOJ5)QW+6W#t4p6iO2B0UM*cXw*{RYHH<~-o6x-Wy^#R3; zGRAlzL|UHp*T=)-3PLK5GbH{)h~u4=yI#Zty3Pk0Da>(nzdi zRxW-Z^9~)7rXKi6G_UeY&g+%djL8re5CCt&NCl^kV{tt1#aC0Chvy+4r1r`P`E!#a z4p%`OU%Ywe$DFKlzInDU&P_+K(DNh1uhnq&c1$0+fUjFDP*- zLWI`2eD;kBt8jq|YhHaO-0ZD-s9h5)eKRHYQRa)={7+FxfjG|FC3y8SiMVy==>39l zwi6;v*0S}4CbF#}I}8dyW5dx>WzfL!r7<3v+2oBhJ%pG#dj>UCMZGNVc>YHH#@UGz zv$Sr{_N)ipl|m4xoro4@Dm?U)BYwnZd~s04cGsM_lpRMLtL2@yzq@>b_<17=-yfPm zZG_WC|9ECT-Qwb*<=-!ym<;AR&OGhWyvKntRscYugn=gI0IC zLa$-!Z{8YbAmQ=TdU|@Jk7}Pqak-z0-5|mLO>qmQ$Vo?CII0xvPs;@jk+{| zE)l5`AP#CN*a6eL0P-~pqcS39AX#?LUh}2)PND2|CKLxKL*$X@yaTj-#Xy*s!WBy? 
zOOf$f6n6AFJku37b?CnA}v^$_9I`%TuPFo@s!i3+yH~L#V zX`ySoD7-*0ew^Y^qi1oNB)qn*e5-2EN9pa%hl$3B>?;ZenM(3zk1&JH_ox_rF@|?( zwADzkVS=4q5AdZw8?uv6>BWo_y>G4I&?DaYoC22|R=w-R+k&A29iwr>RIRwUCmK3e zw-toRnG6l}r&ZUI@@20C`Xprn?CvGa8znFH4(f&Y>5`^uAN)&oq2sn3yKrn|QdRo< zwHW!ErPVR&-YXkIoQTB5riadd{;Q+l|DG@Z-@sA+lkQdx9?l`FQ?bH_qHSLDxigZ# zKrEY2`$?1*6VPD-jL)PJv2gF4l15H+fN?gBVwUx4B55gpR~DiP?$u(%CU4$;9d z=|ibx?}$p3YA**`n;40!IdVWjegNDmnmy#E8T~HNCA3JQTA)$eyK1msE7j`VZ6gt_ z0Mi}z7xhv-P(=FsISx*am;6EGOr7IYioHtUX(G)?(fpv!TZNMPWuwV(K~^o0qow0i zrB7r54^aQ>R7RWTX8(tF#)IwqcafsnuI)gJCk>A6mOwZBfGxz}F7WQa7n5I9RMz4* z0>s3#32h`wC++KtDgTMMH@Ru3P@vVjebV_SZ=B!I$7v%$v>6nwt6PXINx|Wg)FtyH zu$aKtfyq!wupS6X@{Nm&@f&RezG^kGcsVoN-2evqj|^5erf9^zvljjKQb{zSTRg-e zHw)lfujo}~P7$jQHjT1B*izQTZXuPxfq_zmbGO!iZlAyM!aW3QB2QW=PURyZmY1@Q zYm!oGY&C$%%NWp@UCqik1rZ84kFT}&*$Y1%QG}=E)}Se>y%yFMvaV>)i7e`B1jMoz zyaob|nJIV_+ng`3eCO9+UM~eQGu=r!wf>45T&M^hv+oOXgn(BeX7%Uj_#tFDOUAp# zBpBE@b@zs6}5g?7DV(_ zA#xII-lR2n`4pdM&|!I&!^`g3%n}Qo>oi!)qB#;B+en8h7=6KbB_~h#nsoQb_DlX6u=FzlLej1=Wew6rQ3`T ztJZwti?l~ydkYX8)XiQ=*4LYQ8D|xxaz;M=2 zR;Y0~zEb)=XGDPpFjE&ztqOsR-35_VO9UZ@nCt8P@nsyHulPpve@>Z zQkabxW5SpBa!~nz-J%Thi}n@B=`Za<)-A)N)%ejk z_ccBu(@Z5M{7q(N4vVx7y-xC(k0|(ZM|!&CPW~tDkxu2Tt9GU)bu0H*l6SVr%XEG) zJnBF%L^r&oW`e%^jlTJ9fMvja>*N`@-P5I9N!Pq+zG9 zI#kvX@QE$viVgRC2g>+6BclfPb^4`z&AxI}vv!aRe~ir3=nY?@F7 zsB5BZ2sp3-Ks>c!5U?fv^!!t<;rj6UQC6;K$F{*KSA0hCpIl1+Qr$KluA7}&upzqg z<>1(&OoYK!N_Er>JKE`Yhm-Whm^#aquiei{hqm8CxPHcu{KxjC)0G3?=BmmXye+=n z`3`scasNBk`X9U0|JZ;3-;gr_{%GNDi)gOtVs=G0+D)aQgIMJn3`x_Qs*Or5Z+h2+ zkbd=HN%x=mR&W2`e5?PK;(FvC;_t%grF^{(bt}=zqDjl3^CE%nGC^<>wq7fQ&+Mc< z?)y1`9dpS1oTKX(fPnUC|L*nDw(nv?ku!@TXM9Q3Dy9aUxV<3TtOT}6P*T z=^vVZ7~3Z%j6)yLa^iP*ocv?+4+A?-YBcbvnEp!puaB2ATuq8K zbha(#8wkJOOe#xed51PC)SndlvH3%>aN}~1-?Y41p9mx}iRIF|89b`a^Fy=f59%+M z%>H|`@}u_uIVt~Tsr>(8QpTX1H)~Ds9)`m$jdSX9`eE5f*sHU4;4BUT>qI%?-1+-q zD8+=v)5|6%@~cVd`MB`hWRn-NCk3lw7chQLtk0_5U;ZSXmyW)~LrbzswYq;2*K)#&zCI1vlLB4*C7 
znwq+E{RIB=ziVn`?V7fpR2Hw}&-K5dL=n8~|b3!*!NVQTbD`Uar*bIg}?zk=<{GX;xz7Hy7-MV>1_rZrFd z)l)%#0?wYfJbklRre;C#X^HB%9k^41nqhW4_T=rKop=sz(GKeIk;(KPm^QcII*8+1 zSht1#1U2SG}L!OESz1p%?=e49O z0;=5pA^P`j*^Y>dxf0T=kq*xxWssBLS>SZEz0&B6dl5RvC8(m(xHZJ|We|eB3qq)? zg259$2L22P54!j~UY4!G+(qbHX3d;?_eEClQ;13p*`Y5T_h^Pj*Dwx=F=~3Z71fhm ze_$tJVCJh)R@$I;|B7Ul_0W|rU&aF%%z@>KFEx7UQIPRMH%)b&#N@aF(^oS>U3-9l z{cy&FiB%PJj`xta5-=MD@mpHLPmItIOsG!#S5UmsiN{a}o)qIvsyKo7yz9kKyNWNyv+uVi{aX594(?$Kp==Ovv}FuU zR#I^0a^kw&pj`Ja4T}v-ZJrQ6*JT9z^YaugWAN@>lolU=cu z@4i}87~9W59ezp`+gRqsIAHtSx72&*S0~B?B9U>c(ja97R@=+8>}K&}8Wi5zMwH=` zc*^}{uuy*9HAf7%hNy1@g`+4eOqr6Z|MMcDBVF9~No@};(=VTkeTTiTC3nA=W4z)p zoW!{+6;TL>fgpKgy=aAD3W*xQpAS};(0P8XcDe>FM_XqVeK~#IHh}DH8c*fD-m>I4 zti(-5)JG-rRH#Zx10cR$iT(L+jt3UpiY}SHocRm9gs|y01Y^&a)EJ2X?lNUWYUNBS zrJvXOGW;I$nh0b}q6>K9vi}sWD63Pu1iw$T3HW%BZV+A2S9wJ~$Yw4$Ee(5GwbXci zJJk%7y6U{w$MOIGE%}4*NIjs^G&);Mi0`aj_|~ij>mAKEoV00TWk^&TCru zxShN+VkBM=Axwz~6RPQlIQH(XjV`eGFKVO`Y|8M#YJB0K$FN~lM3(Qb526}v#?G&N zXte|Hr=W~y^-EI8<_?E;O)k!e_lz-YRXls;NTlXGq%-tYPgUR5pnfZr;Tf}p$w1RC z7G^|tX{IbGbmf%LU<`0za@?`tNIsL-X_Gw%)PqGZjH*bSQr%I&eY{A z@=osnc%_xR&fyLdLy9|7`J@U``yeo%nt|Fg`nkjzrK|hxt*k#^Qx)OLhdKILUh^-k z?Ff}*!`F5V>1Pr&Lgi`}r<07_Y?7^u!nd;%&2}M(jhaP#<71@}kxPsT-Sf8TDj{#k+~dn@dpe*8{Tc7dqe=3q_QnihV|i<)=NQ)m(> zi-PVM9MuvZaGf&<`nNs*SGW8hIYe>&WQ6$T#bU0VRG3I3q&Z-Dl$G513xZrV!5c3e zauP^vx_eXg79Pb?>P-Tz1|`WIvxb)NGu(VMiU5) zmlDYBRY&JYrKE~`!*d5-XRj~YX6sM=Q0y6jg&y_ zswa*0I=1RLHY38+v-<_d`<9;NKsK(CK*$ebNoH;0qbFGcvNLvWT0yHsr#538#q8M9q+iM0s9b|`bG^u{0 zGWdnYmbrbGuXoS1nnlP~ct(3r-w{Dj`PHX`{6DAPXrr7gIB){fU?ISuwKjT`&!fOH zrC+elp&phD;qj>j8;{#<@O^bmwy$p6FZjAM;62RYCIrGXYE@^KSPn|~ zn2l$NXOrrwn&-;`#FOFmyLfg&W%gfOPVahSXAB_EEy&n6tMIz+VgJ2Zw!B<2VDCd< zqng=(CRKRyWiUI!5bIGF5hAW!+x$Fc`Nf0i5#?CzTLx6ih^MD_i#ZHUU2dm=dn@1L z5tBvsO6M+_9yI4ne4^Wx$W)$&AIInA>l>Y2%cAlGtpU)!mvbzh}89%jAG;uDY-`hsZ~s=6`TgNv3_(E5D_TWj$CDDu@UHtv(h z^7jO?;qt4TMRF>sa*tSLO$Sfau8Nqsmz0%NeHd}@v8aJEhCuRWXXafgMX9}@NaI}q 
zb(snAQ-)Zg*qRr-gc-s*1)8mZO|*UEQKAWWF`Jc>R3x)${UETua|qe>pZF>_ZPj-6c&lp ze_Unty`TLGfX)84R}#h7gbY4-DHdIG9~FrTNA`TM&GaQ0S>YdYt4qdwe!eyp&76fC z#C~1<^Vhg$5o@Y?+s|p#X@%du^xk{#`=;#tPPz~@&o#h)rgYoV=c&SrQWHVhD-v;E zYv-ptq>i#gjDKl^);ht|+bmBbOp@?RsgQzu<@*(NUu2^k8s|LIDg|qY+5tL}pfpLF zsh(B?l%j`f_c%HbCHV}QmhAl5>q)lU{R5074v4FoyD@S7`V6rcvBtAO!NsjIr^3bt3b0r13FZ}zKTKJgPaR*Z5c)-}M9Tkjtv*$B4mBN1dM0Az!azz=UJ5$a{T#ksKA!grCnG?RoiTwu zI|rHyf|w8C-;OP52#?1*^mg7o;^6Dt!AaB_Q5?+7P!uSOs6@UD0?|f5_ludbpu{c|Z7T zYs7LbO?QoM@+TKU^Yy>&{J*&6|A@EXA9Of%!Xy8y&V2_+OtDE4Fa>g($&P6!4ae5m zO?jutTDs(LJI?9%bqb;zIwoB~W+z`qMU?B0`oJG2xZSHD#Iw7#Uh(>ZHyd%OD?F(& zvqMTe!H!c_c5jE93Mz<)aIt8eVPl06zvZs16?;&#Yyol5R0{e9N8JwV)x%8H205m% z1abJzk0A4*osqA9?&h`WsMqg`O`pi?pj|!L7y~sQ3Bl&9eEU7ZV&zm6=R+F>H-=e~AI(h@4 z+FJ)X|9P4JoST1MfIqC8K(}8Gp6Omhjd)~Wa$7LX>qh$HXISg!GwA`(GW#cUC7!!+ zNGH)pqy~Cc=Z>OHP`?4W+<_{4BfsJubm3-ZRiKXT1OpUcz{A}i&BA{pJJebLL`z5Z zn`fHBLj2P2UCzl*LnLWdg8PI_SF#13(nswO5vXFA(TK=t&8ng$&c4w&JHP3Rs!k&e z*(N{>xG!i^I6OB3iL~z)=v6~rYAYL?Yh0>v07Vr%ToPV6R#*A^j@dw^{kGQ5wlKY?q_+`i!ZEj?--b3R#$`oKD|>xh+J{CUx(|RJp}hrCQ-T$1P1!R z7NH@rboGot`%PfTQ+QJHa{Nb7U<1C5(Klbp!igcWXSj8k{l4{o9CZHK3A;~k4Qu-X zQ1_xS8;P6S$=bXr;;zorO}gf+L#i;-;!=M9sC%A`N&!a5S3DN%=v)n5Vnf;xa8@e> z>0miB+84Am8;<)z<*8cT2Twk5F|hD@ZtH&-WcG-SWZKPM2KVS0E=`Z~8ji+~4j0Gs zymfErzN{7DLR7&xGrUsmEdv$U;)5TPpv9?{gp%6iQ#qTLGB9WoP8!Ac9f$DcF5=^J z9ot#*L5%@y4aZW_y!KLi1F@4f84IBT>+G6U+4ENrL7YOT;SBp3b6~=6y-NXSSRN#N zoy2PFfMWFbM<r1ii|KbWR&RY@9^z!kcOc89KilC7U$jc8qh2b}$R&6Qf z4U0@<7<_&;2@C_^)PfzJpTHu&zd#-j4lc7$h9irjs6ej)iY1JLwAH|!^2(Ah4gU%e+*7yiAn7yaJZnvdgy*HAGLQIT@ksnj z&kcxs!Tc*!{0g=Vk6)b4#ku2FDc(1zgR+*9k@caGu`|aQ-a=q+`OfV_>HJ2Ph*(S2 zpixg^GB>ZyE~U>UnU_Dcg9AkK@ahVk6{PA49K2uE-z?tFRCcbZNz%fjD{;Zq!Kg1} z{<|uk$y+{ys|izUjjY|Va9+GEMVXvA9GE;e9;H=F5=2BWvv?kbUJ~3@zsjpOU?PX^ zyvkF^hFCh#H2qGK0GY3t2(;Hs6Sr>vH`?W97|h785Zl;D0w55cY?UF?ZCC#`2D>jX zy@H!lgpUJ$J=|okG}$yO>xR{rLxJq;NTEx<589o&kU3XQ}4peArw|0`GHKh~qwkv*#biW{Q=zTo+>)`Rj$iqwWe~hq!1K3j| zH#;v)**z%8#j&p5BNDw=gsj`o2*+jj`8~CcIy|i=UGM+vYYoG$iOHeD@t62frih%C 
zs=!=61)Gd+)8ED{9>6qxIpndYkwT;#wFt7?)UmQRLm*jOJI^%FnPl_`PKOr^@_Wi( z`)*E#=Vm|TvbB;@qqc4(`h%#-cK^K!Vg1L--5;N6G->=l?R|GxQ(3n+Q=JiYL`0P4 z2nYiR4qZS{2STI-3?T^+0wbW1KtO?nj-zA22tiOvC{Za1Nl1{AL=wOe=~YUA5CVhr z(3BFIIyduv&%Mqc-}~MB-SWKmx!1q;dCocOoW0iGXYaGuUhDU>bj0sQ0D>>hhdY-N z%$^rU$SLBq#OqQtXSbRD6tBLNV^x5J{$%#;MxfQe4~MW3Ma0=$x_?SiS!_YHl!Q0N zaruTJpO(yNmCYMeFR*4EFzwt4nsgVo0ksLO;AV;zZaB*Q<&FAxhAivy zwR}Gp_AFSG?;n8Nid#xiY)}upEDt>hYAeu?JM!U*cPy&ZE3ni|u*40{zA*NEQG}=4 zPz>?JKHNL1rmKBz?gB5u)Lamc2QpPI%mOH;xvmD>>t88s=fvm~Q%VYG1TzO^X!mHK zeVV$47mYbHB{+RNA)3(X+Ke;GwnF7bJlg+^XM!=myf!<#olCZtMAm8v^>|AzzK5Du z?V0V&e}&=4*EcIF>4jgR(a z*SVS?!-5aU{Xj$eftn5A6CcM=!;8XY% z&h8!18e!?Ue7L3DKAgX8{#$tJ-hsV{+AE!b?2DX)@2D?z7kk+=uDHs<&XbOlP#Pv8 zz4&m2M#e~6$z66^Qp%LPv2q%Kt9zFg9eXVh{o=I31t8SPB*NDC!1>9Pmg!nrZeiM= ze6yTz`8&a|M}fpqD0x%Vi(1YONyg91&XJl7yPz&_#kpq!)e9CURw$_A@n3gwQC&$B z*$M3@*#U1~HS3jx;smP4Pxq%g&KZ&SuHqHM;@75Enx|WI zd=K~@dy=ULvztP%AupBsMEW4NV%{MZytD%MM99e-HXf5X&{Vo=qge%a4c~loy4l1D zvjRWc1hE{0t-(PY`+N;`kDVyjIb5{ljK>|hZJkG#SLQ~Q^ZSs?Pb)kRDWPLTvg!I< z7cYX#EwPlKHkcphxRI)dRh_W=fnoj)*UM{!+3tEezHKym%q=B6R2n~puPx!_e->Oh zyZ~!MSf>j?wg6lCi^b#{CIMH6hC$Cz>D5pS2|644gwIXXSy8ctzSo-t7<=n z)5%3!*Nx#)A5e-zAnuk#DePyUdpXW~ z_zv(wKTHo&+)0R$63bKwD~g8TgqamvavS!1hto8e6<|U$?8pFIC*J{4&0m4I-XFh)%!32o!Wq zI~>^!=ygy$iEeA6%PA-zhjWJ=a{Zj{R9;X`yI`!GARCH`Ob@tp1{xtZr(27&LrWb; zj-S_!ozqrQ^za4B7E*imlF{X6vHCp=?!G5ulOCbucE?sYdPe^^K2D5${vP0v>E)SW z3B2~z$Qmgj9qZFTnX!)u`(41>OC|gu4x%etEN#rJVeP+a+;<&~1I{{q)6c^+Elq_3 z9Lw9kpjm*SqTOvxMJ-7LVOb#?clY&{0(tO0q|&-a%%7@8NlxvW za0N^H?$O1ZjMT=5zsDTkyTmsZDWg%E)6)J+CAi8;GYb~o0gEnOeTfmv-^E_a@>8Oh z5EC}^f1~B!XJ{A<#JlJ`!4oFrqZ|V84r=ACn&EnnhVfJGI1w-ZOD`fZWdW{(M>iTj zlR5DCfF{&uW}$7wJ5_8bRoY#ESl*(P~m=a=z_2hQ>zIVV~wCAwb1EgXzP-NyRHp=xxzF&o%U3{)}mwjkTSgaI4 zZ0P1q3U<{qVy{i@xKbQBn^Cus9k`xx1(6$G5^oau)~!LHoeA~Rkm@o6uqad6^e%+HG%ga(^QlUEYo~ZCk~*(!?6% z6tua=c*=cK-wp<7D=YGiMB89qJ~_-Qn;G0`TxYeq#aK}MseNYSn{z}cB|tO(p#AB65g_JB%t<)oi?2|X0LhhyLQsH5DmWn{x(POXLE z(rhp<5I%mj*Q;|~opI(@--8T{l+3r7@B($wZohHppn>d 
z_1*S-(A@G6Pj{k9XNU~D<>AE|gihsim^VXWm6DJ%z|p&9s@dlT z)}$WwB$h%0yyu5T1J%v33!n{+*mDm9EixZ@t>^>m73ErVxjrnE=eEx+WsRKXu-DUQ zQ{hQ;M2O4M?t~M8Hi4<`Cd!VcX=_-eU+)60lycPB1g_3hF^vL1N&;S0mq`NzF_N*w zF21$p)k5yo!pvBnPxkABxSRCwPOGg+X?Kppc?PB-HFls0kSxq#*tce1$mQi?{R6EY zmKC_1tfj5>O+bAYZuTAFOjWa$uU5=wDkR&91-`Dw*?8)pk|iGCZ{~T(gE*#F-ItXC zmSZ3{d|IC#)~b1vtfTDuRgI~D`7p~7W8siKX6{i`SjvS#!7sFSR?SGBNX6e_w3z)o zs2n5%te{xwzx|~h@JlEX`X1FrWM4^m*_dTe+!?a@`GvtAcq5i_J7)r}s)9|cR%uNq zR%m*+Fwh0zD-!LCCjBC{K8!yLB3M7FK(09lRPS`$uzm{ysgv7gr&2oO6mXy|@7#sl z&HY5Vd|-bF#PP@Nk$E#_I-%$6tfxu&Z(DhUOjtvMeYpZWs+KA-7vat&B_Tqmp!Qqc z5y;|gJ+h_oIp{gc;e6|2y-|Ul_MB!;0d=mf{X}ndw-k}f9e{j5#MzD9y;O7T%2s{= z)-x1(a{wkni;dwouLoH&ZmOo@R!TYOa1cYzyWi%|>b$K}(?`@Xm@+%cI z7&=-|F|f%v)OaQnoyAg-A`Z72=WFn0toRnVH_}8=1v2=>%)S_cE&Rdg+G{o5)htSx zqvJO1e6Z&9^mNV-0U=>Nm!MFXgvQ)i^8U@)E)D74{W%uFZejX$1PI$?(~1*;a($_$Wgw?fu10sJnm=&dR0H z%G9-uB+U0WTjZ1M`gqh|FrEA0zTqxUA9Z7^ zG|t?BvKC6~NnOXi`%VFCmfKwlH0A!WwFnbjr z=m&PQyv62b*^o`IOLsxTrDTpGCRECAciQL6<3Ib!vEZgozr3M5_PUhboHah2;0nS~ zrX3yecrL!U5U5^Iv^NLH)?CgPgB9!C-{1~m#OSCR zoq92?lm|BSR+}IUhwILLPitdhs=S6;j%qu(P1FBO{LRTVg-3<_D!0E19!{&2w}^=x zJij&pTv!>Z8ar52(cACP=Y9gcrB!k*v@6q7q4zwMyTA-ze17qy3~9G>Cxj^T*~iKJ zv%dx+9Pl%yTb#TAq#7 zMiy=h`R zJ=g(t2Wrhy+wzkLz!2d!!Hni-Do;8!(u=BXx9(L_>SFnvJYq@x)p$Q_ODMK`%Ckc z)7Mb1R+rm8G)|+{af;H!^REklRP6Q`V@o0Dxn9mdrqd;MqY8sPTmqfb=(#5a`8dB( zAYC!NEx_q&;D+xMd+tLpxenBU;r2*ejVw>pdpkxg91tTMJqP>cdXCq3a3J+vm~$A(S{Pe7vVA8Enh(6y(vF@afyALL4fm$h2k@Dxl&sB$s! 
zYwNP%yEGOnb7MGsKA|8Jf4C^N-+tSJ)OyICE?0tQy7<7Y5=9a%R7x9y=MF$@HqU%J zs-3E(LwMy+jpO-PCLehXZ+bz=^n3-mZ`0f`_O+TssuV7QYRsij@ddjHA7nBeu zwo5?)y;Ngb;Mm3fcT%)xW!ny01~gDw(V@RN}Y`;*2T5NGT+^yW`3Vb$=xLuR%Q zeDn-BmcvJmLOeZ<$vJ_VE9!crDQNgYQhwb}gQT=zM#TlVu5SO?ryb=I?RZf>r;ewx z@i-RKFrCulQAMlZxGY60+$LXU&v$wVOZ#2o(@pOlP`49+yX{0!uM`k(YVmNRr?U&c znjCm%skc3#YaM~dKz^GZINhw$uz+h@q6M{CX)BUKB)#rAxt} zaGtyN5!L)t&}xhzGA74+B}es4jsOu{gH%Z$kg{A5`%XZttXBDH%m6Dj@dNJOMeJ?q`B;9?@g7z7jY!wQ58{vdw40e7X8zwd= zdaFp?1a#H0qlOlu^9#oN^&ggoS7;Lbiqb8cd09kZEu>CZ>(jK>{Vlifp_q`}4au=_ z%&omK55AN>xf5UIfW>Q~8@=wg3{>Z9sPWweX5m)lb;au0)A!#Vdy)pf$o($vH|CJt zdIaAqsG^9DC%5-m2gExk7(=ygsm;*T7nZiH;*VEet{JJ~IQ!Sm*A;l7X^dF{cg(Uf z2&FW4$-b!pZP1cbVPq2J$18`*LC=%5PBCI>y2)Vi2me)cfeO`O*#CZ5BQ=U zmplhU0-zElJ8Ja>Pf_=P79|B7wW`hs5qIicEbeg!o}flG*Ng`n0` z@m+>o^xXjb_?H| z*WrrwEA&k!kV_p#U{Irqup(digpQ>fJf@00lmx5?`_o zlQ6}(AxXBnRd;+MD^1(V-kl!%v5kHx_-cZ9JfKbJ*7Rzm7(_0ir01JQ_*SY+TZwV# zC&p@xI(g31{?@%4Z#J@~*Ec#HUnSVLPS%cANtl`6l=YNG0n3lC>+<&{C1O(LKyk zo3>E11_7?<8*wgKXo1op0iH|29vrLMSi7gak)?hWB$AeTgxO(XB_u&KOcxB@^>CYe zbmU-8Cz7$}S9{nvG8K?&lM)K|xm9k2C29i1??!~Q0?A^zynIqyKVKOd(s5;jqr|`O z6YV_ydR$9CXVUZd^aK_^hI%%IO|9;BGIcAAf}7@b2VeqHgJY0mxR7PHDiVNp0X3DQ zGUK@Uknh}Q_$1yKHWYq3ae4kCM`M!m7&_rtF}imxsHtOZi?xU z{=kHFMa+N_&Fa?4kcZIwwVL?LVdGO$y37SAniiV7lp*4_HZ8hmjwG%V=Mw5}5Tap~ z-#;m!k?Poobah!e0d}`iW7P}!((R45(kX!%R6d;_xiB3)bM28xuOgjxqEP8JJJT!Y z{8GPura7l3=TN)aER`Eo<`Sla)0MN{-%CqTe&Js)j<`1flR&$nBH60ERg4B_62!-$ z6IhGzlc@oDaH9%G-P?$1f-c2w?S_MLN0URv3jIM-5-;?ovMolbJ>a*3Cxh|izH4o6 zw5YlY!EWGpXPF=N)}}S#r9aw*%0nghxCOMa+A?`7 z;bWYG4q1^E0lK*xaJVgCk8^Mq?WdCD;GVKr`!b*gJS{E;hcG`@kabH{rQ3A(=|5zQ zJ1NBljx9$rk(t#RG|+^P#Y|$li!aul5u)t4?3t?_x)FF9J3F zY^1Lg!eV_AL4+Q@A{SQg7*qRuKvdlcnAV7j4Wt_V_5HGv;Nr;^WR&EvJ<% z=Dg?Qr==*|)Ypa$S21vJzii@Nf)q*(Pb}&5Rywh?Pj4MGT-fhciN1Pzh*A^Avd2tc$WsSn1iOtn z<>Rx16~W@D!=4#|ikK7C=b|}ITJmYO_VqLhgm>HLaq-#dCCePjU{4^gXHg2OD!O-!vqqWVEg--yDu7k$?qkQ%}Vxt4r>7 zAf6m})Vce1&5ZgB*5HvRJP#t2I+zrC=uZ3TRu#GOhqKfR)B|*6jludiYl|*p`+6Ck 
zJ;|?^GbSuKMHZ&a?tFju175zW!;0+~?gfW2OHyL$rhUQrBuTY&8$lASa5!6QO2OG> zVR;ic^U%I)Vf79pvOaQpoa(tR?55ZR+F#Yxs;dmW!t&Pfn@e+hqBQ8BlH!BRyH`Qp zp%7#nn754{xK)v_ffhlzJzCgTE1)!sPWEcE##Tw+gH1*AP*_=Lr${#No8G-%tt?ny z>Bw1M!)EyFHSED>qI=3A?ZBV5GwIqTnVx+l2jo#bGwI4Fl(bJYCgM-vzn6 zG(zO{bp}`Zu@UzeX4&AjArDEjy#!{^3ABK|uMvjwMK1Nd>w`+0%&x)Rd(?Aua`)iG z&NxS6hJ4`K{DrL^DFRn&hLZGGUIdTS&GUDNCcWAT*}hf~bLfk>dgAF`PejIg6Wntm zt!@1l@^ihw6lR2EED%RQYx`+w<4zqCClQD9cb?}RHy=kpRuW*nK_@NpTo58=T|o~V z+Fx9R39qNNx(^ivd-tcCm$&7rYh<)m;-;K_N&OwxQQw^;#0VrH*ZaQX_t5jn2SLSz z7zJqDP!3OBUDrIZQ!*AX=6BZTR9Nt!p{et4GRM#U^UCP2uI~Shf$RTlhkw2N%w_s{ z*HP7-c3wR9mxpo;*QaPH-a>2c__L6huT1rCu=!wR=2bVnqo#n~iwWCC z5hhGR2yBY=%?TN1=AatdQ%n0q)i^ZS_}lx(Y6qmi;z85#fyG;e+~KiBmG0!V=O(QK zJ)QKSwA~joE@O?g!Rcz)xT7OQAqsr z)fy^mZ=iOwx)%vS`C_65xYPG2T1W2hiR8qo{vop)Fq`=D519iOuHgb_t;+!_hU~-s zAy>=g7{QPLwDw*320N?&i#jOPODbK>*XoI%D}PktG7teYE+2bCK6`uP zqlzERGSJhp5cN*y?^zmK6=C&f%>%v(`)J<3D2e%h)$G%&|D{j;XB%8D-|E8^;>PG@ zot62w6CWGIObL{V{sOCava8bDQ6*89L1;!5Ug!KUe^g^m*C4!Y9j2}UK@X+L#ao?f z#&OXUIsIBS(jt!sZ8{bzvp4Yn066~ZANzmcj(JgtljZb65r@`{S;jD266YP;|YG%9rw;8l~S1EAg+pXl=r>y`_;=2$ z|6|3zP*E#CI-)&oq~<}_LC7o3<=|87e52p5i(PGA8UMP=r)YIiHTFSw)t=v93{p?; x0RKGk#i<{$((65co;cgJ=Wo~lJn>23la)SojZfdhr%~h6Q2C!TKb-kP@J}#cuD}2Q literal 0 HcmV?d00001 diff --git a/docs/getting-started/architecture/rbac.md b/docs/getting-started/architecture/rbac.md new file mode 100644 index 0000000000..9a51fba6ac --- /dev/null +++ b/docs/getting-started/architecture/rbac.md @@ -0,0 +1,56 @@ +# Role-Based Access Control (RBAC) in Feast + +## Introduction + +Role-Based Access Control (RBAC) is a security mechanism that restricts access to resources based on the roles of individual users within an organization. In the context of the Feast, RBAC ensures that only authorized users or groups can access or modify specific resources, thereby maintaining data security and operational integrity. 
+
+## Functional Requirements
+
+The RBAC implementation in Feast is designed to:
+
+- **Assign Permissions**: Allow administrators to assign permissions for various operations and resources to users or groups based on their roles.
+- **Seamless Integration**: Integrate smoothly with existing business code without requiring significant modifications.
+- **Backward Compatibility**: Maintain support for non-authorized models as the default to ensure backward compatibility.
+
+## Business Goals
+
+The primary business goals of implementing RBAC in Feast are:
+
+1. **Feature Sharing**: Enable multiple teams to share the feature store while ensuring controlled access. This allows for collaborative work without compromising data security.
+2. **Access Control Management**: Prevent unauthorized access to team-specific resources and spaces, governing the operations that each user or group can perform.
+
+## Reference Architecture
+
+Feast operates as a collection of connected services, each enforcing authorization permissions. The architecture is designed as a distributed microservices system with the following key components:
+
+- **Service Endpoints**: These enforce authorization permissions, ensuring that only authorized requests are processed.
+- **Client Integration**: Clients authenticate with feature servers by attaching an authorization token to each request.
+- **Service-to-Service Communication**: This is always granted.
+
+![rbac.jpg](rbac.jpg)
+
+## Permission Model
+
+The RBAC system in Feast uses a permission model that defines the following concepts:
+
+- **Resource**: An object within Feast that needs to be secured against unauthorized access.
+- **Action**: A logical operation performed on a resource, such as Create, Describe, Update, Delete, Read, or Write operations.
+- **Policy**: A set of rules that enforce authorization decisions on resources. The default implementation uses role-based policies.
+ + + +## Authorization Architecture + +The authorization architecture in Feast is built with the following components: + +- **Token Extractor**: Extracts the authorization token from the request header. +- **Token Parser**: Parses the token to retrieve user details. +- **Policy Enforcer**: Validates the secured endpoint against the retrieved user details. +- **Token Injector**: Adds the authorization token to each secured request header. + + + + + + + diff --git a/docs/getting-started/components/README.md b/docs/getting-started/components/README.md index d468714bd4..e1c000abce 100644 --- a/docs/getting-started/components/README.md +++ b/docs/getting-started/components/README.md @@ -19,3 +19,7 @@ {% content-ref url="provider.md" %} [provider.md](provider.md) {% endcontent-ref %} + +{% content-ref url="authz_manager.md" %} +[authz_manager.md](authz_manager.md) +{% endcontent-ref %} diff --git a/docs/getting-started/components/authz_manager.md b/docs/getting-started/components/authz_manager.md new file mode 100644 index 0000000000..09ca4d1366 --- /dev/null +++ b/docs/getting-started/components/authz_manager.md @@ -0,0 +1,102 @@ +# Authorization Manager +An Authorization Manager is an instance of the `AuthManager` class that is plugged into one of the Feast servers to extract user details from the current request and inject them into the [permissions](../../getting-started/concepts/permissions.md) framework. + +{% hint style="info" %} +**Note**: Feast does not provide authentication capabilities; it is the client's responsibility to manage the authentication token and pass it to +the Feast server, which then validates the token and extracts user details from the configured authentication server. +{% endhint %} + +Two authorization managers are supported out-of-the-box: +* One using a configurable OIDC server to extract the user details. +* One using the Kubernetes RBAC resources to extract the user details. 
+ +These instances are created when the Feast servers are initialized, according to the authorization configuration defined in +their own `feature_store.yaml`. + +Feast servers and clients must have consistent authorization configuration, so that the client proxies can automatically inject +the authorization tokens that the server can properly identify and use to enforce permission validations. + + +## Design notes +The server-side implementation of the authorization functionality is defined [here](./../../../sdk/python/feast/permissions/server). +Few of the key models, classes to understand the authorization implementation on the client side can be found [here](./../../../sdk/python/feast/permissions/client). + +## Configuring Authorization +The authorization is configured using a dedicated `auth` section in the `feature_store.yaml` configuration. + +**Note**: As a consequence, when deploying the Feast servers with the Helm [charts](../../../infra/charts/feast-feature-server/README.md), +the `feature_store_yaml_base64` value must include the `auth` section to specify the authorization configuration. + +### No Authorization +This configuration applies the default `no_auth` authorization: +```yaml +project: my-project +auth: + type: no_auth +... +``` + +### OIDC Authorization +With OIDC authorization, the Feast client proxies retrieve the JWT token from an OIDC server (or [Identity Provider](https://openid.net/developers/how-connect-works/)) +and append it in every request to a Feast server, using an [Authorization Bearer Token](https://developer.mozilla.org/en-US/docs/Web/HTTP/Authentication#bearer). + +The server, in turn, uses the same OIDC server to validate the token and extract the user roles from the token itself. 
+
+Some assumptions are made in the OIDC server configuration:
+* The OIDC token refers to a client with roles matching the RBAC roles of the configured `Permission`s (*)
+* The roles are exposed in the access token passed to the server
+
+(*) Please note that **the role match is case-sensitive**, e.g. the name of the role in the OIDC server and in the `Permission` configuration
+must be exactly the same.
+
+For example, the access token for a client `app` of a user with `reader` role should have the following `resource_access` section:
+```json
+{
+  "resource_access": {
+    "app": {
+      "roles": [
+        "reader"
+      ]
+    }
+  }
+}
+```
+
+An example of OIDC authorization configuration is the following:
+```yaml
+project: my-project
+auth:
+  type: oidc
+  client_id: _CLIENT_ID__
+  client_secret: _CLIENT_SECRET__
+  realm: _REALM__
+  auth_server_url: _OIDC_SERVER_URL_
+  auth_discovery_url: _OIDC_SERVER_URL_/realms/master/.well-known/openid-configuration
+...
+```
+
+In case of client configuration, the following settings must be added to specify the current user:
+```yaml
+auth:
+  ...
+  username: _USERNAME_
+  password: _PASSWORD_
+```
+
+### Kubernetes RBAC Authorization
+With Kubernetes RBAC Authorization, the client uses the service account token as the authorization bearer token, and the
+server fetches the associated roles from the Kubernetes RBAC resources.
+
+An example of Kubernetes RBAC authorization configuration is the following:
+{% hint style="info" %}
+**NOTE**: This configuration will only work if you deploy Feast on OpenShift or a Kubernetes platform.
+{% endhint %}
+```yaml
+project: my-project
+auth:
+  type: kubernetes
+...
+```
+
+In case the client cannot run on the same cluster as the servers, the client token can be injected using the `LOCAL_K8S_TOKEN`
+environment variable on the client side. The value must refer to the token of a service account created on the servers cluster
+and linked to the desired RBAC roles.
\ No newline at end of file diff --git a/docs/getting-started/components/overview.md b/docs/getting-started/components/overview.md index 393f436e5b..0ee3835de6 100644 --- a/docs/getting-started/components/overview.md +++ b/docs/getting-started/components/overview.md @@ -28,3 +28,4 @@ A complete Feast deployment contains the following components: * **Batch Materialization Engine:** The [Batch Materialization Engine](batch-materialization-engine.md) component launches a process which loads data into the online store from the offline store. By default, Feast uses a local in-process engine implementation to materialize data. However, additional infrastructure can be used for a more scalable materialization process. * **Online Store:** The online store is a database that stores only the latest feature values for each entity. The online store is either populated through materialization jobs or through [stream ingestion](../../reference/data-sources/push.md). * **Offline Store:** The offline store persists batch data that has been ingested into Feast. This data is used for producing training datasets. For feature retrieval and materialization, Feast does not manage the offline store directly, but runs queries against it. However, offline stores can be configured to support writes if Feast configures logging functionality of served features. +* **Authorization manager**: The authorization manager detects authentication tokens from client requests to Feast servers and uses this information to enforce permission policies on the requested services. 
diff --git a/docs/getting-started/concepts/README.md b/docs/getting-started/concepts/README.md
index e805e3b486..1769a2d741 100644
--- a/docs/getting-started/concepts/README.md
+++ b/docs/getting-started/concepts/README.md
@@ -31,3 +31,7 @@
 {% content-ref url="dataset.md" %}
 [dataset.md](dataset.md)
 {% endcontent-ref %}
+
+{% content-ref url="permission.md" %}
+[permission.md](permission.md)
+{% endcontent-ref %}
diff --git a/docs/getting-started/concepts/permission.md b/docs/getting-started/concepts/permission.md
new file mode 100644
index 0000000000..5bca1bd568
--- /dev/null
+++ b/docs/getting-started/concepts/permission.md
@@ -0,0 +1,112 @@
+# Permission
+
+## Overview
+
+The Feast permissions model allows configuring granular permission policies for all the resources defined in a feature store.
+
+The configured permissions are stored in the Feast registry and accessible through the CLI and the registry APIs.
+
+The permission authorization enforcement is performed when requests are executed through one of the Feast (Python) servers:
+- The online feature server (REST)
+- The offline feature server (Arrow Flight)
+- The registry server (gRPC)
+
+Note that there is no permission enforcement when accessing the Feast API with a local provider.
+
+## Concepts
+
+The permission model is based on the following components:
+- A `resource` is a Feast object that we want to secure against unauthorized access.
+  - We assume that the resource has a `name` attribute and an optional dictionary of associated key-value `tags`.
+- An `action` is a logical operation executed on the secured resource, like:
+  - `create`: Create an instance.
+  - `describe`: Access the instance state.
+  - `update`: Update the instance state.
+  - `delete`: Delete an instance.
+  - `read`: Read both online and offline stores.
+  - `read_online`: Read the online store.
+  - `read_offline`: Read the offline store.
+  - `write`: Write on any store.
+  - `write_online`: Write to the online store.
+ - `write_offline`: Write to the offline store. +- A `policy` identifies the rule for enforcing authorization decisions on secured resources, based on the current user. + - A default implementation is provided for role-based policies, using the user roles to grant or deny access to the requested actions + on the secured resources. + +The `Permission` class identifies a single permission configured on the feature store and is identified by these attributes: +- `name`: The permission name. +- `types`: The list of protected resource types. Defaults to all managed types, e.g. the `ALL_RESOURCE_TYPES` alias. All sub-classes are included in the resource match. +- `name_pattern`: A regex to match the resource name. Defaults to `None`, meaning that no name filtering is applied +- `required_tags`: Dictionary of key-value pairs that must match the resource tags. Defaults to `None`, meaning that no tags filtering is applied. +- `actions`: The actions authorized by this permission. Defaults to `ALL_VALUES`, an alias defined in the `action` module. +- `policy`: The policy to be applied to validate a client request. + +To simplify configuration, several constants are defined to streamline the permissions setup: +- In module `feast.feast_object`: + - `ALL_RESOURCE_TYPES` is the list of all the `FeastObject` types. + - `ALL_FEATURE_VIEW_TYPES` is the list of all the feature view types, including those not inheriting from `FeatureView` type like + `OnDemandFeatureView`. +- In module `feast.permissions.action`: + - `ALL_ACTIONS` is the list of all managed actions. + - `READ` includes all the read actions for online and offline store. + - `WRITE` includes all the write actions for online and offline store. + - `CRUD` includes all the state management actions to create, describe, update or delete a Feast resource. 
+ +Given the above definitions, the feature store can be configured with granular control over each resource, enabling partitioned access by +teams to meet organizational requirements for service and data sharing, and protection of sensitive information. + +The `feast` CLI includes a new `permissions` command to list the registered permissions, with options to identify the matching resources for each configured permission and the existing resources that are not covered by any permission. + +{% hint style="info" %} +**Note**: Feast resources that do not match any of the configured permissions are not secured by any authorization policy, meaning any user can execute any action on such resources. +{% endhint %} + +## Definition examples +This permission definition grants access to the resource state and the ability to read all of the stores for any feature view or +feature service to all users with the role `super-reader`: +```py +Permission( + name="feature-reader", + types=[FeatureView, FeatureService], + policy=RoleBasedPolicy(roles=["super-reader"]), + actions=[AuthzedAction.DESCRIBE, READ], +) +``` + +This example grants permission to write on all the data sources with `risk_level` tag set to `high` only to users with role `admin` or `data_team`: +```py +Permission( + name="ds-writer", + types=[DataSource], + required_tags={"risk_level": "high"}, + policy=RoleBasedPolicy(roles=["admin", "data_team"]), + actions=[AuthzedAction.WRITE], +) +``` + +{% hint style="info" %} +**Note**: When using multiple roles in a role-based policy, the user must be granted at least one of the specified roles. 
+{% endhint %} + + +The following permission grants authorization to read the offline store of all the feature views including `risky` in the name, to users with role `trusted`: + +```py +Permission( + name="reader", + types=[FeatureView], + name_pattern=".*risky.*", + policy=RoleBasedPolicy(roles=["trusted"]), + actions=[AuthzedAction.READ_OFFLINE], +) +``` + +## Authorization configuration +In order to leverage the permission functionality, the `auth` section is needed in the `feature_store.yaml` configuration. +Currently, Feast supports OIDC and Kubernetes RBAC authorization protocols. + +The default configuration, if you don't specify the `auth` configuration section, is `no_auth`, indicating that no permission +enforcement is applied. + +The `auth` section includes a `type` field specifying the actual authorization protocol, and protocol-specific fields that +are specified in [Authorization Manager](../components/authz_manager.md). diff --git a/docs/reference/feast-cli-commands.md b/docs/reference/feast-cli-commands.md index afcfcfef64..be31720034 100644 --- a/docs/reference/feast-cli-commands.md +++ b/docs/reference/feast-cli-commands.md @@ -24,6 +24,7 @@ Commands: init Create a new Feast repository materialize Run a (non-incremental) materialization job to... materialize-incremental Run an incremental materialization job to ingest... + permissions Access permissions registry-dump Print contents of the metadata registry teardown Tear down deployed feature store infrastructure version Display Feast SDK version @@ -155,6 +156,143 @@ Load data from feature views into the online store, beginning from either the pr feast materialize-incremental 2022-01-01T00:00:00 ``` +## Permissions + +### List permissions +List all registered permission + +```text +feast permissions list + +Options: + --tags TEXT Filter by tags (e.g. --tags 'key:value' --tags 'key:value, + key:value, ...'). Items return when ALL tags match. 
+ -v, --verbose Print the resources matching each configured permission +``` + +```text ++-----------------------+-------------+-----------------------+-----------+----------------+-------------------------+ +| NAME | TYPES | NAME_PATTERN | ACTIONS | ROLES | REQUIRED_TAGS | ++=======================+=============+=======================+===========+================+================+========+ +| reader_permission1234 | FeatureView | transformed_conv_rate | DESCRIBE | reader | - | ++-----------------------+-------------+-----------------------+-----------+----------------+-------------------------+ +| writer_permission1234 | FeatureView | transformed_conv_rate | CREATE | writer | - | ++-----------------------+-------------+-----------------------+-----------+----------------+-------------------------+ +| special | FeatureView | special.* | DESCRIBE | admin | test-key2 : test-value2 | +| | | | UPDATE | special-reader | test-key : test-value | ++-----------------------+-------------+-----------------------+-----------+----------------+-------------------------+ +``` + +`verbose` option describes the resources matching each configured permission: + +```text +feast permissions list -v +``` + +```text +Permissions: + +permissions +β”œβ”€β”€ reader_permission1234 ['reader'] +β”‚ └── FeatureView: none +└── writer_permission1234 ['writer'] + β”œβ”€β”€ FeatureView: none + │── OnDemandFeatureView: ['transformed_conv_rate_fresh', 'transformed_conv_rate'] + └── BatchFeatureView: ['driver_hourly_stats', 'driver_hourly_stats_fresh'] +``` + +### Describe a permission +Describes the provided permission + +```text +feast permissions describe permission-name +name: permission-name +types: +- FEATURE_VIEW +namePattern: transformed_conv_rate +requiredTags: + required1: required-value1 + required2: required-value2 +actions: +- DESCRIBE +policy: + roleBasedPolicy: + roles: + - reader +tags: + key1: value1 + key2: value2 + +``` + +### List of the configured roles +List all the configured 
roles + +```text +feast permissions list-roles + +Options: + --verbose Print the resources and actions permitted to each configured + role +``` + +```text +ROLE NAME +admin +reader +writer +``` + +`verbose` option describes the resources and actions permitted to each managed role: + +```text +feast permissions list-roles -v +``` + +```text +ROLE NAME RESOURCE NAME RESOURCE TYPE PERMITTED ACTIONS +admin driver_hourly_stats_source FileSource CREATE + DELETE + QUERY_OFFLINE + QUERY_ONLINE + DESCRIBE + UPDATE +admin vals_to_add RequestSource CREATE + DELETE + QUERY_OFFLINE + QUERY_ONLINE + DESCRIBE + UPDATE +admin driver_stats_push_source PushSource CREATE + DELETE + QUERY_OFFLINE + QUERY_ONLINE + DESCRIBE + UPDATE +admin driver_hourly_stats_source FileSource CREATE + DELETE + QUERY_OFFLINE + QUERY_ONLINE + DESCRIBE + UPDATE +admin vals_to_add RequestSource CREATE + DELETE + QUERY_OFFLINE + QUERY_ONLINE + DESCRIBE + UPDATE +admin driver_stats_push_source PushSource CREATE + DELETE + QUERY_OFFLINE + QUERY_ONLINE + DESCRIBE + UPDATE +reader driver_hourly_stats FeatureView DESCRIBE +reader driver_hourly_stats_fresh FeatureView DESCRIBE +... +``` + + ## Teardown Tear down deployed feature store infrastructure diff --git a/docs/reference/feature-servers/offline-feature-server.md b/docs/reference/feature-servers/offline-feature-server.md index 6c2fdf7a25..1db5adacd8 100644 --- a/docs/reference/feature-servers/offline-feature-server.md +++ b/docs/reference/feature-servers/offline-feature-server.md @@ -33,3 +33,20 @@ Please see the detail how to configure offline store client [remote-offline-stor The set of functionalities supported by remote offline stores is the same as those supported by offline stores with the SDK, which are described in detail [here](../offline-stores/overview.md#functionality). 
+# Offline Feature Server Permissions and Access Control + +## API Endpoints and Permissions + +| Endpoint | Resource Type | Permission | Description | +| ------------------------------------- |------------------|---------------|---------------------------------------------------| +| offline_write_batch | FeatureView | Write Offline | Write a batch of data to the offline store | +| write_logged_features | FeatureService | Write Offline | Write logged features to the offline store | +| persist | DataSource | Write Offline | Persist the result of a read in the offline store | +| get_historical_features | FeatureView | Read Offline | Retrieve historical features | +| pull_all_from_table_or_query | DataSource | Read Offline | Pull all data from a table or read it | +| pull_latest_from_table_or_query | DataSource | Read Offline | Pull the latest data from a table or read it | + + +## How to configure Authentication and Authorization ? + +Please refer the [page](./../../../docs/getting-started/concepts/permission.md) for more details on how to configure authentication and authorization. 
\ No newline at end of file diff --git a/docs/reference/feature-servers/python-feature-server.md b/docs/reference/feature-servers/python-feature-server.md index 33dfe77ae1..255b85e606 100644 --- a/docs/reference/feature-servers/python-feature-server.md +++ b/docs/reference/feature-servers/python-feature-server.md @@ -199,3 +199,19 @@ requests.post( "http://localhost:6566/push", data=json.dumps(push_data)) ``` + +# Online Feature Server Permissions and Access Control + +## API Endpoints and Permissions + +| Endpoint | Resource Type | Permission | Description | +| ---------------------------- |---------------------------------|-------------------------------------------------------| ------------------------------------------------------------------------ | +| /get-online-features | FeatureView,OnDemandFeatureView | Read Online | Get online features from the feature store | +| /push | FeatureView | Write Online, Write Offline, Write Online and Offline | Push features to the feature store (online, offline, or both) | +| /write-to-online-store | FeatureView | Write Online | Write features to the online store | +| /materialize | FeatureView | Write Online | Materialize features within a specified time range | +| /materialize-incremental | FeatureView | Write Online | Incrementally materialize features up to a specified timestamp | + +## How to configure Authentication and Authorization ? + +Please refer the [page](./../../../docs/getting-started/concepts/permission.md) for more details on how to configure authentication and authorization. 
\ No newline at end of file diff --git a/docs/reference/offline-stores/remote-offline-store.md b/docs/reference/offline-stores/remote-offline-store.md index 0179e0f06f..8057ae3284 100644 --- a/docs/reference/offline-stores/remote-offline-store.md +++ b/docs/reference/offline-stores/remote-offline-store.md @@ -25,4 +25,7 @@ The complete example can be find under [remote-offline-store-example](../../../e ## How to configure the server -Please see the detail how to configure offline feature server [offline-feature-server.md](../feature-servers/offline-feature-server.md) \ No newline at end of file +Please see the detail how to configure offline feature server [offline-feature-server.md](../feature-servers/offline-feature-server.md) + +## How to configure Authentication and Authorization +Please refer the [page](./../../../docs/getting-started/concepts/permission.md) for more details on how to configure authentication and authorization. diff --git a/docs/reference/online-stores/remote.md b/docs/reference/online-stores/remote.md index c560fa6f22..4dd4fb65b5 100644 --- a/docs/reference/online-stores/remote.md +++ b/docs/reference/online-stores/remote.md @@ -11,11 +11,17 @@ The registry is pointing to registry of remote feature store. If it is not acces {% code title="feature_store.yaml" %} ```yaml project: my-local-project - registry: /remote/data/registry.db - provider: local - online_store: - path: http://localhost:6566 - type: remote - entity_key_serialization_version: 2 +registry: /remote/data/registry.db +provider: local +online_store: + path: http://localhost:6566 + type: remote +entity_key_serialization_version: 2 +auth: + type: no_auth ``` -{% endcode %} \ No newline at end of file +{% endcode %} + +## How to configure Authentication and Authorization +Please refer the [page](./../../../docs/getting-started/concepts/permission.md) for more details on how to configure authentication and authorization. 
+ diff --git a/docs/reference/registry/registry-permissions.md b/docs/reference/registry/registry-permissions.md new file mode 100644 index 0000000000..65508ef5b2 --- /dev/null +++ b/docs/reference/registry/registry-permissions.md @@ -0,0 +1,45 @@ +# Registry Permissions and Access Control + + +## API Endpoints and Permissions + +| Endpoint | Resource Type | Permission | Description | +| ------------------------ |---------------------|------------------------| -------------------------------------------------------------- | +| ApplyEntity | Entity | Create, Update, Delete | Apply an entity to the registry | +| GetEntity | Entity | Read | Get an entity from the registry | +| ListEntities | Entity | Read | List entities in the registry | +| DeleteEntity | Entity | Delete | Delete an entity from the registry | +| ApplyDataSource | DataSource | Create, Update, Delete | Apply a data source to the registry | +| GetDataSource | DataSource | Read | Get a data source from the registry | +| ListDataSources | DataSource | Read | List data sources in the registry | +| DeleteDataSource | DataSource | Delete | Delete a data source from the registry | +| ApplyFeatureView | FeatureView | Create, Update, Delete | Apply a feature view to the registry | +| GetFeatureView | FeatureView | Read | Get a feature view from the registry | +| ListFeatureViews | FeatureView | Read | List feature views in the registry | +| DeleteFeatureView | FeatureView | Delete | Delete a feature view from the registry | +| GetStreamFeatureView | StreamFeatureView | Read | Get a stream feature view from the registry | +| ListStreamFeatureViews | StreamFeatureView | Read | List stream feature views in the registry | +| GetOnDemandFeatureView | OnDemandFeatureView | Read | Get an on-demand feature view from the registry | +| ListOnDemandFeatureViews | OnDemandFeatureView | Read | List on-demand feature views in the registry | +| ApplyFeatureService | FeatureService | Create, Update, Delete | Apply a feature 
service to the registry | +| GetFeatureService | FeatureService | Read | Get a feature service from the registry | +| ListFeatureServices | FeatureService | Read | List feature services in the registry | +| DeleteFeatureService | FeatureService | Delete | Delete a feature service from the registry | +| ApplySavedDataset | SavedDataset | Create, Update, Delete | Apply a saved dataset to the registry | +| GetSavedDataset | SavedDataset | Read | Get a saved dataset from the registry | +| ListSavedDatasets | SavedDataset | Read | List saved datasets in the registry | +| DeleteSavedDataset | SavedDataset | Delete | Delete a saved dataset from the registry | +| ApplyValidationReference | ValidationReference | Create, Update, Delete | Apply a validation reference to the registry | +| GetValidationReference | ValidationReference | Read | Get a validation reference from the registry | +| ListValidationReferences | ValidationReference | Read | List validation references in the registry | +| DeleteValidationReference| ValidationReference | Delete | Delete a validation reference from the registry | +| ApplyPermission | Permission | Create, Update, Delete | Apply a permission to the registry | +| GetPermission | Permission | Read | Get a permission from the registry | +| ListPermissions | Permission | Read | List permissions in the registry | +| DeletePermission | Permission | Delete | Delete a permission from the registry | +| Commit | | None | Commit changes to the registry | +| Refresh | | None | Refresh the registry | +| Proto | | None | Get the proto representation of the registry | + +## How to configure Authentication and Authorization +Please refer the [page](./../../../docs/getting-started/concepts/permission.md) for more details on how to configure authentication and authorization. 
diff --git a/protos/feast/core/Permission.proto b/protos/feast/core/Permission.proto new file mode 100644 index 0000000000..57958d3d81 --- /dev/null +++ b/protos/feast/core/Permission.proto @@ -0,0 +1,69 @@ +syntax = "proto3"; +package feast.core; + +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; +option java_outer_classname = "PermissionProto"; +option java_package = "feast.proto.core"; + +import "feast/core/Policy.proto"; +import "google/protobuf/timestamp.proto"; + +message Permission { + // User-specified specifications of this permission. + PermissionSpec spec = 1; + + // System-populated metadata for this permission. + PermissionMeta meta = 2; +} + +message PermissionSpec { + enum AuthzedAction { + CREATE = 0; + DESCRIBE = 1; + UPDATE = 2; + DELETE = 3; + READ_ONLINE = 4; + READ_OFFLINE = 5; + WRITE_ONLINE = 6; + WRITE_OFFLINE = 7; + } + + // Name of the permission. Must be unique. Not updated. + string name = 1; + + // Name of Feast project. + string project = 2; + + enum Type { + FEATURE_VIEW = 0; + ON_DEMAND_FEATURE_VIEW = 1; + BATCH_FEATURE_VIEW = 2; + STREAM_FEATURE_VIEW= 3; + ENTITY = 4; + FEATURE_SERVICE = 5; + DATA_SOURCE = 6; + VALIDATION_REFERENCE = 7; + SAVED_DATASET = 8; + PERMISSION = 9; + } + + repeated Type types = 3; + + string name_pattern = 4; + + map required_tags = 5; + + // List of actions. + repeated AuthzedAction actions = 6; + + // the policy. 
+ Policy policy = 7; + + // User defined metadata + map tags = 8; +} + +message PermissionMeta { + google.protobuf.Timestamp created_timestamp = 1; + google.protobuf.Timestamp last_updated_timestamp = 2; +} diff --git a/protos/feast/core/Policy.proto b/protos/feast/core/Policy.proto new file mode 100644 index 0000000000..7ad42b9797 --- /dev/null +++ b/protos/feast/core/Policy.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; +package feast.core; + +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; +option java_outer_classname = "PolicyProto"; +option java_package = "feast.proto.core"; + +message Policy { + // Name of the policy. + string name = 1; + + // Name of Feast project. + string project = 2; + + oneof policy_type { + RoleBasedPolicy role_based_policy = 3; + } +} + +message RoleBasedPolicy { + // List of roles in this policy. + repeated string roles = 1; +} diff --git a/protos/feast/core/Registry.proto b/protos/feast/core/Registry.proto index 0c3f8a53f9..b4f1ffb0a3 100644 --- a/protos/feast/core/Registry.proto +++ b/protos/feast/core/Registry.proto @@ -32,8 +32,9 @@ import "feast/core/DataSource.proto"; import "feast/core/SavedDataset.proto"; import "feast/core/ValidationProfile.proto"; import "google/protobuf/timestamp.proto"; +import "feast/core/Permission.proto"; -// Next id: 16 +// Next id: 17 message Registry { repeated Entity entities = 1; repeated FeatureTable feature_tables = 2; @@ -51,6 +52,7 @@ message Registry { string registry_schema_version = 3; // to support migrations; incremented when schema is changed string version_id = 4; // version id, random string generated on each update of the data; now used only for debugging purposes google.protobuf.Timestamp last_updated = 5; + repeated Permission permissions = 16; } message ProjectMetadata { diff --git a/protos/feast/registry/RegistryServer.proto b/protos/feast/registry/RegistryServer.proto index 44529f5409..928354077b 100644 --- a/protos/feast/registry/RegistryServer.proto +++ 
b/protos/feast/registry/RegistryServer.proto @@ -14,6 +14,7 @@ import "feast/core/FeatureService.proto"; import "feast/core/SavedDataset.proto"; import "feast/core/ValidationProfile.proto"; import "feast/core/InfraObject.proto"; +import "feast/core/Permission.proto"; service RegistryServer{ // Entity RPCs @@ -59,7 +60,13 @@ service RegistryServer{ rpc GetValidationReference (GetValidationReferenceRequest) returns (feast.core.ValidationReference) {} rpc ListValidationReferences (ListValidationReferencesRequest) returns (ListValidationReferencesResponse) {} rpc DeleteValidationReference (DeleteValidationReferenceRequest) returns (google.protobuf.Empty) {} - + + // Permission RPCs + rpc ApplyPermission (ApplyPermissionRequest) returns (google.protobuf.Empty) {} + rpc GetPermission (GetPermissionRequest) returns (feast.core.Permission) {} + rpc ListPermissions (ListPermissionsRequest) returns (ListPermissionsResponse) {} + rpc DeletePermission (DeletePermissionRequest) returns (google.protobuf.Empty) {} + rpc ApplyMaterialization (ApplyMaterializationRequest) returns (google.protobuf.Empty) {} rpc ListProjectMetadata (ListProjectMetadataRequest) returns (ListProjectMetadataResponse) {} rpc UpdateInfra (UpdateInfraRequest) returns (google.protobuf.Empty) {} @@ -277,6 +284,7 @@ message GetSavedDatasetRequest { message ListSavedDatasetsRequest { string project = 1; bool allow_cache = 2; + map tags = 3; } message ListSavedDatasetsResponse { @@ -306,6 +314,7 @@ message GetValidationReferenceRequest { message ListValidationReferencesRequest { string project = 1; bool allow_cache = 2; + map tags = 3; } message ListValidationReferencesResponse { @@ -316,4 +325,34 @@ message DeleteValidationReferenceRequest { string name = 1; string project = 2; bool commit = 3; -} \ No newline at end of file +} + +// Permissions + +message ApplyPermissionRequest { + feast.core.Permission permission = 1; + string project = 2; + bool commit = 3; +} + +message GetPermissionRequest { + string name 
= 1; + string project = 2; + bool allow_cache = 3; +} + +message ListPermissionsRequest { + string project = 1; + bool allow_cache = 2; + map tags = 3; +} + +message ListPermissionsResponse { + repeated feast.core.Permission permissions = 1; +} + +message DeletePermissionRequest { + string name = 1; + string project = 2; + bool commit = 3; +} diff --git a/sdk/python/docs/source/feast.rst b/sdk/python/docs/source/feast.rst index 95fbea8d7a..83137574dd 100644 --- a/sdk/python/docs/source/feast.rst +++ b/sdk/python/docs/source/feast.rst @@ -12,6 +12,7 @@ Subpackages feast.embedded_go feast.infra feast.loaders + feast.permissions feast.protos feast.transformation feast.ui @@ -251,6 +252,14 @@ feast.proto\_json module :undoc-members: :show-inheritance: +feast.prova module +------------------ + +.. automodule:: feast.prova + :members: + :undoc-members: + :show-inheritance: + feast.registry\_server module ----------------------------- diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py index f4e3e97d27..737704dd36 100644 --- a/sdk/python/feast/cli.py +++ b/sdk/python/feast/cli.py @@ -16,23 +16,27 @@ from datetime import datetime from importlib.metadata import version as importlib_version from pathlib import Path -from typing import List, Optional +from typing import Any, List, Optional import click import yaml +from bigtree import Node from colorama import Fore, Style from dateutil import parser from pygments import formatters, highlight, lexers -from feast import utils +import feast.cli_utils as cli_utils +from feast import BatchFeatureView, Entity, FeatureService, StreamFeatureView, utils from feast.constants import ( DEFAULT_FEATURE_TRANSFORMATION_SERVER_PORT, DEFAULT_OFFLINE_SERVER_PORT, DEFAULT_REGISTRY_SERVER_PORT, ) +from feast.data_source import DataSource from feast.errors import FeastObjectNotFoundException, FeastProviderLoginError from feast.feature_view import FeatureView from feast.on_demand_feature_view import OnDemandFeatureView +from 
feast.permissions.policy import RoleBasedPolicy from feast.repo_config import load_repo_config from feast.repo_operations import ( apply_total, @@ -44,6 +48,7 @@ registry_dump, teardown, ) +from feast.saved_dataset import SavedDataset, ValidationReference from feast.utils import maybe_local_tz _logger = logging.getLogger(__name__) @@ -879,5 +884,253 @@ def validate( exit(1) +@cli.group(name="permissions") +def feast_permissions_cmd(): + """ + Access permissions + """ + pass + + +@feast_permissions_cmd.command(name="list") +@click.option( + "--verbose", + "-v", + is_flag=True, + help="Print the resources matching each configured permission", +) +@tagsOption +@click.pass_context +def feast_permissions_list_command(ctx: click.Context, verbose: bool, tags: list[str]): + from tabulate import tabulate + + table: list[Any] = [] + tags_filter = utils.tags_list_to_dict(tags) + + store = create_feature_store(ctx) + + permissions = store.list_permissions(tags=tags_filter) + + root_node = Node("permissions") + roles: set[str] = set() + + for p in permissions: + policy = p.policy + if not verbose: + cli_utils.handle_not_verbose_permissions_command(p, policy, table) + else: + if isinstance(policy, RoleBasedPolicy) and len(policy.get_roles()) > 0: + roles = set(policy.get_roles()) + permission_node = Node( + p.name + " " + str(list(roles)), parent=root_node + ) + else: + permission_node = Node(p.name, parent=root_node) + + for feast_type in p.types: + if feast_type in [ + FeatureView, + OnDemandFeatureView, + BatchFeatureView, + StreamFeatureView, + ]: + cli_utils.handle_fv_verbose_permissions_command( + feast_type, # type: ignore[arg-type] + p, + permission_node, + store, + tags_filter, + ) + elif feast_type == Entity: + cli_utils.handle_entity_verbose_permissions_command( + feast_type, # type: ignore[arg-type] + p, + permission_node, + store, + tags_filter, + ) + elif feast_type == FeatureService: + cli_utils.handle_fs_verbose_permissions_command( + feast_type, # type: 
ignore[arg-type] + p, + permission_node, + store, + tags_filter, + ) + elif feast_type == DataSource: + cli_utils.handle_ds_verbose_permissions_command( + feast_type, # type: ignore[arg-type] + p, + permission_node, + store, + tags_filter, + ) + elif feast_type == ValidationReference: + cli_utils.handle_vr_verbose_permissions_command( + feast_type, # type: ignore[arg-type] + p, + permission_node, + store, + tags_filter, + ) + elif feast_type == SavedDataset: + cli_utils.handle_sd_verbose_permissions_command( + feast_type, # type: ignore[arg-type] + p, + permission_node, + store, + tags_filter, + ) + + if not verbose: + print( + tabulate( + table, + headers=[ + "NAME", + "TYPES", + "NAME_PATTERN", + "ACTIONS", + "ROLES", + "REQUIRED_TAGS", + ], + tablefmt="plain", + ) + ) + else: + cli_utils.print_permission_verbose_example() + + print("Permissions:") + print("") + root_node.show() + + +@feast_permissions_cmd.command("describe") +@click.argument("name", type=click.STRING) +@click.pass_context +def permission_describe(ctx: click.Context, name: str): + """ + Describe a permission + """ + store = create_feature_store(ctx) + + try: + permission = store.get_permission(name) + except FeastObjectNotFoundException as e: + print(e) + exit(1) + + print( + yaml.dump( + yaml.safe_load(str(permission)), default_flow_style=False, sort_keys=False + ) + ) + + +@feast_permissions_cmd.command(name="check") +@click.pass_context +def feast_permissions_check_command(ctx: click.Context): + """ + Validate the permissions configuration + """ + from tabulate import tabulate + + all_unsecured_table: list[Any] = [] + store = create_feature_store(ctx) + permissions = store.list_permissions() + objects = cli_utils.fetch_all_feast_objects( + store=store, + ) + + print( + f"{Style.BRIGHT + Fore.RED}The following resources are not secured by any permission configuration:{Style.RESET_ALL}" + ) + for o in objects: + cli_utils.handle_permissions_check_command( + object=o, permissions=permissions, 
table=all_unsecured_table + ) + print( + tabulate( + all_unsecured_table, + headers=[ + "NAME", + "TYPE", + ], + tablefmt="plain", + ) + ) + + all_unsecured_actions_table: list[Any] = [] + print( + f"{Style.BRIGHT + Fore.RED}The following actions are not secured by any permission configuration (Note: this might not be a security concern, depending on the used APIs):{Style.RESET_ALL}" + ) + for o in objects: + cli_utils.handle_permissions_check_command_with_actions( + object=o, permissions=permissions, table=all_unsecured_actions_table + ) + print( + tabulate( + all_unsecured_actions_table, + headers=[ + "NAME", + "TYPE", + "UNSECURED ACTIONS", + ], + tablefmt="plain", + ) + ) + + +@feast_permissions_cmd.command(name="list-roles") +@click.option( + "--verbose", + "-v", + is_flag=True, + help="Print the resources and actions permitted to each configured role", +) +@click.pass_context +def feast_permissions_list_roles_command(ctx: click.Context, verbose: bool): + """ + List all the configured roles + """ + from tabulate import tabulate + + table: list[Any] = [] + store = create_feature_store(ctx) + permissions = store.list_permissions() + if not verbose: + cli_utils.handler_list_all_permissions_roles( + permissions=permissions, table=table + ) + print( + tabulate( + table, + headers=[ + "ROLE NAME", + ], + tablefmt="grid", + ) + ) + else: + objects = cli_utils.fetch_all_feast_objects( + store=store, + ) + cli_utils.handler_list_all_permissions_roles_verbose( + objects=objects, permissions=permissions, table=table + ) + print( + tabulate( + table, + headers=[ + "ROLE NAME", + "RESOURCE NAME", + "RESOURCE TYPE", + "PERMITTED ACTIONS", + ], + tablefmt="plain", + ) + ) + + if __name__ == "__main__": cli() diff --git a/sdk/python/feast/cli_utils.py b/sdk/python/feast/cli_utils.py new file mode 100644 index 0000000000..edfdab93e3 --- /dev/null +++ b/sdk/python/feast/cli_utils.py @@ -0,0 +1,329 @@ +from typing import Any, Optional + +from bigtree import Node +from colorama 
import Fore, Style + +from feast import ( + BatchFeatureView, + FeatureService, + FeatureStore, + FeatureView, + OnDemandFeatureView, + StreamFeatureView, +) +from feast.feast_object import FeastObject +from feast.permissions.action import ALL_ACTIONS +from feast.permissions.decision import DecisionEvaluator +from feast.permissions.permission import Permission +from feast.permissions.policy import Policy, RoleBasedPolicy +from feast.permissions.user import User + + +def print_permission_verbose_example(): + print("") + print( + f"{Style.BRIGHT + Fore.GREEN}The structure of the {Style.BRIGHT + Fore.WHITE}feast-permissions list --verbose {Style.BRIGHT + Fore.GREEN}command will be as in the following example:" + ) + print("") + print(f"{Style.DIM}For example: {Style.RESET_ALL}{Style.BRIGHT + Fore.GREEN}") + print("") + explanation_root_node = Node("permissions") + explanation_permission_node = Node( + "permission_1" + " " + str(["role names list"]), + parent=explanation_root_node, + ) + Node( + FeatureView.__name__ + ": " + str(["feature view names"]), + parent=explanation_permission_node, + ) + Node(FeatureService.__name__ + ": none", parent=explanation_permission_node) + Node("..", parent=explanation_permission_node) + Node( + "permission_2" + " " + str(["role names list"]), + parent=explanation_root_node, + ) + Node("..", parent=explanation_root_node) + explanation_root_node.show() + print( + f""" +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------{Style.RESET_ALL} + """ + ) + + +def handle_sd_verbose_permissions_command( + feast_type: list[FeastObject], + p: Permission, + policy_node: Node, + store: FeatureStore, + tags_filter: Optional[dict[str, str]], +): + saved_datasets = store.list_saved_datasets(tags=tags_filter) + saved_datasets_names = set() + for sd in saved_datasets: + if p.match_resource(sd): + 
saved_datasets_names.add(sd.name) + if len(saved_datasets_names) > 0: + Node( + feast_type.__name__ + ": " + str(list(saved_datasets_names)), # type: ignore[union-attr, attr-defined] + parent=policy_node, + ) + else: + Node(feast_type.__name__ + ": none", parent=policy_node) # type: ignore[union-attr, attr-defined] + + +def handle_vr_verbose_permissions_command( + feast_type: list[FeastObject], + p: Permission, + policy_node: Node, + store: FeatureStore, + tags_filter: Optional[dict[str, str]], +): + validation_references = store.list_validation_references(tags=tags_filter) + validation_references_names = set() + for vr in validation_references: + if p.match_resource(vr): + validation_references_names.add(vr.name) + if len(validation_references_names) > 0: + Node( + feast_type.__name__ + ": " + str(list(validation_references_names)), # type: ignore[union-attr, attr-defined] + parent=policy_node, + ) + else: + Node(feast_type.__name__ + ": none", parent=policy_node) # type: ignore[union-attr, attr-defined] + + +def handle_ds_verbose_permissions_command( + feast_type: list[FeastObject], + p: Permission, + policy_node: Node, + store: FeatureStore, + tags_filter: Optional[dict[str, str]], +): + data_sources = store.list_data_sources(tags=tags_filter) + data_sources_names = set() + for ds in data_sources: + if p.match_resource(ds): + data_sources_names.add(ds.name) + if len(data_sources_names) > 0: + Node( + feast_type.__name__ + ": " + str(list(data_sources_names)), # type: ignore[union-attr, attr-defined] + parent=policy_node, + ) + else: + Node(feast_type.__name__ + ": none", parent=policy_node) # type: ignore[union-attr, attr-defined] + + +def handle_fs_verbose_permissions_command( + feast_type: list[FeastObject], + p: Permission, + policy_node: Node, + store: FeatureStore, + tags_filter: Optional[dict[str, str]], +): + feature_services = store.list_feature_services(tags=tags_filter) + feature_services_names = set() + for fs in feature_services: + if 
p.match_resource(fs): + feature_services_names.add(fs.name) + if len(feature_services_names) > 0: + Node( + feast_type.__name__ + ": " + str(list(feature_services_names)), # type: ignore[union-attr, attr-defined] + parent=policy_node, + ) + else: + Node(feast_type.__name__ + ": none", parent=policy_node) # type: ignore[union-attr, attr-defined] + + +def handle_entity_verbose_permissions_command( + feast_type: list[FeastObject], + p: Permission, + policy_node: Node, + store: FeatureStore, + tags_filter: Optional[dict[str, str]], +): + entities = store.list_entities(tags=tags_filter) + entities_names = set() + for e in entities: + if p.match_resource(e): + entities_names.add(e.name) + if len(entities_names) > 0: + Node(feast_type.__name__ + ": " + str(list(entities_names)), parent=policy_node) # type: ignore[union-attr, attr-defined] + else: + Node(feast_type.__name__ + ": none", parent=policy_node) # type: ignore[union-attr, attr-defined] + + +def handle_fv_verbose_permissions_command( + feast_type: list[FeastObject], + p: Permission, + policy_node: Node, + store: FeatureStore, + tags_filter: Optional[dict[str, str]], +): + feature_views = [] + feature_views_names = set() + if feast_type == FeatureView: + feature_views = store.list_all_feature_views(tags=tags_filter) # type: ignore[assignment] + elif feast_type == OnDemandFeatureView: + feature_views = store.list_on_demand_feature_views( + tags=tags_filter # type: ignore[assignment] + ) + elif feast_type == BatchFeatureView: + feature_views = store.list_batch_feature_views(tags=tags_filter) # type: ignore[assignment] + elif feast_type == StreamFeatureView: + feature_views = store.list_stream_feature_views( + tags=tags_filter # type: ignore[assignment] + ) + for fv in feature_views: + if p.match_resource(fv): + feature_views_names.add(fv.name) + if len(feature_views_names) > 0: + Node( + feast_type.__name__ + " " + str(list(feature_views_names)), # type: ignore[union-attr, attr-defined] + parent=policy_node, + ) + 
else: + Node(feast_type.__name__ + ": none", parent=policy_node) # type: ignore[union-attr, attr-defined] + + +def handle_not_verbose_permissions_command( + p: Permission, policy: Policy, table: list[Any] +): + roles: set[str] = set() + if isinstance(policy, RoleBasedPolicy): + roles = set(policy.get_roles()) + table.append( + [ + p.name, + _to_multi_line([t.__name__ for t in p.types]), # type: ignore[union-attr, attr-defined] + p.name_pattern, + _to_multi_line([a.value.upper() for a in p.actions]), + _to_multi_line(sorted(roles)), + _dict_to_multi_line(p.required_tags), + ], + ) + + +def fetch_all_feast_objects(store: FeatureStore) -> list[FeastObject]: + objects: list[FeastObject] = [] + objects.extend(store.list_entities()) + objects.extend(store.list_all_feature_views()) + objects.extend(store.list_batch_feature_views()) + objects.extend(store.list_feature_services()) + objects.extend(store.list_data_sources()) + objects.extend(store.list_validation_references()) + objects.extend(store.list_saved_datasets()) + objects.extend(store.list_permissions()) + return objects + + +def handle_permissions_check_command( + object: FeastObject, permissions: list[Permission], table: list[Any] +): + for p in permissions: + if p.match_resource(object): + return + table.append( + [ + object.name, + type(object).__name__, + ] + ) + + +def handle_permissions_check_command_with_actions( + object: FeastObject, permissions: list[Permission], table: list[Any] +): + unmatched_actions = ALL_ACTIONS.copy() + for p in permissions: + if p.match_resource(object): + for action in ALL_ACTIONS: + if p.match_actions([action]) and action in unmatched_actions: + unmatched_actions.remove(action) + + if unmatched_actions: + table.append( + [ + object.name, + type(object).__name__, + _to_multi_line([a.value.upper() for a in unmatched_actions]), + ] + ) + + +def fetch_all_permission_roles(permissions: list[Permission]) -> list[str]: + all_roles = set() + for p in permissions: + if 
isinstance(p.policy, RoleBasedPolicy) and len(p.policy.get_roles()) > 0: + all_roles.update(p.policy.get_roles()) + + return sorted(all_roles) + + +def handler_list_all_permissions_roles(permissions: list[Permission], table: list[Any]): + all_roles = fetch_all_permission_roles(permissions) + for role in all_roles: + table.append( + [ + role, + ] + ) + + +def handler_list_all_permissions_roles_verbose( + objects: list[FeastObject], permissions: list[Permission], table: list[Any] +): + all_roles = fetch_all_permission_roles(permissions) + + for role in all_roles: + for o in objects: + permitted_actions = ALL_ACTIONS.copy() + for action in ALL_ACTIONS: + # Following code is derived from enforcer.enforce_policy but has a different return type and does not raise PermissionError + matching_permissions = [ + p + for p in permissions + if p.match_resource(o) and p.match_actions([action]) + ] + + if matching_permissions: + evaluator = DecisionEvaluator( + len(matching_permissions), + ) + for p in matching_permissions: + permission_grant, permission_explanation = ( + p.policy.validate_user(user=User(username="", roles=[role])) + ) + evaluator.add_grant( + permission_grant, + f"Permission {p.name} denied access: {permission_explanation}", + ) + + if evaluator.is_decided(): + grant, explanations = evaluator.grant() + if not grant: + permitted_actions.remove(action) + break + else: + permitted_actions.remove(action) + + table.append( + [ + role, + o.name, + type(o).__name__, + _to_multi_line([a.value.upper() for a in permitted_actions]), + ] + ) + + +def _to_multi_line(values: list[str]) -> str: + if not values: + return "-" + return "\n".join(values) + + +def _dict_to_multi_line(values: dict[str, str]) -> str: + if not values: + return "-" + return "\n".join([f"{key} : {value}" for key, value in values.items()]) diff --git a/sdk/python/feast/diff/registry_diff.py b/sdk/python/feast/diff/registry_diff.py index 9236b087d4..6235025adc 100644 --- 
a/sdk/python/feast/diff/registry_diff.py +++ b/sdk/python/feast/diff/registry_diff.py @@ -10,6 +10,7 @@ from feast.feature_view import DUMMY_ENTITY_NAME from feast.infra.registry.base_registry import BaseRegistry from feast.infra.registry.registry import FEAST_OBJECT_TYPES, FeastObjectType +from feast.permissions.permission import Permission from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from feast.protos.feast.core.Entity_pb2 import Entity as EntityProto from feast.protos.feast.core.FeatureService_pb2 import ( @@ -20,6 +21,7 @@ OnDemandFeatureView as OnDemandFeatureViewProto, ) from feast.protos.feast.core.OnDemandFeatureView_pb2 import OnDemandFeatureViewSpec +from feast.protos.feast.core.Permission_pb2 import Permission as PermissionProto from feast.protos.feast.core.SavedDataset_pb2 import SavedDataset as SavedDatasetProto from feast.protos.feast.core.StreamFeatureView_pb2 import ( StreamFeatureView as StreamFeatureViewProto, @@ -111,6 +113,7 @@ def tag_objects_for_keep_delete_update_add( StreamFeatureViewProto, ValidationReferenceProto, SavedDatasetProto, + PermissionProto, ) @@ -354,6 +357,15 @@ def apply_diff_to_registry( project, commit=False, ) + elif feast_object_diff.feast_object_type == FeastObjectType.PERMISSION: + permission_obj = cast( + Permission, feast_object_diff.current_feast_object + ) + registry.delete_permission( + permission_obj.name, + project, + commit=False, + ) if feast_object_diff.transition_type in [ TransitionType.CREATE, @@ -387,6 +399,12 @@ def apply_diff_to_registry( project, commit=False, ) + elif feast_object_diff.feast_object_type == FeastObjectType.PERMISSION: + registry.apply_permission( + cast(Permission, feast_object_diff.new_feast_object), + project, + commit=False, + ) if commit: registry.commit() diff --git a/sdk/python/feast/errors.py b/sdk/python/feast/errors.py index c4c1157626..ffafe31125 100644 --- a/sdk/python/feast/errors.py +++ b/sdk/python/feast/errors.py @@ -223,6 +223,13 @@ def 
__init__(self, online_store_class_name: str): ) +class FeastInvalidAuthConfigClass(Exception): + def __init__(self, auth_config_class_name: str): + super().__init__( + f"Auth Config Class '{auth_config_class_name}' should end with the string `AuthConfig`.'" + ) + + class FeastInvalidBaseClass(Exception): def __init__(self, class_name: str, class_type: str): super().__init__( @@ -391,6 +398,19 @@ def __init__(self, input_dict: dict): ) +class PermissionNotFoundException(Exception): + def __init__(self, name, project): + super().__init__(f"Permission {name} does not exist in project {project}") + + +class PermissionObjectNotFoundException(FeastObjectNotFoundException): + def __init__(self, name, project=None): + if project: + super().__init__(f"Permission {name} does not exist in project {project}") + else: + super().__init__(f"Permission {name} does not exist") + + class ZeroRowsQueryResult(Exception): def __init__(self, query: str): super().__init__(f"This query returned zero rows:\n{query}") diff --git a/sdk/python/feast/feast_object.py b/sdk/python/feast/feast_object.py index d9505dcb9f..dfe29b7128 100644 --- a/sdk/python/feast/feast_object.py +++ b/sdk/python/feast/feast_object.py @@ -1,4 +1,4 @@ -from typing import Union +from typing import Union, get_args from .batch_feature_view import BatchFeatureView from .data_source import DataSource @@ -6,11 +6,13 @@ from .feature_service import FeatureService from .feature_view import FeatureView from .on_demand_feature_view import OnDemandFeatureView +from .permissions.permission import Permission from .protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from .protos.feast.core.Entity_pb2 import EntitySpecV2 from .protos.feast.core.FeatureService_pb2 import FeatureServiceSpec from .protos.feast.core.FeatureView_pb2 import FeatureViewSpec from .protos.feast.core.OnDemandFeatureView_pb2 import OnDemandFeatureViewSpec +from .protos.feast.core.Permission_pb2 import PermissionSpec as PermissionSpec from 
.protos.feast.core.SavedDataset_pb2 import SavedDatasetSpec from .protos.feast.core.StreamFeatureView_pb2 import StreamFeatureViewSpec from .protos.feast.core.ValidationProfile_pb2 import ( @@ -30,6 +32,7 @@ DataSource, ValidationReference, SavedDataset, + Permission, ] FeastObjectSpecProto = Union[ @@ -41,4 +44,13 @@ DataSourceProto, ValidationReferenceProto, SavedDatasetSpec, + PermissionSpec, +] + +ALL_RESOURCE_TYPES = list(get_args(FeastObject)) +ALL_FEATURE_VIEW_TYPES = [ + FeatureView, + OnDemandFeatureView, + BatchFeatureView, + StreamFeatureView, ] diff --git a/sdk/python/feast/feature_server.py b/sdk/python/feast/feature_server.py index 908c9741c2..7f24580b7a 100644 --- a/sdk/python/feast/feature_server.py +++ b/sdk/python/feast/feature_server.py @@ -9,9 +9,8 @@ import pandas as pd import psutil from dateutil import parser -from fastapi import FastAPI, HTTPException, Request, Response, status +from fastapi import Depends, FastAPI, HTTPException, Request, Response, status from fastapi.logger import logger -from fastapi.params import Depends from google.protobuf.json_format import MessageToDict from prometheus_client import Gauge, start_http_server from pydantic import BaseModel @@ -20,7 +19,16 @@ from feast import proto_json, utils from feast.constants import DEFAULT_FEATURE_SERVER_REGISTRY_TTL from feast.data_source import PushMode -from feast.errors import PushSourceNotFoundException +from feast.errors import FeatureViewNotFoundException, PushSourceNotFoundException +from feast.permissions.action import WRITE, AuthzedAction +from feast.permissions.security_manager import assert_permissions +from feast.permissions.server.rest import inject_user_details +from feast.permissions.server.utils import ( + ServerType, + init_auth_manager, + init_security_manager, + str_to_auth_manager_type, +) # Define prometheus metrics cpu_usage_gauge = Gauge( @@ -93,23 +101,48 @@ async def lifespan(app: FastAPI): async def get_body(request: Request): return await 
request.body() - @app.post("/get-online-features") + # TODO RBAC: complete the dependencies for the other endpoints + @app.post( + "/get-online-features", + dependencies=[Depends(inject_user_details)], + ) def get_online_features(body=Depends(get_body)): try: body = json.loads(body) + full_feature_names = body.get("full_feature_names", False) + entity_rows = body["entities"] # Initialize parameters for FeatureStore.get_online_features(...) call if "feature_service" in body: - features = store.get_feature_service( + feature_service = store.get_feature_service( body["feature_service"], allow_cache=True ) + assert_permissions( + resource=feature_service, actions=[AuthzedAction.READ_ONLINE] + ) + features = feature_service else: features = body["features"] - - full_feature_names = body.get("full_feature_names", False) + all_feature_views, all_on_demand_feature_views = ( + utils._get_feature_views_to_use( + store.registry, + store.project, + features, + allow_cache=True, + hide_dummy_entity=False, + ) + ) + for feature_view in all_feature_views: + assert_permissions( + resource=feature_view, actions=[AuthzedAction.READ_ONLINE] + ) + for od_feature_view in all_on_demand_feature_views: + assert_permissions( + resource=od_feature_view, actions=[AuthzedAction.READ_ONLINE] + ) response_proto = store.get_online_features( features=features, - entity_rows=body["entities"], + entity_rows=entity_rows, full_feature_names=full_feature_names, ).proto @@ -123,21 +156,46 @@ def get_online_features(body=Depends(get_body)): # Raise HTTPException to return the error message to the client raise HTTPException(status_code=500, detail=str(e)) - @app.post("/push") + @app.post("/push", dependencies=[Depends(inject_user_details)]) def push(body=Depends(get_body)): try: request = PushFeaturesRequest(**json.loads(body)) df = pd.DataFrame(request.df) + actions = [] if request.to == "offline": to = PushMode.OFFLINE + actions = [AuthzedAction.WRITE_OFFLINE] elif request.to == "online": to = 
PushMode.ONLINE + actions = [AuthzedAction.WRITE_ONLINE] elif request.to == "online_and_offline": to = PushMode.ONLINE_AND_OFFLINE + actions = WRITE else: raise ValueError( f"{request.to} is not a supported push format. Please specify one of these ['online', 'offline', 'online_and_offline']." ) + + from feast.data_source import PushSource + + all_fvs = store.list_feature_views( + allow_cache=request.allow_registry_cache + ) + store.list_stream_feature_views( + allow_cache=request.allow_registry_cache + ) + fvs_with_push_sources = { + fv + for fv in all_fvs + if ( + fv.stream_source is not None + and isinstance(fv.stream_source, PushSource) + and fv.stream_source.name == request.push_source_name + ) + } + + for feature_view in fvs_with_push_sources: + assert_permissions(resource=feature_view, actions=actions) + store.push( push_source_name=request.push_source_name, df=df, @@ -155,15 +213,29 @@ def push(body=Depends(get_body)): # Raise HTTPException to return the error message to the client raise HTTPException(status_code=500, detail=str(e)) - @app.post("/write-to-online-store") + @app.post("/write-to-online-store", dependencies=[Depends(inject_user_details)]) def write_to_online_store(body=Depends(get_body)): try: request = WriteToFeatureStoreRequest(**json.loads(body)) df = pd.DataFrame(request.df) + feature_view_name = request.feature_view_name + allow_registry_cache = request.allow_registry_cache + try: + feature_view = store.get_stream_feature_view( + feature_view_name, allow_registry_cache=allow_registry_cache + ) + except FeatureViewNotFoundException: + feature_view = store.get_feature_view( + feature_view_name, allow_registry_cache=allow_registry_cache + ) + + assert_permissions( + resource=feature_view, actions=[AuthzedAction.WRITE_ONLINE] + ) store.write_to_online_store( - feature_view_name=request.feature_view_name, + feature_view_name=feature_view_name, df=df, - allow_registry_cache=request.allow_registry_cache, + 
allow_registry_cache=allow_registry_cache, ) except Exception as e: # Print the original exception on the server side @@ -175,10 +247,14 @@ def write_to_online_store(body=Depends(get_body)): def health(): return Response(status_code=status.HTTP_200_OK) - @app.post("/materialize") + @app.post("/materialize", dependencies=[Depends(inject_user_details)]) def materialize(body=Depends(get_body)): try: request = MaterializeRequest(**json.loads(body)) + for feature_view in request.feature_views: + assert_permissions( + resource=feature_view, actions=[AuthzedAction.WRITE_ONLINE] + ) store.materialize( utils.make_tzaware(parser.parse(request.start_ts)), utils.make_tzaware(parser.parse(request.end_ts)), @@ -190,10 +266,14 @@ def materialize(body=Depends(get_body)): # Raise HTTPException to return the error message to the client raise HTTPException(status_code=500, detail=str(e)) - @app.post("/materialize-incremental") + @app.post("/materialize-incremental", dependencies=[Depends(inject_user_details)]) def materialize_incremental(body=Depends(get_body)): try: request = MaterializeIncrementalRequest(**json.loads(body)) + for feature_view in request.feature_views: + assert_permissions( + resource=feature_view, actions=[AuthzedAction.WRITE_ONLINE] + ) store.materialize_incremental( utils.make_tzaware(parser.parse(request.end_ts)), request.feature_views ) @@ -231,15 +311,15 @@ def load(self): def monitor_resources(self, interval: int = 5): """Function to monitor and update CPU and memory usage metrics.""" - print(f"Start monitor_resources({interval})") + logger.debug(f"Starting resource monitoring with interval {interval} seconds") p = psutil.Process() - print(f"PID is {p.pid}") + logger.debug(f"PID is {p.pid}") while True: with p.oneshot(): cpu_usage = p.cpu_percent() memory_usage = p.memory_percent() - print(f"cpu_usage is {cpu_usage}") - print(f"memory_usage is {memory_usage}") + logger.debug(f"CPU usage: {cpu_usage}%, Memory usage: {memory_usage}%") + logger.debug(f"CPU 
usage: {cpu_usage}%, Memory usage: {memory_usage}%") cpu_usage_gauge.set(cpu_usage) memory_usage_gauge.set(memory_usage) time.sleep(interval) @@ -256,15 +336,27 @@ def start_server( metrics: bool, ): if metrics: - print("Start Prometheus Server") + logger.info("Starting Prometheus Server") start_http_server(8000) - print("Start a background thread to monitor CPU and memory usage") + logger.debug("Starting background thread to monitor CPU and memory usage") monitoring_thread = threading.Thread( target=monitor_resources, args=(5,), daemon=True ) monitoring_thread.start() + logger.debug("start_server called") + auth_type = str_to_auth_manager_type(store.config.auth_config.type) + logger.info(f"Auth type: {auth_type}") + init_security_manager(auth_type=auth_type, fs=store) + logger.debug("Security manager initialized successfully") + init_auth_manager( + auth_type=auth_type, + server_type=ServerType.REST, + auth_config=store.config.auth_config, + ) + logger.debug("Auth manager initialized successfully") + if sys.platform != "win32": FeastServeApplication( store=store, diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 77638f5a62..a03706e56f 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -76,6 +76,7 @@ from feast.infra.registry.sql import SqlRegistry from feast.on_demand_feature_view import OnDemandFeatureView from feast.online_response import OnlineResponse +from feast.permissions.permission import Permission from feast.protos.feast.core.InfraObject_pb2 import Infra as InfraProto from feast.protos.feast.serving.ServingService_pb2 import ( FieldStatus, @@ -157,9 +158,16 @@ def __init__( elif registry_config and registry_config.registry_type == "remote": from feast.infra.registry.remote import RemoteRegistry - self._registry = RemoteRegistry(registry_config, self.config.project, None) + self._registry = RemoteRegistry( + registry_config, self.config.project, None, self.config.auth_config + ) 
else: - r = Registry(self.config.project, registry_config, repo_path=self.repo_path) + r = Registry( + self.config.project, + registry_config, + repo_path=self.repo_path, + auth_config=self.config.auth_config, + ) r._initialize_registry(self.config.project) self._registry = r @@ -199,7 +207,10 @@ def refresh_registry(self): """ registry_config = self.config.registry registry = Registry( - self.config.project, registry_config, repo_path=self.repo_path + self.config.project, + registry_config, + repo_path=self.repo_path, + auth_config=self.config.auth_config, ) registry.refresh(self.config.project) @@ -734,7 +745,8 @@ def plan( ... on_demand_feature_views=list(), ... stream_feature_views=list(), ... entities=[driver], - ... feature_services=list())) # register entity and feature view + ... feature_services=list(), + ... permissions=list())) # register entity and feature view """ # Validate and run inference on all the objects to be registered. self._validate_all_feature_views( @@ -798,6 +810,7 @@ def apply( StreamFeatureView, FeatureService, ValidationReference, + Permission, List[FeastObject], ], objects_to_delete: Optional[List[FeastObject]] = None, @@ -869,6 +882,7 @@ def apply( validation_references_to_update = [ ob for ob in objects if isinstance(ob, ValidationReference) ] + permissions_to_update = [ob for ob in objects if isinstance(ob, Permission)] batch_sources_to_add: List[DataSource] = [] for data_source in data_sources_set_to_update: @@ -924,10 +938,15 @@ def apply( self._registry.apply_validation_reference( validation_references, project=self.project, commit=False ) + for permission in permissions_to_update: + self._registry.apply_permission( + permission, project=self.project, commit=False + ) entities_to_delete = [] views_to_delete = [] sfvs_to_delete = [] + permissions_to_delete = [] if not partial: # Delete all registry objects that should not exist. 
entities_to_delete = [ @@ -956,6 +975,9 @@ def apply( validation_references_to_delete = [ ob for ob in objects_to_delete if isinstance(ob, ValidationReference) ] + permissions_to_delete = [ + ob for ob in objects_to_delete if isinstance(ob, Permission) + ] for data_source in data_sources_to_delete: self._registry.delete_data_source( @@ -985,6 +1007,10 @@ def apply( self._registry.delete_validation_reference( validation_references.name, project=self.project, commit=False ) + for permission in permissions_to_delete: + self._registry.delete_permission( + permission.name, project=self.project, commit=False + ) tables_to_delete: List[FeatureView] = ( views_to_delete + sfvs_to_delete if not partial else [] # type: ignore @@ -1915,6 +1941,72 @@ def get_validation_reference( ref._dataset = self.get_saved_dataset(ref.dataset_name) return ref + def list_validation_references( + self, allow_cache: bool = False, tags: Optional[dict[str, str]] = None + ) -> List[ValidationReference]: + """ + Retrieves the list of validation references from the registry. + + Args: + allow_cache: Whether to allow returning validation references from a cached registry. + tags: Filter by tags. + + Returns: + A list of validation references. + """ + return self._registry.list_validation_references( + self.project, allow_cache=allow_cache, tags=tags + ) + + def list_permissions( + self, allow_cache: bool = False, tags: Optional[dict[str, str]] = None + ) -> List[Permission]: + """ + Retrieves the list of permissions from the registry. + + Args: + allow_cache: Whether to allow returning permissions from a cached registry. + tags: Filter by tags. + + Returns: + A list of permissions. + """ + return self._registry.list_permissions( + self.project, allow_cache=allow_cache, tags=tags + ) + + def get_permission(self, name: str) -> Permission: + """ + Retrieves a permission from the registry. + + Args: + name: Name of the permission. + + Returns: + The specified permission. 
+ + Raises: + PermissionObjectNotFoundException: The permission could not be found. + """ + return self._registry.get_permission(name, self.project) + + def list_saved_datasets( + self, allow_cache: bool = False, tags: Optional[dict[str, str]] = None + ) -> List[SavedDataset]: + """ + Retrieves the list of saved datasets from the registry. + + Args: + allow_cache: Whether to allow returning saved datasets from a cached registry. + tags: Filter by tags. + + Returns: + A list of saved datasets. + """ + return self._registry.list_saved_datasets( + self.project, allow_cache=allow_cache, tags=tags + ) + def _print_materialization_log( start_date, end_date, num_feature_views: int, online_store: str diff --git a/sdk/python/feast/infra/offline_stores/remote.py b/sdk/python/feast/infra/offline_stores/remote.py index dc657017d9..40239c8950 100644 --- a/sdk/python/feast/infra/offline_stores/remote.py +++ b/sdk/python/feast/infra/offline_stores/remote.py @@ -27,6 +27,9 @@ RetrievalMetadata, ) from feast.infra.registry.base_registry import BaseRegistry +from feast.permissions.client.arrow_flight_auth_interceptor import ( + build_arrow_flight_client, +) from feast.repo_config import FeastConfigBaseModel, RepoConfig from feast.saved_dataset import SavedDatasetStorage @@ -69,7 +72,11 @@ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: # This is where do_get service is invoked def _to_arrow_internal(self, timeout: Optional[int] = None) -> pa.Table: return _send_retrieve_remote( - self.api, self.api_parameters, self.entity_df, self.table, self.client + self.api, + self.api_parameters, + self.entity_df, + self.table, + self.client, ) @property @@ -128,8 +135,9 @@ def get_historical_features( ) -> RemoteRetrievalJob: assert isinstance(config.offline_store, RemoteOfflineStoreConfig) - # Initialize the client connection - client = RemoteOfflineStore.init_client(config) + client = build_arrow_flight_client( + config.offline_store.host, config.offline_store.port, 
config.auth_config + ) feature_view_names = [fv.name for fv in feature_views] name_aliases = [fv.projection.name_alias for fv in feature_views] @@ -163,7 +171,9 @@ def pull_all_from_table_or_query( assert isinstance(config.offline_store, RemoteOfflineStoreConfig) # Initialize the client connection - client = RemoteOfflineStore.init_client(config) + client = build_arrow_flight_client( + config.offline_store.host, config.offline_store.port, config.auth_config + ) api_parameters = { "data_source_name": data_source.name, @@ -194,7 +204,9 @@ def pull_latest_from_table_or_query( assert isinstance(config.offline_store, RemoteOfflineStoreConfig) # Initialize the client connection - client = RemoteOfflineStore.init_client(config) + client = build_arrow_flight_client( + config.offline_store.host, config.offline_store.port, config.auth_config + ) api_parameters = { "data_source_name": data_source.name, @@ -227,7 +239,9 @@ def write_logged_features( data = pyarrow.parquet.read_table(data, use_threads=False, pre_buffer=False) # Initialize the client connection - client = RemoteOfflineStore.init_client(config) + client = build_arrow_flight_client( + config.offline_store.host, config.offline_store.port, config.auth_config + ) api_parameters = { "feature_service_name": source._feature_service.name, @@ -251,7 +265,9 @@ def offline_write_batch( assert isinstance(config.offline_store, RemoteOfflineStoreConfig) # Initialize the client connection - client = RemoteOfflineStore.init_client(config) + client = build_arrow_flight_client( + config.offline_store.host, config.offline_store.port, config.auth_config + ) feature_view_names = [feature_view.name] name_aliases = [feature_view.projection.name_alias] @@ -270,13 +286,6 @@ def offline_write_batch( entity_df=None, ) - @staticmethod - def init_client(config): - location = f"grpc://{config.offline_store.host}:{config.offline_store.port}" - client = fl.connect(location=location) - logger.info(f"Connecting FlightClient at {location}") - 
return client - def _create_retrieval_metadata(feature_refs: List[str], entity_df: pd.DataFrame): entity_schema = _get_entity_schema( @@ -331,11 +340,20 @@ def _send_retrieve_remote( table: pa.Table, client: fl.FlightClient, ): - command_descriptor = _call_put(api, api_parameters, client, entity_df, table) + command_descriptor = _call_put( + api, + api_parameters, + client, + entity_df, + table, + ) return _call_get(client, command_descriptor) -def _call_get(client: fl.FlightClient, command_descriptor: fl.FlightDescriptor): +def _call_get( + client: fl.FlightClient, + command_descriptor: fl.FlightDescriptor, +): flight = client.get_flight_info(command_descriptor) ticket = flight.endpoints[0].ticket reader = client.do_get(ticket) @@ -384,10 +402,7 @@ def _put_parameters( else: updatedTable = _create_empty_table() - writer, _ = client.do_put( - command_descriptor, - updatedTable.schema, - ) + writer, _ = client.do_put(command_descriptor, updatedTable.schema) writer.write_table(updatedTable) writer.close() diff --git a/sdk/python/feast/infra/online_stores/remote.py b/sdk/python/feast/infra/online_stores/remote.py index 19e1b7d515..93fbcaf771 100644 --- a/sdk/python/feast/infra/online_stores/remote.py +++ b/sdk/python/feast/infra/online_stores/remote.py @@ -16,11 +16,13 @@ from datetime import datetime from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple -import requests from pydantic import StrictStr from feast import Entity, FeatureView, RepoConfig from feast.infra.online_stores.online_store import OnlineStore +from feast.permissions.client.http_auth_requests_wrapper import ( + get_http_auth_requests_session, +) from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import FeastConfigBaseModel @@ -70,7 +72,7 @@ def online_read( req_body = self._construct_online_read_api_json_request( entity_keys, table, requested_features ) - 
response = requests.post( + response = get_http_auth_requests_session(config.auth_config).post( f"{config.online_store.path}/get-online-features", data=req_body ) if response.status_code == 200: diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index 03bec64830..33adb6b7c9 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -28,6 +28,7 @@ from feast.feature_view import FeatureView from feast.infra.infra_object import Infra from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.permission import Permission from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.Entity_pb2 import Entity as EntityProto from feast.protos.feast.core.FeatureService_pb2 import ( @@ -37,6 +38,7 @@ from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( OnDemandFeatureView as OnDemandFeatureViewProto, ) +from feast.protos.feast.core.Permission_pb2 import Permission as PermissionProto from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.core.SavedDataset_pb2 import SavedDataset as SavedDatasetProto from feast.protos.feast.core.StreamFeatureView_pb2 import ( @@ -457,7 +459,10 @@ def delete_saved_dataset(self, name: str, project: str, commit: bool = True): @abstractmethod def list_saved_datasets( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[SavedDataset]: """ Retrieves a list of all saved datasets in specified project @@ -465,6 +470,7 @@ def list_saved_datasets( Args: project: Feast project allow_cache: Whether to allow returning this dataset from a cached registry + tags: Filter by tags Returns: Returns the list of SavedDatasets @@ -521,17 +527,21 @@ def get_validation_reference( # TODO: Needs to be implemented. 
def list_validation_references( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[ValidationReference]: """ Retrieve a list of validation references from the registry Args: - allow_cache: Allow returning feature views from the cached registry - project: Filter feature views based on project name + project: Filter validation references based on project name + allow_cache: Allow returning validation references from the cached registry + tags: Filter by tags Returns: - List of request feature views + List of request validation references """ raise NotImplementedError @@ -590,6 +600,69 @@ def get_user_metadata( self, project: str, feature_view: BaseFeatureView ) -> Optional[bytes]: ... + # Permission operations + @abstractmethod + def apply_permission( + self, permission: Permission, project: str, commit: bool = True + ): + """ + Registers a single permission with Feast + + Args: + permission: A permission that will be registered + project: Feast project that this permission belongs to + commit: Whether to immediately commit to the registry + """ + raise NotImplementedError + + @abstractmethod + def delete_permission(self, name: str, project: str, commit: bool = True): + """ + Deletes a permission or raises an exception if not found. + + Args: + name: Name of permission + project: Feast project that this permission belongs to + commit: Whether the change should be persisted immediately + """ + raise NotImplementedError + + @abstractmethod + def get_permission( + self, name: str, project: str, allow_cache: bool = False + ) -> Permission: + """ + Retrieves a permission. 
+ + Args: + name: Name of permission + project: Feast project that this permission belongs to + allow_cache: Whether to allow returning this permission from a cached registry + + Returns: + Returns either the specified permission, or raises an exception if none is found + """ + raise NotImplementedError + + @abstractmethod + def list_permissions( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Permission]: + """ + Retrieve a list of permissions from the registry + + Args: + project: Filter permission based on project name + allow_cache: Whether to allow returning permissions from a cached registry + + Returns: + List of permissions + """ + raise NotImplementedError + @abstractmethod def proto(self) -> RegistryProto: """ @@ -716,6 +789,13 @@ def to_dict(self, project: str) -> Dict[str, List[Any]]: registry_dict["infra"].append( self._message_to_sorted_dict(infra_object.to_proto()) ) + for permission in sorted( + self.list_permissions(project=project), key=lambda ds: ds.name + ): + registry_dict["permissions"].append( + self._message_to_sorted_dict(permission.to_proto()) + ) + return registry_dict @staticmethod @@ -732,4 +812,6 @@ def deserialize_registry_values(serialized_proto, feast_obj_type) -> Any: return OnDemandFeatureViewProto.FromString(serialized_proto) if feast_obj_type == FeatureService: return FeatureServiceProto.FromString(serialized_proto) + if feast_obj_type == Permission: + return PermissionProto.FromString(serialized_proto) return None diff --git a/sdk/python/feast/infra/registry/caching_registry.py b/sdk/python/feast/infra/registry/caching_registry.py index 298639028d..611d67de96 100644 --- a/sdk/python/feast/infra/registry/caching_registry.py +++ b/sdk/python/feast/infra/registry/caching_registry.py @@ -14,6 +14,7 @@ from feast.infra.registry import proto_registry_utils from feast.infra.registry.base_registry import BaseRegistry from feast.on_demand_feature_view import OnDemandFeatureView 
+from feast.permissions.permission import Permission from feast.project_metadata import ProjectMetadata from feast.saved_dataset import SavedDataset, ValidationReference from feast.stream_feature_view import StreamFeatureView @@ -249,18 +250,23 @@ def get_saved_dataset( return self._get_saved_dataset(name, project) @abstractmethod - def _list_saved_datasets(self, project: str) -> List[SavedDataset]: + def _list_saved_datasets( + self, project: str, tags: Optional[dict[str, str]] = None + ) -> List[SavedDataset]: pass def list_saved_datasets( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[SavedDataset]: if allow_cache: self._refresh_cached_registry_if_necessary() return proto_registry_utils.list_saved_datasets( - self.cached_registry_proto, project + self.cached_registry_proto, project, tags ) - return self._list_saved_datasets(project) + return self._list_saved_datasets(project, tags) @abstractmethod def _get_validation_reference(self, name: str, project: str) -> ValidationReference: @@ -277,18 +283,23 @@ def get_validation_reference( return self._get_validation_reference(name, project) @abstractmethod - def _list_validation_references(self, project: str) -> List[ValidationReference]: + def _list_validation_references( + self, project: str, tags: Optional[dict[str, str]] = None + ) -> List[ValidationReference]: pass def list_validation_references( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[ValidationReference]: if allow_cache: self._refresh_cached_registry_if_necessary() return proto_registry_utils.list_validation_references( - self.cached_registry_proto, project + self.cached_registry_proto, project, tags ) - return self._list_validation_references(project) + return self._list_validation_references(project, tags) @abstractmethod def 
_list_project_metadata(self, project: str) -> List[ProjectMetadata]: @@ -311,6 +322,39 @@ def _get_infra(self, project: str) -> Infra: def get_infra(self, project: str, allow_cache: bool = False) -> Infra: return self._get_infra(project) + @abstractmethod + def _get_permission(self, name: str, project: str) -> Permission: + pass + + def get_permission( + self, name: str, project: str, allow_cache: bool = False + ) -> Permission: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_permission( + self.cached_registry_proto, name, project + ) + return self._get_permission(name, project) + + @abstractmethod + def _list_permissions( + self, project: str, tags: Optional[dict[str, str]] + ) -> List[Permission]: + pass + + def list_permissions( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Permission]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_permissions( + self.cached_registry_proto, project, tags + ) + return self._list_permissions(project, tags) + def refresh(self, project: Optional[str] = None): if project: project_metadata = proto_registry_utils.get_project_metadata( diff --git a/sdk/python/feast/infra/registry/proto_registry_utils.py b/sdk/python/feast/infra/registry/proto_registry_utils.py index 0e85f5b0a9..f67808aab5 100644 --- a/sdk/python/feast/infra/registry/proto_registry_utils.py +++ b/sdk/python/feast/infra/registry/proto_registry_utils.py @@ -10,12 +10,14 @@ EntityNotFoundException, FeatureServiceNotFoundException, FeatureViewNotFoundException, + PermissionObjectNotFoundException, SavedDatasetNotFound, ValidationReferenceNotFound, ) from feast.feature_service import FeatureService from feast.feature_view import FeatureView from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.permission import Permission from feast.project_metadata import ProjectMetadata from 
feast.protos.feast.core.Registry_pb2 import ProjectMetadata as ProjectMetadataProto from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto @@ -252,24 +254,28 @@ def list_data_sources( return data_sources -@registry_proto_cache +@registry_proto_cache_with_tags def list_saved_datasets( - registry_proto: RegistryProto, project: str + registry_proto: RegistryProto, project: str, tags: Optional[dict[str, str]] ) -> List[SavedDataset]: saved_datasets = [] for saved_dataset in registry_proto.saved_datasets: - if saved_dataset.spec.project == project: + if saved_dataset.spec.project == project and utils.has_all_tags( + saved_dataset.tags, tags + ): saved_datasets.append(SavedDataset.from_proto(saved_dataset)) return saved_datasets -@registry_proto_cache +@registry_proto_cache_with_tags def list_validation_references( - registry_proto: RegistryProto, project: str + registry_proto: RegistryProto, project: str, tags: Optional[dict[str, str]] ) -> List[ValidationReference]: validation_references = [] for validation_reference in registry_proto.validation_references: - if validation_reference.project == project: + if validation_reference.project == project and utils.has_all_tags( + validation_reference.tags, tags + ): validation_references.append( ValidationReference.from_proto(validation_reference) ) @@ -285,3 +291,28 @@ def list_project_metadata( for project_metadata in registry_proto.project_metadata if project_metadata.project == project ] + + +@registry_proto_cache_with_tags +def list_permissions( + registry_proto: RegistryProto, project: str, tags: Optional[dict[str, str]] +) -> List[Permission]: + permissions = [] + for permission_proto in registry_proto.permissions: + if permission_proto.spec.project == project and utils.has_all_tags( + permission_proto.spec.tags, tags + ): + permissions.append(Permission.from_proto(permission_proto)) + return permissions + + +def get_permission( + registry_proto: RegistryProto, name: str, project: str +) -> 
Permission: + for permission_proto in registry_proto.permissions: + if ( + permission_proto.spec.project == project + and permission_proto.spec.name == name + ): + return Permission.from_proto(permission_proto) + raise PermissionObjectNotFoundException(name=name, project=project) diff --git a/sdk/python/feast/infra/registry/registry.py b/sdk/python/feast/infra/registry/registry.py index fe44e6253a..366f3aacaa 100644 --- a/sdk/python/feast/infra/registry/registry.py +++ b/sdk/python/feast/infra/registry/registry.py @@ -31,6 +31,7 @@ EntityNotFoundException, FeatureServiceNotFoundException, FeatureViewNotFoundException, + PermissionNotFoundException, ValidationReferenceNotFound, ) from feast.feature_service import FeatureService @@ -41,6 +42,8 @@ from feast.infra.registry.base_registry import BaseRegistry from feast.infra.registry.registry_store import NoopRegistryStore from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.auth_model import AuthConfig, NoAuthConfig +from feast.permissions.permission import Permission from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.repo_config import RegistryConfig @@ -73,6 +76,7 @@ class FeastObjectType(Enum): ON_DEMAND_FEATURE_VIEW = "on demand feature view" STREAM_FEATURE_VIEW = "stream feature view" FEATURE_SERVICE = "feature service" + PERMISSION = "permission" @staticmethod def get_objects_from_registry( @@ -91,6 +95,7 @@ def get_objects_from_registry( FeastObjectType.FEATURE_SERVICE: registry.list_feature_services( project=project ), + FeastObjectType.PERMISSION: registry.list_permissions(project=project), } @staticmethod @@ -104,6 +109,7 @@ def get_objects_from_repo_contents( FeastObjectType.ON_DEMAND_FEATURE_VIEW: repo_contents.on_demand_feature_views, FeastObjectType.STREAM_FEATURE_VIEW: repo_contents.stream_feature_views, FeastObjectType.FEATURE_SERVICE: repo_contents.feature_services, + 
FeastObjectType.PERMISSION: repo_contents.permissions, } @@ -160,6 +166,7 @@ def __new__( project: str, registry_config: Optional[RegistryConfig], repo_path: Optional[Path], + auth_config: AuthConfig = NoAuthConfig(), ): # We override __new__ so that we can inspect registry_config and create a SqlRegistry without callers # needing to make any changes. @@ -174,7 +181,7 @@ def __new__( elif registry_config and registry_config.registry_type == "remote": from feast.infra.registry.remote import RemoteRegistry - return RemoteRegistry(registry_config, project, repo_path) + return RemoteRegistry(registry_config, project, repo_path, auth_config) else: return super(Registry, cls).__new__(cls) @@ -183,6 +190,7 @@ def __init__( project: str, registry_config: Optional[RegistryConfig], repo_path: Optional[Path], + auth_config: AuthConfig = NoAuthConfig(), ): """ Create the Registry object. @@ -194,6 +202,7 @@ def __init__( """ self._refresh_lock = Lock() + self._auth_config = auth_config if registry_config: registry_store_type = registry_config.registry_store_type @@ -211,7 +220,7 @@ def __init__( ) def clone(self) -> "Registry": - new_registry = Registry("project", None, None) + new_registry = Registry("project", None, None, self._auth_config) new_registry.cached_registry_proto_ttl = timedelta(seconds=0) new_registry.cached_registry_proto = ( self.cached_registry_proto.__deepcopy__() @@ -307,9 +316,6 @@ def apply_data_source( if existing_data_source_proto.name == data_source.name: del registry.data_sources[idx] data_source_proto = data_source.to_proto() - data_source_proto.data_source_class_type = ( - f"{data_source.__class__.__module__}.{data_source.__class__.__name__}" - ) data_source_proto.project = project data_source_proto.data_source_class_type = ( f"{data_source.__class__.__module__}.{data_source.__class__.__name__}" @@ -709,12 +715,15 @@ def get_saved_dataset( return proto_registry_utils.get_saved_dataset(registry_proto, name, project) def list_saved_datasets( - self, 
project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[SavedDataset]: registry_proto = self._get_registry_proto( project=project, allow_cache=allow_cache ) - return proto_registry_utils.list_saved_datasets(registry_proto, project) + return proto_registry_utils.list_saved_datasets(registry_proto, project, tags) def apply_validation_reference( self, @@ -751,12 +760,17 @@ def get_validation_reference( ) def list_validation_references( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[ValidationReference]: registry_proto = self._get_registry_proto( project=project, allow_cache=allow_cache ) - return proto_registry_utils.list_validation_references(registry_proto, project) + return proto_registry_utils.list_validation_references( + registry_proto, project, tags + ) def delete_validation_reference(self, name: str, project: str, commit: bool = True): registry_proto = self._prepare_registry_for_changes(project) @@ -905,3 +919,62 @@ def _existing_feature_view_names_to_fvs(self) -> Dict[str, Message]: fv.spec.name: fv for fv in self.cached_registry_proto.stream_feature_views } return {**odfvs, **fvs, **sfv} + + def get_permission( + self, name: str, project: str, allow_cache: bool = False + ) -> Permission: + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) + return proto_registry_utils.get_permission(registry_proto, name, project) + + def list_permissions( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Permission]: + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) + return proto_registry_utils.list_permissions(registry_proto, project, tags) + + def apply_permission( + self, permission: Permission, project: str, commit: bool = True + ): + now = 
_utc_now() + if not permission.created_timestamp: + permission.created_timestamp = now + permission.last_updated_timestamp = now + + registry = self._prepare_registry_for_changes(project) + for idx, existing_permission_proto in enumerate(registry.permissions): + if ( + existing_permission_proto.spec.name == permission.name + and existing_permission_proto.spec.project == project + ): + permission.created_timestamp = ( + existing_permission_proto.meta.created_timestamp.ToDatetime() + ) + del registry.permissions[idx] + + permission_proto = permission.to_proto() + permission_proto.spec.project = project + registry.permissions.append(permission_proto) + if commit: + self.commit() + + def delete_permission(self, name: str, project: str, commit: bool = True): + self._prepare_registry_for_changes(project) + assert self.cached_registry_proto + + for idx, permission_proto in enumerate(self.cached_registry_proto.permissions): + if ( + permission_proto.spec.name == name + and permission_proto.spec.project == project + ): + del self.cached_registry_proto.permissions[idx] + if commit: + self.commit() + return + raise PermissionNotFoundException(name, project) diff --git a/sdk/python/feast/infra/registry/remote.py b/sdk/python/feast/infra/registry/remote.py index 9fa6d8ebee..618628bc07 100644 --- a/sdk/python/feast/infra/registry/remote.py +++ b/sdk/python/feast/infra/registry/remote.py @@ -15,6 +15,15 @@ from feast.infra.infra_object import Infra from feast.infra.registry.base_registry import BaseRegistry from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.auth.auth_type import AuthType +from feast.permissions.auth_model import ( + AuthConfig, + NoAuthConfig, +) +from feast.permissions.client.grpc_client_auth_interceptor import ( + GrpcClientAuthHeaderInterceptor, +) +from feast.permissions.permission import Permission from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto 
from feast.protos.feast.registry import RegistryServer_pb2, RegistryServer_pb2_grpc @@ -38,31 +47,32 @@ def __init__( registry_config: Union[RegistryConfig, RemoteRegistryConfig], project: str, repo_path: Optional[Path], + auth_config: AuthConfig = NoAuthConfig(), ): - self.channel = grpc.insecure_channel(registry_config.path) - self.stub = RegistryServer_pb2_grpc.RegistryServerStub(self.channel) + self.auth_config = auth_config + channel = grpc.insecure_channel(registry_config.path) + if self.auth_config.type != AuthType.NONE.value: + auth_header_interceptor = GrpcClientAuthHeaderInterceptor(auth_config) + channel = grpc.intercept_channel(channel, auth_header_interceptor) + self.stub = RegistryServer_pb2_grpc.RegistryServerStub(channel) def apply_entity(self, entity: Entity, project: str, commit: bool = True): request = RegistryServer_pb2.ApplyEntityRequest( entity=entity.to_proto(), project=project, commit=commit ) - self.stub.ApplyEntity(request) def delete_entity(self, name: str, project: str, commit: bool = True): request = RegistryServer_pb2.DeleteEntityRequest( name=name, project=project, commit=commit ) - self.stub.DeleteEntity(request) def get_entity(self, name: str, project: str, allow_cache: bool = False) -> Entity: request = RegistryServer_pb2.GetEntityRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetEntity(request) - return Entity.from_proto(response) def list_entities( @@ -74,9 +84,7 @@ def list_entities( request = RegistryServer_pb2.ListEntitiesRequest( project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListEntities(request) - return [Entity.from_proto(entity) for entity in response.entities] def apply_data_source( @@ -85,14 +93,12 @@ def apply_data_source( request = RegistryServer_pb2.ApplyDataSourceRequest( data_source=data_source.to_proto(), project=project, commit=commit ) - self.stub.ApplyDataSource(request) def delete_data_source(self, name: str, project: str, commit: bool = 
True): request = RegistryServer_pb2.DeleteDataSourceRequest( name=name, project=project, commit=commit ) - self.stub.DeleteDataSource(request) def get_data_source( @@ -101,9 +107,7 @@ def get_data_source( request = RegistryServer_pb2.GetDataSourceRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetDataSource(request) - return DataSource.from_proto(response) def list_data_sources( @@ -115,9 +119,7 @@ def list_data_sources( request = RegistryServer_pb2.ListDataSourcesRequest( project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListDataSources(request) - return [ DataSource.from_proto(data_source) for data_source in response.data_sources ] @@ -128,14 +130,12 @@ def apply_feature_service( request = RegistryServer_pb2.ApplyFeatureServiceRequest( feature_service=feature_service.to_proto(), project=project, commit=commit ) - self.stub.ApplyFeatureService(request) def delete_feature_service(self, name: str, project: str, commit: bool = True): request = RegistryServer_pb2.DeleteFeatureServiceRequest( name=name, project=project, commit=commit ) - self.stub.DeleteFeatureService(request) def get_feature_service( @@ -144,9 +144,7 @@ def get_feature_service( request = RegistryServer_pb2.GetFeatureServiceRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetFeatureService(request) - return FeatureService.from_proto(response) def list_feature_services( @@ -158,9 +156,7 @@ def list_feature_services( request = RegistryServer_pb2.ListFeatureServicesRequest( project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListFeatureServices(request) - return [ FeatureService.from_proto(feature_service) for feature_service in response.feature_services @@ -196,7 +192,6 @@ def delete_feature_view(self, name: str, project: str, commit: bool = True): request = RegistryServer_pb2.DeleteFeatureViewRequest( name=name, project=project, commit=commit ) - 
self.stub.DeleteFeatureView(request) def get_stream_feature_view( @@ -205,9 +200,7 @@ def get_stream_feature_view( request = RegistryServer_pb2.GetStreamFeatureViewRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetStreamFeatureView(request) - return StreamFeatureView.from_proto(response) def list_stream_feature_views( @@ -219,9 +212,7 @@ def list_stream_feature_views( request = RegistryServer_pb2.ListStreamFeatureViewsRequest( project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListStreamFeatureViews(request) - return [ StreamFeatureView.from_proto(stream_feature_view) for stream_feature_view in response.stream_feature_views @@ -233,9 +224,7 @@ def get_on_demand_feature_view( request = RegistryServer_pb2.GetOnDemandFeatureViewRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetOnDemandFeatureView(request) - return OnDemandFeatureView.from_proto(response) def list_on_demand_feature_views( @@ -247,9 +236,7 @@ def list_on_demand_feature_views( request = RegistryServer_pb2.ListOnDemandFeatureViewsRequest( project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListOnDemandFeatureViews(request) - return [ OnDemandFeatureView.from_proto(on_demand_feature_view) for on_demand_feature_view in response.on_demand_feature_views @@ -261,9 +248,7 @@ def get_feature_view( request = RegistryServer_pb2.GetFeatureViewRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetFeatureView(request) - return FeatureView.from_proto(response) def list_feature_views( @@ -275,7 +260,6 @@ def list_feature_views( request = RegistryServer_pb2.ListFeatureViewsRequest( project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListFeatureViews(request) return [ @@ -304,7 +288,6 @@ def apply_materialization( end_date=end_date_timestamp, commit=commit, ) - self.stub.ApplyMaterialization(request) def apply_saved_dataset( @@ -316,14 
+299,12 @@ def apply_saved_dataset( request = RegistryServer_pb2.ApplySavedDatasetRequest( saved_dataset=saved_dataset.to_proto(), project=project, commit=commit ) - self.stub.ApplyFeatureService(request) def delete_saved_dataset(self, name: str, project: str, commit: bool = True): request = RegistryServer_pb2.DeleteSavedDatasetRequest( name=name, project=project, commit=commit ) - self.stub.DeleteSavedDataset(request) def get_saved_dataset( @@ -332,20 +313,19 @@ def get_saved_dataset( request = RegistryServer_pb2.GetSavedDatasetRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetSavedDataset(request) - return SavedDataset.from_proto(response) def list_saved_datasets( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[SavedDataset]: request = RegistryServer_pb2.ListSavedDatasetsRequest( - project=project, allow_cache=allow_cache + project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListSavedDatasets(request) - return [ SavedDataset.from_proto(saved_dataset) for saved_dataset in response.saved_datasets @@ -362,14 +342,12 @@ def apply_validation_reference( project=project, commit=commit, ) - self.stub.ApplyValidationReference(request) def delete_validation_reference(self, name: str, project: str, commit: bool = True): request = RegistryServer_pb2.DeleteValidationReferenceRequest( name=name, project=project, commit=commit ) - self.stub.DeleteValidationReference(request) def get_validation_reference( @@ -378,20 +356,19 @@ def get_validation_reference( request = RegistryServer_pb2.GetValidationReferenceRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetValidationReference(request) - return ValidationReference.from_proto(response) def list_validation_references( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: 
Optional[dict[str, str]] = None, ) -> List[ValidationReference]: request = RegistryServer_pb2.ListValidationReferencesRequest( - project=project, allow_cache=allow_cache + project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListValidationReferences(request) - return [ ValidationReference.from_proto(validation_reference) for validation_reference in response.validation_references @@ -403,25 +380,20 @@ def list_project_metadata( request = RegistryServer_pb2.ListProjectMetadataRequest( project=project, allow_cache=allow_cache ) - response = self.stub.ListProjectMetadata(request) - return [ProjectMetadata.from_proto(pm) for pm in response.project_metadata] def update_infra(self, infra: Infra, project: str, commit: bool = True): request = RegistryServer_pb2.UpdateInfraRequest( infra=infra.to_proto(), project=project, commit=commit ) - self.stub.UpdateInfra(request) def get_infra(self, project: str, allow_cache: bool = False) -> Infra: request = RegistryServer_pb2.GetInfraRequest( project=project, allow_cache=allow_cache ) - response = self.stub.GetInfra(request) - return Infra.from_proto(response) def apply_user_metadata( @@ -437,6 +409,47 @@ def get_user_metadata( ) -> Optional[bytes]: pass + def apply_permission( + self, permission: Permission, project: str, commit: bool = True + ): + permission_proto = permission.to_proto() + permission_proto.spec.project = project + + request = RegistryServer_pb2.ApplyPermissionRequest( + permission=permission_proto, project=project, commit=commit + ) + self.stub.ApplyPermission(request) + + def delete_permission(self, name: str, project: str, commit: bool = True): + request = RegistryServer_pb2.DeletePermissionRequest( + name=name, project=project, commit=commit + ) + self.stub.DeletePermission(request) + + def get_permission( + self, name: str, project: str, allow_cache: bool = False + ) -> Permission: + request = RegistryServer_pb2.GetPermissionRequest( + name=name, project=project, allow_cache=allow_cache 
+ ) + response = self.stub.GetPermission(request) + + return Permission.from_proto(response) + + def list_permissions( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Permission]: + request = RegistryServer_pb2.ListPermissionsRequest( + project=project, allow_cache=allow_cache, tags=tags + ) + response = self.stub.ListPermissions(request) + return [ + Permission.from_proto(permission) for permission in response.permissions + ] + def proto(self) -> RegistryProto: return self.stub.Proto(Empty()) @@ -445,7 +458,6 @@ def commit(self): def refresh(self, project: Optional[str] = None): request = RegistryServer_pb2.RefreshRequest(project=str(project)) - self.stub.Refresh(request) def teardown(self): diff --git a/sdk/python/feast/infra/registry/snowflake.py b/sdk/python/feast/infra/registry/snowflake.py index ac4f52dc06..801b90afe3 100644 --- a/sdk/python/feast/infra/registry/snowflake.py +++ b/sdk/python/feast/infra/registry/snowflake.py @@ -18,6 +18,7 @@ EntityNotFoundException, FeatureServiceNotFoundException, FeatureViewNotFoundException, + PermissionNotFoundException, SavedDatasetNotFound, ValidationReferenceNotFound, ) @@ -31,6 +32,7 @@ execute_snowflake_statement, ) from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.permission import Permission from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from feast.protos.feast.core.Entity_pb2 import Entity as EntityProto @@ -42,6 +44,7 @@ from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( OnDemandFeatureView as OnDemandFeatureViewProto, ) +from feast.protos.feast.core.Permission_pb2 import Permission as PermissionProto from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.core.SavedDataset_pb2 import SavedDataset as SavedDatasetProto from feast.protos.feast.core.StreamFeatureView_pb2 import ( @@ 
-342,6 +345,17 @@ def _apply_object( self._set_last_updated_metadata(update_datetime, project) + def apply_permission( + self, permission: Permission, project: str, commit: bool = True + ): + return self._apply_object( + "PERMISSIONS", + project, + "PERMISSION_NAME", + permission, + "PERMISSION_PROTO", + ) + # delete operations def delete_data_source(self, name: str, project: str, commit: bool = True): return self._delete_object( @@ -421,6 +435,15 @@ def _delete_object( return cursor.rowcount + def delete_permission(self, name: str, project: str, commit: bool = True): + return self._delete_object( + "PERMISSIONS", + name, + project, + "PERMISSION_NAME", + PermissionNotFoundException, + ) + # get operations def get_data_source( self, name: str, project: str, allow_cache: bool = False @@ -619,6 +642,25 @@ def _get_object( else: return None + def get_permission( + self, name: str, project: str, allow_cache: bool = False + ) -> Permission: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_permission( + self.cached_registry_proto, name, project + ) + return self._get_object( + "PERMISSIONS", + name, + project, + PermissionProto, + Permission, + "PERMISSION_NAME", + "PERMISSION_PROTO", + PermissionNotFoundException, + ) + # list operations def list_data_sources( self, @@ -716,12 +758,15 @@ def list_on_demand_feature_views( ) def list_saved_datasets( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[SavedDataset]: if allow_cache: self._refresh_cached_registry_if_necessary() return proto_registry_utils.list_saved_datasets( - self.cached_registry_proto, project + self.cached_registry_proto, project, tags ) return self._list_objects( "SAVED_DATASETS", @@ -729,6 +774,7 @@ def list_saved_datasets( SavedDatasetProto, SavedDataset, "SAVED_DATASET_PROTO", + tags=tags, ) def list_stream_feature_views( @@ -752,7 +798,10 @@ def 
list_stream_feature_views( ) def list_validation_references( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[ValidationReference]: return self._list_objects( "VALIDATION_REFERENCES", @@ -760,6 +809,7 @@ def list_validation_references( ValidationReferenceProto, ValidationReference, "VALIDATION_REFERENCE_PROTO", + tags=tags, ) def _list_objects( @@ -793,6 +843,26 @@ def _list_objects( return objects return [] + def list_permissions( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Permission]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_permissions( + self.cached_registry_proto, project + ) + return self._list_objects( + "PERMISSIONS", + project, + PermissionProto, + Permission, + "PERMISSION_PROTO", + tags, + ) + def apply_materialization( self, feature_view: FeatureView, @@ -934,6 +1004,7 @@ def proto(self) -> RegistryProto: (self.list_saved_datasets, r.saved_datasets), (self.list_validation_references, r.validation_references), (self.list_project_metadata, r.project_metadata), + (self.list_permissions, r.permissions), ]: objs: List[Any] = lister(project) # type: ignore if objs: @@ -964,6 +1035,7 @@ def _get_all_projects(self) -> Set[str]: "FEATURE_VIEWS", "ON_DEMAND_FEATURE_VIEWS", "STREAM_FEATURE_VIEWS", + "PERMISSIONS", ] with GetSnowflakeConnection(self.registry_config) as conn: diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py index a2b16a3a09..90c6e82e7d 100644 --- a/sdk/python/feast/infra/registry/sql.py +++ b/sdk/python/feast/infra/registry/sql.py @@ -30,6 +30,7 @@ EntityNotFoundException, FeatureServiceNotFoundException, FeatureViewNotFoundException, + PermissionNotFoundException, SavedDatasetNotFound, ValidationReferenceNotFound, ) @@ -38,6 +39,7 @@ from feast.infra.infra_object import Infra from 
feast.infra.registry.caching_registry import CachingRegistry from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.permission import Permission from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from feast.protos.feast.core.Entity_pb2 import Entity as EntityProto @@ -49,6 +51,7 @@ from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( OnDemandFeatureView as OnDemandFeatureViewProto, ) +from feast.protos.feast.core.Permission_pb2 import Permission as PermissionProto from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.core.SavedDataset_pb2 import SavedDataset as SavedDatasetProto from feast.protos.feast.core.StreamFeatureView_pb2 import ( @@ -149,6 +152,15 @@ Column("infra_proto", LargeBinary, nullable=False), ) +permissions = Table( + "permissions", + metadata, + Column("permission_name", String(255), primary_key=True), + Column("project_id", String(50), primary_key=True), + Column("last_updated_timestamp", BigInteger, nullable=False), + Column("permission_proto", LargeBinary, nullable=False), +) + class FeastMetadataKeys(Enum): LAST_UPDATED_TIMESTAMP = "last_updated_timestamp" @@ -207,6 +219,7 @@ def teardown(self): on_demand_feature_views, saved_datasets, validation_references, + permissions, }: with self.engine.begin() as conn: stmt = delete(t) @@ -319,13 +332,16 @@ def _get_validation_reference(self, name: str, project: str) -> ValidationRefere not_found_exception=ValidationReferenceNotFound, ) - def _list_validation_references(self, project: str) -> List[ValidationReference]: + def _list_validation_references( + self, project: str, tags: Optional[dict[str, str]] = None + ) -> List[ValidationReference]: return self._list_objects( table=validation_references, project=project, proto_class=ValidationReferenceProto, python_class=ValidationReference, proto_field_name="validation_reference_proto", + 
tags=tags, ) def _list_entities( @@ -447,13 +463,16 @@ def _list_feature_views( tags=tags, ) - def _list_saved_datasets(self, project: str) -> List[SavedDataset]: + def _list_saved_datasets( + self, project: str, tags: Optional[dict[str, str]] = None + ) -> List[SavedDataset]: return self._list_objects( saved_datasets, project, SavedDatasetProto, SavedDataset, "saved_dataset_proto", + tags=tags, ) def _list_on_demand_feature_views( @@ -666,6 +685,7 @@ def proto(self) -> RegistryProto: (self.list_saved_datasets, r.saved_datasets), (self.list_validation_references, r.validation_references), (self.list_project_metadata, r.project_metadata), + (self.list_permissions, r.permissions), ]: objs: List[Any] = lister(project) # type: ignore if objs: @@ -721,6 +741,7 @@ def _apply_object( "saved_dataset_proto", "feature_view_proto", "feature_service_proto", + "permission_proto", ]: deserialized_proto = self.deserialize_registry_values( row._mapping[proto_field_name], type(obj) @@ -917,6 +938,7 @@ def _get_all_projects(self) -> Set[str]: feature_views, on_demand_feature_views, stream_feature_views, + permissions, }: stmt = select(table) rows = conn.execute(stmt).all() @@ -924,3 +946,44 @@ def _get_all_projects(self) -> Set[str]: projects.add(row._mapping["project_id"]) return projects + + def _get_permission(self, name: str, project: str) -> Permission: + return self._get_object( + table=permissions, + name=name, + project=project, + proto_class=PermissionProto, + python_class=Permission, + id_field_name="permission_name", + proto_field_name="permission_proto", + not_found_exception=PermissionNotFoundException, + ) + + def _list_permissions( + self, project: str, tags: Optional[dict[str, str]] + ) -> List[Permission]: + return self._list_objects( + permissions, + project, + PermissionProto, + Permission, + "permission_proto", + tags=tags, + ) + + def apply_permission( + self, permission: Permission, project: str, commit: bool = True + ): + return self._apply_object( + 
permissions, project, "permission_name", permission, "permission_proto" + ) + + def delete_permission(self, name: str, project: str, commit: bool = True): + with self.engine.begin() as conn: + stmt = delete(permissions).where( + permissions.c.permission_name == name, + permissions.c.project_id == project, + ) + rows = conn.execute(stmt) + if rows.rowcount < 1: + raise PermissionNotFoundException(name, project) diff --git a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql index aa35caeac4..021d175b4e 100644 --- a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql +++ b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql @@ -80,4 +80,12 @@ CREATE TABLE IF NOT EXISTS REGISTRY_PATH."VALIDATION_REFERENCES" ( last_updated_timestamp TIMESTAMP_LTZ NOT NULL, validation_reference_proto BINARY NOT NULL, PRIMARY KEY (validation_reference_name, project_id) -) +); + +CREATE TABLE IF NOT EXISTS REGISTRY_PATH."PERMISSIONS" ( + permission_name VARCHAR, + project_id VARCHAR, + last_updated_timestamp TIMESTAMP_LTZ NOT NULL, + permission_proto BINARY NOT NULL, + PRIMARY KEY (permission_name, project_id) +); diff --git a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_deletion.sql b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_deletion.sql index a355c72062..780424abd1 100644 --- a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_deletion.sql +++ b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_deletion.sql @@ -17,3 +17,5 @@ DROP TABLE IF EXISTS REGISTRY_PATH."SAVED_DATASETS"; DROP TABLE IF EXISTS REGISTRY_PATH."STREAM_FEATURE_VIEWS"; DROP TABLE IF EXISTS REGISTRY_PATH."VALIDATION_REFERENCES" + +DROP TABLE IF EXISTS REGISTRY_PATH."PERMISSIONS" diff --git a/sdk/python/feast/offline_server.py b/sdk/python/feast/offline_server.py index be92620d68..839acada93 100644 --- 
a/sdk/python/feast/offline_server.py +++ b/sdk/python/feast/offline_server.py @@ -3,7 +3,7 @@ import logging import traceback from datetime import datetime -from typing import Any, Dict, List +from typing import Any, Dict, List, cast import pyarrow as pa import pyarrow.flight as fl @@ -12,14 +12,33 @@ from feast.feature_logging import FeatureServiceLoggingSource from feast.feature_view import DUMMY_ENTITY_NAME from feast.infra.offline_stores.offline_utils import get_offline_store_from_config +from feast.permissions.action import AuthzedAction +from feast.permissions.security_manager import assert_permissions +from feast.permissions.server.arrow import ( + arrowflight_middleware, + inject_user_details_decorator, +) +from feast.permissions.server.utils import ( + ServerType, + init_auth_manager, + init_security_manager, + str_to_auth_manager_type, +) from feast.saved_dataset import SavedDatasetStorage logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) class OfflineServer(fl.FlightServerBase): def __init__(self, store: FeatureStore, location: str, **kwargs): - super(OfflineServer, self).__init__(location, **kwargs) + super(OfflineServer, self).__init__( + location, + middleware=arrowflight_middleware( + str_to_auth_manager_type(store.config.auth_config.type) + ), + **kwargs, + ) self._location = location # A dictionary of configured flights, e.g. 
API calls received and not yet served self.flights: Dict[str, Any] = {} @@ -41,6 +60,7 @@ def _make_flight_info(self, key: Any, descriptor: fl.FlightDescriptor): return fl.FlightInfo(schema, descriptor, endpoints, -1, -1) + @inject_user_details_decorator def get_flight_info( self, context: fl.ServerCallContext, descriptor: fl.FlightDescriptor ): @@ -49,6 +69,7 @@ def get_flight_info( return self._make_flight_info(key, descriptor) raise KeyError("Flight not found.") + @inject_user_details_decorator def list_flights(self, context: fl.ServerCallContext, criteria: bytes): for key, table in self.flights.items(): if key[1] is not None: @@ -60,6 +81,7 @@ def list_flights(self, context: fl.ServerCallContext, criteria: bytes): # Expects to receive request parameters and stores them in the flights dictionary # Indexed by the unique command + @inject_user_details_decorator def do_put( self, context: fl.ServerCallContext, @@ -156,6 +178,7 @@ def _validate_do_get_parameters(self, command: dict): # Extracts the API parameters from the flights dictionary, delegates the execution to the FeatureStore instance # and returns the stream of data + @inject_user_details_decorator def do_get(self, context: fl.ServerCallContext, ticket: fl.Ticket): key = ast.literal_eval(ticket.ticket.decode()) if key not in self.flights: @@ -217,7 +240,15 @@ def offline_write_batch(self, command: dict, key: str): assert len(feature_views) == 1, "incorrect feature view" table = self.flights[key] self.offline_store.offline_write_batch( - self.store.config, feature_views[0], table, command["progress"] + self.store.config, + cast( + FeatureView, + assert_permissions( + feature_views[0], actions=[AuthzedAction.WRITE_OFFLINE] + ), + ), + table, + command["progress"], ) def _validate_write_logged_features_parameters(self, command: dict): @@ -234,6 +265,10 @@ def write_logged_features(self, command: dict, key: str): feature_service.logging_config is not None ), "feature service must have logging_config set" + 
assert_permissions( + resource=feature_service, + actions=[AuthzedAction.WRITE_OFFLINE], + ) self.offline_store.write_logged_features( config=self.store.config, data=table, @@ -260,10 +295,12 @@ def _validate_pull_all_from_table_or_query_parameters(self, command: dict): def pull_all_from_table_or_query(self, command: dict): self._validate_pull_all_from_table_or_query_parameters(command) + data_source = self.store.get_data_source(command["data_source_name"]) + assert_permissions(data_source, actions=[AuthzedAction.READ_OFFLINE]) return self.offline_store.pull_all_from_table_or_query( self.store.config, - self.store.get_data_source(command["data_source_name"]), + data_source, command["join_key_columns"], command["feature_name_columns"], command["timestamp_field"], @@ -287,10 +324,11 @@ def _validate_pull_latest_from_table_or_query_parameters(self, command: dict): def pull_latest_from_table_or_query(self, command: dict): self._validate_pull_latest_from_table_or_query_parameters(command) - + data_source = self.store.get_data_source(command["data_source_name"]) + assert_permissions(resource=data_source, actions=[AuthzedAction.READ_OFFLINE]) return self.offline_store.pull_latest_from_table_or_query( self.store.config, - self.store.get_data_source(command["data_source_name"]), + data_source, command["join_key_columns"], command["feature_name_columns"], command["timestamp_field"], @@ -343,6 +381,11 @@ def get_historical_features(self, command: dict, key: str): project=project, ) + for feature_view in feature_views: + assert_permissions( + resource=feature_view, actions=[AuthzedAction.READ_OFFLINE] + ) + retJob = self.offline_store.get_historical_features( config=self.store.config, feature_views=feature_views, @@ -377,6 +420,10 @@ def persist(self, command: dict, key: str): raise NotImplementedError data_source = self.store.get_data_source(command["data_source_name"]) + assert_permissions( + resource=data_source, + actions=[AuthzedAction.WRITE_OFFLINE], + ) storage = 
SavedDatasetStorage.from_data_source(data_source) ret_job.persist(storage, command["allow_overwrite"], command["timeout"]) except Exception as e: @@ -401,11 +448,23 @@ def remove_dummies(fv: FeatureView) -> FeatureView: return fv +def _init_auth_manager(store: FeatureStore): + auth_type = str_to_auth_manager_type(store.config.auth_config.type) + init_security_manager(auth_type=auth_type, fs=store) + init_auth_manager( + auth_type=auth_type, + server_type=ServerType.ARROW, + auth_config=store.config.auth_config, + ) + + def start_server( store: FeatureStore, host: str, port: int, ): + _init_auth_manager(store) + location = "grpc+tcp://{}:{}".format(host, port) server = OfflineServer(store, location) logger.info(f"Offline store server serving on {location}") diff --git a/sdk/python/feast/permissions/__init__.py b/sdk/python/feast/permissions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/permissions/action.py b/sdk/python/feast/permissions/action.py new file mode 100644 index 0000000000..0e85c1685f --- /dev/null +++ b/sdk/python/feast/permissions/action.py @@ -0,0 +1,40 @@ +import enum + + +class AuthzedAction(enum.Enum): + """ + Identify the type of action being secured by the permissions framework, according to the familiar CRUD and Feast terminology. 
+ """ + + CREATE = "create" # Create an instance + DESCRIBE = "describe" # Access the instance state + UPDATE = "update" # Update the instance state + DELETE = "delete" # Delete an instance + READ_ONLINE = "read_online" # Read the online store only + READ_OFFLINE = "read_offline" # Read the offline store only + WRITE_ONLINE = "write_online" # Write to the online store only + WRITE_OFFLINE = "write_offline" # Write to the offline store only + + +# Alias for all available actions +ALL_ACTIONS = [a for a in AuthzedAction.__members__.values()] + +# Alias for all read actions +READ = [ + AuthzedAction.READ_OFFLINE, + AuthzedAction.READ_ONLINE, +] +# Alias for all write actions +WRITE = [ + AuthzedAction.WRITE_OFFLINE, + AuthzedAction.WRITE_ONLINE, +] + + +# Alias for CRUD actions +CRUD = [ + AuthzedAction.CREATE, + AuthzedAction.DESCRIBE, + AuthzedAction.UPDATE, + AuthzedAction.DELETE, +] diff --git a/sdk/python/feast/permissions/auth/__init__.py b/sdk/python/feast/permissions/auth/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/permissions/auth/auth_manager.py b/sdk/python/feast/permissions/auth/auth_manager.py new file mode 100644 index 0000000000..e608904567 --- /dev/null +++ b/sdk/python/feast/permissions/auth/auth_manager.py @@ -0,0 +1,68 @@ +from abc import ABC +from typing import Optional + +from .token_extractor import NoAuthTokenExtractor, TokenExtractor +from .token_parser import NoAuthTokenParser, TokenParser + + +class AuthManager(ABC): + """ + The authorization manager offers services to manage authorization tokens from client requests + to extract user details before injecting them in the security context. 
+ """ + + _token_parser: TokenParser + _token_extractor: TokenExtractor + + def __init__(self, token_parser: TokenParser, token_extractor: TokenExtractor): + self._token_parser = token_parser + self._token_extractor = token_extractor + + @property + def token_parser(self) -> TokenParser: + return self._token_parser + + @property + def token_extractor(self) -> TokenExtractor: + return self._token_extractor + + +""" +The possibly empty global instance of `AuthManager`. +""" +_auth_manager: Optional[AuthManager] = None + + +def get_auth_manager() -> AuthManager: + """ + Return the global instance of `AuthManager`. + + Raises: + RuntimeError if the clobal instance is not set. + """ + global _auth_manager + if _auth_manager is None: + raise RuntimeError( + "AuthManager is not initialized. Call 'set_auth_manager' first." + ) + return _auth_manager + + +def set_auth_manager(auth_manager: AuthManager): + """ + Initialize the global instance of `AuthManager`. + """ + + global _auth_manager + _auth_manager = auth_manager + + +class AllowAll(AuthManager): + """ + An AuthManager not extracting nor parsing the authorization token. + """ + + def __init__(self): + super().__init__( + token_extractor=NoAuthTokenExtractor(), token_parser=NoAuthTokenParser() + ) diff --git a/sdk/python/feast/permissions/auth/auth_type.py b/sdk/python/feast/permissions/auth/auth_type.py new file mode 100644 index 0000000000..3fa34f97bd --- /dev/null +++ b/sdk/python/feast/permissions/auth/auth_type.py @@ -0,0 +1,11 @@ +import enum + + +class AuthType(enum.Enum): + """ + Identify the type of authorization. 
+ """ + + NONE = "no_auth" + OIDC = "oidc" + KUBERNETES = "kubernetes" diff --git a/sdk/python/feast/permissions/auth/kubernetes_token_parser.py b/sdk/python/feast/permissions/auth/kubernetes_token_parser.py new file mode 100644 index 0000000000..c16e5232fb --- /dev/null +++ b/sdk/python/feast/permissions/auth/kubernetes_token_parser.py @@ -0,0 +1,107 @@ +import logging + +import jwt +from kubernetes import client, config +from starlette.authentication import ( + AuthenticationError, +) + +from feast.permissions.auth.token_parser import TokenParser +from feast.permissions.user import User + +logger = logging.getLogger(__name__) + + +class KubernetesTokenParser(TokenParser): + """ + A `TokenParser` implementation to use Kubernetes RBAC resources to retrieve the user details. + The assumption is that the request header includes an authorization bearer with the token of the + client `ServiceAccount`. + By inspecting the role bindings, this `TokenParser` extracts the associated `Role`s. + + The client `ServiceAccount` is instead used as the user name, together with the current namespace. + """ + + def __init__(self): + config.load_incluster_config() + self.v1 = client.CoreV1Api() + self.rbac_v1 = client.RbacAuthorizationV1Api() + + async def user_details_from_access_token(self, access_token: str) -> User: + """ + Extract the service account from the token and search the roles associated with it. + + Returns: + User: Current user, with associated roles. The `username` is the `:` separated concatenation of `namespace` and `service account name`. + + Raises: + AuthenticationError if any error happens. 
+ """ + sa_namespace, sa_name = _decode_token(access_token) + current_user = f"{sa_namespace}:{sa_name}" + logging.info(f"Received request from {sa_name} in {sa_namespace}") + + roles = self.get_roles(sa_namespace, sa_name) + logging.info(f"SA roles are: {roles}") + + return User(username=current_user, roles=roles) + + def get_roles(self, namespace: str, service_account_name: str) -> list[str]: + """ + Fetches the Kubernetes `Role`s associated to the given `ServiceAccount` in the given `namespace`. + + The research also includes the `ClusterRole`s, so the running deployment must be granted enough permissions to query + for such instances in all the namespaces. + + Returns: + list[str]: Name of the `Role`s and `ClusterRole`s associated to the service account. No string manipulation is performed on the role name. + """ + role_bindings = self.rbac_v1.list_namespaced_role_binding(namespace) + cluster_role_bindings = self.rbac_v1.list_cluster_role_binding() + + roles: set[str] = set() + + for binding in role_bindings.items: + if binding.subjects is not None: + for subject in binding.subjects: + if ( + subject.kind == "ServiceAccount" + and subject.name == service_account_name + ): + roles.add(binding.role_ref.name) + + for binding in cluster_role_bindings.items: + if binding.subjects is not None: + for subject in binding.subjects: + if ( + subject.kind == "ServiceAccount" + and subject.name == service_account_name + and subject.namespace == namespace + ): + roles.add(binding.role_ref.name) + + return list(roles) + + +def _decode_token(access_token: str) -> tuple[str, str]: + """ + The `sub` portion of the decoded token includes the service account name in the format: `system:serviceaccount:NAMESPACE:SA_NAME` + + Returns: + str: the namespace name. + str: the `ServiceAccount` name. 
+ """ + try: + decoded_token = jwt.decode(access_token, options={"verify_signature": False}) + if "sub" in decoded_token: + subject: str = decoded_token["sub"] + if len(subject.split(":")) != 4: + raise AuthenticationError( + f"Expecting 4 elements separated by : in th subject section, instead of {len(subject.split(':'))}." + ) + _, _, sa_namespace, sa_name = subject.split(":") + return (sa_namespace, sa_name) + else: + raise AuthenticationError("Missing sub section in received token.") + except jwt.DecodeError as e: + raise AuthenticationError(f"Error decoding JWT token: {e}") diff --git a/sdk/python/feast/permissions/auth/oidc_token_parser.py b/sdk/python/feast/permissions/auth/oidc_token_parser.py new file mode 100644 index 0000000000..921a585bc2 --- /dev/null +++ b/sdk/python/feast/permissions/auth/oidc_token_parser.py @@ -0,0 +1,105 @@ +import logging +from unittest.mock import Mock + +import jwt +from fastapi import Request +from fastapi.security import OAuth2AuthorizationCodeBearer +from jwt import PyJWKClient +from starlette.authentication import ( + AuthenticationError, +) + +from feast.permissions.auth.token_parser import TokenParser +from feast.permissions.auth_model import OidcAuthConfig +from feast.permissions.user import User + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + + +class OidcTokenParser(TokenParser): + """ + A `TokenParser` to use an OIDC server to retrieve the user details. + Server settings are retrieved from the `auth` configurationof the Feature store. + """ + + _auth_config: OidcAuthConfig + + def __init__(self, auth_config: OidcAuthConfig): + self._auth_config = auth_config + + async def _validate_token(self, access_token: str): + """ + Validate the token extracted from the headrer of the user request against the OAuth2 server. 
+ """ + # FastAPI's OAuth2AuthorizationCodeBearer requires a Request type but actually uses only the headers field + # https://github.com/tiangolo/fastapi/blob/eca465f4c96acc5f6a22e92fd2211675ca8a20c8/fastapi/security/oauth2.py#L380 + request = Mock(spec=Request) + request.headers = {"Authorization": f"Bearer {access_token}"} + + oauth_2_scheme = OAuth2AuthorizationCodeBearer( + tokenUrl=f"{self._auth_config.auth_server_url}/realms/{self._auth_config.realm}/protocol/openid-connect/token", + authorizationUrl=f"{self._auth_config.auth_server_url}/realms/{self._auth_config.realm}/protocol/openid-connect/auth", + refreshUrl=f"{self._auth_config.auth_server_url}/realms/{self._auth_config.realm}/protocol/openid-connect/token", + ) + + await oauth_2_scheme(request=request) + + async def user_details_from_access_token(self, access_token: str) -> User: + """ + Validate the access token then decode it to extract the user credential and roles. + + Returns: + User: Current user, with associated roles. + + Raises: + AuthenticationError if any error happens. + """ + + try: + await self._validate_token(access_token) + logger.info("Validated token") + except Exception as e: + raise AuthenticationError(f"Invalid token: {e}") + + url = f"{self._auth_config.auth_server_url}/realms/{self._auth_config.realm}/protocol/openid-connect/certs" + optional_custom_headers = {"User-agent": "custom-user-agent"} + jwks_client = PyJWKClient(url, headers=optional_custom_headers) + + try: + signing_key = jwks_client.get_signing_key_from_jwt(access_token) + data = jwt.decode( + access_token, + signing_key.key, + algorithms=["RS256"], + audience="account", + options={ + "verify_aud": False, + "verify_signature": True, + "verify_exp": True, + }, + leeway=10, # accepts tokens generated up to 10 seconds in the past, in case of clock skew + ) + + if "preferred_username" not in data: + raise AuthenticationError( + "Missing preferred_username field in access token." 
class TokenExtractor(ABC):
    """
    Base type for extracting the authorization token from a user request.
    """

    def extract_access_token(self, **kwargs) -> str:
        """
        Extract the authorization token from a user request.

        Concrete implementations define which entries must be present in the
        keyword arguments `kwargs`.

        Returns:
            The extracted access token.
        """
        raise NotImplementedError()

    def _extract_bearer_token(self, auth_header: str) -> str:
        """
        Strip the `Bearer` scheme from an authorization header value.

        Args:
            auth_header: The full value of the authorization header.

        Returns:
            str: The token value, without the `Bearer` part.

        Raises:
            AuthenticationError: When the header does not follow the `Bearer` scheme.
        """
        # Case-insensitive check that the header uses the Bearer scheme and
        # carries a non-empty token.
        scheme_ok = re.match(r"(?i)Bearer .+", auth_header)
        if scheme_ok is None:
            raise AuthenticationError(f"Expected Bearer schema, found {auth_header}")
        # Exactly two whitespace-separated parts are expected: scheme and token.
        _, token_value = auth_header.split()
        return token_value


class NoAuthTokenExtractor(TokenExtractor):
    """
    A `TokenExtractor` that always yields an empty token.
    """

    def extract_access_token(self, **kwargs) -> str:
        return ""
+ """ + raise NotImplementedError() + + +class NoAuthTokenParser(TokenParser): + """ + A `TokenParser` always returning an empty token + """ + + async def user_details_from_access_token(self, access_token: str, **kwargs) -> User: + return User(username="", roles=[]) diff --git a/sdk/python/feast/permissions/auth_model.py b/sdk/python/feast/permissions/auth_model.py new file mode 100644 index 0000000000..afb0a22bc9 --- /dev/null +++ b/sdk/python/feast/permissions/auth_model.py @@ -0,0 +1,25 @@ +from typing import Literal, Optional + +from feast.repo_config import FeastConfigBaseModel + + +class AuthConfig(FeastConfigBaseModel): + type: Literal["oidc", "kubernetes", "no_auth"] = "no_auth" + + +class OidcAuthConfig(AuthConfig): + auth_server_url: Optional[str] = None + auth_discovery_url: str + client_id: str + client_secret: Optional[str] = None + username: str + password: str + realm: str = "master" + + +class NoAuthConfig(AuthConfig): + pass + + +class KubernetesAuthConfig(AuthConfig): + pass diff --git a/sdk/python/feast/permissions/client/__init__.py b/sdk/python/feast/permissions/client/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/permissions/client/arrow_flight_auth_interceptor.py b/sdk/python/feast/permissions/client/arrow_flight_auth_interceptor.py new file mode 100644 index 0000000000..724c7df5ca --- /dev/null +++ b/sdk/python/feast/permissions/client/arrow_flight_auth_interceptor.py @@ -0,0 +1,38 @@ +import pyarrow.flight as fl + +from feast.permissions.auth.auth_type import AuthType +from feast.permissions.auth_model import AuthConfig +from feast.permissions.client.auth_client_manager_factory import get_auth_token + + +class FlightBearerTokenInterceptor(fl.ClientMiddleware): + def __init__(self, auth_config: AuthConfig): + super().__init__() + self.auth_config = auth_config + + def call_completed(self, exception): + pass + + def received_headers(self, headers): + pass + + def sending_headers(self): + 
def get_auth_client_manager(auth_config: AuthConfig) -> AuthenticationClientManager:
    """
    Return the `AuthenticationClientManager` implementation matching the configured auth type.

    Args:
        auth_config: The authorization configuration of the feature store.

    Returns:
        AuthenticationClientManager: A manager able to retrieve an access token.

    Raises:
        RuntimeError: If no client manager is implemented for the configured auth type.
    """
    if auth_config.type == AuthType.OIDC.value:
        assert isinstance(auth_config, OidcAuthConfig)
        return OidcAuthClientManager(auth_config)
    elif auth_config.type == AuthType.KUBERNETES.value:
        assert isinstance(auth_config, KubernetesAuthConfig)
        return KubernetesAuthClientManager(auth_config)
    else:
        # f-strings interpolate with `{}`: the previous `:${auth_config.type}`
        # rendered a literal `$` in the error message.
        raise RuntimeError(
            f"No Auth client manager implemented for the auth type: {auth_config.type}"
        )


def get_auth_token(auth_config: AuthConfig) -> str:
    """Retrieve an access token using the manager for the configured auth type."""
    return get_auth_client_manager(auth_config).get_token()
self._append_auth_header_metadata(client_call_details) + return continuation(client_call_details, request_iterator) + + def intercept_stream_stream( + self, continuation, client_call_details, request_iterator + ): + client_call_details = self._append_auth_header_metadata(client_call_details) + return continuation(client_call_details, request_iterator) + + def _append_auth_header_metadata(self, client_call_details): + logger.debug( + "Intercepted the grpc api method call to inject Authorization header " + ) + metadata = client_call_details.metadata or [] + access_token = get_auth_token(self._auth_type) + metadata.append((b"authorization", b"Bearer " + access_token.encode("utf-8"))) + client_call_details = client_call_details._replace(metadata=metadata) + return client_call_details diff --git a/sdk/python/feast/permissions/client/http_auth_requests_wrapper.py b/sdk/python/feast/permissions/client/http_auth_requests_wrapper.py new file mode 100644 index 0000000000..3232e25025 --- /dev/null +++ b/sdk/python/feast/permissions/client/http_auth_requests_wrapper.py @@ -0,0 +1,22 @@ +import requests +from requests import Session + +from feast.permissions.auth.auth_type import AuthType +from feast.permissions.auth_model import ( + AuthConfig, +) +from feast.permissions.client.auth_client_manager_factory import get_auth_token + + +class AuthenticatedRequestsSession(Session): + def __init__(self, auth_token: str): + super().__init__() + self.headers.update({"Authorization": f"Bearer {auth_token}"}) + + +def get_http_auth_requests_session(auth_config: AuthConfig) -> Session: + if auth_config.type == AuthType.NONE.value: + request_session = requests.session() + else: + request_session = AuthenticatedRequestsSession(get_auth_token(auth_config)) + return request_session diff --git a/sdk/python/feast/permissions/client/kubernetes_auth_client_manager.py b/sdk/python/feast/permissions/client/kubernetes_auth_client_manager.py new file mode 100644 index 0000000000..1ca3c5a2ae --- 
class KubernetesAuthClientManager(AuthenticationClientManager):
    """
    An `AuthenticationClientManager` that reuses the service account token mounted
    in the pod, falling back to the `LOCAL_K8S_TOKEN` environment variable
    (e.g. for local runs outside a cluster).
    """

    def __init__(self, auth_config: KubernetesAuthConfig):
        self.auth_config = auth_config
        # Default in-cluster location of the mounted service account token.
        self.token_file_path = "/var/run/secrets/kubernetes.io/serviceaccount/token"

    def get_token(self):
        """
        Return the service account token, preferring the mounted file and falling
        back to the `LOCAL_K8S_TOKEN` environment variable.

        Raises:
            Exception: When neither source provides a token (the env-var error is re-raised).
        """
        try:
            return self._read_token_from_file()
        except Exception as e:
            logger.info(f"Error reading token from file: {e}")
            logger.info("Attempting to read token from environment variable.")
            try:
                return self._read_token_from_env()
            except Exception as env_e:
                logger.exception(
                    f"Error reading token from environment variable: {env_e}"
                )
                raise env_e

    def _read_token_from_file(self):
        """Read and strip the token from the mounted file."""
        # Any OSError propagates to `get_token`, which falls back to the env var.
        # (The previous `try/except e: raise e` wrapper added nothing.)
        with open(self.token_file_path, "r") as file:
            return file.read().strip()

    def _read_token_from_env(self):
        """Read the token from `LOCAL_K8S_TOKEN`, raising if unset or empty."""
        token = os.getenv("LOCAL_K8S_TOKEN")
        if not token:
            raise KeyError("LOCAL_K8S_TOKEN environment variable is not set.")
        return token
self.auth_config = auth_config + + def _get_token_endpoint(self): + response = requests.get(self.auth_config.auth_discovery_url) + if response.status_code == 200: + oidc_config = response.json() + if not oidc_config["token_endpoint"]: + raise RuntimeError( + " OIDC token_endpoint is not available from discovery url response." + ) + return oidc_config["token_endpoint"].replace( + "master", self.auth_config.realm + ) + else: + raise RuntimeError( + f"Error fetching OIDC token endpoint configuration: {response.status_code} - {response.text}" + ) + + def get_token(self): + # Fetch the token endpoint from the discovery URL + token_endpoint = self._get_token_endpoint() + + token_request_body = { + "grant_type": "password", + "client_id": self.auth_config.client_id, + "client_secret": self.auth_config.client_secret, + "username": self.auth_config.username, + "password": self.auth_config.password, + } + headers = {"Content-Type": "application/x-www-form-urlencoded"} + + token_response = requests.post( + token_endpoint, data=token_request_body, headers=headers + ) + if token_response.status_code == 200: + access_token = token_response.json()["access_token"] + if not access_token: + logger.debug( + f"access_token is empty for the client_id=${self.auth_config.client_id}" + ) + raise RuntimeError("access token is empty") + return access_token + else: + raise RuntimeError( + f"""Failed to obtain oidc access token:url=[{token_endpoint}] {token_response.status_code} - {token_response.text}""" + ) diff --git a/sdk/python/feast/permissions/decision.py b/sdk/python/feast/permissions/decision.py new file mode 100644 index 0000000000..963befe831 --- /dev/null +++ b/sdk/python/feast/permissions/decision.py @@ -0,0 +1,114 @@ +import enum +import logging +from typing import Optional + +logger = logging.getLogger(__name__) + + +class DecisionStrategy(enum.Enum): + """ + The strategy to be adopted in case multiple permissions match an execution request. 
+ """ + + UNANIMOUS = "unanimous" # All policies must evaluate to a positive decision for the final decision to be also positive. + AFFIRMATIVE = ( + "affirmative" # At least one policy must evaluate to a positive decision + ) + # The number of positive decisions must be greater than the number of negative decisions. + # If the number of positive and negative decisions is the same, the final decision will be negative. + CONSENSUS = "consensus" + + +class DecisionEvaluator: + """ + A class to implement the decision logic, according to the selected strategy. + + Args: + decision_strategy: The associated `DecisionStrategy`. + num_of_voters: The expected number of votes to complete the decision. + + Examples: + Create the instance and specify the strategy and number of decisions: + `evaluator = DecisionEvaluator(DecisionStrategy.UNANIMOUS, 3) + + For each vote that you receivem, add a decision grant: `evaluator.add_grant(vote, message)` + and check if the decision process ended: `if evaluator.is_decided():` + Once decided, get the result and the failure explanations using: + `grant, explanations = evaluator.grant()` + """ + + def __init__( + self, + num_of_voters: int, + ): + # Only AFFIRMATIVE strategy is managed available + decision_strategy = DecisionStrategy.AFFIRMATIVE + self.num_of_voters = num_of_voters + + self.grant_count = 0 + self.deny_count = 0 + + self.grant_quorum = ( + 1 + if decision_strategy == DecisionStrategy.AFFIRMATIVE + else num_of_voters + if decision_strategy == DecisionStrategy.UNANIMOUS + else num_of_voters // 2 + 1 + ) + self.deny_quorum = ( + num_of_voters + if decision_strategy == DecisionStrategy.AFFIRMATIVE + else 1 + if decision_strategy == DecisionStrategy.UNANIMOUS + else num_of_voters // 2 + (num_of_voters % 2) + ) + self.grant_decision: Optional[bool] = None + self.explanations: list[str] = [] + logger.info( + f"Decision evaluation started with grant_quorum={self.grant_quorum}, deny_quorum={self.deny_quorum}" + ) + + def 
def require_permissions(actions: Union[list[AuthzedAction], AuthzedAction]):
    """
    A decorator to define the actions that are executed from the decorated class method and that must be protected
    against unauthorized access.

    The first parameter of the protected method must be `self`.

    Args:
        actions: The list of actions that must be permitted to the current user.
    """

    def require_permissions_decorator(func):
        def permission_checker(*args, **kwargs):
            logger.debug(f"permission_checker for {args}, {kwargs}")
            resource = args[0]
            if not is_a_feast_object(resource):
                raise NotImplementedError(
                    f"The first argument is not of a managed type but {type(resource)}"
                )

            # Authorize first; `assert_permissions` raises on denial.
            # BUG FIX: the previous `return assert_permissions(...)` made the
            # code below unreachable, so the wrapped method was never invoked
            # and its result never returned.
            assert_permissions(
                resource=resource,
                actions=actions,
            )
            logger.debug(
                f"Current User can invoke {actions} on {resource.name}:{type(resource)} "
            )
            return func(*args, **kwargs)

        return permission_checker

    return require_permissions_decorator
+ actions: The requested actions to be authorized. + filter_only: If `True`, it removes unauthorized resources from the returned value, otherwise it raises a `PermissionError` the + first unauthorized resource. Defaults to `False`. + + Returns: + list[FeastObject]: A filtered list of the permitted resources. + + Raises: + PermissionError: If the current user is not authorized to eecute the requested actions on the given resources (and `filter_only` is `False`). + """ + if not permissions: + return resources + + _permitted_resources: list[FeastObject] = [] + for resource in resources: + logger.debug( + f"Enforcing permission policies for {type(resource).__name__}:{resource.name} to execute {actions}" + ) + matching_permissions = [ + p + for p in permissions + if p.match_resource(resource) and p.match_actions(actions) + ] + + if matching_permissions: + evaluator = DecisionEvaluator(len(matching_permissions)) + for p in matching_permissions: + permission_grant, permission_explanation = p.policy.validate_user( + user=user + ) + evaluator.add_grant( + permission_grant, + f"Permission {p.name} denied execution of {[a.value.upper() for a in actions]} to {type(resource).__name__}:{resource.name}: {permission_explanation}", + ) + + if evaluator.is_decided(): + grant, explanations = evaluator.grant() + if not grant and not filter_only: + raise PermissionError(",".join(explanations)) + if grant: + _permitted_resources.append(resource) + break + else: + message = f"No permissions defined to manage {actions} on {type(resource)}/{resource.name}." + logger.exception(f"**PERMISSION NOT GRANTED**: {message}") + raise PermissionError(message) + return _permitted_resources diff --git a/sdk/python/feast/permissions/matcher.py b/sdk/python/feast/permissions/matcher.py new file mode 100644 index 0000000000..337bfd5c57 --- /dev/null +++ b/sdk/python/feast/permissions/matcher.py @@ -0,0 +1,129 @@ +""" +This module provides utility matching functions. 
+""" + +import logging +import re +from typing import TYPE_CHECKING, Any, Optional +from unittest.mock import Mock + +from feast.permissions.action import AuthzedAction + +if TYPE_CHECKING: + from feast.feast_object import FeastObject + +logger = logging.getLogger(__name__) + + +def is_a_feast_object(resource: Any): + """ + A matcher to verify that a given object is one of the Feast objects defined in the `FeastObject` type. + + Args: + resource: An object instance to verify. + Returns: + `True` if the given object is one of the types in the FeastObject alias or a subclass of one of them. + """ + from feast.feast_object import ALL_RESOURCE_TYPES + + for t in ALL_RESOURCE_TYPES: + # Use isinstance to pass Mock validation + if isinstance(resource, t): + return True + return False + + +def _get_type(resource: "FeastObject") -> Any: + is_mock = isinstance(resource, Mock) + if not is_mock: + return type(resource) + else: + return getattr(resource, "_spec_class", None) + + +def resource_match_config( + resource: "FeastObject", + expected_types: list["FeastObject"], + name_pattern: Optional[str] = None, + required_tags: Optional[dict[str, str]] = None, +) -> bool: + """ + Match a given Feast object against the configured type, name and tags in a permission configuration. + + Args: + resource: A FeastObject instance to match agains the permission. + expected_types: The list of object types configured in the permission. Type match also includes all the sub-classes. + name_pattern: The optional name pattern filter configured in the permission. + required_tags: The optional dictionary of required tags configured in the permission. + + Returns: + bool: `True` if the resource matches the configured permission filters. 
+ """ + if resource is None: + logger.warning(f"None passed to {resource_match_config.__name__}") + return False + + _type = _get_type(resource) + if not is_a_feast_object(resource): + logger.warning(f"Given resource is not of a managed type but {_type}") + return False + + # mypy check ignored because of https://github.com/python/mypy/issues/11673, or it raises "Argument 2 to "isinstance" has incompatible type "tuple[Featu ..." + if not isinstance(resource, tuple(expected_types)): # type: ignore + logger.info( + f"Resource does not match any of the expected type {expected_types}" + ) + return False + + if name_pattern is not None: + if hasattr(resource, "name"): + if isinstance(resource.name, str): + match = bool(re.fullmatch(name_pattern, resource.name)) + if not match: + logger.info( + f"Resource name {resource.name} does not match pattern {name_pattern}" + ) + return False + else: + logger.warning( + f"Resource {resource} has no `name` attribute of unexpected type {type(resource.name)}" + ) + else: + logger.warning(f"Resource {resource} has no `name` attribute") + + if required_tags: + if hasattr(resource, "required_tags"): + if isinstance(resource.required_tags, dict): + for tag in required_tags.keys(): + required_value = required_tags.get(tag) + actual_value = resource.required_tags.get(tag) + if required_value != actual_value: + logger.info( + f"Unmatched value {actual_value} for required tag {tag}: expected {required_value}" + ) + return False + else: + logger.warning( + f"Resource {resource} has no `required_tags` attribute of unexpected type {type(resource.required_tags)}" + ) + else: + logger.warning(f"Resource {resource} has no `required_tags` attribute") + + return True + + +def actions_match_config( + requested_actions: list[AuthzedAction], + allowed_actions: list[AuthzedAction], +) -> bool: + """ + Match a list of actions against the actions defined in a permission configuration. + + Args: + requested_actions: A list of actions to be executed. 
import logging
import re
from abc import ABC
from datetime import datetime
from typing import TYPE_CHECKING, Any, Dict, Optional, Union

from google.protobuf.json_format import MessageToJson

from feast.importer import import_class
from feast.permissions.action import ALL_ACTIONS, AuthzedAction
from feast.permissions.matcher import actions_match_config, resource_match_config
from feast.permissions.policy import AllowAll, Policy
from feast.protos.feast.core.Permission_pb2 import Permission as PermissionProto
from feast.protos.feast.core.Permission_pb2 import PermissionMeta as PermissionMetaProto
from feast.protos.feast.core.Permission_pb2 import PermissionSpec as PermissionSpecProto

if TYPE_CHECKING:
    from feast.feast_object import FeastObject

logger = logging.getLogger(__name__)

"""
Constant to refer to all the managed types.
"""


class Permission(ABC):
    """
    The Permission class defines the authorization policy to be validated whenever the identified actions are
    requested on the matching resources.

    Attributes:
        name: The permission name (can be duplicated, used for logging troubleshooting).
        types: The list of protected resource types as defined by the `FeastObject` type. The match includes all the sub-classes of the given types.
            Defaults to all managed types (e.g. the `ALL_RESOURCE_TYPES` constant).
        name_pattern: A regex to match the resource name. Defaults to None, meaning that no name filtering is applied.
        actions: The actions authorized by this permission. Defaults to `ALL_ACTIONS`.
        policy: The policy to be applied to validate a client request.
        tags: A dictionary of key-value pairs to store arbitrary metadata.
        required_tags: Dictionary of key-value pairs that must all be present, with the given values, in the
            tags of a matching resource. Defaults to None, meaning that no tags filtering is applied.
    """

    _name: str
    _types: list["FeastObject"]
    _name_pattern: Optional[str]
    _actions: list[AuthzedAction]
    _policy: Policy
    _tags: Dict[str, str]
    _required_tags: dict[str, str]
    created_timestamp: Optional[datetime]
    last_updated_timestamp: Optional[datetime]

    def __init__(
        self,
        name: str,
        types: Optional[Union[list["FeastObject"], "FeastObject"]] = None,
        name_pattern: Optional[str] = None,
        actions: Union[list[AuthzedAction], AuthzedAction] = ALL_ACTIONS,
        policy: Policy = AllowAll,
        tags: Optional[dict[str, str]] = None,
        required_tags: Optional[dict[str, str]] = None,
    ):
        # Local import to avoid a circular dependency with feast.feast_object.
        from feast.feast_object import ALL_RESOURCE_TYPES

        if not types:
            types = ALL_RESOURCE_TYPES
        for t in types if isinstance(types, list) else [types]:
            if t not in ALL_RESOURCE_TYPES:
                raise ValueError(f"{t} is not one of the managed types")
        if actions is None or not actions:
            raise ValueError("The list 'actions' must be non-empty.")
        if not policy:
            raise ValueError("The list 'policy' must be non-empty.")
        self._name = name
        # Scalars are normalized to single-element lists.
        self._types = types if isinstance(types, list) else [types]
        self._name_pattern = _normalize_name_pattern(name_pattern)
        self._actions = actions if isinstance(actions, list) else [actions]
        self._policy = policy
        self._tags = _normalize_tags(tags)
        self._required_tags = _normalize_tags(required_tags)
        self.created_timestamp = None
        self.last_updated_timestamp = None

    def __eq__(self, other):
        # NOTE(review): raising on foreign types (instead of returning NotImplemented)
        # diverges from the usual __eq__ convention but matches the other Feast objects.
        if not isinstance(other, Permission):
            raise TypeError("Comparisons should only involve Permission class objects.")

        if (
            self.name != other.name
            or self.name_pattern != other.name_pattern
            or self.tags != other.tags
            or self.policy != other.policy
            or self.actions != other.actions
            or self.required_tags != other.required_tags
        ):
            return False

        # Types are compared as an unordered set.
        if set(self.types) != set(other.types):
            return False

        return True

    def __hash__(self):
        # Hash only by name: equal permissions always share the same name, so this is
        # consistent with __eq__ (though distinct permissions may collide).
        return hash(self.name)

    def __str__(self):
        return str(MessageToJson(self.to_proto()))

    @property
    def name(self) -> str:
        return self._name

    @property
    def types(self) -> list["FeastObject"]:
        return self._types

    @property
    def name_pattern(self) -> Optional[str]:
        return self._name_pattern

    @property
    def actions(self) -> list[AuthzedAction]:
        return self._actions

    @property
    def policy(self) -> Policy:
        return self._policy

    @property
    def tags(self) -> Dict[str, str]:
        return self._tags

    @property
    def required_tags(self) -> Dict[str, str]:
        return self._required_tags

    def match_resource(self, resource: "FeastObject") -> bool:
        """
        Returns:
            `True` when the given resource matches the type, name and tags filters defined in the permission.
        """
        return resource_match_config(
            resource=resource,
            expected_types=self.types,
            name_pattern=self.name_pattern,
            required_tags=self.required_tags,
        )

    def match_actions(self, requested_actions: list[AuthzedAction]) -> bool:
        """
        Returns:
            `True` when the given actions are included in the permitted actions.
        """
        return actions_match_config(
            allowed_actions=self.actions,
            requested_actions=requested_actions,
        )

    @staticmethod
    def from_proto(permission_proto: PermissionProto) -> Any:
        """
        Converts permission config in protobuf spec to a Permission class object.

        Args:
            permission_proto: A protobuf representation of a Permission.

        Returns:
            A Permission class object.
        """

        # Proto enum names are mapped back to the fully-qualified class names, then imported.
        types = [
            get_type_class_from_permission_type(
                _PERMISSION_TYPES[PermissionSpecProto.Type.Name(t)]
            )
            for t in permission_proto.spec.types
        ]
        actions = [
            AuthzedAction[PermissionSpecProto.AuthzedAction.Name(action)]
            for action in permission_proto.spec.actions
        ]

        # Empty proto values (name_pattern, tags, required_tags) are normalized to None.
        permission = Permission(
            permission_proto.spec.name,
            types,
            permission_proto.spec.name_pattern or None,
            actions,
            Policy.from_proto(permission_proto.spec.policy),
            dict(permission_proto.spec.tags) or None,
            dict(permission_proto.spec.required_tags) or None,
        )

        if permission_proto.meta.HasField("created_timestamp"):
            permission.created_timestamp = (
                permission_proto.meta.created_timestamp.ToDatetime()
            )
        if permission_proto.meta.HasField("last_updated_timestamp"):
            permission.last_updated_timestamp = (
                permission_proto.meta.last_updated_timestamp.ToDatetime()
            )

        return permission

    def to_proto(self) -> PermissionProto:
        """
        Converts this Permission to its protobuf representation.
        """
        # CamelCase class names are converted to the UPPER_SNAKE_CASE proto enum names.
        types = [
            PermissionSpecProto.Type.Value(
                re.sub(r"([a-z])([A-Z])", r"\1_\2", t.__name__).upper()  # type: ignore[union-attr]
            )
            for t in self.types
        ]

        actions = [
            PermissionSpecProto.AuthzedAction.Value(action.name)
            for action in self.actions
        ]

        permission_spec = PermissionSpecProto(
            name=self.name,
            types=types,
            name_pattern=self.name_pattern if self.name_pattern is not None else "",
            actions=actions,
            policy=self.policy.to_proto(),
            tags=self.tags,
            required_tags=self.required_tags,
        )

        meta = PermissionMetaProto()
        if self.created_timestamp:
            meta.created_timestamp.FromDatetime(self.created_timestamp)
        if self.last_updated_timestamp:
            meta.last_updated_timestamp.FromDatetime(self.last_updated_timestamp)

        return PermissionProto(spec=permission_spec, meta=meta)


def _normalize_name_pattern(name_pattern: Optional[str]):
    """Strip surrounding whitespace from the pattern; `None` stays `None`."""
    if name_pattern is not None:
        return name_pattern.strip()
    return None


def _normalize_tags(tags: Optional[dict[str, str]]):
    """Strip whitespace from tag keys and string values; empty/None input yields `None`."""
    if tags:
        return {
            k.strip(): v.strip() if isinstance(v, str) else v for k, v in tags.items()
        }
    return None


def get_type_class_from_permission_type(permission_type: str):
    """Import and return the class named by the fully-qualified `permission_type` string."""
    module_name, config_class_name = permission_type.rsplit(".", 1)
    return import_class(module_name, config_class_name)


# Maps the proto `PermissionSpec.Type` enum names to the fully-qualified Feast class names.
_PERMISSION_TYPES = {
    "FEATURE_VIEW": "feast.feature_view.FeatureView",
    "ON_DEMAND_FEATURE_VIEW": "feast.on_demand_feature_view.OnDemandFeatureView",
    "BATCH_FEATURE_VIEW": "feast.batch_feature_view.BatchFeatureView",
    "STREAM_FEATURE_VIEW": "feast.stream_feature_view.StreamFeatureView",
    "ENTITY": "feast.entity.Entity",
    "FEATURE_SERVICE": "feast.feature_service.FeatureService",
    "DATA_SOURCE": "feast.data_source.DataSource",
    "VALIDATION_REFERENCE": "feast.saved_dataset.ValidationReference",
    "SAVED_DATASET": "feast.saved_dataset.SavedDataset",
    "PERMISSION": "feast.permissions.permission.Permission",
}
from abc import ABC, abstractmethod
from typing import Any

from feast.permissions.user import User
from feast.protos.feast.core.Policy_pb2 import Policy as PolicyProto
from feast.protos.feast.core.Policy_pb2 import RoleBasedPolicy as RoleBasedPolicyProto


class Policy(ABC):
    """
    An abstract class to ensure that the current user matches the configured security policies.
    """

    @abstractmethod
    def validate_user(self, user: User) -> tuple[bool, str]:
        """
        Validate the given user against the configured policy.

        Args:
            user: The current user.

        Returns:
            bool: `True` if the user matches the policy criteria, `False` otherwise.
            str: A possibly empty explanation of the reason for not matching the configured policy.
        """
        raise NotImplementedError

    @staticmethod
    def from_proto(policy_proto: PolicyProto) -> Any:
        """
        Converts policy config in protobuf spec to a Policy class object.

        Args:
            policy_proto: A protobuf representation of a Policy.

        Returns:
            A Policy class object, or `None` when the proto carries no policy.

        Raises:
            NotImplementedError: If the `policy_type` oneof holds an unsupported value.
        """
        policy_type = policy_proto.WhichOneof("policy_type")
        if policy_type == "role_based_policy":
            return RoleBasedPolicy.from_proto(policy_proto)
        if policy_type is None:
            return None
        raise NotImplementedError(f"policy_type is unsupported: {policy_type}")

    @abstractmethod
    def to_proto(self) -> PolicyProto:
        """
        Converts this policy to its protobuf representation.
        """
        raise NotImplementedError


class RoleBasedPolicy(Policy):
    """
    A `Policy` implementation where the user roles must be enforced to grant access to the requested action.
    At least one of the configured roles must be granted to the current user in order to allow the execution of the secured operation.

    E.g., if the policy enforces roles `a` and `b`, the user must have at least one of them in order to satisfy the policy.
    """

    def __init__(
        self,
        roles: list[str],
    ):
        self.roles = roles

    def __eq__(self, other):
        # NOTE(review): raising on foreign types (instead of returning NotImplemented)
        # diverges from the usual __eq__ convention but matches the other Feast objects.
        if not isinstance(other, RoleBasedPolicy):
            raise TypeError(
                "Comparisons should only involve RoleBasedPolicy class objects."
            )

        # Role order is irrelevant for equality.
        if sorted(self.roles) != sorted(other.roles):
            return False

        return True

    def get_roles(self) -> list[str]:
        return self.roles

    def validate_user(self, user: User) -> tuple[bool, str]:
        """
        Validate the given `user` against the configured roles.
        """
        result = user.has_matching_role(self.roles)
        explain = "" if result else f"Requires roles {self.roles}"
        return (result, explain)

    @staticmethod
    def from_proto(policy_proto: PolicyProto) -> Any:
        """
        Converts policy config in protobuf spec to a Policy class object.

        Args:
            policy_proto: A protobuf representation of a Policy.

        Returns:
            A RoleBasedPolicy class object.
        """
        return RoleBasedPolicy(roles=list(policy_proto.role_based_policy.roles))

    def to_proto(self) -> PolicyProto:
        """
        Converts this policy to its protobuf representation.
        """

        role_based_policy_proto = RoleBasedPolicyProto(roles=self.roles)
        policy_proto = PolicyProto(role_based_policy=role_based_policy_proto)

        return policy_proto


def allow_all(self, user: User) -> tuple[bool, str]:
    # `Policy.validate_user` implementation for the AllowAll singleton: always grants access.
    return True, ""


def empty_policy(self) -> PolicyProto:
    # `Policy.to_proto` implementation for the AllowAll singleton: an empty proto.
    return PolicyProto()


"""
A `Policy` instance to allow execution of any action to each user
"""
AllowAll = type(
    "AllowAll",
    (Policy,),
    {Policy.validate_user.__name__: allow_all, Policy.to_proto.__name__: empty_policy},
)()
    def set_current_user(self, current_user: User):
        """
        Bind the given user to the current context.
        """
        self._current_user.set(current_user)

    @property
    def current_user(self) -> Optional[User]:
        """
        Returns:
            Optional[User]: the user bound to the current context, if any. The `contextvars` module is used to ensure that each concurrent request has its own
            individual user.
        """
        return self._current_user.get()

    @property
    def permissions(self) -> list[Permission]:
        """
        Returns:
            list[Permission]: the list of `Permission` configured in the Feast registry.
        """
        return self._registry.list_permissions(project=self._project)

    def assert_permissions(
        self,
        resources: list[FeastObject],
        actions: Union[AuthzedAction, List[AuthzedAction]],
        filter_only: bool = False,
    ) -> list[FeastObject]:
        """
        Verify if the current user is authorized to execute the requested actions on the given resources.

        If no permissions are defined, the result is to allow the execution.

        Args:
            resources: The resources for which we need to enforce authorized permission.
            actions: The requested actions to be authorized.
            filter_only: If `True`, it removes unauthorized resources from the returned value, otherwise it raises a `PermissionError` for the
                first unauthorized resource. Defaults to `False`.

        Returns:
            list[FeastObject]: A filtered list of the permitted resources, possibly empty.

        Raises:
            PermissionError: If the current user is not authorized to execute all the requested actions on the given resources.
        """
        return enforce_policy(
            permissions=self.permissions,
            # An anonymous user with no roles stands in when no user was bound to the context.
            user=self.current_user if self.current_user is not None else User("", []),
            resources=resources,
            actions=actions if isinstance(actions, list) else [actions],
            filter_only=filter_only,
        )


def assert_permissions(
    resource: FeastObject,
    actions: Union[AuthzedAction, List[AuthzedAction]],
) -> FeastObject:
    """
    A utility function to invoke the `assert_permissions` method on the global security manager.

    If no global `SecurityManager` is defined, the execution is permitted.

    Args:
        resource: The resource for which we need to enforce authorized permission.
        actions: The requested actions to be authorized.
    Returns:
        FeastObject: The original `resource`, if permitted.

    Raises:
        PermissionError: If the current user is not authorized to execute the requested actions on the given resources.
    """
    sm = get_security_manager()
    if sm is None:
        return resource
    return sm.assert_permissions(
        resources=[resource], actions=actions, filter_only=False
    )[0]


def permitted_resources(
    resources: list[FeastObject],
    actions: Union[AuthzedAction, List[AuthzedAction]],
) -> list[FeastObject]:
    """
    A utility function to invoke the `assert_permissions` method on the global security manager.

    If no global `SecurityManager` is defined, the execution is permitted.

    Args:
        resources: The resources for which we need to enforce authorized permission.
        actions: The requested actions to be authorized.
    Returns:
        list[FeastObject]]: A filtered list of the permitted resources, possibly empty.
    """
    sm = get_security_manager()
    if sm is None:
        return resources
    return sm.assert_permissions(resources=resources, actions=actions, filter_only=True)


"""
The possibly empty global instance of `SecurityManager`.
"""
_sm: Optional[SecurityManager] = None


def get_security_manager() -> Optional[SecurityManager]:
    """
    Return the global instance of `SecurityManager`.
    """
    global _sm
    return _sm


def set_security_manager(sm: SecurityManager):
    """
    Initialize the global instance of `SecurityManager`.
    """

    global _sm
    _sm = sm


def no_security_manager():
    """
    Initialize the empty global instance of `SecurityManager`.
    """

    global _sm
    _sm = None
"""
A module with utility functions and classes to support authorizing the Arrow Flight servers.
"""

import asyncio
import functools
import logging
from typing import Optional, cast

import pyarrow.flight as fl
from pyarrow.flight import ServerCallContext

from feast.permissions.auth.auth_manager import (
    get_auth_manager,
)
from feast.permissions.security_manager import get_security_manager
from feast.permissions.server.utils import (
    AuthManagerType,
)
from feast.permissions.user import User

logger = logging.getLogger(__name__)
# NOTE(review): setting a level in library code overrides the application's logging
# configuration — confirm this is intended.
logger.setLevel(logging.INFO)


def arrowflight_middleware(
    auth_type: AuthManagerType,
) -> Optional[dict[str, fl.ServerMiddlewareFactory]]:
    """
    A dictionary with the configured middlewares to support extracting the user details when the authorization manager is defined.
    The authorization middleware key is `auth`.

    Returns:
        dict[str, fl.ServerMiddlewareFactory]: Optional dictionary of middlewares. If the authorization type is set to `NONE`, it returns `None`.
    """

    if auth_type == AuthManagerType.NONE:
        return None

    return {
        "auth": AuthorizationMiddlewareFactory(),
    }


class AuthorizationMiddlewareFactory(fl.ServerMiddlewareFactory):
    """
    A middleware factory to intercept the authorization header and propagate it to the authorization middleware.
    """

    def start_call(self, info, headers):
        """
        Intercept the authorization header and propagate it to the authorization middleware.
        """
        access_token = get_auth_manager().token_extractor.extract_access_token(
            headers=headers
        )
        return AuthorizationMiddleware(access_token=access_token)


class AuthorizationMiddleware(fl.ServerMiddleware):
    """
    A server middleware holding the authorization header and offering a method to extract the user credentials.
    """

    def __init__(self, access_token: str):
        self.access_token = access_token

    def call_completed(self, exception):
        # Route errors through the module logger instead of print so they reach
        # the configured logging handlers.
        if exception:
            logger.warning(f"{AuthorizationMiddleware.__name__} received {exception}")

    async def extract_user(self) -> User:
        """
        Use the configured `TokenParser` to extract the user credentials.
        """
        return await get_auth_manager().token_parser.user_details_from_access_token(
            self.access_token
        )


def inject_user_details(context: ServerCallContext):
    """
    Function to use in Arrow Flight endpoints (e.g. `do_get`, `do_put` and so on) to access the token extracted from the header,
    extract the user details out of it and propagate them to the current security manager, if any.

    Args:
        context: The endpoint context.
    """
    if context.get_middleware("auth") is None:
        logger.info("No `auth` middleware.")
        return

    sm = get_security_manager()
    if sm is not None:
        auth_middleware = cast(AuthorizationMiddleware, context.get_middleware("auth"))
        current_user = asyncio.run(auth_middleware.extract_user())
        # Replaced print with a debug log; user details are not meant for stdout.
        logger.debug("extracted user: %s", current_user)

        sm.set_current_user(current_user)


def inject_user_details_decorator(func):
    """Decorator wrapping Arrow Flight endpoints to run `inject_user_details` before the endpoint body."""

    @functools.wraps(func)
    def wrapper(self, context, *args, **kwargs):
        inject_user_details(context)
        return func(self, context, *args, **kwargs)

    return wrapper


from starlette.authentication import (
    AuthenticationError,
)

from feast.permissions.auth.token_extractor import TokenExtractor


class ArrowFlightTokenExtractor(TokenExtractor):
    def extract_access_token(self, **kwargs) -> str:
        """
        Token extractor for Arrow Flight requests.

        Requires a keyword argument called `headers` of type `dict`.

        Returns:
            The extracted access token.

        Raises:
            ValueError: If the `headers` keyword argument is missing or not a dict.
            AuthenticationError: If no authorization header is found or its value has an unexpected shape.
        """

        if "headers" not in kwargs:
            raise ValueError("Missing keyword argument 'headers'")
        if not isinstance(kwargs["headers"], dict):
            raise ValueError(
                f"The keyword argument 'headers' is not of the expected type {dict.__name__}"
            )

        headers = kwargs["headers"]
        for header in headers:
            if header.lower() == "authorization":
                # With Arrow Flight, the header value is a list and we take the 0-th element
                if not isinstance(headers[header], list):
                    raise AuthenticationError(
                        f"Authorization header must be of type list, found {type(headers[header])}"
                    )
                return self._extract_bearer_token(headers[header][0])

        raise AuthenticationError("Missing authorization header")


import grpc

from feast.permissions.server.utils import (
    AuthManagerType,
)


def grpc_interceptors(
    auth_type: AuthManagerType,
) -> Optional[list[grpc.ServerInterceptor]]:
    """
    A list of the authorization interceptors.

    Args:
        auth_type: The type of authorization manager, from the feature store configuration.

    Returns:
        list[grpc.ServerInterceptor]: Optional list of interceptors. If the authorization type is set to `NONE`, it returns `None`.
    """
    if auth_type == AuthManagerType.NONE:
        return None

    return [AuthInterceptor()]
class AuthInterceptor(grpc.ServerInterceptor):
    """
    A gRPC server interceptor extracting the access token from the request metadata and
    propagating the parsed user details to the security manager, if one is configured.
    """

    def intercept_service(self, continuation, handler_call_details):
        sm = get_security_manager()

        if sm is not None:
            auth_manager = get_auth_manager()
            access_token = auth_manager.token_extractor.extract_access_token(
                metadata=dict(handler_call_details.invocation_metadata)
            )

            # Replaced print with debug logging; only the token length is logged,
            # never the token itself.
            logger.debug("Fetching user for token of length %d", len(access_token))
            current_user = asyncio.run(
                auth_manager.token_parser.user_details_from_access_token(access_token)
            )
            logger.debug("User is: %s", current_user)
            sm.set_current_user(current_user)

        return continuation(handler_call_details)


from starlette.authentication import (
    AuthenticationError,
)

from feast.permissions.auth.token_extractor import TokenExtractor


class GrpcTokenExtractor(TokenExtractor):
    def extract_access_token(self, **kwargs) -> str:
        """
        Token extractor for grpc server requests.

        Requires a keyword argument called `metadata` of type `dict`.

        Returns:
            The extracted access token.

        Raises:
            ValueError: If the `metadata` keyword argument is missing or not a dict.
            AuthenticationError: If no authorization header is found in the metadata.
        """

        if "metadata" not in kwargs:
            raise ValueError("Missing keyword argument 'metadata'")
        if not isinstance(kwargs["metadata"], dict):
            raise ValueError(
                f"The keyword argument 'metadata' is not of the expected type {dict.__name__} but {type(kwargs['metadata'])}"
            )

        metadata = kwargs["metadata"]
        for header in metadata:
            if header.lower() == "authorization":
                return self._extract_bearer_token(metadata[header])

        raise AuthenticationError("Missing authorization header")


"""
A module with utility functions to support authorizing the REST servers using the FastAPI framework.
"""

from typing import Any

from fastapi.requests import Request

from feast.permissions.auth.auth_manager import (
    get_auth_manager,
)
from feast.permissions.security_manager import get_security_manager


async def inject_user_details(request: Request) -> Any:
    """
    A function to extract the authorization token from a user request, extract the user details and propagate them to the
    current security manager, if any.

    Returns:
        The parsed user details, or `None` when no security manager is configured.
    """
    sm = get_security_manager()
    current_user = None
    if sm is not None:
        auth_manager = get_auth_manager()
        access_token = auth_manager.token_extractor.extract_access_token(
            request=request
        )
        current_user = await auth_manager.token_parser.user_details_from_access_token(
            access_token=access_token
        )

        sm.set_current_user(current_user)

    return current_user
from fastapi.requests import Request
from starlette.authentication import (
    AuthenticationError,
)

from feast.permissions.auth.token_extractor import TokenExtractor


class RestTokenExtractor(TokenExtractor):
    def extract_access_token(self, **kwargs) -> str:
        """
        Token extractor for REST requests.

        Requires a keyword argument called `request` of type `Request`.

        Returns:
            The extracted access token.

        Raises:
            ValueError: If the `request` keyword argument is missing or of the wrong type.
            AuthenticationError: If no authorization header is found.
        """

        if "request" not in kwargs:
            raise ValueError("Missing keyword argument 'request'")
        if not isinstance(kwargs["request"], Request):
            raise ValueError(
                f"The keyword argument 'request' is not of the expected type {Request.__name__}"
            )

        request = kwargs["request"]
        headers = request.headers
        for header in headers:
            if header.lower() == "authorization":
                return self._extract_bearer_token(headers[header])

        raise AuthenticationError("Missing authorization header")


"""
A module with utility functions to support the authorization management in Feast servers.
"""

import enum
import logging

import feast
from feast.permissions.auth.auth_manager import (
    AllowAll,
    AuthManager,
    set_auth_manager,
)
from feast.permissions.auth.kubernetes_token_parser import KubernetesTokenParser
from feast.permissions.auth.oidc_token_parser import OidcTokenParser
from feast.permissions.auth.token_extractor import TokenExtractor
from feast.permissions.auth.token_parser import TokenParser
from feast.permissions.auth_model import AuthConfig, OidcAuthConfig
from feast.permissions.security_manager import (
    SecurityManager,
    no_security_manager,
    set_security_manager,
)
from feast.permissions.server.arrow_flight_token_extractor import (
    ArrowFlightTokenExtractor,
)
from feast.permissions.server.grpc_token_extractor import GrpcTokenExtractor
from feast.permissions.server.rest_token_extractor import RestTokenExtractor

logger = logging.getLogger(__name__)
# NOTE(review): setting a level in library code overrides the application's logging
# configuration — confirm this is intended.
logger.setLevel(logging.INFO)


class ServerType(enum.Enum):
    """
    Identify the server type.
    """

    REST = "rest"
    ARROW = "arrow"
    GRPC = "grpc"  # TODO RBAC: to be completed
class AuthManagerType(enum.Enum):
    """
    Identify the type of authorization manager.
    """

    NONE = "no_auth"
    OIDC = "oidc"
    KUBERNETES = "kubernetes"


def str_to_auth_manager_type(value: str) -> AuthManagerType:
    """
    Map a configuration string to the matching `AuthManagerType` (case-insensitive).

    Unknown values fall back to `AuthManagerType.NONE`, with a warning.
    """
    for t in AuthManagerType.__members__.values():
        if t.value.lower() == value.lower():
            return t

    logger.warning(
        f"Requested unmanaged AuthManagerType of value {value}. Using NONE instead."
    )
    return AuthManagerType.NONE


def init_security_manager(auth_type: AuthManagerType, fs: "feast.FeatureStore"):
    """
    Initialize the global security manager.
    Must be invoked at Feast server initialization time to create the `SecurityManager` instance.

    Args:
        auth_type: The authorization manager type.
        fs: The feature store whose project and registry back the security manager.
    """
    if auth_type == AuthManagerType.NONE:
        no_security_manager()
    else:
        # TODO permissions from registry
        set_security_manager(
            SecurityManager(
                project=fs.project,
                registry=fs.registry,
            )
        )


def init_auth_manager(
    server_type: "ServerType", auth_type: AuthManagerType, auth_config: "AuthConfig"
):
    """
    Initialize the global authorization manager.
    Must be invoked at Feast server initialization time to create the `AuthManager` instance.

    Args:
        server_type: The server type.
        auth_type: The authorization manager type.
        auth_config: The authorization configuration (e.g. OIDC settings).

    Raises:
        ValueError: If any input argument has an unmanaged value.
    """
    if auth_type == AuthManagerType.NONE:
        set_auth_manager(AllowAll())
    else:
        token_extractor: TokenExtractor
        token_parser: TokenParser

        # The token extractor depends on the server flavor (how headers are carried)...
        if server_type == ServerType.REST:
            token_extractor = RestTokenExtractor()
        elif server_type == ServerType.ARROW:
            token_extractor = ArrowFlightTokenExtractor()
        elif server_type == ServerType.GRPC:
            token_extractor = GrpcTokenExtractor()
        else:
            raise ValueError(f"Unmanaged server type {server_type}")

        # ...while the token parser depends on the identity provider.
        if auth_type == AuthManagerType.KUBERNETES:
            token_parser = KubernetesTokenParser()
        elif auth_type == AuthManagerType.OIDC:
            assert isinstance(auth_config, OidcAuthConfig)
            token_parser = OidcTokenParser(auth_config=auth_config)
        else:
            raise ValueError(f"Unmanaged authorization manager type {auth_type}")

        auth_manager = AuthManager(
            token_extractor=token_extractor, token_parser=token_parser
        )
        set_auth_manager(auth_manager)


import logging

logger = logging.getLogger(__name__)


class User:
    """
    Holds the username and roles of the authenticated caller of the current request.
    """

    _username: str
    _roles: list[str]

    def __init__(self, username: str, roles: list[str]):
        self._username = username
        self._roles = roles

    @property
    def username(self):
        return self._username

    @property
    def roles(self):
        return self._roles

    def has_matching_role(self, requested_roles: list[str]) -> bool:
        """
        Verify that the user has at least one of the requested roles.

        Args:
            requested_roles: The list of requested roles.

        Returns:
            bool: `True` if at least one of the requested roles is granted to the user, `False` otherwise.
        """
        # Docstring/message fixed: the check is `any(...)`, not "all roles registered".
        logger.debug(
            f"Check {self.username} has any of {requested_roles}: currently {self.roles}"
        )
        return any(role in self.roles for role in requested_roles)

    def __str__(self):
        return f"{self.username} ({self.roles})"
self.proxied_registry.apply_entity( - entity=Entity.from_proto(request.entity), + entity=cast( + Entity, + assert_permissions( + resource=Entity.from_proto(request.entity), + actions=CRUD, + ), + ), project=request.project, commit=request.commit, ) + return Empty() def GetEntity(self, request: RegistryServer_pb2.GetEntityRequest, context): - return self.proxied_registry.get_entity( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( + self.proxied_registry.get_entity( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + actions=[AuthzedAction.DESCRIBE], ).to_proto() def ListEntities(self, request: RegistryServer_pb2.ListEntitiesRequest, context): return RegistryServer_pb2.ListEntitiesResponse( entities=[ entity.to_proto() - for entity in self.proxied_registry.list_entities( - project=request.project, - allow_cache=request.allow_cache, - tags=dict(request.tags), + for entity in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_entities( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) def DeleteEntity(self, request: RegistryServer_pb2.DeleteEntityRequest, context): + assert_permissions( + resource=self.proxied_registry.get_entity( + name=request.name, project=request.project + ), + actions=AuthzedAction.DELETE, + ) + self.proxied_registry.delete_entity( name=request.name, project=request.project, commit=request.commit ) @@ -58,16 +95,30 @@ def DeleteEntity(self, request: RegistryServer_pb2.DeleteEntityRequest, context) def ApplyDataSource( self, request: RegistryServer_pb2.ApplyDataSourceRequest, context ): - self.proxied_registry.apply_data_source( - data_source=DataSource.from_proto(request.data_source), - project=request.project, - commit=request.commit, + ( + self.proxied_registry.apply_data_source( + data_source=cast( + DataSource, + 
assert_permissions( + resource=DataSource.from_proto(request.data_source), + actions=CRUD, + ), + ), + project=request.project, + commit=request.commit, + ), ) + return Empty() def GetDataSource(self, request: RegistryServer_pb2.GetDataSourceRequest, context): - return self.proxied_registry.get_data_source( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( + resource=self.proxied_registry.get_data_source( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + actions=AuthzedAction.DESCRIBE, ).to_proto() def ListDataSources( @@ -76,10 +127,16 @@ def ListDataSources( return RegistryServer_pb2.ListDataSourcesResponse( data_sources=[ data_source.to_proto() - for data_source in self.proxied_registry.list_data_sources( - project=request.project, - allow_cache=request.allow_cache, - tags=dict(request.tags), + for data_source in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_data_sources( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) @@ -87,6 +144,14 @@ def ListDataSources( def DeleteDataSource( self, request: RegistryServer_pb2.DeleteDataSourceRequest, context ): + assert_permissions( + resource=self.proxied_registry.get_data_source( + name=request.name, + project=request.project, + ), + actions=AuthzedAction.DELETE, + ) + self.proxied_registry.delete_data_source( name=request.name, project=request.project, commit=request.commit ) @@ -95,8 +160,13 @@ def DeleteDataSource( def GetFeatureView( self, request: RegistryServer_pb2.GetFeatureViewRequest, context ): - return self.proxied_registry.get_feature_view( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( + self.proxied_registry.get_feature_view( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + 
actions=[AuthzedAction.DESCRIBE], ).to_proto() def ApplyFeatureView( @@ -112,9 +182,17 @@ def ApplyFeatureView( elif feature_view_type == "stream_feature_view": feature_view = StreamFeatureView.from_proto(request.stream_feature_view) - self.proxied_registry.apply_feature_view( - feature_view=feature_view, project=request.project, commit=request.commit + ( + self.proxied_registry.apply_feature_view( + feature_view=cast( + FeatureView, + assert_permissions(resource=feature_view, actions=CRUD), + ), + project=request.project, + commit=request.commit, + ), ) + return Empty() def ListFeatureViews( @@ -123,10 +201,16 @@ def ListFeatureViews( return RegistryServer_pb2.ListFeatureViewsResponse( feature_views=[ feature_view.to_proto() - for feature_view in self.proxied_registry.list_feature_views( - project=request.project, - allow_cache=request.allow_cache, - tags=dict(request.tags), + for feature_view in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_feature_views( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) @@ -134,6 +218,21 @@ def ListFeatureViews( def DeleteFeatureView( self, request: RegistryServer_pb2.DeleteFeatureViewRequest, context ): + feature_view: Union[StreamFeatureView, FeatureView] + + try: + feature_view = self.proxied_registry.get_stream_feature_view( + name=request.name, project=request.project, allow_cache=False + ) + except FeatureViewNotFoundException: + feature_view = self.proxied_registry.get_feature_view( + name=request.name, project=request.project, allow_cache=False + ) + + assert_permissions( + resource=feature_view, + actions=[AuthzedAction.DELETE], + ) self.proxied_registry.delete_feature_view( name=request.name, project=request.project, commit=request.commit ) @@ -142,8 +241,13 @@ def DeleteFeatureView( def GetStreamFeatureView( self, request: RegistryServer_pb2.GetStreamFeatureViewRequest, context ): - 
return self.proxied_registry.get_stream_feature_view( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( + resource=self.proxied_registry.get_stream_feature_view( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + actions=[AuthzedAction.DESCRIBE], ).to_proto() def ListStreamFeatureViews( @@ -152,10 +256,16 @@ def ListStreamFeatureViews( return RegistryServer_pb2.ListStreamFeatureViewsResponse( stream_feature_views=[ stream_feature_view.to_proto() - for stream_feature_view in self.proxied_registry.list_stream_feature_views( - project=request.project, - allow_cache=request.allow_cache, - tags=dict(request.tags), + for stream_feature_view in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_stream_feature_views( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) @@ -163,8 +273,13 @@ def ListStreamFeatureViews( def GetOnDemandFeatureView( self, request: RegistryServer_pb2.GetOnDemandFeatureViewRequest, context ): - return self.proxied_registry.get_on_demand_feature_view( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( + resource=self.proxied_registry.get_on_demand_feature_view( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + actions=[AuthzedAction.DESCRIBE], ).to_proto() def ListOnDemandFeatureViews( @@ -173,10 +288,16 @@ def ListOnDemandFeatureViews( return RegistryServer_pb2.ListOnDemandFeatureViewsResponse( on_demand_feature_views=[ on_demand_feature_view.to_proto() - for on_demand_feature_view in self.proxied_registry.list_on_demand_feature_views( - project=request.project, - allow_cache=request.allow_cache, - tags=dict(request.tags), + for on_demand_feature_view in permitted_resources( + resources=cast( + list[FeastObject], + 
self.proxied_registry.list_on_demand_feature_views( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) @@ -185,17 +306,29 @@ def ApplyFeatureService( self, request: RegistryServer_pb2.ApplyFeatureServiceRequest, context ): self.proxied_registry.apply_feature_service( - feature_service=FeatureService.from_proto(request.feature_service), + feature_service=cast( + FeatureService, + assert_permissions( + resource=FeatureService.from_proto(request.feature_service), + actions=CRUD, + ), + ), project=request.project, commit=request.commit, ) + return Empty() def GetFeatureService( self, request: RegistryServer_pb2.GetFeatureServiceRequest, context ): - return self.proxied_registry.get_feature_service( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( + resource=self.proxied_registry.get_feature_service( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + actions=[AuthzedAction.DESCRIBE], ).to_proto() def ListFeatureServices( @@ -204,10 +337,16 @@ def ListFeatureServices( return RegistryServer_pb2.ListFeatureServicesResponse( feature_services=[ feature_service.to_proto() - for feature_service in self.proxied_registry.list_feature_services( - project=request.project, - allow_cache=request.allow_cache, - tags=dict(request.tags), + for feature_service in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_feature_services( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) @@ -215,6 +354,15 @@ def ListFeatureServices( def DeleteFeatureService( self, request: RegistryServer_pb2.DeleteFeatureServiceRequest, context ): + ( + assert_permissions( + resource=self.proxied_registry.get_feature_service( + name=request.name, project=request.project + ), + 
actions=[AuthzedAction.DELETE], + ), + ) + self.proxied_registry.delete_feature_service( name=request.name, project=request.project, commit=request.commit ) @@ -223,18 +371,32 @@ def DeleteFeatureService( def ApplySavedDataset( self, request: RegistryServer_pb2.ApplySavedDatasetRequest, context ): - self.proxied_registry.apply_saved_dataset( - saved_dataset=SavedDataset.from_proto(request.saved_dataset), - project=request.project, - commit=request.commit, + ( + self.proxied_registry.apply_saved_dataset( + saved_dataset=cast( + SavedDataset, + assert_permissions( + resource=SavedDataset.from_proto(request.saved_dataset), + actions=CRUD, + ), + ), + project=request.project, + commit=request.commit, + ), ) + return Empty() def GetSavedDataset( self, request: RegistryServer_pb2.GetSavedDatasetRequest, context ): - return self.proxied_registry.get_saved_dataset( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( + self.proxied_registry.get_saved_dataset( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + actions=[AuthzedAction.DESCRIBE], ).to_proto() def ListSavedDatasets( @@ -243,8 +405,16 @@ def ListSavedDatasets( return RegistryServer_pb2.ListSavedDatasetsResponse( saved_datasets=[ saved_dataset.to_proto() - for saved_dataset in self.proxied_registry.list_saved_datasets( - project=request.project, allow_cache=request.allow_cache + for saved_dataset in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_saved_datasets( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) @@ -252,6 +422,13 @@ def ListSavedDatasets( def DeleteSavedDataset( self, request: RegistryServer_pb2.DeleteSavedDatasetRequest, context ): + assert_permissions( + resource=self.proxied_registry.get_saved_dataset( + name=request.name, project=request.project + ), + 
actions=[AuthzedAction.DELETE], + ) + self.proxied_registry.delete_saved_dataset( name=request.name, project=request.project, commit=request.commit ) @@ -261,19 +438,29 @@ def ApplyValidationReference( self, request: RegistryServer_pb2.ApplyValidationReferenceRequest, context ): self.proxied_registry.apply_validation_reference( - validation_reference=ValidationReference.from_proto( - request.validation_reference + validation_reference=cast( + ValidationReference, + assert_permissions( + ValidationReference.from_proto(request.validation_reference), + actions=CRUD, + ), ), project=request.project, commit=request.commit, ) + return Empty() def GetValidationReference( self, request: RegistryServer_pb2.GetValidationReferenceRequest, context ): - return self.proxied_registry.get_validation_reference( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( + self.proxied_registry.get_validation_reference( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + actions=[AuthzedAction.DESCRIBE], ).to_proto() def ListValidationReferences( @@ -282,8 +469,16 @@ def ListValidationReferences( return RegistryServer_pb2.ListValidationReferencesResponse( validation_references=[ validation_reference.to_proto() - for validation_reference in self.proxied_registry.list_validation_references( - project=request.project, allow_cache=request.allow_cache + for validation_reference in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_validation_references( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) @@ -291,6 +486,12 @@ def ListValidationReferences( def DeleteValidationReference( self, request: RegistryServer_pb2.DeleteValidationReferenceRequest, context ): + assert_permissions( + resource=self.proxied_registry.get_validation_reference( + name=request.name, 
project=request.project + ), + actions=[AuthzedAction.DELETE], + ) self.proxied_registry.delete_validation_reference( name=request.name, project=request.project, commit=request.commit ) @@ -311,6 +512,11 @@ def ListProjectMetadata( def ApplyMaterialization( self, request: RegistryServer_pb2.ApplyMaterializationRequest, context ): + assert_permissions( + resource=FeatureView.from_proto(request.feature_view), + actions=[AuthzedAction.WRITE_ONLINE], + ) + self.proxied_registry.apply_materialization( feature_view=FeatureView.from_proto(request.feature_view), project=request.project, @@ -338,6 +544,67 @@ def GetInfra(self, request: RegistryServer_pb2.GetInfraRequest, context): project=request.project, allow_cache=request.allow_cache ).to_proto() + def ApplyPermission( + self, request: RegistryServer_pb2.ApplyPermissionRequest, context + ): + self.proxied_registry.apply_permission( + permission=cast( + Permission, + assert_permissions( + Permission.from_proto(request.permission), actions=CRUD + ), + ), + project=request.project, + commit=request.commit, + ) + return Empty() + + def GetPermission(self, request: RegistryServer_pb2.GetPermissionRequest, context): + permission = self.proxied_registry.get_permission( + name=request.name, project=request.project, allow_cache=request.allow_cache + ) + assert_permissions( + resource=permission, + actions=[AuthzedAction.DESCRIBE], + ) + permission.to_proto().spec.project = request.project + + return permission.to_proto() + + def ListPermissions( + self, request: RegistryServer_pb2.ListPermissionsRequest, context + ): + return RegistryServer_pb2.ListPermissionsResponse( + permissions=[ + permission.to_proto() + for permission in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_permissions( + project=request.project, allow_cache=request.allow_cache + ), + ), + actions=AuthzedAction.DESCRIBE, + ) + ] + ) + + def DeletePermission( + self, request: RegistryServer_pb2.DeletePermissionRequest, 
context + ): + assert_permissions( + resource=self.proxied_registry.get_permission( + name=request.name, + project=request.project, + ), + actions=[AuthzedAction.DELETE], + ) + + self.proxied_registry.delete_permission( + name=request.name, project=request.project, commit=request.commit + ) + return Empty() + def Commit(self, request, context): self.proxied_registry.commit() return Empty() @@ -350,8 +617,19 @@ def Proto(self, request, context): return self.proxied_registry.proto() -def start_server(store: FeatureStore, port: int): - server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) +def start_server(store: FeatureStore, port: int, wait_for_termination: bool = True): + auth_manager_type = str_to_auth_manager_type(store.config.auth_config.type) + init_security_manager(auth_type=auth_manager_type, fs=store) + init_auth_manager( + auth_type=auth_manager_type, + server_type=ServerType.GRPC, + auth_config=store.config.auth_config, + ) + + server = grpc.server( + futures.ThreadPoolExecutor(max_workers=10), + interceptors=grpc_interceptors(auth_manager_type), + ) RegistryServer_pb2_grpc.add_RegistryServerServicer_to_server( RegistryServer(store.registry), server ) @@ -369,4 +647,7 @@ def start_server(store: FeatureStore, port: int): server.add_insecure_port(f"[::]:{port}") server.start() - server.wait_for_termination() + if wait_for_termination: + server.wait_for_termination() + else: + return server diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index fc2792e323..069b579999 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -19,12 +19,14 @@ from feast.errors import ( FeastFeatureServerTypeInvalidError, + FeastInvalidAuthConfigClass, FeastOfflineStoreInvalidName, FeastOnlineStoreInvalidName, FeastRegistryNotSetError, FeastRegistryTypeInvalidError, ) from feast.importer import import_class +from feast.permissions.auth.auth_type import AuthType warnings.simplefilter("once", RuntimeWarning) @@ 
-86,6 +88,12 @@ "local": "feast.infra.feature_servers.local_process.config.LocalFeatureServerConfig", } +AUTH_CONFIGS_CLASS_FOR_TYPE = { + "no_auth": "feast.permissions.auth_model.NoAuthConfig", + "kubernetes": "feast.permissions.auth_model.KubernetesAuthConfig", + "oidc": "feast.permissions.auth_model.OidcAuthConfig", +} + class FeastBaseModel(BaseModel): """Feast Pydantic Configuration Class""" @@ -167,6 +175,9 @@ class RepoConfig(FeastBaseModel): online_config: Any = Field(None, alias="online_store") """ OnlineStoreConfig: Online store configuration (optional depending on provider) """ + auth: Any = Field(None, alias="auth") + """ auth: Optional if the services needs the authentication against IDPs (optional depending on provider) """ + offline_config: Any = Field(None, alias="offline_store") """ OfflineStoreConfig: Offline store configuration (optional depending on provider) """ @@ -211,6 +222,13 @@ def __init__(self, **data: Any): self._online_store = None self.online_config = data.get("online_store", "sqlite") + self._auth = None + if "auth" not in data: + self.auth = dict() + self.auth["type"] = AuthType.NONE.value + else: + self.auth = data.get("auth") + self._batch_engine = None if "batch_engine" in data: self.batch_engine_config = data["batch_engine"] @@ -270,6 +288,20 @@ def offline_store(self): self._offline_store = self.offline_config return self._offline_store + @property + def auth_config(self): + if not self._auth: + if isinstance(self.auth, Dict): + self._auth = get_auth_config_from_type(self.auth.get("type"))( + **self.auth + ) + elif isinstance(self.auth, str): + self._auth = get_auth_config_from_type(self.auth.get("type"))() + elif self.auth: + self._auth = self.auth + + return self._auth + @property def online_store(self): if not self._online_store: @@ -300,6 +332,30 @@ def batch_engine(self): return self._batch_engine + @model_validator(mode="before") + def _validate_auth_config(cls, values: Any) -> Any: + from feast.permissions.auth_model 
import AuthConfig + + if "auth" in values: + allowed_auth_types = AUTH_CONFIGS_CLASS_FOR_TYPE.keys() + if isinstance(values["auth"], Dict): + if values["auth"].get("type") is None: + raise ValueError( + f"auth configuration is missing authentication type. Possible values={allowed_auth_types}" + ) + elif values["auth"]["type"] not in allowed_auth_types: + raise ValueError( + f'auth configuration has invalid authentication type={values["auth"]["type"]}. Possible ' + f'values={allowed_auth_types}' + ) + elif isinstance(values["auth"], AuthConfig): + if values["auth"].type not in allowed_auth_types: + raise ValueError( + f'auth configuration has invalid authentication type={values["auth"].type}. Possible ' + f'values={allowed_auth_types}' + ) + return values + @model_validator(mode="before") def _validate_online_store_config(cls, values: Any) -> Any: # This method will validate whether the online store configurations are set correctly. This explicit validation @@ -480,6 +536,17 @@ def get_online_config_from_type(online_store_type: str): return import_class(module_name, config_class_name, config_class_name) +def get_auth_config_from_type(auth_config_type: str): + if auth_config_type in AUTH_CONFIGS_CLASS_FOR_TYPE: + auth_config_type = AUTH_CONFIGS_CLASS_FOR_TYPE[auth_config_type] + elif not auth_config_type.endswith("AuthConfig"): + raise FeastInvalidAuthConfigClass(auth_config_type) + module_name, online_store_class_type = auth_config_type.rsplit(".", 1) + config_class_name = f"{online_store_class_type}" + + return import_class(module_name, config_class_name, config_class_name) + + def get_offline_config_from_type(offline_store_type: str): if offline_store_type in OFFLINE_STORE_CLASS_FOR_TYPE: offline_store_type = OFFLINE_STORE_CLASS_FOR_TYPE[offline_store_type] diff --git a/sdk/python/feast/repo_contents.py b/sdk/python/feast/repo_contents.py index 33b99f29b2..9893d5be4e 100644 --- a/sdk/python/feast/repo_contents.py +++ b/sdk/python/feast/repo_contents.py @@ -18,6 
+18,7 @@ from feast.feature_service import FeatureService from feast.feature_view import FeatureView from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.permission import Permission from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.stream_feature_view import StreamFeatureView @@ -33,6 +34,7 @@ class RepoContents(NamedTuple): stream_feature_views: List[StreamFeatureView] entities: List[Entity] feature_services: List[FeatureService] + permissions: List[Permission] def to_registry_proto(self) -> RegistryProto: registry_proto = RegistryProto() @@ -50,4 +52,6 @@ def to_registry_proto(self) -> RegistryProto: registry_proto.stream_feature_views.extend( [fv.to_proto() for fv in self.stream_feature_views] ) + registry_proto.permissions.extend([p.to_proto() for p in self.permissions]) + return registry_proto diff --git a/sdk/python/feast/repo_operations.py b/sdk/python/feast/repo_operations.py index 0a89ab72ca..cb27568957 100644 --- a/sdk/python/feast/repo_operations.py +++ b/sdk/python/feast/repo_operations.py @@ -27,6 +27,7 @@ from feast.infra.registry.registry import FEAST_OBJECT_TYPES, FeastObjectType, Registry from feast.names import adjectives, animals from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.permission import Permission from feast.repo_config import RepoConfig from feast.repo_contents import RepoContents from feast.stream_feature_view import StreamFeatureView @@ -120,6 +121,7 @@ def parse_repo(repo_root: Path) -> RepoContents: feature_services=[], on_demand_feature_views=[], stream_feature_views=[], + permissions=[], ) for repo_file in get_repo_files(repo_root): @@ -201,6 +203,10 @@ def parse_repo(repo_root: Path) -> RepoContents: (obj is odfv) for odfv in res.on_demand_feature_views ): res.on_demand_feature_views.append(obj) + elif isinstance(obj, Permission) and not any( + (obj is p) for p in res.permissions + ): + res.permissions.append(obj) 
res.entities.append(DUMMY_ENTITY) return res @@ -367,7 +373,12 @@ def registry_dump(repo_config: RepoConfig, repo_path: Path) -> str: """For debugging only: output contents of the metadata registry""" registry_config = repo_config.registry project = repo_config.project - registry = Registry(project, registry_config=registry_config, repo_path=repo_path) + registry = Registry( + project, + registry_config=registry_config, + repo_path=repo_path, + auth_config=repo_config.auth_config, + ) registry_dict = registry.to_dict(project=project) return json.dumps(registry_dict, indent=2, sort_keys=True) diff --git a/sdk/python/feast/templates/local/feature_repo/feature_store.yaml b/sdk/python/feast/templates/local/feature_repo/feature_store.yaml index 3e6a360316..11b339583e 100644 --- a/sdk/python/feast/templates/local/feature_repo/feature_store.yaml +++ b/sdk/python/feast/templates/local/feature_repo/feature_store.yaml @@ -7,3 +7,6 @@ online_store: type: sqlite path: data/online_store.db entity_key_serialization_version: 2 +# By default, no_auth for authentication and authorization, other possible values kubernetes and oidc. Refer the documentation for more details. 
+auth: + type: no_auth diff --git a/sdk/python/feast/templates/minimal/feature_repo/feature_store.yaml b/sdk/python/feast/templates/minimal/feature_repo/feature_store.yaml index 9808690005..45a0ce7718 100644 --- a/sdk/python/feast/templates/minimal/feature_repo/feature_store.yaml +++ b/sdk/python/feast/templates/minimal/feature_repo/feature_store.yaml @@ -3,4 +3,4 @@ registry: /path/to/registry.db provider: local online_store: path: /path/to/online_store.db -entity_key_serialization_version: 2 +entity_key_serialization_version: 2 \ No newline at end of file diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 785342550a..89459d1a69 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -40,6 +40,8 @@ async-timeout==4.0.3 # via # aiohttp # redis +async-property==0.2.2 + # via python-keycloak atpublic==4.1.0 # via ibis-framework attrs==23.2.0 @@ -63,6 +65,8 @@ beautifulsoup4==4.12.3 # via nbconvert bidict==0.23.1 # via ibis-framework +bigtree==0.19.2 + # via feast (setup.py) bleach==6.1.0 # via nbconvert boto3==1.34.131 @@ -131,6 +135,7 @@ cryptography==42.0.8 # azure-identity # azure-storage-blob # great-expectations + # jwcrypto # moto # msal # pyjwt @@ -154,6 +159,8 @@ defusedxml==0.7.1 # via nbconvert deltalake==0.18.1 # via feast (setup.py) +deprecation==2.1.0 + # via python-keycloak dill==0.3.8 # via feast (setup.py) distlib==0.3.8 @@ -300,6 +307,7 @@ httpx==0.27.0 # feast (setup.py) # fastapi # jupyterlab + # python-keycloak ibis-framework[duckdb]==9.1.0 # via # feast (setup.py) @@ -409,6 +417,8 @@ jupyterlab-server==2.27.2 # notebook jupyterlab-widgets==3.0.11 # via ipywidgets +jwcrypto==1.5.6 + # via python-keycloak kubernetes==20.13.0 # via feast (setup.py) locket==1.0.0 @@ -502,6 +512,7 @@ packaging==24.1 # build # dask # db-dtypes + # deprecation # google-cloud-bigquery # great-expectations # gunicorn @@ -712,6 +723,8 
@@ python-dotenv==1.0.1 # via uvicorn python-json-logger==2.0.7 # via jupyter-events +python-keycloak==4.2.2 + # via feast (setup.py) python-multipart==0.0.9 # via fastapi pytz==2024.1 @@ -760,7 +773,9 @@ requests==2.32.3 # kubernetes # moto # msal + # python-keycloak # requests-oauthlib + # requests-toolbelt # responses # singlestoredb # snowflake-connector-python @@ -768,6 +783,8 @@ requests==2.32.3 # trino requests-oauthlib==2.0.0 # via kubernetes +requests-toolbelt==1.0.0 + # via python-keycloak responses==0.25.3 # via moto rfc3339-validator==0.1.4 @@ -972,6 +989,7 @@ typing-extensions==4.12.2 # great-expectations # ibis-framework # ipython + # jwcrypto # mypy # psycopg # psycopg-pool @@ -1050,3 +1068,4 @@ yarl==1.9.4 # via aiohttp zipp==3.19.1 # via importlib-metadata +bigtree==0.19.2 diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 250e617b85..f1ec2b16ab 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -222,3 +222,4 @@ websockets==12.0 # via uvicorn zipp==3.19.1 # via importlib-metadata +bigtree==0.19.2 diff --git a/sdk/python/requirements/py3.11-ci-requirements.txt b/sdk/python/requirements/py3.11-ci-requirements.txt index f16b486aa5..fd0b5a6d26 100644 --- a/sdk/python/requirements/py3.11-ci-requirements.txt +++ b/sdk/python/requirements/py3.11-ci-requirements.txt @@ -36,6 +36,8 @@ asttokens==2.4.1 # via stack-data async-lru==2.0.4 # via jupyterlab +async-property==0.2.2 + # via python-keycloak atpublic==4.1.0 # via ibis-framework attrs==23.2.0 @@ -59,6 +61,8 @@ beautifulsoup4==4.12.3 # via nbconvert bidict==0.23.1 # via ibis-framework +bigtree==0.19.2 + # via feast (setup.py) bleach==6.1.0 # via nbconvert boto3==1.34.131 @@ -127,6 +131,7 @@ cryptography==42.0.8 # azure-identity # azure-storage-blob # great-expectations + # jwcrypto # moto # msal # pyjwt @@ -150,6 +155,8 @@ defusedxml==0.7.1 # via nbconvert 
deltalake==0.18.1 # via feast (setup.py) +deprecation==2.1.0 + # via python-keycloak dill==0.3.8 # via feast (setup.py) distlib==0.3.8 @@ -291,6 +298,7 @@ httpx==0.27.0 # feast (setup.py) # fastapi # jupyterlab + # python-keycloak ibis-framework[duckdb]==9.1.0 # via # feast (setup.py) @@ -400,6 +408,8 @@ jupyterlab-server==2.27.2 # notebook jupyterlab-widgets==3.0.11 # via ipywidgets +jwcrypto==1.5.6 + # via python-keycloak kubernetes==20.13.0 # via feast (setup.py) locket==1.0.0 @@ -493,6 +503,7 @@ packaging==24.1 # build # dask # db-dtypes + # deprecation # google-cloud-bigquery # great-expectations # gunicorn @@ -703,6 +714,8 @@ python-dotenv==1.0.1 # via uvicorn python-json-logger==2.0.7 # via jupyter-events +python-keycloak==4.2.2 + # via feast (setup.py) python-multipart==0.0.9 # via fastapi pytz==2024.1 @@ -751,7 +764,9 @@ requests==2.32.3 # kubernetes # moto # msal + # python-keycloak # requests-oauthlib + # requests-toolbelt # responses # singlestoredb # snowflake-connector-python @@ -759,6 +774,8 @@ requests==2.32.3 # trino requests-oauthlib==2.0.0 # via kubernetes +requests-toolbelt==1.0.0 + # via python-keycloak responses==0.25.3 # via moto rfc3339-validator==0.1.4 @@ -951,6 +968,7 @@ typing-extensions==4.12.2 # great-expectations # ibis-framework # ipython + # jwcrypto # mypy # psycopg # psycopg-pool diff --git a/sdk/python/requirements/py3.11-requirements.txt b/sdk/python/requirements/py3.11-requirements.txt index 4f1655de09..e51452a594 100644 --- a/sdk/python/requirements/py3.11-requirements.txt +++ b/sdk/python/requirements/py3.11-requirements.txt @@ -216,3 +216,4 @@ websockets==12.0 # via uvicorn zipp==3.19.1 # via importlib-metadata +bigtree==0.19.2 diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 94bfa82058..be30f032a9 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -40,6 +40,8 @@ async-timeout==4.0.3 # 
via # aiohttp # redis +async-property==0.2.2 + # via python-keycloak atpublic==4.1.0 # via ibis-framework attrs==23.2.0 @@ -63,6 +65,8 @@ beautifulsoup4==4.12.3 # via nbconvert bidict==0.23.1 # via ibis-framework +bigtree==0.19.2 + # via feast (setup.py) bleach==6.1.0 # via nbconvert boto3==1.34.131 @@ -131,6 +135,7 @@ cryptography==42.0.8 # azure-identity # azure-storage-blob # great-expectations + # jwcrypto # moto # msal # pyjwt @@ -154,6 +159,8 @@ defusedxml==0.7.1 # via nbconvert deltalake==0.18.1 # via feast (setup.py) +deprecation==2.1.0 + # via python-keycloak dill==0.3.8 # via feast (setup.py) distlib==0.3.8 @@ -300,6 +307,7 @@ httpx==0.27.0 # feast (setup.py) # fastapi # jupyterlab + # python-keycloak ibis-framework[duckdb]==9.0.0 # via # feast (setup.py) @@ -418,6 +426,8 @@ jupyterlab-server==2.27.2 # notebook jupyterlab-widgets==3.0.11 # via ipywidgets +jwcrypto==1.5.6 + # via python-keycloak kubernetes==20.13.0 # via feast (setup.py) locket==1.0.0 @@ -511,6 +521,7 @@ packaging==24.1 # build # dask # db-dtypes + # deprecation # google-cloud-bigquery # great-expectations # gunicorn @@ -721,6 +732,8 @@ python-dotenv==1.0.1 # via uvicorn python-json-logger==2.0.7 # via jupyter-events +python-keycloak==4.2.2 + # via feast (setup.py) python-multipart==0.0.9 # via fastapi pytz==2024.1 @@ -769,7 +782,9 @@ requests==2.32.3 # kubernetes # moto # msal + # python-keycloak # requests-oauthlib + # requests-toolbelt # responses # singlestoredb # snowflake-connector-python @@ -777,6 +792,8 @@ requests==2.32.3 # trino requests-oauthlib==2.0.0 # via kubernetes +requests-toolbelt==1.0.0 + # via python-keycloak responses==0.25.3 # via moto rfc3339-validator==0.1.4 @@ -984,6 +1001,7 @@ typing-extensions==4.12.2 # great-expectations # ibis-framework # ipython + # jwcrypto # mypy # psycopg # psycopg-pool diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index f9fa856a0e..0b3c8a33c9 100644 --- 
a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -225,3 +225,4 @@ websockets==12.0 # via uvicorn zipp==3.19.2 # via importlib-metadata +bigtree==0.19.2 diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index 1fd510d104..74aa68e984 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -15,9 +15,11 @@ import multiprocessing import os import random +import tempfile from datetime import timedelta from multiprocessing import Process from sys import platform +from textwrap import dedent from typing import Any, Dict, List, Tuple, no_type_check from unittest import mock @@ -29,8 +31,8 @@ from feast.feature_store import FeatureStore # noqa: E402 from feast.utils import _utc_now from feast.wait import wait_retry_backoff # noqa: E402 -from tests.data.data_creator import ( # noqa: E402 - create_basic_driver_dataset, +from tests.data.data_creator import ( + create_basic_driver_dataset, # noqa: E402 create_document_dataset, ) from tests.integration.feature_repos.integration_test_repo_config import ( @@ -54,6 +56,7 @@ driver, location, ) +from tests.utils.auth_permissions_util import default_store from tests.utils.http_server import check_port_open, free_port # noqa: E402 logger = logging.getLogger(__name__) @@ -406,3 +409,75 @@ def fake_document_data(environment: Environment) -> Tuple[pd.DataFrame, DataSour environment.feature_store.project, ) return df, data_source + + +@pytest.fixture +def temp_dir(): + with tempfile.TemporaryDirectory() as temp_dir: + print(f"Created {temp_dir}") + yield temp_dir + + +@pytest.fixture +def server_port(): + return free_port() + + +@pytest.fixture +def feature_store(temp_dir, auth_config, applied_permissions): + print(f"Creating store at {temp_dir}") + return default_store(str(temp_dir), auth_config, applied_permissions) + + +@pytest.fixture(scope="module") +def all_markers_from_module(request): + markers = set() + for item in 
request.session.items: + for marker in item.iter_markers(): + markers.add(marker.name) + + return markers + + +@pytest.fixture(scope="module") +def is_integration_test(all_markers_from_module): + return "integration" in all_markers_from_module + + +@pytest.fixture( + scope="module", + params=[ + dedent(""" + auth: + type: no_auth + """), + dedent(""" + auth: + type: kubernetes + """), + dedent(""" + auth: + type: oidc + client_id: feast-integration-client + client_secret: feast-integration-client-secret + username: reader_writer + password: password + realm: master + auth_server_url: KEYCLOAK_URL_PLACE_HOLDER + auth_discovery_url: KEYCLOAK_URL_PLACE_HOLDER/realms/master/.well-known/openid-configuration + """), + ], +) +def auth_config(request, is_integration_test): + auth_configuration = request.param + + if is_integration_test: + if "kubernetes" in auth_configuration: + pytest.skip( + "skipping integration tests for kubernetes platform, unit tests are covering this functionality." + ) + elif "oidc" in auth_configuration: + keycloak_url = request.getfixturevalue("start_keycloak_server") + return auth_configuration.replace("KEYCLOAK_URL_PLACE_HOLDER", keycloak_url) + + return auth_configuration diff --git a/sdk/python/tests/integration/conftest.py b/sdk/python/tests/integration/conftest.py new file mode 100644 index 0000000000..5c34a448e2 --- /dev/null +++ b/sdk/python/tests/integration/conftest.py @@ -0,0 +1,16 @@ +import logging + +import pytest +from testcontainers.keycloak import KeycloakContainer + +from tests.utils.auth_permissions_util import setup_permissions_on_keycloak + +logger = logging.getLogger(__name__) + + +@pytest.fixture(scope="session") +def start_keycloak_server(): + logger.info("Starting keycloak instance") + with KeycloakContainer("quay.io/keycloak/keycloak:24.0.1") as keycloak_container: + setup_permissions_on_keycloak(keycloak_container.get_client()) + yield keycloak_container.get_url() diff --git 
a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index 48f5070f1e..235c909d5f 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -11,15 +11,26 @@ import pandas as pd import pytest -from feast import FeatureStore, FeatureView, OnDemandFeatureView, driver_test_data +from feast import ( + FeatureStore, + FeatureView, + OnDemandFeatureView, + StreamFeatureView, + driver_test_data, +) from feast.constants import FULL_REPO_CONFIGS_MODULE_ENV_NAME from feast.data_source import DataSource from feast.errors import FeastModuleImportError +from feast.feature_service import FeatureService from feast.infra.feature_servers.base_config import ( BaseFeatureServerConfig, FeatureLoggingConfig, ) from feast.infra.feature_servers.local_process.config import LocalFeatureServerConfig +from feast.permissions.action import AuthzedAction +from feast.permissions.auth_model import OidcAuthConfig +from feast.permissions.permission import Permission +from feast.permissions.policy import RoleBasedPolicy from feast.repo_config import RegistryConfig, RepoConfig from feast.utils import _utc_now from tests.integration.feature_repos.integration_test_repo_config import ( @@ -36,6 +47,7 @@ DuckDBDataSourceCreator, DuckDBDeltaDataSourceCreator, FileDataSourceCreator, + RemoteOfflineOidcAuthStoreDataSourceCreator, RemoteOfflineStoreDataSourceCreator, ) from tests.integration.feature_repos.universal.data_sources.redshift import ( @@ -124,6 +136,7 @@ ("local", DuckDBDataSourceCreator), ("local", DuckDBDeltaDataSourceCreator), ("local", RemoteOfflineStoreDataSourceCreator), + ("local", RemoteOfflineOidcAuthStoreDataSourceCreator), ] if os.getenv("FEAST_IS_LOCAL_TEST", "False") == "True": @@ -134,7 +147,6 @@ ] ) - AVAILABLE_ONLINE_STORES: Dict[ str, Tuple[Union[str, Dict[Any, Any]], Optional[Type[OnlineStoreCreator]]] ] = 
{"sqlite": ({"type": "sqlite"}, None)} @@ -164,7 +176,6 @@ # containerized version of IKV. # AVAILABLE_ONLINE_STORES["ikv"] = (IKV_CONFIG, None) - full_repo_configs_module = os.environ.get(FULL_REPO_CONFIGS_MODULE_ENV_NAME) if full_repo_configs_module is not None: try: @@ -200,7 +211,6 @@ for c in FULL_REPO_CONFIGS } - # Replace online stores with emulated online stores if we're running local integration tests if os.getenv("FEAST_LOCAL_ONLINE_CONTAINER", "False").lower() == "true": replacements: Dict[ @@ -432,6 +442,7 @@ def setup(self): feature_server=self.feature_server, entity_key_serialization_version=self.entity_key_serialization_version, ) + self.feature_store = FeatureStore(config=self.config) def teardown(self): @@ -441,6 +452,73 @@ def teardown(self): self.online_store_creator.teardown() +@dataclass +class OfflineServerPermissionsEnvironment(Environment): + def setup(self): + self.data_source_creator.setup(self.registry) + keycloak_url = self.data_source_creator.get_keycloak_url() + auth_config = OidcAuthConfig( + client_id="feast-integration-client", + client_secret="feast-integration-client-secret", + username="reader_writer", + password="password", + realm="master", + type="oidc", + auth_server_url=keycloak_url, + auth_discovery_url=f"{keycloak_url}/realms/master/.well-known" + f"/openid-configuration", + ) + self.config = RepoConfig( + registry=self.registry, + project=self.project, + provider=self.provider, + offline_store=self.data_source_creator.create_offline_store_config(), + online_store=self.online_store_creator.create_online_store() + if self.online_store_creator + else self.online_store, + batch_engine=self.batch_engine, + repo_path=self.repo_dir_name, + feature_server=self.feature_server, + entity_key_serialization_version=self.entity_key_serialization_version, + auth=auth_config, + ) + + self.feature_store = FeatureStore(config=self.config) + permissions_list = [ + Permission( + name="offline_fv_perm", + types=FeatureView, + 
policy=RoleBasedPolicy(roles=["writer"]), + actions=[AuthzedAction.READ_OFFLINE, AuthzedAction.WRITE_OFFLINE], + ), + Permission( + name="offline_odfv_perm", + types=OnDemandFeatureView, + policy=RoleBasedPolicy(roles=["writer"]), + actions=[AuthzedAction.READ_OFFLINE, AuthzedAction.WRITE_OFFLINE], + ), + Permission( + name="offline_sfv_perm", + types=StreamFeatureView, + policy=RoleBasedPolicy(roles=["writer"]), + actions=[AuthzedAction.READ_OFFLINE, AuthzedAction.WRITE_OFFLINE], + ), + Permission( + name="offline_fs_perm", + types=FeatureService, + policy=RoleBasedPolicy(roles=["writer"]), + actions=[AuthzedAction.READ_OFFLINE, AuthzedAction.WRITE_OFFLINE], + ), + Permission( + name="offline_datasource_perm", + types=DataSource, + policy=RoleBasedPolicy(roles=["writer"]), + actions=[AuthzedAction.READ_OFFLINE, AuthzedAction.WRITE_OFFLINE], + ), + ] + self.feature_store.apply(permissions_list) + + def table_name_from_data_source(ds: DataSource) -> Optional[str]: if hasattr(ds, "table_ref"): return ds.table_ref # type: ignore @@ -491,23 +569,27 @@ def construct_test_environment( cache_ttl_seconds=1, ) - environment = Environment( - name=project, - provider=test_repo_config.provider, - data_source_creator=offline_creator, - python_feature_server=test_repo_config.python_feature_server, - worker_id=worker_id, - online_store_creator=online_creator, - fixture_request=fixture_request, - project=project, - registry=registry, - feature_server=feature_server, - entity_key_serialization_version=entity_key_serialization_version, - repo_dir_name=repo_dir_name, - batch_engine=test_repo_config.batch_engine, - online_store=test_repo_config.online_store, - ) + environment_params = { + "name": project, + "provider": test_repo_config.provider, + "data_source_creator": offline_creator, + "python_feature_server": test_repo_config.python_feature_server, + "worker_id": worker_id, + "online_store_creator": online_creator, + "fixture_request": fixture_request, + "project": project, + 
"registry": registry, + "feature_server": feature_server, + "entity_key_serialization_version": entity_key_serialization_version, + "repo_dir_name": repo_dir_name, + "batch_engine": test_repo_config.batch_engine, + "online_store": test_repo_config.online_store, + } + if not isinstance(offline_creator, RemoteOfflineOidcAuthStoreDataSourceCreator): + environment = Environment(**environment_params) + else: + environment = OfflineServerPermissionsEnvironment(**environment_params) return environment diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py index e505986350..b600699f81 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py @@ -33,6 +33,7 @@ from tests.integration.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) +from tests.utils.auth_permissions_util import include_auth_config from tests.utils.http_server import check_port_open, free_port # noqa: E402 logger = logging.getLogger(__name__) @@ -428,3 +429,95 @@ def teardown(self): ), timeout_secs=30, ) + + +class RemoteOfflineOidcAuthStoreDataSourceCreator(FileDataSourceCreator): + def __init__(self, project_name: str, *args, **kwargs): + super().__init__(project_name) + if "fixture_request" in kwargs: + request = kwargs["fixture_request"] + self.keycloak_url = request.getfixturevalue("start_keycloak_server") + else: + raise RuntimeError( + "fixture_request object is not passed to inject keycloak fixture dynamically." 
+ ) + auth_config_template = """ +auth: + type: oidc + client_id: feast-integration-client + client_secret: feast-integration-client-secret + username: reader_writer + password: password + realm: master + auth_server_url: {keycloak_url} + auth_discovery_url: {keycloak_url}/realms/master/.well-known/openid-configuration +""" + self.auth_config = auth_config_template.format(keycloak_url=self.keycloak_url) + self.server_port: int = 0 + self.proc = None + + def setup(self, registry: RegistryConfig): + parent_offline_config = super().create_offline_store_config() + config = RepoConfig( + project=self.project_name, + provider="local", + offline_store=parent_offline_config, + registry=registry.path, + entity_key_serialization_version=2, + ) + + repo_path = Path(tempfile.mkdtemp()) + with open(repo_path / "feature_store.yaml", "w") as outfile: + yaml.dump(config.model_dump(by_alias=True), outfile) + repo_path = str(repo_path.resolve()) + + include_auth_config( + file_path=f"{repo_path}/feature_store.yaml", auth_config=self.auth_config + ) + + self.server_port = free_port() + host = "0.0.0.0" + cmd = [ + "feast", + "-c" + repo_path, + "serve_offline", + "--host", + host, + "--port", + str(self.server_port), + ] + self.proc = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL + ) + + _time_out_sec: int = 60 + # Wait for server to start + wait_retry_backoff( + lambda: (None, check_port_open(host, self.server_port)), + timeout_secs=_time_out_sec, + timeout_msg=f"Unable to start the feast remote offline server in {_time_out_sec} seconds at port={self.server_port}", + ) + return "grpc+tcp://{}:{}".format(host, self.server_port) + + def create_offline_store_config(self) -> FeastConfigBaseModel: + self.remote_offline_store_config = RemoteOfflineStoreConfig( + type="remote", host="0.0.0.0", port=self.server_port + ) + return self.remote_offline_store_config + + def get_keycloak_url(self): + return self.keycloak_url + + def teardown(self): + 
super().teardown() + if self.proc is not None: + self.proc.kill() + + # wait server to free the port + wait_retry_backoff( + lambda: ( + None, + not check_port_open("localhost", self.server_port), + ), + timeout_secs=30, + ) diff --git a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py index ecaa5f40db..97ad54251f 100644 --- a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py +++ b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py @@ -21,6 +21,7 @@ table_name_from_data_source, ) from tests.integration.feature_repos.universal.data_sources.file import ( + RemoteOfflineOidcAuthStoreDataSourceCreator, RemoteOfflineStoreDataSourceCreator, ) from tests.integration.feature_repos.universal.data_sources.snowflake import ( @@ -162,7 +163,11 @@ def test_historical_features_main( ) if not isinstance( - environment.data_source_creator, RemoteOfflineStoreDataSourceCreator + environment.data_source_creator, + ( + RemoteOfflineStoreDataSourceCreator, + RemoteOfflineOidcAuthStoreDataSourceCreator, + ), ): assert_feature_service_correctness( store, diff --git a/sdk/python/tests/integration/online_store/test_remote_online_store.py b/sdk/python/tests/integration/online_store/test_remote_online_store.py index 21ac00583b..f74fb14a86 100644 --- a/sdk/python/tests/integration/online_store/test_remote_online_store.py +++ b/sdk/python/tests/integration/online_store/test_remote_online_store.py @@ -1,28 +1,59 @@ import os -import subprocess import tempfile from textwrap import dedent import pytest +from feast import FeatureView, OnDemandFeatureView, StreamFeatureView from feast.feature_store import FeatureStore -from feast.utils import _utc_now -from feast.wait import wait_retry_backoff +from feast.permissions.action import AuthzedAction +from feast.permissions.permission import Permission +from 
feast.permissions.policy import RoleBasedPolicy +from tests.utils.auth_permissions_util import ( + PROJECT_NAME, + default_store, + start_feature_server, +) from tests.utils.cli_repo_creator import CliRunner -from tests.utils.http_server import check_port_open, free_port +from tests.utils.http_server import free_port @pytest.mark.integration -def test_remote_online_store_read(): +def test_remote_online_store_read(auth_config): with tempfile.TemporaryDirectory() as remote_server_tmp_dir, tempfile.TemporaryDirectory() as remote_client_tmp_dir: + permissions_list = [ + Permission( + name="online_list_fv_perm", + types=FeatureView, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.READ_ONLINE], + ), + Permission( + name="online_list_odfv_perm", + types=OnDemandFeatureView, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.READ_ONLINE], + ), + Permission( + name="online_list_sfv_perm", + types=StreamFeatureView, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.READ_ONLINE], + ), + ] server_store, server_url, registry_path = ( - _create_server_store_spin_feature_server(temp_dir=remote_server_tmp_dir) + _create_server_store_spin_feature_server( + temp_dir=remote_server_tmp_dir, + auth_config=auth_config, + permissions_list=permissions_list, + ) ) assert None not in (server_store, server_url, registry_path) client_store = _create_remote_client_feature_store( temp_dir=remote_client_tmp_dir, server_registry_path=str(registry_path), feature_server_url=server_url, + auth_config=auth_config, ) assert client_store is not None _assert_non_existing_entity_feature_views_entity( @@ -127,11 +158,13 @@ def _assert_client_server_online_stores_are_matching( assert online_features_from_client == online_features_from_server -def _create_server_store_spin_feature_server(temp_dir): +def _create_server_store_spin_feature_server( + temp_dir, auth_config: str, permissions_list +): + store = default_store(str(temp_dir), auth_config, 
permissions_list) feast_server_port = free_port() - store = _default_store(str(temp_dir), "REMOTE_ONLINE_SERVER_PROJECT") server_url = next( - _start_feature_server( + start_feature_server( repo_path=str(store.repo_path), server_port=feast_server_port ) ) @@ -139,24 +172,8 @@ def _create_server_store_spin_feature_server(temp_dir): return store, server_url, os.path.join(store.repo_path, "data", "registry.db") -def _default_store(temp_dir, project_name) -> FeatureStore: - runner = CliRunner() - result = runner.run(["init", project_name], cwd=temp_dir) - repo_path = os.path.join(temp_dir, project_name, "feature_repo") - assert result.returncode == 0 - - result = runner.run(["--chdir", repo_path, "apply"], cwd=temp_dir) - assert result.returncode == 0 - - fs = FeatureStore(repo_path=repo_path) - fs.materialize_incremental( - end_date=_utc_now(), feature_views=["driver_hourly_stats"] - ) - return fs - - def _create_remote_client_feature_store( - temp_dir, server_registry_path: str, feature_server_url: str + temp_dir, server_registry_path: str, feature_server_url: str, auth_config: str ) -> FeatureStore: project_name = "REMOTE_ONLINE_CLIENT_PROJECT" runner = CliRunner() @@ -167,6 +184,7 @@ def _create_remote_client_feature_store( repo_path=str(repo_path), registry_path=server_registry_path, feature_server_url=feature_server_url, + auth_config=auth_config, ) result = runner.run(["--chdir", repo_path, "apply"], cwd=temp_dir) @@ -176,14 +194,14 @@ def _create_remote_client_feature_store( def _overwrite_remote_client_feature_store_yaml( - repo_path: str, registry_path: str, feature_server_url: str + repo_path: str, registry_path: str, feature_server_url: str, auth_config: str ): repo_config = os.path.join(repo_path, "feature_store.yaml") with open(repo_config, "w") as repo_config: repo_config.write( dedent( f""" - project: REMOTE_ONLINE_CLIENT_PROJECT + project: {PROJECT_NAME} registry: {registry_path} provider: local online_store: @@ -192,57 +210,5 @@ def 
_overwrite_remote_client_feature_store_yaml( entity_key_serialization_version: 2 """ ) - ) - - -def _start_feature_server(repo_path: str, server_port: int, metrics: bool = False): - host = "0.0.0.0" - cmd = [ - "feast", - "-c" + repo_path, - "serve", - "--host", - host, - "--port", - str(server_port), - ] - feast_server_process = subprocess.Popen( - cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL - ) - _time_out_sec: int = 60 - # Wait for server to start - wait_retry_backoff( - lambda: (None, check_port_open(host, server_port)), - timeout_secs=_time_out_sec, - timeout_msg=f"Unable to start the feast server in {_time_out_sec} seconds for remote online store type, port={server_port}", - ) - - if metrics: - cmd.append("--metrics") - - # Check if metrics are enabled and Prometheus server is running - if metrics: - wait_retry_backoff( - lambda: (None, check_port_open("localhost", 8000)), - timeout_secs=_time_out_sec, - timeout_msg="Unable to start the Prometheus server in 60 seconds.", - ) - else: - assert not check_port_open( - "localhost", 8000 - ), "Prometheus server is running when it should be disabled." 
- - yield f"http://localhost:{server_port}" - - if feast_server_process is not None: - feast_server_process.kill() - - # wait server to free the port - wait_retry_backoff( - lambda: ( - None, - not check_port_open("localhost", server_port), - ), - timeout_msg=f"Unable to stop the feast server in {_time_out_sec} seconds for remote online store type, port={server_port}", - timeout_secs=_time_out_sec, + + auth_config ) diff --git a/sdk/python/tests/integration/registration/test_universal_cli.py b/sdk/python/tests/integration/registration/test_universal_cli.py index fc90108d78..9e02ded4e4 100644 --- a/sdk/python/tests/integration/registration/test_universal_cli.py +++ b/sdk/python/tests/integration/registration/test_universal_cli.py @@ -61,6 +61,8 @@ def test_universal_cli(): assertpy.assert_that(result.returncode).is_equal_to(0) result = runner.run(["data-sources", "list"], cwd=repo_path) assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["permissions", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) # entity & feature view describe commands should succeed when objects exist result = runner.run(["entities", "describe", "driver"], cwd=repo_path) @@ -91,6 +93,8 @@ def test_universal_cli(): assertpy.assert_that(result.returncode).is_equal_to(1) result = runner.run(["data-sources", "describe", "foo"], cwd=repo_path) assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["permissions", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) # Doing another apply should be a no op, and should not cause errors result = runner.run(["apply"], cwd=repo_path) diff --git a/sdk/python/tests/integration/registration/test_universal_registry.py b/sdk/python/tests/integration/registration/test_universal_registry.py index 9dcd1b5b91..c528cee4a8 100644 --- a/sdk/python/tests/integration/registration/test_universal_registry.py +++ 
b/sdk/python/tests/integration/registration/test_universal_registry.py @@ -40,6 +40,9 @@ from feast.infra.registry.remote import RemoteRegistry, RemoteRegistryConfig from feast.infra.registry.sql import SqlRegistry from feast.on_demand_feature_view import on_demand_feature_view +from feast.permissions.action import AuthzedAction +from feast.permissions.permission import Permission +from feast.permissions.policy import RoleBasedPolicy from feast.protos.feast.registry import RegistryServer_pb2, RegistryServer_pb2_grpc from feast.registry_server import RegistryServer from feast.repo_config import RegistryConfig @@ -270,7 +273,9 @@ def mock_remote_registry(): proxied_registry = Registry("project", registry_config, None) registry = RemoteRegistry( - registry_config=RemoteRegistryConfig(path=""), project=None, repo_path=None + registry_config=RemoteRegistryConfig(path=""), + project=None, + repo_path=None, ) mock_channel = GrpcMockChannel( RegistryServer_pb2.DESCRIPTOR.services_by_name["RegistryServer"], @@ -1154,7 +1159,9 @@ def simple_udf(x: int): assert stream_feature_views[0] == sfv test_registry.delete_feature_view("test kafka stream feature view", project) - stream_feature_views = test_registry.list_stream_feature_views(project) + stream_feature_views = test_registry.list_stream_feature_views( + project, tags=sfv.tags + ) assert len(stream_feature_views) == 0 test_registry.teardown() @@ -1343,3 +1350,138 @@ def validate_project_uuid(project_uuid, test_registry): assert len(test_registry.cached_registry_proto.project_metadata) == 1 project_metadata = test_registry.cached_registry_proto.project_metadata[0] assert project_metadata.project_uuid == project_uuid + + +@pytest.mark.integration +@pytest.mark.parametrize("test_registry", all_fixtures) +def test_apply_permission_success(test_registry): + permission = Permission( + name="read_permission", + actions=AuthzedAction.DESCRIBE, + policy=RoleBasedPolicy(roles=["reader"]), + types=FeatureView, + ) + + project = 
"project" + + # Register Permission + test_registry.apply_permission(permission, project) + project_metadata = test_registry.list_project_metadata(project=project) + assert len(project_metadata) == 1 + project_uuid = project_metadata[0].project_uuid + assert len(project_metadata[0].project_uuid) == 36 + assert_project_uuid(project, project_uuid, test_registry) + + permissions = test_registry.list_permissions(project) + assert_project_uuid(project, project_uuid, test_registry) + + permission = permissions[0] + assert ( + len(permissions) == 1 + and permission.name == "read_permission" + and len(permission.types) == 1 + and permission.types[0] == FeatureView + and len(permission.actions) == 1 + and permission.actions[0] == AuthzedAction.DESCRIBE + and isinstance(permission.policy, RoleBasedPolicy) + and len(permission.policy.roles) == 1 + and permission.policy.roles[0] == "reader" + and permission.name_pattern is None + and permission.tags is None + and permission.required_tags is None + ) + + # After the first apply, the created_timestamp should be the same as the last_update_timestamp. 
+ assert permission.created_timestamp == permission.last_updated_timestamp + + permission = test_registry.get_permission("read_permission", project) + assert ( + permission.name == "read_permission" + and len(permission.types) == 1 + and permission.types[0] == FeatureView + and len(permission.actions) == 1 + and permission.actions[0] == AuthzedAction.DESCRIBE + and isinstance(permission.policy, RoleBasedPolicy) + and len(permission.policy.roles) == 1 + and permission.policy.roles[0] == "reader" + and permission.name_pattern is None + and permission.tags is None + and permission.required_tags is None + ) + + # Update permission + updated_permission = Permission( + name="read_permission", + actions=[AuthzedAction.DESCRIBE, AuthzedAction.WRITE_ONLINE], + policy=RoleBasedPolicy(roles=["reader", "writer"]), + types=FeatureView, + ) + test_registry.apply_permission(updated_permission, project) + + permissions = test_registry.list_permissions(project) + assert_project_uuid(project, project_uuid, test_registry) + assert len(permissions) == 1 + + updated_permission = test_registry.get_permission("read_permission", project) + assert ( + updated_permission.name == "read_permission" + and len(updated_permission.types) == 1 + and updated_permission.types[0] == FeatureView + and len(updated_permission.actions) == 2 + and AuthzedAction.DESCRIBE in updated_permission.actions + and AuthzedAction.WRITE_ONLINE in updated_permission.actions + and isinstance(updated_permission.policy, RoleBasedPolicy) + and len(updated_permission.policy.roles) == 2 + and "reader" in updated_permission.policy.roles + and "writer" in updated_permission.policy.roles + and updated_permission.name_pattern is None + and updated_permission.tags is None + and updated_permission.required_tags is None + ) + + # The created_timestamp for the entity should be set to the created_timestamp value stored from the previous apply + assert ( + updated_permission.created_timestamp is not None + and 
updated_permission.created_timestamp == permission.created_timestamp + ) + + updated_permission = Permission( + name="read_permission", + actions=[AuthzedAction.DESCRIBE, AuthzedAction.WRITE_ONLINE], + policy=RoleBasedPolicy(roles=["reader", "writer"]), + types=FeatureView, + name_pattern="aaa", + tags={"team": "matchmaking"}, + required_tags={"tag1": "tag1-value"}, + ) + test_registry.apply_permission(updated_permission, project) + + permissions = test_registry.list_permissions(project) + assert_project_uuid(project, project_uuid, test_registry) + assert len(permissions) == 1 + + updated_permission = test_registry.get_permission("read_permission", project) + assert ( + updated_permission.name == "read_permission" + and len(updated_permission.types) == 1 + and updated_permission.types[0] == FeatureView + and len(updated_permission.actions) == 2 + and AuthzedAction.DESCRIBE in updated_permission.actions + and AuthzedAction.WRITE_ONLINE in updated_permission.actions + and isinstance(updated_permission.policy, RoleBasedPolicy) + and len(updated_permission.policy.roles) == 2 + and "reader" in updated_permission.policy.roles + and "writer" in updated_permission.policy.roles + and updated_permission.name_pattern == "aaa" + and "team" in updated_permission.tags + and updated_permission.tags["team"] == "matchmaking" + and updated_permission.required_tags["tag1"] == "tag1-value" + ) + + test_registry.delete_permission("read_permission", project) + assert_project_uuid(project, project_uuid, test_registry) + permissions = test_registry.list_permissions(project) + assert_project_uuid(project, project_uuid, test_registry) + assert len(permissions) == 0 + + test_registry.teardown() diff --git a/sdk/python/tests/unit/diff/test_registry_diff.py b/sdk/python/tests/unit/diff/test_registry_diff.py index c209f1e0e0..2834c57800 100644 --- a/sdk/python/tests/unit/diff/test_registry_diff.py +++ b/sdk/python/tests/unit/diff/test_registry_diff.py @@ -6,8 +6,12 @@ 
tag_objects_for_keep_delete_update_add, ) from feast.entity import Entity +from feast.feast_object import ALL_RESOURCE_TYPES from feast.feature_view import FeatureView from feast.on_demand_feature_view import on_demand_feature_view +from feast.permissions.action import AuthzedAction +from feast.permissions.permission import Permission +from feast.permissions.policy import RoleBasedPolicy from feast.types import String from tests.utils.data_source_test_creator import prep_file_source @@ -170,3 +174,22 @@ def test_diff_registry_objects_batch_to_push_source(simple_dataset_1): feast_object_diffs.feast_object_property_diffs[0].property_name == "stream_source" ) + + +def test_diff_registry_objects_permissions(): + pre_changed = Permission( + name="reader", + types=ALL_RESOURCE_TYPES, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], + ) + post_changed = Permission( + name="reader", + types=ALL_RESOURCE_TYPES, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.CREATE], + ) + + feast_object_diffs = diff_registry_objects(pre_changed, post_changed, "permission") + assert len(feast_object_diffs.feast_object_property_diffs) == 1 + assert feast_object_diffs.feast_object_property_diffs[0].property_name == "actions" diff --git a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py index 98d82ce357..0725d6d261 100644 --- a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py +++ b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py @@ -4,6 +4,12 @@ from typing import Optional from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig +from feast.permissions.auth.auth_type import AuthType +from feast.permissions.auth_model import ( + KubernetesAuthConfig, + NoAuthConfig, + OidcAuthConfig, +) from feast.repo_config import FeastConfigError, load_repo_config @@ -195,3 +201,119 @@ def test_no_provider(): ), expect_error=None, ) + + +def 
test_auth_config(): + _test_config( + dedent( + """ + project: foo + auth: + client_id: test_client_id + client_secret: test_client_secret + username: test_user_name + password: test_password + realm: master + auth_server_url: http://localhost:8712 + auth_discovery_url: http://localhost:8080/realms/master/.well-known/openid-configuration + registry: "registry.db" + provider: local + online_store: + path: foo + entity_key_serialization_version: 2 + """ + ), + expect_error="missing authentication type", + ) + + _test_config( + dedent( + """ + project: foo + auth: + type: not_valid_auth_type + client_id: test_client_id + client_secret: test_client_secret + username: test_user_name + password: test_password + realm: master + auth_server_url: http://localhost:8712 + auth_discovery_url: http://localhost:8080/realms/master/.well-known/openid-configuration + registry: "registry.db" + provider: local + online_store: + path: foo + entity_key_serialization_version: 2 + """ + ), + expect_error="invalid authentication type=not_valid_auth_type", + ) + + oidc_repo_config = _test_config( + dedent( + """ + project: foo + auth: + type: oidc + client_id: test_client_id + client_secret: test_client_secret + username: test_user_name + password: test_password + realm: master + auth_server_url: http://localhost:8080 + auth_discovery_url: http://localhost:8080/realms/master/.well-known/openid-configuration + registry: "registry.db" + provider: local + online_store: + path: foo + entity_key_serialization_version: 2 + """ + ), + expect_error=None, + ) + assert oidc_repo_config.auth["type"] == AuthType.OIDC.value + assert isinstance(oidc_repo_config.auth_config, OidcAuthConfig) + assert oidc_repo_config.auth_config.client_id == "test_client_id" + assert oidc_repo_config.auth_config.client_secret == "test_client_secret" + assert oidc_repo_config.auth_config.username == "test_user_name" + assert oidc_repo_config.auth_config.password == "test_password" + assert 
oidc_repo_config.auth_config.realm == "master" + assert oidc_repo_config.auth_config.auth_server_url == "http://localhost:8080" + assert ( + oidc_repo_config.auth_config.auth_discovery_url + == "http://localhost:8080/realms/master/.well-known/openid-configuration" + ) + + no_auth_repo_config = _test_config( + dedent( + """ + project: foo + registry: "registry.db" + provider: local + online_store: + path: foo + entity_key_serialization_version: 2 + """ + ), + expect_error=None, + ) + assert no_auth_repo_config.auth.get("type") == AuthType.NONE.value + assert isinstance(no_auth_repo_config.auth_config, NoAuthConfig) + + k8_repo_config = _test_config( + dedent( + """ + auth: + type: kubernetes + project: foo + registry: "registry.db" + provider: local + online_store: + path: foo + entity_key_serialization_version: 2 + """ + ), + expect_error=None, + ) + assert k8_repo_config.auth.get("type") == AuthType.KUBERNETES.value + assert isinstance(k8_repo_config.auth_config, KubernetesAuthConfig) diff --git a/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py b/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py index 0e834e314b..c86441d56c 100644 --- a/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py +++ b/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py @@ -9,11 +9,15 @@ from feast.data_format import AvroFormat, ParquetFormat from feast.data_source import KafkaSource from feast.entity import Entity +from feast.feast_object import ALL_RESOURCE_TYPES from feast.feature_store import FeatureStore from feast.feature_view import FeatureView from feast.field import Field from feast.infra.offline_stores.file_source import FileSource from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig +from feast.permissions.action import AuthzedAction +from feast.permissions.permission import Permission +from feast.permissions.policy import RoleBasedPolicy from feast.repo_config import RepoConfig from 
feast.stream_feature_view import stream_feature_view from feast.types import Array, Bytes, Float32, Int64, String @@ -338,6 +342,36 @@ def test_apply_entities_and_feature_views(test_feature_store): test_feature_store.teardown() +@pytest.mark.parametrize( + "test_feature_store", + [lazy_fixture("feature_store_with_local_registry")], +) +def test_apply_permissions(test_feature_store): + assert isinstance(test_feature_store, FeatureStore) + + permission = Permission( + name="reader", + types=ALL_RESOURCE_TYPES, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], + ) + + # Register Permission + test_feature_store.apply([permission]) + + permissions = test_feature_store.list_permissions() + assert len(permissions) == 1 + assert permissions[0] == permission + + # delete Permission + test_feature_store.apply(objects=[], objects_to_delete=[permission], partial=False) + + permissions = test_feature_store.list_permissions() + assert len(permissions) == 0 + + test_feature_store.teardown() + + @pytest.mark.parametrize( "test_feature_store", [lazy_fixture("feature_store_with_local_registry")], diff --git a/sdk/python/tests/unit/permissions/__init__.py b/sdk/python/tests/unit/permissions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/tests/unit/permissions/auth/conftest.py b/sdk/python/tests/unit/permissions/auth/conftest.py new file mode 100644 index 0000000000..dc71aba23b --- /dev/null +++ b/sdk/python/tests/unit/permissions/auth/conftest.py @@ -0,0 +1,101 @@ +import pytest +from kubernetes import client + +from feast.permissions.auth_model import OidcAuthConfig +from tests.unit.permissions.auth.server.test_utils import ( + invalid_list_entities_perm, + read_entities_perm, + read_fv_perm, + read_odfv_perm, + read_permissions_perm, + read_sfv_perm, +) +from tests.unit.permissions.auth.test_token_parser import _CLIENT_ID + + +@pytest.fixture +def sa_name(): + return "my-name" + + +@pytest.fixture +def 
namespace(): + return "my-ns" + + +@pytest.fixture +def rolebindings(sa_name, namespace) -> dict: + roles = ["reader", "writer"] + items = [] + for r in roles: + items.append( + client.V1RoleBinding( + metadata=client.V1ObjectMeta(name=r, namespace=namespace), + subjects=[ + client.V1Subject( + kind="ServiceAccount", + name=sa_name, + api_group="rbac.authorization.k8s.io", + ) + ], + role_ref=client.V1RoleRef( + kind="Role", name=r, api_group="rbac.authorization.k8s.io" + ), + ) + ) + return {"items": client.V1RoleBindingList(items=items), "roles": roles} + + +@pytest.fixture +def clusterrolebindings(sa_name, namespace) -> dict: + roles = ["updater"] + items = [] + for r in roles: + items.append( + client.V1ClusterRoleBinding( + metadata=client.V1ObjectMeta(name=r, namespace=namespace), + subjects=[ + client.V1Subject( + kind="ServiceAccount", + name=sa_name, + namespace=namespace, + api_group="rbac.authorization.k8s.io", + ) + ], + role_ref=client.V1RoleRef( + kind="Role", name=r, api_group="rbac.authorization.k8s.io" + ), + ) + ) + return {"items": client.V1RoleBindingList(items=items), "roles": roles} + + +@pytest.fixture +def oidc_config() -> OidcAuthConfig: + return OidcAuthConfig( + auth_server_url="", + auth_discovery_url="", + client_id=_CLIENT_ID, + client_secret="", + username="", + password="", + realm="", + ) + + +@pytest.fixture( + scope="module", + params=[ + [], + [invalid_list_entities_perm], + [ + read_entities_perm, + read_permissions_perm, + read_fv_perm, + read_odfv_perm, + read_sfv_perm, + ], + ], +) +def applied_permissions(request): + return request.param diff --git a/sdk/python/tests/unit/permissions/auth/server/mock_utils.py b/sdk/python/tests/unit/permissions/auth/server/mock_utils.py new file mode 100644 index 0000000000..8f598774ee --- /dev/null +++ b/sdk/python/tests/unit/permissions/auth/server/mock_utils.py @@ -0,0 +1,72 @@ +from unittest.mock import MagicMock, Mock + +from requests import Response + + +def mock_oidc(request, 
monkeypatch, client_id): + async def mock_oath2(self, request): + return "OK" + + monkeypatch.setattr( + "feast.permissions.auth.oidc_token_parser.OAuth2AuthorizationCodeBearer.__call__", + mock_oath2, + ) + signing_key = MagicMock() + signing_key.key = "a-key" + monkeypatch.setattr( + "feast.permissions.auth.oidc_token_parser.PyJWKClient.get_signing_key_from_jwt", + lambda self, access_token: signing_key, + ) + user_data = { + "preferred_username": "my-name", + "resource_access": {client_id: {"roles": ["reader", "writer"]}}, + } + monkeypatch.setattr( + "feast.permissions.auth.oidc_token_parser.jwt.decode", + lambda self, *args, **kwargs: user_data, + ) + discovery_response = Mock(spec=Response) + discovery_response.status_code = 200 + discovery_response.json.return_value = { + "token_endpoint": "http://localhost:8080/realms/master/protocol/openid-connect/token" + } + monkeypatch.setattr( + "feast.permissions.client.oidc_authentication_client_manager.requests.get", + lambda url: discovery_response, + ) + token_response = Mock(spec=Response) + token_response.status_code = 200 + token_response.json.return_value = {"access_token": "my-token"} + monkeypatch.setattr( + "feast.permissions.client.oidc_authentication_client_manager.requests.post", + lambda url, data, headers: token_response, + ) + + +def mock_kubernetes(request, monkeypatch): + sa_name = request.getfixturevalue("sa_name") + namespace = request.getfixturevalue("namespace") + subject = f"system:serviceaccount:{namespace}:{sa_name}" + rolebindings = request.getfixturevalue("rolebindings") + clusterrolebindings = request.getfixturevalue("clusterrolebindings") + + monkeypatch.setattr( + "feast.permissions.auth.kubernetes_token_parser.config.load_incluster_config", + lambda: None, + ) + monkeypatch.setattr( + "feast.permissions.auth.kubernetes_token_parser.jwt.decode", + lambda *args, **kwargs: {"sub": subject}, + ) + monkeypatch.setattr( + 
"feast.permissions.auth.kubernetes_token_parser.client.RbacAuthorizationV1Api.list_namespaced_role_binding", + lambda *args, **kwargs: rolebindings["items"], + ) + monkeypatch.setattr( + "feast.permissions.auth.kubernetes_token_parser.client.RbacAuthorizationV1Api.list_cluster_role_binding", + lambda *args, **kwargs: clusterrolebindings["items"], + ) + monkeypatch.setattr( + "feast.permissions.client.kubernetes_auth_client_manager.KubernetesAuthClientManager.get_token", + lambda self: "my-token", + ) diff --git a/sdk/python/tests/unit/permissions/auth/server/test_auth_registry_server.py b/sdk/python/tests/unit/permissions/auth/server/test_auth_registry_server.py new file mode 100644 index 0000000000..bc16bdac3b --- /dev/null +++ b/sdk/python/tests/unit/permissions/auth/server/test_auth_registry_server.py @@ -0,0 +1,239 @@ +from datetime import datetime + +import assertpy +import pandas as pd +import pytest +import yaml + +from feast import ( + FeatureStore, +) +from feast.permissions.permission import Permission +from feast.registry_server import start_server +from feast.wait import wait_retry_backoff # noqa: E402 +from tests.unit.permissions.auth.server import mock_utils +from tests.unit.permissions.auth.server.test_utils import ( + invalid_list_entities_perm, + read_entities_perm, + read_fv_perm, + read_odfv_perm, + read_permissions_perm, + read_sfv_perm, +) +from tests.utils.auth_permissions_util import get_remote_registry_store +from tests.utils.http_server import check_port_open # noqa: E402 + + +@pytest.fixture +def start_registry_server( + request, + auth_config, + server_port, + feature_store, + monkeypatch, +): + if "kubernetes" in auth_config: + mock_utils.mock_kubernetes(request=request, monkeypatch=monkeypatch) + elif "oidc" in auth_config: + auth_config_yaml = yaml.safe_load(auth_config) + mock_utils.mock_oidc( + request=request, + monkeypatch=monkeypatch, + client_id=auth_config_yaml["auth"]["client_id"], + ) + + 
assertpy.assert_that(server_port).is_not_equal_to(0) + + print(f"Starting Registry at {server_port}") + server = start_server(feature_store, server_port, wait_for_termination=False) + print("Waiting server availability") + wait_retry_backoff( + lambda: (None, check_port_open("localhost", server_port)), + timeout_secs=10, + ) + print("Server started") + + yield server + + print("Stopping server") + server.stop(grace=None) # Teardown server + + +def test_registry_apis( + auth_config, + temp_dir, + server_port, + start_registry_server, + feature_store, + applied_permissions, +): + print(f"Running for\n:{auth_config}") + remote_feature_store = get_remote_registry_store(server_port, feature_store) + permissions = _test_list_permissions(remote_feature_store, applied_permissions) + _test_list_entities(remote_feature_store, applied_permissions) + _test_list_fvs(remote_feature_store, applied_permissions) + + if _permissions_exist_in_permission_list( + [ + read_entities_perm, + read_permissions_perm, + read_fv_perm, + read_odfv_perm, + read_sfv_perm, + ], + permissions, + ): + _test_get_historical_features(remote_feature_store) + + +def _test_get_historical_features(client_fs: FeatureStore): + entity_df = pd.DataFrame.from_dict( + { + # entity's join key -> entity values + "driver_id": [1001, 1002, 1003], + # "event_timestamp" (reserved key) -> timestamps + "event_timestamp": [ + datetime(2021, 4, 12, 10, 59, 42), + datetime(2021, 4, 12, 8, 12, 10), + datetime(2021, 4, 12, 16, 40, 26), + ], + # (optional) label name -> label values. 
Feast does not process these + "label_driver_reported_satisfaction": [1, 5, 3], + # values we're using for an on-demand transformation + "val_to_add": [1, 2, 3], + "val_to_add_2": [10, 20, 30], + } + ) + + training_df = client_fs.get_historical_features( + entity_df=entity_df, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ], + ).to_df() + assertpy.assert_that(training_df).is_not_none() + + +def _test_list_entities(client_fs: FeatureStore, permissions: list[Permission]): + entities = client_fs.list_entities() + + if not _is_auth_enabled(client_fs) or _is_permission_enabled( + client_fs, permissions, read_entities_perm + ): + assertpy.assert_that(entities).is_not_none() + assertpy.assert_that(len(entities)).is_equal_to(1) + assertpy.assert_that(entities[0].name).is_equal_to("driver") + else: + assertpy.assert_that(entities).is_not_none() + assertpy.assert_that(len(entities)).is_equal_to(0) + + +def _no_permission_retrieved(permissions: list[Permission]) -> bool: + return len(permissions) == 0 + + +def _test_list_permissions( + client_fs: FeatureStore, applied_permissions: list[Permission] +) -> list[Permission]: + if _is_auth_enabled(client_fs) and _permissions_exist_in_permission_list( + [invalid_list_entities_perm], applied_permissions + ): + with pytest.raises(Exception): + client_fs.list_permissions() + return [] + else: + permissions = client_fs.list_permissions() + + if not _is_auth_enabled(client_fs): + assertpy.assert_that(permissions).is_not_none() + assertpy.assert_that(len(permissions)).is_equal_to(len(applied_permissions)) + elif _is_auth_enabled(client_fs) and _permissions_exist_in_permission_list( + [ + read_entities_perm, + read_permissions_perm, + read_fv_perm, + read_odfv_perm, + read_sfv_perm, + ], + permissions, + ): + assertpy.assert_that(permissions).is_not_none() + 
assertpy.assert_that(len(permissions)).is_equal_to( + len( + [ + read_entities_perm, + read_permissions_perm, + read_fv_perm, + read_odfv_perm, + read_sfv_perm, + ] + ) + ) + elif _is_auth_enabled(client_fs) and _is_listing_permissions_allowed(permissions): + assertpy.assert_that(permissions).is_not_none() + assertpy.assert_that(len(permissions)).is_equal_to(1) + + return permissions + + +def _is_listing_permissions_allowed(permissions: list[Permission]) -> bool: + return read_permissions_perm in permissions + + +def _is_auth_enabled(client_fs: FeatureStore) -> bool: + return client_fs.config.auth_config.type != "no_auth" + + +def _test_list_fvs(client_fs: FeatureStore, permissions: list[Permission]): + if _is_auth_enabled(client_fs) and _permissions_exist_in_permission_list( + [invalid_list_entities_perm], permissions + ): + with pytest.raises(Exception): + client_fs.list_feature_views() + return [] + else: + fvs = client_fs.list_feature_views() + for fv in fvs: + print(f"{fv.name}, {type(fv).__name__}") + + if not _is_auth_enabled(client_fs) or _is_permission_enabled( + client_fs, permissions, read_fv_perm + ): + assertpy.assert_that(fvs).is_not_none() + assertpy.assert_that(len(fvs)).is_equal_to(2) + + names = _to_names(fvs) + assertpy.assert_that(names).contains("driver_hourly_stats") + assertpy.assert_that(names).contains("driver_hourly_stats_fresh") + else: + assertpy.assert_that(fvs).is_not_none() + assertpy.assert_that(len(fvs)).is_equal_to(0) + + +def _permissions_exist_in_permission_list( + permission_to_test: list[Permission], permission_list: list[Permission] +) -> bool: + return all(e in permission_list for e in permission_to_test) + + +def _is_permission_enabled( + client_fs: FeatureStore, + permissions: list[Permission], + permission: Permission, +): + return _is_auth_enabled(client_fs) and ( + _no_permission_retrieved(permissions) + or ( + _permissions_exist_in_permission_list( + [read_permissions_perm, permission], permissions + ) + ) + ) + + +def 
_to_names(items): + return [i.name for i in items] diff --git a/sdk/python/tests/unit/permissions/auth/server/test_utils.py b/sdk/python/tests/unit/permissions/auth/server/test_utils.py new file mode 100644 index 0000000000..5d781919a0 --- /dev/null +++ b/sdk/python/tests/unit/permissions/auth/server/test_utils.py @@ -0,0 +1,61 @@ +import assertpy +import pytest + +from feast import Entity, FeatureView, OnDemandFeatureView, StreamFeatureView +from feast.permissions.action import AuthzedAction +from feast.permissions.permission import Permission +from feast.permissions.policy import RoleBasedPolicy +from feast.permissions.server.utils import AuthManagerType, str_to_auth_manager_type + +read_permissions_perm = Permission( + name="read_permissions_perm", + types=Permission, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], +) + +read_entities_perm = Permission( + name="read_entities_perm", + types=Entity, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], +) + +read_fv_perm = Permission( + name="read_fv_perm", + types=FeatureView, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], +) + +read_odfv_perm = Permission( + name="read_odfv_perm", + types=OnDemandFeatureView, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], +) + +read_sfv_perm = Permission( + name="read_sfv_perm", + types=StreamFeatureView, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], +) + +invalid_list_entities_perm = Permission( + name="invalid_list_entity_perm", + types=Entity, + policy=RoleBasedPolicy(roles=["dancer"]), + actions=[AuthzedAction.DESCRIBE], +) + + +@pytest.mark.parametrize( + "label, value", + [(t.value, t) for t in AuthManagerType] + + [(t.value.upper(), t) for t in AuthManagerType] + + [(t.value.lower(), t) for t in AuthManagerType] + + [("none", AuthManagerType.NONE)], +) +def test_str_to_auth_type(label, value): + 
assertpy.assert_that(str_to_auth_manager_type(label)).is_equal_to(value) diff --git a/sdk/python/tests/unit/permissions/auth/test_token_extractor.py b/sdk/python/tests/unit/permissions/auth/test_token_extractor.py new file mode 100644 index 0000000000..a6fcd89e5b --- /dev/null +++ b/sdk/python/tests/unit/permissions/auth/test_token_extractor.py @@ -0,0 +1,140 @@ +from unittest.mock import Mock + +import assertpy +import pytest +from fastapi.requests import Request +from starlette.authentication import ( + AuthenticationError, +) + +from feast.permissions.server.arrow_flight_token_extractor import ( + ArrowFlightTokenExtractor, +) +from feast.permissions.server.grpc_token_extractor import GrpcTokenExtractor +from feast.permissions.server.rest_token_extractor import RestTokenExtractor + + +@pytest.mark.parametrize( + "error_type, dict, header", + [ + (ValueError, {}, None), + (ValueError, {"other": 123}, None), + (AuthenticationError, {}, ""), + (AuthenticationError, {}, "abcd"), + (AuthenticationError, {}, "other-scheme abcd"), + ], +) +def test_rest_token_extractor_failures(error_type, dict, header): + token_extractor = RestTokenExtractor() + + request = None + if header is not None: + request = Mock(spec=Request) + if header != "": + request.headers = {"authorization": header} + else: + request.headers = {} + with pytest.raises(error_type): + if request is None: + token_extractor.extract_access_token(**dict) + else: + token_extractor.extract_access_token(request=request) + + +@pytest.mark.parametrize( + "error_type, dict, header", + [ + (ValueError, {}, None), + (ValueError, {"other": 123}, None), + (AuthenticationError, {}, ""), + (AuthenticationError, {}, "abcd"), + (AuthenticationError, {}, "other-scheme abcd"), + ], +) +def test_grpc_token_extractor_failures(error_type, dict, header): + token_extractor = GrpcTokenExtractor() + + metadata = None + if header is not None: + metadata = {} + if metadata != "": + metadata["authorization"] = header + with 
pytest.raises(error_type): + if metadata is None: + token_extractor.extract_access_token(**dict) + else: + token_extractor.extract_access_token(metadata=metadata) + + +def test_rest_token_extractor(): + token_extractor = RestTokenExtractor() + request: Request = Mock(spec=Request) + token = "abcd" + + request.headers = {"authorization": f"Bearer {token}"} + assertpy.assert_that( + token_extractor.extract_access_token(request=request) + ).is_equal_to(token) + + request.headers = {"authorization": f"bearer {token}"} + assertpy.assert_that( + token_extractor.extract_access_token(request=request) + ).is_equal_to(token) + + +def test_grpc_token_extractor(): + token_extractor = GrpcTokenExtractor() + metadata = {} + token = "abcd" + + metadata["authorization"] = f"Bearer {token}" + assertpy.assert_that( + token_extractor.extract_access_token(metadata=metadata) + ).is_equal_to(token) + + metadata["authorization"] = f"bearer {token}" + assertpy.assert_that( + token_extractor.extract_access_token(metadata=metadata) + ).is_equal_to(token) + + +@pytest.mark.parametrize( + "error_type, dict, header", + [ + (ValueError, {}, None), + (ValueError, {"other": 123}, None), + (AuthenticationError, {}, ""), + (AuthenticationError, {}, "abcd"), + (AuthenticationError, {}, ["abcd"]), + (AuthenticationError, {}, ["other-scheme abcd"]), + ], +) +def test_arrow_flight_token_extractor_failures(error_type, dict, header): + token_extractor = ArrowFlightTokenExtractor() + + headers = None + if header is not None: + if header != "": + headers = {"authorization": header} + else: + headers = {} + with pytest.raises(error_type): + if headers is None: + token_extractor.extract_access_token(**dict) + else: + token_extractor.extract_access_token(headers=headers) + + +def test_arrow_flight_token_extractor(): + token_extractor = ArrowFlightTokenExtractor() + token = "abcd" + + headers = {"authorization": [f"Bearer {token}"]} + assertpy.assert_that( + 
token_extractor.extract_access_token(headers=headers) + ).is_equal_to(token) + + headers = {"authorization": [f"bearer {token}"]} + assertpy.assert_that( + token_extractor.extract_access_token(headers=headers) + ).is_equal_to(token) diff --git a/sdk/python/tests/unit/permissions/auth/test_token_parser.py b/sdk/python/tests/unit/permissions/auth/test_token_parser.py new file mode 100644 index 0000000000..6ae9094f81 --- /dev/null +++ b/sdk/python/tests/unit/permissions/auth/test_token_parser.py @@ -0,0 +1,122 @@ +# test_token_validator.py + +import asyncio +from unittest.mock import MagicMock, patch + +import assertpy +import pytest +from starlette.authentication import ( + AuthenticationError, +) + +from feast.permissions.auth.kubernetes_token_parser import KubernetesTokenParser +from feast.permissions.auth.oidc_token_parser import OidcTokenParser +from feast.permissions.user import User + +_CLIENT_ID = "test" + + +@patch( + "feast.permissions.auth.oidc_token_parser.OAuth2AuthorizationCodeBearer.__call__" +) +@patch("feast.permissions.auth.oidc_token_parser.PyJWKClient.get_signing_key_from_jwt") +@patch("feast.permissions.auth.oidc_token_parser.jwt.decode") +def test_oidc_token_validation_success( + mock_jwt, mock_signing_key, mock_oauth2, oidc_config +): + signing_key = MagicMock() + signing_key.key = "a-key" + mock_signing_key.return_value = signing_key + + user_data = { + "preferred_username": "my-name", + "resource_access": {_CLIENT_ID: {"roles": ["reader", "writer"]}}, + } + mock_jwt.return_value = user_data + + access_token = "aaa-bbb-ccc" + token_parser = OidcTokenParser(auth_config=oidc_config) + user = asyncio.run( + token_parser.user_details_from_access_token(access_token=access_token) + ) + + assertpy.assert_that(user).is_type_of(User) + if isinstance(user, User): + assertpy.assert_that(user.username).is_equal_to("my-name") + assertpy.assert_that(user.roles.sort()).is_equal_to(["reader", "writer"].sort()) + 
assertpy.assert_that(user.has_matching_role(["reader"])).is_true() + assertpy.assert_that(user.has_matching_role(["writer"])).is_true() + assertpy.assert_that(user.has_matching_role(["updater"])).is_false() + + +@patch( + "feast.permissions.auth.oidc_token_parser.OAuth2AuthorizationCodeBearer.__call__" +) +def test_oidc_token_validation_failure(mock_oauth2, oidc_config): + mock_oauth2.side_effect = AuthenticationError("wrong token") + + access_token = "aaa-bbb-ccc" + token_parser = OidcTokenParser(auth_config=oidc_config) + with pytest.raises(AuthenticationError): + asyncio.run( + token_parser.user_details_from_access_token(access_token=access_token) + ) + + +# TODO RBAC: Move role bindings to a reusable fixture +@patch("feast.permissions.auth.kubernetes_token_parser.config.load_incluster_config") +@patch("feast.permissions.auth.kubernetes_token_parser.jwt.decode") +@patch( + "feast.permissions.auth.kubernetes_token_parser.client.RbacAuthorizationV1Api.list_namespaced_role_binding" +) +@patch( + "feast.permissions.auth.kubernetes_token_parser.client.RbacAuthorizationV1Api.list_cluster_role_binding" +) +def test_k8s_token_validation_success( + mock_crb, + mock_rb, + mock_jwt, + mock_config, + rolebindings, + clusterrolebindings, +): + sa_name = "my-name" + namespace = "my-ns" + subject = f"system:serviceaccount:{namespace}:{sa_name}" + mock_jwt.return_value = {"sub": subject} + + mock_rb.return_value = rolebindings["items"] + mock_crb.return_value = clusterrolebindings["items"] + + roles = rolebindings["roles"] + croles = clusterrolebindings["roles"] + + access_token = "aaa-bbb-ccc" + token_parser = KubernetesTokenParser() + user = asyncio.run( + token_parser.user_details_from_access_token(access_token=access_token) + ) + + assertpy.assert_that(user).is_type_of(User) + if isinstance(user, User): + assertpy.assert_that(user.username).is_equal_to(f"{namespace}:{sa_name}") + assertpy.assert_that(user.roles.sort()).is_equal_to((roles + croles).sort()) + for r in roles: 
+ assertpy.assert_that(user.has_matching_role([r])).is_true() + for cr in croles: + assertpy.assert_that(user.has_matching_role([cr])).is_true() + assertpy.assert_that(user.has_matching_role(["foo"])).is_false() + + +@patch("feast.permissions.auth.kubernetes_token_parser.config.load_incluster_config") +@patch("feast.permissions.auth.kubernetes_token_parser.jwt.decode") +def test_k8s_token_validation_failure(mock_jwt, mock_config): + subject = "wrong-subject" + mock_jwt.return_value = {"sub": subject} + + access_token = "aaa-bbb-ccc" + token_parser = KubernetesTokenParser() + with pytest.raises(AuthenticationError): + asyncio.run( + token_parser.user_details_from_access_token(access_token=access_token) + ) diff --git a/sdk/python/tests/unit/permissions/conftest.py b/sdk/python/tests/unit/permissions/conftest.py new file mode 100644 index 0000000000..7cd944fb47 --- /dev/null +++ b/sdk/python/tests/unit/permissions/conftest.py @@ -0,0 +1,88 @@ +from unittest.mock import Mock + +import pytest + +from feast import FeatureView +from feast.infra.registry.base_registry import BaseRegistry +from feast.permissions.decorator import require_permissions +from feast.permissions.permission import AuthzedAction, Permission +from feast.permissions.policy import RoleBasedPolicy +from feast.permissions.security_manager import ( + SecurityManager, + set_security_manager, +) +from feast.permissions.user import User + + +class SecuredFeatureView(FeatureView): + def __init__(self, name, tags): + super().__init__( + name=name, + source=Mock(), + tags=tags, + ) + + @require_permissions(actions=[AuthzedAction.DESCRIBE]) + def read_protected(self) -> bool: + return True + + @require_permissions(actions=[AuthzedAction.UPDATE]) + def write_protected(self) -> bool: + return True + + def unprotected(self) -> bool: + return True + + +@pytest.fixture +def feature_views() -> list[FeatureView]: + return [ + SecuredFeatureView("secured", {}), + SecuredFeatureView("special-secured", {}), + ] + + 
+@pytest.fixture
+def users() -> dict[str, User]:
+    users = []
+    users.append(User("r", ["reader"]))
+    users.append(User("w", ["writer"]))
+    users.append(User("rw", ["reader", "writer"]))
+    users.append(User("admin", ["reader", "writer", "admin"]))
+    return dict([(u.username, u) for u in users])
+
+
+@pytest.fixture
+def security_manager() -> SecurityManager:
+    permissions = []
+    permissions.append(
+        Permission(
+            name="reader",
+            types=FeatureView,
+            policy=RoleBasedPolicy(roles=["reader"]),
+            actions=[AuthzedAction.DESCRIBE],
+        )
+    )
+    permissions.append(
+        Permission(
+            name="writer",
+            types=FeatureView,
+            policy=RoleBasedPolicy(roles=["writer"]),
+            actions=[AuthzedAction.UPDATE],
+        )
+    )
+    permissions.append(
+        Permission(
+            name="special",
+            types=FeatureView,
+            name_pattern="special.*",
+            policy=RoleBasedPolicy(roles=["admin", "special-reader"]),
+            actions=[AuthzedAction.DESCRIBE, AuthzedAction.UPDATE],
+        )
+    )
+
+    registry = Mock(spec=BaseRegistry)
+    registry.list_permissions = Mock(return_value=permissions)
+    sm = SecurityManager(project="any", registry=registry)
+    set_security_manager(sm)
+    return sm
diff --git a/sdk/python/tests/unit/permissions/test_decision.py b/sdk/python/tests/unit/permissions/test_decision.py
new file mode 100644
index 0000000000..23bafedeab
--- /dev/null
+++ b/sdk/python/tests/unit/permissions/test_decision.py
@@ -0,0 +1,34 @@
+import assertpy
+import pytest
+
+from feast.permissions.decision import DecisionEvaluator
+
+# Each vote is a tuple of `current_vote` and expected output of `is_decided`
+
+
+@pytest.mark.parametrize(
+    "evaluator, votes, decision, no_of_explanations",
+    [
+        (DecisionEvaluator(3), [(True, True)], True, 0),
+        (DecisionEvaluator(3), [(True, True)], True, 0),
+        (
+            DecisionEvaluator(3),
+            [(False, False), (False, False), (False, True)],
+            False,
+            3,
+        ),
+    ],
+)
+def test_decision_evaluator(evaluator, votes, decision, no_of_explanations):
+    for v in votes:
+        vote = v[0]
+        decided = v[1]
+        
evaluator.add_grant(vote, "" if vote else "a message") + if decided: + assertpy.assert_that(evaluator.is_decided()).is_true() + else: + assertpy.assert_that(evaluator.is_decided()).is_false() + + grant, explanations = evaluator.grant() + assertpy.assert_that(grant).is_equal_to(decision) + assertpy.assert_that(explanations).is_length(no_of_explanations) diff --git a/sdk/python/tests/unit/permissions/test_decorator.py b/sdk/python/tests/unit/permissions/test_decorator.py new file mode 100644 index 0000000000..8f6c2c420b --- /dev/null +++ b/sdk/python/tests/unit/permissions/test_decorator.py @@ -0,0 +1,32 @@ +import assertpy +import pytest + + +@pytest.mark.parametrize( + "username, can_read, can_write", + [ + (None, False, False), + ("r", True, False), + ("w", False, True), + ("rw", True, True), + ], +) +def test_access_SecuredFeatureView( + security_manager, feature_views, users, username, can_read, can_write +): + sm = security_manager + fv = feature_views[0] + user = users.get(username) + + sm.set_current_user(user) + if can_read: + fv.read_protected() + else: + with pytest.raises(PermissionError): + fv.read_protected() + if can_write: + fv.write_protected() + else: + with pytest.raises(PermissionError): + fv.write_protected() + assertpy.assert_that(fv.unprotected()).is_true() diff --git a/sdk/python/tests/unit/permissions/test_oidc_auth_client.py b/sdk/python/tests/unit/permissions/test_oidc_auth_client.py new file mode 100644 index 0000000000..22ed5b6f87 --- /dev/null +++ b/sdk/python/tests/unit/permissions/test_oidc_auth_client.py @@ -0,0 +1,62 @@ +from unittest.mock import patch + +from requests import Session + +from feast.permissions.auth_model import ( + KubernetesAuthConfig, + NoAuthConfig, + OidcAuthConfig, +) +from feast.permissions.client.http_auth_requests_wrapper import ( + AuthenticatedRequestsSession, + get_http_auth_requests_session, +) +from feast.permissions.client.kubernetes_auth_client_manager import ( + KubernetesAuthClientManager, +) +from 
feast.permissions.client.oidc_authentication_client_manager import ( + OidcAuthClientManager, +) + +MOCKED_TOKEN_VALUE: str = "dummy_token" + + +def _get_dummy_oidc_auth_type() -> OidcAuthConfig: + oidc_config = OidcAuthConfig( + auth_discovery_url="http://localhost:8080/realms/master/.well-known/openid-configuration", + type="oidc", + username="admin_test", + password="password_test", + client_id="dummy_client_id", + ) + return oidc_config + + +@patch.object(KubernetesAuthClientManager, "get_token", return_value=MOCKED_TOKEN_VALUE) +@patch.object(OidcAuthClientManager, "get_token", return_value=MOCKED_TOKEN_VALUE) +def test_http_auth_requests_session(mock_kubernetes_token, mock_oidc_token): + no_auth_config = NoAuthConfig() + assert isinstance(get_http_auth_requests_session(no_auth_config), Session) + + oidc_auth_config = _get_dummy_oidc_auth_type() + oidc_auth_requests_session = get_http_auth_requests_session(oidc_auth_config) + _assert_auth_requests_session(oidc_auth_requests_session, MOCKED_TOKEN_VALUE) + + kubernetes_auth_config = KubernetesAuthConfig(type="kubernetes") + kubernetes_auth_requests_session = get_http_auth_requests_session( + kubernetes_auth_config + ) + _assert_auth_requests_session(kubernetes_auth_requests_session, MOCKED_TOKEN_VALUE) + + +def _assert_auth_requests_session( + auth_req_session: AuthenticatedRequestsSession, expected_token: str +): + assert isinstance(auth_req_session, AuthenticatedRequestsSession) + assert "Authorization" in auth_req_session.headers, ( + "Authorization header is missing in object of class: " + "AuthenticatedRequestsSession " + ) + assert ( + auth_req_session.headers["Authorization"] == f"Bearer {expected_token}" + ), "Authorization token is incorrect" diff --git a/sdk/python/tests/unit/permissions/test_permission.py b/sdk/python/tests/unit/permissions/test_permission.py new file mode 100644 index 0000000000..606d750d81 --- /dev/null +++ b/sdk/python/tests/unit/permissions/test_permission.py @@ -0,0 +1,205 @@ 
+from unittest.mock import Mock + +import assertpy +import pytest + +from feast.batch_feature_view import BatchFeatureView +from feast.data_source import DataSource +from feast.entity import Entity +from feast.feast_object import ALL_RESOURCE_TYPES +from feast.feature_service import FeatureService +from feast.feature_view import FeatureView +from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.action import ALL_ACTIONS, AuthzedAction +from feast.permissions.permission import ( + Permission, +) +from feast.permissions.policy import AllowAll, Policy +from feast.saved_dataset import ValidationReference +from feast.stream_feature_view import StreamFeatureView + + +def test_defaults(): + p = Permission(name="test") + assertpy.assert_that(type(p.types)).is_equal_to(list) + assertpy.assert_that(p.types).is_equal_to(ALL_RESOURCE_TYPES) + assertpy.assert_that(p.name_pattern).is_none() + assertpy.assert_that(p.tags).is_none() + assertpy.assert_that(type(p.actions)).is_equal_to(list) + assertpy.assert_that(p.actions).is_equal_to(ALL_ACTIONS) + assertpy.assert_that(type(p.actions)).is_equal_to(list) + assertpy.assert_that(isinstance(p.policy, Policy)).is_true() + assertpy.assert_that(p.policy).is_equal_to(AllowAll) + + +@pytest.mark.parametrize( + "dict, result", + [ + ({"types": None}, True), + ({"types": []}, True), + ({"types": ALL_RESOURCE_TYPES}, True), + ({"types": [FeatureView, FeatureService]}, True), + ({"actions": None}, False), + ({"actions": []}, False), + ({"actions": ALL_ACTIONS}, True), + ({"actions": ALL_ACTIONS}, True), + ({"actions": [AuthzedAction.CREATE, AuthzedAction.DELETE]}, True), + ({"policy": None}, False), + ({"policy": []}, False), + ({"policy": Mock(spec=Policy)}, True), + ], +) +def test_validity(dict, result): + if not result: + with pytest.raises(ValueError): + Permission(name="test", **dict) + else: + Permission(name="test", **dict) + + +def test_normalized_args(): + p = Permission(name="test") + 
assertpy.assert_that(type(p.types)).is_equal_to(list) + assertpy.assert_that(p.types).is_equal_to(ALL_RESOURCE_TYPES) + + p = Permission(name="test", actions=AuthzedAction.CREATE) + assertpy.assert_that(type(p.actions)).is_equal_to(list) + assertpy.assert_that(p.actions).is_equal_to([AuthzedAction.CREATE]) + + +@pytest.mark.parametrize( + "resource, types, result", + [ + (None, ALL_RESOURCE_TYPES, False), + ("invalid string", ALL_RESOURCE_TYPES, False), + ("ALL", ALL_RESOURCE_TYPES, False), + ("ALL", ALL_RESOURCE_TYPES, False), + ( + Mock(spec=FeatureView), + [t for t in ALL_RESOURCE_TYPES if t not in [FeatureView]], + False, + ), + ( + Mock(spec=OnDemandFeatureView), + [t for t in ALL_RESOURCE_TYPES if t not in [OnDemandFeatureView]], + False, + ), # OnDemandFeatureView is a BaseFeatureView + ( + Mock(spec=BatchFeatureView), + FeatureView, + True, + ), # BatchFeatureView is a FeatureView + ( + Mock(spec=BatchFeatureView), + [t for t in ALL_RESOURCE_TYPES if t not in [FeatureView, BatchFeatureView]], + False, + ), + ( + Mock(spec=StreamFeatureView), + FeatureView, + True, + ), # StreamFeatureView is a FeatureView + ( + Mock(spec=StreamFeatureView), + [ + t + for t in ALL_RESOURCE_TYPES + if t not in [FeatureView, StreamFeatureView] + ], + False, + ), + ( + Mock(spec=Entity), + [t for t in ALL_RESOURCE_TYPES if t not in [Entity]], + False, + ), + ( + Mock(spec=FeatureService), + [t for t in ALL_RESOURCE_TYPES if t not in [FeatureService]], + False, + ), + ( + Mock(spec=DataSource), + [t for t in ALL_RESOURCE_TYPES if t not in [DataSource]], + False, + ), + ( + Mock(spec=ValidationReference), + [t for t in ALL_RESOURCE_TYPES if t not in [ValidationReference]], + False, + ), + ( + Mock(spec=Permission), + [t for t in ALL_RESOURCE_TYPES if t not in [Permission]], + False, + ), + ] + + [(Mock(spec=t), ALL_RESOURCE_TYPES, True) for t in ALL_RESOURCE_TYPES] + + [(Mock(spec=t), [t], True) for t in ALL_RESOURCE_TYPES], +) +def test_match_resource_with_subclasses(resource, 
types, result): + p = Permission(name="test", types=types) + assertpy.assert_that(p.match_resource(resource)).is_equal_to(result) + + +@pytest.mark.parametrize( + "pattern, name, match", + [ + ("test.*", "test", True), + ("test.*", "test1", True), + ("test.*", "wrongtest", False), + (".*test.*", "wrongtest", True), + ], +) +def test_resource_match_with_name_filter(pattern, name, match): + p = Permission(name="test", name_pattern=pattern) + for t in ALL_RESOURCE_TYPES: + resource = Mock(spec=t) + resource.name = name + assertpy.assert_that(p.match_resource(resource)).is_equal_to(match) + + +@pytest.mark.parametrize( + ("required_tags, tags, result"), + [ + ({"owner": "dev"}, {}, False), + ({"owner": "dev"}, {"owner": "master"}, False), + ({"owner": "dev"}, {"owner": "dev", "other": 1}, True), + ({"owner": "dev", "dep": 1}, {"owner": "dev", "other": 1}, False), + ({"owner": "dev", "dep": 1}, {"owner": "dev", "other": 1, "dep": 1}, True), + ], +) +def test_resource_match_with_tags(required_tags, tags, result): + # Missing tags + p = Permission(name="test", required_tags=required_tags) + for t in ALL_RESOURCE_TYPES: + resource = Mock(spec=t) + resource.name = "test" + resource.required_tags = tags + assertpy.assert_that(p.match_resource(resource)).is_equal_to(result) + + +@pytest.mark.parametrize( + ("permitted_actions, requested_actions, result"), + [(ALL_ACTIONS, [a], True) for a in AuthzedAction.__members__.values()] + + [ + ( + [AuthzedAction.CREATE, AuthzedAction.DELETE], + [AuthzedAction.CREATE, AuthzedAction.DELETE], + True, + ), + ([AuthzedAction.CREATE, AuthzedAction.DELETE], [AuthzedAction.CREATE], True), + ([AuthzedAction.CREATE, AuthzedAction.DELETE], [AuthzedAction.DELETE], True), + ([AuthzedAction.CREATE, AuthzedAction.DELETE], [AuthzedAction.UPDATE], False), + ( + [AuthzedAction.CREATE, AuthzedAction.DELETE], + [AuthzedAction.CREATE, AuthzedAction.DELETE, AuthzedAction.UPDATE], + False, + ), + ], +) +def test_match_actions(permitted_actions, 
requested_actions, result): + p = Permission(name="test", actions=permitted_actions) + assertpy.assert_that( + p.match_actions(requested_actions=requested_actions) + ).is_equal_to(result) diff --git a/sdk/python/tests/unit/permissions/test_policy.py b/sdk/python/tests/unit/permissions/test_policy.py new file mode 100644 index 0000000000..4e78282d4f --- /dev/null +++ b/sdk/python/tests/unit/permissions/test_policy.py @@ -0,0 +1,44 @@ +import assertpy +import pytest + +from feast.permissions.policy import AllowAll, RoleBasedPolicy +from feast.permissions.user import User + + +@pytest.mark.parametrize( + "username", + [("r"), ("w"), ("rw"), ("missing")], +) +def test_allow_all(users, username): + user = users.get(username, User(username, [])) + assertpy.assert_that(AllowAll.validate_user(user)).is_true() + + +@pytest.mark.parametrize( + "required_roles, username, result", + [ + (["reader"], "r", True), + (["writer"], "r", False), + (["reader", "writer"], "r", True), + (["writer", "updater"], "r", False), + (["reader"], "w", False), + (["writer"], "w", True), + (["reader", "writer"], "w", True), + (["reader", "updater"], "w", False), + (["reader"], "rw", True), + (["writer"], "rw", True), + (["reader", "writer"], "rw", True), + (["updater"], "rw", False), + ], +) +def test_role_based_policy(users, required_roles, username, result): + user = users.get(username) + policy = RoleBasedPolicy(roles=required_roles) + + validate_result, explain = policy.validate_user(user) + assertpy.assert_that(validate_result).is_equal_to(result) + + if result is True: + assertpy.assert_that(explain).is_equal_to("") + else: + assertpy.assert_that(len(explain)).is_greater_than(0) diff --git a/sdk/python/tests/unit/permissions/test_security_manager.py b/sdk/python/tests/unit/permissions/test_security_manager.py new file mode 100644 index 0000000000..192542da78 --- /dev/null +++ b/sdk/python/tests/unit/permissions/test_security_manager.py @@ -0,0 +1,83 @@ +import assertpy +import pytest + +from 
feast.permissions.action import READ, AuthzedAction +from feast.permissions.security_manager import assert_permissions, permitted_resources + + +@pytest.mark.parametrize( + "username, requested_actions, allowed, allowed_single, raise_error_in_assert, raise_error_in_permit", + [ + (None, [], False, [False, False], [True, True], False), + ("r", [AuthzedAction.DESCRIBE], True, [True, True], [False, False], False), + ("r", [AuthzedAction.UPDATE], False, [False, False], [True, True], False), + ("w", [AuthzedAction.DESCRIBE], False, [False, False], [True, True], False), + ("w", [AuthzedAction.UPDATE], False, [True, True], [False, False], False), + ("rw", [AuthzedAction.DESCRIBE], False, [True, True], [False, False], False), + ("rw", [AuthzedAction.UPDATE], False, [True, True], [False, False], False), + ( + "rw", + [AuthzedAction.DESCRIBE, AuthzedAction.UPDATE], + False, + [False, False], + [True, True], + True, + ), + ( + "admin", + [AuthzedAction.DESCRIBE, AuthzedAction.UPDATE], + False, + [False, True], + [True, False], + True, + ), + ( + "admin", + READ + [AuthzedAction.UPDATE], + False, + [False, False], + [True, True], + True, + ), + ], +) +def test_access_SecuredFeatureView( + security_manager, + feature_views, + users, + username, + requested_actions, + allowed, + allowed_single, + raise_error_in_assert, + raise_error_in_permit, +): + sm = security_manager + resources = feature_views + + user = users.get(username) + sm.set_current_user(user) + + result = [] + if raise_error_in_permit: + with pytest.raises(PermissionError): + result = permitted_resources(resources=resources, actions=requested_actions) + else: + result = permitted_resources(resources=resources, actions=requested_actions) + + if allowed: + assertpy.assert_that(result).is_equal_to(resources) + elif not raise_error_in_permit: + filtered = [r for i, r in enumerate(resources) if allowed_single[i]] + assertpy.assert_that(result).is_equal_to(filtered) + + for i, r in enumerate(resources): + if 
allowed_single[i]: + result = assert_permissions(resource=r, actions=requested_actions) + assertpy.assert_that(result).is_equal_to(r) + elif raise_error_in_assert[i]: + with pytest.raises(PermissionError): + assert_permissions(resource=r, actions=requested_actions) + else: + result = assert_permissions(resource=r, actions=requested_actions) + assertpy.assert_that(result).is_none() diff --git a/sdk/python/tests/unit/permissions/test_user.py b/sdk/python/tests/unit/permissions/test_user.py new file mode 100644 index 0000000000..cce318cba7 --- /dev/null +++ b/sdk/python/tests/unit/permissions/test_user.py @@ -0,0 +1,34 @@ +import assertpy +import pytest + +from feast.permissions.user import User + + +@pytest.fixture(scope="module") +def users(): + users = [] + users.append(User("a", ["a1", "a2"])) + users.append(User("b", ["b1", "b2"])) + return dict([(u.username, u) for u in users]) + + +@pytest.mark.parametrize( + "username, roles, result", + [ + ("c", [], False), + ("a", ["b1"], False), + ("a", ["a1", "b1"], True), + ("a", ["a1"], True), + ("a", ["a1", "a2"], True), + ("a", ["a1", "a2", "a3"], True), + ("b", ["a1", "a3"], False), + ("b", ["a1", "b1"], True), + ("b", ["b1", "b2"], True), + ("b", ["b1", "b2", "b3"], True), + ], +) +def test_user_has_matching_role(users, username, roles, result): + user = users.get(username, User(username, [])) + assertpy.assert_that(user.has_matching_role(requested_roles=roles)).is_equal_to( + result + ) diff --git a/sdk/python/tests/unit/test_offline_server.py b/sdk/python/tests/unit/test_offline_server.py index 5991e7450d..237e2ecad4 100644 --- a/sdk/python/tests/unit/test_offline_server.py +++ b/sdk/python/tests/unit/test_offline_server.py @@ -14,7 +14,7 @@ RemoteOfflineStore, RemoteOfflineStoreConfig, ) -from feast.offline_server import OfflineServer +from feast.offline_server import OfflineServer, _init_auth_manager from feast.repo_config import RepoConfig from tests.utils.cli_repo_creator import CliRunner @@ -26,6 +26,7 @@ def 
empty_offline_server(environment): store = environment.feature_store location = "grpc+tcp://localhost:0" + _init_auth_manager(store=store) return OfflineServer(store=store, location=location) @@ -102,6 +103,8 @@ def test_remote_offline_store_apis(): with tempfile.TemporaryDirectory() as temp_dir: store = default_store(str(temp_dir)) location = "grpc+tcp://localhost:0" + + _init_auth_manager(store=store) server = OfflineServer(store=store, location=location) assertpy.assert_that(server).is_not_none diff --git a/sdk/python/tests/utils/auth_permissions_util.py b/sdk/python/tests/utils/auth_permissions_util.py new file mode 100644 index 0000000000..3b5e589812 --- /dev/null +++ b/sdk/python/tests/utils/auth_permissions_util.py @@ -0,0 +1,245 @@ +import os +import subprocess + +import yaml +from keycloak import KeycloakAdmin + +from feast import ( + FeatureStore, + RepoConfig, +) +from feast.infra.registry.remote import RemoteRegistryConfig +from feast.permissions.permission import Permission +from feast.wait import wait_retry_backoff +from tests.utils.cli_repo_creator import CliRunner +from tests.utils.http_server import check_port_open + +PROJECT_NAME = "feast_test_project" + + +def include_auth_config(file_path, auth_config: str): + with open(file_path, "r") as file: + existing_content = yaml.safe_load(file) + new_section = yaml.safe_load(auth_config) + if isinstance(existing_content, dict) and isinstance(new_section, dict): + existing_content.update(new_section) + else: + raise ValueError("Both existing content and new section must be dictionaries.") + with open(file_path, "w") as file: + yaml.safe_dump(existing_content, file, default_flow_style=False) + print(f"Updated auth section at {file_path}") + + +def default_store( + temp_dir, + auth_config: str, + permissions: list[Permission], +): + runner = CliRunner() + result = runner.run(["init", PROJECT_NAME], cwd=temp_dir) + repo_path = os.path.join(temp_dir, PROJECT_NAME, "feature_repo") + assert result.returncode == 
0
+
+    include_auth_config(
+        file_path=f"{repo_path}/feature_store.yaml", auth_config=auth_config
+    )
+
+    result = runner.run(["--chdir", repo_path, "apply"], cwd=temp_dir)
+    assert result.returncode == 0
+
+    fs = FeatureStore(repo_path=repo_path)
+
+    fs.apply(permissions)
+
+    return fs
+
+
+def start_feature_server(repo_path: str, server_port: int, metrics: bool = False):
+    host = "0.0.0.0"
+    cmd = [
+        "feast",
+        "-c" + repo_path,
+        "serve",
+        "--host",
+        host,
+        "--port",
+        str(server_port),
+    ]
+    if metrics:
+        cmd.append("--metrics")
+
+    feast_server_process = subprocess.Popen(
+        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+    )
+    _time_out_sec: int = 60
+    # Wait for server to start
+    wait_retry_backoff(
+        lambda: (None, check_port_open(host, server_port)),
+        timeout_secs=_time_out_sec,
+        timeout_msg=f"Unable to start the feast server in {_time_out_sec} seconds for remote online store type, port={server_port}",
+    )
+
+    # Check if metrics are enabled and Prometheus server is running
+    if metrics:
+        wait_retry_backoff(
+            lambda: (None, check_port_open("localhost", 8000)),
+            timeout_secs=_time_out_sec,
+            timeout_msg="Unable to start the Prometheus server in 60 seconds.",
+        )
+    else:
+        assert not check_port_open(
+            "localhost", 8000
+        ), "Prometheus server is running when it should be disabled."
+ + yield f"http://localhost:{server_port}" + + if feast_server_process is not None: + feast_server_process.kill() + + # wait server to free the port + wait_retry_backoff( + lambda: ( + None, + not check_port_open("localhost", server_port), + ), + timeout_msg=f"Unable to stop the feast server in {_time_out_sec} seconds for remote online store type, port={server_port}", + timeout_secs=_time_out_sec, + ) + + +def get_remote_registry_store(server_port, feature_store): + registry_config = RemoteRegistryConfig( + registry_type="remote", path=f"localhost:{server_port}" + ) + + store = FeatureStore( + config=RepoConfig( + project=PROJECT_NAME, + auth=feature_store.config.auth, + registry=registry_config, + provider="local", + entity_key_serialization_version=2, + ) + ) + return store + + +def setup_permissions_on_keycloak(keycloak_admin: KeycloakAdmin): + new_client_id = "feast-integration-client" + new_client_secret = "feast-integration-client-secret" + # Create a new client + client_representation = { + "clientId": new_client_id, + "secret": new_client_secret, + "enabled": True, + "directAccessGrantsEnabled": True, + "publicClient": False, + "redirectUris": ["*"], + "serviceAccountsEnabled": True, + "standardFlowEnabled": True, + } + keycloak_admin.create_client(client_representation) + + # Get the client ID + client_id = keycloak_admin.get_client_id(new_client_id) + + # Role representation + reader_role_rep = { + "name": "reader", + "description": "feast reader client role", + "composite": False, + "clientRole": True, + "containerId": client_id, + } + keycloak_admin.create_client_role(client_id, reader_role_rep, True) + reader_role_id = keycloak_admin.get_client_role( + client_id=client_id, role_name="reader" + ) + + # Role representation + writer_role_rep = { + "name": "writer", + "description": "feast writer client role", + "composite": False, + "clientRole": True, + "containerId": client_id, + } + keycloak_admin.create_client_role(client_id, writer_role_rep, True) + 
writer_role_id = keycloak_admin.get_client_role( + client_id=client_id, role_name="writer" + ) + + # Mapper representation + mapper_representation = { + "name": "client-roles-mapper", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-client-role-mapper", + "consentRequired": False, + "config": { + "multivalued": "true", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "roles", + "jsonType.label": "String", + "client.id": client_id, + }, + } + + # Add predefined client roles mapper to the client + keycloak_admin.add_mapper_to_client(client_id, mapper_representation) + + reader_writer_user = { + "username": "reader_writer", + "enabled": True, + "firstName": "reader_writer fn", + "lastName": "reader_writer ln", + "email": "reader_writer@email.com", + "emailVerified": True, + "credentials": [{"value": "password", "type": "password", "temporary": False}], + } + reader_writer_user_id = keycloak_admin.create_user(reader_writer_user) + keycloak_admin.assign_client_role( + user_id=reader_writer_user_id, + client_id=client_id, + roles=[reader_role_id, writer_role_id], + ) + + reader_user = { + "username": "reader", + "enabled": True, + "firstName": "reader fn", + "lastName": "reader ln", + "email": "reader@email.com", + "emailVerified": True, + "credentials": [{"value": "password", "type": "password", "temporary": False}], + } + reader_user_id = keycloak_admin.create_user(reader_user) + keycloak_admin.assign_client_role( + user_id=reader_user_id, client_id=client_id, roles=[reader_role_id] + ) + + writer_user = { + "username": "writer", + "enabled": True, + "firstName": "writer fn", + "lastName": "writer ln", + "email": "writer@email.com", + "emailVerified": True, + "credentials": [{"value": "password", "type": "password", "temporary": False}], + } + writer_user_id = keycloak_admin.create_user(writer_user) + keycloak_admin.assign_client_role( + user_id=writer_user_id, client_id=client_id, 
roles=[writer_role_id] + ) + + no_roles_user = { + "username": "no_roles_user", + "enabled": True, + "firstName": "no_roles_user fn", + "lastName": "no_roles_user ln", + "email": "no_roles_user@email.com", + "emailVerified": True, + "credentials": [{"value": "password", "type": "password", "temporary": False}], + } + keycloak_admin.create_user(no_roles_user) diff --git a/setup.py b/setup.py index 6fb5bfee61..d53aee1002 100644 --- a/setup.py +++ b/setup.py @@ -61,6 +61,9 @@ "dask[dataframe]>=2024.2.1", "prometheus_client", "psutil", + "bigtree>=0.19.2", + "pyjwt", + "kubernetes<=20.13.0", ] GCP_REQUIRED = [ @@ -183,6 +186,7 @@ "pytest-env", "Sphinx>4.0.0,<7", "testcontainers==4.4.0", + "python-keycloak==4.2.2", "pre-commit<3.3.2", "assertpy==1.1", "pip-tools", From 75983f2b84dae125ada63193afd3d3946fd1114f Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Wed, 21 Aug 2024 11:37:16 -0400 Subject: [PATCH 18/96] chore: Fix rbac url. --- docs/SUMMARY.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index b176eec3c1..4f8379b8ed 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -15,6 +15,7 @@ * [Write Patterns](getting-started/architecture/write-patterns.md) * [Feature Transformation](getting-started/architecture/feature-transformation.md) * [Feature Serving and Model Inference](getting-started/architecture/model-inference.md) + * [Role-Based Access Control (RBAC)](getting-started/architecture/rbac.md) * [Concepts](getting-started/concepts/README.md) * [Overview](getting-started/concepts/overview.md) * [Data ingestion](getting-started/concepts/data-ingestion.md) @@ -23,7 +24,6 @@ * [Feature retrieval](getting-started/concepts/feature-retrieval.md) * [Point-in-time joins](getting-started/concepts/point-in-time-joins.md) * [Registry](getting-started/concepts/registry.md) - * [Role-Based Access Control (RBAC)](getting-started/architecture/rbac.md) * [\[Alpha\] Saved dataset](getting-started/concepts/dataset.md) * 
[Components](getting-started/components/README.md) * [Overview](getting-started/components/overview.md) From 0a48f7bb436febb0171c78a559a577eedeff421f Mon Sep 17 00:00:00 2001 From: Abdul Hameed Date: Wed, 21 Aug 2024 14:34:17 -0400 Subject: [PATCH 19/96] fix: Links to the RBAC documentation under Concepts and Components (#4430) * fix the rbac docs links Signed-off-by: Abdul Hameed * fix: links to the RBAC documentation under Concepts and Components sections Signed-off-by: Abdul Hameed --------- Signed-off-by: Abdul Hameed --- docs/SUMMARY.md | 2 ++ docs/getting-started/components/overview.md | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index 4f8379b8ed..a5d02c4f64 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -24,6 +24,7 @@ * [Feature retrieval](getting-started/concepts/feature-retrieval.md) * [Point-in-time joins](getting-started/concepts/point-in-time-joins.md) * [Registry](getting-started/concepts/registry.md) + * [Permission](getting-started/concepts/permission.md) * [\[Alpha\] Saved dataset](getting-started/concepts/dataset.md) * [Components](getting-started/components/README.md) * [Overview](getting-started/components/overview.md) @@ -32,6 +33,7 @@ * [Online store](getting-started/components/online-store.md) * [Batch Materialization Engine](getting-started/components/batch-materialization-engine.md) * [Provider](getting-started/components/provider.md) + * [Authorization Manager](getting-started/components/authz_manager.md) * [Third party integrations](getting-started/third-party-integrations.md) * [FAQ](getting-started/faq.md) diff --git a/docs/getting-started/components/overview.md b/docs/getting-started/components/overview.md index 0ee3835de6..ac0b99de8a 100644 --- a/docs/getting-started/components/overview.md +++ b/docs/getting-started/components/overview.md @@ -28,4 +28,4 @@ A complete Feast deployment contains the following components: * **Batch Materialization Engine:** The [Batch 
Materialization Engine](batch-materialization-engine.md) component launches a process which loads data into the online store from the offline store. By default, Feast uses a local in-process engine implementation to materialize data. However, additional infrastructure can be used for a more scalable materialization process. * **Online Store:** The online store is a database that stores only the latest feature values for each entity. The online store is either populated through materialization jobs or through [stream ingestion](../../reference/data-sources/push.md). * **Offline Store:** The offline store persists batch data that has been ingested into Feast. This data is used for producing training datasets. For feature retrieval and materialization, Feast does not manage the offline store directly, but runs queries against it. However, offline stores can be configured to support writes if Feast configures logging functionality of served features. -* **Authorization manager**: The authorization manager detects authentication tokens from client requests to Feast servers and uses this information to enforce permission policies on the requested services. +* **Authorization Manager**: The authorization manager detects authentication tokens from client requests to Feast servers and uses this information to enforce permission policies on the requested services. 
From 42d659f2aeb7f07731d2815b5068a63fff472504 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Fri, 23 Aug 2024 14:02:43 +0400 Subject: [PATCH 20/96] docs: Reorganize registry docs (#4407) * reorganize registry docs Signed-off-by: tokoko * remove commented out text Signed-off-by: tokoko * changes in registry.md Signed-off-by: tokoko --------- Signed-off-by: tokoko Co-authored-by: tokoko --- docs/SUMMARY.md | 8 +- docs/getting-started/components/registry.md | 52 ++++++--- docs/getting-started/concepts/README.md | 4 - docs/getting-started/concepts/registry.md | 107 ------------------ docs/reference/registries/README.md | 23 ++++ docs/reference/registries/gcs.md | 23 ++++ docs/reference/registries/local.md | 23 ++++ docs/reference/registries/s3.md | 23 ++++ .../{registry => registries}/snowflake.md | 2 +- .../registries/sql.md} | 7 +- 10 files changed, 136 insertions(+), 136 deletions(-) delete mode 100644 docs/getting-started/concepts/registry.md create mode 100644 docs/reference/registries/README.md create mode 100644 docs/reference/registries/gcs.md create mode 100644 docs/reference/registries/local.md create mode 100644 docs/reference/registries/s3.md rename docs/reference/{registry => registries}/snowflake.md (97%) rename docs/{tutorials/using-scalable-registry.md => reference/registries/sql.md} (97%) diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index a5d02c4f64..c8e313850f 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -23,7 +23,6 @@ * [Feature view](getting-started/concepts/feature-view.md) * [Feature retrieval](getting-started/concepts/feature-retrieval.md) * [Point-in-time joins](getting-started/concepts/point-in-time-joins.md) - * [Registry](getting-started/concepts/registry.md) * [Permission](getting-started/concepts/permission.md) * [\[Alpha\] Saved dataset](getting-started/concepts/dataset.md) * [Components](getting-started/components/README.md) @@ -45,7 +44,6 @@ * [Real-time credit scoring on 
AWS](tutorials/tutorials-overview/real-time-credit-scoring-on-aws.md) * [Driver stats on Snowflake](tutorials/tutorials-overview/driver-stats-on-snowflake.md) * [Validating historical features with Great Expectations](tutorials/validating-historical-features.md) -* [Using Scalable Registry](tutorials/using-scalable-registry.md) * [Building streaming features](tutorials/building-streaming-features.md) ## How-to Guides @@ -114,6 +112,12 @@ * [Hazelcast (contrib)](reference/online-stores/hazelcast.md) * [ScyllaDB (contrib)](reference/online-stores/scylladb.md) * [SingleStore (contrib)](reference/online-stores/singlestore.md) +* [Registries](reference/registries/README.md) + * [Local](reference/registries/local.md) + * [S3](reference/registries/s3.md) + * [GCS](reference/registries/gcs.md) + * [SQL](reference/registries/sql.md) + * [Snowflake](reference/registries/snowflake.md) * [Providers](reference/providers/README.md) * [Local](reference/providers/local.md) * [Google Cloud Platform](reference/providers/google-cloud-platform.md) diff --git a/docs/getting-started/components/registry.md b/docs/getting-started/components/registry.md index 0939fb53fc..0c85c5ad36 100644 --- a/docs/getting-started/components/registry.md +++ b/docs/getting-started/components/registry.md @@ -1,31 +1,51 @@ # Registry -The Feast feature registry is a central catalog of all the feature definitions and their related metadata. It allows data scientists to search, discover, and collaborate on new features. +The Feast feature registry is a central catalog of all feature definitions and their related metadata. Feast uses the registry to store all applied Feast objects (e.g. Feature views, entities, etc). It allows data scientists to search, discover, and collaborate on new features. The registry exposes methods to apply, list, retrieve and delete these objects, and is an abstraction with multiple implementations. -Each Feast deployment has a single feature registry. 
Feast only supports file-based registries today, but supports four different backends. +Feast comes with built-in file-based and sql-based registry implementations. By default, Feast uses a file-based registry, which stores the protobuf representation of the registry as a serialized file in the local file system. For more details on which registries are supported, please see [Registries](../../reference/registries/). -* `Local`: Used as a local backend for storing the registry during development -* `S3`: Used as a centralized backend for storing the registry on AWS -* `GCS`: Used as a centralized backend for storing the registry on GCP -* `[Alpha] Azure`: Used as centralized backend for storing the registry on Azure Blob storage. +## Updating the registry -The feature registry is updated during different operations when using Feast. More specifically, objects within the registry \(entities, feature views, feature services\) are updated when running `apply` from the Feast CLI, but metadata about objects can also be updated during operations like materialization. +We recommend users store their Feast feature definitions in a version controlled repository, which then via CI/CD +automatically stays synced with the registry. Users will often also want multiple registries to correspond to +different environments (e.g. dev vs staging vs prod), with staging and production registries with locked down write +access since they can impact real user traffic. See [Running Feast in Production](../../how-to-guides/running-feast-in-production.md#1.-automatically-deploying-changes-to-your-feature-definitions) for details on how to set this up. -Users interact with a feature registry through the Feast SDK. Listing all feature views: +## Accessing the registry from clients + +Users can specify the registry through a `feature_store.yaml` config file, or programmatically. 
We often see teams +preferring the programmatic approach because it makes notebook driven development very easy: + +### Option 1: programmatically specifying the registry ```python -fs = FeatureStore("my_feature_repo/") -print(fs.list_feature_views()) +repo_config = RepoConfig( + registry=RegistryConfig(path="gs://feast-test-gcs-bucket/registry.pb"), + project="feast_demo_gcp", + provider="gcp", + offline_store="file", # Could also be the OfflineStoreConfig e.g. FileOfflineStoreConfig + online_store="null", # Could also be the OnlineStoreConfig e.g. RedisOnlineStoreConfig +) +store = FeatureStore(config=repo_config) +``` + +### Option 2: specifying the registry in the project's `feature_store.yaml` file + +```yaml +project: feast_demo_aws +provider: aws +registry: s3://feast-test-s3-bucket/registry.pb +online_store: null +offline_store: + type: file ``` -Or retrieving a specific feature view: +Instantiating a `FeatureStore` object can then point to this: ```python -fs = FeatureStore("my_feature_repo/") -fv = fs.get_feature_view(β€œmy_fv1”) +store = FeatureStore(repo_path=".") ``` {% hint style="info" %} -The feature registry is a [Protobuf representation](https://github.com/feast-dev/feast/blob/master/protos/feast/core/Registry.proto) of Feast metadata. This Protobuf file can be read programmatically from other programming languages, but no compatibility guarantees are made on the internal structure of the registry. -{% endhint %} - +The file-based feature registry is a [Protobuf representation](https://github.com/feast-dev/feast/blob/master/protos/feast/core/Registry.proto) of Feast metadata. This Protobuf file can be read programmatically from other programming languages, but no compatibility guarantees are made on the internal structure of the registry. 
+{% endhint %} \ No newline at end of file diff --git a/docs/getting-started/concepts/README.md b/docs/getting-started/concepts/README.md index 1769a2d741..9b967fb5af 100644 --- a/docs/getting-started/concepts/README.md +++ b/docs/getting-started/concepts/README.md @@ -24,10 +24,6 @@ [point-in-time-joins.md](point-in-time-joins.md) {% endcontent-ref %} -{% content-ref url="registry.md" %} -[registry.md](registry.md) -{% endcontent-ref %} - {% content-ref url="dataset.md" %} [dataset.md](dataset.md) {% endcontent-ref %} diff --git a/docs/getting-started/concepts/registry.md b/docs/getting-started/concepts/registry.md deleted file mode 100644 index 8ac32ce87b..0000000000 --- a/docs/getting-started/concepts/registry.md +++ /dev/null @@ -1,107 +0,0 @@ -# Registry - -Feast uses a registry to store all applied Feast objects (e.g. Feature views, entities, etc). The registry exposes -methods to apply, list, retrieve and delete these objects, and is an abstraction with multiple implementations. - -### Options for registry implementations - -#### File-based registry -By default, Feast uses a file-based registry implementation, which stores the protobuf representation of the registry as -a serialized file. This registry file can be stored in a local file system, or in cloud storage (in, say, S3 or GCS, or Azure). - -The quickstart guides that use `feast init` will use a registry on a local file system. 
To allow Feast to configure -a remote file registry, you need to create a GCS / S3 bucket that Feast can understand: -{% tabs %} -{% tab title="Example S3 file registry" %} -```yaml -project: feast_demo_aws -provider: aws -registry: - path: s3://[YOUR BUCKET YOU CREATED]/registry.pb - cache_ttl_seconds: 60 -online_store: null -offline_store: - type: file -``` -{% endtab %} - -{% tab title="Example GCS file registry" %} -```yaml -project: feast_demo_gcp -provider: gcp -registry: - path: gs://[YOUR BUCKET YOU CREATED]/registry.pb - cache_ttl_seconds: 60 -online_store: null -offline_store: - type: file -``` -{% endtab %} -{% endtabs %} - -However, there are inherent limitations with a file-based registry, since changing a single field in the registry -requires re-writing the whole registry file. With multiple concurrent writers, this presents a risk of data loss, or -bottlenecks writes to the registry since all changes have to be serialized (e.g. when running materialization for -multiple feature views or time ranges concurrently). - -#### SQL Registry -Alternatively, a [SQL Registry](../../tutorials/using-scalable-registry.md) can be used for a more scalable registry. - -The configuration roughly looks like: -```yaml -project: -provider: -online_store: redis -offline_store: file -registry: - registry_type: sql - path: postgresql://postgres:mysecretpassword@127.0.0.1:55001/feast - cache_ttl_seconds: 60 - sqlalchemy_config_kwargs: - echo: false - pool_pre_ping: true -``` - -This supports any SQLAlchemy compatible database as a backend. The exact schema can be seen in [sql.py](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/infra/registry/sql.py) - -### Updating the registry - -We recommend users store their Feast feature definitions in a version controlled repository, which then via CI/CD -automatically stays synced with the registry. Users will often also want multiple registries to correspond to -different environments (e.g. 
dev vs staging vs prod), with staging and production registries with locked down write -access since they can impact real user traffic. See [Running Feast in Production](../../how-to-guides/running-feast-in-production.md#1.-automatically-deploying-changes-to-your-feature-definitions) for details on how to set this up. - -### Accessing the registry from clients - -Users can specify the registry through a `feature_store.yaml` config file, or programmatically. We often see teams -preferring the programmatic approach because it makes notebook driven development very easy: - -#### Option 1: programmatically specifying the registry - -```python -repo_config = RepoConfig( - registry=RegistryConfig(path="gs://feast-test-gcs-bucket/registry.pb"), - project="feast_demo_gcp", - provider="gcp", - offline_store="file", # Could also be the OfflineStoreConfig e.g. FileOfflineStoreConfig - online_store="null", # Could also be the OnlineStoreConfig e.g. RedisOnlineStoreConfig -) -store = FeatureStore(config=repo_config) -``` - -#### Option 2: specifying the registry in the project's `feature_store.yaml` file - -```yaml -project: feast_demo_aws -provider: aws -registry: s3://feast-test-s3-bucket/registry.pb -online_store: null -offline_store: - type: file -``` - -Instantiating a `FeatureStore` object can then point to this: - -```python -store = FeatureStore(repo_path=".") -``` \ No newline at end of file diff --git a/docs/reference/registries/README.md b/docs/reference/registries/README.md new file mode 100644 index 0000000000..1310506f1d --- /dev/null +++ b/docs/reference/registries/README.md @@ -0,0 +1,23 @@ +# Registies + +Please see [Registry](../../getting-started/architecture-and-components/registry.md) for a conceptual explanation of registries. 
+ +{% content-ref url="local.md" %} +[local.md](local.md) +{% endcontent-ref %} + +{% content-ref url="s3.md" %} +[s3.md](s3.md) +{% endcontent-ref %} + +{% content-ref url="gcs.md" %} +[gcs.md](gcs.md) +{% endcontent-ref %} + +{% content-ref url="sql.md" %} +[sql.md](sql.md) +{% endcontent-ref %} + +{% content-ref url="snowflake.md" %} +[snowflake.md](snowflake.md) +{% endcontent-ref %} diff --git a/docs/reference/registries/gcs.md b/docs/reference/registries/gcs.md new file mode 100644 index 0000000000..13c9657aa1 --- /dev/null +++ b/docs/reference/registries/gcs.md @@ -0,0 +1,23 @@ +# GCS Registry + +## Description + +GCS registry provides support for storing the protobuf representation of your feature store objects (data sources, feature views, feature services, etc.) uing Google Cloud Storage. + +While it can be used in production, there are still inherent limitations with a file-based registries, since changing a single field in the registry requires re-writing the whole registry file. With multiple concurrent writers, this presents a risk of data loss, or bottlenecks writes to the registry since all changes have to be serialized (e.g. when running materialization for multiple feature views or time ranges concurrently). + +An example of how to configure this would be: + +## Example + +{% code title="feature_store.yaml" %} +```yaml +project: feast_gcp +registry: + path: gs://[YOUR BUCKET YOU CREATED]/registry.pb + cache_ttl_seconds: 60 +online_store: null +offline_store: + type: dask +``` +{% endcode %} \ No newline at end of file diff --git a/docs/reference/registries/local.md b/docs/reference/registries/local.md new file mode 100644 index 0000000000..ad1d98cea9 --- /dev/null +++ b/docs/reference/registries/local.md @@ -0,0 +1,23 @@ +# Local Registry + +## Description + +Local registry provides support for storing the protobuf representation of your feature store objects (data sources, feature views, feature services, etc.) in local file system. 
It is only intended to be used for experimentation with Feast and should not be used in production. + +There are inherent limitations with a file-based registries, since changing a single field in the registry requires re-writing the whole registry file. With multiple concurrent writers, this presents a risk of data loss, or bottlenecks writes to the registry since all changes have to be serialized (e.g. when running materialization for multiple feature views or time ranges concurrently). + +An example of how to configure this would be: + +## Example + +{% code title="feature_store.yaml" %} +```yaml +project: feast_local +registry: + path: registry.pb + cache_ttl_seconds: 60 +online_store: null +offline_store: + type: dask +``` +{% endcode %} \ No newline at end of file diff --git a/docs/reference/registries/s3.md b/docs/reference/registries/s3.md new file mode 100644 index 0000000000..65069c415c --- /dev/null +++ b/docs/reference/registries/s3.md @@ -0,0 +1,23 @@ +# S3 Registry + +## Description + +S3 registry provides support for storing the protobuf representation of your feature store objects (data sources, feature views, feature services, etc.) in S3 file system. + +While it can be used in production, there are still inherent limitations with a file-based registries, since changing a single field in the registry requires re-writing the whole registry file. With multiple concurrent writers, this presents a risk of data loss, or bottlenecks writes to the registry since all changes have to be serialized (e.g. when running materialization for multiple feature views or time ranges concurrently). 
+ +An example of how to configure this would be: + +## Example + +{% code title="feature_store.yaml" %} +```yaml +project: feast_aws_s3 +registry: + path: s3://[YOUR BUCKET YOU CREATED]/registry.pb + cache_ttl_seconds: 60 +online_store: null +offline_store: + type: dask +``` +{% endcode %} \ No newline at end of file diff --git a/docs/reference/registry/snowflake.md b/docs/reference/registries/snowflake.md similarity index 97% rename from docs/reference/registry/snowflake.md rename to docs/reference/registries/snowflake.md index 31b0db9582..00d87b1977 100644 --- a/docs/reference/registry/snowflake.md +++ b/docs/reference/registries/snowflake.md @@ -1,4 +1,4 @@ -# Snowflake registry +# Snowflake Registry ## Description diff --git a/docs/tutorials/using-scalable-registry.md b/docs/reference/registries/sql.md similarity index 97% rename from docs/tutorials/using-scalable-registry.md rename to docs/reference/registries/sql.md index 25746f60e2..631a20cbe3 100644 --- a/docs/tutorials/using-scalable-registry.md +++ b/docs/reference/registries/sql.md @@ -1,9 +1,4 @@ ---- -description: >- - Tutorial on how to use the SQL registry for scalable registry updates ---- - -# Using Scalable Registry +# SQL Registry ## Overview From 7d744ad3aa4a0057f136aa5bc2a8b416ab827398 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Fri, 23 Aug 2024 12:14:49 -0400 Subject: [PATCH 21/96] chore: Update Slack link --- infra/templates/README.md.jinja2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infra/templates/README.md.jinja2 b/infra/templates/README.md.jinja2 index 4e71ac2900..9c7df17da9 100644 --- a/infra/templates/README.md.jinja2 +++ b/infra/templates/README.md.jinja2 @@ -15,7 +15,7 @@ [![GitHub Release](https://img.shields.io/github/v/release/feast-dev/feast.svg?style=flat&sort=semver&color=blue)](https://github.com/feast-dev/feast/releases) ## Join us on Slack! 
-πŸ‘‹πŸ‘‹πŸ‘‹ [Come say hi on Slack!](https://join.slack.com/t/feastopensource/signup) +πŸ‘‹πŸ‘‹πŸ‘‹ [Come say hi on Slack!](https://communityinviter.com/apps/feastopensource/feast-the-open-source-feature-store) ## Overview From a2460d9e65a406cc3641552e0c2f497eeb2fc735 Mon Sep 17 00:00:00 2001 From: "Yang, Bo" Date: Fri, 23 Aug 2024 09:23:34 -0700 Subject: [PATCH 22/96] build: Set a proper build-system protobuf version (#4438) build: force the protobuf version in the build system so that it is compatible with the runtime dependency Signed-off-by: Yang, Bo --- pyproject.toml | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 00170ab443..af44861502 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,14 @@ [build-system] -requires = ["setuptools>=60", "wheel", "setuptools_scm>=6.2", "grpcio", "grpcio-tools>=1.47.0", "mypy-protobuf==3.1", "sphinx!=4.0.0"] +requires = [ + "setuptools>=60", + "wheel", + "setuptools_scm>=6.2", + "grpcio", + "grpcio-tools>=1.47.0", + "mypy-protobuf==3.1", + "protobuf>=4.24.0,<5.0.0", + "sphinx!=4.0.0", +] build-backend = "setuptools.build_meta" [tool.setuptools_scm] From 2ba93f650e975f3b02c60dda7c8f5f2407852d74 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Fri, 23 Aug 2024 12:32:47 -0400 Subject: [PATCH 23/96] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index ede28c4c95..10c20050d3 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ [![GitHub Release](https://img.shields.io/github/v/release/feast-dev/feast.svg?style=flat&sort=semver&color=blue)](https://github.com/feast-dev/feast/releases) ## Join us on Slack! 
-πŸ‘‹πŸ‘‹πŸ‘‹ [Come say hi on Slack!](https://join.slack.com/t/feastopensource/signup) +πŸ‘‹πŸ‘‹πŸ‘‹ [Come say hi on Slack!](https://communityinviter.com/apps/feastopensource/feast-the-open-source-feature-store) ## Overview @@ -230,4 +230,4 @@ Thanks goes to these incredible people:
- \ No newline at end of file + From dda0088f25eab5828613bd6d080aeddf681641f0 Mon Sep 17 00:00:00 2001 From: brijesh-vora-sp <137945907+brijesh-vora-sp@users.noreply.github.com> Date: Sat, 24 Aug 2024 03:48:08 -0500 Subject: [PATCH 24/96] fix: Typos related to k8s (#4442) fix typos Signed-off-by: Brijesh Vora --- sdk/python/feast/infra/materialization/kubernetes/Dockerfile | 2 +- sdk/python/feast/repo_config.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/python/feast/infra/materialization/kubernetes/Dockerfile b/sdk/python/feast/infra/materialization/kubernetes/Dockerfile index 510bb72285..38d4f5f188 100644 --- a/sdk/python/feast/infra/materialization/kubernetes/Dockerfile +++ b/sdk/python/feast/infra/materialization/kubernetes/Dockerfile @@ -5,7 +5,7 @@ RUN apt-get update && \ WORKDIR /app -COPY sdk/python/feast/infra/materialization/kuberentes/main.py /app +COPY sdk/python/feast/infra/materialization/kubernetes/main.py /app # Copy necessary parts of the Feast codebase COPY sdk/python sdk/python diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index 069b579999..a270e6795c 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -46,7 +46,7 @@ "local": "feast.infra.materialization.local_engine.LocalMaterializationEngine", "snowflake.engine": "feast.infra.materialization.snowflake_engine.SnowflakeMaterializationEngine", "lambda": "feast.infra.materialization.aws_lambda.lambda_engine.LambdaMaterializationEngine", - "k8s": "feast.infra.materialization.kubernetes.kubernetes_materialization_engine.KubernetesMaterializationEngine", + "k8s": "feast.infra.materialization.kubernetes.k8s_materialization_engine.KubernetesMaterializationEngine", "spark.engine": "feast.infra.materialization.contrib.spark.spark_materialization_engine.SparkMaterializationEngine", } From 896360af19a37c9a2a4634ec88021c4f69bdb141 Mon Sep 17 00:00:00 2001 From: lokeshrangineni 
<19699092+lokeshrangineni@users.noreply.github.com> Date: Sat, 24 Aug 2024 05:16:46 -0400 Subject: [PATCH 25/96] feat: Refactoring code to get oidc end points from discovery URL. (#4429) * refactoring the permissions side server side code to get the OIDC end points from the discovery URL. Also removing the auth_server_url config from oidc auth config. Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> * refactoring the permissions side server side code to get the OIDC end points from the discovery URL. Also removing the auth_server_url config from oidc auth config. Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> * refactoring the permissions side server side code to get the OIDC end points from the discovery URL. Also removing the auth_server_url config from oidc auth config. Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> * refactoring the permissions side server side code to get the OIDC end points from the discovery URL. Also removing the auth_server_url config from oidc auth config. Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> * Fixing the issue with pre-commit hook template. Accidentally this was reverted in previous rebase and reverting it now. 
Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> --------- Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> --- .../components/authz_manager.md | 3 +- .../permissions/auth/oidc_token_parser.py | 15 ++++--- sdk/python/feast/permissions/auth_model.py | 1 - .../oidc_authentication_client_manager.py | 21 ++-------- sdk/python/feast/permissions/oidc_service.py | 40 +++++++++++++++++++ sdk/python/tests/conftest.py | 1 - .../feature_repos/repo_configuration.py | 1 - .../universal/data_sources/file.py | 1 - .../infra/scaffolding/test_repo_config.py | 4 -- .../tests/unit/permissions/auth/conftest.py | 3 +- .../permissions/auth/server/mock_utils.py | 9 +++++ .../permissions/auth/test_token_parser.py | 9 ++++- 12 files changed, 73 insertions(+), 35 deletions(-) create mode 100644 sdk/python/feast/permissions/oidc_service.py diff --git a/docs/getting-started/components/authz_manager.md b/docs/getting-started/components/authz_manager.md index 09ca4d1366..876dd84f2e 100644 --- a/docs/getting-started/components/authz_manager.md +++ b/docs/getting-started/components/authz_manager.md @@ -68,8 +68,7 @@ auth: type: oidc client_id: _CLIENT_ID__ client_secret: _CLIENT_SECRET__ - realm: _REALM__ - auth_server_url: _OIDC_SERVER_URL_ + realm: _REALM__ auth_discovery_url: _OIDC_SERVER_URL_/realms/master/.well-known/openid-configuration ... 
``` diff --git a/sdk/python/feast/permissions/auth/oidc_token_parser.py b/sdk/python/feast/permissions/auth/oidc_token_parser.py index 921a585bc2..fce9fdcbb2 100644 --- a/sdk/python/feast/permissions/auth/oidc_token_parser.py +++ b/sdk/python/feast/permissions/auth/oidc_token_parser.py @@ -11,6 +11,7 @@ from feast.permissions.auth.token_parser import TokenParser from feast.permissions.auth_model import OidcAuthConfig +from feast.permissions.oidc_service import OIDCDiscoveryService from feast.permissions.user import User logger = logging.getLogger(__name__) @@ -27,6 +28,9 @@ class OidcTokenParser(TokenParser): def __init__(self, auth_config: OidcAuthConfig): self._auth_config = auth_config + self.oidc_discovery_service = OIDCDiscoveryService( + self._auth_config.auth_discovery_url + ) async def _validate_token(self, access_token: str): """ @@ -38,9 +42,9 @@ async def _validate_token(self, access_token: str): request.headers = {"Authorization": f"Bearer {access_token}"} oauth_2_scheme = OAuth2AuthorizationCodeBearer( - tokenUrl=f"{self._auth_config.auth_server_url}/realms/{self._auth_config.realm}/protocol/openid-connect/token", - authorizationUrl=f"{self._auth_config.auth_server_url}/realms/{self._auth_config.realm}/protocol/openid-connect/auth", - refreshUrl=f"{self._auth_config.auth_server_url}/realms/{self._auth_config.realm}/protocol/openid-connect/token", + tokenUrl=self.oidc_discovery_service.get_token_url(), + authorizationUrl=self.oidc_discovery_service.get_authorization_url(), + refreshUrl=self.oidc_discovery_service.get_refresh_url(), ) await oauth_2_scheme(request=request) @@ -62,9 +66,10 @@ async def user_details_from_access_token(self, access_token: str) -> User: except Exception as e: raise AuthenticationError(f"Invalid token: {e}") - url = f"{self._auth_config.auth_server_url}/realms/{self._auth_config.realm}/protocol/openid-connect/certs" optional_custom_headers = {"User-agent": "custom-user-agent"} - jwks_client = PyJWKClient(url, 
headers=optional_custom_headers) + jwks_client = PyJWKClient( + self.oidc_discovery_service.get_jwks_url(), headers=optional_custom_headers + ) try: signing_key = jwks_client.get_signing_key_from_jwt(access_token) diff --git a/sdk/python/feast/permissions/auth_model.py b/sdk/python/feast/permissions/auth_model.py index afb0a22bc9..28eeb951a7 100644 --- a/sdk/python/feast/permissions/auth_model.py +++ b/sdk/python/feast/permissions/auth_model.py @@ -8,7 +8,6 @@ class AuthConfig(FeastConfigBaseModel): class OidcAuthConfig(AuthConfig): - auth_server_url: Optional[str] = None auth_discovery_url: str client_id: str client_secret: Optional[str] = None diff --git a/sdk/python/feast/permissions/client/oidc_authentication_client_manager.py b/sdk/python/feast/permissions/client/oidc_authentication_client_manager.py index 544764aae0..6744a1d2ad 100644 --- a/sdk/python/feast/permissions/client/oidc_authentication_client_manager.py +++ b/sdk/python/feast/permissions/client/oidc_authentication_client_manager.py @@ -4,6 +4,7 @@ from feast.permissions.auth_model import OidcAuthConfig from feast.permissions.client.auth_client_manager import AuthenticationClientManager +from feast.permissions.oidc_service import OIDCDiscoveryService logger = logging.getLogger(__name__) @@ -12,25 +13,11 @@ class OidcAuthClientManager(AuthenticationClientManager): def __init__(self, auth_config: OidcAuthConfig): self.auth_config = auth_config - def _get_token_endpoint(self): - response = requests.get(self.auth_config.auth_discovery_url) - if response.status_code == 200: - oidc_config = response.json() - if not oidc_config["token_endpoint"]: - raise RuntimeError( - " OIDC token_endpoint is not available from discovery url response." 
- ) - return oidc_config["token_endpoint"].replace( - "master", self.auth_config.realm - ) - else: - raise RuntimeError( - f"Error fetching OIDC token endpoint configuration: {response.status_code} - {response.text}" - ) - def get_token(self): # Fetch the token endpoint from the discovery URL - token_endpoint = self._get_token_endpoint() + token_endpoint = OIDCDiscoveryService( + self.auth_config.auth_discovery_url + ).get_token_url() token_request_body = { "grant_type": "password", diff --git a/sdk/python/feast/permissions/oidc_service.py b/sdk/python/feast/permissions/oidc_service.py new file mode 100644 index 0000000000..73d0ec8f1b --- /dev/null +++ b/sdk/python/feast/permissions/oidc_service.py @@ -0,0 +1,40 @@ +import requests + + +class OIDCDiscoveryService: + def __init__(self, discovery_url: str): + self.discovery_url = discovery_url + self._discovery_data = None # Initialize it lazily. + + @property + def discovery_data(self): + """Lazily fetches and caches the OIDC discovery data.""" + if self._discovery_data is None: + self._discovery_data = self._fetch_discovery_data() + return self._discovery_data + + def _fetch_discovery_data(self) -> dict: + try: + response = requests.get(self.discovery_url) + response.raise_for_status() + return response.json() + except requests.RequestException as e: + raise RuntimeError( + f"Error fetching OIDC discovery response, discovery url - {self.discovery_url}, exception - {e} " + ) + + def get_authorization_url(self) -> str: + """Returns the authorization endpoint URL.""" + return self.discovery_data.get("authorization_endpoint") + + def get_token_url(self) -> str: + """Returns the token endpoint URL.""" + return self.discovery_data.get("token_endpoint") + + def get_jwks_url(self) -> str: + """Returns the jwks endpoint URL.""" + return self.discovery_data.get("jwks_uri") + + def get_refresh_url(self) -> str: + """Returns the refresh token URL (usually same as token URL).""" + return self.get_token_url() diff --git 
a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index 74aa68e984..d40f699b6b 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -463,7 +463,6 @@ def is_integration_test(all_markers_from_module): username: reader_writer password: password realm: master - auth_server_url: KEYCLOAK_URL_PLACE_HOLDER auth_discovery_url: KEYCLOAK_URL_PLACE_HOLDER/realms/master/.well-known/openid-configuration """), ], diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index 235c909d5f..0bf737f616 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -464,7 +464,6 @@ def setup(self): password="password", realm="master", type="oidc", - auth_server_url=keycloak_url, auth_discovery_url=f"{keycloak_url}/realms/master/.well-known" f"/openid-configuration", ) diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py index b600699f81..adbb248a20 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py @@ -449,7 +449,6 @@ def __init__(self, project_name: str, *args, **kwargs): username: reader_writer password: password realm: master - auth_server_url: {keycloak_url} auth_discovery_url: {keycloak_url}/realms/master/.well-known/openid-configuration """ self.auth_config = auth_config_template.format(keycloak_url=self.keycloak_url) diff --git a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py index 0725d6d261..5331d350e2 100644 --- a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py +++ b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py @@ -214,7 +214,6 @@ def 
test_auth_config(): username: test_user_name password: test_password realm: master - auth_server_url: http://localhost:8712 auth_discovery_url: http://localhost:8080/realms/master/.well-known/openid-configuration registry: "registry.db" provider: local @@ -237,7 +236,6 @@ def test_auth_config(): username: test_user_name password: test_password realm: master - auth_server_url: http://localhost:8712 auth_discovery_url: http://localhost:8080/realms/master/.well-known/openid-configuration registry: "registry.db" provider: local @@ -260,7 +258,6 @@ def test_auth_config(): username: test_user_name password: test_password realm: master - auth_server_url: http://localhost:8080 auth_discovery_url: http://localhost:8080/realms/master/.well-known/openid-configuration registry: "registry.db" provider: local @@ -278,7 +275,6 @@ def test_auth_config(): assert oidc_repo_config.auth_config.username == "test_user_name" assert oidc_repo_config.auth_config.password == "test_password" assert oidc_repo_config.auth_config.realm == "master" - assert oidc_repo_config.auth_config.auth_server_url == "http://localhost:8080" assert ( oidc_repo_config.auth_config.auth_discovery_url == "http://localhost:8080/realms/master/.well-known/openid-configuration" diff --git a/sdk/python/tests/unit/permissions/auth/conftest.py b/sdk/python/tests/unit/permissions/auth/conftest.py index dc71aba23b..0d6acd7fb2 100644 --- a/sdk/python/tests/unit/permissions/auth/conftest.py +++ b/sdk/python/tests/unit/permissions/auth/conftest.py @@ -73,8 +73,7 @@ def clusterrolebindings(sa_name, namespace) -> dict: @pytest.fixture def oidc_config() -> OidcAuthConfig: return OidcAuthConfig( - auth_server_url="", - auth_discovery_url="", + auth_discovery_url="https://localhost:8080/realms/master/.well-known/openid-configuration", client_id=_CLIENT_ID, client_secret="", username="", diff --git a/sdk/python/tests/unit/permissions/auth/server/mock_utils.py b/sdk/python/tests/unit/permissions/auth/server/mock_utils.py index 
8f598774ee..12f7785b05 100644 --- a/sdk/python/tests/unit/permissions/auth/server/mock_utils.py +++ b/sdk/python/tests/unit/permissions/auth/server/mock_utils.py @@ -42,6 +42,15 @@ async def mock_oath2(self, request): lambda url, data, headers: token_response, ) + monkeypatch.setattr( + "feast.permissions.oidc_service.OIDCDiscoveryService._fetch_discovery_data", + lambda self, *args, **kwargs: { + "authorization_endpoint": "https://localhost:8080/realms/master/protocol/openid-connect/auth", + "token_endpoint": "https://localhost:8080/realms/master/protocol/openid-connect/token", + "jwks_uri": "https://localhost:8080/realms/master/protocol/openid-connect/certs", + }, + ) + def mock_kubernetes(request, monkeypatch): sa_name = request.getfixturevalue("sa_name") diff --git a/sdk/python/tests/unit/permissions/auth/test_token_parser.py b/sdk/python/tests/unit/permissions/auth/test_token_parser.py index 6ae9094f81..cb153a17c9 100644 --- a/sdk/python/tests/unit/permissions/auth/test_token_parser.py +++ b/sdk/python/tests/unit/permissions/auth/test_token_parser.py @@ -21,13 +21,20 @@ ) @patch("feast.permissions.auth.oidc_token_parser.PyJWKClient.get_signing_key_from_jwt") @patch("feast.permissions.auth.oidc_token_parser.jwt.decode") +@patch("feast.permissions.oidc_service.OIDCDiscoveryService._fetch_discovery_data") def test_oidc_token_validation_success( - mock_jwt, mock_signing_key, mock_oauth2, oidc_config + mock_discovery_data, mock_jwt, mock_signing_key, mock_oauth2, oidc_config ): signing_key = MagicMock() signing_key.key = "a-key" mock_signing_key.return_value = signing_key + mock_discovery_data.return_value = { + "authorization_endpoint": "https://localhost:8080/realms/master/protocol/openid-connect/auth", + "token_endpoint": "https://localhost:8080/realms/master/protocol/openid-connect/token", + "jwks_uri": "https://localhost:8080/realms/master/protocol/openid-connect/certs", + } + user_data = { "preferred_username": "my-name", "resource_access": {_CLIENT_ID: 
{"roles": ["reader", "writer"]}}, From 19cf2222214d5d5b7db766e833720d7706caeb30 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Sun, 25 Aug 2024 10:23:47 +0400 Subject: [PATCH 26/96] chore: Mark tests using keycloak with xdist_group (#4436) * mark keycloak tests with xdist_group Signed-off-by: tokoko * apply changes to test-python-integration Signed-off-by: tokoko --------- Signed-off-by: tokoko Co-authored-by: tokoko --- Makefile | 4 ++-- sdk/python/tests/conftest.py | 6 +++++- .../feature_repos/universal/data_source_creator.py | 3 +++ .../feature_repos/universal/data_sources/file.py | 3 +++ 4 files changed, 13 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 7851c1f1c5..6ebab4e3be 100644 --- a/Makefile +++ b/Makefile @@ -86,14 +86,14 @@ test-python-unit: python -m pytest -n 8 --color=yes sdk/python/tests test-python-integration: - python -m pytest -n 4 --integration --color=yes --durations=10 --timeout=1200 --timeout_method=thread \ + python -m pytest -n 8 --integration --color=yes --durations=10 --timeout=1200 --timeout_method=thread --dist loadgroup \ -k "(not snowflake or not test_historical_features_main)" \ sdk/python/tests test-python-integration-local: FEAST_IS_LOCAL_TEST=True \ FEAST_LOCAL_ONLINE_CONTAINER=True \ - python -m pytest -n 4 --color=yes --integration --durations=10 --timeout=1200 --timeout_method=thread --dist loadgroup \ + python -m pytest -n 8 --color=yes --integration --durations=10 --timeout=1200 --timeout_method=thread --dist loadgroup \ -k "not test_lambda_materialization and not test_snowflake_materialization" \ sdk/python/tests diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index d40f699b6b..b5b3e2d9e5 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -279,7 +279,11 @@ def pytest_generate_tests(metafunc: pytest.Metafunc): c = IntegrationTestRepoConfig(**config) if c not in _config_cache: - _config_cache[c] = c + marks = [ + 
pytest.mark.xdist_group(name=m) + for m in c.offline_store_creator.xdist_groups() + ] + _config_cache[c] = pytest.param(c, marks=marks) configs.append(_config_cache[c]) else: diff --git a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py index f1cab21429..aa46160358 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py @@ -60,3 +60,6 @@ def create_logged_features_destination(self) -> LoggingDestination: @abstractmethod def teardown(self): raise NotImplementedError + + def xdist_groups() -> list[str]: + return [] diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py index adbb248a20..10d348c056 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py @@ -455,6 +455,9 @@ def __init__(self, project_name: str, *args, **kwargs): self.server_port: int = 0 self.proc = None + def xdist_groups() -> list[str]: + return ["keycloak"] + def setup(self, registry: RegistryConfig): parent_offline_config = super().create_offline_store_config() config = RepoConfig( From 20290ce28c513f705db1dbb6b0f719ba1846217f Mon Sep 17 00:00:00 2001 From: "Yang, Bo" Date: Sun, 25 Aug 2024 06:58:44 -0700 Subject: [PATCH 27/96] fix: Locate feature_store.yaml from __file__ (#4443) fix: locate feature_store.yaml from __file__ Signed-off-by: Yang, Bo --- .../feast/templates/postgres/feature_repo/test_workflow.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/templates/postgres/feature_repo/test_workflow.py b/sdk/python/feast/templates/postgres/feature_repo/test_workflow.py index f657aba15f..30927d3c7a 100644 --- 
a/sdk/python/feast/templates/postgres/feature_repo/test_workflow.py +++ b/sdk/python/feast/templates/postgres/feature_repo/test_workflow.py @@ -1,3 +1,4 @@ +import os.path import subprocess from datetime import datetime @@ -8,7 +9,7 @@ def run_demo(): - store = FeatureStore(repo_path=".") + store = FeatureStore(repo_path=os.path.dirname(__file__)) print("\n--- Run feast apply to setup feature store on Postgres ---") subprocess.run(["feast", "apply"]) From 34238d2a0bfe9dbad753fec9613c83d848b1a520 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Sun, 25 Aug 2024 10:19:54 -0400 Subject: [PATCH 28/96] feat: Update roadmap.md (#4445) --- docs/roadmap.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/roadmap.md b/docs/roadmap.md index e1ba6f3333..ff6549a3cb 100644 --- a/docs/roadmap.md +++ b/docs/roadmap.md @@ -42,6 +42,7 @@ The list below contains the functionality that contributors are planning to deve * [x] On-demand Transformations (Beta release. See [RFC](https://docs.google.com/document/d/1lgfIw0Drc65LpaxbUu49RCeJgMew547meSJttnUqz7c/edit#)) * [x] Streaming Transformations (Alpha release. See [RFC](https://docs.google.com/document/d/1UzEyETHUaGpn0ap4G82DHluiCj7zEbrQLkJJkKSv4e8/edit)) * [ ] Batch transformation (In progress. See [RFC](https://docs.google.com/document/d/1964OkzuBljifDvkV-0fakp2uaijnVzdwWNGdz7Vz50A/edit)) + * [ ] Persistent On-demand Transformations (Beta release. 
See [GitHub Issue](https://github.com/feast-dev/feast/issues/4376)) * **Streaming** * [x] [Custom streaming ingestion job support](https://docs.feast.dev/how-to-guides/customizing-feast/creating-a-custom-provider) * [x] [Push based streaming data ingestion to online store](https://docs.feast.dev/reference/data-sources/push) @@ -63,3 +64,6 @@ The list below contains the functionality that contributors are planning to deve * [x] Amundsen integration (see [Feast extractor](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/feast_extractor.py)) * [x] DataHub integration (see [DataHub Feast docs](https://datahubproject.io/docs/generated/ingestion/sources/feast/)) * [x] Feast Web UI (Beta release. See [docs](https://docs.feast.dev/reference/alpha-web-ui)) + * [ ] Feast Lineage Explorer +* **Natural Language Processing** + * [x] Vector Search (Alpha release. See [RFC](https://docs.google.com/document/d/18IWzLEA9i2lDWnbfbwXnMCg3StlqaLVI-uRpQjr_Vos/edit#heading=h.9gaqqtox9jg6)) From 55a61e8de4b6240670588c7e212fce2fd473f2ce Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Mon, 26 Aug 2024 14:30:39 +0400 Subject: [PATCH 29/96] chore: Remove Rockset from feast (#4434) --- docs/SUMMARY.md | 1 - docs/reference/online-stores/README.md | 4 - docs/reference/online-stores/rockset.md | 84 --- ...feast.infra.feature_servers.aws_lambda.rst | 29 - ...ast.infra.feature_servers.gcp_cloudrun.rst | 29 - ....infra.materialization.contrib.bytewax.rst | 29 - .../feast.infra.materialization.lambda.rst | 29 - ...ne_stores.contrib.rockset_online_store.rst | 21 - .../feast.infra.online_stores.contrib.rst | 1 - .../feast.infra.registry.contrib.postgres.rst | 21 - .../source/feast.infra.registry_stores.rst | 21 - .../docs/source/feast.permissions.auth.rst | 61 ++ .../docs/source/feast.permissions.client.rst | 69 +++ sdk/python/docs/source/feast.permissions.rst | 103 ++++ .../docs/source/feast.permissions.server.rst | 69 +++ 
.../docs/source/feast.protos.feast.core.rst | 32 ++ sdk/python/docs/source/feast.rst | 16 +- sdk/python/feast/cli.py | 1 - .../contrib/rockset_online_store/__init__.py | 0 .../contrib/rockset_online_store/rockset.py | 520 ------------------ sdk/python/feast/repo_config.py | 1 - sdk/python/feast/templates/rockset/README.md | 21 - .../feast/templates/rockset/__init__.py | 0 .../feast/templates/rockset/bootstrap.py | 30 - .../rockset/feature_repo/feature_store.yaml | 8 - .../requirements/py3.10-ci-requirements.txt | 162 +----- .../requirements/py3.10-requirements.txt | 89 +-- .../requirements/py3.11-ci-requirements.txt | 157 +----- .../requirements/py3.11-requirements.txt | 89 +-- .../requirements/py3.9-ci-requirements.txt | 161 +----- .../requirements/py3.9-requirements.txt | 89 +-- .../feature_repos/repo_configuration.py | 10 - setup.py | 6 - 33 files changed, 532 insertions(+), 1431 deletions(-) delete mode 100644 docs/reference/online-stores/rockset.md delete mode 100644 sdk/python/docs/source/feast.infra.feature_servers.aws_lambda.rst delete mode 100644 sdk/python/docs/source/feast.infra.feature_servers.gcp_cloudrun.rst delete mode 100644 sdk/python/docs/source/feast.infra.materialization.contrib.bytewax.rst delete mode 100644 sdk/python/docs/source/feast.infra.materialization.lambda.rst delete mode 100644 sdk/python/docs/source/feast.infra.online_stores.contrib.rockset_online_store.rst delete mode 100644 sdk/python/docs/source/feast.infra.registry.contrib.postgres.rst delete mode 100644 sdk/python/docs/source/feast.infra.registry_stores.rst create mode 100644 sdk/python/docs/source/feast.permissions.auth.rst create mode 100644 sdk/python/docs/source/feast.permissions.client.rst create mode 100644 sdk/python/docs/source/feast.permissions.rst create mode 100644 sdk/python/docs/source/feast.permissions.server.rst delete mode 100644 sdk/python/feast/infra/online_stores/contrib/rockset_online_store/__init__.py delete mode 100644 
sdk/python/feast/infra/online_stores/contrib/rockset_online_store/rockset.py delete mode 100644 sdk/python/feast/templates/rockset/README.md delete mode 100644 sdk/python/feast/templates/rockset/__init__.py delete mode 100644 sdk/python/feast/templates/rockset/bootstrap.py delete mode 100644 sdk/python/feast/templates/rockset/feature_repo/feature_store.yaml diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index c8e313850f..1c4cece799 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -108,7 +108,6 @@ * [PostgreSQL (contrib)](reference/online-stores/postgres.md) * [Cassandra + Astra DB (contrib)](reference/online-stores/cassandra.md) * [MySQL (contrib)](reference/online-stores/mysql.md) - * [Rockset (contrib)](reference/online-stores/rockset.md) * [Hazelcast (contrib)](reference/online-stores/hazelcast.md) * [ScyllaDB (contrib)](reference/online-stores/scylladb.md) * [SingleStore (contrib)](reference/online-stores/singlestore.md) diff --git a/docs/reference/online-stores/README.md b/docs/reference/online-stores/README.md index bf5419b249..cdb9c37c1d 100644 --- a/docs/reference/online-stores/README.md +++ b/docs/reference/online-stores/README.md @@ -50,10 +50,6 @@ Please see [Online Store](../../getting-started/components/online-store.md) for [mysql.md](mysql.md) {% endcontent-ref %} -{% content-ref url="rockset.md" %} -[rockset.md](rockset.md) -{% endcontent-ref %} - {% content-ref url="hazelcast.md" %} [hazelcast.md](hazelcast.md) {% endcontent-ref %} diff --git a/docs/reference/online-stores/rockset.md b/docs/reference/online-stores/rockset.md deleted file mode 100644 index 082bddf37b..0000000000 --- a/docs/reference/online-stores/rockset.md +++ /dev/null @@ -1,84 +0,0 @@ -# Rockset (contrib) - -## Description - -In Alpha Development. - -The [Rockset](https://rockset.com/demo-signup/) online store provides support for materializing feature values within a Rockset collection in order to serve features in real-time. 
- -* Each document is uniquely identified by its '_id' value. Repeated inserts into the same document '_id' will result in an upsert. - -Rockset indexes all columns allowing for quick per feature look up and also allows for a dynamic typed schema that can change based on any new requirements. API Keys can be found in the Rockset console. -You can also find host urls on the same tab by clicking "View Region Endpoint Urls". - -Data Model Used Per Doc - -``` -{ - "_id": (STRING) Unique Identifier for the feature document. - : (STRING) Feature Values Mapped by Feature Name. Feature - values stored as a serialized hex string. - .... - "event_ts": (STRING) ISO Stringified Timestamp. - "created_ts": (STRING) ISO Stringified Timestamp. -} -``` - - -## Example - -```yaml -project: my_feature_app -registry: data/registry.db -provider: local -online_store: - ## Basic Configs ## - - # If apikey or host is left blank the driver will try to pull - # these values from environment variables ROCKSET_APIKEY and - # ROCKSET_APISERVER respectively. - type: rockset - api_key: - host: - - ## Advanced Configs ## - - # Batch size of records that will be turned per page when - # paginating a batched read. - # - # read_pagination_batch_size: 100 - - # The amount of time, in seconds, we will wait for the - # collection to become visible to the API. - # - # collection_created_timeout_secs: 60 - - # The amount of time, in seconds, we will wait for the - # collection to enter READY state. - # - # collection_ready_timeout_secs: 1800 - - # Whether to wait for all writes to be flushed from log - # and queryable before returning write as completed. If - # False, documents that are written may not be seen - # immediately in subsequent reads. - # - # fence_all_writes: True - - # The amount of time we will wait, in seconds, for the - # write fence to be passed - # - # fence_timeout_secs: 600 - - # Initial backoff, in seconds, we will wait between - # requests when polling for a response. 
- # - # initial_request_backoff_secs: 2 - - # Initial backoff, in seconds, we will wait between - # requests when polling for a response. - # max_request_backoff_secs: 30 - - # The max amount of times we will retry a failed request. - # max_request_attempts: 10000 -``` diff --git a/sdk/python/docs/source/feast.infra.feature_servers.aws_lambda.rst b/sdk/python/docs/source/feast.infra.feature_servers.aws_lambda.rst deleted file mode 100644 index de90bfc000..0000000000 --- a/sdk/python/docs/source/feast.infra.feature_servers.aws_lambda.rst +++ /dev/null @@ -1,29 +0,0 @@ -feast.infra.feature\_servers.aws\_lambda package -================================================ - -Submodules ----------- - -feast.infra.feature\_servers.aws\_lambda.app module ---------------------------------------------------- - -.. automodule:: feast.infra.feature_servers.aws_lambda.app - :members: - :undoc-members: - :show-inheritance: - -feast.infra.feature\_servers.aws\_lambda.config module ------------------------------------------------------- - -.. automodule:: feast.infra.feature_servers.aws_lambda.config - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. automodule:: feast.infra.feature_servers.aws_lambda - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.feature_servers.gcp_cloudrun.rst b/sdk/python/docs/source/feast.infra.feature_servers.gcp_cloudrun.rst deleted file mode 100644 index f7fdaf5b36..0000000000 --- a/sdk/python/docs/source/feast.infra.feature_servers.gcp_cloudrun.rst +++ /dev/null @@ -1,29 +0,0 @@ -feast.infra.feature\_servers.gcp\_cloudrun package -================================================== - -Submodules ----------- - -feast.infra.feature\_servers.gcp\_cloudrun.app module ------------------------------------------------------ - -.. 
automodule:: feast.infra.feature_servers.gcp_cloudrun.app - :members: - :undoc-members: - :show-inheritance: - -feast.infra.feature\_servers.gcp\_cloudrun.config module --------------------------------------------------------- - -.. automodule:: feast.infra.feature_servers.gcp_cloudrun.config - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. automodule:: feast.infra.feature_servers.gcp_cloudrun - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.materialization.contrib.bytewax.rst b/sdk/python/docs/source/feast.infra.materialization.contrib.bytewax.rst deleted file mode 100644 index 86fbaa6151..0000000000 --- a/sdk/python/docs/source/feast.infra.materialization.contrib.bytewax.rst +++ /dev/null @@ -1,29 +0,0 @@ -feast.infra.materialization.contrib.bytewax package -================================================================= - -Submodules ----------- - -feast.infra.materialization.contrib.bytewax.bytewax\_materialization\_engine ----------------------------------------------------------------------- - -.. automodule:: feast.infra.materialization.contrib.bytewax.bytewax_materialization_engine - :members: - :undoc-members: - :show-inheritance: - -feast.infra.materialization.contrib.bytewax.bytewax\_materialization\_job ----------------------------------------------------------------------- - -.. automodule:: feast.infra.materialization.contrib.bytewax.bytewax_materialization_job - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. 
automodule:: feast.infra.materialization.contrib.bytewax - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.materialization.lambda.rst b/sdk/python/docs/source/feast.infra.materialization.lambda.rst deleted file mode 100644 index 7ca1d44314..0000000000 --- a/sdk/python/docs/source/feast.infra.materialization.lambda.rst +++ /dev/null @@ -1,29 +0,0 @@ -feast.infra.materialization.lambda package -========================================== - -Submodules ----------- - -feast.infra.materialization.lambda.app module ---------------------------------------------- - -.. automodule:: feast.infra.materialization.lambda.app - :members: - :undoc-members: - :show-inheritance: - -feast.infra.materialization.lambda.lambda\_engine module --------------------------------------------------------- - -.. automodule:: feast.infra.materialization.lambda.lambda_engine - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. automodule:: feast.infra.materialization.lambda - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.online_stores.contrib.rockset_online_store.rst b/sdk/python/docs/source/feast.infra.online_stores.contrib.rockset_online_store.rst deleted file mode 100644 index b3de7479a0..0000000000 --- a/sdk/python/docs/source/feast.infra.online_stores.contrib.rockset_online_store.rst +++ /dev/null @@ -1,21 +0,0 @@ -feast.infra.online\_stores.contrib.rockset\_online\_store package -================================================================= - -Submodules ----------- - -feast.infra.online\_stores.contrib.rockset\_online\_store.rockset module ------------------------------------------------------------------------- - -.. automodule:: feast.infra.online_stores.contrib.rockset_online_store.rockset - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. 
automodule:: feast.infra.online_stores.contrib.rockset_online_store - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.online_stores.contrib.rst b/sdk/python/docs/source/feast.infra.online_stores.contrib.rst index 9d301fcd0d..8c9dd7e549 100644 --- a/sdk/python/docs/source/feast.infra.online_stores.contrib.rst +++ b/sdk/python/docs/source/feast.infra.online_stores.contrib.rst @@ -12,7 +12,6 @@ Subpackages feast.infra.online_stores.contrib.hbase_online_store feast.infra.online_stores.contrib.ikv_online_store feast.infra.online_stores.contrib.mysql_online_store - feast.infra.online_stores.contrib.rockset_online_store Submodules ---------- diff --git a/sdk/python/docs/source/feast.infra.registry.contrib.postgres.rst b/sdk/python/docs/source/feast.infra.registry.contrib.postgres.rst deleted file mode 100644 index 3f31990805..0000000000 --- a/sdk/python/docs/source/feast.infra.registry.contrib.postgres.rst +++ /dev/null @@ -1,21 +0,0 @@ -feast.infra.registry.contrib.postgres package -============================================= - -Submodules ----------- - -feast.infra.registry.contrib.postgres.postgres\_registry\_store module ----------------------------------------------------------------------- - -.. automodule:: feast.infra.registry.contrib.postgres.postgres_registry_store - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. automodule:: feast.infra.registry.contrib.postgres - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.registry_stores.rst b/sdk/python/docs/source/feast.infra.registry_stores.rst deleted file mode 100644 index cff02fa338..0000000000 --- a/sdk/python/docs/source/feast.infra.registry_stores.rst +++ /dev/null @@ -1,21 +0,0 @@ -feast.infra.registry\_stores package -==================================== - -Submodules ----------- - -feast.infra.registry\_stores.sql module ---------------------------------------- - -.. 
automodule:: feast.infra.registry_stores.sql - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. automodule:: feast.infra.registry_stores - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.permissions.auth.rst b/sdk/python/docs/source/feast.permissions.auth.rst new file mode 100644 index 0000000000..3826bfc217 --- /dev/null +++ b/sdk/python/docs/source/feast.permissions.auth.rst @@ -0,0 +1,61 @@ +feast.permissions.auth package +============================== + +Submodules +---------- + +feast.permissions.auth.auth\_manager module +------------------------------------------- + +.. automodule:: feast.permissions.auth.auth_manager + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.auth.auth\_type module +---------------------------------------- + +.. automodule:: feast.permissions.auth.auth_type + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.auth.kubernetes\_token\_parser module +------------------------------------------------------- + +.. automodule:: feast.permissions.auth.kubernetes_token_parser + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.auth.oidc\_token\_parser module +------------------------------------------------- + +.. automodule:: feast.permissions.auth.oidc_token_parser + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.auth.token\_extractor module +---------------------------------------------- + +.. automodule:: feast.permissions.auth.token_extractor + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.auth.token\_parser module +------------------------------------------- + +.. automodule:: feast.permissions.auth.token_parser + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: feast.permissions.auth + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.permissions.client.rst b/sdk/python/docs/source/feast.permissions.client.rst new file mode 100644 index 0000000000..f346801210 --- /dev/null +++ b/sdk/python/docs/source/feast.permissions.client.rst @@ -0,0 +1,69 @@ +feast.permissions.client package +================================ + +Submodules +---------- + +feast.permissions.client.arrow\_flight\_auth\_interceptor module +---------------------------------------------------------------- + +.. automodule:: feast.permissions.client.arrow_flight_auth_interceptor + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.client.auth\_client\_manager module +----------------------------------------------------- + +.. automodule:: feast.permissions.client.auth_client_manager + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.client.auth\_client\_manager\_factory module +-------------------------------------------------------------- + +.. automodule:: feast.permissions.client.auth_client_manager_factory + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.client.grpc\_client\_auth\_interceptor module +--------------------------------------------------------------- + +.. automodule:: feast.permissions.client.grpc_client_auth_interceptor + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.client.http\_auth\_requests\_wrapper module +------------------------------------------------------------- + +.. automodule:: feast.permissions.client.http_auth_requests_wrapper + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.client.kubernetes\_auth\_client\_manager module +----------------------------------------------------------------- + +.. 
automodule:: feast.permissions.client.kubernetes_auth_client_manager + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.client.oidc\_authentication\_client\_manager module +--------------------------------------------------------------------- + +.. automodule:: feast.permissions.client.oidc_authentication_client_manager + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.permissions.client + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.permissions.rst b/sdk/python/docs/source/feast.permissions.rst new file mode 100644 index 0000000000..8c33ab6273 --- /dev/null +++ b/sdk/python/docs/source/feast.permissions.rst @@ -0,0 +1,103 @@ +feast.permissions package +========================= + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + feast.permissions.auth + feast.permissions.client + feast.permissions.server + +Submodules +---------- + +feast.permissions.action module +------------------------------- + +.. automodule:: feast.permissions.action + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.auth\_model module +------------------------------------ + +.. automodule:: feast.permissions.auth_model + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.decision module +--------------------------------- + +.. automodule:: feast.permissions.decision + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.decorator module +---------------------------------- + +.. automodule:: feast.permissions.decorator + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.enforcer module +--------------------------------- + +.. automodule:: feast.permissions.enforcer + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.matcher module +-------------------------------- + +.. 
automodule:: feast.permissions.matcher + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.permission module +----------------------------------- + +.. automodule:: feast.permissions.permission + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.policy module +------------------------------- + +.. automodule:: feast.permissions.policy + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.security\_manager module +------------------------------------------ + +.. automodule:: feast.permissions.security_manager + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.user module +----------------------------- + +.. automodule:: feast.permissions.user + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.permissions + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.permissions.server.rst b/sdk/python/docs/source/feast.permissions.server.rst new file mode 100644 index 0000000000..33a9d8df64 --- /dev/null +++ b/sdk/python/docs/source/feast.permissions.server.rst @@ -0,0 +1,69 @@ +feast.permissions.server package +================================ + +Submodules +---------- + +feast.permissions.server.arrow module +------------------------------------- + +.. automodule:: feast.permissions.server.arrow + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.server.arrow\_flight\_token\_extractor module +--------------------------------------------------------------- + +.. automodule:: feast.permissions.server.arrow_flight_token_extractor + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.server.grpc module +------------------------------------ + +.. automodule:: feast.permissions.server.grpc + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.server.grpc\_token\_extractor module +------------------------------------------------------ + +.. 
automodule:: feast.permissions.server.grpc_token_extractor + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.server.rest module +------------------------------------ + +.. automodule:: feast.permissions.server.rest + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.server.rest\_token\_extractor module +------------------------------------------------------ + +.. automodule:: feast.permissions.server.rest_token_extractor + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.server.utils module +------------------------------------- + +.. automodule:: feast.permissions.server.utils + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.permissions.server + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.protos.feast.core.rst b/sdk/python/docs/source/feast.protos.feast.core.rst index a8691c20fe..9d079953c1 100644 --- a/sdk/python/docs/source/feast.protos.feast.core.rst +++ b/sdk/python/docs/source/feast.protos.feast.core.rst @@ -212,6 +212,38 @@ feast.protos.feast.core.OnDemandFeatureView\_pb2\_grpc module :undoc-members: :show-inheritance: +feast.protos.feast.core.Permission\_pb2 module +---------------------------------------------- + +.. automodule:: feast.protos.feast.core.Permission_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.Permission\_pb2\_grpc module +---------------------------------------------------- + +.. automodule:: feast.protos.feast.core.Permission_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.Policy\_pb2 module +------------------------------------------ + +.. automodule:: feast.protos.feast.core.Policy_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.Policy\_pb2\_grpc module +------------------------------------------------ + +.. 
automodule:: feast.protos.feast.core.Policy_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + feast.protos.feast.core.Registry\_pb2 module -------------------------------------------- diff --git a/sdk/python/docs/source/feast.rst b/sdk/python/docs/source/feast.rst index 83137574dd..b8c04ebde6 100644 --- a/sdk/python/docs/source/feast.rst +++ b/sdk/python/docs/source/feast.rst @@ -52,6 +52,14 @@ feast.cli module :undoc-members: :show-inheritance: +feast.cli\_utils module +----------------------- + +.. automodule:: feast.cli_utils + :members: + :undoc-members: + :show-inheritance: + feast.constants module ---------------------- @@ -252,14 +260,6 @@ feast.proto\_json module :undoc-members: :show-inheritance: -feast.prova module ------------------- - -.. automodule:: feast.prova - :members: - :undoc-members: - :show-inheritance: - feast.registry\_server module ----------------------------- diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py index 737704dd36..0a12d1dcbc 100644 --- a/sdk/python/feast/cli.py +++ b/sdk/python/feast/cli.py @@ -614,7 +614,6 @@ def materialize_incremental_command(ctx: click.Context, end_ts: str, views: List "postgres", "hbase", "cassandra", - "rockset", "hazelcast", "ikv", ], diff --git a/sdk/python/feast/infra/online_stores/contrib/rockset_online_store/__init__.py b/sdk/python/feast/infra/online_stores/contrib/rockset_online_store/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/sdk/python/feast/infra/online_stores/contrib/rockset_online_store/rockset.py b/sdk/python/feast/infra/online_stores/contrib/rockset_online_store/rockset.py deleted file mode 100644 index 31de7f9e9b..0000000000 --- a/sdk/python/feast/infra/online_stores/contrib/rockset_online_store/rockset.py +++ /dev/null @@ -1,520 +0,0 @@ -# Copyright 2022 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -import logging -import os -import random -import time -from datetime import datetime -from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple, cast - -import requests -from rockset.exceptions import BadRequestException, RocksetException -from rockset.models import QueryRequestSql -from rockset.query_paginator import QueryPaginator -from rockset.rockset_client import RocksetClient - -from feast.entity import Entity -from feast.feature_view import FeatureView -from feast.infra.online_stores.helpers import compute_entity_id -from feast.infra.online_stores.online_store import OnlineStore -from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto -from feast.protos.feast.types.Value_pb2 import Value as ValueProto -from feast.repo_config import FeastConfigBaseModel, RepoConfig - -logger = logging.getLogger(__name__) - - -class RocksetOnlineStoreConfig(FeastConfigBaseModel): - """Online store config for Rockset store""" - - type: Literal["rockset"] = "rockset" - """Online store type selector""" - - api_key: Optional[str] = None - """Api Key to be used for Rockset Account. If not set the env var ROCKSET_APIKEY will be used.""" - - host: Optional[str] = None - """The Host Url for Rockset requests. 
If not set the env var ROCKSET_APISERVER will be used.""" - - read_pagination_batch_size: int = 100 - """Batch size of records that will be turned per page when paginating a batched read""" - - collection_created_timeout_secs: int = 60 - """The amount of time, in seconds, we will wait for the collection to become visible to the API""" - - collection_ready_timeout_secs: int = 30 * 60 - """The amount of time, in seconds, we will wait for the collection to enter READY state""" - - fence_all_writes: bool = True - """Whether to wait for all writes to be flushed from log and queryable. If False, documents that are written may not be seen immediately in subsequent reads""" - - fence_timeout_secs: int = 10 * 60 - """The amount of time we will wait, in seconds, for the write fence to be passed""" - - initial_request_backoff_secs: int = 2 - """Initial backoff, in seconds, we will wait between requests when polling for a response""" - - max_request_backoff_secs: int = 30 - """Initial backoff, in seconds, we will wait between requests when polling for a response""" - - max_request_attempts: int = 10 * 1000 - """The max amount of times we will retry a failed request""" - - -class RocksetOnlineStore(OnlineStore): - """ - Rockset implementation of the online store interface. - - Attributes: - _rockset_client: Rockset openapi client. - """ - - _rockset_client = None - - def online_write_batch( - self, - config: RepoConfig, - table: FeatureView, - data: List[ - Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] - ], - progress: Optional[Callable[[int], Any]], - ) -> None: - """ - Write a batch of feature rows to online Rockset store. - - Args: - config: The RepoConfig for the current FeatureStore. - table: Feast FeatureView. - data: a list of quadruplets containing Feature data. Each quadruplet contains an Entity Key, - a dict containing feature values, an event timestamp for the row, and - the created timestamp for the row if it exists. 
- progress: Optional function to be called once every mini-batch of rows is written to - the online store. Can be used to display progress. - """ - - online_config = config.online_store - assert isinstance(online_config, RocksetOnlineStoreConfig) - - rs = self.get_rockset_client(online_config) - collection_name = self.get_collection_name(config, table) - - # We need to deduplicate on entity_id and we will save the latest timestamp version. - dedup_dict = {} - for feature_vals in data: - entity_key, features, timestamp, created_ts = feature_vals - serialized_key = compute_entity_id( - entity_key=entity_key, - entity_key_serialization_version=config.entity_key_serialization_version, - ) - - if serialized_key not in dedup_dict: - dedup_dict[serialized_key] = feature_vals - continue - - # If the entity already existings in the dictionary ignore the entry if it has a lower timestamp. - if timestamp <= dedup_dict[serialized_key][2]: - continue - - dedup_dict[serialized_key] = feature_vals - - request_batch = [] - for serialized_key, feature_vals in dedup_dict.items(): - document = {} - entity_key, features, timestamp, created_ts = feature_vals - document["_id"] = serialized_key - - # Rockset python client currently does not handle datetime correctly and will convert - # to string instead of native Rockset DATETIME. This will be fixed, but until then we - # use isoformat. - document["event_ts"] = timestamp.isoformat() - document["created_ts"] = ( - "" if created_ts is None else created_ts.isoformat() - ) - for k, v in features.items(): - # Rockset client currently does not support bytes type. - document[k] = v.SerializeToString().hex() - - # TODO: Implement async batching with retries. 
- request_batch.append(document) - - if progress: - progress(1) - - resp = rs.Documents.add_documents( - collection=collection_name, data=request_batch - ) - if online_config.fence_all_writes: - self.wait_for_fence(rs, collection_name, resp["last_offset"], online_config) - - return None - - def online_read( - self, - config: RepoConfig, - table: FeatureView, - entity_keys: List[EntityKeyProto], - requested_features: Optional[List[str]] = None, - ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: - """ - Retrieve feature values from the online Rockset store. - - Args: - config: The RepoConfig for the current FeatureStore. - table: Feast FeatureView. - entity_keys: a list of entity keys that should be read from the FeatureStore. - """ - online_config = config.online_store - assert isinstance(online_config, RocksetOnlineStoreConfig) - - rs = self.get_rockset_client(online_config) - collection_name = self.get_collection_name(config, table) - - feature_list = "" - if requested_features is not None: - feature_list = ",".join(requested_features) - - entity_serialized_key_list = [ - compute_entity_id( - k, - entity_key_serialization_version=config.entity_key_serialization_version, - ) - for k in entity_keys - ] - - entity_query_str = ",".join( - "'{id}'".format(id=s) for s in entity_serialized_key_list - ) - - query_str = f""" - SELECT - "_id", - "event_ts", - {feature_list} - FROM - {collection_name} - WHERE - "_id" IN ({entity_query_str}) - """ - - feature_set = set() - if requested_features: - feature_set.update(requested_features) - - result_map = {} - for page in QueryPaginator( - rs, - rs.Queries.query( - sql=QueryRequestSql( - query=query_str, - paginate=True, - initial_paginate_response_doc_count=online_config.read_pagination_batch_size, - ) - ), - ): - for doc in page: - result = {} - for k, v in doc.items(): - if k not in feature_set: - # We want to skip deserializing values that are not feature values like bookeeping values. 
- continue - - val = ValueProto() - - # TODO: Remove bytes <-> string parsing once client supports bytes. - val.ParseFromString(bytes.fromhex(v)) - result[k] = val - result_map[doc["_id"]] = ( - datetime.fromisoformat(doc["event_ts"]), - result, - ) - - results_list: List[ - Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]] - ] = [] - for key in entity_serialized_key_list: - if key not in result_map: - # If not found, we add a gap to let the client know. - results_list.append((None, None)) - continue - - results_list.append(result_map[key]) - - return results_list - - def update( - self, - config: RepoConfig, - tables_to_delete: Sequence[FeatureView], - tables_to_keep: Sequence[FeatureView], - entities_to_delete: Sequence[Entity], - entities_to_keep: Sequence[Entity], - partial: bool, - ): - """ - Update tables from the Rockset Online Store. - - Args: - config: The RepoConfig for the current FeatureStore. - tables_to_delete: Tables to delete from the Rockset Online Store. - tables_to_keep: Tables to keep in the Rockset Online Store. - """ - online_config = config.online_store - assert isinstance(online_config, RocksetOnlineStoreConfig) - rs = self.get_rockset_client(online_config) - - created_collections = [] - for table_instance in tables_to_keep: - try: - collection_name = self.get_collection_name(config, table_instance) - rs.Collections.create_file_upload_collection(name=collection_name) - created_collections.append(collection_name) - except BadRequestException as e: - if self.parse_request_error_type(e) == "AlreadyExists": - # Table already exists nothing to do. We should still make sure it is ready though. - created_collections.append(collection_name) - continue - raise - - for table_to_delete in tables_to_delete: - self.delete_collection( - rs, collection_name=self.get_collection_name(config, table_to_delete) - ) - - # Now wait for all collections to be READY. 
- self.wait_for_ready_collections( - rs, created_collections, online_config=online_config - ) - - def teardown( - self, - config: RepoConfig, - tables: Sequence[FeatureView], - entities: Sequence[Entity], - ): - """ - Delete all collections from the Rockset Online Store. - - Args: - config: The RepoConfig for the current FeatureStore. - tables: Tables to delete from the feature repo. - """ - online_config = config.online_store - assert isinstance(online_config, RocksetOnlineStoreConfig) - rs = self.get_rockset_client(online_config) - for table in tables: - self.delete_collection( - rs, collection_name=self.get_collection_name(config, table) - ) - - def get_rockset_client( - self, onlineConfig: RocksetOnlineStoreConfig - ) -> RocksetClient: - """ - Fetches the RocksetClient to be used for all requests for this online store based on the api - configuration in the provided config. If no configuration provided local ENV vars will be used. - - Args: - onlineConfig: The RocksetOnlineStoreConfig associated with this online store. - """ - if self._rockset_client is not None: - return self._rockset_client - - _api_key = ( - os.getenv("ROCKSET_APIKEY") - if isinstance(onlineConfig.api_key, type(None)) - else onlineConfig.api_key - ) - _host = ( - os.getenv("ROCKSET_APISERVER") - if isinstance(onlineConfig.host, type(None)) - else onlineConfig.host - ) - self._rockset_client = RocksetClient(host=_host, api_key=_api_key) - return self._rockset_client - - @staticmethod - def delete_collection(rs: RocksetClient, collection_name: str): - """ - Deletes the collection whose name was provided - - Args: - rs: The RocksetClient to be used for the deletion. - collection_name: The name of the collection to be deleted. 
- """ - - try: - rs.Collections.delete(collection=collection_name) - except RocksetException as e: - if RocksetOnlineStore.parse_request_error_type(e) == "NotFound": - logger.warning( - f"Trying to delete collection that does not exist {collection_name}" - ) - return - raise - - @staticmethod - def get_collection_name(config: RepoConfig, feature_view: FeatureView) -> str: - """ - Returns the collection name based on the provided config and FeatureView. - - Args: - config: RepoConfig for the online store. - feature_view: FeatureView that is backed by the returned collection name. - - Returns: - The collection name as a string. - """ - project_val = config.project if config.project else "feast" - table_name = feature_view.name if feature_view.name else "feature_store" - return f"{project_val}_{table_name}" - - @staticmethod - def parse_request_error_type(e: RocksetException) -> str: - """ - Parse a throw RocksetException. Will return a string representing the type of error that was thrown. - - Args: - e: The RockException that is being parsed. - - Returns: - Error type parsed as a string. - """ - - body_dict = json.loads(e.body) - return body_dict["type"] - - @staticmethod - def wait_for_fence( - rs: RocksetClient, - collection_name: str, - last_offset: str, - online_config: RocksetOnlineStoreConfig, - ): - """ - Waits until 'last_offset' is flushed and values are ready to be read. If wait lasts longer than the timeout specified in config - a timeout exception will be throw. - - Args: - rs: Rockset client that will be used to make all requests. - collection_name: Collection associated with the offsets we are waiting for. - last_offset: The actual offsets we are waiting to be flushed. - online_config: The config that will be used to determine timeouts and backout configurations. 
- """ - - resource_path = ( - f"/v1/orgs/self/ws/commons/collections/{collection_name}/offsets/commit" - ) - request = {"name": [last_offset]} - - headers = {} - headers["Content-Type"] = "application/json" - headers["Authorization"] = f"ApiKey {rs.api_client.configuration.api_key}" - - t_start = time.time() - for num_attempts in range(online_config.max_request_attempts): - delay = time.time() - t_start - resp = requests.post( - url=f"{rs.api_client.configuration.host}{resource_path}", - json=request, - headers=headers, - ) - - if resp.status_code == 200 and resp.json()["data"]["passed"] is True: - break - - if delay > online_config.fence_timeout_secs: - raise TimeoutError( - f"Write to collection {collection_name} at offset {last_offset} was not available for read after {delay} secs" - ) - - if resp.status_code == 429: - RocksetOnlineStore.backoff_sleep(num_attempts, online_config) - continue - elif resp.status_code != 200: - raise Exception(f"[{resp.status_code}]: {resp.reason}") - - RocksetOnlineStore.backoff_sleep(num_attempts, online_config) - - @staticmethod - def wait_for_ready_collections( - rs: RocksetClient, - collection_names: List[str], - online_config: RocksetOnlineStoreConfig, - ): - """ - Waits until all collections provided have entered READY state and can accept new documents. If wait - lasts longer than timeout a TimeoutError exception will be thrown. - - Args: - rs: Rockset client that will be used to make all requests. - collection_names: All collections that we will wait for. - timeout: The max amount of time we will wait for the collections to become READY. - """ - - t_start = time.time() - for cname in collection_names: - # We will wait until the provided timeout for all collections to become READY. 
- for num_attempts in range(online_config.max_request_attempts): - resp = None - delay = time.time() - t_start - try: - resp = rs.Collections.get(collection=cname) - except RocksetException as e: - error_type = RocksetOnlineStore.parse_request_error_type(e) - if error_type == "NotFound": - if delay > online_config.collection_created_timeout_secs: - raise TimeoutError( - f"Collection {cname} failed to become visible after {delay} seconds" - ) - elif error_type == "RateLimitExceeded": - RocksetOnlineStore.backoff_sleep(num_attempts, online_config) - continue - else: - raise - - if ( - resp is not None - and cast(Dict[str, dict], resp)["data"]["status"] == "READY" - ): - break - - if delay > online_config.collection_ready_timeout_secs: - raise TimeoutError( - f"Collection {cname} failed to become ready after {delay} seconds" - ) - - RocksetOnlineStore.backoff_sleep(num_attempts, online_config) - - @staticmethod - def backoff_sleep(attempts: int, online_config: RocksetOnlineStoreConfig): - """ - Sleep for the needed amount of time based on the number of request attempts. - - Args: - backoff: The amount of time we will sleep for - max_backoff: The max amount of time we should ever backoff for. - rate_limited: Whether this method is being called as part of a rate limited request. - """ - - default_backoff = online_config.initial_request_backoff_secs - - # Full jitter, exponential backoff. 
- backoff = random.uniform( - default_backoff, - min(default_backoff << attempts, online_config.max_request_backoff_secs), - ) - time.sleep(backoff) diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index a270e6795c..199ef31412 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -61,7 +61,6 @@ "hbase": "feast.infra.online_stores.contrib.hbase_online_store.hbase.HbaseOnlineStore", "cassandra": "feast.infra.online_stores.contrib.cassandra_online_store.cassandra_online_store.CassandraOnlineStore", "mysql": "feast.infra.online_stores.contrib.mysql_online_store.mysql.MySQLOnlineStore", - "rockset": "feast.infra.online_stores.contrib.rockset_online_store.rockset.RocksetOnlineStore", "hazelcast": "feast.infra.online_stores.contrib.hazelcast_online_store.hazelcast_online_store.HazelcastOnlineStore", "ikv": "feast.infra.online_stores.contrib.ikv_online_store.ikv.IKVOnlineStore", "elasticsearch": "feast.infra.online_stores.contrib.elasticsearch.ElasticSearchOnlineStore", diff --git a/sdk/python/feast/templates/rockset/README.md b/sdk/python/feast/templates/rockset/README.md deleted file mode 100644 index d4f1ef6faf..0000000000 --- a/sdk/python/feast/templates/rockset/README.md +++ /dev/null @@ -1,21 +0,0 @@ -# Feast Quickstart -A quick view of what's in this repository: - -* `data/` contains raw demo parquet data -* `feature_repo/driver_repo.py` contains demo feature definitions -* `feature_repo/feature_store.yaml` contains a demo setup configuring where data sources are -* `test_workflow.py` showcases how to run all key Feast commands, including defining, retrieving, and pushing features. - -You can run the overall workflow with `python test_workflow.py`. - -## To move from this into a more production ready workflow: -> See more details in [Running Feast in production](https://docs.feast.dev/how-to-guides/running-feast-in-production) - -1. `feature_store.yaml` points to a local file as a registry. 
You'll want to setup a remote file (e.g. in S3/GCS) or a - SQL registry. See [registry docs](https://docs.feast.dev/getting-started/concepts/registry) for more details. -2. Setup CI/CD + dev vs staging vs prod environments to automatically update the registry as you change Feast feature definitions. See [docs](https://docs.feast.dev/how-to-guides/running-feast-in-production#1.-automatically-deploying-changes-to-your-feature-definitions). -3. (optional) Regularly scheduled materialization to power low latency feature retrieval (e.g. via Airflow). See [Batch data ingestion](https://docs.feast.dev/getting-started/concepts/data-ingestion#batch-data-ingestion) - for more details. -4. (optional) Deploy feature server instances with `feast serve` to expose endpoints to retrieve online features. - - See [Python feature server](https://docs.feast.dev/reference/feature-servers/python-feature-server) for details. - - Use cases can also directly call the Feast client to fetch features as per [Feature retrieval](https://docs.feast.dev/getting-started/concepts/feature-retrieval) diff --git a/sdk/python/feast/templates/rockset/__init__.py b/sdk/python/feast/templates/rockset/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/sdk/python/feast/templates/rockset/bootstrap.py b/sdk/python/feast/templates/rockset/bootstrap.py deleted file mode 100644 index a3dc17f18e..0000000000 --- a/sdk/python/feast/templates/rockset/bootstrap.py +++ /dev/null @@ -1,30 +0,0 @@ -import click - -from feast.file_utils import replace_str_in_file - - -def bootstrap(): - # Bootstrap() will automatically be called from the init_repo() during `feast init` - import pathlib - - repo_path = pathlib.Path(__file__).parent.absolute() / "feature_repo" - config_file = repo_path / "feature_store.yaml" - data_path = repo_path / "data" - data_path.mkdir(exist_ok=True) - - rockset_apikey = click.prompt( - "Rockset Api Key (If blank will be read from ROCKSET_APIKEY in ENV):", - default="", - 
) - - rockset_host = click.prompt( - "Rockset Host (If blank will be read from ROCKSET_APISERVER in ENV):", - default="", - ) - - replace_str_in_file(config_file, "ROCKSET_APIKEY", rockset_apikey) - replace_str_in_file(config_file, "ROCKSET_APISERVER", rockset_host) - - -if __name__ == "__main__": - bootstrap() diff --git a/sdk/python/feast/templates/rockset/feature_repo/feature_store.yaml b/sdk/python/feast/templates/rockset/feature_repo/feature_store.yaml deleted file mode 100644 index 57cf8e73bb..0000000000 --- a/sdk/python/feast/templates/rockset/feature_repo/feature_store.yaml +++ /dev/null @@ -1,8 +0,0 @@ -project: my_project -registry: registry.db -provider: local -online_store: - type: rockset - api_key: ROCKSET_APIKEY - host: ROCKSET_APISERVER # (api.usw2a1.rockset.com, api.euc1a1.rockset.com, api.use1a1.rockset.com) -entity_key_serialization_version: 2 diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 89459d1a69..8128eb094d 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -1,7 +1,6 @@ # This file was autogenerated by uv via the following command: # uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.10-ci-requirements.txt aiobotocore==2.13.1 - # via feast (setup.py) aiohttp==3.9.5 # via aiobotocore aioitertools==0.11.0 @@ -20,8 +19,6 @@ anyio==4.4.0 # jupyter-server # starlette # watchfiles -appnope==0.1.4 - # via ipykernel argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 @@ -31,17 +28,16 @@ arrow==1.3.0 asn1crypto==1.5.1 # via snowflake-connector-python assertpy==1.1 - # via feast (setup.py) asttokens==2.4.1 # via stack-data async-lru==2.0.4 # via jupyterlab +async-property==0.2.2 + # via python-keycloak async-timeout==4.0.3 # via # aiohttp # redis -async-property==0.2.2 - # via python-keycloak atpublic==4.1.0 # via ibis-framework 
attrs==23.2.0 @@ -54,9 +50,7 @@ azure-core==1.30.2 # azure-identity # azure-storage-blob azure-identity==1.17.1 - # via feast (setup.py) azure-storage-blob==12.20.0 - # via feast (setup.py) babel==2.15.0 # via # jupyterlab-server @@ -66,13 +60,10 @@ beautifulsoup4==4.12.3 bidict==0.23.1 # via ibis-framework bigtree==0.19.2 - # via feast (setup.py) bleach==6.1.0 # via nbconvert boto3==1.34.131 - # via - # feast (setup.py) - # moto + # via moto botocore==1.34.131 # via # aiobotocore @@ -81,13 +72,11 @@ botocore==1.34.131 # s3transfer build==1.2.1 # via - # feast (setup.py) # pip-tools # singlestoredb cachetools==5.3.3 # via google-auth cassandra-driver==3.29.1 - # via feast (setup.py) certifi==2024.7.4 # via # elastic-transport @@ -110,7 +99,6 @@ charset-normalizer==3.3.2 # snowflake-connector-python click==8.1.7 # via - # feast (setup.py) # dask # geomet # great-expectations @@ -120,9 +108,7 @@ click==8.1.7 cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via - # feast (setup.py) - # great-expectations + # via great-expectations comm==0.2.2 # via # ipykernel @@ -131,7 +117,6 @@ coverage[toml]==7.5.4 # via pytest-cov cryptography==42.0.8 # via - # feast (setup.py) # azure-identity # azure-storage-blob # great-expectations @@ -144,9 +129,7 @@ cryptography==42.0.8 # types-pyopenssl # types-redis dask[dataframe]==2024.6.2 - # via - # feast (setup.py) - # dask-expr + # via dask-expr dask-expr==1.1.6 # via dask db-dtypes==1.2.0 @@ -158,11 +141,9 @@ decorator==5.1.1 defusedxml==0.7.1 # via nbconvert deltalake==0.18.1 - # via feast (setup.py) deprecation==2.1.0 # via python-keycloak dill==0.3.8 - # via feast (setup.py) distlib==0.3.8 # via virtualenv dnspython==2.6.1 @@ -176,7 +157,6 @@ duckdb==0.10.3 elastic-transport==8.13.1 # via elasticsearch elasticsearch==8.14.0 - # via feast (setup.py) email-validator==2.2.0 # via fastapi entrypoints==0.4 @@ -191,7 +171,6 @@ execnet==2.1.1 executing==2.0.1 # via stack-data fastapi==0.111.0 - # via feast (setup.py) fastapi-cli==0.0.4 
# via fastapi fastjsonschema==2.20.0 @@ -207,16 +186,11 @@ frozenlist==1.4.1 # aiohttp # aiosignal fsspec==2023.12.2 - # via - # feast (setup.py) - # dask -geojson==2.5.0 - # via rockset + # via dask geomet==0.2.1.post1 # via cassandra-driver google-api-core[grpc]==2.19.1 # via - # feast (setup.py) # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable @@ -231,11 +205,8 @@ google-auth==2.30.0 # google-cloud-storage # kubernetes google-cloud-bigquery[pandas]==3.13.0 - # via feast (setup.py) google-cloud-bigquery-storage==2.25.0 - # via feast (setup.py) google-cloud-bigtable==2.24.0 - # via feast (setup.py) google-cloud-core==2.4.1 # via # google-cloud-bigquery @@ -243,9 +214,7 @@ google-cloud-core==2.4.1 # google-cloud-datastore # google-cloud-storage google-cloud-datastore==2.19.0 - # via feast (setup.py) google-cloud-storage==2.17.0 - # via feast (setup.py) google-crc32c==1.5.0 # via # google-cloud-storage @@ -256,17 +225,16 @@ google-resumable-media==2.7.1 # google-cloud-storage googleapis-common-protos[grpc]==1.63.2 # via - # feast (setup.py) # google-api-core # grpc-google-iam-v1 # grpcio-status great-expectations==0.18.16 - # via feast (setup.py) +greenlet==3.0.3 + # via sqlalchemy grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable grpcio==1.64.1 # via - # feast (setup.py) # google-api-core # google-cloud-bigquery # googleapis-common-protos @@ -277,43 +245,31 @@ grpcio==1.64.1 # grpcio-testing # grpcio-tools grpcio-health-checking==1.62.2 - # via feast (setup.py) grpcio-reflection==1.62.2 - # via feast (setup.py) grpcio-status==1.62.2 # via google-api-core grpcio-testing==1.62.2 - # via feast (setup.py) grpcio-tools==1.62.2 - # via feast (setup.py) gunicorn==22.0.0 - # via feast (setup.py) h11==0.14.0 # via # httpcore # uvicorn happybase==1.2.0 - # via feast (setup.py) hazelcast-python-client==5.4.0 - # via feast (setup.py) hiredis==2.3.2 - # via feast (setup.py) httpcore==1.0.5 # via httpx httptools==0.6.1 # via uvicorn 
httpx==0.27.0 # via - # feast (setup.py) # fastapi # jupyterlab # python-keycloak ibis-framework[duckdb]==9.1.0 - # via - # feast (setup.py) - # ibis-substrait + # via ibis-substrait ibis-substrait==4.0.0 - # via feast (setup.py) identify==2.5.36 # via pre-commit idna==3.7 @@ -348,7 +304,6 @@ jedi==0.19.1 # via ipython jinja2==3.1.4 # via - # feast (setup.py) # altair # fastapi # great-expectations @@ -372,7 +327,6 @@ jsonpointer==3.0.0 # jsonschema jsonschema[format-nongpl]==4.22.0 # via - # feast (setup.py) # altair # great-expectations # jupyter-events @@ -420,7 +374,6 @@ jupyterlab-widgets==3.0.11 jwcrypto==1.5.6 # via python-keycloak kubernetes==20.13.0 - # via feast (setup.py) locket==1.0.0 # via partd makefun==1.15.2 @@ -441,17 +394,13 @@ matplotlib-inline==0.1.7 mdurl==0.1.2 # via markdown-it-py minio==7.1.0 - # via feast (setup.py) mistune==3.0.2 # via # great-expectations # nbconvert mmh3==4.1.0 - # via feast (setup.py) mock==2.0.0 - # via feast (setup.py) moto==4.2.14 - # via feast (setup.py) msal==1.29.0 # via # azure-identity @@ -463,13 +412,10 @@ multidict==6.0.5 # aiohttp # yarl mypy==1.10.1 - # via - # feast (setup.py) - # sqlalchemy + # via sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.3.0 - # via feast (setup.py) nbclient==0.10.0 # via nbconvert nbconvert==7.16.4 @@ -492,7 +438,6 @@ notebook-shim==0.2.4 # notebook numpy==1.26.4 # via - # feast (setup.py) # altair # dask # db-dtypes @@ -528,7 +473,6 @@ packaging==24.1 # sphinx pandas==2.2.2 # via - # feast (setup.py) # altair # dask # dask-expr @@ -554,7 +498,6 @@ pexpect==4.9.0 pip==24.1.1 # via pip-tools pip-tools==7.4.1 - # via feast (setup.py) platformdirs==3.11.0 # via # jupyter-core @@ -567,11 +510,8 @@ ply==3.11 portalocker==2.10.0 # via msal-extensions pre-commit==3.3.1 - # via feast (setup.py) prometheus-client==0.20.0 - # via - # feast (setup.py) - # jupyter-server + # via jupyter-server prompt-toolkit==3.0.47 # via ipython proto-plus==1.24.0 @@ -583,7 +523,6 @@ 
proto-plus==1.24.0 # google-cloud-datastore protobuf==4.25.3 # via - # feast (setup.py) # google-api-core # google-cloud-bigquery # google-cloud-bigquery-storage @@ -600,11 +539,8 @@ protobuf==4.25.3 # proto-plus # substrait psutil==5.9.0 - # via - # feast (setup.py) - # ipykernel + # via ipykernel psycopg[binary, pool]==3.1.19 - # via feast (setup.py) psycopg-binary==3.1.19 # via psycopg psycopg-pool==3.2.2 @@ -616,14 +552,12 @@ ptyprocess==0.7.0 pure-eval==0.2.2 # via stack-data py==1.11.0 - # via feast (setup.py) py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark pyarrow==15.0.2 # via - # feast (setup.py) # dask-expr # db-dtypes # deltalake @@ -641,19 +575,16 @@ pyasn1==0.6.0 pyasn1-modules==0.4.0 # via google-auth pybindgen==0.22.1 - # via feast (setup.py) pycparser==2.22 # via cffi pydantic==2.7.4 # via - # feast (setup.py) # fastapi # great-expectations pydantic-core==2.18.4 # via pydantic pygments==2.18.0 # via - # feast (setup.py) # ipython # nbconvert # rich @@ -664,11 +595,8 @@ pyjwt[crypto]==2.8.0 # singlestoredb # snowflake-connector-python pymssql==2.3.0 - # via feast (setup.py) pymysql==1.1.1 - # via feast (setup.py) pyodbc==5.1.0 - # via feast (setup.py) pyopenssl==24.1.0 # via snowflake-connector-python pyparsing==3.1.2 @@ -678,10 +606,8 @@ pyproject-hooks==1.1.0 # build # pip-tools pyspark==3.5.1 - # via feast (setup.py) pytest==7.4.4 # via - # feast (setup.py) # pytest-benchmark # pytest-cov # pytest-env @@ -691,21 +617,13 @@ pytest==7.4.4 # pytest-timeout # pytest-xdist pytest-benchmark==3.4.1 - # via feast (setup.py) pytest-cov==5.0.0 - # via feast (setup.py) pytest-env==1.1.3 - # via feast (setup.py) pytest-lazy-fixture==0.6.3 - # via feast (setup.py) pytest-mock==1.10.4 - # via feast (setup.py) pytest-ordering==0.6 - # via feast (setup.py) pytest-timeout==1.4.2 - # via feast (setup.py) pytest-xdist==3.6.1 - # via feast (setup.py) python-dateutil==2.9.0.post0 # via # arrow @@ -717,14 +635,12 @@ python-dateutil==2.9.0.post0 # 
kubernetes # moto # pandas - # rockset # trino python-dotenv==1.0.1 # via uvicorn python-json-logger==2.0.7 # via jupyter-events python-keycloak==4.2.2 - # via feast (setup.py) python-multipart==0.0.9 # via fastapi pytz==2024.1 @@ -736,7 +652,6 @@ pytz==2024.1 # trino pyyaml==6.0.1 # via - # feast (setup.py) # dask # ibis-substrait # jupyter-events @@ -750,19 +665,15 @@ pyzmq==26.0.3 # jupyter-client # jupyter-server redis==4.6.0 - # via feast (setup.py) referencing==0.35.1 # via # jsonschema # jsonschema-specifications # jupyter-events regex==2024.5.15 - # via - # feast (setup.py) - # parsimonious + # via parsimonious requests==2.32.3 # via - # feast (setup.py) # azure-core # docker # google-api-core @@ -799,8 +710,6 @@ rich==13.7.1 # via # ibis-framework # typer -rockset==2.1.2 - # via feast (setup.py) rpds-py==0.18.1 # via # jsonschema @@ -810,7 +719,6 @@ rsa==4.9 ruamel-yaml==0.17.17 # via great-expectations ruff==0.4.10 - # via feast (setup.py) s3transfer==0.10.2 # via boto3 scipy==1.14.0 @@ -827,7 +735,6 @@ setuptools==70.1.1 shellingham==1.5.4 # via typer singlestoredb==1.4.0 - # via feast (setup.py) six==1.16.0 # via # asttokens @@ -848,13 +755,11 @@ sniffio==1.3.1 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.11.0 - # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.5 # via beautifulsoup4 sphinx==6.2.1 - # via feast (setup.py) sphinxcontrib-applehelp==1.0.8 # via sphinx sphinxcontrib-devhelp==1.0.6 @@ -868,11 +773,9 @@ sphinxcontrib-qthelp==1.0.7 sphinxcontrib-serializinghtml==1.1.10 # via sphinx sqlalchemy[mypy]==2.0.31 - # via feast (setup.py) sqlglot==25.1.0 # via ibis-framework sqlite-vec==0.1.1 - # via feast (setup.py) sqlparams==6.0.1 # via singlestoredb stack-data==0.6.3 @@ -882,21 +785,17 @@ starlette==0.37.2 substrait==0.19.0 # via ibis-substrait tabulate==0.9.0 - # via feast (setup.py) tenacity==8.4.2 - # via feast (setup.py) terminado==0.18.1 # via # jupyter-server # 
jupyter-server-terminals testcontainers==4.4.0 - # via feast (setup.py) thriftpy2==0.5.1 # via happybase tinycss2==1.3.0 # via nbconvert toml==0.10.2 - # via feast (setup.py) tomli==2.0.1 # via # build @@ -924,9 +823,7 @@ tornado==6.4.1 # notebook # terminado tqdm==4.66.4 - # via - # feast (setup.py) - # great-expectations + # via great-expectations traitlets==5.14.3 # via # comm @@ -943,39 +840,25 @@ traitlets==5.14.3 # nbconvert # nbformat trino==0.328.0 - # via feast (setup.py) typeguard==4.3.0 - # via feast (setup.py) typer==0.12.3 # via fastapi-cli types-cffi==1.16.0.20240331 # via types-pyopenssl types-protobuf==3.19.22 - # via - # feast (setup.py) - # mypy-protobuf + # via mypy-protobuf types-pymysql==1.1.0.20240524 - # via feast (setup.py) types-pyopenssl==24.1.0.20240425 # via types-redis types-python-dateutil==2.9.0.20240316 - # via - # feast (setup.py) - # arrow + # via arrow types-pytz==2024.1.0.20240417 - # via feast (setup.py) types-pyyaml==6.0.12.20240311 - # via feast (setup.py) types-redis==4.6.0.20240425 - # via feast (setup.py) types-requests==2.30.0.0 - # via feast (setup.py) types-setuptools==70.1.0.20240627 - # via - # feast (setup.py) - # types-cffi + # via types-cffi types-tabulate==0.9.0.20240106 - # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests typing-extensions==4.12.2 @@ -1013,7 +896,6 @@ uri-template==1.3.0 # via jsonschema urllib3==1.26.19 # via - # feast (setup.py) # botocore # docker # elastic-transport @@ -1022,18 +904,13 @@ urllib3==1.26.19 # minio # requests # responses - # rockset # testcontainers uvicorn[standard]==0.30.1 - # via - # feast (setup.py) - # fastapi + # via fastapi uvloop==0.19.0 # via uvicorn virtualenv==20.23.0 - # via - # feast (setup.py) - # pre-commit + # via pre-commit watchfiles==0.22.0 # via uvicorn wcwidth==0.2.13 @@ -1068,4 +945,3 @@ yarl==1.9.4 # via aiohttp zipp==3.19.1 # via importlib-metadata -bigtree==0.19.2 diff --git a/sdk/python/requirements/py3.10-requirements.txt 
b/sdk/python/requirements/py3.10-requirements.txt index f1ec2b16ab..eed2baaefe 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -11,31 +11,30 @@ attrs==23.2.0 # via # jsonschema # referencing +bigtree==0.19.2 +cachetools==5.5.0 + # via google-auth certifi==2024.7.4 # via # httpcore # httpx + # kubernetes # requests charset-normalizer==3.3.2 # via requests click==8.1.7 # via - # feast (setup.py) # dask # typer # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via feast (setup.py) dask[dataframe]==2024.5.0 - # via - # feast (setup.py) - # dask-expr + # via dask-expr dask-expr==1.1.0 # via dask dill==0.3.8 - # via feast (setup.py) dnspython==2.6.1 # via email-validator email-validator==2.1.1 @@ -43,15 +42,16 @@ email-validator==2.1.1 exceptiongroup==1.2.1 # via anyio fastapi==0.111.0 - # via - # feast (setup.py) - # fastapi-cli + # via fastapi-cli fastapi-cli==0.0.2 # via fastapi fsspec==2024.3.1 # via dask +google-auth==2.34.0 + # via kubernetes +greenlet==3.0.3 + # via sqlalchemy gunicorn==22.0.0 - # via feast (setup.py) h11==0.14.0 # via # httpcore @@ -71,13 +71,11 @@ idna==3.7 importlib-metadata==7.1.0 # via dask jinja2==3.1.4 - # via - # feast (setup.py) - # fastapi + # via fastapi jsonschema==4.22.0 - # via feast (setup.py) jsonschema-specifications==2023.12.1 # via jsonschema +kubernetes==20.13.0 locket==1.0.0 # via partd markdown-it-py==3.0.0 @@ -87,19 +85,18 @@ markupsafe==2.1.5 mdurl==0.1.2 # via markdown-it-py mmh3==4.1.0 - # via feast (setup.py) mypy==1.10.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.6.0 - # via feast (setup.py) numpy==1.26.4 # via - # feast (setup.py) # dask # pandas # pyarrow +oauthlib==3.2.2 + # via requests-oauthlib orjson==3.10.3 # via fastapi packaging==24.0 @@ -108,35 +105,33 @@ packaging==24.0 # gunicorn pandas==2.2.2 # via - # feast (setup.py) # dask # dask-expr partd==1.4.2 # via dask prometheus-client==0.20.0 - # via feast 
(setup.py) protobuf==4.25.3 - # via - # feast (setup.py) - # mypy-protobuf + # via mypy-protobuf psutil==6.0.0 - # via feast (setup.py) pyarrow==16.0.0 + # via dask-expr +pyasn1==0.6.0 # via - # feast (setup.py) - # dask-expr + # pyasn1-modules + # rsa +pyasn1-modules==0.4.0 + # via google-auth pydantic==2.7.1 - # via - # feast (setup.py) - # fastapi + # via fastapi pydantic-core==2.18.2 # via pydantic pygments==2.18.0 - # via - # feast (setup.py) - # rich + # via rich +pyjwt==2.9.0 python-dateutil==2.9.0.post0 - # via pandas + # via + # kubernetes + # pandas python-dotenv==1.0.1 # via uvicorn python-multipart==0.0.9 @@ -145,39 +140,45 @@ pytz==2024.1 # via pandas pyyaml==6.0.1 # via - # feast (setup.py) # dask + # kubernetes # uvicorn referencing==0.35.1 # via # jsonschema # jsonschema-specifications requests==2.31.0 - # via feast (setup.py) + # via + # kubernetes + # requests-oauthlib +requests-oauthlib==2.0.0 + # via kubernetes rich==13.7.1 # via typer rpds-py==0.18.1 # via # jsonschema # referencing +rsa==4.9 + # via google-auth +setuptools==73.0.1 + # via kubernetes shellingham==1.5.4 # via typer six==1.16.0 - # via python-dateutil + # via + # kubernetes + # python-dateutil sniffio==1.3.1 # via # anyio # httpx sqlalchemy[mypy]==2.0.30 - # via feast (setup.py) starlette==0.37.2 # via fastapi tabulate==0.9.0 - # via feast (setup.py) tenacity==8.3.0 - # via feast (setup.py) toml==0.10.2 - # via feast (setup.py) tomli==2.0.1 # via mypy toolz==0.12.1 @@ -185,9 +186,7 @@ toolz==0.12.1 # dask # partd tqdm==4.66.4 - # via feast (setup.py) typeguard==4.2.1 - # via feast (setup.py) typer==0.12.3 # via fastapi-cli types-protobuf==5.26.0.20240422 @@ -208,18 +207,20 @@ tzdata==2024.1 ujson==5.9.0 # via fastapi urllib3==2.2.1 - # via requests + # via + # kubernetes + # requests uvicorn[standard]==0.29.0 # via - # feast (setup.py) # fastapi # fastapi-cli uvloop==0.19.0 # via uvicorn watchfiles==0.21.0 # via uvicorn +websocket-client==1.8.0 + # via kubernetes websockets==12.0 
# via uvicorn zipp==3.19.1 # via importlib-metadata -bigtree==0.19.2 diff --git a/sdk/python/requirements/py3.11-ci-requirements.txt b/sdk/python/requirements/py3.11-ci-requirements.txt index fd0b5a6d26..6458540f27 100644 --- a/sdk/python/requirements/py3.11-ci-requirements.txt +++ b/sdk/python/requirements/py3.11-ci-requirements.txt @@ -1,7 +1,6 @@ # This file was autogenerated by uv via the following command: # uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.11-ci-requirements.txt aiobotocore==2.13.1 - # via feast (setup.py) aiohttp==3.9.5 # via aiobotocore aioitertools==0.11.0 @@ -20,8 +19,6 @@ anyio==4.4.0 # jupyter-server # starlette # watchfiles -appnope==0.1.4 - # via ipykernel argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 @@ -31,7 +28,6 @@ arrow==1.3.0 asn1crypto==1.5.1 # via snowflake-connector-python assertpy==1.1 - # via feast (setup.py) asttokens==2.4.1 # via stack-data async-lru==2.0.4 @@ -50,9 +46,7 @@ azure-core==1.30.2 # azure-identity # azure-storage-blob azure-identity==1.17.1 - # via feast (setup.py) azure-storage-blob==12.20.0 - # via feast (setup.py) babel==2.15.0 # via # jupyterlab-server @@ -62,13 +56,10 @@ beautifulsoup4==4.12.3 bidict==0.23.1 # via ibis-framework bigtree==0.19.2 - # via feast (setup.py) bleach==6.1.0 # via nbconvert boto3==1.34.131 - # via - # feast (setup.py) - # moto + # via moto botocore==1.34.131 # via # aiobotocore @@ -77,13 +68,11 @@ botocore==1.34.131 # s3transfer build==1.2.1 # via - # feast (setup.py) # pip-tools # singlestoredb cachetools==5.3.3 # via google-auth cassandra-driver==3.29.1 - # via feast (setup.py) certifi==2024.7.4 # via # elastic-transport @@ -106,7 +95,6 @@ charset-normalizer==3.3.2 # snowflake-connector-python click==8.1.7 # via - # feast (setup.py) # dask # geomet # great-expectations @@ -116,9 +104,7 @@ click==8.1.7 cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via - # feast (setup.py) - # great-expectations + # 
via great-expectations comm==0.2.2 # via # ipykernel @@ -127,7 +113,6 @@ coverage[toml]==7.5.4 # via pytest-cov cryptography==42.0.8 # via - # feast (setup.py) # azure-identity # azure-storage-blob # great-expectations @@ -140,9 +125,7 @@ cryptography==42.0.8 # types-pyopenssl # types-redis dask[dataframe]==2024.6.2 - # via - # feast (setup.py) - # dask-expr + # via dask-expr dask-expr==1.1.6 # via dask db-dtypes==1.2.0 @@ -154,11 +137,9 @@ decorator==5.1.1 defusedxml==0.7.1 # via nbconvert deltalake==0.18.1 - # via feast (setup.py) deprecation==2.1.0 # via python-keycloak dill==0.3.8 - # via feast (setup.py) distlib==0.3.8 # via virtualenv dnspython==2.6.1 @@ -172,7 +153,6 @@ duckdb==0.10.3 elastic-transport==8.13.1 # via elasticsearch elasticsearch==8.14.0 - # via feast (setup.py) email-validator==2.2.0 # via fastapi entrypoints==0.4 @@ -182,7 +162,6 @@ execnet==2.1.1 executing==2.0.1 # via stack-data fastapi==0.111.0 - # via feast (setup.py) fastapi-cli==0.0.4 # via fastapi fastjsonschema==2.20.0 @@ -198,16 +177,11 @@ frozenlist==1.4.1 # aiohttp # aiosignal fsspec==2023.12.2 - # via - # feast (setup.py) - # dask -geojson==2.5.0 - # via rockset + # via dask geomet==0.2.1.post1 # via cassandra-driver google-api-core[grpc]==2.19.1 # via - # feast (setup.py) # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable @@ -222,11 +196,8 @@ google-auth==2.30.0 # google-cloud-storage # kubernetes google-cloud-bigquery[pandas]==3.13.0 - # via feast (setup.py) google-cloud-bigquery-storage==2.25.0 - # via feast (setup.py) google-cloud-bigtable==2.24.0 - # via feast (setup.py) google-cloud-core==2.4.1 # via # google-cloud-bigquery @@ -234,9 +205,7 @@ google-cloud-core==2.4.1 # google-cloud-datastore # google-cloud-storage google-cloud-datastore==2.19.0 - # via feast (setup.py) google-cloud-storage==2.17.0 - # via feast (setup.py) google-crc32c==1.5.0 # via # google-cloud-storage @@ -247,17 +216,16 @@ google-resumable-media==2.7.1 # google-cloud-storage 
googleapis-common-protos[grpc]==1.63.2 # via - # feast (setup.py) # google-api-core # grpc-google-iam-v1 # grpcio-status great-expectations==0.18.16 - # via feast (setup.py) +greenlet==3.0.3 + # via sqlalchemy grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable grpcio==1.64.1 # via - # feast (setup.py) # google-api-core # google-cloud-bigquery # googleapis-common-protos @@ -268,43 +236,31 @@ grpcio==1.64.1 # grpcio-testing # grpcio-tools grpcio-health-checking==1.62.2 - # via feast (setup.py) grpcio-reflection==1.62.2 - # via feast (setup.py) grpcio-status==1.62.2 # via google-api-core grpcio-testing==1.62.2 - # via feast (setup.py) grpcio-tools==1.62.2 - # via feast (setup.py) gunicorn==22.0.0 - # via feast (setup.py) h11==0.14.0 # via # httpcore # uvicorn happybase==1.2.0 - # via feast (setup.py) hazelcast-python-client==5.4.0 - # via feast (setup.py) hiredis==2.3.2 - # via feast (setup.py) httpcore==1.0.5 # via httpx httptools==0.6.1 # via uvicorn httpx==0.27.0 # via - # feast (setup.py) # fastapi # jupyterlab # python-keycloak ibis-framework[duckdb]==9.1.0 - # via - # feast (setup.py) - # ibis-substrait + # via ibis-substrait ibis-substrait==4.0.0 - # via feast (setup.py) identify==2.5.36 # via pre-commit idna==3.7 @@ -339,7 +295,6 @@ jedi==0.19.1 # via ipython jinja2==3.1.4 # via - # feast (setup.py) # altair # fastapi # great-expectations @@ -363,7 +318,6 @@ jsonpointer==3.0.0 # jsonschema jsonschema[format-nongpl]==4.22.0 # via - # feast (setup.py) # altair # great-expectations # jupyter-events @@ -411,7 +365,6 @@ jupyterlab-widgets==3.0.11 jwcrypto==1.5.6 # via python-keycloak kubernetes==20.13.0 - # via feast (setup.py) locket==1.0.0 # via partd makefun==1.15.2 @@ -432,17 +385,13 @@ matplotlib-inline==0.1.7 mdurl==0.1.2 # via markdown-it-py minio==7.1.0 - # via feast (setup.py) mistune==3.0.2 # via # great-expectations # nbconvert mmh3==4.1.0 - # via feast (setup.py) mock==2.0.0 - # via feast (setup.py) moto==4.2.14 - # via feast (setup.py) msal==1.29.0 
# via # azure-identity @@ -454,13 +403,10 @@ multidict==6.0.5 # aiohttp # yarl mypy==1.10.1 - # via - # feast (setup.py) - # sqlalchemy + # via sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.3.0 - # via feast (setup.py) nbclient==0.10.0 # via nbconvert nbconvert==7.16.4 @@ -483,7 +429,6 @@ notebook-shim==0.2.4 # notebook numpy==1.26.4 # via - # feast (setup.py) # altair # dask # db-dtypes @@ -519,7 +464,6 @@ packaging==24.1 # sphinx pandas==2.2.2 # via - # feast (setup.py) # altair # dask # dask-expr @@ -545,7 +489,6 @@ pexpect==4.9.0 pip==24.1.1 # via pip-tools pip-tools==7.4.1 - # via feast (setup.py) platformdirs==3.11.0 # via # jupyter-core @@ -558,11 +501,8 @@ ply==3.11 portalocker==2.10.0 # via msal-extensions pre-commit==3.3.1 - # via feast (setup.py) prometheus-client==0.20.0 - # via - # feast (setup.py) - # jupyter-server + # via jupyter-server prompt-toolkit==3.0.47 # via ipython proto-plus==1.24.0 @@ -574,7 +514,6 @@ proto-plus==1.24.0 # google-cloud-datastore protobuf==4.25.3 # via - # feast (setup.py) # google-api-core # google-cloud-bigquery # google-cloud-bigquery-storage @@ -591,11 +530,8 @@ protobuf==4.25.3 # proto-plus # substrait psutil==5.9.0 - # via - # feast (setup.py) - # ipykernel + # via ipykernel psycopg[binary, pool]==3.1.19 - # via feast (setup.py) psycopg-binary==3.1.19 # via psycopg psycopg-pool==3.2.2 @@ -607,14 +543,12 @@ ptyprocess==0.7.0 pure-eval==0.2.2 # via stack-data py==1.11.0 - # via feast (setup.py) py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark pyarrow==15.0.2 # via - # feast (setup.py) # dask-expr # db-dtypes # deltalake @@ -632,19 +566,16 @@ pyasn1==0.6.0 pyasn1-modules==0.4.0 # via google-auth pybindgen==0.22.1 - # via feast (setup.py) pycparser==2.22 # via cffi pydantic==2.7.4 # via - # feast (setup.py) # fastapi # great-expectations pydantic-core==2.18.4 # via pydantic pygments==2.18.0 # via - # feast (setup.py) # ipython # nbconvert # rich @@ -655,11 +586,8 @@ pyjwt[crypto]==2.8.0 
# singlestoredb # snowflake-connector-python pymssql==2.3.0 - # via feast (setup.py) pymysql==1.1.1 - # via feast (setup.py) pyodbc==5.1.0 - # via feast (setup.py) pyopenssl==24.1.0 # via snowflake-connector-python pyparsing==3.1.2 @@ -669,10 +597,8 @@ pyproject-hooks==1.1.0 # build # pip-tools pyspark==3.5.1 - # via feast (setup.py) pytest==7.4.4 # via - # feast (setup.py) # pytest-benchmark # pytest-cov # pytest-env @@ -682,21 +608,13 @@ pytest==7.4.4 # pytest-timeout # pytest-xdist pytest-benchmark==3.4.1 - # via feast (setup.py) pytest-cov==5.0.0 - # via feast (setup.py) pytest-env==1.1.3 - # via feast (setup.py) pytest-lazy-fixture==0.6.3 - # via feast (setup.py) pytest-mock==1.10.4 - # via feast (setup.py) pytest-ordering==0.6 - # via feast (setup.py) pytest-timeout==1.4.2 - # via feast (setup.py) pytest-xdist==3.6.1 - # via feast (setup.py) python-dateutil==2.9.0.post0 # via # arrow @@ -708,14 +626,12 @@ python-dateutil==2.9.0.post0 # kubernetes # moto # pandas - # rockset # trino python-dotenv==1.0.1 # via uvicorn python-json-logger==2.0.7 # via jupyter-events python-keycloak==4.2.2 - # via feast (setup.py) python-multipart==0.0.9 # via fastapi pytz==2024.1 @@ -727,7 +643,6 @@ pytz==2024.1 # trino pyyaml==6.0.1 # via - # feast (setup.py) # dask # ibis-substrait # jupyter-events @@ -741,19 +656,15 @@ pyzmq==26.0.3 # jupyter-client # jupyter-server redis==4.6.0 - # via feast (setup.py) referencing==0.35.1 # via # jsonschema # jsonschema-specifications # jupyter-events regex==2024.5.15 - # via - # feast (setup.py) - # parsimonious + # via parsimonious requests==2.32.3 # via - # feast (setup.py) # azure-core # docker # google-api-core @@ -790,8 +701,6 @@ rich==13.7.1 # via # ibis-framework # typer -rockset==2.1.2 - # via feast (setup.py) rpds-py==0.18.1 # via # jsonschema @@ -801,7 +710,6 @@ rsa==4.9 ruamel-yaml==0.17.17 # via great-expectations ruff==0.4.10 - # via feast (setup.py) s3transfer==0.10.2 # via boto3 scipy==1.14.0 @@ -818,7 +726,6 @@ 
setuptools==70.1.1 shellingham==1.5.4 # via typer singlestoredb==1.4.0 - # via feast (setup.py) six==1.16.0 # via # asttokens @@ -839,13 +746,11 @@ sniffio==1.3.1 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.11.0 - # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.5 # via beautifulsoup4 sphinx==6.2.1 - # via feast (setup.py) sphinxcontrib-applehelp==1.0.8 # via sphinx sphinxcontrib-devhelp==1.0.6 @@ -859,11 +764,9 @@ sphinxcontrib-qthelp==1.0.7 sphinxcontrib-serializinghtml==1.1.10 # via sphinx sqlalchemy[mypy]==2.0.31 - # via feast (setup.py) sqlglot==25.1.0 # via ibis-framework sqlite-vec==0.1.1 - # via feast (setup.py) sqlparams==6.0.1 # via singlestoredb stack-data==0.6.3 @@ -873,21 +776,17 @@ starlette==0.37.2 substrait==0.19.0 # via ibis-substrait tabulate==0.9.0 - # via feast (setup.py) tenacity==8.4.2 - # via feast (setup.py) terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals testcontainers==4.4.0 - # via feast (setup.py) thriftpy2==0.5.1 # via happybase tinycss2==1.3.0 # via nbconvert toml==0.10.2 - # via feast (setup.py) tomlkit==0.12.5 # via snowflake-connector-python toolz==0.12.1 @@ -905,9 +804,7 @@ tornado==6.4.1 # notebook # terminado tqdm==4.66.4 - # via - # feast (setup.py) - # great-expectations + # via great-expectations traitlets==5.14.3 # via # comm @@ -924,39 +821,25 @@ traitlets==5.14.3 # nbconvert # nbformat trino==0.328.0 - # via feast (setup.py) typeguard==4.3.0 - # via feast (setup.py) typer==0.12.3 # via fastapi-cli types-cffi==1.16.0.20240331 # via types-pyopenssl types-protobuf==3.19.22 - # via - # feast (setup.py) - # mypy-protobuf + # via mypy-protobuf types-pymysql==1.1.0.20240524 - # via feast (setup.py) types-pyopenssl==24.1.0.20240425 # via types-redis types-python-dateutil==2.9.0.20240316 - # via - # feast (setup.py) - # arrow + # via arrow types-pytz==2024.1.0.20240417 - # via feast (setup.py) types-pyyaml==6.0.12.20240311 - # via feast 
(setup.py) types-redis==4.6.0.20240425 - # via feast (setup.py) types-requests==2.30.0.0 - # via feast (setup.py) types-setuptools==70.1.0.20240627 - # via - # feast (setup.py) - # types-cffi + # via types-cffi types-tabulate==0.9.0.20240106 - # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests typing-extensions==4.12.2 @@ -991,7 +874,6 @@ uri-template==1.3.0 # via jsonschema urllib3==1.26.19 # via - # feast (setup.py) # botocore # docker # elastic-transport @@ -1000,18 +882,13 @@ urllib3==1.26.19 # minio # requests # responses - # rockset # testcontainers uvicorn[standard]==0.30.1 - # via - # feast (setup.py) - # fastapi + # via fastapi uvloop==0.19.0 # via uvicorn virtualenv==20.23.0 - # via - # feast (setup.py) - # pre-commit + # via pre-commit watchfiles==0.22.0 # via uvicorn wcwidth==0.2.13 diff --git a/sdk/python/requirements/py3.11-requirements.txt b/sdk/python/requirements/py3.11-requirements.txt index e51452a594..9f6dff962b 100644 --- a/sdk/python/requirements/py3.11-requirements.txt +++ b/sdk/python/requirements/py3.11-requirements.txt @@ -11,45 +11,45 @@ attrs==23.2.0 # via # jsonschema # referencing +bigtree==0.19.2 +cachetools==5.5.0 + # via google-auth certifi==2024.7.4 # via # httpcore # httpx + # kubernetes # requests charset-normalizer==3.3.2 # via requests click==8.1.7 # via - # feast (setup.py) # dask # typer # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via feast (setup.py) dask[dataframe]==2024.5.0 - # via - # feast (setup.py) - # dask-expr + # via dask-expr dask-expr==1.1.0 # via dask dill==0.3.8 - # via feast (setup.py) dnspython==2.6.1 # via email-validator email-validator==2.1.1 # via fastapi fastapi==0.111.0 - # via - # feast (setup.py) - # fastapi-cli + # via fastapi-cli fastapi-cli==0.0.2 # via fastapi fsspec==2024.3.1 # via dask +google-auth==2.34.0 + # via kubernetes +greenlet==3.0.3 + # via sqlalchemy gunicorn==22.0.0 - # via feast (setup.py) h11==0.14.0 # via # httpcore @@ -69,13 +69,11 @@ idna==3.7 
importlib-metadata==7.1.0 # via dask jinja2==3.1.4 - # via - # feast (setup.py) - # fastapi + # via fastapi jsonschema==4.22.0 - # via feast (setup.py) jsonschema-specifications==2023.12.1 # via jsonschema +kubernetes==20.13.0 locket==1.0.0 # via partd markdown-it-py==3.0.0 @@ -85,19 +83,18 @@ markupsafe==2.1.5 mdurl==0.1.2 # via markdown-it-py mmh3==4.1.0 - # via feast (setup.py) mypy==1.10.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.6.0 - # via feast (setup.py) numpy==1.26.4 # via - # feast (setup.py) # dask # pandas # pyarrow +oauthlib==3.2.2 + # via requests-oauthlib orjson==3.10.3 # via fastapi packaging==24.0 @@ -106,35 +103,33 @@ packaging==24.0 # gunicorn pandas==2.2.2 # via - # feast (setup.py) # dask # dask-expr partd==1.4.2 # via dask prometheus-client==0.20.0 - # via feast (setup.py) protobuf==4.25.3 - # via - # feast (setup.py) - # mypy-protobuf + # via mypy-protobuf psutil==6.0.0 - # via feast (setup.py) pyarrow==16.0.0 + # via dask-expr +pyasn1==0.6.0 # via - # feast (setup.py) - # dask-expr + # pyasn1-modules + # rsa +pyasn1-modules==0.4.0 + # via google-auth pydantic==2.7.1 - # via - # feast (setup.py) - # fastapi + # via fastapi pydantic-core==2.18.2 # via pydantic pygments==2.18.0 - # via - # feast (setup.py) - # rich + # via rich +pyjwt==2.9.0 python-dateutil==2.9.0.post0 - # via pandas + # via + # kubernetes + # pandas python-dotenv==1.0.1 # via uvicorn python-multipart==0.0.9 @@ -143,47 +138,51 @@ pytz==2024.1 # via pandas pyyaml==6.0.1 # via - # feast (setup.py) # dask + # kubernetes # uvicorn referencing==0.35.1 # via # jsonschema # jsonschema-specifications requests==2.31.0 - # via feast (setup.py) + # via + # kubernetes + # requests-oauthlib +requests-oauthlib==2.0.0 + # via kubernetes rich==13.7.1 # via typer rpds-py==0.18.1 # via # jsonschema # referencing +rsa==4.9 + # via google-auth +setuptools==73.0.1 + # via kubernetes shellingham==1.5.4 # via typer six==1.16.0 - # via python-dateutil + # via + # kubernetes 
+ # python-dateutil sniffio==1.3.1 # via # anyio # httpx sqlalchemy[mypy]==2.0.30 - # via feast (setup.py) starlette==0.37.2 # via fastapi tabulate==0.9.0 - # via feast (setup.py) tenacity==8.3.0 - # via feast (setup.py) toml==0.10.2 - # via feast (setup.py) toolz==0.12.1 # via # dask # partd tqdm==4.66.4 - # via feast (setup.py) typeguard==4.2.1 - # via feast (setup.py) typer==0.12.3 # via fastapi-cli types-protobuf==5.26.0.20240422 @@ -202,18 +201,20 @@ tzdata==2024.1 ujson==5.9.0 # via fastapi urllib3==2.2.1 - # via requests + # via + # kubernetes + # requests uvicorn[standard]==0.29.0 # via - # feast (setup.py) # fastapi # fastapi-cli uvloop==0.19.0 # via uvicorn watchfiles==0.21.0 # via uvicorn +websocket-client==1.8.0 + # via kubernetes websockets==12.0 # via uvicorn zipp==3.19.1 # via importlib-metadata -bigtree==0.19.2 diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index be30f032a9..58ec69fe2d 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -1,7 +1,6 @@ # This file was autogenerated by uv via the following command: # uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.9-ci-requirements.txt aiobotocore==2.13.1 - # via feast (setup.py) aiohttp==3.9.5 # via aiobotocore aioitertools==0.11.0 @@ -20,8 +19,6 @@ anyio==4.4.0 # jupyter-server # starlette # watchfiles -appnope==0.1.4 - # via ipykernel argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 @@ -31,17 +28,16 @@ arrow==1.3.0 asn1crypto==1.5.1 # via snowflake-connector-python assertpy==1.1 - # via feast (setup.py) asttokens==2.4.1 # via stack-data async-lru==2.0.4 # via jupyterlab +async-property==0.2.2 + # via python-keycloak async-timeout==4.0.3 # via # aiohttp # redis -async-property==0.2.2 - # via python-keycloak atpublic==4.1.0 # via ibis-framework attrs==23.2.0 @@ -54,9 +50,7 @@ 
azure-core==1.30.2 # azure-identity # azure-storage-blob azure-identity==1.17.1 - # via feast (setup.py) azure-storage-blob==12.20.0 - # via feast (setup.py) babel==2.15.0 # via # jupyterlab-server @@ -66,13 +60,10 @@ beautifulsoup4==4.12.3 bidict==0.23.1 # via ibis-framework bigtree==0.19.2 - # via feast (setup.py) bleach==6.1.0 # via nbconvert boto3==1.34.131 - # via - # feast (setup.py) - # moto + # via moto botocore==1.34.131 # via # aiobotocore @@ -81,13 +72,11 @@ botocore==1.34.131 # s3transfer build==1.2.1 # via - # feast (setup.py) # pip-tools # singlestoredb cachetools==5.3.3 # via google-auth cassandra-driver==3.29.1 - # via feast (setup.py) certifi==2024.7.4 # via # elastic-transport @@ -110,7 +99,6 @@ charset-normalizer==3.3.2 # snowflake-connector-python click==8.1.7 # via - # feast (setup.py) # dask # geomet # great-expectations @@ -120,9 +108,7 @@ click==8.1.7 cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via - # feast (setup.py) - # great-expectations + # via great-expectations comm==0.2.2 # via # ipykernel @@ -131,7 +117,6 @@ coverage[toml]==7.5.4 # via pytest-cov cryptography==42.0.8 # via - # feast (setup.py) # azure-identity # azure-storage-blob # great-expectations @@ -144,9 +129,7 @@ cryptography==42.0.8 # types-pyopenssl # types-redis dask[dataframe]==2024.6.2 - # via - # feast (setup.py) - # dask-expr + # via dask-expr dask-expr==1.1.6 # via dask db-dtypes==1.2.0 @@ -158,11 +141,9 @@ decorator==5.1.1 defusedxml==0.7.1 # via nbconvert deltalake==0.18.1 - # via feast (setup.py) deprecation==2.1.0 # via python-keycloak dill==0.3.8 - # via feast (setup.py) distlib==0.3.8 # via virtualenv dnspython==2.6.1 @@ -176,7 +157,6 @@ duckdb==0.10.3 elastic-transport==8.13.1 # via elasticsearch elasticsearch==8.14.0 - # via feast (setup.py) email-validator==2.2.0 # via fastapi entrypoints==0.4 @@ -191,7 +171,6 @@ execnet==2.1.1 executing==2.0.1 # via stack-data fastapi==0.111.0 - # via feast (setup.py) fastapi-cli==0.0.4 # via fastapi 
fastjsonschema==2.20.0 @@ -207,16 +186,11 @@ frozenlist==1.4.1 # aiohttp # aiosignal fsspec==2023.12.2 - # via - # feast (setup.py) - # dask -geojson==2.5.0 - # via rockset + # via dask geomet==0.2.1.post1 # via cassandra-driver google-api-core[grpc]==2.19.1 # via - # feast (setup.py) # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable @@ -231,11 +205,8 @@ google-auth==2.30.0 # google-cloud-storage # kubernetes google-cloud-bigquery[pandas]==3.13.0 - # via feast (setup.py) google-cloud-bigquery-storage==2.25.0 - # via feast (setup.py) google-cloud-bigtable==2.24.0 - # via feast (setup.py) google-cloud-core==2.4.1 # via # google-cloud-bigquery @@ -243,9 +214,7 @@ google-cloud-core==2.4.1 # google-cloud-datastore # google-cloud-storage google-cloud-datastore==2.19.0 - # via feast (setup.py) google-cloud-storage==2.17.0 - # via feast (setup.py) google-crc32c==1.5.0 # via # google-cloud-storage @@ -256,17 +225,16 @@ google-resumable-media==2.7.1 # google-cloud-storage googleapis-common-protos[grpc]==1.63.2 # via - # feast (setup.py) # google-api-core # grpc-google-iam-v1 # grpcio-status great-expectations==0.18.16 - # via feast (setup.py) +greenlet==3.0.3 + # via sqlalchemy grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable grpcio==1.64.1 # via - # feast (setup.py) # google-api-core # google-cloud-bigquery # googleapis-common-protos @@ -277,43 +245,31 @@ grpcio==1.64.1 # grpcio-testing # grpcio-tools grpcio-health-checking==1.62.2 - # via feast (setup.py) grpcio-reflection==1.62.2 - # via feast (setup.py) grpcio-status==1.62.2 # via google-api-core grpcio-testing==1.62.2 - # via feast (setup.py) grpcio-tools==1.62.2 - # via feast (setup.py) gunicorn==22.0.0 - # via feast (setup.py) h11==0.14.0 # via # httpcore # uvicorn happybase==1.2.0 - # via feast (setup.py) hazelcast-python-client==5.4.0 - # via feast (setup.py) hiredis==2.3.2 - # via feast (setup.py) httpcore==1.0.5 # via httpx httptools==0.6.1 # via uvicorn httpx==0.27.0 # via - # 
feast (setup.py) # fastapi # jupyterlab # python-keycloak ibis-framework[duckdb]==9.0.0 - # via - # feast (setup.py) - # ibis-substrait + # via ibis-substrait ibis-substrait==4.0.0 - # via feast (setup.py) identify==2.5.36 # via pre-commit idna==3.7 @@ -357,7 +313,6 @@ jedi==0.19.1 # via ipython jinja2==3.1.4 # via - # feast (setup.py) # altair # fastapi # great-expectations @@ -381,7 +336,6 @@ jsonpointer==3.0.0 # jsonschema jsonschema[format-nongpl]==4.22.0 # via - # feast (setup.py) # altair # great-expectations # jupyter-events @@ -429,7 +383,6 @@ jupyterlab-widgets==3.0.11 jwcrypto==1.5.6 # via python-keycloak kubernetes==20.13.0 - # via feast (setup.py) locket==1.0.0 # via partd makefun==1.15.2 @@ -450,17 +403,13 @@ matplotlib-inline==0.1.7 mdurl==0.1.2 # via markdown-it-py minio==7.1.0 - # via feast (setup.py) mistune==3.0.2 # via # great-expectations # nbconvert mmh3==4.1.0 - # via feast (setup.py) mock==2.0.0 - # via feast (setup.py) moto==4.2.14 - # via feast (setup.py) msal==1.29.0 # via # azure-identity @@ -472,13 +421,10 @@ multidict==6.0.5 # aiohttp # yarl mypy==1.10.1 - # via - # feast (setup.py) - # sqlalchemy + # via sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.3.0 - # via feast (setup.py) nbclient==0.10.0 # via nbconvert nbconvert==7.16.4 @@ -501,7 +447,6 @@ notebook-shim==0.2.4 # notebook numpy==1.26.4 # via - # feast (setup.py) # altair # dask # db-dtypes @@ -537,7 +482,6 @@ packaging==24.1 # sphinx pandas==2.2.2 # via - # feast (setup.py) # altair # dask # dask-expr @@ -563,7 +507,6 @@ pexpect==4.9.0 pip==24.1.1 # via pip-tools pip-tools==7.4.1 - # via feast (setup.py) platformdirs==3.11.0 # via # jupyter-core @@ -576,11 +519,8 @@ ply==3.11 portalocker==2.10.0 # via msal-extensions pre-commit==3.3.1 - # via feast (setup.py) prometheus-client==0.20.0 - # via - # feast (setup.py) - # jupyter-server + # via jupyter-server prompt-toolkit==3.0.47 # via ipython proto-plus==1.24.0 @@ -592,7 +532,6 @@ proto-plus==1.24.0 # 
google-cloud-datastore protobuf==4.25.3 # via - # feast (setup.py) # google-api-core # google-cloud-bigquery # google-cloud-bigquery-storage @@ -609,11 +548,8 @@ protobuf==4.25.3 # proto-plus # substrait psutil==5.9.0 - # via - # feast (setup.py) - # ipykernel + # via ipykernel psycopg[binary, pool]==3.1.18 - # via feast (setup.py) psycopg-binary==3.1.18 # via psycopg psycopg-pool==3.2.2 @@ -625,14 +561,12 @@ ptyprocess==0.7.0 pure-eval==0.2.2 # via stack-data py==1.11.0 - # via feast (setup.py) py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark pyarrow==15.0.2 # via - # feast (setup.py) # dask-expr # db-dtypes # deltalake @@ -650,19 +584,16 @@ pyasn1==0.6.0 pyasn1-modules==0.4.0 # via google-auth pybindgen==0.22.1 - # via feast (setup.py) pycparser==2.22 # via cffi pydantic==2.7.4 # via - # feast (setup.py) # fastapi # great-expectations pydantic-core==2.18.4 # via pydantic pygments==2.18.0 # via - # feast (setup.py) # ipython # nbconvert # rich @@ -673,11 +604,8 @@ pyjwt[crypto]==2.8.0 # singlestoredb # snowflake-connector-python pymssql==2.3.0 - # via feast (setup.py) pymysql==1.1.1 - # via feast (setup.py) pyodbc==5.1.0 - # via feast (setup.py) pyopenssl==24.1.0 # via snowflake-connector-python pyparsing==3.1.2 @@ -687,10 +615,8 @@ pyproject-hooks==1.1.0 # build # pip-tools pyspark==3.5.1 - # via feast (setup.py) pytest==7.4.4 # via - # feast (setup.py) # pytest-benchmark # pytest-cov # pytest-env @@ -700,21 +626,13 @@ pytest==7.4.4 # pytest-timeout # pytest-xdist pytest-benchmark==3.4.1 - # via feast (setup.py) pytest-cov==5.0.0 - # via feast (setup.py) pytest-env==1.1.3 - # via feast (setup.py) pytest-lazy-fixture==0.6.3 - # via feast (setup.py) pytest-mock==1.10.4 - # via feast (setup.py) pytest-ordering==0.6 - # via feast (setup.py) pytest-timeout==1.4.2 - # via feast (setup.py) pytest-xdist==3.6.1 - # via feast (setup.py) python-dateutil==2.9.0.post0 # via # arrow @@ -726,14 +644,12 @@ python-dateutil==2.9.0.post0 # kubernetes # moto # 
pandas - # rockset # trino python-dotenv==1.0.1 # via uvicorn python-json-logger==2.0.7 # via jupyter-events python-keycloak==4.2.2 - # via feast (setup.py) python-multipart==0.0.9 # via fastapi pytz==2024.1 @@ -745,7 +661,6 @@ pytz==2024.1 # trino pyyaml==6.0.1 # via - # feast (setup.py) # dask # ibis-substrait # jupyter-events @@ -759,19 +674,15 @@ pyzmq==26.0.3 # jupyter-client # jupyter-server redis==4.6.0 - # via feast (setup.py) referencing==0.35.1 # via # jsonschema # jsonschema-specifications # jupyter-events regex==2024.5.15 - # via - # feast (setup.py) - # parsimonious + # via parsimonious requests==2.32.3 # via - # feast (setup.py) # azure-core # docker # google-api-core @@ -808,8 +719,6 @@ rich==13.7.1 # via # ibis-framework # typer -rockset==2.1.2 - # via feast (setup.py) rpds-py==0.18.1 # via # jsonschema @@ -821,7 +730,6 @@ ruamel-yaml==0.17.17 ruamel-yaml-clib==0.2.8 # via ruamel-yaml ruff==0.4.10 - # via feast (setup.py) s3transfer==0.10.2 # via boto3 scipy==1.13.1 @@ -838,7 +746,6 @@ setuptools==70.1.1 shellingham==1.5.4 # via typer singlestoredb==1.4.0 - # via feast (setup.py) six==1.16.0 # via # asttokens @@ -859,13 +766,11 @@ sniffio==1.3.1 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.11.0 - # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.5 # via beautifulsoup4 sphinx==6.2.1 - # via feast (setup.py) sphinxcontrib-applehelp==1.0.8 # via sphinx sphinxcontrib-devhelp==1.0.6 @@ -879,11 +784,9 @@ sphinxcontrib-qthelp==1.0.7 sphinxcontrib-serializinghtml==1.1.10 # via sphinx sqlalchemy[mypy]==2.0.31 - # via feast (setup.py) sqlglot==23.12.2 # via ibis-framework sqlite-vec==0.1.1 - # via feast (setup.py) sqlparams==6.0.1 # via singlestoredb stack-data==0.6.3 @@ -893,21 +796,17 @@ starlette==0.37.2 substrait==0.19.0 # via ibis-substrait tabulate==0.9.0 - # via feast (setup.py) tenacity==8.4.2 - # via feast (setup.py) terminado==0.18.1 # via # jupyter-server # 
jupyter-server-terminals testcontainers==4.4.0 - # via feast (setup.py) thriftpy2==0.5.1 # via happybase tinycss2==1.3.0 # via nbconvert toml==0.10.2 - # via feast (setup.py) tomli==2.0.1 # via # build @@ -935,9 +834,7 @@ tornado==6.4.1 # notebook # terminado tqdm==4.66.4 - # via - # feast (setup.py) - # great-expectations + # via great-expectations traitlets==5.14.3 # via # comm @@ -954,39 +851,25 @@ traitlets==5.14.3 # nbconvert # nbformat trino==0.328.0 - # via feast (setup.py) typeguard==4.3.0 - # via feast (setup.py) typer==0.12.3 # via fastapi-cli types-cffi==1.16.0.20240331 # via types-pyopenssl types-protobuf==3.19.22 - # via - # feast (setup.py) - # mypy-protobuf + # via mypy-protobuf types-pymysql==1.1.0.20240524 - # via feast (setup.py) types-pyopenssl==24.1.0.20240425 # via types-redis types-python-dateutil==2.9.0.20240316 - # via - # feast (setup.py) - # arrow + # via arrow types-pytz==2024.1.0.20240417 - # via feast (setup.py) types-pyyaml==6.0.12.20240311 - # via feast (setup.py) types-redis==4.6.0.20240425 - # via feast (setup.py) types-requests==2.30.0.0 - # via feast (setup.py) types-setuptools==70.1.0.20240627 - # via - # feast (setup.py) - # types-cffi + # via types-cffi types-tabulate==0.9.0.20240106 - # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests typing-extensions==4.12.2 @@ -1026,7 +909,6 @@ uri-template==1.3.0 # via jsonschema urllib3==1.26.19 # via - # feast (setup.py) # botocore # docker # elastic-transport @@ -1035,19 +917,14 @@ urllib3==1.26.19 # minio # requests # responses - # rockset # snowflake-connector-python # testcontainers uvicorn[standard]==0.30.1 - # via - # feast (setup.py) - # fastapi + # via fastapi uvloop==0.19.0 # via uvicorn virtualenv==20.23.0 - # via - # feast (setup.py) - # pre-commit + # via pre-commit watchfiles==0.22.0 # via uvicorn wcwidth==0.2.13 diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 0b3c8a33c9..960eaa6554 100644 
--- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -11,31 +11,30 @@ attrs==23.2.0 # via # jsonschema # referencing +bigtree==0.19.2 +cachetools==5.5.0 + # via google-auth certifi==2024.7.4 # via # httpcore # httpx + # kubernetes # requests charset-normalizer==3.3.2 # via requests click==8.1.7 # via - # feast (setup.py) # dask # typer # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via feast (setup.py) dask[dataframe]==2024.5.0 - # via - # feast (setup.py) - # dask-expr + # via dask-expr dask-expr==1.1.0 # via dask dill==0.3.8 - # via feast (setup.py) dnspython==2.6.1 # via email-validator email-validator==2.1.1 @@ -43,15 +42,16 @@ email-validator==2.1.1 exceptiongroup==1.2.2 # via anyio fastapi==0.111.0 - # via - # feast (setup.py) - # fastapi-cli + # via fastapi-cli fastapi-cli==0.0.2 # via fastapi fsspec==2024.3.1 # via dask +google-auth==2.34.0 + # via kubernetes +greenlet==3.0.3 + # via sqlalchemy gunicorn==22.0.0 - # via feast (setup.py) h11==0.14.0 # via # httpcore @@ -73,13 +73,11 @@ importlib-metadata==8.2.0 # dask # typeguard jinja2==3.1.4 - # via - # feast (setup.py) - # fastapi + # via fastapi jsonschema==4.22.0 - # via feast (setup.py) jsonschema-specifications==2023.12.1 # via jsonschema +kubernetes==20.13.0 locket==1.0.0 # via partd markdown-it-py==3.0.0 @@ -89,19 +87,18 @@ markupsafe==2.1.5 mdurl==0.1.2 # via markdown-it-py mmh3==4.1.0 - # via feast (setup.py) mypy==1.10.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.6.0 - # via feast (setup.py) numpy==1.26.4 # via - # feast (setup.py) # dask # pandas # pyarrow +oauthlib==3.2.2 + # via requests-oauthlib orjson==3.10.3 # via fastapi packaging==24.0 @@ -110,35 +107,33 @@ packaging==24.0 # gunicorn pandas==2.2.2 # via - # feast (setup.py) # dask # dask-expr partd==1.4.2 # via dask prometheus-client==0.20.0 - # via feast (setup.py) protobuf==4.25.3 - # via - # feast (setup.py) - # mypy-protobuf + # via 
mypy-protobuf psutil==6.0.0 - # via feast (setup.py) pyarrow==16.0.0 + # via dask-expr +pyasn1==0.6.0 # via - # feast (setup.py) - # dask-expr + # pyasn1-modules + # rsa +pyasn1-modules==0.4.0 + # via google-auth pydantic==2.7.1 - # via - # feast (setup.py) - # fastapi + # via fastapi pydantic-core==2.18.2 # via pydantic pygments==2.18.0 - # via - # feast (setup.py) - # rich + # via rich +pyjwt==2.9.0 python-dateutil==2.9.0.post0 - # via pandas + # via + # kubernetes + # pandas python-dotenv==1.0.1 # via uvicorn python-multipart==0.0.9 @@ -147,39 +142,45 @@ pytz==2024.1 # via pandas pyyaml==6.0.1 # via - # feast (setup.py) # dask + # kubernetes # uvicorn referencing==0.35.1 # via # jsonschema # jsonschema-specifications requests==2.31.0 - # via feast (setup.py) + # via + # kubernetes + # requests-oauthlib +requests-oauthlib==2.0.0 + # via kubernetes rich==13.7.1 # via typer rpds-py==0.18.1 # via # jsonschema # referencing +rsa==4.9 + # via google-auth +setuptools==73.0.1 + # via kubernetes shellingham==1.5.4 # via typer six==1.16.0 - # via python-dateutil + # via + # kubernetes + # python-dateutil sniffio==1.3.1 # via # anyio # httpx sqlalchemy[mypy]==2.0.30 - # via feast (setup.py) starlette==0.37.2 # via fastapi tabulate==0.9.0 - # via feast (setup.py) tenacity==8.3.0 - # via feast (setup.py) toml==0.10.2 - # via feast (setup.py) tomli==2.0.1 # via mypy toolz==0.12.1 @@ -187,9 +188,7 @@ toolz==0.12.1 # dask # partd tqdm==4.66.4 - # via feast (setup.py) typeguard==4.2.1 - # via feast (setup.py) typer==0.12.3 # via fastapi-cli types-protobuf==5.26.0.20240422 @@ -211,18 +210,20 @@ tzdata==2024.1 ujson==5.9.0 # via fastapi urllib3==2.2.1 - # via requests + # via + # kubernetes + # requests uvicorn[standard]==0.29.0 # via - # feast (setup.py) # fastapi # fastapi-cli uvloop==0.19.0 # via uvicorn watchfiles==0.21.0 # via uvicorn +websocket-client==1.8.0 + # via kubernetes websockets==12.0 # via uvicorn zipp==3.19.2 # via importlib-metadata -bigtree==0.19.2 diff --git 
a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index 0bf737f616..660a937f5a 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -110,12 +110,6 @@ "instance": os.getenv("BIGTABLE_INSTANCE_ID", "feast-integration-tests"), } -ROCKSET_CONFIG = { - "type": "rockset", - "api_key": os.getenv("ROCKSET_APIKEY", ""), - "host": os.getenv("ROCKSET_APISERVER", "api.rs2.usw2.rockset.com"), -} - IKV_CONFIG = { "type": "ikv", "account_id": os.getenv("IKV_ACCOUNT_ID", ""), @@ -166,10 +160,6 @@ AVAILABLE_ONLINE_STORES["datastore"] = ("datastore", None) AVAILABLE_ONLINE_STORES["snowflake"] = (SNOWFLAKE_CONFIG, None) AVAILABLE_ONLINE_STORES["bigtable"] = (BIGTABLE_CONFIG, None) - # Uncomment to test using private Rockset account. Currently not enabled as - # there is no dedicated Rockset instance for CI testing and there is no - # containerized version of Rockset. - # AVAILABLE_ONLINE_STORES["rockset"] = (ROCKSET_CONFIG, None) # Uncomment to test using private IKV account. 
Currently not enabled as # there is no dedicated IKV instance for CI testing and there is no diff --git a/setup.py b/setup.py index d53aee1002..a9f9cafacc 100644 --- a/setup.py +++ b/setup.py @@ -125,10 +125,6 @@ "pymssql", ] -ROCKSET_REQUIRED = [ - "rockset>=1.0.3", -] - IKV_REQUIRED = [ "ikvpy>=0.0.36", ] @@ -214,7 +210,6 @@ + HBASE_REQUIRED + CASSANDRA_REQUIRED + AZURE_REQUIRED - + ROCKSET_REQUIRED + HAZELCAST_REQUIRED + IBIS_REQUIRED + GRPCIO_REQUIRED @@ -386,7 +381,6 @@ def run(self): "cassandra": CASSANDRA_REQUIRED, "hazelcast": HAZELCAST_REQUIRED, "grpcio": GRPCIO_REQUIRED, - "rockset": ROCKSET_REQUIRED, "ibis": IBIS_REQUIRED, "duckdb": DUCKDB_REQUIRED, "ikv": IKV_REQUIRED, From 6b2f026747b8adebe659aed3d4d2f95d551d5d1e Mon Sep 17 00:00:00 2001 From: "Yang, Bo" Date: Mon, 26 Aug 2024 21:07:27 -0700 Subject: [PATCH 30/96] fix: Add --chdir to test_workflow.py (#4453) Signed-off-by: Yang, Bo --- .../feast/templates/postgres/feature_repo/test_workflow.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/python/feast/templates/postgres/feature_repo/test_workflow.py b/sdk/python/feast/templates/postgres/feature_repo/test_workflow.py index 30927d3c7a..843ade164c 100644 --- a/sdk/python/feast/templates/postgres/feature_repo/test_workflow.py +++ b/sdk/python/feast/templates/postgres/feature_repo/test_workflow.py @@ -11,7 +11,7 @@ def run_demo(): store = FeatureStore(repo_path=os.path.dirname(__file__)) print("\n--- Run feast apply to setup feature store on Postgres ---") - subprocess.run(["feast", "apply"]) + subprocess.run(["feast", "--chdir", os.path.dirname(__file__), "apply"]) print("\n--- Historical features for training ---") fetch_historical_features_entity_df(store, for_batch_scoring=False) @@ -55,7 +55,7 @@ def run_demo(): fetch_online_features(store, source="push") print("\n--- Run feast teardown ---") - subprocess.run(["feast", "teardown"]) + subprocess.run(["feast", "--chdir", os.path.dirname(__file__), "teardown"]) def 
fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: bool): From 635a01b4c77db781d67f9f5ebb1067806b1e2a13 Mon Sep 17 00:00:00 2001 From: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> Date: Tue, 27 Aug 2024 12:51:21 +0200 Subject: [PATCH 31/96] fix: Validating permission to update an existing request on both the new and the old instance (#4449) * Validating permission to update an existing request on both the new and the old instance Signed-off-by: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> * Reviewed update permission logic as per comment, added UT Signed-off-by: Daniele Martinoli --------- Signed-off-by: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> Signed-off-by: Daniele Martinoli --- sdk/python/feast/permissions/enforcer.py | 2 +- .../feast/permissions/security_manager.py | 48 ++++++- sdk/python/feast/registry_server.py | 123 ++++++++++-------- sdk/python/tests/unit/permissions/conftest.py | 24 +++- .../unit/permissions/test_security_manager.py | 92 ++++++++++++- 5 files changed, 226 insertions(+), 63 deletions(-) diff --git a/sdk/python/feast/permissions/enforcer.py b/sdk/python/feast/permissions/enforcer.py index af41d12a2c..ae45b8a78b 100644 --- a/sdk/python/feast/permissions/enforcer.py +++ b/sdk/python/feast/permissions/enforcer.py @@ -22,7 +22,7 @@ def enforce_policy( Define the logic to apply the configured permissions when a given action is requested on a protected resource. - If no permissions are defined, the result is to allow the execution. + If no permissions are defined, the result is to deny the execution. Args: permissions: The configured set of `Permission`. 
diff --git a/sdk/python/feast/permissions/security_manager.py b/sdk/python/feast/permissions/security_manager.py index 178db6e6e9..2322602388 100644 --- a/sdk/python/feast/permissions/security_manager.py +++ b/sdk/python/feast/permissions/security_manager.py @@ -1,7 +1,8 @@ import logging from contextvars import ContextVar -from typing import List, Optional, Union +from typing import Callable, List, Optional, Union +from feast.errors import FeastObjectNotFoundException from feast.feast_object import FeastObject from feast.infra.registry.base_registry import BaseRegistry from feast.permissions.action import AuthzedAction @@ -59,9 +60,9 @@ def assert_permissions( filter_only: bool = False, ) -> list[FeastObject]: """ - Verify if the current user is authorized ro execute the requested actions on the given resources. + Verify if the current user is authorized to execute the requested actions on the given resources. - If no permissions are defined, the result is to allow the execution. + If no permissions are defined, the result is to deny the execution. Args: resources: The resources for which we need to enforce authorized permission. @@ -73,7 +74,7 @@ def assert_permissions( list[FeastObject]: A filtered list of the permitted resources, possibly empty. Raises: - PermissionError: If the current user is not authorized to eecute all the requested actions on the given resources. + PermissionError: If the current user is not authorized to execute all the requested actions on the given resources. """ return enforce_policy( permissions=self.permissions, @@ -84,6 +85,45 @@ def assert_permissions( ) +def assert_permissions_to_update( + resource: FeastObject, + getter: Callable[[str, str, bool], FeastObject], + project: str, + allow_cache: bool = True, +) -> FeastObject: + """ + Verify if the current user is authorized to create or update the given resource. + If the resource already exists, the user must be granted permission to execute DESCRIBE and UPDATE actions. 
+ If the resource does not exist, the user must be granted permission to execute the CREATE action. + + If no permissions are defined, the result is to deny the execution. + + Args: + resource: The resources for which we need to enforce authorized permission. + getter: The getter function used to retrieve the existing resource instance by name. + The signature must be `get_permission(self, name: str, project: str, allow_cache: bool)` + project: The project nane used in the getter function. + allow_cache: Whether to use cached data. Defaults to `True`. + Returns: + FeastObject: The original `resource`, if permitted. + + Raises: + PermissionError: If the current user is not authorized to execute all the requested actions on the given resource or on the existing one. + """ + actions = [AuthzedAction.DESCRIBE, AuthzedAction.UPDATE] + try: + existing_resource = getter( + name=resource.name, + project=project, + allow_cache=allow_cache, + ) # type: ignore[call-arg] + assert_permissions(resource=existing_resource, actions=actions) + except FeastObjectNotFoundException: + actions = [AuthzedAction.CREATE] + resource_to_update = assert_permissions(resource=resource, actions=actions) + return resource_to_update + + def assert_permissions( resource: FeastObject, actions: Union[AuthzedAction, List[AuthzedAction]], diff --git a/sdk/python/feast/registry_server.py b/sdk/python/feast/registry_server.py index 6b37aba08d..7b779e9f9e 100644 --- a/sdk/python/feast/registry_server.py +++ b/sdk/python/feast/registry_server.py @@ -16,9 +16,13 @@ from feast.infra.infra_object import Infra from feast.infra.registry.base_registry import BaseRegistry from feast.on_demand_feature_view import OnDemandFeatureView -from feast.permissions.action import CRUD, AuthzedAction +from feast.permissions.action import AuthzedAction from feast.permissions.permission import Permission -from feast.permissions.security_manager import assert_permissions, permitted_resources +from 
feast.permissions.security_manager import ( + assert_permissions, + assert_permissions_to_update, + permitted_resources, +) from feast.permissions.server.grpc import grpc_interceptors from feast.permissions.server.utils import ( ServerType, @@ -37,14 +41,16 @@ def __init__(self, registry: BaseRegistry) -> None: self.proxied_registry = registry def ApplyEntity(self, request: RegistryServer_pb2.ApplyEntityRequest, context): - self.proxied_registry.apply_entity( - entity=cast( - Entity, - assert_permissions( - resource=Entity.from_proto(request.entity), - actions=CRUD, - ), + entity = cast( + Entity, + assert_permissions_to_update( + resource=Entity.from_proto(request.entity), + getter=self.proxied_registry.get_entity, + project=request.project, ), + ) + self.proxied_registry.apply_entity( + entity=entity, project=request.project, commit=request.commit, ) @@ -95,19 +101,19 @@ def DeleteEntity(self, request: RegistryServer_pb2.DeleteEntityRequest, context) def ApplyDataSource( self, request: RegistryServer_pb2.ApplyDataSourceRequest, context ): - ( - self.proxied_registry.apply_data_source( - data_source=cast( - DataSource, - assert_permissions( - resource=DataSource.from_proto(request.data_source), - actions=CRUD, - ), - ), + data_source = cast( + DataSource, + assert_permissions_to_update( + resource=DataSource.from_proto(request.data_source), + getter=self.proxied_registry.get_data_source, project=request.project, - commit=request.commit, ), ) + self.proxied_registry.apply_data_source( + data_source=data_source, + project=request.project, + commit=request.commit, + ) return Empty() @@ -182,12 +188,16 @@ def ApplyFeatureView( elif feature_view_type == "stream_feature_view": feature_view = StreamFeatureView.from_proto(request.stream_feature_view) + assert_permissions_to_update( + resource=feature_view, + # Will replace with the new get_any_feature_view method later + getter=self.proxied_registry.get_feature_view, + project=request.project, + ) + ( 
self.proxied_registry.apply_feature_view( - feature_view=cast( - FeatureView, - assert_permissions(resource=feature_view, actions=CRUD), - ), + feature_view=feature_view, project=request.project, commit=request.commit, ), @@ -305,14 +315,16 @@ def ListOnDemandFeatureViews( def ApplyFeatureService( self, request: RegistryServer_pb2.ApplyFeatureServiceRequest, context ): - self.proxied_registry.apply_feature_service( - feature_service=cast( - FeatureService, - assert_permissions( - resource=FeatureService.from_proto(request.feature_service), - actions=CRUD, - ), + feature_service = cast( + FeatureService, + assert_permissions_to_update( + resource=FeatureService.from_proto(request.feature_service), + getter=self.proxied_registry.get_feature_service, + project=request.project, ), + ) + self.proxied_registry.apply_feature_service( + feature_service=feature_service, project=request.project, commit=request.commit, ) @@ -371,19 +383,19 @@ def DeleteFeatureService( def ApplySavedDataset( self, request: RegistryServer_pb2.ApplySavedDatasetRequest, context ): - ( - self.proxied_registry.apply_saved_dataset( - saved_dataset=cast( - SavedDataset, - assert_permissions( - resource=SavedDataset.from_proto(request.saved_dataset), - actions=CRUD, - ), - ), + saved_dataset = cast( + SavedDataset, + assert_permissions_to_update( + resource=SavedDataset.from_proto(request.saved_dataset), + getter=self.proxied_registry.get_saved_dataset, project=request.project, - commit=request.commit, ), ) + self.proxied_registry.apply_saved_dataset( + saved_dataset=saved_dataset, + project=request.project, + commit=request.commit, + ) return Empty() @@ -437,14 +449,16 @@ def DeleteSavedDataset( def ApplyValidationReference( self, request: RegistryServer_pb2.ApplyValidationReferenceRequest, context ): - self.proxied_registry.apply_validation_reference( - validation_reference=cast( - ValidationReference, - assert_permissions( - ValidationReference.from_proto(request.validation_reference), - 
actions=CRUD, - ), + validation_reference = cast( + ValidationReference, + assert_permissions_to_update( + resource=ValidationReference.from_proto(request.validation_reference), + getter=self.proxied_registry.get_validation_reference, + project=request.project, ), + ) + self.proxied_registry.apply_validation_reference( + validation_reference=validation_reference, project=request.project, commit=request.commit, ) @@ -547,13 +561,16 @@ def GetInfra(self, request: RegistryServer_pb2.GetInfraRequest, context): def ApplyPermission( self, request: RegistryServer_pb2.ApplyPermissionRequest, context ): - self.proxied_registry.apply_permission( - permission=cast( - Permission, - assert_permissions( - Permission.from_proto(request.permission), actions=CRUD - ), + permission = cast( + Permission, + assert_permissions_to_update( + resource=Permission.from_proto(request.permission), + getter=self.proxied_registry.get_permission, + project=request.project, ), + ) + self.proxied_registry.apply_permission( + permission=permission, project=request.project, commit=request.commit, ) diff --git a/sdk/python/tests/unit/permissions/conftest.py b/sdk/python/tests/unit/permissions/conftest.py index 7cd944fb47..6adbc6ec54 100644 --- a/sdk/python/tests/unit/permissions/conftest.py +++ b/sdk/python/tests/unit/permissions/conftest.py @@ -3,6 +3,7 @@ import pytest from feast import FeatureView +from feast.entity import Entity from feast.infra.registry.base_registry import BaseRegistry from feast.permissions.decorator import require_permissions from feast.permissions.permission import AuthzedAction, Permission @@ -48,7 +49,10 @@ def users() -> list[User]: users.append(User("r", ["reader"])) users.append(User("w", ["writer"])) users.append(User("rw", ["reader", "writer"])) - users.append(User("admin", ["reader", "writer", "admin"])) + users.append(User("special", ["reader", "writer", "special-reader"])) + users.append(User("updater", ["updater"])) + users.append(User("creator", ["creator"])) + 
users.append(User("admin", ["updater", "creator"])) return dict([(u.username, u) for u in users]) @@ -76,10 +80,26 @@ def security_manager() -> SecurityManager: name="special", types=FeatureView, name_pattern="special.*", - policy=RoleBasedPolicy(roles=["admin", "special-reader"]), + policy=RoleBasedPolicy(roles=["special-reader"]), actions=[AuthzedAction.DESCRIBE, AuthzedAction.UPDATE], ) ) + permissions.append( + Permission( + name="entity_updater", + types=Entity, + policy=RoleBasedPolicy(roles=["updater"]), + actions=[AuthzedAction.DESCRIBE, AuthzedAction.UPDATE], + ) + ) + permissions.append( + Permission( + name="entity_creator", + types=Entity, + policy=RoleBasedPolicy(roles=["creator"]), + actions=[AuthzedAction.CREATE], + ) + ) registry = Mock(spec=BaseRegistry) registry.list_permissions = Mock(return_value=permissions) diff --git a/sdk/python/tests/unit/permissions/test_security_manager.py b/sdk/python/tests/unit/permissions/test_security_manager.py index 192542da78..228dddb01f 100644 --- a/sdk/python/tests/unit/permissions/test_security_manager.py +++ b/sdk/python/tests/unit/permissions/test_security_manager.py @@ -1,8 +1,14 @@ import assertpy import pytest +from feast.entity import Entity +from feast.errors import FeastObjectNotFoundException from feast.permissions.action import READ, AuthzedAction -from feast.permissions.security_manager import assert_permissions, permitted_resources +from feast.permissions.security_manager import ( + assert_permissions, + assert_permissions_to_update, + permitted_resources, +) @pytest.mark.parametrize( @@ -24,7 +30,7 @@ True, ), ( - "admin", + "special", [AuthzedAction.DESCRIBE, AuthzedAction.UPDATE], False, [False, True], @@ -32,7 +38,7 @@ True, ), ( - "admin", + "special", READ + [AuthzedAction.UPDATE], False, [False, False], @@ -81,3 +87,83 @@ def test_access_SecuredFeatureView( else: result = assert_permissions(resource=r, actions=requested_actions) assertpy.assert_that(result).is_none() + + 
+@pytest.mark.parametrize( + "username, allowed", + [ + (None, False), + ("r", False), + ("w", False), + ("rw", False), + ("special", False), + ("updater", False), + ("creator", True), + ("admin", True), + ], +) +def test_create_entity( + security_manager, + users, + username, + allowed, +): + sm = security_manager + entity = Entity( + name="", + ) + + user = users.get(username) + sm.set_current_user(user) + + def getter(name: str, project: str, allow_cache: bool): + raise FeastObjectNotFoundException() + + if allowed: + result = assert_permissions_to_update( + resource=entity, getter=getter, project="" + ) + assertpy.assert_that(result).is_equal_to(entity) + else: + with pytest.raises(PermissionError): + assert_permissions_to_update(resource=entity, getter=getter, project="") + + +@pytest.mark.parametrize( + "username, allowed", + [ + (None, False), + ("r", False), + ("w", False), + ("rw", False), + ("special", False), + ("updater", True), + ("creator", False), + ("admin", True), + ], +) +def test_update_entity( + security_manager, + users, + username, + allowed, +): + sm = security_manager + entity = Entity( + name="", + ) + + user = users.get(username) + sm.set_current_user(user) + + def getter(name: str, project: str, allow_cache: bool): + return entity + + if allowed: + result = assert_permissions_to_update( + resource=entity, getter=getter, project="" + ) + assertpy.assert_that(result).is_equal_to(entity) + else: + with pytest.raises(PermissionError): + assert_permissions_to_update(resource=entity, getter=getter, project="") From 5e753e4f72898f3723b5f69e11c2c968744e0815 Mon Sep 17 00:00:00 2001 From: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> Date: Thu, 29 Aug 2024 09:39:17 +0200 Subject: [PATCH 32/96] refactor: Introduced base class FeastError for all Feast exceptions (#4465) introduced base class FeastError for all Feast exceptions, with initial methods to map the grpc and HTTP status code Signed-off-by: Daniele Martinoli --- 
sdk/python/feast/cli_utils.py | 2 +- sdk/python/feast/errors.py | 125 +++++++++++------- sdk/python/feast/permissions/enforcer.py | 9 +- .../feast/permissions/security_manager.py | 8 +- .../tests/unit/permissions/test_decorator.py | 6 +- .../unit/permissions/test_security_manager.py | 10 +- 6 files changed, 96 insertions(+), 64 deletions(-) diff --git a/sdk/python/feast/cli_utils.py b/sdk/python/feast/cli_utils.py index edfdab93e3..264a633c31 100644 --- a/sdk/python/feast/cli_utils.py +++ b/sdk/python/feast/cli_utils.py @@ -279,7 +279,7 @@ def handler_list_all_permissions_roles_verbose( for o in objects: permitted_actions = ALL_ACTIONS.copy() for action in ALL_ACTIONS: - # Following code is derived from enforcer.enforce_policy but has a different return type and does not raise PermissionError + # Following code is derived from enforcer.enforce_policy but has a different return type and does not raise FeastPermissionError matching_permissions = [ p for p in permissions diff --git a/sdk/python/feast/errors.py b/sdk/python/feast/errors.py index ffafe31125..2eed986d7f 100644 --- a/sdk/python/feast/errors.py +++ b/sdk/python/feast/errors.py @@ -1,34 +1,52 @@ from typing import Any, List, Set from colorama import Fore, Style +from fastapi import status as HttpStatusCode +from grpc import StatusCode as GrpcStatusCode from feast.field import Field -class DataSourceNotFoundException(Exception): +class FeastError(Exception): + pass + + def rpc_status_code(self) -> GrpcStatusCode: + return GrpcStatusCode.INTERNAL + + def http_status_code(self) -> int: + return HttpStatusCode.HTTP_500_INTERNAL_SERVER_ERROR + + +class DataSourceNotFoundException(FeastError): def __init__(self, path): super().__init__( f"Unable to find table at '{path}'. Please check that table exists." ) -class DataSourceNoNameException(Exception): +class DataSourceNoNameException(FeastError): def __init__(self): super().__init__( "Unable to infer a name for this data source. 
Either table or name must be specified." ) -class DataSourceRepeatNamesException(Exception): +class DataSourceRepeatNamesException(FeastError): def __init__(self, ds_name: str): super().__init__( f"Multiple data sources share the same case-insensitive name {ds_name}." ) -class FeastObjectNotFoundException(Exception): +class FeastObjectNotFoundException(FeastError): pass + def rpc_status_code(self) -> GrpcStatusCode: + return GrpcStatusCode.NOT_FOUND + + def http_status_code(self) -> int: + return HttpStatusCode.HTTP_404_NOT_FOUND + class EntityNotFoundException(FeastObjectNotFoundException): def __init__(self, name, project=None): @@ -110,49 +128,49 @@ def __init__(self, name: str, project: str): ) -class FeastProviderLoginError(Exception): +class FeastProviderLoginError(FeastError): """Error class that indicates a user has not authenticated with their provider.""" -class FeastProviderNotImplementedError(Exception): +class FeastProviderNotImplementedError(FeastError): def __init__(self, provider_name): super().__init__(f"Provider '{provider_name}' is not implemented") -class FeastRegistryNotSetError(Exception): +class FeastRegistryNotSetError(FeastError): def __init__(self): super().__init__("Registry is not set, but is required") -class FeastFeatureServerTypeInvalidError(Exception): +class FeastFeatureServerTypeInvalidError(FeastError): def __init__(self, feature_server_type: str): super().__init__( f"Feature server type was set to {feature_server_type}, but this type is invalid" ) -class FeastRegistryTypeInvalidError(Exception): +class FeastRegistryTypeInvalidError(FeastError): def __init__(self, registry_type: str): super().__init__( f"Feature server type was set to {registry_type}, but this type is invalid" ) -class FeastModuleImportError(Exception): +class FeastModuleImportError(FeastError): def __init__(self, module_name: str, class_name: str): super().__init__( f"Could not import module '{module_name}' while attempting to load class '{class_name}'" ) -class 
FeastClassImportError(Exception): +class FeastClassImportError(FeastError): def __init__(self, module_name: str, class_name: str): super().__init__( f"Could not import class '{class_name}' from module '{module_name}'" ) -class FeastExtrasDependencyImportError(Exception): +class FeastExtrasDependencyImportError(FeastError): def __init__(self, extras_type: str, nested_error: str): message = ( nested_error @@ -162,14 +180,14 @@ def __init__(self, extras_type: str, nested_error: str): super().__init__(message) -class FeastOfflineStoreUnsupportedDataSource(Exception): +class FeastOfflineStoreUnsupportedDataSource(FeastError): def __init__(self, offline_store_name: str, data_source_name: str): super().__init__( f"Offline Store '{offline_store_name}' does not support data source '{data_source_name}'" ) -class FeatureNameCollisionError(Exception): +class FeatureNameCollisionError(FeastError): def __init__(self, feature_refs_collisions: List[str], full_feature_names: bool): if full_feature_names: collisions = [ref.replace(":", "__") for ref in feature_refs_collisions] @@ -191,7 +209,7 @@ def __init__(self, feature_refs_collisions: List[str], full_feature_names: bool) ) -class SpecifiedFeaturesNotPresentError(Exception): +class SpecifiedFeaturesNotPresentError(FeastError): def __init__( self, specified_features: List[Field], @@ -204,47 +222,47 @@ def __init__( ) -class SavedDatasetLocationAlreadyExists(Exception): +class SavedDatasetLocationAlreadyExists(FeastError): def __init__(self, location: str): super().__init__(f"Saved dataset location {location} already exists.") -class FeastOfflineStoreInvalidName(Exception): +class FeastOfflineStoreInvalidName(FeastError): def __init__(self, offline_store_class_name: str): super().__init__( f"Offline Store Class '{offline_store_class_name}' should end with the string `OfflineStore`.'" ) -class FeastOnlineStoreInvalidName(Exception): +class FeastOnlineStoreInvalidName(FeastError): def __init__(self, online_store_class_name: str): 
super().__init__( f"Online Store Class '{online_store_class_name}' should end with the string `OnlineStore`.'" ) -class FeastInvalidAuthConfigClass(Exception): +class FeastInvalidAuthConfigClass(FeastError): def __init__(self, auth_config_class_name: str): super().__init__( f"Auth Config Class '{auth_config_class_name}' should end with the string `AuthConfig`.'" ) -class FeastInvalidBaseClass(Exception): +class FeastInvalidBaseClass(FeastError): def __init__(self, class_name: str, class_type: str): super().__init__( f"Class '{class_name}' should have `{class_type}` as a base class." ) -class FeastOnlineStoreUnsupportedDataSource(Exception): +class FeastOnlineStoreUnsupportedDataSource(FeastError): def __init__(self, online_store_name: str, data_source_name: str): super().__init__( f"Online Store '{online_store_name}' does not support data source '{data_source_name}'" ) -class FeastEntityDFMissingColumnsError(Exception): +class FeastEntityDFMissingColumnsError(FeastError): def __init__(self, expected, missing): super().__init__( f"The entity dataframe you have provided must contain columns {expected}, " @@ -252,7 +270,7 @@ def __init__(self, expected, missing): ) -class FeastJoinKeysDuringMaterialization(Exception): +class FeastJoinKeysDuringMaterialization(FeastError): def __init__( self, source: str, join_key_columns: Set[str], source_columns: Set[str] ): @@ -262,7 +280,7 @@ def __init__( ) -class DockerDaemonNotRunning(Exception): +class DockerDaemonNotRunning(FeastError): def __init__(self): super().__init__( "The Docker Python sdk cannot connect to the Docker daemon. Please make sure you have" @@ -270,7 +288,7 @@ def __init__(self): ) -class RegistryInferenceFailure(Exception): +class RegistryInferenceFailure(FeastError): def __init__(self, repo_obj_type: str, specific_issue: str): super().__init__( f"Inference to fill in missing information for {repo_obj_type} failed. {specific_issue}. 
" @@ -278,58 +296,58 @@ def __init__(self, repo_obj_type: str, specific_issue: str): ) -class BigQueryJobStillRunning(Exception): +class BigQueryJobStillRunning(FeastError): def __init__(self, job_id): super().__init__(f"The BigQuery job with ID '{job_id}' is still running.") -class BigQueryJobCancelled(Exception): +class BigQueryJobCancelled(FeastError): def __init__(self, job_id): super().__init__(f"The BigQuery job with ID '{job_id}' was cancelled") -class RedshiftCredentialsError(Exception): +class RedshiftCredentialsError(FeastError): def __init__(self): super().__init__("Redshift API failed due to incorrect credentials") -class RedshiftQueryError(Exception): +class RedshiftQueryError(FeastError): def __init__(self, details): super().__init__(f"Redshift SQL Query failed to finish. Details: {details}") -class RedshiftTableNameTooLong(Exception): +class RedshiftTableNameTooLong(FeastError): def __init__(self, table_name: str): super().__init__( f"Redshift table names have a maximum length of 127 characters, but the table name {table_name} has length {len(table_name)} characters." ) -class SnowflakeCredentialsError(Exception): +class SnowflakeCredentialsError(FeastError): def __init__(self): super().__init__("Snowflake Connector failed due to incorrect credentials") -class SnowflakeQueryError(Exception): +class SnowflakeQueryError(FeastError): def __init__(self, details): super().__init__(f"Snowflake SQL Query failed to finish. Details: {details}") -class EntityTimestampInferenceException(Exception): +class EntityTimestampInferenceException(FeastError): def __init__(self, expected_column_name: str): super().__init__( f"Please provide an entity_df with a column named {expected_column_name} representing the time of events." 
) -class FeatureViewMissingDuringFeatureServiceInference(Exception): +class FeatureViewMissingDuringFeatureServiceInference(FeastError): def __init__(self, feature_view_name: str, feature_service_name: str): super().__init__( f"Missing {feature_view_name} feature view during inference for {feature_service_name} feature service." ) -class InvalidEntityType(Exception): +class InvalidEntityType(FeastError): def __init__(self, entity_type: type): super().__init__( f"The entity dataframe you have provided must be a Pandas DataFrame or a SQL query, " @@ -337,7 +355,7 @@ def __init__(self, entity_type: type): ) -class ConflictingFeatureViewNames(Exception): +class ConflictingFeatureViewNames(FeastError): # TODO: print file location of conflicting feature views def __init__(self, feature_view_name: str): super().__init__( @@ -345,60 +363,60 @@ def __init__(self, feature_view_name: str): ) -class FeastInvalidInfraObjectType(Exception): +class FeastInvalidInfraObjectType(FeastError): def __init__(self): super().__init__("Could not identify the type of the InfraObject.") -class SnowflakeIncompleteConfig(Exception): +class SnowflakeIncompleteConfig(FeastError): def __init__(self, e: KeyError): super().__init__(f"{e} not defined in a config file or feature_store.yaml file") -class SnowflakeQueryUnknownError(Exception): +class SnowflakeQueryUnknownError(FeastError): def __init__(self, query: str): super().__init__(f"Snowflake query failed: {query}") -class InvalidFeaturesParameterType(Exception): +class InvalidFeaturesParameterType(FeastError): def __init__(self, features: Any): super().__init__( f"Invalid `features` parameter type {type(features)}. Expected one of List[str] and FeatureService." ) -class EntitySQLEmptyResults(Exception): +class EntitySQLEmptyResults(FeastError): def __init__(self, entity_sql: str): super().__init__( f"No entity values found from the specified SQL query to generate the entity dataframe: {entity_sql}." 
) -class EntityDFNotDateTime(Exception): +class EntityDFNotDateTime(FeastError): def __init__(self): super().__init__( "The entity dataframe specified does not have the timestamp field as a datetime." ) -class PushSourceNotFoundException(Exception): +class PushSourceNotFoundException(FeastError): def __init__(self, push_source_name: str): super().__init__(f"Unable to find push source '{push_source_name}'.") -class ReadOnlyRegistryException(Exception): +class ReadOnlyRegistryException(FeastError): def __init__(self): super().__init__("Registry implementation is read-only.") -class DataFrameSerializationError(Exception): +class DataFrameSerializationError(FeastError): def __init__(self, input_dict: dict): super().__init__( f"Failed to serialize the provided dictionary into a pandas DataFrame: {input_dict.keys()}" ) -class PermissionNotFoundException(Exception): +class PermissionNotFoundException(FeastError): def __init__(self, name, project): super().__init__(f"Permission {name} does not exist in project {project}") @@ -411,11 +429,22 @@ def __init__(self, name, project=None): super().__init__(f"Permission {name} does not exist") -class ZeroRowsQueryResult(Exception): +class ZeroRowsQueryResult(FeastError): def __init__(self, query: str): super().__init__(f"This query returned zero rows:\n{query}") -class ZeroColumnQueryResult(Exception): +class ZeroColumnQueryResult(FeastError): def __init__(self, query: str): super().__init__(f"This query returned zero columns:\n{query}") + + +class FeastPermissionError(FeastError, PermissionError): + def __init__(self, details: str): + super().__init__(f"Permission error:\n{details}") + + def rpc_status_code(self) -> GrpcStatusCode: + return GrpcStatusCode.PERMISSION_DENIED + + def http_status_code(self) -> int: + return HttpStatusCode.HTTP_403_FORBIDDEN diff --git a/sdk/python/feast/permissions/enforcer.py b/sdk/python/feast/permissions/enforcer.py index ae45b8a78b..d94a81ba04 100644 --- a/sdk/python/feast/permissions/enforcer.py 
+++ b/sdk/python/feast/permissions/enforcer.py @@ -1,5 +1,6 @@ import logging +from feast.errors import FeastPermissionError from feast.feast_object import FeastObject from feast.permissions.decision import DecisionEvaluator from feast.permissions.permission import ( @@ -29,14 +30,14 @@ def enforce_policy( user: The current user. resources: The resources for which we need to enforce authorized permission. actions: The requested actions to be authorized. - filter_only: If `True`, it removes unauthorized resources from the returned value, otherwise it raises a `PermissionError` the + filter_only: If `True`, it removes unauthorized resources from the returned value, otherwise it raises a `FeastPermissionError` the first unauthorized resource. Defaults to `False`. Returns: list[FeastObject]: A filtered list of the permitted resources. Raises: - PermissionError: If the current user is not authorized to eecute the requested actions on the given resources (and `filter_only` is `False`). + FeastPermissionError: If the current user is not authorized to eecute the requested actions on the given resources (and `filter_only` is `False`). """ if not permissions: return resources @@ -66,12 +67,12 @@ def enforce_policy( if evaluator.is_decided(): grant, explanations = evaluator.grant() if not grant and not filter_only: - raise PermissionError(",".join(explanations)) + raise FeastPermissionError(",".join(explanations)) if grant: _permitted_resources.append(resource) break else: message = f"No permissions defined to manage {actions} on {type(resource)}/{resource.name}." 
logger.exception(f"**PERMISSION NOT GRANTED**: {message}") - raise PermissionError(message) + raise FeastPermissionError(message) return _permitted_resources diff --git a/sdk/python/feast/permissions/security_manager.py b/sdk/python/feast/permissions/security_manager.py index 2322602388..29c0e06753 100644 --- a/sdk/python/feast/permissions/security_manager.py +++ b/sdk/python/feast/permissions/security_manager.py @@ -67,14 +67,14 @@ def assert_permissions( Args: resources: The resources for which we need to enforce authorized permission. actions: The requested actions to be authorized. - filter_only: If `True`, it removes unauthorized resources from the returned value, otherwise it raises a `PermissionError` the + filter_only: If `True`, it removes unauthorized resources from the returned value, otherwise it raises a `FeastPermissionError` the first unauthorized resource. Defaults to `False`. Returns: list[FeastObject]: A filtered list of the permitted resources, possibly empty. Raises: - PermissionError: If the current user is not authorized to execute all the requested actions on the given resources. + FeastPermissionError: If the current user is not authorized to execute all the requested actions on the given resources. """ return enforce_policy( permissions=self.permissions, @@ -108,7 +108,7 @@ def assert_permissions_to_update( FeastObject: The original `resource`, if permitted. Raises: - PermissionError: If the current user is not authorized to execute all the requested actions on the given resource or on the existing one. + FeastPermissionError: If the current user is not authorized to execute all the requested actions on the given resource or on the existing one. """ actions = [AuthzedAction.DESCRIBE, AuthzedAction.UPDATE] try: @@ -140,7 +140,7 @@ def assert_permissions( FeastObject: The original `resource`, if permitted. Raises: - PermissionError: If the current user is not authorized to execute the requested actions on the given resources. 
+ FeastPermissionError: If the current user is not authorized to execute the requested actions on the given resources. """ sm = get_security_manager() if sm is None: diff --git a/sdk/python/tests/unit/permissions/test_decorator.py b/sdk/python/tests/unit/permissions/test_decorator.py index 8f6c2c420b..92db72c93d 100644 --- a/sdk/python/tests/unit/permissions/test_decorator.py +++ b/sdk/python/tests/unit/permissions/test_decorator.py @@ -1,6 +1,8 @@ import assertpy import pytest +from feast.errors import FeastPermissionError + @pytest.mark.parametrize( "username, can_read, can_write", @@ -22,11 +24,11 @@ def test_access_SecuredFeatureView( if can_read: fv.read_protected() else: - with pytest.raises(PermissionError): + with pytest.raises(FeastPermissionError): fv.read_protected() if can_write: fv.write_protected() else: - with pytest.raises(PermissionError): + with pytest.raises(FeastPermissionError): fv.write_protected() assertpy.assert_that(fv.unprotected()).is_true() diff --git a/sdk/python/tests/unit/permissions/test_security_manager.py b/sdk/python/tests/unit/permissions/test_security_manager.py index 228dddb01f..d403c8123b 100644 --- a/sdk/python/tests/unit/permissions/test_security_manager.py +++ b/sdk/python/tests/unit/permissions/test_security_manager.py @@ -2,7 +2,7 @@ import pytest from feast.entity import Entity -from feast.errors import FeastObjectNotFoundException +from feast.errors import FeastObjectNotFoundException, FeastPermissionError from feast.permissions.action import READ, AuthzedAction from feast.permissions.security_manager import ( assert_permissions, @@ -66,7 +66,7 @@ def test_access_SecuredFeatureView( result = [] if raise_error_in_permit: - with pytest.raises(PermissionError): + with pytest.raises(FeastPermissionError): result = permitted_resources(resources=resources, actions=requested_actions) else: result = permitted_resources(resources=resources, actions=requested_actions) @@ -82,7 +82,7 @@ def test_access_SecuredFeatureView( result = 
assert_permissions(resource=r, actions=requested_actions) assertpy.assert_that(result).is_equal_to(r) elif raise_error_in_assert[i]: - with pytest.raises(PermissionError): + with pytest.raises(FeastPermissionError): assert_permissions(resource=r, actions=requested_actions) else: result = assert_permissions(resource=r, actions=requested_actions) @@ -125,7 +125,7 @@ def getter(name: str, project: str, allow_cache: bool): ) assertpy.assert_that(result).is_equal_to(entity) else: - with pytest.raises(PermissionError): + with pytest.raises(FeastPermissionError): assert_permissions_to_update(resource=entity, getter=getter, project="") @@ -165,5 +165,5 @@ def getter(name: str, project: str, allow_cache: bool): ) assertpy.assert_that(result).is_equal_to(entity) else: - with pytest.raises(PermissionError): + with pytest.raises(FeastPermissionError): assert_permissions_to_update(resource=entity, getter=getter, project="") From 729c874e8c30719f23ad287d3cb84f1d654274ec Mon Sep 17 00:00:00 2001 From: Theodor Mihalache <84387487+tmihalac@users.noreply.github.com> Date: Thu, 29 Aug 2024 12:10:43 -0400 Subject: [PATCH 33/96] feat: Intra server to server communication (#4433) Intra server communication Signed-off-by: Theodor Mihalache --- .../templates/deployment.yaml | 2 + .../auth/kubernetes_token_parser.py | 11 +- .../permissions/auth/oidc_token_parser.py | 26 +- .../client/kubernetes_auth_client_manager.py | 11 + .../oidc_authentication_client_manager.py | 11 + .../feast/permissions/security_manager.py | 25 +- .../permissions/auth/test_token_parser.py | 147 +++++++++++- .../tests/unit/permissions/test_decorator.py | 2 +- .../unit/permissions/test_security_manager.py | 227 +++++++++++++++--- 9 files changed, 417 insertions(+), 45 deletions(-) diff --git a/infra/charts/feast-feature-server/templates/deployment.yaml b/infra/charts/feast-feature-server/templates/deployment.yaml index a550433db5..8dddeed6fd 100644 --- a/infra/charts/feast-feature-server/templates/deployment.yaml +++ 
b/infra/charts/feast-feature-server/templates/deployment.yaml @@ -36,6 +36,8 @@ spec: env: - name: FEATURE_STORE_YAML_BASE64 value: {{ .Values.feature_store_yaml_base64 }} + - name: INTRA_COMMUNICATION_BASE64 + value: {{ "intra-server-communication" | b64enc }} command: {{- if eq .Values.feast_mode "offline" }} - "feast" diff --git a/sdk/python/feast/permissions/auth/kubernetes_token_parser.py b/sdk/python/feast/permissions/auth/kubernetes_token_parser.py index c16e5232fb..7724163e5f 100644 --- a/sdk/python/feast/permissions/auth/kubernetes_token_parser.py +++ b/sdk/python/feast/permissions/auth/kubernetes_token_parser.py @@ -1,4 +1,5 @@ import logging +import os import jwt from kubernetes import client, config @@ -41,10 +42,14 @@ async def user_details_from_access_token(self, access_token: str) -> User: current_user = f"{sa_namespace}:{sa_name}" logging.info(f"Received request from {sa_name} in {sa_namespace}") - roles = self.get_roles(sa_namespace, sa_name) - logging.info(f"SA roles are: {roles}") + intra_communication_base64 = os.getenv("INTRA_COMMUNICATION_BASE64") + if sa_name is not None and sa_name == intra_communication_base64: + return User(username=sa_name, roles=[]) + else: + roles = self.get_roles(sa_namespace, sa_name) + logging.info(f"SA roles are: {roles}") - return User(username=current_user, roles=roles) + return User(username=current_user, roles=roles) def get_roles(self, namespace: str, service_account_name: str) -> list[str]: """ diff --git a/sdk/python/feast/permissions/auth/oidc_token_parser.py b/sdk/python/feast/permissions/auth/oidc_token_parser.py index fce9fdcbb2..28273e8c10 100644 --- a/sdk/python/feast/permissions/auth/oidc_token_parser.py +++ b/sdk/python/feast/permissions/auth/oidc_token_parser.py @@ -1,4 +1,6 @@ import logging +import os +from typing import Optional from unittest.mock import Mock import jwt @@ -34,7 +36,7 @@ def __init__(self, auth_config: OidcAuthConfig): async def _validate_token(self, access_token: str): """ - 
Validate the token extracted from the headrer of the user request against the OAuth2 server. + Validate the token extracted from the header of the user request against the OAuth2 server. """ # FastAPI's OAuth2AuthorizationCodeBearer requires a Request type but actually uses only the headers field # https://github.com/tiangolo/fastapi/blob/eca465f4c96acc5f6a22e92fd2211675ca8a20c8/fastapi/security/oauth2.py#L380 @@ -60,6 +62,11 @@ async def user_details_from_access_token(self, access_token: str) -> User: AuthenticationError if any error happens. """ + # check if intra server communication + user = self._get_intra_comm_user(access_token) + if user: + return user + try: await self._validate_token(access_token) logger.info("Validated token") @@ -108,3 +115,20 @@ async def user_details_from_access_token(self, access_token: str) -> User: except jwt.exceptions.InvalidTokenError: logger.exception("Exception while parsing the token:") raise AuthenticationError("Invalid token.") + + def _get_intra_comm_user(self, access_token: str) -> Optional[User]: + intra_communication_base64 = os.getenv("INTRA_COMMUNICATION_BASE64") + + if intra_communication_base64: + decoded_token = jwt.decode( + access_token, options={"verify_signature": False} + ) + if "preferred_username" in decoded_token: + preferred_username: str = decoded_token["preferred_username"] + if ( + preferred_username is not None + and preferred_username == intra_communication_base64 + ): + return User(username=preferred_username, roles=[]) + + return None diff --git a/sdk/python/feast/permissions/client/kubernetes_auth_client_manager.py b/sdk/python/feast/permissions/client/kubernetes_auth_client_manager.py index 1ca3c5a2ae..9957ff93a7 100644 --- a/sdk/python/feast/permissions/client/kubernetes_auth_client_manager.py +++ b/sdk/python/feast/permissions/client/kubernetes_auth_client_manager.py @@ -1,6 +1,8 @@ import logging import os +import jwt + from feast.permissions.auth_model import KubernetesAuthConfig from 
feast.permissions.client.auth_client_manager import AuthenticationClientManager @@ -13,6 +15,15 @@ def __init__(self, auth_config: KubernetesAuthConfig): self.token_file_path = "/var/run/secrets/kubernetes.io/serviceaccount/token" def get_token(self): + intra_communication_base64 = os.getenv("INTRA_COMMUNICATION_BASE64") + # If intra server communication call + if intra_communication_base64: + payload = { + "sub": f":::{intra_communication_base64}", # Subject claim + } + + return jwt.encode(payload, "") + try: token = self._read_token_from_file() return token diff --git a/sdk/python/feast/permissions/client/oidc_authentication_client_manager.py b/sdk/python/feast/permissions/client/oidc_authentication_client_manager.py index 6744a1d2ad..0f99cea86f 100644 --- a/sdk/python/feast/permissions/client/oidc_authentication_client_manager.py +++ b/sdk/python/feast/permissions/client/oidc_authentication_client_manager.py @@ -1,5 +1,7 @@ import logging +import os +import jwt import requests from feast.permissions.auth_model import OidcAuthConfig @@ -14,6 +16,15 @@ def __init__(self, auth_config: OidcAuthConfig): self.auth_config = auth_config def get_token(self): + intra_communication_base64 = os.getenv("INTRA_COMMUNICATION_BASE64") + # If intra server communication call + if intra_communication_base64: + payload = { + "preferred_username": f"{intra_communication_base64}", # Subject claim + } + + return jwt.encode(payload, "") + # Fetch the token endpoint from the discovery URL token_endpoint = OIDCDiscoveryService( self.auth_config.auth_discovery_url diff --git a/sdk/python/feast/permissions/security_manager.py b/sdk/python/feast/permissions/security_manager.py index 29c0e06753..c00a3d8853 100644 --- a/sdk/python/feast/permissions/security_manager.py +++ b/sdk/python/feast/permissions/security_manager.py @@ -1,4 +1,5 @@ import logging +import os from contextvars import ContextVar from typing import Callable, List, Optional, Union @@ -110,6 +111,10 @@ def 
assert_permissions_to_update( Raises: FeastPermissionError: If the current user is not authorized to execute all the requested actions on the given resource or on the existing one. """ + sm = get_security_manager() + if not is_auth_necessary(sm): + return resource + actions = [AuthzedAction.DESCRIBE, AuthzedAction.UPDATE] try: existing_resource = getter( @@ -142,10 +147,11 @@ def assert_permissions( Raises: FeastPermissionError: If the current user is not authorized to execute the requested actions on the given resources. """ + sm = get_security_manager() - if sm is None: + if not is_auth_necessary(sm): return resource - return sm.assert_permissions( + return sm.assert_permissions( # type: ignore[union-attr] resources=[resource], actions=actions, filter_only=False )[0] @@ -165,10 +171,11 @@ def permitted_resources( Returns: list[FeastObject]]: A filtered list of the permitted resources, possibly empty. """ + sm = get_security_manager() - if sm is None: + if not is_auth_necessary(sm): return resources - return sm.assert_permissions(resources=resources, actions=actions, filter_only=True) + return sm.assert_permissions(resources=resources, actions=actions, filter_only=True) # type: ignore[union-attr] """ @@ -201,3 +208,13 @@ def no_security_manager(): global _sm _sm = None + + +def is_auth_necessary(sm: Optional[SecurityManager]) -> bool: + intra_communication_base64 = os.getenv("INTRA_COMMUNICATION_BASE64") + + return ( + sm is not None + and sm.current_user is not None + and sm.current_user.username != intra_communication_base64 + ) diff --git a/sdk/python/tests/unit/permissions/auth/test_token_parser.py b/sdk/python/tests/unit/permissions/auth/test_token_parser.py index cb153a17c9..bac2103b4f 100644 --- a/sdk/python/tests/unit/permissions/auth/test_token_parser.py +++ b/sdk/python/tests/unit/permissions/auth/test_token_parser.py @@ -1,6 +1,6 @@ -# test_token_validator.py - import asyncio +import os +from unittest import mock from unittest.mock import MagicMock, 
patch import assertpy @@ -70,6 +70,75 @@ def test_oidc_token_validation_failure(mock_oauth2, oidc_config): ) +@mock.patch.dict(os.environ, {"INTRA_COMMUNICATION_BASE64": "test1234"}) +@pytest.mark.parametrize( + "intra_communication_val, is_intra_server", + [ + ("test1234", True), + ("my-name", False), + ], +) +def test_oidc_inter_server_comm( + intra_communication_val, is_intra_server, oidc_config, monkeypatch +): + async def mock_oath2(self, request): + return "OK" + + monkeypatch.setattr( + "feast.permissions.auth.oidc_token_parser.OAuth2AuthorizationCodeBearer.__call__", + mock_oath2, + ) + signing_key = MagicMock() + signing_key.key = "a-key" + monkeypatch.setattr( + "feast.permissions.auth.oidc_token_parser.PyJWKClient.get_signing_key_from_jwt", + lambda self, access_token: signing_key, + ) + + user_data = { + "preferred_username": f"{intra_communication_val}", + } + + if not is_intra_server: + user_data["resource_access"] = {_CLIENT_ID: {"roles": ["reader", "writer"]}} + + monkeypatch.setattr( + "feast.permissions.oidc_service.OIDCDiscoveryService._fetch_discovery_data", + lambda self, *args, **kwargs: { + "authorization_endpoint": "https://localhost:8080/realms/master/protocol/openid-connect/auth", + "token_endpoint": "https://localhost:8080/realms/master/protocol/openid-connect/token", + "jwks_uri": "https://localhost:8080/realms/master/protocol/openid-connect/certs", + }, + ) + + monkeypatch.setattr( + "feast.permissions.auth.oidc_token_parser.jwt.decode", + lambda self, *args, **kwargs: user_data, + ) + + access_token = "aaa-bbb-ccc" + token_parser = OidcTokenParser(auth_config=oidc_config) + user = asyncio.run( + token_parser.user_details_from_access_token(access_token=access_token) + ) + + if is_intra_server: + assertpy.assert_that(user).is_not_none() + assertpy.assert_that(user.username).is_equal_to(intra_communication_val) + assertpy.assert_that(user.roles).is_equal_to([]) + else: + assertpy.assert_that(user).is_not_none() + 
assertpy.assert_that(user).is_type_of(User) + if isinstance(user, User): + assertpy.assert_that(user.username).is_equal_to("my-name") + assertpy.assert_that(user.roles.sort()).is_equal_to( + ["reader", "writer"].sort() + ) + assertpy.assert_that(user.has_matching_role(["reader"])).is_true() + assertpy.assert_that(user.has_matching_role(["writer"])).is_true() + assertpy.assert_that(user.has_matching_role(["updater"])).is_false() + + # TODO RBAC: Move role bindings to a reusable fixture @patch("feast.permissions.auth.kubernetes_token_parser.config.load_incluster_config") @patch("feast.permissions.auth.kubernetes_token_parser.jwt.decode") @@ -127,3 +196,77 @@ def test_k8s_token_validation_failure(mock_jwt, mock_config): asyncio.run( token_parser.user_details_from_access_token(access_token=access_token) ) + + +@mock.patch.dict(os.environ, {"INTRA_COMMUNICATION_BASE64": "test1234"}) +@pytest.mark.parametrize( + "intra_communication_val, is_intra_server", + [ + ("test1234", True), + ("my-name", False), + ], +) +def test_k8s_inter_server_comm( + intra_communication_val, + is_intra_server, + oidc_config, + request, + rolebindings, + clusterrolebindings, + monkeypatch, +): + if is_intra_server: + subject = f":::{intra_communication_val}" + else: + sa_name = request.getfixturevalue("sa_name") + namespace = request.getfixturevalue("namespace") + subject = f"system:serviceaccount:{namespace}:{sa_name}" + rolebindings = request.getfixturevalue("rolebindings") + clusterrolebindings = request.getfixturevalue("clusterrolebindings") + + monkeypatch.setattr( + "feast.permissions.auth.kubernetes_token_parser.client.RbacAuthorizationV1Api.list_namespaced_role_binding", + lambda *args, **kwargs: rolebindings["items"], + ) + monkeypatch.setattr( + "feast.permissions.auth.kubernetes_token_parser.client.RbacAuthorizationV1Api.list_cluster_role_binding", + lambda *args, **kwargs: clusterrolebindings["items"], + ) + monkeypatch.setattr( + 
"feast.permissions.client.kubernetes_auth_client_manager.KubernetesAuthClientManager.get_token", + lambda self: "my-token", + ) + + monkeypatch.setattr( + "feast.permissions.auth.kubernetes_token_parser.config.load_incluster_config", + lambda: None, + ) + + monkeypatch.setattr( + "feast.permissions.auth.kubernetes_token_parser.jwt.decode", + lambda *args, **kwargs: {"sub": subject}, + ) + + roles = rolebindings["roles"] + croles = clusterrolebindings["roles"] + + access_token = "aaa-bbb-ccc" + token_parser = KubernetesTokenParser() + user = asyncio.run( + token_parser.user_details_from_access_token(access_token=access_token) + ) + + if is_intra_server: + assertpy.assert_that(user).is_not_none() + assertpy.assert_that(user.username).is_equal_to(intra_communication_val) + assertpy.assert_that(user.roles).is_equal_to([]) + else: + assertpy.assert_that(user).is_type_of(User) + if isinstance(user, User): + assertpy.assert_that(user.username).is_equal_to(f"{namespace}:{sa_name}") + assertpy.assert_that(user.roles.sort()).is_equal_to((roles + croles).sort()) + for r in roles: + assertpy.assert_that(user.has_matching_role([r])).is_true() + for cr in croles: + assertpy.assert_that(user.has_matching_role([cr])).is_true() + assertpy.assert_that(user.has_matching_role(["foo"])).is_false() diff --git a/sdk/python/tests/unit/permissions/test_decorator.py b/sdk/python/tests/unit/permissions/test_decorator.py index 92db72c93d..f434301a2c 100644 --- a/sdk/python/tests/unit/permissions/test_decorator.py +++ b/sdk/python/tests/unit/permissions/test_decorator.py @@ -7,7 +7,7 @@ @pytest.mark.parametrize( "username, can_read, can_write", [ - (None, False, False), + (None, True, True), ("r", True, False), ("w", False, True), ("rw", True, True), diff --git a/sdk/python/tests/unit/permissions/test_security_manager.py b/sdk/python/tests/unit/permissions/test_security_manager.py index d403c8123b..11b8dfb88e 100644 --- a/sdk/python/tests/unit/permissions/test_security_manager.py +++ 
b/sdk/python/tests/unit/permissions/test_security_manager.py @@ -9,18 +9,107 @@ assert_permissions_to_update, permitted_resources, ) +from feast.permissions.user import User @pytest.mark.parametrize( - "username, requested_actions, allowed, allowed_single, raise_error_in_assert, raise_error_in_permit", + "username, requested_actions, allowed, allowed_single, raise_error_in_assert, raise_error_in_permit, intra_communication_flag", [ - (None, [], False, [False, False], [True, True], False), - ("r", [AuthzedAction.DESCRIBE], True, [True, True], [False, False], False), - ("r", [AuthzedAction.UPDATE], False, [False, False], [True, True], False), - ("w", [AuthzedAction.DESCRIBE], False, [False, False], [True, True], False), - ("w", [AuthzedAction.UPDATE], False, [True, True], [False, False], False), - ("rw", [AuthzedAction.DESCRIBE], False, [True, True], [False, False], False), - ("rw", [AuthzedAction.UPDATE], False, [True, True], [False, False], False), + (None, [], True, [True, True], [False, False], False, False), + (None, [], True, [True, True], [False, False], False, True), + ( + "r", + [AuthzedAction.DESCRIBE], + True, + [True, True], + [False, False], + False, + False, + ), + ( + "r", + [AuthzedAction.DESCRIBE], + True, + [True, True], + [False, False], + False, + True, + ), + ("server_intra_com_val", [], True, [True, True], [False, False], False, True), + ( + "r", + [AuthzedAction.UPDATE], + False, + [False, False], + [True, True], + False, + False, + ), + ("r", [AuthzedAction.UPDATE], True, [True, True], [False, False], False, True), + ( + "w", + [AuthzedAction.DESCRIBE], + False, + [False, False], + [True, True], + False, + False, + ), + ( + "w", + [AuthzedAction.DESCRIBE], + True, + [True, True], + [True, True], + False, + True, + ), + ( + "w", + [AuthzedAction.UPDATE], + False, + [True, True], + [False, False], + False, + False, + ), + ("w", [AuthzedAction.UPDATE], False, [True, True], [False, False], False, True), + ( + "rw", + [AuthzedAction.DESCRIBE], + 
False, + [True, True], + [False, False], + False, + False, + ), + ( + "rw", + [AuthzedAction.DESCRIBE], + False, + [True, True], + [False, False], + False, + True, + ), + ( + "rw", + [AuthzedAction.UPDATE], + False, + [True, True], + [False, False], + False, + False, + ), + ( + "rw", + [AuthzedAction.UPDATE], + False, + [True, True], + [False, False], + False, + True, + ), ( "rw", [AuthzedAction.DESCRIBE, AuthzedAction.UPDATE], @@ -28,6 +117,16 @@ [False, False], [True, True], True, + False, + ), + ( + "rw", + [AuthzedAction.DESCRIBE, AuthzedAction.UPDATE], + True, + [True, True], + [False, False], + False, + True, ), ( "special", @@ -36,6 +135,16 @@ [False, True], [True, False], True, + False, + ), + ( + "admin", + [AuthzedAction.DESCRIBE, AuthzedAction.UPDATE], + True, + [True, True], + [False, False], + False, + True, ), ( "special", @@ -44,6 +153,16 @@ [False, False], [True, True], True, + False, + ), + ( + "admin", + READ + [AuthzedAction.UPDATE], + True, + [True, True], + [False, False], + False, + True, ), ], ) @@ -57,13 +176,21 @@ def test_access_SecuredFeatureView( allowed_single, raise_error_in_assert, raise_error_in_permit, + intra_communication_flag, + monkeypatch, ): sm = security_manager - resources = feature_views - user = users.get(username) sm.set_current_user(user) + if intra_communication_flag: + monkeypatch.setenv("INTRA_COMMUNICATION_BASE64", "server_intra_com_val") + sm.set_current_user(User("server_intra_com_val", [])) + else: + monkeypatch.delenv("INTRA_COMMUNICATION_BASE64", False) + + resources = feature_views + result = [] if raise_error_in_permit: with pytest.raises(FeastPermissionError): @@ -90,16 +217,24 @@ def test_access_SecuredFeatureView( @pytest.mark.parametrize( - "username, allowed", + "username, allowed, intra_communication_flag", [ - (None, False), - ("r", False), - ("w", False), - ("rw", False), - ("special", False), - ("updater", False), - ("creator", True), - ("admin", True), + (None, True, False), + (None, True, True), + 
("r", False, False), + ("r", True, True), + ("w", False, False), + ("w", True, True), + ("rw", False, False), + ("rw", True, True), + ("special", False, False), + ("special", True, True), + ("updater", False, False), + ("updater", True, True), + ("creator", True, False), + ("creator", True, True), + ("admin", True, False), + ("admin", True, True), ], ) def test_create_entity( @@ -107,15 +242,23 @@ def test_create_entity( users, username, allowed, + intra_communication_flag, + monkeypatch, ): sm = security_manager + user = users.get(username) + sm.set_current_user(user) + + if intra_communication_flag: + monkeypatch.setenv("INTRA_COMMUNICATION_BASE64", "server_intra_com_val") + sm.set_current_user(User("server_intra_com_val", [])) + else: + monkeypatch.delenv("INTRA_COMMUNICATION_BASE64", False) + entity = Entity( name="", ) - user = users.get(username) - sm.set_current_user(user) - def getter(name: str, project: str, allow_cache: bool): raise FeastObjectNotFoundException() @@ -130,16 +273,24 @@ def getter(name: str, project: str, allow_cache: bool): @pytest.mark.parametrize( - "username, allowed", + "username, allowed, intra_communication_flag", [ - (None, False), - ("r", False), - ("w", False), - ("rw", False), - ("special", False), - ("updater", True), - ("creator", False), - ("admin", True), + (None, True, False), + (None, True, True), + ("r", False, False), + ("r", True, True), + ("w", False, False), + ("w", True, True), + ("rw", False, False), + ("rw", True, True), + ("special", False, False), + ("special", True, True), + ("updater", True, False), + ("updater", True, True), + ("creator", False, False), + ("creator", True, True), + ("admin", True, False), + ("admin", True, True), ], ) def test_update_entity( @@ -147,15 +298,23 @@ def test_update_entity( users, username, allowed, + intra_communication_flag, + monkeypatch, ): sm = security_manager + user = users.get(username) + sm.set_current_user(user) + + if intra_communication_flag: + 
monkeypatch.setenv("INTRA_COMMUNICATION_BASE64", "server_intra_com_val") + sm.set_current_user(User("server_intra_com_val", [])) + else: + monkeypatch.delenv("INTRA_COMMUNICATION_BASE64", False) + entity = Entity( name="", ) - user = users.get(username) - sm.set_current_user(user) - def getter(name: str, project: str, allow_cache: bool): return entity From 4186f0346ea0140b6c4cc1ae626582e6a70ec8f6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 30 Aug 2024 00:53:27 +0400 Subject: [PATCH 34/96] chore: Bump notebook from 7.2.1 to 7.2.2 in /sdk/python/requirements (#4467) Bumps [notebook](https://github.com/jupyter/notebook) from 7.2.1 to 7.2.2. - [Release notes](https://github.com/jupyter/notebook/releases) - [Changelog](https://github.com/jupyter/notebook/blob/@jupyter-notebook/tree@7.2.2/CHANGELOG.md) - [Commits](https://github.com/jupyter/notebook/compare/@jupyter-notebook/tree@7.2.1...@jupyter-notebook/tree@7.2.2) --- updated-dependencies: - dependency-name: notebook dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.11-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 8128eb094d..a0ed668216 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -430,7 +430,7 @@ nest-asyncio==1.6.0 # via ipykernel nodeenv==1.9.1 # via pre-commit -notebook==7.2.1 +notebook==7.2.2 # via great-expectations notebook-shim==0.2.4 # via diff --git a/sdk/python/requirements/py3.11-ci-requirements.txt b/sdk/python/requirements/py3.11-ci-requirements.txt index 6458540f27..b0021b8980 100644 --- a/sdk/python/requirements/py3.11-ci-requirements.txt +++ b/sdk/python/requirements/py3.11-ci-requirements.txt @@ -421,7 +421,7 @@ nest-asyncio==1.6.0 # via ipykernel nodeenv==1.9.1 # via pre-commit -notebook==7.2.1 +notebook==7.2.2 # via great-expectations notebook-shim==0.2.4 # via diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 58ec69fe2d..8ca628844c 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -439,7 +439,7 @@ nest-asyncio==1.6.0 # via ipykernel nodeenv==1.9.1 # via pre-commit -notebook==7.2.1 +notebook==7.2.2 # via great-expectations notebook-shim==0.2.4 # via From a68cf3707495c375fc3fa9a2001ac02e0293772a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 29 Aug 2024 22:41:06 -0400 Subject: [PATCH 35/96] chore: Bump jupyterlab from 4.2.3 to 4.2.5 in /sdk/python/requirements (#4468) Bumps 
[jupyterlab](https://github.com/jupyterlab/jupyterlab) from 4.2.3 to 4.2.5. - [Release notes](https://github.com/jupyterlab/jupyterlab/releases) - [Changelog](https://github.com/jupyterlab/jupyterlab/blob/@jupyterlab/lsp@4.2.5/CHANGELOG.md) - [Commits](https://github.com/jupyterlab/jupyterlab/compare/@jupyterlab/lsp@4.2.3...@jupyterlab/lsp@4.2.5) --- updated-dependencies: - dependency-name: jupyterlab dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.11-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index a0ed668216..6970dd2aed 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -361,7 +361,7 @@ jupyter-server==2.14.1 # notebook-shim jupyter-server-terminals==0.5.3 # via jupyter-server -jupyterlab==4.2.3 +jupyterlab==4.2.5 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert diff --git a/sdk/python/requirements/py3.11-ci-requirements.txt b/sdk/python/requirements/py3.11-ci-requirements.txt index b0021b8980..2d7a5b252e 100644 --- a/sdk/python/requirements/py3.11-ci-requirements.txt +++ b/sdk/python/requirements/py3.11-ci-requirements.txt @@ -352,7 +352,7 @@ jupyter-server==2.14.1 # notebook-shim jupyter-server-terminals==0.5.3 # via jupyter-server -jupyterlab==4.2.3 +jupyterlab==4.2.5 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 8ca628844c..62f9280fe5 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt 
@@ -370,7 +370,7 @@ jupyter-server==2.14.1 # notebook-shim jupyter-server-terminals==0.5.3 # via jupyter-server -jupyterlab==4.2.3 +jupyterlab==4.2.5 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert From c365b4e71a16fb69883608c5f781c6d55502bb8e Mon Sep 17 00:00:00 2001 From: Ben Stuart Date: Fri, 30 Aug 2024 05:45:20 +0100 Subject: [PATCH 36/96] fix: Check for snowflake functions when setting up materialization engine (#4456) * Check for snowflake functions over stage Signed-off-by: Ben Stuart * Refactor stage_list to function_list Signed-off-by: Ben Stuart --------- Signed-off-by: Ben Stuart --- .../feast/infra/materialization/snowflake_engine.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/sdk/python/feast/infra/materialization/snowflake_engine.py b/sdk/python/feast/infra/materialization/snowflake_engine.py index 9f9f41c83d..e8b0857e5d 100644 --- a/sdk/python/feast/infra/materialization/snowflake_engine.py +++ b/sdk/python/feast/infra/materialization/snowflake_engine.py @@ -128,16 +128,16 @@ def update( stage_context = f'"{self.repo_config.batch_engine.database}"."{self.repo_config.batch_engine.schema_}"' stage_path = f'{stage_context}."feast_{project}"' with GetSnowflakeConnection(self.repo_config.batch_engine) as conn: - query = f"SHOW STAGES IN {stage_context}" + query = f"SHOW USER FUNCTIONS LIKE 'FEAST_{project.upper()}%' IN SCHEMA {stage_context}" cursor = execute_snowflake_statement(conn, query) - stage_list = pd.DataFrame( + function_list = pd.DataFrame( cursor.fetchall(), columns=[column.name for column in cursor.description], ) - # if the stage already exists, + # if the SHOW FUNCTIONS query returns results, # assumes that the materialization functions have been deployed - if f"feast_{project}" in stage_list["name"].tolist(): + if len(function_list.index) > 0: click.echo( f"Materialization functions for {Style.BRIGHT + Fore.GREEN}{project}{Style.RESET_ALL} already detected." 
) @@ -149,7 +149,7 @@ def update( ) click.echo() - query = f"CREATE STAGE {stage_path}" + query = f"CREATE STAGE IF NOT EXISTS {stage_path}" execute_snowflake_statement(conn, query) copy_path, zip_path = package_snowpark_zip(project) From 484240c4e783d68bc521b62b723c2dcbd00fab5e Mon Sep 17 00:00:00 2001 From: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> Date: Fri, 30 Aug 2024 08:57:56 +0200 Subject: [PATCH 37/96] fix: Initial commit targetting grpc registry server (#4458) * initial commit targetting grpc registry server Signed-off-by: Daniele Martinoli * refactor: Introduced base class FeastError for all Feast exceptions (#4465) introduced base class FeastError for all Feast exceptions, with initial methods to map the grpc and HTTP status code Signed-off-by: Daniele Martinoli * initial commit targetting grpc registry server Signed-off-by: Daniele Martinoli * fixed merge error Signed-off-by: Daniele Martinoli * initial commit targetting grpc registry server Signed-off-by: Daniele Martinoli * fixed merge error Signed-off-by: Daniele Martinoli * integrated comment Signed-off-by: Daniele Martinoli * moved imports as per comment Signed-off-by: Daniele Martinoli --------- Signed-off-by: Daniele Martinoli --- sdk/python/feast/errors.py | 56 +++++++++++++++++-- sdk/python/feast/grpc_error_interceptor.py | 48 ++++++++++++++++ .../client/grpc_client_auth_interceptor.py | 20 ++++--- sdk/python/feast/permissions/server/grpc.py | 22 -------- sdk/python/feast/registry_server.py | 26 ++++++++- .../auth/server/test_auth_registry_server.py | 35 ++++++++++++ sdk/python/tests/unit/test_errors.py | 26 +++++++++ 7 files changed, 197 insertions(+), 36 deletions(-) create mode 100644 sdk/python/feast/grpc_error_interceptor.py create mode 100644 sdk/python/tests/unit/test_errors.py diff --git a/sdk/python/feast/errors.py b/sdk/python/feast/errors.py index 2eed986d7f..d39009ae7a 100644 --- a/sdk/python/feast/errors.py +++ b/sdk/python/feast/errors.py @@ -1,4 +1,7 @@ 
-from typing import Any, List, Set +import importlib +import json +import logging +from typing import Any, List, Optional, Set from colorama import Fore, Style from fastapi import status as HttpStatusCode @@ -6,16 +9,61 @@ from feast.field import Field +logger = logging.getLogger(__name__) + class FeastError(Exception): pass - def rpc_status_code(self) -> GrpcStatusCode: + def grpc_status_code(self) -> GrpcStatusCode: return GrpcStatusCode.INTERNAL def http_status_code(self) -> int: return HttpStatusCode.HTTP_500_INTERNAL_SERVER_ERROR + def __str__(self) -> str: + if hasattr(self, "__overridden_message__"): + return str(getattr(self, "__overridden_message__")) + return super().__str__() + + def __repr__(self) -> str: + if hasattr(self, "__overridden_message__"): + return f"{type(self).__name__}('{getattr(self,'__overridden_message__')}')" + return super().__repr__() + + def to_error_detail(self) -> str: + """ + Returns a JSON representation of the error for serialization purposes. + + Returns: + str: a string representation of a JSON document including `module`, `class` and `message` fields. 
+ """ + + m = { + "module": f"{type(self).__module__}", + "class": f"{type(self).__name__}", + "message": f"{str(self)}", + } + return json.dumps(m) + + @staticmethod + def from_error_detail(detail: str) -> Optional["FeastError"]: + try: + m = json.loads(detail) + if all(f in m for f in ["module", "class", "message"]): + module_name = m["module"] + class_name = m["class"] + message = m["message"] + module = importlib.import_module(module_name) + class_reference = getattr(module, class_name) + + instance = class_reference(message) + setattr(instance, "__overridden_message__", message) + return instance + except Exception as e: + logger.warning(f"Invalid error detail: {detail}: {e}") + return None + class DataSourceNotFoundException(FeastError): def __init__(self, path): @@ -41,7 +89,7 @@ def __init__(self, ds_name: str): class FeastObjectNotFoundException(FeastError): pass - def rpc_status_code(self) -> GrpcStatusCode: + def grpc_status_code(self) -> GrpcStatusCode: return GrpcStatusCode.NOT_FOUND def http_status_code(self) -> int: @@ -443,7 +491,7 @@ class FeastPermissionError(FeastError, PermissionError): def __init__(self, details: str): super().__init__(f"Permission error:\n{details}") - def rpc_status_code(self) -> GrpcStatusCode: + def grpc_status_code(self) -> GrpcStatusCode: return GrpcStatusCode.PERMISSION_DENIED def http_status_code(self) -> int: diff --git a/sdk/python/feast/grpc_error_interceptor.py b/sdk/python/feast/grpc_error_interceptor.py new file mode 100644 index 0000000000..c638d461ed --- /dev/null +++ b/sdk/python/feast/grpc_error_interceptor.py @@ -0,0 +1,48 @@ +import grpc + +from feast.errors import FeastError + + +def exception_wrapper(behavior, request, context): + try: + return behavior(request, context) + except grpc.RpcError as e: + context.abort(e.code(), e.details()) + except FeastError as e: + context.abort( + e.grpc_status_code(), + e.to_error_detail(), + ) + + +class ErrorInterceptor(grpc.ServerInterceptor): + def 
intercept_service(self, continuation, handler_call_details): + handler = continuation(handler_call_details) + if handler is None: + return None + + if handler.unary_unary: + return grpc.unary_unary_rpc_method_handler( + lambda req, ctx: exception_wrapper(handler.unary_unary, req, ctx), + request_deserializer=handler.request_deserializer, + response_serializer=handler.response_serializer, + ) + elif handler.unary_stream: + return grpc.unary_stream_rpc_method_handler( + lambda req, ctx: exception_wrapper(handler.unary_stream, req, ctx), + request_deserializer=handler.request_deserializer, + response_serializer=handler.response_serializer, + ) + elif handler.stream_unary: + return grpc.stream_unary_rpc_method_handler( + lambda req, ctx: exception_wrapper(handler.stream_unary, req, ctx), + request_deserializer=handler.request_deserializer, + response_serializer=handler.response_serializer, + ) + elif handler.stream_stream: + return grpc.stream_stream_rpc_method_handler( + lambda req, ctx: exception_wrapper(handler.stream_stream, req, ctx), + request_deserializer=handler.request_deserializer, + response_serializer=handler.response_serializer, + ) + return handler diff --git a/sdk/python/feast/permissions/client/grpc_client_auth_interceptor.py b/sdk/python/feast/permissions/client/grpc_client_auth_interceptor.py index 98cc445c7b..5155b80cb5 100644 --- a/sdk/python/feast/permissions/client/grpc_client_auth_interceptor.py +++ b/sdk/python/feast/permissions/client/grpc_client_auth_interceptor.py @@ -2,6 +2,7 @@ import grpc +from feast.errors import FeastError from feast.permissions.auth_model import AuthConfig from feast.permissions.client.auth_client_manager_factory import get_auth_token @@ -20,26 +21,31 @@ def __init__(self, auth_type: AuthConfig): def intercept_unary_unary( self, continuation, client_call_details, request_iterator ): - client_call_details = self._append_auth_header_metadata(client_call_details) - return continuation(client_call_details, request_iterator) 
+ return self._handle_call(continuation, client_call_details, request_iterator) def intercept_unary_stream( self, continuation, client_call_details, request_iterator ): - client_call_details = self._append_auth_header_metadata(client_call_details) - return continuation(client_call_details, request_iterator) + return self._handle_call(continuation, client_call_details, request_iterator) def intercept_stream_unary( self, continuation, client_call_details, request_iterator ): - client_call_details = self._append_auth_header_metadata(client_call_details) - return continuation(client_call_details, request_iterator) + return self._handle_call(continuation, client_call_details, request_iterator) def intercept_stream_stream( self, continuation, client_call_details, request_iterator ): + return self._handle_call(continuation, client_call_details, request_iterator) + + def _handle_call(self, continuation, client_call_details, request_iterator): client_call_details = self._append_auth_header_metadata(client_call_details) - return continuation(client_call_details, request_iterator) + result = continuation(client_call_details, request_iterator) + if result.exception() is not None: + mapped_error = FeastError.from_error_detail(result.exception().details()) + if mapped_error is not None: + raise mapped_error + return result def _append_auth_header_metadata(self, client_call_details): logger.debug( diff --git a/sdk/python/feast/permissions/server/grpc.py b/sdk/python/feast/permissions/server/grpc.py index 3c94240869..96f2690b88 100644 --- a/sdk/python/feast/permissions/server/grpc.py +++ b/sdk/python/feast/permissions/server/grpc.py @@ -1,6 +1,5 @@ import asyncio import logging -from typing import Optional import grpc @@ -8,32 +7,11 @@ get_auth_manager, ) from feast.permissions.security_manager import get_security_manager -from feast.permissions.server.utils import ( - AuthManagerType, -) logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) -def grpc_interceptors( - 
auth_type: AuthManagerType, -) -> Optional[list[grpc.ServerInterceptor]]: - """ - A list of the authorization interceptors. - - Args: - auth_type: The type of authorization manager, from the feature store configuration. - - Returns: - list[grpc.ServerInterceptor]: Optional list of interceptors. If the authorization type is set to `NONE`, it returns `None`. - """ - if auth_type == AuthManagerType.NONE: - return None - - return [AuthInterceptor()] - - class AuthInterceptor(grpc.ServerInterceptor): def intercept_service(self, continuation, handler_call_details): sm = get_security_manager() diff --git a/sdk/python/feast/registry_server.py b/sdk/python/feast/registry_server.py index 7b779e9f9e..40475aa580 100644 --- a/sdk/python/feast/registry_server.py +++ b/sdk/python/feast/registry_server.py @@ -1,6 +1,6 @@ from concurrent import futures from datetime import datetime, timezone -from typing import Union, cast +from typing import Optional, Union, cast import grpc from google.protobuf.empty_pb2 import Empty @@ -13,6 +13,7 @@ from feast.errors import FeatureViewNotFoundException from feast.feast_object import FeastObject from feast.feature_view import FeatureView +from feast.grpc_error_interceptor import ErrorInterceptor from feast.infra.infra_object import Infra from feast.infra.registry.base_registry import BaseRegistry from feast.on_demand_feature_view import OnDemandFeatureView @@ -23,8 +24,9 @@ assert_permissions_to_update, permitted_resources, ) -from feast.permissions.server.grpc import grpc_interceptors +from feast.permissions.server.grpc import AuthInterceptor from feast.permissions.server.utils import ( + AuthManagerType, ServerType, init_auth_manager, init_security_manager, @@ -645,7 +647,7 @@ def start_server(store: FeatureStore, port: int, wait_for_termination: bool = Tr server = grpc.server( futures.ThreadPoolExecutor(max_workers=10), - interceptors=grpc_interceptors(auth_manager_type), + interceptors=_grpc_interceptors(auth_manager_type), ) 
RegistryServer_pb2_grpc.add_RegistryServerServicer_to_server( RegistryServer(store.registry), server @@ -668,3 +670,21 @@ def start_server(store: FeatureStore, port: int, wait_for_termination: bool = Tr server.wait_for_termination() else: return server + + +def _grpc_interceptors( + auth_type: AuthManagerType, +) -> Optional[list[grpc.ServerInterceptor]]: + """ + A list of the interceptors for the registry server. + + Args: + auth_type: The type of authorization manager, from the feature store configuration. + + Returns: + list[grpc.ServerInterceptor]: Optional list of interceptors. If the authorization type is set to `NONE`, it returns `None`. + """ + if auth_type == AuthManagerType.NONE: + return [ErrorInterceptor()] + + return [AuthInterceptor(), ErrorInterceptor()] diff --git a/sdk/python/tests/unit/permissions/auth/server/test_auth_registry_server.py b/sdk/python/tests/unit/permissions/auth/server/test_auth_registry_server.py index bc16bdac3b..9e9bc1473e 100644 --- a/sdk/python/tests/unit/permissions/auth/server/test_auth_registry_server.py +++ b/sdk/python/tests/unit/permissions/auth/server/test_auth_registry_server.py @@ -8,6 +8,11 @@ from feast import ( FeatureStore, ) +from feast.errors import ( + EntityNotFoundException, + FeastPermissionError, + FeatureViewNotFoundException, +) from feast.permissions.permission import Permission from feast.registry_server import start_server from feast.wait import wait_retry_backoff # noqa: E402 @@ -70,7 +75,9 @@ def test_registry_apis( print(f"Running for\n:{auth_config}") remote_feature_store = get_remote_registry_store(server_port, feature_store) permissions = _test_list_permissions(remote_feature_store, applied_permissions) + _test_get_entity(remote_feature_store, applied_permissions) _test_list_entities(remote_feature_store, applied_permissions) + _test_get_fv(remote_feature_store, applied_permissions) _test_list_fvs(remote_feature_store, applied_permissions) if _permissions_exist_in_permission_list( @@ -118,6 
+125,20 @@ def _test_get_historical_features(client_fs: FeatureStore): assertpy.assert_that(training_df).is_not_none() +def _test_get_entity(client_fs: FeatureStore, permissions: list[Permission]): + if not _is_auth_enabled(client_fs) or _is_permission_enabled( + client_fs, permissions, read_entities_perm + ): + entity = client_fs.get_entity("driver") + assertpy.assert_that(entity).is_not_none() + assertpy.assert_that(entity.name).is_equal_to("driver") + else: + with pytest.raises(FeastPermissionError): + client_fs.get_entity("driver") + with pytest.raises(EntityNotFoundException): + client_fs.get_entity("invalid-name") + + def _test_list_entities(client_fs: FeatureStore, permissions: list[Permission]): entities = client_fs.list_entities() @@ -188,6 +209,20 @@ def _is_auth_enabled(client_fs: FeatureStore) -> bool: return client_fs.config.auth_config.type != "no_auth" +def _test_get_fv(client_fs: FeatureStore, permissions: list[Permission]): + if not _is_auth_enabled(client_fs) or _is_permission_enabled( + client_fs, permissions, read_fv_perm + ): + fv = client_fs.get_feature_view("driver_hourly_stats") + assertpy.assert_that(fv).is_not_none() + assertpy.assert_that(fv.name).is_equal_to("driver_hourly_stats") + else: + with pytest.raises(FeastPermissionError): + client_fs.get_feature_view("driver_hourly_stats") + with pytest.raises(FeatureViewNotFoundException): + client_fs.get_feature_view("invalid-name") + + def _test_list_fvs(client_fs: FeatureStore, permissions: list[Permission]): if _is_auth_enabled(client_fs) and _permissions_exist_in_permission_list( [invalid_list_entities_perm], permissions diff --git a/sdk/python/tests/unit/test_errors.py b/sdk/python/tests/unit/test_errors.py new file mode 100644 index 0000000000..b3f33690da --- /dev/null +++ b/sdk/python/tests/unit/test_errors.py @@ -0,0 +1,26 @@ +import re + +import assertpy + +import feast.errors as errors + + +def test_error_error_detail(): + e = errors.FeatureViewNotFoundException("abc") + + d = 
e.to_error_detail() + + assertpy.assert_that(d).is_not_none() + assertpy.assert_that(d).contains('"module": "feast.errors"') + assertpy.assert_that(d).contains('"class": "FeatureViewNotFoundException"') + assertpy.assert_that(re.search(r"abc", d)).is_true() + + converted_e = errors.FeastError.from_error_detail(d) + assertpy.assert_that(converted_e).is_not_none() + assertpy.assert_that(str(converted_e)).is_equal_to(str(e)) + assertpy.assert_that(repr(converted_e)).is_equal_to(repr(e)) + + +def test_invalid_error_error_detail(): + e = errors.FeastError.from_error_detail("invalid") + assertpy.assert_that(e).is_none() From 8fef1944d9b6acb6b47a80d0fab27a724cf1bc5c Mon Sep 17 00:00:00 2001 From: cburroughs Date: Tue, 3 Sep 2024 03:01:41 -0400 Subject: [PATCH 38/96] build: Explicit protobuf build version; consistent build/setup deps (#4472) build: explicit protobuf build version; consistent build/setup deps Right now if one downloads `feast-0.40.1-py2.py3-none-any.whl` from PyPi it contains: ``` $ grep 'Protobuf Python Version' feast/protos/feast/registry/RegistryServer_pb2.py ``` Which is outside ``` $ grep 'protobuf<' feast-0.40.1.dist-info/METADATA Requires-Dist: protobuf<5.0.0,>=4.24.0 ``` Leading to runtime errors (#4437). This was mitigated by #4438. This change tightens this up further by: * Deleting the Makefile command that was trying to do this unsuccessfully. * Aligns the setup/build requirements * Sets the version of protobuf to match the *minimum* of the range. There is no guarantee that protos generated by `4.X` will work with `4.(X-1)`. 
Signed-off-by: Chris Burroughs --- .github/workflows/build_wheels.yml | 1 - Makefile | 3 --- environment-setup.md | 3 +-- pyproject.toml | 11 ++++++----- setup.py | 7 ++++--- 5 files changed, 11 insertions(+), 14 deletions(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index f04015a989..df8534d078 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -97,7 +97,6 @@ jobs: # There's a `git restore` in here because `make install-go-ci-dependencies` is actually messing up go.mod & go.sum. run: | pip install -U pip setuptools wheel twine - make install-protoc-dependencies make build-ui git status git restore go.mod go.sum diff --git a/Makefile b/Makefile index 6ebab4e3be..78a0b6d328 100644 --- a/Makefile +++ b/Makefile @@ -395,9 +395,6 @@ test-trino-plugin-locally: kill-trino-locally: cd ${ROOT_DIR}; docker stop trino -install-protoc-dependencies: - pip install --ignore-installed protobuf==4.24.0 "grpcio-tools>=1.56.2,<2" mypy-protobuf==3.1.0 - # Docker build-docker: build-feature-server-python-aws-docker build-feature-transformation-server-docker build-feature-server-java-docker diff --git a/environment-setup.md b/environment-setup.md index 5dde9dfd94..581dc35f77 100644 --- a/environment-setup.md +++ b/environment-setup.md @@ -13,11 +13,10 @@ pip install cryptography -U conda install protobuf conda install pymssql pip install -e ".[dev]" -make install-protoc-dependencies PYTHON=3.9 make install-python-ci-dependencies PYTHON=3.9 ``` 4. start the docker daemon 5. 
run unit tests: ```bash make test-python-unit -``` \ No newline at end of file +``` diff --git a/pyproject.toml b/pyproject.toml index af44861502..15921e633c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,13 +1,14 @@ [build-system] requires = [ + "grpcio-tools>=1.56.2,<2", + "grpcio>=1.56.2,<2", + "mypy-protobuf==3.1", + "protobuf==4.24.0", + "pybindgen==0.22.0", "setuptools>=60", - "wheel", "setuptools_scm>=6.2", - "grpcio", - "grpcio-tools>=1.47.0", - "mypy-protobuf==3.1", - "protobuf>=4.24.0,<5.0.0", "sphinx!=4.0.0", + "wheel", ] build-backend = "setuptools.build_meta" diff --git a/setup.py b/setup.py index a9f9cafacc..6da5e8226a 100644 --- a/setup.py +++ b/setup.py @@ -403,11 +403,12 @@ def run(self): entry_points={"console_scripts": ["feast=feast.cli:cli"]}, use_scm_version=use_scm_version, setup_requires=[ - "setuptools_scm", - "grpcio>=1.56.2,<2", "grpcio-tools>=1.56.2,<2", - "mypy-protobuf>=3.1", + "grpcio>=1.56.2,<2", + "mypy-protobuf==3.1", + "protobuf==4.24.0", "pybindgen==0.22.0", + "setuptools_scm>=6.2", ], cmdclass={ "build_python_protos": BuildPythonProtosCommand, From 3f3a4e852c3f508e38560e248c1ba68d64c4e799 Mon Sep 17 00:00:00 2001 From: lokeshrangineni <19699092+lokeshrangineni@users.noreply.github.com> Date: Wed, 4 Sep 2024 12:05:08 -0400 Subject: [PATCH 39/96] =?UTF-8?q?refactor:=20Making=20username=20and=20pas?= =?UTF-8?q?sword=20fields=20in=20OidcAuthModel=20as=20mandatory=20onl?= =?UTF-8?q?=E2=80=A6=20(#4460)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * squashed the last 23 commits and make username, password, client_Secret fields are required for oidc client configuration Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> * squashed the last 23 commits and make username, password, client_Secret fields are required for oidc client configuration Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> * Fixing the failing tests. 
Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> * Fixing the Integration test failures. Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> * Fixing the Integration test failures. Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> * Removing the unnecessary configuration not needed after recent change. Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> * Added client_secret also to calculate the oidc_client type calculation. Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> --------- Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> --- .../components/authz_manager.md | 20 ++++++-- sdk/python/feast/permissions/auth_model.py | 8 ++-- .../client/auth_client_manager_factory.py | 4 +- .../oidc_authentication_client_manager.py | 4 +- sdk/python/feast/permissions/server/utils.py | 5 +- sdk/python/feast/repo_config.py | 26 +++++++---- sdk/python/tests/conftest.py | 1 - .../feature_repos/repo_configuration.py | 11 ++--- .../universal/data_sources/file.py | 4 -- .../infra/scaffolding/test_repo_config.py | 46 ++++++++++++++----- .../tests/unit/permissions/auth/conftest.py | 5 +- .../unit/permissions/test_oidc_auth_client.py | 7 +-- 12 files changed, 90 insertions(+), 51 deletions(-) diff --git a/docs/getting-started/components/authz_manager.md b/docs/getting-started/components/authz_manager.md index 876dd84f2e..0d011fbf2b 100644 --- a/docs/getting-started/components/authz_manager.md +++ b/docs/getting-started/components/authz_manager.md @@ -61,24 +61,36 @@ For example, the access token for a client `app` of a user with `reader` role sh } ``` -An example of OIDC authorization configuration is the following: +An example of feast OIDC authorization configuration on the server side is the following: ```yaml project: my-project auth: type: oidc client_id: _CLIENT_ID__ - 
client_secret: _CLIENT_SECRET__ - realm: _REALM__ auth_discovery_url: _OIDC_SERVER_URL_/realms/master/.well-known/openid-configuration ... ``` -In case of client configuration, the following settings must be added to specify the current user: +In case of client configuration, the following settings username, password and client_secret must be added to specify the current user: ```yaml auth: + type: oidc ... username: _USERNAME_ password: _PASSWORD_ + client_secret: _CLIENT_SECRET__ +``` + +Below is an example of feast full OIDC client auth configuration: +```yaml +project: my-project +auth: + type: oidc + client_id: test_client_id + client_secret: test_client_secret + username: test_user_name + password: test_password + auth_discovery_url: http://localhost:8080/realms/master/.well-known/openid-configuration ``` ### Kubernetes RBAC Authorization diff --git a/sdk/python/feast/permissions/auth_model.py b/sdk/python/feast/permissions/auth_model.py index 28eeb951a7..a3a3b32a4b 100644 --- a/sdk/python/feast/permissions/auth_model.py +++ b/sdk/python/feast/permissions/auth_model.py @@ -1,4 +1,4 @@ -from typing import Literal, Optional +from typing import Literal from feast.repo_config import FeastConfigBaseModel @@ -10,10 +10,12 @@ class AuthConfig(FeastConfigBaseModel): class OidcAuthConfig(AuthConfig): auth_discovery_url: str client_id: str - client_secret: Optional[str] = None + + +class OidcClientAuthConfig(OidcAuthConfig): username: str password: str - realm: str = "master" + client_secret: str class NoAuthConfig(AuthConfig): diff --git a/sdk/python/feast/permissions/client/auth_client_manager_factory.py b/sdk/python/feast/permissions/client/auth_client_manager_factory.py index 4e49802047..3dff5fb45d 100644 --- a/sdk/python/feast/permissions/client/auth_client_manager_factory.py +++ b/sdk/python/feast/permissions/client/auth_client_manager_factory.py @@ -2,7 +2,7 @@ from feast.permissions.auth_model import ( AuthConfig, KubernetesAuthConfig, - OidcAuthConfig, + 
OidcClientAuthConfig, ) from feast.permissions.client.auth_client_manager import AuthenticationClientManager from feast.permissions.client.kubernetes_auth_client_manager import ( @@ -15,7 +15,7 @@ def get_auth_client_manager(auth_config: AuthConfig) -> AuthenticationClientManager: if auth_config.type == AuthType.OIDC.value: - assert isinstance(auth_config, OidcAuthConfig) + assert isinstance(auth_config, OidcClientAuthConfig) return OidcAuthClientManager(auth_config) elif auth_config.type == AuthType.KUBERNETES.value: assert isinstance(auth_config, KubernetesAuthConfig) diff --git a/sdk/python/feast/permissions/client/oidc_authentication_client_manager.py b/sdk/python/feast/permissions/client/oidc_authentication_client_manager.py index 0f99cea86f..3ba1c1b6a7 100644 --- a/sdk/python/feast/permissions/client/oidc_authentication_client_manager.py +++ b/sdk/python/feast/permissions/client/oidc_authentication_client_manager.py @@ -4,7 +4,7 @@ import jwt import requests -from feast.permissions.auth_model import OidcAuthConfig +from feast.permissions.auth_model import OidcClientAuthConfig from feast.permissions.client.auth_client_manager import AuthenticationClientManager from feast.permissions.oidc_service import OIDCDiscoveryService @@ -12,7 +12,7 @@ class OidcAuthClientManager(AuthenticationClientManager): - def __init__(self, auth_config: OidcAuthConfig): + def __init__(self, auth_config: OidcClientAuthConfig): self.auth_config = auth_config def get_token(self): diff --git a/sdk/python/feast/permissions/server/utils.py b/sdk/python/feast/permissions/server/utils.py index 34a2c0024a..ac70f187ce 100644 --- a/sdk/python/feast/permissions/server/utils.py +++ b/sdk/python/feast/permissions/server/utils.py @@ -15,7 +15,10 @@ from feast.permissions.auth.oidc_token_parser import OidcTokenParser from feast.permissions.auth.token_extractor import TokenExtractor from feast.permissions.auth.token_parser import TokenParser -from feast.permissions.auth_model import AuthConfig, 
OidcAuthConfig +from feast.permissions.auth_model import ( + AuthConfig, + OidcAuthConfig, +) from feast.permissions.security_manager import ( SecurityManager, no_security_manager, diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index 199ef31412..52372f2987 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -87,10 +87,13 @@ "local": "feast.infra.feature_servers.local_process.config.LocalFeatureServerConfig", } +ALLOWED_AUTH_TYPES = ["no_auth", "kubernetes", "oidc"] + AUTH_CONFIGS_CLASS_FOR_TYPE = { "no_auth": "feast.permissions.auth_model.NoAuthConfig", "kubernetes": "feast.permissions.auth_model.KubernetesAuthConfig", "oidc": "feast.permissions.auth_model.OidcAuthConfig", + "oidc_client": "feast.permissions.auth_model.OidcClientAuthConfig", } @@ -291,11 +294,17 @@ def offline_store(self): def auth_config(self): if not self._auth: if isinstance(self.auth, Dict): - self._auth = get_auth_config_from_type(self.auth.get("type"))( - **self.auth + is_oidc_client = ( + self.auth.get("type") == AuthType.OIDC.value + and "username" in self.auth + and "password" in self.auth + and "client_secret" in self.auth ) + self._auth = get_auth_config_from_type( + "oidc_client" if is_oidc_client else self.auth.get("type") + )(**self.auth) elif isinstance(self.auth, str): - self._auth = get_auth_config_from_type(self.auth.get("type"))() + self._auth = get_auth_config_from_type(self.auth)() elif self.auth: self._auth = self.auth @@ -336,22 +345,21 @@ def _validate_auth_config(cls, values: Any) -> Any: from feast.permissions.auth_model import AuthConfig if "auth" in values: - allowed_auth_types = AUTH_CONFIGS_CLASS_FOR_TYPE.keys() if isinstance(values["auth"], Dict): if values["auth"].get("type") is None: raise ValueError( - f"auth configuration is missing authentication type. Possible values={allowed_auth_types}" + f"auth configuration is missing authentication type. 
Possible values={ALLOWED_AUTH_TYPES}" ) - elif values["auth"]["type"] not in allowed_auth_types: + elif values["auth"]["type"] not in ALLOWED_AUTH_TYPES: raise ValueError( f'auth configuration has invalid authentication type={values["auth"]["type"]}. Possible ' - f'values={allowed_auth_types}' + f'values={ALLOWED_AUTH_TYPES}' ) elif isinstance(values["auth"], AuthConfig): - if values["auth"].type not in allowed_auth_types: + if values["auth"].type not in ALLOWED_AUTH_TYPES: raise ValueError( f'auth configuration has invalid authentication type={values["auth"].type}. Possible ' - f'values={allowed_auth_types}' + f'values={ALLOWED_AUTH_TYPES}' ) return values diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index b5b3e2d9e5..5e70da074c 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -466,7 +466,6 @@ def is_integration_test(all_markers_from_module): client_secret: feast-integration-client-secret username: reader_writer password: password - realm: master auth_discovery_url: KEYCLOAK_URL_PLACE_HOLDER/realms/master/.well-known/openid-configuration """), ], diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index 660a937f5a..73f99fb7c2 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -28,7 +28,7 @@ ) from feast.infra.feature_servers.local_process.config import LocalFeatureServerConfig from feast.permissions.action import AuthzedAction -from feast.permissions.auth_model import OidcAuthConfig +from feast.permissions.auth_model import OidcClientAuthConfig from feast.permissions.permission import Permission from feast.permissions.policy import RoleBasedPolicy from feast.repo_config import RegistryConfig, RepoConfig @@ -447,15 +447,14 @@ class OfflineServerPermissionsEnvironment(Environment): def setup(self): 
self.data_source_creator.setup(self.registry) keycloak_url = self.data_source_creator.get_keycloak_url() - auth_config = OidcAuthConfig( + auth_config = OidcClientAuthConfig( client_id="feast-integration-client", - client_secret="feast-integration-client-secret", - username="reader_writer", - password="password", - realm="master", type="oidc", auth_discovery_url=f"{keycloak_url}/realms/master/.well-known" f"/openid-configuration", + client_secret="feast-integration-client-secret", + username="reader_writer", + password="password", ) self.config = RepoConfig( registry=self.registry, diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py index 10d348c056..d8b75aca24 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py @@ -445,10 +445,6 @@ def __init__(self, project_name: str, *args, **kwargs): auth: type: oidc client_id: feast-integration-client - client_secret: feast-integration-client-secret - username: reader_writer - password: password - realm: master auth_discovery_url: {keycloak_url}/realms/master/.well-known/openid-configuration """ self.auth_config = auth_config_template.format(keycloak_url=self.keycloak_url) diff --git a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py index 5331d350e2..9dcf7e4caf 100644 --- a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py +++ b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py @@ -9,6 +9,7 @@ KubernetesAuthConfig, NoAuthConfig, OidcAuthConfig, + OidcClientAuthConfig, ) from feast.repo_config import FeastConfigError, load_repo_config @@ -213,7 +214,6 @@ def test_auth_config(): client_secret: test_client_secret username: test_user_name password: test_password - realm: master auth_discovery_url: 
http://localhost:8080/realms/master/.well-known/openid-configuration registry: "registry.db" provider: local @@ -235,7 +235,6 @@ def test_auth_config(): client_secret: test_client_secret username: test_user_name password: test_password - realm: master auth_discovery_url: http://localhost:8080/realms/master/.well-known/openid-configuration registry: "registry.db" provider: local @@ -247,7 +246,32 @@ def test_auth_config(): expect_error="invalid authentication type=not_valid_auth_type", ) - oidc_repo_config = _test_config( + oidc_server_repo_config = _test_config( + dedent( + """ + project: foo + auth: + type: oidc + client_id: test_client_id + auth_discovery_url: http://localhost:8080/realms/master/.well-known/openid-configuration + registry: "registry.db" + provider: local + online_store: + path: foo + entity_key_serialization_version: 2 + """ + ), + expect_error=None, + ) + assert oidc_server_repo_config.auth["type"] == AuthType.OIDC.value + assert isinstance(oidc_server_repo_config.auth_config, OidcAuthConfig) + assert oidc_server_repo_config.auth_config.client_id == "test_client_id" + assert ( + oidc_server_repo_config.auth_config.auth_discovery_url + == "http://localhost:8080/realms/master/.well-known/openid-configuration" + ) + + oidc_client_repo_config = _test_config( dedent( """ project: foo @@ -257,7 +281,6 @@ def test_auth_config(): client_secret: test_client_secret username: test_user_name password: test_password - realm: master auth_discovery_url: http://localhost:8080/realms/master/.well-known/openid-configuration registry: "registry.db" provider: local @@ -268,15 +291,14 @@ def test_auth_config(): ), expect_error=None, ) - assert oidc_repo_config.auth["type"] == AuthType.OIDC.value - assert isinstance(oidc_repo_config.auth_config, OidcAuthConfig) - assert oidc_repo_config.auth_config.client_id == "test_client_id" - assert oidc_repo_config.auth_config.client_secret == "test_client_secret" - assert oidc_repo_config.auth_config.username == 
"test_user_name" - assert oidc_repo_config.auth_config.password == "test_password" - assert oidc_repo_config.auth_config.realm == "master" + assert oidc_client_repo_config.auth["type"] == AuthType.OIDC.value + assert isinstance(oidc_client_repo_config.auth_config, OidcClientAuthConfig) + assert oidc_client_repo_config.auth_config.client_id == "test_client_id" + assert oidc_client_repo_config.auth_config.client_secret == "test_client_secret" + assert oidc_client_repo_config.auth_config.username == "test_user_name" + assert oidc_client_repo_config.auth_config.password == "test_password" assert ( - oidc_repo_config.auth_config.auth_discovery_url + oidc_client_repo_config.auth_config.auth_discovery_url == "http://localhost:8080/realms/master/.well-known/openid-configuration" ) diff --git a/sdk/python/tests/unit/permissions/auth/conftest.py b/sdk/python/tests/unit/permissions/auth/conftest.py index 0d6acd7fb2..ea6e2e4311 100644 --- a/sdk/python/tests/unit/permissions/auth/conftest.py +++ b/sdk/python/tests/unit/permissions/auth/conftest.py @@ -75,10 +75,7 @@ def oidc_config() -> OidcAuthConfig: return OidcAuthConfig( auth_discovery_url="https://localhost:8080/realms/master/.well-known/openid-configuration", client_id=_CLIENT_ID, - client_secret="", - username="", - password="", - realm="", + type="oidc", ) diff --git a/sdk/python/tests/unit/permissions/test_oidc_auth_client.py b/sdk/python/tests/unit/permissions/test_oidc_auth_client.py index 22ed5b6f87..68aec70fc7 100644 --- a/sdk/python/tests/unit/permissions/test_oidc_auth_client.py +++ b/sdk/python/tests/unit/permissions/test_oidc_auth_client.py @@ -5,7 +5,7 @@ from feast.permissions.auth_model import ( KubernetesAuthConfig, NoAuthConfig, - OidcAuthConfig, + OidcClientAuthConfig, ) from feast.permissions.client.http_auth_requests_wrapper import ( AuthenticatedRequestsSession, @@ -21,13 +21,14 @@ MOCKED_TOKEN_VALUE: str = "dummy_token" -def _get_dummy_oidc_auth_type() -> OidcAuthConfig: - oidc_config = 
OidcAuthConfig( +def _get_dummy_oidc_auth_type() -> OidcClientAuthConfig: + oidc_config = OidcClientAuthConfig( auth_discovery_url="http://localhost:8080/realms/master/.well-known/openid-configuration", type="oidc", username="admin_test", password="password_test", client_id="dummy_client_id", + client_secret="client_secret", ) return oidc_config From 2bd03fa4da5e76f6b29b0b54b455d5552d256838 Mon Sep 17 00:00:00 2001 From: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> Date: Thu, 5 Sep 2024 10:34:28 +0200 Subject: [PATCH 40/96] fix: Added Permission API docs (#4485) * Updated API docs with Permission types and functions Signed-off-by: Daniele Martinoli * Updated API docs with Permission types and functions Signed-off-by: Daniele Martinoli --------- Signed-off-by: Daniele Martinoli --- sdk/python/docs/index.rst | 58 +++++++++++++++++++- sdk/python/docs/source/feast.permissions.rst | 8 +++ sdk/python/docs/source/index.rst | 58 +++++++++++++++++++- 3 files changed, 122 insertions(+), 2 deletions(-) diff --git a/sdk/python/docs/index.rst b/sdk/python/docs/index.rst index 1ef6bd16c8..86354f80c7 100644 --- a/sdk/python/docs/index.rst +++ b/sdk/python/docs/index.rst @@ -453,4 +453,60 @@ Snowflake Engine :members: .. autoclass:: feast.infra.materialization.contrib.spark.spark_materialization_engine.SparkMaterializationJob - :members: \ No newline at end of file + :members: + +Permission +============================ + +.. autoclass:: feast.permissions.permission.Permission + :members: + +.. autoclass:: feast.permissions.action.AuthzedAction + :members: + +.. autoclass:: feast.permissions.policy.Policy + :members: + +.. autofunction:: feast.permissions.enforcer.enforce_policy + +Auth Config +--------------------------- + +.. autoclass:: feast.permissions.auth_model.AuthConfig + :members: + +.. autoclass:: feast.permissions.auth_model.KubernetesAuthConfig + :members: + +.. 
autoclass:: feast.permissions.auth_model.OidcAuthConfig + :members: + +Auth Manager +--------------------------- + +.. autoclass:: feast.permissions.auth.AuthManager + :members: + +.. autoclass:: feast.permissions.auth.token_parser.TokenParser + :members: + +.. autoclass:: feast.permissions.auth.token_extractor.TokenExtractor + :members: + +.. autoclass:: feast.permissions.auth.kubernetes_token_parser.KubernetesTokenParser + :members: + +.. autoclass:: feast.permissions.auth.oidc_token_parser.OidcTokenParser + :members: + +Auth Client Manager +--------------------------- + +.. autoclass:: feast.permissions.client.auth_client_manager.AuthenticationClientManager + :members: + +.. autoclass:: feast.permissions.client.kubernetes_auth_client_manager.KubernetesAuthClientManager + :members: + +.. autoclass:: feast.permissions.client.oidc_authentication_client_manager.OidcAuthClientManager + :members: diff --git a/sdk/python/docs/source/feast.permissions.rst b/sdk/python/docs/source/feast.permissions.rst index 8c33ab6273..d8731111e1 100644 --- a/sdk/python/docs/source/feast.permissions.rst +++ b/sdk/python/docs/source/feast.permissions.rst @@ -62,6 +62,14 @@ feast.permissions.matcher module :undoc-members: :show-inheritance: +feast.permissions.oidc\_service module +-------------------------------------- + +.. automodule:: feast.permissions.oidc_service + :members: + :undoc-members: + :show-inheritance: + feast.permissions.permission module ----------------------------------- diff --git a/sdk/python/docs/source/index.rst b/sdk/python/docs/source/index.rst index 1ef6bd16c8..86354f80c7 100644 --- a/sdk/python/docs/source/index.rst +++ b/sdk/python/docs/source/index.rst @@ -453,4 +453,60 @@ Snowflake Engine :members: .. autoclass:: feast.infra.materialization.contrib.spark.spark_materialization_engine.SparkMaterializationJob - :members: \ No newline at end of file + :members: + +Permission +============================ + +.. 
autoclass:: feast.permissions.permission.Permission + :members: + +.. autoclass:: feast.permissions.action.AuthzedAction + :members: + +.. autoclass:: feast.permissions.policy.Policy + :members: + +.. autofunction:: feast.permissions.enforcer.enforce_policy + +Auth Config +--------------------------- + +.. autoclass:: feast.permissions.auth_model.AuthConfig + :members: + +.. autoclass:: feast.permissions.auth_model.KubernetesAuthConfig + :members: + +.. autoclass:: feast.permissions.auth_model.OidcAuthConfig + :members: + +Auth Manager +--------------------------- + +.. autoclass:: feast.permissions.auth.AuthManager + :members: + +.. autoclass:: feast.permissions.auth.token_parser.TokenParser + :members: + +.. autoclass:: feast.permissions.auth.token_extractor.TokenExtractor + :members: + +.. autoclass:: feast.permissions.auth.kubernetes_token_parser.KubernetesTokenParser + :members: + +.. autoclass:: feast.permissions.auth.oidc_token_parser.OidcTokenParser + :members: + +Auth Client Manager +--------------------------- + +.. autoclass:: feast.permissions.client.auth_client_manager.AuthenticationClientManager + :members: + +.. autoclass:: feast.permissions.client.kubernetes_auth_client_manager.KubernetesAuthClientManager + :members: + +.. 
autoclass:: feast.permissions.client.oidc_authentication_client_manager.OidcAuthClientManager + :members: From 21187199173f4c4f5417205d99535af6be492a9a Mon Sep 17 00:00:00 2001 From: Theodor Mihalache <84387487+tmihalac@users.noreply.github.com> Date: Thu, 5 Sep 2024 14:39:47 -0400 Subject: [PATCH 41/96] fix: Added Online Store REST client errors handler (#4488) * Added Online Store rest client errors handler Signed-off-by: Theodor Mihalache * Added Online Store rest client errors handler - Small refactor to from_error_detail and FeastErrors - Fixed tests Signed-off-by: Theodor Mihalache * Added Online Store rest client errors handler - Fixed linter Signed-off-by: Theodor Mihalache --------- Signed-off-by: Theodor Mihalache --- sdk/python/feast/errors.py | 5 +- sdk/python/feast/feature_server.py | 278 ++++++++---------- .../feast/infra/online_stores/remote.py | 18 +- sdk/python/feast/rest_error_handler.py | 57 ++++ .../test_python_feature_server.py | 34 ++- .../tests/unit/test_rest_error_decorator.py | 78 +++++ 6 files changed, 299 insertions(+), 171 deletions(-) create mode 100644 sdk/python/feast/rest_error_handler.py create mode 100644 sdk/python/tests/unit/test_rest_error_decorator.py diff --git a/sdk/python/feast/errors.py b/sdk/python/feast/errors.py index d39009ae7a..fd5955fd98 100644 --- a/sdk/python/feast/errors.py +++ b/sdk/python/feast/errors.py @@ -57,7 +57,7 @@ def from_error_detail(detail: str) -> Optional["FeastError"]: module = importlib.import_module(module_name) class_reference = getattr(module, class_name) - instance = class_reference(message) + instance = class_reference.__new__(class_reference) setattr(instance, "__overridden_message__", message) return instance except Exception as e: @@ -451,6 +451,9 @@ class PushSourceNotFoundException(FeastError): def __init__(self, push_source_name: str): super().__init__(f"Unable to find push source '{push_source_name}'.") + def http_status_code(self) -> int: + return 
HttpStatusCode.HTTP_422_UNPROCESSABLE_ENTITY + class ReadOnlyRegistryException(FeastError): def __init__(self): diff --git a/sdk/python/feast/feature_server.py b/sdk/python/feast/feature_server.py index 7f24580b7a..4f8de1eef5 100644 --- a/sdk/python/feast/feature_server.py +++ b/sdk/python/feast/feature_server.py @@ -9,8 +9,9 @@ import pandas as pd import psutil from dateutil import parser -from fastapi import Depends, FastAPI, HTTPException, Request, Response, status +from fastapi import Depends, FastAPI, Request, Response, status from fastapi.logger import logger +from fastapi.responses import JSONResponse from google.protobuf.json_format import MessageToDict from prometheus_client import Gauge, start_http_server from pydantic import BaseModel @@ -19,7 +20,10 @@ from feast import proto_json, utils from feast.constants import DEFAULT_FEATURE_SERVER_REGISTRY_TTL from feast.data_source import PushMode -from feast.errors import FeatureViewNotFoundException, PushSourceNotFoundException +from feast.errors import ( + FeastError, + FeatureViewNotFoundException, +) from feast.permissions.action import WRITE, AuthzedAction from feast.permissions.security_manager import assert_permissions from feast.permissions.server.rest import inject_user_details @@ -101,147 +105,119 @@ async def lifespan(app: FastAPI): async def get_body(request: Request): return await request.body() - # TODO RBAC: complete the dependencies for the other endpoints @app.post( "/get-online-features", dependencies=[Depends(inject_user_details)], ) def get_online_features(body=Depends(get_body)): - try: - body = json.loads(body) - full_feature_names = body.get("full_feature_names", False) - entity_rows = body["entities"] - # Initialize parameters for FeatureStore.get_online_features(...) 
call - if "feature_service" in body: - feature_service = store.get_feature_service( - body["feature_service"], allow_cache=True + body = json.loads(body) + full_feature_names = body.get("full_feature_names", False) + entity_rows = body["entities"] + # Initialize parameters for FeatureStore.get_online_features(...) call + if "feature_service" in body: + feature_service = store.get_feature_service( + body["feature_service"], allow_cache=True + ) + assert_permissions( + resource=feature_service, actions=[AuthzedAction.READ_ONLINE] + ) + features = feature_service + else: + features = body["features"] + all_feature_views, all_on_demand_feature_views = ( + utils._get_feature_views_to_use( + store.registry, + store.project, + features, + allow_cache=True, + hide_dummy_entity=False, ) + ) + for feature_view in all_feature_views: assert_permissions( - resource=feature_service, actions=[AuthzedAction.READ_ONLINE] + resource=feature_view, actions=[AuthzedAction.READ_ONLINE] ) - features = feature_service - else: - features = body["features"] - all_feature_views, all_on_demand_feature_views = ( - utils._get_feature_views_to_use( - store.registry, - store.project, - features, - allow_cache=True, - hide_dummy_entity=False, - ) + for od_feature_view in all_on_demand_feature_views: + assert_permissions( + resource=od_feature_view, actions=[AuthzedAction.READ_ONLINE] ) - for feature_view in all_feature_views: - assert_permissions( - resource=feature_view, actions=[AuthzedAction.READ_ONLINE] - ) - for od_feature_view in all_on_demand_feature_views: - assert_permissions( - resource=od_feature_view, actions=[AuthzedAction.READ_ONLINE] - ) - - response_proto = store.get_online_features( - features=features, - entity_rows=entity_rows, - full_feature_names=full_feature_names, - ).proto - - # Convert the Protobuf object to JSON and return it - return MessageToDict( - response_proto, preserving_proto_field_name=True, float_precision=18 - ) - except Exception as e: - # Print the original 
exception on the server side - logger.exception(traceback.format_exc()) - # Raise HTTPException to return the error message to the client - raise HTTPException(status_code=500, detail=str(e)) + + response_proto = store.get_online_features( + features=features, + entity_rows=entity_rows, + full_feature_names=full_feature_names, + ).proto + + # Convert the Protobuf object to JSON and return it + return MessageToDict( + response_proto, preserving_proto_field_name=True, float_precision=18 + ) @app.post("/push", dependencies=[Depends(inject_user_details)]) def push(body=Depends(get_body)): - try: - request = PushFeaturesRequest(**json.loads(body)) - df = pd.DataFrame(request.df) - actions = [] - if request.to == "offline": - to = PushMode.OFFLINE - actions = [AuthzedAction.WRITE_OFFLINE] - elif request.to == "online": - to = PushMode.ONLINE - actions = [AuthzedAction.WRITE_ONLINE] - elif request.to == "online_and_offline": - to = PushMode.ONLINE_AND_OFFLINE - actions = WRITE - else: - raise ValueError( - f"{request.to} is not a supported push format. Please specify one of these ['online', 'offline', 'online_and_offline']." - ) - - from feast.data_source import PushSource + request = PushFeaturesRequest(**json.loads(body)) + df = pd.DataFrame(request.df) + actions = [] + if request.to == "offline": + to = PushMode.OFFLINE + actions = [AuthzedAction.WRITE_OFFLINE] + elif request.to == "online": + to = PushMode.ONLINE + actions = [AuthzedAction.WRITE_ONLINE] + elif request.to == "online_and_offline": + to = PushMode.ONLINE_AND_OFFLINE + actions = WRITE + else: + raise ValueError( + f"{request.to} is not a supported push format. Please specify one of these ['online', 'offline', 'online_and_offline']." 
+ ) - all_fvs = store.list_feature_views( - allow_cache=request.allow_registry_cache - ) + store.list_stream_feature_views( - allow_cache=request.allow_registry_cache + from feast.data_source import PushSource + + all_fvs = store.list_feature_views( + allow_cache=request.allow_registry_cache + ) + store.list_stream_feature_views(allow_cache=request.allow_registry_cache) + fvs_with_push_sources = { + fv + for fv in all_fvs + if ( + fv.stream_source is not None + and isinstance(fv.stream_source, PushSource) + and fv.stream_source.name == request.push_source_name ) - fvs_with_push_sources = { - fv - for fv in all_fvs - if ( - fv.stream_source is not None - and isinstance(fv.stream_source, PushSource) - and fv.stream_source.name == request.push_source_name - ) - } + } - for feature_view in fvs_with_push_sources: - assert_permissions(resource=feature_view, actions=actions) + for feature_view in fvs_with_push_sources: + assert_permissions(resource=feature_view, actions=actions) - store.push( - push_source_name=request.push_source_name, - df=df, - allow_registry_cache=request.allow_registry_cache, - to=to, - ) - except PushSourceNotFoundException as e: - # Print the original exception on the server side - logger.exception(traceback.format_exc()) - # Raise HTTPException to return the error message to the client - raise HTTPException(status_code=422, detail=str(e)) - except Exception as e: - # Print the original exception on the server side - logger.exception(traceback.format_exc()) - # Raise HTTPException to return the error message to the client - raise HTTPException(status_code=500, detail=str(e)) + store.push( + push_source_name=request.push_source_name, + df=df, + allow_registry_cache=request.allow_registry_cache, + to=to, + ) @app.post("/write-to-online-store", dependencies=[Depends(inject_user_details)]) def write_to_online_store(body=Depends(get_body)): + request = WriteToFeatureStoreRequest(**json.loads(body)) + df = pd.DataFrame(request.df) + feature_view_name = 
request.feature_view_name + allow_registry_cache = request.allow_registry_cache try: - request = WriteToFeatureStoreRequest(**json.loads(body)) - df = pd.DataFrame(request.df) - feature_view_name = request.feature_view_name - allow_registry_cache = request.allow_registry_cache - try: - feature_view = store.get_stream_feature_view( - feature_view_name, allow_registry_cache=allow_registry_cache - ) - except FeatureViewNotFoundException: - feature_view = store.get_feature_view( - feature_view_name, allow_registry_cache=allow_registry_cache - ) - - assert_permissions( - resource=feature_view, actions=[AuthzedAction.WRITE_ONLINE] + feature_view = store.get_stream_feature_view( + feature_view_name, allow_registry_cache=allow_registry_cache ) - store.write_to_online_store( - feature_view_name=feature_view_name, - df=df, - allow_registry_cache=allow_registry_cache, + except FeatureViewNotFoundException: + feature_view = store.get_feature_view( + feature_view_name, allow_registry_cache=allow_registry_cache ) - except Exception as e: - # Print the original exception on the server side - logger.exception(traceback.format_exc()) - # Raise HTTPException to return the error message to the client - raise HTTPException(status_code=500, detail=str(e)) + + assert_permissions(resource=feature_view, actions=[AuthzedAction.WRITE_ONLINE]) + store.write_to_online_store( + feature_view_name=feature_view_name, + df=df, + allow_registry_cache=allow_registry_cache, + ) @app.get("/health") def health(): @@ -249,39 +225,43 @@ def health(): @app.post("/materialize", dependencies=[Depends(inject_user_details)]) def materialize(body=Depends(get_body)): - try: - request = MaterializeRequest(**json.loads(body)) - for feature_view in request.feature_views: - assert_permissions( - resource=feature_view, actions=[AuthzedAction.WRITE_ONLINE] - ) - store.materialize( - utils.make_tzaware(parser.parse(request.start_ts)), - utils.make_tzaware(parser.parse(request.end_ts)), - request.feature_views, + 
request = MaterializeRequest(**json.loads(body)) + for feature_view in request.feature_views: + assert_permissions( + resource=feature_view, actions=[AuthzedAction.WRITE_ONLINE] ) - except Exception as e: - # Print the original exception on the server side - logger.exception(traceback.format_exc()) - # Raise HTTPException to return the error message to the client - raise HTTPException(status_code=500, detail=str(e)) + store.materialize( + utils.make_tzaware(parser.parse(request.start_ts)), + utils.make_tzaware(parser.parse(request.end_ts)), + request.feature_views, + ) @app.post("/materialize-incremental", dependencies=[Depends(inject_user_details)]) def materialize_incremental(body=Depends(get_body)): - try: - request = MaterializeIncrementalRequest(**json.loads(body)) - for feature_view in request.feature_views: - assert_permissions( - resource=feature_view, actions=[AuthzedAction.WRITE_ONLINE] - ) - store.materialize_incremental( - utils.make_tzaware(parser.parse(request.end_ts)), request.feature_views + request = MaterializeIncrementalRequest(**json.loads(body)) + for feature_view in request.feature_views: + assert_permissions( + resource=feature_view, actions=[AuthzedAction.WRITE_ONLINE] + ) + store.materialize_incremental( + utils.make_tzaware(parser.parse(request.end_ts)), request.feature_views + ) + + @app.exception_handler(Exception) + async def rest_exception_handler(request: Request, exc: Exception): + # Print the original exception on the server side + logger.exception(traceback.format_exc()) + + if isinstance(exc, FeastError): + return JSONResponse( + status_code=exc.http_status_code(), + content=exc.to_error_detail(), + ) + else: + return JSONResponse( + status_code=500, + content=str(exc), ) - except Exception as e: - # Print the original exception on the server side - logger.exception(traceback.format_exc()) - # Raise HTTPException to return the error message to the client - raise HTTPException(status_code=500, detail=str(e)) return app diff --git 
a/sdk/python/feast/infra/online_stores/remote.py b/sdk/python/feast/infra/online_stores/remote.py index 93fbcaf771..5f65d8da8b 100644 --- a/sdk/python/feast/infra/online_stores/remote.py +++ b/sdk/python/feast/infra/online_stores/remote.py @@ -16,16 +16,15 @@ from datetime import datetime from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple +import requests from pydantic import StrictStr from feast import Entity, FeatureView, RepoConfig from feast.infra.online_stores.online_store import OnlineStore -from feast.permissions.client.http_auth_requests_wrapper import ( - get_http_auth_requests_session, -) from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import FeastConfigBaseModel +from feast.rest_error_handler import rest_error_handling_decorator from feast.type_map import python_values_to_proto_values from feast.value_type import ValueType @@ -72,9 +71,7 @@ def online_read( req_body = self._construct_online_read_api_json_request( entity_keys, table, requested_features ) - response = get_http_auth_requests_session(config.auth_config).post( - f"{config.online_store.path}/get-online-features", data=req_body - ) + response = get_remote_online_features(config=config, req_body=req_body) if response.status_code == 200: logger.debug("Able to retrieve the online features from feature server.") response_json = json.loads(response.text) @@ -167,3 +164,12 @@ def teardown( entities: Sequence[Entity], ): pass + + +@rest_error_handling_decorator +def get_remote_online_features( + session: requests.Session, config: RepoConfig, req_body: str +) -> requests.Response: + return session.post( + f"{config.online_store.path}/get-online-features", data=req_body + ) diff --git a/sdk/python/feast/rest_error_handler.py b/sdk/python/feast/rest_error_handler.py new file mode 100644 index 0000000000..fc802866f9 --- /dev/null +++ 
b/sdk/python/feast/rest_error_handler.py @@ -0,0 +1,57 @@ +import logging +from functools import wraps + +import requests + +from feast import RepoConfig +from feast.errors import FeastError +from feast.permissions.client.http_auth_requests_wrapper import ( + get_http_auth_requests_session, +) + +logger = logging.getLogger(__name__) + + +def rest_error_handling_decorator(func): + @wraps(func) + def wrapper(config: RepoConfig, *args, **kwargs): + assert isinstance(config, RepoConfig) + + # Get a Session object + with get_http_auth_requests_session(config.auth_config) as session: + # Define a wrapper for session methods + def method_wrapper(method_name): + original_method = getattr(session, method_name) + + @wraps(original_method) + def wrapped_method(*args, **kwargs): + logger.debug( + f"Calling {method_name} with args: {args}, kwargs: {kwargs}" + ) + response = original_method(*args, **kwargs) + logger.debug( + f"{method_name} response status code: {response.status_code}" + ) + + try: + response.raise_for_status() + except requests.RequestException: + logger.debug(f"response.json() = {response.json()}") + mapped_error = FeastError.from_error_detail(response.json()) + logger.debug(f"mapped_error = {str(mapped_error)}") + if mapped_error is not None: + raise mapped_error + return response + + return wrapped_method + + # Enhance session methods + session.get = method_wrapper("get") # type: ignore[method-assign] + session.post = method_wrapper("post") # type: ignore[method-assign] + session.put = method_wrapper("put") # type: ignore[method-assign] + session.delete = method_wrapper("delete") # type: ignore[method-assign] + + # Pass the enhanced session object to the decorated function + return func(session, config, *args, **kwargs) + + return wrapper diff --git a/sdk/python/tests/integration/online_store/test_python_feature_server.py b/sdk/python/tests/integration/online_store/test_python_feature_server.py index 1010e73178..d08e1104eb 100644 --- 
a/sdk/python/tests/integration/online_store/test_python_feature_server.py +++ b/sdk/python/tests/integration/online_store/test_python_feature_server.py @@ -4,6 +4,7 @@ import pytest from fastapi.testclient import TestClient +from feast.errors import PushSourceNotFoundException from feast.feast_object import FeastObject from feast.feature_server import get_app from feast.utils import _utc_now @@ -90,21 +91,24 @@ def test_push_source_does_not_exist(python_fs_client): initial_temp = _get_temperatures_from_feature_server( python_fs_client, location_ids=[1] )[0] - response = python_fs_client.post( - "/push", - data=json.dumps( - { - "push_source_name": "push_source_does_not_exist", - "df": { - "location_id": [1], - "temperature": [initial_temp * 100], - "event_timestamp": [str(_utc_now())], - "created": [str(_utc_now())], - }, - } - ), - ) - assert response.status_code == 422 + with pytest.raises( + PushSourceNotFoundException, + match="Unable to find push source 'push_source_does_not_exist'", + ): + python_fs_client.post( + "/push", + data=json.dumps( + { + "push_source_name": "push_source_does_not_exist", + "df": { + "location_id": [1], + "temperature": [initial_temp * 100], + "event_timestamp": [str(_utc_now())], + "created": [str(_utc_now())], + }, + } + ), + ) def _get_temperatures_from_feature_server(client, location_ids: List[int]): diff --git a/sdk/python/tests/unit/test_rest_error_decorator.py b/sdk/python/tests/unit/test_rest_error_decorator.py new file mode 100644 index 0000000000..147ae767bd --- /dev/null +++ b/sdk/python/tests/unit/test_rest_error_decorator.py @@ -0,0 +1,78 @@ +from unittest.mock import Mock, patch + +import assertpy +import pytest +import requests + +from feast import RepoConfig +from feast.errors import PermissionNotFoundException +from feast.infra.online_stores.remote import ( + RemoteOnlineStoreConfig, + get_remote_online_features, +) + + +@pytest.fixture +def feast_exception() -> PermissionNotFoundException: + return 
PermissionNotFoundException("dummy_name", "dummy_project") + + +@pytest.fixture +def none_feast_exception() -> RuntimeError: + return RuntimeError("dummy_name", "dummy_project") + + +@patch("feast.infra.online_stores.remote.requests.sessions.Session.post") +def test_rest_error_handling_with_feast_exception( + mock_post, environment, feast_exception +): + # Create a mock response object + mock_response = Mock() + mock_response.status_code = feast_exception.http_status_code() + mock_response.json.return_value = feast_exception.to_error_detail() + mock_response.raise_for_status.side_effect = requests.exceptions.HTTPError() + + # Configure the mock to return the mock response + mock_post.return_value = mock_response + + store = environment.feature_store + online_config = RemoteOnlineStoreConfig(type="remote", path="dummy") + + with pytest.raises( + PermissionNotFoundException, + match="Permission dummy_name does not exist in project dummy_project", + ): + get_remote_online_features( + config=RepoConfig( + project="test", online_store=online_config, registry=store.registry + ), + req_body="{test:test}", + ) + + +@patch("feast.infra.online_stores.remote.requests.sessions.Session.post") +def test_rest_error_handling_with_none_feast_exception( + mock_post, environment, none_feast_exception +): + # Create a mock response object + mock_response = Mock() + mock_response.status_code = 500 + mock_response.json.return_value = str(none_feast_exception) + mock_response.raise_for_status.side_effect = requests.exceptions.HTTPError() + + # Configure the mock to return the mock response + mock_post.return_value = mock_response + + store = environment.feature_store + online_config = RemoteOnlineStoreConfig(type="remote", path="dummy") + + response = get_remote_online_features( + config=RepoConfig( + project="test", online_store=online_config, registry=store.registry + ), + req_body="{test:test}", + ) + + assertpy.assert_that(response).is_not_none() + 
assertpy.assert_that(response.status_code).is_equal_to(500) + assertpy.assert_that(response.json()).is_equal_to("('dummy_name', 'dummy_project')") From 7b250e5eff5de56f5c5da103e91051276940298a Mon Sep 17 00:00:00 2001 From: Tommy Hughes IV Date: Thu, 5 Sep 2024 21:12:15 -0500 Subject: [PATCH 42/96] =?UTF-8?q?feat:=20Add=20cli=20list/describe=20for?= =?UTF-8?q?=20SavedDatasets,=20StreamFeatureViews,=20&=20=E2=80=A6=20(#448?= =?UTF-8?q?7)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit feat: Add cli list/describe for SavedDatasets, StreamFeatureViews, & ValidationReferences Signed-off-by: Tommy Hughes --- sdk/python/feast/cli.py | 150 ++++++++++++++++++ .../offline_store/test_validation.py | 17 ++ .../registration/test_universal_cli.py | 16 ++ 3 files changed, 183 insertions(+) diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py index 0a12d1dcbc..ec90b31151 100644 --- a/sdk/python/feast/cli.py +++ b/sdk/python/feast/cli.py @@ -470,6 +470,156 @@ def on_demand_feature_view_list(ctx: click.Context, tags: list[str]): print(tabulate(table, headers=["NAME"], tablefmt="plain")) +@cli.group(name="saved-datasets") +def saved_datasets_cmd(): + """ + [Experimental] Access saved datasets + """ + pass + + +@saved_datasets_cmd.command("describe") +@click.argument("name", type=click.STRING) +@click.pass_context +def saved_datasets_describe(ctx: click.Context, name: str): + """ + [Experimental] Describe a saved dataset + """ + store = create_feature_store(ctx) + + try: + saved_dataset = store.get_saved_dataset(name) + except FeastObjectNotFoundException as e: + print(e) + exit(1) + + print( + yaml.dump( + yaml.safe_load(str(saved_dataset)), + default_flow_style=False, + sort_keys=False, + ) + ) + + +@saved_datasets_cmd.command(name="list") +@tagsOption +@click.pass_context +def saved_datasets_list(ctx: click.Context, tags: list[str]): + """ + [Experimental] List all saved datasets + """ + store = create_feature_store(ctx) + 
table = [] + tags_filter = utils.tags_list_to_dict(tags) + for saved_dataset in store.list_saved_datasets(tags=tags_filter): + table.append([saved_dataset.name]) + + from tabulate import tabulate + + print(tabulate(table, headers=["NAME"], tablefmt="plain")) + + +@cli.group(name="stream-feature-views") +def stream_feature_views_cmd(): + """ + [Experimental] Access stream feature views + """ + pass + + +@stream_feature_views_cmd.command("describe") +@click.argument("name", type=click.STRING) +@click.pass_context +def stream_feature_views_describe(ctx: click.Context, name: str): + """ + [Experimental] Describe a stream feature view + """ + store = create_feature_store(ctx) + + try: + stream_feature_view = store.get_stream_feature_view(name) + except FeastObjectNotFoundException as e: + print(e) + exit(1) + + print( + yaml.dump( + yaml.safe_load(str(stream_feature_view)), + default_flow_style=False, + sort_keys=False, + ) + ) + + +@stream_feature_views_cmd.command(name="list") +@tagsOption +@click.pass_context +def stream_feature_views_list(ctx: click.Context, tags: list[str]): + """ + [Experimental] List all stream feature views + """ + store = create_feature_store(ctx) + table = [] + tags_filter = utils.tags_list_to_dict(tags) + for stream_feature_view in store.list_stream_feature_views(tags=tags_filter): + table.append([stream_feature_view.name]) + + from tabulate import tabulate + + print(tabulate(table, headers=["NAME"], tablefmt="plain")) + + +@cli.group(name="validation-references") +def validation_references_cmd(): + """ + [Experimental] Access validation references + """ + pass + + +@validation_references_cmd.command("describe") +@click.argument("name", type=click.STRING) +@click.pass_context +def validation_references_describe(ctx: click.Context, name: str): + """ + [Experimental] Describe a validation reference + """ + store = create_feature_store(ctx) + + try: + validation_reference = store.get_validation_reference(name) + except 
FeastObjectNotFoundException as e: + print(e) + exit(1) + + print( + yaml.dump( + yaml.safe_load(str(validation_reference)), + default_flow_style=False, + sort_keys=False, + ) + ) + + +@validation_references_cmd.command(name="list") +@tagsOption +@click.pass_context +def validation_references_list(ctx: click.Context, tags: list[str]): + """ + [Experimental] List all validation references + """ + store = create_feature_store(ctx) + table = [] + tags_filter = utils.tags_list_to_dict(tags) + for validation_reference in store.list_validation_references(tags=tags_filter): + table.append([validation_reference.name]) + + from tabulate import tabulate + + print(tabulate(table, headers=["NAME"], tablefmt="plain")) + + @cli.command("plan", cls=NoOptionDefaultFormat) @click.option( "--skip-source-validation", diff --git a/sdk/python/tests/integration/offline_store/test_validation.py b/sdk/python/tests/integration/offline_store/test_validation.py index 6f0496e8c8..52d83ab8d8 100644 --- a/sdk/python/tests/integration/offline_store/test_validation.py +++ b/sdk/python/tests/integration/offline_store/test_validation.py @@ -305,6 +305,23 @@ def test_e2e_validation_via_cli(environment, universal_data_sources): assert p.returncode == 0, p.stderr.decode() assert "Validation successful" in p.stdout.decode(), p.stderr.decode() + p = runner.run( + ["saved-datasets", "describe", saved_dataset.name], cwd=local_repo.repo_path + ) + assert p.returncode == 0, p.stderr.decode() + + p = runner.run( + ["validation-references", "describe", reference.name], + cwd=local_repo.repo_path, + ) + assert p.returncode == 0, p.stderr.decode() + + p = runner.run( + ["feature-services", "describe", feature_service.name], + cwd=local_repo.repo_path, + ) + assert p.returncode == 0, p.stderr.decode() + # make sure second validation will use cached profile shutil.rmtree(saved_dataset.storage.file_options.uri) diff --git a/sdk/python/tests/integration/registration/test_universal_cli.py 
b/sdk/python/tests/integration/registration/test_universal_cli.py index 9e02ded4e4..5c238da24d 100644 --- a/sdk/python/tests/integration/registration/test_universal_cli.py +++ b/sdk/python/tests/integration/registration/test_universal_cli.py @@ -63,6 +63,12 @@ def test_universal_cli(): assertpy.assert_that(result.returncode).is_equal_to(0) result = runner.run(["permissions", "list"], cwd=repo_path) assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["validation-references", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["stream-feature-views", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["saved-datasets", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) # entity & feature view describe commands should succeed when objects exist result = runner.run(["entities", "describe", "driver"], cwd=repo_path) @@ -95,6 +101,16 @@ def test_universal_cli(): assertpy.assert_that(result.returncode).is_equal_to(1) result = runner.run(["permissions", "describe", "foo"], cwd=repo_path) assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run( + ["validation-references", "describe", "foo"], cwd=repo_path + ) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run( + ["stream-feature-views", "describe", "foo"], cwd=repo_path + ) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["saved-datasets", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) # Doing another apply should be a no op, and should not cause errors result = runner.run(["apply"], cwd=repo_path) From 1015618f4828ab00f733971791a7b76a6c099189 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Sep 2024 22:12:35 -0400 Subject: [PATCH 43/96] chore: Bump cryptography from 42.0.8 to 
43.0.1 in /sdk/python/requirements (#4483) chore: Bump cryptography in /sdk/python/requirements Bumps [cryptography](https://github.com/pyca/cryptography) from 42.0.8 to 43.0.1. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/42.0.8...43.0.1) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.11-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 6970dd2aed..bfe855a2d8 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -115,7 +115,7 @@ comm==0.2.2 # ipywidgets coverage[toml]==7.5.4 # via pytest-cov -cryptography==42.0.8 +cryptography==43.0.1 # via # azure-identity # azure-storage-blob diff --git a/sdk/python/requirements/py3.11-ci-requirements.txt b/sdk/python/requirements/py3.11-ci-requirements.txt index 2d7a5b252e..6a097526d7 100644 --- a/sdk/python/requirements/py3.11-ci-requirements.txt +++ b/sdk/python/requirements/py3.11-ci-requirements.txt @@ -111,7 +111,7 @@ comm==0.2.2 # ipywidgets coverage[toml]==7.5.4 # via pytest-cov -cryptography==42.0.8 +cryptography==43.0.1 # via # azure-identity # azure-storage-blob diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 62f9280fe5..f32f6790d3 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -115,7 +115,7 @@ comm==0.2.2 # ipywidgets coverage[toml]==7.5.4 # via pytest-cov 
-cryptography==42.0.8 +cryptography==43.0.1 # via # azure-identity # azure-storage-blob From 3c4745c64ddbc589864d81572fe720dd9c0aaddc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Sep 2024 22:15:48 -0400 Subject: [PATCH 44/96] chore: Bump webpack from 5.76.1 to 5.94.0 in /sdk/python/feast/ui (#4469) Bumps [webpack](https://github.com/webpack/webpack) from 5.76.1 to 5.94.0. - [Release notes](https://github.com/webpack/webpack/releases) - [Commits](https://github.com/webpack/webpack/compare/v5.76.1...v5.94.0) --- updated-dependencies: - dependency-name: webpack dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/feast/ui/yarn.lock | 428 ++++++++++++++++++++++------------ 1 file changed, 273 insertions(+), 155 deletions(-) diff --git a/sdk/python/feast/ui/yarn.lock b/sdk/python/feast/ui/yarn.lock index cd1913bbb1..452b6f9f31 100644 --- a/sdk/python/feast/ui/yarn.lock +++ b/sdk/python/feast/ui/yarn.lock @@ -1743,11 +1743,25 @@ "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping" "^0.3.9" +"@jridgewell/gen-mapping@^0.3.5": + version "0.3.5" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz#dcce6aff74bdf6dad1a95802b69b04a2fcb1fb36" + integrity sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg== + dependencies: + "@jridgewell/set-array" "^1.2.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.24" + "@jridgewell/resolve-uri@3.1.0", "@jridgewell/resolve-uri@^3.0.3": version "3.1.0" resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.2" + resolved 
"https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6" + integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== + "@jridgewell/set-array@^1.0.0": version "1.1.1" resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.1.tgz#36a6acc93987adcf0ba50c66908bd0b70de8afea" @@ -1758,6 +1772,11 @@ resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== +"@jridgewell/set-array@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.2.1.tgz#558fb6472ed16a4c850b889530e6b36438c49280" + integrity sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A== + "@jridgewell/source-map@^0.3.2": version "0.3.2" resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" @@ -1766,11 +1785,24 @@ "@jridgewell/gen-mapping" "^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" +"@jridgewell/source-map@^0.3.3": + version "0.3.6" + resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.6.tgz#9d71ca886e32502eb9362c9a74a46787c36df81a" + integrity sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ== + dependencies: + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.25" + "@jridgewell/sourcemap-codec@1.4.14", "@jridgewell/sourcemap-codec@^1.4.10": version "1.4.14" resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== +"@jridgewell/sourcemap-codec@^1.4.14": + version "1.5.0" + 
resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz#3188bcb273a414b0d215fd22a58540b989b9409a" + integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ== + "@jridgewell/trace-mapping@^0.3.17": version "0.3.18" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz#25783b2086daf6ff1dcb53c9249ae480e4dd4cd6" @@ -1779,6 +1811,14 @@ "@jridgewell/resolve-uri" "3.1.0" "@jridgewell/sourcemap-codec" "1.4.14" +"@jridgewell/trace-mapping@^0.3.20", "@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25": + version "0.3.25" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0" + integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + "@jridgewell/trace-mapping@^0.3.9": version "0.3.14" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed" @@ -2400,22 +2440,6 @@ "@types/d3-transition" "*" "@types/d3-zoom" "*" -"@types/eslint-scope@^3.7.3": - version "3.7.3" - resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.3.tgz#125b88504b61e3c8bc6f870882003253005c3224" - integrity sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g== - dependencies: - "@types/eslint" "*" - "@types/estree" "*" - -"@types/eslint@*": - version "8.4.2" - resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.4.2.tgz#48f2ac58ab9c631cb68845c3d956b28f79fad575" - integrity sha512-Z1nseZON+GEnFjJc04sv4NSALGjhFwy6K0HXt7qsn5ArfAKtb63dXNJHf+1YW6IpOIYRBGUbu3GwJdj8DGnCjA== - dependencies: - "@types/estree" "*" - "@types/json-schema" "*" - "@types/eslint@^7.28.2": version "7.29.0" resolved 
"https://registry.yarnpkg.com/@types/eslint/-/eslint-7.29.0.tgz#e56ddc8e542815272720bb0b4ccc2aff9c3e1c78" @@ -2424,7 +2448,7 @@ "@types/estree" "*" "@types/json-schema" "*" -"@types/estree@*", "@types/estree@^0.0.51": +"@types/estree@*": version "0.0.51" resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40" integrity sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ== @@ -2434,6 +2458,11 @@ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== +"@types/estree@^1.0.5": + version "1.0.5" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.5.tgz#a6ce3e556e00fd9895dd872dd172ad0d4bd687f4" + integrity sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw== + "@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": version "4.17.28" resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.28.tgz#c47def9f34ec81dc6328d0b1b5303d1ec98d86b8" @@ -2869,125 +2898,125 @@ "@typescript-eslint/types" "5.23.0" eslint-visitor-keys "^3.0.0" -"@webassemblyjs/ast@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" - integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== +"@webassemblyjs/ast@1.12.1", "@webassemblyjs/ast@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.12.1.tgz#bb16a0e8b1914f979f45864c23819cc3e3f0d4bb" + integrity sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg== dependencies: - "@webassemblyjs/helper-numbers" "1.11.1" - 
"@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/helper-numbers" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" -"@webassemblyjs/floating-point-hex-parser@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" - integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== +"@webassemblyjs/floating-point-hex-parser@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz#dacbcb95aff135c8260f77fa3b4c5fea600a6431" + integrity sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw== -"@webassemblyjs/helper-api-error@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" - integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== +"@webassemblyjs/helper-api-error@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768" + integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q== -"@webassemblyjs/helper-buffer@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" - integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== +"@webassemblyjs/helper-buffer@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz#6df20d272ea5439bf20ab3492b7fb70e9bfcb3f6" + integrity 
sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw== -"@webassemblyjs/helper-numbers@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" - integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== +"@webassemblyjs/helper-numbers@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz#cbce5e7e0c1bd32cf4905ae444ef64cea919f1b5" + integrity sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g== dependencies: - "@webassemblyjs/floating-point-hex-parser" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" + "@webassemblyjs/floating-point-hex-parser" "1.11.6" + "@webassemblyjs/helper-api-error" "1.11.6" "@xtuc/long" "4.2.2" -"@webassemblyjs/helper-wasm-bytecode@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" - integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== +"@webassemblyjs/helper-wasm-bytecode@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9" + integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA== -"@webassemblyjs/helper-wasm-section@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" - integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== +"@webassemblyjs/helper-wasm-section@1.12.1": + version "1.12.1" + 
resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz#3da623233ae1a60409b509a52ade9bc22a37f7bf" + integrity sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g== dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/wasm-gen" "1.12.1" -"@webassemblyjs/ieee754@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" - integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== +"@webassemblyjs/ieee754@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz#bb665c91d0b14fffceb0e38298c329af043c6e3a" + integrity sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg== dependencies: "@xtuc/ieee754" "^1.2.0" -"@webassemblyjs/leb128@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" - integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== +"@webassemblyjs/leb128@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.6.tgz#70e60e5e82f9ac81118bc25381a0b283893240d7" + integrity sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ== dependencies: "@xtuc/long" "4.2.2" -"@webassemblyjs/utf8@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" - integrity 
sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== - -"@webassemblyjs/wasm-edit@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" - integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/helper-wasm-section" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-opt" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - "@webassemblyjs/wast-printer" "1.11.1" - -"@webassemblyjs/wasm-gen@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" - integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" "1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wasm-opt@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" - integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - -"@webassemblyjs/wasm-parser@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" - integrity sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== - 
dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" "1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wast-printer@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" - integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== - dependencies: - "@webassemblyjs/ast" "1.11.1" +"@webassemblyjs/utf8@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a" + integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA== + +"@webassemblyjs/wasm-edit@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz#9f9f3ff52a14c980939be0ef9d5df9ebc678ae3b" + integrity sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/helper-wasm-section" "1.12.1" + "@webassemblyjs/wasm-gen" "1.12.1" + "@webassemblyjs/wasm-opt" "1.12.1" + "@webassemblyjs/wasm-parser" "1.12.1" + "@webassemblyjs/wast-printer" "1.12.1" + +"@webassemblyjs/wasm-gen@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz#a6520601da1b5700448273666a71ad0a45d78547" + integrity sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/ieee754" "1.11.6" + "@webassemblyjs/leb128" "1.11.6" + "@webassemblyjs/utf8" 
"1.11.6" + +"@webassemblyjs/wasm-opt@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz#9e6e81475dfcfb62dab574ac2dda38226c232bc5" + integrity sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/wasm-gen" "1.12.1" + "@webassemblyjs/wasm-parser" "1.12.1" + +"@webassemblyjs/wasm-parser@1.12.1", "@webassemblyjs/wasm-parser@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz#c47acb90e6f083391e3fa61d113650eea1e95937" + integrity sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-api-error" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/ieee754" "1.11.6" + "@webassemblyjs/leb128" "1.11.6" + "@webassemblyjs/utf8" "1.11.6" + +"@webassemblyjs/wast-printer@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz#bcecf661d7d1abdaf989d8341a4833e33e2b31ac" + integrity sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA== + dependencies: + "@webassemblyjs/ast" "1.12.1" "@xtuc/long" "4.2.2" "@xtuc/ieee754@^1.2.0": @@ -3021,10 +3050,10 @@ acorn-globals@^6.0.0: acorn "^7.1.1" acorn-walk "^7.1.1" -acorn-import-assertions@^1.7.6: - version "1.8.0" - resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" - integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== +acorn-import-attributes@^1.9.5: + version "1.9.5" + resolved 
"https://registry.yarnpkg.com/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz#7eb1557b1ba05ef18b5ed0ec67591bfab04688ef" + integrity sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ== acorn-jsx@^5.3.2: version "5.3.2" @@ -3055,6 +3084,11 @@ acorn@^8.2.4, acorn@^8.5.0, acorn@^8.7.1: resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30" integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A== +acorn@^8.8.2: + version "8.12.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.12.1.tgz#71616bdccbe25e27a54439e0046e89ca76df2248" + integrity sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg== + address@^1.0.1, address@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/address/-/address-1.2.0.tgz#d352a62c92fee90f89a693eccd2a8b2139ab02d9" @@ -3585,7 +3619,7 @@ browser-process-hrtime@^1.0.0: resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== -browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4.18.1, browserslist@^4.20.2, browserslist@^4.20.3: +browserslist@^4.0.0, browserslist@^4.16.6, browserslist@^4.18.1, browserslist@^4.20.2, browserslist@^4.20.3: version "4.20.3" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.20.3.tgz#eb7572f49ec430e054f56d52ff0ebe9be915f8bf" integrity sha512-NBhymBQl1zM0Y5dQT/O+xiLP9/rzOIQdKM/eMJBAq7yBgaB6krIYLGejrwVYnSHZdqjscB1SPuAjHwxjvN6Wdg== @@ -3596,6 +3630,16 @@ browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4 node-releases "^2.0.3" picocolors "^1.0.0" +browserslist@^4.21.10: + version "4.23.3" + resolved 
"https://registry.yarnpkg.com/browserslist/-/browserslist-4.23.3.tgz#debb029d3c93ebc97ffbc8d9cbb03403e227c800" + integrity sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA== + dependencies: + caniuse-lite "^1.0.30001646" + electron-to-chromium "^1.5.4" + node-releases "^2.0.18" + update-browserslist-db "^1.1.0" + bser@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" @@ -3679,6 +3723,11 @@ caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001332, caniuse-lite@^1.0.30001335: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001339.tgz#f9aece4ea8156071613b27791547ba0b33f176cf" integrity sha512-Es8PiVqCe+uXdms0Gu5xP5PF2bxLR7OBp3wUzUnuO7OHzhOfCyg3hdiGWVPVxhiuniOzng+hTc1u3fEQ0TlkSQ== +caniuse-lite@^1.0.30001646: + version "1.0.30001653" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001653.tgz#b8af452f8f33b1c77f122780a4aecebea0caca56" + integrity sha512-XGWQVB8wFQ2+9NZwZ10GxTYC5hk0Fa+q8cSkr0tgvMhYhMHP/QC+WTgrePMDBWiWc/pV+1ik82Al20XOK25Gcw== + case-sensitive-paths-webpack-plugin@^2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" @@ -4823,6 +4872,11 @@ electron-to-chromium@^1.4.118: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.137.tgz#186180a45617283f1c012284458510cd99d6787f" integrity sha512-0Rcpald12O11BUogJagX3HsCN3FE83DSqWjgXoHo5a72KUKMSfI39XBgJpgNNxS9fuGzytaFjE06kZkiVFy2qA== +electron-to-chromium@^1.5.4: + version "1.5.13" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.13.tgz#1abf0410c5344b2b829b7247e031f02810d442e6" + integrity sha512-lbBcvtIJ4J6sS4tb5TLp1b4LyfCdMkwStzXPyAgVgTRAsep4bvrAGaBOP7ZJtQMNJpSQ9SqG4brWOroNaQtm7Q== + emittery@^0.10.2: version "0.10.2" resolved 
"https://registry.yarnpkg.com/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" @@ -4858,10 +4912,10 @@ encodeurl@~1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= -enhanced-resolve@^5.10.0: - version "5.12.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.12.0.tgz#300e1c90228f5b570c4d35babf263f6da7155634" - integrity sha512-QHTXI/sZQmko1cbDoNAa3mJ5qhWUUNAq3vR0/YiD379fWQrcfuoX1+HW2S0MTt7XmoPLapdaDKUtelUSPic7hQ== +enhanced-resolve@^5.17.1: + version "5.17.1" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz#67bfbbcc2f81d511be77d686a90267ef7f898a15" + integrity sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg== dependencies: graceful-fs "^4.2.4" tapable "^2.2.0" @@ -4914,10 +4968,10 @@ es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19 string.prototype.trimstart "^1.0.5" unbox-primitive "^1.0.2" -es-module-lexer@^0.9.0: - version "0.9.3" - resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" - integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== +es-module-lexer@^1.2.1: + version "1.5.4" + resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.5.4.tgz#a8efec3a3da991e60efa6b633a7cad6ab8d26b78" + integrity sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw== es-shim-unscopables@^1.0.0: version "1.0.0" @@ -4940,6 +4994,11 @@ escalade@^3.1.1: resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== +escalade@^3.1.2: + version "3.2.0" + resolved 
"https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5" + integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== + escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" @@ -5736,6 +5795,11 @@ graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== +graceful-fs@^4.2.11: + version "4.2.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + gzip-size@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462" @@ -7536,6 +7600,11 @@ node-int64@^0.4.0: resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" integrity sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs= +node-releases@^2.0.18: + version "2.0.18" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.18.tgz#f010e8d35e2fe8d6b2944f03f70213ecedc4ca3f" + integrity sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g== + node-releases@^2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.4.tgz#f38252370c43854dc48aa431c766c6c398f40476" @@ -7914,6 +7983,11 @@ picocolors@^1.0.0: resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" integrity 
sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== +picocolors@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.1.tgz#a8ad579b571952f0e5d25892de5445bcfe25aaa1" + integrity sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew== + picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" @@ -9397,7 +9471,7 @@ schema-utils@^2.6.5: ajv "^6.12.4" ajv-keywords "^3.5.2" -schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: +schema-utils@^3.0.0, schema-utils@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== @@ -9406,6 +9480,15 @@ schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: ajv "^6.12.5" ajv-keywords "^3.5.2" +schema-utils@^3.2.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.3.0.tgz#f50a88877c3c01652a15b622ae9e9795df7a60fe" + integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + schema-utils@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7" @@ -9483,6 +9566,13 @@ serialize-javascript@^6.0.0: dependencies: randombytes "^2.1.0" +serialize-javascript@^6.0.1: + version "6.0.2" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.2.tgz#defa1e055c83bf6d59ea805d8da862254eb6a6c2" + integrity 
sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g== + dependencies: + randombytes "^2.1.0" + serialize-query-params@^1.3.5: version "1.3.6" resolved "https://registry.yarnpkg.com/serialize-query-params/-/serialize-query-params-1.3.6.tgz#5dd5225db85ce747fe6fbc4897628504faafec6d" @@ -10040,7 +10130,7 @@ terminal-link@^2.0.0: ansi-escapes "^4.2.1" supports-hyperlinks "^2.0.0" -terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: +terser-webpack-plugin@^5.2.5: version "5.3.1" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.1.tgz#0320dcc270ad5372c1e8993fabbd927929773e54" integrity sha512-GvlZdT6wPQKbDNW/GDQzZFg/j4vKU96yl2q6mcUkzKOgW4gwf1Z8cZToUCrz31XHlPWH8MVb1r2tFtdDtTGJ7g== @@ -10051,6 +10141,17 @@ terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: source-map "^0.6.1" terser "^5.7.2" +terser-webpack-plugin@^5.3.10: + version "5.3.10" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz#904f4c9193c6fd2a03f693a2150c62a92f40d199" + integrity sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w== + dependencies: + "@jridgewell/trace-mapping" "^0.3.20" + jest-worker "^27.4.5" + schema-utils "^3.1.1" + serialize-javascript "^6.0.1" + terser "^5.26.0" + terser@^5.0.0, terser@^5.10.0, terser@^5.7.2: version "5.14.2" resolved "https://registry.yarnpkg.com/terser/-/terser-5.14.2.tgz#9ac9f22b06994d736174f4091aa368db896f1c10" @@ -10061,6 +10162,16 @@ terser@^5.0.0, terser@^5.10.0, terser@^5.7.2: commander "^2.20.0" source-map-support "~0.5.20" +terser@^5.26.0: + version "5.31.6" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.31.6.tgz#c63858a0f0703988d0266a82fcbf2d7ba76422b1" + integrity sha512-PQ4DAriWzKj+qgehQ7LK5bQqCFNMmlhjR2PFFLuqGCpuCAauxemVBWwWOxo3UIwWQx8+Pr61Df++r76wDmkQBg== + dependencies: + "@jridgewell/source-map" "^0.3.3" + acorn "^8.8.2" + commander "^2.20.0" + 
source-map-support "~0.5.20" + test-exclude@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" @@ -10417,6 +10528,14 @@ upath@^1.2.0: resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== +update-browserslist-db@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz#7ca61c0d8650766090728046e416a8cde682859e" + integrity sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ== + dependencies: + escalade "^3.1.2" + picocolors "^1.0.1" + uri-js@^4.2.2: version "4.4.1" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" @@ -10560,10 +10679,10 @@ walker@^1.0.7: dependencies: makeerror "1.0.12" -watchpack@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" - integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== +watchpack@^2.4.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.2.tgz#2feeaed67412e7c33184e5a79ca738fbd38564da" + integrity sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw== dependencies: glob-to-regexp "^0.4.1" graceful-fs "^4.1.2" @@ -10675,33 +10794,32 @@ webpack-sources@^3.2.3: integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== webpack@^5.64.4: - version "5.76.1" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.76.1.tgz#7773de017e988bccb0f13c7d75ec245f377d295c" - integrity sha512-4+YIK4Abzv8172/SGqObnUjaIHjLEuUasz9EwQj/9xmPPkYJy2Mh03Q/lJfSD3YLzbxy5FeTq5Uw0323Oh6SJQ== 
- dependencies: - "@types/eslint-scope" "^3.7.3" - "@types/estree" "^0.0.51" - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/wasm-edit" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" + version "5.94.0" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.94.0.tgz#77a6089c716e7ab90c1c67574a28da518a20970f" + integrity sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg== + dependencies: + "@types/estree" "^1.0.5" + "@webassemblyjs/ast" "^1.12.1" + "@webassemblyjs/wasm-edit" "^1.12.1" + "@webassemblyjs/wasm-parser" "^1.12.1" acorn "^8.7.1" - acorn-import-assertions "^1.7.6" - browserslist "^4.14.5" + acorn-import-attributes "^1.9.5" + browserslist "^4.21.10" chrome-trace-event "^1.0.2" - enhanced-resolve "^5.10.0" - es-module-lexer "^0.9.0" + enhanced-resolve "^5.17.1" + es-module-lexer "^1.2.1" eslint-scope "5.1.1" events "^3.2.0" glob-to-regexp "^0.4.1" - graceful-fs "^4.2.9" + graceful-fs "^4.2.11" json-parse-even-better-errors "^2.3.1" loader-runner "^4.2.0" mime-types "^2.1.27" neo-async "^2.6.2" - schema-utils "^3.1.0" + schema-utils "^3.2.0" tapable "^2.1.1" - terser-webpack-plugin "^5.1.3" - watchpack "^2.4.0" + terser-webpack-plugin "^5.3.10" + watchpack "^2.4.1" webpack-sources "^3.2.3" websocket-driver@>=0.5.1, websocket-driver@^0.7.4: From c28bee5b37052c6eefacf5417d3232161690f25a Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Fri, 6 Sep 2024 06:17:19 +0400 Subject: [PATCH 45/96] chore: Cleanup CODEOWNERS (#4477) Signed-off-by: tokoko --- CODEOWNERS | 36 ++++-------------------------------- 1 file changed, 4 insertions(+), 32 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index 18914d9f5d..75ede8b6aa 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -2,13 +2,13 @@ # for more info about CODEOWNERS file # Core Interfaces -/sdk/python/feast/infra/offline_stores/offline_store.py @feast-dev/maintainers @sfc-gh-madkins -/sdk/python/feast/infra/online_stores/online_store.py @feast-dev/maintainers 
@DvirDukhan -/sdk/python/feast/infra/materialization_engine/batch_materialization_engine.py @feast-dev/maintainers @whoahbot @sfc-gh-madkins +/sdk/python/feast/infra/offline_stores/offline_store.py @feast-dev/maintainers +/sdk/python/feast/infra/online_stores/online_store.py @feast-dev/maintainers +/sdk/python/feast/infra/materialization_engine/batch_materialization_engine.py @feast-dev/maintainers # ==== Offline Stores ==== # Core utils -/sdk/python/feast/infra/offline_stores/offline_utils.py @feast-dev/maintainers @sfc-gh-madkins +/sdk/python/feast/infra/offline_stores/offline_utils.py @feast-dev/maintainers # Offline interfaces /sdk/python/feast/infra/offline_stores/offline_store.py @feast-dev/maintainers @@ -18,38 +18,10 @@ /sdk/python/feast/infra/offline_stores/bigquery_source.py @sudohainguyen /sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py @sudohainguyen -# Snowflake -/sdk/python/feast/infra/offline_stores/snowflake* @sfc-gh-madkins - -# Athena (contrib) -/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/ @toping4445 - -# Azure SQL (contrib) -/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/ @kevjumba - -# Spark (contrib) -/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/ @niklasvm @kevjumba - # ==== Online Stores ==== # HBase /sdk/python/feast/infra/online_stores/hbase.py @sudohainguyen /sdk/python/feast/infra/online_stores/contrib/hbase_online_store @sudohainguyen -# Redis -/sdk/python/feast/infra/online_stores/redis.py @DvirDukhan -/java/feast/serving/connectors/redis/ @DvirDukhan - -# Snowflake -/sdk/python/feast/infra/online_stores/snowflake.py @sfc-gh-madkins - -# Cassandra (contrib) -/sdk/python/feast/infra/online_stores/cassandra_online_store/ @hemidactylus - # ==== Batch Materialization Engines ==== - -# Snowflake -/sdk/python/feast/infra/materialization/snowflake* @sfc-gh-madkins - -# AWS Lambda -/sdk/python/feast/infra/materialization/contrib/aws_lambda/ @achals 
From 4a6b663f80bc91d6de35ed2ec428d34811d17a18 Mon Sep 17 00:00:00 2001 From: Bhargav Dodla <13788369+EXPEbdodla@users.noreply.github.com> Date: Thu, 5 Sep 2024 23:02:01 -0700 Subject: [PATCH 46/96] feat: Added Project object to Feast Objects (#4475) * feat: Added Project object to Feast Objects Signed-off-by: Bhargav Dodla Signed-off-by: Bhargav Dodla * fix: Extend FeastError and fixed integration tests Signed-off-by: Bhargav Dodla * fix: Small optimization to test_modify_feature_views_success test Signed-off-by: Bhargav Dodla * fix: Added Project object to template and quick start Signed-off-by: Bhargav Dodla --------- Signed-off-by: Bhargav Dodla Co-authored-by: Bhargav Dodla --- docs/getting-started/quickstart.md | 5 + protos/feast/core/Permission.proto | 1 + protos/feast/core/Project.proto | 52 ++++ protos/feast/core/Registry.proto | 6 +- protos/feast/registry/RegistryServer.proto | 33 ++ sdk/python/feast/__init__.py | 2 + sdk/python/feast/cli.py | 73 +++++ sdk/python/feast/diff/registry_diff.py | 6 + sdk/python/feast/errors.py | 10 + sdk/python/feast/feast_object.py | 5 + sdk/python/feast/feature_store.py | 58 ++-- sdk/python/feast/feature_view.py | 2 +- .../feast/infra/online_stores/remote.py | 2 +- .../feast/infra/registry/base_registry.py | 69 +++++ .../feast/infra/registry/caching_registry.py | 45 ++- .../infra/registry/proto_registry_utils.py | 28 +- sdk/python/feast/infra/registry/registry.py | 286 ++++++++++++------ sdk/python/feast/infra/registry/remote.py | 82 ++++- sdk/python/feast/infra/registry/snowflake.py | 244 ++++++++++++--- sdk/python/feast/infra/registry/sql.py | 279 +++++++++++++---- .../registry/snowflake_table_creation.sql | 8 + sdk/python/feast/permissions/permission.py | 1 + .../feast/permissions/security_manager.py | 21 +- sdk/python/feast/project.py | 175 +++++++++++ sdk/python/feast/registry_server.py | 53 ++++ sdk/python/feast/repo_config.py | 10 +- sdk/python/feast/repo_contents.py | 3 + sdk/python/feast/repo_operations.py | 97 
++++-- sdk/python/feast/templates/local/bootstrap.py | 2 + .../local/feature_repo/example_repo.py | 4 + sdk/python/tests/conftest.py | 26 +- .../example_feature_repo_with_project_1.py | 151 +++++++++ .../online_store/test_remote_online_store.py | 3 - .../registration/test_universal_cli.py | 145 ++++++++- .../registration/test_universal_registry.py | 257 +++++++++++++++- .../tests/unit/permissions/auth/conftest.py | 2 + .../auth/server/test_auth_registry_server.py | 13 +- .../permissions/auth/server/test_utils.py | 8 + .../tests/unit/test_on_demand_feature_view.py | 8 +- sdk/python/tests/unit/test_project.py | 122 ++++++++ 40 files changed, 2064 insertions(+), 333 deletions(-) create mode 100644 protos/feast/core/Project.proto create mode 100644 sdk/python/feast/project.py create mode 100644 sdk/python/tests/example_repos/example_feature_repo_with_project_1.py create mode 100644 sdk/python/tests/unit/test_project.py diff --git a/docs/getting-started/quickstart.md b/docs/getting-started/quickstart.md index ffc01c9d6e..7169989e7e 100644 --- a/docs/getting-started/quickstart.md +++ b/docs/getting-started/quickstart.md @@ -103,12 +103,17 @@ from feast import ( FeatureView, Field, FileSource, + Project, PushSource, RequestSource, ) from feast.on_demand_feature_view import on_demand_feature_view from feast.types import Float32, Float64, Int64 +# Define a project for the feature repo +project = Project(name="my_project", description="A project for driver statistics") + + # Define an entity for the driver. You can think of an entity as a primary key used to # fetch features. 
driver = Entity(name="driver", join_keys=["driver_id"]) diff --git a/protos/feast/core/Permission.proto b/protos/feast/core/Permission.proto index 57958d3d81..400f70a11b 100644 --- a/protos/feast/core/Permission.proto +++ b/protos/feast/core/Permission.proto @@ -45,6 +45,7 @@ message PermissionSpec { VALIDATION_REFERENCE = 7; SAVED_DATASET = 8; PERMISSION = 9; + PROJECT = 10; } repeated Type types = 3; diff --git a/protos/feast/core/Project.proto b/protos/feast/core/Project.proto new file mode 100644 index 0000000000..08e8b38f23 --- /dev/null +++ b/protos/feast/core/Project.proto @@ -0,0 +1,52 @@ +// +// * Copyright 2020 The Feast Authors +// * +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * +// * https://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. +// + +syntax = "proto3"; + +package feast.core; +option java_package = "feast.proto.core"; +option java_outer_classname = "ProjectProto"; +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; + +import "google/protobuf/timestamp.proto"; + +message Project { + // User-specified specifications of this entity. + ProjectSpec spec = 1; + // System-populated metadata for this entity. 
+ ProjectMeta meta = 2; +} + +message ProjectSpec { + // Name of the Project + string name = 1; + + // Description of the Project + string description = 2; + + // User defined metadata + map tags = 3; + + // Owner of the Project + string owner = 4; +} + +message ProjectMeta { + // Time when the Project is created + google.protobuf.Timestamp created_timestamp = 1; + // Time when the Project is last updated with registry changes (Apply stage) + google.protobuf.Timestamp last_updated_timestamp = 2; +} diff --git a/protos/feast/core/Registry.proto b/protos/feast/core/Registry.proto index b4f1ffb0a3..45ecd2c173 100644 --- a/protos/feast/core/Registry.proto +++ b/protos/feast/core/Registry.proto @@ -33,8 +33,9 @@ import "feast/core/SavedDataset.proto"; import "feast/core/ValidationProfile.proto"; import "google/protobuf/timestamp.proto"; import "feast/core/Permission.proto"; +import "feast/core/Project.proto"; -// Next id: 17 +// Next id: 18 message Registry { repeated Entity entities = 1; repeated FeatureTable feature_tables = 2; @@ -47,12 +48,13 @@ message Registry { repeated ValidationReference validation_references = 13; Infra infra = 10; // Tracking metadata of Feast by project - repeated ProjectMetadata project_metadata = 15; + repeated ProjectMetadata project_metadata = 15 [deprecated = true]; string registry_schema_version = 3; // to support migrations; incremented when schema is changed string version_id = 4; // version id, random string generated on each update of the data; now used only for debugging purposes google.protobuf.Timestamp last_updated = 5; repeated Permission permissions = 16; + repeated Project projects = 17; } message ProjectMetadata { diff --git a/protos/feast/registry/RegistryServer.proto b/protos/feast/registry/RegistryServer.proto index 928354077b..3ad64b5b34 100644 --- a/protos/feast/registry/RegistryServer.proto +++ b/protos/feast/registry/RegistryServer.proto @@ -15,6 +15,7 @@ import "feast/core/SavedDataset.proto"; import 
"feast/core/ValidationProfile.proto"; import "feast/core/InfraObject.proto"; import "feast/core/Permission.proto"; +import "feast/core/Project.proto"; service RegistryServer{ // Entity RPCs @@ -67,6 +68,12 @@ service RegistryServer{ rpc ListPermissions (ListPermissionsRequest) returns (ListPermissionsResponse) {} rpc DeletePermission (DeletePermissionRequest) returns (google.protobuf.Empty) {} + // Project RPCs + rpc ApplyProject (ApplyProjectRequest) returns (google.protobuf.Empty) {} + rpc GetProject (GetProjectRequest) returns (feast.core.Project) {} + rpc ListProjects (ListProjectsRequest) returns (ListProjectsResponse) {} + rpc DeleteProject (DeleteProjectRequest) returns (google.protobuf.Empty) {} + rpc ApplyMaterialization (ApplyMaterializationRequest) returns (google.protobuf.Empty) {} rpc ListProjectMetadata (ListProjectMetadataRequest) returns (ListProjectMetadataResponse) {} rpc UpdateInfra (UpdateInfraRequest) returns (google.protobuf.Empty) {} @@ -356,3 +363,29 @@ message DeletePermissionRequest { string project = 2; bool commit = 3; } + +// Projects + +message ApplyProjectRequest { + feast.core.Project project = 1; + bool commit = 2; +} + +message GetProjectRequest { + string name = 1; + bool allow_cache = 2; +} + +message ListProjectsRequest { + bool allow_cache = 1; + map tags = 2; +} + +message ListProjectsResponse { + repeated feast.core.Project projects = 1; +} + +message DeleteProjectRequest { + string name = 1; + bool commit = 2; +} diff --git a/sdk/python/feast/__init__.py b/sdk/python/feast/__init__.py index 52734bc71e..71122b7047 100644 --- a/sdk/python/feast/__init__.py +++ b/sdk/python/feast/__init__.py @@ -18,6 +18,7 @@ from .feature_view import FeatureView from .field import Field from .on_demand_feature_view import OnDemandFeatureView +from .project import Project from .repo_config import RepoConfig from .stream_feature_view import StreamFeatureView from .value_type import ValueType @@ -49,4 +50,5 @@ "PushSource", "RequestSource", 
"AthenaSource", + "Project", ] diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py index ec90b31151..499788101e 100644 --- a/sdk/python/feast/cli.py +++ b/sdk/python/feast/cli.py @@ -254,6 +254,79 @@ def data_source_list(ctx: click.Context, tags: list[str]): print(tabulate(table, headers=["NAME", "CLASS"], tablefmt="plain")) +@cli.group(name="projects") +def projects_cmd(): + """ + Access projects + """ + pass + + +@projects_cmd.command("describe") +@click.argument("name", type=click.STRING) +@click.pass_context +def project_describe(ctx: click.Context, name: str): + """ + Describe a project + """ + store = create_feature_store(ctx) + + try: + project = store.get_project(name) + except FeastObjectNotFoundException as e: + print(e) + exit(1) + + print( + yaml.dump( + yaml.safe_load(str(project)), default_flow_style=False, sort_keys=False + ) + ) + + +@projects_cmd.command("current_project") +@click.pass_context +def project_current(ctx: click.Context): + """ + Returns the current project configured with FeatureStore object + """ + store = create_feature_store(ctx) + + try: + project = store.get_project(name=None) + except FeastObjectNotFoundException as e: + print(e) + exit(1) + + print( + yaml.dump( + yaml.safe_load(str(project)), default_flow_style=False, sort_keys=False + ) + ) + + +@projects_cmd.command(name="list") +@tagsOption +@click.pass_context +def project_list(ctx: click.Context, tags: list[str]): + """ + List all projects + """ + store = create_feature_store(ctx) + table = [] + tags_filter = utils.tags_list_to_dict(tags) + for project in store.list_projects(tags=tags_filter): + table.append([project.name, project.description, project.tags, project.owner]) + + from tabulate import tabulate + + print( + tabulate( + table, headers=["NAME", "DESCRIPTION", "TAGS", "OWNER"], tablefmt="plain" + ) + ) + + @cli.group(name="entities") def entities_cmd(): """ diff --git a/sdk/python/feast/diff/registry_diff.py b/sdk/python/feast/diff/registry_diff.py 
index 6235025adc..272c4590d8 100644 --- a/sdk/python/feast/diff/registry_diff.py +++ b/sdk/python/feast/diff/registry_diff.py @@ -11,6 +11,7 @@ from feast.infra.registry.base_registry import BaseRegistry from feast.infra.registry.registry import FEAST_OBJECT_TYPES, FeastObjectType from feast.permissions.permission import Permission +from feast.project import Project from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from feast.protos.feast.core.Entity_pb2 import Entity as EntityProto from feast.protos.feast.core.FeatureService_pb2 import ( @@ -371,6 +372,11 @@ def apply_diff_to_registry( TransitionType.CREATE, TransitionType.UPDATE, ]: + if feast_object_diff.feast_object_type == FeastObjectType.PROJECT: + registry.apply_project( + cast(Project, feast_object_diff.new_feast_object), + commit=False, + ) if feast_object_diff.feast_object_type == FeastObjectType.DATA_SOURCE: registry.apply_data_source( cast(DataSource, feast_object_diff.new_feast_object), diff --git a/sdk/python/feast/errors.py b/sdk/python/feast/errors.py index fd5955fd98..4dbb220c1e 100644 --- a/sdk/python/feast/errors.py +++ b/sdk/python/feast/errors.py @@ -480,6 +480,16 @@ def __init__(self, name, project=None): super().__init__(f"Permission {name} does not exist") +class ProjectNotFoundException(FeastError): + def __init__(self, project): + super().__init__(f"Project {project} does not exist in registry") + + +class ProjectObjectNotFoundException(FeastObjectNotFoundException): + def __init__(self, name, project=None): + super().__init__(f"Project {name} does not exist") + + class ZeroRowsQueryResult(FeastError): def __init__(self, query: str): super().__init__(f"This query returned zero rows:\n{query}") diff --git a/sdk/python/feast/feast_object.py b/sdk/python/feast/feast_object.py index dfe29b7128..63fa1e913b 100644 --- a/sdk/python/feast/feast_object.py +++ b/sdk/python/feast/feast_object.py @@ -1,5 +1,8 @@ from typing import Union, get_args +from feast.project 
import Project +from feast.protos.feast.core.Project_pb2 import ProjectSpec + from .batch_feature_view import BatchFeatureView from .data_source import DataSource from .entity import Entity @@ -23,6 +26,7 @@ # Convenience type representing all Feast objects FeastObject = Union[ + Project, FeatureView, OnDemandFeatureView, BatchFeatureView, @@ -36,6 +40,7 @@ ] FeastObjectSpecProto = Union[ + ProjectSpec, FeatureViewSpec, OnDemandFeatureViewSpec, StreamFeatureViewSpec, diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index a03706e56f..27b6eade5b 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -60,11 +60,7 @@ ) from feast.feast_object import FeastObject from feast.feature_service import FeatureService -from feast.feature_view import ( - DUMMY_ENTITY, - DUMMY_ENTITY_NAME, - FeatureView, -) +from feast.feature_view import DUMMY_ENTITY, DUMMY_ENTITY_NAME, FeatureView from feast.inference import ( update_data_sources_with_inferred_event_timestamp_col, update_feature_views_with_inferred_features_and_entities, @@ -77,6 +73,7 @@ from feast.on_demand_feature_view import OnDemandFeatureView from feast.online_response import OnlineResponse from feast.permissions.permission import Permission +from feast.project import Project from feast.protos.feast.core.InfraObject_pb2 import Infra as InfraProto from feast.protos.feast.serving.ServingService_pb2 import ( FieldStatus, @@ -162,14 +159,12 @@ def __init__( registry_config, self.config.project, None, self.config.auth_config ) else: - r = Registry( + self._registry = Registry( self.config.project, registry_config, repo_path=self.repo_path, auth_config=self.config.auth_config, ) - r._initialize_registry(self.config.project) - self._registry = r self._provider = get_provider(self.config) @@ -205,16 +200,8 @@ def refresh_registry(self): greater than 0, then once the cache becomes stale (more time than the TTL has passed), a new cache will be downloaded 
synchronously, which may increase latencies if the triggering method is get_online_features(). """ - registry_config = self.config.registry - registry = Registry( - self.config.project, - registry_config, - repo_path=self.repo_path, - auth_config=self.config.auth_config, - ) - registry.refresh(self.config.project) - self._registry = registry + self._registry.refresh(self.project) def list_entities( self, allow_cache: bool = False, tags: Optional[dict[str, str]] = None @@ -740,6 +727,7 @@ def plan( ... source=driver_hourly_stats, ... ) >>> registry_diff, infra_diff, new_infra = fs.plan(RepoContents( + ... projects=[Project(name="project")], ... data_sources=[driver_hourly_stats], ... feature_views=[driver_hourly_stats_view], ... on_demand_feature_views=list(), @@ -802,6 +790,7 @@ def _apply_diffs( def apply( self, objects: Union[ + Project, DataSource, Entity, FeatureView, @@ -862,6 +851,9 @@ def apply( objects_to_delete = [] # Separate all objects into entities, feature services, and different feature view types. + projects_to_update = [ob for ob in objects if isinstance(ob, Project)] + if len(projects_to_update) > 1: + raise ValueError("Only one project can be applied at a time.") entities_to_update = [ob for ob in objects if isinstance(ob, Entity)] views_to_update = [ ob @@ -924,6 +916,8 @@ def apply( ) # Add all objects to the registry and update the provider's infrastructure. + for project in projects_to_update: + self._registry.apply_project(project, commit=False) for ds in data_sources_to_update: self._registry.apply_data_source(ds, project=self.project, commit=False) for view in itertools.chain(views_to_update, odfvs_to_update, sfvs_to_update): @@ -1990,6 +1984,36 @@ def get_permission(self, name: str) -> Permission: """ return self._registry.get_permission(name, self.project) + def list_projects( + self, allow_cache: bool = False, tags: Optional[dict[str, str]] = None + ) -> List[Project]: + """ + Retrieves the list of projects from the registry. 
+ + Args: + allow_cache: Whether to allow returning projects from a cached registry. + tags: Filter by tags. + + Returns: + A list of projects. + """ + return self._registry.list_projects(allow_cache=allow_cache, tags=tags) + + def get_project(self, name: Optional[str]) -> Project: + """ + Retrieves a project from the registry. + + Args: + name: Name of the project. + + Returns: + The specified project. + + Raises: + ProjectObjectNotFoundException: The project could not be found. + """ + return self._registry.get_project(name or self.project) + def list_saved_datasets( self, allow_cache: bool = False, tags: Optional[dict[str, str]] = None ) -> List[SavedDataset]: diff --git a/sdk/python/feast/feature_view.py b/sdk/python/feast/feature_view.py index 1a85a4b90c..dd01078e20 100644 --- a/sdk/python/feast/feature_view.py +++ b/sdk/python/feast/feature_view.py @@ -423,7 +423,7 @@ def from_proto(cls, feature_view_proto: FeatureViewProto): if len(feature_view.entities) != len(feature_view.entity_columns): warnings.warn( - f"There are some mismatches in your feature view's registered entities. Please check if you have applied your entities correctly." + f"There are some mismatches in your feature view: {feature_view.name} registered entities. Please check if you have applied your entities correctly." f"Entities: {feature_view.entities} vs Entity Columns: {feature_view.entity_columns}" ) diff --git a/sdk/python/feast/infra/online_stores/remote.py b/sdk/python/feast/infra/online_stores/remote.py index 5f65d8da8b..8a7e299516 100644 --- a/sdk/python/feast/infra/online_stores/remote.py +++ b/sdk/python/feast/infra/online_stores/remote.py @@ -109,7 +109,7 @@ def online_read( result_tuples.append((event_ts, feature_values_dict)) return result_tuples else: - error_msg = f"Unable to retrieve the online store data using feature server API. 
Error_code={response.status_code}, error_message={response.reason}" + error_msg = f"Unable to retrieve the online store data using feature server API. Error_code={response.status_code}, error_message={response.text}" logger.error(error_msg) raise RuntimeError(error_msg) diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index 33adb6b7c9..f5040d9752 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -29,6 +29,7 @@ from feast.infra.infra_object import Infra from feast.on_demand_feature_view import OnDemandFeatureView from feast.permissions.permission import Permission +from feast.project import Project from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.Entity_pb2 import Entity as EntityProto from feast.protos.feast.core.FeatureService_pb2 import ( @@ -39,6 +40,7 @@ OnDemandFeatureView as OnDemandFeatureViewProto, ) from feast.protos.feast.core.Permission_pb2 import Permission as PermissionProto +from feast.protos.feast.core.Project_pb2 import Project as ProjectProto from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.core.SavedDataset_pb2 import SavedDataset as SavedDatasetProto from feast.protos.feast.core.StreamFeatureView_pb2 import ( @@ -663,6 +665,71 @@ def list_permissions( """ raise NotImplementedError + @abstractmethod + def apply_project( + self, + project: Project, + commit: bool = True, + ): + """ + Registers a project with Feast + + Args: + project: A project that will be registered + commit: Whether to immediately commit to the registry + """ + raise NotImplementedError + + @abstractmethod + def delete_project( + self, + name: str, + commit: bool = True, + ): + """ + Deletes a project or raises an ProjectNotFoundException exception if not found. 
+ + Args: + project: Feast project name that needs to be deleted + commit: Whether the change should be persisted immediately + """ + raise NotImplementedError + + @abstractmethod + def get_project( + self, + name: str, + allow_cache: bool = False, + ) -> Project: + """ + Retrieves a project. + + Args: + name: Feast project name + allow_cache: Whether to allow returning this permission from a cached registry + + Returns: + Returns either the specified project, or raises ProjectObjectNotFoundException exception if none is found + """ + raise NotImplementedError + + @abstractmethod + def list_projects( + self, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Project]: + """ + Retrieve a list of projects from the registry + + Args: + allow_cache: Whether to allow returning permissions from a cached registry + + Returns: + List of project + """ + raise NotImplementedError + @abstractmethod def proto(self) -> RegistryProto: """ @@ -814,4 +881,6 @@ def deserialize_registry_values(serialized_proto, feast_obj_type) -> Any: return FeatureServiceProto.FromString(serialized_proto) if feast_obj_type == Permission: return PermissionProto.FromString(serialized_proto) + if feast_obj_type == Project: + return ProjectProto.FromString(serialized_proto) return None diff --git a/sdk/python/feast/infra/registry/caching_registry.py b/sdk/python/feast/infra/registry/caching_registry.py index 611d67de96..c04a62552b 100644 --- a/sdk/python/feast/infra/registry/caching_registry.py +++ b/sdk/python/feast/infra/registry/caching_registry.py @@ -1,6 +1,7 @@ import atexit import logging import threading +import warnings from abc import abstractmethod from datetime import timedelta from threading import Lock @@ -15,6 +16,7 @@ from feast.infra.registry.base_registry import BaseRegistry from feast.on_demand_feature_view import OnDemandFeatureView from feast.permissions.permission import Permission +from feast.project import Project from feast.project_metadata import 
ProjectMetadata from feast.saved_dataset import SavedDataset, ValidationReference from feast.stream_feature_view import StreamFeatureView @@ -26,7 +28,6 @@ class CachingRegistry(BaseRegistry): def __init__(self, project: str, cache_ttl_seconds: int, cache_mode: str): self.cached_registry_proto = self.proto() - proto_registry_utils.init_project_metadata(self.cached_registry_proto, project) self.cached_registry_proto_created = _utc_now() self._refresh_lock = Lock() self.cached_registry_proto_ttl = timedelta( @@ -308,6 +309,10 @@ def _list_project_metadata(self, project: str) -> List[ProjectMetadata]: def list_project_metadata( self, project: str, allow_cache: bool = False ) -> List[ProjectMetadata]: + warnings.warn( + "list_project_metadata is deprecated and will be removed in a future version. Use list_projects() and get_project() methods instead.", + DeprecationWarning, + ) if allow_cache: self._refresh_cached_registry_if_necessary() return proto_registry_utils.list_project_metadata( @@ -355,15 +360,35 @@ def list_permissions( ) return self._list_permissions(project, tags) + @abstractmethod + def _get_project(self, name: str) -> Project: + pass + + def get_project( + self, + name: str, + allow_cache: bool = False, + ) -> Project: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_project(self.cached_registry_proto, name) + return self._get_project(name) + + @abstractmethod + def _list_projects(self, tags: Optional[dict[str, str]]) -> List[Project]: + pass + + def list_projects( + self, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Project]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_projects(self.cached_registry_proto, tags) + return self._list_projects(tags) + def refresh(self, project: Optional[str] = None): - if project: - project_metadata = proto_registry_utils.get_project_metadata( - 
registry_proto=self.cached_registry_proto, project=project - ) - if not project_metadata: - proto_registry_utils.init_project_metadata( - self.cached_registry_proto, project - ) self.cached_registry_proto = self.proto() self.cached_registry_proto_created = _utc_now() @@ -395,7 +420,7 @@ def _start_thread_async_refresh(self, cache_ttl_seconds): self.registry_refresh_thread = threading.Timer( cache_ttl_seconds, self._start_thread_async_refresh, [cache_ttl_seconds] ) - self.registry_refresh_thread.setDaemon(True) + self.registry_refresh_thread.daemon = True self.registry_refresh_thread.start() def _exit_handler(self): diff --git a/sdk/python/feast/infra/registry/proto_registry_utils.py b/sdk/python/feast/infra/registry/proto_registry_utils.py index f67808aab5..b0413fd77e 100644 --- a/sdk/python/feast/infra/registry/proto_registry_utils.py +++ b/sdk/python/feast/infra/registry/proto_registry_utils.py @@ -1,4 +1,3 @@ -import uuid from functools import wraps from typing import List, Optional @@ -11,6 +10,7 @@ FeatureServiceNotFoundException, FeatureViewNotFoundException, PermissionObjectNotFoundException, + ProjectObjectNotFoundException, SavedDatasetNotFound, ValidationReferenceNotFound, ) @@ -18,6 +18,7 @@ from feast.feature_view import FeatureView from feast.on_demand_feature_view import OnDemandFeatureView from feast.permissions.permission import Permission +from feast.project import Project from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.Registry_pb2 import ProjectMetadata as ProjectMetadataProto from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto @@ -69,13 +70,6 @@ def wrapper( return wrapper -def init_project_metadata(cached_registry_proto: RegistryProto, project: str): - new_project_uuid = f"{uuid.uuid4()}" - cached_registry_proto.project_metadata.append( - ProjectMetadata(project_name=project, project_uuid=new_project_uuid).to_proto() - ) - - def get_project_metadata( registry_proto: Optional[RegistryProto], 
project: str ) -> Optional[ProjectMetadataProto]: @@ -316,3 +310,21 @@ def get_permission( ): return Permission.from_proto(permission_proto) raise PermissionObjectNotFoundException(name=name, project=project) + + +def list_projects( + registry_proto: RegistryProto, + tags: Optional[dict[str, str]], +) -> List[Project]: + projects = [] + for project_proto in registry_proto.projects: + if utils.has_all_tags(project_proto.spec.tags, tags): + projects.append(Project.from_proto(project_proto)) + return projects + + +def get_project(registry_proto: RegistryProto, name: str) -> Project: + for projects_proto in registry_proto.projects: + if projects_proto.spec.name == name: + return Project.from_proto(projects_proto) + raise ProjectObjectNotFoundException(name=name) diff --git a/sdk/python/feast/infra/registry/registry.py b/sdk/python/feast/infra/registry/registry.py index 366f3aacaa..634d6fa7ac 100644 --- a/sdk/python/feast/infra/registry/registry.py +++ b/sdk/python/feast/infra/registry/registry.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import logging -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from enum import Enum from pathlib import Path from threading import Lock @@ -32,6 +32,8 @@ FeatureServiceNotFoundException, FeatureViewNotFoundException, PermissionNotFoundException, + ProjectNotFoundException, + ProjectObjectNotFoundException, ValidationReferenceNotFound, ) from feast.feature_service import FeatureService @@ -44,6 +46,7 @@ from feast.on_demand_feature_view import OnDemandFeatureView from feast.permissions.auth_model import AuthConfig, NoAuthConfig from feast.permissions.permission import Permission +from feast.project import Project from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.repo_config import RegistryConfig @@ -70,6 +73,7 @@ class FeastObjectType(Enum): + PROJECT = "project" DATA_SOURCE = "data source" ENTITY = "entity" FEATURE_VIEW = "feature view" @@ -83,6 +87,11 @@ def get_objects_from_registry( registry: "BaseRegistry", project: str ) -> Dict["FeastObjectType", List[Any]]: return { + FeastObjectType.PROJECT: [ + project_obj + for project_obj in registry.list_projects() + if project_obj.name == project + ], FeastObjectType.DATA_SOURCE: registry.list_data_sources(project=project), FeastObjectType.ENTITY: registry.list_entities(project=project), FeastObjectType.FEATURE_VIEW: registry.list_feature_views(project=project), @@ -103,6 +112,7 @@ def get_objects_from_repo_contents( repo_contents: RepoContents, ) -> Dict["FeastObjectType", List[Any]]: return { + FeastObjectType.PROJECT: repo_contents.projects, FeastObjectType.DATA_SOURCE: repo_contents.data_sources, FeastObjectType.ENTITY: repo_contents.entities, FeastObjectType.FEATURE_VIEW: repo_contents.feature_views, @@ -157,34 +167,10 @@ def get_user_metadata( # The cached_registry_proto object is used for both reads and writes. 
In particular, # all write operations refresh the cache and modify it in memory; the write must # then be persisted to the underlying RegistryStore with a call to commit(). - cached_registry_proto: Optional[RegistryProto] = None - cached_registry_proto_created: Optional[datetime] = None + cached_registry_proto: RegistryProto + cached_registry_proto_created: datetime cached_registry_proto_ttl: timedelta - def __new__( - cls, - project: str, - registry_config: Optional[RegistryConfig], - repo_path: Optional[Path], - auth_config: AuthConfig = NoAuthConfig(), - ): - # We override __new__ so that we can inspect registry_config and create a SqlRegistry without callers - # needing to make any changes. - if registry_config and registry_config.registry_type == "sql": - from feast.infra.registry.sql import SqlRegistry - - return SqlRegistry(registry_config, project, repo_path) - elif registry_config and registry_config.registry_type == "snowflake.registry": - from feast.infra.registry.snowflake import SnowflakeRegistry - - return SnowflakeRegistry(registry_config, project, repo_path) - elif registry_config and registry_config.registry_type == "remote": - from feast.infra.registry.remote import RemoteRegistry - - return RemoteRegistry(registry_config, project, repo_path, auth_config) - else: - return super(Registry, cls).__new__(cls) - def __init__( self, project: str, @@ -204,6 +190,17 @@ def __init__( self._refresh_lock = Lock() self._auth_config = auth_config + registry_proto = RegistryProto() + registry_proto.registry_schema_version = REGISTRY_SCHEMA_VERSION + self.cached_registry_proto = registry_proto + self.cached_registry_proto_created = _utc_now() + + self.purge_feast_metadata = ( + registry_config.purge_feast_metadata + if registry_config is not None + else False + ) + if registry_config: registry_store_type = registry_config.registry_store_type registry_path = registry_config.path @@ -214,11 +211,52 @@ def __init__( self._registry_store = cls(registry_config, 
repo_path) self.cached_registry_proto_ttl = timedelta( - seconds=registry_config.cache_ttl_seconds - if registry_config.cache_ttl_seconds is not None - else 0 + seconds=( + registry_config.cache_ttl_seconds + if registry_config.cache_ttl_seconds is not None + else 0 + ) ) + try: + registry_proto = self._registry_store.get_registry_proto() + self.cached_registry_proto = registry_proto + self.cached_registry_proto_created = _utc_now() + # Sync feast_metadata to projects table + # when purge_feast_metadata is set to True, Delete data from + # feast_metadata table and list_project_metadata will not return any data + self._sync_feast_metadata_to_projects_table() + except FileNotFoundError: + logger.info("Registry file not found. Creating new registry.") + finally: + self.commit() + + def _sync_feast_metadata_to_projects_table(self): + """ + Sync feast_metadata to projects table + """ + feast_metadata_projects = [] + projects_set = [] + # List of project in project_metadata + for project_metadata in self.cached_registry_proto.project_metadata: + project = ProjectMetadata.from_proto(project_metadata) + feast_metadata_projects.append(project.project_name) + if len(feast_metadata_projects) > 0: + # List of project in projects + for project_metadata in self.cached_registry_proto.projects: + project = Project.from_proto(project_metadata) + projects_set.append(project.name) + + # Find object in feast_metadata_projects but not in projects + projects_to_sync = set(feast_metadata_projects) - set(projects_set) + # Sync feast_metadata to projects table + for project_name in projects_to_sync: + project = Project(name=project_name) + self.cached_registry_proto.projects.append(project.to_proto()) + + if self.purge_feast_metadata: + self.cached_registry_proto.project_metadata = [] + def clone(self) -> "Registry": new_registry = Registry("project", None, None, self._auth_config) new_registry.cached_registry_proto_ttl = timedelta(seconds=0) @@ -231,16 +269,6 @@ def clone(self) -> 
"Registry": new_registry._registry_store = NoopRegistryStore() return new_registry - def _initialize_registry(self, project: str): - """Explicitly initializes the registry with an empty proto if it doesn't exist.""" - try: - self._get_registry_proto(project=project) - except FileNotFoundError: - registry_proto = RegistryProto() - registry_proto.registry_schema_version = REGISTRY_SCHEMA_VERSION - proto_registry_utils.init_project_metadata(registry_proto, project) - self._registry_store.update_registry_proto(registry_proto) - def update_infra(self, infra: Infra, project: str, commit: bool = True): self._prepare_registry_for_changes(project) assert self.cached_registry_proto @@ -320,7 +348,7 @@ def apply_data_source( data_source_proto.data_source_class_type = ( f"{data_source.__class__.__module__}.{data_source.__class__.__name__}" ) - registry.data_sources.append(data_source_proto) + self.cached_registry_proto.data_sources.append(data_source_proto) if commit: self.commit() @@ -363,7 +391,7 @@ def apply_feature_service( feature_service_proto = feature_service.to_proto() feature_service_proto.spec.project = project del registry.feature_services[idx] - registry.feature_services.append(feature_service_proto) + self.cached_registry_proto.feature_services.append(feature_service_proto) if commit: self.commit() @@ -773,15 +801,16 @@ def list_validation_references( ) def delete_validation_reference(self, name: str, project: str, commit: bool = True): - registry_proto = self._prepare_registry_for_changes(project) + self._prepare_registry_for_changes(project) + assert self.cached_registry_proto for idx, existing_validation_reference in enumerate( - registry_proto.validation_references + self.cached_registry_proto.validation_references ): if ( existing_validation_reference.name == name and existing_validation_reference.project == project ): - del registry_proto.validation_references[idx] + del self.cached_registry_proto.validation_references[idx] if commit: self.commit() return 
@@ -811,37 +840,36 @@ def teardown(self): def proto(self) -> RegistryProto: return self.cached_registry_proto or RegistryProto() - def _prepare_registry_for_changes(self, project: str): + def _prepare_registry_for_changes(self, project_name: str): """Prepares the Registry for changes by refreshing the cache if necessary.""" + + assert self.cached_registry_proto is not None + try: - self._get_registry_proto(project=project, allow_cache=True) - if ( - proto_registry_utils.get_project_metadata( - self.cached_registry_proto, project - ) - is None - ): - # Project metadata not initialized yet. Try pulling without cache - self._get_registry_proto(project=project, allow_cache=False) - except FileNotFoundError: - registry_proto = RegistryProto() - registry_proto.registry_schema_version = REGISTRY_SCHEMA_VERSION + # Check if the project exists in the registry cache + self.get_project(name=project_name, allow_cache=True) + return self.cached_registry_proto + except ProjectObjectNotFoundException: + # If the project does not exist in cache, refresh cache from store + registry_proto = self._registry_store.get_registry_proto() self.cached_registry_proto = registry_proto self.cached_registry_proto_created = _utc_now() - # Initialize project metadata if needed - assert self.cached_registry_proto - if ( - proto_registry_utils.get_project_metadata( - self.cached_registry_proto, project - ) - is None - ): - proto_registry_utils.init_project_metadata( - self.cached_registry_proto, project - ) + try: + # Check if the project exists in the registry cache after refresh from store + self.get_project(name=project_name) + except ProjectObjectNotFoundException: + # If the project still does not exist, create it + project_proto = Project(name=project_name).to_proto() + self.cached_registry_proto.projects.append(project_proto) + if not self.purge_feast_metadata: + project_metadata_proto = ProjectMetadata( + project_name=project_name + ).to_proto() + 
self.cached_registry_proto.project_metadata.append( + project_metadata_proto + ) self.commit() - return self.cached_registry_proto def _get_registry_proto( @@ -856,10 +884,7 @@ def _get_registry_proto( Returns: Returns a RegistryProto object which represents the state of the registry """ with self._refresh_lock: - expired = ( - self.cached_registry_proto is None - or self.cached_registry_proto_created is None - ) or ( + expired = (self.cached_registry_proto_created is None) or ( self.cached_registry_proto_ttl.total_seconds() > 0 # 0 ttl means infinity and ( @@ -871,33 +896,12 @@ def _get_registry_proto( ) ) - if project: - old_project_metadata = proto_registry_utils.get_project_metadata( - registry_proto=self.cached_registry_proto, project=project - ) - - if allow_cache and not expired and old_project_metadata is not None: - assert isinstance(self.cached_registry_proto, RegistryProto) - return self.cached_registry_proto - elif allow_cache and not expired: - assert isinstance(self.cached_registry_proto, RegistryProto) + if allow_cache and not expired: return self.cached_registry_proto - logger.info("Registry cache expired, so refreshing") registry_proto = self._registry_store.get_registry_proto() self.cached_registry_proto = registry_proto self.cached_registry_proto_created = _utc_now() - - if not project: - return registry_proto - - project_metadata = proto_registry_utils.get_project_metadata( - registry_proto=registry_proto, project=project - ) - if not project_metadata: - proto_registry_utils.init_project_metadata(registry_proto, project) - self.commit() - return registry_proto def _check_conflicting_feature_view_names(self, feature_view: BaseFeatureView): @@ -960,7 +964,7 @@ def apply_permission( permission_proto = permission.to_proto() permission_proto.spec.project = project - registry.permissions.append(permission_proto) + self.cached_registry_proto.permissions.append(permission_proto) if commit: self.commit() @@ -978,3 +982,91 @@ def delete_permission(self, 
name: str, project: str, commit: bool = True): self.commit() return raise PermissionNotFoundException(name, project) + + def apply_project( + self, + project: Project, + commit: bool = True, + ): + registry = self.cached_registry_proto + + for idx, existing_project_proto in enumerate(registry.projects): + if existing_project_proto.spec.name == project.name: + project.created_timestamp = ( + existing_project_proto.meta.created_timestamp.ToDatetime().replace( + tzinfo=timezone.utc + ) + ) + del registry.projects[idx] + + project_proto = project.to_proto() + self.cached_registry_proto.projects.append(project_proto) + if commit: + self.commit() + + def get_project( + self, + name: str, + allow_cache: bool = False, + ) -> Project: + registry_proto = self._get_registry_proto(project=name, allow_cache=allow_cache) + return proto_registry_utils.get_project(registry_proto, name) + + def list_projects( + self, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Project]: + registry_proto = self._get_registry_proto(project=None, allow_cache=allow_cache) + return proto_registry_utils.list_projects( + registry_proto=registry_proto, tags=tags + ) + + def delete_project( + self, + name: str, + commit: bool = True, + ): + assert self.cached_registry_proto + + for idx, project_proto in enumerate(self.cached_registry_proto.projects): + if project_proto.spec.name == name: + list_validation_references = self.list_validation_references(name) + for validation_reference in list_validation_references: + self.delete_validation_reference(validation_reference.name, name) + + list_saved_datasets = self.list_saved_datasets(name) + for saved_dataset in list_saved_datasets: + self.delete_saved_dataset(saved_dataset.name, name) + + list_feature_services = self.list_feature_services(name) + for feature_service in list_feature_services: + self.delete_feature_service(feature_service.name, name) + + list_on_demand_feature_views = self.list_on_demand_feature_views(name) + 
for on_demand_feature_view in list_on_demand_feature_views: + self.delete_feature_view(on_demand_feature_view.name, name) + + list_stream_feature_views = self.list_stream_feature_views(name) + for stream_feature_view in list_stream_feature_views: + self.delete_feature_view(stream_feature_view.name, name) + + list_feature_views = self.list_feature_views(name) + for feature_view in list_feature_views: + self.delete_feature_view(feature_view.name, name) + + list_data_sources = self.list_data_sources(name) + for data_source in list_data_sources: + self.delete_data_source(data_source.name, name) + + list_entities = self.list_entities(name) + for entity in list_entities: + self.delete_entity(entity.name, name) + list_permissions = self.list_permissions(name) + for permission in list_permissions: + self.delete_permission(permission.name, name) + del self.cached_registry_proto.projects[idx] + if commit: + self.commit() + return + raise ProjectNotFoundException(name) diff --git a/sdk/python/feast/infra/registry/remote.py b/sdk/python/feast/infra/registry/remote.py index 618628bc07..ba25ef7dbe 100644 --- a/sdk/python/feast/infra/registry/remote.py +++ b/sdk/python/feast/infra/registry/remote.py @@ -16,14 +16,12 @@ from feast.infra.registry.base_registry import BaseRegistry from feast.on_demand_feature_view import OnDemandFeatureView from feast.permissions.auth.auth_type import AuthType -from feast.permissions.auth_model import ( - AuthConfig, - NoAuthConfig, -) +from feast.permissions.auth_model import AuthConfig, NoAuthConfig from feast.permissions.client.grpc_client_auth_interceptor import ( GrpcClientAuthHeaderInterceptor, ) from feast.permissions.permission import Permission +from feast.project import Project from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.registry import RegistryServer_pb2, RegistryServer_pb2_grpc @@ -50,11 +48,18 @@ def __init__( auth_config: 
AuthConfig = NoAuthConfig(), ): self.auth_config = auth_config - channel = grpc.insecure_channel(registry_config.path) + self.channel = grpc.insecure_channel(registry_config.path) if self.auth_config.type != AuthType.NONE.value: auth_header_interceptor = GrpcClientAuthHeaderInterceptor(auth_config) - channel = grpc.intercept_channel(channel, auth_header_interceptor) - self.stub = RegistryServer_pb2_grpc.RegistryServerStub(channel) + self.channel = grpc.intercept_channel(self.channel, auth_header_interceptor) + self.stub = RegistryServer_pb2_grpc.RegistryServerStub(self.channel) + + def close(self): + if self.channel: + self.channel.close() + + def __del__(self): + self.close() def apply_entity(self, entity: Entity, project: str, commit: bool = True): request = RegistryServer_pb2.ApplyEntityRequest( @@ -173,15 +178,17 @@ def apply_feature_view( arg_name = "on_demand_feature_view" request = RegistryServer_pb2.ApplyFeatureViewRequest( - feature_view=feature_view.to_proto() - if arg_name == "feature_view" - else None, - stream_feature_view=feature_view.to_proto() - if arg_name == "stream_feature_view" - else None, - on_demand_feature_view=feature_view.to_proto() - if arg_name == "on_demand_feature_view" - else None, + feature_view=( + feature_view.to_proto() if arg_name == "feature_view" else None + ), + stream_feature_view=( + feature_view.to_proto() if arg_name == "stream_feature_view" else None + ), + on_demand_feature_view=( + feature_view.to_proto() + if arg_name == "on_demand_feature_view" + else None + ), project=project, commit=commit, ) @@ -450,6 +457,49 @@ def list_permissions( Permission.from_proto(permission) for permission in response.permissions ] + def apply_project( + self, + project: Project, + commit: bool = True, + ): + project_proto = project.to_proto() + + request = RegistryServer_pb2.ApplyProjectRequest( + project=project_proto, commit=commit + ) + self.stub.ApplyProject(request) + + def delete_project( + self, + name: str, + commit: bool = True, 
+ ): + request = RegistryServer_pb2.DeleteProjectRequest(name=name, commit=commit) + self.stub.DeleteProject(request) + + def get_project( + self, + name: str, + allow_cache: bool = False, + ) -> Project: + request = RegistryServer_pb2.GetProjectRequest( + name=name, allow_cache=allow_cache + ) + response = self.stub.GetProject(request) + + return Project.from_proto(response) + + def list_projects( + self, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Project]: + request = RegistryServer_pb2.ListProjectsRequest( + allow_cache=allow_cache, tags=tags + ) + response = self.stub.ListProjects(request) + return [Project.from_proto(project) for project in response.projects] + def proto(self) -> RegistryProto: return self.stub.Proto(Empty()) diff --git a/sdk/python/feast/infra/registry/snowflake.py b/sdk/python/feast/infra/registry/snowflake.py index 801b90afe3..accfa42e12 100644 --- a/sdk/python/feast/infra/registry/snowflake.py +++ b/sdk/python/feast/infra/registry/snowflake.py @@ -5,7 +5,7 @@ from datetime import datetime, timedelta, timezone from enum import Enum from threading import Lock -from typing import Any, Callable, List, Literal, Optional, Set, Union +from typing import Any, Callable, List, Literal, Optional, Union from pydantic import ConfigDict, Field, StrictStr @@ -19,6 +19,8 @@ FeatureServiceNotFoundException, FeatureViewNotFoundException, PermissionNotFoundException, + ProjectNotFoundException, + ProjectObjectNotFoundException, SavedDatasetNotFound, ValidationReferenceNotFound, ) @@ -33,6 +35,7 @@ ) from feast.on_demand_feature_view import OnDemandFeatureView from feast.permissions.permission import Permission +from feast.project import Project from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from feast.protos.feast.core.Entity_pb2 import Entity as EntityProto @@ -45,6 +48,7 @@ OnDemandFeatureView as OnDemandFeatureViewProto, ) from 
feast.protos.feast.core.Permission_pb2 import Permission as PermissionProto +from feast.protos.feast.core.Project_pb2 import Project as ProjectProto from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.core.SavedDataset_pb2 import SavedDataset as SavedDatasetProto from feast.protos.feast.core.StreamFeatureView_pb2 import ( @@ -138,26 +142,57 @@ def __init__( query = command.replace("REGISTRY_PATH", f"{self.registry_path}") execute_snowflake_statement(conn, query) + self.purge_feast_metadata = registry_config.purge_feast_metadata + self._sync_feast_metadata_to_projects_table() + if not self.purge_feast_metadata: + self._maybe_init_project_metadata(project) + self.cached_registry_proto = self.proto() - proto_registry_utils.init_project_metadata(self.cached_registry_proto, project) self.cached_registry_proto_created = _utc_now() self._refresh_lock = Lock() self.cached_registry_proto_ttl = timedelta( - seconds=registry_config.cache_ttl_seconds - if registry_config.cache_ttl_seconds is not None - else 0 + seconds=( + registry_config.cache_ttl_seconds + if registry_config.cache_ttl_seconds is not None + else 0 + ) ) self.project = project - def refresh(self, project: Optional[str] = None): - if project: - project_metadata = proto_registry_utils.get_project_metadata( - registry_proto=self.cached_registry_proto, project=project + def _sync_feast_metadata_to_projects_table(self): + feast_metadata_projects: set = [] + projects_set: set = [] + + with GetSnowflakeConnection(self.registry_config) as conn: + query = ( + f'SELECT DISTINCT project_id FROM {self.registry_path}."FEAST_METADATA"' ) - if not project_metadata: - proto_registry_utils.init_project_metadata( - self.cached_registry_proto, project - ) + df = execute_snowflake_statement(conn, query).fetch_pandas_all() + + for row in df.iterrows(): + feast_metadata_projects.add(row[1]["PROJECT_ID"]) + + if len(feast_metadata_projects) > 0: + with 
GetSnowflakeConnection(self.registry_config) as conn: + query = f'SELECT project_id FROM {self.registry_path}."PROJECTS"' + df = execute_snowflake_statement(conn, query).fetch_pandas_all() + + for row in df.iterrows(): + projects_set.add(row[1]["PROJECT_ID"]) + + # Find object in feast_metadata_projects but not in projects + projects_to_sync = set(feast_metadata_projects) - set(projects_set) + for project_name in projects_to_sync: + self.apply_project(Project(name=project_name), commit=True) + + if self.purge_feast_metadata: + with GetSnowflakeConnection(self.registry_config) as conn: + query = f""" + DELETE FROM {self.registry_path}."FEAST_METADATA" + """ + execute_snowflake_statement(conn, query) + + def refresh(self, project: Optional[str] = None): self.cached_registry_proto = self.proto() self.cached_registry_proto_created = _utc_now() @@ -271,6 +306,17 @@ def update_infra(self, infra: Infra, project: str, commit: bool = True): name="infra_obj", ) + def _initialize_project_if_not_exists(self, project_name: str): + try: + self.get_project(project_name, allow_cache=True) + return + except ProjectObjectNotFoundException: + try: + self.get_project(project_name, allow_cache=False) + return + except ProjectObjectNotFoundException: + self.apply_project(Project(name=project_name), commit=True) + def _apply_object( self, table: str, @@ -280,7 +326,11 @@ def _apply_object( proto_field_name: str, name: Optional[str] = None, ): - self._maybe_init_project_metadata(project) + if not self.purge_feast_metadata: + self._maybe_init_project_metadata(project) + # Initialize project is necessary because FeatureStore object can apply objects individually without "feast apply" cli option + if not isinstance(obj, Project): + self._initialize_project_if_not_exists(project_name=project) name = name or (obj.name if hasattr(obj, "name") else None) assert name, f"name needs to be provided for {obj}" @@ -343,7 +393,13 @@ def _apply_object( """ execute_snowflake_statement(conn, query) - 
self._set_last_updated_metadata(update_datetime, project) + if not isinstance(obj, Project): + self.apply_project( + self.get_project(name=project, allow_cache=False), commit=True + ) + + if not self.purge_feast_metadata: + self._set_last_updated_metadata(update_datetime, project) def apply_permission( self, permission: Permission, project: str, commit: bool = True @@ -620,7 +676,6 @@ def _get_object( proto_field_name: str, not_found_exception: Optional[Callable], ): - self._maybe_init_project_metadata(project) with GetSnowflakeConnection(self.registry_config) as conn: query = f""" SELECT @@ -821,7 +876,6 @@ def _list_objects( proto_field_name: str, tags: Optional[dict[str, str]] = None, ): - self._maybe_init_project_metadata(project) with GetSnowflakeConnection(self.registry_config) as conn: query = f""" SELECT @@ -992,8 +1046,27 @@ def get_user_metadata( def proto(self) -> RegistryProto: r = RegistryProto() last_updated_timestamps = [] - projects = self._get_all_projects() - for project in projects: + + def process_project(project: Project): + nonlocal r, last_updated_timestamps + project_name = project.name + last_updated_timestamp = project.last_updated_timestamp + + try: + cached_project = self.get_project(project_name, True) + except ProjectObjectNotFoundException: + cached_project = None + + allow_cache = False + + if cached_project is not None: + allow_cache = ( + last_updated_timestamp <= cached_project.last_updated_timestamp + ) + + r.projects.extend([project.to_proto()]) + last_updated_timestamps.append(last_updated_timestamp) + for lister, registry_proto_field in [ (self.list_entities, r.entities), (self.list_feature_views, r.feature_views), @@ -1003,53 +1076,31 @@ def proto(self) -> RegistryProto: (self.list_feature_services, r.feature_services), (self.list_saved_datasets, r.saved_datasets), (self.list_validation_references, r.validation_references), - (self.list_project_metadata, r.project_metadata), (self.list_permissions, r.permissions), ]: - objs: 
List[Any] = lister(project) # type: ignore + objs: List[Any] = lister(project_name, allow_cache) # type: ignore if objs: obj_protos = [obj.to_proto() for obj in objs] for obj_proto in obj_protos: if "spec" in obj_proto.DESCRIPTOR.fields_by_name: - obj_proto.spec.project = project + obj_proto.spec.project = project_name else: - obj_proto.project = project + obj_proto.project = project_name registry_proto_field.extend(obj_protos) # This is suuuper jank. Because of https://github.com/feast-dev/feast/issues/2783, # the registry proto only has a single infra field, which we're currently setting as the "last" project. - r.infra.CopyFrom(self.get_infra(project).to_proto()) - last_updated_timestamps.append(self._get_last_updated_metadata(project)) + r.infra.CopyFrom(self.get_infra(project_name).to_proto()) + + projects_list = self.list_projects(allow_cache=False) + for project in projects_list: + process_project(project) if last_updated_timestamps: r.last_updated.FromDatetime(max(last_updated_timestamps)) return r - def _get_all_projects(self) -> Set[str]: - projects = set() - - base_tables = [ - "DATA_SOURCES", - "ENTITIES", - "FEATURE_VIEWS", - "ON_DEMAND_FEATURE_VIEWS", - "STREAM_FEATURE_VIEWS", - "PERMISSIONS", - ] - - with GetSnowflakeConnection(self.registry_config) as conn: - for table in base_tables: - query = ( - f'SELECT DISTINCT project_id FROM {self.registry_path}."{table}"' - ) - df = execute_snowflake_statement(conn, query).fetch_pandas_all() - - for row in df.iterrows(): - projects.add(row[1]["PROJECT_ID"]) - - return projects - def _get_last_updated_metadata(self, project: str): with GetSnowflakeConnection(self.registry_config) as conn: query = f""" @@ -1153,3 +1204,98 @@ def _set_last_updated_metadata(self, last_updated: datetime, project: str): def commit(self): pass + + def apply_project( + self, + project: Project, + commit: bool = True, + ): + return self._apply_object( + "PROJECTS", project.name, "project_name", project, "project_proto" + ) + + def 
delete_project( + self, + name: str, + commit: bool = True, + ): + project = self.get_project(name, allow_cache=False) + if project: + with GetSnowflakeConnection(self.registry_config) as conn: + for table in { + "MANAGED_INFRA", + "SAVED_DATASETS", + "VALIDATION_REFERENCES", + "FEATURE_SERVICES", + "FEATURE_VIEWS", + "ON_DEMAND_FEATURE_VIEWS", + "STREAM_FEATURE_VIEWS", + "DATA_SOURCES", + "ENTITIES", + "PERMISSIONS", + "PROJECTS", + }: + query = f""" + DELETE FROM {self.registry_path}."{table}" + WHERE + project_id = '{project}' + """ + execute_snowflake_statement(conn, query) + return + + raise ProjectNotFoundException(name) + + def _get_project( + self, + name: str, + ) -> Project: + return self._get_object( + table="PROJECTS", + name=name, + project=name, + proto_class=ProjectProto, + python_class=Project, + id_field_name="project_name", + proto_field_name="project_proto", + not_found_exception=ProjectObjectNotFoundException, + ) + + def get_project( + self, + name: str, + allow_cache: bool = False, + ) -> Project: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_project(self.cached_registry_proto, name) + return self._get_project(name) + + def _list_projects( + self, + tags: Optional[dict[str, str]], + ) -> List[Project]: + with GetSnowflakeConnection(self.registry_config) as conn: + query = f""" + SELECT project_proto FROM {self.registry_path}."PROJECTS" + """ + df = execute_snowflake_statement(conn, query).fetch_pandas_all() + if not df.empty: + objects = [] + for row in df.iterrows(): + obj = Project.from_proto( + ProjectProto.FromString(row[1]["project_proto"]) + ) + if has_all_tags(obj.tags, tags): + objects.append(obj) + return objects + return [] + + def list_projects( + self, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Project]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_projects(self.cached_registry_proto, 
tags) + return self._list_projects(tags) diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py index 90c6e82e7d..2b4a58266c 100644 --- a/sdk/python/feast/infra/registry/sql.py +++ b/sdk/python/feast/infra/registry/sql.py @@ -1,11 +1,12 @@ import logging import uuid +from concurrent.futures import ThreadPoolExecutor from datetime import datetime, timezone from enum import Enum from pathlib import Path -from typing import Any, Callable, Dict, List, Optional, Set, Union +from typing import Any, Callable, Dict, List, Optional, Union -from pydantic import StrictStr +from pydantic import StrictInt, StrictStr from sqlalchemy import ( # type: ignore BigInteger, Column, @@ -31,6 +32,8 @@ FeatureServiceNotFoundException, FeatureViewNotFoundException, PermissionNotFoundException, + ProjectNotFoundException, + ProjectObjectNotFoundException, SavedDatasetNotFound, ValidationReferenceNotFound, ) @@ -40,6 +43,7 @@ from feast.infra.registry.caching_registry import CachingRegistry from feast.on_demand_feature_view import OnDemandFeatureView from feast.permissions.permission import Permission +from feast.project import Project from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from feast.protos.feast.core.Entity_pb2 import Entity as EntityProto @@ -52,6 +56,7 @@ OnDemandFeatureView as OnDemandFeatureViewProto, ) from feast.protos.feast.core.Permission_pb2 import Permission as PermissionProto +from feast.protos.feast.core.Project_pb2 import Project as ProjectProto from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.core.SavedDataset_pb2 import SavedDataset as SavedDatasetProto from feast.protos.feast.core.StreamFeatureView_pb2 import ( @@ -67,11 +72,21 @@ metadata = MetaData() + +projects = Table( + "projects", + metadata, + Column("project_id", String(255), primary_key=True), + Column("project_name", String(255), 
nullable=False), + Column("last_updated_timestamp", BigInteger, nullable=False), + Column("project_proto", LargeBinary, nullable=False), +) + entities = Table( "entities", metadata, - Column("entity_name", String(50), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("entity_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("entity_proto", LargeBinary, nullable=False), ) @@ -80,7 +95,7 @@ "data_sources", metadata, Column("data_source_name", String(255), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("data_source_proto", LargeBinary, nullable=False), ) @@ -88,8 +103,8 @@ feature_views = Table( "feature_views", metadata, - Column("feature_view_name", String(50), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("feature_view_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("materialized_intervals", LargeBinary, nullable=True), Column("feature_view_proto", LargeBinary, nullable=False), @@ -99,8 +114,8 @@ stream_feature_views = Table( "stream_feature_views", metadata, - Column("feature_view_name", String(50), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("feature_view_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("feature_view_proto", LargeBinary, nullable=False), Column("user_metadata", LargeBinary, nullable=True), @@ -109,8 +124,8 @@ on_demand_feature_views = Table( "on_demand_feature_views", metadata, - Column("feature_view_name", String(50), primary_key=True), - Column("project_id", 
String(50), primary_key=True), + Column("feature_view_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("feature_view_proto", LargeBinary, nullable=False), Column("user_metadata", LargeBinary, nullable=True), @@ -119,8 +134,8 @@ feature_services = Table( "feature_services", metadata, - Column("feature_service_name", String(50), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("feature_service_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("feature_service_proto", LargeBinary, nullable=False), ) @@ -128,8 +143,8 @@ saved_datasets = Table( "saved_datasets", metadata, - Column("saved_dataset_name", String(50), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("saved_dataset_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("saved_dataset_proto", LargeBinary, nullable=False), ) @@ -137,8 +152,8 @@ validation_references = Table( "validation_references", metadata, - Column("validation_reference_name", String(50), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("validation_reference_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("validation_reference_proto", LargeBinary, nullable=False), ) @@ -146,8 +161,8 @@ managed_infra = Table( "managed_infra", metadata, - Column("infra_name", String(50), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("infra_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, 
nullable=False), Column("infra_proto", LargeBinary, nullable=False), ) @@ -156,7 +171,7 @@ "permissions", metadata, Column("permission_name", String(255), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("permission_proto", LargeBinary, nullable=False), ) @@ -170,7 +185,7 @@ class FeastMetadataKeys(Enum): feast_metadata = Table( "feast_metadata", metadata, - Column("project_id", String(50), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("metadata_key", String(50), primary_key=True), Column("metadata_value", String(50), nullable=False), Column("last_updated_timestamp", BigInteger, nullable=False), @@ -190,26 +205,75 @@ class SqlRegistryConfig(RegistryConfig): sqlalchemy_config_kwargs: Dict[str, Any] = {"echo": False} """ Dict[str, Any]: Extra arguments to pass to SQLAlchemy.create_engine. """ + cache_mode: StrictStr = "sync" + """ str: Cache mode type, Possible options are sync and thread(asynchronous caching using threading library)""" + + thread_pool_executor_worker_count: StrictInt = 0 + """ int: Number of worker threads to use for asynchronous caching in SQL Registry. If set to 0, it doesn't use ThreadPoolExecutor. 
""" + class SqlRegistry(CachingRegistry): def __init__( self, - registry_config: Optional[Union[RegistryConfig, SqlRegistryConfig]], + registry_config, project: str, repo_path: Optional[Path], ): - assert registry_config is not None, "SqlRegistry needs a valid registry_config" + assert registry_config is not None and isinstance( + registry_config, SqlRegistryConfig + ), "SqlRegistry needs a valid registry_config" self.engine: Engine = create_engine( registry_config.path, **registry_config.sqlalchemy_config_kwargs ) + self.thread_pool_executor_worker_count = ( + registry_config.thread_pool_executor_worker_count + ) metadata.create_all(self.engine) + self.purge_feast_metadata = registry_config.purge_feast_metadata + # Sync feast_metadata to projects table + # when purge_feast_metadata is set to True, Delete data from + # feast_metadata table and list_project_metadata will not return any data + self._sync_feast_metadata_to_projects_table() + if not self.purge_feast_metadata: + self._maybe_init_project_metadata(project) super().__init__( project=project, cache_ttl_seconds=registry_config.cache_ttl_seconds, cache_mode=registry_config.cache_mode, ) + def _sync_feast_metadata_to_projects_table(self): + feast_metadata_projects: set = [] + projects_set: set = [] + with self.engine.begin() as conn: + stmt = select(feast_metadata).where( + feast_metadata.c.metadata_key == FeastMetadataKeys.PROJECT_UUID.value + ) + rows = conn.execute(stmt).all() + for row in rows: + feast_metadata_projects.append(row._mapping["project_id"]) + + if len(feast_metadata_projects) > 0: + with self.engine.begin() as conn: + stmt = select(projects) + rows = conn.execute(stmt).all() + for row in rows: + projects_set.append(row._mapping["project_id"]) + + # Find object in feast_metadata_projects but not in projects + projects_to_sync = set(feast_metadata_projects) - set(projects_set) + for project_name in projects_to_sync: + self.apply_project(Project(name=project_name), commit=True) + + if 
self.purge_feast_metadata: + with self.engine.begin() as conn: + for project_name in feast_metadata_projects: + stmt = delete(feast_metadata).where( + feast_metadata.c.project_id == project_name + ) + conn.execute(stmt) + def teardown(self): for t in { entities, @@ -673,8 +737,27 @@ def get_user_metadata( def proto(self) -> RegistryProto: r = RegistryProto() last_updated_timestamps = [] - projects = self._get_all_projects() - for project in projects: + + def process_project(project: Project): + nonlocal r, last_updated_timestamps + project_name = project.name + last_updated_timestamp = project.last_updated_timestamp + + try: + cached_project = self.get_project(project_name, True) + except ProjectObjectNotFoundException: + cached_project = None + + allow_cache = False + + if cached_project is not None: + allow_cache = ( + last_updated_timestamp <= cached_project.last_updated_timestamp + ) + + r.projects.extend([project.to_proto()]) + last_updated_timestamps.append(last_updated_timestamp) + for lister, registry_proto_field in [ (self.list_entities, r.entities), (self.list_feature_views, r.feature_views), @@ -684,23 +767,31 @@ def proto(self) -> RegistryProto: (self.list_feature_services, r.feature_services), (self.list_saved_datasets, r.saved_datasets), (self.list_validation_references, r.validation_references), - (self.list_project_metadata, r.project_metadata), (self.list_permissions, r.permissions), ]: - objs: List[Any] = lister(project) # type: ignore + objs: List[Any] = lister(project_name, allow_cache) # type: ignore if objs: obj_protos = [obj.to_proto() for obj in objs] for obj_proto in obj_protos: if "spec" in obj_proto.DESCRIPTOR.fields_by_name: - obj_proto.spec.project = project + obj_proto.spec.project = project_name else: - obj_proto.project = project + obj_proto.project = project_name registry_proto_field.extend(obj_protos) # This is suuuper jank. 
Because of https://github.com/feast-dev/feast/issues/2783, # the registry proto only has a single infra field, which we're currently setting as the "last" project. - r.infra.CopyFrom(self.get_infra(project).to_proto()) - last_updated_timestamps.append(self._get_last_updated_metadata(project)) + r.infra.CopyFrom(self.get_infra(project_name).to_proto()) + + projects_list = self.list_projects(allow_cache=False) + if self.thread_pool_executor_worker_count == 0: + for project in projects_list: + process_project(project) + else: + with ThreadPoolExecutor( + max_workers=self.thread_pool_executor_worker_count + ) as executor: + executor.map(process_project, projects_list) if last_updated_timestamps: r.last_updated.FromDatetime(max(last_updated_timestamps)) @@ -711,6 +802,17 @@ def commit(self): # This method is a no-op since we're always writing values eagerly to the db. pass + def _initialize_project_if_not_exists(self, project_name: str): + try: + self.get_project(project_name, allow_cache=True) + return + except ProjectObjectNotFoundException: + try: + self.get_project(project_name, allow_cache=False) + return + except ProjectObjectNotFoundException: + self.apply_project(Project(name=project_name), commit=True) + def _apply_object( self, table: Table, @@ -720,8 +822,11 @@ def _apply_object( proto_field_name: str, name: Optional[str] = None, ): - self._maybe_init_project_metadata(project) - + if not self.purge_feast_metadata: + self._maybe_init_project_metadata(project) + # Initialize project is necessary because FeatureStore object can apply objects individually without "feast apply" cli option + if not isinstance(obj, Project): + self._initialize_project_if_not_exists(project_name=project) name = name or (obj.name if hasattr(obj, "name") else None) assert name, f"name needs to be provided for {obj}" @@ -742,12 +847,15 @@ def _apply_object( "feature_view_proto", "feature_service_proto", "permission_proto", + "project_proto", ]: deserialized_proto = 
self.deserialize_registry_values( row._mapping[proto_field_name], type(obj) ) obj.created_timestamp = ( - deserialized_proto.meta.created_timestamp.ToDatetime() + deserialized_proto.meta.created_timestamp.ToDatetime().replace( + tzinfo=timezone.utc + ) ) if isinstance(obj, (FeatureView, StreamFeatureView)): obj.update_materialization_intervals( @@ -789,7 +897,12 @@ def _apply_object( ) conn.execute(insert_stmt) - self._set_last_updated_metadata(update_datetime, project) + if not isinstance(obj, Project): + self.apply_project( + self.get_project(name=project, allow_cache=False), commit=True + ) + if not self.purge_feast_metadata: + self._set_last_updated_metadata(update_datetime, project) def _maybe_init_project_metadata(self, project): # Initialize project metadata if needed @@ -827,7 +940,11 @@ def _delete_object( rows = conn.execute(stmt) if rows.rowcount < 1 and not_found_exception: raise not_found_exception(name, project) - self._set_last_updated_metadata(_utc_now(), project) + self.apply_project( + self.get_project(name=project, allow_cache=False), commit=True + ) + if not self.purge_feast_metadata: + self._set_last_updated_metadata(_utc_now(), project) return rows.rowcount @@ -842,8 +959,6 @@ def _get_object( proto_field_name: str, not_found_exception: Optional[Callable], ): - self._maybe_init_project_metadata(project) - with self.engine.begin() as conn: stmt = select(table).where( getattr(table.c, id_field_name) == name, table.c.project_id == project @@ -866,7 +981,6 @@ def _list_objects( proto_field_name: str, tags: Optional[dict[str, str]] = None, ): - self._maybe_init_project_metadata(project) with self.engine.begin() as conn: stmt = select(table).where(table.c.project_id == project) rows = conn.execute(stmt).all() @@ -929,24 +1043,6 @@ def _get_last_updated_metadata(self, project: str): return datetime.fromtimestamp(update_time, tz=timezone.utc) - def _get_all_projects(self) -> Set[str]: - projects = set() - with self.engine.begin() as conn: - for table 
in { - entities, - data_sources, - feature_views, - on_demand_feature_views, - stream_feature_views, - permissions, - }: - stmt = select(table) - rows = conn.execute(stmt).all() - for row in rows: - projects.add(row._mapping["project_id"]) - - return projects - def _get_permission(self, name: str, project: str) -> Permission: return self._get_object( table=permissions, @@ -987,3 +1083,72 @@ def delete_permission(self, name: str, project: str, commit: bool = True): rows = conn.execute(stmt) if rows.rowcount < 1: raise PermissionNotFoundException(name, project) + + def _list_projects( + self, + tags: Optional[dict[str, str]], + ) -> List[Project]: + with self.engine.begin() as conn: + stmt = select(projects) + rows = conn.execute(stmt).all() + if rows: + objects = [] + for row in rows: + obj = Project.from_proto( + ProjectProto.FromString(row._mapping["project_proto"]) + ) + if utils.has_all_tags(obj.tags, tags): + objects.append(obj) + return objects + return [] + + def _get_project( + self, + name: str, + ) -> Project: + return self._get_object( + table=projects, + name=name, + project=name, + proto_class=ProjectProto, + python_class=Project, + id_field_name="project_name", + proto_field_name="project_proto", + not_found_exception=ProjectObjectNotFoundException, + ) + + def apply_project( + self, + project: Project, + commit: bool = True, + ): + return self._apply_object( + projects, project.name, "project_name", project, "project_proto" + ) + + def delete_project( + self, + name: str, + commit: bool = True, + ): + project = self.get_project(name, allow_cache=False) + if project: + with self.engine.begin() as conn: + for t in { + managed_infra, + saved_datasets, + validation_references, + feature_services, + feature_views, + on_demand_feature_views, + stream_feature_views, + data_sources, + entities, + permissions, + projects, + }: + stmt = delete(t).where(t.c.project_id == name) + conn.execute(stmt) + return + + raise ProjectNotFoundException(name) diff --git 
a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql index 021d175b4e..fc13332e4b 100644 --- a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql +++ b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql @@ -1,3 +1,11 @@ +CREATE TABLE IF NOT EXISTS REGISTRY_PATH."PROJECTS" ( + project_id VARCHAR, + project_name VARCHAR NOT NULL, + last_updated_timestamp TIMESTAMP_LTZ NOT NULL, + project_proto BINARY NOT NULL, + PRIMARY KEY (project_id) +); + CREATE TABLE IF NOT EXISTS REGISTRY_PATH."DATA_SOURCES" ( data_source_name VARCHAR, project_id VARCHAR, diff --git a/sdk/python/feast/permissions/permission.py b/sdk/python/feast/permissions/permission.py index 1117a3ee82..9046abbfa9 100644 --- a/sdk/python/feast/permissions/permission.py +++ b/sdk/python/feast/permissions/permission.py @@ -256,6 +256,7 @@ def get_type_class_from_permission_type(permission_type: str): _PERMISSION_TYPES = { + "PROJECT": "feast.project.Project", "FEATURE_VIEW": "feast.feature_view.FeatureView", "ON_DEMAND_FEATURE_VIEW": "feast.on_demand_feature_view.OnDemandFeatureView", "BATCH_FEATURE_VIEW": "feast.batch_feature_view.BatchFeatureView", diff --git a/sdk/python/feast/permissions/security_manager.py b/sdk/python/feast/permissions/security_manager.py index c00a3d8853..cb8cafd5b9 100644 --- a/sdk/python/feast/permissions/security_manager.py +++ b/sdk/python/feast/permissions/security_manager.py @@ -10,6 +10,7 @@ from feast.permissions.enforcer import enforce_policy from feast.permissions.permission import Permission from feast.permissions.user import User +from feast.project import Project logger = logging.getLogger(__name__) @@ -88,7 +89,9 @@ def assert_permissions( def assert_permissions_to_update( resource: FeastObject, - getter: Callable[[str, str, bool], FeastObject], + getter: Union[ + Callable[[str, str, bool], FeastObject], Callable[[str, 
bool], FeastObject] + ], project: str, allow_cache: bool = True, ) -> FeastObject: @@ -117,11 +120,17 @@ def assert_permissions_to_update( actions = [AuthzedAction.DESCRIBE, AuthzedAction.UPDATE] try: - existing_resource = getter( - name=resource.name, - project=project, - allow_cache=allow_cache, - ) # type: ignore[call-arg] + if isinstance(resource, Project): + existing_resource = getter( + name=resource.name, + allow_cache=allow_cache, + ) # type: ignore[call-arg] + else: + existing_resource = getter( + name=resource.name, + project=project, + allow_cache=allow_cache, + ) # type: ignore[call-arg] assert_permissions(resource=existing_resource, actions=actions) except FeastObjectNotFoundException: actions = [AuthzedAction.CREATE] diff --git a/sdk/python/feast/project.py b/sdk/python/feast/project.py new file mode 100644 index 0000000000..d9ec45dcc9 --- /dev/null +++ b/sdk/python/feast/project.py @@ -0,0 +1,175 @@ +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from datetime import datetime, timezone +from typing import Dict, Optional + +from google.protobuf.json_format import MessageToJson +from typeguard import typechecked + +from feast.protos.feast.core.Project_pb2 import Project as ProjectProto +from feast.protos.feast.core.Project_pb2 import ProjectMeta as ProjectMetaProto +from feast.protos.feast.core.Project_pb2 import ProjectSpec as ProjectSpecProto +from feast.utils import _utc_now + + +@typechecked +class Project: + """ + Project is a collection of Feast Objects. Projects provide complete isolation of + feature stores at the infrastructure level. + + Attributes: + name: The unique name of the project. + description: A human-readable description. + tags: A dictionary of key-value pairs to store arbitrary metadata. + owner: The owner of the project, typically the email of the primary maintainer. + created_timestamp: The time when the project was created. + last_updated_timestamp: The time when the project was last updated. + """ + + name: str + description: str + tags: Dict[str, str] + owner: str + created_timestamp: datetime + last_updated_timestamp: datetime + + def __init__( + self, + *, + name: str, + description: str = "", + tags: Optional[Dict[str, str]] = None, + owner: str = "", + created_timestamp: Optional[datetime] = None, + last_updated_timestamp: Optional[datetime] = None, + ): + """ + Creates a Project object. + + Args: + name: The unique name of the project. + description (optional): A human-readable description. + tags (optional): A dictionary of key-value pairs to store arbitrary metadata. + owner (optional): The owner of the project, typically the email of the primary maintainer. + created_timestamp (optional): The time when the project was created. Defaults to the current time. + last_updated_timestamp (optional): The time when the project was last updated. + + Raises: + ValueError: Parameters are specified incorrectly.
+ """ + self.name = name + self.description = description + self.tags = tags if tags is not None else {} + self.owner = owner + updated_time = _utc_now() + self.created_timestamp = created_timestamp or updated_time + self.last_updated_timestamp = last_updated_timestamp or updated_time + + def __hash__(self) -> int: + return hash((self.name)) + + def __eq__(self, other): + if not isinstance(other, Project): + raise TypeError("Comparisons should only involve Project class objects.") + + if ( + self.name != other.name + or self.description != other.description + or self.tags != other.tags + or self.owner != other.owner + or self.created_timestamp != other.created_timestamp + or self.last_updated_timestamp != other.last_updated_timestamp + ): + return False + + return True + + def __str__(self): + return str(MessageToJson(self.to_proto())) + + def __lt__(self, other): + return self.name < other.name + + def is_valid(self): + """ + Validates the state of this project locally. + + Raises: + ValueError: The project does not have a name or does not have a type. + """ + if not self.name: + raise ValueError("The project does not have a name.") + + from feast.repo_operations import is_valid_name + + if not is_valid_name(self.name): + raise ValueError( + f"Project name, {self.name}, should only have " + f"alphanumerical values and underscores but not start with an underscore." + ) + + @classmethod + def from_proto(cls, project_proto: ProjectProto): + """ + Creates a project from a protobuf representation of an project. + + Args: + entity_proto: A protobuf representation of an project. + + Returns: + An Entity object based on the entity protobuf. 
+ """ + project = cls( + name=project_proto.spec.name, + description=project_proto.spec.description, + tags=dict(project_proto.spec.tags), + owner=project_proto.spec.owner, + ) + if project_proto.meta.HasField("created_timestamp"): + project.created_timestamp = ( + project_proto.meta.created_timestamp.ToDatetime().replace( + tzinfo=timezone.utc + ) + ) + if project_proto.meta.HasField("last_updated_timestamp"): + project.last_updated_timestamp = ( + project_proto.meta.last_updated_timestamp.ToDatetime().replace( + tzinfo=timezone.utc + ) + ) + + return project + + def to_proto(self) -> ProjectProto: + """ + Converts an project object to its protobuf representation. + + Returns: + An ProjectProto protobuf. + """ + meta = ProjectMetaProto() + if self.created_timestamp: + meta.created_timestamp.FromDatetime(self.created_timestamp) + if self.last_updated_timestamp: + meta.last_updated_timestamp.FromDatetime(self.last_updated_timestamp) + + spec = ProjectSpecProto( + name=self.name, + description=self.description, + tags=self.tags, + owner=self.owner, + ) + + return ProjectProto(spec=spec, meta=meta) diff --git a/sdk/python/feast/registry_server.py b/sdk/python/feast/registry_server.py index 40475aa580..2661f25882 100644 --- a/sdk/python/feast/registry_server.py +++ b/sdk/python/feast/registry_server.py @@ -32,6 +32,7 @@ init_security_manager, str_to_auth_manager_type, ) +from feast.project import Project from feast.protos.feast.registry import RegistryServer_pb2, RegistryServer_pb2_grpc from feast.saved_dataset import SavedDataset, ValidationReference from feast.stream_feature_view import StreamFeatureView @@ -624,6 +625,58 @@ def DeletePermission( ) return Empty() + def ApplyProject(self, request: RegistryServer_pb2.ApplyProjectRequest, context): + project = cast( + Project, + assert_permissions_to_update( + resource=Project.from_proto(request.project), + getter=self.proxied_registry.get_project, + project=Project.from_proto(request.project).name, + ), + ) + 
self.proxied_registry.apply_project( + project=project, + commit=request.commit, + ) + return Empty() + + def GetProject(self, request: RegistryServer_pb2.GetProjectRequest, context): + project = self.proxied_registry.get_project( + name=request.name, allow_cache=request.allow_cache + ) + assert_permissions( + resource=project, + actions=[AuthzedAction.DESCRIBE], + ) + return project.to_proto() + + def ListProjects(self, request: RegistryServer_pb2.ListProjectsRequest, context): + return RegistryServer_pb2.ListProjectsResponse( + projects=[ + project.to_proto() + for project in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_projects( + allow_cache=request.allow_cache + ), + ), + actions=AuthzedAction.DESCRIBE, + ) + ] + ) + + def DeleteProject(self, request: RegistryServer_pb2.DeleteProjectRequest, context): + assert_permissions( + resource=self.proxied_registry.get_project( + name=request.name, + ), + actions=[AuthzedAction.DELETE], + ) + + self.proxied_registry.delete_project(name=request.name, commit=request.commit) + return Empty() + def Commit(self, request, context): self.proxied_registry.commit() return Empty() diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index 52372f2987..bf0bde6fcb 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -9,6 +9,7 @@ BaseModel, ConfigDict, Field, + StrictBool, StrictInt, StrictStr, ValidationError, @@ -132,11 +133,10 @@ class RegistryConfig(FeastBaseModel): s3_additional_kwargs: Optional[Dict[str, str]] = None """ Dict[str, str]: Extra arguments to pass to boto3 when writing the registry file to S3. """ - sqlalchemy_config_kwargs: Dict[str, Any] = {} - """ Dict[str, Any]: Extra arguments to pass to SQLAlchemy.create_engine. 
""" - - cache_mode: StrictStr = "sync" - """ str: Cache mode type, Possible options are sync and thread(asynchronous caching using threading library)""" + purge_feast_metadata: StrictBool = False + """ bool: Stops using feast_metadata table and delete data from feast_metadata table. + Once this is set to True, it cannot be reverted back to False. Reverting back to False will + only reset the project but not all the projects""" @field_validator("path") def validate_path(cls, path: str, values: ValidationInfo) -> str: diff --git a/sdk/python/feast/repo_contents.py b/sdk/python/feast/repo_contents.py index 9893d5be4e..d65f6ac7bb 100644 --- a/sdk/python/feast/repo_contents.py +++ b/sdk/python/feast/repo_contents.py @@ -19,6 +19,7 @@ from feast.feature_view import FeatureView from feast.on_demand_feature_view import OnDemandFeatureView from feast.permissions.permission import Permission +from feast.project import Project from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.stream_feature_view import StreamFeatureView @@ -28,6 +29,7 @@ class RepoContents(NamedTuple): Represents the objects in a Feast feature repo. 
""" + projects: List[Project] data_sources: List[DataSource] feature_views: List[FeatureView] on_demand_feature_views: List[OnDemandFeatureView] @@ -38,6 +40,7 @@ class RepoContents(NamedTuple): def to_registry_proto(self) -> RegistryProto: registry_proto = RegistryProto() + registry_proto.projects.extend([e.to_proto() for e in self.projects]) registry_proto.data_sources.extend([e.to_proto() for e in self.data_sources]) registry_proto.entities.extend([e.to_proto() for e in self.entities]) registry_proto.feature_views.extend( diff --git a/sdk/python/feast/repo_operations.py b/sdk/python/feast/repo_operations.py index cb27568957..6629768375 100644 --- a/sdk/python/feast/repo_operations.py +++ b/sdk/python/feast/repo_operations.py @@ -1,6 +1,7 @@ import base64 import importlib import json +import logging import os import random import re @@ -24,14 +25,18 @@ from feast.feature_store import FeatureStore from feast.feature_view import DUMMY_ENTITY, FeatureView from feast.file_utils import replace_str_in_file +from feast.infra.registry.base_registry import BaseRegistry from feast.infra.registry.registry import FEAST_OBJECT_TYPES, FeastObjectType, Registry from feast.names import adjectives, animals from feast.on_demand_feature_view import OnDemandFeatureView from feast.permissions.permission import Permission +from feast.project import Project from feast.repo_config import RepoConfig from feast.repo_contents import RepoContents from feast.stream_feature_view import StreamFeatureView +logger = logging.getLogger(__name__) + def py_path_to_module(path: Path) -> str: return ( @@ -115,6 +120,7 @@ def parse_repo(repo_root: Path) -> RepoContents: not result in duplicates, but defining two equal objects will. 
""" res = RepoContents( + projects=[], data_sources=[], entities=[], feature_views=[], @@ -207,6 +213,8 @@ def parse_repo(repo_root: Path) -> RepoContents: (obj is p) for p in res.permissions ): res.permissions.append(obj) + elif isinstance(obj, Project) and not any((obj is p) for p in res.projects): + res.projects.append(obj) res.entities.append(DUMMY_ENTITY) return res @@ -214,33 +222,57 @@ def parse_repo(repo_root: Path) -> RepoContents: def plan(repo_config: RepoConfig, repo_path: Path, skip_source_validation: bool): os.chdir(repo_path) - project, registry, repo, store = _prepare_registry_and_repo(repo_config, repo_path) - - if not skip_source_validation: - provider = store._get_provider() - data_sources = [t.batch_source for t in repo.feature_views] - # Make sure the data source used by this feature view is supported by Feast - for data_source in data_sources: - provider.validate_data_source(store.config, data_source) + repo = _get_repo_contents(repo_path, repo_config.project) + for project in repo.projects: + repo_config.project = project.name + store, registry = _get_store_and_registry(repo_config) + # TODO: When we support multiple projects in a single repo, we should filter repo contents by project + if not skip_source_validation: + provider = store._get_provider() + data_sources = [t.batch_source for t in repo.feature_views] + # Make sure the data source used by this feature view is supported by Feast + for data_source in data_sources: + provider.validate_data_source(store.config, data_source) + + registry_diff, infra_diff, _ = store.plan(repo) + click.echo(registry_diff.to_string()) + click.echo(infra_diff.to_string()) - registry_diff, infra_diff, _ = store.plan(repo) - click.echo(registry_diff.to_string()) - click.echo(infra_diff.to_string()) +def _get_repo_contents(repo_path, project_name: Optional[str] = None): + sys.dont_write_bytecode = True + repo = parse_repo(repo_path) -def _prepare_registry_and_repo(repo_config, repo_path): - store = 
FeatureStore(config=repo_config) - project = store.project - if not is_valid_name(project): + if len(repo.projects) < 1: + if project_name: + print( + f"No project found in the repository. Using project name {project_name} defined in feature_store.yaml" + ) + repo.projects.append(Project(name=project_name)) + else: + print( + "No project found in the repository. Either define Project in repository or define a project in feature_store.yaml" + ) + sys.exit(1) + elif len(repo.projects) == 1: + if repo.projects[0].name != project_name: + print( + "Project object name should match with the project name defined in feature_store.yaml" + ) + sys.exit(1) + else: print( - f"{project} is not valid. Project name should only have " - f"alphanumerical values and underscores but not start with an underscore." + "Multiple projects found in the repository. Currently no support for multiple projects" ) sys.exit(1) + + return repo + + +def _get_store_and_registry(repo_config): + store = FeatureStore(config=repo_config) registry = store.registry - sys.dont_write_bytecode = True - repo = parse_repo(repo_path) - return project, registry, repo, store + return store, registry def extract_objects_for_apply_delete(project, registry, repo): @@ -289,8 +321,8 @@ def extract_objects_for_apply_delete(project, registry, repo): def apply_total_with_repo_instance( store: FeatureStore, - project: str, - registry: Registry, + project_name: str, + registry: BaseRegistry, repo: RepoContents, skip_source_validation: bool, ): @@ -307,7 +339,7 @@ def apply_total_with_repo_instance( all_to_delete, views_to_keep, views_to_delete, - ) = extract_objects_for_apply_delete(project, registry, repo) + ) = extract_objects_for_apply_delete(project_name, registry, repo) if store._should_use_plan(): registry_diff, infra_diff, new_infra = store.plan(repo) @@ -357,10 +389,21 @@ def create_feature_store( def apply_total(repo_config: RepoConfig, repo_path: Path, skip_source_validation: bool): os.chdir(repo_path) - 
project, registry, repo, store = _prepare_registry_and_repo(repo_config, repo_path) - apply_total_with_repo_instance( - store, project, registry, repo, skip_source_validation - ) + repo = _get_repo_contents(repo_path, repo_config.project) + for project in repo.projects: + repo_config.project = project.name + store, registry = _get_store_and_registry(repo_config) + if not is_valid_name(project.name): + print( + f"{project.name} is not valid. Project name should only have " + f"alphanumerical values and underscores but not start with an underscore." + ) + sys.exit(1) + # TODO: When we support multiple projects in a single repo, we should filter repo contents by project. Currently there is no way to associate Feast objects to project. + print(f"Applying changes for project {project.name}") + apply_total_with_repo_instance( + store, project.name, registry, repo, skip_source_validation + ) def teardown(repo_config: RepoConfig, repo_path: Optional[str]): diff --git a/sdk/python/feast/templates/local/bootstrap.py b/sdk/python/feast/templates/local/bootstrap.py index ee2847c19c..e2c1efdbc4 100644 --- a/sdk/python/feast/templates/local/bootstrap.py +++ b/sdk/python/feast/templates/local/bootstrap.py @@ -10,6 +10,7 @@ def bootstrap(): from feast.driver_test_data import create_driver_hourly_stats_df repo_path = pathlib.Path(__file__).parent.absolute() / "feature_repo" + project_name = pathlib.Path(__file__).parent.absolute().name data_path = repo_path / "data" data_path.mkdir(exist_ok=True) @@ -23,6 +24,7 @@ def bootstrap(): driver_df.to_parquet(path=str(driver_stats_path), allow_truncated_timestamps=True) example_py_file = repo_path / "example_repo.py" + replace_str_in_file(example_py_file, "%PROJECT_NAME%", str(project_name)) replace_str_in_file(example_py_file, "%PARQUET_PATH%", str(driver_stats_path)) replace_str_in_file(example_py_file, "%LOGGING_PATH%", str(data_path)) diff --git a/sdk/python/feast/templates/local/feature_repo/example_repo.py 
b/sdk/python/feast/templates/local/feature_repo/example_repo.py index debe9d45e9..e2fd0a891c 100644 --- a/sdk/python/feast/templates/local/feature_repo/example_repo.py +++ b/sdk/python/feast/templates/local/feature_repo/example_repo.py @@ -10,6 +10,7 @@ FeatureView, Field, FileSource, + Project, PushSource, RequestSource, ) @@ -18,6 +19,9 @@ from feast.on_demand_feature_view import on_demand_feature_view from feast.types import Float32, Float64, Int64 +# Define a project for the feature repo +project = Project(name="%PROJECT_NAME%", description="A project for driver statistics") + # Define an entity for the driver. You can think of an entity as a primary key used to # fetch features. driver = Entity(name="driver", join_keys=["driver_id"]) diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index 5e70da074c..a9bb9ba9c4 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -35,8 +35,8 @@ create_basic_driver_dataset, # noqa: E402 create_document_dataset, ) -from tests.integration.feature_repos.integration_test_repo_config import ( - IntegrationTestRepoConfig, # noqa: E402 +from tests.integration.feature_repos.integration_test_repo_config import ( # noqa: E402 + IntegrationTestRepoConfig, ) from tests.integration.feature_repos.repo_configuration import ( # noqa: E402 AVAILABLE_OFFLINE_STORES, @@ -48,8 +48,8 @@ construct_universal_feature_views, construct_universal_test_data, ) -from tests.integration.feature_repos.universal.data_sources.file import ( - FileDataSourceCreator, # noqa: E402 +from tests.integration.feature_repos.universal.data_sources.file import ( # noqa: E402 + FileDataSourceCreator, ) from tests.integration.feature_repos.universal.entities import ( # noqa: E402 customer, @@ -451,15 +451,20 @@ def is_integration_test(all_markers_from_module): @pytest.fixture( scope="module", params=[ - dedent(""" + dedent( + """ auth: type: no_auth - """), - dedent(""" + """ + ), + dedent( + """ auth: type: kubernetes - 
"""), - dedent(""" + """ + ), + dedent( + """ auth: type: oidc client_id: feast-integration-client @@ -467,7 +472,8 @@ def is_integration_test(all_markers_from_module): username: reader_writer password: password auth_discovery_url: KEYCLOAK_URL_PLACE_HOLDER/realms/master/.well-known/openid-configuration - """), + """ + ), ], ) def auth_config(request, is_integration_test): diff --git a/sdk/python/tests/example_repos/example_feature_repo_with_project_1.py b/sdk/python/tests/example_repos/example_feature_repo_with_project_1.py new file mode 100644 index 0000000000..ad04d7ae66 --- /dev/null +++ b/sdk/python/tests/example_repos/example_feature_repo_with_project_1.py @@ -0,0 +1,151 @@ +from datetime import timedelta + +import pandas as pd + +from feast import Entity, FeatureService, FeatureView, Field, FileSource, PushSource +from feast.on_demand_feature_view import on_demand_feature_view +from feast.project import Project +from feast.types import Array, Float32, Int64, String +from tests.integration.feature_repos.universal.feature_views import TAGS + +# Note that file source paths are not validated, so there doesn't actually need to be any data +# at the paths for these file sources. Since these paths are effectively fake, this example +# feature repo should not be used for historical retrieval. 
+project = Project( + name="test_universal_cli_with_project_4567", + description="test_universal_cli_with_project_4567 description", + tags={"application": "integration"}, + owner="test@test.com", +) + +driver_locations_source = FileSource( + path="data/driver_locations.parquet", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", +) + +customer_profile_source = FileSource( + name="customer_profile_source", + path="data/customer_profiles.parquet", + timestamp_field="event_timestamp", +) + +customer_driver_combined_source = FileSource( + path="data/customer_driver_combined.parquet", + timestamp_field="event_timestamp", +) + +driver_locations_push_source = PushSource( + name="driver_locations_push", + batch_source=driver_locations_source, +) + +rag_documents_source = FileSource( + name="rag_documents_source", + path="data/rag_documents.parquet", + timestamp_field="event_timestamp", +) + +driver = Entity( + name="driver", # The name is derived from this argument, not object name. + join_keys=["driver_id"], + description="driver id", + tags=TAGS, +) + +customer = Entity( + name="customer", # The name is derived from this argument, not object name. + join_keys=["customer_id"], + tags=TAGS, +) + +item = Entity( + name="item_id", # The name is derived from this argument, not object name. 
+ join_keys=["item_id"], +) + +driver_locations = FeatureView( + name="driver_locations", + entities=[driver], + ttl=timedelta(days=1), + schema=[ + Field(name="lat", dtype=Float32), + Field(name="lon", dtype=String), + Field(name="driver_id", dtype=Int64), + ], + online=True, + source=driver_locations_source, + tags={}, +) + +pushed_driver_locations = FeatureView( + name="pushed_driver_locations", + entities=[driver], + ttl=timedelta(days=1), + schema=[ + Field(name="driver_lat", dtype=Float32), + Field(name="driver_long", dtype=String), + Field(name="driver_id", dtype=Int64), + ], + online=True, + source=driver_locations_push_source, + tags={}, +) + +customer_profile = FeatureView( + name="customer_profile", + entities=[customer], + ttl=timedelta(days=1), + schema=[ + Field(name="avg_orders_day", dtype=Float32), + Field(name="name", dtype=String), + Field(name="age", dtype=Int64), + Field(name="customer_id", dtype=String), + ], + online=True, + source=customer_profile_source, + tags={}, +) + +customer_driver_combined = FeatureView( + name="customer_driver_combined", + entities=[customer, driver], + ttl=timedelta(days=1), + schema=[ + Field(name="trips", dtype=Int64), + Field(name="driver_id", dtype=Int64), + Field(name="customer_id", dtype=String), + ], + online=True, + source=customer_driver_combined_source, + tags={}, +) + +document_embeddings = FeatureView( + name="document_embeddings", + entities=[item], + schema=[ + Field(name="Embeddings", dtype=Array(Float32)), + Field(name="item_id", dtype=String), + ], + source=rag_documents_source, + ttl=timedelta(hours=24), +) + + +@on_demand_feature_view( + sources=[customer_profile], + schema=[Field(name="on_demand_age", dtype=Int64)], + mode="pandas", +) +def customer_profile_pandas_odfv(inputs: pd.DataFrame) -> pd.DataFrame: + outputs = pd.DataFrame() + outputs["on_demand_age"] = inputs["age"] + 1 + return outputs + + +all_drivers_feature_service = FeatureService( + name="driver_locations_service", + 
features=[driver_locations], + tags=TAGS, +) diff --git a/sdk/python/tests/integration/online_store/test_remote_online_store.py b/sdk/python/tests/integration/online_store/test_remote_online_store.py index f74fb14a86..d8c92077db 100644 --- a/sdk/python/tests/integration/online_store/test_remote_online_store.py +++ b/sdk/python/tests/integration/online_store/test_remote_online_store.py @@ -187,9 +187,6 @@ def _create_remote_client_feature_store( auth_config=auth_config, ) - result = runner.run(["--chdir", repo_path, "apply"], cwd=temp_dir) - assert result.returncode == 0 - return FeatureStore(repo_path=repo_path) diff --git a/sdk/python/tests/integration/registration/test_universal_cli.py b/sdk/python/tests/integration/registration/test_universal_cli.py index 5c238da24d..735f71407f 100644 --- a/sdk/python/tests/integration/registration/test_universal_cli.py +++ b/sdk/python/tests/integration/registration/test_universal_cli.py @@ -52,7 +52,9 @@ def test_universal_cli(): for key, value in registry_dict.items() } - # entity & feature view list commands should succeed + # project, entity & feature view list commands should succeed + result = runner.run(["projects", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) result = runner.run(["entities", "list"], cwd=repo_path) assertpy.assert_that(result.returncode).is_equal_to(0) result = runner.run(["feature-views", "list"], cwd=repo_path) @@ -71,6 +73,10 @@ def test_universal_cli(): assertpy.assert_that(result.returncode).is_equal_to(0) # entity & feature view describe commands should succeed when objects exist + result = runner.run(["projects", "describe", project], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["projects", "current_project"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) result = runner.run(["entities", "describe", "driver"], cwd=repo_path) assertpy.assert_that(result.returncode).is_equal_to(0) result 
= runner.run( @@ -89,8 +95,132 @@ def test_universal_cli(): ) assertpy.assert_that(result.returncode).is_equal_to(0) assertpy.assert_that(fs.list_data_sources()).is_length(5) + assertpy.assert_that(fs.list_projects()).is_length(1) # entity & feature view describe commands should fail when objects don't exist + result = runner.run(["projects", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["entities", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["feature-views", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["feature-services", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["data-sources", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["permissions", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + + # Doing another apply should be a no op, and should not cause errors + result = runner.run(["apply"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + basic_rw_test( + FeatureStore(repo_path=str(repo_path), config=None), + view_name="driver_locations", + ) + + # Confirm that registry contents have not changed. 
+ registry_dict = fs.registry.to_dict(project=project) + assertpy.assert_that(registry_specs).is_equal_to( + { + key: [fco["spec"] if "spec" in fco else fco for fco in value] + for key, value in registry_dict.items() + } + ) + + result = runner.run(["teardown"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + finally: + runner.run(["teardown"], cwd=repo_path) + + +@pytest.mark.integration +def test_universal_cli_with_project(): + project = "test_universal_cli_with_project_4567" + runner = CliRunner() + + with tempfile.TemporaryDirectory() as repo_dir_name: + try: + repo_path = Path(repo_dir_name) + feature_store_yaml = make_feature_store_yaml( + project, + repo_path, + FileDataSourceCreator("project"), + "local", + {"type": "sqlite"}, + ) + + repo_config = repo_path / "feature_store.yaml" + + repo_config.write_text(dedent(feature_store_yaml)) + + repo_example = repo_path / "example.py" + repo_example.write_text( + get_example_repo("example_feature_repo_with_project_1.py") + ) + result = runner.run(["apply"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + + # Store registry contents, to be compared later. + fs = FeatureStore(repo_path=str(repo_path)) + registry_dict = fs.registry.to_dict(project=project) + # Save only the specs, not the metadata. 
+ registry_specs = { + key: [fco["spec"] if "spec" in fco else fco for fco in value] + for key, value in registry_dict.items() + } + + # entity & feature view list commands should succeed + result = runner.run(["projects", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["entities", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["feature-views", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["feature-services", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["data-sources", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["permissions", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + + # entity & feature view describe commands should succeed when objects exist + result = runner.run(["projects", "describe", project], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["projects", "current_project"], cwd=repo_path) + print(result.returncode) + print("result: ", result) + print("result.stdout: ", result.stdout) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["entities", "describe", "driver"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run( + ["feature-views", "describe", "driver_locations"], cwd=repo_path + ) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run( + ["feature-services", "describe", "driver_locations_service"], + cwd=repo_path, + ) + assertpy.assert_that(result.returncode).is_equal_to(0) + assertpy.assert_that(fs.list_feature_views()).is_length(5) + result = runner.run( + ["data-sources", "describe", "customer_profile_source"], + cwd=repo_path, + ) + 
assertpy.assert_that(result.returncode).is_equal_to(0) + assertpy.assert_that(fs.list_data_sources()).is_length(5) + + projects_list = fs.list_projects() + assertpy.assert_that(projects_list).is_length(1) + assertpy.assert_that(projects_list[0].name).is_equal_to(project) + assertpy.assert_that(projects_list[0].description).is_equal_to( + "test_universal_cli_with_project_4567 description" + ) + + # entity & feature view describe commands should fail when objects don't exist + result = runner.run(["projects", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) result = runner.run(["entities", "describe", "foo"], cwd=repo_path) assertpy.assert_that(result.returncode).is_equal_to(1) result = runner.run(["feature-views", "describe", "foo"], cwd=repo_path) @@ -161,6 +291,12 @@ def test_odfv_apply() -> None: assertpy.assert_that(result.returncode).is_equal_to(0) # entity & feature view list commands should succeed + result = runner.run(["projects", "describe", project], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["projects", "current_project"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["projects", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) result = runner.run(["entities", "list"], cwd=repo_path) assertpy.assert_that(result.returncode).is_equal_to(0) result = runner.run(["on-demand-feature-views", "list"], cwd=repo_path) @@ -192,7 +328,14 @@ def test_nullable_online_store(test_nullable_online_store) -> None: repo_example = repo_path / "example.py" repo_example.write_text(get_example_repo("empty_feature_repo.py")) + result = runner.run(["apply"], cwd=repo_path) assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["projects", "describe", project], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["projects", "current_project"], 
cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["projects", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) finally: runner.run(["teardown"], cwd=repo_path) diff --git a/sdk/python/tests/integration/registration/test_universal_registry.py b/sdk/python/tests/integration/registration/test_universal_registry.py index c528cee4a8..20f1f5ef0a 100644 --- a/sdk/python/tests/integration/registration/test_universal_registry.py +++ b/sdk/python/tests/integration/registration/test_universal_registry.py @@ -38,11 +38,12 @@ from feast.infra.online_stores.sqlite import SqliteTable from feast.infra.registry.registry import Registry from feast.infra.registry.remote import RemoteRegistry, RemoteRegistryConfig -from feast.infra.registry.sql import SqlRegistry +from feast.infra.registry.sql import SqlRegistry, SqlRegistryConfig from feast.on_demand_feature_view import on_demand_feature_view from feast.permissions.action import AuthzedAction from feast.permissions.permission import Permission from feast.permissions.policy import RoleBasedPolicy +from feast.project import Project from feast.protos.feast.registry import RegistryServer_pb2, RegistryServer_pb2_grpc from feast.registry_server import RegistryServer from feast.repo_config import RegistryConfig @@ -91,7 +92,7 @@ def s3_registry() -> Registry: return Registry("project", registry_config, None) -@pytest.fixture(scope="session") +@pytest.fixture(scope="function") def minio_registry() -> Registry: bucket_name = "test-bucket" @@ -158,7 +159,7 @@ def pg_registry_async(): container.start() - registry_config = _given_registry_config_for_pg_sql(container, 2, "thread") + registry_config = _given_registry_config_for_pg_sql(container, 2, "thread", 3) yield SqlRegistry(registry_config, "project", None) @@ -166,7 +167,11 @@ def pg_registry_async(): def _given_registry_config_for_pg_sql( - container, cache_ttl_seconds=2, cache_mode="sync" + container, + 
cache_ttl_seconds=2, + cache_mode="sync", + thread_pool_executor_worker_count=0, + purge_feast_metadata=False, ): log_string_to_wait_for = "database system is ready to accept connections" waited = wait_for_logs( @@ -179,7 +184,7 @@ def _given_registry_config_for_pg_sql( container_port = container.get_exposed_port(5432) container_host = container.get_container_host_ip() - return RegistryConfig( + return SqlRegistryConfig( registry_type="sql", cache_ttl_seconds=cache_ttl_seconds, cache_mode=cache_mode, @@ -187,6 +192,8 @@ def _given_registry_config_for_pg_sql( # to understand that we are using psycopg3. path=f"postgresql+psycopg://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{container_host}:{container_port}/{POSTGRES_DB}", sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, + thread_pool_executor_worker_count=thread_pool_executor_worker_count, + purge_feast_metadata=purge_feast_metadata, ) @@ -207,14 +214,20 @@ def mysql_registry_async(): container = MySqlContainer("mysql:latest") container.start() - registry_config = _given_registry_config_for_mysql(container, 2, "thread") + registry_config = _given_registry_config_for_mysql(container, 2, "thread", 3) yield SqlRegistry(registry_config, "project", None) container.stop() -def _given_registry_config_for_mysql(container, cache_ttl_seconds=2, cache_mode="sync"): +def _given_registry_config_for_mysql( + container, + cache_ttl_seconds=2, + cache_mode="sync", + thread_pool_executor_worker_count=0, + purge_feast_metadata=False, +): import sqlalchemy engine = sqlalchemy.create_engine( @@ -222,18 +235,20 @@ def _given_registry_config_for_mysql(container, cache_ttl_seconds=2, cache_mode= ) engine.connect() - return RegistryConfig( + return SqlRegistryConfig( registry_type="sql", path=container.get_connection_url(), cache_ttl_seconds=cache_ttl_seconds, cache_mode=cache_mode, sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, + thread_pool_executor_worker_count=thread_pool_executor_worker_count, + 
purge_feast_metadata=purge_feast_metadata, ) @pytest.fixture(scope="session") def sqlite_registry(): - registry_config = RegistryConfig( + registry_config = SqlRegistryConfig( registry_type="sql", path="sqlite://", ) @@ -250,7 +265,11 @@ def __init__(self, service, servicer): ) def unary_unary( - self, method: str, request_serializer=None, response_deserializer=None + self, + method: str, + request_serializer=None, + response_deserializer=None, + _registered_method=None, ): method_name = method.split("/")[-1] method_descriptor = self.service.methods_by_name[method_name] @@ -347,9 +366,11 @@ def test_apply_entity_success(test_registry): project_uuid = project_metadata[0].project_uuid assert len(project_metadata[0].project_uuid) == 36 assert_project_uuid(project, project_uuid, test_registry) + assert_project(project, test_registry) entities = test_registry.list_entities(project, tags=entity.tags) assert_project_uuid(project, project_uuid, test_registry) + assert_project(project, test_registry) entity = entities[0] assert ( @@ -386,11 +407,12 @@ def test_apply_entity_success(test_registry): updated_entity.created_timestamp is not None and updated_entity.created_timestamp == entity.created_timestamp ) - test_registry.delete_entity("driver_car_id", project) assert_project_uuid(project, project_uuid, test_registry) + assert_project(project, test_registry) entities = test_registry.list_entities(project) assert_project_uuid(project, project_uuid, test_registry) + assert_project(project, test_registry) assert len(entities) == 0 test_registry.teardown() @@ -402,6 +424,14 @@ def assert_project_uuid(project, project_uuid, test_registry): assert project_metadata[0].project_uuid == project_uuid +def assert_project(project_name, test_registry, allow_cache=False): + project_obj = test_registry.list_projects(allow_cache=allow_cache) + assert len(project_obj) == 1 + assert project_obj[0].name == "project" + project_obj = test_registry.get_project(name=project_name, 
allow_cache=allow_cache) + assert project_obj.name == "project" + + @pytest.mark.integration @pytest.mark.parametrize( "test_registry", @@ -725,9 +755,10 @@ def simple_udf(x: int): project = "project" # Register Feature Views - test_registry.apply_feature_view(odfv1, project) - test_registry.apply_feature_view(fv1, project) - test_registry.apply_feature_view(sfv, project) + test_registry.apply_feature_view(odfv1, project, False) + test_registry.apply_feature_view(fv1, project, False) + test_registry.apply_feature_view(sfv, project, False) + test_registry.commit() # Modify odfv by changing a single feature dtype @on_demand_feature_view( @@ -1283,6 +1314,10 @@ def test_commit(): project_uuid = project_metadata.project_uuid assert len(project_uuid) == 36 validate_project_uuid(project_uuid, test_registry) + assert len(test_registry.cached_registry_proto.projects) == 1 + project_obj = test_registry.cached_registry_proto.projects[0] + assert project == Project.from_proto(project_obj).name + assert_project(project, test_registry, True) # Retrieving the entity should still succeed entities = test_registry.list_entities(project, allow_cache=True, tags=entity.tags) @@ -1295,6 +1330,7 @@ def test_commit(): and entity.tags["team"] == "matchmaking" ) validate_project_uuid(project_uuid, test_registry) + assert_project(project, test_registry, True) entity = test_registry.get_entity("driver_car_id", project, allow_cache=True) assert ( @@ -1304,6 +1340,7 @@ def test_commit(): and entity.tags["team"] == "matchmaking" ) validate_project_uuid(project_uuid, test_registry) + assert_project(project, test_registry, True) # Create new registry that points to the same store registry_with_same_store = Registry("project", registry_config, None) @@ -1312,6 +1349,7 @@ def test_commit(): entities = registry_with_same_store.list_entities(project) assert len(entities) == 0 validate_project_uuid(project_uuid, registry_with_same_store) + assert_project(project, test_registry, True) # commit from the 
original registry test_registry.commit() @@ -1330,6 +1368,7 @@ def test_commit(): and entity.tags["team"] == "matchmaking" ) validate_project_uuid(project_uuid, registry_with_same_store) + assert_project(project, test_registry) entity = test_registry.get_entity("driver_car_id", project) assert ( @@ -1371,6 +1410,7 @@ def test_apply_permission_success(test_registry): project_uuid = project_metadata[0].project_uuid assert len(project_metadata[0].project_uuid) == 36 assert_project_uuid(project, project_uuid, test_registry) + assert_project(project, test_registry) permissions = test_registry.list_permissions(project) assert_project_uuid(project, project_uuid, test_registry) @@ -1483,5 +1523,194 @@ def test_apply_permission_success(test_registry): permissions = test_registry.list_permissions(project) assert_project_uuid(project, project_uuid, test_registry) assert len(permissions) == 0 + assert_project(project, test_registry) + + test_registry.teardown() + + +@pytest.mark.integration +@pytest.mark.parametrize("test_registry", all_fixtures) +def test_apply_project_success(test_registry): + project = Project( + name="project", + description="Project description", + tags={"team": "project team"}, + owner="owner@mail.com", + ) + + # Register Project + test_registry.apply_project(project) + assert_project(project.name, test_registry, False) + + projects_list = test_registry.list_projects(tags=project.tags) + + assert_project(projects_list[0].name, test_registry) + + project_get = test_registry.get_project("project") + assert ( + project_get.name == project.name + and project_get.description == project.description + and project_get.tags == project.tags + and project_get.owner == project.owner + ) + + # Update project + updated_project = Project( + name=project.name, + description="New Project Description", + tags={"team": "matchmaking", "app": "feast"}, + ) + test_registry.apply_project(updated_project) + + updated_project_get = test_registry.get_project(project.name) + + # 
The created_timestamp for the entity should be set to the created_timestamp value stored from the previous apply + assert ( + updated_project_get.created_timestamp is not None + and updated_project_get.created_timestamp == project_get.created_timestamp + ) + + assert ( + updated_project_get.created_timestamp + < updated_project_get.last_updated_timestamp + ) + + entity = Entity( + name="driver_car_id", + description="Car driver id", + tags={"team": "matchmaking"}, + ) + + test_registry.apply_entity(entity, project.name) + entities = test_registry.list_entities(project.name) + assert len(entities) == 1 + + test_registry.delete_project(project.name, commit=False) + + test_registry.commit() + + entities = test_registry.list_entities(project.name, False) + assert len(entities) == 0 + projects_list = test_registry.list_projects() + assert len(projects_list) == 0 + + test_registry.refresh(project.name) + + test_registry.teardown() + + +@pytest.fixture +def local_registry_purge_feast_metadata() -> Registry: + fd, registry_path = mkstemp() + registry_config = RegistryConfig( + path=registry_path, cache_ttl_seconds=600, purge_feast_metadata=True + ) + return Registry("project", registry_config, None) + + +@pytest.fixture(scope="function") +def pg_registry_purge_feast_metadata(): + container = ( + DockerContainer("postgres:latest") + .with_exposed_ports(5432) + .with_env("POSTGRES_USER", POSTGRES_USER) + .with_env("POSTGRES_PASSWORD", POSTGRES_PASSWORD) + .with_env("POSTGRES_DB", POSTGRES_DB) + ) + + container.start() + + registry_config = _given_registry_config_for_pg_sql(container, 2, "thread", 3, True) + + yield SqlRegistry(registry_config, "project", None) + + container.stop() + + +@pytest.fixture(scope="function") +def mysql_registry_purge_feast_metadata(): + container = MySqlContainer("mysql:latest") + container.start() + + registry_config = _given_registry_config_for_mysql(container, 2, "thread", 3, True) + + yield SqlRegistry(registry_config, "project", None) + + 
container.stop() + + +purge_feast_metadata_fixtures = [ + lazy_fixture("local_registry_purge_feast_metadata"), + pytest.param( + lazy_fixture("pg_registry_purge_feast_metadata"), + marks=pytest.mark.xdist_group(name="pg_registry_purge_feast_metadata"), + ), + pytest.param( + lazy_fixture("mysql_registry_purge_feast_metadata"), + marks=pytest.mark.xdist_group(name="mysql_registry_purge_feast_metadata"), + ), +] + + +@pytest.mark.integration +@pytest.mark.parametrize("test_registry", purge_feast_metadata_fixtures) +def test_apply_entity_success_with_purge_feast_metadata(test_registry): + entity = Entity( + name="driver_car_id", + description="Car driver id", + tags={"team": "matchmaking"}, + ) + + project = "project" + + # Register Entity + test_registry.apply_entity(entity, project) + project_metadata = test_registry.list_project_metadata(project=project) + assert len(project_metadata) == 0 + assert_project(project, test_registry) + + entities = test_registry.list_entities(project, tags=entity.tags) + assert_project(project, test_registry) + + entity = entities[0] + assert ( + len(entities) == 1 + and entity.name == "driver_car_id" + and entity.description == "Car driver id" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" + ) + + entity = test_registry.get_entity("driver_car_id", project) + assert ( + entity.name == "driver_car_id" + and entity.description == "Car driver id" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" + ) + + # After the first apply, the created_timestamp should be the same as the last_update_timestamp. 
+ assert entity.created_timestamp == entity.last_updated_timestamp + + # Update entity + updated_entity = Entity( + name="driver_car_id", + description="Car driver Id", + tags={"team": "matchmaking"}, + ) + test_registry.apply_entity(updated_entity, project) + + updated_entity = test_registry.get_entity("driver_car_id", project) + + # The created_timestamp for the entity should be set to the created_timestamp value stored from the previous apply + assert ( + updated_entity.created_timestamp is not None + and updated_entity.created_timestamp == entity.created_timestamp + ) + test_registry.delete_entity("driver_car_id", project) + assert_project(project, test_registry) + entities = test_registry.list_entities(project) + assert_project(project, test_registry) + assert len(entities) == 0 test_registry.teardown() diff --git a/sdk/python/tests/unit/permissions/auth/conftest.py b/sdk/python/tests/unit/permissions/auth/conftest.py index ea6e2e4311..5a29f8ec78 100644 --- a/sdk/python/tests/unit/permissions/auth/conftest.py +++ b/sdk/python/tests/unit/permissions/auth/conftest.py @@ -8,6 +8,7 @@ read_fv_perm, read_odfv_perm, read_permissions_perm, + read_projects_perm, read_sfv_perm, ) from tests.unit.permissions.auth.test_token_parser import _CLIENT_ID @@ -90,6 +91,7 @@ def oidc_config() -> OidcAuthConfig: read_fv_perm, read_odfv_perm, read_sfv_perm, + read_projects_perm, ], ], ) diff --git a/sdk/python/tests/unit/permissions/auth/server/test_auth_registry_server.py b/sdk/python/tests/unit/permissions/auth/server/test_auth_registry_server.py index 9e9bc1473e..c72b1aa1e2 100644 --- a/sdk/python/tests/unit/permissions/auth/server/test_auth_registry_server.py +++ b/sdk/python/tests/unit/permissions/auth/server/test_auth_registry_server.py @@ -5,9 +5,7 @@ import pytest import yaml -from feast import ( - FeatureStore, -) +from feast import FeatureStore from feast.errors import ( EntityNotFoundException, FeastPermissionError, @@ -23,6 +21,7 @@ read_fv_perm, read_odfv_perm, 
read_permissions_perm, + read_projects_perm, read_sfv_perm, ) from tests.utils.auth_permissions_util import get_remote_registry_store @@ -50,7 +49,11 @@ def start_registry_server( assertpy.assert_that(server_port).is_not_equal_to(0) print(f"Starting Registry at {server_port}") - server = start_server(feature_store, server_port, wait_for_termination=False) + server = start_server( + feature_store, + server_port, + wait_for_termination=False, + ) print("Waiting server availability") wait_retry_backoff( lambda: (None, check_port_open("localhost", server_port)), @@ -179,6 +182,7 @@ def _test_list_permissions( read_fv_perm, read_odfv_perm, read_sfv_perm, + read_projects_perm, ], permissions, ): @@ -191,6 +195,7 @@ def _test_list_permissions( read_fv_perm, read_odfv_perm, read_sfv_perm, + read_projects_perm, ] ) ) diff --git a/sdk/python/tests/unit/permissions/auth/server/test_utils.py b/sdk/python/tests/unit/permissions/auth/server/test_utils.py index 5d781919a0..32b4fd8f98 100644 --- a/sdk/python/tests/unit/permissions/auth/server/test_utils.py +++ b/sdk/python/tests/unit/permissions/auth/server/test_utils.py @@ -6,6 +6,7 @@ from feast.permissions.permission import Permission from feast.permissions.policy import RoleBasedPolicy from feast.permissions.server.utils import AuthManagerType, str_to_auth_manager_type +from feast.project import Project read_permissions_perm = Permission( name="read_permissions_perm", @@ -14,6 +15,13 @@ actions=[AuthzedAction.DESCRIBE], ) +read_projects_perm = Permission( + name="read_projects_perm", + types=Project, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], +) + read_entities_perm = Permission( name="read_entities_perm", types=Entity, diff --git a/sdk/python/tests/unit/test_on_demand_feature_view.py b/sdk/python/tests/unit/test_on_demand_feature_view.py index d9cc5dee50..6073891aba 100644 --- a/sdk/python/tests/unit/test_on_demand_feature_view.py +++ b/sdk/python/tests/unit/test_on_demand_feature_view.py 
@@ -251,11 +251,9 @@ def test_from_proto_backwards_compatible_udf(): proto.spec.feature_transformation.user_defined_function.body_text ) - # And now we're going to null the feature_transformation proto object before reserializing the entire proto - # proto.spec.user_defined_function.body_text = on_demand_feature_view.transformation.udf_string - proto.spec.feature_transformation.user_defined_function.name = "" - proto.spec.feature_transformation.user_defined_function.body = b"" - proto.spec.feature_transformation.user_defined_function.body_text = "" + # For objects that are already registered, feature_transformation and mode is not set + proto.spec.feature_transformation.Clear() + proto.spec.ClearField("mode") # And now we expect the to get the same object back under feature_transformation reserialized_proto = OnDemandFeatureView.from_proto(proto) diff --git a/sdk/python/tests/unit/test_project.py b/sdk/python/tests/unit/test_project.py new file mode 100644 index 0000000000..f15aef2972 --- /dev/null +++ b/sdk/python/tests/unit/test_project.py @@ -0,0 +1,122 @@ +import unittest +from datetime import datetime, timezone + +from feast.project import Project +from feast.protos.feast.core.Project_pb2 import Project as ProjectProto +from feast.protos.feast.core.Project_pb2 import ProjectMeta as ProjectMetaProto +from feast.protos.feast.core.Project_pb2 import ProjectSpec as ProjectSpecProto + + +class TestProject(unittest.TestCase): + def setUp(self): + self.project_name = "test_project" + self.description = "Test project description" + self.tags = {"env": "test"} + self.owner = "test_owner" + self.created_timestamp = datetime.now(tz=timezone.utc) + self.last_updated_timestamp = datetime.now(tz=timezone.utc) + + def test_initialization(self): + project = Project( + name=self.project_name, + description=self.description, + tags=self.tags, + owner=self.owner, + created_timestamp=self.created_timestamp, + last_updated_timestamp=self.last_updated_timestamp, + ) + 
self.assertEqual(project.name, self.project_name) + self.assertEqual(project.description, self.description) + self.assertEqual(project.tags, self.tags) + self.assertEqual(project.owner, self.owner) + self.assertEqual(project.created_timestamp, self.created_timestamp) + self.assertEqual(project.last_updated_timestamp, self.last_updated_timestamp) + + def test_equality(self): + project1 = Project(name=self.project_name) + project2 = Project(name=self.project_name) + project3 = Project(name="different_project") + self.assertTrue( + project1.name == project2.name + and project1.description == project2.description + and project1.tags == project2.tags + and project1.owner == project2.owner + ) + self.assertFalse( + project1.name == project3.name + and project1.description == project3.description + and project1.tags == project3.tags + and project1.owner == project3.owner + ) + + def test_is_valid(self): + project = Project(name=self.project_name) + project.is_valid() + with self.assertRaises(ValueError): + invalid_project = Project(name="") + invalid_project.is_valid() + + def test_from_proto(self): + meta = ProjectMetaProto() + meta.created_timestamp.FromDatetime(self.created_timestamp) + meta.last_updated_timestamp.FromDatetime(self.last_updated_timestamp) + project_proto = ProjectProto( + spec=ProjectSpecProto( + name=self.project_name, + description=self.description, + tags=self.tags, + owner=self.owner, + ), + meta=meta, + ) + project = Project.from_proto(project_proto) + self.assertEqual(project.name, self.project_name) + self.assertEqual(project.description, self.description) + self.assertEqual(project.tags, self.tags) + self.assertEqual(project.owner, self.owner) + self.assertEqual(project.created_timestamp, self.created_timestamp) + self.assertEqual(project.last_updated_timestamp, self.last_updated_timestamp) + + def test_to_proto(self): + project = Project( + name=self.project_name, + description=self.description, + tags=self.tags, + owner=self.owner, + 
created_timestamp=self.created_timestamp, + last_updated_timestamp=self.last_updated_timestamp, + ) + project_proto = project.to_proto() + self.assertEqual(project_proto.spec.name, self.project_name) + self.assertEqual(project_proto.spec.description, self.description) + self.assertEqual(project_proto.spec.tags, self.tags) + self.assertEqual(project_proto.spec.owner, self.owner) + self.assertEqual( + project_proto.meta.created_timestamp.ToDatetime().replace( + tzinfo=timezone.utc + ), + self.created_timestamp, + ) + self.assertEqual( + project_proto.meta.last_updated_timestamp.ToDatetime().replace( + tzinfo=timezone.utc + ), + self.last_updated_timestamp, + ) + + def test_to_proto_and_back(self): + project = Project( + name=self.project_name, + description=self.description, + tags=self.tags, + owner=self.owner, + created_timestamp=self.created_timestamp, + last_updated_timestamp=self.last_updated_timestamp, + ) + project_proto = project.to_proto() + project_from_proto = Project.from_proto(project_proto) + self.assertEqual(project, project_from_proto) + + +if __name__ == "__main__": + unittest.main() From 96344b2b6830dcc280567542d111d1b0f39879e0 Mon Sep 17 00:00:00 2001 From: Hao Xu Date: Fri, 6 Sep 2024 02:41:25 -0700 Subject: [PATCH 47/96] fix: Hao xu request source timestamp_field (#4495) * not sure why fix linting Signed-off-by: cmuhao * not sure why fix linting Signed-off-by: cmuhao --------- Signed-off-by: cmuhao --- README.md | 6 +++++- sdk/python/feast/data_source.py | 11 ++++++++++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 10c20050d3..6f17e7fa6c 100644 --- a/README.md +++ b/README.md @@ -187,6 +187,7 @@ The list below contains the functionality that contributors are planning to deve * [x] On-demand Transformations (Beta release. See [RFC](https://docs.google.com/document/d/1lgfIw0Drc65LpaxbUu49RCeJgMew547meSJttnUqz7c/edit#)) * [x] Streaming Transformations (Alpha release. 
See [RFC](https://docs.google.com/document/d/1UzEyETHUaGpn0ap4G82DHluiCj7zEbrQLkJJkKSv4e8/edit)) * [ ] Batch transformation (In progress. See [RFC](https://docs.google.com/document/d/1964OkzuBljifDvkV-0fakp2uaijnVzdwWNGdz7Vz50A/edit)) + * [ ] Persistent On-demand Transformations (Beta release. See [GitHub Issue](https://github.com/feast-dev/feast/issues/4376)) * **Streaming** * [x] [Custom streaming ingestion job support](https://docs.feast.dev/how-to-guides/customizing-feast/creating-a-custom-provider) * [x] [Push based streaming data ingestion to online store](https://docs.feast.dev/reference/data-sources/push) @@ -208,6 +209,9 @@ The list below contains the functionality that contributors are planning to deve * [x] Amundsen integration (see [Feast extractor](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/feast_extractor.py)) * [x] DataHub integration (see [DataHub Feast docs](https://datahubproject.io/docs/generated/ingestion/sources/feast/)) * [x] Feast Web UI (Beta release. See [docs](https://docs.feast.dev/reference/alpha-web-ui)) + * [ ] Feast Lineage Explorer +* **Natural Language Processing** + * [x] Vector Search (Alpha release. 
See [RFC](https://docs.google.com/document/d/18IWzLEA9i2lDWnbfbwXnMCg3StlqaLVI-uRpQjr_Vos/edit#heading=h.9gaqqtox9jg6)) ## πŸŽ“ Important Resources @@ -230,4 +234,4 @@ Thanks goes to these incredible people: - + \ No newline at end of file diff --git a/sdk/python/feast/data_source.py b/sdk/python/feast/data_source.py index 17fbfd5fcf..f7881c5045 100644 --- a/sdk/python/feast/data_source.py +++ b/sdk/python/feast/data_source.py @@ -524,12 +524,19 @@ def __init__( *, name: str, schema: List[Field], + timestamp_field: Optional[str] = None, description: Optional[str] = "", tags: Optional[Dict[str, str]] = None, owner: Optional[str] = "", ): """Creates a RequestSource object.""" - super().__init__(name=name, description=description, tags=tags, owner=owner) + super().__init__( + name=name, + timestamp_field=timestamp_field, + description=description, + tags=tags, + owner=owner, + ) self.schema = schema def validate(self, config: RepoConfig): @@ -570,6 +577,7 @@ def from_proto(data_source: DataSourceProto): return RequestSource( name=data_source.name, schema=list_schema, + timestamp_field=data_source.timestamp_field, description=data_source.description, tags=dict(data_source.tags), owner=data_source.owner, @@ -593,6 +601,7 @@ def to_proto(self) -> DataSourceProto: tags=self.tags, owner=self.owner, ) + data_source_proto.timestamp_field = self.timestamp_field data_source_proto.request_data_options.schema.extend(schema_pb) return data_source_proto From da246561ea7d222f3eabb22d131acdf0b5efc979 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Sat, 7 Sep 2024 00:07:42 +0400 Subject: [PATCH 48/96] chore: Scope fixtures to session in test_universal_registry (#4497) chore: refactor fixtures in test_universal_registry Signed-off-by: tokoko --- sdk/python/tests/integration/conftest.py | 33 +++ .../registration/test_universal_registry.py | 234 +++++++++--------- 2 files changed, 147 insertions(+), 120 deletions(-) diff --git a/sdk/python/tests/integration/conftest.py 
b/sdk/python/tests/integration/conftest.py index 5c34a448e2..82f80b8992 100644 --- a/sdk/python/tests/integration/conftest.py +++ b/sdk/python/tests/integration/conftest.py @@ -2,6 +2,9 @@ import pytest from testcontainers.keycloak import KeycloakContainer +from testcontainers.minio import MinioContainer +from testcontainers.mysql import MySqlContainer +from testcontainers.postgres import PostgresContainer from tests.utils.auth_permissions_util import setup_permissions_on_keycloak @@ -14,3 +17,33 @@ def start_keycloak_server(): with KeycloakContainer("quay.io/keycloak/keycloak:24.0.1") as keycloak_container: setup_permissions_on_keycloak(keycloak_container.get_client()) yield keycloak_container.get_url() + + +@pytest.fixture(scope="session") +def mysql_server(): + container = MySqlContainer("mysql:latest") + container.start() + + yield container + + container.stop() + + +@pytest.fixture(scope="session") +def postgres_server(): + container = PostgresContainer() + container.start() + + yield container + + container.stop() + + +@pytest.fixture(scope="session") +def minio_server(): + container = MinioContainer() + container.start() + + yield container + + container.stop() diff --git a/sdk/python/tests/integration/registration/test_universal_registry.py b/sdk/python/tests/integration/registration/test_universal_registry.py index 20f1f5ef0a..aee93b4705 100644 --- a/sdk/python/tests/integration/registration/test_universal_registry.py +++ b/sdk/python/tests/integration/registration/test_universal_registry.py @@ -13,6 +13,8 @@ # limitations under the License. 
import logging import os +import random +import string import time from datetime import timedelta, timezone from tempfile import mkstemp @@ -22,10 +24,8 @@ import pandas as pd import pytest from pytest_lazyfixture import lazy_fixture -from testcontainers.core.container import DockerContainer -from testcontainers.core.waiting_utils import wait_for_logs -from testcontainers.minio import MinioContainer from testcontainers.mysql import MySqlContainer +from testcontainers.postgres import PostgresContainer from feast import FeatureService, FileSource, RequestSource from feast.data_format import AvroFormat, ParquetFormat @@ -93,16 +93,14 @@ def s3_registry() -> Registry: @pytest.fixture(scope="function") -def minio_registry() -> Registry: - bucket_name = "test-bucket" +def minio_registry(minio_server): + bucket_name = "".join(random.choices(string.ascii_lowercase, k=10)) - container = MinioContainer() - container.start() - client = container.get_client() + client = minio_server.get_client() client.make_bucket(bucket_name) - container_host = container.get_container_host_ip() - exposed_port = container.get_exposed_port(container.port) + container_host = minio_server.get_container_host_ip() + exposed_port = minio_server.get_exposed_port(minio_server.port) registry_config = RegistryConfig( path=f"s3://{bucket_name}/registry.db", cache_ttl_seconds=600 @@ -110,141 +108,121 @@ def minio_registry() -> Registry: mock_environ = { "FEAST_S3_ENDPOINT_URL": f"http://{container_host}:{exposed_port}", - "AWS_ACCESS_KEY_ID": container.access_key, - "AWS_SECRET_ACCESS_KEY": container.secret_key, + "AWS_ACCESS_KEY_ID": minio_server.access_key, + "AWS_SECRET_ACCESS_KEY": minio_server.secret_key, "AWS_SESSION_TOKEN": "", } with mock.patch.dict(os.environ, mock_environ): yield Registry("project", registry_config, None) - container.stop() - - -POSTGRES_USER = "test" -POSTGRES_PASSWORD = "test" -POSTGRES_DB = "test" logger = logging.getLogger(__name__) @pytest.fixture(scope="function") -def 
pg_registry(): - container = ( - DockerContainer("postgres:latest") - .with_exposed_ports(5432) - .with_env("POSTGRES_USER", POSTGRES_USER) - .with_env("POSTGRES_PASSWORD", POSTGRES_PASSWORD) - .with_env("POSTGRES_DB", POSTGRES_DB) - ) - - container.start() - - registry_config = _given_registry_config_for_pg_sql(container) - - yield SqlRegistry(registry_config, "project", None) +def pg_registry(postgres_server): + db_name = "".join(random.choices(string.ascii_lowercase, k=10)) - container.stop() + _create_pg_database(postgres_server, db_name) + container_port = postgres_server.get_exposed_port(5432) + container_host = postgres_server.get_container_host_ip() -@pytest.fixture(scope="function") -def pg_registry_async(): - container = ( - DockerContainer("postgres:latest") - .with_exposed_ports(5432) - .with_env("POSTGRES_USER", POSTGRES_USER) - .with_env("POSTGRES_PASSWORD", POSTGRES_PASSWORD) - .with_env("POSTGRES_DB", POSTGRES_DB) + registry_config = SqlRegistryConfig( + registry_type="sql", + cache_ttl_seconds=2, + cache_mode="sync", + # The `path` must include `+psycopg` in order for `sqlalchemy.create_engine()` + # to understand that we are using psycopg3. 
+ path=f"postgresql+psycopg://{postgres_server.username}:{postgres_server.password}@{container_host}:{container_port}/{db_name}", + sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, + thread_pool_executor_worker_count=0, + purge_feast_metadata=False, ) - container.start() - - registry_config = _given_registry_config_for_pg_sql(container, 2, "thread", 3) - yield SqlRegistry(registry_config, "project", None) - container.stop() +@pytest.fixture(scope="function") +def pg_registry_async(postgres_server): + db_name = "".join(random.choices(string.ascii_lowercase, k=10)) + + _create_pg_database(postgres_server, db_name) -def _given_registry_config_for_pg_sql( - container, - cache_ttl_seconds=2, - cache_mode="sync", - thread_pool_executor_worker_count=0, - purge_feast_metadata=False, -): - log_string_to_wait_for = "database system is ready to accept connections" - waited = wait_for_logs( - container=container, - predicate=log_string_to_wait_for, - timeout=30, - interval=10, - ) - logger.info("Waited for %s seconds until postgres container was up", waited) - container_port = container.get_exposed_port(5432) - container_host = container.get_container_host_ip() + container_port = postgres_server.get_exposed_port(5432) + container_host = postgres_server.get_container_host_ip() - return SqlRegistryConfig( + registry_config = SqlRegistryConfig( registry_type="sql", - cache_ttl_seconds=cache_ttl_seconds, - cache_mode=cache_mode, + cache_ttl_seconds=2, + cache_mode="thread", # The `path` must include `+psycopg` in order for `sqlalchemy.create_engine()` # to understand that we are using psycopg3. 
- path=f"postgresql+psycopg://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{container_host}:{container_port}/{POSTGRES_DB}", + path=f"postgresql+psycopg://{postgres_server.username}:{postgres_server.password}@{container_host}:{container_port}/{db_name}", sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, - thread_pool_executor_worker_count=thread_pool_executor_worker_count, - purge_feast_metadata=purge_feast_metadata, + thread_pool_executor_worker_count=3, + purge_feast_metadata=False, ) + yield SqlRegistry(registry_config, "project", None) -@pytest.fixture(scope="function") -def mysql_registry(): - container = MySqlContainer("mysql:latest") - container.start() - registry_config = _given_registry_config_for_mysql(container) +def _create_mysql_database(container: MySqlContainer, database: str): + container.exec( + f"mysql -uroot -p{container.root_password} -e 'CREATE DATABASE {database}; GRANT ALL PRIVILEGES ON {database}.* TO {container.username};'" + ) - yield SqlRegistry(registry_config, "project", None) - container.stop() +def _create_pg_database(container: PostgresContainer, database: str): + container.exec(f"psql -U {container.username} -c 'CREATE DATABASE {database}'") @pytest.fixture(scope="function") -def mysql_registry_async(): - container = MySqlContainer("mysql:latest") - container.start() +def mysql_registry(mysql_server): + db_name = "".join(random.choices(string.ascii_lowercase, k=10)) + + _create_mysql_database(mysql_server, db_name) - registry_config = _given_registry_config_for_mysql(container, 2, "thread", 3) + connection_url = ( + "/".join(mysql_server.get_connection_url().split("/")[:-1]) + f"/{db_name}" + ) + + registry_config = SqlRegistryConfig( + registry_type="sql", + path=connection_url, + cache_ttl_seconds=2, + cache_mode="sync", + sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, + thread_pool_executor_worker_count=0, + purge_feast_metadata=False, + ) yield SqlRegistry(registry_config, "project", None) - 
container.stop() +@pytest.fixture(scope="function") +def mysql_registry_async(mysql_server): + db_name = "".join(random.choices(string.ascii_lowercase, k=10)) -def _given_registry_config_for_mysql( - container, - cache_ttl_seconds=2, - cache_mode="sync", - thread_pool_executor_worker_count=0, - purge_feast_metadata=False, -): - import sqlalchemy + _create_mysql_database(mysql_server, db_name) - engine = sqlalchemy.create_engine( - container.get_connection_url(), pool_pre_ping=True + connection_url = ( + "/".join(mysql_server.get_connection_url().split("/")[:-1]) + f"/{db_name}" ) - engine.connect() - return SqlRegistryConfig( + registry_config = SqlRegistryConfig( registry_type="sql", - path=container.get_connection_url(), - cache_ttl_seconds=cache_ttl_seconds, - cache_mode=cache_mode, + path=connection_url, + cache_ttl_seconds=2, + cache_mode="thread", sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, - thread_pool_executor_worker_count=thread_pool_executor_worker_count, - purge_feast_metadata=purge_feast_metadata, + thread_pool_executor_worker_count=3, + purge_feast_metadata=False, ) + yield SqlRegistry(registry_config, "project", None) + @pytest.fixture(scope="session") def sqlite_registry(): @@ -339,11 +317,11 @@ def mock_remote_registry(): async_sql_fixtures = [ pytest.param( lazy_fixture("pg_registry_async"), - marks=pytest.mark.xdist_group(name="pg_registry_async"), + marks=pytest.mark.xdist_group(name="pg_registry"), ), pytest.param( lazy_fixture("mysql_registry_async"), - marks=pytest.mark.xdist_group(name="mysql_registry_async"), + marks=pytest.mark.xdist_group(name="mysql_registry"), ), ] @@ -1609,45 +1587,61 @@ def local_registry_purge_feast_metadata() -> Registry: @pytest.fixture(scope="function") -def pg_registry_purge_feast_metadata(): - container = ( - DockerContainer("postgres:latest") - .with_exposed_ports(5432) - .with_env("POSTGRES_USER", POSTGRES_USER) - .with_env("POSTGRES_PASSWORD", POSTGRES_PASSWORD) - .with_env("POSTGRES_DB", 
POSTGRES_DB) - ) +def pg_registry_purge_feast_metadata(postgres_server): + db_name = "".join(random.choices(string.ascii_lowercase, k=10)) - container.start() + _create_pg_database(postgres_server, db_name) - registry_config = _given_registry_config_for_pg_sql(container, 2, "thread", 3, True) + container_port = postgres_server.get_exposed_port(5432) + container_host = postgres_server.get_container_host_ip() - yield SqlRegistry(registry_config, "project", None) + registry_config = SqlRegistryConfig( + registry_type="sql", + cache_ttl_seconds=2, + cache_mode="thread", + # The `path` must include `+psycopg` in order for `sqlalchemy.create_engine()` + # to understand that we are using psycopg3. + path=f"postgresql+psycopg://{postgres_server.username}:{postgres_server.password}@{container_host}:{container_port}/{db_name}", + sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, + thread_pool_executor_worker_count=3, + purge_feast_metadata=True, + ) - container.stop() + yield SqlRegistry(registry_config, "project", None) @pytest.fixture(scope="function") -def mysql_registry_purge_feast_metadata(): - container = MySqlContainer("mysql:latest") - container.start() +def mysql_registry_purge_feast_metadata(mysql_server): + db_name = "".join(random.choices(string.ascii_lowercase, k=10)) - registry_config = _given_registry_config_for_mysql(container, 2, "thread", 3, True) + _create_mysql_database(mysql_server, db_name) - yield SqlRegistry(registry_config, "project", None) + connection_url = ( + "/".join(mysql_server.get_connection_url().split("/")[:-1]) + f"/{db_name}" + ) - container.stop() + registry_config = SqlRegistryConfig( + registry_type="sql", + path=connection_url, + cache_ttl_seconds=2, + cache_mode="thread", + sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, + thread_pool_executor_worker_count=3, + purge_feast_metadata=True, + ) + + yield SqlRegistry(registry_config, "project", None) purge_feast_metadata_fixtures = [ 
lazy_fixture("local_registry_purge_feast_metadata"), pytest.param( lazy_fixture("pg_registry_purge_feast_metadata"), - marks=pytest.mark.xdist_group(name="pg_registry_purge_feast_metadata"), + marks=pytest.mark.xdist_group(name="pg_registry"), ), pytest.param( lazy_fixture("mysql_registry_purge_feast_metadata"), - marks=pytest.mark.xdist_group(name="mysql_registry_purge_feast_metadata"), + marks=pytest.mark.xdist_group(name="mysql_registry"), ), ] From ac381b292cfa29804ee5f0822f876d227a0989d9 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Sat, 7 Sep 2024 00:32:19 +0400 Subject: [PATCH 49/96] feat: Add registry methods for dealing with all FV types (#4435) * add new registry method for working with any fv type Signed-off-by: tokoko * fix: different project for each test in test_universal_registry Signed-off-by: tokoko * revert project names to project in test_universal_registry Signed-off-by: tokoko * remove print statements from test_universal_registry Signed-off-by: tokoko --------- Signed-off-by: tokoko --- protos/feast/registry/RegistryServer.proto | 35 ++++++++- sdk/python/feast/cli_utils.py | 5 +- sdk/python/feast/feature_store.py | 62 +++++----------- .../feast/infra/registry/base_registry.py | 38 ++++++++++ .../feast/infra/registry/caching_registry.py | 34 +++++++++ .../infra/registry/proto_registry_utils.py | 39 ++++++++++ sdk/python/feast/infra/registry/registry.py | 21 +++++- sdk/python/feast/infra/registry/remote.py | 49 +++++++++++++ sdk/python/feast/infra/registry/snowflake.py | 72 ++++++++++++++++++- sdk/python/feast/infra/registry/sql.py | 57 ++++++++++++++- sdk/python/feast/registry_server.py | 64 +++++++++++++++++ sdk/python/feast/utils.py | 5 -- .../online_store/test_universal_online.py | 5 +- .../registration/test_universal_registry.py | 22 +++++- 14 files changed, 446 insertions(+), 62 deletions(-) diff --git a/protos/feast/registry/RegistryServer.proto b/protos/feast/registry/RegistryServer.proto index 3ad64b5b34..6685bc0baa 100644 --- 
a/protos/feast/registry/RegistryServer.proto +++ b/protos/feast/registry/RegistryServer.proto @@ -32,9 +32,13 @@ service RegistryServer{ // FeatureView RPCs rpc ApplyFeatureView (ApplyFeatureViewRequest) returns (google.protobuf.Empty) {} + rpc DeleteFeatureView (DeleteFeatureViewRequest) returns (google.protobuf.Empty) {} + rpc GetAnyFeatureView (GetAnyFeatureViewRequest) returns (GetAnyFeatureViewResponse) {} + rpc ListAllFeatureViews (ListAllFeatureViewsRequest) returns (ListAllFeatureViewsResponse) {} + + // plain FeatureView RPCs rpc GetFeatureView (GetFeatureViewRequest) returns (feast.core.FeatureView) {} rpc ListFeatureViews (ListFeatureViewsRequest) returns (ListFeatureViewsResponse) {} - rpc DeleteFeatureView (DeleteFeatureViewRequest) returns (google.protobuf.Empty) {} // StreamFeatureView RPCs rpc GetStreamFeatureView (GetStreamFeatureViewRequest) returns (feast.core.StreamFeatureView) {} @@ -208,6 +212,35 @@ message DeleteFeatureViewRequest { bool commit = 3; } +message AnyFeatureView { + oneof any_feature_view { + feast.core.FeatureView feature_view = 1; + feast.core.OnDemandFeatureView on_demand_feature_view = 2; + feast.core.StreamFeatureView stream_feature_view = 3; + } +} + +message GetAnyFeatureViewRequest { + string name = 1; + string project = 2; + bool allow_cache = 3; +} + +message GetAnyFeatureViewResponse { + AnyFeatureView any_feature_view = 1; +} + +message ListAllFeatureViewsRequest { + string project = 1; + bool allow_cache = 2; + map<string, string> tags = 3; +} + +message ListAllFeatureViewsResponse { + repeated AnyFeatureView feature_views = 1; +} + + // StreamFeatureView message GetStreamFeatureViewRequest { diff --git a/sdk/python/feast/cli_utils.py b/sdk/python/feast/cli_utils.py index 264a633c31..4152eb219b 100644 --- a/sdk/python/feast/cli_utils.py +++ b/sdk/python/feast/cli_utils.py @@ -175,7 +175,7 @@ def handle_fv_verbose_permissions_command( tags=tags_filter # type: ignore[assignment] ) for fv in feature_views: - if p.match_resource(fv): +
if p.match_resource(fv): # type: ignore[arg-type] feature_views_names.add(fv.name) if len(feature_views_names) > 0: Node( @@ -207,8 +207,7 @@ def handle_not_verbose_permissions_command( def fetch_all_feast_objects(store: FeatureStore) -> list[FeastObject]: objects: list[FeastObject] = [] objects.extend(store.list_entities()) - objects.extend(store.list_all_feature_views()) - objects.extend(store.list_batch_feature_views()) + objects.extend(store.list_all_feature_views()) # type: ignore[arg-type] objects.extend(store.list_feature_services()) objects.extend(store.list_data_sources()) objects.extend(store.list_validation_references()) diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 27b6eade5b..4f96cfb0fc 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. import itertools -import logging import os import warnings from datetime import datetime, timedelta @@ -247,9 +246,26 @@ def list_feature_services( """ return self._registry.list_feature_services(self.project, tags=tags) + def _list_all_feature_views( + self, allow_cache: bool = False, tags: Optional[dict[str, str]] = None + ) -> List[BaseFeatureView]: + feature_views = [] + for fv in self.registry.list_all_feature_views( + self.project, allow_cache=allow_cache, tags=tags + ): + if ( + isinstance(fv, FeatureView) + and fv.entities + and fv.entities[0] == DUMMY_ENTITY_NAME + ): + fv.entities = [] + fv.entity_columns = [] + feature_views.append(fv) + return feature_views + def list_all_feature_views( self, allow_cache: bool = False, tags: Optional[dict[str, str]] = None - ) -> List[Union[FeatureView, StreamFeatureView, OnDemandFeatureView]]: + ) -> List[BaseFeatureView]: """ Retrieves the list of feature views from the registry. @@ -274,10 +290,6 @@ def list_feature_views( Returns: A list of feature views. 
""" - logging.warning( - "list_feature_views will make breaking changes. Please use list_batch_feature_views instead. " - "list_feature_views will behave like list_all_feature_views in the future." - ) return utils._list_feature_views( self._registry, self.project, allow_cache, tags=tags ) @@ -297,44 +309,6 @@ def list_batch_feature_views( """ return self._list_batch_feature_views(allow_cache=allow_cache, tags=tags) - def _list_all_feature_views( - self, - allow_cache: bool = False, - tags: Optional[dict[str, str]] = None, - ) -> List[Union[FeatureView, StreamFeatureView, OnDemandFeatureView]]: - all_feature_views = ( - utils._list_feature_views( - self._registry, self.project, allow_cache, tags=tags - ) - + self._list_stream_feature_views(allow_cache, tags=tags) - + self.list_on_demand_feature_views(allow_cache, tags=tags) - ) - return all_feature_views - - def _list_feature_views( - self, - allow_cache: bool = False, - hide_dummy_entity: bool = True, - tags: Optional[dict[str, str]] = None, - ) -> List[FeatureView]: - logging.warning( - "_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. " - "_list_feature_views will behave like _list_all_feature_views in the future." 
- ) - feature_views = [] - for fv in self._registry.list_feature_views( - self.project, allow_cache=allow_cache, tags=tags - ): - if ( - hide_dummy_entity - and fv.entities - and fv.entities[0] == DUMMY_ENTITY_NAME - ): - fv.entities = [] - fv.entity_columns = [] - feature_views.append(fv) - return feature_views - def _list_batch_feature_views( self, allow_cache: bool = False, diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index f5040d9752..f2374edf1b 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -391,6 +391,44 @@ def list_feature_views( """ raise NotImplementedError + @abstractmethod + def get_any_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> BaseFeatureView: + """ + Retrieves a feature view of any type. + + Args: + name: Name of feature view + project: Feast project that this feature view belongs to + allow_cache: Allow returning feature view from the cached registry + + Returns: + Returns either the specified feature view, or raises an exception if + none is found + """ + raise NotImplementedError + + @abstractmethod + def list_all_feature_views( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[BaseFeatureView]: + """ + Retrieve a list of feature views of all types from the registry + + Args: + allow_cache: Allow returning feature views from the cached registry + project: Filter feature views based on project name + tags: Filter by tags + + Returns: + List of feature views + """ + raise NotImplementedError + @abstractmethod def apply_materialization( self, diff --git a/sdk/python/feast/infra/registry/caching_registry.py b/sdk/python/feast/infra/registry/caching_registry.py index c04a62552b..8f47fab077 100644 --- a/sdk/python/feast/infra/registry/caching_registry.py +++ b/sdk/python/feast/infra/registry/caching_registry.py @@ -7,6 +7,7 
@@ from threading import Lock from typing import List, Optional +from feast.base_feature_view import BaseFeatureView from feast.data_source import DataSource from feast.entity import Entity from feast.feature_service import FeatureService @@ -102,6 +103,39 @@ def list_entities( ) return self._list_entities(project, tags) + @abstractmethod + def _get_any_feature_view(self, name: str, project: str) -> BaseFeatureView: + pass + + def get_any_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> BaseFeatureView: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_any_feature_view( + self.cached_registry_proto, name, project + ) + return self._get_any_feature_view(name, project) + + @abstractmethod + def _list_all_feature_views( + self, project: str, tags: Optional[dict[str, str]] + ) -> List[BaseFeatureView]: + pass + + def list_all_feature_views( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[BaseFeatureView]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_all_feature_views( + self.cached_registry_proto, project, tags + ) + return self._list_all_feature_views(project, tags) + @abstractmethod def _get_feature_view(self, name: str, project: str) -> FeatureView: pass diff --git a/sdk/python/feast/infra/registry/proto_registry_utils.py b/sdk/python/feast/infra/registry/proto_registry_utils.py index b0413fd77e..fc5c3f6671 100644 --- a/sdk/python/feast/infra/registry/proto_registry_utils.py +++ b/sdk/python/feast/infra/registry/proto_registry_utils.py @@ -2,6 +2,7 @@ from typing import List, Optional from feast import utils +from feast.base_feature_view import BaseFeatureView from feast.data_source import DataSource from feast.entity import Entity from feast.errors import ( @@ -93,6 +94,33 @@ def get_feature_service( raise FeatureServiceNotFoundException(name, project=project) +def 
get_any_feature_view( + registry_proto: RegistryProto, name: str, project: str +) -> BaseFeatureView: + for feature_view_proto in registry_proto.feature_views: + if ( + feature_view_proto.spec.name == name + and feature_view_proto.spec.project == project + ): + return FeatureView.from_proto(feature_view_proto) + + for feature_view_proto in registry_proto.stream_feature_views: + if ( + feature_view_proto.spec.name == name + and feature_view_proto.spec.project == project + ): + return StreamFeatureView.from_proto(feature_view_proto) + + for on_demand_feature_view in registry_proto.on_demand_feature_views: + if ( + on_demand_feature_view.spec.project == project + and on_demand_feature_view.spec.name == name + ): + return OnDemandFeatureView.from_proto(on_demand_feature_view) + + raise FeatureViewNotFoundException(name, project) + + def get_feature_view( registry_proto: RegistryProto, name: str, project: str ) -> FeatureView: @@ -179,6 +207,17 @@ def list_feature_services( return feature_services +@registry_proto_cache_with_tags +def list_all_feature_views( + registry_proto: RegistryProto, project: str, tags: Optional[dict[str, str]] +) -> List[BaseFeatureView]: + return ( + list_feature_views(registry_proto, project, tags) + + list_stream_feature_views(registry_proto, project, tags) + + list_on_demand_feature_views(registry_proto, project, tags) + ) + + @registry_proto_cache_with_tags def list_feature_views( registry_proto: RegistryProto, project: str, tags: Optional[dict[str, str]] diff --git a/sdk/python/feast/infra/registry/registry.py b/sdk/python/feast/infra/registry/registry.py index 634d6fa7ac..bf5dfbe24f 100644 --- a/sdk/python/feast/infra/registry/registry.py +++ b/sdk/python/feast/infra/registry/registry.py @@ -585,7 +585,26 @@ def apply_materialization( self.commit() return - raise FeatureViewNotFoundException(feature_view.name, project) + def list_all_feature_views( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = 
None, + ) -> List[BaseFeatureView]: + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) + return proto_registry_utils.list_all_feature_views( + registry_proto, project, tags + ) + + def get_any_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> BaseFeatureView: + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) + return proto_registry_utils.get_any_feature_view(registry_proto, name, project) def list_feature_views( self, diff --git a/sdk/python/feast/infra/registry/remote.py b/sdk/python/feast/infra/registry/remote.py index ba25ef7dbe..cdb45f0363 100644 --- a/sdk/python/feast/infra/registry/remote.py +++ b/sdk/python/feast/infra/registry/remote.py @@ -30,6 +30,24 @@ from feast.stream_feature_view import StreamFeatureView +def extract_base_feature_view( + any_feature_view: RegistryServer_pb2.AnyFeatureView, +) -> BaseFeatureView: + feature_view_type = any_feature_view.WhichOneof("any_feature_view") + if feature_view_type == "feature_view": + feature_view = FeatureView.from_proto(any_feature_view.feature_view) + elif feature_view_type == "on_demand_feature_view": + feature_view = OnDemandFeatureView.from_proto( + any_feature_view.on_demand_feature_view + ) + elif feature_view_type == "stream_feature_view": + feature_view = StreamFeatureView.from_proto( + any_feature_view.stream_feature_view + ) + + return feature_view + + class RemoteRegistryConfig(RegistryConfig): registry_type: StrictStr = "remote" """ str: Provider name or a class name that implements Registry.""" @@ -249,6 +267,37 @@ def list_on_demand_feature_views( for on_demand_feature_view in response.on_demand_feature_views ] + def get_any_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> BaseFeatureView: + request = RegistryServer_pb2.GetAnyFeatureViewRequest( + name=name, project=project, allow_cache=allow_cache + ) + + response: 
RegistryServer_pb2.GetAnyFeatureViewResponse = ( + self.stub.GetAnyFeatureView(request) + ) + any_feature_view = response.any_feature_view + return extract_base_feature_view(any_feature_view) + + def list_all_feature_views( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[BaseFeatureView]: + request = RegistryServer_pb2.ListAllFeatureViewsRequest( + project=project, allow_cache=allow_cache, tags=tags + ) + + response: RegistryServer_pb2.ListAllFeatureViewsResponse = ( + self.stub.ListAllFeatureViews(request) + ) + return [ + extract_base_feature_view(any_feature_view) + for any_feature_view in response.feature_views + ] + def get_feature_view( self, name: str, project: str, allow_cache: bool = False ) -> FeatureView: diff --git a/sdk/python/feast/infra/registry/snowflake.py b/sdk/python/feast/infra/registry/snowflake.py index accfa42e12..f9dd37e516 100644 --- a/sdk/python/feast/infra/registry/snowflake.py +++ b/sdk/python/feast/infra/registry/snowflake.py @@ -5,7 +5,7 @@ from datetime import datetime, timedelta, timezone from enum import Enum from threading import Lock -from typing import Any, Callable, List, Literal, Optional, Union +from typing import Any, Callable, List, Literal, Optional, Union, cast from pydantic import ConfigDict, Field, StrictStr @@ -575,6 +575,76 @@ def get_feature_view( FeatureViewNotFoundException, ) + def get_any_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> BaseFeatureView: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_any_feature_view( + self.cached_registry_proto, name, project + ) + fv = self._get_object( + "FEATURE_VIEWS", + name, + project, + FeatureViewProto, + FeatureView, + "FEATURE_VIEW_NAME", + "FEATURE_VIEW_PROTO", + None, + ) + + if not fv: + fv = self._get_object( + "STREAM_FEATURE_VIEWS", + name, + project, + StreamFeatureViewProto, + StreamFeatureView, + 
"STREAM_FEATURE_VIEW_NAME", + "STREAM_FEATURE_VIEW_PROTO", + None, + ) + if not fv: + fv = self._get_object( + "ON_DEMAND_FEATURE_VIEWS", + name, + project, + OnDemandFeatureViewProto, + OnDemandFeatureView, + "ON_DEMAND_FEATURE_VIEW_NAME", + "ON_DEMAND_FEATURE_VIEW_PROTO", + FeatureViewNotFoundException, + ) + return fv + + def list_all_feature_views( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[BaseFeatureView]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_all_feature_views( + self.cached_registry_proto, project, tags + ) + + return ( + cast( + list[BaseFeatureView], + self.list_feature_views(project, allow_cache, tags), + ) + + cast( + list[BaseFeatureView], + self.list_stream_feature_views(project, allow_cache, tags), + ) + + cast( + list[BaseFeatureView], + self.list_on_demand_feature_views(project, allow_cache, tags), + ) + ) + def get_infra(self, project: str, allow_cache: bool = False) -> Infra: infra_object = self._get_object( "MANAGED_INFRA", diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py index 2b4a58266c..9ce3fbe5dd 100644 --- a/sdk/python/feast/infra/registry/sql.py +++ b/sdk/python/feast/infra/registry/sql.py @@ -4,7 +4,7 @@ from datetime import datetime, timezone from enum import Enum from pathlib import Path -from typing import Any, Callable, Dict, List, Optional, Union +from typing import Any, Callable, Dict, List, Optional, Union, cast from pydantic import StrictInt, StrictStr from sqlalchemy import ( # type: ignore @@ -334,6 +334,61 @@ def _get_entity(self, name: str, project: str) -> Entity: not_found_exception=EntityNotFoundException, ) + def _get_any_feature_view(self, name: str, project: str) -> BaseFeatureView: + fv = self._get_object( + table=feature_views, + name=name, + project=project, + proto_class=FeatureViewProto, + python_class=FeatureView, + 
id_field_name="feature_view_name", + proto_field_name="feature_view_proto", + not_found_exception=None, + ) + + if not fv: + fv = self._get_object( + table=on_demand_feature_views, + name=name, + project=project, + proto_class=OnDemandFeatureViewProto, + python_class=OnDemandFeatureView, + id_field_name="feature_view_name", + proto_field_name="feature_view_proto", + not_found_exception=None, + ) + + if not fv: + fv = self._get_object( + table=stream_feature_views, + name=name, + project=project, + proto_class=StreamFeatureViewProto, + python_class=StreamFeatureView, + id_field_name="feature_view_name", + proto_field_name="feature_view_proto", + not_found_exception=FeatureViewNotFoundException, + ) + return fv + + def _list_all_feature_views( + self, project: str, tags: Optional[dict[str, str]] + ) -> List[BaseFeatureView]: + return ( + cast( + list[BaseFeatureView], + self._list_feature_views(project=project, tags=tags), + ) + + cast( + list[BaseFeatureView], + self._list_stream_feature_views(project=project, tags=tags), + ) + + cast( + list[BaseFeatureView], + self._list_on_demand_feature_views(project=project, tags=tags), + ) + ) + def _get_feature_view(self, name: str, project: str) -> FeatureView: return self._get_object( table=feature_views, diff --git a/sdk/python/feast/registry_server.py b/sdk/python/feast/registry_server.py index 2661f25882..c2f4a688d3 100644 --- a/sdk/python/feast/registry_server.py +++ b/sdk/python/feast/registry_server.py @@ -8,6 +8,7 @@ from grpc_reflection.v1alpha import reflection from feast import FeatureService, FeatureStore +from feast.base_feature_view import BaseFeatureView from feast.data_source import DataSource from feast.entity import Entity from feast.errors import FeatureViewNotFoundException @@ -38,6 +39,28 @@ from feast.stream_feature_view import StreamFeatureView +def _build_any_feature_view_proto(feature_view: BaseFeatureView): + if isinstance(feature_view, StreamFeatureView): + arg_name = "stream_feature_view" + 
feature_view_proto = feature_view.to_proto() + elif isinstance(feature_view, FeatureView): + arg_name = "feature_view" + feature_view_proto = feature_view.to_proto() + elif isinstance(feature_view, OnDemandFeatureView): + arg_name = "on_demand_feature_view" + feature_view_proto = feature_view.to_proto() + + return RegistryServer_pb2.AnyFeatureView( + feature_view=feature_view_proto if arg_name == "feature_view" else None, + stream_feature_view=feature_view_proto + if arg_name == "stream_feature_view" + else None, + on_demand_feature_view=feature_view_proto + if arg_name == "on_demand_feature_view" + else None, + ) + + class RegistryServer(RegistryServer_pb2_grpc.RegistryServerServicer): def __init__(self, registry: BaseRegistry) -> None: super().__init__() @@ -178,6 +201,27 @@ def GetFeatureView( actions=[AuthzedAction.DESCRIBE], ).to_proto() + def GetAnyFeatureView( + self, request: RegistryServer_pb2.GetAnyFeatureViewRequest, context + ): + feature_view = assert_permissions( + cast( + FeastObject, + self.proxied_registry.get_any_feature_view( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + ), + actions=[AuthzedAction.DESCRIBE], + ) + + return RegistryServer_pb2.GetAnyFeatureViewResponse( + any_feature_view=_build_any_feature_view_proto( + cast(BaseFeatureView, feature_view) + ) + ) + def ApplyFeatureView( self, request: RegistryServer_pb2.ApplyFeatureViewRequest, context ): @@ -228,6 +272,26 @@ def ListFeatureViews( ] ) + def ListAllFeatureViews( + self, request: RegistryServer_pb2.ListAllFeatureViewsRequest, context + ): + return RegistryServer_pb2.ListAllFeatureViewsResponse( + feature_views=[ + _build_any_feature_view_proto(cast(BaseFeatureView, feature_view)) + for feature_view in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_all_feature_views( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, 
+ ) + ] + ) + def DeleteFeatureView( self, request: RegistryServer_pb2.DeleteFeatureViewRequest, context ): diff --git a/sdk/python/feast/utils.py b/sdk/python/feast/utils.py index 5862cd4630..992869557a 100644 --- a/sdk/python/feast/utils.py +++ b/sdk/python/feast/utils.py @@ -1,6 +1,5 @@ import copy import itertools -import logging import os import typing import warnings @@ -746,10 +745,6 @@ def _list_feature_views( ) -> List["FeatureView"]: from feast.feature_view import DUMMY_ENTITY_NAME - logging.warning( - "_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. " - "_list_feature_views will behave like _list_all_feature_views in the future." - ) feature_views = [] for fv in registry.list_feature_views(project, allow_cache=allow_cache, tags=tags): if hide_dummy_entity and fv.entities and fv.entities[0] == DUMMY_ENTITY_NAME: diff --git a/sdk/python/tests/integration/online_store/test_universal_online.py b/sdk/python/tests/integration/online_store/test_universal_online.py index 2ffe869ef5..308201590d 100644 --- a/sdk/python/tests/integration/online_store/test_universal_online.py +++ b/sdk/python/tests/integration/online_store/test_universal_online.py @@ -163,7 +163,6 @@ def test_write_to_online_store_event_check(environment): fs.apply([fv1, e]) assert len(fs.list_all_feature_views(tags=TAGS)) == 1 assert len(fs.list_feature_views(tags=TAGS)) == 1 - assert len(fs.list_batch_feature_views(tags=TAGS)) == 1 # data to ingest into Online Store (recent) data = { @@ -421,7 +420,7 @@ def setup_feature_store_universal_feature_views( feature_views = construct_universal_feature_views(data_sources) fs.apply([driver(), feature_views.driver, feature_views.global_fv]) - assert len(fs.list_batch_feature_views(TAGS)) == 2 + assert len(fs.list_all_feature_views(TAGS)) == 2 data = { "driver_id": [1, 2], @@ -518,7 +517,7 @@ def test_online_list_retrieval(environment, universal_data_sources): environment, universal_data_sources ) - assert 
len(fs.list_batch_feature_views(tags=TAGS)) == 2 + assert len(fs.list_all_feature_views(tags=TAGS)) == 2 @pytest.mark.integration diff --git a/sdk/python/tests/integration/registration/test_universal_registry.py b/sdk/python/tests/integration/registration/test_universal_registry.py index aee93b4705..6e4a208d4b 100644 --- a/sdk/python/tests/integration/registration/test_universal_registry.py +++ b/sdk/python/tests/integration/registration/test_universal_registry.py @@ -36,6 +36,7 @@ from feast.field import Field from feast.infra.infra_object import Infra from feast.infra.online_stores.sqlite import SqliteTable +from feast.infra.registry.base_registry import BaseRegistry from feast.infra.registry.registry import Registry from feast.infra.registry.remote import RemoteRegistry, RemoteRegistryConfig from feast.infra.registry.sql import SqlRegistry, SqlRegistryConfig @@ -415,7 +416,7 @@ def assert_project(project_name, test_registry, allow_cache=False): "test_registry", all_fixtures, ) -def test_apply_feature_view_success(test_registry): +def test_apply_feature_view_success(test_registry: BaseRegistry): # Create Feature Views batch_source = FileSource( file_format=ParquetFormat(), @@ -464,6 +465,8 @@ def test_apply_feature_view_success(test_registry): ) feature_view = test_registry.get_feature_view("my_feature_view_1", project) + any_feature_view = test_registry.get_any_feature_view("my_feature_view_1", project) + assert ( feature_view.name == "my_feature_view_1" and feature_view.features[0].name == "fs1_my_feature_1" @@ -475,6 +478,7 @@ def test_apply_feature_view_success(test_registry): and feature_view.features[3].name == "fs1_my_feature_4" and feature_view.features[3].dtype == Array(Bytes) and feature_view.entities[0] == "fs1_my_entity_1" + and feature_view == any_feature_view ) assert feature_view.ttl == timedelta(minutes=5) @@ -502,7 +506,7 @@ def test_apply_feature_view_success(test_registry): "test_registry", sql_fixtures, ) -def 
test_apply_on_demand_feature_view_success(test_registry): +def test_apply_on_demand_feature_view_success(test_registry: BaseRegistry): # Create Feature Views driver_stats = FileSource( name="driver_stats_source", @@ -545,6 +549,7 @@ def location_features_from_push(inputs: pd.DataFrame) -> pd.DataFrame: test_registry.get_user_metadata(project, location_features_from_push) # Register Feature View + test_registry.apply_feature_view(driver_daily_features_view, project) test_registry.apply_feature_view(location_features_from_push, project) assert not test_registry.get_user_metadata(project, location_features_from_push) @@ -563,13 +568,21 @@ def location_features_from_push(inputs: pd.DataFrame) -> pd.DataFrame: and feature_views[0].features[0].dtype == String ) + all_feature_views = test_registry.list_all_feature_views(project) + + assert len(all_feature_views) == 2 + feature_view = test_registry.get_on_demand_feature_view( "location_features_from_push", project ) + any_feature_view = test_registry.get_any_feature_view( + "location_features_from_push", project + ) assert ( feature_view.name == "location_features_from_push" and feature_view.features[0].name == "first_char" and feature_view.features[0].dtype == String + and feature_view == any_feature_view ) test_registry.delete_feature_view("location_features_from_push", project) @@ -1110,7 +1123,7 @@ def test_registry_cache_thread_async(test_registry): "test_registry", all_fixtures, ) -def test_apply_stream_feature_view_success(test_registry): +def test_apply_stream_feature_view_success(test_registry: BaseRegistry): # Create Feature Views def simple_udf(x: int): return x + 3 @@ -1163,8 +1176,11 @@ def simple_udf(x: int): project, tags=sfv.tags ) + all_feature_views = test_registry.list_all_feature_views(project, tags=sfv.tags) + # List Feature Views assert len(stream_feature_views) == 1 + assert len(all_feature_views) == 1 assert stream_feature_views[0] == sfv test_registry.delete_feature_view("test kafka stream feature 
view", project) From 87e7ca4f55bd23fe5bf38b010c6b5a43d1780fce Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 7 Sep 2024 16:31:14 +0400 Subject: [PATCH 50/96] chore: Bump actions/download-artifact from 2 to 4.1.7 in /.github/workflows (#4482) chore: Bump actions/download-artifact in /.github/workflows Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 2 to 4.1.7. - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v2...v4.1.7) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build_wheels.yml | 2 +- .github/workflows/publish.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index df8534d078..d8e5e484bb 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -159,7 +159,7 @@ jobs: with: python-version: ${{ matrix.python-version }} architecture: x64 - - uses: actions/download-artifact@v2 + - uses: actions/download-artifact@v4.1.7 with: name: wheels path: dist diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index e56296ec4b..0342943313 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -134,7 +134,7 @@ jobs: runs-on: ubuntu-latest needs: [build_wheels] steps: - - uses: actions/download-artifact@v2 + - uses: actions/download-artifact@v4.1.7 with: name: wheels path: dist From d793c77d923df95a186b9d4829b167f1a5a304e6 Mon Sep 17 00:00:00 2001 From: Bhargav Dodla <13788369+EXPEbdodla@users.noreply.github.com> Date: Sat, 7 Sep 2024 12:29:43 -0700 Subject: [PATCH 51/96] feat: Added support for reading from Reader 
Endpoints for AWS Aurora use cases (#4494) fix: Resovled merge conflicts associated to new changes Signed-off-by: Bhargav Dodla Co-authored-by: Bhargav Dodla --- sdk/python/feast/infra/registry/sql.py | 51 +++++++++++-------- .../registration/test_universal_registry.py | 43 ++++++++++++++++ 2 files changed, 74 insertions(+), 20 deletions(-) diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py index 9ce3fbe5dd..b049adc898 100644 --- a/sdk/python/feast/infra/registry/sql.py +++ b/sdk/python/feast/infra/registry/sql.py @@ -202,6 +202,10 @@ class SqlRegistryConfig(RegistryConfig): """ str: Path to metadata store. If registry_type is 'sql', then this is a database URL as expected by SQLAlchemy """ + read_path: Optional[StrictStr] = None + """ str: Read Path to metadata store if different from path. + If registry_type is 'sql', then this is a Read Endpoint for database URL. If not set, path will be used for read and write. """ + sqlalchemy_config_kwargs: Dict[str, Any] = {"echo": False} """ Dict[str, Any]: Extra arguments to pass to SQLAlchemy.create_engine. 
""" @@ -223,13 +227,20 @@ def __init__( registry_config, SqlRegistryConfig ), "SqlRegistry needs a valid registry_config" - self.engine: Engine = create_engine( + self.write_engine: Engine = create_engine( registry_config.path, **registry_config.sqlalchemy_config_kwargs ) + if registry_config.read_path: + self.read_engine: Engine = create_engine( + registry_config.read_path, + **registry_config.sqlalchemy_config_kwargs, + ) + else: + self.read_engine = self.write_engine + metadata.create_all(self.write_engine) self.thread_pool_executor_worker_count = ( registry_config.thread_pool_executor_worker_count ) - metadata.create_all(self.engine) self.purge_feast_metadata = registry_config.purge_feast_metadata # Sync feast_metadata to projects table # when purge_feast_metadata is set to True, Delete data from @@ -246,7 +257,7 @@ def __init__( def _sync_feast_metadata_to_projects_table(self): feast_metadata_projects: set = [] projects_set: set = [] - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: stmt = select(feast_metadata).where( feast_metadata.c.metadata_key == FeastMetadataKeys.PROJECT_UUID.value ) @@ -255,7 +266,7 @@ def _sync_feast_metadata_to_projects_table(self): feast_metadata_projects.append(row._mapping["project_id"]) if len(feast_metadata_projects) > 0: - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: stmt = select(projects) rows = conn.execute(stmt).all() for row in rows: @@ -267,7 +278,7 @@ def _sync_feast_metadata_to_projects_table(self): self.apply_project(Project(name=project_name), commit=True) if self.purge_feast_metadata: - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: for project_name in feast_metadata_projects: stmt = delete(feast_metadata).where( feast_metadata.c.project_id == project_name @@ -285,7 +296,7 @@ def teardown(self): validation_references, permissions, }: - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: stmt = delete(t) 
conn.execute(stmt) @@ -549,7 +560,7 @@ def apply_feature_service( ) def delete_data_source(self, name: str, project: str, commit: bool = True): - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: stmt = delete(data_sources).where( data_sources.c.data_source_name == name, data_sources.c.project_id == project, @@ -607,7 +618,7 @@ def _list_on_demand_feature_views( ) def _list_project_metadata(self, project: str) -> List[ProjectMetadata]: - with self.engine.begin() as conn: + with self.read_engine.begin() as conn: stmt = select(feast_metadata).where( feast_metadata.c.project_id == project, ) @@ -726,7 +737,7 @@ def apply_user_metadata( table = self._infer_fv_table(feature_view) name = feature_view.name - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: stmt = select(table).where( getattr(table.c, "feature_view_name") == name, table.c.project_id == project, @@ -781,7 +792,7 @@ def get_user_metadata( table = self._infer_fv_table(feature_view) name = feature_view.name - with self.engine.begin() as conn: + with self.read_engine.begin() as conn: stmt = select(table).where(getattr(table.c, "feature_view_name") == name) row = conn.execute(stmt).first() if row: @@ -885,7 +896,7 @@ def _apply_object( name = name or (obj.name if hasattr(obj, "name") else None) assert name, f"name needs to be provided for {obj}" - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: update_datetime = _utc_now() update_time = int(update_datetime.timestamp()) stmt = select(table).where( @@ -961,7 +972,7 @@ def _apply_object( def _maybe_init_project_metadata(self, project): # Initialize project metadata if needed - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: update_datetime = _utc_now() update_time = int(update_datetime.timestamp()) stmt = select(feast_metadata).where( @@ -988,7 +999,7 @@ def _delete_object( id_field_name: str, not_found_exception: Optional[Callable], ): - with 
self.engine.begin() as conn: + with self.write_engine.begin() as conn: stmt = delete(table).where( getattr(table.c, id_field_name) == name, table.c.project_id == project ) @@ -1014,7 +1025,7 @@ def _get_object( proto_field_name: str, not_found_exception: Optional[Callable], ): - with self.engine.begin() as conn: + with self.read_engine.begin() as conn: stmt = select(table).where( getattr(table.c, id_field_name) == name, table.c.project_id == project ) @@ -1036,7 +1047,7 @@ def _list_objects( proto_field_name: str, tags: Optional[dict[str, str]] = None, ): - with self.engine.begin() as conn: + with self.read_engine.begin() as conn: stmt = select(table).where(table.c.project_id == project) rows = conn.execute(stmt).all() if rows: @@ -1051,7 +1062,7 @@ def _list_objects( return [] def _set_last_updated_metadata(self, last_updated: datetime, project: str): - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: stmt = select(feast_metadata).where( feast_metadata.c.metadata_key == FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value, @@ -1085,7 +1096,7 @@ def _set_last_updated_metadata(self, last_updated: datetime, project: str): conn.execute(insert_stmt) def _get_last_updated_metadata(self, project: str): - with self.engine.begin() as conn: + with self.read_engine.begin() as conn: stmt = select(feast_metadata).where( feast_metadata.c.metadata_key == FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value, @@ -1130,7 +1141,7 @@ def apply_permission( ) def delete_permission(self, name: str, project: str, commit: bool = True): - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: stmt = delete(permissions).where( permissions.c.permission_name == name, permissions.c.project_id == project, @@ -1143,7 +1154,7 @@ def _list_projects( self, tags: Optional[dict[str, str]], ) -> List[Project]: - with self.engine.begin() as conn: + with self.read_engine.begin() as conn: stmt = select(projects) rows = conn.execute(stmt).all() if rows: @@ -1188,7 
+1199,7 @@ def delete_project( ): project = self.get_project(name, allow_cache=False) if project: - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: for t in { managed_infra, saved_datasets, diff --git a/sdk/python/tests/integration/registration/test_universal_registry.py b/sdk/python/tests/integration/registration/test_universal_registry.py index 6e4a208d4b..5dc2509333 100644 --- a/sdk/python/tests/integration/registration/test_universal_registry.py +++ b/sdk/python/tests/integration/registration/test_universal_registry.py @@ -118,9 +118,43 @@ def minio_registry(minio_server): yield Registry("project", registry_config, None) +POSTGRES_READONLY_USER = "read_only_user" +POSTGRES_READONLY_PASSWORD = "readonly_password" + logger = logging.getLogger(__name__) +def add_pg_read_only_user( + container_host, container_port, db_name, postgres_user, postgres_password +): + # Connect to PostgreSQL as an admin + import psycopg + + conn_string = f"dbname={db_name} user={postgres_user} password={postgres_password} host={container_host} port={container_port}" + + with psycopg.connect(conn_string) as conn: + user_exists = conn.execute( + f"SELECT 1 FROM pg_catalog.pg_user WHERE usename = '{POSTGRES_READONLY_USER}'" + ).fetchone() + if not user_exists: + conn.execute( + f"CREATE USER {POSTGRES_READONLY_USER} WITH PASSWORD '{POSTGRES_READONLY_PASSWORD}';" + ) + + conn.execute( + f"REVOKE ALL PRIVILEGES ON DATABASE {db_name} FROM {POSTGRES_READONLY_USER};" + ) + conn.execute( + f"GRANT CONNECT ON DATABASE {db_name} TO {POSTGRES_READONLY_USER};" + ) + conn.execute( + f"GRANT SELECT ON ALL TABLES IN SCHEMA public TO {POSTGRES_READONLY_USER};" + ) + conn.execute( + f"ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO {POSTGRES_READONLY_USER};" + ) + + @pytest.fixture(scope="function") def pg_registry(postgres_server): db_name = "".join(random.choices(string.ascii_lowercase, k=10)) @@ -130,6 +164,14 @@ def pg_registry(postgres_server): 
container_port = postgres_server.get_exposed_port(5432) container_host = postgres_server.get_container_host_ip() + add_pg_read_only_user( + container_host, + container_port, + db_name, + postgres_server.username, + postgres_server.password, + ) + registry_config = SqlRegistryConfig( registry_type="sql", cache_ttl_seconds=2, @@ -137,6 +179,7 @@ def pg_registry(postgres_server): # The `path` must include `+psycopg` in order for `sqlalchemy.create_engine()` # to understand that we are using psycopg3. path=f"postgresql+psycopg://{postgres_server.username}:{postgres_server.password}@{container_host}:{container_port}/{db_name}", + read_path=f"postgresql+psycopg://{POSTGRES_READONLY_USER}:{POSTGRES_READONLY_PASSWORD}@{container_host}:{container_port}/{db_name}", sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, thread_pool_executor_worker_count=0, purge_feast_metadata=False, From 7ecc615945b7bb48e103ca6eb278b39759d71c5a Mon Sep 17 00:00:00 2001 From: Shuchu Han Date: Sun, 8 Sep 2024 04:34:38 -0400 Subject: [PATCH 52/96] fix: Fix the mypy type check issue. 
(#4498) Signed-off-by: Shuchu Han --- .../infra/utils/postgres/connection_utils.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/sdk/python/feast/infra/utils/postgres/connection_utils.py b/sdk/python/feast/infra/utils/postgres/connection_utils.py index 3749fc2fc1..70148f3ee0 100644 --- a/sdk/python/feast/infra/utils/postgres/connection_utils.py +++ b/sdk/python/feast/infra/utils/postgres/connection_utils.py @@ -56,14 +56,14 @@ async def _get_connection_pool_async(config: PostgreSQLConfig) -> AsyncConnectio def _get_conninfo(config: PostgreSQLConfig) -> str: """Get the `conninfo` argument required for connection objects.""" - psycopg_config = { - "user": config.user, - "password": config.password, - "host": config.host, - "port": int(config.port), - "dbname": config.database, - } - return make_conninfo(conninfo="", **psycopg_config) + return make_conninfo( + conninfo="", + user=config.user, + password=config.password, + host=config.host, + port=int(config.port), + dbname=config.database, + ) def _get_conn_kwargs(config: PostgreSQLConfig) -> Dict[str, Any]: From c94f32f2b637c7b7d917d2456432180af7569cf5 Mon Sep 17 00:00:00 2001 From: Jiwon Park Date: Mon, 9 Sep 2024 18:24:52 +0900 Subject: [PATCH 53/96] fix: Disable active_timer When registry_ttl_sec is 0 (#4499) * fix: Disable active_timer When registry_ttl_sec is 0 Signed-off-by: Jiwon Park * feat: Add delete mark Signed-off-by: Jiwon Park --------- Signed-off-by: Jiwon Park --- sdk/python/feast/feature_server.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/sdk/python/feast/feature_server.py b/sdk/python/feast/feature_server.py index 4f8de1eef5..9757e95143 100644 --- a/sdk/python/feast/feature_server.py +++ b/sdk/python/feast/feature_server.py @@ -90,9 +90,11 @@ def async_refresh(): registry_proto = store.registry.proto() if shutting_down: return - nonlocal active_timer - active_timer = threading.Timer(registry_ttl_sec, async_refresh) - 
active_timer.start() + + if registry_ttl_sec: + nonlocal active_timer + active_timer = threading.Timer(registry_ttl_sec, async_refresh) + active_timer.start() @asynccontextmanager async def lifespan(app: FastAPI): From 867f532154977790e3bb11f2a94baa4f2289de99 Mon Sep 17 00:00:00 2001 From: Shuchu Han Date: Mon, 9 Sep 2024 06:14:43 -0400 Subject: [PATCH 54/96] fix: Ignore the type check as both functions calls are not belonging to Feast code. (#4500) Signed-off-by: Shuchu Han --- sdk/python/feast/ui_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/feast/ui_server.py b/sdk/python/feast/ui_server.py index 35b51a8021..7e8591e2aa 100644 --- a/sdk/python/feast/ui_server.py +++ b/sdk/python/feast/ui_server.py @@ -51,7 +51,7 @@ def shutdown_event(): async_refresh() - ui_dir_ref = importlib_resources.files(__spec__.parent) / "ui/build/" # type: ignore[name-defined] + ui_dir_ref = importlib_resources.files(__spec__.parent) / "ui/build/" # type: ignore[name-defined, arg-type] with importlib_resources.as_file(ui_dir_ref) as ui_dir: # Initialize with the projects-list.json file with ui_dir.joinpath("projects-list.json").open(mode="w") as f: From 0c90137f628917914cb9373a2b4aa5c21c5a4457 Mon Sep 17 00:00:00 2001 From: Abdul Hameed Date: Mon, 9 Sep 2024 10:00:35 -0400 Subject: [PATCH 55/96] chore: Added rbac examples (#4450) * added rbac examples Signed-off-by: Abdul Hameed * Fixed the intra service communication Signed-off-by: Abdul Hameed * remove the rbac-local example Signed-off-by: Abdul Hameed * updated readme with suggestions from code review Co-authored-by: Francisco Arceo --------- Signed-off-by: Abdul Hameed Co-authored-by: Francisco Arceo --- examples/rbac-remote/README.md | 171 ++++++++++++++++++ examples/rbac-remote/cleanup_feast.sh | 24 +++ .../client/k8s/admin_user_resources.yaml | 56 ++++++ .../k8s/feature_repo/feature_store.yaml | 14 ++ .../client/k8s/feature_repo/test.py | 140 ++++++++++++++ 
.../client/k8s/readonly_user_resources.yaml | 57 ++++++ .../k8s/unauthorized_user_resources.yaml | 36 ++++ .../client/oidc/admin_user_resources.yaml | 34 ++++ .../oidc/feature_repo/feature_store.yaml | 19 ++ .../client/oidc/feature_repo/test.py | 140 ++++++++++++++ .../client/oidc/readonly_user_resources.yaml | 34 ++++ .../oidc/unauthorized_user_resources.yaml | 35 ++++ examples/rbac-remote/demo.jpg | Bin 0 -> 115961 bytes examples/rbac-remote/deployment.png | Bin 0 -> 98226 bytes examples/rbac-remote/install_feast.sh | 109 +++++++++++ .../server/feature_repo/example_repo.py | 130 +++++++++++++ .../server/feature_repo/feature_store.yaml | 26 +++ .../server/feature_repo/permissions_apply.py | 21 +++ .../server/k8s/feature_store_offline.yaml | 16 ++ .../server/k8s/feature_store_online.yaml | 20 ++ .../server/k8s/feature_store_registry.yaml | 12 ++ .../server/k8s/server_resources.yaml | 27 +++ .../server/oidc/feature_store_offline.yaml | 18 ++ .../server/oidc/feature_store_online.yaml | 22 +++ .../server/oidc/feature_store_registry.yaml | 14 ++ .../templates/deployment.yaml | 1 + infra/charts/feast-feature-server/values.yaml | 3 + .../client/auth_client_manager_factory.py | 15 +- 28 files changed, 1192 insertions(+), 2 deletions(-) create mode 100644 examples/rbac-remote/README.md create mode 100755 examples/rbac-remote/cleanup_feast.sh create mode 100644 examples/rbac-remote/client/k8s/admin_user_resources.yaml create mode 100644 examples/rbac-remote/client/k8s/feature_repo/feature_store.yaml create mode 100644 examples/rbac-remote/client/k8s/feature_repo/test.py create mode 100644 examples/rbac-remote/client/k8s/readonly_user_resources.yaml create mode 100644 examples/rbac-remote/client/k8s/unauthorized_user_resources.yaml create mode 100644 examples/rbac-remote/client/oidc/admin_user_resources.yaml create mode 100644 examples/rbac-remote/client/oidc/feature_repo/feature_store.yaml create mode 100644 examples/rbac-remote/client/oidc/feature_repo/test.py create mode 
100644 examples/rbac-remote/client/oidc/readonly_user_resources.yaml create mode 100644 examples/rbac-remote/client/oidc/unauthorized_user_resources.yaml create mode 100644 examples/rbac-remote/demo.jpg create mode 100644 examples/rbac-remote/deployment.png create mode 100755 examples/rbac-remote/install_feast.sh create mode 100644 examples/rbac-remote/server/feature_repo/example_repo.py create mode 100644 examples/rbac-remote/server/feature_repo/feature_store.yaml create mode 100644 examples/rbac-remote/server/feature_repo/permissions_apply.py create mode 100644 examples/rbac-remote/server/k8s/feature_store_offline.yaml create mode 100644 examples/rbac-remote/server/k8s/feature_store_online.yaml create mode 100644 examples/rbac-remote/server/k8s/feature_store_registry.yaml create mode 100644 examples/rbac-remote/server/k8s/server_resources.yaml create mode 100644 examples/rbac-remote/server/oidc/feature_store_offline.yaml create mode 100644 examples/rbac-remote/server/oidc/feature_store_online.yaml create mode 100644 examples/rbac-remote/server/oidc/feature_store_registry.yaml diff --git a/examples/rbac-remote/README.md b/examples/rbac-remote/README.md new file mode 100644 index 0000000000..118800db55 --- /dev/null +++ b/examples/rbac-remote/README.md @@ -0,0 +1,171 @@ +# Feast Deployment with RBAC + +## Demo Summary +This demo showcases how to enable Role-Based Access Control (RBAC) for Feast using Kubernetes or [OIDC](https://openid.net/developers/how-connect-works/) Authentication type. +The demo steps involve deploying server components (registry, offline, online) and client examples within a Kubernetes environment. +The goal is to ensure secure access control based on user roles and permissions. For understanding the Feast RBAC framework +Please read these reference documents. +- [RBAC Architecture](https://docs.feast.dev/v/master/getting-started/architecture/rbac) +- [RBAC Permission](https://docs.feast.dev/v/master/getting-started/concepts/permission). 
+- [RBAC Authorization Manager](https://docs.feast.dev/v/master/getting-started/components/authz_manager) + +## Tools and Projects +- Kubernetes +- Feast +- PostgreSQL Database +- [Keycloak](https://www.keycloak.org) (if OIDC) + +## Application Environment + +This demo contains the following components: + +1. Feast Remote Server components (online, offline, registry). +2. Feast Remote Client RBAC example. +3. Yaml Configuration and installation related scripts files. + +![demo.jpg](demo.jpg) + +## Setup Instructions + +The application works with Kubernetes or OpenShift and the instructions assume that you are using a Kubernetes or OpenShift cluster. + +### Prerequisites + +1. Kubernetes Cluster and Kubernetes CLI (kubectl). +2. Helm: Ensure you have Helm installed for deploying the Feast components. +3. Python environment. +4. Feast CLI latest version. + +## 1. Prerequisites Step + + - **Step 1 : Create the Feast project with PostgreSQL.** + + * Install the PostgreSQL on a Kubernetes cluster if you are using OpenShift you can install using [OpenShift Template](https://github.com/RHEcosystemAppEng/feast-workshop-team-share/tree/main/feast_postgres#1-install-postgresql-on-openshift-using-openshift-template) + * Port Forward the PostgreSQL Database to your local machine. Since we are setting up the Feast project locally using the Feast CLI, we need to port forward PostgreSQL: + ``` kubectl port-forward svc/postgresql 5432:5432``` + * Create a feature repository/project using the cli with PostgreSQL. Please see the instructions for more details [here](https://docs.feast.dev/reference/offline-stores/postgres#getting-started). + For this (local) example setup, we create a project with name server using these settings for the [feature_store.yaml](server/feature_repo/feature_store.yaml). + +## 2. Authorization Setup + +### A. 
Kubernetes Authorization +- **Step 1: Create Remote configuration Files** + - Set the auth type to `kubernetes` in the respective `feature_store` files + + ```yaml + auth: + type: kubernetes + ``` + - For each server, feature store YAML files can be created for example like below: + + **Registry Server:** [feature_store_registry.yaml](server/k8s/feature_store_registry.yaml) + + **Offline Server :** [feature_store_offline.yaml](server/k8s/feature_store_offline.yaml) + + **Online Server :** [feature_store_online.yaml](server/k8s/feature_store_online.yaml) + +- **Step 2: Deploy the Server Components** + - Run the installation script. The setup script will deploy the server components based on the user's confirmation, enter `k8s` for kubernetes authentication deployment. The script will deploy all the components with the namespace `feast-dev`. + + ```sh + ./install_feast.sh + ``` + +### B. OIDC Authorization +- **Step 1: Setup Keycloak** + - See the documentation [here](https://www.keycloak.org/getting-started/getting-started-kube) and install Keycloak. + - Create a new realm with the name `feast-rbac` from the admin console. + - Under the `feast-rbac` realm, create a new client with the name `feast-client` + - Generate the secret for the `feast-client`. +- **Step 2: Create the Server Feature Store Files** + - Set the auth type to `oidc` in the respective `feature_store` files + + ```yaml + auth: + type: oidc + client_id: _CLIENT_ID__ + auth_discovery_url: _OIDC_SERVER_URL_/realms/feast-rbac/.well-known/openid-configuration + ``` + - For each server the feature store YAML files can be created for example like below: + + **Registry Server:** [feature_store_registry.yaml](server/oidc/feature_store_registry.yaml) + + **Offline Server :** [feature_store_offline.yaml](server/oidc/feature_store_offline.yaml) + + **Online Server :** [feature_store_online.yaml](server/oidc/feature_store_online.yaml) + +- **Step 3: Deploy the Server Components** + - Run the installation script. 
Enter `oidc` for the Keycloak authentication deployment. The script will deploy all of the components with the namespace `feast-dev`. + + ```sh + ./install_feast.sh + ``` + +## 3. Client Setup + +### A. Kubernetes Authorization +- **Step 1: Create the Client Feature Store YAML** + - Set up the client feature store with remote connection details for the registry, online, and offline store with auth type `kuberentes` . See the client remote setting example here: [feature_store.yaml](client/k8s/feature_repo/feature_store.yaml) +- **Step 2: Deploy the Client Examples** + - As an example, we created 3 different users: 1. [admin_user](client/k8s/admin_user_resources.yaml), 2. [readonly_user](client/k8s/readonly_user_resources.yaml) and 3. [unauthorized_user](client/k8s/unauthorized_user_resources.yaml) . + - Each user is assigned their own service account and roles, as shown in the table below. + ##### Roles and Permissions for Examples (Admin and User) + | **User** | **Service Account** | **Roles** | **Permission** | **Feast Resources** | **Actions** | + |-----------------|----------------------------|------------------|--------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------| + | admin | feast-admin-sa | feast-admin-role | feast_admin_permission | FeatureView, OnDemandFeatureView, BatchFeatureView, StreamFeatureView, Entity, FeatureService, DataSource, ValidationReference, SavedDataset, Permission | CREATE, DESCRIBE, UPDATE, DELETE, READ_ONLINE, READY_OFFLINE, WRITE_ONLINE, WRITE_OFFLINE | + | user | feast-user-sa | feast-user-role | feast_user_permission | FeatureView, OnDemandFeatureView, BatchFeatureView, StreamFeatureView, Entity, FeatureService, DataSource, ValidationReference, SavedDataset, Permission | READ, READ_OFFLINE, READ_ONLINE | + 
|unauthorized-user| feast-unauthorized-user-sa | | + - To deploy the client confirm `Apply client creation examples` `Y` + - The Deployment of the overall setup looks like : + + ![Deployment.png](deployment.png) + +### B. OIDC Authorization +- **Step 1: Create the Client Feature Store YAML** + - Set up the client feature store with the remote connection details for the registry, online, and offline store. + - Set the `Auth type` to `oidc` + - update the client secret in client side `feature_store.yaml` or if required any other settings as show below. + ``` + auth_discovery_url: https://keycloak-feast-dev.apps.com/realms/feast-rbac/.well-known/openid-configuration + client_id: feast-client + client_secret: update-this-value + username: ${FEAST_USERNAME} + password: ${FEAST_PASSWORD} + ``` + - See the client remote setting example here: [feature_store.yaml](client/oidc/feature_repo/feature_store.yaml) +- **Step 2: Create the Roles and Users** + - Under the `feast-client` create the two roles `feast-admin-role` and `feast-user-role` + - Under the `feast-rbac` realm, create 3 different users: `admin-user`, `readonly-user`, and `unauthorized-user`. Assign the password `feast` to each user. + - Map the roles to users: select the `admin-user`, go to `Role mapping`, and assign the `feast-admin-role`. Select the `readonly-user` and assign the `feast-user-role`. For the `unauthorized-user`, do not assign any roles. +- **Step 3: Deploy the Client Examples** + - For OIDC, similar to the k8s examples, create different deployments and add the username and password as environment variables: 1. [admin_user](client/oidc/admin_user_resources.yaml), 2. [readonly_user](client/oidc/readonly_user_resources.yaml) and 3. [unauthorized_user](client/oidc/unauthorized_user_resources.yaml) . + - To deploy the client confirm `Apply client creation examples` `Y` + +## 4. 
Permissions Management +- **Step 1: Apply the Permissions** + - See the code example in [permissions_apply.py](server/feature_repo/permissions_apply.py) for applying the permissions for both Kubernetes and OIDC setup. + - The `install_feast.sh` has the option to apply permission from the pod with the user's confirmation `Do you want to copy files and execute 'feast apply in the pod? (y/n)`. +- **Step 2: Validate the Permissions** + - use the Feast cli to validate the permissions with the command `feast permissions list` for more details use `feast permissions list -v`. Additionally, there are other commands such as: + `feast permissions check / describe / list-roles` +## 5. Validating the Permissions/RBAC results +- **Run the Examples** + - As outlined in the [test.py](client/k8s/feature_repo/test.py) script, the example attempts to fetch Historical Features, perform Materialization, fetch Online Features, and push to the online/offline store based on user roles. + - The `admin-user` can perform all actions on all objects. + - The `readonly-user` can only read or query all objects. + - `unauthorized user` should not able to read or write any resources as no role is defined for this user. + - From each user's pod run the example `python feature_repo/test.py` + +## 6. Local Testing and Cleanup +- **Local Testing** + - For local testing, port forward the services PostgreSQL Service and Feast Servers with the commands below: + ``` + kubectl port-forward svc/postgresql 5432:5432 + kubectl port-forward svc/feast-offline-server-feast-feature-server 8815:80 + kubectl port-forward svc/feast-registry-server-feast-feature-server 6570:80 + kubectl port-forward svc/feast-feature-server 6566:80 + ``` + - When testing in Kubernetes, users can set the environment variable `LOCAL_K8S_TOKEN` in each example. The token can be obtained from the service account. 
+- **Cleanup** + - Run the command + - ```./cleanup_feast.sh``` \ No newline at end of file diff --git a/examples/rbac-remote/cleanup_feast.sh b/examples/rbac-remote/cleanup_feast.sh new file mode 100755 index 0000000000..18acf6727c --- /dev/null +++ b/examples/rbac-remote/cleanup_feast.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +DEFAULT_HELM_RELEASES=("feast-feature-server" "feast-offline-server" "feast-registry-server") +NAMESPACE="feast-dev" + +HELM_RELEASES=(${1:-${DEFAULT_HELM_RELEASES[@]}}) +NAMESPACE=${2:-$NAMESPACE} + +echo "Deleting Helm releases..." +for release in "${HELM_RELEASES[@]}"; do + helm uninstall $release -n $NAMESPACE +done + +echo "Deleting Kubernetes roles, role bindings, and service accounts for clients" +kubectl delete -f client/k8s/admin_user_resources.yaml +kubectl delete -f client/k8s/readonly_user_resources.yaml +kubectl delete -f client/k8s/unauthorized_user_resources.yaml +kubectl delete -f client/oidc/admin_user_resources.yaml +kubectl delete -f client/oidc/readonly_user_resources.yaml +kubectl delete -f client/oidc/unauthorized_user_resources.yaml +kubectl delete -f server/k8s/server_resources.yaml +kubectl delete configmap client-feature-repo-config + +echo "Cleanup completed." 
diff --git a/examples/rbac-remote/client/k8s/admin_user_resources.yaml b/examples/rbac-remote/client/k8s/admin_user_resources.yaml new file mode 100644 index 0000000000..d5df8bcbf2 --- /dev/null +++ b/examples/rbac-remote/client/k8s/admin_user_resources.yaml @@ -0,0 +1,56 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: feast-admin-sa + namespace: feast-dev +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: feast-admin-role + namespace: feast-dev +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: feast-admin-rolebinding + namespace: feast-dev +subjects: + - kind: ServiceAccount + name: feast-admin-sa + namespace: feast-dev +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: feast-admin-role +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: client-admin-user + namespace: feast-dev + labels: + app: client-admin +spec: + replicas: 1 + selector: + matchLabels: + app: client-admin + template: + metadata: + labels: + app: client-admin + spec: + serviceAccountName: feast-admin-sa + containers: + - name: client-admin-container + image: feastdev/feature-server:latest + imagePullPolicy: Always + command: ["sleep", "infinity"] + volumeMounts: + - name: client-feature-repo-config + mountPath: /feature_repo + volumes: + - name: client-feature-repo-config + configMap: + name: client-feature-repo-config diff --git a/examples/rbac-remote/client/k8s/feature_repo/feature_store.yaml b/examples/rbac-remote/client/k8s/feature_repo/feature_store.yaml new file mode 100644 index 0000000000..d316005098 --- /dev/null +++ b/examples/rbac-remote/client/k8s/feature_repo/feature_store.yaml @@ -0,0 +1,14 @@ +project: server +registry: + registry_type: remote + path: feast-registry-server-feast-feature-server.feast-dev.svc.cluster.local:80 +offline_store: + type: remote + host: feast-offline-server-feast-feature-server.feast-dev.svc.cluster.local + port: 80 +online_store: + type: remote + path: 
http://feast-feature-server.feast-dev.svc.cluster.local:80 +auth: + type: kubernetes + diff --git a/examples/rbac-remote/client/k8s/feature_repo/test.py b/examples/rbac-remote/client/k8s/feature_repo/test.py new file mode 100644 index 0000000000..6e1480bc94 --- /dev/null +++ b/examples/rbac-remote/client/k8s/feature_repo/test.py @@ -0,0 +1,140 @@ +import os +from datetime import datetime + +import pandas as pd +from feast import FeatureStore +from feast.data_source import PushMode + + +def run_demo(): + try: + os.environ["LOCAL_K8S_TOKEN"] = "" + + store = FeatureStore(repo_path="/feature_repo") + + print("\n--- Historical features for training ---") + fetch_historical_features_entity_df(store, for_batch_scoring=False) + + print("\n--- Historical features for batch scoring ---") + fetch_historical_features_entity_df(store, for_batch_scoring=True) + + try: + print("\n--- Load features into online store/materialize_incremental ---") + feature_views= store.list_feature_views() + if not feature_views: + raise PermissionError("No access to feature-views or no feature-views available.") + store.materialize_incremental(end_date=datetime.now()) + except PermissionError as pe: + print(f"Permission error: {pe}") + except Exception as e: + print(f"An occurred while performing materialize incremental: {e}") + + print("\n--- Online features ---") + fetch_online_features(store) + + print("\n--- Online features retrieved (instead) through a feature service---") + fetch_online_features(store, source="feature_service") + + print( + "\n--- Online features retrieved (using feature service v3, which uses a feature view with a push source---" + ) + fetch_online_features(store, source="push") + + print("\n--- Simulate a stream event ingestion of the hourly stats df ---") + event_df = pd.DataFrame.from_dict( + { + "driver_id": [1001], + "event_timestamp": [datetime.now()], + "created": [datetime.now()], + "conv_rate": [1.0], + "acc_rate": [1.0], + "avg_daily_trips": [1000], + } + ) + 
store.push("driver_stats_push_source", event_df, to=PushMode.ONLINE_AND_OFFLINE) + + print("\n--- Online features again with updated values from a stream push---") + fetch_online_features(store, source="push") + + except Exception as e: + print(f"An error occurred: {e}") + + +def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: bool): + try: + entity_df = pd.DataFrame.from_dict( + { + "driver_id": [1001, 1002, 1003], + "event_timestamp": [ + datetime(2021, 4, 12, 10, 59, 42), + datetime(2021, 4, 12, 8, 12, 10), + datetime(2021, 4, 12, 16, 40, 26), + ], + "label_driver_reported_satisfaction": [1, 5, 3], + # values we're using for an on-demand transformation + "val_to_add": [1, 2, 3], + "val_to_add_2": [10, 20, 30], + + } + + ) + if for_batch_scoring: + entity_df["event_timestamp"] = pd.to_datetime("now", utc=True) + + training_df = store.get_historical_features( + entity_df=entity_df, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ], + ).to_df() + print(training_df.head()) + + except Exception as e: + print(f"An error occurred while fetching historical features: {e}") + + +def fetch_online_features(store, source: str = ""): + try: + entity_rows = [ + # {join_key: entity_value} + { + "driver_id": 1001, + "val_to_add": 1000, + "val_to_add_2": 2000, + }, + { + "driver_id": 1002, + "val_to_add": 1001, + "val_to_add_2": 2002, + }, + ] + if source == "feature_service": + features_to_fetch = store.get_feature_service("driver_activity_v1") + elif source == "push": + features_to_fetch = store.get_feature_service("driver_activity_v3") + else: + features_to_fetch = [ + "driver_hourly_stats:acc_rate", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ] + returned_features = store.get_online_features( + features=features_to_fetch, + 
entity_rows=entity_rows, + ).to_dict() + for key, value in sorted(returned_features.items()): + print(key, " : ", value) + + except Exception as e: + print(f"An error occurred while fetching online features: {e}") + + +if __name__ == "__main__": + try: + run_demo() + except Exception as e: + print(f"An error occurred in the main execution: {e}") diff --git a/examples/rbac-remote/client/k8s/readonly_user_resources.yaml b/examples/rbac-remote/client/k8s/readonly_user_resources.yaml new file mode 100644 index 0000000000..c9094e7f2f --- /dev/null +++ b/examples/rbac-remote/client/k8s/readonly_user_resources.yaml @@ -0,0 +1,57 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: feast-user-sa + namespace: feast-dev +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: feast-user-role + namespace: feast-dev +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: feast-user-rolebinding + namespace: feast-dev +subjects: + - kind: ServiceAccount + name: feast-user-sa + namespace: feast-dev +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: feast-user-role +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: client-readonly-user + namespace: feast-dev + labels: + app: client-user +spec: + replicas: 1 + selector: + matchLabels: + app: client-user + template: + metadata: + labels: + app: client-user + spec: + serviceAccountName: feast-user-sa + containers: + - name: client-user-container + image: feastdev/feature-server:latest + imagePullPolicy: Always + command: ["sleep", "infinity"] + volumeMounts: + - name: client-feature-repo-config + mountPath: /feature_repo + volumes: + - name: client-feature-repo-config + configMap: + name: client-feature-repo-config + diff --git a/examples/rbac-remote/client/k8s/unauthorized_user_resources.yaml b/examples/rbac-remote/client/k8s/unauthorized_user_resources.yaml new file mode 100644 index 0000000000..5068c94fd9 --- /dev/null +++ 
b/examples/rbac-remote/client/k8s/unauthorized_user_resources.yaml @@ -0,0 +1,36 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: feast-unauthorized-user-sa + namespace: feast-dev +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: client-unauthorized-user + namespace: feast-dev + labels: + app: client-unauthorized-user +spec: + replicas: 1 + selector: + matchLabels: + app: client-unauthorized-user + template: + metadata: + labels: + app: client-unauthorized-user + spec: + serviceAccountName: feast-unauthorized-user-sa + containers: + - name: client-unauthorized-user-container + image: feastdev/feature-server:latest + imagePullPolicy: Always + command: ["sleep", "infinity"] + volumeMounts: + - name: client-feature-repo-config + mountPath: /feature_repo + volumes: + - name: client-feature-repo-config + configMap: + name: client-feature-repo-config diff --git a/examples/rbac-remote/client/oidc/admin_user_resources.yaml b/examples/rbac-remote/client/oidc/admin_user_resources.yaml new file mode 100644 index 0000000000..7843ce3c9d --- /dev/null +++ b/examples/rbac-remote/client/oidc/admin_user_resources.yaml @@ -0,0 +1,34 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: client-admin-user + namespace: feast-dev + labels: + app: client-admin +spec: + replicas: 1 + selector: + matchLabels: + app: client-admin + template: + metadata: + labels: + app: client-admin + spec: + containers: + - name: client-admin-container + image: feastdev/feature-server:latest + imagePullPolicy: Always + command: ["sleep", "infinity"] + env: + - name: FEAST_USERNAME + value: admin-user + - name: FEAST_PASSWORD + value: feast + volumeMounts: + - name: client-feature-repo-config + mountPath: /feature_repo + volumes: + - name: client-feature-repo-config + configMap: + name: client-feature-repo-config diff --git a/examples/rbac-remote/client/oidc/feature_repo/feature_store.yaml b/examples/rbac-remote/client/oidc/feature_repo/feature_store.yaml new file mode 100644 
index 0000000000..1454e16df9 --- /dev/null +++ b/examples/rbac-remote/client/oidc/feature_repo/feature_store.yaml @@ -0,0 +1,19 @@ +project: server +registry: + registry_type: remote + path: feast-registry-server-feast-feature-server.feast-dev.svc.cluster.local:80 +offline_store: + type: remote + host: feast-offline-server-feast-feature-server.feast-dev.svc.cluster.local + port: 80 +online_store: + type: remote + path: http://feast-feature-server.feast-dev.svc.cluster.local:80 +auth: + type: oidc + auth_discovery_url: https://keycloak-feast-dev.apps.com/realms/feast-rbac/.well-known/openid-configuration + client_id: feast-client + client_secret: update-this-value + username: ${FEAST_USERNAME} + password: ${FEAST_PASSWORD} +entity_key_serialization_version: 2 diff --git a/examples/rbac-remote/client/oidc/feature_repo/test.py b/examples/rbac-remote/client/oidc/feature_repo/test.py new file mode 100644 index 0000000000..6e1480bc94 --- /dev/null +++ b/examples/rbac-remote/client/oidc/feature_repo/test.py @@ -0,0 +1,140 @@ +import os +from datetime import datetime + +import pandas as pd +from feast import FeatureStore +from feast.data_source import PushMode + + +def run_demo(): + try: + os.environ["LOCAL_K8S_TOKEN"] = "" + + store = FeatureStore(repo_path="/feature_repo") + + print("\n--- Historical features for training ---") + fetch_historical_features_entity_df(store, for_batch_scoring=False) + + print("\n--- Historical features for batch scoring ---") + fetch_historical_features_entity_df(store, for_batch_scoring=True) + + try: + print("\n--- Load features into online store/materialize_incremental ---") + feature_views= store.list_feature_views() + if not feature_views: + raise PermissionError("No access to feature-views or no feature-views available.") + store.materialize_incremental(end_date=datetime.now()) + except PermissionError as pe: + print(f"Permission error: {pe}") + except Exception as e: + print(f"An occurred while performing materialize incremental: 
{e}") + + print("\n--- Online features ---") + fetch_online_features(store) + + print("\n--- Online features retrieved (instead) through a feature service---") + fetch_online_features(store, source="feature_service") + + print( + "\n--- Online features retrieved (using feature service v3, which uses a feature view with a push source---" + ) + fetch_online_features(store, source="push") + + print("\n--- Simulate a stream event ingestion of the hourly stats df ---") + event_df = pd.DataFrame.from_dict( + { + "driver_id": [1001], + "event_timestamp": [datetime.now()], + "created": [datetime.now()], + "conv_rate": [1.0], + "acc_rate": [1.0], + "avg_daily_trips": [1000], + } + ) + store.push("driver_stats_push_source", event_df, to=PushMode.ONLINE_AND_OFFLINE) + + print("\n--- Online features again with updated values from a stream push---") + fetch_online_features(store, source="push") + + except Exception as e: + print(f"An error occurred: {e}") + + +def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: bool): + try: + entity_df = pd.DataFrame.from_dict( + { + "driver_id": [1001, 1002, 1003], + "event_timestamp": [ + datetime(2021, 4, 12, 10, 59, 42), + datetime(2021, 4, 12, 8, 12, 10), + datetime(2021, 4, 12, 16, 40, 26), + ], + "label_driver_reported_satisfaction": [1, 5, 3], + # values we're using for an on-demand transformation + "val_to_add": [1, 2, 3], + "val_to_add_2": [10, 20, 30], + + } + + ) + if for_batch_scoring: + entity_df["event_timestamp"] = pd.to_datetime("now", utc=True) + + training_df = store.get_historical_features( + entity_df=entity_df, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ], + ).to_df() + print(training_df.head()) + + except Exception as e: + print(f"An error occurred while fetching historical features: {e}") + + +def 
fetch_online_features(store, source: str = ""): + try: + entity_rows = [ + # {join_key: entity_value} + { + "driver_id": 1001, + "val_to_add": 1000, + "val_to_add_2": 2000, + }, + { + "driver_id": 1002, + "val_to_add": 1001, + "val_to_add_2": 2002, + }, + ] + if source == "feature_service": + features_to_fetch = store.get_feature_service("driver_activity_v1") + elif source == "push": + features_to_fetch = store.get_feature_service("driver_activity_v3") + else: + features_to_fetch = [ + "driver_hourly_stats:acc_rate", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ] + returned_features = store.get_online_features( + features=features_to_fetch, + entity_rows=entity_rows, + ).to_dict() + for key, value in sorted(returned_features.items()): + print(key, " : ", value) + + except Exception as e: + print(f"An error occurred while fetching online features: {e}") + + +if __name__ == "__main__": + try: + run_demo() + except Exception as e: + print(f"An error occurred in the main execution: {e}") diff --git a/examples/rbac-remote/client/oidc/readonly_user_resources.yaml b/examples/rbac-remote/client/oidc/readonly_user_resources.yaml new file mode 100644 index 0000000000..c43137bfba --- /dev/null +++ b/examples/rbac-remote/client/oidc/readonly_user_resources.yaml @@ -0,0 +1,34 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: client-readonly-user + namespace: feast-dev + labels: + app: client-user +spec: + replicas: 1 + selector: + matchLabels: + app: client-user + template: + metadata: + labels: + app: client-user + spec: + containers: + - name: client-admin-container + image: feastdev/feature-server:latest + imagePullPolicy: Always + command: ["sleep", "infinity"] + env: + - name: FEAST_USERNAME + value: readonly-user + - name: FEAST_PASSWORD + value: feast + volumeMounts: + - name: client-feature-repo-config + mountPath: /feature_repo + volumes: + - name: client-feature-repo-config + configMap: + name: 
client-feature-repo-config diff --git a/examples/rbac-remote/client/oidc/unauthorized_user_resources.yaml b/examples/rbac-remote/client/oidc/unauthorized_user_resources.yaml new file mode 100644 index 0000000000..f99bb3e987 --- /dev/null +++ b/examples/rbac-remote/client/oidc/unauthorized_user_resources.yaml @@ -0,0 +1,35 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: client-unauthorized-user + namespace: feast-dev + labels: + app: client-unauthorized-user +spec: + replicas: 1 + selector: + matchLabels: + app: client-unauthorized-user + template: + metadata: + labels: + app: client-unauthorized-user + spec: + containers: + - name: client-admin-container + image: feastdev/feature-server:latest + imagePullPolicy: Always + command: ["sleep", "infinity"] + env: + - name: FEAST_USERNAME + value: unauthorized-user + - name: FEAST_PASSWORD + value: feast + volumeMounts: + - name: client-feature-repo-config + mountPath: /feature_repo + volumes: + - name: client-feature-repo-config + configMap: + name: client-feature-repo-config + diff --git a/examples/rbac-remote/demo.jpg b/examples/rbac-remote/demo.jpg new file mode 100644 index 0000000000000000000000000000000000000000..718e49dde69a898cad82f3ad7b5d5d02b032a34e GIT binary patch literal 115961 zcmeFZ1yr2PvM4$P0wg#Dmn3Ks+#Ny)P9O=voxx?0fx&~j1qkjA0R{$lg1fuB5AOa* z{=M)1|F`eiYoGh>I%nOtZqJ(T>h9|9`g*Frs;;h{`^o!70JgNalsEtZ0RTXF_yF#g z5hkTXMYTVEmKT?j75k&26Yvm_SO5SEkgd&UiFYqlRMlRfEdJ5rSDh{pZ1pSs55hyg zXXC$m2LMJG{{x(VRs2NX01SLUIC%I{+dK^ZK0 z<1g6e%V*ICSpNZjW%MUl?@ut$%H|h-@B@y3g}L3Yv3`YL6r&q}6crv))Q2w_01WsH zkN~{<_52U{hhUiw0Pvgw0EplIQKlCM095(_07TROD5FjV0I<9PfQo^Cl>K8)taNR3 ze|LxU5I-_B1OSe5000bC003_g0C=kYyY3;eO=xm%wnJQnVR>$Z6%Tn+Q2}W z%g-#g$K$C?XRC2ovL>VT97}MlWd33nfl#{8H&q#eGPK!Xu^0o{ z^Iw)gEhRHq7Gf*HD7)hqxWTtyElE1qv8kYyTJ*rf;u@d|PuuwEW~r-=DYUh>v1MB| zes~C!PZEN+uprVckSg(_w9;VPCfFR?8pS4^A-0Qmayc=Ao)Uhqjp;H#d!*QJUwD&B5#)88E7}Vfa9G^ER9J3!w$*FkzpdISz0rne7_$CcI8~-R(-Bysy59p 
zcW5L(-#Zi9`)wq&eE_Vpv1LCCKzsk0Ur&V5#OiTHS7bMr^qBO)20G%l$l7 ze+oJos?gBM)y(J`x97Q7ksPVek+>{G5f(dT)|6y>6kYOt#8WSzpXz((B4)1%6I?LK z2gqNe;0_;B>8FxvV|eCd#%?kCdNqaDoK3}y;#^#c6xIZ*`?kn6Q`wGfp7}{lLDA&a zI*0YDR$4#qTdRQ%fyp#bW4sUNPyhu05z1bYPNk%LMET;sA?o%TZa&ena#GRqNA-92 ze~7#?wZ0G75E?n*ofE3mzfN9d>OY~|dl6E}FgO-(I+moTvI5TVfnu=nV77ZoB7&!? z`I-A}>1NgE5NW}ZUHKfXvh7C2kOFH;+Er7Y`Lf=&g6}+gUGgk(bDJH4nZ+E9_8}G8 znYmtzWb$C<=e9yYd#*i7OIaA&u|sjPcV=Ie)3oO-zxzwTS?i&BPI8ah){ zAC9uoOWvF4+;MOZklr>EcvYCt7s{mX;FD`>J{4~jpw5b3bo|v`q!bY`NW@;>+hPSx z!z_ffm7LJM*_Z(u@9T$eL-wxoFojitG$OM@?^Op?nx`>s(%k0RvFCdG(FT^H7r1iU zG|wy<`=BVG^b4$2j?AQ9h7+K!qyc3kW%kM3Q=1UX^o>Ax0U@t_hvh@FC?^^)i@8W! zvuD$~V;&vzbD`IuXfTppf;5+=QV*ncV-PTD1}-tqd~tO32n|9PtIaCd<5Y?T<d%Z)Cdq9H^*ajqa&Ru^y*C&!fNPNQ(V3mB$9&FxIiMsb@UVwwybW zBbRC6Tkmwp#giVAVF(K`Mkstx0ZAY4c`Z>OpTD#w6*Z_$HZ|a|CEAW>61Ynnn`rw{ zBCyU_8d1iy`mIKRR2_R=2h0VU?Z$I=?Yhxes?S%pD?%?vw*PGM>Nw6-_B13Z~Bb1=NCdw6j7=Op(D4Ymts6KFN`<))_S>^Xitv09WMqJRJTDfxza z!O_j>nN3Mu5^2xs=2Eta(dtaixKrI|a?}`)K1*L#Lj^{XeIKEPGW(iKLF%J&0GDC|u)F_wiSN||( zmY`Ldv%$)3b-Y-2hB?elmYvlWg7@RXdiJs{#Gv(t2}{ilXyn|F!tvw7I00Gpd|J$z;kFofyD<{4&>&cOb9(}YK zxfEGS6AVC&s{T|vPeQF|GRZR9G@`}^a{PkALRY(HK3KsZGdwY(iHLI(4p}#bPGGM` z%$-FISAitBT770nS-aOtNqm*Ps3&MT#D^MtQ}YXHN!cB{4=?X7^)ky`9o zPM>#Y%GD})vbjO~b{x}ACfRR(aCcYrlUi+aaq*mBCYq5Ts*NwF0uYt~Hc*#62j^Wp zxf^AaE+uLWP{l@|>u!DQD)e&p;Cz}k=>m!sk!&v@covZJmELPLAD2!Y+$MBv$V=bw zkJ6l^0W8;alZC*=%Qvk7iCc03M7*(2A*B_{nrpt?fNJw8ME~M({c?x zSCD~OYy&0BXY;p2dWiUbZ+js2X~Nca|4;+bso%eS{~qaoH4-V}DC$?a2P`~%dAC*H zZ)qpO?g0(3JMd`9B{X9~c|({RvM?I<5(l55^?x!5&T;+Vb|EvhkGtIGuAU!uvc@{N4cx`?oLF+vL z$+I)CavkmCZH%4$@?^?l6lt(i2I0>+LZJE%h;7G6IZ!gqGl?(OgmE@rikOtjm-~yc zb9E-@HWgu75|wM4(JnK+`o{r}`AAh!AuHbcj(soDY_JN~jPX@=bcf~Q0MJ{ zzfFotYF&DsL{^auIDdHslIfa(osw$CvMv{Vk=){A@8|tbuJi_hmrV zuoN|qZ+Qp>n^aaYDkEyS_>VCX39%13`P)KU=%+^8`2`)zXnL)kGi=pGRtItK%Y&!6 zFF-F7LMmt7Ef_}#wN2kJtoUZyCE(Z|z9aTyju18+KaXB(G>5#Xq|6*`?f7;l(gB}K z?xB;=RC9NZX&|-&B+wPWyu$|e_mU_FH4VWSe}kmmD+6?vv+%Ofpo*BZTa#Gy_8AFLbU)$twlrN6G3@2^iJx 
z8OcHC6rQ!poqFI95|>TXId`ue%(67vqj0_B4{Z#D&KD&WPdOWOP6QlwP$dp_2!?@0 zm;hi%_{x{y4&2i-|H$W=wlSFH1ftY%i@Mp-(imc=Rd}dj`!qb6A2Y;fW`CHlPNGhy zv}LZg{f_^6RMEU6ALpLPumrQ1z=!a}!#csxsh=U!XXQUHnvAK+Ud??-=P)b}tmEsD z(k+FJI!4s8eW5I&bUdt17f=2Sp`d}8?mlZ(&@gf-{t*|~E2F4?v6PQlR-IG!m7Y8| ztx}sEnF2>kP424<#UM3w2e+$>WlMC~8b~tEgV65`*^sfO*WM0s%d&!*G6>ii6vh)z zf8LtGH!=u}7@WqPVjvWI5jym;X#nWu<;O20IfZ-B=v%?O%2s-C{z1x~VWQ$TW?9F! zV)`t)PWq^Co@|1LLeCy)e?@D|!7J!1#TqR-KRZSCat%tWiB_H}TJ!AlO>Ewjm5ItG1JW}HVP zn+rq8f^C@*Dri#UmmnT%T+-M*Y9ed7G;T~zuAFwArDWqy7j|IHE2!sg3YBP(V~cvH zHai!bWu9;N|2)diQ&_EQv{+QDj_~%Mv0*+At62iHYf}xx=9kyHUvEH}1I)wd2PAeY|2{#sqgQ>7HE_ zdJL?--Zpjm?DCcHqmETYW(Iz=j8UOc5gIv>b7ye#HREjZ*Kd#B2A{q9)c%$7%gDqL z5gCL;t;S7OdyG&{S_bcfj;PHH|5WJ4<2rcJ6jiks>P-00P|ZW2+}3qivWx?R%U-Eo zeRUFz5>JnyJ?DY+Zin~992p20W+p=--uifX?q%kTRvXur%UjPmwEoF;P{IG$XkzRBdt2Y41Gg=ibho8m%k823M}N+x zRzCDFLOz=KsIcRfFrZn~cSOl1wqOlyaH>xk3+IbLY-Z&%OPuDl2bE$SA!JbG)@pQx2 zwf*WEYwk(RvNkT|(z=4Ip-#U-=gQV6U)BiEwdfCk^*Pm_3oH-79by9UURWW}Uo^qRm7v39(0F#U0bA=?Nk5wb|B25&sq z0+#$ax-~`WqL#4Y)>mB9H)K3sRLGfjJzPmlpZTK;8dG&R24v9D6JeEhy7Gl$BM=cTW~)Ly6%;ttxmS_a(u_vHR9`O!6TdMq52F3D;yXAR#?DKhDp?x zTBWAtYOk6`@Aw#BCw1{{KCVnV;?A)PjpXE_7Y4pv$Gq7;_Uti(+J-s@WgMqPCU2km z3%#kH70KUN!ooVVx6*jizoUtlm)YGw7W(GZ;RMV^$ezr$ROA!%nmI#*`5JK zFSCTa{YJFY`kK6T>Dv&;Vecr9rsPcsWQsW0+}5;$nC0ZmIH*paai*-&kjIPE9}UhS za?n6n#jZB9ltNSn1>&>_zRu;GmzE7p>E6mwiODzXVqlXBvKKU#wZn2Q_m*g6iMr{1 znU-T_CF)V9zo`5e>|`(2m7c%D0Cg~+%s1^lUfgNkXr2-!#c>A=*dz297f<-ZS*$oXccx z{DjgP53FKW)^)IKXGEk>kkXrJ)zG(t?RD7JlPR=Bb*g*!&-)~Bh!_^ASH_l!uX}Wl zAG@wxnaa@?d+j7 z=ua7EkjHJkaZ+{6WC?HJR6^#ZmToLG{Y&z0l&R667d%{1D%L4&e|q#^BRGud>! 
zCT2{tmCsJW!4)~$hvzfTvF-t-6KVLeK93M$mfzS-DR#(@wmLx*OJNzQ3&< zlS20Nu`NDae<>n^CVmxJ##2tIewedBAD8=Ozq1|ZBDzA zvB^r7ZLFENA?GNZbDTHvjI$r;fx{n{8C^N}U`SXUJV}xjhcsw=8J*q(p1Z;xtXS8A z_7|%=cS6^|rF+1c)Ba(PobvK*urQ)6t@DTVlX9sLweO{@A0UJ^QNBaCGD=+2)u>x4 zcOTbfgeAwpmc7njX5%M5HTuUB>=wEOwJzF;Bb@1Exe;tb%k{e?jMb=NwMGtcH^Ec= zI!vH}7vHxxSDoZC+79kQ!Y6KA&pKM}0r1m>mB^xt>)kNFd%)CAV+W&k@Wj>rr)?%m zU5@0EAx9Y}ITPIuAk~i*fngF@XS>|llz>O6K^Mn7aty z4QSr+^4$Z%8YVB?Zj)0SLj1;@Ht+B+G)F z7fMG=$H9{$ubkg;s{o0}+8c~Rk2nMi4;*igH6+G}$$48Wtd;$V3NWdt;$aGmHfM}5 zrI+Pw7-p+h6`NOtGS5Dkm_vc{o5>x~)rG7u3EHobd512-85aNImsU66sZY#nWoBm_ zFsXrqoXG?H@HvfYZnq6O->tyc&_=`Pmz;SvDKAAP`KzQ!s~0fUak6%ZX4ZbV{OhP{YTS0QgJa? zkUGd>YGiQYZNb}>i?%+8JEN$oo{1UA{tdr1U2}5%eChxQc7HruFy{nhoVYT>OM1Lf zVBBQcQn0;unQ38fE=Mzzyzo|v@emg>C##>b&L=yD0@27@6po?cnARzW?CIilCS5@gZAbiF$> zHDj+>sBj{a)^HE_a1V%@SJR|2jUAecJs9XJPaoGR`Xt6Fw-vkR(}hFWt2Oyg1`3=? zvXLKU-*sD33O3xBZtK#0Qsw50nT!{9!!)(WsGeY{nuVLbI2ZL!V_@e`__-g18PaDw zQ(O;gU0#GRnjd~c{kptYKk)7ohw;{cOK&wvb>Gxa{~+l2f4_cqxi4ppCS#mTW?Th6 zD-56;iLyZ5{s^;x7vVe9s3uRUEO*ZI&&}`pdGjj|25m49Vs7nVKHr@6vcm+T8Jn11 zOs%&!5Y)6urTTZFU8&3i=G^^?Id^A|szCNN1H>%}!#r*LGS8D6{2+Ot2!m_O2J<#a z*;zZkFL*tg6;weDUVIcrt)lK5yR%rt#+*>{HUeA=U@p7e?kYP^aF{}uUR`AopCr=9 zNi$tvRa$2h&d6HThMUyncK4|OTOrS8{g+uxX8G&ghIwly3a(YL`pH=;W*yHMt=7Rv zc9SyVd$W;MqzI9rVNxcV3VBBjmMcMx&&h9+-?Vk7lYFk>NyAG)`tY1xPiJa>l{(3H z)dJR7-!m61N>WL{aVm(Hc`5{vD(L!=Cu*9(GFcnLzqDU85=kcO-%HwwR>xK5&JPg` zqPBMPi&SlI(Pn3iJ^J>!RBQB^xr}0P)!QHXNd>IU2D3JP)tSfKQy7P~jaVo1G0j_k zV)^?E?IuaSxatZ@n=6Gh)@nqED*v!zKmb?sl z8=jN6N4BV5{dGx|Hlv`V`Hr5M=?#=?74PlT7Vkhgk=08mMqH@>H_U4u-r6f~$SX2# zq?3L;34;>@foPF$hD@bYH2!*dnx}B~bk+u-@(aViay8Hk3C%<1g2RNXv+ESvCQwdjfp)p+UwB04Zn1~0a{Jv zw3@bqIlMBhe63zgx!aDoZ@!nCRzBN@;bL2@r;gTmebgRRHTNcoh-5uGgpjIyF=yDo zK;K+4ulH0%@u8~3;@^AwvhIoq?6nMv7%gMtX33!g=I2$uq%m5y5Y;o?$71b5XA__3 zo#Ny6jYjMhL9B$S(f9V)r596%7sQ1)=)|jy zx9A-Xuc+o7SLlV|TB-D3;nQx{Jnp4MRog_boa{AmKj6bk|Q$JA&2}iSY?6!3QYHe 
z(uyd^(!zwPyE9*ce}>o`%X!s!?ORz$lNG?ziglJ~0Nu#`p4Z<47c0Qz<+mmVY1n>Cpo-us0=^!QHP2Lx+o_#Q&X`qb^ZqciW z#F^BfRZM|e=RWw)Ybn|IFQDcO@+L#q*>;obgYXxl^^iReeJzZBoN0{9(nmflYNSKd7L)xWOJpa=py_$t3bUK;fM3OxZVDqA&4*%-y&lG18z+e`t3i_!BI55!51Fh&1FrkW*o12OZgiBuF2@gg z;C9(UuZoWFPC;KsmTu+c!qD3O_mQx>q29s_8;`w6Z|qd;j!EvYX6B0TVMu`vakeC{&=)fp16 z=n8+2I0;!9r;d&=D8IdP7Sa<*Myd?|>e;rmIZ`QS^N$BTzNZDEpohcJQH2SI2x8R$ zkD`#3*7a|FO}m0=s*2;n5*9;BpowxaSlD`Vy`2J4HcQZm15w!?<99JXwUXoEnM$g+ zu6dng!~Ek5I*1ZLwej*=&eSLG#WwS@U;Ae#V-LsmuD^f(+2h~Z9sXGr`rmzW!W>z& z&Y8_EvgvT3Tux5HGz5<(wo)-%)lZ?F)Y*5SHSyI7r9}6$AsQCJ3{SwcEKo{bavL6z za$Qcls>L6-@2#$<;%Alp@r(nM*=cQM8a%Jgu5Q6CpZqGrV|2O1@LP`-FZmP5Pv2Ud zJIi-cGdgS+BI%Zks=4s>Bh{dy+N92K3OQ5~>aYeZ>Xryv-q}7g2!Avj`6qy&N37J3 z)+{cVLyiP$J=Y)qE{aceyPEu^0*V5CP$cRGpOGS`^Oh&^A~?s4WC~3iz+c^f7=R$N zjh=$DZ|b)#$Qm_jt(#h%9j8oWwX4sSKV3uFaH-Bfet>QzPzOJ=b@AvY1raQ~Z+7at zY>~A(|8%I-fz9^G%$e);N`6noe&D#N^mO>o8PYGVj@SJ>*LrJo&YxpY1ft6-qbjuM zjgAqeTfm}5Yv#R;c;<L&wz3{B!=hfZiT0jN_`v+0YxsK9=zYkI>nE{fn^Ul|no%`4 znm#5b_BdGG8BOg0x3KS2l^?fZyj#>->ed`$Eu71U;3og>%gx_v!sGjTt~|FD)T5u) zImDyG8pVnp2$btH)5PjC0w*X#p6K)U@yqInqj6o)Vl)CbYE=6fYke;n_PBWqiu4om z7>1$+qEUxo>e#cveZCHq`-s#TGScI z%nRd;xESMuN%^Jd0CgFDI;zCe5d}$Kfym#HF z`9FVlE(L^O0Evbs$K3%BZUMN!SJ}DQJRA`1Pu4<$R5S%3voCxFe7=&fGloSYKbr84 zrxwDT<~)~4+R-CfW?I`J8~UF-U4)FfdoVKv-Ves-Gk>3WX1aQS<=gjav%`?@Jc)+z z=~>OwJsdE?YZvcwci0US=uEICMkF_DW1J{nJZ_{e>N7g+r~D;;Oyg$iDZ|e)5!GTD zqJnLCw;enp%$^%Ko!rjchWrvH0^)|_>1|sD2C`X;hlX%Ym98u-D{xCP@YDfrK^Bk(hqeCib;JaHJKgey8*>MEerW}hApEE=NGeaV5)lu4BUMWxmbM1m8nv&h0GHUYTrRl#sy9#uKLQb`BJ8mKhwqfp9++%L%&yS=_|8V}_V1KCNxCf{dKh$e)CAsZm2#+(o_@nW^XB+?Psqg>CGjy?G6F#COI;Lx@hr>>ely$E@`ZI?Edy*-Zsu|xi|aw0I|_E$UWfG<-%Q1%*@>Lvy`Wej~l|kk%9y$ z>MRaR_6=Ulg!y>&2PL5igYuUtPi}BVHC6}2JSI4sCfn%hgiY=GwCR{1(tbXmsPMYT z=k*ND$aXAlwN*JD7aFFdgVT#+VF%6JIFlf%&O zI%|L{PWJ#tffEO#GqI89q!X8BR_T>R$se?Ob6Q8{odnpgDYwExc5cd!hn7;N2xVO? 
zwbu%p111KjN?#xTt4{2WJK^0f9|SitT7*lOVlU(rF8X{+~*PUQ@xz*Yt^Lg6Nbn2bUyY z!vg6|llefSaxX#$LIPN;N3HuW^9ZQylj7D7R$s`KP=Z~Xv&Q2;t+l@_ye~Cqe7_dN z({{wo1;u;7tXtQc{(K^=3|HlQz=JJ_I-l)l`PwJWK~-2wF5r{Pdl8Y&LUY@<%4g>n!EPtBBz)3nZ)q68;@vp^q{?m z_khpc&l0w)dy4Z&UG4B#1XSut>K{YGaOjJ@vAz`8@NwE(27re=wxOaB*#MHeh=How z0s?)#fONKwi244$7(p|vNVipO)#0xLmgm6L)rr%r_#m|3BnONaUvl4x6}SDh;5~6} zam^nSne@A{$bWl%J_&HM^ugaqKL6_b8?)cU2bN~rScNDTV2R(&jJaB_TcuW~H}9yp zJnV@m|Hci-rVLEweqD`6bicU;``zL{#7UfcS2HK{hal;-bI14*jq2YX_}}0hCHhJhgtEm;r}k`13giNbOu@I4`2O z@T5(0R?#P2d<9E8NU8Rf_t5;=EW6)Mb3-0Ex_^yGzqkhklTT|%4?AjFSo&$Ov>0-X zdv=YD*zYcMc991;22r2})|s1Nn#;csz(uQ^#o0Ezyg971m#}_{W;ud?0d5vR$1`<; zM=N(8a?mcgymMmF)RDlOq-4S=!<*iKq+azAc5A%c`?11|?U0ei5t^|CNp8`#JEQ2& z%@H8xHCN9M08+eUD~&4LqW&gU>an&K*;e(^dJeBu<)D6)1ebq`9=7OUk#SseEvQuB zZQrZmi%PP-hnPqcY~u$3?N`=d<%hdi=I|HuoW}Ht_;*`H$_##$_q)WTTv-mH;|x$2 zDejuLmYr8Rsy}A+=qFDs!SK>R)e7D_Pr9(q*BR#6)v=xTP0QnSIq2Rqy;N@s$qROm zEC#QJLz%1~z3cF~TI_2a!qzI#eu%yB(+i(gjTFp-(zfguZH-u9V!^yu8s#=hEW@@D zFkh*3thsf@nZ;UA|L6TY2Z@B`(2U{z-b0`ikfUF2EJsR#SbCF9`U; z%6N?wtHN{pZwISr!&tIteNG}<9>yn$D`NKNMyvf36JcDRDVd}A(kWjwLF`dGKlrV` zs-T_Pr##m6NblgL6Kl8zA3F#%2wro|Nf_FN3gbk#FO!m5NI{nJ^gB$E?cMiHjBuqV z>Y;0tjP5$%x)+c&OsX~Ls{;k?o#fX$6!^=qIBCAm$8c*Z(s*-_g}9Msn8lm1_sQP% z71KM}g6*?~)+eGaa|qex{YBL+d-U6P&)2QvEOcA*FMG#@)`i4-g3>6E_k>E#VSSg8 zXu3h5ul;5Kkc{sw&yac`*LvMvzYx5`t>i1&*lxeI??y@um7mh9t%8d9-r+vrDP|g< z>zPhPdqt$T(!Ak!iot=C*MJ3!CQK9qn01_n7U;Vp1DAB+RYb@NOv$i^^ll|yl75u| z3nSOiRsyq1H)cuh?c|7nEW4@NneAwA9-|K1fG3ze9eWBYnsyMsQC?3)viJF5q>e`< zb0L!_tS#Q*mghzY`OK2sjhxI59iKLQvbz^X{Q}TZyE3)b_+_LKw1Xj++*g$pkl6n9 zqMvGs_vrNxqabri%8ddX9AvWOkOezC&CC4j=})`{CjQ|)G@+itWsZzf`Q?iR|Km*~H)33OdwRe+ZlZ0-uyk2W1Edj%T?#VDJxhpRxo;$0l~-QnninpBc&UJ>I( z=T>O8#zr*bi#+Z!x@OEA0%bCjbk>+}-hk{jdVd{nV*AfOLvFs-KPV^sVXd0PoNY55 zA6?l$AVm57JPyN4^-x+**Ny6@H-)mzQ3NWLYZHo|=h46Am%v}18q%q8#62$Vgk8%V zzjJ!^gHzcgd-oHLqV{q!c-OXYCfzt7)^63ZATi`LMqjNyqu^StG0=^`Rg)}jBC4Up zhI?EyT-G+aM7S^iBIl5=hF7$R%2j{W6dv?y>w~#lf^x&Bu}@tS;dz~7Q_s$zmAj_r 
zNs44~ibw3@{-UHRYIA3^(`Pu&9cKm46sE8C%OmqLB88zAjy&}%*9w12l^(s` z4Y8l@8l71k*h$cmV|KV3{aZTb9xy}lt5s!}U|1C4ebuGvo*!B}?{ek;&VDYLcgOpo9@X2>^;(ls$rZsEFH6}<)EAr}l5(zKCE;STS zrv^c5pE0!#{_kFZId;t{cqAiRIdbUNmxT^J*{;-FJoh7yAgT&*BycoNDlVR}b*i1&G{V))N|-fETKvR}nSoEB~l%xc_4B^pC7 zrF*osh+hS%Sa+yx-}3DeW|nbY5%o4yjWRZ~(>qnVh`pm7gBePZ60*OKAedMM%DB&6 zC!I~+jL*eYEA{dzD3J;h1oj#@^l)Pz)Zw&Pv-!{$Z}m7u(&RDdP)0i^sGDOe2NpX6 zb&S6rHI^mC0*r)~i*l-3+9_y42^SC4yijtW+e)VK)1EgD{Wrl~{?+)P_>AW-Ft*4j z-da1WQSV|hlFpHkWOJnhpS!(IJlOY7G3D!YrTN~yeDK|%+pt8@__I4$MsKt)@KpzN zqskp+j(#|4+VLs8^4%<#&Nmb`6@4tA2dO72oJxEi^|B=BYaKrg zb(<_S*dqT6pTo||Vwxcc^mhHYi!^PW0(+ftP0;&CJ@WeKNbK?696MQkL{e3|wsa9G zWP325b7x$yP^}CUZIa~xqymcXffJnp09+|JfH|r>hsZhA)XY^ky~>UJW^JBqwEZ2G zpKOP2yywZ7s-dXiCwBf}ZqD71bgnZV!Pakx%3-9OIryh{J}WZ^S1EYeCwD$gQ*AQ7 zoxq$7%LTiN&yc07m|u!k6yBAFMp8=)8iY4O^PZCVwOo9P`;> zuP%AJLsu_B9zXRvD|2${~nNE|2XQOrSh-K z#eZpzgY8sI@v(aHs;5pCnZ;T8tH@5&5i~qyH^$AWD6yvEF4Ixqld}z$M$U`V?vWnu zKyz1v-A+N9M6Ze0;VZDyNZ<}LNId63Aaov1LLH^g)+mZU_!Zz$b$rixtu=$C%0*G4 z?{M^ejebUV2Wir&iA{j(HB9R!V;n71>8dN7&X-j2_xqb^GWD^&1$8nPBMApVbNc$31ovHc3tiuFJ2~&$9Ijm$1 zgKA$bxP|aOOk~=I(DT6Ab;98WYG-^xacLg2MNBO1GRb7SP5gTsmg<5-E_@_SH8L@r zL&BWv=7u#TT0eL#&i0uuXJRrpE>mDz0+=9Bn>T+hkNM5lAAKG2pBk~!Yj5M~a=vuF zQHsyp^Ho9e)1^+ROyo?QM?iv0KAo!k@q?AOV`1SUgQKwcAVWk6CO681scL=YIi3gI zyNW$BV36Pd%Ws|u^ubtG`HR749N1jriMoqqQ#%f%4@aP%yY);qY)FEby;DBUwn>@M z>9MqH2CgzjNX`-2{%oBW=PjjJK;r1W2TbRPB-X`S)JSO8@=RNFiv=Dt>#HS@$@qOO z_r^%Ww0ynJ=wNN;=$MGmQ$2@}ZlVgxi&1>HOMQz+0S)d5jysmpl8T>PH*2XBXzDZe zOW#-*63$Z)BgwZDQ%StN2PEHi#%1f`r8%c0Ilp5^Xn$g%R;tVoWc3d8&FPX6O7WF! 
zHP1(uZJ@VtAE9FpFczebMWIrII)OcA8HW`QIm^zX@u`{(X-A*Et=3%^@E%(&Vpwf> zr?nx*;O@8g#2PX?kK0z+!N?!fggy_J-oDX@TwQAY+%eN4Sg}7H1+_KzaQ}8YJ%PKY zL)z^a1fkDxDAelD9=0psTTjl5?=Xa_bgUm@d5({USZ$&EzMWL-k80hU2tf2OL8z@I zxuvaJH>3m)kO>R-v6iBT82k(+-l>0YX9&iN@METdD8S6{6Y7IqtB9oZmH5YvTX#+9 zbm_QezHuS>gcQ~$@rbm_w!!xbMhF$Mj(6wpH!<82+m={uPw}xh6tik`_6)2>M)Ko5xsIz(^}hk`0e}ER&F1r3^?9WrXrXs1Zg`%gdOKQVpP$S)zCkd59(% zSB9ZrZN~9)EZumT^4AK5VL6YVTAx@c1r)dTb*%y40zl64IuxAn+<3HLHRk|sM2>c> zPmQDupZgTpf%w!a*XAKNp{&yTXLTY?SwC^-e$r@-XZ0w4;(>CP2Btf_-0z7ofT}$+ z>3b6TZllj+h(}rwQS?3Naprz)8`bCxg`^GH59!)}ZA_@Ybf#Y0tk~kM zBum(>v=q**n*u7tl5DXORrC$^Zm?FrMth%n)0H*{pn;M@Av0xX$c^opLmJ*@1*Tfc z(aY98%{(#)osx*jl3OBzSUSeDWoO69&BQC32|}q`Zz1dyRQr`IRay@&XF|nvyo0@7g8w@ecJ|RAQ5lP&SuYCL*x)^ z1jHFTB5>!;zkAu^9e@0)1Hqv1zE%D~lsSatY&1bP&9>Uy?Lh&I5qZ&6DgDmS4k}fRoQlGI^ zugBdh1BHgRpKhd55XGzKR7l88IYTSFFPYt@CnR;UcB^;YVj{-Ak=cum`8|rH=A>%F zPsni!?!bGTPg7&G1biA?uS-dWd3!-}5*#WhKXvTiKGCJ%hNHQP^V&zWXOVM@1+??W zq-{Eu;9|Poawn|Q#l&nw7E;DmKTCQclo|!Y&0u9nv@O%;K}Ag12#W&T>63NI&3!)+ z%XsH$mV5XzCom&10?lvX=LFnzFLjE^7uge1QNC5$X!leJdW0V4eGv%?>1>Jc7;ata zdlu$t=)Rjkk3~O?=FSB0g3ajhzw1^Z0wpzq0uIt|oo~)=+ldJ)r6y@Le(LPXhhRU#@cgMQ*Dq znguM#Sg4+dK3J;vMGXGQtMV^R|5i)<@!@|Bg8F}%(OhyeGp-F5=JPk03LEn%Kuw+ZjIxXy4fi%Bvc!8Ps;hD*zjL8 zz8mE-oeC)x6=22+Rhi{Bi(i^t|0aS2!B&=06|kVkf1}x{R3vzb&Z!rz1Uv2|(o!X& zv*+Q2LR!G3QiqST4wO!jg{$&0!iT3E4jt88II|4PD<{}dlPEaDs9AU$2>M=-Uq3-H zvgo$^YT8I>Fnrrr(otx2sVN`PTVhI#Hkn6?ucALv;TSwrVY7qFnyq{cE=`(2?3q@2 zX1rXTI2J71QA6t%VOS^BBzClyw->N%kH)cqQygEK$BCRd?C3&3hZUI>07EBeSA?C| z$jkB4e5;`&meQA_l~XED%j%w)y$7^uz1-f_%(eXrGwAIX@*$~4|C$L2>fGvFcL{Z- z&D~@Q>6ujPKw)hq=$e@xh82lIg1}%1)k?ElqIK(aX5g|B#&*y~y>nXwmB}9ahs9yA!j5Ld2(66+ok>|Zez~a>hg48|NWm*Mou|!@i+GS40 zVCO~si*XY(BC==-Ba|31P~CCo0eSH%P2maFLFQpdLpjq-0%sq-b(ru*fDvA7w>;q% z=DE=clB{xO1}`yx zO5x*F$(z7i!uM!7{w_1wv*M7|SxD|ZCQjNZ?ITxJuAIWyP6KF5WZLjFSI}ee<_p6J zUJWN+^a41b3dCTr+g)2p^ZX|^pi6JpDQQfkHUm4#$^`47F+B4^cCv*l4o_XWX^L#d-HnDy8~vZ z+G6SLNn*C`R8tR*qh;{c2AgIp?*E6q_l|06f8PaBR8*RX^rnEI(uB}M5$V!Iq=P7- 
zhaP$;3Q7-Eq=V8)=q->?M5^@OLO^;8q4(nCoHO@#&iT%rweDRr>(0zt^9L)uJJ~yX z=TqPJeV*rivmnfxHS8@-oel6xtI;ej@(BNF2R(E-4&{ij1x}q;oANpd1*^0R(N;)D zKVZT)l(F=o*1d$=_#kk^Okq@;U5`f1!Kc*tQAC2<)oL4i&AzzKz(;8$04S3c=HT-Zcf)u^AvG}A#DCC=z1T2dJg0U zEC|U_#xftlSS>PovUhF~Zq6tc?8`Ck0}hYdP<@I5DZS%HxJqSkXfAI~BT|sOu_&M; zg7;3#E2Z5owQNdg&qs+?^0A$zGw^3!(-J4mQPt3v8(QjrY=<_v^MF{IQ9m)Q^}^lg z_+)(sDk_r?|MniQ>80DVS67m~RBn7e=@tAljn1uax;Fj9#Md(O6%;5s@uHwtmeaM_ zWw4(NZB{YvrrYx%M@9KDNp&BSN7H)LqKAFXz(3bcUHESzbwV=GYu&&6-M<~(>tN!2 zTX+Coc8SJP_g|~(zg$|Hf3BSbw|U-k2&3n>&~%C(Fx399o&XzZ{w{(J%H*Cg)2H6GQ{BaK(qyA3&2C;bYSB=ZEH?)WYR#kh*fQ}ehz)48qFN^Pp1RJXm8IVAo4(!7ZBi4%pUws5 zGn3^gpD$mQnK`7Q`|Vk_}E9Qy(x<#Z7+-OT&A0U`8j?-{p8Emk7T`#TnqN3V-qI zSsD+Q!>lz8rS2x>@`4ywS`AE?Fqo;DNCqhfgw7x$rdT8eb89Y2uWrX_e%S_yJRMaI zV}9MTeFy{IAan)$6+&p~iM%cB3$f3%ZYocR?UlTjo1sjbH4E*PNKb9RJ?$mfO#ctD z{znRJQkDf_qCzXJB%zUQWw@7&vLB2% z6kL?`cEDuk5f!J+Dx zzVPF*r$SZyAj?YOI8pI$@4U&EAv0sJPGZ0QFrPKkcc6#)_q#W4d-Y0tC_6k#K96GP{O%o!@`mEDRwOUQPRL#j&$U7qDNdQf|Y6C-Z14e-P^b}Ka@@K zeSzozI)L1?)8W|jk|lQ&gk9s~(9mCTZEVT)0aX!^`rl&w$|BZGNhDU!{B`ycMSl|& zeX;nHGfsV~kn!vCQ_nw*vDffFp?3d)mJ=|_NZWF)Pt4Vv#NxHqNYjUA%T0C-!5b}t zRCIkTeh7 z4`Ms!A8W*Q&6d8%_1Nn*Z!L}z$86N~-M@0AS5t=ten$Zq_cm6Ue_fej)U$GOI#Z8~ z?y9LeWzj2@iFSnV%!Ors787n@LyWBq7VEx|67D(AGFGI`|4me_b;YmYH_@DDfNSPq z;V%JX(>ebZUa#NWE%Owd*{uHygQNKJn@A0E{N$X8a}rFcc9)LXK*b9r$IL2C1^aB1qJ4kBloj@a$?K@QPurBmpjq(2yI&r~fK?HSHT>OxrQlTSq5ruZ~#3iCA_ z;mcBjfL;5~MwyqRQr8cUeiMDdUUUUa<-c#7`AxKqZmKi(3mXbpD+#!676dJmG``^L z|Ap8#+5S7!{H^TV@7Eii*UKWDX(njfWNms&2|-Z>_Y-Buwz8{RI*tw}N(-ZUY-g{x z=`gdGbe>0#C(^kuQ&zThk6Atp&8|8Aacz6{5dvrTzuHtcQ^h4B*l2gbWlG0D8|~05 z-~j5q*GJ^&ZFnCX)l!86iUN-lJbie~%)?4H!`DK$QtU#He;}~rLy|sROPkVe!^~DT zaE{rhwOQ-~5CWfq;P}3r$~Lq$rBSUT*DrW9|21|x7S+=ntnAavD#nWK`;G(pzW8{# z?oTX;2UY)im=jbZI>$DmU`$h&zq)A!Pe;?j;Sy~C!R_F~)U1Q4E|2{&aEZjQdXMje z>V$vpBVPCa{j5(&OpoN80fuYLd5OhsVEV*GL}Qa*b7;-cQ*nDa@VCicjxt#Luh?4s zLvGdIM0s`RH2&ln|JAn|Vr|bo-AS&ly)1pzan}mXD_9*}F8XA`H-|hTjfu!{Ws|5v 
zc2noOnNz^A>Fjj=e~knW619NbKQC4mp{s~h{qCJZRr#ijdS0Nz*ygC`+4Tw+hBT$G z9c>BCi|e!b;xiH(FDB`?YL(5$M{GoVRKiA#;Gvl`05);EbrjGrD-@w)1!wnt-QzpY z5E@#|`1~Ki^j4%uwj-l^P85LCRb{2&(*=x|=}PUPzid?yA1#|)!s!C#{OoD`*RBhf zkR7KB6#&({eFn3VrYxfpw{MXA)S$<-P|kp`aAarjB8s*(s&*2k15;1yfC zkL~%jMCEPTU?v9DG~;%#pA7buBDa_zd8G|$2m9sAOJxHN$D|k&(9*1BpS;$~9v4!R zxmsB3#g%UdxU}bfIMbfKq;k<55sX7rJDLsBIKg5%P$NFoxo}^&p@b<;e>Day$4^d# zZui*9&IYB8*TMK5&vT*%k7%&Df&fA4t+1_rU*|)?^~@V!HPhamb^IEmiAl%2^{C#i zKm_sZsP|@%Ci~YYDe(00W&AKLOP2I~uDej;7v&a$=Y|=LrR{nID20^1CvfdJkE-?v zQ(?-lpgtCh8K&ENuk0kqBv2@5fSIzGxtq|ln6W;Gb)?Cfq!L?Q~W+D z(Tg*j-#6zp+DkeDmzJ*f#RUmAFTG92E5~ua=7D{~wcH$0P5FsSo$`r8mKt46dgpc| z1=xyrvTwS7<$&wi62LGuw*9B0)?ke&QRjDimyUb(Ao`j1>c&FOq;z`?0Dx&HwC|+P z8UP5zCr5j7fS?O+2y;jtzg-lANrwwCWWvwy6T(ia@pi_SO0`!OyppMB_(nZjzPe%B z?)p6C)+@emD-)gXu|7Pf^dzg*i71EV&4?^iU*4L^%t&S^C`oHIJw6+{vA3@YP3>B{ z^^WV#kc!;}1KC^e7{PC?_NGW%(wMOjx*P8o7JwcJ38oK4^hJ8KJbjg=G1nT+D^tv^h0ZTWNA|BVOU%=<0iug8SI>;vakbWNd>bdl2hpNJc3K>{G>RZfu$BVH zUE;E7zIb@8&R3DGJJ6hp(+!pR%EW?SJ7vRPHm!PR9K~IaRL#;@!H(d>Po4K9K zi_b>b)=d{_i`4A4`^icdnmWFe$mh=x{fYiL*-~kFc$Iy4WF}p`P1n;OAlpy2mTw&> zPd%TAk3P*Z_kDFS_?zgpncONH{wyG%&*bc~!m#`gnGF-IwY$)uQ#!$a8m2GcfA4Aj z{;%BeX(P9=qu7DmL{aW&&unX!Ik#Ilmllc7^A)+6Cfc)8QURycC$Nm+)4KZhsM7xL ze)6nJR+k}sGsgx{=7BQ%`Cmz_RurQjCy`RN(XJ4Il;U@_**yxrDx>e6)-7=aIgJJ{ zGCEqqYnTsmA%cz1k8xAtb^593S$Cz|rZ#3!Y{uYJx|9t1bt=Ot4U>Odu|;>c+V(fm z@}a-c&k*$;Q*3@1W98`{t&Gv>gQO>4?b%s(_Z=D00i)60(yZgb(gO}!q8~z3U7o?P zt?9ZuZP8zHz_xEVI?AP#M)0zmmfW6et}}dX`ot9vwglv&K6)il58{qFus25bY)@-7 zAGF^%-FgYcns{G4u}k2CGF&-DKe=c~Ll?(9YMK0X9b3)eH1a8OjaQ421A7aDNFkcy0fA3sRwopno2XuFY*(d;X_VW29l6_h{WYWPc51hnp?+2|K-p64a z{mxXF-Uvs@e|%-0TS^<wFP{sz`Qdc)_dRiO#i=N7=S)uaMhSM?G08_){vn&qa89HNF*EGV9A*b9FqG{Ws zpveI+^@jVehW2qYp|gDxkj@$JgPN2+f1E1XAQ1XXSVTOKNnOngP@>IcCRk#W%+XCD z1C&I{jHRNSnLrK zEM%XB^n)#kGmU=6gmo-X>^?jXF(8?e%dE#*6}Y)pUYkY}+#FTTX}YENE5KLiP3N4; z^)?*zT-~uX=E`8TwT4GYiGUWX1b#NTT@13Y5{VzE1gZJdA~jBH+s}`zAmZ&fCGKbn z@xIMy*>pv{3BODsZBarb;`#+{knCgKx6;`Gw1<>E)DzrbUOw;tv9bWahDzbj*=m=F 
z>@;#<{Bb+VR~kUv9qI`d-2|&#gLP^HoRM5zWZ|T`+)xo#EumvJ;Q>V{eHQU0Xx4V! z#LvUiycp)2+d--8UJm|#8WBG`j2S|UpT}fdwJ}l4EUjbLL0h&2zXD{z2YoYtbl#ie z8mH^R=oQ|y+k;PY+KOVTq;5j@NxVpc@hX~~z|%)T0lv=Iq~w0jH0$*~uk3!)g#(RPkSwgm>eZs2pcXC8iIBBhDpBUe}D#X)LLW3VJb%q*(zTkctV!y|^H? zs9?DdVV=O$^n$1M9stYUk3PqzR*|(v_W4#j9NguTfhdk{L40&Rw*+F z_fsA?%n#0I(xaOM2PCCf+lgy2w{w9|&}=kv;NrDgLJIG6kA4zdw31qVQ%O(?LZ2Kq zCxK<=SQA!wBQp2rPMA>*)gh6X{dLF3liGgFF|S8H0!h#f7@ZzZ1o%Z^Vz8YDS6IF# z@MR&KJ+G}+MTsfMMZtn`cT5|e(~0h6^JG;Vsgp)+LR4qoyix`$%{vOB zx+f{-a73TP3PT1IScmUXX6`7@L(`(@G!(n?l1kuvmHpyVf4!x2(xaX!`QZgw>>lUi z$(R=MpcyQ41tpWw7Ipezh~1A4%VWUjc8#P$uFqRf4lN3KheXlzKMgr1%elt|YpX7F zJe;Rl#;(jQ=K<2N(1~@1)MU>^%M`>S)evmE`Ce@M5H_EZW@?3``fJ@HEc9Bt9Un)5 z+l6?kXsg%)BC*?`_*J7&Z+<_k;-1Q~M0tjubg;g#%4TxST-%4%Vn#lL(tB-dQ<+W~ zc&4LyKn5hzWdLcd1-YlVo>!RR53SYooM(ry>_sY_r6Qc0 zAD%Di9yxKwZbM0VGR)~5Z}4yD$M&cvjGj3Px2L3(i&=RtMB$meT6eIaOH0(B1atF> zG}iO|#qp{}$7wn6skBA*^rR8zu{;VOQ@O}Ws z*33GzX~HvaUM5HiT(Wlf7cMFjhN(RP`y5%(J@vOJrH}8CrB__6^u6DQQh&a zR??6z7T+v_oXfXjMo2W5?A|`5uW^;+`uQL>npQ-KFNjkQ)#9C7?CS_l(vpp2({a*uD3<#XYqcVino%NC9R9g{x7ENi@Dab{&o~ZDeKZ8y zW4iF5WY)PMM12%glC34im7ZeL!K`4fBPCr4G!fgHO^$>{rI%`q59Xy-D)rLtMB;|2 z6#;_L_I=~zXGhB*hC2IvlBce265O^1xwOfuJG=V);nt8l{;ut`VYRE#&j&bEP6|y;qr90P{`QGJcU){yr$jWy<-cCr@hG zBKc139f`3bTf8RMe!F-d1e>`QxibfVoueiFL?$Ow50nT8VUu&wXoj>Nd~*9m9BA%s z#_cQ?{_80Y@-#AO@vVGFY836Kv_XY0Cz6=Pz=9p~qrt5<7b zh{`7G$h`%oI?p~}9(3+gsX`JAQSCIjAhqJ_IE!wy;{Mp<670a7Ne;LMa-J-td6Y4Nm2v7Thx*aK+MgXkQ6`pS|GTAJTxSjWq4AWJkOMqo?I@(Lw#UP26Sy2 zvnPlq!q%}4NGqWo&WPwxD4@Y?r_qSmuTp*CX&eI;b6m)fub`fvhcb1FdaC`x1IWa1 z&n_X?2DdtjcE|HBrV`tqBzUUa=PYBjl~TZI)uSX;v#l=RJiP$B!lXWsoglD2|Kqu- z#=XJH!fM9`xK*!?F3=EG5Sl)t^TYF`@x6lLV#cGtAq)nt=f{W(#(>w6)UN^;Pq#XUW2A1D9T%fUa-7c zWE-!h%q6v<={Fd{DUQ1>%Kj;RiUgxX2rzd$9UbksQYP?il=$PcY|kptXxFzPMMvzl z-C0UtCR7^FTPpliusA-sc=Fee&F|Cp%C?)BH{-hZM#q%AK(kAa)0Ur(nE>#yO#;{V zSp&G$I7;08(VdovcdH9bMz{Do-i&6ZBuaIVX)9lI*^`Q0Z~~<*Z6qy3o~}DG7!N!2 ztf4jLgQ`mMT8*GgQUJxs2HpfKL5ac6AI8_-SNDNKI2s3vt)Q{)%^Zdl6NivC^)HWK8?dbv!6{PBjN 
z5oa%R^~=|hBQK3&wjAM%Z1qTgSH6wiufoENn)=YjyG5fwhG(%lu7?owM1zGPlH9cEq=?JaO;vHTu z*pEyirUarF8d+T8LtJ-vcU`MsC&<|mOtrSF@EPD<`^Ci`HWGSN0V~>4+GrG=7*3f> z6B#rS?+3$FSw-rgqve&O#Wal3&}63El%Dc38D*vIv8$>}oH$W#Q@IWnmwd0mHg#bs zu`!>%!ZZ9IPsIHDt|79tr>RU#gba2*r;gc_4F zgDM-TGMoGPu;21NNfKo~`+_q?`$Ab;^9!@W(iE1Hw$fGvGkKjlDwBaa{NuHCv7M_8 zg2l&{E$WxZe7FsO+eD}GMJ9CIS3NcuWM^{-3Hna%BnCEZ;>V;^m#h~9MP%53YBr`C znLl0lqn`KCF_|{{Da081_^r59Vr8k9^R$cNU3_XL^`~`xnzaUti{$#eY(c%x}yx+ zyt|U*-nz(++g9gvpxss=|Ff|wA#ReY*e(_F6&A$f>fEff6)Ep6tRA2#1DG8ScwM=_ z;&!8^venQ1{iw;~PII?Q*f-6k?&7*v-j7L!zqCfh#`dhJ^>&VxDGVuL&<=-m6x2Il zzBb9~J^5gUX=}J^p?)uLmgT#-{{efm2)h_gg{)s;E3+m0-LpA1JmlmCZJK& zxT^lJ1Q}OVhmJ#R99oL@k!k)Ln|VH1OhaYc12;KLqnI#BZ12Q9x*OtxMOD_+LMgk4 z=|hqn)Uj=ixy53+t{x6U4PHGR5nC29a?C;^#ZaOs>FI|L`+!7vf-vaouo(X7Y~PqD-x3UtIQCmntQzuY(gA0h;;Z(WHp)LaVoGO zb`<9S>JM^NdSk?C@5Q{?=l_Qf?^W7WiFXYiTnefkW zd>B^`CXUc1wN0&~l&up=Gtq52Vk}9-T%*9t?S+@N8x!`?zAAzYR`v9p7Q2GeQ8p}5 zf?KQKx6jR9H?S$#2qWE)B1Vx`Pr_}(;VHJsNy)Q<40voK$JBxJtw9uU3W(}`Jq*xz zJ*=)v`0HuyJIMq55L2>^27{0hX}%8CL zBaN@LG-!P6bpB2B6yRQ}gj*EYvJEokkYM64Zd}QIhwUY2ic!JM9QJBstsk#;kPG{A z235tG@q;z7slmNzsC0$J@Qhl8IXrXr%#iW*tFm>cg6h>)r$Ab6r3C-Ik()44gQQyh*Vlrp1 zZZ9MtM;7Jg?v- zz#DzjfkU(yv-{3!8aw`@STI5ygb>utkyt|*cPzbw%vTwiZ0~O00yo+yTVe2?G7o20 zO({}!91kwk+aHCyPy;joBAQ}73jJuN;~x1THxlJF5xuQ-5ePl<-aMC8ij`fO-{a8a zP%IX9ZuOvwsSPh{#2=RrN!#gYY_7C2V3|L2aJ<}wyU0e?@o9e(^5G6;7VWpOUiOh3 zSi~!w-D7u7D49`ZY)3mJ`H`AHm94Pw5v&f@iGGowT`i+89Am~ZKZZt@y|?BJFv@oc zBFD01%*}v#N)9N}f4nQya~l&(@=#fsnt-Jo0bLn2v4I-(&?+1qL>{EZh=95o=C3Jn zr6yg3r8lAALO&vW5eXD~$wL}M_OF#M9*I$VIeb$5) zt%Q7u5V{wi0Uca}lTe;2C)P7Mk{fgsy@#8VhP3dxypUm3<)>-MPOJ5w!nKj=PNm&| zB+)pcj_U8M1HRN7frFUx`_odJ`(|TKTX)ZM7pkjz8B;&^ESnde)blqcXX|?;r}gOS z5vr5uz1A%S`q3Svp0zeT{`(C0h@tkY<*5|58s7g$RjJDjR2;Dh+`!I*FnK3j5-CS~%Evn!)TNr#i@&A}=vq~lUY)nWYnfh0xr zkWw&A1Dk>~L zqkrD{Anj@?qI7gz#EKmtCORx9?woAh1bdsGZbN$It%9sVQg zP_viKcGV^E4MmCqiVwmyl|x*myrw*Ms5S%?HQKf#GzHOcY9V2?f<}GFSEJj*g#m=#=^s3gL&vAr#?4m;@2$- 
za8R=?Nfp)99b+nmH6{;NMjcZB#XMtLm(je0mG%`BS0dX6XQ9RQ5P|;ira=Psfmu!P zUH9I6={_OU-*K0$^*^AW(m`$}!1CN1x&kj`)}Td3ajs=S^H|HPH>YX#qP|?ztfXcW zJfYBt%!EMp$DMz~(;xpaYbw%TQ&1=Yon8BB7~R%g>7uvZ*U@)Xh(R1OcZ@GQC5>b&R@d?CD;Eb1 zB)<&0KGGPlJrG#46-!Qr`F_H5|EXS$K zyeW61k^~AVd9yMe35x?<8JVUQ)^YlS=klP%0mc;ZDU+3V_~0t?pImf@*Y~W6yqrpf zd?nk~qnKT?+SzSQ#nB-tv{Q(C#p!d6S3Cw2MPlR85}__Y+_N|6ZzYN>G*CN~ZB#{O zspcqP-wrDn){y~hD0LXne~cVBszGEWZ9F>NcNmscDjueyQHMTh$AKCu4DcIB!LqtE ziFX3mdY~)LQpR_Hq6fal#fsPmS;rk(3T<%l0m<_LU|C&rZY_6}cd5tl-!LtqR zlPSTsx)4IoIPn%%K@xX7M^c3!k_a-Mc!p^KAsafbOh9xLBBJwoKpovL-7O|~SX!Ky0uccKdX__}>ttgU+^SywMo;ZtF(qNBEE7s1-Q;M$ zd7$rP+?2Inx%tNuZq}3~3}MEfuX!jawmM(m*}5+5M7=vE5}Y*`edpH?OlexBk-dAT z&x!u*l1tH zme%>5I*N%8^U{dX%V_<3wjw$rih}!Mqne{q)^6?GZhHg`_6zzFzSZh@kzw_Kg;`yo z+l4JwFdV5R#VScRe1RYU`!~Y9Bm;6STWf08Mu544jMGsn8P2y{5*|cHRo^pG%|rdJ8s~;zBKT z1d8x-mS2<0ZrmeCMDg;Va-CJ9t#7(P-V6;nS5ve)On|Pjc_H9C7aV_4M+7K`0+4Sm zP8(g>*@tZqMv1HWD!+*|D^Jt9y8dki$$QHBi5(9gEhA69jGY+KeHc=VTqvb~>S;;! z2I*6M{MPwFe^81w-FxNGTl1U0iR>L}jn_PW6R89^PT^Dd7Jd^&{jv52s5p3P(3TVS zcZ69j^p`ND$7hy<7AY1-|2L_x#-dXl4rdxZGkl^=!^hxNzTq5&R zU5>r#g*M+jMynvhI-jEQ!B!%&#iUyj!QZ{=j2D0c;M|fFANE#V)7n#Ae4Toa*65rd zwQxULz%RQ_Lz*-{55CzkREJukeyw@xrF*YuSn$X)b_P3MCy)$$@Kunb)e^)7tzk)h zZ?vttZ;B4k57XYCNf6d?Uqlo;+S)Toc8BkisC)0+3LujxT{kY0Z0o#XXpZjjOJ%$Vo9Y z!#;PdAAsmF5E&E*33!)SFa2r+G=OP(Y)x5%9A+ zV~TB^^{5HfF06la3vL^!TnSt{FlQIIdQ$smn!LOs07!5dYva!!S`^u5_F)3>oWF@~ z$uWHM4-`I{foCpOYd(@D-KWZLJRqoNJcPSk`Nz7}^LM~6NboRm5GQt^FZvGjS>fd$ zWjFr7F(zMA(1}q{@kMz;c>IWySJ}yL^>@msA?UdBsD7nU2XAiz#-wrbL zW+;_cAa11B)BJ@-3)C=ZAC-AXYVCp2FaF@vTz4O!pDz3|nps^@JJUj!I=XIn$gLfz z=u}u~Hn0DXQpM19ArH=rLhezj1W}lty zO5b1yGs}}9)lZ3(;9{p!OZ^tnzNhU!pY(8(5mo5#hLL&zBv{wh2ZmY>JR)N!)sgBP z9g9L&KgTx$ev{jyV46!~5I3^7GHi6t) zKuMWoa1@BA!&EH^@k*Ps%G%Z*@?M%WS_LYG89AUM)J$z|v9B?m3L)k)dts6BW<@Kg zZf35}dE%H;T-YzqHE61S)uqd;$h`cpocRKr*=w$N{wZ%5!L)9fm($5`{~c)AlM4rH zi-HBG(Cq5L#!zyRTcz|)h*&wseyld}$^>!! 
z?K7|`o#d3GeV-tTJt^t1l^}?)^8^jeG2b#(>`iW$iBuF6>@rUZ&1`%aMyQHHF25#w z_tiBt&(n(Wm35xc;(t1IigQ_8=fL|WMtMXS;tF=rxSU#D8^^| zfqWOwOf?N=p3xSbwGwhYHDs3Dp~HU?f>4831Y6at8Q+d#bQ zygixlB2jhoY;?6yE>9((Sony9giF%KaZjq9=*Mc{{F&{GNsf~ur-{yvR<&pQ@0!n3 z(=WJH*Q;Ll(%Q8Fny9G+GrW?L`l|;A-D)bcS5rNB+5nB_=1(H`+O!2To$lqQvE6Mt ziXF`i^4!wtqYN`U>s;?|Mjyc|DXEU@!bgGDdqV_T+w1;(at~D6#SV?1pyS!1w}VO= zmwRYdOkHjf<~vo@RWrAao#nxdn|Xud-0gaqSE8E|-3yWwBN1Rx3YBMAjH;a=H&|Z& zgOivIlXOGLiq^LFZz9^+4K@|8!rw#-NP@xVNW8OyU9A*v28qf8q%^O@XD}xep`R8 zbmq`uxSqg39h3vU^OPXA$ykvt1$nwa#H2r!fZqa40{W3fwriVA(G0vE*&XlLrC8^-rrgoMI8F_nKYRnfe%+Y@60NFp_m+mH=65Z z{^;$X3K4;CDwr5S^d)WD&n})3v9Vs#UNpd!wwSXz%+id8^iK1bVMQFS>eWms=P;ik zElxkSEGj3m*jrLoa^-JrDW7`#uYGv`f-y8 z-l#u1jEvr--=DtMk`3qOUqXjoHXz7 z5R_I~43EY94>VQ)Y09I%YW+42RB#tb$Ht`Amkpd_<$eUE)Naw)w@)SV^jx2{k8NUp zKr?KEN_cXPoK*s94rhUn=HF7L2;{3YR)U#s-5S0#X(YleqQAVe$f^ZLu|0M(<|=w1QwT*+=F=b z1tn&SSL1F!)GBCY8{RKb#VzlKv5wUu{YIpi!6WKDcIti&EH6B?`?ZIU^@h7hWHwWU z!A4h~FW(S>S!{X7<{S&g&@UiG-1f&%Sa*HJ9tW4u&~R&l&=`}fKaKVy$bp`h6_hLC>~PkAq`= z+~hT6uEeCs^X?KzTHe@sr-)|9ma@CIHeZT2ica>flkq3DtwOC1W~wCQ%@5AN*v{SOmGJk~ zy@bgg&l^Tim}SxgWNTfD$5{Pyr$bO%;ji$CsMz2x2%=R->I*J3oN7#KWn(VGtG~>9 zXt8jl-iX`!f?WSJV@gn?$$rw}n6J@f^{XINT&Z&y?P;>nK%L!WGF_)+?}*b84A}&KR)~8 zBW3MqJR2GO!`c<)cdIh}3;Of!M-uguSns5Oh>s=58-#SgcqOo?(uwVbHRWHIVV7an zx4C?QZRqNb4jH68V2cG(${hwUHc^+>^#=E?`|h2aaC>&hUN&0*P>`3hZ9n2<@Ps}# z!P{>0=sfi3i@fkX{~|uNk@S$SXRt*GEQEXn)Il=LPc7`h=w_;wmpV@j}0UnI0!l9yKP-|Q& zdh3?z_yh0IRQfhtlirw=n4ah?6eY@QJZv1HmwoRC3Z9ZLbohTvdo4d>VM_5;xKsQ* zFC2te_Xcc-yZdQz`BTYJ{D0hj{tx=lTm5hP1eIAI08;z4456%>cv;bfnaHB)F}TTR}B36yAF&*Bm~Do%A)c^79x%zh>sV61SS z79L`BJ#(Efu^*fY1ds8!A#rLno*0PPTVuFLLvZ%o=%_vhW5MU_R0>|Lr`YekCRa>mc8oS0R5chcz!{_7?~ z6#wUJgdHF7<7kqa4@4W3)ezOhrf6QDirZ;isIr&4c9r?&{r7|y1bq~#GeL6mK-@*Y zI4yNu2AlT9eDe%)yW$qOY@(cJc@egdT(q)u4PE)e<4pcvW8@jSMVcx${m}89q$=mn z-ed@R`CY?Lo5sjE582UTj)kRt4*$2r?_D?8$b^Mo=XL-{s=YTC1XTJ?Qfqv@-;a2l zpPl!N235A=wQd4EoyvUvY>a&UkzMP#d-pVeEt`Ox-9({U`dyK9x6ZXEX~lJq1&36J zqz83{O41ZnAG#QU`xe}jckN?yM$|Jvf 
zXT=de+|&sEgc1Yl)9Ce@z;uG(>%!daOZb=G8`~?q zG|Gb>KHD+WrgiPE>OH@;m-XXe#h~{f!y#EmQ#3_~^9>B~zZ$l9agLH=36rVi)Gg9Y ze8C@}UL9lg9I|=|pSF*8x`X*_|HBBq?4|Qn^+VF%@rFFeW)35=_2i^X~tPVh0I`a#jhj1Q6{!$ogpFU2Q( zsGrWZtUUc!4>BR46b?=*SDK-|yUN`$hJ1C)%*xhma4J21LI5(Y9vx>hb)<4>o#^OY z&(XxF7BXcmFX@t(-8mj;B_ukM$Ya-L)Z=(wRC=vK^2sJsMrX=kO-m~gUE@EesB4ms zqyFdY_MhjQ$jf=H?SyxKb%k=*q|OPfOf|k$?ASE@Ttfo%;WibI=jkq_gKpBD9=zGbF^;_={Y^`Hm6I@ffgVt&^^57W{csPk$oH?vOrn; zdWA*4#kGYQ-qRLtRAv1&R{qsdmAi~cTDJ{KgVztosxD;QN}6Q)*m{4M0qe_~#U@ z#SJZfI_)r*R@^x0v00&8O~gPwQ#^IPqk?nb2*H!SvlY6)jB~J=8v9a7P?Vmf=Mx z-~hr2nGi4J)3URXH1=t1u{_M$-bzyXaRWn*nRLac_l$OXYCG$^z@Fp@D8268p#;!x z@^3~dl&$Y-K))Ho<`u8}?Kei7= zy*aw8zQP)#`Ado&dy-tn;X3-^8yfAEkJxu72dliQj+(h#o%UtmceIK+>L)VPrMg^fN63M*6+Df?ILWeaM_O*=W`4@$U%da;2n78XA zt`TUgm?E8@VX)7IXZ=^J8l_>aIf)t!IT6R_3|CqRb?K7*I-B|)@gfZ%Y^Sc}Au#ILJV&7`3t zv*r|LzCf#Ts#qnTeQ&RhSL|L-{|P*6=YzgxDf) z0CT!o>z2c**_j0Bc#+mkH>z~rv*tqJBCY(~)jfc^0l5}{TZ&$YF&xyBYzmvf>^B{{ zcI^BdJ=Z*(%_S6Y{vsPCXRTMe1qxWq9}hJ?;8B?P7-A)|Xc8)lC0d z_vnrY7h1%oEdKjRGBxS+Sf@k1aLjNyA4h<;Nzr8Siz+0)LHO-$!J%&6QgRD>8*CpW z`Yq8_)klP4Qjwa(EHWVA=R(*w-TJ*LB=B7@*H#T48VXigq!?a@OJ9oFt{VI-LqTOz zKjD|wp6CY?0hK>h7n(n8$RPxviEA!WH|pEpETg-TI?Ei@G*)+UdP05QUq!yZ7X20n zKs%b7)vc)HH^k$Y(T<6@m zR$YC4erwrItc+UlW<}jORNEP7_3Zp1)u&p*-66|gxM1CsZKVaVAMn?Qa)&Nw!%l?D?SRPLZasd*``E<< zY67%6sJHGIp+xe}*M|zNNYH>qs|m5Yv@0!VEj005I@xAcG;!2+2T`yf#n6~6>C7H^c^)sf9&%C6{ zvIgBH^DzU1vY^%@$2D50M4T1u^Dx3lbXh( zce}CAL;b@@QY*PGb(u7Z<4KqXnPa6i?EMHAe#yKmMuBTG*<|@&Ui*vUS?x?I6*B!A zN{ZdDe&?ph)jt+5fb$se^r6?rlzjnuNyBJPd2=bUp@7kf5Ub3yJ3^}(B&XyddOw=R z8`BKxMn8p*qhPwcM8cSxs;M-5Vg{*+ICMSK9$TRJ1wSO%b*48b0TfIzo7dH2YO)R+ zG*(DW?Y|5xnT3+2d}}a?dtDPmIj-6eCYWM-OU>UNv>+M|I*Le(>}c2nPW21X}dpXgAdm@=my1r7tFGM-6pirH~FI zQ(gq>llVAoM05nK+CHcQbA)8omF(x-7tp4H0W!O|T>!j4~gwi7icC4Q1G^2O!6 z_kfz}dV=D;Qiiz@PM92Q)o(zu`2N=DG9vum94q>tMJRooaHctLAg8zN01OZ3DuIZw zy3%iCx5nwJLj2c0yJw#>OMh2F6yO`%GLF#0SR(d9(1bhWy9}zLN3ycDzK7j&h!|1yjpl%0IjjQ}( zG6Hu91i403w!gpmaM|ka!;Dp3<;_3(DU7E4D{5@tIinKWLR2I{INL~+*k^+-O+Iyu 
zg90jc8Z`nZM-~2G163XsN`}M|=RdpC{hm@SH7TX`LIlJGtT}vGj7gF|Vg0llEKc$~ z!OSRm3m(ndJu=A3vL0Xat`cc={#UC$xOQWj~L zwdO*GlzMq15oPRxLP}EkcjOF??@sfEh}lB~e*eJa;I%~Rzk8(YZhDLDbCe@`_OZG{ zQUesYMZvAHX*j7unp2{T?S>NlmjV9Y9xu9nr4*25Gp**tYLNx?X_un^z?l+$5VAys z4#J~gC1gq5>CRo7kMQZ|B2MO_EnW{XYA}uPi4GOl+EBX-fXiT^iOYw8pqRGb0~P)< z(EoQ-+5TJi0lwSO>9W^U5nYSl77mpbX(E*hWf@uN!1A&A6E6J3gmnE1XaHd` z9iHk(R*iI)3q(jS7WXa}{`*Q3hEUF?&5P;621i3@sTFMeJ^6oY)AJQas16kPW&N#8 zf8xoUn8n<$P2p0q5{S-JUFkFYy7ccapSxwso7zY4Eu(F{yhxes~+pa_-+a&TniijoLsI~ zF$nLbzKX4&nAzSMQGtW-C!Tt26Pe|K!|zX5N11Zc1F~qahaS}rv8)Z$&DIg&J1~TU z`6onYyq5AhfKh!T-$H@4$N{<_^({$NZ3=rTvTr!52`b=iBc6Ag0EYZheDO5Ix1!Qh7EXE6HUYnel)aO z(44nTmxt;ZeDT8M)?w*gK7$vLOEbTCH6>oSuK|r}M=G7K?b&dLMzw7RKDm>m1`uyS z`w~mla4$5+!3od{-cx4oA1k3M)Uo4-6U44&Tn~&17Pf{=+D6cTTAGyDgWR&3UP;ErV%Oi)e#P?aP}n zV?uU1z+agRSe~8AfI{EQH^q##EB~q(Y6UWy(nN-;14{6=5I?T=9sI9}gW^NrnvU%7fJbwF9zN_{N*1JB(Vq#WE<8&%#0o zSKUZRyq%bgc#}N8MmQEETSkH7Lk@Uu-!euA1o(WlQQ30Lx3pQ4*w?9%_#fo{@LS&EKBw(< zL(a9sR&$<)@Ma?7OMk28d7rfd^4s<^=3L=YL&O0v?2=yOI4fXPkrrKz{(UzDCDOxB zj09*|8q`TeWP@B)lM`j__*r}O?}V@x@>@hyiCQ4DAf_IBFna^t*Vu6s@c3(MUmyZj z72^-7`tcnvwnW3VB$1k)?w&kSI>fZa@eJZ9t1Y;;x7v7Mqfgzd?RhWeK_FHD`YJ`zNWt zePcE;RQc%t;-Y}2USsg3*mNOWJyO5VwYMeEh}PaK*=n+uyJxP2Lsjpf%4M|uQ^V*F zPwZ0U@Tge24uAHh;HC_WQqonwd{CZkDb0l~7);jYi72S}WW;25L7FWEY5e+y-2A2@ zhH)Nb_ha(FR*7rneAAfByo1T2+zr;brU9SlUg7Zr;U&OZT8+)}-17vw1-9 zbYuQs(RpD-&P_S?T?C>rg_fK8dHKWO3Fq5WB@%tYbN_*^x{&Zll=p&F;8kQXMoyFt z9p>4SYs=31iOL$yC3EG#g`RZ?Iy9h{i9NOlNV>`k51WVdeOUauC|pZqPPWDfKrDAGk`U^Cwm2CZDZg5=;V)PgEW2PUS@Y( znRmV<3I8>RN&SuAmEA}`cCt2Pqv-mU?@E-~H`S!2DLYck@XMZ`#ta0ZTxc1B!3?sH zTC6@GYhUP!8f=e=Qg5K2WwVD7+?_To3mp~SVTGFX8NX||>Tmby2&F1MzS?9_QJamQ z3ma}MOj5=TyuXXIJ)w}283;KT;M3@=8=a}Rt>nRU>>tJNes4-sx+8?C*&oL1Q~eN3 z4rbgYeA_K^f5O^Nf{};Jpm?)kk7Yq;{c(zjczfaUtf~tCG7<^&wjk!0(QF-L@h-n$ zq9`_z#CEJ3xbeNlL)kVk3sNt>dli)vY6*=3y|W8+ zOut1!JGDhz3aZ4$A#~pXjAykyTS#5sbkYh!&{zXZ?o&k@smnLSro~LJ=NuF~C z*_7>oa^7%c&9KuZ?&-oiA)YM1lDWx_E*%HHjiR7bSrhj#w_PG590G#tj~YFwi$9-! 
zl2}Y-f7Y45!qJquQer_&*yRV$`ieC3WRFFyPvTEx6AAM7G(kAUa?4|1;Y^Y4zwF>H zWEy2tZ9|9};}wy+(8Et&=m0-!Q2I`nk%sXq>yNHPozqVLNH`y(CX1RV!6$Kb78Shq%ih z2WWBUXq_n>`$|Y`EY51ai<1@Y2)L4J@wsi5d@Z*M2|7ug4jC-slHkEM4OW`}v4dqT!i(ezHki=!-sC%j*u?ZW8EmCJkv(J-V z#jjTTI`HV2sQ2IOi!xY7OzaHt zNRUctG@s)~w~S~XgO%9YTSqjOA}4iWu36nnW{>h%4egri6IejcFvPXR`%^|8N_HCY z!IwC$wOZw)+|fq30R;oy0*j!{(Dy*v4?4)*oD02#?&P)yD5w|D}HYX_<8GA?rl4cb#L>Z?O38FnDw> zKH5E{m4i5$!vp%ghFQOOp?Y8g`la_ z8mM1hw%7C|*HXXClnzjJ48UFpeRbs4aTr*Mv)MH!i_ISfX%Gj9Cx19l(KxFOF?ltV z+YlZkr28qXi96jTe(u1)EqlSbcH`NlYcNY`U|`7AMwphWqV1wzblY(rXLv9Mw0Gxz z!(H4#+@COKR>OB5OzO1o zEs&O$ZTgJIRfU$;j7tWaw-9zzIePia<+#>aJ=a9Ej68$cgY zS5#H<5LNT5C5EJYK6sY>fzV(-u8~y;y0r&PiNw%gU>Y6@jeKEm?F>mI9XXh9@hwNO zmOjxJlZ*hdG}|p3Rrr3mryyH_@@IN35Y(Ztor(25RVQLEPIG zf+{TnZh1nbbR70u+|Kba_^ss7Lkh^gO$lZ=d|wJIo0GB}TdAYL=&`~7Du(1!EAqA^ zFz{EYArKKV`uU=>{|#}T;;;}jl@|%Z{IGP1t4%c2#CVJJZR2&9CaCs3(I95izifq;xxCsNqIut z#DG;*4D zNNl462d=uA``985U%&!?;Oj0ykDwWHn?$UeuFUJp<$j&L#dqLA2-wg`0KZ%_TUDjy6jAX~Ml}Pld}dH$#a!vh5l2HF48bh% z;I8uJFdWj}RCwc$5BO23&1)sT_fo5@w;ir58`1V3HO%tyrfIB>0Y4FR=> zHc@5InJt`7oT=e18wJPRsNec_L&9(?nL1YpuhyA3)ifjEH&Pp=8{w%Q{y-0GK_pbh zK-bzjyTY|*-C$LP2K!Orkr^|1XmHw(`w!#s9%?zYDamkgLEY;2)`d(J8dV+dnAz(; zY&I3!K;l0@q(h#k-dm$nlaAQ}?!gC~?a7;m3k$8oC6MMU)+pOWg#zBKaU^gQMBd&;wH_q3W* z#;lHIx~sdavb4sI{H9FDyy%5&=DcSFbcYw>cp+_w7R12KgoSyk!c#36n4qm9JjM=d zEy{HZj$bnU_N?J>M2vcZEXM@MB^5O|d9=?{J;A#N!MC4u5Khn<5PZ35`2#Hc`TJZ@ zimA+Yy^2;DMBnslY%76vprAB_6cj&fwmBn~mTEBUja55^6 z2cu?3X2%JAz9y-&0Nt1JjR9@w!q`ao}PAFMR-iqP6cdb)30>0^eV5I^Yve4^YkORg*pBGc5pApQIlTvj8 zEllXwZ|hwX7BF=1>h1H1M_#V+g!n@3BVSKn%{St#_19!NKEzp;B{t&o3;{XYo5a6B z`39=#yj<&GY|abfCUgdu-i+_HEb{A`m*~)0E+)Wr4a_2sNCW%i@2VXXbaFE?xOYm7 zD&nX6pI-r(usFm>Pipb22?@t&_AS4XT`{yKw{BD>3chO#uy4FxaMfRDyeKZkv(q+Zf$X0~anON?*h$25Q*DUmfxc zsPIVJ3AfR|-K0*GhJ>MQXKPLx2Ialay|C{h?&fXnPiV}|uIGLyWJ$D(Z%)X5z4O*$ zGh#G;Ukj8nI!xn1-a2Rmwt;v%&ObTI=Te=l8iKK1O#`pscy#ssMV|< zSj3-h8J2A91Z)`}rV6y?n(0tItN2O-+CYd(#QT$fffd$+lI0l83QI8}gXL8Zz5BFk 
zFzB{DgJkwriQ1Aw^U?z4XTxU57)8vU3~L;T0mhguwJb$UpLC_ zmbQ&}b(~dl++OF-&OU`;P(TC>Rzd^oO>V@jzKCmIXGVy3!`~IM33U3YXBhqrED7oD z+v;kYYAh#sfkV1d{qqptP4x^|sqrolkE4v>oODJ9p$VwyZqM(UxvspF$HgIuI`2XY zvL48K<~=58)=O(rh z253=QONrC4RPG}^b*X*+xSqxBDreBnGcq>l#v-r6$hk22!UL5`2>+JSLJ#j~?%IoWF>sae#~gzY4} zXAF&&kB=-fwW+0yiUoaq{J>j7L)*7+-(n|bZJV+_Jng;#>&vqyTu6@$>EAvN zJ6&jw*I|jfJ--q3z`I~L!^XB;W+P<%Z}x?wB+z;{1o?1QyV+`^%F2%K0^+-o7KBMVKAfIM zv%K7JoWC}Ce*^gGhD9H92bS8hTHLN$d}qGFPC$qA%UnQY1J7i^Ocl9YGSu0P6pZTvl0aG<*R(mU$jF6;z#Q(NEdL+>NM^!1PP zMbS_!iI$bvS4>A7b9Zwulur6YJLnRuJPwfQt~V%FamVc+`Gj%)K=RY1|Cfr&E1e&- z^K5x4T|BU&{Zi;dDupz2Pl<(1s_k?^-e1bQuMCQ0JbRig^m`8wt!~|pyXRp07)3juQfRM zb9^@jcTPoKvkR|TU6JK^4&y9X44LocHe9I5Z<0MQb4n(HiUO*vNd=Q^8ueH6&@UDj znX@_Y?BRQg!@SIrk;9%*don#=!6!nWrWEKzX+kZ;7Te+0G=M?>u5c%uy&$r27Zx`4 zVbBf?AEM4V0q(hyj>_GdVC$CYVvJx`Cs#iunj0ry3l}tHB7c5`Vd23{+nCkqlg?u3 zkQx(=z8jzTnRt!rO(cJu;FJKKfi7% zJHW>u|7hAJ|ISu@Pz@gPM&Gf2R!3-I27(R4oB^M#>H&bYK00cXq~uWmvnA|ZAW)oy-W%`GCeDUl+X))+SlLYKWaQ_7zVn%x^sE$Zqa_YuAtxMxgByu?(o7e)&fFN?}<$YwGHg$-51rZV%BNi0>wBZ)b zVpYs`qri`LL%a2Y$fWQvVlC^$-i`~rWU~M9mie{2LG&TrRBSDCd-A(vQ?#?ht&ip^ zaS;imsl_9rjXbWj;PN82v_?NdlxB00ga z?nU(Va7ShM5)bT)N)YOgHv(Fu-7Rfy`#sZam+U#|6t&7Iohic)?Zhp<0{+Si1NviX z`MYHAV#gx|F^xG<=E8_)mH^v8ObgvF<4ptxF%4L#^B&I^rZca@+C5+Yx_0Uq<_v4d z+3O6*5W(y??KBK|-PMy5mkafc;)CB0nlU5C0p~u<(IX5H?*%Njr*%9|@@_+3jTGMg zmhfTtQ%TvS6=8Iy{M+sSzd|OcU9gUp`roZVE{G;GeoKt#M*Qhsbl2Q-w;H-KH*dHt zqLb=MT}j0+Rd1#yN$3@)aOq!Ggzgs)X2Prv%q+iu*4h@Aow0(pd;NjbgIO|p%c{Po zAe~{qiA56fvpu0K-65vQ*R+I8tqOwC0h zdjX_ES35MKF6^3xy>UCYjQq^MG<7Bzjq_pQU`_3Ifq3OPAH-qTrc6c*Kj(Q}th7(@ z)VkmOyylZ6dESHByUWY#5qGOnw=(CeVrMKjZ4kq)h0M}e;wc7e{(;P11PU^@qVEYu z2eUi7Xl_IlDqtF(j-N8rcGl^6fcS6m?1q?_$NIL!{@ngVoLRJ^A2Z3pv(-ynAZ zo((M~UGzyx^0*@ybAEhnlZ2bgK}?6&jzK4=mxV5aHt?yamD3|27yLq*=`bZ zqDWoYKv(CH7lf!<155fjDM85{46{3giyM>#DCKtKvMA6T>tv@_&bt1S5Q_n6j4C*p zoKeqCC3pG7Y7>?7?8X#vE7c~s@Z$V#{>?RSrrF4lbo;%r@RPWS=b;TL zEEZOGR$%Y!&okI!m@Nrz?JH{sQ4)9_@_8un#ALap@X?WsHrl#U4`FVmmtBH9hOUMXZHMx>cm|PE!8rC(k6GYagM2r 
z4S4>_Htl}(4ajnG=FJqK#vuZ-9?)9ZYB~&1L+zqH+Ho0MX>y72ZGZoS@}|DqF}Lt+ z2SEusCSXvDU6Q+Uu8_n^Ifyn;YD`2hi9Q3o@$0-h>R++mrMJ{~EF)w%_6cwYO}Ju- z>$Pp8@f??x{OF&JTpWmE=>!n@kP%th_e31ALh2TXW=04mqilx#@wtWSr@Fq4+wBq+ z5!-E~1$j?XJTXPk8Q`DA;$I78AJmuz0(Y9RZL*}+Dx9HI?HKQTvJyiiql%zyQzaNv zNWVxpslVbNk)Kq0??amGd^C@7>j6XcD^uCiq+c_BjKb}kr5lTleWy7I4C%OiE8v&N zWZ0YGY_Nxh*nK5FwbX6!o`PR23LtLyG!R~dHKp-MaUryjy{s9)LH|;iSn|_wGWox zkXChAvc{9JhMzq_=S|-@xX+^n9I+6QTt^0Ig`Wo3|aL`sfkz!LM}YE*13sb+VPsdjqx^C*VN57 z@nctd$^%WEmQ75W?cqAprhRAf`YF!h)j3TVPVdj-7QaOGb0P)y_CZenIso-Ee0oF1 z=~(&!TM5l+_HXc9fn(0EjvIdLtH2K!P79!tyy>Pv02Y7rBzSm}EHK3AK*6cm(y8lk z6PGXY&~s=BJXt@1A2X6j{jt*y12^k9(NhQ-r&Bwi`?9dQW8Km?TxxXS6zMp0LlHr| z^h$~NG1|4^NHXmhFF$s1O@Ay*!(+Sh@hKeV&|Y-1?9jGe11jnq5L~hTPh+!ixk9Y2 zL$(EbD0AF=*1zR4+pm-S4u!JYGGm5X)Tw;~Jkl|)BNZ{Lo9|3B}R1-EHb z8Yrb~S_@g(em|UxQ>16Gg%2#p6l4bsHc$*VJE_Qj6@8~HcG)xwEAv{+y+Ped$P__n zWI86TGYb5V-yf;Z;tU8OD{508&;nR5%NH9;YsRb#VO@1i5kb~kqAzGzW;F-7=Sg*s9w4+_5au)$;K zes!Uj8EGMKq^H_Dj7k1TO}3;7QRGv|<&M-$$aN9W5(gk{uvejowjTS91GWNSJ80)N zFks?d0&VK|rH`IxT@nYE=&|G8PY&r?p-{+hF^YLsf2W6Xo&#-GWdYHfV=^)Fq*h>h zl>lZqaMt#;Z$-0j8A)dK*H+j|HqO>92LgtgwCv_ZUiNeKfw))oZElqgSJKNd=l3K| zrT0x*wwJu;9!+$jtapOeE6%aPAvR7f`#^%jUZaN5%Ts&r21kh9 zQSAx3yg%vAm!drD3r1~xfJcp3@m%uu?FvE^M+bm@P z_|&iSE(RfAp-bL3yZofd;d7Qe*3~s~Om8vG+G`d=L2fUFel|pxcMPVAh9w%Z9N!Bi z)RMmv&%5{m2ql>*a=2R+tl(JUke&-|XW(ez!bNQdq96{?5~uvBBL@K+F+(;}L)JQy zg?@%PuU6?=4Fy!!gBE8}kk_`06~^uDy2$7R#WUXJWJQOLdBB%nmzepltR|u94)&e} z9?sYAPM8blvQfJ~;C^!(@Xy*px2Wd@Z>D$`Z#1Nz#Loi)%Xf;ZcYaazrCvjaZnCeh zv)u>0y!%N|1NSZZLl427S&nD&7hE1h5)SYOdI;}xIQq3gfM;#?(!;prm5dqJ8pgoI`9d9y` z_Sx%cX9XI;@~UquTcY?!hRe(tVp-W4F^LSgKoHNKF|Xj^Xz3Ke3#9I@!;n4{PR-dT zn>NqNbWyrq37maa2*~oO!O##l>6FXnABYGam`NFPegdoOTp2Kf)Wwr3y$6{E1oO{s zZ6+ku&TXvsm9*zguPj3GMnV1x=F)>a*IJmuhxUuqfyH~zH#$5t6k0@vFZk5}U?c@W z+cTYsXb65r?!Lpc32b~J$h_%{>7<*)ZKKRoiq_tWL*$Dl(cpk~ebpYAHig=WO1(5K zj&^~Zx~$^R*so*vye1l%L`Im&KRwB0*CFlg55ZQKq*aXf3{JUsKPRb8j#vHQ zHF!b18Lgf|iSs4=JnY%EA|w&{Mx56yywgJ!96xL;tu44}yVSJ7r^P%-7uO1e6F3`{ 
zZf*>Z_Ew}r3IIIMMpG4S$6g%t9HBN2vXFhtci}?~DoE(mYgY>4${+0l<`zYF_gdBq zC_%C!>8hP?`IUGrQfE;(0f)Lbb(tqR`hy0?r}Mqjb*VM4{pf#z{Kyk7IU%2e4GDQ{ z$jA{SE8^}SB2`m0LLWZq2DIepZu+5Bn=5@}42`6u{N8xvMvD!8SgX;GCY*0bgE1r3 z(pnDP$V+Y~f4s>&@8|MNF3X!W(|Ro{ltLezwaGDXgCKOf(60%r>E11Xm;GApAUA_K zgHEs#nanV|Si;XcNoOo#?zNvEYB zR6gg7d-Mk0*RbXi4BY-^5@8{bdvJA6X%4!LaY4|yAO00_^>=3@8Uf>3>DIZ6Xq)<% z&c6G-x(9Nl<7n(elx+e?^h``=&=v&m8u z0?0ROvKIW}%5j8r+Pw%>CMMdeEbsKiGDkd}w7e<_u$u*}566f0ST7A%dAk)J=2COB z9ieF!o^SuF3=Z66S*CN#0aX+!Q~LXz9oC^qcU5Qg{SWcA(35d5#}ox=v)Rjd?JR?3Mqs z1Wn6EA5Vn%u_DVoZ`)OxD-w1`xE2mg&_W*n!KUSVrFe=e4*g%XID+V z;GsV+cj8ok3o^3*i=1Zv=%eTA9zhQ0g756okB1JzPX&d!e$B8^oMA?4pdHT@WC4j{ zoAWjYFjcW8f{a?gy1lQS-l^8367~LTJsLYIp-dh-ng78zuL5}~Q*;)M$#HHai0MTa z-CmATd(CB@j-hA0>b;IgeR!)f>Q}uvaS+){vAR0pX+1YQYupvLy3Ue&4o%w8jA`MAnNjo2mocMIqT=M-CGX5?grptAT}+`~f4E z#9WGq=tK%LJysWw0lQ8b_S_U+_@H?Z*o*=w><{& zE8c~U5OEpRf>C(p4_^{pTh+w;UH%2e+Vk!dDtwFKJ; z>Agj82k78B$grnbTk@z9{c$mgvR{PC77_mm$P-_`@S;t?{|%XUwqy-;ge zN!FmZ6!P3>Tmn3au&2i|W|Xe!aaFIeRtb(zUr>;Akceigob zKz}n379cTq92Zvw-aJ@az#J2noNZ+Il~ma4QO_txJQ9aWiV4bku!P(8jgg zrNB5MoVoiBH$T-BG`bRd{EuH=@$cWB%)0+Q<8K8B1aAIWk-qbARQGTjRO`9hWNxgy znx8ZYttsb%<_BcR+B=K1f7*WOn;hD}M5|SP+V7wv@t>-9r|ioI@p!mT8Xf(gq8L2) zz%PO4H)IVcFAcq&mXVrN7EGl6&G7n9i>+_dsP>CX2q5;0ZXa zes4s#RF=d=)*>P1u>g$DAR!?=J(mC|AcCRQ`LxbqV-(J|2og7#)+Eq3O^%G}%Aex4 z0WakVo{}&`O=n7#t1ES2v`dnOWAG*i_F)#1+-m>?pvVmRjq%+!XI`E0c9d0M(%p%8 z1&SF<0)9)Xk<(ge>yWppPY8*xKGG2>9&`bB1I}}TRkSKD>js19zb2f@tP-wO0^FX6 zRgJ2>U#a!=OeWYp!Fsc8*)^L5sal(p)8T#QuldZ80vo9`LCCST4QRCq5mNs7xqIZh zoce)QoBiijG-90_@+&P23&4o1EhQ2vJ#$YX-vYRPV6AZDL`bn!C(HGPR*4BZfe=LE z(w}|brm`{-Aok4hwYpM5^E;1lThZCG`%f6wzq|VMIPM82Z;~fNHcDw!Eic~&(Q(H! zCnED#&k$WLw}}*Xodr#b8YM1cV=b2+CS^0t!4>dK*;aS$c}o`y>um#7v6Zw}9D?}! 
zh)s@&%8!jtBL=yNaSUhX@qq!zjB6n?n-HymCBql(@z!gvq=y()A0jQ>B}6@|1yz&| zuITsYABSBN3>)!(MZ)tDtUmxP8apojGSJC)ECUMAhd*TsdRoD#2vF;C+T;`W3zss@R6S7rMm){G>Y5aznoq@2=zEp?KR3 z&_^Qr%sU zWn^z08*dY*df}H()Z;4d?y{|(?e}Qrz2VUCa5QF?#EVZwa-*}f<5CGIlE?YS9YSnOkw3O-9fMx4K@t=_6sj7RhFHQ`Px6nLUxCY>Ez}u7jgWGD1J>J z!ke2gwBo>`^rrU|;5=YkjkbhN#)wR_k7D{1*^LylGaVZ6Tv{^S+<>;uOKK4o)L=Vo zYk6q}$RTo1q4AAgLlD9=x1)5YEa?yA>2wrnsEas09T~v@r_oMKO9rh!jVu+MmCz)av&9?wl1L zxV8-GrGmIAnSz!yw+Febnoqj!m*le`ie3tIR$V`5o+Ux7b`&fS4lk2C>$#l5PkVS& zDZfx;N%TIPfTi{(?cWADuViqm4Qh~bj2xJmwK<|vhI?fXZfDekyF4fgj1okaGX!f| zbvF1cvQEbVHHOeyW}SLFoVlEc$}c~YTS=b}#v-se$Zy!W;MrWK-^B!DOeJRW{4nZk zH=xoaa-?m24s&hCLy;flvZLV$4BL_uWU+55)D*`J#k?2c&&S4oW|DtP)c*(42iT)_ z+&J2%$M0u;w4BE#5yfRmk57#q5;{(*zr&uM2glq@LvDrARv6zKZg@H@JciL-e|kBH ze2$hUa*o)5-W58CnNg(c@?Yhq-7u@<|79TJ_OtQsl)qt#NCzcLZSq8}JVr$xAR*UX zG`t!kysyDaYrEfc!%CQ{fhe1!y_tWZ@#;28qdlZ(u6M6v*Wk2wiK=W9yYJ}bemCC> z=H(RFbH(?&XpXr?Z0)(WII`4>m71fbG_o|;-;f>D&yGfru`M|6h@4l8mQXUI&}?IA zjb}UXjQnsZWEqWOTGL^9HQg2SH5Uf$lcr@w*r^n>jd3crBDu(T5aHBG?h{3Sc2=)7 zHVy@+T5qH!v98sMh1OV2i=(?x1de64-XSu0Cs5UJoO zU z9PGpi{<#sQXX=D$&f?Q!u~1z*A>bq&IWaa8%QzfN|GxLrs}6vfAFZ@7eK%iV2m6VV zAnDA~);FCt3+EjN#rQ+FzAv4|*WVYl0*b#v{hu3l4AhStcYPey3|ysosW-JBtC@V=v<16&K8IW&=k9c-B@CvHGL=Igv;ga;My9b;f`|vplD`&CKg_j{Z5*E) zZ(eH`ICrEENHK)(;;N*CMC0NPK)6fSZ7ixC=fTr8ut6K#FmqZHzVY4?c$bA9NYj4M z>RmpkmRN}v%hRe^fryGprjYhrzq3_#DZtFv(6wcvujjSh^am&=Rn^S;da$NsxA`Ei zfskh-HaO+>VR2D$&B7m+Y~%=>Rp&B;R_AiSxuqCbfyHa zg;5n>CJ%srY?ddCfI^}NDkDoTHEb>8=Z;eQb$9J;VPTNaJI5Ck8ZglNihDpwJ2P%v zS>hDFX`h{JfoWx6B-#ZXUekMc!be$Pa4w1+rR~1S(FP>h(T_3smXZH?!U@OvJ`b_< zyO)Fw=)47IFU~Jq;ssZQ``ziUNccm~Sq+?g!Q9)y&P?FU(uK9%O`Xw@uWj=?M_ip_ z=Mj>lXZ37@y`BrrEX$}3d0sAxh?66lhAD@Rr_twihK&ofeT@!r>dYK}YcZ2{vwn6K zqnKN%y76u_($;>M%U2ARS%BI4b(m9tu9_)T!7Aq_2Y1=zZgnMr3OZ#{6Vr6H!R&b4 ztL)I2!>w48y5!>LNeIoD4&(IJ#rEpOTflctZaIhDrV&^QQMiDH)fw~88OXj%BL#(F zbb}qb2mjHWttEG+`xqa_rG*&HzL1AE`vGn9zKB=n%?e15*j&dXv99RAoEQ+=?H+u6 zgX&MgVrVMhYJH@^;Z$E#uar=2$V-%K?1N=jv+4Mejo#lU#8>o`gBE%U-z1;U7 
za|Bu#uy94Zwp*GVy}qOvOaDLYy>(QaTedGsHYC8t0t78wlVHK6kPs3G!IBWHa0)NH za7}R6;O_43!QCASE1;0z?)%kFcki9Atl&H&covO_Db9E?5W@)dCnkG|*k8f%Q4KxjX#paFT#hEQ_(77EetZF4?qr z#FQi;3VP*Nv=$wMAsZEQ0dr~ijm}K?t-83()9M~lL4K4*m=eaJRGS=!emK08ECD@6 zoDH?~d7P)_J&XeJYFCYX>Vy=!T;FEe=w%%Isiyrno73c$R&z?KyFj3w6tRbrWJdW$ z&sg?r*48=O#_A)w*x2m_oQb80{CyYd2VwOkhcgX@0ST6>+jG{3dgk+fZ+Fa+Z=G6& zpXBoHu3^7vK`F%Ry? zZ!=B8QFYdaOI$g00-Om2Vc8?kV-i>IrQYrA89Dn#j!&%d5_yFe^2_TahaHet4@uJ# z!Nv$vHAlG6Q)N#p#F=~?UT$1EwDGGLMzkwVf8|D{_q0egn|b&h?x28X)1(rxYxZSa zC1RMOIAC9Rh0<@dH`l;hBkuP2qm?t=Qv8@I4!@Q!Gp|kZEW3V@J|81IpAb*_X&+mg+Y zrqGh}<7uA~24_lm5M`ViziCXo1I8KWSc6FH5xD|K9#4yDxAfGezTM7q7Y6>=Q+PLFjHEcR_Jl-C3Afr&}Un;*LpMb%5VBtLj zhA8lJCU8v%e=duy0&cBKX9ZOf#w2>6L!C5Q33ID`CR7D^qty!aD&rP75^3KzTv@7;#77<0BLEerHS}X!%^(I+}HdWmzoVS(?UraQaN_)xvksZz!bp zLfkt_2s*m-iGKi4`M0dnhdOtB<;#bA%hRTX1=-X&)4+Sm#_J&^yO-L64ssWQ+`+EW ztI9cH4b~dwDAU@AWzIA1_ zR+x1Tk{itXF{?_%7d$%y31w^CyY{cQH5MJ$*vy^qB#aWK!1P$DWUOYp^yQ09Y1H`< z=$&fAh!i@Yl9fcj)jrAk8M`*Vk2{Cz+L7}Pj3ZzFjb?SOehpiwJ$I(ZyJ_Kgyo_y^ zU(Y6BO2Yetal z-vv2bD|l61u6AfWwmOkB;%SjmHY_rDD0_1p->_Tl)q@0t9&gcd@~OnwnkApZf{g#arw1s&N_IGRo!LI>`!(yU#(9gu$@n6Q!=4ViJDshZ*k^{iO}KlGk=!)c^Dhve z#ooX@F$Awk)u`XXFOI8lji||hjw1_N{-OjE8mRlx-G_M8p_6eGB@T+)CA8bgCfX#C zljZ8=_T*K0iREt$)lz!eC*nghHU30-6c`bhrn z1gs3`tPBN6+5{?XvS9UAE$?%ohU~S5X=_t#Yp-)f!3?2jEP3ex!brj#yS@tAwej!94Jhf zB_0&x)%v}|7oj8QqIBe!@Uk5sjC^UEh<%zC8Ab(fxxR5@sK)#Cw0U=0563RtUn(G< zOu%>=RbwFzah;>1u}ND1P8!yalT{KG9@!Pn@wrq@GR5}xOsp9uY)Y_;ud$$ljHhi&42->!DXc|0vV{HG!GKyz z0B0xo~Tg*}8y%fAitb=+CC!Vp>e7pC8hYf` z?VgshgJow9nAa9Ymvf(aV%hZ1bd38+JqdI1hrTvs>jel<98b@sc%)z@-I9&Xa1M3q z%YFaaP|F<Z~r)7O@X+=4y zZySd<>zHgXyLOuoqr9F)X-h%3{M3zve$5T6xfb=+zN^)9qV>h1*;mOhZJgE#?-lUPEj!E6albQrQY!~DGN85 z&7APJY0WL6>F^RK2|=Ib9Z%GB6_S+Bm#a`4s!t4}%7zgtxmwp3LqHu^^cyHBHF{4w zxxPOiR)Prq+ITW6Cmu2>=xTOpnyiI*wPj#1-*nWP5jO-_T=RD{gHo>o+hLwhuhqS; zYY9u#T}q19Y|rV*qPum|4KB_5lxlKIE*DWXKO}{Kbwsg;Q}sY&jp~iHzcSGTBKUa+ zE-7=14sVD$26G(PqKZAsiIU70%0EVS8Fi0$UyAJ0IIctICE)g}`^&X@oM(6_wk_Ec 
zq(hA1gIb1S6nzP=1#ZEV+*uCXS?J9kW9!RqsoEJ(2iW748Jwg5f1&=&rq^#+sa(0Q zenTl1y_&vz_t5&FsAH8xGQHm|2c zbqB^B*{RqqhH6!0cndW=-_?BY5$39@dAqIy)xygY-=#H{T?!dF{0+tHSogb0kt|ZL zebt=I!beuA8pZ7VOq&6_toz&WmkMr2eZ~Wb8c$;r3MMRhL`-3PF=jFM^D5%kYz;dK zzLa!O6`!%Puh8M^UU-6J;^Xo(6aYhru8R~niChEofl;twoEL|r;o6b)#&gBUViJr@ z!VaY_pQBEDgqeO1)t;qSBcRovjW%VKSP~l%GEru{C|GD%n02z-3K5=>QkuiE&lBwm zHSAy?e|Mqk!8F8f;)shvbP%%dIvKZUEy3ZzNo=3Ry}Ac{rgvECjy_h1%@|_o+$; zGcyP2vA*1N?A2#k%h*c59Ut6RjY+5sbx(ID(!j&}I;@ZMyHG5s*3*UDZlqU| z(VC_8EHr+Gbl-X17pfxa3_8Vw+bDSh#-jJuP7y}3YB>Y;&&SOisMQ9mG0g~o)=Qro zNbo4KY)du}?u?-rJ5EscI51D9PmloF_4bMM^brKN0_%^HyHv6Zxfx#!kLvhx1E%i=`>}Ac9!A{%B)cqzIw~U%;j8j-*|`U_R zZ)l=NVmRzz-j=v2QdeBO=l!JV{=ITh6$tgPn0Fg7@ibo$PN3NoKI`D9fmNL0!M^g> z4mI3tlaZa#^wos9-ssYVT3EAq{t;_{l4r}A5&k1#^$+8`g0NP@MbbOE`Kj_iK-6y( zOTZ6?dmHf*^jgbWqJT^$$ZQQ4uG1DPpOELvQ7UrHWdbw3kRjt)-dJ`VV@_RC#jpFHXT*h!&k~zYW|PKjd&OlNP@3i<2wd{m+QKH67sU`72 zsXtFR2eR6+`jAs5LhokdX3853V-A>^^GPnd?*%Kz8 z@RY;~0LWaaS=xx^kMGOjM_*XH}QuEbV>M zvkBB=;^vy~VN({?7X-RY>5o;z1(T+m1UsxosJfPNzNIV^e@9+I6^UlI>q4t?w!kAF z{AlH|a3O}A(g1xx72!Op?+pQZNp*{npU}elw(JAL)0WWQ*$%yZI4itVpD877+xtN+ zUBr|F7thP{V2)1 z;l?ya@>9Zz#BncZLNG#cW;Je2ToCW{j>{Rm0me z+?aOzu>_Z73|_eo3vpkT8p8#BZyal>8d7qy z-O`QbuOAY7ojfdbAEA)EHT63TKwv4~r3i#vPT{Q*=I8BUiB8|XNxN+`U9RlGZzWH^ zrE~I}YBdU_cjuM3e7$1|d^J+tif=6T)67zIsx9Z%{Fg)C-{T7C z9pU_IpTxZ7b26(~Z`6T;X-;T(RiDMQz9L!>BJ`4JbPsX7;eS5%ld)PSB9g4rz=6|yk%!TIV7YBP&={+qOXSv>Tr7N*|DuazF}M<_hs zQbya#>!X?1vVM@Fc`Y7e}a8v)hHcq zOdUfKoM5WS{1FEDUh=1Vzyhq7cfV|pH1qt1(r23d7i{2Su2#9Gr>Wz)0l=MtWxmH%9UFaAN>p zzDDm0eUmCN17FJ$zFdDQnMybL?eKK zDM5mUTh{_9_YomD&@Z@FCbQu0wsr zUsI{ptmkvBu2fHWLT2+d3&w;;Z5>#IF!@7OBsKe2TUmmf5)KFxK&!K_@{Ch(Ow3VJ zDihmep6a(-YNctF}FN2~2+w0{jY?YDx1BMVz z1!aL1Up|X%idEgs=wjwJ^s-;FeVZmU@7Q@@5j8XeJ)RN1B#!*rN-^qGH|Di(=<)c2 zpfxu>fMQ3*oU>sMqJj!Onrszue}j9fa?sZQ2i=~ zy{TdZWSrwu0)CF+q|Sg()!Pxq-qlI)#EnUDY9rYpc%FkSy40tylKfMOz#hfAW34Fey8hL!agPyV;{ALxTW?NECMA_96#G0qV0-}dtX@cv z2|{|TIz!kWL6&d1)Nkd4Z)xE7~G`*#r)#J*4&`gI+O5VWc$p37P_%QYr{qI@~D 
zcy&IG0aMbgna;V$HEq?}T)fZRS(eN|VQ_7-PgC2_GVTG|AsL%k?07uuzScB7W0bvC zwOYK=UiZ9R?E-X_31DpKXzuTR5VmG=BbAGT1gb}8_HYT z7JeaqMNZBk$`D&K@aLn*OlOayjfyqLNWR|D6$;ePP#Fcw5ew6{`D`DID46rAJ*8v;Y1VWS0EM;clzFvAK|*Y(*?HSS zSFjjRjSwe})A293@#UXURb@FP#~&?mE#A0DD-&_%C$RM|q;_I)! zB5mZKZEz6por$w1e|~O1*`HCpdUbz|1firm&2b5D1dgWGHT)odw|7VvvwlR7cEr7T zA9S;6boOlON{JT{HMpbpV&;X~yHCY>?9%!Ui$+5fA+7|kC5K!vM5%5Lc3AV1ln9@f zD)KHA#I3#f_9g%!5pX}p)7qsh=C8m-p>W=pegC}t_SM|?JZraKena61Vk2gbM@GBp zBZ+s2RPSkpd#%M5m|`yrdX6w>-a9|M*CqzB{@l|k9PUvg$8n?-1}7{@JH@(fVXab4 z(HBVFi;}rj0$+Nc(02ZYVwRu{FQo2d))e0&xpGxAF8_JlpXMar;5Eu|d|5H3_w%@e zeuQ@wCLV!B9*>jl(9^Uxjtnw7|8Cr$r!2IC_p)^WKJYJptlcEl7mwJkm>|NR2L5@< z-}6NNKa#~$Ek?HoMjq+X*Tl=9O8$;edZQ!0!x=l#M12xE&bM3TQGULus`0((*Mak< ziPdiu>&M5`N`|MpG6s9h_UJ(G@wJ@o2_HUpVx>Od*b5NwV4+s!j=P-N(bBzKO>u2a z(!VjFcf*>%fTJday*yOlh%e!Lbx{55v-(U*VfDR}w^s=dAORat>G|q24LL*StHqRG znYce#3$GqQji5p)tgq+P%_HxDsBFQRbY`3vP26wTFkoQY=A)*eg0cQtkOneZ>L4B3 zfXsy>IKq0rIB1YQvMsRS{`PRUY}RttGjon(kcR6e1D)QR~-q%^M+=G`JL==0Sjg!I!Oiyu|LIx#QP{rk6E( z$Ft9ND+G+?K7z()h`tzy90&)C2+nhjB5XcOt958OS|N%}gu@=&z+O!XS#rirHfz$f zT+dDP19rot&kAV295aAp{qfeb%TW^9a+yJuWPh@?N zULek~(1=E!7}ENBzlB|mo^DDUCLxW_m0uz7-dY0VRvhctWb@I01wzv}@6I>{nL5L^ zFT)Iv>H==Y;t!8M@y3At$!+fACgvgU4URv2citq_I=e2bm=>CiYN0egI%P!(G#uM- zL6z9~);_9l4P>hN{GH|S;@qUGZa(zDi3LB*@OWgki$hW{2WU?3EM-$&mZE%OaUe|3 zW1pXp`uONc0jQcy>QiZio8AC^+px^4@QSEN3I-GdO|Vhm7d|Q?$ADapjS5v$?Ed*g z0z-p%n$G#i2dr|qu5xiwV+}7U&}pn0_8+Z3X7nC*cuik77cohiLbPKR6HyuOC!!6| zll{=Smh9ZIj{(!s@qR-K)ntAuin~BKzf7KXgvZt;%j+h(NS{18T^?%;E9aBfkO>*S zM+7y)6Pi@FQz=|?z`eR*HiDY;uEY)Bb1$cIxIu>9xwWA~YdT)E?7Za)0hQyk@io~c z-LKlHc}k!e4Aq}wELY9TS&+W1DiKzD`ls8+mEuX&yua$Tkgr!bz%=0RjwNS<;9p6z zD8g`nS>wK`fXUUaZsRy-Gaj3mULCGuV)ee( zxK`^?!dW^i&8n)~Pg9p>G1SlEr@|L%b17~!i}m!w->j}m$*k2 zbPS6dJqEZ>x0YIhJec!}`Ykaa5{tNL*OKm_5|>b@$C`wuFpVJCRt-{*$&rjU(#8wz7F)1@ zp$`otJWB6`WhVrsZLIa+gD}#3Rte$i!*e#<+)lCb?oVOJ zh|pe!BWyK6qCuR)j+ijm;Ga#rv2Z zai~qYY$&|%WM5PG-Bp{Qg3R%>8%!_1j|Tt==FJdP$q)sVxMTgVQB2$H!70d}P1kXR^iJ 
z(kHYXgle-|K)}IeP=OP34<0oRW9LHn(Q(uevCKSG!DW+ccFdIUq@9X&NWcX%zW7x} zpNqtlHc8mm6ypl5(KD5IAR9PCAEJA8Io9-g^>y9sHG`$-XZyp;8ov^j zAs~L1MM`HTGlB)0H~2jBIm}T%MZzinRm=IONH~U)jgb8Xw}f}tuT?3{guh%$Pj~cE zdwYoxqpRc5xD(iw9iy+(jJ)~sqd58Z7H_%7bbb^w|7d}I`rp4JhC8tvtF7M>g~!gW zvYqs*`+I(0XX$3l-M05sLC6gG*`m82l6gkniEho0gzgsqwuDBlaz|H@XVUU`)fOgS z!aY|M`~t4|rMt)e?c46P#et^BF_Pm4;*U4K_8!w2?u#_e^Rksg0%nUdH#WX4o;m!6 zGS9+B`FTPE8V1N`o$J!yRNl8pxMdKRX9ZmFD;maVYciqj$lPS+=AORLV@kg9lD3}Nuv$qQ) z+&*2V<|>jyY%(s?2@|LZ;1Up1m$_vJp$Ogg9vl0x9@`-`%7$&U|Yp)XYj8 z-kFl_y(uy_II8nX(pPqFVE({WodXcTa2Pa#g0>$GGk9Xv3X-u?n#P|2t5Y6@dH@Us zZ8M+0yj}Z3L{}r!4aBc;cmqsSI_Wuj z3Kf;$ngM;|C6LpPJe`-`Lr}PFNI2K4N5kuf>WvB(mkq6+^D-c5o8gb@uZEJc$ow}r zRgzRHiL}lhMvM@yL1h{D*dj{zoapt))eX9p@J%(Q(tTSshWpcCF`Lp4OOzWco;FC? zY9#Ki^$zQ;xU$s}zO%;W+X*GeoBXz8{w$!!WRHZ3_}vqT|G)u%TEwx1OmZKHuakcn z+CLrAw~IxU`|FG9I{$mdR%RQs2(w~+w>^2L>QZA9NOs_(5tf%!SO(F1Xa_ZWmTYr` z81vCMO$^yoqSJghnkj*VG42p@^`)lSM1>T!EL+?c7KfV(G;E346uDo`7h0tbJ>!Z$ z`{qDh#yCJSP7L(pFM7l>d`WdI_07yg9fI27;3CpcG@4d;ifwmBad)k3c z#~^sC;h>87E0c-nsM%2Bb|lAwj_px+@7ivtGQZ=zJ*=)_EwpDu5^_8gTW_mby^P-j z&fF+i3JKZLTL&vJ?Q$FrBoAIVs+&DBeb&yDpYq(We#|LZF=fIDTRx-O$AV7AGGR*I zd-fMqY;$b0F=?_rk*1-WK(1C)+b*ip!q7p)4t(_qP7|0pU{qf}2ho6O1YP4g+3$uP zi+s8LEG%Gw+x>>ryUXsi-jgJ}UtiQwI=T2;ukWk&EeASM%qZt#p)y89KHd~Zvytlu z8|%g&6NjAdSu+t_VA=?X0DRg^DDJg3k~vJ7SF+0Oi`A^B=1>bySsiof8FhU24JRaz z9hQ>>i>>CL#(7zQ24t-K7$xvChK)c5G`-tuOtlO=I7ZWz!rk&;MJ9g7MXMA^tj%O_ ze93cJX-z>i9>-<^1;Mi-u}I9+>b4#}e=TMBO@y^L?wgsXOv=9EGm8TUubOXWXtLtG zQ_r<=FIBu(o37Lyn|ult5!g%v*`*&VF@sX0+ny;G5H&AW)k|`hTc255OmPI<;{|`>}{}18F{tgz#>P%>R zmdJsmFFdzApMLct!WH?k{Ufiu^FD&PPAnbz^TVSw_-`mARUT-zkh79XyO}9Iy=+i1 zIl@JSwckFJ6byrkEKq)-F~c)G8l})~|L#VOT;x3a8KY;<)ZElxKz^ks5tfI5WP-Hd zg2J?wUuMJI^r~C!O}-flxIOZpggt|^7w5-t&E)Mrl!^4nfdy81*D;myd4Pb zL0wyP-RLqWT;wbgF#5)C4aUji!tR!eOjMLokPDIEv9GVUKGx`gR#&#Bv!OM*s|bBO zlxDMXyRuZ;b|$qqWtop6QYU))FgB+iz*0v|JWkl}aFfTY70V@rpUIFp6TW_QQ%7}W@T+Jk@Uj^DQOwMj)r$n$`ArwvqAB2EBjqD zSzNJCFCjAFZrh^uV@f)qL&dt@zGRS^bsOn$pJG>BE3g4tqh33loD$CPrDZ1|azAV| 
zK{p}tyul-WR|`u7H>KDf@?w}vS-Te_AiRErqQnsw7p3YK%`)7DSH>lqtQJNMBphGi z9C%!6I2`SicwZ)*^+D-Riz8E>3aK%c*8S3v=@`+7t2;u&0JVwE`+n&4Non*JcTz)o zMKp8dth-^vbjavkO|o6%)Ovy>9|rT66@fS0dLbitbJ^vg=DiQlw~~aG-^_N;4bDu> zsu_P=KS-4AeZ8f-xe%MY3M*cYGq2x+Iq&3m@AJR~*wR_Pn;a=_0)IoHS0H+|F9ICS%@{lHJ~ zhk~pZiL8JzTlw;O%Eu0TT{m7U2d%oh+DS0M6-4Ef>!;N_Ht4rvH5Fxv&S2l1X`uny zpr~d>eZUZ?Nm>4OYH_Rz=Spo51Q^t?FSAnjB_Hr_GM<@Nu7n^@Q$LU?>-l36KXIs- zN#_Uf4c~k#ZTPMOB5==b8U;-nV|{+*RgMpm`AWA zO@_oHJTZA3ocYc_+BY{piS-Uke8!s0!Sj-bH$Cf|TIvkXS_Qh4v{H)BgOJo@HHNSv zp0fxp@MS>R(fgf0sO>qJ>4_RvM(LKDFhB|3yJ7qjHNrocpPtnn)Y&yrH$4((_AJkf zt$Mc2v+j>Zk;ujUp3w={MqtrH*qvU*K@qn@6(X+B$a>bvCg$mk>uw=Mu&&2b*m?7i zF$yx6;vC(4xXpIRjk`S6(E{nVlo>(eON*?IOJq=d(L+zc4@r>h#NY8GUM$U9`QGe$ z*U!WsM6@z-ZOL2w!oKQ;7tNARZNumA72=a!qz2;czH);Af^E4L;X)Z;aanHN;Fa}d znnyCbR586<&K_1&L>6|i=gk(M%r9NXZtOu%1!!5{#rz;t`*KS*CfLH$&?&-1gWbkk zEsu*^y?1z~{4Cou$!3TpSkBQE+v+fJtbYZCon1Ld`02L|-VzB_BZhKlOX@3ClOV+& z%}6C!_cooG=d)fXJWC)RO9I9oO7KPAHHCpsmg8C$Z%#?#TR450fmtk_Z(mB~9QRJ& zSig|KhV7W6I4nnXmV*548GqyI{!%Y^tu~@#B&h88aHmp+B1{LDhF}YZNYO#{?I{yL zEr&gAV)veKcHn^U`uZZNK^u(vQ2NyDBwJKYBb%d>X?^(x@A27m7-Fdb$xS)b_%aW} z!brmSu<=Rin~w}-2rM>9DAyx~o%XC3ptF3aIw*={2fCjo?uqD?tc6W1nBk`yMKQbe zMT&@?wddUvdPBa0s4xmRDp=oj7@8#U@(c!=h>C^B2vNAUhsc;!v+d?EHKueE*9-}r z^lvt{pQ5MPFXh85kmPlsAirxVC77Xvi2p&5wPbI4+1y5;Dk_q!Xltnl70}_kx<}hl zWm!%u)z>cakUc$Bu`jU3Vm8ugX{oY$39gp*LWUl4nvA7&*@p{N#awo~*!$4T$>UI< zpr5LQucLC9+NEUn5?N{+?m3|h40<^Dn`p^w?YvM~D z(m8dvWTVRLo$c(RR5OVcw_xg*q1u!uMMR)^O9N8cQGmOW$99gq9sRb zuj6HN`9AqJLiXuIL(eMs%#?ibTgD{NDue`71^sbCic!q?W>J_441=vc?OPAWq>h82 zz(StIB6o=oUzVyUor5)2UNX9Rs+sXIm7$j5Iyy1nYLk&Yx%{4eEO`6q*HF=Ip}=UF zDW*yhQ2Jo`NdL4H&tRlcW%x-AQ&X+=8nZ6@Frjg`&erz1R6J36WPDyf_e|qXJ`%l7 zwdiz_o2+l;7~OYohcz*YZ#*7r_WMj`zp6mX8ShFIxAj#0_c|d=>IbnI(YsaNR7=8ECa#e~J zt$UwIPK`FTQk+|DUMaOu!Ec;{<8y-wy6g*vO^sHowR%kkM*^^O_9a42d7!Q3x9e%0 zaEm#5N7^AVTY`d#LyuQ;-_$4UpKq+^yH7f>_3obCUzfA$@Fp{+B<8q)$VbVwK!Q?6 zI*Nk(VA+X###B6egcJY==1V^O+)1#0doGf>k9e2{D#YCEXZ2v$`sn(mXlg^b%66yY 
zBMA~*8~`?nQF(Vv&i8P{iQRg44;sL59{KL@=-=6IJPea zTusSb3wx|$J=24`+kN~IY}2oCk^h9M;TmI7%(?hCl*kQ%w6mE}$@H5k%iCC*sBYK2 z{Cl|Ak0gpzps7b4(p#zjm*j(gpqpVeOC4VD*k|}Xa)kxJd{VP~6Cl=(j9k%93lX^} z$VFzb2jacDLo+#ORubzoP5KeI@^5_*Q%D_S+<$MCRx8B9zx5PrI2czp{r6SXhe|)v zDs0Qq>~$im(qi7V_n5r};+OAHraQcTl0tN-?MFeemq+*k{Vzqt9rTbC0yl5$a;%+!!DHJuymPqOg^qeV5{Z1(07HAz@8<0{ zAHgZi4o;`h3+BNN*P`8y!lc)13#X4f-k1!@nPSxkM)ippFFyGwVPi}|3dc%3VXn23 z5#Kxe>X57)bPtKT+&MT1Pg{!IWGJQaC;qAM%bGRsQzFaS@WI1~EFI(i#^w9s;Y4GynH6B34x| z6eAu@ZbRQ3+j!aSGvMJILQUM!fzTYql- ze|PPBcKF|mQ!hGfv+(NbiG@pDXkNb?`rFt2B}$?$b)4Y>{ghqr_mGLdKF}0o(EaV5 z@DDGffBE=#%4nem=e4?tSd*=oieAOBI4JfH;DCH>Ad($E@usu&84vX_cIon9>~{|3Pei4E6btX2YEN4p5Zq=Py#8-c zY_(5rY(frBRp{Wc00;?YMM?@dca>P9U$W6f-D}h$S#uudT*3yw_0?|mQbmgi)|8gbJs7EhXs|S~MQM1}#a2Wq-G-Q{5QTEf$ z=~JB(ORo711txt@;K}{dG5Ow^WQ$(GZ6_az`UwAbS9%%zZ(B!IHEbRKZpJ;b@^8~Z zfBOS=fcZ2)r0otX?xzQg=Xrm`p8SW-DCD`t^7Fnan89|BnO8WV#6!~5#&P=43lce_`65C4IM`IA(_YbPncD%=TuZq@TzpGD6u7r(0em$P<&R*eFOR#7D(d_zC%;lgzy8Zquq zU6lBFdqf{^_t^;xF45>cZG2OcScx#HR}UjTKd^{cFvCvj`1*_Vq813zoW5%XMM8x+ z`irkloe9ToKH0fWkvR$M)qFiBB=u@eRbS`fATFSa8lQf08GJx<^p|QoHwDJ4uGc`V zt@H0Ohod{cwwO`|e~$=4i*Fpr+G?=jo8?b!+Y{v#^`b7^apD+$_G4_KaDUG0mfXl& zc(S%)pS!gh#(&#t&-C^kBFzpqx;1K(w)dj`zoGE!Ri6Ej+Dog-ZE%+8A@cTJMXxfg z=VyuP)BWoEIsMbx@j?$$--5-RN?lD%rT!4%b5y_IP=LqF9u*$NB5I@HzYHeyL!1@( zkHmhg6~!M-S!f^sPc?Ft{UH`*_K!aDMfE2fX9R8^&KaeD3VRpFOv_Ue0?0>En{-ZG zXRd>Coqvclqw3F8=t6--Qa^maqa!eqV+{tcp*>!*96ICQEjug?S{!e(+z$+6k??=94P(Ty zg8-NarX8qpjC6N?G0fpiu4Sx4NM6KiR)SFdXD$0K*g1SOINHu=+Ygszz_&v}9f!@| z{Ypucg?e%mU`3ReQ9A>Ch%*O`w)TCpsmXKRY75HkxN4lT^VtDzLR5=)R9MMP&q;<)Z2(C~=(jTWy?jT*xVD7l(m$DKms?jOgEPHQ_ad6BO_F1J{&;TeW;{ z4T^ys*i3r%=v~3vZ|$~IG*@afBN{zjP?Hwh7QjR<{=z1O9s66!5@{|OXY8e&Rs(r? 
z+k>#04690J`92%@&4Ba@6zpK!!&+Tq1kEq4;@PWVI5}O0Rv!;<|aS$Nu( z4PT15>p9uL08c0l!G$OM<=!r+Bj|3hc*Yqd3b0xh@m(zm9(;?mrY5bN?HkPuY3GS6 z_}(skz+4X)Pnh94BQ*XrI4a|=sL8|;VH_4q5e@IDrjtnh5TP>vUE>bmzqZB4Hr1C!_-#F zm9qd9SaYO05gmFCI4)mAKBw+H!%ul+mHbBOWNuY^j^mvxRDB~iW#rfUK4-QNiNl z3B3}@DdFiXC2f%uqPxh`+#cQ*kkdcbFJ5|_w6a~?FkeqhB}3ToI`_#ZGouVzXt(1u znIQBmXz`|d?BF8d>JEo<9KkxZZ{M46v0$S|2fRXe?uq!f5E`?hYI?y4Fdd6m!;9w z{;TJDro@PRQ<(Ky;k*y5LMdjlsHAV+Ui68@S7E$QwdUP70XgvndU0G}gtHGL-3$dl!j@Nvmm|3>R@HG^k0YaEeGSp^b(pJ;EnMEUmNIG*TI6TAq+|qUG$2{G@$vO~|3ho$|MKPXL16K;sr+IB>8FWT zR;hly<}iNTvn(rhZ=j|Qo}SpG=2f{<()u`^=iltr|I;G&{>;x@E^Pc~3479$M)_(@ zhse(Sk$lvsNls7JP9jF8g*cw4m8KL)e=+xU4SFN8?uwb0>P?(dxb zn9TeC8T3CW|NoKc-_~d6z{Zhm=b}^ZG;Q@dvx&H<_qA75k;k9QYa@YBqI>1s7zWUo zsH3OEpt-*HK^G!tZp)~;<*JQ7g75HlxSi7xd*+P&yFQ6WpQvI;GETh4+fV=ArtCv}Z$i|1C=8ng%;Z{mcqn}f^@6y@ z_%h>e6oK?A8LSlR+?Hg()K@PTQY#jyYElKR3R-sxj-#FMNy?nWe?L+mHLR_%4s~=Z z=c6h18NJ`$xkH-}dYVgM`20l|{X7Mz2c5Xix3SH7SCmm^N~jWKbwL|2Ue_?MBHgz1 z3dSk(HKfX7!7a9W%6pm~oLr?F$_57An@D!GWl`qm3q+-#hWJiRI(w2Ua0)pH4!Xos z$a&DGTvTV5R|!0p@dx&@mgtIOSK( zwYJjK9G)kKb=1F|<>zgp-eyjtd=#@1#SIxHJ3i&&S`c}&q!6s1s0L)x1VHJ6b8pXV z68OejpHX7VKMfy69IN1)v?|059Bmo&Jae+9leH6GhD^{C!?FB#NCowi(=d#wa6mql z==E;M3k6#Z)gmW&XHZUoow5QQK6J8yvra&74)(_hF%BAHY7N-;2>MobOV(r5OQP@HuS< z2|AgrY1nJGfilP&f$JCEkRxuE;{)z1tRa%V%Iodbqu8mGk-R|XgZ&8}SU{OV0 z2(37meX#Fhs-3>KZJ(Ha47{tCA3=-m=C}Cb{@nP2WVGIU9DE$%N7KC68st0Zy7Q2* z38ji74e6B5b9P+Yw}93-RaA32Nh#o0%)Ogxs?JRG{RqRt)H6A{MlyvM>UM+j2bOx; zi_qSUO*HD{Pem&mt({0_LaWZTE?}!I>RVO`w2Kh7zW9xrenR-1EoUKq^C)D7ssh;e^7Df zlvc&ef+gYryld4o80nnUO8G5CZl(^H>66@jN&gA~vRwr;9Gp(z<8Yw;z%)Vo%=Jx$ zELEa^N(pD-nOlV$OuLsOwvEm=kWl9xa4zk#?<1wzqih*EwX4~|25YF%nZb|RIMdb} zf?I>^e=MI|RZ7&nt=74=Od#d7O&b5VN3lL{{i`ZAF8#83guMDWi{U>o1Q+FYg5y}Y zkCWQDupQ-Z?At6OY+BSSob|6A9WO8=c1Mkmbo7nYJHq%4Jrvuv~NXItiaD@)T& z{K@jj=i%eeFBb3mCJSr57^7YyZnWF*g%UIFzukX{y-YaR*Bt&L{Y9$RTN!r2uAS{B zNBbK1|JV3G$0JSLbfq)_pp7j4_C1PTxYR5m+6jn(rr$nCdKc9BhyD@7}Js|W^CYaA)BVL<# 
zlxbG8O<9@w{b_Y=K$Snl)y%AL;8}1MnzM={(-?_)7v=Zv7FBtMhPAl`SL3AEE6?{8v+Cmz)=X6n>S z?M$RR%$PT9fY!tW$!jyIlQ(_m|N91S=Q-!ShJFa1E-vZQpZw>>O!~#p+E%{}cT(Vt z=9a%}@Bt5^3)dfZ72lj!!3%U6n39Nmn95Io&k(aRf?gyghYScZyQf=boe{7nL8#gj z0_w>@vhJ^K#`0cvjORWX$k1+yfErSJ02m0l08L06k#fS_K?#`BNb~KFzWP!(+Ga8y zNHHT)_gh!2t`(UDXdc@?sRLx2&aBD-^(ON_eg6jB&A(L3JxMd1Xl>>5uwmyHLDI&A z>06xUhSJk=>wa&YaHJSiP8N??Y*NQk)Nbn|tFy%mEMqG?Xb6h9c5SQI4@nZ#6GNv{ zuzh)r#d(T*(Al8v$2m#0Z*?h0V~@Cnd1fGrCV@?2Vuw|%P|<}_D=V}U3(9mSR!Pe4 z#{gVG+#%3v{(=7lvFErTLfi37k|#S|f|puTcqiHWBu_y8=k)28Idu2|ydr!^=U3GS zD~ba%r@?AKSyK(p=}C`l1R&_CFmf_g(|@j$z&}NT3Hyk$$RBC8ZcMK$HOibeF?XiVUA?aK>?N&Y7PcPk>A zNo}+Mn)vaCLMQx5g0?u6IZJNOV*;dp$PkEK;zsL~NT8xP~m2Do{}3b*-NzCu8cGLw zjcVCO z@AKA1A#%lVR zXg-QuL%#a- z_Q3|Ge-Y837Njyh{!L?802U}|lseH{_`@jL%$@vY+H~p z8I-oKt=-cH7Y}T4P9OgN7hBh+Ln2x0PugTZ9GbEiUWPQ(#jn23TgW+fckCcJ6QrVFdNjA zgSPbrKqA=>B`>c1~<8*%T8!ex_{3CCcOv@c{DGHqr z%3u-O@|ttT^Dn~1H9t7qglg(IXH(b-9YNz>o0SU58OWjRmKT(HqA(yx3Okx8|Nl>_uc_q`9-6J+Z z57U|S8=-BFwv9iBg9dkfWVqz<3BO3Mn|B$y0qN%wSmP&ZrVvBlj6ZrX6L{fT&wO2} z#~4bu_QM=h-7b}@bFw#@KFAI6G&7c7)R9SK#9ovZcsYS)x4Aw0ChR$SN+dAdQI@$a zyKjIRXv9!#$NYq6fyB?l%VPX(PU9Bw5ov5;Y*@>>V2(46^-=V5qfu=D4(S4RDr+o# zMEhLdW=lPM?=-Q+;z=^8<-f|n`7qjHC9f$By%=q;f0jS7R!@3%1~o#^l~W!qfUwSy ztsC@0S*;!&a2^07czoV~q2<U{@c_c%CBbr_Ck>X<`&U`*d=*taW_%V;X0Byu4APmN9N8{YrKK5!M`WBje|WKv z@7y&QC3@zmdBFbh?_=HT3$Cx#jt4>MRmHKf&{9JkcAkUy&(3DJ1=J#}S@sP{+kxN^DVbxr!|L!kr=uHa4 zH&5N^@;MvUMXfI$6h)?OJ%FO}uSk6p3rkD3f{P)gST}}sJNM|W$Ucng^spPtDljqANCNX?27-+OI`on6mX@Ri zoE{LOfBM%0IWSsqLoJE1jt$@^Ro-O+@h|{QbXk0UKZ07B)^J*|(^CGt9&yy{qdfN; z-B5rix5lXbTP?^~7@~#J%w+I%lh!X)OR+X~v!xhOPE#+%VtLyB z8*2F4CR-Y7vz^Wk79d#lgWLa-Ba%esQ=*_j%W<+bbDv@G36AV`db5)_(2Rzf@l})w zCOkcGXRA3dyRaOirGT64+4ORdtSH|r`?2h&vIlDfp?9T86|43y^q+B1p{~W3vd%P} z%Vz{&*9JLW_TxL}L zCf(k_{OXyus`jx~Kju@_1ufD=q4kZ=#%V&o#;vNW%sE`8C4 zYo_tckt!+lCj))%xmPE)wi0s2@0}UBwNL&%S6*$q(q;tSCN)&oZ|!%Xdaj3R+S&CV z)*dHQ;8&{i7jAom)@Scjej`7=%G#XLe^)D6u8%C~?3O?Ae>3Z|mHKjDk{*?4w|4Hw 
z{%W(*zD@G=YziaB%4^jV*(s)*eGUQNAlmFA;_cr#ajffVTJj*0$7TjTh6IXipnU=b zoF5V}fX?1(P6RlUY$ovElPTH*r|jAjGezJ&d2kMZ3%RKu{)cil*8U+Do7+B+gvab- zVb;c!rzExQ3BOIVxcenqFw=fI`+2R28*gHwa}Y#=@TSNrFVsC*fKNzq-3m%O-efpF z_lybova9Qml~tsPk&%JBlc<19>YlRS7ZGW2WF<-ABLe>m>u|{6tkiiqY``TEBj~ft zmh&*hXSZ+e%@{e!cC}#|j(rfWLpr^!G&i9k8BSXwrxsU8DpYm zzd<6t@6RmZ;_ZKzKD;kq5q9m)09uJ7#S)CsBzO)g)BT&-6KcWhPMCn0jfl{po?tU4 zm?&@OhmHG)suOosDzy!+*H*Xcw5MMPUC2DkWi(p{?-+_^%W`>?^o~DzSv6AHlZVN+ zb6=~JE}QmQ5HNjd{ebhU@g%@!;<_)xQbVaD-CCVJZk2!oLzRT_Qfsc?1F~z|C&|n^ z!O0atUiw1f(D_#zwl2$3`D;7n1nTbxmacFUiDZShRDh8iyk~HC?=GGV#`+#}+wTyM zxz`{^w&YH5!oIii#;t>MBDHIQ^Iupm-0jOx@8ud$a}{2St_k^>t>tPxDJ()s{pW9L zM1ll!GGq27n36iE6qdLAS7_NB)f>4b{KxE9TK%SU@h$9yJp=HuRz)L+(k6(S&B5A> zIi~1S3G%FP$#=h$Jq%hqW^+a8ka#pBbDbJHhEK~-s}`uK%qJ-)#<BZWQZwbw<(& z>1=ZBOVm9^6ri_`9M-cw--9JZ_usE1y8MOp=hNDuqc{0q*rOr%Lium*b0Q}V_lD_{R+fMwyk<=J}eYA5EDzc`i85ou=yp z7ZnJ}kWV+nqrU89#RSw^pjNNcWtREeF6Q}YNdU9`Yxuke7mV~{5-3mG;1nFcOEt

>d&DZ4ntmFX=e z z!Mx6r(aUa9(jRtYn6?QnK&Pa%tzOqkH!fE)S@~yXoE{bKSDmy}Hm^?eOZzN9eUkpp zfm9-ZE5gXEn9&g9ospg?;q*vaVp{I{vPK=P?Lnu4ZJQRRL?p`RALhG12yj_HNR;QHD+s@>o#?vek=WqNjE&=K&3(Bu4twD$LG*4Op}*i?2!G`9ictyOr62DYs8zJ0*ko9!k(#x1Gll7>4f*A zhT^?A+&~Fp-}c+@ZdlAXqfZ$BOMmj$-5nw~x3k!soa54MLJtl1nr$$^7aa*4l&OQp z(D;yfx<6D?s6oIUK}^#WFEgsp(Zid6MlpIJ_Xtl2#(f-!^H#wMSSvH*<%XHxer#~< z8{K4>?RBp_WzQS}EbwMi2c{2W^ z`oFMju1!f0yNB(wpWO5QAK#-kUD1hS@nJP_U~tmY2tx$U?QSRseB+XrrfrLW{{7s~ zZ@0GT6BU1Nlr2f%9U;=`+>vPQ4PIeLNcmwZ?b3JJ5urup0^8{o|Ej_xW%QT-=!VC? zyRS3;Ed4<&!oHkt?&RBli(B*`uTf!iYw@K3zKMF-YI*V@-vEwWiF;@g;)UXv|PoPT*z{P<#+Dt(eu(t z1X5Pk@ci%}Msa^U5eo_xzku3-^j&WH{I_XM(U{S=8M-~#nVaEQ4`@BG*~y$SW_XF! zf#3L|C8q=6Keqy3cBri;d)*Df>nfo6)KVU^vWN#Gj2lfqKWdshyZc1rxl7bPJT-CU zNqf0m0A3`!fnMGb-lbmF83pQ47@JX244C-XXRpuSuNl336` z8obu9^Bl5GSTIMIbB`80Q{5^g8#|9r&JdEuL9N2uvvqQ<3-?koZQy&(Y0fS)U0uDf z!f{Fu)8_)ti4hqJmX(_EvtQ97VXa?dd!qArsv># zKB$UMO#7PctL`#g#IJ~;k~XvPX0^#VkZ{GT?FBX8h~|94vMF=xZhj*{O@=}=c{TC@ zo12c9({b6Yp5WJ>Ht#TB6V0dQf%zv$vV*ZGl8gCKB8|(btX1E{ad#uju>MsZf!D@; zpPF}TP0Eo>JHZD;b-}cr@d}j$)q#4wD>m_E@}HW1sIuece{GnRKg> zj^ag{b9iMK4BZ7T!~{K8L~qcPVB?mZ3Fw<~`ji!ecxSw7vJg+GA`)yXdx1#|0Yj&J z5{pa*)%xOmSZLZ8#SPgl<5&9zvO4mYw-?1i?n0CUgpdzv^fpQKDM!|Oh|>4K5+7nP=&Pbs~9h0SoJDz zAOe2T^JtO1A^9;Vo_5qEHC#k-+4#Gsmz0oM_Z4pt360eanw_J3o)FX8x#fuJ%V!;k-1`_x@{ zaC`4w8q)OY>Qb%(!3iFljJ6Km=~hq&c`f0^&DaMQn&(uT9&GxhwJ_Ur+Ifj!S8~n7 za$FhH+2Ee&m*ac8BiCF5WU!A}`?Fg@856w_mGX>Ga}&o+K+ljSb5LC+wdx0y4aI_+ z_ivYb`N=de6ELR0rW0hrPSo9$T{$M3{)!;2O6n~xdabCbUtOVHF$<=onUwoU;dRWS(ik;|8 zez}ctnAR)}ZeUGit()LP8uF5_A&{I;z99&KipheciIeY|S!?&r1bV@>BN8kEB_l;y z2p;X*`sU@$M@vZ06zS0lZikD{dNvpun<3IDw#sel z5>X~2P)}W|R$)9rx`4ds#vU@_PZ<-&vLn1JG4nXFU9MEKTx*6MXyK{*2=YoD-s_yD zc@AZU3-X@3LaW6T1f#mQ5(ZffUbF9tHGv{a?1Fep<9qI{7RIo^zoQBJhj+`yAMCZ~ z-su3Ul3GmnOVA1XmrS9So5yHhAj-&H2?E4K3*emn@r6)mlcNrBdw&kTOmFVbZ{3s2iJ>^!D_X!sdZ-5GCS( zUm;D+{0}d6qqQcW)KdAqx99F2L*nJmLxS1+3vzf8mFqE8liJ21DQ2WCI^_HsXTJ|# zrAbMwc$eo5iNRQg2Qx35VF`tL*rk`aFvh7aR2VPf7^HMnbt3fYqHKmuJe)j;JkVAg 
zGyp%hji$yF7TQc-kuG1~i;7Iz#+zpr_hMN$6DMoapH1=GP@kkco506>tGr~#(Q5^* z*Kjf8bH4$**hh0R$_rBmb>;7-wJ#r%+K}66WV!v;cICi9UZU<#O;9>xz294pk(QFm zoDa#+Z#TR;FQ5wCQI?u2#1(Bi&!UExW_;QP$mjcm6OTsYZf9!hCdZ}ug{F@^ ze<9~+-Fv*nD-w1@i~@%d*Dwfemo^wKG4!ZlBwCpH2{3APQl(s>*~(gceys*lO=69% zX6RGW+K8><+(4Tyh}85=;Ru$~V``5kHIk=on8F{1*+BgAYo|CRc#qsbkMxHc@Ju+i;f*br>9L5f*S^bkZFRrP=fH7WlBo8oTotkyea8TA& zfA(o)1KQWpN*URcmN`nPhPEAIhNo;f;|t4^@=Ik(@*9N9S)%%_r7s|xmC#SJr>eS=Ks2sGf z{6izxo z)tc!oy{T3qV^-#YbaqhYmQoNdT^Z<5FYg%)hQY^#4(eGrhBv7V)R8vsXOVk;5i@f_!rU| zFj@Mow$iT&^9ZDjX|zOc;O$8N0FEh4==Z+%_vSpjuRM7txq^#2A2s;6YbX6X$F5CciDz@+xlRhzInE~z3U8X zZjP{`K5}t72HMZ|EE(1Q2`Z|*b3z9d7oAmj!4ki?Jj5DZpf|S~RT~;ao4Jh6m+R~& z45!8gTwRcRU*4FptsF5%TqO>lY#F_FXtFuic>}3ktKs5iQU42T^*q}=!K_CwVgX+J z7KJE#?m+h%ypQHwTu+zIt9d&{Ggu=s|9w(FySAvL?5*GErhRRitHmU1%@3~;MS5OI zQCdShTxgn-B)#PD(3U-EF`s+$Qtp_DUAY2|NOgFfTezm`dktx#fPHHh$Ybm#MGV$; zSX)MZ(4+5YO5o6e>$!goLu@E2BR<{N0! zL9fsWcj#HTq3){s3-`K7k6SR)M{DYnH`X_^gp5X=_@weW-xOIXdUjj!cvXxyC64Pu zL>C5AsBo_VjRhJzM(hhWN&TP`{uPLrp^-|YT3C;-Wa))T;KET8a!D~aKIoh*3a{G2 zYtoQ;MZ2MIddSw8YTNE!K1ar+F0M-lAz+W=NOeV_h;{DQI)7fmIrr)K(U7F&hTYg?9&~leA6&&ic~hLoJ^r?_}ohpBtIpZ+R#$4%lW#am10XtPcmd> zDtY`EIh)kWo0RmXr7XU(Zv%$UWl z89T~p$em$*UXtp?2~gig8b>Qz*QePDK0W~KZJydwcR}2iyGoUnKX`IYlF*DI-G^&^Rkp)r5Zd-Y z!uz(Zk9bep3Ir{=ZLc|{X4&r!HV8O%rlS~$$@6U=yt$0yw&_OU(Ck7>iuv{^ms~Z< zu}L((Wyk=ta5Acfn1VLA$d%Ge6(K3fb+v)ca#|$vJb_|ZzTo9wM;e{4U$@%-g*90u z@OMQ2KTFay`Rf_>z_5gYMCRnvpDs~N&jLHsK!9WrO}OuUNNd)@Yf{R*oUIxpBiH$X z?|ahuk&9{XQ!n{toczDA4C=x%cba{RHk>+0?raIs zJ3OX)3pqsgU3QdEZS{oKdI2NjnFG&1U7^py6Qai{!^8Ew6aGxQ$c{J^>J7(LQM0zq z;i4%weZ1e>tr{1(QqUt>w&J9(L>kG)vreP-ZF0z1i%ip)&j_?`_=gW&eQz6-;7erz>6weyc=cS`17?QE}S zRF&J4`~7Y$j$+5|_|^6OM{5_(#x@?FhaHOt#HYm?N)i@T|_&n-5{7z#075#gMs z`^dyhj41ge9P8OMc)U%{3y*!;lpyrS@>5|+C3DAR=PP3vV2}G*cB;U~l=4Z<_ZD7h zg=?orC~JG^<-vu5A=qwaXXcjlmHTF|&0u*&x`PiB)ZX!7-|WrcXr-n3*3jGQth| z|3k~K(OfByDzN;Z{}Q5iqBA`Ge6e?~&bY_hOqejxng)O)KUEDD%!z+`*+lyJKbjRA zjfsDbrBS7oB#$wB|;H44}=AW=N)Jh`p4*M_96_Q4@(S 
zy(=I*^~F=LmZaP9&PyektlclKGoDP*Y`qGF^<3r#swGIQxyHbwFP{u@x!qM>snOV^ zTmysW)L%HmiZ|d9J66Y{uz9b&Mz~$6U0ly*r6|S{6zssCcA(X*Vl#K0f?1@gIBYcd z5T?5j9`rgOiVYvCtaOU$qS$I${}dgvQNd>0`zY1F$wRwhVHgO{93qqX=`xOx)$g!6 zCRw-byV?yoPRJ`W){!-HiP7;6q*wq!QwOH7tC#oETVF1`9)+${&v~33M<8u<`fdYs zPjo=x$>!d%!TuIBSTVES|Cf@s+$b2TASCn-(Ihp9r8vdO<7!B*j}oW z%L*8r;$}@;bHcV@=9E-qJ?YI}P_#Cdq*RLBBTVU)`{5#tKZO|-c(jOZV5X$(4|Cew zB;o?^?Ge72+M#H0#n+QDlZh=;Iuv4Xe%t=I!Zs`&3&y}Qip9*y1rYh;k)s$Y%xQ>(_8z>&Ud2~`bv1Z6qbw_QNPE}r{M<#XeSP~`0(~@t`BQZjI4!H2j-(# zf=l#mU!JH7Zq0=%8Qv$nW;6Acs}hdq6ONc$)yG|E8VyHY*?G6V$%j%}5_`{(mLbg& zTlRX5)il>OV%Y>SH)Zt27wO<|Di}%LT3K9V2HvI}91W>mH0?9!>wjmQnfRl-x%tL< z?*W!c5SccX_P&H*YLl0$9Ck)fJez(-V$3gUsY)C;Bo2^iVX`$a-&6HT@=F8Jk}y>9K$q!|@@a_GLg{N^8PbFhV2FP&DpATb&2jKq&-N7V{$ znx=z9P2$mSE!|_G_(UF|*i0z?0ec7Dh4c${yeA=j69w5>!{hxM&Y#|hCRC}{_Q-j7 zEeH-(ldy6o4@(k#V|g@RPrUUb=T$o^-#WM~jA2A^{E4-!bAOZiS5%^aAgYUq-;J3D z6`_oqyz;}enPuc?wK3sM@~a&MJLBRhFhuYa{AloLNmuh^5J#+V0weOcWpF+|y7aQy z-cDYul`FM?M=M;8GNUW5M_=4?O&AKSY0IbUeCpjvX7hhUODl>7BagbvH6$FOqpX2V$7?Qn5 z^NJ7?KWAioc1tvFR&fi2o~86#aN7D^+dVLE2SI$R_3ZKrV7vVzRz7l48ArsV_V9lu z!C6Q@ppbHlNN{lfBQWV7Cp76X?)R1rc-1YqH({m5@x#vKmYyUR&rKOOLAx(!uDfvp zhTqd}o0iU-oEpUJ1blDxxmHy_-@9SDYNT=~WMn*A61l$HSe<&%qLjwp9<&Im4l2>b0U#vSkRF zq+*lEJ>w$6OUOojghR;Z>(sMd90+46=@WpQPv%dao_@tb2gG#^;oA~?XdfiW;;Dse zb8LGYLCue}_tgz(QokXf)bSe$tfH%I;)pf(1>A_NLHg)I5v~nYr>HTi6Ji=w?5~MB zI}-e9MPa`bIKAD(I9!XXhM&!mC5luv1%IL6^y+mqkx13v9vdD{4qm2+xaB8a7OxhU zyhoD=9@-InSmb0S(b_fQ{QCPGK5-92Qu1H$4&_nvw6lnm;mC~-QzqZW0?{f@miZ;Q zz$Y$!M+a(aRaX+$nhfb}{skQe!Z8}5uK>oLe#eTlRECegbtWwtg*6DxPc9TKDBwX) z`4`Ok1|4Y4RtZv0K-lJA1USL0EqrW;;v{45Rz;PYUl2t(3fAv~r~Tgz=8Mf^FMo6t zGL@U1M)uO8ILg)V#19qDT!LvEuv{F zZlz;qX9$nI?X*bs7JR*@okS|Dev-9g)2WoVGH3003iB8{#~K%|k1txDMGrjE^SNs1p}-ZTTpW)lZB`;k^$msgwLn zK&#&sxkRl2D%SCSKCogU(<7sMZ&`oPQd@J*{XuY44dys-bZtDdADrpC|3OfTB;@wc z=QK3A{Z6J_(2~f>Gd0+qqB1!xIi(lH*IX%C22;7eFfWZytc3*S@L|^ua{&lybp(6M zld~V{1T;ERLC+cHtKJHTY@F#|cdVqaC>!hDdUEX 
z?+?99rPF)#zb?F$ow@FXpG6Z3NmkkUm_PoXZqn*|Z5M?+EY!zsrE3%Uz%VVoS=ytl z+!ILTwD139oAmsTc9S_oofuNTVciM$M)k=3llq{g3`JYYCFZ1V>?O0WJu3qz0D}Q9 zwVNLuQ=1i;O&fUI$f5I!qOSW_b^g`6vyOVAReBz~wK=rBt8X|mOetJc?S8hDZCd^D z&d_Mi%^GNEzabb%&fA%UK|2Im@kBT2|_Jrwo`fu{{jveXN4M?yJ@v!rpkX z2ao7`DM%MoVFL62$o*j!`I#T~+BPAKbf-ljy33Ql3!Ec{a4O4GlqCu?_8nfi{y^@g%LKK3#MmU+33~f8 zdTWR#P;GNy3pfkCyI%@QjKM$_e(rljPWIen3oMA>B@gT;53U#)*RrmYI!LB|wxN!X zRroL%Q=ODNaG6KAO*{0q!>Dqmq*hbWI(#TAO3{8?e-XnrGy^xL?hXy@53=3Yo~aFyKX>!yNl*-OHE;ua-G20eoF;ko)iw1{(uO_e zt}CqX^^_L}8xlP5Zg61{C{*PP7WoUuzp${OqUxF!nbA$L`CDiQ$@e2g<5m0=$p|qu zzw(*%VDRtsi)Dmx&KBB2xAQ27@fMQZZ)1-Wk8BU@b2V~93F?{GnI3n(6r2O_(y*^5 zM?6kI%?UeV*qNA+y1exO%8(k#ET2+exBQGA5R)K5N-ajR$po`~%?7`_Flrz0m{D4B zPN>K27v~?H+8k6hi>Ry9u-iR!{Sjj{MyQ%^&ZwrC_w-dyf)HY55KZ_v))8~)_0FiB z00_?4eTcBEI+jSD->a7BSwf)%rT{z%I=rKD{X-W|PwlWxru+c2;K)mvO_7Uym!YhZ zyz2T?gtPj$+zV5oqLYvIAx6|D5A2Fb`zXRs%AX9q*Rm7TrX>$yA+f4-!+u-bD&4O_=ftxP}v6T|_w zMy_kOOmU8QIvXhq5i$G-Y>uhYwMuH#M(h(1{-IC_?BE1ow>g9%i>WH2uAwrsWl@tN z91+z1TWRTi5QIX0fli*cori9KKGy=>kGRmWEwP$Q^Xx%eNtyF~g z*@Mc0QaqE3WdN>;4!X7G`Bdk%l`g~3WIO{7KX^@GB<%Sk7KJf2`{Q@Jv_h)_)VhVP zk{NB9Y>AJ3dF)s4#aH#HsolOe+bf)BiEPLS&Ts5n# z|65vIx1b&P}=BnRIRqCx=)@6)j{iM-o679>C`po1pU7)AX zzVq)cltHVVD#6^5U}>y-F7devjNYNz4FFM`ru9g(8rY`c&N(uabD=DB@BI=qbq26` zd*uM4FY`cxD5J8s=aBqeLux6+^-vGLjSpGH4+sy%!UqS7fV+(D7BI`%HFwO}{m<1s zL+$EzRi?trf|#N_fo{X3Ha`(z^9napIC0QQ-vK$7*q??*G{YN)10u!Knx4O%fsnQTwm15M499Oa z89qwXGDB*zBo*kWuNugU>FylPz&{ERv%q?;QhR2V7Y^icEV(f3*A$49+B$`^b1&w6 zw81=Qk-+^^DmQ+;6qoEC!yr1XlYuE*jLN-YmML3@SNaSUQ0b%cni}zNQOy@9_fseW zG33C+l|QKuMkH0S;^tJ~*C~TT>5_4^e7pVmhX41t+% zEvsK1>03l=iBuao@XFs^(A3Ksqy>fy4>mz)|A70Ww{ zR2g@!aU)3Dd_G!vhEs^xx3)#7nUx+6$a}5vBo(t7$dUIh2&_WtqQY+_3KyljniG;% z1vzpDe5?Ek9t+JAfM){-U)ea%e#i1qwgBlHuoEtH)-oyX_zHgbeg1uI>T`tXK>9*$ ze)k&J?~MXQ5DQALUnmAV_*I2<-TjO5n8U!c`9!#Z1dX8@YvFRZ)0$|wE|O@t6TVF;A;P^oY`;_BCU6!&UN^_0o&DW$0C;0iNc2{Rw@W{EQL;&3Eqw*j{uQ^` z+-}Qb1?1&-FEek+ndWa(QYy;+VC4QZNlg6M;@e+X0yR`@4EYgNW(R5}YhyaIi^3E3 
zbE1+Jsb9`)RON@MH$RLf!xwpKf8@99rTw{rR1}q2J8D?RaR%2WeqiqOKfap=z`8h$z9@7`0O_lOU5yvaWEAtE?=ro7b zRy)Ix(v>UE?=}X&e-A9w2}-rWyyb}$@hEY_$u|U%c!|GN>2&@OhT>j zo<8xQCoS%N7OE6J6!qh)b25=_&%D`z4zfDtUlFVSav1-O<4LY(7HPGNKU%A}Zf!fW zQtv3qqfkt4E}iWvOn4$jy8=n?RF588h;-o{3(N2ZcRl?@Z?c)dwU`8YGKl?SaD#e> zF#TGTLN(lHhEtYW_cOj922IObzqi1h8~+p(bWN^d;434V$V;W?q!*e4esrS=1>7i@9M^J}Td-6AP*^XmYwae*51U`jhw+C|>F5-gcIm#z( zHglL1agqaDH!Wk#7Wjl1%m2x`a?E})uDRxUU9RkVwnBa$l>%{ z1ozNg(WXr2XB&Yw;CX@F= zND;Ap+rL|h|67p%k`MT%f<*QAeTsY3mQ1z!jW zw7-zDD^l^VsEQxz?ld+q{oc|4qEY05j!{E&&^DOPPT;OP&+XL4HZ`f!c!Sisc;|S0 z)|kZR&NgVGde`o0l&E3+Oo_;K8G^HKAU4@HjgI3-5*k<3aB&_GYEow>HTf7W06 z6X9vkpXW6D)Y>n+vzwBynl#$(;74pH+uJc@5XCT%il)hj$b^ka#mQ20&D}chY?HXq zbR{P)qm&YG25E~;(?krKAJ}GF(QQAa=$D?s#cOD$iAqq96w9Owj*~$DnfhjQ9DlOm znf=D1@qoa>Rc%E07WK3mW3UKf2qZJi#&i0<40W%vZ&wS-r>e4pcbLhlqZACyr6q1J zaE;fngV=Jv{$5$h#2q?R2=6Zg1+Nh*^LMdRear~0ztytKt@q$#1R5TxQq#PS`91yo zS?{dBb^k4EQ=FNH!(|wyGVz?K?v~&{oq!`OZ9kp@J2P?>f>OeeB|;3 zSQ)1(#*fzbjDMviNOl{3kL$O_)ae(I>C&TvV2c3@10;5%Y+#I)PsTLaS+1DC&Z|o= zJZ|a2LKE{6=fxDKAUWX4`>@;iFGT}HT$d7Dj$jf+A7K_e9w zYKbiRKl%ylsC@)Q2bguC#lcC_4Hfg*u^$(|IzEn`9bhUcp|t;`uStnCvJYR$kDd^V zQv)5KUF{34#_cKXl_)*R1P)@gJN1r*!8-2vk;40{Bg*pob{R6^*MR}lPd}o+#Q~I= z9wjn?Z_0(EDZe?%%Axv6*)-Jl)Xanc2f|ib7_4$QBOzH9Fa8XdvN1vOx6n|BJM@j*FuI+Q(58 zL>iP@LO?;Jq&QhG&t%%$v0{rnRpk-%>yXYba3JUV+-235Vn)Vh>_C+gM)uJ6 zx&FIe%PK*MUe+|LAO0!JcuBBT_}c?Sjd^qP0YFbSmnYFyM}`9hp36?m0v(yGm^_SG z?0BEBYkh;#The8*{}}8wPd{%;qW!M7Aa+GMFtD0ml3eK|AT3CnNS1X9Z^?XmotFgg z1VijcN|&_&4g7v=mNu!v4ga=JTb#I7VO8Q-qSGcUa%N7yz*wd^+-#xv%~_PUMatS_QhbkopPmi`+^(zQ@S+KvZwT#Y zJi4Cf-~x;SpX`hH%_{f2AF!8(D6<~Pqn4gnxEL0QSK@T->#LUZ;D&hJ=7~!LxxUH zoooh%JY<^>rYY4gj62P?q?^`YM$%8i#KU=C`o-THErpFPl-XJwqoO$YV@md(;VEVprXE&GYHIluquJ;_504MA zRm+4A&u!dedGrskF>`iTWW~%K$uqtk^27_In$dPT5ebDM<}18L{aE!kpgWWK=Qb*# za3)7saQt|Sh@MRg=5%*vxSiCvu*_ZAfExF0p=25UY%|jJri5cxR*K^F+s6hBE;``5 z%Pb)X8;SeOCaA_D?J;yyAGv!R-@7n2Y1eBbVTBGrQr%L_0kPTLmK()9Hp`Iy@1Enf z(?KU&ecjqY-?q69kDatKyQSUcgeVu~0O4N*<_%!!>qKZ&lsIW%g%8YBETA~)CE8HC 
zTP~Q}KiIml>J)%)Pa-?%;mLowC~HvD)fy_Dl{r2Mq+hUIFsOcvQ_|t$M`7K~$QVYR ze2rauC^I-=4e{`@k5tduZXfFv847Y`;+mFJT5{nx(_B74lIg#bR>uoXNtAWoDWs*R z{?<{nO42m=xk98F56^_TM8R0)v{_u(>f2-_dv)`Ny@#x1i$J zFUZZhw7lFfXCBAvcC#rlwyz12)wq8p$}3K)M4>BM7uznGm_)uS($CJcG~lLoT84^b z6;`l*B@CxDozvUxQVBc|X2fG~OJVenTya*slw}15nH6B@!H(C;&bN`{kvAud| zX*;5O)kvn<)pDX8XEJzzNv-4keR%FSPbdER+%E9L^Q#BZL2YIcB zTy$OcCDZ2Y*U-IejZM(ybpxgCn)j5%M$7)2nN%Or2fOc|e<3SiU*UPoLuRkLI(K3- zKGz$+&AuMXfT>{Tgh|Ula1aOP@zxFCd4n!E`KyHc_c?|#gq`l{EJFav#vKoK^4b~M zC1yPWpW~AK4X}+=XZ2bNsIVkWMxIINX<~3ZH3beJAagR+ z@Pl$v@s-4LpIH;h$;MDgid}P=%&t0|pi=D9QH^VN;HUs@%0f8a-1+YZh!i zyeTievl?@?QY@dIrWQ0lfgO?}q|lsENIO~M>jBFY8@+sV$LRrmu1k3YdEze&?(>ue zyp79&`i#9sJw4qzo7&UWor80@s5}o{P{)#%sB57O$fm+j-YICmii_<~BvFp02b*VB zX|<~|l`8B=g}L%Z_uzg>#NRu7w5y7sQJlxSO1PkA0&pJ95Wn3+D{kIK>PC5XzPnd; z{_30)-`vSl7A?2*e8L?6!8Jlx>gMrhFepT&Mg5?&)mnnu};ce zk(+O<;+wDAecii16E?D4g5#>&CPJaSHjEA}aQ80bk0lvSj0&Eza>)%!`i-aJyX zOWe@!gdLdG9vaBn8XG>WIlEe<96o%WSQ}i#UJ>E<-MZqXh%Mc7>i+o{ILbTIy~XYC z%`5+Xf-q%X7lC>nlq(#~vU35PSDRbk$@vZiHRvoGZ0z*W^gWHNhl^b!>${wq>_XY> z*Zs%CxW=71Qok>1P9D^s|J0pRupqE9KsNDUz%sFR?qHlZ>&^YA+rdT^A*Qy6cdZ&! zO@}Y&L`Hj?9Q?|n)37|8L@%A|-NVzA2lT^Ty*-EgG{xBMWU~Kr2Y>zhvL}5wlG19R zW=VCFrwr~iFqaH!Gs+*#(yEUbFTgbnV!A{ay~3w6zwgf3yd zcG)aOcEB5Xdgkw;M6k^a+kNm|U6?4R9ThQiXaYT#MslG#SnzB-JQ9ivH~BUy0HW#& zLsXvef4j^MNc80bH}Aj1DRw$3dh7rvP4DEp6& z-RR}x3%neXq}U^|#4M#of)0qH4;{N{g1#X(U(qJU>`oW$fR@d-0yIJIoe&+Km7N2378~rTl5>j+6tgbkm zPRMq%G4moIp#JKklr-bI!YZ7a59K>CouR(Jth(hXQ#~^2u4%D9DO4UZSMAB)fg@$Z z?wc@NuDH4y+R_DaWwtSBp)8-KlR6VUc;Q8v%dJVR6gw)Yld7_u)vfs2ir)%IVE)l6 z%LAu~Q7Ccv&Sdguq*rCZC8p%UxlP7F9iw6}jdoDz(8OM#(8|<+Q$K3bZhCo(ecHvj zgCc&u4!DtRaaApkli{&KBbiZ?R^)1w?;?#)T4-+@9BRK!zKq4sQB~#_)&QJI1*^|R`7BS_q1*BA+(BWKCmw(j%VyiE#5@Eh|5q}~RLs32DSNxs;BBA#p zrq}n0)1{Ws1N=}p+%yU}A^qKIE5wj1Fz`%4N$8BR!1DyDYxS(voyw{XAhUS2_bfe? 
zgT&JrL!c?wSdz61!+geD`%Cb&*sF=%Wsw~e&i=(L`=Tjyh>UqJc=QDhw>o5Ys);qE zDw}z1Jw=Y@#%e~*zCh7RSL53lD0WHk;oud33`PZLrPMLTtGh9j4aV0hXqqZYETutE01JUb(eRCu5{at`M9ganeuhqB|G)stLU5*Vcs?!>0Ui{09$%jT8Z*r<=o1q z5B00l!qQgJD)jSZ!o&V?*Aw0y`Vz~^tmIvmv)Y9IYQSgouqn~U@GFoHDGvI_HNyTJ-M)93 zqH(0W^KK*KBu8s82Pu9iMi%B6|L4a_f9-@I@N%SZe*~3!K^rV9q%4bW5gx?@R}Lud z1<+kbraIroL^V`J9dGd->lTEnknZ`#I8=OSe-Kbb5%AB3;$Q5>KQ8{+GQ#B#Dpeg? z^rQVJO~Fs)=^J{PyANhrsN)Xhyf<#>CHYVWCmMnO96kj3EBh(s8RjO!TN4ZWVY}N2TY|XU%KO*F zv8N-+2^bI?JA9&k>AnJmSUgSa2y6w5rRnqFb^DL^fUjDP-b}xGx(kPK5{eEUFv>V! z7hTQwebo}Y8fmz=+HPV96T7(ZXJ(ddN&{bACcQ8%TmY9*%OV`mO(bMGH{o^JMWBlw zHycszu|hd5KS4GrVOQNpp`o*1orH? zMCa1RH1WmS`Us+n5z;waiP4X43{vABUH5YE_3ydA79PRwCY5wo8kS7KW||{`jzZ0s zz9G#Dc_(nr9lrIeFCM>iL55_Tg2Jn9K}%h3Bi8nBs1Hcv<>x?iW`4WNtRult{0SYM zk>L%|E~4YT1kY~|@{vKEk=AEM$%U(4LD7&lqt7THsa)V1&xUfa?R4;Gs!n#5nD|wukCwI|x$C*)pqcm|8t&muDK0MHmXgk$sppU2h;y&S$F4chwa3oX4{= za>{C{2?-W^R}QZgy7y5<#1St`>)@zzR?g_i*-(z$F1ZDGWSHBXjvtiTH2rR3oMy5s zHJl|G!xOx*@xl@fW`Wyp*o}IZT%9JLeGN=J_ibydde(m*{&?gqgm-ErUiVpWZ9B_h z6>on-L$N6wh=%m8W3?QLzqLeRmF_v${KV>t{@8+8sK^Y@qTRjNTi)LTCIv$^mu(Oq z=N@m0q46cldL>%w&dh2n0ZVE%dPA)QP(7!OnIc=bHKzlIk>sn(5~5rOBRI<}aI7v> zL8at}R2@mEl~COjLT}ckFXT{zj7EbJC$JO3FCbvn^j&BV57X;(3e@$Ls@0ouaw!hi zHBec8^{^)X<&>+Vpk#D`EyYuhg_uI?{W_t9;U3Id=zO7wiCRc^nFtGwVpD^=N{WNB7 zwcrx%^Zy<=k=R$N=bos zk{6FRZU%2&STufpuKCC7C&4UDukF}sOy{>CQniyNlGt4xEiCbui3wsjeUGqG_Iuf{ zzFW0i-TrWY8=ivh34c4*4!@MhsBkQBG2`MPg-4x_OnfjU{rSTkwO&NmA`TP&^S{Pr zZ`$cq^u}0Dq{Ing03`1F3u7zU$!R=%;TOhb?v2UdmDl{}vE@iWn()#*dZ=#Q^h6O| zea=aoe! 
zU3h$;aKP=4skoLiaNc2KENe*WBsoONA|nmS3?<8bf)ZIrlH zC2$^cB1To2|Gc=OVw-19>G!GfxD8=7@lBz`Edy{JUR}tjXqW{&3`-Ae>wEO7ys2ykSn0y#5Pf>s6KdV(-_p0;z0>j$ z4URsBEpZ{DT*Ysaw3a7ji`Tt7T8_nJr^Zk*SI$Q|kt*TTHE-r=>pKZAlCQ`y!=Q{n zcDImorJEozLX}e&kIb2z#N(h4!Npdir3p>+8=t4ia7S>aHO>{*iLQ>)ZZZs)8ud!n z+vML|gQsVO@!D6O!-~b{Av-!@-1g1quw+87`7$xmzqYj&lp@&D#2WlrDvb0tuu z2ICS(U|hMFtlGR&(kUl+JNfCJWOAWT;cW*nIQL4pVc2E_Ax@Yhi}K%8WjlD$$SM-h zqh;*O*&*`zp5NY*y6cVFwz#Z~hUJ)pE)U>pnFgUK)!t`-BqmR{3iuxnfwk5yh2$4T z?=4a3xTz*>Y22;;{x@H?$Tq-Q!fu%0w9Z-04i&b=(^MjT)=jz@j@uJ`As3sQH-}*9 zlH*ibw1Og!i#WFFUrI|I!dr?Zn)GqHET6fLUR{^j1bu z5(MotqzgW<9CZhnCNb$6i2*GCl2f=p0c+Io-*-mCW}U-5Q*&bJ8|-q1g0;F$R<-%P zH8T}0C!_5OYeqH7;Uo*^6Jm|!%Z6`Tu^HQIrQSXG;o@D{W*#&-Gpd4JBJFl;FNfgW6&M@TB-Z3tN)OMdb{is?bD|YYVY)60&Wz-PJn^X4IA$rA&7T< zKtSEYe?tTQ#s&Um?q6X7drxV{fS0>38^yj98QZsxFCO?4Ry~IGoxoooIkN%zpNvi+ zaFu##22^rGAwuYUk8_c{vr$R6kDNt(d7G1=;nKW?=%PQ zs|Rx#VynmV3Hd9R?~iSvh2mp`ZrF%Od&>f)wIAoW!n8yqcDJ4ARaALT9G1R%^p4d$ zrxIg5d~$IKBpm_R$oXEz>Ugr}i=Q%iVpNaVk9CdmK_~(Mw}nrJBKLbR_#aIImtXgU z`<31mAJX1e-=U}x)h2X61rg}nBf-?-b6OtHN`bMKJK(09bkC@i*Q0k(`nKl*9#cZs(&}cx1fKm4Qq$-} zfwwJo<8ctQCKh_a|7KvtH7uJN(n^s}ZW#%)JRc7BgcOt%%xek45{9 zq|-SZ^DA3z;a?bswc-WG_rn^sifZ{!&RD~?)_ks$-!k$qiG_0cE)U#4$qr6Z+^{hS zE{UgLWndQ>e!wkw>sJw+7F9O=Mwi02orr&R=Y%X|VAIN-r&io=x`6b1qf9}PqRtYQ z6@5elvs{>q?9XM<$fhj63ezYuK^YK$*d9o*y&Ey^Pmc9TIb?TQgB9TZvqJoJLZ=~Q zfbB}02PcuI<3NfkN!dc_QEy@;<^|Y#7KaN6hA* zl)+g_@dj{TXT(g##6eVa6sp*u+Gh)0^3G_-i`fR#XQ2-VKq%n7(!g5(IqD(wUp3zwhmQrTn=2`Ga73o z>qmU=IVe%n7=@~jT#9RqjaMdTt@IifcetGUC9YL|cw+pe{v^jt9?)!5oBFi=CJk;k zw~6;;jLP>b=slX@^wv*bt5@$l%@I_tSvU)nk!@aYU#sD|h89oymWfz{SzThe`U#vQ zoIIt%2H`KPCT!FcRVbzc!~M|E#D07RrGFPxEvL1CujC{fJ9ug*1(Q5*_WQ4V~Qun38-5@g|K+w3rE2J>)?p?h!{Zx&%yU!tNwG zkzs#^)qr2Q@A}E&!s(~Rd3KA*2!bw$)1Xx|WZ2l{ENI(ezZV_1HTM$KaR8QWmn44E8fidGJZRIiN{eL_*JfHlsu;-^Iuv(>rw zQ^}-I-}^abl|MR6v+}JMxhsszgk&Gk`*^-fv(&YKTkF0o(vK(w!ZVIbM@WeX#T$qnB8L{S!FtC@`)YiH&h*3HNsi=zr8mB|7_s z5z~F6|AWmX(&s`ab?owPmp;osc?IV3_1+VL+n&m!XU-|K?1Zfa 
zHN;V2f?u5o$EfT14ztPoMPO7($oQmuq~_1%^Uv;x9fXzL{jklg4v=QTPbzK7FtYP* zFv?-=;EvXaSbw6={b*Z#o?<@lJeV?}*_}XdTcn$C_V>jv>JmeebUd?+A(SSYw31Xp zRr4MUs3bZK+FL!(vw?~)T&fC7S6L?nL4#27!D+)Vs`0n}c>C^t*B5cK5U)DR*)lc% zhNXx=+K&-+a-zg=5t`Z=ql0a2-BGb_O4=t&TmpHZ9)p7fO|GN*7DMJ>;5p0| z2)ziHW^@#ZI2R5bsp7S59J^Fz$#LG*eZlRTw9BsLz#`AJ5lK<&9Hh1s^w%<7uw<`(0bX(G~j*W<}uIkga5DyZr<0?JX98VId9u6@<G(~r(~S{2hYoAIp?jP|V#`la1y6cMH|k%uh}@YNq` zJrkC-z2k(<8BzrKAqa~c6r~%Pj(S02T(TgDqpiw2^;3CBGm44#5XfPxIIBIwF?&T* zJyT}06Xzt8P$nj{?>?_-F~OreGJFBduhva=2+OCYH%W;i-`S^ovT3l9S{Zhyt-`je zHdum&evPx}=w^8hw|_5lFTh=|XvQ|qJJ>e$rl?kYQeD+UUQt948NZ%t>my1WRFh%2 ztV~kbXsBRE!V9DsXv%b~tA4DlJe$})zafS#FQXL2f7Bx#Uu$fRkz&xh#FUY9mF=3G zO1XXMbSJ|lOGhcpUzHo5-La$gejap@7AS`SLA)|>!#D6qBpC876%h%(7C`r-Qs?vg z%B@&f>aM}%+oGNb?gAOvYjA85G#~A~f7#_!_M>(d=?~tfC9vNP67(431!6L`@3Q}} z*t~d-V9N5wZQMp0{dlY!{%AQGR%E;+`O?|;J{+5N6t;0Pow&v*nr(J_ZnW1eq9SNh z{P2Keoy4A;N8N=Ws<#|h2Ckp8lIp5Mg_~Z1w;*>9@$_yNc(;rmz5@eeht_M2xHIZ( zrBf*Jc#=_d2^xd?K=U4v0uV-aOZ^Xi(ctj4io z3BZfz7xt!?8hNEI)6N{G!ln>`$rw+Iv%32t$kt-1u7XaM>&8&fhwX|qsMP)_An~w^ zsQmiHk{{`PZ9ZjUZSl7#g7)DLu=iTV-*%i#%{?7upL)tDcc!mlB?waeTOgp} zW@ocsAd^dc+XtP;{yoGv`7^{w_YVJs@y9!e1DrAbCLp-{-xaHAn2vLQ`aC=^)VY3aJztS93n#34 zJpz3&?lq{R&~6d-)bvP)E9uSF+w|*E2fkuJC8MXwP%AT!u#kyWu+|f{kN3pw$a;H+ zOs&#wO&RE6e1`UOxiDMFI;St12-6!JCoQrSgP%OaqPtxnuLqnppQg)otxfA4qrfZ9 zpL`dR!wt_yX@;OrN8CH-32iNLV_o=gi6$z7x|TR}%Wz_!XBmfCbBUPOGKC~4f5NSm zul@Yci|PzNa506nEj0>_?oOHK9y*lxsq9bCbjfijpmUs?}p8a)jM zpMKFU14Jlws|8oKkE!tloB~wDwxihbXP1$r3Cm1BM$eOFeG7v8D?~Dri9GZpN6MA? 
zT&?Q`QxnN2oRX`W`OV>?#L+kry)0JqcdGOK!pp}yt=C@~Q$GGPL@ao>V+PEPd)?Bo`|Naja$5r&aomoYkV0mJ|0fVyMn-(S!Qb!R>I)k2Oq;z zlM*Wtc4mHSpZX2ge9=JHhb5L26v|_H9xwtUnlgtyZ5T!4&9Aq1ZN4@|*E{P1uez6z z{&0Qz2TnQ$>qSo))XT1OEO3P|C82MubY#N_>yrEgj>!nMP4$yHdXqMOl!SPv{;Wd= zD-|<)su}D#uxImSUXL?ToC1#Sr>5(7f|=t8n^r>W+^QcwXD2U0BTGatiMDLG_-f4+ z&N@eIzXdt^mvs~q1cA9_A|z;vXrBq^488H5zn(fhKj+{yW+NUT}c!u2FqJGQgu49T5cdMS(IO+8{u zTFUoJ9m#L|fCTFvrPLvIeli0*k{O?i7}v^)JnnhU?11M@^3F|u)Lca^`4GY&q%RZW zvYUr`By!7zJo|y5L!AoUB7!tf#;J)dn5K4mik}NLf4^T*r0te{^OnxuUi@3N?W!x6 z^M~3%@Y`S+OU$suRBx5&oE2m;tKPWgMtBCz*SmJf9EqtJumt4ob99gcnGcvpChr*5 z<W+#6+qUa%8Ci16PMp{Dm73UPj?4OH$k@1N(mWOYPQ8Oi|--eJ2K)D@N_kS=IU}dK+q9@HX4*gc4 zY?QVe5_{2A;CX^BPFT0J?;ApTAgev9Slwe~8YTgFJfe*( zbi;g^e)Smg@q-e4iWbec%!=ZPAu{C8cuJv4JzM4uuJsM>5RT=dp_k5M*&cO*t5z#c z7f;#NuoQoC0FeB=V``7qsXdmVOQPp?i#tZJ55SmVP@95PJ-(N6#!(FSBwTto2w`@_ zTN}pip`1U#*9=2EA(FUvH=C?#fg>XP%_dvm5o_-8O~|p`2J|QB>_&UUtx0qM)}MTVS6ZF|qo0Kjs^|>!y?U55@nANsmj0&>~&i zK0a4jh|%B71^B}s<|0UdGoIp1Ql@|*>6t@0l}lFSP?cdb^@1_Jy=~jXgQt-)z%{Qb z8o+^e17v~U?5Olm<-%6A8?)7{9j!X-$FUMRAUaC^ z3?R@OS+toZ2}q}bs0PRFcmr29ZR>2HaRNSLqj=kI zUO|f4G1rDeOP%8DZJOL{Rxq_l`0nYDbopAkbBIx?3kWxd0OCVJ1=y7G9B`!DI<=iY zjg!?KT_RaqV4?IFD))m1{?;{g60%V}DNreHJ2F*z@FPwd@T`8@%+q19ppnZ|aE962 zrv9F*qfPhZTJklaMnx-a&fU4RwZI(Qci5C|$oVc)$4iyth5_+{9iM3l`oH4xSpU8) zC@MkV#4p*DoPI=^&ZGSG!g=B$B~v@LKs^41vRz|?M$paw1nXK;Z<*sU}fEQ*<578OnXgS4vdvv;9!L2s0;B2DTfdT+q#3J@HJPvyQAUSZU-#202* z%yyl(g$@RRvm&iX@qKu7j;1aVXc(1aA`6=+BoU{-BZK zgnY+he@^8)Vo=-HF!=Cf*fy#YkIme;rG(H)Z%#4TQ+|_IbD{{1An1l%x!U&&x(y^6 zhA0^vnK^Z4_U=bPp@id5;qhc=gldelkh?3+&a&g7lju z66jRNySVj?$LuO{5Jk-IWU6eAJX$vM-WiDDQSf{^>A8#U!I?WMYOplIrO9g8 zn7PRADJD|zM7^UAL_^OUH7jN4sA&EC#GWNca_#NF!M#`ONX zlIWJr5MORVoe<~cppTdaKJs{huh7! 
zLJkvwZlUyrStC=p!m1itRh+7VSQ-U>IlU}JY#PF5K#0FAwWI&byT%UYW}J4f_hb>I z>K!Uv%Cx&zXSNB;-yEC-`+&L9hgr1DXA+2(hu1o(nhTT>{p)2l3Q9bA5=9`P3WxWb z>#h+%jYjT6&M{DlbX z*|PSD4-rl;og!I1AO>q~gRJual{$xcBo}pU-)r}uE0@ml#F2x6RK`}4P{h?Mk)1au zVO?{qbr*2vlJjq|qtm@yt-CD2Ww}{{AX!coze1&|^PSm7Gc%ESiiDxoDVj%i9@@iT z63n2(xFAv0rnLN(O4AI4nV-107>$OgyHJ@~seM(h2ZP{5L%K#F4FGz|x6;L^VC|Ex zdHL9Bo@Sn6;5f{wdB%U(i_ME!a#$d9b!}z7+K%kXy|?bd=)J?RA{VWEU=2&@Wv2oJ zXRTkYCq&<^+tGB6&59OhlRiKMc0htFuiMxTRyKC|g+bw}pPN1G-UV7bF-{02OdS2r zq1ILvU3TWM>z|Mn8oBg#Z|@UGcM+5Fe*b*ghJa947d#vSsm7WG21@^Mbtx8XQGU1I zJAkRSc;q4&<6mJ5a^a%c90O+*(8NB-+@v-(LznZc?{ci#uL3IVR;9OjOdWVqIfcTZ z+}Yr-QXjICLkQTRT+Jc|WsLTOc6nPR90#M>)r_$FD&ii_UW^V`0y%5*S*zTn)tm;V zx3%|N;~M|;RB$)-GtgP`ade{nOg<_1Br4QJVKYSz%M@ zG|h>auNe!k1lhEEKphX0W6AR<$NiqChVKU;jGWFzwQ7uj8j80`U6H$N3m7yQ@e^1tQ-hS`2 z)kbqUW{3CDAj9`5n-wYPu$D;TN7iUzP&WGq0rB_HVD%qp{TEg8>*Tqe+fNl_w44_r z4O2IFOgVFp#dpZ+p}lkEQc}9lUjGNc0Zr=rPeJ0JDhz)pTBH7nS^QrEQ2M>_<66Sm z-$Kcxm9v>&7-&E~sRV`&UcS8KPBe_ilEdPMx|HlHnovP`R`X-z)`UoMdnCo~vpL|{Bh;o6;KQ(c zxVjm`fPqY8g;KE{AfmmO>~$oJm9h8;<6c8J_uYxEK5DAnYIb^!b6=!%aEtwbz9k!0 z%%}^DKceNNfQzzF6qdEIX59*lD{HsYX~g!AK$#w*$%Wt+eZ^3PzILfSByjfVtgC4{ zFj1Qx`eKl4eHfA0pDo3trB9w28oKZ-vZ9oKjkjGMJ> z2`JMW`I0qxubTZe!Jrtz)!`GjAgGP`o2a>j@LwXd{RZ9lp2>{$pQKMu;<#&<9eXbISwmQCAa@qJnV&e!*qi~DA8ncP zkP04MpE2`h9zdxU!)VFWx;D`L&H5OR*F9<)-{^;V`pGt*&n`{)qrnL1g3i)%tw;<_ zwn(b0awg?>TYwVCse)p{V5b+t?zmI*W3jB@QFPCl1mP7iS!lC}E!H5JNS3|2(zcTmp;kQ;Ba>z?0}Sh@NVDQ>$#$y z&3YvPT)*$L73^AB1m7Z#no`rln92L)DI{^xdPgitpxiK?rWw)o)xag2cMV-XctdzL zlem_^ak8zK=Nisv7D{^OxfBP$$9(Ddfo{f)+F^rWRk~zz#Uyo+vFfexV#xe{K5KPo|&% zlx0z9?8ic}rxB$&``}qtiN(DyFqCG4f{g}g!1~2n&x@R|?C9pX2W$!Fseh)cb9Yqs zGwIBI#@>`<-u|7qw*9Y(>u2c1b<~fG4=at|e_=TKy!PEanFzmmeiD0)=Ev~PKI`n@ zXHCAbubt&&4Y00D-$}YJNTcFZ?wH%WNG@6s8WeJuspFA#t0MzL==VR3CYcD!&wwB< zhQIA38QXP78(NkBej+;OHeU)lbmIIy+TP29hI@&A@`L#Zn+;OC^)qLI?oE2}jc zdb(-UZxkx$T{MMCttKmNVaX03CQHbs>-GGEiD{f|%E%4v%0ioOb-Vc6eXDNZ*02<# zR}K?88-=DKYHeu_iT<)CWm<)ggUENa@**l!P@DJTk$M6eVQ&VW<8jB5ljC2!5}Ejp 
zF4Hc6Y{MwR+^#RsMP;6Qu?xziCmp&Dc`h!YT$1mta6yL8(ZUx4VpJcvgzi|GX*%P1 zR=lATN7tqL4po=hu5xwGVQuw4)0#N?SS}cVRj{}*?A%dnViC4bW~k&QWCR{^>>H|C z123Ed86aP5)glXPNl9tuNA|y3ORn{A**x=s3BoR@r7 z6$1cW!1RXXR|O}3nX?+c%ya#ZHGBwmCy9I361(>}g=1KTzG`HsQypN|gfU_9`n=mw zA1X?;*)j68haPqcK=!4E5_;Elbz4#d55z9dSPJUf7k9R7X?2EbdIYay3RENvHENgK za=gsDje@TvjgV;>9}nf!8+q(YbhoyJfi|pTTGZ+fao?>tqh$G<$ltC+aWl1uq^b+i zFiUnb4T;qklcbDCkgZDVXKBR82O!&!Uw0(eX>gi>%=QyX``zcY$5m#0X}ep$Jy^&f zHMY_28PD1MdP&xVr$*^wfOv`hnSmv@UTT~rj#PN~c%mK(3d=2ZpLoCZu(CumMHs?$ zq4kUjyKfSDZfd1VTAM`uN$6W6sm|L={73!8tEvf|*Vj&|&?`iMCt4Ou5T|(C%wBjz z)p%d2ue+VqeO^ot8kIQ6t-w3EQ84C|LGZnj0u9A>A&w-c7Akji@28bdYik_=#F{%ka1w09ryM(|0IUt-+YnHKXWwScjo@I1L)_Z z%lo@~uQwXQO0*?ec1*g-oJ>DN#EGTU*=jsUps79?a+yU-6}LjeR2*tfCdBe3_@!P= zatL{u^e|ylXE~Ie#2Kr69wRrNWWxTSbNF>teDz1wEpk56ApMt`B(sMJM=*)XPdC}Q}HIF$kO=_A9 zA|V4i!((99KwoTpgHLVCR!y-kn<)>KV*8DpCCHE)A+29sZ2#di4*=WFi1-D`1Z8FD zYpIBpKujO38np8jTkmXI?$c8++gp^8cvimro!x@d87m}YhpV>wDzOi}#yD^C(6q2P z8Mcb}?1cYqdEQQu@h^eCA7-A?h3I=f^2U*YBJ5M!`ze zJ1gidBSl-B>V{A#*dSw9mW%l>j5>{*%Oi-(Iw4?rgsS4;?R=B!l<;0fbWui+-Mi0DKj{d@}4X z!$LFlWPf3(X}?6v*5+n#4;MLnqN$1r!9EnsG2Zs>SE>rLGGVNQyDz>yALRgo5l@`% zVD9E+pMN<`#y4FJea4UKWcESixaa2Aw|$q=mHv0v`6zW*|Loy6Rdq3zdHY9AaWOaChlR9CeywfFSyg4Gr1}dMS*4OWkW;c7(Q#E4_ne34}o&)a|G` zk}{Ta2n9La`?*T|=nKOWL|1b4RlciITDsZQ^X0FJfJ=}75mzqm^h);x~>S{Xt zD88bnvlPs3^M>|t-v=M$Gm3d5zk&J9fAap&q@eQX zlZ!1S3Mo$)kWZdrZ)<+P2vx{!X=4cZF5_(>Cr{4eq0yYgkw=ZJN>^*>CjVjNR=5(wRQwcb6cB*B;oj zptIvxJ|0qVJk-fV(M7dYOvCP?(jKVZ1!lWlQU^MKbr3OStHNB$aU8;q&8dY ziC9od-_T7AaODJcWLY2EG_fB&XiRTs_i@jZZuk*?7}J52pr2`m`PGLd5szJM&|IkB zYqzxfvfXJ5C#)}jVZ2Z2ANGm?ZV2~Wz;vLBOH;>G(RQ^mO+WPrmt-kvrV(0C=&a~8 ze__=Cs$tjs%P@u*V4=3 zb{v&x3C`6qap$UvV^&dJa`fdJEW04xWBy?Q>`7e=Z{B8IXIEh+uGzshtrcs!7S2J% z1)oQDm2J2TotNjJVyD1QZCOqH`+&^XRl&!N8^UR zhF15D<}|wQBeTqSY0nu0NLE&H^PzxnhzdzCXnpWNw!XgqQwZ&(Y<9mPUF_Y${e{C6J?h7Wd#2QO)mGp+LT@vYk}&++>V6_}_GzQo4guAWm@(DTdW zpDR#CKJ>y>JaxWG1GqR@_371295y5$q|_|d%o2$k&c?IWvht?KdT@)i<6neZp5~-E z2=bZ3TO0?e@Pqu_eC9R!dx^J&<(K7%&1{3F%jXuDui8( 
zo*T)y@`)7Y+P;-W61l}!pTyCSPvedexjE~bbGUQXG!SFIY8JJs`)}rigCDz-{JC%5 zM__hu_fYr;PG%y6yS@(Usn_h$qm16W!_ZA7 zu^A2JL8(V?UZ?EN>)4Ik>E(>800sgMo)gA7U4m!+TXz#T2Bsy+ZhuH%IS^lApsk15 z7IuA39p|YA4f)k`*7b*N|KGkU=(V#Q{?jCG(GT?XO?Q>dy9 i)=QD=P!V)r#K{u)qRCODN@0TQ$P-8~zR2SE|0V#oMtGS3 literal 0 HcmV?d00001 diff --git a/examples/rbac-remote/deployment.png b/examples/rbac-remote/deployment.png new file mode 100644 index 0000000000000000000000000000000000000000..9b9a0d7b2a48039a1e716be1a12ab81ff87d68dd GIT binary patch literal 98226 zcmc$`bySqy_XmndDIgZzAfh1MjUpfo(%sz*Ih25aN=kQ#baxJjNHcV&)C>(Wblzv) z_ceZg|Gjrz*J3%$^PJlI>^S@LIlNbtm&CnIavKE&1y}0T%hxCBlifU&zn50yYGubl+c87uiIgAoqyEp5Hn z-cK6zoNv%UIion(7M6x91}?v$+-c%VtHdnm+}(5_uv>Qj@o(Zv)mT&u8(N@zuu^@M=62)U;ns`Fxg?x<~giL8^PzSNC&qh_Zw^c>pTk ze!|I0E_3NCHFpBs*jxTzsp|=7jNs=2wL2v`!TIVA;|+}m!YQ_`UAAs;5j1;zc@~#; z#pV%bPTkm8hrG5lqJstPaT%0Z&%pxCaqYPZtxl)wJN?yov%AKASVi^U04b}PYDk&M z$)PX+pRrJG1Y4q@0iSLFKP12pppd8_6ind%`@ql34Ah@bZ$dI|{``!3g?#b3vY3<< z@V~OLlc}j4$im*)dZ!8obTw|NqT#F|C(CbaZ_8|GVsB*1>~8CTY=R=_&JTRFHFY+m za<{dy1M#~H(fsoUKkym(H46>ZKd(4j3(;uEDN>2qJDE~(GqW`cHLOdtVQ;?~&|4|1AqxAPe#d3mY>l z%dftHrh>?C`4uhQO>Hz^TG|5Q0mcyK;pP_n=l%bA@_WQTT59}m$;ZX>XUjjH{M}L& zWa=blZwn0REc{zD|26*e;eQPUS&%FLLlXar`JZnAISbzwWcj5t;oH|mUwu$eL{X$( zK38$Sv6YHBOD2BW376ZY7J5V-iRO1#YfTP|3PN}f?cv9qhv=Vc(b`3@{NFv1`}|?} z`DYo_Cqm9xne^0<^cRwgu@u1(c_-{f5aVQe@W3EiN0e{akRImSkCYYy% zN(51zspiD_wVMC>0Va<5_kjQZOx%yaR=~O9VTbo2I_KXrLP1p(M^Ug>b)gUQ`+L3V zC=#U#0p5ShIFA9jNZUDcUOCW9?v)@gZ~=Y zuYd;7nChIA@!!+^LjPuHbHsr1?~w~W080D+ei3mQ0`R^><2V+5J^!SD~?#ZH?zbrz_F0Wcy~RQ01o-7fTH1i@}shLPwKr&g;YNDGpz6IabDorBLjQDZK5V zcQ_nVPo})u3x>^t25c2goteT(xO&FfJdbQZM^om>Gj@$k<^1u_G>y_cclas|TK!cL zag`Jb_J{-)e9nvczQ4KI_fe9i&pAJaq=?OhK zS{tI3e)Oa6g-h7$75(e8cw&BM3zhqhbdRWRD#3@F&v&d>KBll5`fHk}x{&DHgcO}# zAO_@0^&s~PWzsev$MZ8?k0-3c{2I8SW#PSI?T(5kbP!qmk|mVi zb<4u!b1Xwq%hg#;e^5zj=0<*TF|^>UCzcVaf3;7TrG%TC1}+$iG+e+=N5x>12Aye@kB1lrtDiT zWpm|I`|b#ADcaAk1DZCnYgm7*9n@4yYFC#@EbLXM+2EYdSTL{VVPezB>U-rSu%dZ5 
zHaa?a+OjXqSfo+@b=G}3ti~=|3N|dm!Zob0m=SP?{d}u3Vs60$o@#qGq99@$(ROGt z+F_ZSY9=&o6%j<~yVr`TrgmM=EpoP&RU?_j}jV|yjqR=`BD<+KVlK?WO!s@l-{n8hfmi83FhPUFsS zE=e-Sx8C0$o{LdeE$6XCk0B>g*brZ<^29y6clt zoGFfe-ZmY#ITskF$jNi*$4yy+2pF_-sK9)(%n$~c6-T^|)BDt!hV`6etpF8lTTAyc z36icgPk5?ZpS=%)6mi^gJ*VR`rpnzyAUZs2f5naU1B~bG;kDOCLi*{+?ji;464Vy_~clf?N)2SPbn`-g(6@WhX z;6P7JZ`^Hegf<>cTPNSd6C8X}GDxLXtX*3(e=?!x2_>S}?Tv#0 z9;E6rvpK5OcU;pvkv`4+E4PrcKHp`v>Z^iMPRMF@yj}%7MDL{y;JS}ii`eiYCl6IQo*+~P8Zky9+7IydwLTXcu}s`AuP>hE(N$# z2e;HDi*X4?^Kk_=s$1JEY`^o#j1}{Ld$o$eg7z%lAkEp9)wkYtV>F-G<p>YBIaw2Ffo;F^4uRA3+r%zh0&qee^BYh}=^P4(ySs$ia{D2$a2hT& ztkd{~mxDwNF4%Hb^`VK6gn(W_f!9KbWc4FqA2eT;f=Mf@LV<7fAqX%WtM@lTG)djoN|0`lM32qVpQ*9kR>L-UYLQ^Y?9It?ntpv@UZBDB z=&*;ebQ0aQ`TBBKTXz+G2FOT?b3v6*?#eV9(@0EOX>|`{AVvAeFtUBHGdBZKIs%GA{^DHDhArif3q9I^ZQ|pdb1zg|1jU@DBw)5hj%$I=Vi#KPzcRf5@mZbE4Sefl&By~7e>T}4rSstIOnf1__d4po2 z$pg`J&`noV!SzVkUGwhK0?zKcKU2;;W>h*J@L^;#Jk2xb3C6ru7Il;oV-A0S`d3Ga zk^_S+Orfq#YW;M!{eUHj2nyS*CBd$9kC3T|tcae0BUbRmk4|khc0rRAYxlfRXc^n` zX1?K4)Gf?oc2O}uF0@(Vrl=7`RgrH)x^1$iHh@=cbR9D%)N<{r+(U!NV`v%<9;_kQV}dpeec8l?dEO`oI6 z^eIAyVVSm5JmC`q&)s)P!ch@EN)?GX`Z)7Xw7xz6_h?lRAlhK6?9bgDIToPSOx@q6 zI=MF1)=ZJcCDMn-@0zxtgSYR~Y;?T57QJbxl5P|=p@twcC9`HD`4Mwt2I6`A3jfey z6UKLj+FuswCWE+J8OYnvEARkkp=Mr+7$>Qc80Xf2nvIgBVOwnyj-KBTee=q8lppCG za5+_JgZ)8+N1XgPaf+SE+okHdB+8YKG1N~7tV%NOVK68otX5l{pG)MG2zf}FiWUx} z{&?D^5MmV=Py&}VEG(|6fXlN&sJJR#RFKqX3Qy>q83n*1Hj;+i^4eHtzv+Oj4_zQJ zA*7t8dLhp(%UK~PuLn1_8|2$E%g?1j*<4O9L4~>w-fQ7=H7eftI0N#j1N^09EJ?IG z=`HfPMzw}*gaNf0pH}`m|HSb5b)=1uazzOU2FtUzwtY^uCQQT5I}K|Ma3~=1)zjWH zFh`)Z9lu^*B!_C^Hgxqz9FF*7LPj%_X9xrzEKX$;rfH@38TQ$_zqnr@b;7STrKiMN%+c?Mg()YC zj-%g=$RrN3=A(MUeVTcsIDE;sv7;Zq3DKsutPw-~g=r#CFtWtjPi$EQ`;HdZDlaYWr z3WrMKQNt{raN&9iiRvyj)`x@Rq(wP8g@HvTI9_R}fcRfBMD5JOw{zD_@o z=WXra16u49ya*x5e|Bkda{o5%UH84>@)d$Nx8pydPdfWpeq=R&hsOAJhPOOEGh4jy zf{?g@J6z~UqtPLGGs_^)*|v-UT;R@RlikGpB;tDMu?Am*8jF;UPxg68JUS!yjCKEd z21xasUtJd~=qhKwS5ysaB8yO#okCZL{eD1wZOJ<=CidK_FCj@EM#4kK7)zUmJJ3O6 
z#i1NUsTl+z%JT-(0U-soxUdE7M1$}O-7QAa2O^aOb%@h73TiQSjnv5xbW`eDd40am z3Ct3TOHE{ZEYupmz2LV}dPdyrgLSO>@Yv{FnuVzPB;joGu$Cx@@O=N!==WKEMJ(g9 zgngN!Dr77napQzL@Z74x!ob`N#b5WlgU>+-~GJx8%zn9 zxhq|FxxsWXGU+;aS*^2Xn~5O2V6A#SjTpw_tFjWy_r!Nr>PLES#4<|2(|wz{G7k23 zUog+5rWES$zYU`Qj`I}m9j!!&e_<^KPZabD#2{Yo_atQ&laQSp#OYl-OJ+Bf=53fS z*Q@CY(Rmd~0oEKRdVbhIXzg>kIH&NF0zi1RY_@%B_o1p!sTGYLIpZY zB;lg0p_Bbe6Q87}n%}3aPh0RppYX}PX}bV35w41)Zrc*Q$e1{cu}D5W<|l-^Ap6 zzux)Sbn36#&0qo=!|+-d>d~MWUZMd-f!wrty7#A-?*c?s?+^Bp6~ngG9u17XD)u4k zOYV!2^}2;Ms807r7XN1{P?YLr22C(e@U#J5K)usZa>Ch;8JWGQ3h~cy@-sZY5p}tj zN1{;Lq*E`bJ2vK0^*_}pr+XH0GH;xJDdT5VN;Lcam$l;2G%@?XA9YEgWbx3`{9T?D z&3KKhH(Cv3^8Ed1<3eUJ6)g#L3qc`88h zEOr{ShWqy;K45Uz%eQ~k$ZlY%Fx&{8m)_<(_?epiEM@wo0E2^QUi|$1SBDI-qS(17 zYeQLc%)j^OFR`fJeMJn>wyjoG<$Ndcb0I&UqM(u82;vdAZ}UIJy$xh>Pd9#;>ThW} z0J9*!Rr5=;|4m2H1^B+OM8@*&-@@btBoL1FKQqhYN6jBgE-Wz>=e9W7ni7u_b;bJp zU~M>C2K+KjG9}srRjldLEeD8-^Kqsi8dap9X!&ssPWyd8kSkPxdeHCkQ#xlT;SB!LJSN2G^}hXi3+rTjU8Hlgr77<%{c6Y#PXE!BXnPQXO9?!IHt3xQLk+N_Q~F5riQ1-(TV z)2*T%7rUkn!;QJdVqYv{1CaiRw#&-CMWnDqN$`-7KZVIPBK`WI@=K|GX3dIk;AH97 z#IY-KUqlEOe6PI`hZFj6U5Qy7WZ9Hh2<)umxI%-~Wld_j$KW$vWl0Qa=n3fLLCqs{ z*{O$vC4acO0Tmc`|5vg2ejeiCZ%&qaOZ#dU>F?(GKWk(QV+geKIg*o^Bg1e71&B8h?A^LFDtz{`S42 z^M&hX==b!Hcs+2w$7XTuYxM3{T6GSF8+DVZz8A}su#(0dhMI)OsrsO{3o5Z=;IPF5 z6n@K3cidX6Yma9^5iC*DcyKwoGUHoaYtYU;sN}qE2vB@-5t?p*Vca3pU`c!T_149H z3U=QQ`m1OAwd_P!zWpAA$t~B*t^SO4h_seuJdvxTpceZT zTz8=C-gmZHYTtVS=uoE`k1qfdk2^x-s&2}8M9x;*GyzejJD4A%kptABIu3RhM2ZEO zoIgiy>j|` zyj=m(Y9+Cn7X0qLAtl1*eSRP%vy6h;e&2m>u~W9i$9sf8kEy1Q_cW`iE}pU`9Liuk zfwzAa-sJv{vRxQ0=@}3aXd0V=+HKCjhDZ)jWM)N{%J^Uj#(nLC`ZiZUX>EFV^o=7A6~8{ZcaLfGyQ=AaPrBEBH!N0NkBwZwHMA(b%LL2W zVxhmNTJ1ovqqv4FNtP}K)o%=~BP`uOm>7|NI;LqJ7xSL9V@ti@w6TqY2;u_rfwOl` zV0QRK(+w7>59mpVykJK=2&=h1Z@K26l?<;+(~kuYOVKCNr+W>nQQ_^~z$7yW&fEcN z`LN7jii#g#i#ec@A0~Jc01(2afP-uwP|lgD%4}0}IP?XdjO|I_8Vjvu1iW6I^*)|M z7C8M_tdPYP2%k$G?f9ca6HB4>JK`q^$4X~{a?*u`EO9`qJ{iowhi zmbjpIEP73o_^DmUFJf*DAe&tk3 
ze)~(l29firfg-rZ4?C3r#u3|Gg>(v!t2}#`G;5+f><9GnRW7~&8pGUUcZqm;Xwg-L z)30q08H=Ny8)dE*dyXh{CSOk(@+gc~Y@VO$_aLU{r@Y88{0>NE!A3#-Bp zBrYxQM-3G!5rS`Ryl^*6kv(?m-lXvA=YvloWZ;p!o5F?n#Q7&UvpX(rKYG<{zF3zE zlor4%X@Kz~dam^`6UQ%9}0JV>^K!dJIf7p%dtNY%9%IUY%PAT2jln~qBq+nlVBqjxYn^%v%r&XCvkA0SkhVJQKr<33DIb-oPP4irA zSJB=2M!WY#ECMYHbDTRF=jnEc50e)D9d?`5%UE?DhL@}FqiFA-j>%_Cjan*)`Mh1h zT5Huwt5FcnPrk|NvY{fdqKb*8fd>OqHC$hvZwFN3;9VPb zPn6j&G{bU|?b!9begrZh;3C)cOfq8YCJqtA9FOUIW^8K{k>HeXO}#jq?dvItMdWBviE<)K5^u(rUlIt&oQIhiVsU2T^5}}J%AW7~L<6TZmD~baIyXTuXVwFlj zn$#0~rGb-GDDjle!q(J$fey*pGXQ#G-tzGr07Rz{+WYJqE?2z|3O8SLRD}|j?}t5X zN!x42NsxVhe4AK!15Y;ue1ibL0{BcYAV;UwSE^QIDX2$3_bU4<^;$5=ul&2V_Dz^=D}$&-fk zP$fcGU!r*&#zf86rc>;`nXCn_#;vBT`mQw^Rz6=ZJj145a{`d;BuPa`UEdb+A*=rU^g_+(eHcL4B z_Dn6$<-|HX)}SvcTc+q9>e#65h&Fz~kN3JtArarz)o31*@eM5t$NIUdbE4j9Q49}Za?dYKy&lof=y=xD^IpaUaDir|cR zb;c&vIQBKUM%d5>UYZ~z*|sL}qq<$H%Xm3nYk$DAo3DeuI(CxB7j$lSF*X8q+$3z~ zx-T{Wj644QW2bCK6*l`Lm3W+cLRB*Q*|m0a(-s!BFXEYXx0;iAHukT89Eh8p=1~iP zR%_k#JlR{6kf;bnN3YiGD8^wC`eG@czyCC^sU8HMZjW6Vt6PbZDRMM?@ZgLknx0%|zi1zWn$Sw7d%3bi)?)q%#d5wE@T4a-sm+-lCjb>pXwCyT+!#t}l};KF`Egly%Q znzp9#Fn)pX(9N=bzdWh{j*W|tYZfaq(f&u;mV$}P3~%LLB@7p8SV^>?6t?bc`4r?zoyw7Z1|L%qzp z(FI}?H5_(_6_U!7&Z#}_L)a-s0VTL>ACDEbM@t`4kGOn%+We3|7DV14vY8#McV|6& zex*BlGe3Dp)=D4`p2D#hTx6t4=-vqzutaypr3Qq|1e##S~Fy5 zVQfJ7oaQjvG5)Dd&3fb)MRJm|O|x*JGfdZ=^8_}RLROK}(Mq#Y$PsHWB{*`Okm%d_ zc4d5OZDM{AwO_6R=)Gwd1?b?TS@3o(*EM+G@*LH(R=34kA{;zr7ON!WG|FhU0BIrA zjM-@krtlJVVA1XMvOhM=aT{#8_B?c9oZ&b*KY<9jRB8LF=t>JQv9O@w1@gVKX<}C z?$T+hRN;HNU%cD)JOrAP9OF}B6LW(UA8x%ZY`W4}j^~&2Dnne^##yXZ6EA~goHT7= zTA)s9!%rRWvB=p_#MT}saS8x+D}0vT|H5E#V#Ip^WXv2u+Oe=L$VQA)FhV@V?r^C1 zxngyFRpp(-1V$Z4vh1o9Jq{sB5do=akyP_Qv8+Yo52K z5xy5$N&?lW1r%FVgHaS@(RaX#$_{NDq=;6g_ch&5LN<22ddIP&fk;0aet!?qaHW)j zQ|jgKd$mRvthZ(H@@?o12-VO{AC<^^J9-5cftF#BgAQ&UH9C_HZMfTIMyb0D${0C8 z6uqo*i}{;MwmruwZmW&O3udvk-H($$JA>G#t={?{;OH-~t1P2Ih`Lttk%1-5haJqB zy3|2x&#A8Kq6#r0HA-`-)@3=&;1Yc+F)Da{fjDX|YVW#;+}7LawbWkFb 
zarMMPmFDrJxB8~b<;JGHzvuJ43W2@B-Y=mK&JNa7&yq_n+-6;$}fiO7DY@hX)qrB*j$Q zU-dkxvk`XX}Lt?+t?q=R8VGbh8Dn9$Mxpc!yeTan7fr{brtAhc`OF0HW#7JR}Y5?t}ljU zsBm$|^U#PR`0?66YFQhDPi{ldhP*%?Az5FMuvu0*45v4b2w-=RgHtwB;vI|Qxf+D4 z(;3%R{RJDr_wJ5+&$&!ggRMy3z|P>L&d{~#qks~0c(?11eb!C>klQ;AA<~SG`&%R~ zE{%GMI-2@8S7?`yAuZ_&#H){hV+U#DG$NSN z+i0AUvy5s?_+rm^#PaZy>72hfx@ve6BpTHi)Ja}ji{aXgzTh-NZnh^unxv1}R?LOI zamX{zUVC}i?-f$M4**FOS>O`vAZeb_%S@_vhDRee@kFd3P06+mQdt&)&sf2dZ>?Mz zd90r^UVfD*+XsL^zuRQ<&y3YOY$w0Fq}Br!o4UCu8`HY(s?LX*8R#8&~I z>izdS)}HIkD+qnIST(1oFO6)fB{?h1>`6K=$h6lAo51T={)`Di_voITH6Y)-p^fpj z=~F7dO9h_R2RuxVwS>ysi_G1pso&sbq_jg`9tXuI33 zaLkm8P=^-tn?5`eHfzO3j>|FOr6yFi$-(O=R)UZW5wxVH)D=(?+v6C^LeJ(z6%DD9 zQinWr40*XeJ>+jUtUQrm?*QY6A6gQpCi4%Nb`|KX$30ymi+3!_ygoBY4AR(W8gr}W zN>_i*(#okN1~0`isr{~&!>-#@TDxSjXEhILYG?>v(_r}sl!*c;@V}bA?0!fxFAGtp-nx&1|q3GD3{c>?U-#epN{P znxG)UL5UVr()4XW==zh#Y3y|(*66NU%q_0Mpv`5M@&&!RmP~K--G1(KaD#5Kn)&xDo^4to@a3kMZdv3UJI9 zs7CL~#};Eom6>-DP+a7C7?Bh_i%95+mY<#osKzFd=MmhCvQ(nY?rqHBOJlZhc@5;;QdN%#q2U4d+!IUT0^bG5fS5Dt;9H+Suf$gIM}0{o@-f}fMOwnJE^{;EH+Yzu9=cpv z+R*e*2_MWhWZVj2jh*c?Ks1~ViV2*@b$qN3vk)Cb8#bSJ>K zGBY-%3tw9Y+E%xrA0}>KVXm%%#0?p;h3@D_ljMIo&(wSL^jm#=<9YtdB}k!jF><>B zYx91;_F<=A@F+SiUjM`J+prGiTYBzuF1wdUwRYDoTfLRC7Y!I7hu!*JN*8P;^kX$B z=DdA6SI72v>#U(}u-3Xd$btjiTY*=ZccY884_Z5KC@*}7GxDqs4@bjZxAAZ_*<-tq z&3c-FSVXQZfkklFrV;f3Jszi4+`?yL5Xw^x@~6B-@MD^hb#y|z*0+qt`O6Yrhqb#wLz=W2>?w53R6JzQ<<#O}cr{{cR zzDomJm`yCa$3@?Zm!glLMm4un{XECGCT6I6*)3vu$egl+SC=VzqwjO;EW@C5^mt)v zt}a0VD~H7ROlzMF`}TF#@oY*k)naK&9Y&8StA}%glzOpAj~)geMrTqfl;g-;XG)9Z zt!73|Xa$86pno`PV~I$>x!WSN$p4LF%LTjFgx{{Kxl6@-_=i*#^?P=wY8i~UjgQnD z$JI;DY4u9C>U(bGZVHa=>9@#scNxSQ78!1_vm$UwbTb0yor{Bd$NBQDU&3v9P59y7 ze5nY2^JNU(5%#1ujD~$ixd~7T^@xx6@Io;ed3gM>E_TGJ#7CAaC5pshuX?9hg(V%9 z&p2TcLu(}HkPL_P9V-iosh!op(bVmRC!d=-dX!&LKF%&ANE^1e2R#mZz_~9y;d>(d zNMU1OKyW6-bwkRv**>L$z1zaEq^7mk;Ck2j@f7b5hEj2ZmooN*acIYK6EN9x`I_n4 zMTEivY@2LWox>Y%`Ue&q^kdpg2C=#X>=*%>6LEA8H&e0)fdA)0W9S%Rq^;BPr+8UO 
z{7wlEs${FqXBeQ4r?6jO`Kny)E7WwaK$vMr)HRKP`MzVen_Jbc#7MHgy};@kOeOHT(?Q&Py2`A>zoDn+Zs|g?XakpIGv$eG z+Hc}M4sF%h4hLs)+0D8m)|M5f`I^s)l*%)`zhO5S^@K7E5H~UKjQFs*sJ+&^OdZ62 zeM}>!cdgSJKR~B#)6i6xRJz&RR%qC-wI=O|W62!Hd|LQXv>hOHd*NETwk6G}t?yGc zMq2il%COWH>n8L5FCstOkXV zhwUnpn^-D`)Mj>IiMG2L%|n)E6^eQHX)4uxw5p|wk|_=a5iYh}o9v3bmyko9ob!^M z{Qf=Y_#J>D6R}y>rKUDpmB-iYh@ipr%aO=X3Y~Ar>>-_8>_^?fQD@5ee$+<3I4}_w zr7SFBpG^$W5;a#TLs2MyE>S?RAVqU=0T)sDMAY*}^pT%UddG*p!MC)j^ongvho408 zhV2%4JsM57Hl8z1#JqbP8kr-Ov4SR{=qz&;*JJ9MoJ(8 zJP|>HJw1$!v}44T{dWmyvg_S(t*@N7+=jbss}_@t7Y1EChTrlbS;C5RXeOgZlWJ1t z!#G%?8C$oy-n74CI=6fA9P1m6qElAw6^+`p{8NUkF(!jm8hk=-GjA3Y zsNH`EVXO=i9e2m7ubvUfjNP_;xq@LjWMG-^TwYUh?siSW& zwoO1M#v;v#A?e|lf{fTaqf@q(x1;|DhLrmt+TA?DSkRxmiKoynuI=F!;lOfpF(@+-ofPFi{6KW@0)Vc!B!{L&dSI<(NjNAf8o=`5!!!|!HY_V zIFObjErm8G5rs&TY#K)!H(YF?J_JMyz&4ib7N>Bnco?P zw~!p`XZp>vS863XmInY!IregFe4OMGzyh~QH7m^|T42!R8GV5E9R^e@9eV91^*;Rq z$_qZCC@79i7FOLty=&o&8bZjzEfe>QO(veXT72Y~&=??z6zE`ALh z7mfWm-R$i-lCMlZQfaQx$kJ9bNtLDHvc_XSFUVm#&1roCY;*7ZJAga>Wf8*wBA$cz zj)epe_0R~Iv`oSjZ5y4}jljn{w)}28DeAvDB*nxk5%mLnNKG5$SFxi60mR6Lo-hN$>} zM>_j7R@lbDj8+Sy#%Hsnv4k8|$otH>aeJ~n>yR_|wjt)P{luw+%5{l}Q=mGtKzgb6 zKwPvEiJX6MQ3XOZC{%C_$6vfwGCZ+buF;_7DcEvI!Osu1fDd(QWpsCl>K(y1Y|{i*3BO$}H@Trk{z;JxtaAt^7PSfO9Z5 zU0H>Vdbcb2jheuCB<#jb3?e3t)!-mb*SQp(F}Seh%0%k7tJUCQ;~-2Ck+o7)zAO9#yMIIz$bZjJIt5g@P1O6T6L@?&Y?wcfbUXGsn|S z90Tv8we0WkaE@;Hhlu^N0;sK|RfHbFrcYRAeh8?Fp!;=~iD7SMTWfc+&F# zjKrj*B$X?pFkKJzWs1TD5|^v>HM7?y=_8mq^CD?e287^&Q+-KV^=B z8Y>R$po%kWTWS=)x3{{!$7V9(l~Ug3ayMOLcpH5G+?2k07w7wn(1kqUe!@u)O2?jD zLGY8Gd)m(lU{h${fxNi`m|fQ0$kswCWNS9}tk732eL=GE93*Cdl~>;K(o#v6DWE%8z*sD$Mr-YeGz#{vwHjkEC;lBg z{hS^Hi|mI4*a3@NLW^V8waQUQw;KmkUZt9ZU~^w3hah4OM{wG|;WK>_HNPmpDn3hL zY=Ea#$;vqWXk#~Oite&_pB!rSv1@DZI^N*rs1;J#I7aX*V z`(u+zTeUxk7o^y=+hoJLLV~|B7=5FC5XxIeWvrg^E#&upXGlazo0r}Zat2se`!rNH zff6s_|aWo6)^R>D&8f9ppP?M<+ddV@+@*8v=0Es;?`D@P9d`&sg8r&4bCbco%ECFDje6w+;i>i*sL}{#Ti%I)!h(O%711z%P@J@CMjw_E>ecBsB>WI?o zTb}_qLd$MR5}DonJFnPRz^1zIiJJ;+swS+pnaVRkrR%H9k^M0Dm6@vLyqo5%64|O+ 
zI$mu&sUdam(|Paz_Pp8mfN)zyA*TP2(NvlbaL+ZkZ~!oc!s=L=*+N#L^qy-$_jecQ z2ji0uAtEj6v)>fR{xG^>q!-bO$YcR%KZJe&6ID@a6XGd*hRa zz~WKC9}gsdJo;l-A}=UPK>|zui9m6{#W`f81#tAV9zt`C%H}pw^PyaoO#Hg6qx{sV z7w!p_ydQGw{#-e5Rkb)K?S;ZWD?b~R-<^ky>|0LS)leCiy>4zxE+%&z9_`l&vQLP2 z|G@b3`U$}Lx{YLgtv@{g2x0p)8TGl0YDKKNjjnR52tspd@gUbFoO4{${Ixb-jOGD3 z!S>jC2>#smlmD)SS-eI|I^3A*UHeCYKn4aINVf+ctbH8+TsFbZ#KzmTdWh4j-s@Q6 z8eIRr@Z^sf)7`b;c9~YmLgv74M62%(9-9z8(}?7b=jn&PUvd{J4E(qd&bxgCfh5f^ zHha}h>;oOY=Xkb!|FLt(?>V|F;XK+gW+s$6DQLv-)Vra8gFYH$3E0@3M@t>1ys z3F&q1g#3kpH7cR4tOBeikQTAK$B2W8VAtw4E{g?S|B~DvbCd;ke~KsO+&^CWnP$Z! z=o-{)52ow>peqs>1$aM8z3GoFn*zj%IM8keDX?XE{MthZIjZ>Vt*qH`NM|jf&%=dq z9SQ7z$EPReKzv%A6lzb80+Vy@g03KruX3hL2HU{ zH2;ZxTrxL~{gGeU z&ufdKQ}-;CTuCyK1VKO4TNd)Ap{#@#r@74iw!N0ZyZ0o?3>#cFVJd}r#qP`N#d*wP zn*Ta*(P-K?xwYLce7HfPhxawAtyH(fi+TcIgb*3BNNCrhb;JK`*|DzopeD zJ!@@k-7Dqo{QKtfQ$XBLA0jXM#mb7dy{&etq;!I=ADT`n(3e@qDG&zMe>ev&Vvb<{ zlYIW`17mIht7c!v8U+OSftD&uMsK8!I$4RZ7H0*S*4mSN^(oyp8xy%<_M-0rL_Hy@$BvS8_jBCJ^_!T)(kaRNFa$Fow9S-rn$5W~u z@Ve+9TkzEz`fpBK_|E@G9biJYA>lkN~~k zx*IvvUTgFKrM}^vm)`An0uuI4TTg+r$3H9dGXd=7ac}f6uoWa|G8U@=cW3c=y0COQ zet++6h(KF%;VxN*_G)Ou5ziHyky@#>9|UgjcLj*b$%Vxp5mrvSIOK-XoutopKZ!B;IyoTBL+LHp zag`&RnB>;2AcWAF_mKYB`or}?Mv30#r-D)Zys*l^AmOS=fIr!jBnr?j`=aP%kkrc_ z`2p+FmZ7btkvnRo_RYlRYG?=pNgc4^R(%;e_;|*=G^(0$M&`z~slR*3|1~K=$bX)5 zMJ@vfab`c3+RG8Rz9uhhrXHdBI`=8TIy)(BnOjy+M@)P&|>um<$SRQ!KVy>Y&QOMJEtB?JiDN#Iwj>lMa zO#yI5O%B7fx8NpbpPcpS)HRzd5l9ZWGdX~~3X0?enVoOfu;Br494i^v6EsPUsj0AQ zX`ZIcz*RZEOxkomreC+gxjQNN<!Qx>wpRI?AS6Ac z+$;B0dl$$#b-60LsE4Poosr%@b^v zByAkT@h2B>W(BBnuZWxOlX5z|@wg1DwYdXbNagkpS|6(n4!d3`ES(^ZYt>oEmWelg z!Zo#~BbLk}Qlww-6-e!ffBf<+z@G>BBW;~W1DK}9u>REKo_OmMC*y%r;BNU%Xs)qH zWm^(U^TBP<{`=0Ez|4QB8SU}rXcQW(X$3*MWsvJp#1}WtG}XhA3#h>xcV2l?Gxo{djDr110M5dije_iL}qHNBIAKN zdOD3CK*T=NUF-TEY*WySE;6A@jQ<|@|M41%J4pM71P|gNkAR}V2CYvP)>Lnhe3k8z zrK*kRNlicb!0|_n`!ln6oH2mEju7E6>wC4@K9l@H$(}wuegaNkva~6Tty(Hd^E((5 zBZF~PzbHPC@bRJUzmQMnW-9)oGe2(=qMMDcVv0FMQ2JW=(h$l|px)F(^ZciO&$y4w 
zabK`x(jpUv8{X2AX$KIIZk;ueNvheFz&HYG{wuT< zL;x0kc=(hFh)9B{0NsTfxR<+cv{by3V$HQ&h03@`C4YWl2(|je`+@$qXjSe5;YoUf z$r6Y0sm8>Db(q%>p547Y?6-jf_d*bWzq}K_ zpPT07lwEVLUfVVcY6@^nG2kFplPLRy$D+?k{M zynUVDRcCeuO)ojyTTYg+fxp#(c`hUK{pT?rIJXe}gLaKc!;d9eR1FLMALicrFAA+| z9~J}!5d@@>4rv4=rIGHAAw(LaV<`QHhyJ0TjqjzAe znzOdH*6mMmxAnu5zdh?czdUd#=p|#Ez)OIX%ibOlX~DE(YJpd4a4x4bagxva+IvL+qw;S2Pi36M+`K@Ya9l zna7{#8fpYV<7J4ppYqfE?B&0g-H!?JLFve3xxhlLpd%nq7BGVB=?&s_s@9wA&W~e{ z(8)}>yfIEzf{SIyq&%P#S|08bleLq2%yj+#I^`mIWbV0l^|=Nu7c+&Kr3G#y1c<m$a#Er$qVn3No~*ldHCc{+bLN3_Go@s=C^+eQSASz*CgZL4zjcG zaHB!5y|Kcmn`YI9d8ZdX)x=Ggk4M0)?=NpN;wFH1qx1Ti{F6qg)FM7YosAU{aH_OO z9>O-{M#-9uL)We!~HdYl_nGSA>4?#^|H6<*J4Z4HgNH! z0;Xup4fY3pl&1mVdLnC_4)13!jy6+?xyj_gbn=Vu0!r7g_!) zz)Er2hsR@pM>L(8!epY=UE+g5z)~Sq@!0sr^e58-Dsda9zh>}NFW+QezaGEd_P+*t z&+itRD-VdQ{}c}Yfe9_C5eqIOJ*Mz~ z9#Q-Z0;>8i*V)Ja;E8`_7FnMFjATnqrpWbo*#56ic`1p^Vmg86mChrP6^`C;uwboe zBJTQhm}%lNvQl{I_1~`N9x^5Z6VCpw2HY{gBfv0g3Ia9UUudcxbzD4_%QOZtumBuy zRP9n_KE-6tE|;D0UZ0pRKmEV|+dZrTA7c=)-xn=Ui|jaelAAWaO;l1m2byomBLQTyoC5oWpX%SCS?j|AP;w`Q%nW!|8e z+n%Tmm@)q?P$1t^hZ-+du!2^!#66z;zg|enUP2`wta+9gI*VcByuM=(#>Q#S(o3!l zHic5y!9i;>|3QPX`EJA|ofbXnKNbVdE*l_4^0N6_Bk)+--?P%I1Li%3_OM1T)0_w8 z7QRa68E34pC~7#l6!KS2%y8!o39W1!O7<_-x|)|02BF&`;TS*UcGyM4*(`PC;EmF zu-0WV+KJnI=La&{8n*l)^#elHtH(ECii%ANZB z=5PNYYJ#w+$2;Tbh(ze;NTM)j?&1X}r%_u*`e3nkIZywcSNzSOr(GcYXp|ypV{p{o zv5>s6CHblY-+ZKqd9d%!z_!)%9J%_>2gA>1OoxqEQ$%*eLAXlpm)nJdw)G1M4qq+^ zc>xh2_kf;|gbluiTMcHrr+>{p|M#nOQ%B#vp4QP*0l@A(zh@}qo_{*SaR>eJlWc4v ztl&qe1%qsa$rOgC(CWKL(&Q%xuU^G(&YQS#^JX}I449uhHf7kQ?6;J)4$FC`KLyxx?&_P0=c=7C=`)ih(K`jc`O6Hb}$Pm_sx?a~Ye zr@NSKL9Jbv2AY90WwBG0t@gjB%6{Adknc6Ml`-gEYVojg@6>6*++FfQc;BQ552b9S$sx3WVD!c8Zh-{jqWeMs@HEjd-K^*b4+%_8u) z;UeSk@6^5jwnPr;qrKij7A0d4nXKp`O2H4^XzlEjD`w^WuT76R4tzOguv`A%e2nkrg`-s7qx#um|p_%=mge(I(gf&>_dYfoe;goreZO z*o$5v=}rF}n7fa7{B~Xa7Kr)v^W1?sO$58`C1_QZ!k2?d2Y{NDEzgYUQ<@*YO3LZ0 zKO}j#5xCWov zLe>Y0@k8h8?d<_pVB>SqZ$$FGKsvZET*gCZy*Hl2vzyo=39#Fy6fy*7o&n{j4Zymr 
z2V?|O;wIH)uq50Fw%YbYN#YmuV8?#--4L26!BLXtP%rSU6alVNy3A@{+1v?ubT0va z4SoUi0|)-}P}oQOuAMTRBcQrb0%AlqznR1{HuL-s7F?aZVwqAwj{V=DBPxvY`*=eF z1E8&0BWUjgis~=i^sa;A1yH(oCP`1Y^nxw|+Q*uhl6Xt9ht5Rq3=M6k-CG7Q49u8i z9#{hjF&fD@`ujQ=Ivz&re@=vQedviRNiv>2m(e1JVCHH8B`~1@L}Q?(k%Z|6C>kXJ zWyG;}bu;E0vC6`goVU%!q@RardF7um3#F@kG0aC@YOGq{f1R6F1KGgxyIi~b56$XK4@wYKKkLJm``M`puEFO`NmpO44Lct_tZTDf8Sir#X_ShgWCyUJ~ zLzfXr^OloIu1w!s_zc+6!Ls^A>y{s2yxuWJ>;b#sBJC$Ng1-}nAnf)r2pabx@Q-xQ zvwmP2;1j`~77TqVg-DeO>*mF$C6?(}8G4>EJ5QrR^G(^|>$=tNF655d?ISUR(MezR zKrhpRHnN=W5@OH~(%il&Ef;+^8Pr|?)R;Z}`L6*FQ-My|5|-p5U_=fkTVz;;5ZYjs zjRMhDI|vxsZ#GsE*rt88R0kr@WQC^x;nr0Hd-HF9I3UyvfFCx0(ZM9EOb6@$v&M~~ zR{gc12_awsK6765*whVxbmmWCghM6@C(;lc!|@eO@OEtz+7~aMK{A#>ZI!p8@8L7U z#VJjV z&xA{U~t9pw7iXNAS2?k*&$KQL1&5#0A z1;V6ckbtZEg68deHaL*!<0>;Up_pDvS@P9uz7^8V>E0O{7@?2|gVzD1%jz&M7}K!J z2H(=+w{=mqFb8erCw|GQ4p}Bj&!8Kbpu(HopjUXMPsaKO8r0=vUda5PiE*VVoKm%ReMeIrn>m zWoCztqZv+)`wvzo6Adyu6k|0!P&M!&VOCD)k9!Bt>^E#T0yePOQ#z%KUICWR2ytULQ{IcVeM}h2mao_@4=e4+g`sB@cWr z))Nb~%MGQ%IB@zhKQz|_Jy=iYu}v|snq{5WwA5Yucl;(9?L{H)Q{ok{j8art%I8~S z0+-=f*^TIyGG3sZrqQl|?I(;0{GHCq$pU}RXr)dF^6FU8o=CC?pZ`%NAfeHUfoxY+ zB@fI6{#~vA$IUXxgL21ghLr*IO8@smvLH26^rtra-+u-6qzg1fSo`h782`@`ux0!~ z-GGb7c>kZu^S{sSivbVlx?#xuk7V|*heUBfJ{p@PL;O#M`rl8dk^mm)uY#yN`oFIg zgAmz3k?i}oruffu!H^OF9}*V))&B2mMWcWcm6k#6!QVFe_dmqZfd{I75ySyc{t#W13;-Xo&z%43uV8_)SP=bGv-&JxEq}+U|2&sgEK9OHqa}tRfds9O&F4T-N0g2!3@Hak!QTI> z(ELV!Q8-oj72qe)Xnw!%ds7Fgp`oN;BN<5OODh(2S~2~#>Mt3P1Y0me6FaDeYtX*E zmx*GYs@WQuEs&v1l%6tRhOebizxum${U{GKcGwu#fDaExnGf)Y6RoRZxbR*jX}oT@ z42+23^9(xrR)zsTq>L_!T0ORJ2v_h((=Rwar1U5FuU#D?>?-5NUkkg42zlRFu+%}r zTSK4ED4(2gv`+j`gDJD12s(dzf~lL>6LmDhD5Rdk-t!LxWCJV-X8aXIk?4m=XAa~; zxRFmNdFW+EhZZOtxpAp!C!bzRlEUhf{(K8zA|Uk z=YNt<=?)4nE@nbR)#)iECfj(fS1UdB3i3Ao5eopDjsO&-NRD7?CY4XQe_MX>&D)c<~Hm3v4+ z?oHT>&>lNxe&3~a6AV9E^tdtp1bM=ILX&g)-iWII9)t-wP}%>5v}O6BnRGTL$x%LlzEq8*a0dqr46+wgjOw-)@QzR0i-g*s zv3rCHrcbvujcxl0Vm|7m7!RFy(?&jMuoc5XRzytTsqep^#qtwG-->FZn6GR*IiuT+ 
ztiAgdt8DZ1+f2mvTwy(NW0$$>!^OLU)-%6kJ}WsUdRQ9gvx+6dFWz<*J2h4tW%WzW zA#P2BlaM4sA+Lfi$zHSKtKPy4`TkT{ws``x&=M1Ucu`$ZMuR|iJaYl7sz8E2w7}<6 zaTC#^>N>pf#)fj9-S554<*_bcK(7;6O@*%E^^FX>?m0rXw@=gm>>i(vUB5RPih`~) zC{fAs$M5wyn>Ag|YyGiX>b2fondga3qd4eiT4ZOBO*`b{Pwa_bt@AN_zPr0sC>>9z zG*yY=;}PoY+Gs26r0Uw_G(TD1Sw5HFWJ}L4vY4pZY-CvJ#55Pb*>}3@eASx9UM>jS zeU>BePCN*F00`YU_>QM{>`yOA?aBLq#@Q*a)uCzPf}PjtFLR)?li^w6^yw+R(n}D( zoI}ht_vE-tS0mJV?4Gj}o`VDgzBcYQe~xZ`7@X zj_s#Wj$&48Z=D_~TltP=u39>8CDWb6@6Ocz&^-MPTV1$y{tPW&8v?8L-4}3rUO}E= zpg$f^Gn%tva7$ZX%zW*qYTk zms9v0yQ*u?1P4q%+tzWNSp)=QOMPT~+v7`Llps~!#G>J0rwCnWCD87}94yo!>%7$D zJm`KG*1)Zy(xg9keFF;WWPsk0+T!yNS2?CUT4<6ajgxp_Z|aF+1QgvN&n`k0|fsC{o7=u(T9V{|=EoP(4 zv*W#i3q*X%?-g`>iPL)ybVBbk;5KsG`@+S1drf7dcpW{imA$v+0>@(~rAup!>c1*W zAf9ClP4yE$wu@>i(rRM#tIY>TXV?3VYn9~KN^^I#9QHm(?&~x@pCHgK?;DyV=w&>2 zeV%X9z*_u#-mZ($bgWV-XaDY<6Xr~UzCianSv_lvvzb7oPa6ZPx6dm$7`84fR(2-#ge+J~8#D690xSXVpa2Yq5%X2uu6-gC3yb^XS<%kA4%`6-@fa*T;C zkn6*7n1b!+w9Ngr`AK?$Kbg!+_{)*CmS1eB_|nLaHrQ7?VoQu5TQKC0y75XxdL5?@#nkmhnxnlG)$QEdZ_9`e%(n zGuB%hRsvGJ+^-=W0>yw>Bdgcr070R)TFkc&bBPjoo->r*(A?<+A0o3DFc)a+x@x~U ztK3X7J*gi~-?S@nLl87aaf=v~%k5sMicpP3e7j>D>fJH_^|1GXm+JXuof1!C+S-l4 zLMz}NomlVsgXMj`X}bDdmPg;5%GU8eCJgpadXM$B^puwG?)1f+_Z;rz2)q0hOt8(6 zl=!;v*vT&J*yR#l*HC67A@Q!W^d(f=5sXK0r`ijA)B3XN@G%sl-{G;JH7#~4n=5NW zL%md+jb6Q-+2zTeiIDyB?zi3`__} zE@E~%4ltG7Jzd8fL|Wn}9AfO-(&HLAw&|-*v?TThzb&X7Kh$#qvNB4syK~UJacpv9 zxAj&sK}*Y}AfKBZaz2d=pX9Z}FRx&c#_*y>P5W$HRZ^}$=u-xz`q|(d>UwHZAE?zy zHeZ#t)ZNhR-VTTAa)@0v{X8{Dy)H9#x&9XW17&J)?M-&kzIoI_Fuc@l_pngX>7F0G zLVKHVt?!ZudCkkZMW><}CY_&hj?0-S`e{*0jwnGRO7|$- zAlu^%bDR46zfbbZ{<=o}ZRig4QMkQ^6<#{W36*vPKmk=F9# z{Hl)+)N1KUP3n#yo!4YJ;_bn_b0=80m&Ye>q*$+;~IPpBPE|14<6fuUa=Ca>kjh`44T11Uk7CUghF)!I)y@Pn5p687|o*AzSTZ^et!%vyzXC4636`jFi)!fsyU&KnTYP zPzw`dU3-++V3CgN8n7jHfguej{C9$nG21BQ>n4$1NaRUQ5f~WYX(Qb)^;XilLIlEP z^tjrQ64LOi?I$oFKqskW{=#NhlSfY0WhL{8f`A>yUjOcblX-K=A;->~SsV5-PTiaD zyfZ(p;7~)E@7n~29GhH^f-U-P(igB&^a}|J0>Lw;qa5=D>tQ7(Iv_|5eR}IrqA&9D 
zHCWDY>o9j4wlacmR-gqaM3~;AEpWNnQtm8#bqg=L`R!O-N~!o&8EW;P9IN=V=y}>% zxn%xDk9FIMeujgd#F1*t{=q)zLGI~8%CcIwcO)A)xAOrp&wG6w=HPQTZ4WJQe;#YU zuzVO}7dFOhJvZ!X0o>M~U9A3##!Dw3Z+8(ELN@iDN3#8r@m9>C=j{4W9a7`s2N<4ZDGmU=xgsJ*~ATnW<7JiEgdmk;kiR{ zjJm@iEL`8IZ`D+vTsv~Eu_`^SKex=kWNfd=$=f4wqi>WaHI1D@TIrB2a97hiF470i zW*?U}+F`aqQ+VCl%-lZ#;*VC)jvtMq2Pt$F)MBL`X&tsBsr$2-1c%{VaTjF9+ z9_qEZa5h-~^!dz>$5vZO{hNu|h)D+(=z;)NBhMX|Q{2h19X?BAO@94WjOo1V9?aNw zp;aDghTjPI!cyCors0r}#THTf=0M(qv_xln&FCzdlYhavmnR|ZnC+}i!#~pTs+?r* zaHS}5ux6Xf-^K%@TJZ&@GJD>_l1VLM3=#M>T~E&i3K1dE-6+_`tQ&?fU5=#VJcY^g z)w<+c2Dl4F;Y!D$(vWTE;caCbg=T_$6eJqY0Rs#+btaq)NmFj{Y7gb%q^c8tZsNFi(9j`v4dV)v#vZ<* zf97)A#TrAck|0%1QkW{%w`Cv%g-)|A)ndq>M?`sbqV=0K z&aA~8*}EC*8kILV`DDCXH;cunr&C{WT)%Ft$^6n>k>>l%PQJnJd8#~{9r&w$%WUv2 z$eM2|^z&;ZJ(ZFL2E)C#@#mg-gpU)n7;B-`_e*u3|FB?nywkZ_Hqp+b$YdSLuM869 z#&wtyOWRehiW!^SON{w6RvYK4Mu<`z@xhl2ZUdj4mGPofK}VS^bx=2-Z6pn7HLj;0 zzga%Br?eT}IYq26%O=HUjPthL?&isvbN(w`qoE}z_^t5bicQEGd_3(&=4x!X_+i84 zbs2My?G1Q}c4O?x`t^H6)|3LWrrbLC8pw#G;NQh*NFulVwK%oY z4C(Pn^%v|)4m+-H9UFx6&E%i-JA*h*gDre_W3tz@J08VOcb|A}zkYpq&>!9`q4494 zDg&_TXbL~_g{Cmpk7Wm3=m~|wk}yj z6Otw?MDwhl-NMf5PK-~Rto7Y~%xA!Nq>A32MKe+d88Y>{VDr${Iv2C-=lrw+sdnPJ z*~9e3g2}6z`gydt{KBa=*)7Z?4(azxt;TG;?pyc$ZE&%c+h}DZYuE z|2DZ2+67JGltCWi7;RS{eh;(PM95|`?KeXGc_~9@6$9g6#uWS8xyGx^oOjJI3oV$K zx80#_zo(EKE}hcp6;ls^H8k1X?0(XNh|Z`wWf>hq$(N>~bZ*+>8mU~htfxE8y0HyfThbE;?+de#sgjFDmER!KR zgJnM%A!eU=;w&LP?rlfZn~4&qlaAt2b}j-X<6h&A+oj@zc-GbI{G!sEh(puZxnONc z-@p5@=+Qo>s(Iy06D>|cTfbfqtrD9`j*USij*qR1lZl6l5qe|a8Itqlckk1y>g!Ac z97()82I`i8Ep@AHzrdJiCkSqgD4MM3-O&Q??)>}BA$~6BeMa@kTknlu{9fj67lWlO zeTH84pFHgMJu((^>ojpb`io+}kEWN76FerD6vZiGLH_h?8$Vkg#m%uSrXZGlJo~$A z1ioK04dqK(zTZaSGBH|Z4bNs-Hj}Lii7Sg!83Un`!n|!CfUyqwxJTJPZ`1_ zPVri{6Xq~W+Ugb4SISldYyIi0v_eJQaIjMhwiM518mwZ8mu9UEJR9BZoQ8CK8WK2PG_l77C-y)yO;mO8>)(YFE`X9PKGID2i*A65m2zn&$ zkvQTTWD|c{T_?EPe;Osy9f{j=JSva6s#&RTty%bX-KLQ``Xx$1eQk2f>Ec{Xq%90G zktL5c!`@(rKjM$?7e_-WsASo9b8xsF$E#jajz&xF_nN6Ci++G0v6$4pbr6}~cTu$f 
z{)OXDmNhEAA1&oQ%&#it@KXUu`k0c?O!(1!39S&i>$m^|vWEoaqZ@8s-W1R|d5%o7 zQzC|=g7P5pUR>VsONVFN4<)noQE-%-_R9nEWrUx^+B1=Qm;!0ie(LkWZG&YfwF)x|?w8|G6(yFV_i6%H!m#_{oJ*yxRjH`V0TsCgqvNrP52!a&2WmYR(=j)bJ@y|6c=p;F!% zHcYJ^cJG+_q05@KxBA?RvIgpCaTHwsQD%ksq3kb}BWc%{tAnR|>^t6zcs_6R)-S2= zv2t_sBndsks0LErHXGV7LUvtqoo~g)14$joF7YhNM`|cl;i486^#M0G+k_zBr@LGV8+l9x))LCH1gwWdU7f|U?3D-OQ1G_^-eNnQaDA%>N3q-V1a?r*f zAJglCbG~z+`HDqx)rH<^H~>+J`t}f zYA12nGRczQAxToAAX%dMG+FklRfFz)^38Ll-=~Y@HdgW%4ruP4x;RD1$e%Pk^V4S; z(Qrwak?%{~({tMH6{64?x~u`OY>JN*cnUOsXKL!9NkcHZl4JAQvRO9CED|(PiG@WC z)B@&T@J&PWck=RIv+}$fNIu$RgaX7DVPrw@>DCUhHxrh&Sa+|GH2Ro1$bqs}FgI zFbhr<(_p2Vbl28R298u&a}A{98*wAtiXci#D(T?2Ppf+p!wxMdCd-|FtjnRy;1Ld# zxPnls-0e`&6C9c5(K#p*CfRb??elCpQ*+FYgw=^>dZsZaAhCI)XwM1(GAr+Yf@dGZ7I4cU=z=Q%uey|7Bpr*u304DKlIah$Equdy`T~9c_j3TBgjEhWFk47OQFP*e}m* zGkfyZ#@CZ1cP|q{sOc7kZVwXf(85Tp13vsz9`GCOzxE#TP{y4q;L!`Ej#fg+dWu|0 z6vdZ#>{2IZQ-+pDhb@3=fF>frB7qWu=Q97K+IQR*LowOwFUJ#+-3ozry$`ab`lc@WIMYu=* z8l1@FnQHU#7Py$WfK^}Y?yYqVGNZWY+AO7~oGMpMaj2*@&U6X<3;*Z%LUrtZBJs#l zoj)*!+#=8OQcHHK}5`Dw{W7tBymJd`!k6+_kY9Kpm|uXZs!<7|oxqlmhpbgnT{;*^z$ z#}TSjaFTl=YoFfsQXM2YXQPUcIYfvyhKqhbqqd^z>S_$f4eEGhJzk{q1I}ArF-jHk z>@7ksKA!W|g7GB}&op#oc2=$4y!VW{@*g*GgLZ8LZF{VUlb5OJ+h2nkDk^|2lQ)Xt z(qXClcr%#PzS43i6B@@B8XI{~468Lg4mo(ZtXGGgB_7V^h?yu8cu|Vp$F{}BWl&@K zg;>zCg8#aLUn|<}pk&e`pzv3Y!nTZlE%tsY`;vfI+#I+tGR3$vn0N(5YKXwkCTL;k z@nE(E6{DpoXvm(v!D4-Q=aFC^x+%ZD{A27c)05}r%fO+f6A{8=ic3ikL%5UKhNZ{G zvDIk{YHd@3RkJfk=y;Vrt31wkT;$=;C_@W(F|bO+>7?-c#@y29P;!T3nva6w7PvE# zD?NuJrR8_>u=Tl?|K92RG4>WtT6OXDBNKzhUmO^n4UQ&*eD+@{i3g8Na_`21J}@*A z5jb3!Q8Bb$fII!-Ca|-EvcPpEw;rz&-N5XO7W&3wLvDLcG#F`T)4GA3gM{29s+>Xa zDB$3I$*>fj*H*gc_Vpt2YkZ+)AsZP2X0EsVXG^(v_y{NXo%F95B0)$c zYZz+65=$F_k7UD*|J4^vdAh-jICNCbRj~MgbcZ?O%EOK=`$~(d7&aG81(iOzSJm@a z+fOeR9iMc+_olS#P^}bC-gvXj7%tC{Ov>}F;;N8FBMeucNu;$YA>nqv5#Yl$y1zVV z8jSn0Lx`|CQkA^|;43yQa^+qWn*$h&HCC}Ax_3{okyMkmxReENiC(SrH%+O$Ldhb| z_K;goyMI6G*>J}0dq0lw0Ac?CXIQ^bv-XfCF>JvrZIWl=-SH(u0W+Gx1k8n17Gz*3 
z2(P6EzjZ7Rlex1=iIGOZzK8uy#mzA_{3#J#ihygerg#0jnV5^u)Y{wY`av0dM)L0u zBpue%seb10<@^e6Y4b|7uZ(YnUT8UILIZ0`GXv_J5RMEWKs*A$Bd$$2sJh@|N=hV{= z7YeQZ;M`NjT4cRpS$ejV9lS$DS%YPN-~Jpg|7K+YVE<<}AjuD(ocJ}pYs zj7!vu@Vb7au0uS}p5AoD)|nCJMJ5Ja`SxkDi(|jqN#q9*fT2HPixF4GmA~RvVne3B z$HkkjI;oc1$NgNI+Po9=Z<#m8%FefI~BE%_KE-Ox!StfKOih%QK<{|qoF*AOzPb0 zr-N$xpnme}5#RjHIVFglM0UyX7Y2lCl{Ch z$wbZ}uH6FNP6nrJ9NABZUP*X3#g_>={G*VHQsBF=3KM|;TFu4ZmwQK;MI(~|FaiH`gpnMU&= znM?G%)<@spxtS@0kh)WKPlF&{87el@pXopCB9@DBZ$eY;YlpIBeI3RK*bU5i?2hND zT3T&SONmkQ)h{9Ym;$uQyzH`Jr2V|#n^r|Ra*Uwc6GBm`ZHr6Q4b(&(>VGoYpXNNv zSGR0Sf=UrhHfzTjJ(g~^X{E?ssIWNkbVLcn`sC5`+sFa`mU${{GfNw!Mp-dvv_5%> zLw@&BL$Jh?-_aTQ+8h3SR88}NTMpNsN6TX9A(KK5TBQS?on`~>Qw(+d2g8um-&{MP z4%phuH$e51kTX-z%C{+}k`{480D^TC0Cy;kfh zG%DwBI?8V$!U4~u;6%-V7biC`5!x137Pk1J)S7{+bNT*Z4hL>7*&sOaT7C~93 zO@QhfOJ1yngkt$c82Z5=q^Y(gR5S)hre;(=q@PL-XFwc(sn17=A?jyn^x%q;PKtEo zuk<pLk8rNUzz4pcJN~C;L@#<23 zfrGlT>TqYG1mi(11H4DmJ$LQPSzO4ymxCkA5sL@Ym1B{|JiOeq196M1CVd~71PRx1 z!F3lA@{z-rq6JJ17Y=NXsuUm$<`pZ3%VX}A90o|Y3PQPu=7+=B%@50mpK=JQ2O!Q1 z0m3E1?vc^vHBH%IX!FSBb_V@%YX9j_!n7ewJpR@=OFw|)>i(puzfSVa)VfGV3)*>A z1UAP2K0PFyQN=edAWPD5(e?Hr-Iciax^k-#U#8R6SJeFa=&iID;z1>*vjG&ZvT1mg zyhy7xe2Et%RUco%h-dKWxIXO_*opPAA0yO}j@Dw8XRVlwUD*qMzb#sEKe8%(VD876 zo$Z9y(82$HPvQxC#Tfoq>{!w6!aHGD-`FGH`si@hat8Sk0cw_18m!Ec2P@6*(PkIFQAW$; zYYRL|{L}cc*OgcCM<>obULy9#M3RIyT+RxisH%2d`Es{}O-7?#DJ*wEUk=-nyzq1T zpP`jEXgT7k``~o=OXWh{o!ih^+*c#_XI~#kqgdZMzkl!5q|fw?^9;ZAI@DyDUI<=`zd5v3aq7U%fTkX`i& zd~>j2vT|}T*(T7MQ$KEDB%?%dDJ?D$dD>b}(m{;UTn}k~z-|jioS7M|DT9!+CyH}n zFPs6Ih15kT5YpYXPMn`{fSD*O6&)J%mImInpXgFR`yCHmR^Hiroz={tx z@dM1b^bqU7*(lZMj((ws6fRkJT2~a=X?xE`$pb4F-!I+NF&3tKRv8dUZ=4mhytjgh z96ZQQve(~ekoRI*?hE(#>fK(oO@LJ~n+0}_`+X4ChfzPaJ-QYmRIc_|u4}OxE654^ zAy(Lh3>);3NSMbcTcN@oukRHC6@KmOd$Su}g6BU2u2|JHpZJj0?{#E)3~72_L_PCE zZU)LlWGAT6Nn+}M$%Q&>og_iH{i(Ja-H zKFIkUngfZ9*`4%rPJFO;w;)%Msde+?;%=Q7j&p~}zMr0NCvps}m~%%U%%)F%lrt zZ!t?m5X#t`FBS3qX%KA~Qi}L;M|3-!b8)uKOw~Prpp;3P5|)<(d4#Fz|EXVfAgFHX 
zZioB_rZyGd?es-(ZrabK+tM__nA^uSdQUv@gA#9ZdROVf3&wcW2r+MPKGc0>uq@$l zx69wy2ve0WVEEqg*)C!pmHy#M8I^~4+cS4tnF}IV(r(xYI@AKsXr0ktXUQF43T`bA zEPGFC)S53>@<8=yO%tt?&)cx&?qPvmmOCJm(%}+=*(^m9gKkHQs zYDp3)5rOF+(&SF&GowGX_Qbv^`ZVUk<*#tp4ab}jaHI;si^-qxpRi$lEQ3k88BFUE zP49jcA1_N!e}0=;3@sV+{9`|q%X?BOE^$_r3=c$@I~q#PL$g(3Uk>NO$fn!9RFQAO zSQ1QVcLw1Wgn1K<*d;RKtw}3O(j@8b0DqRju>2$P82jr4O4j8z;U9n}+;m8yZIv|k7$ufIhOw8QD za4TC~KPS;W(HByprgOOHj?y1?zWkC)r(=`82r3BKpRiUEstEDv*YxI4DAR|JKKgv9 zDBn_r&8}B9K7>vt0-N|rqFYYctaFL+R@H6c$a8`neO-L&_acHj^Yb%p_A`?NK?tV6 zb7j@wE|Y-66{dD45f7OUk)QdeV^iS+x_PozHfhuOq(fhWbVgU#^3Rx%L)+UC-&(#p z&!pYy<6N{(a?mnX^Y%=ym{hb5>ZB`aUeJBxW;XE-madfnZdvDpSm&u?L6tM6l55Y7 zDxD@NmJxjSu6Y>l=JYGsnxEofLcUCu6svp*@-CbH7u@Y#p zEmZO3CYtl5B5EvY=+<;!zfva2aExCHgBjT0azOLo*KwGWCCJcWKEJLCY*@B>nfU=!?v>y~<7Q6%s; zhyTXXST3s<>t|}^C}e5>p_@09z8SWhe)34i1(#%ytK3icdXLcaQKOckz+^KhrL!LT zePe3Th{BSkyy$D!ghXtj+#Rm+dTdBr$wE(v|ZXMPeJx=IEo*?^RESLfa=u0 z_%+Gj$6qrjqZxO4GJJ^QJi_=8a-ZP%F)Np3fNf`c#KqM+*%~l!;V+&xF|xpLKDk4# z%_j}_+FC|++ikD=qORbqeluTYGjUtt6nMrNGUaL&`6{9)<(j7yQ*5eV)hF9k`;XZ- zLJ1=;FE4s`D{Ljzv2&%Z9s)K)YaUN>rd1)`z4(wHnit$mA3QUuRg4kc4LQg*-^*jk zdkSw@;?>-3SoND8Ip~Wa852(xFSXhO?zee_f#5gE=Q7!@`?uf3 zr_HS&9Bu1j65Nj)z@U{vAF>g(gh~IrS{L1>s}o!C)L;-Z>+o{yhZ2^B_V?&Za)>Eb zYx*kir! zI7PA|`EgOwRFR9ond6SO(khY@c-EDQf)3t8DO!KG;rCW@M`Is{BS^KYg}4gplM(H! 
zD=9C1suEo-wWET&x3zzkJ3|PIi8Nm*m2^NXi=eogMbZ@s!Wg`&;V9wf)Y#N7EQ06_ z-gOKW){BYs5Z4J%jmz^T+)|gedNg7km~jX|ad=vniXP<<1$~Tt7RWALR=Ct;GT-1h zX>&ioX))lpN%r$I-^I$2Q$$WdSt(aO$1hm*CR-N(QTB$a+N_nb?{qs{Zk*j$8Jb^k z29A0%HTir9>g`tbi8_}{+@7uKWP2BMNW%M~ss^Y>yrNFz4y$=FX#;B+y51_();oKh zUDCj}ee(zVC*&I@VZOF_2A-jnvr8AWOz^GA2I||F-)-7;#s%v5%LkR;H7&O&_&O3A zNi3f*dv}rCvk#yj#VfMc$oSP`e7M$I_htR}#!F+oLRd%Qs$ed{iHe#bFhXX3__tAw zlTyyf{A7N6wlP!+k!qX)|F_1?s;muS9vWPj>%2^{Uo>eI}K8Q-Ctsv>>=Z(T!VY2rQT*|s1JW|3PW?^fyiEWDPL&0P5awLF6Qbn_6_bA zipzOVBdc9dgg%WjI~qEUxB&x_0tRGX<|pInf#&l96VDJD3DGDB6D5=Utw4e`p4=FM ziV~&XdJ$I(E^FZIdQ8mujj16@=rl>U%Jh>lUXn)>OH9CF|4C`&ez0-=R7uD^m(ZNx z4thU(u6J>St4gy$Z>{Vrrx#Y6riJ#4h;N?p<-D^jVnxpRjeA7?;_;)U-rOv#1IZcJ z%|9Jpom(mO2~w?)Up8UI=a-l|E6EAcdIX+S!2rf*2(B(bw1`}>r=7=ra@py2pXmR- z-HB5%hrL~(Sm7Yi@&`e`wnF4hvTng82a46k!z!{5fkg_udo(hfv;Coi`5ncWuYRqs z793dAe{L7TrSEimKG2E#RWYz$&BJVaw(a(Uooi^0$#}fzvsJvMtnTBn>JD(fvg%ox^RX_Ozxd^c~aO`3Qo_Gj$wy^e}g!6-d9R^=X}WY6#m z2zhTy13%RDq;#FK!-PN--8w9Fa-#O@J1^s=nM&u7z5drSSYfE70RLg;B}mzrBgoH> zxk!YR>k0R~Rd}boMk1H1!03P0YGYWZRT0$}rENbwQ@BTj`McHsZjBBt9s6j{;Umtc zm+%Zi!J70?(H0z;Uozz`XfDwMA(mD{1gjs9Rw_AjYv35#>ypd(X??XXazC`cJKEQC zJvVl4zTr-NM6hAaHHT=ou6SBNTM05HI6lf3stOVZVvUkZ^iaPfvF(_60wwG2 z=d~chha(~1Yx656{_2tlp{QjaH`1t!k;$I?l$9Dn8*OBWtlZC3#Y1Tu9+0beG24tj z)bWPSZJUAE$gR%$D{uEaH}oaLzzs*gzhh;6Sd)d0Ly#FBmK4#ERGTr;#7<-Qv0BCz z*8Gv)ME>OZu+&!L#8KR5?5>|_4;v%jwjVa}Z(22qg9!_QIJTt(MAKHq;IHQ3Tjq|& zB{EHzHerhC8dj&|mdTwQI!u(o>XxrWfj2aWm_@%U@w1_RCkOc&-MLlZ-SFqkghPL6 z*4a-l#?~SVeXZztsyq(s1~YfWy@@O{p$noIlm>1)!I0<`g` z2(0G>g;v&V%(%m4RWG@^l|cy)gA1nl4ELKPXSu&~G%m}urJw^!mijz}0pa%w{Z72L zip?=^q440FBO8FdcY&|yosfWU3(c^S(aA12G_W!7Q3%4fUH*Q!E zkPf9gRJvo*B`qQfj2tB(Am!-M-6&m>QqtWa2qGQAq((CuqQHuboy|z~jW=a))!g=^ z>3YN1x{;+OZYnBlLImFou_ZAa(C4u;DRkmZ^KXQJcgk<233a?~uvL0wN@2P@0pMbU z=8?;GN*#O*5oRK10d|BHx!7AO=v$7J1oJS7x; zw{h0?mXXIa7EloE|A$v1m$^6)OAA=kd?#-fbM>sLh%B);1vY-f)1=gunIv!)%4(qU7B>jO;7 z_joj^4}LLL77*tgZl8?RAVl!kca38(SW<$x2v@}GwlbtrP9T2&@Mc5joC&6&;oJpM 
zgRV&t{Nf|dN(E-FrReH3b}AeH8~eK@lUv7`7~9RCYw>4|(z_dK4V7BFLk0e>SNtLs zVdaxf%)4!*j^eD2vjesF=AQK8WTwHKG>CX@e)8*P8uDy zUVS(G*X74XiM_?)B$C$qVOCVGZc-YSVUtcpKtv#2+C#T5IoSpP1}p#ulG?A%j#2kj zuw!KSbfL_!y-&K9(d>b1O!Ge_q8&clc84eZsUbsSA0H9XCfQo=1_jzrm$G*uG85tY zq~Ahm+AegD+dB%T=BJ>gpqyrp=`;1soTPSuC=diS7tlCLAl1wKUiO$|nXq97eM$*^ zFCX5A^z)mZww_omOwfV9DCxpgFSCEft@A>e0$z|yF8%J?q$~fCHsN@O2VJj}Zw#RU zq5Nr{x)bG6eIutP?o-3g?htu?9!tN1VR$VGb|sJ1=GqHyK`Nge2k>!3Vv`p$W^7Sm|&dA1ltp=12(S` z-Krg57c48_vlT6V$7B$McG`=3u7AOf9(M3pu%h&48*}vjx%U=I=g>Pa%JSn(`*z26_=Gt4(N$t3vHBlu?P6p$Bez@5`L2=!P+2s9F`~XEGlQE*ng)eINZSt|Be6-R? za|rmJy;jN!<9bEE^n&*;8|~!J{Z_Ey-`hDl{0zT~5sd)Cf-KJkA7SWq{ji_6*;X8GsHg}hrci%2 zzyH-xQAGuB`JiYUB+9WgWY;3pcsUm~~|r5g@AHyjJQ|MZE1v+3Gw)vx&@O?*y`(Z^Q! ze_A9Bo9>ANw^iAjzN%qSIK&1VG~&(9lGzv>I7EGUNK!~0jY72F#;-l!HVxp=pvJo! ze@@xgva)H!%iFR&{n05HWYgNx(vqR{N7||Sr)4*hxyUZUsOse3+OJHY00UljnZJ?p z)@PB-RbWhl>BBjtLzb|8QDEE-Lv36`G{1veF?Vx6<%Nv6R!}j;!4E29=RlyP9q1T5 z^kdg8vzO`$ncS^Pi*7<0>fI9%kqv!GZnTvo4-{bmuoVzLn~RS~{j^DgI1wSpmI=oB zBfYhiZ`7+=px3MoP`eS2No%L^)&P|2gSe02Z+VTl0Z&Fj(!Jj(0Jz5p~ zMH13Kp|eCz#$;s$lL^*(D?dK5kpNrJmQZK|FpduEc$=YO(0BeGal!$L@jAY;DFmRP zphAR5wrZ=sTE?`{qc#1|Z|j@34(~<~2$-D%gZ(xq$+5CJBvwz@2KTA^ zH_)Z@dyf!&tDjEhe9vC8Tlp;mjo|jgvbWAqpz|H4t2U(YTCi_$k5=!o-XaE=y~IJ@ z_8}ifG0l5}H)UtsyrL$(T1^}l*9>43QK0anr527K)BFuG&$@%}vgz&*M=K0U52rh{ z#x&TNmH$q96yDfrKofO6ZMjdM)_)sxYF#Ruk?}>-@yV|-R5LR(G%X)OGi%$X@iC2Q z7k&Ohw!fEEs{cu4vVriYvF*JV{S#g4__uk&)q7sJT30dUA!bb299wQPmr6&b#^cT%? zBPK7`FD5VS4d=<@!{{ufGmA3Umea^R21Hhj$}5+%t^BIddPJAC=Tj2y0_SZ1RN zSqV92Btuc|$y&Eizx=?baMk(iQIZA2Qje@QpH&VjzD)xx^$$4KWqO-~T`Mo5z&aS! 
z#|^`fI2UIv%iT5uAHEgHV*}WudX@J?oiv5;Ic?%wPJ4Mzr-h43|VMl!kw4 z*^@#(&s?*2AeMI-Nn~5c_vTuLfS9hide3>z!mBp^P+!XAs<-=^gQCthf@M6Rxia|i zGIO-9yc!PhMh;9hVD8-%C7Z$ERaNz)svMsQj><^5YNgGdgw=ykWe7^cR%23_r7!AIbZ$08Oe3H&p&t<=xXeg-9 zX=<^0tmlugeBQDGkg@+%yALGf#vUBmoxNUjcZ*X`D6o0u4`NqRW7E|=m8QQ?8w zO+)%r@htD&2YYEn#%lL}x+IuXv2$N-7(DX+L%H;ZWYh-#H zEYOHQEZm-alWh26VDSlsVsP8?n{DH>9pGMeH5DAc*4g!%+kOqk6}HeVWAqvZ?W$MZ zygun?Uu@iMVSVF-b-uH=Q|`fVoy`9glOb=OW@dJ4u_`u>8Zj<=tt9@kb zmYZ9ljjSFxAZ~s}WNGZ;nM%g|m!#IU^cAo-S&{~)Q$zX98n>SJ0i1u9l}kX^#xd1>(*c4lEkO1hHe)Yek^d(>1#JP$Qsk=x;t z1UXyaxT8qw`6V0mnu*pXetq&8`yu*>O@CqDzVU=wBlJQ&tDV+9mOSty>*s;#%s@kG zK0&^}u|%^bq$+c}t$J1`&@z*o&f9g^i|fwtHk*@m<|2S$FxWq+=6M&40W+kd%-d8OomcMRY1p2Xa);vRK3a4dl8& z%gT|ua@`wUStnnIGM;>@!0($8)tEzqy%!HFE0a&~e?cvOPi~xVZABpS!jb*H7qV}r zo+s_>8ar`cvtBjY8UCQ9cz%rS5E2*S_q0QKdfWog+;ju{Jn-87PtN-!PEz88VD(6J z@=C+({hH8Kfy*?hvtX4Wi`%GYCo*5+{ni@|4FQOG>vZ`HfTmbd!@^$n-Z=#Av> zWPl1F=r|@C86QuRT{68FXFdl2504Iv{{l6JOsbZMpvORKCw)2yX{83!yl$`rxbizDPJIy>kBC3 z4Ak0P;?F~F&1^asgzOYWLWD<_7TFYm(ST7jG0^`^a>s^U9Ykzo5|4l4z)trPeyP~{Ft zhD9~AZ`fI4p7Y<_849CTwMEPBH^4rNDGjz$bqIxk@Qm%LqKtU|9aiW{@qN7bQNVBO zmHcO$HeHv=SCfS~AlduvWIYU-_2uC9{i2zS9LUvB#&jL|NNa(@De5#a_z_8^!Qzhg z8;=VdC#?md&HuxFNzXyXgtS$ibCzs!d_Rh+S5*7}8QIIkAaQ(&F&ekL&@stBp6xN$^?aOwjQ!nPZH#4i~KmCcD`1`uCY2q1YacfahHo zcYNIBPP4Ur^giQ(V!i|8EMk>?TJ#uKKv>t%X%MKPTkqILwOHzgbPsPmB*zMn^l`q4=UARTNvFq7Wh5}Rda>b@IQ~UenvlB#LW}Vu z`A~HsFdYs%+%Qs?)h{SxlWMX@NiuaTbz0XTWh8s&3C@{Fq%9WHTZ(@efmXh#P^I;5 zj@BrDr`4TE`s7eVTp2K2(s~qTIL#lw5n=W^rtR?m*B$+v`TJo&$Ej7RPn`&Fw`9aERO52(lxqB+R zid0!no#z#YY4R|V+QTj3ocrdEjfaQ+RagD~{_xw)xus0=Uhf6TyD_~r{~B$V!PcKN zWUDA3kAi@0<`(`&8f&?Ye%vMS5e!vFf5fG@?WV{nW4tr(c2e@k>oc;y^=yB$*%CR= ziVk>0?s#}OiJToTJYzTO#4>E#4#YABnvL{G2+wf*JF!U&zw+W(xx@V zEr1DaILEQQuNusMxJf|e-%oh5skrDmw7+>x=O~hRp7TLMvaP*BD~|p{&6gbh(AXj8 zQvS+$gZIsRih%lG%&0>troSPsB0Wgf(mRu3)`rhd&sI-H6)Q~>!5G8K%bV~Xc}{sl ztcJvW7e|nu>~*|$=+A!i`%XIJ7UA!|2whunT+H4m`ki>*wB26UasHjiK4NVy#`zU0 zZKFbS-Gts_c%sqtJ81unU&k(U+8@?1l9Rh$s(e42hFmLK*k>fpJBs`Bi$_GKLy+oW 
z>|fX&5a7K0Ai)n@wqYUJ3#jjxgYb$P);GRbc5Jgr6U0qW&-XtRG9R)M|A^YOpN&`}E5Bx2z!I;zmhM|kD9*uf zV|$9_M2LU8u4mWHl4)JE-2%{t?w+l~3z4G_BbaG;d|*G}?oZRBf7p5G2ivZHrzKWl z2_rYPs`slOM8!`=f8LAJ51DYRSY!=0{3W%N`$i+9ndiMf1m18pYm+nmOU_{!P)#Ppu zLp$n#O98Iyxk%h2EMX(l{ZVO~$o?ZQwv8Te{4-tURwVFPG+1mrnQ0vs;|M`JT#TH$ zdgJzldbrC}SI>X1GOOVpu<99@$Q4kJt`==g>eRs2iU1O?*{uY5M@>e(4+LU0Oykn^ zRZM?yk`~0{mK>Jb@V5F4$8mC-OJDT&Lg#>3EuW8a7F*P!kh6Q5av3BvNcy&T7zC+lv=XG3_Q%(KZWVG@-f0i&zKcnv`+RL8@+E+hmtN0F(t@=R8lCTtcl>;KKsoHSwZlda{;H>dT#mKx%_>oUv8NRiKxJ44>wPiP}b6`FlAU zMNVk2X*aaWC2oRij3q|zj!nU*&MP18?N4Vmf?;ZB_fh13#NKopWSWo3Fm9->hw#O8 zrSKKWnVvC8e!QW#<4@aS;3o;5l!W$+Kqv8j8A#-J9A`g`3(iE80jtCYT5r^J`@HZ% zO+s-YG3_0>=%2U$&fvDqO-em2hcAYFj(f(^&Ki*WhrV~kauJMf>5#9GzEHHU`0 z=nb(GD=RC5a?_|{c4l8+Wk;@PoF?XHeNH(G$lsb2;u|egNW&=pQ)kdh85V3m&?Bte zS;}L+>=X@%i(FswgrgvLRlt)^11s3U$RrDJ(ZL)WkXcua`cYY&#Syoa(Ct%b#l8(IHGHkD zIw&c(Li8kVu~mrmXH+Y2A9tiopC}E=z$@Q^t333oKCRK#9>x5YAfbfpPJYRP?@HUYM9F27sfLL?KEnBCDTnvXG6{vz zWX%=nURzv&MV7VECHuPP0~zL*vLkf{Ta5~Gh%@e!ZzgbSfg(RI+-5xnzrXbMa*rjx zyU)){N@x~?BvOmzO#6cLD(&C@#U)$X7fj_JdGadHHA*F{Ov+4^w2+%$Zbh`tY{jC^ z&sW7F(|BLzTQ*U6%Iv6Pc+-A%h=3vSnrwm=vsu%3kTPoHMe#r(HVA zu2wo=i=EwU_LlpN8KT*TP2PCsI3Es%GBGr&@uKg1&1`IJ zE3iHfHNfz-S@;r_9lnkL zD*)OjczCLJlr8RSNbyO_J;ldpdBj);4|nKZ`&G+kzk!ZV!8ix?alycgB@Ep}KVml#lW0u*6V7Eh)E~9|S-jTg{86qJ#!B zRTNVInG5Gd*Tdp$^fH_LpYASmo7}8%pRu~dRQsCtQ)Ji4J z6~p>z+6u={Roe*l7QfqFY(!TBvfuAAp~^JR2BTI9e3iAAwV912tVB4p`b%Y)RD14^ z82-E9=Dj~=*xK2>(kCi2WiZY%unhJpd7M8kh%?I}rGEVC-K(}9!co80erW7;^@)^8 z;2mo&I}w`fWY z@%^&ZRNHM#u(CRo3_lACOCIIH316U=K&&zN6EUc{08zMJugZcn zr7e$;0VIHDJi|b^I6Y;v$Xl<}(MoY>OzPxSp3l}_)FLIH1iOK8E+3b}4beld>`Ldo zTd&Op=;-N(53Plq+w)J|WoV>zflZSOaPAg1MeS?q&IlN8=G4wrhj8#E7&Hn5E9w;O z#xa^(Sto&<^fsmFVneKVXcT*Svidud9$)emaC9z=sd6sCG8A;KziImQnV;=UyWc-; zwk=@-q$W|5LpP*Nf28ntj)LHY^OuuntY%+c}S z>yl`xOZ8P>4lLbiJi2XczRCUm&ll^rYoBw6N;NI9_7-KmS?llpif(>B(IQvoRbUz& zClY`k91vMwMyyyH@9GPGS|8}?geQ|jlvaROYj3W@{Dk*>`uyKWufVmYVRMTibfjcIIJUwAv>iy}F`p7=XzMf;S>xzWxClo`f}SIf$cV-3+LGiyg$q(sm-G&PfD!m+Lh!W3t~V}0 
zQ=Nb))$S#IC|vXfTi5b_?F9msIw+F}2MkBs@E1D&M%K$_lnb%Ys>x;cVah(BocklE zlXe%0gobnXpp>0X{Duy>f%&(-V+Fy{gw^Ob3|4W>z_H@$VsD*o#u&1l>$DQ^J8}55 zvDEsn%#A)_bt4p4k=XmWBkdB0{0H|RJ$;+#EfVY8_Z5^FiP3tAH;Ow}eirYCcQH^V9kh%R2Q!&qXWy51(P01H(@f*I-NRFt^f&DGC>iaep*;-TTR zXk)S7#lxm{ZgVrFvx?>H%O+GaX4z9P#p7IM3jP1yUB;V`9P5Y*x4dj>|?$ z7%{X7Vs7$VZ^@52_PYybwF zUg)jkb>B|1$ThE?mRh{Vk{^Apz2_Uo#f<0)l^WL{#+GQ>kr93ODAl-OyHjDd928mI z(dVUOLl%gq#>}G3Kc9;$^>{`%jA>q2Vp@CANPlu6o%eAH!eJV{|G?i8(iT zD2hk%M;i-X_h`p6i_pMJaC}?)C8Px2KjnxIa*;-uBnhnue{x2#5m2VjA2biq`9kK{ zrPUz@ZI2}S?R#ar;A60;2D(1*R?}Z;HPvFLNX4c5^Ezp=GEwA2g#jXGz&<*KM>L+# zc-^6GHXU|N_EAI+YbUT9a>k)E6chhIou6_OZ5yC>?#2iuG!qdhs%B;rJiyiXE3DSc zI9(CddY#S}y(*v(_&MQ%^cxnEAX(**tZ~5M}kU{%}~oD zr3=W|2rWo+bPbE#L=Sq#-H!GSdY=6Jt#w#Igo(TF$sr)yRaZ6-PzOa~*zE1(p7@g7 zgtQ_h0{bOMx|}y@VWcxyc}sRv4*Jr!@H~e`nHa^jBS>naAnx+l6q``6b7+`8xL`=~ zd;!Yu6q}I}w3_(~gZ_sS5J$m?yC>*!9Oy2D2nuH^SGfwb0`x}K(fLgXBdnEBhxra^ zOuD^03uywM7i*ErKl1f7H9H8pyR-5+I_1`KG*aI;uL2IQeG7Sd%CUj6o?W4Lqkv1F z?1c&Q-d$V$bEHC!V~k56Y89VTaH&j6#o(YNi6QAZyq0#ViRgTW>jhvz*@t~`@6WUH z>W}TyZG9V5kK5@IYS#1`x7RP2{GI5V+dM5o3@-vcU-kJ#5z5-R`YVH8%(#zKKRI5& zpKVHQ;SOp{uKrXQ@+tn<=C(G{G^_m`7^^dArsh}QC(pvR*Nzqf!oo?IARlbRMfQ-U zp~H5b3LR^cUh(-$@U+m{=o=7z)r}^+nlqAn=KyvHTBH-p5U;b&=q1UttjSw?Li^e{ zv_T@z6P->XWCI$DQ>@-HmkK92i~lti?nezj>VYG4HrD zu~{Lwix>9~x)PI)fdM>TE_5%`GK=v}>@@&(4oTa(VOU*Eg`kIoFunP@+=x!4|coTyePX8&w#LCDq za;SH`h1vUy{dCljwtK0@XF-Cd!}clnaoDG11ORX~`ZD=;jXRGE0}`{D8mscN?yc_^4#y>K_J@iv!j z;XAIvyPvc>@eh_bq7H!ys=Xxa#>52D)a(s$e;(J@*H-t@y0=Usm@i$*zs2pcso%>{ z)N$#V)*)g8a5pSFD88XB$kHSQ2;FFkr|*v5c55y%{M)!e!K)y2_OE$76{_eh$`kx@ zY4uZkoeCM{@%j6C^vuTU1T~H((5j+@NHmA1?b_Gi^_Bgpxz%&}yVinFZQv@@2?i4} z`4V8}r{qf&R#*qq6QoaXeKX$NtpY_`OWj|tx$8WQ=S%E^z@1P_OnZgzw7fbPwv}cR zCJHRZR9nlKhg!;{+K>mhRav{LRm=ubDfe-<1iT0zet(YeqC2lYJN|-JsfP~~7o!n~ zI`vH7CgCq0YqS_IsxDOd>v?_BI+LNwzM`ipa@FZlUZ0pqcn;_}?xHZI-aHibX?zT! 
z_v_T46MSzz{urT<5eC6ZBk5}TYtEK;`P;!r$K_T!I?hLDyMkg;m*bIgba%{Z zbih-LO-i@8Dk(*t;f9PEtETo($tK4~{8>L1xX2-xgi4D1nE9ujE9*}P)cFai`anDp z`9r>+4!BwmS~yxA%hjEzd!*q%$QGzEzKYl;P{iD75$kxKBf;A!`5pnQh#|J6%t>pK zoys8Y!ylR&z(@na%gOTxl%0j(Ys}KN{-R}K#EvG5%f&zJO!Y~=u)>0lQhY}_7WapP zyz=KbVQ^v*a^Vj(x$yp2y6Lv>Sl#2ozLIxLg16u##N+twXXv{^0DV`Omk1bQS@R4N zQ1UgtqEeomEI-ga06kE~ecr6MUpFRmQ24h}_n;TI`e6f4+tqL2>2LYN$sP$pl;fTG zgKJ9O@Ty+)Uo2eM)P)y2w|~0Gf(U#+S9MCYxmw!58w9!s#(*55KOCz1#Ok{hzMZ4F zHrJglYyD>i<)cb@37GYnPU1-UQ;w@#=9=6QfYM;Pf+gD2F}C4X)=z7$wP&++{K0D_ zF9drE9_noAT9kJ6OQ!`+19IuTRyw~|2`}63Ta&Mu00Ur-1ry}5>Uh!YG^8lfpqUiz zRQp_2 zG?wG5eMX&Y4d*cTb4Tmpu|=j^@i~Uq5Yvebi zhp2SH3XcZx?_O*fj**V+Cg{-v6|4e4RB};Z`{UsppUn-kmmjkWr&X$tWXBzci^n%2 zF47AX3Dv)DQ~H--EtgQ6F(I@tQu$$}WjQOKyQ&&6wNZjn4oUBO0za*Bb*vZH)5UPr z5-iCaj2@h_9fUm+db~T@L^4`>Z&l((|X zE~<;x&jiX@+{tZ|njNu<{CJ5w`2NNpXEM;j%_?NTt>Q6wBTzu!YsLikku?VfnA@cw zp+1nKDIuKh2EgRQG?IC;FmPzE*_dM}3|R+I_a1iYZ%LOK{z(^nVAf|9Srdv}=^;{> z|2D0F!T0<@o0~N}XU}cdC&1Rkhp)^}- zhM~?M;}^LV&wXp#{VU(@W~Zha{VNVq_GqDNQrpD0kWO~W_vE{3Tm2P+qW59Hx4%+k zXW*p_Odu0GuUERV%H!?=J*se_Fr!hU9pI?O-AK)nK?FTR1%EE!w`QSJ_Q1QBk=&xu z)DXjLe|O<|IT4C*4K)FpG$-7nz8o(dU9clm#<98Sw9*9f;#UR>4L)fE9UH07F4kH_ ziweC1zb7J^Alus76AEr8`}ac;Cv&F8GMPjr{525$z!im7GEoc;!G0}=m627d0Nw5l zz$+%TWXI99F)eaiYB8M38%cfF6IJP`yYga?!G}%(e9&SHf6;ZQdJE>et-&$@*NAGL! 
zhfIk+5s>Kn;y_(TJQ0?H&i|~8pp>Dumsbt9EyV9Aqy8BN;$#n5=u%JRISKavc8iT> zWA4u@97?%(1x^#5k2xbuwRrP5+gR#RM?U)6dikGn7}s%6%uwPnVAF1VOd8<#edHr@G<`dkQ1hR1SpQa1$T8$ySMkfg z?>no;F$=m1RN@oxGmyNd~@9??41mw4QomiJ`smqo-4nKRqee%}QGFVr@(CEg= zT-Q3Kh6#FA)ovf3W{E5yIo(4N+*y3?SB&ITJ+cO0VY6!o?0ZpAn61<$y({PM?!_7t z+=;2(t>PZDiwN)Ta<781oW3-6fY4Wkc>@@e1J>g*aok=j!7Ms}$>;>S#^R34hg-d& zNm`F_2nrIuP#c#SH>pxHgzKm7?i)UP^&$eJPM_jnnB#b7lzwCXV%V3;x)o5~2z!p3 zL7SVr>tcTDX3mOv4)P7A5BJ6E*>`Z`V;&RdxlmH>99^q!P9(@!{KVBjdU`g`rz!o0 z`;#2TPcRT;nHq$q-*6ap@}r>F{>!+-Suz#X-Y4BqZrMlIc%2W4lI0)tkp=Tc@xQ}7 zn(4OhJsU_!Fviv|;&IlT8FQ8ft<(7P+$On!ewHat-d+s)U-Bx85*nX!n)i!%y+N0QcBO~0}D z5Mi?}eozp<0WsiY0@>*evoma@z%C&=mzu|^Z#r6SO|=a^hVm;$k8|NX+e(^P0;-Ha zPOJZ087snoU*WOjkP3jE>}w3`r5n~wF{r~jDOj9L8OrDZu8R&PYu zAPcidSRfwQLMlT*T^KXQMvvYp_N|m?N&o7PA6Y(#GV0z1nWunZE;_!u9@u#c!i53; zsB#sVuLVR|b<3ggEu(hr4nEet!wK#_Qc@g(Iv61K+{gXQ6S3xxgR>)~?NXS4?fkQE z5S4XfN^U&4(7pg$fNtWuySW)`(BljbBmqwmV6nz1uw)l%w`o!#VIyctQC)kfXAgg_ z9kR^8q3ok&!CZMwE{`%lv8Qao&`*x0 zcCA4^_OZRLwG)Vz)5b=Kvbj2Of9LY{j_w_Dg z|CWl$X%BySr4(U4)-hcMp6_$kCk@wL&Sh3F`ze2slo+-vlwwV6jx!iABrkMJlgA#} zn=>v6C@GlZaaEb*hgpDv&jC?gzp@jnX0{45o^0Br{)p;Vk^%8h{_u9D1Eje4nt64X6m=T*gOqzpu$zpo1Mur7$eBPqss@Y+oU@6?}%n6 z(InSD-e#{|FIOxqn1z9ndx8QVA4a@E5A%UhK2BSgu?t}hxy^hH}lSxcY;9L%p$ z;$EgZoXHR#VLRcO#r_CPW)R2ymPbkjBUxurPgz&Y;A#m{cCZj&Z$1o$s#R(7){_I1 zr79if>HfjLqXL2s-zJa$C993ZD&5@IH@w>|!_^_c*!neQZ<29rTBav0Vha3=t^d8x z7+)U-Lj94;kVA*4Y~6<#KbW5BDSDIQ=F@nkJ}xd6O)lfap$Cx8=z#Gk`#oqC@`@f% zcf5wd!oOCIi@8PFI@%2gKb><|Viv+={e|#7yqkeq9%RjKM)V>n*XIXlwqwzsknDd9 zzdTR)ydeORY_|Y^*iRn;{i)X8^G-2>M3((e^WUD*aI{;tUptosB}TyCeo^N!Y8~&f5{x^ZO1;1;TLl8X z_U?fHfcWT;l}6amx$Nb5L3YE{vnesR?}q7*PFv4b+L@RCpA-&kYXaiw_stw_1sxsF zwM9xbIvxfoYiLBvpp`E0Uca0n+&%~9Ow{f3BwhN(h|DM%rR&*U@ol<>2 zdxwPuXzM(dJi0U`_WvaA+g0CR&3y7tu}pH(^{l%MdxU7LLF8-O&y>@6D$@+H{8kWm z9r@7{rB-RvFBAD)EWD53V7oc%!B{T?Q*ScQsKINQX&>dwPohKPXx}w6^?#akP63Ht z78Z<$%5N!vR|CFySC z`p`l9j;pu=74i-({-<;p%04H`bmmT2N(sT+_#Z)_{`nzkP7EuLpAlr%Jt25j6CQ8p 
z^3JxRyMwR^(EWWk&-*Wbd9(u28*R{+Z^@U?isXJx1m*h*-jwV=a!=MmcPgUfu>T_; zZh}JN-^ZT5RVj}G5-8;#Y;ctl#vW4irAmCt{^O0Gaux-yoHe^&`_+`Wv{1wBPQm9n zR5r3fmG!I9qe*28}#hb(BWNQg`KPu zg4O8XhXEW+6BCo7-%pMP@%A);`e|!PC%vx7Po(82;jS)^NH3%nJdFynpsQ!)cNyN5 zZghX0c6CNICZ;l)rK87q{TVHDd`ay=VJdhu*x=38m9#jXTc!%XkyNw0@+l}#juje{ zzA#=~%nF6Paqr{cb4}K{t%pR(>7l#L=$-<@sG`xzSb>e4JvLKS%Wku^bj0vhy@0C!ha)NCD z43b=y_<$pgy8vKAn*h=-0wnF#{X)v3u5{$;fcb04s6XDm!PTfroq#{(K1e^wgbcfdS&l&Srnp}Dj`1OKu ziX-f@SHB0wwqjRseYQ1BYe9LpgI<^K&bkVKkOu?6*xr=CL+6TguoyF!-2l+%)lV?pk5OQun4#Puveh0Z;#3>_01$h?ub-*$sWr4rdAiM!_wA z^!m?;33GFI)8oIuSm#ma(-S0{(H2vq(l~ig%QRk48=$=Ksx{jeA=TlRID7T>NNO3J zn;S!8PE4N~)1kT6sC9TKZwB=3)&QSqADfvlMZbJV0eoF~&3Y7Te*hZZ;H4B>^h>7^ z{TR{F?)!_c&TPY5l53%5`^q9d4aVpX`0~J`%l`hfSQf|@wb6L1 zv})wo(pPTMHq(CSdswFGXFC)^1_5vtBrVY3SG56bR;QFWbK735d&e;!V#N}@PgXd1 zuQn>;)1dq}nn^|v59Kjr)}UP?OIPce!zxSe@0X?f9s^Hn(6;~Q&DfB#q7`XC%0@G= zL2)PACVvW)33;SJR$hw1Pu%+a;zK!b`A!2OPe7JnOg4VU@CY^4?Oezpd$f6Vx2Hvt z@hDYgp4if7iUWEc2^|ha(mT+eGd!RDkj091yY#W5?0`Q9_32M5pDo8Byh+-yHY4}~ zF_d-W@AvV*Zwqht`m!JzVW($`5})!Jq#8*XLdtvR%p2$TkN694sCe#pe( zodbyoe}JI1__->zEds2&NIv8CKa~in&W7ps@fl$134gQWlK6ksOAlcg$5(gS|>B&X^9o}H3)SD7c=;m;`JIVXOr~g^{ z`(tP}W1V~oyn5*nX7~T%{nLQ3*$aczK;->}KMM4TalVNvCm6si|0&+NB{gBmDe<+a z^CQJnjn~tkKJ&j1Z=$RiU0iDGZof^fN;4rbHg{h@S=Fu1jfL<@dALO0E(D2v{A*U( zac+15bOwWsSMH8eyFPCJ=H=Hy<{wRN){l$S_kt#7XRWLZVpJ83yu3Z;^pu$K{5ex6vK3XTiOt}9o`{un$4%dye@Zr`H-@Wc$#XD*%^AIIPD>5^A`pw-U zb4N3I)idTTK#^w;uV|X5XxUCyDt2=GzY!Sk!n}=HZXlX7U831vO0$mY=$J%Q>NPp8 zw!%i{hfny)m)4ppDn`)hS49&>)rXaB2ST#IS3|`28*U7Z)&1b2$tdYwezi)~^m$|2?D2PqK5#y9K0-4TbHX92qY=)`q4G@~cO- z7to?@55%0(Ko2`EGN7a4zse1!)<%N5wZ3v*04mj38d<+~2c|J?S!Hk2n>o1hXn`@6 z_7U(m??}5v>4S@&mb@-nkZI|{MQ2a{Y=d}jXGlm2+Ie1qdG>_gJDr?lGK#>M+`fn} zSAf%Qq-1Gu>IqmLe!q*6~V_;@aPx;6)jxyT9*(q%vAV_L})!P7qe3|s4O z7Ae?^dZe;|+;!_(R^oZ&_T|)jT4P8T>o^RJg>6$uFd0;j^O&5cfvPy9qZS^uPb{*} zIF=7woFY)yU(Fr0lL7>M23W$=*5V{9N!X%}%MtJBF+yx=go>3sDQR18I&Ve*uevpI zBQm+gQ{%+5u>G#f)O&XR#Is4miSy6urXNtht*=CdeVMs*TSOt|)Y0JaX9wtV76FpU 
zTPSj=c(!IW(zK`Jeb(7FYlrxRNdSx11jRs$6EJOsKC}|$!Pw{nG zN!y&Jbjzdz?+fSq*xLKLoV(%s;`=`oy~bHm`00Hw$TBv_E~Jzxz3uVII0}!hGwUj0 z(OyfbsGO_55t2+(pOyXlA>aF@fAJK)_cthk9vIsB2C%b+hfz7GTclD4%^PFE{`MW+ zDj}futyZ}lUG6-221A@M3j5#tz=*U+8WH0A^;5&>@?DwhiTJtpJ1dntlq~8*c1H2x z*kdWa6uy0$H{E~xMKE`Q0zU$!@nS<|Rh@I~`wmWZyF@qQuiP4EOEN zB)B}5h0&g5f$)Ed;U9L~dO!^Qxdn)<$HuuUTYudyFKLlDpq%_p=uWn^{O5d*f|&?A zK%+!JtDDf$Wj~0J;9y;_xl-rAD+8$gY@&VDMC&z!KkpLjiyT(wy;w*jsUt8v%$KuhPUu$90TlTS3Ib8Q@HAl#4Ef8mD*p}!gJiNL<~71-t3D!m zFaO&$rlwf|LrI@YXV~knMAaRsE^*DclRT7FX#;G01iJ&!*K<)?-`C5p?L?YmSgQk(TNc*8l72 zI8 zp(ZX6S|36MVu74MOvdeIgN~owkA~wIB(LkEZ+@PsXX6bi9w*+c{S+|3LVl~FH7M7# zc$v`r*Vju!MewVyv;DetUGqY$sehBWr$Vq-yiGY51q1~9R5}z0rw(j68I`Qajm|tC z_`^$@fQ?z*1#RfeIP!6hYzMU!+EnDKZZ6fbWae>8(vKfUrqrZIPX+p(Ti_+Lk>q+>&^j{mV}6Ontm|g+P>iE1hMYy}*GRBL@{?S!vqo9X_bf5+ zDh&{?|FQfxv@1v2zIE}w{IM5UGM)he_{4ho;=AU=GVhH>R3ZEadv4&A`nN7=Dn9`! zdxaoPJn`mH0lDyG)R!xhL+{k9A0&Qe+Y%*&Ta#yVo>Q3!a#y3AdLUK%ueJgCUX|~@ zeG1=ShX;nmIr4dZ&|q8q7`-ZB8JE#Q)cU{ZuW&E=>IHeuf&A$i{PTB#R-wf$S!k-t z!U?s(pAEet&Mem+mz20%_D***zuMg&h0FeJ zC;Mz}Cm?uEbk{@GY1Z`i2Yw9mNWNsVWb%`stA#Q2jGoJZ?t-LP zGhKK4PVQKHk?h$VA`QYt)};I)z$yDMN1IJqmFCVW^E}`C<9)AdesG;@M$g%MpS|yU-S=86a^>AGwffuZ zJ?=;-^FD8ig^pvm!y|_%An5j)s&^gW@12QGH}R^fJ2tvKr55|>+K%yfhLn;2=T-lT z6i>>)x!HuW^n&F>v=K!8GF@t1!KjEPnp9xPb8@cs5}$4=>I za`p07!AZ=n*@9J;Ljc4u2uMini;MTM3j=|Qq0v>JHo<#x_+SCKeRBOb{+F`fc34)( z9ZA2>t{HZ>;0)qZ^F+jj46?(CJE}MZQ3^Sx=Gs;BnfFc0y1qfB)_-jOCz4oq_~TVx z&15lPj(hg63Z8Uyck8gUaMcQ}R|I!PHGEcO=~Wm^wjJU!{_Ff|ZQkSId}{L@`lya) zP@tp3ZTqwXpSRkOmfe{w(Z*v`j#}p-8Ov+35AshU0l8=86)J@V1*11tzSKaQPTfH^ zPHlv=IyR=)hb5W<8oBN!ljD^?K_&5@opj+3-${h(xXq~vJ0TnqQ0K!juC3A$y3H%^ z#m+UC@7TnrwO(`Mr|sU8GS>v!tKA@J?-)XBATm6AhEiso2_~59_dRSj9Sm8%)^)jf z-o|q^P?Q(#aJJ(8>dUX7(&NMq7EHr%4E%?$Fg9bq1wf6dwG9^%)y8gr#~HG zz!s<_MeP~gS!;W-sWmr#D0hmL+{uY8h6>5E>n99L-T89z@(nYnky|oeVO%|9Z(gTA zDtE>}KQtrZ3q`!25Dz#dRQ?WDcONe}mVPx7J)-eh@BH3MM1#SDoED-!O( zf6Lc_(<~it7qq+SmRl~k(4uQ_gT6%^I4+`d4XVuYp3FWLCh8jfg?UNHe`d6O?7;` 
zw9VkZZQe~mKSn5>gk*da@^{{k;;oG=-J#D?II;hgGkqWMka}`x|GDtZ_97q)X-FVj z+QGTg{7HK?&h-?Rwjz43D~8ea?`octA(ulbAT$IK`0*i3JfP1b$n;Wtdxi};)H zAei62>68mJ+Em_F-SbJb*>exAKyy`zVB0>qo$mIYpX|*odTUYaeV^JPB z<6J);;y5ALweCLdnD_=Cu}L8Lvs-f><+8PK>q^NzazTzf6AV>TO5DwNZg?PdE6rkh{@sa z+5JV^NAN!##t-nc`O;R0!|+2Cd3;oxuz5*5t5Mk4+dd+djA&nA z2hNjy8GeZ4X5ea`ao68r+n}hMY6|jzf#oPM0l}*v#%1%;wlWJ&7$_1v{gg;J0kXns zgA7NcF6l{N&46J5sFtL|x|xUcbCi~DjP*M}Y)DEF*CJQW+t#tAdN1T2!)6-}Nd!Z# zS+(}uX77hFp+i{v*+abn#!}EQmtW)f!fhR;`M+^`c!QnT^>V$KSzENsVDWcdLYD4W z6K_+BK@DDZOx!mQR`n^2^yrTu$sOlLBEzXyK}9l)@ya#0K!duB44Sn4!*Jnvy-)tz zok#qZ1ptn@a~dUaxzHbBXH{WUh@86f8Kd1G{;xZPbHyX#%YBXnESzlu&{whK^QBWH zukHLHhXrA+w|pSU zbm236L7xEZLE^7_&-627XI=YO=xk+0zAkZVSu7Mdj*@j3eO_7!$ubz|IN;^nzI%iC zk^`8z&!Q{M1E(V z(Wmrmx=Z~&!gsMYap6}~otCv9&8#GAw zYZaTlHR}K47A8V0n&W$K2th6rJHmg~WHZgvspt?=N6!FV8%#933)(55wh{Rf8c7BQ zugk#arzO8omFhEYy88@gqJ)!S=}tLU$4|;4Zz7gJj#w|{h6k+V4WotdunntDUFuOw zD3#0LNb{lMBzHf;4W{cSnX9KRQ*8HT7$QKR@kP%hs9n}a)Lh1GD0p0 z5EIubaQ`5+UN1)UA$|2_I=pYV&F>8!ZxVLH)6rFIj^+i%oZ}b~JOBD>h@9fN1jl)?oCJyQXU*#0 z9Q62WlbGb(*}Ry=;we8z78WccLq@DyWXN{;J`gPV?XCu!{`*94enL)fjzG*q%|%Qy z{+K5VcN<3y{o@T+r{!TiZ^|`PD$%1|Dws)T5j4hup`)3)5%4(l*x8>MoMXd&!V#fyQJT?-qb#ZL5mja7s$QtkMTGpn+1Dhz zxdnhGz)Sd^i{)QQOfIa~A7CNYzkYU!YG$Y18-2Oec&)9}_7vzoI4m8E)3aqNxSn{gtb&PIFZ3$6+u*7({an=jR($UYn_CGY;|Kb$8sNPFfx|(vQA&)e z_jzXU_i|&z>n2al5C6DqC&hbGe4e9y#!LcDx3j~?WXbl1#ug* zkkZ05138E1huQ^?@dm>wIroPibm#3%?9tmaY_hc11&!&duINk=+y|e`@Y6Bu%qJ$> zf6I{ZI(s2LH8!8FhWVWXlw6qt5OX<({ zMOxAsu)Z`Zsh9rB1=R4WLQIXRt_xpe0bIYT(k{X!#R#r9+JmQp4);OCM1W*xd5kED z%Sgtm^pHq+P?zzQ2BMLjdA3ir96 zKFsf~WPG*q4lr%4&5}Ad9YbRJX#kx#$Wnf&WA*uT93+NgyWF}6e(KAjgj=t*ipk#b z@|?_EzeKdw!xd0A$@k)kE-9=wsqHNe^GwK5(s_s@~0t%QC)0-aJF1#rYhjHU5ya@xnmw8;JE4oNG0`I zljzxIo}Jn`vNGX(r+lstc{WiuC~+_kli+2Usg*&c}E6nrV%0d;1;gukuu$E8W|e1m1gbI#2_mml9+b8K6Ja zQS|?>DWcUj#SLD=gn{Wj{qT)4k3_?kQepnoeIsDZ_XpBq!HX9cMB}rQ^ghsE8`ZKl zfMw%jjQ<=ZX{F_Wt6Ne9H}|9OF{i87&E(udZTBZHCH^ZOP3OZ_<}ty06GB(4K=XzA 
zWC9Ps>BD?_h(k*;ULbo~3uwnXjcf-fYgJZ=s=S&Suuq?K-SL;z&WlL)qruCN-zB{G z7_0twr=#RMlN+68np%v&JVe|#@Ezy9TCqIZ<`Z7sF zl5UY9Riwt8d=1j1jjW!m8#|c30p{fmFoKo-_qR)$j;h!6^7J`vx@SGJlVfa$eV3d0 z#ZPJ8kM)u96PEM02k3{7_q0+yZ}@AEW@>D-iF8X={!a#WRVVn332IvOo78O|;<|a<0|X*{4Nldi)!o2dma3Ynsy?+gRf!D`yc*s{kG@n3 zJRRl1>WN_3{QM+<$YM9vjs09M&7Ru4FCRxR;Egg~%PPL{#NX({@}yS_>oBRM&{IxT z(rrsjzjJki>2dNWmEPjXrCB|WBqdb24xM7D*;B4vh61D{baRYW_p>;6vY-1J(W8CHN^G^ zbq+aFODW6?C&C~DM3J@?gWs9@_GzGY%~wf*$MYD>6`j5}E*>nG?AUZx|I za%(U}7^TWvYiS!e_xS|m#)ZjTp4R?JjrXbLa^rCmmprf%BiTk`qvn*%m8zT}+ zkxc6Enm{S3{4Vs=goaKhO=vHUE?c)do)|py;Sk^R-6t&Fq;GzokJ9VfEnIGp752V4 zQSaBQEx#u@QW2wYnma>?FWbC-(yw|QE;O`$>X(Ox!%G-;IEhHNZ7?Urk5+|N!8K;a zUGY~}mJWc8ZhBC*@xdri`ya;d$7!R}li;BX0}Mw0yT-|Js(+3&8FylrX>~VC!SRA6 z7Mbvi{|IC_4kim39#1y6|A@MfdzSQ|qX-=%^A$D5B)6U@Z=c+RzSUd2EJK${`RP?d zHhx;vcjnXWEL}Bwuo(u-Z6Ht97yIHkCqJvQvJTrbli-!_>xGaL?5Eo8bq6&x;WPui zB5hAWj$brKv!VuZj@P=Oest$UrkY7(a?B&lSVSVoXVc_vH1n>HCSg(s`H!KX8S@fceA2i}eqI7~}4y>wKv zz=PTUvN>Fz33=LiC+2Y1D#lC}upWhtINj_=H~*=wU!UIoj3w&-W)5M0Yirj=y4n4O zl4>$v4N$*mx4ANDs07+`rvWefJ_u~kNRM>e9n(}E=1k7Ezt`rJnWxFJ=U(?p`%3Ov zSbJ2>x0s>Ve};$WBPC;iJl37d=cBw(c=-Un>;rRqQhCt*W=AXYO$0KquI#LY=^(z) z#OtZBP;{B-Y4xo?>`fg>sIcdHZ{|q>vVlw9tV5uqzmD2eQ|IsxAtzm%c*t`P(cDhb z-DVqCLoCsxZ<7jhMH5z02h;yHwPt01QR1HMDDpDOdnfJCn?H%QrVkeO4CB+)khd&- zEn&chc_6!jG|gCdXB7M!8BUus0kf8@pU|90c`SFr+JMAypy|;_#YLnm9;4j_*PeQO z_6KighO8}#Qu#+Qzgbjv&b+IcHeh_TvlZptLu&8(ONTUADLlKFk$%YatFe;AV6K~a z%eq&7j;hXzPpjjqv{aigx8W7^jW9{MsEuB0+Xdla@M%#5^N2OpwZ^tkHmC-t8T%&) z`Heguj-;iuYf5{-sHagh03z(Yz~Jc&#&(|bZS&DyilwE*EW>Tk3EZz-DeIXmIpx_e3D8g`0>~sU*E=tE9qFgk!ZR2gB9< zl<*nXO?l6AH*2VpT4MQ2l6zNlM7%95HZ;c*cev6QcIaMgF_XI5o<2M5_4wc-zb!YI zL>-ZeVTCEDQP5M;C|^nrmuSa8D_I7_sY0WfKFg`% zwbvEvBJ-;6uYkKXGbZAq>0;>Nci?z3o1jElG)wDAm~qosM2kR4Prj^*PRYSLd1xh%rKu3(6xwbGXB0cT`m^ zRqb5m=aBil5C(SzdM{6{kD_dt%p+eg(JxeIhlPc<7TlG;#}-C_cgAwz z2%XcTA2LL<95ZMe>Za-2VI{Gvb>nlf{hzHn`yygNa<4Wn<{dnQ7oAg{KGCtr5Ha0y zJ#5~yuY0Xq^G9D7B;S9OF1#FY#htzDh-2^(6d!Kvt*{-_@jO>WZ*6YR9*37(pNi*Y 
za&OjZOy5UkhSF#d35#E= zb^uHm#L7y5RkeiRA+^aB2$8WSB0UjiZ2oY$}uKTB_v5 zgEodTZk0ermR2W!YRJ^SigSVP7|sWoXJJt{qLJSwGadWEW4tB%t2I`}V>iEht=#PX zC3m+ZFpcO+sFHc&*26P}>?cOv{TW-!8$@t$%B4P2s^etUP@gscHa zVmPsgU!G|McS72~VYC!ib{x=JMSlL=5zPZOZbas9@-qOdxw?`hKi6iHM;CbKU&IBK z|7Q)51kNjUPoSZ9l0;BiAn%m2fVjTf-wxxK+qo^gtq${u%6 zhmHRlChslP<+gfu_T-bE>t9;fhxGlinSJzeT-PTO@-041D+6NR$^%WC`gzbSoKt}{ zM;8|^fmN4sx}YTv(9n7)d7G#2iFgbI9JjhK?zO~EYotS!(KUV-z7-8B=F)%PsN~fJ zb0(SUI7saU3!N-E8Tpc_-EkVm!%s`mv;0w3%_2q@v#VfdBcc?caj9{XEMor2B4Glj zH*N82AZk^4F~YVx2zXID$^;WQ74J}cOcl{x3ImG@IGci2b+E&{C}^;a8{%2d^Y-kK z%AR3j$X^c^2V^|77Mqa?3EF|35Ab&RokD0eEXCQl7t&m|yqH*W}egyydesV%PcU^0K`)Tu!!GSbxEvi1KO&k z=9&PX=;zqEZwKhwSRaCPFnL-2B3RZx)nfBS2CF*DefH2zYJkY~84UW_Yil_%xYOBJ zU5HSp!(P?8$N?rg+vS}^$^_uEe0L;-jm&!-YpL3SZ%!ZZyw5mV$g5y%^&r7PW||Df zqm-6N;T?`&377`JfrOn&B+(Q^Qc##<<}XFx_}U__RbX+`B+EDs=rmQ zm?}+%wt2ECBGLzvy$T?uUbQ6*?G?o`-%Yy1DWk6H-m8U*(e_M?H&xU#pdB9{`4lWO zoSscFcHJ6#Mm<;Ma;S}bw71=9&*f4hT)n6hbXA9Tv_TykFM}sf-!&1P>ji#VVsh%e zoX&e<{6RpRs54JR``7mfqz-%_@Hyw>Bc`QKNW`I8h6{3DEZp=)gs3yR_DaVW7(lD7 zj_OXElPytw-MPSu>Z%S>SWp!Rar&86UmQlK*D#~o7+(u)L{ZJ3XT+C)A0%nVx5Crh z@3@w=;LVj^Pl(DQLs;m+?har6I&Fif+jv*K4xqsH6Y?e_;w-E0Z^-L^-Y!$@rD?AO zU(n1~*j3P8wO5+m%E6*B3}0W6tvymKW555DYl6PuV6_uasy*Hvk9ByNNtNIe2!bMzW8T9Lky<&T_6&4Bgo~7GnL_Q^6@kq^e z%K_%I`{tEN{nl}z0_9h08nBv1k8phk=8*ubHMij?JLP!047++pDU0qZK6m}E{T_)! 
zFSNuJollPJ#NW?dCO85DGiBPNHA5el>f#mdz+3c;H^r%|uf-iuT`DwCn4cK6J-0p$ zl~l(uxVl_twWHj)-g}FmOS$FL(n;4>%Wyu}to;5_)e-WQ#*B1Yk_}wk)hBNgwJTd?i93D%?s=gbeMk3X99JB zqchboJNp={i+z*h=0#g2hB$PVu5UIa~g9WXb@=sY`IXi*-Qmb)_V0xj&nKjqY=>d^Jw z*Q4wCvF;6u7XzH?bpKG-^vOY~u6{qZl0VpuR_kWMEKz+{99zq{{4todZKN&PdCb4N zI*7HTh_7{XFX5cwu<0^z37wPH4W&j!iS4^5RyEJ_>n(k9(iWtf?(TfiJsshgGpcPx zts=|~a|*UvJ%)vFC%d#IKAy zNmr0H1xAv`?GIbzD?LejjDnj3*Ll~P9Ruq;*NnM&pu*w~)1M`|-)s2>);PM;&+{1~ z%3kPe%^CaZtST;dP!U>qnd{l;gm#V*uNTI7u=K@H=Ewc%Grc12?-EH38Vq;v`P+AA zoEm3gEv6gt4x+&eAb~$XynNj5rFyNt4M&OZlGPI3({AK`#o~N!A?T;Letr-Fb~lL0 zNb8>;!x$7tqc4}G+~k6qK^=$*I|3y*?&zw}95n_h3GneZCz+Wo{P^l)7B1^Ub7DX~6idH)zY&47|ifsOIkUisRX4jmSMY_XxJb>qS1j@%$_+M&{6=x|3x zO?f;vIP=wA0mf7cj}d1qdQ%X87e+1sK!*xK@Y+EGzuVJL;GuydcNm)3gT22 zT8+l_)zt^y(EG3F2Pub#*f_e|qtwy2uuyod)Xi}x_;zU|Q-;3R>imh21DxdJ*1zHH z(QLHk2efl`qU^60;C<__H}a%A)KiMR+1HQec0udN`H$*g8AYgR?j>fZSx{LIIs>uF z{pUVwx6Pp@A(ljbsm@zy4ch~2J+uZ8yW4E2ag^@UhbL#Gufw}^jvP;XC)VqiytrtM zBu*J%*_LzXME^+@TgG{4jkMBt-J+UTNWhF<8>CVR>7#nI!xPkj25=;QFN2V|XpZe| zsexWuGdlyRB^##IaxCjz7I-qAk&vS`;}f&kQ>FM>#F{}HxMjLL>lMBp?f-py#r(QhsUxg0jDLQwzX&VlV-q4NjGQjzq_t@2)VV%im=c4@truB5;VR9tnifmVHTtS| zR1NLXW_73uu4v|iz!(F#FnO$F_gQY3_j~q)xAy0B5x@52ei`6yO!`+NtZpZr-^e^X zV4s>Y+o;>N?=g?QZgE@pNPQcFC40ZiMZ^7Fsq9lXl;V@!i%CLsBM6ALKb ztF!@}uE8NaiN1W$e;=h0$QTNRpV|j%PB7{iCHQek!G9}dO8=`=z!^AGw3#K({#}A| zM(^+W5Tsll8FJ91&uK<#sej|6Uq*))6vk-W3wvE8|tS zKLL+g*`8n|GMJ?h|Gi;~SbMfzX-32pTz(Ii>Bhd9u^cA*ruXWyFAhgAW z&F6cs-z>aMA;TT(@pxyBoiaZADl`5cN>&x?I{b~U-*(XryTO0nY>fLPnw#y6))tO` z5)0hkaY0%6NZQRudgEmN&#*nq1)2a`pE|(Dv(9$b*`vRt&Wov` z3b)8r3Cj?1#MYz?cCs4#t>1?6@Ns=MOten|&>!k2EPC+2ck3CZpW3hI1Ulyu*qit3 z#sK%(-%3?>t*;f2zr9u@tpmJ~9I%djsBhZQt1!q*=dcb}5`RYYG3I4_T}w+IS<8Md zuTw_4pSg)3XB%8V5BU` z0yP9$oCcgGOV=-5F+2D(sCFX7S#P6W+m|o@yC(wqO=exBeSGCh0IvW3pg)f3i~-O# z(}dmdwOs-ozj=)GU~yXY>wj`mi_zaMWPd&I2ec>Dr5!L-|v zF8J);j^z@|j#I=p7~Px-c&r=Ueq_ZB+Hu4*LiHsqJiguQ)QodZP*mRj%R=OFrjH~nNkMp7JY(tS^r92B7pW*37vL=uC7F+0Zb_j4j5+c=Kv~(EK!a)El_G+_q)sKw%p)xT+&eTnaa)R`h=*W 
zXWz=9q@r41+Y`;wO#?4SWQcd@&n4DEI~Qg%P|b|ELrq#+VhQcB(XZdA2`=X9UN#k# zoh(1dlX@Q}7<#|PMMJ&?F0B}5z3bkN<_-xA(w-Sit!sLpyOdUiZ9^>Biz!;C7cG*mqx)F;Xe?xWWCNwDXHi|Dg!HKg z9;AHubb<&Ma*WPzAeL=wyF-C)um(vRBL*#mp1E~uT%~Z8De7E z-IenqMK|5Jd}GK_7$gh%$bgE>jv$`(oy$E#Mr^ORYQZG~rQarTSikQ3fp)5j#XrHt zHg*B1B+*fMLF*IR&TGdKG#xo>-yLZl#zm9B#4ho->gOFA zX3=X~QZ7K{a}&PcUUYdk@>pu6)nmy6-Fho>S!_Xn@W}9UIqU(~G!3RNWHj|9^YKTE z5^_#%VOL#x-;uY6KRsGmxI=*$hQuEtc{MD(N}&ksBYrrpH6#5uOcRB9Y8i)-=098P zaM}uF89eqY4mr2L6~`Yr%*NG`hb(*m)_l2@ufWk%cDD1}JW3*RgE{RJPFn@&P~ z!8bJ$`69Q&6?@`2ao7YGVxcK3`1}ZR_gMJg+xKR|Z_USY!;I$<7WGo-+vKc2K;zTK zwnYsuy7Y0d9hPmf^hYiyjLYl2vSp6X;shAm&EO>$n8&y^oyZagzt{bl2LzNt{-u=O z>X!iDCoSc*U9#z+eY{@fSEVT{VfzJ;2XZx};ngrw!yJ@G;V*!trTEB=O&_Nj$5w|Z zb7~@GeU$`E8eFvyXnB<-L-dcz`>{gdNC95e6fX+?p(!2Y6hMDr&mSP{*7tL1-SKg! zMC~Cvy;Urm+UXI++9qh&Ndo&~paQlW{Fjf!seA)!m%wY(I$m%VBz0YZnt(W1n3&D7 zjrmrH>onVr_fZGqUWPqZbrQB5c_#ci`TG;mQ$jZR-%tLr5$q(HGD^9~t(BLX)fEUr z4j`?E5GSU3TYbEY_Yd~^-*YGy4O@Oq{LnuIo-n4$q`w@mNpT~{#N6YE>f z;VE=3I7v&6y>@k;nK2^2phqpe29Se{1;5Jy54PrQx!XB4zf*Gi)c~)ZZx9&FGsAqqjl7e2gKsx)arVd}oqeh7#e;3cD+mP#7PG^St z&X3^R+^mOq4urh^T@kVQ zudaC~k}{iT)eR_wKp^M;f;24TUM4-znt0L>A-0=6IBAx_7`e8dt!^X+r@i2&i?oVb zk3wG3wj!FC2s}nIUuB)YO=v<4-KEdp{gP>ZzC)aPk#UG-y!j!MiwrkH_xU0Aj1U2E z7;ocW`tt#chKIyStCF`s7i?i~Jq)tXa#-2~7hOvh*>k*1LkziO+^sc4lcp0l0L0^o zUQ{$Jcle-n#b>KY48E{Rl*HfPvZ~_XGz^}qy3wB?H@u1YTfbyx4@7v9QB{&IwaZzw z;5B8#-6)?!gx~cAZw%wh1zw((i4ZB2%T~Rxg0w{Tm&bC-GP!YOYis$$OUU496sh6N zuUo$<3&}m8-m$*K$S>`Oy5bf8Lr)v$^(jXejYbUbMVbIGxhKF_{hi%R6{Mp6ivh7d-#=U}6ti1q zAog{e#6vd6KW^*VJ6U>6Bd}S#cbH|s4{c)<=CcQqe{ewPvO6L`HW&Wpl4N^XLT*w0 z+og0u0x8f+p?Gl^Qc(brm)^9wMSqoA6E$AOH3UhWkxY>; z?k>@D2f6O>@!dM8EeKJi49sTO((5savRAEY}dXmlrx}b1*)F zks4?)Jt34lczCy2E8Fw!_7rlrb;}U>eoAIM(5tT=kPu|4Q@*>kzeDHyn0~g}RPJN6 zF*sz1OuReq*;}k@J#q6m^1HkSsfn-3fo~BOL!#qpcZ&{9VD+2Tg?=OS^)%QEggKtc z_hse5tMOD%%XPNg;eQ#I?2(PyMp|ifglt--_8!J)_yTrtR&RCPgwZh zYR2oJKONbU5Tx;}H1F<>pNI;RBqatRu8ZZHz}`$q0bGr)+2Vz2>V}Z2r^l=YM$-%N 
zS`LCP2b6xSAa`de%war_o3-22iaM?K18bBgIGh0@MCP5;$Y-@H=#bR1?S^-c(5Qqh|~}(1eFKM}BQ8&I=y1z89m`iqr2hut`~Dm54Y8dld-H zMDu1oh74Ayp}R92_sOG-7D%P2vij@oM*mKJiXM47h2mt!h!UxW)WbbS7nX$%EvSw5 z372)6X%ZXJQDO^4*&glqhR1gc|Jc$t6uuuc+ZkO33(wjgO%*S8bkcP+KG_p!)2)pl z(DT)~pQ#;&lL(7OVL6u8yX(}g`sG{ryUPG_RadoJgpxwO>0jKqbJmc>ptc1h^=0d2 zYp>nswm*w>Zg|gD62e$Gmc*qrQl5~SoJEwt&)}q-kaVB)>Ymz3rCkn`?ao)Cz1MEk zy;I=8b!ySTlCaU68?Ii#N20LAyyNTXmMMDhhWcSzw7+OIt>?v)Xz;QXqocNwOQdOX z$NbWT)}`#3{#bDR%3~?l4Ac!Z-8jYVrEpu6XrKE@nxMy`SHjUZ?=7*V!y48^-QzvZ zGFK(tr|;!JU+&Q87!~l#j{KoxTA-op2e#Wa<`h%{Rdv{)esv!Z-Jl%mFdi<@k2bAu zKp-Vetd%{IHgKRNeB`yUcYAk&{u;@t@EIXqFW*71#ji3GSK0(SE|Pg!EClapyBNt{nW~Q@lTtU-lU# z$_H5HEHXBPWII&j2+L+@^%E3Yjj^(M&gxy1_8#uNk9y0*JTyX@w-`4BRH~g8%H+W@xtN$kK%6`2x0U9t(TRXXYUfP{kGu z2Rv3jtY~qY$ zO8Y4}<22lKsB+>{Uy}x_S^~bfwT`x75ZIsD*JAfOtnW+AqbNihivBW0>h_;n>`KdV z)SaiEpov63{ur;{6{lbC%qtkUb{hP~1z?c1yd>*`9Epu2svZML)CElkQN<5@)a8N; zEW_Oq&^Xu?uw!z#mWj5)H(Lu$$lpGBtTx|z`#M=mH@ui#D2UvDh-t)4nm4kH%Vuzj z1kPgJhCE287b>ly3po1uFZR>-V}9w9(Da3Rp-G@g4Wz-i9t9d)QTB{lvW>7zS_hTx ze%7><$$1 z*Z+9`sps4ENsoEme6B;4Hn@zdUS7|$vok=C+LqJ2rt({8U&$BK5PEHL8m@MNx0(@) z)2*jw4%NH)QnwYC>$(%(ytlJ*V2Q=Mmec7&g;6@Kz^{UA43Hb#I(C8Z^8*KPXNcN0 zfFRr|2vU3IrJ!J93otP9p$8y*Qt#lqH?P*4;VKurJl-!tx(2A#7vms@?d}G+jO14G ziF*`2y_UQRgEjfEJTxq(7I(PCyo}Abo$!;QrAHkcmAXqIZ?9T?m!0w6yQ0=rB=)Py6nH@=2tOguOO75YrFPp9 z0U1osBN@h{7x(>sh0PziTH@eUCDR{3Fsiij}=E74xj)8dBsR*%uy;EW~8J?Kh)sXX3%EmYo6 z<1MF}WfzPGMh4)d6#n%Q#S-Fm#yi&|^zeAI_lQRuTIKR)OJW#85Vq%+-?H4YwRp6o zH2KdP+m`QzwL8X|l2_4(nZI`JtWWlbNjUshJXfWPSbtxmQu1Bapa zX3q&Mp}uz4(O@*Ebg*NZ0g^}_zLb)b)DTbL@oOKvJ+sKdzJ4@Ho0Za{J8L0{61m*j zeh6;=6LLNJ?2s_>>l8U!rK3PI=H)D>plqTwYmUkS>t!&w4HGlL(#JVzq56*r|roCb`Y_Htp}!G@GPu@_d|@= zdaX8D^cR@wV};?$0{Ka!HZIL=s;G7z!^LPKY?on?L(ln=nbY*Jrb?%-vSHCY4dk27}C6w{+H(s|ebjFVbA+G!||%7Y8V1pl=~{<3nc82i+$LxjoCFR(L1P(UL^x z%aNlorl35`Uxa@akQNN*1ecKx;C>qz0Ucj^h8ilNmO_I@@_$Q=Jg-;sQo?FZItjGq zL?w|oo-}H>&zLx+Ich_x3gq8t;EcFq{>h0U(S{SMG^CNcuMbp{?}|s6CFLJJwq+cw zuJ1Simdt_bl!m(1T*G=H+J@rIBMd>9bEEa 
zlo&O-Laqk-Be9pb<>{9HX$fuUu~$-;TPPZ|Gk;rdd3HXdI3u?*SXtP2ll8}k$emYw zwcFl%;zO&$D6_-2V9$Yq4>2`UQa2~C*3>|s3zsOjyCb>z1Q!$6s{utvS;{>&YV?Hg z>CR|R3!4B^hDAhaWz#pNTkoa{>9jP_K!VMe<@`?20sk@?0;%DBSeP%jUZAx?_;h@# z;38?H?IV`uK#4)q>vP)O7lbBzKLj-YjliOcSrwXv6*|7^eiz?Ku4fF;_YDkN|FA+> zhWlt~B!h@e-s6;RXEJ8F8M(d{+eb}o+6y#C6q9%kW;m0oahEbUvF0*55ZK7FhkdjI zTkZtXGf_R2w!Kj>5R$sMT0O2RIq-qy-;v&R_|!f9fRC=asG2}N9t!bR|El6fE#>gTEWz^EeQxR~)p{H<+T8lbP-v`UK3+Y4*jqL)N@A!;!<5raX8`8GA_)+uAKzB>3)8fgh2=t--?yxCnx`m zsp$LESx!JPrBr0ezl;)n;c(7-M_h$ezqgPngWktsn#aOqz3tv1ZmNE-a z4Nz0)2h&pzE@t%7PD2`)c0V(&qkJeuHyxOuVS816jko^H7iYH<=r%-*eOy1`(9j&K6iG|@@)@HFSv1Llby zO)4K29QR7DteCpnXi?pGQ^Fxuo06X#!)d6^GI{GUZ67Aanq!x?Gc(9PV+Bc;lQC9k zoz+bYGtwj{d=Q5>4ewLldnX4^gq~lyfD>zX%2r4?YqmU2+sA}~>^Q%4q1ps)JvdRs z6fCl-xY5cuwqqbgcoEr^Ckmr-T5k67O@||(narOzs1}#kL|ixXynd~#HMq22dPUtFh`LO2M4WA|7Rx0e z33;&(P|z2@JLEZ6ZttNjUg;(F*ppMtFA~5&H?UtiUUO;c;aAw!#2@lAT96y>Oq5TS zu69l7VWCS;FU;byS@dkb>jG$h4~gc1?@?Gb7($4;h)biH(5PLk{AjqIEmlMqA0uJ< zk-YG|k2B<3!{K3S_ci^Evj*C;hs+34mPO9EemidmID@82Tn3FJd;OiUoo2jKxtSR5 zMi(V9KjE*73pC&)`JT^3;tS{k(~1Ow-3@ zsBxinJKW|>^!F!>+Xl6?v$rlIEsfSIcg^=rZlu>+yiZkDCT;(O+h+#-P}RA{5O({`H|thHs!u{#n6etuOD z+ljI7v#%k z5JVg!rBnb;UdBBp^A{co=YXFm(k_YAJ15F7B|qK#~c74Bf^#?+2D0VMwc z@%{6zXq;2K_UrS9V@p%Phai?`^0)V3E2rXdS;W@gv``T|mbcHiKGQfb9&1Xkj*wI} zkI;+BT@_E{^giCLju7)AEpwI2x(mbqe#V50{=y^KyhcN8^Y2yK%isFV|Y_*%kdW?1`VZNft@eE zvGzr}Cf0vvIfzzHTQ!J|h+)jGk{Ml3_|r)kMew31x0S4l@%mUkJ>Ai*m={}noJg9j z1!R}+?4jlctAEVtb|&A-eFMEuHwy>%Kp^h(VNP1a2ciZ9I1tC9!qBj={ZSDfh4FJIO6%0W4W87Pc+M{ zaSO5sk`dyXtX<9{t~NXXFw*=8%XN@mz$@?aw8(@v2nahJj5)r(_f6a_V}!$nYDrrP zuR*Hvic4-6+q9&oi`UQooW{5Kr4ZD{!`;0lieYjFLrx*3ws zA*OrW*(Gf{y@Lgk5RfAtuzD}J4E$%yxf(%_hP6SBvOK6anv0x`wE?oLVdUBP%_N$^ z3XB7A+i1raqaS8|=^K(;B*ngzKoSD6aydOi~p|EAs_D)vRUw^|!INjUHrk~E{WAWior?4nY2zs7vx z-R7k~YHbN)uLQ24tf~0!7TCS5kT8;3FM)OU?is7&(N7yRums!<;uUpdWa*vFd;?om zxzLEqI}}nue$4>ykr1jU+dtUDQEPZr-)L&pFFH39*7TC~64cr+$m$DEe%07_=!i|! 
z;4p1UxSI=YCd_`zflL18|=+Yjr-|77*{R?~W{CWeq|qo|<`AU4a#`xM{HI7x zhJe0xV}R}+rRes(61%lWof(S4K0v-#^^v59`_EN@<-#TNC{Z?}F0WbyoU}@sTTem1 zPVVSmfQ$$ty4iIf06P6eWC%bU?B&WA2 zcP|7Dh(Kar6Y`u?aBwKrE>9DENAq9c0~ojnly1pJ&na8UQJ`MeU(e8y=P8$yHL?9! zER-Rgj9+~Xh`@CI1F87WPrqquvMlbEKdem*3>8TFzp4Hf|Np`8{Qb0oX8NS8tNp_M z&#(OFKYpEvFVAqGdA|RNOZd-!_MyU}Hq8rc>uP_YE&l*q{snVkg-2=)Ql!iLJMe#o z|1TpnV|*E%vv!fW{@)Lyl>t9!p!|&Xf4(2Zs5N+tx;bJcZ}ESB%-{c?VSX9a!i)Ql z_O*X|SPda~i+$(bSiS#klK&glKQJEHXC|wkjzlDZ&g;tm4n z^1o(U7!lat-QoXc?H_ag-(vhP=kxzZ*P%b-JWzR}l?wWi1=eA}qF$Vt3O%8R-6cIQ z;%++n-2d(nh53NAwY%Mf1~TS__z&fXvR- ze%+k)A8!2)IBwfzVdUa}aDM;p?*xFqvvNkpuqTD}x&vVJ^%8UQ-YRHsM|%*{Uk^SC z0YV06xbW97%uTo@KxztFm&3GX764`7-75x?w11)T{>%NVf}^88?r?a*O>7jV?d0)0 zO5%=oAJjMIfdbily2rt`ghdG*P%MJf-+iKS%smA>i+q90Km?`vTfoy>QqjuP|2}2_ z(!d1o$>dp&F34@OxHpvmpzt=_=Ho9}JYVq+yt3zaKQm)70-?cJ8DOE5!&db*79`Eodin3dn%Fv-Da*}KMJ`GQ5^g856jE^73LlbaWRFBlg<55s ztzj+G=Shw2kafK8(f`}Jl)$==4-cdW5SL0ZEJEeA{-+7eo`7E%%1r(81X_&ln#3}&Wj$XTQb{<1+Y9NC%4sh&WdZcG zBjPhXk%MJp_e~VoxMimh`$LDhAuCLZ2n`w)`|$(SVlkh-)4@^1*+A|IRnql=itF!1({;r!_#tVKP=U!4oJ{ zN(3Ww>)vdd=54Y>I7p(|-*vF;HLi*{vfRC$rhvp;WXKEUQkX;QF+A-G`369|my78N z*l37|jqX^{>#gQqJ>-y(@2$!#*BVZbZ}e6f+&Rx#aX(B<`a2HuQvSz7M|Hq6UtM`u z6Jc1+!cs;oyO`9M^COGINGae>l&VBnn#qn|F)90 zpa?JA30-Gom(yNZX$F0xM_j08*3;Eo7XNAdrapOW-Xc_EU(zvpxfw0V`k^{-ps{3#(%4DgHhR^=JopKy;&SF58S6#)Fhi%-Dhs}GiG2y?~y`MEXSn(n* z$ruSLC8~`u1_Ev~CVWI)sOvyO8@lt_bqVxJ_Jt?@vA7CH`3AAPd@seQ6r$^f?2*wV zj>-e9jY4tWBrb;k!)#3-OI#i3Ph&AMG>RBA`uE`&!G>e&qR6ccq_7O7vJJXEp64s5 zE?Ye|sOLVFKa9opjzL@kc*2DEh$=qW?PT{N!GOF~PfF`?&y`4Qz`RQ=YP<0#w8#E*=1Dg6(@r7lkw|7pou9d)JM8kFVedNStN#?EJYqpN^}zp z+cL@lM1#Qh&-;JXUMXc+I54?q*#0@9o_Z^hr~zR9FV2-`W4$)V@>zPtL~Qx?9Z zYKh-k9M;X;zpHNO#uccCxMb@NWh5XD5tabf>;6IL&)h@9cGjcq64v@P@{3lP*cO%C zB8rUNa&YA!)>H4~cM41zfZ_mb=yd>FWg1~Te&+6CPD|oAD$*t~gMh6QPs@xxd%j;) zR+kFn=MY$9|1ibuqIoSt(Q&7~2#-lEQ^=}qYA|3X{*!CyXMU>-EwWP#2E&dC6Uu{0 zteE*HkePyHLYnz^9gmK3x{Ak8SCWg+9KR^GqnBe;sq_-a>K+3K9q=zmtxoO;c*OWT 
z_PN~3hKe{=Tlrl4OzF3sxqgVM23C`R!6OFNI6v~C#5w~EPJt4m8D<9>1jm+QXw^u;5Q~IWU~ADfn23b zm;EF;rv0u)hNv7hkU};W-O~9gZ%Uw)XQstUpkws8It1eQkI(-inYpAWL3)a1T7JEt z@)`bjlB{nO$z)XJq(!-84mb1JuiCUKCW^CQi{Y@?ku<)U$P1J64wq-7aXDT{nJ1xa z$qLm0+7`N9=(&ND3Dg3}0coVfC6+^9JR3X_~(Lnb4hvnT9 z5$VI}*+Ti9RIO^0@PJ4D6}*LSmtN=q9)Uvk3H+hzKK|R6$bGfbq3*ckMd=y@pk`}- zm#E2V^aK5L`tLiu$$SZx5|v-jH~iyotCXh!gV^{ho;%rwjII(kp%8@Qv0T2xmWh(A zzG!0!Ap$Fn*N6fHqnI>ur9XV4Hd|Kx3(%?vnxs}N&r9mENDosfNGmgAp2aDK3s1S5 zI(oH2e)4++>OfuB%4d1eBMTIJ%?}YbrZ^r$P%wFeVxVP&aWvh3Q!;6y`UwUe`vNJQ zsqX#Fym>ORbWdCR3Q5@tMu@_fuC)31Otk`A`Y2uMyCN3H)j#$|J83BmG&(M<9hCK| z4rAo5P$0(_{cy}igN%OT0m56F zdA2ICt`tSRIkbwgHV9{mFB9P_ubZm}`DVKPQ&F#1FWe(v? zetg&yNVx4TBoltlff;OeTH$k1Dx88XX;}jBqc6pNhOV%Q?J&cl8ocK?j^%Qo%c(^4##V`j`L;+VV>C*k;z`Khdw!CxSsN6 zhyEUcYJW|Yds@bGx|vurXdsCiyi9O2cU?7?d-?P*7c>%j3=H4M#Zwg@0spY@7942U zE+pq#h)W7nY&Og`dvX$!gpN7Zon1jn0{C@r?Sk(W6mU9t_<8Ft|H2^#9YD-u!?q7w<;-n?=-zqW_XOR zBWKh&IW|Tz4khNIOV?+e0A^GGlGrg&WndTDBc8G1Dj1D8;F{=8bc$N!jP<$*b027Y zou2MeT}`hY(4vAwtsHfxR1DQ2^J(x^TvAt0aPb^5N(f_pE4OEG2wrh?B>#!zG!-BS zL80_jBzoSfOnG!tI?)$*Impd;%Jn8JQ}?%*xL4k>b%dfWr`|F(4Q#nQc|zG@=a*pm zBAlU49H;@Arr*q#%F$1+*Sf3bmIoOd!zs@GryKGJgvG&Qg$5j~myI)VJHzSqAPm9**O zlV@?D{nYKDvo;pD-xsd8B-MLnYuX6H+eHt8fmqL$6|ldCDDAq*A{n4ClTd>ODVtdgJ3u3ovDhA*7ZT1>0)6D#CdW^VR@ZU8@imA&R#V~?=-bY zD87@j(3p#%2W?Zl&7Y9^XP@btf>6}OBzmWdUx>irnCDFq?i(6W%SertrgiO~e8emq z;hUB4vYQB`c03qnIPlYpJ}695u@HBad*=ho=1wL2l#YEunn?6k2J@Onlo? 
zX5m$ps;`t#I~z+T2eNyzN+aql1oGO*!*H=dw0Q`-^h z1$N80|nP2-;pFhkkq_Es$lwW4qjuD-CLqd63c1{ulCvej*Elw|_J#R3&cT z#{D``G_pLWEg9C}$LT3&L!k?^8t0Ms5dW7|_~66O;$2^$yNf+ib;}VF_xK=fl(io) z+sji%mPJ5UI%Sv!ef#H}+P8zxdcGmVtZVb@>KogRcNA%Rcz*pA^?YHP-w+~>Vln+R zk@QE=!!v9n1nh9IU9VB=cebnv2zRu{_s_tJ`z!U2l_t=RF?S~4tB8_HsHzlZ}aI3v3OM2pzwE1h*y50)o zn_!KNFF8X#jpUJ4J>P=nweeewSwYzGhFAMMqqdYtNeF{%ZtPf>&f+o2H}1W_f{D*O zqsdEi5iYuJpjcvt1v@=|u z^}H`gg>Xj0?KA*@RFNcwO*z?lvku`T_G2bnVU1HxwN*;Fqrj3>h_W=CUxMPmXv7l) z8{LJMtB5hsw&dg>sf+4`Y;UGIMDPK5iN3twQau6V4?TtTX+R4qLqpJ^c*EN*tJ9Q4iC?|ZywdtXE)c=XT9DI1OE|={(XqNT>Bakc}SaLtRpHBdQ?^G;Wt1Ec4p0 zL(IL8tqPvC#KttKy=i_~zA-4tLk#(QF&o{7jn)%^)gJR%>ov;SnOn)@n7u81jhxo3 z+1G;xEZ}Km9keVcySB5UEn|4{^*=sI-zCfO4r5-e<@|P}tqBqElWRgt(~dWi-igEM zTw6c_;lrhFgi0X0x^X zzvTgljE`1oNCUb)yUol{0j4jU3>c71C0-695pcpha7(+cQ*p`%Ioq8X`3Ay@9Vxgq zN4xLP5mR!6=cVg!$Rgo7@t-6Pk{|WdJ_VG2r@RJ?+65m%Ay+HS zAGSub+U~P?f6~eO_LGAf3kujMkBi71*>=pBQ|nEZleEEkNQRdii^3M%tjP`+__Qh zq{X@8ji7p#p-~^KA8`-Vgcw+Vho5X{U{!o74M24zU2l!)9Cpjc|9SF(I8a0!1Gdus zqqyWHB}|O!CDqO`(UZ-(`y&~d@Y4pab@PI+$`Aj0cKYG4#TBE^3x=cZhzR_I5M#5) ziO76%gDmsx{V6n8K`VS#LWgNshpb-|8|6F@lu5uZv%}^&H)yn*NZ4V2y4e#5^)==I z41Lwo_?#={M@3zbSoIp1Wp*&Z;IH{XGAtI3Tlk~!?On3y6J$^0?#799gg6OM8KQ+vg;aOYV#`1$nyl_m zYyJ!>{qJt0zko^P#CvPeg|9>*$Z0ztBy$9B$(0yS-foql&B%p4K6g>J9m2+6udQ5Y z86w@+SqwD=d_R{Z3WX#W6k`!!5^zBwQ0vIVP)4fQ?Xdw}BBbWEU&AQjGgB#z5leMy zoeA1@5J@$FG!Ym5&Aa0b^OivRH9Q)3kXL)#UD2^1;%mc@mSX%|-RWu4-MfwOixX?) zC7Z%XLspr;hfGsANJaR&q^^fd>N7w}ge#9IiO@(ioj*2COGIFf+m#`j8FB24$wlyI z*!R;NfV8zs``tQ=7RByy5kw=&pWERTShsR;#nz z1z`Pgj>s?r9di1s{nQ_@+zX;4dK{xriHN|OFq#4f)#bv-Ic6h75lrnMHle_um&C5R|-mVTs4S?#F4RogX z&^gI6`Bg*R){Z;1MQu_;A}pyo3eE8;RcrIrB6o|PvbcUjp0{q^SB>1-!I6lEdr`5= z8r+%+2}Hzmd(Ko8ycvA~wz5=EousPAOI@@iy~uO-zX8P-HHd1E7p(|V%Ze9U$_skE zPUkio=MECrtQ+^8T`qX^9EGm8yB|w$%x^HIEM!!)f3>~Re6$f+jIuB5k5(PHs7o|k z^a!5NJl3v;w}TP?F+y|TV)Q64|Lc)QhpwFPgFyrc8>VxNE5n_RYp>+Hd9XY&+{K1! 
zt$*jS!^}I#;<`yJerMIk_=klL}cbb$0cNQdH zh+fuIS4)rX=fYirvt!8xw>iwYdFFc;yHnixwZ+RnXw8S|50Bb)JRe?UOy505UMM=e z&Ah#ETXNC+v%|fU?5%6$_T+8n?ur12uSTVBDHJf_0~c+Ti}HUs+R-7{2Dhg|@ZdQM zMYFi2muoFG4Q|V9yar@bUJa$$_28F%Ns~D@VaLmuAV{`XM@7>CY46 za)};pn4LKS8Q5cEWQmtC&jutmt|o%1(u$jx`k$&nErvF46k}ymNbG6acd!-3-hbgc z^bm3+y&k=`aVIP?IfwAj2?HK4ii zYK*$?wkKMHH^cSm3G?J-t+90mI|Vr1^ucPAhU=N#ovpQGDF1Qu1MB)q`HgDVH&_Yk zcIncZwCfAG$3;&K6#*AipE7;emN!lxLHsCgR9$esoS-n z<8XMP%e~~@obuX0Z>fv#{dd8GiyueWFvjVNIg`7AcUIK*wr51&__bB@@voH#C`NY> zXEkg&Dc_e3t6y7^z(d_bpXsh#X1Vd~4aOQNmvbDJyagDJFLJ_^t=k`7jIdrp3_buP z)aT8$^dV)th=QSG4>`ABzRMClmq*To%a1Q$O&o$hiw@nJZ%=00R+#^EuUj823q&)Q zHR~Lt71@1K;wqbkoqJyBCwSR87b0*0&U4y}2u$kn5!1$2kjwy+CcvEDiEy(tqDuH9}5p;`Y1Hg!WVx zJ9`vLe#-*RVz7cN_Rfa{0)d-oGgt~KiceD_nDsR4KB-Ykuhg%ob)Vagpo0Y|wJ!i1 zLCPP}s*Z&^vp1w&)jm;wN_o{UL%b=oQQh%Rx3kS1IBhX}lIt^B+4T58? z!1iBnK^UfA~4- zkynI#?*85RLk;)Ff=aO4lhW1Vp`)PcRLEosT=-tYXo^a3o^_Jf(~}+NA2?au%)gC# z%d695gm(_kTbm7Z214<(aR2-hE;0eTsoE7h1=QrA60Bxtyx5zM4fSAPpa1@7<0Kw` zeSdrVc=wgXo5IiBMFY{Rzn(o_Vyu-4=QkDtoY4r!mlROAlsuj2G|Mz>zgeU$LKQx- z?!OoHAsG9{v+=|mQ(c0O@q_C{uU$^fY*cgbypI)i@az4l_6i=f@DN{fa!^|luNPRe+8pb_Z957&h?ZtHmp&pe zwi%eqmw|agVPK)SqmR$=S3!U7ga;QT!LefaVvPKuPTLOxNdE+C)mk{Myi8^~!!gcL zTc4V>O=JUIU8ZHPgXg=O`tDv$dk zMNX^Rz{MGK8lKP7_jEFOEvKbKts-KYfMgs#PYpve2~PAdCsJ@WrMq%1Z-(P%UrIdz zV%fj}63d;>0h5hq+U>IWp%dr|4V~xmX_@W~^{u2H4|HWTh;5=_|ASNfVTOv5C+as> zRhJ7zOK5p-a!je53&ih=Goa=Dx;e@}!SYf0m(k{c;0y4EycxdnjKwP?XvDmeW!jD7 zFdyD4ipC|X9iQ3DGd;fhadD4=axSa#8!Zbl((PWgJc^5WLWr;+d>~7DeRequ;ap0# zIo6|`J8(tr^SY_lvkkZ;+LQ2JI%@K9=_S2Q)qM~NaCW(;q|}Lo4#aQ`#BD$C*bzuW zv8T{SjE1`Jr`D|_#TG@%aDlyh66O>HqOx1SiK5ljYPd*u>V8cQw}!r26{y?Ee_8F) zER^9bDS_N3{?~GjWX}m-J%0nL5ef){0}>@Uye3D6)Llbt3%rhU4m2c4*O{AFXAXECP8eOr8FV)!+?PCNgLk5WMD2b% zJqb+nNa40|Hv948v#=!GH)Qxy$zaLqU4z6Cxu0mel!GvAgB)#q|KG(O1d(iQM9)7H z?g~b8fpJj*F^*Y&H6tuemIze=z3-YFq)WE32~KWFhPhC?lEtaveq2hKunYDZ^+_%V zi_Sip*3Q-kuS4ey!BfY}zDO_H7oNdQ*l6XB9HHxiO{wmfR|6nHeMd>YRD`rw-|)u14=cCGD^8 
zgK{2;$RQ?+!@)DwbdWfAax$QqZy)L6vm43xeyVA_&zH0FY2U{2`Ej?vWjWJ0mILLz zdpNA{xqRZ6xZCWsiwfUaz8`9(-iyD^RRSRMxOu;t5YbyirFUc=`s_7`nKWip9LiO5 z?0xQUhsp~~#ki!@oMhuS#9J`meq1jWEe(b3r9@a&Z@#7VVs;?%;$!QS_+$c~>=6_5 zkzAp|zHZ%zB+?ZW!Yii+rVvp?b|igrPXH~!HdsaJIxOSXJoVX@5fd>X4KRo#xs|xtVe@2|C`VW(nUXrEYIvr+5>W2vZP8KNeN`exMkJEZp~YaX1Eb@M zKupegIul{^_BZ}HBjVCi64%U`LhJ;Nak~Xm2EwUhGcbVc7sTy#aS;QMmX%Kg;RYK_ z5ro|3aUnS^j_}~r)xhLUm6z;dEm`^DKqA;4-VxZn@sj&B!M(t4yAVDSBvRa>b8hAt z&3t;40mB_A^H0}s*&4)l(_y*xfd&b}1#nyZ6^H5f3_*rN7_&qB?iZWSoJqKeW+K#T zsE}w@O7G7sclPjs%E2)WiAykveI?&{+QUjMQ`^nC4bSup2?oBAQPEdi{s*`4emU`$ zmXr|Qe5b;gMp~vIkkQz}LLk^Kn@Icq1zQ>}u*dGowKtrLib`Vb>k{EZ@~=-aP30{hn#c;4FGIYl%v%I`MGlwGQ zQ7|f@a;ofOLHRZRO%~4{0wly7`==z8K1Ns8^^0R;PbqWkIZTOmVyGUfO-KmvlRa1o zzAnDlAKBWE%?>EmD3>JcI_dn;r23YXU=|RJfqP&0+4QlTA&(P~WIp(FdstBk{fw?< z^gW^UXoWr}U%7WLAtVGUkuXp&PIDq@_+x;3Ee&uHK4Oj4=^X3}G@u>xxDSr&^WW{J1Pd zErJ<0lsyHd$9@Se6vY1a6knjerY||^dugqwC9M}Z6657s?=&brtbRv2@FKnXbUU&I z>&dabmCx9Sc>N(B*ZcT(a<4i^h1j`8Z70t89lvpyhk@$=bTVc&Is*m`bENn4#w^&o z#0pArn7hbh<*m&eVx9pIfjh|!04k@C&i|@ylKY87Gv4-x1)TEB(T;#BHyQ>&w#}?= zNy6v+VIu5+6a91c0UQEe-Z#hsh^uFBWpLc4i&Cil7yYFV|NSkj)v~ z2**A#e1ipo_N+=dc|p>70QWeYl-vrnr<29-g+md3J{APsDlAy3PI*%-<0aYDws03> z<`Rfdfe(r403himo0CuiwzTXGQA0DZQB#UmiSh{#z^pMJ*^>-?G(l={+xJ(fO=?kO z;p!!M0gECRp!zdYmZ~mVNhzwj&$Y4^gNsimiFidEV; zvL=2NdY+T}l6@TGsNNWH3Dlc)2o`E@87{i{UG8M~xgMp;{%tQunnIW;3<&i}xE|xD z@lRckk^E6mXe_s8@KEZPKIDShUnO$fShlv$32lijL|QX)Zw7NcyZ#kCym2B!qR|{D zNq}y{=My!8dCa0wU&~2J(1x4-u4r8Lmm(=Khe_DOa(sj`?kC{@B=y$Z@?;eyOBvHL zWQ$G44*}J`XvS=OSmqOwUah0;3OJ=3E6S`w&L-x7)k16V!QeahpKmK& z#-bmXer2unZDB>Yb!>D#;XzS%fDB%Idw##~`O9jYh(Q_3_JY%5lC}(49jCC9I}I+U z74-a9u%1P8>qW@)5GKfuxCXrV-KwBm#^X6M2(ScNRS7{qpDZMzhQ}Yi6vcl|qZQ~~ZqOmc&{R`9di-46gKG)-vd@jV5P3L!`scm;hYLjJ@ zvw6qq+|}sm?0?x!k%BC-(i-v$7x_ zB5bQxPf2i>A_Sa_z^OIG`EH!Ox*k4NEHDT+pxJ2s3Nre{ryWiEcPv0Vf`#h zc{#*lD&b%xCvo|D&tqMgBulTU9Oy&EZIsiIkkV)=8!_W<1iDk+LCv=Rd+cUT** 
z455w|eutcxXV7o0v4<_YkJcI^boUi~+>C-v-s(i6_uTa_)nul2NXvz8=g6V~D2qSA6ZCzz{_5W{1ZmbojfI*H>fG0YIkNeRi&gSjov;KR0Z8C^c}ifQ zU*mnvGbI#L3xn%r;-7FeF@JKK$n_46nu*{F(uAANzED6|%<8`fPzv^iGGCvtkt!3= z*!i_aj?uee6n=(tKrV{mU)ozMB}r6BiGwq^8dI;|t?Im7Z=?t}AJEfY_S*Z}3y8=T zHkaU6lu?j@9z-maZx0-Cu_OO4e&rG|4JR}%(_xoYb|~mqaw;I54JEuKaFh&Ad)i7< z`xlz20~MZ5(c1fHv)J;>U>zUiGvdndljCJ48$Xok3SJmbHFrkQDlI*~U)_GMgDK)0 zWe)8y`%;)iSdhCWN8Z6s3RDUmK|oxhFvm0t$AxMqGIn?Cu4T!CE_2YhN%Fw_MbklV zX=wOuM#L0=%r*m1WX1c~svn*XZ3cFaLic3~%ml*QO%v^FZUOJIMM(|EqjeqFHn4y> z09AjKCU1aWLxoR-Qc3&hE7J(xq{KO5tZ+*=g!c|X9OdOB4W)1~oriwN^OpkksTc_B z?DuMb&AYvQk{efMCN|lrw6vv8uXMOK3;i&#A+(rDEzeo3DBf+G2FqgjdOZc)jvb4I z*LCao_7t|`r?Z4og!@=!1;;1jY(q`Gj_H3Ly)Wc7O;&w(1_Cr0-TV3~M{qDz1FE*& zYL%OlkcMHpv2#q>Dsb^L=Y35{)HZ`P?5vp-;q^xM>>4h}Mmefv^rE{15;z8D1!1Aw zg^f6ICL3Dggz)j>25dukNIT`>;5I z2dpWpGdNb42t^t}Ge}+`Ci_kZYk#U3$CaXN6ngn*+?C`p?TgPbq{o)Z^#sLD`7sa$ zmMuuKpqG31-MIy#UPI!Mi5ss6JGt0 zz_MHeMJ$!E6O1J#Tw*1Reb6Q7DdCq-q^v&%Q?W^aq5AXDIzW|^NR+jcUZ`}CB9Wb7 zP!fwi`0VIC*+41XgsXm(yK;U~%0T39LwcolZ=|9QexAj|Q?XyxwD5>}P|qY&NmGTb zkU_5J0P`A-?rVwR$YgW8-skE=c61^a)nOT1W5o&I=2W-~gEV3%;}|1$X>G-wy%p# zg*2S+&fJ>w!=QkfimK`|?mY|?A5KKL4#OHckO`-Muczg1OUkzSy@ic}3EZu%Lg#X2 zK)7B*0AE9gcWnIIVLVrgZ>Xq%TpCfm!-%6~U$$IHw5n z*OQH-aoC=XulAGB)~tSeb9FLbe@1SRX;iN=`i$#pK=}^{y^;eAgO4K$6cv3TSk%6} zponZNg|9O9sKM|u$WQ?#oK_PC=?Rm98eTlh+ch2)T4eW zC0tr3qWcX8Z885==haV}GoD!avFT)&T)!bjc9?**WqOx2S}^LqvL8+wstNKN0n zy)}X}2Ou_E>T+K&bdg%fv0+uLPr~eA9DUlq|H9PrkXUyHlG{q@LU?VXt9xv?P?mD9 z`me@pdxTXrbJ7(l19bwO5p~OsH9nSizDj{B9v;*SA2}|QAg*rmXKS=Isx`we^EUj9 zZ`vzff!0Okq@d`$vjbA~HJ~RO6g0p%zKCY71?fWd)lCA8-9#CB==gsxWFea2ef=I* zI_$iKVx9H=xZD8o$?mpcZ^V%2#?IL55ugIFIDx2B-b--~WaRE&m`p}p*~_?@Z?!u0q*hV039RvBLMT9*%M9Gxj$6il z^_V?BeP2z4VjNFW)#Ff$Vv1=m#UMSp&VMZyhi^GKC^7Bvg5G`~>bsd3kq>aRG|ZIH zwBkC!bL`OEF`cl@tl>c%*%$F-vHt+bX`z{hLArv^0KC^Fn8^NBRNCYc*qlm6)w5M!y0+EExhvOzIEZGGnsbwU`5%ELjN{OOKIp8o*InkI7af!z2S| zfH{Pz{5gqXAXVeJ9yYeGB^|+tz(8_ho;l(I=>4%|)@M?1vKqh;EzGoj}*1sshr@Ud9^N)A8hC){Y9JJeWl 
zCvpGYSP&g5=sF7LG8(IR5SW8zJMvnxQ;9uq`fS0fe!(-KkjtS!+ld_>Q3q;v^_2*{ zlO?BY@Ls&^C*qhpr^V+C<5={nZMZkf8(u$HhB{Pfs?~lq1;7z6@Gni40rdkn@~Cau zECN>ghd(aY>_Ty5<+5WR-W9SH;16WSJllV{h!iXKH~4&$>5f3ma279e@^fu&KDga3 z?shuQhshy{Z>`00J~WbN)azgpbD?xQndatUt{K<0q}MZYO=yPA;+eq<%p%2ivfQ8F z;IdOJL`c64jFyv?VToG}`Zy``2Gr=W$P*^)2*ahb?U8i~vEtsm{TQP?EJL%ocgRs; zI?`^u@W=A-{-L6FX49=zb@$}qA!D#|EZHXj%qWl zKz2ViapSbze>2inMKUv7vrg)rZe3q9cd=+GpW@&^x;s{`ID7vyu~|5OWe7k`ZR6_A z#yTCP)s)p%XCt;RObHPbk+%lQKfc`66)AHMa8kvI*r#7UvoM%DGWA5e-`C~OZ-%`|6N$MHv|-|W;dC!a~xI;D<26<0qQ}u-P;Qm3tFB0?klIEnXv0_(-C2L46H5; z=ckO|-Q)|Er@c}U6$^i&WKTO(JyZDENLplNq6R!Pq8 zP&Sed^4In+F^`fa;ubWJvSat%fOgTvN-${?&-o5g`KN4sEs^qg;&Fb%`_`Gs$5e&m zyo-n}*jaLE;Hw?&h#_?aYTvenoDXgi-Xu$8Sj}1A+jz}i&^Nx|KfO8KudIIfVt-hG zTUJ*+OO#^oVH>bP%&-DoCE~wlQ$50Y?#|UN|4W@~q!;%dV`|EL=v?Uq;e&)yMTJiD zG)lsWl%2oR^_o)!`C>8O>p^LsPv=c-L@ybNIU0Oc9qtWgj!M6;IT>qJJWen>?%l!D zK-`s`b}pVt_xa%wgf(>aKoopX$T% zde4JWFs)2@zc4N9Q{biZwiGBTWBr}LE!YSzTb))g)fQYwPA>)misKQJJ#y~?Vt{t3V}08uMhbOl#%| zD$$nzl*^LWj2ixpu7kT%^YuoG80i4~UxRcrW%??MrD1=WF4vjUX;6VH37ycSaRF_R z`JU)aMFr05{c(BR_u)D~XJsyb29!{jnRyw$sCJYFCO0LPEoFG<7%QS2q{z17>NrM$=FK7Rhufv*k^y<4 z``z%k(@e)24G!28a1CZJ?RyJ^*{2j8^e`GA1?nQbum#&@t^HcF7ZDoAvND%3`b**)|<2}tUBLl2p3>B-{j7s!@p@)#<#(o(Hm-fhx&$0+D}=k zqKZFt;jdSEk4`(AdC7flVLa5j)aEf)7|Qpv{gS_rxn(7*rk6{A+N<@(uxO=ClUw1N zUVb)D4Xefpdl>>u8O+QoZ*|XAttjmn~RN-&6^idc6_VuPWhZj$Cit zKpy8}22h)xxe21BL zt#7^GTHk*#Kjq}EyKZjI-DltDJbOQT=Oc=E+D%`d zemoeNpmq7aF}|Kkz*RdpY=0{>DoW_IS_}bimjgGaDymz7Kz2LzH^pVi!fI)YATs{J zwdy&uQzp~gLC%YIL}u>m%)-f!h!GjhIv4^e-Mukn?(ETw3vLz4@V-8CCtc2~iVyDB z>)&jki;?t7xs-|#o(v0~Jx!51 z5xiLFda|lV9>ihDeSc16@XFHP4*P1Lk`>3TX1s86ubDj-4{$xXDRgw5F5W}JDnoz?WlG=WsMw8yZiXz|-cir-qB zP~E39o0~yFvqSb!9Lp>S!!?!E&R(SU=f|ddj@WtbEaa1f(0(~qpsmaIt;43j5FC0kEFZdYf`*0o0t@)bol_X38r56PcO1ONG@0NfR2-;*$>a_+{G0SThJwWOo`OnETH4nL1=tw+%~U zgAnh$@wfpskt_?d5)$L^Zehh2Qb8YkO=Di%Ojh8#B250>8Vi%c{?Q70V_2=qkHwg8B*P@Q2gqfJ01Wyyoy8d|cI>yJ5ubloUu1n405a z1e)L&in^3^qwbHh;~;$6W&QpnLK_oIFkGh&7?)6gQR)qNJEx4CaZW!Lv~mr%;qYI} 
z_dz*DV$~AWS-FvjYuF~MX>Uj3nM}1*Ip(uOw1T@JzpfL@MbPTg+?lw7+LTLV#f^&Q zeURdue^w}=vJf1y_V`AOj40xW;VDdDo`Q(0b&2Znqw%`h9)=iwL4y6gli;}(xCDLR zMsh3Wq)@XiL$^&+Z!_Z0a-Pm*@*}I99iOu^hhi}Pk16SEevu9JKrLP1{-;L<+NrPK z;JTQ`Do61cEth^?wxfZ(s+@c(!_OlZ<}wzteBC%%^yfqxoBOIgNV!_}W;5A!n$tAB z#xBbDnk7v$yFfI@RaD&TaCAg}Tz1*n=$#!5*VL_To%I#RJ@Ivh5XzB>n6i=FrMw!cezam~50k=VPVy9$NVWj691oc4S(@I2ZXy zk!0g6=RZFtv%eHutd4JS{PsPgYPC^~C@B~FacP{X)7P?$bl2+wm;B)nQy-DXxg~OR zIh3ENw}B8$@32;vivFp~t$8he!kukstP7{3y&v%M_i8B7lzP3tS}}*2TFhy{F3|LA z_=RWauN0I-+|~Dg+ij6jE*H{cMo&TF)Ah?~-6=Q$);Zqjz!H_;SA;K83(S08u2PCq z%~pHo@_sB*(dX?pVO|-R&ZI9+%m^~Dqoso}Vi$VfVuWQ=k%sR(BM(57`3JEltps?$ z5(_)p4t9LzY+50^)|)v$-ASHKO!brX*@_dUGU_V>30>-kHDpv7Y;z+*P{Ovo*65e2 z_&;@UY`&hmAMdL*+B74}W%(Lo;a;hkgvjYuJnCl4k4b;$v2&?wUmTV{49rS4wr$;{ z@O%E{;Y|1*mPYy(bN|-*{f!G^FT%gGe%`fE@zj@o4OS;B>&6NB5<~96)O^XTgO)qg zeo9A@l$M(q2spST%#`<+)&scs2@={dvTlq)>4`R{`rvK=DK~Q}S#YS(YA32ujNsh) zWC`XqW*vUnGpWN51=#VCpI;-rnJSHo*%UGS%)$!G`dP;-n0* z=ID_nfF}oW@bQvHCbGIC=CWL|*9v(#4U*$Zo9~t2sh6?eTM9bDo^k*wxe_0cs&SK@ z9653xlnQfW&&G7d=Tqn95Dkz8M4eVDBg~rT7X|ieYgeppZ_)%yZj7$R?3>mo3}-Fa z*=O^cVpa^elQliQ?blHsk&32muEVxvl=16x-PJ&GIUK<&W-(FZ*wTu zq$>eZmP;IB>tC zjtHY$B5CuJRbmBbAi?uZ>riZ##hXP$MJ?6}fAjtEOL6}zlQt2Yrt{21J#79#K#I8( z|BisC)`|h5B>YIn&CGFCn4f;{%C*Kw>-A|{)>D6)Wa_iyKN!V6aM^;@k5qd1p0S7PM$H#l}ilKnBB@3yXfgRXk=0wXu{Y&BtzsM!J1c(0J>yBEt5%4iD`7B(5eEBy7xHye0L-l3LjgMKc# zmN9JntCj@@Fl9z=rlyrVv&COBPrkP_dBCVwe6zb-c3ylX$E})-EMoX<^3~`!^lFq{ z^X?Vds(DwhKt9TH>};~q&Z@s^STjlWP|#gEz0I&LdmCcu*B{qUeE7Uvm=rYYi8Dlm1=wwDWh zTKzp{DWVs(i>(Gk0i2cWOelP6NThVjuWP2>c^*bJ8iJzW3fe|A>!EJRp;CuVqLC>` z?*gQs*AU*<>k^2(7a1%S37II@LAXuf%z#@LA_{BkXH2Dw`^MYLEh&Xhg80ZgJ;njM zs_kliH|hA1sL}(Bbif29s^gJ^0TLn!i%QloxY0X93DSq!`8J7NVT=XN67N~0);4@b za?4h@F#@#_f2xZQUt)LU;`YSmDoKBgv3G{rUeP$d9J|>KAwa#gnRNFR<<=WRQ?VVh zmVBAJ;;DN?cJSK&ZgM!7?6>#go+KD+WRg#_lbV*X2FvETd$#?Fj$<3Ie6f&4Nb?Y? 
zOninpa(nT!L7x-h8z|T`*y(DJiFvIZa+J#r9>>E?Gu5&I?YGXr2Ekr72D)Qg2Pzf4 zM+MOt4CK)h=O==v!S<1u>fEpp_L%y|9$+C~g1VWWa9gl(Uzt6J2JqJDdnMA!Lwj>P zpb>X-1|J6cw)7}VxD5I|1@NU&}FZqp@g{Z#;v`U?sLD7Lzk^*Za0guWe~ z?nZaz1n#qo_QKXKP^*OSGV>oSktTj=^b1>>l+-w_wJwALY6?YT5k%PAMDu#r?Wn}+Cer8{{^C4;iEA&{fuL7@I536gvz+u?tS^4aI%c&f|TAvo{J`RT%acAc8~#j0rBI zV`$OKP-!ui;Gp3MahrbZ6_s#EpGAZ%0jf0U@|@1fu;ydq_IF&{5DpsQbRj+*Kd5mt- z-TAPFR(VK}l{Z_4R-_LYEMJe77`7dFSjy@f@Vnr1gPU}~khfa;%<$IH`u-U6qx-DS z61V5jE2wb~R}rAkmQWnKW_r1Kokp6{6wT+<71gkf9~N8gj`ckNai>`47sOxdyg{fO z3aVgXEY(#j(DVOToE{%guY>1Bb9nFH+bY(qTdIO3H-|i!bh`9j9s`IRyQb6^h`vPW zFGQRVHN+h7ub3^uZu5?Qx>gf)li+{bo2wy%f(O%jnMWSF_#7bTQmV66tj+!wtF(oz zW82O5$UJ1FVN$S^Vs|~5>ja70Mp80+%fc5@CDyiHI5J0pP@8gvVx!CFU(w)OB4`Q& z4>a;AGw-TAOiy3dTBUJ^QZxnvf9DO_Xwdc)LkaK0qGQ`1xegvS>tQ64bJE}^KS%@1 zZaN)qTpik9QnAW?t5ig%{w3ORZ%%$VialYmr^E+#H)nfMruAjH9K*0Q5cx$UR7e+h z&Ly5HKk&2IravP`lAQpkeBmI2vKrs_x9Qv4ELA90Yb#UfqzX8B*k6uw z7qA5LzQ}gJsyLC}WhI62vV9BHOPO7@zq~1nn5|K9xVXIA%q~!qG4MiY`u#_k_hhK& z_iN$9mz?Y0g89-?@7XnjAGBgRE>t7J3Cha5AXXs7W^bs{xONB4kVsFpoa_VyU9Y?kld<3$V>Fh%foS`VuG#)@SbJ!MuEwd5uWVA}h zD;4%zV*SzjK5R6^WVpWx*~|3aO7Q~DftV~bR6S=2f4`O!uMV>|Q6E}~h3=Vu)1>Tm zLCGg&OYPftDzZQ)Q5mmgi@W2-RGlI(&(E)qD;`Jszk_u83PAm8NF^`y9s!}gNN*Zk zjI=U$$w;RxBG_6mp>AWu0p^JiC*)e=rP%{OJu!)v+A5Fvp60NVmnkvoMv0`PU$O7n z7i#2|YqH(Ft6~WJ*47!u6Gc9><{URG@}q}{3uw{d!BSUV`cB#FCu&ljfNhpPH=OO6 z{%Jpd-F&0#ojg=a8yHK3krI5Z;>tmz3N7_G;ZVuUnZI@|S}n$-&oB$TbPzd3hM0E_ z5PrUgN*&|2s9NMBgLp6K)_~bYYMecyfWTd1`8A>IQ@gasHBy;BW20E|FFO(Rmlux7 zBYILom$T7w3L5}@zDSzwEz+hR6FOR#OyP3cRG$gPEx10@mu*x?+?Gy{e@Y^&y7l~t6fAJjdZ0vV=z;?A);Ek*{(qKIF!|wD+`UJv5f(~8 z(G5@SYtaIzt*q}5j{T*g>e5>vwvbAB_$RkC2Rz3OAHpdIDNkYF`y4h#j}kYplclA~ z+g(vLC4_Ga?Wf@n zQ(ZBzbNj6)>jq}9FWH2cEh%1Ya}HOv2_1PR*Rb!~uFSplwHr&kQ&~(EX}w@NU?BCi zEX!$(_U6koS&@qeoX10sQ?xMF=U%|kfo~CdO6!IYG(=MtH|4B(S7YYfaHT`y_G@@N zBYkZd_(EN_&$t`#f|a=SzoSCHV$#^8(G9m+`VdY@N7gMTIwTu!j6Wm?Xrx_b`6{l| zV?K&jTse5VfK!WErNhk-R7k406s}`cmMzZ literal 0 HcmV?d00001 diff --git a/examples/rbac-remote/install_feast.sh 
b/examples/rbac-remote/install_feast.sh new file mode 100755 index 0000000000..b87d44b335 --- /dev/null +++ b/examples/rbac-remote/install_feast.sh @@ -0,0 +1,109 @@ +#!/bin/bash + +# Specify the RBAC type (folder) +read -p "Enter RBAC type (e.g., k8s or oidc): " FOLDER + +echo "You have selected the RBAC type: $FOLDER" + +# feature_store files name for the servers +OFFLINE_YAML="feature_store_offline.yaml" +ONLINE_YAML="feature_store_online.yaml" +REGISTRY_YAML="feature_store_registry.yaml" + +# Helm chart path and service account +HELM_CHART_PATH="../../infra/charts/feast-feature-server" +SERVICE_ACCOUNT_NAME="feast-sa" +CLIENT_REPO_DIR="client/$FOLDER/feature_repo" + +# Function to check if a file exists and encode it to base64 +encode_to_base64() { + local file_path=$1 + if [ ! -f "$file_path" ]; then + echo "Error: File not found at $file_path" + exit 1 + fi + base64 < "$file_path" +} + +FEATURE_STORE_OFFLINE_YAML_PATH="server/$FOLDER/$OFFLINE_YAML" +FEATURE_STORE_ONLINE_YAML_PATH="server/$FOLDER/$ONLINE_YAML" +FEATURE_STORE_REGISTRY_YAML_PATH="server/$FOLDER/$REGISTRY_YAML" + +# Encode the YAML files to base64 +FEATURE_STORE_OFFLINE_YAML_BASE64=$(encode_to_base64 "$FEATURE_STORE_OFFLINE_YAML_PATH") +FEATURE_STORE_ONLINE_YAML_BASE64=$(encode_to_base64 "$FEATURE_STORE_ONLINE_YAML_PATH") +FEATURE_STORE_REGISTRY_YAML_BASE64=$(encode_to_base64 "$FEATURE_STORE_REGISTRY_YAML_PATH") + +# Check if base64 encoding was successful +if [ -z "$FEATURE_STORE_OFFLINE_YAML_BASE64" ] || [ -z "$FEATURE_STORE_ONLINE_YAML_BASE64" ] || [ -z "$FEATURE_STORE_REGISTRY_YAML_BASE64" ]; then + echo "Error: Failed to base64 encode one or more feature_store.yaml files in folder $FOLDER." + exit 1 +fi + +# Upgrade or install Feast components for the specified folder +read -p "Deploy Feast server components for $FOLDER? 
(y/n) " confirm_server +if [[ $confirm_server == [yY] ]]; then + # Apply the server service accounts and role bindings + kubectl apply -f "server/k8s/server_resources.yaml" + + # Upgrade or install Feast components + echo "Upgrading or installing Feast server components for $FOLDER" + + helm upgrade --install feast-registry-server $HELM_CHART_PATH \ + --set feast_mode=registry \ + --set feature_store_yaml_base64=$FEATURE_STORE_REGISTRY_YAML_BASE64 \ + --set serviceAccount.name=$SERVICE_ACCOUNT_NAME + + helm upgrade --install feast-feature-server $HELM_CHART_PATH \ + --set feature_store_yaml_base64=$FEATURE_STORE_ONLINE_YAML_BASE64 \ + --set serviceAccount.name=$SERVICE_ACCOUNT_NAME + + helm upgrade --install feast-offline-server $HELM_CHART_PATH \ + --set feast_mode=offline \ + --set feature_store_yaml_base64=$FEATURE_STORE_OFFLINE_YAML_BASE64 \ + --set serviceAccount.name=$SERVICE_ACCOUNT_NAME + + echo "Server components deployed for $FOLDER." +else + echo "Server components not deployed for $FOLDER." +fi + +read -p "Apply client creation examples ? (y/n) " confirm_clients +if [[ $confirm_clients == [yY] ]]; then + kubectl delete configmap client-feature-repo-config --ignore-not-found + kubectl create configmap client-feature-repo-config --from-file=$CLIENT_REPO_DIR + + kubectl apply -f "client/$FOLDER/admin_user_resources.yaml" + kubectl apply -f "client/$FOLDER/readonly_user_resources.yaml" + kubectl apply -f "client/$FOLDER/unauthorized_user_resources.yaml" + + echo "Client resources applied." +else + echo "Client resources not applied." +fi + +read -p "Apply 'feast apply' in the remote registry? 
(y/n) " confirm_apply +if [[ $confirm_apply == [yY] ]]; then + + POD_NAME=$(kubectl get pods --no-headers -o custom-columns=":metadata.name" | grep '^feast-registry-server-feast-feature-server') + + if [ -z "$POD_NAME" ]; then + echo "No pod found with the prefix feast-registry-server-feast-feature-server" + exit 1 + fi + + LOCAL_DIR="./server/feature_repo/" + REMOTE_DIR="/app/" + + echo "Copying files from $LOCAL_DIR to $POD_NAME:$REMOTE_DIR" + kubectl cp $LOCAL_DIR $POD_NAME:$REMOTE_DIR + + echo "Files copied successfully!" + + kubectl exec $POD_NAME -- feast -c feature_repo apply + echo "'feast apply' command executed successfully in the for remote registry." +else + echo "'feast apply' not performed ." +fi + +echo "Setup completed." diff --git a/examples/rbac-remote/server/feature_repo/example_repo.py b/examples/rbac-remote/server/feature_repo/example_repo.py new file mode 100644 index 0000000000..5b8105bb94 --- /dev/null +++ b/examples/rbac-remote/server/feature_repo/example_repo.py @@ -0,0 +1,130 @@ +# This is an example feature definition file + +from datetime import timedelta + +import pandas as pd + +from feast import Entity, FeatureService, FeatureView, Field, PushSource, RequestSource +from feast.infra.offline_stores.contrib.postgres_offline_store.postgres_source import PostgreSQLSource + +from feast.on_demand_feature_view import on_demand_feature_view +from feast.types import Float32, Float64, Int64 + +# Define an entity for the driver. You can think of an entity as a primary key used to +# fetch features. +driver = Entity(name="driver", join_keys=["driver_id"]) + +driver_stats_source = PostgreSQLSource( + name="driver_hourly_stats_source", + query="SELECT * FROM feast_driver_hourly_stats", + timestamp_field="event_timestamp", + created_timestamp_column="created", +) + +# Our parquet files contain sample data that includes a driver_id column, timestamps and +# three feature column. 
Here we define a Feature View that will allow us to serve this +# data to our model online. +driver_stats_fv = FeatureView( + # The unique name of this feature view. Two feature views in a single + # project cannot have the same name + name="driver_hourly_stats", + entities=[driver], + ttl=timedelta(days=1), + # The list of features defined below act as a schema to both define features + # for both materialization of features into a store, and are used as references + # during retrieval for building a training dataset or serving features + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + online=True, + source=driver_stats_source, + # Tags are user defined key/value pairs that are attached to each + # feature view + tags={"team": "driver_performance"}, +) + +# Define a request data source which encodes features / information only +# available at request time (e.g. part of the user initiated HTTP request) +input_request = RequestSource( + name="vals_to_add", + schema=[ + Field(name="val_to_add", dtype=Int64), + Field(name="val_to_add_2", dtype=Int64), + ], +) + + +# Define an on demand feature view which can generate new features based on +# existing feature views and RequestSource features +@on_demand_feature_view( + sources=[driver_stats_fv, input_request], + schema=[ + Field(name="conv_rate_plus_val1", dtype=Float64), + Field(name="conv_rate_plus_val2", dtype=Float64), + ], +) +def transformed_conv_rate(inputs: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["conv_rate_plus_val1"] = inputs["conv_rate"] + inputs["val_to_add"] + df["conv_rate_plus_val2"] = inputs["conv_rate"] + inputs["val_to_add_2"] + return df + + +# This groups features into a model version +driver_activity_v1 = FeatureService( + name="driver_activity_v1", + features=[ + driver_stats_fv[["conv_rate"]], # Sub-selects a feature from a feature view + transformed_conv_rate, # Selects all features 
from the feature view + ], +) +driver_activity_v2 = FeatureService( + name="driver_activity_v2", features=[driver_stats_fv, transformed_conv_rate] +) + +# Defines a way to push data (to be available offline, online or both) into Feast. +driver_stats_push_source = PushSource( + name="driver_stats_push_source", + batch_source=driver_stats_source, +) + +# Defines a slightly modified version of the feature view from above, where the source +# has been changed to the push source. This allows fresh features to be directly pushed +# to the online store for this feature view. +driver_stats_fresh_fv = FeatureView( + name="driver_hourly_stats_fresh", + entities=[driver], + ttl=timedelta(days=1), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + online=True, + source=driver_stats_push_source, # Changed from above + tags={"team": "driver_performance"}, +) + + +# Define an on demand feature view which can generate new features based on +# existing feature views and RequestSource features +@on_demand_feature_view( + sources=[driver_stats_fresh_fv, input_request], # relies on fresh version of FV + schema=[ + Field(name="conv_rate_plus_val1", dtype=Float64), + Field(name="conv_rate_plus_val2", dtype=Float64), + ], +) +def transformed_conv_rate_fresh(inputs: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["conv_rate_plus_val1"] = inputs["conv_rate"] + inputs["val_to_add"] + df["conv_rate_plus_val2"] = inputs["conv_rate"] + inputs["val_to_add_2"] + return df + + +driver_activity_v3 = FeatureService( + name="driver_activity_v3", + features=[driver_stats_fresh_fv, transformed_conv_rate_fresh], +) diff --git a/examples/rbac-remote/server/feature_repo/feature_store.yaml b/examples/rbac-remote/server/feature_repo/feature_store.yaml new file mode 100644 index 0000000000..78b13c660b --- /dev/null +++ b/examples/rbac-remote/server/feature_repo/feature_store.yaml @@ -0,0 +1,26 @@ 
+project: server +provider: local +registry: + registry_type: sql + path: postgresql+psycopg://feast:feast@postgresql.feast-dev.svc.cluster.local:5432/feast + cache_ttl_seconds: 60 + sqlalchemy_config_kwargs: + echo: false + pool_pre_ping: true +online_store: + type: postgres + host: postgresql.feast-dev.svc.cluster.local + port: 5432 + database: feast + db_schema: public + user: feast + password: feast +offline_store: + type: postgres + host: postgresql.feast-dev.svc.cluster.local + port: 5432 + database: feast + db_schema: public + user: feast + password: feast +entity_key_serialization_version: 2 diff --git a/examples/rbac-remote/server/feature_repo/permissions_apply.py b/examples/rbac-remote/server/feature_repo/permissions_apply.py new file mode 100644 index 0000000000..93bdf2ffc6 --- /dev/null +++ b/examples/rbac-remote/server/feature_repo/permissions_apply.py @@ -0,0 +1,21 @@ +from feast.feast_object import ALL_RESOURCE_TYPES +from feast.permissions.action import READ, AuthzedAction, ALL_ACTIONS +from feast.permissions.permission import Permission +from feast.permissions.policy import RoleBasedPolicy + +admin_roles = ["feast-admin-role"] +user_roles = ["feast-user-role"] + +user_perm = Permission( + name="feast_user_permission", + types=ALL_RESOURCE_TYPES, + policy=RoleBasedPolicy(roles=user_roles), + actions=[AuthzedAction.DESCRIBE] + READ +) + +admin_perm = Permission( + name="feast_admin_permission", + types=ALL_RESOURCE_TYPES, + policy=RoleBasedPolicy(roles=admin_roles), + actions=ALL_ACTIONS +) diff --git a/examples/rbac-remote/server/k8s/feature_store_offline.yaml b/examples/rbac-remote/server/k8s/feature_store_offline.yaml new file mode 100644 index 0000000000..4fc01508bd --- /dev/null +++ b/examples/rbac-remote/server/k8s/feature_store_offline.yaml @@ -0,0 +1,16 @@ +project: server +provider: local +registry: + registry_type: remote + path: feast-registry-server-feast-feature-server.feast-dev.svc.cluster.local:80 +offline_store: + type: postgres + 
host: postgresql.feast-dev.svc.cluster.local + port: 5432 + database: feast + db_schema: public + user: feast + password: feast +auth: + type: kubernetes +entity_key_serialization_version: 2 diff --git a/examples/rbac-remote/server/k8s/feature_store_online.yaml b/examples/rbac-remote/server/k8s/feature_store_online.yaml new file mode 100644 index 0000000000..aa167731b2 --- /dev/null +++ b/examples/rbac-remote/server/k8s/feature_store_online.yaml @@ -0,0 +1,20 @@ +project: server +provider: local +registry: + registry_type: remote + path: feast-registry-server-feast-feature-server.feast-dev.svc.cluster.local:80 +online_store: + type: postgres + host: postgresql.feast-dev.svc.cluster.local + port: 5432 + database: feast + db_schema: public + user: feast + password: feast +offline_store: + type: remote + host: feast-offline-server-feast-feature-server.feast-dev.svc.cluster.local + port: 80 +auth: + type: kubernetes +entity_key_serialization_version: 2 diff --git a/examples/rbac-remote/server/k8s/feature_store_registry.yaml b/examples/rbac-remote/server/k8s/feature_store_registry.yaml new file mode 100644 index 0000000000..579141fb01 --- /dev/null +++ b/examples/rbac-remote/server/k8s/feature_store_registry.yaml @@ -0,0 +1,12 @@ +project: server +provider: local +registry: + registry_type: sql + path: postgresql+psycopg://feast:feast@postgresql.feast-dev.svc.cluster.local:5432/feast + cache_ttl_seconds: 60 + sqlalchemy_config_kwargs: + echo: false + pool_pre_ping: true +auth: + type: kubernetes +entity_key_serialization_version: 2 diff --git a/examples/rbac-remote/server/k8s/server_resources.yaml b/examples/rbac-remote/server/k8s/server_resources.yaml new file mode 100644 index 0000000000..03e35495d6 --- /dev/null +++ b/examples/rbac-remote/server/k8s/server_resources.yaml @@ -0,0 +1,27 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: feast-sa + namespace: feast-dev +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRole +metadata: + name: 
feast-cluster-role +rules: + - apiGroups: ["rbac.authorization.k8s.io"] + resources: ["roles", "rolebindings", "clusterrolebindings"] + verbs: ["get", "list", "watch"] +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + name: feast-cluster-rolebinding +subjects: + - kind: ServiceAccount + name: feast-sa + namespace: feast-dev +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: feast-cluster-role diff --git a/examples/rbac-remote/server/oidc/feature_store_offline.yaml b/examples/rbac-remote/server/oidc/feature_store_offline.yaml new file mode 100644 index 0000000000..8ed4cc1ff3 --- /dev/null +++ b/examples/rbac-remote/server/oidc/feature_store_offline.yaml @@ -0,0 +1,18 @@ +project: server +provider: local +registry: + registry_type: remote + path: feast-registry-server-feast-feature-server.feast-dev.svc.cluster.local:80 +offline_store: + type: postgres + host: postgresql.feast-dev.svc.cluster.local + port: 5432 + database: feast + db_schema: public + user: feast + password: feast +auth: + type: oidc + auth_discovery_url: https://keycloak-feast-dev.apps.com/realms/feast-rbac/.well-known/openid-configuration + client_id: feast-client +entity_key_serialization_version: 2 diff --git a/examples/rbac-remote/server/oidc/feature_store_online.yaml b/examples/rbac-remote/server/oidc/feature_store_online.yaml new file mode 100644 index 0000000000..c47c3a0662 --- /dev/null +++ b/examples/rbac-remote/server/oidc/feature_store_online.yaml @@ -0,0 +1,22 @@ +project: server +provider: local +registry: + registry_type: remote + path: feast-registry-server-feast-feature-server.feast-dev.svc.cluster.local:80 +online_store: + type: postgres + host: postgresql.feast-dev.svc.cluster.local + port: 5432 + database: feast + db_schema: public + user: feast + password: feast +offline_store: + type: remote + host: feast-offline-server-feast-feature-server.feast-dev.svc.cluster.local + port: 80 +auth: + type: oidc + auth_discovery_url: 
https://keycloak-feast-dev.apps.com/realms/feast-rbac/.well-known/openid-configuration + client_id: feast-client +entity_key_serialization_version: 2 diff --git a/examples/rbac-remote/server/oidc/feature_store_registry.yaml b/examples/rbac-remote/server/oidc/feature_store_registry.yaml new file mode 100644 index 0000000000..a661d9dc56 --- /dev/null +++ b/examples/rbac-remote/server/oidc/feature_store_registry.yaml @@ -0,0 +1,14 @@ +project: server +provider: local +registry: + registry_type: sql + path: postgresql+psycopg://feast:feast@postgresql.feast-dev.svc.cluster.local:5432/feast + cache_ttl_seconds: 60 + sqlalchemy_config_kwargs: + echo: false + pool_pre_ping: true +auth: + type: oidc + auth_discovery_url: https://keycloak-feast-dev.apps.com/realms/feast-rbac/.well-known/openid-configuration + client_id: feast-client +entity_key_serialization_version: 2 diff --git a/infra/charts/feast-feature-server/templates/deployment.yaml b/infra/charts/feast-feature-server/templates/deployment.yaml index 8dddeed6fd..dc62be8b95 100644 --- a/infra/charts/feast-feature-server/templates/deployment.yaml +++ b/infra/charts/feast-feature-server/templates/deployment.yaml @@ -21,6 +21,7 @@ spec: labels: {{- include "feast-feature-server.selectorLabels" . | nindent 8 }} spec: + serviceAccountName: {{ .Values.serviceAccount.name | default "default" }} {{- with .Values.imagePullSecrets }} imagePullSecrets: {{- toYaml . | nindent 8 }} diff --git a/infra/charts/feast-feature-server/values.yaml b/infra/charts/feast-feature-server/values.yaml index 64d805a66c..22bbdeace0 100644 --- a/infra/charts/feast-feature-server/values.yaml +++ b/infra/charts/feast-feature-server/values.yaml @@ -44,6 +44,9 @@ service: type: ClusterIP port: 80 +serviceAccount: + name: "" + resources: {} # We usually recommend not to specify default resources and to leave this as a conscious # choice for the user. 
This also increases chances charts run on environments with little diff --git a/sdk/python/feast/permissions/client/auth_client_manager_factory.py b/sdk/python/feast/permissions/client/auth_client_manager_factory.py index 3dff5fb45d..359072f38e 100644 --- a/sdk/python/feast/permissions/client/auth_client_manager_factory.py +++ b/sdk/python/feast/permissions/client/auth_client_manager_factory.py @@ -1,7 +1,11 @@ +import os +from typing import cast + from feast.permissions.auth.auth_type import AuthType from feast.permissions.auth_model import ( AuthConfig, KubernetesAuthConfig, + OidcAuthConfig, OidcClientAuthConfig, ) from feast.permissions.client.auth_client_manager import AuthenticationClientManager @@ -15,8 +19,15 @@ def get_auth_client_manager(auth_config: AuthConfig) -> AuthenticationClientManager: if auth_config.type == AuthType.OIDC.value: - assert isinstance(auth_config, OidcClientAuthConfig) - return OidcAuthClientManager(auth_config) + intra_communication_base64 = os.getenv("INTRA_COMMUNICATION_BASE64") + # If intra server communication call + if intra_communication_base64: + assert isinstance(auth_config, OidcAuthConfig) + client_auth_config = cast(OidcClientAuthConfig, auth_config) + else: + assert isinstance(auth_config, OidcClientAuthConfig) + client_auth_config = auth_config + return OidcAuthClientManager(client_auth_config) elif auth_config.type == AuthType.KUBERNETES.value: assert isinstance(auth_config, KubernetesAuthConfig) return KubernetesAuthClientManager(auth_config) From def863360bf0e553d242900ee915e953c6c3f9b6 Mon Sep 17 00:00:00 2001 From: Theodor Mihalache <84387487+tmihalac@users.noreply.github.com> Date: Thu, 12 Sep 2024 09:28:47 -0400 Subject: [PATCH 56/96] =?UTF-8?q?fix:=20Refactor=20auth=5Fclient=5Fmanager?= =?UTF-8?q?=5Ffactory.py=20in=20function=20get=5Fauth=5Fclient=5Fm?= =?UTF-8?q?=E2=80=A6=20(#4505)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Refactor 
auth_client_manager_factory.py in function get_auth_client_manager Signed-off-by: Theodor Mihalache * Refactor auth_client_manager_factory.py in function get_auth_client_manager -Added test Signed-off-by: Theodor Mihalache * Refactor auth_client_manager_factory.py in function get_auth_client_manager -updated test following review Signed-off-by: Theodor Mihalache * Refactor auth_client_manager_factory.py in function get_auth_client_manager -fixed linter Signed-off-by: Theodor Mihalache --------- Signed-off-by: Theodor Mihalache --- .../client/arrow_flight_auth_interceptor.py | 2 +- .../permissions/client/auth_client_manager.py | 41 ++++++++++++++ .../client/auth_client_manager_factory.py | 41 -------------- .../permissions/client/client_auth_token.py | 14 +++++ .../client/grpc_client_auth_interceptor.py | 2 +- .../client/http_auth_requests_wrapper.py | 2 +- ...ntra_comm_authentication_client_manager.py | 31 +++++++++++ ...t_authentication_client_manager_factory.py | 55 +++++++++++++++++++ 8 files changed, 144 insertions(+), 44 deletions(-) delete mode 100644 sdk/python/feast/permissions/client/auth_client_manager_factory.py create mode 100644 sdk/python/feast/permissions/client/client_auth_token.py create mode 100644 sdk/python/feast/permissions/client/intra_comm_authentication_client_manager.py create mode 100644 sdk/python/tests/unit/permissions/auth/client/test_authentication_client_manager_factory.py diff --git a/sdk/python/feast/permissions/client/arrow_flight_auth_interceptor.py b/sdk/python/feast/permissions/client/arrow_flight_auth_interceptor.py index 724c7df5ca..7ef84fbeae 100644 --- a/sdk/python/feast/permissions/client/arrow_flight_auth_interceptor.py +++ b/sdk/python/feast/permissions/client/arrow_flight_auth_interceptor.py @@ -2,7 +2,7 @@ from feast.permissions.auth.auth_type import AuthType from feast.permissions.auth_model import AuthConfig -from feast.permissions.client.auth_client_manager_factory import get_auth_token +from 
feast.permissions.client.client_auth_token import get_auth_token class FlightBearerTokenInterceptor(fl.ClientMiddleware): diff --git a/sdk/python/feast/permissions/client/auth_client_manager.py b/sdk/python/feast/permissions/client/auth_client_manager.py index 82f9b7433e..2151cfb409 100644 --- a/sdk/python/feast/permissions/client/auth_client_manager.py +++ b/sdk/python/feast/permissions/client/auth_client_manager.py @@ -1,8 +1,49 @@ +import os from abc import ABC, abstractmethod +from feast.permissions.auth.auth_type import AuthType +from feast.permissions.auth_model import ( + AuthConfig, + KubernetesAuthConfig, + OidcClientAuthConfig, +) + class AuthenticationClientManager(ABC): @abstractmethod def get_token(self) -> str: """Retrieves the token based on the authentication type configuration""" pass + + +class AuthenticationClientManagerFactory(ABC): + def __init__(self, auth_config: AuthConfig): + self.auth_config = auth_config + + def get_auth_client_manager(self) -> AuthenticationClientManager: + from feast.permissions.client.intra_comm_authentication_client_manager import ( + IntraCommAuthClientManager, + ) + from feast.permissions.client.kubernetes_auth_client_manager import ( + KubernetesAuthClientManager, + ) + from feast.permissions.client.oidc_authentication_client_manager import ( + OidcAuthClientManager, + ) + + intra_communication_base64 = os.getenv("INTRA_COMMUNICATION_BASE64") + if intra_communication_base64: + return IntraCommAuthClientManager( + self.auth_config, intra_communication_base64 + ) + + if self.auth_config.type == AuthType.OIDC.value: + assert isinstance(self.auth_config, OidcClientAuthConfig) + return OidcAuthClientManager(self.auth_config) + elif self.auth_config.type == AuthType.KUBERNETES.value: + assert isinstance(self.auth_config, KubernetesAuthConfig) + return KubernetesAuthClientManager(self.auth_config) + else: + raise RuntimeError( + f"No Auth client manager implemented for the auth type:${self.auth_config.type}" + ) diff 
--git a/sdk/python/feast/permissions/client/auth_client_manager_factory.py b/sdk/python/feast/permissions/client/auth_client_manager_factory.py deleted file mode 100644 index 359072f38e..0000000000 --- a/sdk/python/feast/permissions/client/auth_client_manager_factory.py +++ /dev/null @@ -1,41 +0,0 @@ -import os -from typing import cast - -from feast.permissions.auth.auth_type import AuthType -from feast.permissions.auth_model import ( - AuthConfig, - KubernetesAuthConfig, - OidcAuthConfig, - OidcClientAuthConfig, -) -from feast.permissions.client.auth_client_manager import AuthenticationClientManager -from feast.permissions.client.kubernetes_auth_client_manager import ( - KubernetesAuthClientManager, -) -from feast.permissions.client.oidc_authentication_client_manager import ( - OidcAuthClientManager, -) - - -def get_auth_client_manager(auth_config: AuthConfig) -> AuthenticationClientManager: - if auth_config.type == AuthType.OIDC.value: - intra_communication_base64 = os.getenv("INTRA_COMMUNICATION_BASE64") - # If intra server communication call - if intra_communication_base64: - assert isinstance(auth_config, OidcAuthConfig) - client_auth_config = cast(OidcClientAuthConfig, auth_config) - else: - assert isinstance(auth_config, OidcClientAuthConfig) - client_auth_config = auth_config - return OidcAuthClientManager(client_auth_config) - elif auth_config.type == AuthType.KUBERNETES.value: - assert isinstance(auth_config, KubernetesAuthConfig) - return KubernetesAuthClientManager(auth_config) - else: - raise RuntimeError( - f"No Auth client manager implemented for the auth type:${auth_config.type}" - ) - - -def get_auth_token(auth_config: AuthConfig) -> str: - return get_auth_client_manager(auth_config).get_token() diff --git a/sdk/python/feast/permissions/client/client_auth_token.py b/sdk/python/feast/permissions/client/client_auth_token.py new file mode 100644 index 0000000000..68821e3f9c --- /dev/null +++ b/sdk/python/feast/permissions/client/client_auth_token.py 
@@ -0,0 +1,14 @@ +from feast.permissions.auth_model import ( + AuthConfig, +) +from feast.permissions.client.auth_client_manager import ( + AuthenticationClientManagerFactory, +) + + +def get_auth_token(auth_config: AuthConfig) -> str: + return ( + AuthenticationClientManagerFactory(auth_config) + .get_auth_client_manager() + .get_token() + ) diff --git a/sdk/python/feast/permissions/client/grpc_client_auth_interceptor.py b/sdk/python/feast/permissions/client/grpc_client_auth_interceptor.py index 5155b80cb5..121735e351 100644 --- a/sdk/python/feast/permissions/client/grpc_client_auth_interceptor.py +++ b/sdk/python/feast/permissions/client/grpc_client_auth_interceptor.py @@ -4,7 +4,7 @@ from feast.errors import FeastError from feast.permissions.auth_model import AuthConfig -from feast.permissions.client.auth_client_manager_factory import get_auth_token +from feast.permissions.client.client_auth_token import get_auth_token logger = logging.getLogger(__name__) diff --git a/sdk/python/feast/permissions/client/http_auth_requests_wrapper.py b/sdk/python/feast/permissions/client/http_auth_requests_wrapper.py index 3232e25025..ba02fab8d8 100644 --- a/sdk/python/feast/permissions/client/http_auth_requests_wrapper.py +++ b/sdk/python/feast/permissions/client/http_auth_requests_wrapper.py @@ -5,7 +5,7 @@ from feast.permissions.auth_model import ( AuthConfig, ) -from feast.permissions.client.auth_client_manager_factory import get_auth_token +from feast.permissions.client.client_auth_token import get_auth_token class AuthenticatedRequestsSession(Session): diff --git a/sdk/python/feast/permissions/client/intra_comm_authentication_client_manager.py b/sdk/python/feast/permissions/client/intra_comm_authentication_client_manager.py new file mode 100644 index 0000000000..678e1f39e5 --- /dev/null +++ b/sdk/python/feast/permissions/client/intra_comm_authentication_client_manager.py @@ -0,0 +1,31 @@ +import logging + +import jwt + +from feast.permissions.auth.auth_type import AuthType 
+from feast.permissions.auth_model import AuthConfig +from feast.permissions.client.auth_client_manager import AuthenticationClientManager + +logger = logging.getLogger(__name__) + + +class IntraCommAuthClientManager(AuthenticationClientManager): + def __init__(self, auth_config: AuthConfig, intra_communication_base64: str): + self.auth_config = auth_config + self.intra_communication_base64 = intra_communication_base64 + + def get_token(self): + if self.auth_config.type == AuthType.OIDC.value: + payload = { + "preferred_username": f"{self.intra_communication_base64}", # Subject claim + } + elif self.auth_config.type == AuthType.KUBERNETES.value: + payload = { + "sub": f":::{self.intra_communication_base64}", # Subject claim + } + else: + raise RuntimeError( + f"No Auth client manager implemented for the auth type:{self.auth_config.type}" + ) + + return jwt.encode(payload, "") diff --git a/sdk/python/tests/unit/permissions/auth/client/test_authentication_client_manager_factory.py b/sdk/python/tests/unit/permissions/auth/client/test_authentication_client_manager_factory.py new file mode 100644 index 0000000000..5a6a8d70fa --- /dev/null +++ b/sdk/python/tests/unit/permissions/auth/client/test_authentication_client_manager_factory.py @@ -0,0 +1,55 @@ +import os +from unittest import mock + +import assertpy +import jwt +import pytest +import yaml + +from feast.permissions.auth.auth_type import AuthType +from feast.permissions.auth_model import ( + AuthConfig, +) +from feast.permissions.client.auth_client_manager import ( + AuthenticationClientManagerFactory, +) +from feast.permissions.client.intra_comm_authentication_client_manager import ( + IntraCommAuthClientManager, +) + + +@mock.patch.dict(os.environ, {"INTRA_COMMUNICATION_BASE64": "server_intra_com_val"}) +def test_authentication_client_manager_factory(auth_config): + raw_config = yaml.safe_load(auth_config) + auth_config = AuthConfig(type=raw_config["auth"]["type"]) + + authentication_client_manager_factory = 
AuthenticationClientManagerFactory( + auth_config + ) + + authentication_client_manager = ( + authentication_client_manager_factory.get_auth_client_manager() + ) + + if auth_config.type not in [AuthType.KUBERNETES.value, AuthType.OIDC.value]: + with pytest.raises( + RuntimeError, + match=f"No Auth client manager implemented for the auth type:{auth_config.type}", + ): + authentication_client_manager.get_token() + else: + token = authentication_client_manager.get_token() + + decoded_token = jwt.decode(token, options={"verify_signature": False}) + assertpy.assert_that(authentication_client_manager).is_type_of( + IntraCommAuthClientManager + ) + + if AuthType.KUBERNETES.value == auth_config.type: + assertpy.assert_that(decoded_token["sub"]).is_equal_to( + ":::server_intra_com_val" + ) + elif AuthType.OIDC.value in auth_config.type: + assertpy.assert_that(decoded_token["preferred_username"]).is_equal_to( + "server_intra_com_val" + ) From 6a6a369eec427814f39c812f2f3da274e6100ad3 Mon Sep 17 00:00:00 2001 From: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> Date: Thu, 12 Sep 2024 18:07:59 +0200 Subject: [PATCH 57/96] chore: A full, minimal, reproducible example of the RBAC feature (#4501) A full, minimal, reproducible example of the RBAC feature Signed-off-by: Daniele Martinoli --- examples/rbac-local/01.1-start-keycloak.ipynb | 94 ++ examples/rbac-local/01.2-setup-keycloak.ipynb | 416 +++++++ examples/rbac-local/01.3-setup-feast.ipynb | 1029 +++++++++++++++++ examples/rbac-local/02-registry_server.ipynb | 73 ++ examples/rbac-local/03-online_server.ipynb | 111 ++ examples/rbac-local/04-offline_server.ipynb | 99 ++ examples/rbac-local/README.md | 57 + examples/rbac-local/client.ipynb | 607 ++++++++++ examples/rbac-local/client/feature_store.yaml | 12 + 9 files changed, 2498 insertions(+) create mode 100644 examples/rbac-local/01.1-start-keycloak.ipynb create mode 100644 examples/rbac-local/01.2-setup-keycloak.ipynb create mode 100644 
examples/rbac-local/01.3-setup-feast.ipynb create mode 100644 examples/rbac-local/02-registry_server.ipynb create mode 100644 examples/rbac-local/03-online_server.ipynb create mode 100644 examples/rbac-local/04-offline_server.ipynb create mode 100644 examples/rbac-local/README.md create mode 100644 examples/rbac-local/client.ipynb create mode 100644 examples/rbac-local/client/feature_store.yaml diff --git a/examples/rbac-local/01.1-start-keycloak.ipynb b/examples/rbac-local/01.1-start-keycloak.ipynb new file mode 100644 index 0000000000..f73e699833 --- /dev/null +++ b/examples/rbac-local/01.1-start-keycloak.ipynb @@ -0,0 +1,94 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "e46a65b1-7cf0-4cc2-8aca-529d659630a4", + "metadata": {}, + "source": [ + "# Start Keycloak server" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "374e8693-7e47-4985-b7f6-a9b818b0b4d0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Updating the configuration and installing your custom providers, if any. 
Please wait.\n", + "2024-09-09 06:37:54,515 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index org.springframework.core.io.Resource: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,518 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index org.springframework.core.io.DefaultResourceLoader: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,519 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index org.springframework.core.io.ResourceLoader: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,525 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index org.apache.tools.ant.Task: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,568 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index org.apache.activemq.artemis.core.journal.RecordInfo: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,568 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index org.apache.activemq.artemis.core.journal.Journal: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,569 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index io.mashona.logwriting.ArrayStore: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,573 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index jakarta.jms.XAConnection: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,574 WARN [io.qua.dep.ind.IndexWrapper] 
(build-5) Failed to index jakarta.jms.XASession: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,574 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index jakarta.jms.XAConnectionFactory: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,657 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index jakarta.jms.Connection: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:58,410 INFO [io.qua.dep.QuarkusAugmentor] (main) Quarkus augmentation completed in 7235ms\n", + "2024-09-09 06:37:59,697 INFO [org.keycloak.quarkus.runtime.hostname.DefaultHostnameProvider] (main) Hostname settings: Base URL: , Hostname: , Strict HTTPS: false, Path: , Strict BackChannel: false, Admin URL: , Admin: , Port: -1, Proxied: false\n", + "2024-09-09 06:37:59,903 WARN [org.infinispan.CONFIG] (keycloak-cache-init) ISPN000569: Unable to persist Infinispan internal caches as no global state enabled\n", + "2024-09-09 06:37:59,949 INFO [org.infinispan.CONTAINER] (keycloak-cache-init) ISPN000556: Starting user marshaller 'org.infinispan.jboss.marshalling.core.JBossUserMarshaller'\n", + "2024-09-09 06:38:01,394 WARN [io.quarkus.agroal.runtime.DataSources] (JPA Startup Thread) Datasource enables XA but transaction recovery is not enabled. 
Please enable transaction recovery by setting quarkus.transaction-manager.enable-recovery=true, otherwise data may be lost if the application is terminated abruptly\n", + "2024-09-09 06:38:02,119 INFO [org.keycloak.connections.infinispan.DefaultInfinispanConnectionProviderFactory] (main) Node name: node_693934, Site name: null\n", + "2024-09-09 06:38:02,122 INFO [org.keycloak.broker.provider.AbstractIdentityProviderMapper] (main) Registering class org.keycloak.broker.provider.mappersync.ConfigSyncEventListener\n", + "2024-09-09 06:38:03,086 INFO [org.keycloak.quarkus.runtime.storage.legacy.liquibase.QuarkusJpaUpdaterProvider] (main) Initializing database schema. Using changelog META-INF/jpa-changelog-master.xml\n", + "\n", + "UPDATE SUMMARY\n", + "Run: 124\n", + "Previously run: 0\n", + "Filtered out: 0\n", + "-------------------------------\n", + "Total change sets: 124\n", + "\n", + "2024-09-09 06:38:05,143 INFO [org.keycloak.services] (main) KC-SERVICES0050: Initializing master realm\n", + "2024-09-09 06:38:06,418 INFO [org.keycloak.services] (main) KC-SERVICES0009: Added user 'admin' to realm 'master'\n", + "2024-09-09 06:38:06,492 INFO [io.quarkus] (main) Keycloak 24.0.4 on JVM (powered by Quarkus 3.8.4) started in 7.761s. Listening on: http://0.0.0.0:8080\n", + "2024-09-09 06:38:06,492 INFO [io.quarkus] (main) Profile dev activated. \n", + "2024-09-09 06:38:06,492 INFO [io.quarkus] (main) Installed features: [agroal, cdi, hibernate-orm, jdbc-h2, keycloak, logging-gelf, narayana-jta, reactive-routes, resteasy-reactive, resteasy-reactive-jackson, smallrye-context-propagation, vertx]\n", + "2024-09-09 06:38:06,495 WARN [org.keycloak.quarkus.runtime.KeycloakMain] (main) Running the server in development mode. 
DO NOT use this configuration in production.\n" + ] + } + ], + "source": [ + "!docker run --rm -p 9999:8080 --name my-keycloak \\\n", + "-e KEYCLOAK_ADMIN=admin -e KEYCLOAK_ADMIN_PASSWORD=admin \\\n", + "quay.io/keycloak/keycloak:24.0.4 start-dev" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d2d1e035-85b3-4d77-abb3-13af5e31ef37", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/rbac-local/01.2-setup-keycloak.ipynb b/examples/rbac-local/01.2-setup-keycloak.ipynb new file mode 100644 index 0000000000..d896bd82df --- /dev/null +++ b/examples/rbac-local/01.2-setup-keycloak.ipynb @@ -0,0 +1,416 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "e8952066-7a10-4c9b-a4b7-27be074ae269", + "metadata": {}, + "source": [ + "## Create Keycloak resources" + ] + }, + { + "cell_type": "markdown", + "id": "7252812d-90eb-4752-91a7-d46b400bacd8", + "metadata": {}, + "source": [ + "Wait until Keycloak is running" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "e5d13f76-f184-44f6-8542-54a61060e531", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\u001b[36m\"Status\"\u001b[0m:\u001b[32m \"running\"\u001b[0m,\u001b[36m \"Running\"\u001b[0m:\u001b[95m true\u001b[0m,\u001b[36m \"Paused\"\u001b[0m:\u001b[95m false\u001b[0m,\u001b[36m \"Restarting\"\u001b[0m:\u001b[95m false\u001b[0m,\u001b[36m \"OOMKilled\"\u001b[0m:\u001b[95m false\u001b[0m,\u001b[36m \"Dead\"\u001b[0m:\u001b[95m false\u001b[0m,\u001b[36m \"Pid\"\u001b[0m:\u001b[95m 
2838024\u001b[0m,\u001b[36m \"ExitCode\"\u001b[0m:\u001b[95m 0\u001b[0m,\u001b[36m \"Error\"\u001b[0m:\u001b[32m \"\"\u001b[0m,\u001b[36m \"StartedAt\"\u001b[0m:\u001b[32m \"2024-09-09T06:37:49.055739669Z\"\u001b[0m,\u001b[36m \"FinishedAt\"\u001b[0m:\u001b[32m \"0001-01-01T00:00:00Z\"\u001b[0m}\n" + ] + } + ], + "source": [ + "!docker inspect --format='json' my-keycloak | yq '.[0].State'" + ] + }, + { + "cell_type": "markdown", + "id": "cc9a6329-4e89-464c-ac48-dbbadaf72a2b", + "metadata": {}, + "source": [ + "Then create a sample realm and client with some roles and users matching the test environment." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "d5c60591-f41d-4a5e-9b18-93385a889495", + "metadata": {}, + "outputs": [], + "source": [ + "import requests\n", + "import json\n", + "from dotenv import set_key\n", + "\n", + "OIDC_SERVER_URL = \"http://0.0.0.0:9999\"\n", + "ADMIN_USERNAME = \"admin\"\n", + "ADMIN_PASSWORD = \"admin\"\n", + "\n", + "access_token: str = \"\"" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "d16969bc-423a-4d18-afa3-97a791b84b13", + "metadata": {}, + "outputs": [], + "source": [ + "def get_token():\n", + " token_url = f\"{OIDC_SERVER_URL}/realms/master/protocol/openid-connect/token\"\n", + "\n", + " token_data = {\n", + " \"grant_type\": \"password\",\n", + " \"client_id\": \"admin-cli\",\n", + " \"username\": ADMIN_USERNAME,\n", + " \"password\": ADMIN_PASSWORD,\n", + " }\n", + "\n", + " token_response = requests.post(token_url, data=token_data)\n", + " if token_response.status_code == 200:\n", + " global access_token\n", + " access_token = token_response.json()[\"access_token\"]\n", + " return access_token\n", + " else:\n", + " print(\n", + " f\"Failed to obtain access token: {token_response.status_code} - {token_response.text}\"\n", + " )\n", + " raise Exception(\"Not authenticated\")\n", + "\n", + "\n", + "def keycloak_post(endpoint, data=None):\n", + " url = 
f\"{OIDC_SERVER_URL}/admin/{endpoint}\"\n", + " print(f\"Creating {endpoint}\")\n", + " global access_token\n", + " headers = {\n", + " \"Content-Type\": \"application/json\",\n", + " \"Authorization\": f\"Bearer {access_token}\",\n", + " }\n", + " response = requests.request(\"POST\", url, headers=headers, data=json.dumps(data))\n", + " print(f\"POST response.status_code is {response.status_code}\")\n", + " return response.status_code\n", + "\n", + "\n", + "def keycloak_get(endpoint):\n", + " url = f\"{OIDC_SERVER_URL}/admin/{endpoint}\"\n", + " global access_token\n", + " headers = {\n", + " \"Content-Type\": \"application/json\",\n", + " \"Authorization\": f\"Bearer {access_token}\",\n", + " }\n", + " response = requests.request(\"GET\", url, headers=headers)\n", + " print(f\"GET response.status_code is {response.status_code}\")\n", + " return response.json()\n", + "\n", + "\n", + "def create_realm(realm_name):\n", + " data = {\"realm\": realm_name, \"enabled\": \"true\"}\n", + " keycloak_post(\"realms\", data=data)\n", + " response = keycloak_get(f\"realms/{realm_name}\")\n", + " return response[\"id\"]\n", + "\n", + "\n", + "def create_client(realm_name, client_name):\n", + " data = {\n", + " \"clientId\": client_name,\n", + " \"enabled\": \"true\",\n", + " \"redirectUris\": [\n", + " \"http://localhost:8000/*\",\n", + " \"http://127.0.0.1:8000/*\",\n", + " \"http://0.0.0.0:8000/*\",\n", + " ],\n", + " \"publicClient\": False,\n", + " \"authorizationServicesEnabled\": True,\n", + " \"protocol\": \"openid-connect\",\n", + " \"standardFlowEnabled\": True,\n", + " \"directAccessGrantsEnabled\": True,\n", + " \"serviceAccountsEnabled\": True,\n", + " }\n", + " keycloak_post(f\"realms/{realm_name}/clients\", data=data)\n", + " response = keycloak_get(f\"realms/{realm_name}/clients\")\n", + " client = None\n", + " for c in response:\n", + " if c[\"clientId\"] == client_name:\n", + " client = c\n", + " break\n", + " client_id = client[\"id\"]\n", + " client_secret = 
client[\"secret\"]\n", + " return client_id, client_secret\n", + "\n", + "\n", + "def create_client_roles(realm_name, client_id, roles):\n", + " for role_name in roles:\n", + " data = {\"name\": role_name, \"clientRole\": True}\n", + " keycloak_post(f\"realms/{realm_name}/clients/{client_id}/roles\", data=data)\n", + "\n", + " response = keycloak_get(f\"realms/{realm_name}/clients/{client_id}/roles\")\n", + " roles_by_name = dict((role[\"name\"], role[\"id\"]) for role in response)\n", + " print(roles_by_name)\n", + " return roles_by_name\n", + "\n", + "\n", + "def create_user_with_roles(\n", + " realm_name, username, password, client_id, roles_by_name, roles\n", + "):\n", + " data = {\n", + " \"username\": username,\n", + " \"enabled\": True,\n", + " \"email\": f\"{username}@poc.com\",\n", + " \"emailVerified\": True,\n", + " \"firstName\": \"user\",\n", + " \"lastName\": f\"{username}\",\n", + " \"credentials\": [{\"type\": \"password\", \"value\": password}],\n", + " \"realmRoles\": [],\n", + " }\n", + " keycloak_post(f\"realms/{realm_name}/users\", data=data)\n", + " response = keycloak_get(f\"realms/{realm_name}/users\")\n", + " user = None\n", + " for u in response:\n", + " if u[\"username\"] == username:\n", + " user = u\n", + " break\n", + " user_id = user[\"id\"]\n", + "\n", + " data = [\n", + " {\n", + " \"id\": roles_by_name[role_name],\n", + " \"name\": role_name,\n", + " }\n", + " for role_name in roles\n", + " ]\n", + " keycloak_post(\n", + " f\"realms/{realm_name}/users/{user_id}/role-mappings/clients/{client_id}\",\n", + " data=data,\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e097fac1-f2c3-4afe-b78c-2c8279e3a84e", + "metadata": {}, + "outputs": [], + "source": [ + "get_token()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "f114fa41-8cea-486f-baf4-998cbf69fea4", + "metadata": {}, + "outputs": [], + "source": [ + "realm_name = \"rbac_example\"\n", + "client_name = \"app\"\n", + "password = 
\"password\"" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "0f889548-9b60-448b-beed-ac3fc1890b13", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Creating realms\n", + "POST response.status_code is 201\n", + "GET response.status_code is 200\n", + "Creating realms/rbac_example/clients\n", + "POST response.status_code is 201\n", + "GET response.status_code is 200\n", + "Creating realms/rbac_example/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b/roles\n", + "POST response.status_code is 201\n", + "Creating realms/rbac_example/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b/roles\n", + "POST response.status_code is 201\n", + "Creating realms/rbac_example/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b/roles\n", + "POST response.status_code is 201\n", + "Creating realms/rbac_example/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b/roles\n", + "POST response.status_code is 201\n", + "GET response.status_code is 200\n", + "{'store_admin': '2d7a675f-031d-42b1-aba6-eb28a95561af', 'batch_admin': '8664084a-4e3c-42b0-8e37-70a8fea012b3', 'reader': '6cbf4473-c165-48bd-b572-d20133ae2b2b', 'uma_protection': '172d464d-92c7-4055-95af-3e048d8077b2', 'fresh_writer': '9e2abf47-a7af-414e-bf14-2c9897933532'}\n" + ] + } + ], + "source": [ + "realm_id = create_realm(realm_name)\n", + "client_id, client_secret = create_client(realm_name, client_name)\n", + "\n", + "roles_by_name = create_client_roles(\n", + " realm_name, client_id, [\"reader\", \"fresh_writer\", \"store_admin\", \"batch_admin\"]\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "a3430d83-107d-44ad-acf2-0df810dff0ff", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Creating realms/rbac_example/users\n", + "POST response.status_code is 201\n", + "GET response.status_code is 200\n", + "Creating 
realms/rbac_example/users/a87b4ca8-e1a9-40f7-a166-f48fe45beec2/role-mappings/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b\n", + "POST response.status_code is 204\n", + "Creating realms/rbac_example/users\n", + "POST response.status_code is 201\n", + "GET response.status_code is 200\n", + "Creating realms/rbac_example/users/eb343a9b-d800-4fff-96b6-4588c7db08de/role-mappings/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b\n", + "POST response.status_code is 204\n", + "Creating realms/rbac_example/users\n", + "POST response.status_code is 201\n", + "GET response.status_code is 200\n", + "Creating realms/rbac_example/users/91bfbaae-e1fd-4167-9432-2d1d8ca8c838/role-mappings/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b\n", + "POST response.status_code is 204\n", + "Creating realms/rbac_example/users\n", + "POST response.status_code is 201\n", + "GET response.status_code is 200\n", + "Creating realms/rbac_example/users/4d67e8ca-6c2a-48b7-b511-c3f6197aa5ae/role-mappings/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b\n", + "POST response.status_code is 204\n" + ] + } + ], + "source": [ + "create_user_with_roles(\n", + " realm_name, \"reader\", password, client_id, roles_by_name, [\"reader\"]\n", + ")\n", + "create_user_with_roles(\n", + " realm_name,\n", + " \"writer\",\n", + " password,\n", + " client_id,\n", + " roles_by_name,\n", + " [\"fresh_writer\"],\n", + ")\n", + "create_user_with_roles(\n", + " realm_name,\n", + " \"batch_admin\",\n", + " password,\n", + " client_id,\n", + " roles_by_name,\n", + " [\"batch_admin\"],\n", + ")\n", + "create_user_with_roles(\n", + " realm_name,\n", + " \"admin\",\n", + " password,\n", + " client_id,\n", + " roles_by_name,\n", + " [\"store_admin\"],\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "54317f9e-476b-4b8e-864a-a07c54b549f4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Realm rbac_example setup completed.\n", + "Client app created with ID 
c3475e89-27c3-41ac-a3d1-0bbcaf68083b and secret REDACTED\n", + "Settings configured in .env\n" + ] + } + ], + "source": [ + "print(f\"Realm {realm_name} setup completed.\")\n", + "print(\n", + " f\"Client {client_name} created with ID {client_id} and secret {client_secret}\"\n", + ")\n", + "\n", + "env_file = \".env\"\n", + "with open(env_file, \"w\") as file:\n", + " pass\n", + "\n", + "# Write property P=1 to the .env file\n", + "set_key(env_file, \"OIDC_SERVER_URL\", OIDC_SERVER_URL)\n", + "set_key(env_file, \"REALM\", realm_name)\n", + "set_key(env_file, \"CLIENT_ID\", client_name)\n", + "set_key(env_file, \"CLIENT_SECRET\", client_secret)\n", + "set_key(env_file, \"PASSWORD\", password)\n", + "print(f\"Settings configured in {env_file}\")" + ] + }, + { + "cell_type": "markdown", + "id": "35dcd5ed-4004-4570-965f-0f68668605d8", + "metadata": {}, + "source": [ + "The [.env](.env) file contains the settings of the created realm, including the client secret to be used to connect the server." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "46a1e2c7-e379-461d-b0bf-82354378e830", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OIDC_SERVER_URL='http://0.0.0.0:9999'\n", + "REALM='rbac_example'\n", + "CLIENT_ID='app'\n", + "CLIENT_SECRET='REDACTED'\n", + "PASSWORD='password'\n" + ] + } + ], + "source": [ + "!cat .env" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d035826b-64d6-47cc-a48e-26eb29b31fc7", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/rbac-local/01.3-setup-feast.ipynb b/examples/rbac-local/01.3-setup-feast.ipynb new file mode 100644 index 0000000000..e7e0943094 --- /dev/null +++ b/examples/rbac-local/01.3-setup-feast.ipynb @@ -0,0 +1,1029 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a7b2570a-bdf1-477a-8799-0aefe81a0e28", + "metadata": {}, + "source": [ + "## Setup Feast\n", + "Create a sample `rbac` project with local storage." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "74c1ee91-1816-4338-aabf-7851b655b061", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Creating a new Feast repository in \u001b[1m\u001b[32m/Users/dmartino/projects/AI/feast/feast/examples/rbac-local/rbac\u001b[0m.\n", + "\n" + ] + } + ], + "source": [ + "!rm -rf rbac\n", + "!feast init rbac" + ] + }, + { + "cell_type": "markdown", + "id": "e3215797-198a-49af-a241-7e0117634897", + "metadata": {}, + "source": [ + "Update the `feature_store.yaml` with an `auth` section derived from the Keycloak setup file [.env](.env)" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "a09d2198-9e3a-48f6-8c9d-72d62d20cd57", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OIDC_SERVER_URL='http://0.0.0.0:9999'\n", + "REALM='rbac_example'\n", + "CLIENT_ID='app'\n", + "CLIENT_SECRET='REDACTED'\n", + "PASSWORD='password'\n" + ] + } + ], + "source": [ + "!cat .env" + ] + }, + { + "cell_type": "markdown", + "id": "6cd89872-a6c6-4be0-a6e3-8fd60d448b7b", + "metadata": {}, + "source": [ + "### Update the server YAML\n", + "Update the server YAML to use OIDC authorization" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "e16d5a44-ab0c-4ca8-8491-e7d9073469f8", + "metadata": {}, + "outputs": [], + "source": [ + "from dotenv import load_dotenv\n", + "import os\n", + "import yaml\n", + "\n", + "def load_config_file(path):\n", + " load_dotenv()\n", + "\n", + " with open(path, 'r') as file:\n", + " config = yaml.safe_load(file) or {}\n", + " return config" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "cd30523b-4e1c-4d56-9c72-84aacb46b29d", + "metadata": {}, + "outputs": [], + "source": [ + "def update_config_with_auth(config, is_client=False):\n", + " config['auth']={}\n", + " config['auth']['type']='oidc'\n", + " 
config['auth']['auth_discovery_url']=f\"{os.getenv('OIDC_SERVER_URL')}/realms/{os.getenv('REALM')}/.well-known/openid-configuration\"\n", + " config['auth']['client_id']=os.getenv('CLIENT_ID')\n", + " if is_client:\n", + " config['auth']['client_secret']=os.getenv('CLIENT_SECRET')\n", + " config['auth']['username']=''\n", + " config['auth']['password']='password'" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "1631a8c8-f635-4970-8653-06c147b1c128", + "metadata": {}, + "outputs": [], + "source": [ + "def update_config_file(path):\n", + " with open(path, 'w') as file:\n", + " yaml.safe_dump(config, file, default_flow_style=False)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "78898d46-1185-4528-8f08-b137dd49246a", + "metadata": {}, + "outputs": [], + "source": [ + "config = load_config_file('rbac/feature_repo/feature_store.yaml')\n", + "update_config_with_auth(config)\n", + "update_config_file('rbac/feature_repo/feature_store.yaml')" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "e2437286-2907-4818-87ad-a2293f21311e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "auth:\n", + " auth_discovery_url: http://0.0.0.0:9999/realms/rbac_example/.well-known/openid-configuration\n", + " client_id: app\n", + " type: oidc\n", + "entity_key_serialization_version: 2\n", + "online_store:\n", + " path: data/online_store.db\n", + " type: sqlite\n", + "project: rbac\n", + "provider: local\n", + "registry: data/registry.db\n" + ] + } + ], + "source": [ + "!cat rbac/feature_repo/feature_store.yaml" + ] + }, + { + "cell_type": "markdown", + "id": "fa715453-8c41-4f57-8cf2-c96f6a211cde", + "metadata": {}, + "source": [ + "### Update the client YAML\n", + "Update the client YAML to use OIDC authorization" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "886a558a-1746-44fa-9e38-0e381b3b3deb", + "metadata": {}, + "outputs": [], + "source": [ + "config 
= load_config_file('client/feature_store.yaml')\n", + "update_config_with_auth(config, is_client=True)\n", + "update_config_file('client/feature_store.yaml')" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "267a72e4-443a-4b08-bd59-84d475a29e2a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "auth:\n", + " auth_discovery_url: http://0.0.0.0:9999/realms/rbac_example/.well-known/openid-configuration\n", + " client_id: app\n", + " client_secret: REDACTED\n", + " password: password\n", + " type: oidc\n", + " username: ''\n", + "entity_key_serialization_version: 2\n", + "offline_store:\n", + " host: localhost\n", + " port: 8815\n", + " type: remote\n", + "online_store:\n", + " path: http://localhost:6566\n", + " type: remote\n", + "project: rbac\n", + "registry:\n", + " path: localhost:6570\n", + " registry_type: remote\n" + ] + } + ], + "source": [ + "!cat client/feature_store.yaml" + ] + }, + { + "cell_type": "markdown", + "id": "f71f5189-4423-4720-bbd2-fcb9b778a26b", + "metadata": {}, + "source": [ + "### Apply the configuration" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "e0c24e05-6e38-4ff1-9c39-73818fe41f18", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Applying changes for project rbac\n", + "/Users/dmartino/projects/AI/feast/feast/sdk/python/feast/feature_store.py:562: RuntimeWarning: On demand feature view is an experimental feature. 
This API is stable, but the functionality does not scale well for offline retrieval\n", + " warnings.warn(\n", + "Created project \u001b[1m\u001b[32mrbac\u001b[0m\n", + "Created entity \u001b[1m\u001b[32mdriver\u001b[0m\n", + "Created feature view \u001b[1m\u001b[32mdriver_hourly_stats\u001b[0m\n", + "Created feature view \u001b[1m\u001b[32mdriver_hourly_stats_fresh\u001b[0m\n", + "Created on demand feature view \u001b[1m\u001b[32mtransformed_conv_rate_fresh\u001b[0m\n", + "Created on demand feature view \u001b[1m\u001b[32mtransformed_conv_rate\u001b[0m\n", + "Created feature service \u001b[1m\u001b[32mdriver_activity_v1\u001b[0m\n", + "Created feature service \u001b[1m\u001b[32mdriver_activity_v3\u001b[0m\n", + "Created feature service \u001b[1m\u001b[32mdriver_activity_v2\u001b[0m\n", + "\n", + "Created sqlite table \u001b[1m\u001b[32mrbac_driver_hourly_stats_fresh\u001b[0m\n", + "Created sqlite table \u001b[1m\u001b[32mrbac_driver_hourly_stats\u001b[0m\n", + "\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo apply" + ] + }, + { + "cell_type": "markdown", + "id": "69b9857a-e32b-47ed-a120-57919ecb6b5d", + "metadata": {}, + "source": [ + "### Validate permissions" + ] + }, + { + "cell_type": "markdown", + "id": "867f565d-9740-4790-8d11-31001d920358", + "metadata": {}, + "source": [ + "There are no permissions after applying the example:" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "004f16bf-d125-4aec-b683-3e9653815a27", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME TYPES NAME_PATTERN ACTIONS ROLES REQUIRED_TAGS\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo permissions list" + ] + }, + { + "cell_type": "markdown", + "id": "f2276488-39ec-4ae8-bb69-08dce7ad1bd4", + "metadata": {}, + "source": [ + "The `permissions check` command identifies the resources that have no permissions matching their type, name or tags." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "9fdd2660-c0f5-4dc9-a2da-d45751dcfa01", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[1m\u001b[31mThe following resources are not secured by any permission configuration:\u001b[0m\n", + "NAME TYPE\n", + "driver Entity\n", + "driver_hourly_stats FeatureView\n", + "driver_hourly_stats_fresh FeatureView\n", + "transformed_conv_rate_fresh OnDemandFeatureView\n", + "transformed_conv_rate OnDemandFeatureView\n", + "driver_activity_v1 FeatureService\n", + "driver_activity_v3 FeatureService\n", + "driver_activity_v2 FeatureService\n", + "vals_to_add RequestSource\n", + "driver_stats_push_source PushSource\n", + "driver_hourly_stats_source FileSource\n", + "\u001b[1m\u001b[31mThe following actions are not secured by any permission configuration (Note: this might not be a security concern, depending on the used APIs):\u001b[0m\n", + "NAME TYPE UNSECURED ACTIONS\n", + "driver Entity CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "driver_hourly_stats FeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "driver_hourly_stats_fresh FeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "transformed_conv_rate_fresh OnDemandFeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "transformed_conv_rate OnDemandFeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "driver_activity_v1 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " 
READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "driver_activity_v3 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "driver_activity_v2 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "vals_to_add RequestSource CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "driver_stats_push_source PushSource CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "driver_hourly_stats_source FileSource CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo permissions check" + ] + }, + { + "cell_type": "markdown", + "id": "eb65649d-7ba7-494f-9e01-772842304ca1", + "metadata": {}, + "source": [ + "### Applying permissions\n", + "Let's create some Permissions to cover basic scenarios.\n", + "\n", + "First a simple permission to read the status of all the objects." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "3e910c5d-2f27-4f19-b324-c00347133da7", + "metadata": {}, + "outputs": [], + "source": [ + "from feast import FeatureStore\n", + "from feast.feast_object import ALL_RESOURCE_TYPES\n", + "from feast.permissions.action import CRUD, AuthzedAction, ALL_ACTIONS\n", + "from feast.permissions.permission import Permission\n", + "from feast.permissions.policy import RoleBasedPolicy" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "9e85bb35-cf12-4860-90d6-d1cd4830049c", + "metadata": {}, + "outputs": [], + "source": [ + "store = FeatureStore(\"rbac/feature_repo\")" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "87cc7c4f-48af-4158-adee-b1ccd8a72ea7", + "metadata": {}, + "outputs": [], + "source": [ + "read_permission = Permission(\n", + " name=\"read_permission\",\n", + " types=ALL_RESOURCE_TYPES,\n", + " policy=RoleBasedPolicy(roles=[\"reader\"]),\n", + " actions=AuthzedAction.DESCRIBE\n", + ")\n", + "store.registry.apply_permission(read_permission, store.project)" + ] + }, + { + "cell_type": "markdown", + "id": "e1dcb0d3-21e3-44b7-9ad5-c6b2b1e45b33", + "metadata": {}, + "source": [ + "Now a specific permission to write online data (e.g. 
`materialize`) the `FeatureView`s whose name ends by `fresh`" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "1c2fecdd-056e-4462-b1ad-eec123e282dd", + "metadata": {}, + "outputs": [], + "source": [ + "from feast.feature_view import FeatureView\n", + "write_fresh_permission = Permission(\n", + " name=\"write_fresh_permission\",\n", + " types=FeatureView,\n", + " name_pattern=\".*_fresh\",\n", + " policy=RoleBasedPolicy(roles=[\"fresh_writer\"]),\n", + " actions=AuthzedAction.WRITE_ONLINE\n", + ")\n", + "store.registry.apply_permission(write_fresh_permission, store.project)" + ] + }, + { + "cell_type": "markdown", + "id": "71edd0ea-67b5-4845-b8ae-602ed3883bb7", + "metadata": {}, + "source": [ + "Another one to match allow access to OFFLINE functions." + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "c74e677c-3959-4963-b683-a5289c8238c9", + "metadata": {}, + "outputs": [], + "source": [ + "from feast.feature_view import FeatureView\n", + "from feast.feature_service import FeatureService\n", + "from feast.on_demand_feature_view import OnDemandFeatureView\n", + "offline_permission = Permission(\n", + " name=\"offline_permission\",\n", + " types=[FeatureView, OnDemandFeatureView, FeatureService],\n", + " policy=RoleBasedPolicy(roles=[\"batch_admin\"]),\n", + " actions= CRUD + [AuthzedAction.WRITE_OFFLINE, AuthzedAction.READ_OFFLINE]\n", + ")\n", + "store.registry.apply_permission(offline_permission, store.project)" + ] + }, + { + "cell_type": "markdown", + "id": "3edc08f5-40e1-488a-b749-9b1f5fc31061", + "metadata": {}, + "source": [ + "Finally, ad `admin` permission to manage all the resources" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "739a26ee-e08e-461a-9f75-59158328fc90", + "metadata": {}, + "outputs": [], + "source": [ + "admin_permission = Permission(\n", + " name=\"admin_permission\",\n", + " types=ALL_RESOURCE_TYPES,\n", + " policy=RoleBasedPolicy(roles=[\"store_admin\"]),\n", + " 
actions=ALL_ACTIONS\n", + ")\n", + "store.registry.apply_permission(admin_permission, store.project)" + ] + }, + { + "cell_type": "markdown", + "id": "916c9399-866e-4796-9858-a890ceb29e48", + "metadata": {}, + "source": [ + "## Validate registered permissions" + ] + }, + { + "cell_type": "markdown", + "id": "aed869b3-c567-428f-8a69-9c322b62f7c6", + "metadata": {}, + "source": [ + "List all the permissions." + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "id": "cd284369-1cef-4cf6-859f-ea79d1450ed2", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME TYPES NAME_PATTERN ACTIONS ROLES REQUIRED_TAGS\n", + "read_permission Project DESCRIBE reader -\n", + " FeatureView\n", + " OnDemandFeatureView\n", + " BatchFeatureView\n", + " StreamFeatureView\n", + " Entity\n", + " FeatureService\n", + " DataSource\n", + " ValidationReference\n", + " SavedDataset\n", + " Permission\n", + "write_fresh_permission FeatureView .*_fresh WRITE_ONLINE fresh_writer -\n", + "offline_permission FeatureView CREATE batch_admin -\n", + " OnDemandFeatureView DESCRIBE\n", + " FeatureService UPDATE\n", + " DELETE\n", + " WRITE_OFFLINE\n", + " READ_OFFLINE\n", + "admin_permission Project CREATE store_admin -\n", + " FeatureView DESCRIBE\n", + " OnDemandFeatureView UPDATE\n", + " BatchFeatureView DELETE\n", + " StreamFeatureView READ_ONLINE\n", + " Entity READ_OFFLINE\n", + " FeatureService WRITE_ONLINE\n", + " DataSource WRITE_OFFLINE\n", + " ValidationReference\n", + " SavedDataset\n", + " Permission\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo permissions list" + ] + }, + { + "cell_type": "markdown", + "id": "be3873ee-2514-4aec-8fe8-8b54a3602651", + "metadata": {}, + "source": [ + "List all the resources matching each configured permission." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 29, + "id": "419df226-36df-4d19-be0d-ba82813fef80", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\u001b[1m\u001b[32mThe structure of the \u001b[1m\u001b[37mfeast-permissions list --verbose \u001b[1m\u001b[32mcommand will be as in the following example:\n", + "\n", + "\u001b[2mFor example: \u001b[0m\u001b[1m\u001b[32m\n", + "\n", + "permissions\n", + "β”œβ”€β”€ permission_1 ['role names list']\n", + "β”‚ β”œβ”€β”€ FeatureView: ['feature view names']\n", + "β”‚ β”œβ”€β”€ FeatureService: none\n", + "β”‚ └── ..\n", + "β”œβ”€β”€ permission_2 ['role names list']\n", + "└── ..\n", + "\n", + "-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------\u001b[0m\n", + " \n", + "Permissions:\n", + "\n", + "permissions\n", + "β”œβ”€β”€ read_permission ['reader']\n", + "β”‚ β”œβ”€β”€ FeatureView ['driver_hourly_stats_fresh', 'transformed_conv_rate_fresh', 'transformed_conv_rate', 'driver_hourly_stats']\n", + "β”‚ β”œβ”€β”€ OnDemandFeatureView ['transformed_conv_rate_fresh', 'transformed_conv_rate']\n", + "β”‚ β”œβ”€β”€ BatchFeatureView ['driver_hourly_stats_fresh', 'driver_hourly_stats']\n", + "β”‚ β”œβ”€β”€ StreamFeatureView: none\n", + "β”‚ β”œβ”€β”€ Entity: ['driver']\n", + "β”‚ β”œβ”€β”€ FeatureService: ['driver_activity_v3', 'driver_activity_v2', 'driver_activity_v1']\n", + "β”‚ β”œβ”€β”€ DataSource: ['driver_stats_push_source', 'driver_hourly_stats_source', 'vals_to_add']\n", + "β”‚ β”œβ”€β”€ ValidationReference: none\n", + "β”‚ └── SavedDataset: none\n", + "β”œβ”€β”€ write_fresh_permission ['fresh_writer']\n", + "β”‚ └── FeatureView ['driver_hourly_stats_fresh']\n", + "β”œβ”€β”€ offline_permission ['batch_admin']\n", + "β”‚ β”œβ”€β”€ FeatureView ['driver_hourly_stats_fresh', 'transformed_conv_rate_fresh', 
'transformed_conv_rate', 'driver_hourly_stats']\n", + "β”‚ β”œβ”€β”€ OnDemandFeatureView ['transformed_conv_rate_fresh', 'transformed_conv_rate']\n", + "β”‚ └── FeatureService: ['driver_activity_v3', 'driver_activity_v2', 'driver_activity_v1']\n", + "└── admin_permission ['store_admin']\n", + " β”œβ”€β”€ FeatureView ['driver_hourly_stats_fresh', 'transformed_conv_rate_fresh', 'transformed_conv_rate', 'driver_hourly_stats']\n", + " β”œβ”€β”€ OnDemandFeatureView ['transformed_conv_rate_fresh', 'transformed_conv_rate']\n", + " β”œβ”€β”€ BatchFeatureView ['driver_hourly_stats_fresh', 'driver_hourly_stats']\n", + " β”œβ”€β”€ StreamFeatureView: none\n", + " β”œβ”€β”€ Entity: ['driver']\n", + " β”œβ”€β”€ FeatureService: ['driver_activity_v3', 'driver_activity_v2', 'driver_activity_v1']\n", + " β”œβ”€β”€ DataSource: ['driver_stats_push_source', 'driver_hourly_stats_source', 'vals_to_add']\n", + " β”œβ”€β”€ ValidationReference: none\n", + " └── SavedDataset: none\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo permissions list -v" + ] + }, + { + "cell_type": "markdown", + "id": "90319f10-abce-4a18-9891-7428c8781187", + "metadata": {}, + "source": [ + "Describe one of the permissions." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 30, + "id": "cec436ce-5d1c-455e-a6d7-80f84380e83a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "spec:\n", + " name: admin_permission\n", + " types:\n", + " - PROJECT\n", + " - FEATURE_VIEW\n", + " - ON_DEMAND_FEATURE_VIEW\n", + " - BATCH_FEATURE_VIEW\n", + " - STREAM_FEATURE_VIEW\n", + " - ENTITY\n", + " - FEATURE_SERVICE\n", + " - DATA_SOURCE\n", + " - VALIDATION_REFERENCE\n", + " - SAVED_DATASET\n", + " - PERMISSION\n", + " actions:\n", + " - CREATE\n", + " - DESCRIBE\n", + " - UPDATE\n", + " - DELETE\n", + " - READ_ONLINE\n", + " - READ_OFFLINE\n", + " - WRITE_ONLINE\n", + " - WRITE_OFFLINE\n", + " policy:\n", + " roleBasedPolicy:\n", + " roles:\n", + " - store_admin\n", + "meta:\n", + " createdTimestamp: '2024-09-09T06:41:28.335684Z'\n", + " lastUpdatedTimestamp: '2024-09-09T06:41:28.335684Z'\n", + "\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo permissions describe admin_permission" + ] + }, + { + "cell_type": "markdown", + "id": "a267a3bb-9861-43eb-9f7b-33f5d5a23e81", + "metadata": {}, + "source": [ + "List the roles specified by these permissions." + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "id": "b6a3f4a6-e3ab-4aaa-9a15-69ea63246b45", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------+\n", + "| ROLE NAME |\n", + "+==============+\n", + "| batch_admin |\n", + "+--------------+\n", + "| fresh_writer |\n", + "+--------------+\n", + "| reader |\n", + "+--------------+\n", + "| store_admin |\n", + "+--------------+\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo permissions list-roles" + ] + }, + { + "cell_type": "markdown", + "id": "0dbb31d3-edc9-4146-a46c-146d7f59532a", + "metadata": {}, + "source": [ + "For each configured role, list all the resources and operations that are allowed to a user impersonating this role." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 32, + "id": "45832f21-43c6-4784-ba88-1e65fa8479b5", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ROLE NAME RESOURCE NAME RESOURCE TYPE PERMITTED ACTIONS\n", + "batch_admin driver Entity -\n", + "batch_admin driver_hourly_stats FeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_OFFLINE\n", + " WRITE_OFFLINE\n", + "batch_admin driver_hourly_stats_fresh FeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_OFFLINE\n", + " WRITE_OFFLINE\n", + "batch_admin transformed_conv_rate_fresh OnDemandFeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_OFFLINE\n", + " WRITE_OFFLINE\n", + "batch_admin transformed_conv_rate OnDemandFeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_OFFLINE\n", + " WRITE_OFFLINE\n", + "batch_admin driver_activity_v1 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_OFFLINE\n", + " WRITE_OFFLINE\n", + "batch_admin driver_activity_v3 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_OFFLINE\n", + " WRITE_OFFLINE\n", + "batch_admin driver_activity_v2 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_OFFLINE\n", + " WRITE_OFFLINE\n", + "batch_admin vals_to_add RequestSource -\n", + "batch_admin driver_stats_push_source PushSource -\n", + "batch_admin driver_hourly_stats_source FileSource -\n", + "batch_admin read_permission Permission -\n", + "batch_admin write_fresh_permission Permission -\n", + "batch_admin offline_permission Permission -\n", + "batch_admin admin_permission Permission -\n", + "fresh_writer driver Entity -\n", + "fresh_writer driver_hourly_stats FeatureView -\n", + "fresh_writer driver_hourly_stats_fresh FeatureView WRITE_ONLINE\n", + "fresh_writer transformed_conv_rate_fresh OnDemandFeatureView -\n", + "fresh_writer 
transformed_conv_rate OnDemandFeatureView -\n", + "fresh_writer driver_activity_v1 FeatureService -\n", + "fresh_writer driver_activity_v3 FeatureService -\n", + "fresh_writer driver_activity_v2 FeatureService -\n", + "fresh_writer vals_to_add RequestSource -\n", + "fresh_writer driver_stats_push_source PushSource -\n", + "fresh_writer driver_hourly_stats_source FileSource -\n", + "fresh_writer read_permission Permission -\n", + "fresh_writer write_fresh_permission Permission -\n", + "fresh_writer offline_permission Permission -\n", + "fresh_writer admin_permission Permission -\n", + "reader driver Entity DESCRIBE\n", + "reader driver_hourly_stats FeatureView DESCRIBE\n", + "reader driver_hourly_stats_fresh FeatureView DESCRIBE\n", + "reader transformed_conv_rate_fresh OnDemandFeatureView DESCRIBE\n", + "reader transformed_conv_rate OnDemandFeatureView DESCRIBE\n", + "reader driver_activity_v1 FeatureService DESCRIBE\n", + "reader driver_activity_v3 FeatureService DESCRIBE\n", + "reader driver_activity_v2 FeatureService DESCRIBE\n", + "reader vals_to_add RequestSource DESCRIBE\n", + "reader driver_stats_push_source PushSource DESCRIBE\n", + "reader driver_hourly_stats_source FileSource DESCRIBE\n", + "reader read_permission Permission DESCRIBE\n", + "reader write_fresh_permission Permission DESCRIBE\n", + "reader offline_permission Permission DESCRIBE\n", + "reader admin_permission Permission DESCRIBE\n", + "store_admin driver Entity CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin driver_hourly_stats FeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin driver_hourly_stats_fresh FeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin 
transformed_conv_rate_fresh OnDemandFeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin transformed_conv_rate OnDemandFeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin driver_activity_v1 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin driver_activity_v3 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin driver_activity_v2 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin vals_to_add RequestSource CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin driver_stats_push_source PushSource CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin driver_hourly_stats_source FileSource CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin read_permission Permission CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin write_fresh_permission Permission CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin offline_permission Permission CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " 
DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin admin_permission Permission CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo permissions list-roles -v" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c7960d2c-e43f-46b4-8cb3-5c6fc9dbaba8", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/rbac-local/02-registry_server.ipynb b/examples/rbac-local/02-registry_server.ipynb new file mode 100644 index 0000000000..43a5ead908 --- /dev/null +++ b/examples/rbac-local/02-registry_server.ipynb @@ -0,0 +1,73 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "70df4877-177b-441a-a745-f0cd091e0a3a", + "metadata": {}, + "source": [ + "## Registry server" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "ef9f796d-f9d7-47d0-96c2-03b38a219d83", + "metadata": {}, + "outputs": [], + "source": [ + "!lsof -i :6570\n", + "# !kill -9 64859 98087" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "bd303508-9f32-4bdb-87c2-729e3ab62b4f", + "metadata": {}, + "outputs": [], + "source": [ + "from feast import FeatureStore\n", + "store = FeatureStore(repo_path=\"rbac/feature_repo\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "29127952-f9d5-44c4-b7c3-437e0b55c4b0", + "metadata": {}, + "outputs": [], + "source": [ + "store.serve_registry(6570)" + 
] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5c285fb3-442b-4bb4-bf34-2a61ae5fe76a", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/rbac-local/03-online_server.ipynb b/examples/rbac-local/03-online_server.ipynb new file mode 100644 index 0000000000..f80ef35a17 --- /dev/null +++ b/examples/rbac-local/03-online_server.ipynb @@ -0,0 +1,111 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "d75bb824-a6cf-493e-87a8-2ae1095cf918", + "metadata": {}, + "source": [ + "## Online server" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "27a17dd4-08f5-4f01-b5a4-a76aa99952a1", + "metadata": {}, + "outputs": [], + "source": [ + "!lsof -i :6566\n", + "# !kill -9 64859 98087" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "897f5979-da53-4441-ac31-f5cd40abf6cd", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "YES\n" + ] + } + ], + "source": [ + "# This must be YES on MacOS\n", + "!echo $OBJC_DISABLE_INITIALIZE_FORK_SAFETY" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "73b219c3-7782-4e09-9897-d01f44ccae2d", + "metadata": {}, + "outputs": [], + "source": [ + "# from feast import FeatureStore\n", + "# store = FeatureStore(repo_path=\"rbac/feature_repo\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d1619739-f763-45bb-a1f1-53f6452bc60a", + "metadata": {}, + "outputs": [], + "source": [ + "# store.serve(\n", + "# host=\"localhost\",\n", + "# port=6566,\n", + "# 
type_=\"http\",\n", + "# no_access_log=False,\n", + "# workers=1,\n", + "# metrics=False,\n", + "# keep_alive_timeout=5,\n", + "# registry_ttl_sec=5,\n", + "# )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bc804040-9cd0-4dbc-a63d-a81de9422605", + "metadata": {}, + "outputs": [], + "source": [ + "!feast -c rbac/feature_repo serve" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c3bc63e7-cf7c-4132-b39b-3cd75a1d6755", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/rbac-local/04-offline_server.ipynb b/examples/rbac-local/04-offline_server.ipynb new file mode 100644 index 0000000000..62ad8b1a78 --- /dev/null +++ b/examples/rbac-local/04-offline_server.ipynb @@ -0,0 +1,99 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "36f5d04f-b456-4e65-91a8-482c91f854c1", + "metadata": {}, + "source": [ + "## Offline server" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "86924e3b-d7dc-46e1-a9f4-05c8abee4da8", + "metadata": {}, + "outputs": [], + "source": [ + "!lsof -i :8815\n", + "# !kill -9 64859 98087" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "73b219c3-7782-4e09-9897-d01f44ccae2d", + "metadata": {}, + "outputs": [], + "source": [ + "from feast import FeatureStore\n", + "store = FeatureStore(repo_path=\"rbac/feature_repo\")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "75967678-0573-410e-b9dd-09743b67eac3", + "metadata": {}, + "outputs": [], + "source": [ + "import logging\n", + "import sys\n", + 
"from io import StringIO\n", + "logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s')\n", + "logger = logging.getLogger() " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5400ee1b-de0a-4fe9-9003-83d0af0863e6", + "metadata": {}, + "outputs": [], + "source": [ + "store.serve_offline(\"localhost\", 8815)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8b822d5c-41d9-477a-8b42-c4701784bac2", + "metadata": {}, + "outputs": [], + "source": [ + "# Run this in case it's needed to force materialize from offline server\n", + "from datetime import datetime\n", + "store.materialize_incremental(end_date=datetime.now())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ff854a14-4649-4d40-94fa-b6e2b8577afa", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/rbac-local/README.md b/examples/rbac-local/README.md new file mode 100644 index 0000000000..dd1128e94a --- /dev/null +++ b/examples/rbac-local/README.md @@ -0,0 +1,57 @@ +# RBAC demo +RBAC demo with local environment. + +## System Requirements +* Clone of the Feast repo +* Docker +* yq + +## Architecture +The demo creates the following components: +* An OIDC authorization server using a Keycloak docker container and initialized for demo purposes with a sample realm. +* A sample feature store using `feast init`, later adapted to use the `oidc` authorization against the sample realm. +* Three servers running the registry, online and offline stores. 
+* A client application connected to the servers to run test code. + +## Setup the environment +Run the sample notebooks to setup the environment: +* [01.1-startkeycloak](./01.1-startkeycloak.ipynb) to start a Keycloak container. +* [01.2-setup-keycloak.ipynb](./01.2-setup-keycloak.ipynb) to configure Keycloak with all the needed resources for the next steps. +* [01.3-setup-feast.ipynb](./01.3-setup-feast.ipynb) to create the sample Feast store and inject the authoprization settings +* [02-registry_server.ipynb](./02-registry_server.ipynb) to start the Registry server +* [03-online_server.ipynb](./03-online_server.ipynb) to start the Online store server +* [04-offline_server.ipynb](04-offline_server.ipynb) to start the Offline store server + +**Note**: For MacOs users, you must set this environment variable before launching the notebook server: +```bash +OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES +``` + +## Goal +Once the environment is defined, we can use the [client.ipynb](./client.ipynb) notebook to verify how the behavior changes +according to the configured user. 
+ +In particular, given the configured permissions: +| Permission | Types | Name pattern | Actions | Roles | +|------------|-------|--------------|---------|-------| +| read_permission | ALL | | DESCRIBE | reader | +| write_fresh_permission | FeatureView1 | .*_fresh | WRITE_ONLINE | fresh_writer | +| offline_permission | FeatureView, OnDemandFeatureView, FeatureService | | CRUD, WRITE_OFFLINE, QUERY_OFFLINE | batch_admin | +| admin_permission | ALL | | ALL | store_admin | + +and the user roles defined in Keycloak: +| User | Roles | +|------|-------| +| reader | reader | +| writer | fresh_writer | +| batch_admin | batch_admin | +| admin | store_admin | + +We should expect the following behavior for each test section of the [client notebook](./client.ipynb): +| User | Basic validation | Historical | Materialization | Online | Stream push | +|------|------------------|------------|-------------------|--------|-------------| +| reader | Ok | Denied | Denied | Denied | Denied | +| writer | Empty | Denied | Ok | Denied | Denied | +| batch_admin | No Entities and Permissions | Ok | Denied | Denied | Denied | +| admin | Ok | Ok | Ok | Ok | Ok | + diff --git a/examples/rbac-local/client.ipynb b/examples/rbac-local/client.ipynb new file mode 100644 index 0000000000..7e5561f5f7 --- /dev/null +++ b/examples/rbac-local/client.ipynb @@ -0,0 +1,607 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "bee9388f-8ffc-4fcd-930f-197ec3c2dd96", + "metadata": {}, + "source": [ + "# Test client" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "id": "eceb50b4-c516-4224-a0b1-efd31bb78c29", + "metadata": {}, + "outputs": [], + "source": [ + "import yaml\n", + "def update_username(username):\n", + " path = 'client/feature_store.yaml'\n", + " with open(path, 'r') as file:\n", + " config = yaml.safe_load(file) or {}\n", + " config['auth']['username'] = username\n", + " with open(path, 'w') as file:\n", + " yaml.safe_dump(config, file, default_flow_style=False)" + ] + }, + 
{ + "cell_type": "markdown", + "id": "08a4020a-10ad-476a-af25-26a09d3d4786", + "metadata": {}, + "source": [ + "# Update test user\n", + "Use one of `reader`, `writer`, `batch_admin` or `admin` (password is fixed) as the current `username`." + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "id": "564849f9-c95a-4278-9fa7-fa09694e5d93", + "metadata": {}, + "outputs": [], + "source": [ + "username = 'reader'\n", + "update_username(username)" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "id": "6ffb2c42-5a5d-495c-92c5-0729f0144fb8", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "auth:\n", + " auth_discovery_url: http://0.0.0.0:9999/realms/rbac_example/.well-known/openid-configuration\n", + " client_id: app\n", + " client_secret: REDACTED\n", + " password: password\n", + " type: oidc\n", + " username: reader\n", + "entity_key_serialization_version: 2\n", + "offline_store:\n", + " host: localhost\n", + " port: 8815\n", + " type: remote\n", + "online_store:\n", + " path: http://localhost:6566\n", + " type: remote\n", + "project: rbac\n", + "registry:\n", + " path: localhost:6570\n", + " registry_type: remote\n" + ] + } + ], + "source": [ + "!cat client/feature_store.yaml" + ] + }, + { + "cell_type": "markdown", + "id": "664b6f52-d8cf-4145-bf7a-fcce111a34da", + "metadata": {}, + "source": [ + "## Updating logger\n", + "The following is needed to log in the notebook the output the messages logged by th Feast application." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 47, + "id": "3a6fe206-63f8-486f-88cb-b4e888cb6855", + "metadata": {}, + "outputs": [], + "source": [ + "import logging\n", + "import sys\n", + "from io import StringIO\n", + "logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s')\n", + "logger = logging.getLogger()" + ] + }, + { + "cell_type": "markdown", + "id": "a1eb1495-1f38-4165-a6a4-26a2087f1635", + "metadata": {}, + "source": [ + "## Setup Feast client\n", + "Initialize the Feast store from the [client configuration](./client/feature_store.yaml)" + ] + }, + { + "cell_type": "code", + "execution_count": 48, + "id": "b2292e78-cf30-441c-b67f-36e1f1a81923", + "metadata": {}, + "outputs": [], + "source": [ + "from feast.feature_store import FeatureStore" + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "id": "bb653327-9eb3-448f-b320-625337851522", + "metadata": {}, + "outputs": [], + "source": [ + "store = FeatureStore(repo_path=\"client\")" + ] + }, + { + "cell_type": "markdown", + "id": "7e826371-3df5-483a-878d-ce79e8b907e3", + "metadata": {}, + "source": [ + "## Basic validation\n", + "Verify the authorization config and run some GET APIs on the registry." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 50, + "id": "a59979af-a438-436d-918c-3174d94ade5b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Authorization config is: {'auth_discovery_url': 'http://0.0.0.0:9999/realms/rbac_example/.well-known/openid-configuration', 'client_id': 'app', 'client_secret': 'REDACTED', 'password': 'password', 'type': 'oidc', 'username': 'reader'}\n" + ] + } + ], + "source": [ + "print(f\"Authorization config is: {store.config.auth}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "id": "bf0af19c-6609-4cb4-86f3-a976528c3966", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Entity found driver\n" + ] + } + ], + "source": [ + "for e in store.list_entities():\n", + " print(f\"Entity found {e.name}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "id": "0494a65f-64bf-45f0-a772-ee6d8b89c91e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "FeatureView found driver_hourly_stats of type FeatureView\n", + "FeatureView found driver_hourly_stats_fresh of type FeatureView\n", + "FeatureView found transformed_conv_rate_fresh of type OnDemandFeatureView\n", + "FeatureView found transformed_conv_rate of type OnDemandFeatureView\n" + ] + } + ], + "source": [ + "for fv in store.list_all_feature_views():\n", + " print(f\"FeatureView found {fv.name} of type {type(fv).__name__}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "id": "0832822f-e954-4d43-a96f-de5cf05acb2b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "FeatureService found driver_activity_v1 of type FeatureService\n", + "FeatureService found driver_activity_v3 of type FeatureService\n", + "FeatureService found driver_activity_v2 of type FeatureService\n" + ] + } + ], + "source": [ + "for fs in 
store.list_feature_services():\n", + " print(f\"FeatureService found {fs.name} of type {type(fs).__name__}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 54, + "id": "98fd0767-4305-4b18-a50b-298fa7103815", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME TYPES NAME_PATTERN ACTIONS ROLES REQUIRED_TAGS\n", + "read_permission Project DESCRIBE reader -\n", + " FeatureView\n", + " OnDemandFeatureView\n", + " BatchFeatureView\n", + " StreamFeatureView\n", + " Entity\n", + " FeatureService\n", + " DataSource\n", + " ValidationReference\n", + " SavedDataset\n", + " Permission\n", + "write_fresh_permission FeatureView .*_fresh WRITE_ONLINE fresh_writer -\n", + "offline_permission FeatureView CREATE batch_admin -\n", + " OnDemandFeatureView DESCRIBE\n", + " FeatureService UPDATE\n", + " DELETE\n", + " WRITE_OFFLINE\n", + " READ_OFFLINE\n", + "admin_permission Project CREATE store_admin -\n", + " FeatureView DESCRIBE\n", + " OnDemandFeatureView UPDATE\n", + " BatchFeatureView DELETE\n", + " StreamFeatureView READ_ONLINE\n", + " Entity READ_OFFLINE\n", + " FeatureService WRITE_ONLINE\n", + " DataSource WRITE_OFFLINE\n", + " ValidationReference\n", + " SavedDataset\n", + " Permission\n" + ] + } + ], + "source": [ + "!feast -c client permissions list" + ] + }, + { + "cell_type": "markdown", + "id": "ad2d56ee-e7a9-463e-a597-932c10f8df1c", + "metadata": {}, + "source": [ + "## Validating with test_workflow.py\n", + "The following test functions were copied from the `test_workflow.py` template but we added `try` blocks to print only \n", + "the relevant error messages, since we expect to receive errors from the permission enforcement modules." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 55, + "id": "930f7e8c-c2a0-4425-99c2-c9958a5a7632", + "metadata": {}, + "outputs": [], + "source": [ + "import subprocess\n", + "from datetime import datetime\n", + "\n", + "import pandas as pd\n", + "\n", + "from feast import FeatureStore\n", + "from feast.data_source import PushMode\n", + "\n", + "def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: bool):\n", + " # Note: see https://docs.feast.dev/getting-started/concepts/feature-retrieval for more details on how to retrieve\n", + " # for all entities in the offline store instead\n", + " entity_df = pd.DataFrame.from_dict(\n", + " {\n", + " # entity's join key -> entity values\n", + " \"driver_id\": [1001, 1002, 1003],\n", + " # \"event_timestamp\" (reserved key) -> timestamps\n", + " \"event_timestamp\": [\n", + " datetime(2021, 4, 12, 10, 59, 42),\n", + " datetime(2021, 4, 12, 8, 12, 10),\n", + " datetime(2021, 4, 12, 16, 40, 26),\n", + " ],\n", + " # (optional) label name -> label values. 
Feast does not process these\n", + " \"label_driver_reported_satisfaction\": [1, 5, 3],\n", + " # values we're using for an on-demand transformation\n", + " \"val_to_add\": [1, 2, 3],\n", + " \"val_to_add_2\": [10, 20, 30],\n", + " }\n", + " )\n", + " # For batch scoring, we want the latest timestamps\n", + " if for_batch_scoring:\n", + " entity_df[\"event_timestamp\"] = pd.to_datetime(\"now\", utc=True)\n", + "\n", + " try:\n", + " training_df = store.get_historical_features(\n", + " entity_df=entity_df,\n", + " features=[\n", + " \"driver_hourly_stats:conv_rate\",\n", + " \"driver_hourly_stats:acc_rate\",\n", + " \"driver_hourly_stats:avg_daily_trips\",\n", + " \"transformed_conv_rate:conv_rate_plus_val1\",\n", + " \"transformed_conv_rate:conv_rate_plus_val2\",\n", + " ],\n", + " ).to_df()\n", + " print(training_df.head())\n", + " except Exception as e:\n", + " print(f\"Failed to run `store.get_historical_features`: {e}\")\n", + "\n", + "\n", + "def fetch_online_features(store, source: str = \"\"):\n", + " entity_rows = [\n", + " # {join_key: entity_value}\n", + " {\n", + " \"driver_id\": 1001,\n", + " \"val_to_add\": 1000,\n", + " \"val_to_add_2\": 2000,\n", + " },\n", + " {\n", + " \"driver_id\": 1002,\n", + " \"val_to_add\": 1001,\n", + " \"val_to_add_2\": 2002,\n", + " },\n", + " ]\n", + " if source == \"feature_service\":\n", + " try:\n", + " features_to_fetch = store.get_feature_service(\"driver_activity_v1\")\n", + " except Exception as e:\n", + " print(f\"Failed to run `store.get_feature_service`: {e}\")\n", + " elif source == \"push\":\n", + " try:\n", + " features_to_fetch = store.get_feature_service(\"driver_activity_v3\")\n", + " except Exception as e:\n", + " print(f\"Failed to run `store.get_feature_service`: {e}\")\n", + " else:\n", + " features_to_fetch = [\n", + " \"driver_hourly_stats:acc_rate\",\n", + " \"transformed_conv_rate:conv_rate_plus_val1\",\n", + " \"transformed_conv_rate:conv_rate_plus_val2\",\n", + " ]\n", + " try:\n", + " 
returned_features = store.get_online_features(\n", + " features=features_to_fetch,\n", + " entity_rows=entity_rows,\n", + " ).to_dict()\n", + " for key, value in sorted(returned_features.items()):\n", + " print(key, \" : \", value)\n", + " except Exception as e:\n", + " print(f\"Failed to run `store.get_online_features`: {e}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "id": "86359ae5-e723-4976-89bb-e772f597ed60", + "metadata": {}, + "outputs": [], + "source": [ + "store = FeatureStore(repo_path=\"client\")" + ] + }, + { + "cell_type": "markdown", + "id": "c0fed355-a1ac-4515-ae27-9d0feca886f4", + "metadata": {}, + "source": [ + "### Historical features" + ] + }, + { + "cell_type": "code", + "execution_count": 57, + "id": "e18dba03-6199-4b48-a9cb-23e3fa51a505", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "--- Historical features for training ---\n", + "Failed to run `store.get_historical_features`: Permission error:\n", + "Permission offline_permission denied execution of ['READ_OFFLINE'] to FeatureView:driver_hourly_stats: Requires roles ['batch_admin'],Permission admin_permission denied execution of ['READ_OFFLINE'] to FeatureView:driver_hourly_stats: Requires roles ['store_admin']. Detail: Python exception: FeastPermissionError. gRPC client debug context: UNKNOWN:Error received from peer ipv6:%5B::1%5D:8815 {grpc_message:\"Permission error:\\nPermission offline_permission denied execution of [\\'READ_OFFLINE\\'] to FeatureView:driver_hourly_stats: Requires roles [\\'batch_admin\\'],Permission admin_permission denied execution of [\\'READ_OFFLINE\\'] to FeatureView:driver_hourly_stats: Requires roles [\\'store_admin\\']. Detail: Python exception: FeastPermissionError\", grpc_status:2, created_time:\"2024-09-09T08:52:22.529654+02:00\"}. Client context: IOError: Server never sent a data message. 
Detail: Internal\n", + "\n", + "--- Historical features for batch scoring ---\n", + "Failed to run `store.get_historical_features`: Permission error:\n", + "Permission offline_permission denied execution of ['READ_OFFLINE'] to FeatureView:driver_hourly_stats: Requires roles ['batch_admin'],Permission admin_permission denied execution of ['READ_OFFLINE'] to FeatureView:driver_hourly_stats: Requires roles ['store_admin']. Detail: Python exception: FeastPermissionError. gRPC client debug context: UNKNOWN:Error received from peer ipv6:%5B::1%5D:8815 {created_time:\"2024-09-09T08:52:23.51953+02:00\", grpc_status:2, grpc_message:\"Permission error:\\nPermission offline_permission denied execution of [\\'READ_OFFLINE\\'] to FeatureView:driver_hourly_stats: Requires roles [\\'batch_admin\\'],Permission admin_permission denied execution of [\\'READ_OFFLINE\\'] to FeatureView:driver_hourly_stats: Requires roles [\\'store_admin\\']. Detail: Python exception: FeastPermissionError\"}. Client context: IOError: Server never sent a data message. 
Detail: Internal\n" + ] + } + ], + "source": [ + "print(\"\\n--- Historical features for training ---\")\n", + "fetch_historical_features_entity_df(store, for_batch_scoring=False)\n", + "\n", + "print(\"\\n--- Historical features for batch scoring ---\")\n", + "fetch_historical_features_entity_df(store, for_batch_scoring=True)" + ] + }, + { + "cell_type": "markdown", + "id": "83bdd1a1-7071-4c51-bf69-9b2bade572a1", + "metadata": {}, + "source": [ + "### Materialization" + ] + }, + { + "cell_type": "code", + "execution_count": 58, + "id": "baeed80c-d2bf-4ac2-ae97-dc689c32e797", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "--- Load features into online store ---\n", + "Materializing \u001b[1m\u001b[32m2\u001b[0m feature views to \u001b[1m\u001b[32m2024-09-09 08:52:23+02:00\u001b[0m into the \u001b[1m\u001b[32mremote\u001b[0m online store.\n", + "\n", + "\u001b[1m\u001b[32mdriver_hourly_stats\u001b[0m from \u001b[1m\u001b[32m2024-09-09 10:50:53+02:00\u001b[0m to \u001b[1m\u001b[32m2024-09-09 08:52:23+02:00\u001b[0m:\n", + "Failed to run `store.materialize_incremental`: Permission error:\n", + "Permission admin_permission denied execution of ['READ_OFFLINE'] to FileSource:driver_hourly_stats_source: Requires roles ['store_admin']. Detail: Python exception: FeastPermissionError. gRPC client debug context: UNKNOWN:Error received from peer ipv6:%5B::1%5D:8815 {created_time:\"2024-09-09T08:52:24.551895+02:00\", grpc_status:2, grpc_message:\"Permission error:\\nPermission admin_permission denied execution of [\\'READ_OFFLINE\\'] to FileSource:driver_hourly_stats_source: Requires roles [\\'store_admin\\']. Detail: Python exception: FeastPermissionError\"}. Client context: IOError: Server never sent a data message. 
Detail: Internal\n" + ] + } + ], + "source": [ + "print(\"\\n--- Load features into online store ---\")\n", + "try:\n", + " store.materialize_incremental(end_date=datetime.now())\n", + "except Exception as e:\n", + " print(f\"Failed to run `store.materialize_incremental`: {e}\")" + ] + }, + { + "cell_type": "markdown", + "id": "f3ef1e87-a98e-447e-893a-d10e205d87c5", + "metadata": {}, + "source": [ + "### Online features" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "id": "feb552de-77da-4177-bc4e-4c882ca91fe8", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "--- Online features ---\n", + "Failed to run `store.get_online_features`: Permission error:\n", + "Permission admin_permission denied execution of ['READ_ONLINE'] to FeatureView:driver_hourly_stats: Requires roles ['store_admin']\n", + "\n", + "--- Online features retrieved (instead) through a feature service---\n", + "Failed to run `store.get_online_features`: Permission error:\n", + "Permission admin_permission denied execution of ['READ_ONLINE'] to FeatureView:driver_hourly_stats: Requires roles ['store_admin']\n", + "\n", + "--- Online features retrieved (using feature service v3, which uses a feature view with a push source---\n", + "Failed to run `store.get_online_features`: Permission error:\n", + "Permission admin_permission denied execution of ['READ_ONLINE'] to FeatureView:driver_hourly_stats: Requires roles ['store_admin']\n" + ] + } + ], + "source": [ + "print(\"\\n--- Online features ---\")\n", + "fetch_online_features(store)\n", + "\n", + "print(\"\\n--- Online features retrieved (instead) through a feature service---\")\n", + "fetch_online_features(store, source=\"feature_service\")\n", + "\n", + "print(\n", + " \"\\n--- Online features retrieved (using feature service v3, which uses a feature view with a push source---\"\n", + ")\n", + "fetch_online_features(store, source=\"push\")" + ] + }, + { + "cell_type": 
"markdown", + "id": "7ce5704c-86ef-4d00-a111-b86e853f2cca", + "metadata": {}, + "source": [ + "### Stream push" + ] + }, + { + "cell_type": "code", + "execution_count": 60, + "id": "e53317fc-8e6b-4dc3-89ca-28d6be04b98a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "--- Simulate a stream event ingestion of the hourly stats df ---\n", + " driver_id event_timestamp created conv_rate \\\n", + "0 1001 2024-09-09 08:52:33.038542 2024-09-09 08:52:33.038547 1.0 \n", + "\n", + " acc_rate avg_daily_trips \n", + "0 1.0 1000 \n", + "Failed to run `store.push`: \n", + "\n", + "--- Online features again with updated values from a stream push---\n", + "Failed to run `store.get_online_features`: Permission error:\n", + "Permission admin_permission denied execution of ['READ_ONLINE'] to FeatureView:driver_hourly_stats: Requires roles ['store_admin']\n" + ] + } + ], + "source": [ + "print(\"\\n--- Simulate a stream event ingestion of the hourly stats df ---\")\n", + "event_df = pd.DataFrame.from_dict(\n", + " {\n", + " \"driver_id\": [1001],\n", + " \"event_timestamp\": [\n", + " datetime.now(),\n", + " ],\n", + " \"created\": [\n", + " datetime.now(),\n", + " ],\n", + " \"conv_rate\": [1.0],\n", + " \"acc_rate\": [1.0],\n", + " \"avg_daily_trips\": [1000],\n", + " }\n", + ")\n", + "print(event_df)\n", + "try:\n", + " store.push(\"driver_stats_push_source\", event_df, to=PushMode.ONLINE_AND_OFFLINE)\n", + "except Exception as e:\n", + " print(f\"Failed to run `store.push`: {e}\") \n", + "\n", + "print(\"\\n--- Online features again with updated values from a stream push---\")\n", + "fetch_online_features(store, source=\"push\")" + ] + }, + { + "cell_type": "markdown", + "id": "5709f71b-ddff-4048-9db1-98d4090326e1", + "metadata": {}, + "source": [ + "**Note** If you see the following error, it is likely due to the issue [#4392: Remote registry client does not map application 
errors](https://github.com/feast-dev/feast/issues/4392):\n", + "```\n", + "Feature view driver_hourly_stats_fresh does not exist in project rbac\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "573d9e29-4ba8-41f4-b6a1-82a24d4550b5", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/rbac-local/client/feature_store.yaml b/examples/rbac-local/client/feature_store.yaml new file mode 100644 index 0000000000..d428adf671 --- /dev/null +++ b/examples/rbac-local/client/feature_store.yaml @@ -0,0 +1,12 @@ +entity_key_serialization_version: 2 +offline_store: + host: localhost + port: 8815 + type: remote +online_store: + path: http://localhost:6566 + type: remote +project: rbac +registry: + path: localhost:6570 + registry_type: remote From ddecae89a4d5e6cdb6e49c7514ba6ae0c354350d Mon Sep 17 00:00:00 2001 From: lokeshrangineni <19699092+lokeshrangineni@users.noreply.github.com> Date: Thu, 12 Sep 2024 23:49:58 -0400 Subject: [PATCH 58/96] docs: Adding the missed documentation for the RBAC (#4515) * Adding the missed documentation - * OIDC Token requirement or assumptions * Added `feast permissions check` cli command documentation. Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> * Adding the missed documentation - * OIDC Token requirement or assumptions * Added `feast permissions check` cli command documentation. Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> * Fixed code review comments. 
Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> --------- Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> --- .../components/authz_manager.md | 10 +++-- docs/reference/feast-cli-commands.md | 42 +++++++++++++++++++ 2 files changed, 49 insertions(+), 3 deletions(-) diff --git a/docs/getting-started/components/authz_manager.md b/docs/getting-started/components/authz_manager.md index 0d011fbf2b..20fcdca107 100644 --- a/docs/getting-started/components/authz_manager.md +++ b/docs/getting-started/components/authz_manager.md @@ -1,5 +1,5 @@ # Authorization Manager -An Authorization Manager is an instance of the `AuthManager` class that is plugged into one of the Feast servers to extract user details from the current request and inject them into the [permissions](../../getting-started/concepts/permissions.md) framework. +An Authorization Manager is an instance of the `AuthManager` class that is plugged into one of the Feast servers to extract user details from the current request and inject them into the [permission](../../getting-started/concepts/permission.md) framework. {% hint style="info" %} **Note**: Feast does not provide authentication capabilities; it is the client's responsibility to manage the authentication token and pass it to @@ -44,7 +44,10 @@ The server, in turn, uses the same OIDC server to validate the token and extract Some assumptions are made in the OIDC server configuration: * The OIDC token refers to a client with roles matching the RBAC roles of the configured `Permission`s (*) -* The roles are exposed in the access token passed to the server +* The roles are exposed in the access token that is passed to the server +* The JWT token is expected to have a verified signature and not be expired. The Feast OIDC token parser logic validates for `verify_signature` and `verify_exp` so make sure that the given OIDC provider is configured to meet these requirements. 
+* The preferred_username should be part of the JWT token claim. + (*) Please note that **the role match is case-sensitive**, e.g. the name of the role in the OIDC server and in the `Permission` configuration must be exactly the same. @@ -57,7 +60,8 @@ For example, the access token for a client `app` of a user with `reader` role sh "roles": [ "reader" ] - }, + } + } } ``` diff --git a/docs/reference/feast-cli-commands.md b/docs/reference/feast-cli-commands.md index be31720034..b32db3215a 100644 --- a/docs/reference/feast-cli-commands.md +++ b/docs/reference/feast-cli-commands.md @@ -224,6 +224,48 @@ tags: key2: value2 ``` +### Permission check +The `permissions check` command is used to identify resources that lack the appropriate permissions based on their type, name, or tags. + +This command is particularly useful for administrators when roles, actions, or permissions have been modified or newly configured. By running this command, administrators can easily verify which resources and actions are not protected by any permission configuration, ensuring that proper security measures are in place. + +```text +> feast permissions check + + +The following resources are not secured by any permission configuration: +NAME TYPE +driver Entity +driver_hourly_stats_fresh FeatureView +The following actions are not secured by any permission configuration (Note: this might not be a security concern, depending on the used APIs): +NAME TYPE UNSECURED ACTIONS +driver Entity CREATE + DESCRIBE + UPDATE + DELETE + READ_ONLINE + READ_OFFLINE + WRITE_ONLINE + WRITE_OFFLINE +driver_hourly_stats_fresh FeatureView CREATE + DESCRIBE + UPDATE + DELETE + READ_ONLINE + READ_OFFLINE + WRITE_ONLINE + WRITE_OFFLINE + +Based on the above results, the administrator can reassess the permissions configuration and make any necessary adjustments to meet their security requirements. 
+ +If no resources are accessible publicly, the permissions check command will return the following response: +> feast permissions check +The following resources are not secured by any permission configuration: +NAME TYPE +The following actions are not secured by any permission configuration (Note: this might not be a security concern, depending on the used APIs): +NAME TYPE UNSECURED ACTIONS +``` + ### List of the configured roles List all the configured roles From 06eade3b83d5e481fe158dd323411a3d92d75c98 Mon Sep 17 00:00:00 2001 From: cburroughs Date: Mon, 16 Sep 2024 10:55:02 -0400 Subject: [PATCH 59/96] chore: Remove bump upper bound on fsspec requirement (#4512) Alternative to #4461 Signed-off-by: Chris Burroughs --- .../requirements/py3.10-ci-requirements.txt | 305 ++++++++---------- .../requirements/py3.10-requirements.txt | 129 +++----- .../requirements/py3.11-ci-requirements.txt | 302 ++++++++--------- .../requirements/py3.11-requirements.txt | 127 +++----- .../requirements/py3.9-ci-requirements.txt | 289 ++++++++--------- .../requirements/py3.9-requirements.txt | 127 +++----- setup.py | 4 +- 7 files changed, 545 insertions(+), 738 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index bfe855a2d8..b8798d96c6 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -1,9 +1,11 @@ # This file was autogenerated by uv via the following command: # uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.10-ci-requirements.txt -aiobotocore==2.13.1 -aiohttp==3.9.5 +aiobotocore==2.15.0 +aiohappyeyeballs==2.4.0 + # via aiohttp +aiohttp==3.10.5 # via aiobotocore -aioitertools==0.11.0 +aioitertools==0.12.0 # via aiobotocore aiosignal==1.3.1 # via aiohttp @@ -38,9 +40,9 @@ async-timeout==4.0.3 # via # aiohttp # redis -atpublic==4.1.0 +atpublic==5.0 # via ibis-framework 
-attrs==23.2.0 +attrs==24.2.0 # via # aiohttp # jsonschema @@ -50,34 +52,32 @@ azure-core==1.30.2 # azure-identity # azure-storage-blob azure-identity==1.17.1 -azure-storage-blob==12.20.0 -babel==2.15.0 +azure-storage-blob==12.22.0 +babel==2.16.0 # via # jupyterlab-server # sphinx beautifulsoup4==4.12.3 # via nbconvert -bidict==0.23.1 - # via ibis-framework -bigtree==0.19.2 +bigtree==0.21.1 bleach==6.1.0 # via nbconvert -boto3==1.34.131 +boto3==1.35.16 # via moto -botocore==1.34.131 +botocore==1.35.16 # via # aiobotocore # boto3 # moto # s3transfer -build==1.2.1 +build==1.2.2 # via # pip-tools # singlestoredb -cachetools==5.3.3 +cachetools==5.5.0 # via google-auth -cassandra-driver==3.29.1 -certifi==2024.7.4 +cassandra-driver==3.29.2 +certifi==2024.8.30 # via # elastic-transport # httpcore @@ -86,7 +86,7 @@ certifi==2024.7.4 # minio # requests # snowflake-connector-python -cffi==1.16.0 +cffi==1.17.1 # via # argon2-cffi-bindings # cryptography @@ -103,7 +103,6 @@ click==8.1.7 # geomet # great-expectations # pip-tools - # typer # uvicorn cloudpickle==3.0.0 # via dask @@ -113,9 +112,9 @@ comm==0.2.2 # via # ipykernel # ipywidgets -coverage[toml]==7.5.4 +coverage[toml]==7.6.1 # via pytest-cov -cryptography==43.0.1 +cryptography==42.0.8 # via # azure-identity # azure-storage-blob @@ -128,54 +127,50 @@ cryptography==43.0.1 # snowflake-connector-python # types-pyopenssl # types-redis -dask[dataframe]==2024.6.2 +cython==3.0.11 + # via thriftpy2 +dask[dataframe]==2024.8.2 # via dask-expr -dask-expr==1.1.6 +dask-expr==1.1.13 # via dask -db-dtypes==1.2.0 +db-dtypes==1.3.0 # via google-cloud-bigquery -debugpy==1.8.2 +debugpy==1.8.5 # via ipykernel decorator==5.1.1 # via ipython defusedxml==0.7.1 # via nbconvert -deltalake==0.18.1 +deltalake==0.19.2 deprecation==2.1.0 # via python-keycloak dill==0.3.8 distlib==0.3.8 # via virtualenv -dnspython==2.6.1 - # via email-validator docker==7.1.0 # via testcontainers docutils==0.19 # via sphinx -duckdb==0.10.3 +duckdb==1.1.0 # via 
ibis-framework -elastic-transport==8.13.1 +elastic-transport==8.15.0 # via elasticsearch -elasticsearch==8.14.0 -email-validator==2.2.0 - # via fastapi +elasticsearch==8.15.1 entrypoints==0.4 # via altair -exceptiongroup==1.2.1 +exceptiongroup==1.2.2 # via # anyio # ipython # pytest execnet==2.1.1 # via pytest-xdist -executing==2.0.1 +executing==2.1.0 # via stack-data -fastapi==0.111.0 -fastapi-cli==0.0.4 - # via fastapi +fastapi==0.114.1 fastjsonschema==2.20.0 # via nbformat -filelock==3.15.4 +filelock==3.16.0 # via # snowflake-connector-python # virtualenv @@ -185,11 +180,11 @@ frozenlist==1.4.1 # via # aiohttp # aiosignal -fsspec==2023.12.2 +fsspec==2024.9.0 # via dask geomet==0.2.1.post1 # via cassandra-driver -google-api-core[grpc]==2.19.1 +google-api-core[grpc]==2.19.2 # via # google-cloud-bigquery # google-cloud-bigquery-storage @@ -197,46 +192,48 @@ google-api-core[grpc]==2.19.1 # google-cloud-core # google-cloud-datastore # google-cloud-storage -google-auth==2.30.0 +google-auth==2.34.0 # via # google-api-core + # google-cloud-bigquery # google-cloud-bigquery-storage + # google-cloud-bigtable # google-cloud-core + # google-cloud-datastore # google-cloud-storage # kubernetes -google-cloud-bigquery[pandas]==3.13.0 -google-cloud-bigquery-storage==2.25.0 -google-cloud-bigtable==2.24.0 +google-cloud-bigquery[pandas]==3.25.0 +google-cloud-bigquery-storage==2.26.0 +google-cloud-bigtable==2.26.0 google-cloud-core==2.4.1 # via # google-cloud-bigquery # google-cloud-bigtable # google-cloud-datastore # google-cloud-storage -google-cloud-datastore==2.19.0 -google-cloud-storage==2.17.0 -google-crc32c==1.5.0 +google-cloud-datastore==2.20.1 +google-cloud-storage==2.18.2 +google-crc32c==1.6.0 # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.7.1 +google-resumable-media==2.7.2 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos[grpc]==1.63.2 +googleapis-common-protos[grpc]==1.65.0 # via # google-api-core # 
grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.16 -greenlet==3.0.3 +great-expectations==0.18.20 +greenlet==3.1.0 # via sqlalchemy grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable -grpcio==1.64.1 +grpcio==1.66.1 # via # google-api-core - # google-cloud-bigquery # googleapis-common-protos # grpc-google-iam-v1 # grpcio-health-checking @@ -244,38 +241,36 @@ grpcio==1.64.1 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-health-checking==1.62.2 -grpcio-reflection==1.62.2 -grpcio-status==1.62.2 +grpcio-health-checking==1.62.3 +grpcio-reflection==1.62.3 +grpcio-status==1.62.3 # via google-api-core -grpcio-testing==1.62.2 -grpcio-tools==1.62.2 -gunicorn==22.0.0 +grpcio-testing==1.62.3 +grpcio-tools==1.62.3 +gunicorn==23.0.0 h11==0.14.0 # via # httpcore # uvicorn happybase==1.2.0 -hazelcast-python-client==5.4.0 -hiredis==2.3.2 +hazelcast-python-client==5.5.0 +hiredis==2.4.0 httpcore==1.0.5 # via httpx httptools==0.6.1 # via uvicorn -httpx==0.27.0 +httpx==0.27.2 # via - # fastapi # jupyterlab # python-keycloak -ibis-framework[duckdb]==9.1.0 +ibis-framework[duckdb]==9.4.0 # via ibis-substrait -ibis-substrait==4.0.0 -identify==2.5.36 +ibis-substrait==4.0.1 +identify==2.6.0 # via pre-commit -idna==3.7 +idna==3.8 # via # anyio - # email-validator # httpx # jsonschema # requests @@ -283,18 +278,18 @@ idna==3.7 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==8.0.0 +importlib-metadata==8.5.0 # via dask iniconfig==2.0.0 # via pytest -ipykernel==6.29.4 +ipykernel==6.29.5 # via jupyterlab -ipython==8.25.0 +ipython==8.27.0 # via # great-expectations # ipykernel # ipywidgets -ipywidgets==8.1.3 +ipywidgets==8.1.5 # via great-expectations isodate==0.6.1 # via azure-storage-blob @@ -305,7 +300,6 @@ jedi==0.19.1 jinja2==3.1.4 # via # altair - # fastapi # great-expectations # jupyter-server # jupyterlab @@ -325,7 +319,7 @@ jsonpointer==3.0.0 # via # jsonpatch # jsonschema -jsonschema[format-nongpl]==4.22.0 +jsonschema[format-nongpl]==4.23.0 # via # altair 
# great-expectations @@ -352,7 +346,7 @@ jupyter-events==0.10.0 # via jupyter-server jupyter-lsp==2.2.5 # via jupyterlab -jupyter-server==2.14.1 +jupyter-server==2.14.2 # via # jupyter-lsp # jupyterlab @@ -365,18 +359,18 @@ jupyterlab==4.2.5 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert -jupyterlab-server==2.27.2 +jupyterlab-server==2.27.3 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.11 +jupyterlab-widgets==3.0.13 # via ipywidgets jwcrypto==1.5.6 # via python-keycloak kubernetes==20.13.0 locket==1.0.0 # via partd -makefun==1.15.2 +makefun==1.15.4 # via great-expectations markdown-it-py==3.0.0 # via rich @@ -385,7 +379,7 @@ markupsafe==2.1.5 # jinja2 # nbconvert # werkzeug -marshmallow==3.21.3 +marshmallow==3.22.0 # via great-expectations matplotlib-inline==0.1.7 # via @@ -401,17 +395,17 @@ mistune==3.0.2 mmh3==4.1.0 mock==2.0.0 moto==4.2.14 -msal==1.29.0 +msal==1.31.0 # via # azure-identity # msal-extensions msal-extensions==1.2.0 # via azure-identity -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl -mypy==1.10.1 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -448,8 +442,6 @@ numpy==1.26.4 # scipy oauthlib==3.2.2 # via requests-oauthlib -orjson==3.10.5 - # via fastapi overrides==7.7.0 # via jupyter-server packaging==24.1 @@ -491,11 +483,11 @@ parsy==2.1 # via ibis-framework partd==1.4.2 # via dask -pbr==6.0.0 +pbr==6.1.0 # via mock pexpect==4.9.0 # via ipython -pip==24.1.1 +pip==24.2 # via pip-tools pip-tools==7.4.1 platformdirs==3.11.0 @@ -507,7 +499,7 @@ pluggy==1.5.0 # via pytest ply==3.11 # via thriftpy2 -portalocker==2.10.0 +portalocker==2.10.1 # via msal-extensions pre-commit==3.3.1 prometheus-client==0.20.0 @@ -517,14 +509,12 @@ prompt-toolkit==3.0.47 proto-plus==1.24.0 # via # google-api-core - # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-datastore -protobuf==4.25.3 +protobuf==4.25.4 # via # google-api-core - # google-cloud-bigquery # 
google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-datastore @@ -540,8 +530,8 @@ protobuf==4.25.3 # substrait psutil==5.9.0 # via ipykernel -psycopg[binary, pool]==3.1.19 -psycopg-binary==3.1.19 +psycopg[binary, pool]==3.2.1 +psycopg-binary==3.2.1 # via psycopg psycopg-pool==3.2.2 # via psycopg @@ -549,14 +539,14 @@ ptyprocess==0.7.0 # via # pexpect # terminado -pure-eval==0.2.2 +pure-eval==0.2.3 # via stack-data py==1.11.0 py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark -pyarrow==15.0.2 +pyarrow==17.0.0 # via # dask-expr # db-dtypes @@ -565,23 +555,21 @@ pyarrow==15.0.2 # ibis-framework # snowflake-connector-python pyarrow-hotfix==0.6 - # via - # deltalake - # ibis-framework -pyasn1==0.6.0 + # via ibis-framework +pyasn1==0.6.1 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 +pyasn1-modules==0.4.1 # via google-auth pybindgen==0.22.1 pycparser==2.22 # via cffi -pydantic==2.7.4 +pydantic==2.9.1 # via # fastapi # great-expectations -pydantic-core==2.18.4 +pydantic-core==2.23.3 # via pydantic pygments==2.18.0 # via @@ -589,23 +577,23 @@ pygments==2.18.0 # nbconvert # rich # sphinx -pyjwt[crypto]==2.8.0 +pyjwt[crypto]==2.9.0 # via # msal # singlestoredb # snowflake-connector-python -pymssql==2.3.0 +pymssql==2.3.1 pymysql==1.1.1 pyodbc==5.1.0 -pyopenssl==24.1.0 +pyopenssl==24.2.1 # via snowflake-connector-python -pyparsing==3.1.2 +pyparsing==3.1.4 # via great-expectations pyproject-hooks==1.1.0 # via # build # pip-tools -pyspark==3.5.1 +pyspark==3.5.2 pytest==7.4.4 # via # pytest-benchmark @@ -641,16 +629,14 @@ python-dotenv==1.0.1 python-json-logger==2.0.7 # via jupyter-events python-keycloak==4.2.2 -python-multipart==0.0.9 - # via fastapi -pytz==2024.1 +pytz==2024.2 # via # great-expectations # ibis-framework # pandas # snowflake-connector-python # trino -pyyaml==6.0.1 +pyyaml==6.0.2 # via # dask # ibis-substrait @@ -659,7 +645,7 @@ pyyaml==6.0.1 # pre-commit # responses # uvicorn -pyzmq==26.0.3 +pyzmq==26.2.0 # via # 
ipykernel # jupyter-client @@ -670,7 +656,7 @@ referencing==0.35.1 # jsonschema # jsonschema-specifications # jupyter-events -regex==2024.5.15 +regex==2024.7.24 # via parsimonious requests==2.32.3 # via @@ -706,35 +692,33 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.7.1 - # via - # ibis-framework - # typer -rpds-py==0.18.1 +rich==13.8.1 + # via ibis-framework +rpds-py==0.20.0 # via # jsonschema # referencing rsa==4.9 # via google-auth -ruamel-yaml==0.17.17 +ruamel-yaml==0.17.40 # via great-expectations -ruff==0.4.10 +ruamel-yaml-clib==0.2.8 + # via ruamel-yaml +ruff==0.6.4 s3transfer==0.10.2 # via boto3 -scipy==1.14.0 +scipy==1.14.1 # via great-expectations send2trash==1.8.3 # via jupyter-server -setuptools==70.1.1 +setuptools==74.1.2 # via # grpcio-tools # jupyterlab # kubernetes # pip-tools # singlestoredb -shellingham==1.5.4 - # via typer -singlestoredb==1.4.0 +singlestoredb==1.6.3 six==1.16.0 # via # asttokens @@ -754,44 +738,44 @@ sniffio==1.3.1 # httpx snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==3.11.0 +snowflake-connector-python[pandas]==3.12.1 sortedcontainers==2.4.0 # via snowflake-connector-python -soupsieve==2.5 +soupsieve==2.6 # via beautifulsoup4 sphinx==6.2.1 -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.6 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.7 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.10 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx -sqlalchemy[mypy]==2.0.31 -sqlglot==25.1.0 +sqlalchemy[mypy]==2.0.34 +sqlglot==25.18.0 # via ibis-framework sqlite-vec==0.1.1 -sqlparams==6.0.1 +sqlparams==6.1.0 # via singlestoredb stack-data==0.6.3 # via ipython -starlette==0.37.2 +starlette==0.38.5 # via fastapi -substrait==0.19.0 +substrait==0.22.0 # via 
ibis-substrait tabulate==0.9.0 -tenacity==8.4.2 +tenacity==8.5.0 terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals testcontainers==4.4.0 -thriftpy2==0.5.1 +thriftpy2==0.5.2 # via happybase tinycss2==1.3.0 # via nbconvert @@ -806,7 +790,7 @@ tomli==2.0.1 # pytest # pytest-env # singlestoredb -tomlkit==0.12.5 +tomlkit==0.13.2 # via snowflake-connector-python toolz==0.12.1 # via @@ -822,7 +806,7 @@ tornado==6.4.1 # jupyterlab # notebook # terminado -tqdm==4.66.4 +tqdm==4.66.5 # via great-expectations traitlets==5.14.3 # via @@ -839,24 +823,22 @@ traitlets==5.14.3 # nbclient # nbconvert # nbformat -trino==0.328.0 +trino==0.329.0 typeguard==4.3.0 -typer==0.12.3 - # via fastapi-cli types-cffi==1.16.0.20240331 # via types-pyopenssl types-protobuf==3.19.22 # via mypy-protobuf types-pymysql==1.1.0.20240524 -types-pyopenssl==24.1.0.20240425 +types-pyopenssl==24.1.0.20240722 # via types-redis -types-python-dateutil==2.9.0.20240316 +types-python-dateutil==2.9.0.20240906 # via arrow types-pytz==2024.1.0.20240417 -types-pyyaml==6.0.12.20240311 -types-redis==4.6.0.20240425 +types-pyyaml==6.0.12.20240808 +types-redis==4.6.0.20240903 types-requests==2.30.0.0 -types-setuptools==70.1.0.20240627 +types-setuptools==74.1.0.20240907 # via types-cffi types-tabulate==0.9.0.20240106 types-urllib3==1.26.25.14 @@ -873,6 +855,7 @@ typing-extensions==4.12.2 # ibis-framework # ipython # jwcrypto + # multidict # mypy # psycopg # psycopg-pool @@ -882,7 +865,6 @@ typing-extensions==4.12.2 # sqlalchemy # testcontainers # typeguard - # typer # uvicorn tzdata==2024.1 # via pandas @@ -890,11 +872,9 @@ tzlocal==5.2 # via # great-expectations # trino -ujson==5.10.0 - # via fastapi uri-template==1.3.0 # via jsonschema -urllib3==1.26.19 +urllib3==2.2.2 # via # botocore # docker @@ -905,17 +885,16 @@ urllib3==1.26.19 # requests # responses # testcontainers -uvicorn[standard]==0.30.1 - # via fastapi -uvloop==0.19.0 +uvicorn[standard]==0.30.6 +uvloop==0.20.0 # via uvicorn virtualenv==20.23.0 
# via pre-commit -watchfiles==0.22.0 +watchfiles==0.24.0 # via uvicorn wcwidth==0.2.13 # via prompt-toolkit -webcolors==24.6.0 +webcolors==24.8.0 # via jsonschema webencodings==0.5.1 # via @@ -925,15 +904,15 @@ websocket-client==1.8.0 # via # jupyter-server # kubernetes -websockets==12.0 +websockets==13.0.1 # via uvicorn -werkzeug==3.0.3 +werkzeug==3.0.4 # via moto -wheel==0.43.0 +wheel==0.44.0 # via # pip-tools # singlestoredb -widgetsnbextension==4.0.11 +widgetsnbextension==4.0.13 # via ipywidgets wrapt==1.16.0 # via @@ -941,7 +920,7 @@ wrapt==1.16.0 # testcontainers xmltodict==0.13.0 # via moto -yarl==1.9.4 +yarl==1.11.1 # via aiohttp -zipp==3.19.1 +zipp==3.20.1 # via importlib-metadata diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index eed2baaefe..a3006e4555 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -1,23 +1,20 @@ # This file was autogenerated by uv via the following command: # uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.10-requirements.txt -annotated-types==0.6.0 +annotated-types==0.7.0 # via pydantic -anyio==4.3.0 +anyio==4.4.0 # via - # httpx # starlette # watchfiles -attrs==23.2.0 +attrs==24.2.0 # via # jsonschema # referencing -bigtree==0.19.2 +bigtree==0.21.1 cachetools==5.5.0 # via google-auth -certifi==2024.7.4 +certifi==2024.8.30 # via - # httpcore - # httpx # kubernetes # requests charset-normalizer==3.3.2 @@ -25,67 +22,46 @@ charset-normalizer==3.3.2 click==8.1.7 # via # dask - # typer # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 -dask[dataframe]==2024.5.0 +dask[dataframe]==2024.8.2 # via dask-expr -dask-expr==1.1.0 +dask-expr==1.1.13 # via dask dill==0.3.8 -dnspython==2.6.1 - # via email-validator -email-validator==2.1.1 - # via fastapi -exceptiongroup==1.2.1 +exceptiongroup==1.2.2 # via anyio -fastapi==0.111.0 - # via fastapi-cli -fastapi-cli==0.0.2 
- # via fastapi -fsspec==2024.3.1 +fastapi==0.114.1 +fsspec==2024.9.0 # via dask google-auth==2.34.0 # via kubernetes -greenlet==3.0.3 +greenlet==3.1.0 # via sqlalchemy -gunicorn==22.0.0 +gunicorn==23.0.0 h11==0.14.0 - # via - # httpcore - # uvicorn -httpcore==1.0.5 - # via httpx + # via uvicorn httptools==0.6.1 # via uvicorn -httpx==0.27.0 - # via fastapi -idna==3.7 +idna==3.8 # via # anyio - # email-validator - # httpx # requests -importlib-metadata==7.1.0 +importlib-metadata==8.5.0 # via dask jinja2==3.1.4 - # via fastapi -jsonschema==4.22.0 +jsonschema==4.23.0 jsonschema-specifications==2023.12.1 # via jsonschema kubernetes==20.13.0 locket==1.0.0 # via partd -markdown-it-py==3.0.0 - # via rich markupsafe==2.1.5 # via jinja2 -mdurl==0.1.2 - # via markdown-it-py mmh3==4.1.0 -mypy==1.10.0 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -97,9 +73,7 @@ numpy==1.26.4 # pyarrow oauthlib==3.2.2 # via requests-oauthlib -orjson==3.10.3 - # via fastapi -packaging==24.0 +packaging==24.1 # via # dask # gunicorn @@ -110,23 +84,22 @@ pandas==2.2.2 partd==1.4.2 # via dask prometheus-client==0.20.0 -protobuf==4.25.3 +protobuf==4.25.4 # via mypy-protobuf psutil==6.0.0 -pyarrow==16.0.0 +pyarrow==17.0.0 # via dask-expr -pyasn1==0.6.0 +pyasn1==0.6.1 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 +pyasn1-modules==0.4.1 # via google-auth -pydantic==2.7.1 +pydantic==2.9.1 # via fastapi -pydantic-core==2.18.2 +pydantic-core==2.23.3 # via pydantic pygments==2.18.0 - # via rich pyjwt==2.9.0 python-dateutil==2.9.0.post0 # via @@ -134,11 +107,9 @@ python-dateutil==2.9.0.post0 # pandas python-dotenv==1.0.1 # via uvicorn -python-multipart==0.0.9 - # via fastapi -pytz==2024.1 +pytz==2024.2 # via pandas -pyyaml==6.0.1 +pyyaml==6.0.2 # via # dask # kubernetes @@ -147,37 +118,31 @@ referencing==0.35.1 # via # jsonschema # jsonschema-specifications -requests==2.31.0 +requests==2.32.3 # via # kubernetes # requests-oauthlib requests-oauthlib==2.0.0 # via kubernetes 
-rich==13.7.1 - # via typer -rpds-py==0.18.1 +rpds-py==0.20.0 # via # jsonschema # referencing rsa==4.9 # via google-auth -setuptools==73.0.1 +setuptools==74.1.2 # via kubernetes -shellingham==1.5.4 - # via typer six==1.16.0 # via # kubernetes # python-dateutil sniffio==1.3.1 - # via - # anyio - # httpx -sqlalchemy[mypy]==2.0.30 -starlette==0.37.2 + # via anyio +sqlalchemy[mypy]==2.0.34 +starlette==0.38.5 # via fastapi tabulate==0.9.0 -tenacity==8.3.0 +tenacity==8.5.0 toml==0.10.2 tomli==2.0.1 # via mypy @@ -185,13 +150,11 @@ toolz==0.12.1 # via # dask # partd -tqdm==4.66.4 -typeguard==4.2.1 -typer==0.12.3 - # via fastapi-cli -types-protobuf==5.26.0.20240422 +tqdm==4.66.5 +typeguard==4.3.0 +types-protobuf==5.27.0.20240907 # via mypy-protobuf -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # anyio # fastapi @@ -200,27 +163,21 @@ typing-extensions==4.11.0 # pydantic-core # sqlalchemy # typeguard - # typer # uvicorn tzdata==2024.1 # via pandas -ujson==5.9.0 - # via fastapi -urllib3==2.2.1 +urllib3==2.2.2 # via # kubernetes # requests -uvicorn[standard]==0.29.0 - # via - # fastapi - # fastapi-cli -uvloop==0.19.0 +uvicorn[standard]==0.30.6 +uvloop==0.20.0 # via uvicorn -watchfiles==0.21.0 +watchfiles==0.24.0 # via uvicorn websocket-client==1.8.0 # via kubernetes -websockets==12.0 +websockets==13.0.1 # via uvicorn -zipp==3.19.1 +zipp==3.20.1 # via importlib-metadata diff --git a/sdk/python/requirements/py3.11-ci-requirements.txt b/sdk/python/requirements/py3.11-ci-requirements.txt index 6a097526d7..cd78247a23 100644 --- a/sdk/python/requirements/py3.11-ci-requirements.txt +++ b/sdk/python/requirements/py3.11-ci-requirements.txt @@ -1,9 +1,11 @@ # This file was autogenerated by uv via the following command: # uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.11-ci-requirements.txt -aiobotocore==2.13.1 -aiohttp==3.9.5 +aiobotocore==2.15.0 +aiohappyeyeballs==2.4.0 + # via aiohttp +aiohttp==3.10.5 # via 
aiobotocore -aioitertools==0.11.0 +aioitertools==0.12.0 # via aiobotocore aiosignal==1.3.1 # via aiohttp @@ -34,9 +36,9 @@ async-lru==2.0.4 # via jupyterlab async-property==0.2.2 # via python-keycloak -atpublic==4.1.0 +atpublic==5.0 # via ibis-framework -attrs==23.2.0 +attrs==24.2.0 # via # aiohttp # jsonschema @@ -46,34 +48,32 @@ azure-core==1.30.2 # azure-identity # azure-storage-blob azure-identity==1.17.1 -azure-storage-blob==12.20.0 -babel==2.15.0 +azure-storage-blob==12.22.0 +babel==2.16.0 # via # jupyterlab-server # sphinx beautifulsoup4==4.12.3 # via nbconvert -bidict==0.23.1 - # via ibis-framework -bigtree==0.19.2 +bigtree==0.21.1 bleach==6.1.0 # via nbconvert -boto3==1.34.131 +boto3==1.35.16 # via moto -botocore==1.34.131 +botocore==1.35.16 # via # aiobotocore # boto3 # moto # s3transfer -build==1.2.1 +build==1.2.2 # via # pip-tools # singlestoredb -cachetools==5.3.3 +cachetools==5.5.0 # via google-auth -cassandra-driver==3.29.1 -certifi==2024.7.4 +cassandra-driver==3.29.2 +certifi==2024.8.30 # via # elastic-transport # httpcore @@ -82,7 +82,7 @@ certifi==2024.7.4 # minio # requests # snowflake-connector-python -cffi==1.16.0 +cffi==1.17.1 # via # argon2-cffi-bindings # cryptography @@ -99,7 +99,6 @@ click==8.1.7 # geomet # great-expectations # pip-tools - # typer # uvicorn cloudpickle==3.0.0 # via dask @@ -109,9 +108,9 @@ comm==0.2.2 # via # ipykernel # ipywidgets -coverage[toml]==7.5.4 +coverage[toml]==7.6.1 # via pytest-cov -cryptography==43.0.1 +cryptography==42.0.8 # via # azure-identity # azure-storage-blob @@ -124,49 +123,45 @@ cryptography==43.0.1 # snowflake-connector-python # types-pyopenssl # types-redis -dask[dataframe]==2024.6.2 +cython==3.0.11 + # via thriftpy2 +dask[dataframe]==2024.8.2 # via dask-expr -dask-expr==1.1.6 +dask-expr==1.1.13 # via dask -db-dtypes==1.2.0 +db-dtypes==1.3.0 # via google-cloud-bigquery -debugpy==1.8.2 +debugpy==1.8.5 # via ipykernel decorator==5.1.1 # via ipython defusedxml==0.7.1 # via nbconvert -deltalake==0.18.1 
+deltalake==0.19.2 deprecation==2.1.0 # via python-keycloak dill==0.3.8 distlib==0.3.8 # via virtualenv -dnspython==2.6.1 - # via email-validator docker==7.1.0 # via testcontainers docutils==0.19 # via sphinx -duckdb==0.10.3 +duckdb==1.1.0 # via ibis-framework -elastic-transport==8.13.1 +elastic-transport==8.15.0 # via elasticsearch -elasticsearch==8.14.0 -email-validator==2.2.0 - # via fastapi +elasticsearch==8.15.1 entrypoints==0.4 # via altair execnet==2.1.1 # via pytest-xdist -executing==2.0.1 +executing==2.1.0 # via stack-data -fastapi==0.111.0 -fastapi-cli==0.0.4 - # via fastapi +fastapi==0.114.1 fastjsonschema==2.20.0 # via nbformat -filelock==3.15.4 +filelock==3.16.0 # via # snowflake-connector-python # virtualenv @@ -176,11 +171,11 @@ frozenlist==1.4.1 # via # aiohttp # aiosignal -fsspec==2023.12.2 +fsspec==2024.9.0 # via dask geomet==0.2.1.post1 # via cassandra-driver -google-api-core[grpc]==2.19.1 +google-api-core[grpc]==2.19.2 # via # google-cloud-bigquery # google-cloud-bigquery-storage @@ -188,46 +183,48 @@ google-api-core[grpc]==2.19.1 # google-cloud-core # google-cloud-datastore # google-cloud-storage -google-auth==2.30.0 +google-auth==2.34.0 # via # google-api-core + # google-cloud-bigquery # google-cloud-bigquery-storage + # google-cloud-bigtable # google-cloud-core + # google-cloud-datastore # google-cloud-storage # kubernetes -google-cloud-bigquery[pandas]==3.13.0 -google-cloud-bigquery-storage==2.25.0 -google-cloud-bigtable==2.24.0 +google-cloud-bigquery[pandas]==3.25.0 +google-cloud-bigquery-storage==2.26.0 +google-cloud-bigtable==2.26.0 google-cloud-core==2.4.1 # via # google-cloud-bigquery # google-cloud-bigtable # google-cloud-datastore # google-cloud-storage -google-cloud-datastore==2.19.0 -google-cloud-storage==2.17.0 -google-crc32c==1.5.0 +google-cloud-datastore==2.20.1 +google-cloud-storage==2.18.2 +google-crc32c==1.6.0 # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.7.1 +google-resumable-media==2.7.2 # 
via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos[grpc]==1.63.2 +googleapis-common-protos[grpc]==1.65.0 # via # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.16 -greenlet==3.0.3 +great-expectations==0.18.20 +greenlet==3.1.0 # via sqlalchemy grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable -grpcio==1.64.1 +grpcio==1.66.1 # via # google-api-core - # google-cloud-bigquery # googleapis-common-protos # grpc-google-iam-v1 # grpcio-health-checking @@ -235,38 +232,36 @@ grpcio==1.64.1 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-health-checking==1.62.2 -grpcio-reflection==1.62.2 -grpcio-status==1.62.2 +grpcio-health-checking==1.62.3 +grpcio-reflection==1.62.3 +grpcio-status==1.62.3 # via google-api-core -grpcio-testing==1.62.2 -grpcio-tools==1.62.2 -gunicorn==22.0.0 +grpcio-testing==1.62.3 +grpcio-tools==1.62.3 +gunicorn==23.0.0 h11==0.14.0 # via # httpcore # uvicorn happybase==1.2.0 -hazelcast-python-client==5.4.0 -hiredis==2.3.2 +hazelcast-python-client==5.5.0 +hiredis==2.4.0 httpcore==1.0.5 # via httpx httptools==0.6.1 # via uvicorn -httpx==0.27.0 +httpx==0.27.2 # via - # fastapi # jupyterlab # python-keycloak -ibis-framework[duckdb]==9.1.0 +ibis-framework[duckdb]==9.4.0 # via ibis-substrait -ibis-substrait==4.0.0 -identify==2.5.36 +ibis-substrait==4.0.1 +identify==2.6.0 # via pre-commit -idna==3.7 +idna==3.8 # via # anyio - # email-validator # httpx # jsonschema # requests @@ -274,18 +269,18 @@ idna==3.7 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==8.0.0 +importlib-metadata==8.5.0 # via dask iniconfig==2.0.0 # via pytest -ipykernel==6.29.4 +ipykernel==6.29.5 # via jupyterlab -ipython==8.25.0 +ipython==8.27.0 # via # great-expectations # ipykernel # ipywidgets -ipywidgets==8.1.3 +ipywidgets==8.1.5 # via great-expectations isodate==0.6.1 # via azure-storage-blob @@ -296,7 +291,6 @@ jedi==0.19.1 jinja2==3.1.4 # via # altair - # fastapi # great-expectations # jupyter-server # 
jupyterlab @@ -316,7 +310,7 @@ jsonpointer==3.0.0 # via # jsonpatch # jsonschema -jsonschema[format-nongpl]==4.22.0 +jsonschema[format-nongpl]==4.23.0 # via # altair # great-expectations @@ -343,7 +337,7 @@ jupyter-events==0.10.0 # via jupyter-server jupyter-lsp==2.2.5 # via jupyterlab -jupyter-server==2.14.1 +jupyter-server==2.14.2 # via # jupyter-lsp # jupyterlab @@ -356,18 +350,18 @@ jupyterlab==4.2.5 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert -jupyterlab-server==2.27.2 +jupyterlab-server==2.27.3 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.11 +jupyterlab-widgets==3.0.13 # via ipywidgets jwcrypto==1.5.6 # via python-keycloak kubernetes==20.13.0 locket==1.0.0 # via partd -makefun==1.15.2 +makefun==1.15.4 # via great-expectations markdown-it-py==3.0.0 # via rich @@ -376,7 +370,7 @@ markupsafe==2.1.5 # jinja2 # nbconvert # werkzeug -marshmallow==3.21.3 +marshmallow==3.22.0 # via great-expectations matplotlib-inline==0.1.7 # via @@ -392,17 +386,17 @@ mistune==3.0.2 mmh3==4.1.0 mock==2.0.0 moto==4.2.14 -msal==1.29.0 +msal==1.31.0 # via # azure-identity # msal-extensions msal-extensions==1.2.0 # via azure-identity -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl -mypy==1.10.1 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -439,8 +433,6 @@ numpy==1.26.4 # scipy oauthlib==3.2.2 # via requests-oauthlib -orjson==3.10.5 - # via fastapi overrides==7.7.0 # via jupyter-server packaging==24.1 @@ -482,11 +474,11 @@ parsy==2.1 # via ibis-framework partd==1.4.2 # via dask -pbr==6.0.0 +pbr==6.1.0 # via mock pexpect==4.9.0 # via ipython -pip==24.1.1 +pip==24.2 # via pip-tools pip-tools==7.4.1 platformdirs==3.11.0 @@ -498,7 +490,7 @@ pluggy==1.5.0 # via pytest ply==3.11 # via thriftpy2 -portalocker==2.10.0 +portalocker==2.10.1 # via msal-extensions pre-commit==3.3.1 prometheus-client==0.20.0 @@ -508,14 +500,12 @@ prompt-toolkit==3.0.47 proto-plus==1.24.0 # via # google-api-core - # google-cloud-bigquery # 
google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-datastore -protobuf==4.25.3 +protobuf==4.25.4 # via # google-api-core - # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-datastore @@ -531,8 +521,8 @@ protobuf==4.25.3 # substrait psutil==5.9.0 # via ipykernel -psycopg[binary, pool]==3.1.19 -psycopg-binary==3.1.19 +psycopg[binary, pool]==3.2.1 +psycopg-binary==3.2.1 # via psycopg psycopg-pool==3.2.2 # via psycopg @@ -540,14 +530,14 @@ ptyprocess==0.7.0 # via # pexpect # terminado -pure-eval==0.2.2 +pure-eval==0.2.3 # via stack-data py==1.11.0 py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark -pyarrow==15.0.2 +pyarrow==17.0.0 # via # dask-expr # db-dtypes @@ -556,23 +546,21 @@ pyarrow==15.0.2 # ibis-framework # snowflake-connector-python pyarrow-hotfix==0.6 - # via - # deltalake - # ibis-framework -pyasn1==0.6.0 + # via ibis-framework +pyasn1==0.6.1 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 +pyasn1-modules==0.4.1 # via google-auth pybindgen==0.22.1 pycparser==2.22 # via cffi -pydantic==2.7.4 +pydantic==2.9.1 # via # fastapi # great-expectations -pydantic-core==2.18.4 +pydantic-core==2.23.3 # via pydantic pygments==2.18.0 # via @@ -580,23 +568,23 @@ pygments==2.18.0 # nbconvert # rich # sphinx -pyjwt[crypto]==2.8.0 +pyjwt[crypto]==2.9.0 # via # msal # singlestoredb # snowflake-connector-python -pymssql==2.3.0 +pymssql==2.3.1 pymysql==1.1.1 pyodbc==5.1.0 -pyopenssl==24.1.0 +pyopenssl==24.2.1 # via snowflake-connector-python -pyparsing==3.1.2 +pyparsing==3.1.4 # via great-expectations pyproject-hooks==1.1.0 # via # build # pip-tools -pyspark==3.5.1 +pyspark==3.5.2 pytest==7.4.4 # via # pytest-benchmark @@ -632,16 +620,14 @@ python-dotenv==1.0.1 python-json-logger==2.0.7 # via jupyter-events python-keycloak==4.2.2 -python-multipart==0.0.9 - # via fastapi -pytz==2024.1 +pytz==2024.2 # via # great-expectations # ibis-framework # pandas # snowflake-connector-python # trino 
-pyyaml==6.0.1 +pyyaml==6.0.2 # via # dask # ibis-substrait @@ -650,7 +636,7 @@ pyyaml==6.0.1 # pre-commit # responses # uvicorn -pyzmq==26.0.3 +pyzmq==26.2.0 # via # ipykernel # jupyter-client @@ -661,7 +647,7 @@ referencing==0.35.1 # jsonschema # jsonschema-specifications # jupyter-events -regex==2024.5.15 +regex==2024.7.24 # via parsimonious requests==2.32.3 # via @@ -697,35 +683,33 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.7.1 - # via - # ibis-framework - # typer -rpds-py==0.18.1 +rich==13.8.1 + # via ibis-framework +rpds-py==0.20.0 # via # jsonschema # referencing rsa==4.9 # via google-auth -ruamel-yaml==0.17.17 +ruamel-yaml==0.17.40 # via great-expectations -ruff==0.4.10 +ruamel-yaml-clib==0.2.8 + # via ruamel-yaml +ruff==0.6.4 s3transfer==0.10.2 # via boto3 -scipy==1.14.0 +scipy==1.14.1 # via great-expectations send2trash==1.8.3 # via jupyter-server -setuptools==70.1.1 +setuptools==74.1.2 # via # grpcio-tools # jupyterlab # kubernetes # pip-tools # singlestoredb -shellingham==1.5.4 - # via typer -singlestoredb==1.4.0 +singlestoredb==1.6.3 six==1.16.0 # via # asttokens @@ -745,49 +729,49 @@ sniffio==1.3.1 # httpx snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==3.11.0 +snowflake-connector-python[pandas]==3.12.1 sortedcontainers==2.4.0 # via snowflake-connector-python -soupsieve==2.5 +soupsieve==2.6 # via beautifulsoup4 sphinx==6.2.1 -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.6 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.7 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.10 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx -sqlalchemy[mypy]==2.0.31 -sqlglot==25.1.0 +sqlalchemy[mypy]==2.0.34 +sqlglot==25.18.0 # via ibis-framework sqlite-vec==0.1.1 -sqlparams==6.0.1 
+sqlparams==6.1.0 # via singlestoredb stack-data==0.6.3 # via ipython -starlette==0.37.2 +starlette==0.38.5 # via fastapi -substrait==0.19.0 +substrait==0.22.0 # via ibis-substrait tabulate==0.9.0 -tenacity==8.4.2 +tenacity==8.5.0 terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals testcontainers==4.4.0 -thriftpy2==0.5.1 +thriftpy2==0.5.2 # via happybase tinycss2==1.3.0 # via nbconvert toml==0.10.2 -tomlkit==0.12.5 +tomlkit==0.13.2 # via snowflake-connector-python toolz==0.12.1 # via @@ -803,7 +787,7 @@ tornado==6.4.1 # jupyterlab # notebook # terminado -tqdm==4.66.4 +tqdm==4.66.5 # via great-expectations traitlets==5.14.3 # via @@ -820,24 +804,22 @@ traitlets==5.14.3 # nbclient # nbconvert # nbformat -trino==0.328.0 +trino==0.329.0 typeguard==4.3.0 -typer==0.12.3 - # via fastapi-cli types-cffi==1.16.0.20240331 # via types-pyopenssl types-protobuf==3.19.22 # via mypy-protobuf types-pymysql==1.1.0.20240524 -types-pyopenssl==24.1.0.20240425 +types-pyopenssl==24.1.0.20240722 # via types-redis -types-python-dateutil==2.9.0.20240316 +types-python-dateutil==2.9.0.20240906 # via arrow types-pytz==2024.1.0.20240417 -types-pyyaml==6.0.12.20240311 -types-redis==4.6.0.20240425 +types-pyyaml==6.0.12.20240808 +types-redis==4.6.0.20240903 types-requests==2.30.0.0 -types-setuptools==70.1.0.20240627 +types-setuptools==74.1.0.20240907 # via types-cffi types-tabulate==0.9.0.20240106 types-urllib3==1.26.25.14 @@ -861,18 +843,15 @@ typing-extensions==4.12.2 # sqlalchemy # testcontainers # typeguard - # typer tzdata==2024.1 # via pandas tzlocal==5.2 # via # great-expectations # trino -ujson==5.10.0 - # via fastapi uri-template==1.3.0 # via jsonschema -urllib3==1.26.19 +urllib3==2.2.2 # via # botocore # docker @@ -883,17 +862,16 @@ urllib3==1.26.19 # requests # responses # testcontainers -uvicorn[standard]==0.30.1 - # via fastapi -uvloop==0.19.0 +uvicorn[standard]==0.30.6 +uvloop==0.20.0 # via uvicorn virtualenv==20.23.0 # via pre-commit -watchfiles==0.22.0 
+watchfiles==0.24.0 # via uvicorn wcwidth==0.2.13 # via prompt-toolkit -webcolors==24.6.0 +webcolors==24.8.0 # via jsonschema webencodings==0.5.1 # via @@ -903,15 +881,15 @@ websocket-client==1.8.0 # via # jupyter-server # kubernetes -websockets==12.0 +websockets==13.0.1 # via uvicorn -werkzeug==3.0.3 +werkzeug==3.0.4 # via moto -wheel==0.43.0 +wheel==0.44.0 # via # pip-tools # singlestoredb -widgetsnbextension==4.0.11 +widgetsnbextension==4.0.13 # via ipywidgets wrapt==1.16.0 # via @@ -919,7 +897,7 @@ wrapt==1.16.0 # testcontainers xmltodict==0.13.0 # via moto -yarl==1.9.4 +yarl==1.11.1 # via aiohttp -zipp==3.19.1 +zipp==3.20.1 # via importlib-metadata diff --git a/sdk/python/requirements/py3.11-requirements.txt b/sdk/python/requirements/py3.11-requirements.txt index 9f6dff962b..611a0cedca 100644 --- a/sdk/python/requirements/py3.11-requirements.txt +++ b/sdk/python/requirements/py3.11-requirements.txt @@ -1,23 +1,20 @@ # This file was autogenerated by uv via the following command: # uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.11-requirements.txt -annotated-types==0.6.0 +annotated-types==0.7.0 # via pydantic -anyio==4.3.0 +anyio==4.4.0 # via - # httpx # starlette # watchfiles -attrs==23.2.0 +attrs==24.2.0 # via # jsonschema # referencing -bigtree==0.19.2 +bigtree==0.21.1 cachetools==5.5.0 # via google-auth -certifi==2024.7.4 +certifi==2024.8.30 # via - # httpcore - # httpx # kubernetes # requests charset-normalizer==3.3.2 @@ -25,65 +22,44 @@ charset-normalizer==3.3.2 click==8.1.7 # via # dask - # typer # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 -dask[dataframe]==2024.5.0 +dask[dataframe]==2024.8.2 # via dask-expr -dask-expr==1.1.0 +dask-expr==1.1.13 # via dask dill==0.3.8 -dnspython==2.6.1 - # via email-validator -email-validator==2.1.1 - # via fastapi -fastapi==0.111.0 - # via fastapi-cli -fastapi-cli==0.0.2 - # via fastapi -fsspec==2024.3.1 +fastapi==0.114.1 +fsspec==2024.9.0 # via dask 
google-auth==2.34.0 # via kubernetes -greenlet==3.0.3 +greenlet==3.1.0 # via sqlalchemy -gunicorn==22.0.0 +gunicorn==23.0.0 h11==0.14.0 - # via - # httpcore - # uvicorn -httpcore==1.0.5 - # via httpx + # via uvicorn httptools==0.6.1 # via uvicorn -httpx==0.27.0 - # via fastapi -idna==3.7 +idna==3.8 # via # anyio - # email-validator - # httpx # requests -importlib-metadata==7.1.0 +importlib-metadata==8.5.0 # via dask jinja2==3.1.4 - # via fastapi -jsonschema==4.22.0 +jsonschema==4.23.0 jsonschema-specifications==2023.12.1 # via jsonschema kubernetes==20.13.0 locket==1.0.0 # via partd -markdown-it-py==3.0.0 - # via rich markupsafe==2.1.5 # via jinja2 -mdurl==0.1.2 - # via markdown-it-py mmh3==4.1.0 -mypy==1.10.0 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -95,9 +71,7 @@ numpy==1.26.4 # pyarrow oauthlib==3.2.2 # via requests-oauthlib -orjson==3.10.3 - # via fastapi -packaging==24.0 +packaging==24.1 # via # dask # gunicorn @@ -108,23 +82,22 @@ pandas==2.2.2 partd==1.4.2 # via dask prometheus-client==0.20.0 -protobuf==4.25.3 +protobuf==4.25.4 # via mypy-protobuf psutil==6.0.0 -pyarrow==16.0.0 +pyarrow==17.0.0 # via dask-expr -pyasn1==0.6.0 +pyasn1==0.6.1 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 +pyasn1-modules==0.4.1 # via google-auth -pydantic==2.7.1 +pydantic==2.9.1 # via fastapi -pydantic-core==2.18.2 +pydantic-core==2.23.3 # via pydantic pygments==2.18.0 - # via rich pyjwt==2.9.0 python-dateutil==2.9.0.post0 # via @@ -132,11 +105,9 @@ python-dateutil==2.9.0.post0 # pandas python-dotenv==1.0.1 # via uvicorn -python-multipart==0.0.9 - # via fastapi -pytz==2024.1 +pytz==2024.2 # via pandas -pyyaml==6.0.1 +pyyaml==6.0.2 # via # dask # kubernetes @@ -145,49 +116,41 @@ referencing==0.35.1 # via # jsonschema # jsonschema-specifications -requests==2.31.0 +requests==2.32.3 # via # kubernetes # requests-oauthlib requests-oauthlib==2.0.0 # via kubernetes -rich==13.7.1 - # via typer -rpds-py==0.18.1 +rpds-py==0.20.0 # via # jsonschema # 
referencing rsa==4.9 # via google-auth -setuptools==73.0.1 +setuptools==74.1.2 # via kubernetes -shellingham==1.5.4 - # via typer six==1.16.0 # via # kubernetes # python-dateutil sniffio==1.3.1 - # via - # anyio - # httpx -sqlalchemy[mypy]==2.0.30 -starlette==0.37.2 + # via anyio +sqlalchemy[mypy]==2.0.34 +starlette==0.38.5 # via fastapi tabulate==0.9.0 -tenacity==8.3.0 +tenacity==8.5.0 toml==0.10.2 toolz==0.12.1 # via # dask # partd -tqdm==4.66.4 -typeguard==4.2.1 -typer==0.12.3 - # via fastapi-cli -types-protobuf==5.26.0.20240422 +tqdm==4.66.5 +typeguard==4.3.0 +types-protobuf==5.27.0.20240907 # via mypy-protobuf -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # fastapi # mypy @@ -195,26 +158,20 @@ typing-extensions==4.11.0 # pydantic-core # sqlalchemy # typeguard - # typer tzdata==2024.1 # via pandas -ujson==5.9.0 - # via fastapi -urllib3==2.2.1 +urllib3==2.2.2 # via # kubernetes # requests -uvicorn[standard]==0.29.0 - # via - # fastapi - # fastapi-cli -uvloop==0.19.0 +uvicorn[standard]==0.30.6 +uvloop==0.20.0 # via uvicorn -watchfiles==0.21.0 +watchfiles==0.24.0 # via uvicorn websocket-client==1.8.0 # via kubernetes -websockets==12.0 +websockets==13.0.1 # via uvicorn -zipp==3.19.1 +zipp==3.20.1 # via importlib-metadata diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index f32f6790d3..f4e7c795fa 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -1,9 +1,11 @@ # This file was autogenerated by uv via the following command: # uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.9-ci-requirements.txt -aiobotocore==2.13.1 -aiohttp==3.9.5 +aiobotocore==2.15.0 +aiohappyeyeballs==2.4.0 + # via aiohttp +aiohttp==3.10.5 # via aiobotocore -aioitertools==0.11.0 +aioitertools==0.12.0 # via aiobotocore aiosignal==1.3.1 # via aiohttp @@ -40,7 +42,7 @@ async-timeout==4.0.3 # redis 
atpublic==4.1.0 # via ibis-framework -attrs==23.2.0 +attrs==24.2.0 # via # aiohttp # jsonschema @@ -50,8 +52,8 @@ azure-core==1.30.2 # azure-identity # azure-storage-blob azure-identity==1.17.1 -azure-storage-blob==12.20.0 -babel==2.15.0 +azure-storage-blob==12.22.0 +babel==2.16.0 # via # jupyterlab-server # sphinx @@ -59,25 +61,25 @@ beautifulsoup4==4.12.3 # via nbconvert bidict==0.23.1 # via ibis-framework -bigtree==0.19.2 +bigtree==0.21.1 bleach==6.1.0 # via nbconvert -boto3==1.34.131 +boto3==1.35.16 # via moto -botocore==1.34.131 +botocore==1.35.16 # via # aiobotocore # boto3 # moto # s3transfer -build==1.2.1 +build==1.2.2 # via # pip-tools # singlestoredb -cachetools==5.3.3 +cachetools==5.5.0 # via google-auth -cassandra-driver==3.29.1 -certifi==2024.7.4 +cassandra-driver==3.29.2 +certifi==2024.8.30 # via # elastic-transport # httpcore @@ -86,7 +88,7 @@ certifi==2024.7.4 # minio # requests # snowflake-connector-python -cffi==1.16.0 +cffi==1.17.1 # via # argon2-cffi-bindings # cryptography @@ -103,7 +105,6 @@ click==8.1.7 # geomet # great-expectations # pip-tools - # typer # uvicorn cloudpickle==3.0.0 # via dask @@ -113,9 +114,9 @@ comm==0.2.2 # via # ipykernel # ipywidgets -coverage[toml]==7.5.4 +coverage[toml]==7.6.1 # via pytest-cov -cryptography==43.0.1 +cryptography==42.0.8 # via # azure-identity # azure-storage-blob @@ -128,54 +129,50 @@ cryptography==43.0.1 # snowflake-connector-python # types-pyopenssl # types-redis -dask[dataframe]==2024.6.2 +cython==3.0.11 + # via thriftpy2 +dask[dataframe]==2024.8.0 # via dask-expr -dask-expr==1.1.6 +dask-expr==1.1.10 # via dask -db-dtypes==1.2.0 +db-dtypes==1.3.0 # via google-cloud-bigquery -debugpy==1.8.2 +debugpy==1.8.5 # via ipykernel decorator==5.1.1 # via ipython defusedxml==0.7.1 # via nbconvert -deltalake==0.18.1 +deltalake==0.19.2 deprecation==2.1.0 # via python-keycloak dill==0.3.8 distlib==0.3.8 # via virtualenv -dnspython==2.6.1 - # via email-validator docker==7.1.0 # via testcontainers docutils==0.19 # 
via sphinx duckdb==0.10.3 # via ibis-framework -elastic-transport==8.13.1 +elastic-transport==8.15.0 # via elasticsearch -elasticsearch==8.14.0 -email-validator==2.2.0 - # via fastapi +elasticsearch==8.15.1 entrypoints==0.4 # via altair -exceptiongroup==1.2.1 +exceptiongroup==1.2.2 # via # anyio # ipython # pytest execnet==2.1.1 # via pytest-xdist -executing==2.0.1 +executing==2.1.0 # via stack-data -fastapi==0.111.0 -fastapi-cli==0.0.4 - # via fastapi +fastapi==0.114.1 fastjsonschema==2.20.0 # via nbformat -filelock==3.15.4 +filelock==3.16.0 # via # snowflake-connector-python # virtualenv @@ -185,11 +182,11 @@ frozenlist==1.4.1 # via # aiohttp # aiosignal -fsspec==2023.12.2 +fsspec==2024.9.0 # via dask geomet==0.2.1.post1 # via cassandra-driver -google-api-core[grpc]==2.19.1 +google-api-core[grpc]==2.19.2 # via # google-cloud-bigquery # google-cloud-bigquery-storage @@ -197,46 +194,48 @@ google-api-core[grpc]==2.19.1 # google-cloud-core # google-cloud-datastore # google-cloud-storage -google-auth==2.30.0 +google-auth==2.34.0 # via # google-api-core + # google-cloud-bigquery # google-cloud-bigquery-storage + # google-cloud-bigtable # google-cloud-core + # google-cloud-datastore # google-cloud-storage # kubernetes -google-cloud-bigquery[pandas]==3.13.0 -google-cloud-bigquery-storage==2.25.0 -google-cloud-bigtable==2.24.0 +google-cloud-bigquery[pandas]==3.25.0 +google-cloud-bigquery-storage==2.26.0 +google-cloud-bigtable==2.26.0 google-cloud-core==2.4.1 # via # google-cloud-bigquery # google-cloud-bigtable # google-cloud-datastore # google-cloud-storage -google-cloud-datastore==2.19.0 -google-cloud-storage==2.17.0 -google-crc32c==1.5.0 +google-cloud-datastore==2.20.1 +google-cloud-storage==2.18.2 +google-crc32c==1.6.0 # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.7.1 +google-resumable-media==2.7.2 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos[grpc]==1.63.2 +googleapis-common-protos[grpc]==1.65.0 # 
via # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.16 -greenlet==3.0.3 +great-expectations==0.18.20 +greenlet==3.1.0 # via sqlalchemy grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable -grpcio==1.64.1 +grpcio==1.66.1 # via # google-api-core - # google-cloud-bigquery # googleapis-common-protos # grpc-google-iam-v1 # grpcio-health-checking @@ -244,38 +243,36 @@ grpcio==1.64.1 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-health-checking==1.62.2 -grpcio-reflection==1.62.2 -grpcio-status==1.62.2 +grpcio-health-checking==1.62.3 +grpcio-reflection==1.62.3 +grpcio-status==1.62.3 # via google-api-core -grpcio-testing==1.62.2 -grpcio-tools==1.62.2 -gunicorn==22.0.0 +grpcio-testing==1.62.3 +grpcio-tools==1.62.3 +gunicorn==23.0.0 h11==0.14.0 # via # httpcore # uvicorn happybase==1.2.0 -hazelcast-python-client==5.4.0 -hiredis==2.3.2 +hazelcast-python-client==5.5.0 +hiredis==2.4.0 httpcore==1.0.5 # via httpx httptools==0.6.1 # via uvicorn -httpx==0.27.0 +httpx==0.27.2 # via - # fastapi # jupyterlab # python-keycloak ibis-framework[duckdb]==9.0.0 # via ibis-substrait -ibis-substrait==4.0.0 -identify==2.5.36 +ibis-substrait==4.0.1 +identify==2.6.0 # via pre-commit -idna==3.7 +idna==3.8 # via # anyio - # email-validator # httpx # jsonschema # requests @@ -283,7 +280,7 @@ idna==3.7 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==8.0.0 +importlib-metadata==8.5.0 # via # build # dask @@ -296,14 +293,14 @@ importlib-metadata==8.0.0 # typeguard iniconfig==2.0.0 # via pytest -ipykernel==6.29.4 +ipykernel==6.29.5 # via jupyterlab ipython==8.18.1 # via # great-expectations # ipykernel # ipywidgets -ipywidgets==8.1.3 +ipywidgets==8.1.5 # via great-expectations isodate==0.6.1 # via azure-storage-blob @@ -314,7 +311,6 @@ jedi==0.19.1 jinja2==3.1.4 # via # altair - # fastapi # great-expectations # jupyter-server # jupyterlab @@ -334,7 +330,7 @@ jsonpointer==3.0.0 # via # jsonpatch # jsonschema -jsonschema[format-nongpl]==4.22.0 
+jsonschema[format-nongpl]==4.23.0 # via # altair # great-expectations @@ -361,7 +357,7 @@ jupyter-events==0.10.0 # via jupyter-server jupyter-lsp==2.2.5 # via jupyterlab -jupyter-server==2.14.1 +jupyter-server==2.14.2 # via # jupyter-lsp # jupyterlab @@ -374,18 +370,18 @@ jupyterlab==4.2.5 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert -jupyterlab-server==2.27.2 +jupyterlab-server==2.27.3 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.11 +jupyterlab-widgets==3.0.13 # via ipywidgets jwcrypto==1.5.6 # via python-keycloak kubernetes==20.13.0 locket==1.0.0 # via partd -makefun==1.15.2 +makefun==1.15.4 # via great-expectations markdown-it-py==3.0.0 # via rich @@ -394,7 +390,7 @@ markupsafe==2.1.5 # jinja2 # nbconvert # werkzeug -marshmallow==3.21.3 +marshmallow==3.22.0 # via great-expectations matplotlib-inline==0.1.7 # via @@ -410,17 +406,17 @@ mistune==3.0.2 mmh3==4.1.0 mock==2.0.0 moto==4.2.14 -msal==1.29.0 +msal==1.31.0 # via # azure-identity # msal-extensions msal-extensions==1.2.0 # via azure-identity -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl -mypy==1.10.1 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -457,8 +453,6 @@ numpy==1.26.4 # scipy oauthlib==3.2.2 # via requests-oauthlib -orjson==3.10.5 - # via fastapi overrides==7.7.0 # via jupyter-server packaging==24.1 @@ -500,11 +494,11 @@ parsy==2.1 # via ibis-framework partd==1.4.2 # via dask -pbr==6.0.0 +pbr==6.1.0 # via mock pexpect==4.9.0 # via ipython -pip==24.1.1 +pip==24.2 # via pip-tools pip-tools==7.4.1 platformdirs==3.11.0 @@ -516,7 +510,7 @@ pluggy==1.5.0 # via pytest ply==3.11 # via thriftpy2 -portalocker==2.10.0 +portalocker==2.10.1 # via msal-extensions pre-commit==3.3.1 prometheus-client==0.20.0 @@ -526,14 +520,12 @@ prompt-toolkit==3.0.47 proto-plus==1.24.0 # via # google-api-core - # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-datastore -protobuf==4.25.3 +protobuf==4.25.4 # via # google-api-core 
- # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-datastore @@ -549,8 +541,8 @@ protobuf==4.25.3 # substrait psutil==5.9.0 # via ipykernel -psycopg[binary, pool]==3.1.18 -psycopg-binary==3.1.18 +psycopg[binary, pool]==3.2.1 +psycopg-binary==3.2.1 # via psycopg psycopg-pool==3.2.2 # via psycopg @@ -558,14 +550,14 @@ ptyprocess==0.7.0 # via # pexpect # terminado -pure-eval==0.2.2 +pure-eval==0.2.3 # via stack-data py==1.11.0 py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark -pyarrow==15.0.2 +pyarrow==16.1.0 # via # dask-expr # db-dtypes @@ -574,23 +566,21 @@ pyarrow==15.0.2 # ibis-framework # snowflake-connector-python pyarrow-hotfix==0.6 - # via - # deltalake - # ibis-framework -pyasn1==0.6.0 + # via ibis-framework +pyasn1==0.6.1 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 +pyasn1-modules==0.4.1 # via google-auth pybindgen==0.22.1 pycparser==2.22 # via cffi -pydantic==2.7.4 +pydantic==2.9.1 # via # fastapi # great-expectations -pydantic-core==2.18.4 +pydantic-core==2.23.3 # via pydantic pygments==2.18.0 # via @@ -598,23 +588,23 @@ pygments==2.18.0 # nbconvert # rich # sphinx -pyjwt[crypto]==2.8.0 +pyjwt[crypto]==2.9.0 # via # msal # singlestoredb # snowflake-connector-python -pymssql==2.3.0 +pymssql==2.3.1 pymysql==1.1.1 pyodbc==5.1.0 -pyopenssl==24.1.0 +pyopenssl==24.2.1 # via snowflake-connector-python -pyparsing==3.1.2 +pyparsing==3.1.4 # via great-expectations pyproject-hooks==1.1.0 # via # build # pip-tools -pyspark==3.5.1 +pyspark==3.5.2 pytest==7.4.4 # via # pytest-benchmark @@ -650,16 +640,14 @@ python-dotenv==1.0.1 python-json-logger==2.0.7 # via jupyter-events python-keycloak==4.2.2 -python-multipart==0.0.9 - # via fastapi -pytz==2024.1 +pytz==2024.2 # via # great-expectations # ibis-framework # pandas # snowflake-connector-python # trino -pyyaml==6.0.1 +pyyaml==6.0.2 # via # dask # ibis-substrait @@ -668,7 +656,7 @@ pyyaml==6.0.1 # pre-commit # responses # uvicorn -pyzmq==26.0.3 
+pyzmq==26.2.0 # via # ipykernel # jupyter-client @@ -679,7 +667,7 @@ referencing==0.35.1 # jsonschema # jsonschema-specifications # jupyter-events -regex==2024.5.15 +regex==2024.7.24 # via parsimonious requests==2.32.3 # via @@ -715,37 +703,33 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.7.1 - # via - # ibis-framework - # typer -rpds-py==0.18.1 +rich==13.8.1 + # via ibis-framework +rpds-py==0.20.0 # via # jsonschema # referencing rsa==4.9 # via google-auth -ruamel-yaml==0.17.17 +ruamel-yaml==0.17.40 # via great-expectations ruamel-yaml-clib==0.2.8 # via ruamel-yaml -ruff==0.4.10 +ruff==0.6.4 s3transfer==0.10.2 # via boto3 scipy==1.13.1 # via great-expectations send2trash==1.8.3 # via jupyter-server -setuptools==70.1.1 +setuptools==74.1.2 # via # grpcio-tools # jupyterlab # kubernetes # pip-tools # singlestoredb -shellingham==1.5.4 - # via typer -singlestoredb==1.4.0 +singlestoredb==1.6.3 six==1.16.0 # via # asttokens @@ -765,44 +749,44 @@ sniffio==1.3.1 # httpx snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==3.11.0 +snowflake-connector-python[pandas]==3.12.1 sortedcontainers==2.4.0 # via snowflake-connector-python -soupsieve==2.5 +soupsieve==2.6 # via beautifulsoup4 sphinx==6.2.1 -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.6 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.7 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.10 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx -sqlalchemy[mypy]==2.0.31 +sqlalchemy[mypy]==2.0.34 sqlglot==23.12.2 # via ibis-framework sqlite-vec==0.1.1 -sqlparams==6.0.1 +sqlparams==6.1.0 # via singlestoredb stack-data==0.6.3 # via ipython -starlette==0.37.2 +starlette==0.38.5 # via fastapi -substrait==0.19.0 +substrait==0.22.0 # via ibis-substrait 
tabulate==0.9.0 -tenacity==8.4.2 +tenacity==8.5.0 terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals testcontainers==4.4.0 -thriftpy2==0.5.1 +thriftpy2==0.5.2 # via happybase tinycss2==1.3.0 # via nbconvert @@ -817,7 +801,7 @@ tomli==2.0.1 # pytest # pytest-env # singlestoredb -tomlkit==0.12.5 +tomlkit==0.13.2 # via snowflake-connector-python toolz==0.12.1 # via @@ -833,7 +817,7 @@ tornado==6.4.1 # jupyterlab # notebook # terminado -tqdm==4.66.4 +tqdm==4.66.5 # via great-expectations traitlets==5.14.3 # via @@ -850,24 +834,22 @@ traitlets==5.14.3 # nbclient # nbconvert # nbformat -trino==0.328.0 +trino==0.329.0 typeguard==4.3.0 -typer==0.12.3 - # via fastapi-cli types-cffi==1.16.0.20240331 # via types-pyopenssl types-protobuf==3.19.22 # via mypy-protobuf types-pymysql==1.1.0.20240524 -types-pyopenssl==24.1.0.20240425 +types-pyopenssl==24.1.0.20240722 # via types-redis -types-python-dateutil==2.9.0.20240316 +types-python-dateutil==2.9.0.20240906 # via arrow types-pytz==2024.1.0.20240417 -types-pyyaml==6.0.12.20240311 -types-redis==4.6.0.20240425 +types-pyyaml==6.0.12.20240808 +types-redis==4.6.0.20240903 types-requests==2.30.0.0 -types-setuptools==70.1.0.20240627 +types-setuptools==74.1.0.20240907 # via types-cffi types-tabulate==0.9.0.20240106 types-urllib3==1.26.25.14 @@ -885,6 +867,7 @@ typing-extensions==4.12.2 # ibis-framework # ipython # jwcrypto + # multidict # mypy # psycopg # psycopg-pool @@ -895,7 +878,6 @@ typing-extensions==4.12.2 # starlette # testcontainers # typeguard - # typer # uvicorn tzdata==2024.1 # via pandas @@ -903,11 +885,9 @@ tzlocal==5.2 # via # great-expectations # trino -ujson==5.10.0 - # via fastapi uri-template==1.3.0 # via jsonschema -urllib3==1.26.19 +urllib3==1.26.20 # via # botocore # docker @@ -919,17 +899,16 @@ urllib3==1.26.19 # responses # snowflake-connector-python # testcontainers -uvicorn[standard]==0.30.1 - # via fastapi -uvloop==0.19.0 +uvicorn[standard]==0.30.6 +uvloop==0.20.0 # via uvicorn 
virtualenv==20.23.0 # via pre-commit -watchfiles==0.22.0 +watchfiles==0.24.0 # via uvicorn wcwidth==0.2.13 # via prompt-toolkit -webcolors==24.6.0 +webcolors==24.8.0 # via jsonschema webencodings==0.5.1 # via @@ -939,15 +918,15 @@ websocket-client==1.8.0 # via # jupyter-server # kubernetes -websockets==12.0 +websockets==13.0.1 # via uvicorn -werkzeug==3.0.3 +werkzeug==3.0.4 # via moto -wheel==0.43.0 +wheel==0.44.0 # via # pip-tools # singlestoredb -widgetsnbextension==4.0.11 +widgetsnbextension==4.0.13 # via ipywidgets wrapt==1.16.0 # via @@ -955,7 +934,7 @@ wrapt==1.16.0 # testcontainers xmltodict==0.13.0 # via moto -yarl==1.9.4 +yarl==1.11.1 # via aiohttp -zipp==3.19.1 +zipp==3.20.1 # via importlib-metadata diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 960eaa6554..0ae2fcf9d6 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -1,23 +1,20 @@ # This file was autogenerated by uv via the following command: # uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.9-requirements.txt -annotated-types==0.6.0 +annotated-types==0.7.0 # via pydantic -anyio==4.3.0 +anyio==4.4.0 # via - # httpx # starlette # watchfiles -attrs==23.2.0 +attrs==24.2.0 # via # jsonschema # referencing -bigtree==0.19.2 +bigtree==0.21.1 cachetools==5.5.0 # via google-auth -certifi==2024.7.4 +certifi==2024.8.30 # via - # httpcore - # httpx # kubernetes # requests charset-normalizer==3.3.2 @@ -25,69 +22,48 @@ charset-normalizer==3.3.2 click==8.1.7 # via # dask - # typer # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 -dask[dataframe]==2024.5.0 +dask[dataframe]==2024.8.0 # via dask-expr -dask-expr==1.1.0 +dask-expr==1.1.10 # via dask dill==0.3.8 -dnspython==2.6.1 - # via email-validator -email-validator==2.1.1 - # via fastapi exceptiongroup==1.2.2 # via anyio -fastapi==0.111.0 - # via fastapi-cli -fastapi-cli==0.0.2 - # via 
fastapi -fsspec==2024.3.1 +fastapi==0.114.1 +fsspec==2024.9.0 # via dask google-auth==2.34.0 # via kubernetes -greenlet==3.0.3 +greenlet==3.1.0 # via sqlalchemy -gunicorn==22.0.0 +gunicorn==23.0.0 h11==0.14.0 - # via - # httpcore - # uvicorn -httpcore==1.0.5 - # via httpx + # via uvicorn httptools==0.6.1 # via uvicorn -httpx==0.27.0 - # via fastapi -idna==3.7 +idna==3.8 # via # anyio - # email-validator - # httpx # requests -importlib-metadata==8.2.0 +importlib-metadata==8.5.0 # via # dask # typeguard jinja2==3.1.4 - # via fastapi -jsonschema==4.22.0 +jsonschema==4.23.0 jsonschema-specifications==2023.12.1 # via jsonschema kubernetes==20.13.0 locket==1.0.0 # via partd -markdown-it-py==3.0.0 - # via rich markupsafe==2.1.5 # via jinja2 -mdurl==0.1.2 - # via markdown-it-py mmh3==4.1.0 -mypy==1.10.0 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -99,9 +75,7 @@ numpy==1.26.4 # pyarrow oauthlib==3.2.2 # via requests-oauthlib -orjson==3.10.3 - # via fastapi -packaging==24.0 +packaging==24.1 # via # dask # gunicorn @@ -112,23 +86,22 @@ pandas==2.2.2 partd==1.4.2 # via dask prometheus-client==0.20.0 -protobuf==4.25.3 +protobuf==4.25.4 # via mypy-protobuf psutil==6.0.0 -pyarrow==16.0.0 +pyarrow==17.0.0 # via dask-expr -pyasn1==0.6.0 +pyasn1==0.6.1 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 +pyasn1-modules==0.4.1 # via google-auth -pydantic==2.7.1 +pydantic==2.9.1 # via fastapi -pydantic-core==2.18.2 +pydantic-core==2.23.3 # via pydantic pygments==2.18.0 - # via rich pyjwt==2.9.0 python-dateutil==2.9.0.post0 # via @@ -136,11 +109,9 @@ python-dateutil==2.9.0.post0 # pandas python-dotenv==1.0.1 # via uvicorn -python-multipart==0.0.9 - # via fastapi -pytz==2024.1 +pytz==2024.2 # via pandas -pyyaml==6.0.1 +pyyaml==6.0.2 # via # dask # kubernetes @@ -149,37 +120,31 @@ referencing==0.35.1 # via # jsonschema # jsonschema-specifications -requests==2.31.0 +requests==2.32.3 # via # kubernetes # requests-oauthlib requests-oauthlib==2.0.0 # via kubernetes 
-rich==13.7.1 - # via typer -rpds-py==0.18.1 +rpds-py==0.20.0 # via # jsonschema # referencing rsa==4.9 # via google-auth -setuptools==73.0.1 +setuptools==74.1.2 # via kubernetes -shellingham==1.5.4 - # via typer six==1.16.0 # via # kubernetes # python-dateutil sniffio==1.3.1 - # via - # anyio - # httpx -sqlalchemy[mypy]==2.0.30 -starlette==0.37.2 + # via anyio +sqlalchemy[mypy]==2.0.34 +starlette==0.38.5 # via fastapi tabulate==0.9.0 -tenacity==8.3.0 +tenacity==8.5.0 toml==0.10.2 tomli==2.0.1 # via mypy @@ -187,13 +152,11 @@ toolz==0.12.1 # via # dask # partd -tqdm==4.66.4 -typeguard==4.2.1 -typer==0.12.3 - # via fastapi-cli -types-protobuf==5.26.0.20240422 +tqdm==4.66.5 +typeguard==4.3.0 +types-protobuf==5.27.0.20240907 # via mypy-protobuf -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # anyio # fastapi @@ -203,27 +166,21 @@ typing-extensions==4.11.0 # sqlalchemy # starlette # typeguard - # typer # uvicorn tzdata==2024.1 # via pandas -ujson==5.9.0 - # via fastapi -urllib3==2.2.1 +urllib3==2.2.2 # via # kubernetes # requests -uvicorn[standard]==0.29.0 - # via - # fastapi - # fastapi-cli -uvloop==0.19.0 +uvicorn[standard]==0.30.6 +uvloop==0.20.0 # via uvicorn -watchfiles==0.21.0 +watchfiles==0.24.0 # via uvicorn websocket-client==1.8.0 # via kubernetes -websockets==12.0 +websockets==13.0.1 # via uvicorn -zipp==3.19.2 +zipp==3.20.1 # via importlib-metadata diff --git a/setup.py b/setup.py index 6da5e8226a..d8bc55e334 100644 --- a/setup.py +++ b/setup.py @@ -74,7 +74,7 @@ "google-cloud-datastore>=2.16.0,<3", "google-cloud-storage>=1.34.0,<3", "google-cloud-bigtable>=2.11.0,<3", - "fsspec<=2024.1.0", + "fsspec<=2024.9.0", ] REDIS_REQUIRED = [ @@ -82,7 +82,7 @@ "hiredis>=2.0.0,<3", ] -AWS_REQUIRED = ["boto3>=1.17.0,<2", "fsspec<=2024.1.0", "aiobotocore>2,<3"] +AWS_REQUIRED = ["boto3>=1.17.0,<2", "fsspec<=2024.9.0", "aiobotocore>2,<3"] KUBERNETES_REQUIRED = ["kubernetes<=20.13.0"] From 3073ea5911339a5744be45512a9a2ee8b250292b Mon Sep 17 00:00:00 2001 From: 
Abdul Hameed Date: Tue, 17 Sep 2024 00:27:15 -0400 Subject: [PATCH 60/96] fix: Removed the k8s dependency from required dependencies (#4519) * Removed the k8s dependency from required dependencies Signed-off-by: Abdul Hameed * updated requirments lock files Signed-off-by: Abdul Hameed --------- Signed-off-by: Abdul Hameed --- sdk/python/feast/permissions/server/utils.py | 5 +- .../requirements/py3.10-ci-requirements.txt | 149 +++++++++++++++-- .../requirements/py3.10-requirements.txt | 82 +++++----- .../requirements/py3.11-ci-requirements.txt | 149 +++++++++++++++-- .../requirements/py3.11-requirements.txt | 82 +++++----- .../requirements/py3.9-ci-requirements.txt | 153 +++++++++++++++--- .../requirements/py3.9-requirements.txt | 82 +++++----- setup.py | 1 - 8 files changed, 525 insertions(+), 178 deletions(-) diff --git a/sdk/python/feast/permissions/server/utils.py b/sdk/python/feast/permissions/server/utils.py index ac70f187ce..9a8b319dbc 100644 --- a/sdk/python/feast/permissions/server/utils.py +++ b/sdk/python/feast/permissions/server/utils.py @@ -11,7 +11,6 @@ AuthManager, set_auth_manager, ) -from feast.permissions.auth.kubernetes_token_parser import KubernetesTokenParser from feast.permissions.auth.oidc_token_parser import OidcTokenParser from feast.permissions.auth.token_extractor import TokenExtractor from feast.permissions.auth.token_parser import TokenParser @@ -116,6 +115,10 @@ def init_auth_manager( raise ValueError(f"Unmanaged server type {server_type}") if auth_type == AuthManagerType.KUBERNETES: + from feast.permissions.auth.kubernetes_token_parser import ( + KubernetesTokenParser, + ) + token_parser = KubernetesTokenParser() elif auth_type == AuthManagerType.OIDC: assert isinstance(auth_config, OidcAuthConfig) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index b8798d96c6..55df7ccb68 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ 
b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -1,6 +1,7 @@ # This file was autogenerated by uv via the following command: # uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.10-ci-requirements.txt aiobotocore==2.15.0 + # via feast (setup.py) aiohappyeyeballs==2.4.0 # via aiohttp aiohttp==3.10.5 @@ -21,6 +22,8 @@ anyio==4.4.0 # jupyter-server # starlette # watchfiles +appnope==0.1.4 + # via ipykernel argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 @@ -30,6 +33,7 @@ arrow==1.3.0 asn1crypto==1.5.1 # via snowflake-connector-python assertpy==1.1 + # via feast (setup.py) asttokens==2.4.1 # via stack-data async-lru==2.0.4 @@ -52,7 +56,9 @@ azure-core==1.30.2 # azure-identity # azure-storage-blob azure-identity==1.17.1 + # via feast (setup.py) azure-storage-blob==12.22.0 + # via feast (setup.py) babel==2.16.0 # via # jupyterlab-server @@ -60,10 +66,13 @@ babel==2.16.0 beautifulsoup4==4.12.3 # via nbconvert bigtree==0.21.1 + # via feast (setup.py) bleach==6.1.0 # via nbconvert boto3==1.35.16 - # via moto + # via + # feast (setup.py) + # moto botocore==1.35.16 # via # aiobotocore @@ -72,11 +81,13 @@ botocore==1.35.16 # s3transfer build==1.2.2 # via + # feast (setup.py) # pip-tools # singlestoredb cachetools==5.5.0 # via google-auth cassandra-driver==3.29.2 + # via feast (setup.py) certifi==2024.8.30 # via # elastic-transport @@ -99,6 +110,7 @@ charset-normalizer==3.3.2 # snowflake-connector-python click==8.1.7 # via + # feast (setup.py) # dask # geomet # great-expectations @@ -107,7 +119,9 @@ click==8.1.7 cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via great-expectations + # via + # feast (setup.py) + # great-expectations comm==0.2.2 # via # ipykernel @@ -116,6 +130,7 @@ coverage[toml]==7.6.1 # via pytest-cov cryptography==42.0.8 # via + # feast (setup.py) # azure-identity # azure-storage-blob # great-expectations @@ -130,7 +145,9 @@ cryptography==42.0.8 cython==3.0.11 # via thriftpy2 
dask[dataframe]==2024.8.2 - # via dask-expr + # via + # feast (setup.py) + # dask-expr dask-expr==1.1.13 # via dask db-dtypes==1.3.0 @@ -142,9 +159,11 @@ decorator==5.1.1 defusedxml==0.7.1 # via nbconvert deltalake==0.19.2 + # via feast (setup.py) deprecation==2.1.0 # via python-keycloak dill==0.3.8 + # via feast (setup.py) distlib==0.3.8 # via virtualenv docker==7.1.0 @@ -156,6 +175,7 @@ duckdb==1.1.0 elastic-transport==8.15.0 # via elasticsearch elasticsearch==8.15.1 + # via feast (setup.py) entrypoints==0.4 # via altair exceptiongroup==1.2.2 @@ -168,6 +188,7 @@ execnet==2.1.1 executing==2.1.0 # via stack-data fastapi==0.114.1 + # via feast (setup.py) fastjsonschema==2.20.0 # via nbformat filelock==3.16.0 @@ -181,11 +202,14 @@ frozenlist==1.4.1 # aiohttp # aiosignal fsspec==2024.9.0 - # via dask + # via + # feast (setup.py) + # dask geomet==0.2.1.post1 # via cassandra-driver google-api-core[grpc]==2.19.2 # via + # feast (setup.py) # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable @@ -203,8 +227,11 @@ google-auth==2.34.0 # google-cloud-storage # kubernetes google-cloud-bigquery[pandas]==3.25.0 + # via feast (setup.py) google-cloud-bigquery-storage==2.26.0 + # via feast (setup.py) google-cloud-bigtable==2.26.0 + # via feast (setup.py) google-cloud-core==2.4.1 # via # google-cloud-bigquery @@ -212,7 +239,9 @@ google-cloud-core==2.4.1 # google-cloud-datastore # google-cloud-storage google-cloud-datastore==2.20.1 + # via feast (setup.py) google-cloud-storage==2.18.2 + # via feast (setup.py) google-crc32c==1.6.0 # via # google-cloud-storage @@ -223,16 +252,17 @@ google-resumable-media==2.7.2 # google-cloud-storage googleapis-common-protos[grpc]==1.65.0 # via + # feast (setup.py) # google-api-core # grpc-google-iam-v1 # grpcio-status great-expectations==0.18.20 -greenlet==3.1.0 - # via sqlalchemy + # via feast (setup.py) grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable grpcio==1.66.1 # via + # feast (setup.py) # google-api-core # 
googleapis-common-protos # grpc-google-iam-v1 @@ -242,30 +272,42 @@ grpcio==1.66.1 # grpcio-testing # grpcio-tools grpcio-health-checking==1.62.3 + # via feast (setup.py) grpcio-reflection==1.62.3 + # via feast (setup.py) grpcio-status==1.62.3 # via google-api-core grpcio-testing==1.62.3 + # via feast (setup.py) grpcio-tools==1.62.3 + # via feast (setup.py) gunicorn==23.0.0 + # via feast (setup.py) h11==0.14.0 # via # httpcore # uvicorn happybase==1.2.0 + # via feast (setup.py) hazelcast-python-client==5.5.0 + # via feast (setup.py) hiredis==2.4.0 + # via feast (setup.py) httpcore==1.0.5 # via httpx httptools==0.6.1 # via uvicorn httpx==0.27.2 # via + # feast (setup.py) # jupyterlab # python-keycloak ibis-framework[duckdb]==9.4.0 - # via ibis-substrait + # via + # feast (setup.py) + # ibis-substrait ibis-substrait==4.0.1 + # via feast (setup.py) identify==2.6.0 # via pre-commit idna==3.8 @@ -299,6 +341,7 @@ jedi==0.19.1 # via ipython jinja2==3.1.4 # via + # feast (setup.py) # altair # great-expectations # jupyter-server @@ -321,6 +364,7 @@ jsonpointer==3.0.0 # jsonschema jsonschema[format-nongpl]==4.23.0 # via + # feast (setup.py) # altair # great-expectations # jupyter-events @@ -368,6 +412,7 @@ jupyterlab-widgets==3.0.13 jwcrypto==1.5.6 # via python-keycloak kubernetes==20.13.0 + # via feast (setup.py) locket==1.0.0 # via partd makefun==1.15.4 @@ -388,13 +433,17 @@ matplotlib-inline==0.1.7 mdurl==0.1.2 # via markdown-it-py minio==7.1.0 + # via feast (setup.py) mistune==3.0.2 # via # great-expectations # nbconvert mmh3==4.1.0 + # via feast (setup.py) mock==2.0.0 + # via feast (setup.py) moto==4.2.14 + # via feast (setup.py) msal==1.31.0 # via # azure-identity @@ -406,10 +455,13 @@ multidict==6.1.0 # aiohttp # yarl mypy==1.11.2 - # via sqlalchemy + # via + # feast (setup.py) + # sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.3.0 + # via feast (setup.py) nbclient==0.10.0 # via nbconvert nbconvert==7.16.4 @@ -432,6 +484,7 @@ notebook-shim==0.2.4 # 
notebook numpy==1.26.4 # via + # feast (setup.py) # altair # dask # db-dtypes @@ -465,6 +518,7 @@ packaging==24.1 # sphinx pandas==2.2.2 # via + # feast (setup.py) # altair # dask # dask-expr @@ -490,6 +544,7 @@ pexpect==4.9.0 pip==24.2 # via pip-tools pip-tools==7.4.1 + # via feast (setup.py) platformdirs==3.11.0 # via # jupyter-core @@ -502,8 +557,11 @@ ply==3.11 portalocker==2.10.1 # via msal-extensions pre-commit==3.3.1 + # via feast (setup.py) prometheus-client==0.20.0 - # via jupyter-server + # via + # feast (setup.py) + # jupyter-server prompt-toolkit==3.0.47 # via ipython proto-plus==1.24.0 @@ -514,6 +572,7 @@ proto-plus==1.24.0 # google-cloud-datastore protobuf==4.25.4 # via + # feast (setup.py) # google-api-core # google-cloud-bigquery-storage # google-cloud-bigtable @@ -529,8 +588,11 @@ protobuf==4.25.4 # proto-plus # substrait psutil==5.9.0 - # via ipykernel + # via + # feast (setup.py) + # ipykernel psycopg[binary, pool]==3.2.1 + # via feast (setup.py) psycopg-binary==3.2.1 # via psycopg psycopg-pool==3.2.2 @@ -542,12 +604,14 @@ ptyprocess==0.7.0 pure-eval==0.2.3 # via stack-data py==1.11.0 + # via feast (setup.py) py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark pyarrow==17.0.0 # via + # feast (setup.py) # dask-expr # db-dtypes # deltalake @@ -563,28 +627,35 @@ pyasn1==0.6.1 pyasn1-modules==0.4.1 # via google-auth pybindgen==0.22.1 + # via feast (setup.py) pycparser==2.22 # via cffi pydantic==2.9.1 # via + # feast (setup.py) # fastapi # great-expectations pydantic-core==2.23.3 # via pydantic pygments==2.18.0 # via + # feast (setup.py) # ipython # nbconvert # rich # sphinx pyjwt[crypto]==2.9.0 # via + # feast (setup.py) # msal # singlestoredb # snowflake-connector-python pymssql==2.3.1 + # via feast (setup.py) pymysql==1.1.1 + # via feast (setup.py) pyodbc==5.1.0 + # via feast (setup.py) pyopenssl==24.2.1 # via snowflake-connector-python pyparsing==3.1.4 @@ -594,8 +665,10 @@ pyproject-hooks==1.1.0 # build # pip-tools pyspark==3.5.2 
+ # via feast (setup.py) pytest==7.4.4 # via + # feast (setup.py) # pytest-benchmark # pytest-cov # pytest-env @@ -605,13 +678,21 @@ pytest==7.4.4 # pytest-timeout # pytest-xdist pytest-benchmark==3.4.1 + # via feast (setup.py) pytest-cov==5.0.0 + # via feast (setup.py) pytest-env==1.1.3 + # via feast (setup.py) pytest-lazy-fixture==0.6.3 + # via feast (setup.py) pytest-mock==1.10.4 + # via feast (setup.py) pytest-ordering==0.6 + # via feast (setup.py) pytest-timeout==1.4.2 + # via feast (setup.py) pytest-xdist==3.6.1 + # via feast (setup.py) python-dateutil==2.9.0.post0 # via # arrow @@ -629,6 +710,7 @@ python-dotenv==1.0.1 python-json-logger==2.0.7 # via jupyter-events python-keycloak==4.2.2 + # via feast (setup.py) pytz==2024.2 # via # great-expectations @@ -638,6 +720,7 @@ pytz==2024.2 # trino pyyaml==6.0.2 # via + # feast (setup.py) # dask # ibis-substrait # jupyter-events @@ -651,15 +734,19 @@ pyzmq==26.2.0 # jupyter-client # jupyter-server redis==4.6.0 + # via feast (setup.py) referencing==0.35.1 # via # jsonschema # jsonschema-specifications # jupyter-events regex==2024.7.24 - # via parsimonious + # via + # feast (setup.py) + # parsimonious requests==2.32.3 # via + # feast (setup.py) # azure-core # docker # google-api-core @@ -705,6 +792,7 @@ ruamel-yaml==0.17.40 ruamel-yaml-clib==0.2.8 # via ruamel-yaml ruff==0.6.4 + # via feast (setup.py) s3transfer==0.10.2 # via boto3 scipy==1.14.1 @@ -719,6 +807,7 @@ setuptools==74.1.2 # pip-tools # singlestoredb singlestoredb==1.6.3 + # via feast (setup.py) six==1.16.0 # via # asttokens @@ -739,11 +828,13 @@ sniffio==1.3.1 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.12.1 + # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.6 # via beautifulsoup4 sphinx==6.2.1 + # via feast (setup.py) sphinxcontrib-applehelp==2.0.0 # via sphinx sphinxcontrib-devhelp==2.0.0 @@ -757,9 +848,11 @@ sphinxcontrib-qthelp==2.0.0 sphinxcontrib-serializinghtml==2.0.0 # via 
sphinx sqlalchemy[mypy]==2.0.34 + # via feast (setup.py) sqlglot==25.18.0 # via ibis-framework sqlite-vec==0.1.1 + # via feast (setup.py) sqlparams==6.1.0 # via singlestoredb stack-data==0.6.3 @@ -769,17 +862,21 @@ starlette==0.38.5 substrait==0.22.0 # via ibis-substrait tabulate==0.9.0 + # via feast (setup.py) tenacity==8.5.0 + # via feast (setup.py) terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals testcontainers==4.4.0 + # via feast (setup.py) thriftpy2==0.5.2 # via happybase tinycss2==1.3.0 # via nbconvert toml==0.10.2 + # via feast (setup.py) tomli==2.0.1 # via # build @@ -807,7 +904,9 @@ tornado==6.4.1 # notebook # terminado tqdm==4.66.5 - # via great-expectations + # via + # feast (setup.py) + # great-expectations traitlets==5.14.3 # via # comm @@ -824,23 +923,37 @@ traitlets==5.14.3 # nbconvert # nbformat trino==0.329.0 + # via feast (setup.py) typeguard==4.3.0 + # via feast (setup.py) types-cffi==1.16.0.20240331 # via types-pyopenssl types-protobuf==3.19.22 - # via mypy-protobuf + # via + # feast (setup.py) + # mypy-protobuf types-pymysql==1.1.0.20240524 + # via feast (setup.py) types-pyopenssl==24.1.0.20240722 # via types-redis types-python-dateutil==2.9.0.20240906 - # via arrow + # via + # feast (setup.py) + # arrow types-pytz==2024.1.0.20240417 + # via feast (setup.py) types-pyyaml==6.0.12.20240808 + # via feast (setup.py) types-redis==4.6.0.20240903 + # via feast (setup.py) types-requests==2.30.0.0 + # via feast (setup.py) types-setuptools==74.1.0.20240907 - # via types-cffi + # via + # feast (setup.py) + # types-cffi types-tabulate==0.9.0.20240106 + # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests typing-extensions==4.12.2 @@ -876,6 +989,7 @@ uri-template==1.3.0 # via jsonschema urllib3==2.2.2 # via + # feast (setup.py) # botocore # docker # elastic-transport @@ -886,10 +1000,13 @@ urllib3==2.2.2 # responses # testcontainers uvicorn[standard]==0.30.6 + # via feast (setup.py) uvloop==0.20.0 # via uvicorn 
virtualenv==20.23.0 - # via pre-commit + # via + # feast (setup.py) + # pre-commit watchfiles==0.24.0 # via uvicorn wcwidth==0.2.13 diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index a3006e4555..3420c8a0e3 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -11,36 +11,36 @@ attrs==24.2.0 # jsonschema # referencing bigtree==0.21.1 -cachetools==5.5.0 - # via google-auth + # via feast (setup.py) certifi==2024.8.30 - # via - # kubernetes - # requests + # via requests charset-normalizer==3.3.2 # via requests click==8.1.7 # via + # feast (setup.py) # dask # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 + # via feast (setup.py) dask[dataframe]==2024.8.2 - # via dask-expr + # via + # feast (setup.py) + # dask-expr dask-expr==1.1.13 # via dask dill==0.3.8 + # via feast (setup.py) exceptiongroup==1.2.2 # via anyio fastapi==0.114.1 + # via feast (setup.py) fsspec==2024.9.0 # via dask -google-auth==2.34.0 - # via kubernetes -greenlet==3.1.0 - # via sqlalchemy gunicorn==23.0.0 + # via feast (setup.py) h11==0.14.0 # via uvicorn httptools==0.6.1 @@ -52,98 +52,97 @@ idna==3.8 importlib-metadata==8.5.0 # via dask jinja2==3.1.4 + # via feast (setup.py) jsonschema==4.23.0 + # via feast (setup.py) jsonschema-specifications==2023.12.1 # via jsonschema -kubernetes==20.13.0 locket==1.0.0 # via partd markupsafe==2.1.5 # via jinja2 mmh3==4.1.0 + # via feast (setup.py) mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.6.0 + # via feast (setup.py) numpy==1.26.4 # via + # feast (setup.py) # dask # pandas # pyarrow -oauthlib==3.2.2 - # via requests-oauthlib packaging==24.1 # via # dask # gunicorn pandas==2.2.2 # via + # feast (setup.py) # dask # dask-expr partd==1.4.2 # via dask prometheus-client==0.20.0 + # via feast (setup.py) protobuf==4.25.4 - # via mypy-protobuf + # via + # feast (setup.py) + # mypy-protobuf 
psutil==6.0.0 + # via feast (setup.py) pyarrow==17.0.0 - # via dask-expr -pyasn1==0.6.1 # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 - # via google-auth + # feast (setup.py) + # dask-expr pydantic==2.9.1 - # via fastapi + # via + # feast (setup.py) + # fastapi pydantic-core==2.23.3 # via pydantic pygments==2.18.0 + # via feast (setup.py) pyjwt==2.9.0 + # via feast (setup.py) python-dateutil==2.9.0.post0 - # via - # kubernetes - # pandas + # via pandas python-dotenv==1.0.1 # via uvicorn pytz==2024.2 # via pandas pyyaml==6.0.2 # via + # feast (setup.py) # dask - # kubernetes # uvicorn referencing==0.35.1 # via # jsonschema # jsonschema-specifications requests==2.32.3 - # via - # kubernetes - # requests-oauthlib -requests-oauthlib==2.0.0 - # via kubernetes + # via feast (setup.py) rpds-py==0.20.0 # via # jsonschema # referencing -rsa==4.9 - # via google-auth -setuptools==74.1.2 - # via kubernetes six==1.16.0 - # via - # kubernetes - # python-dateutil + # via python-dateutil sniffio==1.3.1 # via anyio sqlalchemy[mypy]==2.0.34 + # via feast (setup.py) starlette==0.38.5 # via fastapi tabulate==0.9.0 + # via feast (setup.py) tenacity==8.5.0 + # via feast (setup.py) toml==0.10.2 + # via feast (setup.py) tomli==2.0.1 # via mypy toolz==0.12.1 @@ -151,7 +150,9 @@ toolz==0.12.1 # dask # partd tqdm==4.66.5 + # via feast (setup.py) typeguard==4.3.0 + # via feast (setup.py) types-protobuf==5.27.0.20240907 # via mypy-protobuf typing-extensions==4.12.2 @@ -167,16 +168,13 @@ typing-extensions==4.12.2 tzdata==2024.1 # via pandas urllib3==2.2.2 - # via - # kubernetes - # requests + # via requests uvicorn[standard]==0.30.6 + # via feast (setup.py) uvloop==0.20.0 # via uvicorn watchfiles==0.24.0 # via uvicorn -websocket-client==1.8.0 - # via kubernetes websockets==13.0.1 # via uvicorn zipp==3.20.1 diff --git a/sdk/python/requirements/py3.11-ci-requirements.txt b/sdk/python/requirements/py3.11-ci-requirements.txt index cd78247a23..6c5fb02e06 100644 --- 
a/sdk/python/requirements/py3.11-ci-requirements.txt +++ b/sdk/python/requirements/py3.11-ci-requirements.txt @@ -1,6 +1,7 @@ # This file was autogenerated by uv via the following command: # uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.11-ci-requirements.txt aiobotocore==2.15.0 + # via feast (setup.py) aiohappyeyeballs==2.4.0 # via aiohttp aiohttp==3.10.5 @@ -21,6 +22,8 @@ anyio==4.4.0 # jupyter-server # starlette # watchfiles +appnope==0.1.4 + # via ipykernel argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 @@ -30,6 +33,7 @@ arrow==1.3.0 asn1crypto==1.5.1 # via snowflake-connector-python assertpy==1.1 + # via feast (setup.py) asttokens==2.4.1 # via stack-data async-lru==2.0.4 @@ -48,7 +52,9 @@ azure-core==1.30.2 # azure-identity # azure-storage-blob azure-identity==1.17.1 + # via feast (setup.py) azure-storage-blob==12.22.0 + # via feast (setup.py) babel==2.16.0 # via # jupyterlab-server @@ -56,10 +62,13 @@ babel==2.16.0 beautifulsoup4==4.12.3 # via nbconvert bigtree==0.21.1 + # via feast (setup.py) bleach==6.1.0 # via nbconvert boto3==1.35.16 - # via moto + # via + # feast (setup.py) + # moto botocore==1.35.16 # via # aiobotocore @@ -68,11 +77,13 @@ botocore==1.35.16 # s3transfer build==1.2.2 # via + # feast (setup.py) # pip-tools # singlestoredb cachetools==5.5.0 # via google-auth cassandra-driver==3.29.2 + # via feast (setup.py) certifi==2024.8.30 # via # elastic-transport @@ -95,6 +106,7 @@ charset-normalizer==3.3.2 # snowflake-connector-python click==8.1.7 # via + # feast (setup.py) # dask # geomet # great-expectations @@ -103,7 +115,9 @@ click==8.1.7 cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via great-expectations + # via + # feast (setup.py) + # great-expectations comm==0.2.2 # via # ipykernel @@ -112,6 +126,7 @@ coverage[toml]==7.6.1 # via pytest-cov cryptography==42.0.8 # via + # feast (setup.py) # azure-identity # azure-storage-blob # great-expectations @@ -126,7 
+141,9 @@ cryptography==42.0.8 cython==3.0.11 # via thriftpy2 dask[dataframe]==2024.8.2 - # via dask-expr + # via + # feast (setup.py) + # dask-expr dask-expr==1.1.13 # via dask db-dtypes==1.3.0 @@ -138,9 +155,11 @@ decorator==5.1.1 defusedxml==0.7.1 # via nbconvert deltalake==0.19.2 + # via feast (setup.py) deprecation==2.1.0 # via python-keycloak dill==0.3.8 + # via feast (setup.py) distlib==0.3.8 # via virtualenv docker==7.1.0 @@ -152,6 +171,7 @@ duckdb==1.1.0 elastic-transport==8.15.0 # via elasticsearch elasticsearch==8.15.1 + # via feast (setup.py) entrypoints==0.4 # via altair execnet==2.1.1 @@ -159,6 +179,7 @@ execnet==2.1.1 executing==2.1.0 # via stack-data fastapi==0.114.1 + # via feast (setup.py) fastjsonschema==2.20.0 # via nbformat filelock==3.16.0 @@ -172,11 +193,14 @@ frozenlist==1.4.1 # aiohttp # aiosignal fsspec==2024.9.0 - # via dask + # via + # feast (setup.py) + # dask geomet==0.2.1.post1 # via cassandra-driver google-api-core[grpc]==2.19.2 # via + # feast (setup.py) # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable @@ -194,8 +218,11 @@ google-auth==2.34.0 # google-cloud-storage # kubernetes google-cloud-bigquery[pandas]==3.25.0 + # via feast (setup.py) google-cloud-bigquery-storage==2.26.0 + # via feast (setup.py) google-cloud-bigtable==2.26.0 + # via feast (setup.py) google-cloud-core==2.4.1 # via # google-cloud-bigquery @@ -203,7 +230,9 @@ google-cloud-core==2.4.1 # google-cloud-datastore # google-cloud-storage google-cloud-datastore==2.20.1 + # via feast (setup.py) google-cloud-storage==2.18.2 + # via feast (setup.py) google-crc32c==1.6.0 # via # google-cloud-storage @@ -214,16 +243,17 @@ google-resumable-media==2.7.2 # google-cloud-storage googleapis-common-protos[grpc]==1.65.0 # via + # feast (setup.py) # google-api-core # grpc-google-iam-v1 # grpcio-status great-expectations==0.18.20 -greenlet==3.1.0 - # via sqlalchemy + # via feast (setup.py) grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable 
grpcio==1.66.1 # via + # feast (setup.py) # google-api-core # googleapis-common-protos # grpc-google-iam-v1 @@ -233,30 +263,42 @@ grpcio==1.66.1 # grpcio-testing # grpcio-tools grpcio-health-checking==1.62.3 + # via feast (setup.py) grpcio-reflection==1.62.3 + # via feast (setup.py) grpcio-status==1.62.3 # via google-api-core grpcio-testing==1.62.3 + # via feast (setup.py) grpcio-tools==1.62.3 + # via feast (setup.py) gunicorn==23.0.0 + # via feast (setup.py) h11==0.14.0 # via # httpcore # uvicorn happybase==1.2.0 + # via feast (setup.py) hazelcast-python-client==5.5.0 + # via feast (setup.py) hiredis==2.4.0 + # via feast (setup.py) httpcore==1.0.5 # via httpx httptools==0.6.1 # via uvicorn httpx==0.27.2 # via + # feast (setup.py) # jupyterlab # python-keycloak ibis-framework[duckdb]==9.4.0 - # via ibis-substrait + # via + # feast (setup.py) + # ibis-substrait ibis-substrait==4.0.1 + # via feast (setup.py) identify==2.6.0 # via pre-commit idna==3.8 @@ -290,6 +332,7 @@ jedi==0.19.1 # via ipython jinja2==3.1.4 # via + # feast (setup.py) # altair # great-expectations # jupyter-server @@ -312,6 +355,7 @@ jsonpointer==3.0.0 # jsonschema jsonschema[format-nongpl]==4.23.0 # via + # feast (setup.py) # altair # great-expectations # jupyter-events @@ -359,6 +403,7 @@ jupyterlab-widgets==3.0.13 jwcrypto==1.5.6 # via python-keycloak kubernetes==20.13.0 + # via feast (setup.py) locket==1.0.0 # via partd makefun==1.15.4 @@ -379,13 +424,17 @@ matplotlib-inline==0.1.7 mdurl==0.1.2 # via markdown-it-py minio==7.1.0 + # via feast (setup.py) mistune==3.0.2 # via # great-expectations # nbconvert mmh3==4.1.0 + # via feast (setup.py) mock==2.0.0 + # via feast (setup.py) moto==4.2.14 + # via feast (setup.py) msal==1.31.0 # via # azure-identity @@ -397,10 +446,13 @@ multidict==6.1.0 # aiohttp # yarl mypy==1.11.2 - # via sqlalchemy + # via + # feast (setup.py) + # sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.3.0 + # via feast (setup.py) nbclient==0.10.0 # via nbconvert 
nbconvert==7.16.4 @@ -423,6 +475,7 @@ notebook-shim==0.2.4 # notebook numpy==1.26.4 # via + # feast (setup.py) # altair # dask # db-dtypes @@ -456,6 +509,7 @@ packaging==24.1 # sphinx pandas==2.2.2 # via + # feast (setup.py) # altair # dask # dask-expr @@ -481,6 +535,7 @@ pexpect==4.9.0 pip==24.2 # via pip-tools pip-tools==7.4.1 + # via feast (setup.py) platformdirs==3.11.0 # via # jupyter-core @@ -493,8 +548,11 @@ ply==3.11 portalocker==2.10.1 # via msal-extensions pre-commit==3.3.1 + # via feast (setup.py) prometheus-client==0.20.0 - # via jupyter-server + # via + # feast (setup.py) + # jupyter-server prompt-toolkit==3.0.47 # via ipython proto-plus==1.24.0 @@ -505,6 +563,7 @@ proto-plus==1.24.0 # google-cloud-datastore protobuf==4.25.4 # via + # feast (setup.py) # google-api-core # google-cloud-bigquery-storage # google-cloud-bigtable @@ -520,8 +579,11 @@ protobuf==4.25.4 # proto-plus # substrait psutil==5.9.0 - # via ipykernel + # via + # feast (setup.py) + # ipykernel psycopg[binary, pool]==3.2.1 + # via feast (setup.py) psycopg-binary==3.2.1 # via psycopg psycopg-pool==3.2.2 @@ -533,12 +595,14 @@ ptyprocess==0.7.0 pure-eval==0.2.3 # via stack-data py==1.11.0 + # via feast (setup.py) py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark pyarrow==17.0.0 # via + # feast (setup.py) # dask-expr # db-dtypes # deltalake @@ -554,28 +618,35 @@ pyasn1==0.6.1 pyasn1-modules==0.4.1 # via google-auth pybindgen==0.22.1 + # via feast (setup.py) pycparser==2.22 # via cffi pydantic==2.9.1 # via + # feast (setup.py) # fastapi # great-expectations pydantic-core==2.23.3 # via pydantic pygments==2.18.0 # via + # feast (setup.py) # ipython # nbconvert # rich # sphinx pyjwt[crypto]==2.9.0 # via + # feast (setup.py) # msal # singlestoredb # snowflake-connector-python pymssql==2.3.1 + # via feast (setup.py) pymysql==1.1.1 + # via feast (setup.py) pyodbc==5.1.0 + # via feast (setup.py) pyopenssl==24.2.1 # via snowflake-connector-python pyparsing==3.1.4 @@ -585,8 +656,10 
@@ pyproject-hooks==1.1.0 # build # pip-tools pyspark==3.5.2 + # via feast (setup.py) pytest==7.4.4 # via + # feast (setup.py) # pytest-benchmark # pytest-cov # pytest-env @@ -596,13 +669,21 @@ pytest==7.4.4 # pytest-timeout # pytest-xdist pytest-benchmark==3.4.1 + # via feast (setup.py) pytest-cov==5.0.0 + # via feast (setup.py) pytest-env==1.1.3 + # via feast (setup.py) pytest-lazy-fixture==0.6.3 + # via feast (setup.py) pytest-mock==1.10.4 + # via feast (setup.py) pytest-ordering==0.6 + # via feast (setup.py) pytest-timeout==1.4.2 + # via feast (setup.py) pytest-xdist==3.6.1 + # via feast (setup.py) python-dateutil==2.9.0.post0 # via # arrow @@ -620,6 +701,7 @@ python-dotenv==1.0.1 python-json-logger==2.0.7 # via jupyter-events python-keycloak==4.2.2 + # via feast (setup.py) pytz==2024.2 # via # great-expectations @@ -629,6 +711,7 @@ pytz==2024.2 # trino pyyaml==6.0.2 # via + # feast (setup.py) # dask # ibis-substrait # jupyter-events @@ -642,15 +725,19 @@ pyzmq==26.2.0 # jupyter-client # jupyter-server redis==4.6.0 + # via feast (setup.py) referencing==0.35.1 # via # jsonschema # jsonschema-specifications # jupyter-events regex==2024.7.24 - # via parsimonious + # via + # feast (setup.py) + # parsimonious requests==2.32.3 # via + # feast (setup.py) # azure-core # docker # google-api-core @@ -696,6 +783,7 @@ ruamel-yaml==0.17.40 ruamel-yaml-clib==0.2.8 # via ruamel-yaml ruff==0.6.4 + # via feast (setup.py) s3transfer==0.10.2 # via boto3 scipy==1.14.1 @@ -710,6 +798,7 @@ setuptools==74.1.2 # pip-tools # singlestoredb singlestoredb==1.6.3 + # via feast (setup.py) six==1.16.0 # via # asttokens @@ -730,11 +819,13 @@ sniffio==1.3.1 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.12.1 + # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.6 # via beautifulsoup4 sphinx==6.2.1 + # via feast (setup.py) sphinxcontrib-applehelp==2.0.0 # via sphinx sphinxcontrib-devhelp==2.0.0 @@ -748,9 +839,11 @@ 
sphinxcontrib-qthelp==2.0.0 sphinxcontrib-serializinghtml==2.0.0 # via sphinx sqlalchemy[mypy]==2.0.34 + # via feast (setup.py) sqlglot==25.18.0 # via ibis-framework sqlite-vec==0.1.1 + # via feast (setup.py) sqlparams==6.1.0 # via singlestoredb stack-data==0.6.3 @@ -760,17 +853,21 @@ starlette==0.38.5 substrait==0.22.0 # via ibis-substrait tabulate==0.9.0 + # via feast (setup.py) tenacity==8.5.0 + # via feast (setup.py) terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals testcontainers==4.4.0 + # via feast (setup.py) thriftpy2==0.5.2 # via happybase tinycss2==1.3.0 # via nbconvert toml==0.10.2 + # via feast (setup.py) tomlkit==0.13.2 # via snowflake-connector-python toolz==0.12.1 @@ -788,7 +885,9 @@ tornado==6.4.1 # notebook # terminado tqdm==4.66.5 - # via great-expectations + # via + # feast (setup.py) + # great-expectations traitlets==5.14.3 # via # comm @@ -805,23 +904,37 @@ traitlets==5.14.3 # nbconvert # nbformat trino==0.329.0 + # via feast (setup.py) typeguard==4.3.0 + # via feast (setup.py) types-cffi==1.16.0.20240331 # via types-pyopenssl types-protobuf==3.19.22 - # via mypy-protobuf + # via + # feast (setup.py) + # mypy-protobuf types-pymysql==1.1.0.20240524 + # via feast (setup.py) types-pyopenssl==24.1.0.20240722 # via types-redis types-python-dateutil==2.9.0.20240906 - # via arrow + # via + # feast (setup.py) + # arrow types-pytz==2024.1.0.20240417 + # via feast (setup.py) types-pyyaml==6.0.12.20240808 + # via feast (setup.py) types-redis==4.6.0.20240903 + # via feast (setup.py) types-requests==2.30.0.0 + # via feast (setup.py) types-setuptools==74.1.0.20240907 - # via types-cffi + # via + # feast (setup.py) + # types-cffi types-tabulate==0.9.0.20240106 + # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests typing-extensions==4.12.2 @@ -853,6 +966,7 @@ uri-template==1.3.0 # via jsonschema urllib3==2.2.2 # via + # feast (setup.py) # botocore # docker # elastic-transport @@ -863,10 +977,13 @@ urllib3==2.2.2 # responses 
# testcontainers uvicorn[standard]==0.30.6 + # via feast (setup.py) uvloop==0.20.0 # via uvicorn virtualenv==20.23.0 - # via pre-commit + # via + # feast (setup.py) + # pre-commit watchfiles==0.24.0 # via uvicorn wcwidth==0.2.13 diff --git a/sdk/python/requirements/py3.11-requirements.txt b/sdk/python/requirements/py3.11-requirements.txt index 611a0cedca..1d0ce54cc1 100644 --- a/sdk/python/requirements/py3.11-requirements.txt +++ b/sdk/python/requirements/py3.11-requirements.txt @@ -11,34 +11,34 @@ attrs==24.2.0 # jsonschema # referencing bigtree==0.21.1 -cachetools==5.5.0 - # via google-auth + # via feast (setup.py) certifi==2024.8.30 - # via - # kubernetes - # requests + # via requests charset-normalizer==3.3.2 # via requests click==8.1.7 # via + # feast (setup.py) # dask # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 + # via feast (setup.py) dask[dataframe]==2024.8.2 - # via dask-expr + # via + # feast (setup.py) + # dask-expr dask-expr==1.1.13 # via dask dill==0.3.8 + # via feast (setup.py) fastapi==0.114.1 + # via feast (setup.py) fsspec==2024.9.0 # via dask -google-auth==2.34.0 - # via kubernetes -greenlet==3.1.0 - # via sqlalchemy gunicorn==23.0.0 + # via feast (setup.py) h11==0.14.0 # via uvicorn httptools==0.6.1 @@ -50,104 +50,105 @@ idna==3.8 importlib-metadata==8.5.0 # via dask jinja2==3.1.4 + # via feast (setup.py) jsonschema==4.23.0 + # via feast (setup.py) jsonschema-specifications==2023.12.1 # via jsonschema -kubernetes==20.13.0 locket==1.0.0 # via partd markupsafe==2.1.5 # via jinja2 mmh3==4.1.0 + # via feast (setup.py) mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.6.0 + # via feast (setup.py) numpy==1.26.4 # via + # feast (setup.py) # dask # pandas # pyarrow -oauthlib==3.2.2 - # via requests-oauthlib packaging==24.1 # via # dask # gunicorn pandas==2.2.2 # via + # feast (setup.py) # dask # dask-expr partd==1.4.2 # via dask prometheus-client==0.20.0 + # via feast (setup.py) protobuf==4.25.4 - # via 
mypy-protobuf + # via + # feast (setup.py) + # mypy-protobuf psutil==6.0.0 + # via feast (setup.py) pyarrow==17.0.0 - # via dask-expr -pyasn1==0.6.1 # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 - # via google-auth + # feast (setup.py) + # dask-expr pydantic==2.9.1 - # via fastapi + # via + # feast (setup.py) + # fastapi pydantic-core==2.23.3 # via pydantic pygments==2.18.0 + # via feast (setup.py) pyjwt==2.9.0 + # via feast (setup.py) python-dateutil==2.9.0.post0 - # via - # kubernetes - # pandas + # via pandas python-dotenv==1.0.1 # via uvicorn pytz==2024.2 # via pandas pyyaml==6.0.2 # via + # feast (setup.py) # dask - # kubernetes # uvicorn referencing==0.35.1 # via # jsonschema # jsonschema-specifications requests==2.32.3 - # via - # kubernetes - # requests-oauthlib -requests-oauthlib==2.0.0 - # via kubernetes + # via feast (setup.py) rpds-py==0.20.0 # via # jsonschema # referencing -rsa==4.9 - # via google-auth -setuptools==74.1.2 - # via kubernetes six==1.16.0 - # via - # kubernetes - # python-dateutil + # via python-dateutil sniffio==1.3.1 # via anyio sqlalchemy[mypy]==2.0.34 + # via feast (setup.py) starlette==0.38.5 # via fastapi tabulate==0.9.0 + # via feast (setup.py) tenacity==8.5.0 + # via feast (setup.py) toml==0.10.2 + # via feast (setup.py) toolz==0.12.1 # via # dask # partd tqdm==4.66.5 + # via feast (setup.py) typeguard==4.3.0 + # via feast (setup.py) types-protobuf==5.27.0.20240907 # via mypy-protobuf typing-extensions==4.12.2 @@ -161,16 +162,13 @@ typing-extensions==4.12.2 tzdata==2024.1 # via pandas urllib3==2.2.2 - # via - # kubernetes - # requests + # via requests uvicorn[standard]==0.30.6 + # via feast (setup.py) uvloop==0.20.0 # via uvicorn watchfiles==0.24.0 # via uvicorn -websocket-client==1.8.0 - # via kubernetes websockets==13.0.1 # via uvicorn zipp==3.20.1 diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index f4e7c795fa..ae27849f4f 100644 --- 
a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -1,6 +1,7 @@ # This file was autogenerated by uv via the following command: # uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.9-ci-requirements.txt aiobotocore==2.15.0 + # via feast (setup.py) aiohappyeyeballs==2.4.0 # via aiohttp aiohttp==3.10.5 @@ -21,6 +22,8 @@ anyio==4.4.0 # jupyter-server # starlette # watchfiles +appnope==0.1.4 + # via ipykernel argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 @@ -30,6 +33,7 @@ arrow==1.3.0 asn1crypto==1.5.1 # via snowflake-connector-python assertpy==1.1 + # via feast (setup.py) asttokens==2.4.1 # via stack-data async-lru==2.0.4 @@ -52,7 +56,9 @@ azure-core==1.30.2 # azure-identity # azure-storage-blob azure-identity==1.17.1 + # via feast (setup.py) azure-storage-blob==12.22.0 + # via feast (setup.py) babel==2.16.0 # via # jupyterlab-server @@ -62,10 +68,13 @@ beautifulsoup4==4.12.3 bidict==0.23.1 # via ibis-framework bigtree==0.21.1 + # via feast (setup.py) bleach==6.1.0 # via nbconvert boto3==1.35.16 - # via moto + # via + # feast (setup.py) + # moto botocore==1.35.16 # via # aiobotocore @@ -74,11 +83,13 @@ botocore==1.35.16 # s3transfer build==1.2.2 # via + # feast (setup.py) # pip-tools # singlestoredb cachetools==5.5.0 # via google-auth cassandra-driver==3.29.2 + # via feast (setup.py) certifi==2024.8.30 # via # elastic-transport @@ -101,6 +112,7 @@ charset-normalizer==3.3.2 # snowflake-connector-python click==8.1.7 # via + # feast (setup.py) # dask # geomet # great-expectations @@ -109,7 +121,9 @@ click==8.1.7 cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via great-expectations + # via + # feast (setup.py) + # great-expectations comm==0.2.2 # via # ipykernel @@ -118,6 +132,7 @@ coverage[toml]==7.6.1 # via pytest-cov cryptography==42.0.8 # via + # feast (setup.py) # azure-identity # azure-storage-blob # great-expectations @@ -132,7 
+147,9 @@ cryptography==42.0.8 cython==3.0.11 # via thriftpy2 dask[dataframe]==2024.8.0 - # via dask-expr + # via + # feast (setup.py) + # dask-expr dask-expr==1.1.10 # via dask db-dtypes==1.3.0 @@ -144,9 +161,11 @@ decorator==5.1.1 defusedxml==0.7.1 # via nbconvert deltalake==0.19.2 + # via feast (setup.py) deprecation==2.1.0 # via python-keycloak dill==0.3.8 + # via feast (setup.py) distlib==0.3.8 # via virtualenv docker==7.1.0 @@ -158,6 +177,7 @@ duckdb==0.10.3 elastic-transport==8.15.0 # via elasticsearch elasticsearch==8.15.1 + # via feast (setup.py) entrypoints==0.4 # via altair exceptiongroup==1.2.2 @@ -170,6 +190,7 @@ execnet==2.1.1 executing==2.1.0 # via stack-data fastapi==0.114.1 + # via feast (setup.py) fastjsonschema==2.20.0 # via nbformat filelock==3.16.0 @@ -183,11 +204,14 @@ frozenlist==1.4.1 # aiohttp # aiosignal fsspec==2024.9.0 - # via dask + # via + # feast (setup.py) + # dask geomet==0.2.1.post1 # via cassandra-driver google-api-core[grpc]==2.19.2 # via + # feast (setup.py) # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable @@ -205,8 +229,11 @@ google-auth==2.34.0 # google-cloud-storage # kubernetes google-cloud-bigquery[pandas]==3.25.0 + # via feast (setup.py) google-cloud-bigquery-storage==2.26.0 + # via feast (setup.py) google-cloud-bigtable==2.26.0 + # via feast (setup.py) google-cloud-core==2.4.1 # via # google-cloud-bigquery @@ -214,7 +241,9 @@ google-cloud-core==2.4.1 # google-cloud-datastore # google-cloud-storage google-cloud-datastore==2.20.1 + # via feast (setup.py) google-cloud-storage==2.18.2 + # via feast (setup.py) google-crc32c==1.6.0 # via # google-cloud-storage @@ -225,16 +254,17 @@ google-resumable-media==2.7.2 # google-cloud-storage googleapis-common-protos[grpc]==1.65.0 # via + # feast (setup.py) # google-api-core # grpc-google-iam-v1 # grpcio-status great-expectations==0.18.20 -greenlet==3.1.0 - # via sqlalchemy + # via feast (setup.py) grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable 
grpcio==1.66.1 # via + # feast (setup.py) # google-api-core # googleapis-common-protos # grpc-google-iam-v1 @@ -244,30 +274,42 @@ grpcio==1.66.1 # grpcio-testing # grpcio-tools grpcio-health-checking==1.62.3 + # via feast (setup.py) grpcio-reflection==1.62.3 + # via feast (setup.py) grpcio-status==1.62.3 # via google-api-core grpcio-testing==1.62.3 + # via feast (setup.py) grpcio-tools==1.62.3 + # via feast (setup.py) gunicorn==23.0.0 + # via feast (setup.py) h11==0.14.0 # via # httpcore # uvicorn happybase==1.2.0 + # via feast (setup.py) hazelcast-python-client==5.5.0 + # via feast (setup.py) hiredis==2.4.0 + # via feast (setup.py) httpcore==1.0.5 # via httpx httptools==0.6.1 # via uvicorn httpx==0.27.2 # via + # feast (setup.py) # jupyterlab # python-keycloak ibis-framework[duckdb]==9.0.0 - # via ibis-substrait + # via + # feast (setup.py) + # ibis-substrait ibis-substrait==4.0.1 + # via feast (setup.py) identify==2.6.0 # via pre-commit idna==3.8 @@ -310,6 +352,7 @@ jedi==0.19.1 # via ipython jinja2==3.1.4 # via + # feast (setup.py) # altair # great-expectations # jupyter-server @@ -332,6 +375,7 @@ jsonpointer==3.0.0 # jsonschema jsonschema[format-nongpl]==4.23.0 # via + # feast (setup.py) # altair # great-expectations # jupyter-events @@ -379,6 +423,7 @@ jupyterlab-widgets==3.0.13 jwcrypto==1.5.6 # via python-keycloak kubernetes==20.13.0 + # via feast (setup.py) locket==1.0.0 # via partd makefun==1.15.4 @@ -399,13 +444,17 @@ matplotlib-inline==0.1.7 mdurl==0.1.2 # via markdown-it-py minio==7.1.0 + # via feast (setup.py) mistune==3.0.2 # via # great-expectations # nbconvert mmh3==4.1.0 + # via feast (setup.py) mock==2.0.0 + # via feast (setup.py) moto==4.2.14 + # via feast (setup.py) msal==1.31.0 # via # azure-identity @@ -417,10 +466,13 @@ multidict==6.1.0 # aiohttp # yarl mypy==1.11.2 - # via sqlalchemy + # via + # feast (setup.py) + # sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.3.0 + # via feast (setup.py) nbclient==0.10.0 # via nbconvert 
nbconvert==7.16.4 @@ -443,6 +495,7 @@ notebook-shim==0.2.4 # notebook numpy==1.26.4 # via + # feast (setup.py) # altair # dask # db-dtypes @@ -476,6 +529,7 @@ packaging==24.1 # sphinx pandas==2.2.2 # via + # feast (setup.py) # altair # dask # dask-expr @@ -501,6 +555,7 @@ pexpect==4.9.0 pip==24.2 # via pip-tools pip-tools==7.4.1 + # via feast (setup.py) platformdirs==3.11.0 # via # jupyter-core @@ -513,8 +568,11 @@ ply==3.11 portalocker==2.10.1 # via msal-extensions pre-commit==3.3.1 + # via feast (setup.py) prometheus-client==0.20.0 - # via jupyter-server + # via + # feast (setup.py) + # jupyter-server prompt-toolkit==3.0.47 # via ipython proto-plus==1.24.0 @@ -525,6 +583,7 @@ proto-plus==1.24.0 # google-cloud-datastore protobuf==4.25.4 # via + # feast (setup.py) # google-api-core # google-cloud-bigquery-storage # google-cloud-bigtable @@ -540,9 +599,12 @@ protobuf==4.25.4 # proto-plus # substrait psutil==5.9.0 - # via ipykernel -psycopg[binary, pool]==3.2.1 -psycopg-binary==3.2.1 + # via + # feast (setup.py) + # ipykernel +psycopg[binary, pool]==3.1.18 + # via feast (setup.py) +psycopg-binary==3.1.18 # via psycopg psycopg-pool==3.2.2 # via psycopg @@ -553,12 +615,14 @@ ptyprocess==0.7.0 pure-eval==0.2.3 # via stack-data py==1.11.0 + # via feast (setup.py) py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark pyarrow==16.1.0 # via + # feast (setup.py) # dask-expr # db-dtypes # deltalake @@ -574,28 +638,35 @@ pyasn1==0.6.1 pyasn1-modules==0.4.1 # via google-auth pybindgen==0.22.1 + # via feast (setup.py) pycparser==2.22 # via cffi pydantic==2.9.1 # via + # feast (setup.py) # fastapi # great-expectations pydantic-core==2.23.3 # via pydantic pygments==2.18.0 # via + # feast (setup.py) # ipython # nbconvert # rich # sphinx pyjwt[crypto]==2.9.0 # via + # feast (setup.py) # msal # singlestoredb # snowflake-connector-python pymssql==2.3.1 + # via feast (setup.py) pymysql==1.1.1 + # via feast (setup.py) pyodbc==5.1.0 + # via feast (setup.py) 
pyopenssl==24.2.1 # via snowflake-connector-python pyparsing==3.1.4 @@ -605,8 +676,10 @@ pyproject-hooks==1.1.0 # build # pip-tools pyspark==3.5.2 + # via feast (setup.py) pytest==7.4.4 # via + # feast (setup.py) # pytest-benchmark # pytest-cov # pytest-env @@ -616,13 +689,21 @@ pytest==7.4.4 # pytest-timeout # pytest-xdist pytest-benchmark==3.4.1 + # via feast (setup.py) pytest-cov==5.0.0 + # via feast (setup.py) pytest-env==1.1.3 + # via feast (setup.py) pytest-lazy-fixture==0.6.3 + # via feast (setup.py) pytest-mock==1.10.4 + # via feast (setup.py) pytest-ordering==0.6 + # via feast (setup.py) pytest-timeout==1.4.2 + # via feast (setup.py) pytest-xdist==3.6.1 + # via feast (setup.py) python-dateutil==2.9.0.post0 # via # arrow @@ -640,6 +721,7 @@ python-dotenv==1.0.1 python-json-logger==2.0.7 # via jupyter-events python-keycloak==4.2.2 + # via feast (setup.py) pytz==2024.2 # via # great-expectations @@ -649,6 +731,7 @@ pytz==2024.2 # trino pyyaml==6.0.2 # via + # feast (setup.py) # dask # ibis-substrait # jupyter-events @@ -662,15 +745,19 @@ pyzmq==26.2.0 # jupyter-client # jupyter-server redis==4.6.0 + # via feast (setup.py) referencing==0.35.1 # via # jsonschema # jsonschema-specifications # jupyter-events regex==2024.7.24 - # via parsimonious + # via + # feast (setup.py) + # parsimonious requests==2.32.3 # via + # feast (setup.py) # azure-core # docker # google-api-core @@ -716,6 +803,7 @@ ruamel-yaml==0.17.40 ruamel-yaml-clib==0.2.8 # via ruamel-yaml ruff==0.6.4 + # via feast (setup.py) s3transfer==0.10.2 # via boto3 scipy==1.13.1 @@ -730,6 +818,7 @@ setuptools==74.1.2 # pip-tools # singlestoredb singlestoredb==1.6.3 + # via feast (setup.py) six==1.16.0 # via # asttokens @@ -750,11 +839,13 @@ sniffio==1.3.1 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.12.1 + # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.6 # via beautifulsoup4 sphinx==6.2.1 + # via feast (setup.py) 
sphinxcontrib-applehelp==2.0.0 # via sphinx sphinxcontrib-devhelp==2.0.0 @@ -768,9 +859,11 @@ sphinxcontrib-qthelp==2.0.0 sphinxcontrib-serializinghtml==2.0.0 # via sphinx sqlalchemy[mypy]==2.0.34 + # via feast (setup.py) sqlglot==23.12.2 # via ibis-framework sqlite-vec==0.1.1 + # via feast (setup.py) sqlparams==6.1.0 # via singlestoredb stack-data==0.6.3 @@ -780,17 +873,21 @@ starlette==0.38.5 substrait==0.22.0 # via ibis-substrait tabulate==0.9.0 + # via feast (setup.py) tenacity==8.5.0 + # via feast (setup.py) terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals testcontainers==4.4.0 + # via feast (setup.py) thriftpy2==0.5.2 # via happybase tinycss2==1.3.0 # via nbconvert toml==0.10.2 + # via feast (setup.py) tomli==2.0.1 # via # build @@ -818,7 +915,9 @@ tornado==6.4.1 # notebook # terminado tqdm==4.66.5 - # via great-expectations + # via + # feast (setup.py) + # great-expectations traitlets==5.14.3 # via # comm @@ -835,23 +934,37 @@ traitlets==5.14.3 # nbconvert # nbformat trino==0.329.0 + # via feast (setup.py) typeguard==4.3.0 + # via feast (setup.py) types-cffi==1.16.0.20240331 # via types-pyopenssl types-protobuf==3.19.22 - # via mypy-protobuf + # via + # feast (setup.py) + # mypy-protobuf types-pymysql==1.1.0.20240524 + # via feast (setup.py) types-pyopenssl==24.1.0.20240722 # via types-redis types-python-dateutil==2.9.0.20240906 - # via arrow + # via + # feast (setup.py) + # arrow types-pytz==2024.1.0.20240417 + # via feast (setup.py) types-pyyaml==6.0.12.20240808 + # via feast (setup.py) types-redis==4.6.0.20240903 + # via feast (setup.py) types-requests==2.30.0.0 + # via feast (setup.py) types-setuptools==74.1.0.20240907 - # via types-cffi + # via + # feast (setup.py) + # types-cffi types-tabulate==0.9.0.20240106 + # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests typing-extensions==4.12.2 @@ -889,6 +1002,7 @@ uri-template==1.3.0 # via jsonschema urllib3==1.26.20 # via + # feast (setup.py) # botocore # docker # 
elastic-transport @@ -900,10 +1014,13 @@ urllib3==1.26.20 # snowflake-connector-python # testcontainers uvicorn[standard]==0.30.6 + # via feast (setup.py) uvloop==0.20.0 # via uvicorn virtualenv==20.23.0 - # via pre-commit + # via + # feast (setup.py) + # pre-commit watchfiles==0.24.0 # via uvicorn wcwidth==0.2.13 diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 0ae2fcf9d6..8a7ac763c0 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -11,36 +11,36 @@ attrs==24.2.0 # jsonschema # referencing bigtree==0.21.1 -cachetools==5.5.0 - # via google-auth + # via feast (setup.py) certifi==2024.8.30 - # via - # kubernetes - # requests + # via requests charset-normalizer==3.3.2 # via requests click==8.1.7 # via + # feast (setup.py) # dask # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 + # via feast (setup.py) dask[dataframe]==2024.8.0 - # via dask-expr + # via + # feast (setup.py) + # dask-expr dask-expr==1.1.10 # via dask dill==0.3.8 + # via feast (setup.py) exceptiongroup==1.2.2 # via anyio fastapi==0.114.1 + # via feast (setup.py) fsspec==2024.9.0 # via dask -google-auth==2.34.0 - # via kubernetes -greenlet==3.1.0 - # via sqlalchemy gunicorn==23.0.0 + # via feast (setup.py) h11==0.14.0 # via uvicorn httptools==0.6.1 @@ -54,98 +54,97 @@ importlib-metadata==8.5.0 # dask # typeguard jinja2==3.1.4 + # via feast (setup.py) jsonschema==4.23.0 + # via feast (setup.py) jsonschema-specifications==2023.12.1 # via jsonschema -kubernetes==20.13.0 locket==1.0.0 # via partd markupsafe==2.1.5 # via jinja2 mmh3==4.1.0 + # via feast (setup.py) mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.6.0 + # via feast (setup.py) numpy==1.26.4 # via + # feast (setup.py) # dask # pandas # pyarrow -oauthlib==3.2.2 - # via requests-oauthlib packaging==24.1 # via # dask # gunicorn pandas==2.2.2 # via + # feast (setup.py) # dask # 
dask-expr partd==1.4.2 # via dask prometheus-client==0.20.0 + # via feast (setup.py) protobuf==4.25.4 - # via mypy-protobuf + # via + # feast (setup.py) + # mypy-protobuf psutil==6.0.0 + # via feast (setup.py) pyarrow==17.0.0 - # via dask-expr -pyasn1==0.6.1 # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 - # via google-auth + # feast (setup.py) + # dask-expr pydantic==2.9.1 - # via fastapi + # via + # feast (setup.py) + # fastapi pydantic-core==2.23.3 # via pydantic pygments==2.18.0 + # via feast (setup.py) pyjwt==2.9.0 + # via feast (setup.py) python-dateutil==2.9.0.post0 - # via - # kubernetes - # pandas + # via pandas python-dotenv==1.0.1 # via uvicorn pytz==2024.2 # via pandas pyyaml==6.0.2 # via + # feast (setup.py) # dask - # kubernetes # uvicorn referencing==0.35.1 # via # jsonschema # jsonschema-specifications requests==2.32.3 - # via - # kubernetes - # requests-oauthlib -requests-oauthlib==2.0.0 - # via kubernetes + # via feast (setup.py) rpds-py==0.20.0 # via # jsonschema # referencing -rsa==4.9 - # via google-auth -setuptools==74.1.2 - # via kubernetes six==1.16.0 - # via - # kubernetes - # python-dateutil + # via python-dateutil sniffio==1.3.1 # via anyio sqlalchemy[mypy]==2.0.34 + # via feast (setup.py) starlette==0.38.5 # via fastapi tabulate==0.9.0 + # via feast (setup.py) tenacity==8.5.0 + # via feast (setup.py) toml==0.10.2 + # via feast (setup.py) tomli==2.0.1 # via mypy toolz==0.12.1 @@ -153,7 +152,9 @@ toolz==0.12.1 # dask # partd tqdm==4.66.5 + # via feast (setup.py) typeguard==4.3.0 + # via feast (setup.py) types-protobuf==5.27.0.20240907 # via mypy-protobuf typing-extensions==4.12.2 @@ -170,16 +171,13 @@ typing-extensions==4.12.2 tzdata==2024.1 # via pandas urllib3==2.2.2 - # via - # kubernetes - # requests + # via requests uvicorn[standard]==0.30.6 + # via feast (setup.py) uvloop==0.20.0 # via uvicorn watchfiles==0.24.0 # via uvicorn -websocket-client==1.8.0 - # via kubernetes websockets==13.0.1 # via uvicorn zipp==3.20.1 diff --git 
a/setup.py b/setup.py index d8bc55e334..d412541b7d 100644 --- a/setup.py +++ b/setup.py @@ -63,7 +63,6 @@ "psutil", "bigtree>=0.19.2", "pyjwt", - "kubernetes<=20.13.0", ] GCP_REQUIRED = [ From c5a4d907bf34f4cf7477b212cd2820b0e7d24b7b Mon Sep 17 00:00:00 2001 From: Harri Lehtola <1781172+peruukki@users.noreply.github.com> Date: Tue, 17 Sep 2024 09:16:19 +0300 Subject: [PATCH 61/96] fix: Move tslib from devDependencies to dependencies in Feast UI (#4525) tslib is a runtime dependency, so it should be in dependencies. This ensures the specified version is installed also when the package is used as a module. Signed-off-by: Harri Lehtola --- ui/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ui/package.json b/ui/package.json index cd80859aa1..3a609f3c83 100644 --- a/ui/package.json +++ b/ui/package.json @@ -47,6 +47,7 @@ "react-query": "^3.34.12", "react-router-dom": "6", "react-scripts": "^5.0.0", + "tslib": "^2.3.1", "use-query-params": "^1.2.3", "zod": "^3.11.6" }, @@ -105,7 +106,6 @@ "rollup-plugin-svg": "^2.0.0", "rollup-plugin-svgo": "^1.1.0", "rollup-plugin-terser": "^7.0.2", - "tslib": "^2.3.1", "typescript": "^4.4.2" }, "description": "Web UI for the [Feast Feature Store](https://feast.dev/)", From 7535b4036ce980c9a05bc33a9e61a7938ea1303e Mon Sep 17 00:00:00 2001 From: Theodor Mihalache <84387487+tmihalac@users.noreply.github.com> Date: Tue, 17 Sep 2024 13:21:40 -0400 Subject: [PATCH 62/96] fix: Added Offline Store Arrow client errors handler (#4524) * fix: Added Offline Store Arrow client errors handler Signed-off-by: Theodor Mihalache * Added more tests Signed-off-by: Theodor Mihalache --------- Signed-off-by: Theodor Mihalache --- sdk/python/feast/arrow_error_handler.py | 49 +++++++++++++ .../feast/infra/offline_stores/remote.py | 68 ++++++++++++++++--- sdk/python/feast/offline_server.py | 54 ++++++++++----- .../client/arrow_flight_auth_interceptor.py | 9 --- sdk/python/feast/permissions/server/arrow.py | 31 ++------- 
.../tests/unit/test_arrow_error_decorator.py | 33 +++++++++ sdk/python/tests/unit/test_offline_server.py | 32 ++++++++- 7 files changed, 215 insertions(+), 61 deletions(-) create mode 100644 sdk/python/feast/arrow_error_handler.py create mode 100644 sdk/python/tests/unit/test_arrow_error_decorator.py diff --git a/sdk/python/feast/arrow_error_handler.py b/sdk/python/feast/arrow_error_handler.py new file mode 100644 index 0000000000..e873592bd5 --- /dev/null +++ b/sdk/python/feast/arrow_error_handler.py @@ -0,0 +1,49 @@ +import logging +from functools import wraps + +import pyarrow.flight as fl + +from feast.errors import FeastError + +logger = logging.getLogger(__name__) + + +def arrow_client_error_handling_decorator(func): + @wraps(func) + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except Exception as e: + mapped_error = FeastError.from_error_detail(_get_exception_data(e.args[0])) + if mapped_error is not None: + raise mapped_error + raise e + + return wrapper + + +def arrow_server_error_handling_decorator(func): + @wraps(func) + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except Exception as e: + if isinstance(e, FeastError): + raise fl.FlightError(e.to_error_detail()) + + return wrapper + + +def _get_exception_data(except_str) -> str: + substring = "Flight error: " + + # Find the starting index of the substring + position = except_str.find(substring) + end_json_index = except_str.find("}") + + if position != -1 and end_json_index != -1: + # Extract the part of the string after the substring + result = except_str[position + len(substring) : end_json_index + 1] + return result + + return "" diff --git a/sdk/python/feast/infra/offline_stores/remote.py b/sdk/python/feast/infra/offline_stores/remote.py index 40239c8950..8154f75f87 100644 --- a/sdk/python/feast/infra/offline_stores/remote.py +++ b/sdk/python/feast/infra/offline_stores/remote.py @@ -10,9 +10,12 @@ import pyarrow as pa import pyarrow.flight as fl 
import pyarrow.parquet +from pyarrow import Schema +from pyarrow._flight import FlightCallOptions, FlightDescriptor, Ticket from pydantic import StrictInt, StrictStr from feast import OnDemandFeatureView +from feast.arrow_error_handler import arrow_client_error_handling_decorator from feast.data_source import DataSource from feast.feature_logging import ( FeatureServiceLoggingSource, @@ -27,8 +30,10 @@ RetrievalMetadata, ) from feast.infra.registry.base_registry import BaseRegistry +from feast.permissions.auth.auth_type import AuthType +from feast.permissions.auth_model import AuthConfig from feast.permissions.client.arrow_flight_auth_interceptor import ( - build_arrow_flight_client, + FlightAuthInterceptorFactory, ) from feast.repo_config import FeastConfigBaseModel, RepoConfig from feast.saved_dataset import SavedDatasetStorage @@ -36,6 +41,43 @@ logger = logging.getLogger(__name__) +class FeastFlightClient(fl.FlightClient): + @arrow_client_error_handling_decorator + def get_flight_info( + self, descriptor: FlightDescriptor, options: FlightCallOptions = None + ): + return super().get_flight_info(descriptor, options) + + @arrow_client_error_handling_decorator + def do_get(self, ticket: Ticket, options: FlightCallOptions = None): + return super().do_get(ticket, options) + + @arrow_client_error_handling_decorator + def do_put( + self, + descriptor: FlightDescriptor, + schema: Schema, + options: FlightCallOptions = None, + ): + return super().do_put(descriptor, schema, options) + + @arrow_client_error_handling_decorator + def list_flights(self, criteria: bytes = b"", options: FlightCallOptions = None): + return super().list_flights(criteria, options) + + @arrow_client_error_handling_decorator + def list_actions(self, options: FlightCallOptions = None): + return super().list_actions(options) + + +def build_arrow_flight_client(host: str, port, auth_config: AuthConfig): + if auth_config.type != AuthType.NONE.value: + middlewares = 
[FlightAuthInterceptorFactory(auth_config)] + return FeastFlightClient(f"grpc://{host}:{port}", middleware=middlewares) + + return FeastFlightClient(f"grpc://{host}:{port}") + + class RemoteOfflineStoreConfig(FeastConfigBaseModel): type: Literal["remote"] = "remote" host: StrictStr @@ -48,7 +90,7 @@ class RemoteOfflineStoreConfig(FeastConfigBaseModel): class RemoteRetrievalJob(RetrievalJob): def __init__( self, - client: fl.FlightClient, + client: FeastFlightClient, api: str, api_parameters: Dict[str, Any], entity_df: Union[pd.DataFrame, str] = None, @@ -338,7 +380,7 @@ def _send_retrieve_remote( api_parameters: Dict[str, Any], entity_df: Union[pd.DataFrame, str], table: pa.Table, - client: fl.FlightClient, + client: FeastFlightClient, ): command_descriptor = _call_put( api, @@ -351,19 +393,19 @@ def _send_retrieve_remote( def _call_get( - client: fl.FlightClient, + client: FeastFlightClient, command_descriptor: fl.FlightDescriptor, ): flight = client.get_flight_info(command_descriptor) ticket = flight.endpoints[0].ticket reader = client.do_get(ticket) - return reader.read_all() + return read_all(reader) def _call_put( api: str, api_parameters: Dict[str, Any], - client: fl.FlightClient, + client: FeastFlightClient, entity_df: Union[pd.DataFrame, str], table: pa.Table, ): @@ -391,7 +433,7 @@ def _put_parameters( command_descriptor: fl.FlightDescriptor, entity_df: Union[pd.DataFrame, str], table: pa.Table, - client: fl.FlightClient, + client: FeastFlightClient, ): updatedTable: pa.Table @@ -404,10 +446,20 @@ def _put_parameters( writer, _ = client.do_put(command_descriptor, updatedTable.schema) - writer.write_table(updatedTable) + write_table(writer, updatedTable) + + +@arrow_client_error_handling_decorator +def write_table(writer, updated_table: pa.Table): + writer.write_table(updated_table) writer.close() +@arrow_client_error_handling_decorator +def read_all(reader): + return reader.read_all() + + def _create_empty_table(): schema = pa.schema( { diff --git 
a/sdk/python/feast/offline_server.py b/sdk/python/feast/offline_server.py index 839acada93..ff3db579d0 100644 --- a/sdk/python/feast/offline_server.py +++ b/sdk/python/feast/offline_server.py @@ -9,16 +9,18 @@ import pyarrow.flight as fl from feast import FeatureStore, FeatureView, utils +from feast.arrow_error_handler import arrow_server_error_handling_decorator from feast.feature_logging import FeatureServiceLoggingSource from feast.feature_view import DUMMY_ENTITY_NAME from feast.infra.offline_stores.offline_utils import get_offline_store_from_config from feast.permissions.action import AuthzedAction from feast.permissions.security_manager import assert_permissions from feast.permissions.server.arrow import ( - arrowflight_middleware, + AuthorizationMiddlewareFactory, inject_user_details_decorator, ) from feast.permissions.server.utils import ( + AuthManagerType, ServerType, init_auth_manager, init_security_manager, @@ -34,7 +36,7 @@ class OfflineServer(fl.FlightServerBase): def __init__(self, store: FeatureStore, location: str, **kwargs): super(OfflineServer, self).__init__( location, - middleware=arrowflight_middleware( + middleware=self.arrow_flight_auth_middleware( str_to_auth_manager_type(store.config.auth_config.type) ), **kwargs, @@ -45,6 +47,25 @@ def __init__(self, store: FeatureStore, location: str, **kwargs): self.store = store self.offline_store = get_offline_store_from_config(store.config.offline_store) + def arrow_flight_auth_middleware( + self, + auth_type: AuthManagerType, + ) -> dict[str, fl.ServerMiddlewareFactory]: + """ + A dictionary with the configured middlewares to support extracting the user details when the authorization manager is defined. + The authorization middleware key is `auth`. + + Returns: + dict[str, fl.ServerMiddlewareFactory]: Optional dictionary of middlewares. If the authorization type is set to `NONE`, it returns an empty dict. 
+ """ + + if auth_type == AuthManagerType.NONE: + return {} + + return { + "auth": AuthorizationMiddlewareFactory(), + } + @classmethod def descriptor_to_key(self, descriptor: fl.FlightDescriptor): return ( @@ -61,15 +82,7 @@ def _make_flight_info(self, key: Any, descriptor: fl.FlightDescriptor): return fl.FlightInfo(schema, descriptor, endpoints, -1, -1) @inject_user_details_decorator - def get_flight_info( - self, context: fl.ServerCallContext, descriptor: fl.FlightDescriptor - ): - key = OfflineServer.descriptor_to_key(descriptor) - if key in self.flights: - return self._make_flight_info(key, descriptor) - raise KeyError("Flight not found.") - - @inject_user_details_decorator + @arrow_server_error_handling_decorator def list_flights(self, context: fl.ServerCallContext, criteria: bytes): for key, table in self.flights.items(): if key[1] is not None: @@ -79,9 +92,20 @@ def list_flights(self, context: fl.ServerCallContext, criteria: bytes): yield self._make_flight_info(key, descriptor) + @inject_user_details_decorator + @arrow_server_error_handling_decorator + def get_flight_info( + self, context: fl.ServerCallContext, descriptor: fl.FlightDescriptor + ): + key = OfflineServer.descriptor_to_key(descriptor) + if key in self.flights: + return self._make_flight_info(key, descriptor) + raise KeyError("Flight not found.") + # Expects to receive request parameters and stores them in the flights dictionary # Indexed by the unique command @inject_user_details_decorator + @arrow_server_error_handling_decorator def do_put( self, context: fl.ServerCallContext, @@ -179,6 +203,7 @@ def _validate_do_get_parameters(self, command: dict): # Extracts the API parameters from the flights dictionary, delegates the execution to the FeatureStore instance # and returns the stream of data @inject_user_details_decorator + @arrow_server_error_handling_decorator def do_get(self, context: fl.ServerCallContext, ticket: fl.Ticket): key = ast.literal_eval(ticket.ticket.decode()) if key not in 
self.flights: @@ -337,6 +362,7 @@ def pull_latest_from_table_or_query(self, command: dict): utils.make_tzaware(datetime.fromisoformat(command["end_date"])), ) + @arrow_server_error_handling_decorator def list_actions(self, context): return [ ( @@ -431,12 +457,6 @@ def persist(self, command: dict, key: str): traceback.print_exc() raise e - def do_action(self, context: fl.ServerCallContext, action: fl.Action): - pass - - def do_drop_dataset(self, dataset): - pass - def remove_dummies(fv: FeatureView) -> FeatureView: """ diff --git a/sdk/python/feast/permissions/client/arrow_flight_auth_interceptor.py b/sdk/python/feast/permissions/client/arrow_flight_auth_interceptor.py index 7ef84fbeae..c3281bfa51 100644 --- a/sdk/python/feast/permissions/client/arrow_flight_auth_interceptor.py +++ b/sdk/python/feast/permissions/client/arrow_flight_auth_interceptor.py @@ -1,6 +1,5 @@ import pyarrow.flight as fl -from feast.permissions.auth.auth_type import AuthType from feast.permissions.auth_model import AuthConfig from feast.permissions.client.client_auth_token import get_auth_token @@ -28,11 +27,3 @@ def __init__(self, auth_config: AuthConfig): def start_call(self, info): return FlightBearerTokenInterceptor(self.auth_config) - - -def build_arrow_flight_client(host: str, port, auth_config: AuthConfig): - if auth_config.type != AuthType.NONE.value: - middleware_factory = FlightAuthInterceptorFactory(auth_config) - return fl.FlightClient(f"grpc://{host}:{port}", middleware=[middleware_factory]) - else: - return fl.FlightClient(f"grpc://{host}:{port}") diff --git a/sdk/python/feast/permissions/server/arrow.py b/sdk/python/feast/permissions/server/arrow.py index 5eba7d0916..4f0afc3ee5 100644 --- a/sdk/python/feast/permissions/server/arrow.py +++ b/sdk/python/feast/permissions/server/arrow.py @@ -5,7 +5,7 @@ import asyncio import functools import logging -from typing import Optional, cast +from typing import cast import pyarrow.flight as fl from pyarrow.flight import ServerCallContext 
@@ -14,41 +14,19 @@ get_auth_manager, ) from feast.permissions.security_manager import get_security_manager -from feast.permissions.server.utils import ( - AuthManagerType, -) from feast.permissions.user import User logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) -def arrowflight_middleware( - auth_type: AuthManagerType, -) -> Optional[dict[str, fl.ServerMiddlewareFactory]]: - """ - A dictionary with the configured middlewares to support extracting the user details when the authorization manager is defined. - The authorization middleware key is `auth`. - - Returns: - dict[str, fl.ServerMiddlewareFactory]: Optional dictionary of middlewares. If the authorization type is set to `NONE`, it returns `None`. - """ - - if auth_type == AuthManagerType.NONE: - return None - - return { - "auth": AuthorizationMiddlewareFactory(), - } - - class AuthorizationMiddlewareFactory(fl.ServerMiddlewareFactory): """ A middleware factory to intercept the authorization header and propagate it to the authorization middleware. """ - def __init__(self): - pass + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) def start_call(self, info, headers): """ @@ -65,7 +43,8 @@ class AuthorizationMiddleware(fl.ServerMiddleware): A server middleware holding the authorization header and offering a method to extract the user credentials. 
""" - def __init__(self, access_token: str): + def __init__(self, access_token: str, *args, **kwargs): + super().__init__(*args, **kwargs) self.access_token = access_token def call_completed(self, exception): diff --git a/sdk/python/tests/unit/test_arrow_error_decorator.py b/sdk/python/tests/unit/test_arrow_error_decorator.py new file mode 100644 index 0000000000..fc350d34c0 --- /dev/null +++ b/sdk/python/tests/unit/test_arrow_error_decorator.py @@ -0,0 +1,33 @@ +import pyarrow.flight as fl +import pytest + +from feast.arrow_error_handler import arrow_client_error_handling_decorator +from feast.errors import PermissionNotFoundException + +permissionError = PermissionNotFoundException("dummy_name", "dummy_project") + + +@arrow_client_error_handling_decorator +def decorated_method(error): + raise error + + +@pytest.mark.parametrize( + "error, expected_raised_error", + [ + (fl.FlightError("Flight error: "), fl.FlightError("Flight error: ")), + ( + fl.FlightError(f"Flight error: {permissionError.to_error_detail()}"), + permissionError, + ), + (fl.FlightError("Test Error"), fl.FlightError("Test Error")), + (RuntimeError("Flight error: "), RuntimeError("Flight error: ")), + (permissionError, permissionError), + ], +) +def test_rest_error_handling_with_feast_exception(error, expected_raised_error): + with pytest.raises( + type(expected_raised_error), + match=str(expected_raised_error), + ): + decorated_method(error) diff --git a/sdk/python/tests/unit/test_offline_server.py b/sdk/python/tests/unit/test_offline_server.py index 237e2ecad4..7c38d9bfca 100644 --- a/sdk/python/tests/unit/test_offline_server.py +++ b/sdk/python/tests/unit/test_offline_server.py @@ -8,7 +8,8 @@ import pyarrow.flight as flight import pytest -from feast import FeatureStore +from feast import FeatureStore, FeatureView, FileSource +from feast.errors import FeatureViewNotFoundException from feast.feature_logging import FeatureServiceLoggingSource from feast.infra.offline_stores.remote import ( 
RemoteOfflineStore, @@ -120,6 +121,35 @@ def test_remote_offline_store_apis(): _test_pull_all_from_table_or_query(str(temp_dir), fs) +def test_remote_offline_store_exception_handling(): + with tempfile.TemporaryDirectory() as temp_dir: + store = default_store(str(temp_dir)) + location = "grpc+tcp://localhost:0" + + _init_auth_manager(store=store) + server = OfflineServer(store=store, location=location) + + assertpy.assert_that(server).is_not_none + assertpy.assert_that(server.port).is_not_equal_to(0) + + fs = remote_feature_store(server) + data_file = os.path.join( + temp_dir, fs.project, "feature_repo/data/driver_stats.parquet" + ) + data_df = pd.read_parquet(data_file) + + with pytest.raises( + FeatureViewNotFoundException, + match="Feature view test does not exist in project test_remote_offline", + ): + RemoteOfflineStore.offline_write_batch( + fs.config, + FeatureView(name="test", source=FileSource(path="test")), + pa.Table.from_pandas(data_df), + progress=None, + ) + + def _test_get_historical_features_returns_data(fs: FeatureStore): entity_df = pd.DataFrame.from_dict( { From 58c6fc11e690704d80a829c272400446626ba810 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Wed, 18 Sep 2024 13:57:22 -0400 Subject: [PATCH 63/96] Update CONTRIBUTING.md --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2bc0915002..eae34fe0c3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,3 +1,3 @@

Development Guide: Main Feast Repository

-> Please see [Development Guide](https://docs.feast.dev/project/development-guide) for project level development instructions, including instructions for Maintainers. +> Please see [Development Guide](docs/project/development-guide.md) for project level development instructions, including instructions for Maintainers. From b3c93a0704856324151960b15308fc94e4765914 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Wed, 18 Sep 2024 14:03:33 -0400 Subject: [PATCH 64/96] Update community.md --- docs/community.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/community.md b/docs/community.md index 21cca702bf..640b5238b8 100644 --- a/docs/community.md +++ b/docs/community.md @@ -2,6 +2,8 @@ ## Links & Resources +* [Come say hi on Slack!](https://communityinviter.com/apps/feastopensource/feast-the-open-source-feature-store) + * As a part of the Linux Foundation, we ask community members to adhere to the [Linux Foundation Code of Conduct](https://events.linuxfoundation.org/about/code-of-conduct/) * [GitHub Repository](https://github.com/feast-dev/feast/): Find the complete Feast codebase on GitHub. * [Community Governance Doc](https://github.com/feast-dev/feast/blob/master/community): See the governance model of Feast, including who the maintainers are and how decisions are made. * [Google Folder](https://drive.google.com/drive/u/0/folders/1jgMHOPDT2DvBlJeO9LCM79DP4lm4eOrR): This folder is used as a central repository for all Feast resources. 
For example: From 0fb76e9041885659c68e294b0c033c62050bd374 Mon Sep 17 00:00:00 2001 From: Bhargav Dodla <13788369+EXPEbdodla@users.noreply.github.com> Date: Wed, 18 Sep 2024 12:51:05 -0700 Subject: [PATCH 65/96] fix: Removed protobuf as a required dependency (#4535) * fix: Removed protobuf as a required dependency Signed-off-by: Bhargav Dodla * fix: Removed install-protoc-dependencies target Signed-off-by: Bhargav Dodla --------- Signed-off-by: Bhargav Dodla Co-authored-by: Bhargav Dodla --- pyproject.toml | 4 +--- sdk/python/requirements/py3.10-ci-requirements.txt | 1 - sdk/python/requirements/py3.10-requirements.txt | 4 +--- sdk/python/requirements/py3.11-ci-requirements.txt | 1 - sdk/python/requirements/py3.11-requirements.txt | 4 +--- sdk/python/requirements/py3.9-ci-requirements.txt | 1 - sdk/python/requirements/py3.9-requirements.txt | 4 +--- setup.py | 12 ++++-------- 8 files changed, 8 insertions(+), 23 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 15921e633c..283338a838 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,7 @@ [build-system] requires = [ "grpcio-tools>=1.56.2,<2", - "grpcio>=1.56.2,<2", - "mypy-protobuf==3.1", - "protobuf==4.24.0", + "mypy-protobuf>=3.1", "pybindgen==0.22.0", "setuptools>=60", "setuptools_scm>=6.2", diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 55df7ccb68..f09f3dded4 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -572,7 +572,6 @@ proto-plus==1.24.0 # google-cloud-datastore protobuf==4.25.4 # via - # feast (setup.py) # google-api-core # google-cloud-bigquery-storage # google-cloud-bigtable diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 3420c8a0e3..1c6f53cf69 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -89,9 
+89,7 @@ partd==1.4.2 prometheus-client==0.20.0 # via feast (setup.py) protobuf==4.25.4 - # via - # feast (setup.py) - # mypy-protobuf + # via mypy-protobuf psutil==6.0.0 # via feast (setup.py) pyarrow==17.0.0 diff --git a/sdk/python/requirements/py3.11-ci-requirements.txt b/sdk/python/requirements/py3.11-ci-requirements.txt index 6c5fb02e06..bed8145e2f 100644 --- a/sdk/python/requirements/py3.11-ci-requirements.txt +++ b/sdk/python/requirements/py3.11-ci-requirements.txt @@ -563,7 +563,6 @@ proto-plus==1.24.0 # google-cloud-datastore protobuf==4.25.4 # via - # feast (setup.py) # google-api-core # google-cloud-bigquery-storage # google-cloud-bigtable diff --git a/sdk/python/requirements/py3.11-requirements.txt b/sdk/python/requirements/py3.11-requirements.txt index 1d0ce54cc1..99a994e946 100644 --- a/sdk/python/requirements/py3.11-requirements.txt +++ b/sdk/python/requirements/py3.11-requirements.txt @@ -87,9 +87,7 @@ partd==1.4.2 prometheus-client==0.20.0 # via feast (setup.py) protobuf==4.25.4 - # via - # feast (setup.py) - # mypy-protobuf + # via mypy-protobuf psutil==6.0.0 # via feast (setup.py) pyarrow==17.0.0 diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index ae27849f4f..1015f46cff 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -583,7 +583,6 @@ proto-plus==1.24.0 # google-cloud-datastore protobuf==4.25.4 # via - # feast (setup.py) # google-api-core # google-cloud-bigquery-storage # google-cloud-bigtable diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 8a7ac763c0..4bd6a44857 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -91,9 +91,7 @@ partd==1.4.2 prometheus-client==0.20.0 # via feast (setup.py) protobuf==4.25.4 - # via - # feast (setup.py) - # mypy-protobuf + # via mypy-protobuf 
psutil==6.0.0 # via feast (setup.py) pyarrow==17.0.0 diff --git a/setup.py b/setup.py index d412541b7d..f332c21f71 100644 --- a/setup.py +++ b/setup.py @@ -18,10 +18,9 @@ import shutil import subprocess import sys - from pathlib import Path -from setuptools import find_packages, setup, Command +from setuptools import Command, find_packages, setup from setuptools.command.build_ext import build_ext as _build_ext from setuptools.command.build_py import build_py from setuptools.command.develop import develop @@ -43,7 +42,6 @@ "mmh3", "numpy>=1.22,<2", "pandas>=1.4.3,<3", - "protobuf>=4.24.0,<5.0.0", "pyarrow>=4", "pydantic>=2.0.0", "pygments>=2.12.0,<3", @@ -102,7 +100,7 @@ "psycopg[binary,pool]>=3.0.0,<4", ] -OPENTELEMETRY = ["prometheus_client","psutil"] +OPENTELEMETRY = ["prometheus_client", "psutil"] MYSQL_REQUIRED = ["pymysql", "types-PyMySQL"] @@ -139,7 +137,6 @@ GRPCIO_REQUIRED = [ "grpcio>=1.56.2,<2", - "grpcio-tools>=1.56.2,<2", "grpcio-reflection>=1.56.2,<2", "grpcio-health-checking>=1.56.2,<2", ] @@ -160,6 +157,7 @@ "virtualenv==20.23.0", "cryptography>=35.0,<43", "ruff>=0.3.3", + "grpcio-tools>=1.56.2,<2", "grpcio-testing>=1.56.2,<2", # FastAPI does not correctly pull starlette dependency on httpx see thread(https://github.com/tiangolo/fastapi/issues/5656). 
"httpx>=0.23.3", @@ -403,9 +401,7 @@ def run(self): use_scm_version=use_scm_version, setup_requires=[ "grpcio-tools>=1.56.2,<2", - "grpcio>=1.56.2,<2", - "mypy-protobuf==3.1", - "protobuf==4.24.0", + "mypy-protobuf>=3.1", "pybindgen==0.22.0", "setuptools_scm>=6.2", ], From 50b8f238b6f9adbc9ff0b20e18b78b2948c2f440 Mon Sep 17 00:00:00 2001 From: Abdul Hameed Date: Thu, 19 Sep 2024 14:04:56 -0400 Subject: [PATCH 66/96] fix: Logger settings for feature servers and updated logger for permission flow (#4531) * Logger setting for feature servers Signed-off-by: Abdul Hameed * added/updated logger for permission flow Signed-off-by: Abdul Hameed * set defualt logger level for feature servers as WARNING Signed-off-by: Abdul Hameed --------- Signed-off-by: Abdul Hameed --- .../feast-feature-server/templates/deployment.yaml | 8 ++++++++ infra/charts/feast-feature-server/values.yaml | 2 ++ .../feast/permissions/auth/kubernetes_token_parser.py | 6 ++++-- sdk/python/feast/permissions/auth/oidc_token_parser.py | 4 ++-- .../client/intra_comm_authentication_client_manager.py | 1 + sdk/python/feast/permissions/enforcer.py | 4 ++++ sdk/python/feast/permissions/server/arrow.py | 9 +++++---- sdk/python/feast/permissions/server/grpc.py | 7 ++++--- sdk/python/feast/permissions/server/utils.py | 1 - 9 files changed, 30 insertions(+), 12 deletions(-) diff --git a/infra/charts/feast-feature-server/templates/deployment.yaml b/infra/charts/feast-feature-server/templates/deployment.yaml index dc62be8b95..1f673280fe 100644 --- a/infra/charts/feast-feature-server/templates/deployment.yaml +++ b/infra/charts/feast-feature-server/templates/deployment.yaml @@ -42,20 +42,28 @@ spec: command: {{- if eq .Values.feast_mode "offline" }} - "feast" + - "--log-level" + - "{{ .Values.logLevel }}" - "serve_offline" - "-h" - "0.0.0.0" {{- else if eq .Values.feast_mode "ui" }} - "feast" + - "--log-level" + - "{{ .Values.logLevel }}" - "ui" - "-h" - "0.0.0.0" {{- else if eq .Values.feast_mode "registry" }} - 
"feast" + - "--log-level" + - "{{ .Values.logLevel }}" - "serve_registry" {{- else }} {{- if .Values.metrics.enlabled }} - "feast" + - "--log-level" + - "{{ .Values.logLevel }}" - "serve" - "--metrics" - "-h" diff --git a/infra/charts/feast-feature-server/values.yaml b/infra/charts/feast-feature-server/values.yaml index 22bbdeace0..f0bc55a646 100644 --- a/infra/charts/feast-feature-server/values.yaml +++ b/infra/charts/feast-feature-server/values.yaml @@ -11,6 +11,8 @@ image: # image.tag -- The Docker image tag (can be overwritten if custom feature server deps are needed for on demand transforms) tag: 0.40.0 +logLevel: "WARNING" # Set log level DEBUG, INFO, WARNING, ERROR, and CRITICAL (case-insensitive) + imagePullSecrets: [] nameOverride: "" fullnameOverride: "" diff --git a/sdk/python/feast/permissions/auth/kubernetes_token_parser.py b/sdk/python/feast/permissions/auth/kubernetes_token_parser.py index 7724163e5f..c34ebf386d 100644 --- a/sdk/python/feast/permissions/auth/kubernetes_token_parser.py +++ b/sdk/python/feast/permissions/auth/kubernetes_token_parser.py @@ -40,14 +40,16 @@ async def user_details_from_access_token(self, access_token: str) -> User: """ sa_namespace, sa_name = _decode_token(access_token) current_user = f"{sa_namespace}:{sa_name}" - logging.info(f"Received request from {sa_name} in {sa_namespace}") + logger.info( + f"Request received from ServiceAccount: {sa_name} in namespace: {sa_namespace}" + ) intra_communication_base64 = os.getenv("INTRA_COMMUNICATION_BASE64") if sa_name is not None and sa_name == intra_communication_base64: return User(username=sa_name, roles=[]) else: roles = self.get_roles(sa_namespace, sa_name) - logging.info(f"SA roles are: {roles}") + logger.info(f"Roles for ServiceAccount {sa_name}: {roles}") return User(username=current_user, roles=roles) diff --git a/sdk/python/feast/permissions/auth/oidc_token_parser.py b/sdk/python/feast/permissions/auth/oidc_token_parser.py index 28273e8c10..ffff7e7ad3 100644 --- 
a/sdk/python/feast/permissions/auth/oidc_token_parser.py +++ b/sdk/python/feast/permissions/auth/oidc_token_parser.py @@ -17,7 +17,6 @@ from feast.permissions.user import User logger = logging.getLogger(__name__) -logger.setLevel(logging.INFO) class OidcTokenParser(TokenParser): @@ -69,8 +68,9 @@ async def user_details_from_access_token(self, access_token: str) -> User: try: await self._validate_token(access_token) - logger.info("Validated token") + logger.debug("Token successfully validated.") except Exception as e: + logger.error(f"Token validation failed: {e}") raise AuthenticationError(f"Invalid token: {e}") optional_custom_headers = {"User-agent": "custom-user-agent"} diff --git a/sdk/python/feast/permissions/client/intra_comm_authentication_client_manager.py b/sdk/python/feast/permissions/client/intra_comm_authentication_client_manager.py index 678e1f39e5..30476316c1 100644 --- a/sdk/python/feast/permissions/client/intra_comm_authentication_client_manager.py +++ b/sdk/python/feast/permissions/client/intra_comm_authentication_client_manager.py @@ -13,6 +13,7 @@ class IntraCommAuthClientManager(AuthenticationClientManager): def __init__(self, auth_config: AuthConfig, intra_communication_base64: str): self.auth_config = auth_config self.intra_communication_base64 = intra_communication_base64 + logger.debug(f"AuthConfig type set to {self.auth_config.type}") def get_token(self): if self.auth_config.type == AuthType.OIDC.value: diff --git a/sdk/python/feast/permissions/enforcer.py b/sdk/python/feast/permissions/enforcer.py index d94a81ba04..d9855fef8c 100644 --- a/sdk/python/feast/permissions/enforcer.py +++ b/sdk/python/feast/permissions/enforcer.py @@ -67,8 +67,12 @@ def enforce_policy( if evaluator.is_decided(): grant, explanations = evaluator.grant() if not grant and not filter_only: + logger.error(f"Permission denied: {','.join(explanations)}") raise FeastPermissionError(",".join(explanations)) if grant: + logger.debug( + f"Permission granted for 
{type(resource).__name__}:{resource.name}" + ) _permitted_resources.append(resource) break else: diff --git a/sdk/python/feast/permissions/server/arrow.py b/sdk/python/feast/permissions/server/arrow.py index 4f0afc3ee5..bf517d94ac 100644 --- a/sdk/python/feast/permissions/server/arrow.py +++ b/sdk/python/feast/permissions/server/arrow.py @@ -17,7 +17,6 @@ from feast.permissions.user import User logger = logging.getLogger(__name__) -logger.setLevel(logging.INFO) class AuthorizationMiddlewareFactory(fl.ServerMiddlewareFactory): @@ -49,7 +48,9 @@ def __init__(self, access_token: str, *args, **kwargs): def call_completed(self, exception): if exception: - print(f"{AuthorizationMiddleware.__name__} received {exception}") + logger.exception( + f"{AuthorizationMiddleware.__name__} encountered an exception: {exception}" + ) async def extract_user(self) -> User: """ @@ -69,14 +70,14 @@ def inject_user_details(context: ServerCallContext): context: The endpoint context. """ if context.get_middleware("auth") is None: - logger.info("No `auth` middleware.") + logger.warning("No `auth` middleware.") return sm = get_security_manager() if sm is not None: auth_middleware = cast(AuthorizationMiddleware, context.get_middleware("auth")) current_user = asyncio.run(auth_middleware.extract_user()) - print(f"extracted user: {current_user}") + logger.debug(f"User extracted: {current_user}") sm.set_current_user(current_user) diff --git a/sdk/python/feast/permissions/server/grpc.py b/sdk/python/feast/permissions/server/grpc.py index 96f2690b88..9feea47a6c 100644 --- a/sdk/python/feast/permissions/server/grpc.py +++ b/sdk/python/feast/permissions/server/grpc.py @@ -9,7 +9,6 @@ from feast.permissions.security_manager import get_security_manager logger = logging.getLogger(__name__) -logger.setLevel(logging.INFO) class AuthInterceptor(grpc.ServerInterceptor): @@ -22,11 +21,13 @@ def intercept_service(self, continuation, handler_call_details): metadata=dict(handler_call_details.invocation_metadata) 
) - print(f"Fetching user for token: {len(access_token)}") + logger.debug( + f"Fetching user details for token of length: {len(access_token)}" + ) current_user = asyncio.run( auth_manager.token_parser.user_details_from_access_token(access_token) ) - print(f"User is: {current_user}") + logger.debug(f"User is: {current_user}") sm.set_current_user(current_user) return continuation(handler_call_details) diff --git a/sdk/python/feast/permissions/server/utils.py b/sdk/python/feast/permissions/server/utils.py index 9a8b319dbc..cd72ae5820 100644 --- a/sdk/python/feast/permissions/server/utils.py +++ b/sdk/python/feast/permissions/server/utils.py @@ -30,7 +30,6 @@ from feast.permissions.server.rest_token_extractor import RestTokenExtractor logger = logging.getLogger(__name__) -logger.setLevel(logging.INFO) class ServerType(enum.Enum): From cecca8360bed62ab2f4fddc5d3a888247ea0a87a Mon Sep 17 00:00:00 2001 From: Bhargav Dodla <13788369+EXPEbdodla@users.noreply.github.com> Date: Thu, 19 Sep 2024 11:36:58 -0700 Subject: [PATCH 67/96] fix: Adding protobuf<5 as a required dependency due to snowflake limitations (#4537) * fix: Removed protobuf as a required dependency Signed-off-by: Bhargav Dodla * fix: Removed install-protoc-dependencies target Signed-off-by: Bhargav Dodla * fix: Ran lock python dependencies to correct dependencies Signed-off-by: Bhargav Dodla * fix: Adding protobuf<5 as a required dependency due to snowflake limitations Signed-off-by: Bhargav Dodla --------- Signed-off-by: Bhargav Dodla Co-authored-by: Bhargav Dodla --- .github/workflows/build_wheels.yml | 1 + Makefile | 19 +++-- .../requirements/py3.10-ci-requirements.txt | 72 ++++++++++--------- .../requirements/py3.10-requirements.txt | 32 ++++----- .../requirements/py3.11-ci-requirements.txt | 72 ++++++++++--------- .../requirements/py3.11-requirements.txt | 32 ++++----- .../requirements/py3.9-ci-requirements.txt | 53 +++++++------- .../requirements/py3.9-requirements.txt | 28 ++++---- setup.py | 7 +- 9 
files changed, 163 insertions(+), 153 deletions(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index d8e5e484bb..14d0b7d5ae 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -97,6 +97,7 @@ jobs: # There's a `git restore` in here because `make install-go-ci-dependencies` is actually messing up go.mod & go.sum. run: | pip install -U pip setuptools wheel twine + make install-protoc-dependencies make build-ui git status git restore go.mod go.sum diff --git a/Makefile b/Makefile index 78a0b6d328..8a9f643967 100644 --- a/Makefile +++ b/Makefile @@ -52,13 +52,16 @@ install-python-ci-dependencies-uv-venv: uv pip install --no-deps -e . python setup.py build_python_protos --inplace +install-protoc-dependencies: + pip install "protobuf<5" "grpcio-tools>=1.56.2,<2" "mypy-protobuf>=3.1" + lock-python-ci-dependencies: uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py$(PYTHON_VERSION)-ci-requirements.txt package-protos: cp -r ${ROOT_DIR}/protos ${ROOT_DIR}/sdk/python/feast/protos -compile-protos-python: +compile-protos-python: install-protoc-dependencies python setup.py build_python_protos --inplace install-python: @@ -69,12 +72,14 @@ lock-python-dependencies: uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py$(PYTHON_VERSION)-requirements.txt lock-python-dependencies-all: - pixi run --environment py39 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.9-requirements.txt" - pixi run --environment py39 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.9-ci-requirements.txt" - pixi run --environment py310 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile --system --no-strip-extras setup.py --output-file 
sdk/python/requirements/py3.10-requirements.txt" - pixi run --environment py310 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.10-ci-requirements.txt" - pixi run --environment py311 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.11-requirements.txt" - pixi run --environment py311 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.11-ci-requirements.txt" + # Remove all existing requirements because we noticed the lock file is not always updated correctly. Removing and running the command again ensures that the lock file is always up to date. + rm -r sdk/python/requirements/* + pixi run --environment py39 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile -p 3.9 --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.9-requirements.txt" + pixi run --environment py39 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile -p 3.9 --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.9-ci-requirements.txt" + pixi run --environment py310 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile -p 3.10 --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.10-requirements.txt" + pixi run --environment py310 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile -p 3.10 --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.10-ci-requirements.txt" + pixi run --environment py311 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile -p 3.11 --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.11-requirements.txt" + pixi run --environment py311 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile -p 3.11 
--system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.11-ci-requirements.txt" benchmark-python: IS_TEST=True python -m pytest --integration --benchmark --benchmark-autosave --benchmark-save-data sdk/python/tests diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index f09f3dded4..59e799ebab 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -1,5 +1,5 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.10-ci-requirements.txt +# uv pip compile -p 3.10 --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.10-ci-requirements.txt aiobotocore==2.15.0 # via feast (setup.py) aiohappyeyeballs==2.4.0 @@ -16,7 +16,7 @@ altair==4.2.2 # via great-expectations annotated-types==0.7.0 # via pydantic -anyio==4.4.0 +anyio==4.5.0 # via # httpx # jupyter-server @@ -51,13 +51,13 @@ attrs==24.2.0 # aiohttp # jsonschema # referencing -azure-core==1.30.2 +azure-core==1.31.0 # via # azure-identity # azure-storage-blob azure-identity==1.17.1 # via feast (setup.py) -azure-storage-blob==12.22.0 +azure-storage-blob==12.23.0 # via feast (setup.py) babel==2.16.0 # via @@ -144,11 +144,11 @@ cryptography==42.0.8 # types-redis cython==3.0.11 # via thriftpy2 -dask[dataframe]==2024.8.2 +dask[dataframe]==2024.9.0 # via # feast (setup.py) # dask-expr -dask-expr==1.1.13 +dask-expr==1.1.14 # via dask db-dtypes==1.3.0 # via google-cloud-bigquery @@ -158,7 +158,7 @@ decorator==5.1.1 # via ipython defusedxml==0.7.1 # via nbconvert -deltalake==0.19.2 +deltalake==0.20.0 # via feast (setup.py) deprecation==2.1.0 # via python-keycloak @@ -187,11 +187,11 @@ execnet==2.1.1 # via pytest-xdist executing==2.1.0 # via stack-data -fastapi==0.114.1 +fastapi==0.115.0 # via feast (setup.py) 
fastjsonschema==2.20.0 # via nbformat -filelock==3.16.0 +filelock==3.16.1 # via # snowflake-connector-python # virtualenv @@ -256,7 +256,7 @@ googleapis-common-protos[grpc]==1.65.0 # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.20 +great-expectations==0.18.21 # via feast (setup.py) grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable @@ -302,15 +302,15 @@ httpx==0.27.2 # feast (setup.py) # jupyterlab # python-keycloak -ibis-framework[duckdb]==9.4.0 +ibis-framework[duckdb]==9.5.0 # via # feast (setup.py) # ibis-substrait ibis-substrait==4.0.1 # via feast (setup.py) -identify==2.6.0 +identify==2.6.1 # via pre-commit -idna==3.8 +idna==3.10 # via # anyio # httpx @@ -321,7 +321,9 @@ idna==3.8 imagesize==1.4.1 # via sphinx importlib-metadata==8.5.0 - # via dask + # via + # build + # dask iniconfig==2.0.0 # via pytest ipykernel==6.29.5 @@ -372,7 +374,7 @@ jsonschema[format-nongpl]==4.23.0 # nbformat jsonschema-specifications==2023.12.1 # via jsonschema -jupyter-client==8.6.2 +jupyter-client==8.6.3 # via # ipykernel # jupyter-server @@ -438,7 +440,7 @@ mistune==3.0.2 # via # great-expectations # nbconvert -mmh3==4.1.0 +mmh3==5.0.0 # via feast (setup.py) mock==2.0.0 # via feast (setup.py) @@ -506,6 +508,7 @@ packaging==24.1 # google-cloud-bigquery # great-expectations # gunicorn + # ibis-framework # ibis-substrait # ipykernel # jupyter-server @@ -570,8 +573,9 @@ proto-plus==1.24.0 # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-datastore -protobuf==4.25.4 +protobuf==4.25.5 # via + # feast (setup.py) # google-api-core # google-cloud-bigquery-storage # google-cloud-bigtable @@ -590,11 +594,11 @@ psutil==5.9.0 # via # feast (setup.py) # ipykernel -psycopg[binary, pool]==3.2.1 +psycopg[binary, pool]==3.2.2 # via feast (setup.py) -psycopg-binary==3.2.1 +psycopg-binary==3.2.2 # via psycopg -psycopg-pool==3.2.2 +psycopg-pool==3.2.3 # via psycopg ptyprocess==0.7.0 # via @@ -629,12 +633,12 @@ pybindgen==0.22.1 # via feast 
(setup.py) pycparser==2.22 # via cffi -pydantic==2.9.1 +pydantic==2.9.2 # via # feast (setup.py) # fastapi # great-expectations -pydantic-core==2.23.3 +pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via @@ -739,7 +743,7 @@ referencing==0.35.1 # jsonschema # jsonschema-specifications # jupyter-events -regex==2024.7.24 +regex==2024.9.11 # via # feast (setup.py) # parsimonious @@ -790,7 +794,7 @@ ruamel-yaml==0.17.40 # via great-expectations ruamel-yaml-clib==0.2.8 # via ruamel-yaml -ruff==0.6.4 +ruff==0.6.5 # via feast (setup.py) s3transfer==0.10.2 # via boto3 @@ -798,7 +802,7 @@ scipy==1.14.1 # via great-expectations send2trash==1.8.3 # via jupyter-server -setuptools==74.1.2 +setuptools==75.1.0 # via # grpcio-tools # jupyterlab @@ -826,7 +830,7 @@ sniffio==1.3.1 # httpx snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==3.12.1 +snowflake-connector-python[pandas]==3.12.2 # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python @@ -846,9 +850,9 @@ sphinxcontrib-qthelp==2.0.0 # via sphinx sphinxcontrib-serializinghtml==2.0.0 # via sphinx -sqlalchemy[mypy]==2.0.34 +sqlalchemy[mypy]==2.0.35 # via feast (setup.py) -sqlglot==25.18.0 +sqlglot==25.20.1 # via ibis-framework sqlite-vec==0.1.1 # via feast (setup.py) @@ -858,7 +862,7 @@ stack-data==0.6.3 # via ipython starlette==0.38.5 # via fastapi -substrait==0.22.0 +substrait==0.23.0 # via ibis-substrait tabulate==0.9.0 # via feast (setup.py) @@ -939,15 +943,15 @@ types-python-dateutil==2.9.0.20240906 # via # feast (setup.py) # arrow -types-pytz==2024.1.0.20240417 +types-pytz==2024.2.0.20240913 # via feast (setup.py) -types-pyyaml==6.0.12.20240808 +types-pyyaml==6.0.12.20240917 # via feast (setup.py) types-redis==4.6.0.20240903 # via feast (setup.py) types-requests==2.30.0.0 # via feast (setup.py) -types-setuptools==74.1.0.20240907 +types-setuptools==75.1.0.20240917 # via # feast (setup.py) # types-cffi @@ -986,7 +990,7 @@ tzlocal==5.2 # trino uri-template==1.3.0 # via 
jsonschema -urllib3==2.2.2 +urllib3==2.2.3 # via # feast (setup.py) # botocore @@ -1038,5 +1042,5 @@ xmltodict==0.13.0 # via moto yarl==1.11.1 # via aiohttp -zipp==3.20.1 +zipp==3.20.2 # via importlib-metadata diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 1c6f53cf69..26eeca3529 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -1,8 +1,8 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.10-requirements.txt +# uv pip compile -p 3.10 --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.10-requirements.txt annotated-types==0.7.0 # via pydantic -anyio==4.4.0 +anyio==4.5.0 # via # starlette # watchfiles @@ -25,17 +25,17 @@ cloudpickle==3.0.0 # via dask colorama==0.4.6 # via feast (setup.py) -dask[dataframe]==2024.8.2 +dask[dataframe]==2024.9.0 # via # feast (setup.py) # dask-expr -dask-expr==1.1.13 +dask-expr==1.1.14 # via dask dill==0.3.8 # via feast (setup.py) exceptiongroup==1.2.2 # via anyio -fastapi==0.114.1 +fastapi==0.115.0 # via feast (setup.py) fsspec==2024.9.0 # via dask @@ -45,7 +45,7 @@ h11==0.14.0 # via uvicorn httptools==0.6.1 # via uvicorn -idna==3.8 +idna==3.10 # via # anyio # requests @@ -61,14 +61,12 @@ locket==1.0.0 # via partd markupsafe==2.1.5 # via jinja2 -mmh3==4.1.0 +mmh3==5.0.0 # via feast (setup.py) mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy -mypy-protobuf==3.6.0 - # via feast (setup.py) numpy==1.26.4 # via # feast (setup.py) @@ -88,19 +86,19 @@ partd==1.4.2 # via dask prometheus-client==0.20.0 # via feast (setup.py) -protobuf==4.25.4 - # via mypy-protobuf +protobuf==4.25.5 + # via feast (setup.py) psutil==6.0.0 # via feast (setup.py) pyarrow==17.0.0 # via # feast (setup.py) # dask-expr -pydantic==2.9.1 +pydantic==2.9.2 # via # feast (setup.py) # fastapi 
-pydantic-core==2.23.3 +pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via feast (setup.py) @@ -131,7 +129,7 @@ six==1.16.0 # via python-dateutil sniffio==1.3.1 # via anyio -sqlalchemy[mypy]==2.0.34 +sqlalchemy[mypy]==2.0.35 # via feast (setup.py) starlette==0.38.5 # via fastapi @@ -151,8 +149,6 @@ tqdm==4.66.5 # via feast (setup.py) typeguard==4.3.0 # via feast (setup.py) -types-protobuf==5.27.0.20240907 - # via mypy-protobuf typing-extensions==4.12.2 # via # anyio @@ -165,7 +161,7 @@ typing-extensions==4.12.2 # uvicorn tzdata==2024.1 # via pandas -urllib3==2.2.2 +urllib3==2.2.3 # via requests uvicorn[standard]==0.30.6 # via feast (setup.py) @@ -175,5 +171,5 @@ watchfiles==0.24.0 # via uvicorn websockets==13.0.1 # via uvicorn -zipp==3.20.1 +zipp==3.20.2 # via importlib-metadata diff --git a/sdk/python/requirements/py3.11-ci-requirements.txt b/sdk/python/requirements/py3.11-ci-requirements.txt index bed8145e2f..9f57ecd841 100644 --- a/sdk/python/requirements/py3.11-ci-requirements.txt +++ b/sdk/python/requirements/py3.11-ci-requirements.txt @@ -1,5 +1,5 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.11-ci-requirements.txt +# uv pip compile -p 3.11 --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.11-ci-requirements.txt aiobotocore==2.15.0 # via feast (setup.py) aiohappyeyeballs==2.4.0 @@ -16,7 +16,7 @@ altair==4.2.2 # via great-expectations annotated-types==0.7.0 # via pydantic -anyio==4.4.0 +anyio==4.5.0 # via # httpx # jupyter-server @@ -40,6 +40,8 @@ async-lru==2.0.4 # via jupyterlab async-property==0.2.2 # via python-keycloak +async-timeout==4.0.3 + # via redis atpublic==5.0 # via ibis-framework attrs==24.2.0 @@ -47,13 +49,13 @@ attrs==24.2.0 # aiohttp # jsonschema # referencing -azure-core==1.30.2 +azure-core==1.31.0 # via # azure-identity # azure-storage-blob azure-identity==1.17.1 # via 
feast (setup.py) -azure-storage-blob==12.22.0 +azure-storage-blob==12.23.0 # via feast (setup.py) babel==2.16.0 # via @@ -140,11 +142,11 @@ cryptography==42.0.8 # types-redis cython==3.0.11 # via thriftpy2 -dask[dataframe]==2024.8.2 +dask[dataframe]==2024.9.0 # via # feast (setup.py) # dask-expr -dask-expr==1.1.13 +dask-expr==1.1.14 # via dask db-dtypes==1.3.0 # via google-cloud-bigquery @@ -154,7 +156,7 @@ decorator==5.1.1 # via ipython defusedxml==0.7.1 # via nbconvert -deltalake==0.19.2 +deltalake==0.20.0 # via feast (setup.py) deprecation==2.1.0 # via python-keycloak @@ -178,11 +180,11 @@ execnet==2.1.1 # via pytest-xdist executing==2.1.0 # via stack-data -fastapi==0.114.1 +fastapi==0.115.0 # via feast (setup.py) fastjsonschema==2.20.0 # via nbformat -filelock==3.16.0 +filelock==3.16.1 # via # snowflake-connector-python # virtualenv @@ -247,7 +249,7 @@ googleapis-common-protos[grpc]==1.65.0 # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.20 +great-expectations==0.18.21 # via feast (setup.py) grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable @@ -293,15 +295,15 @@ httpx==0.27.2 # feast (setup.py) # jupyterlab # python-keycloak -ibis-framework[duckdb]==9.4.0 +ibis-framework[duckdb]==9.5.0 # via # feast (setup.py) # ibis-substrait ibis-substrait==4.0.1 # via feast (setup.py) -identify==2.6.0 +identify==2.6.1 # via pre-commit -idna==3.8 +idna==3.10 # via # anyio # httpx @@ -363,7 +365,7 @@ jsonschema[format-nongpl]==4.23.0 # nbformat jsonschema-specifications==2023.12.1 # via jsonschema -jupyter-client==8.6.2 +jupyter-client==8.6.3 # via # ipykernel # jupyter-server @@ -429,7 +431,7 @@ mistune==3.0.2 # via # great-expectations # nbconvert -mmh3==4.1.0 +mmh3==5.0.0 # via feast (setup.py) mock==2.0.0 # via feast (setup.py) @@ -497,6 +499,7 @@ packaging==24.1 # google-cloud-bigquery # great-expectations # gunicorn + # ibis-framework # ibis-substrait # ipykernel # jupyter-server @@ -561,8 +564,9 @@ proto-plus==1.24.0 # 
google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-datastore -protobuf==4.25.4 +protobuf==4.25.5 # via + # feast (setup.py) # google-api-core # google-cloud-bigquery-storage # google-cloud-bigtable @@ -581,11 +585,11 @@ psutil==5.9.0 # via # feast (setup.py) # ipykernel -psycopg[binary, pool]==3.2.1 +psycopg[binary, pool]==3.2.2 # via feast (setup.py) -psycopg-binary==3.2.1 +psycopg-binary==3.2.2 # via psycopg -psycopg-pool==3.2.2 +psycopg-pool==3.2.3 # via psycopg ptyprocess==0.7.0 # via @@ -620,12 +624,12 @@ pybindgen==0.22.1 # via feast (setup.py) pycparser==2.22 # via cffi -pydantic==2.9.1 +pydantic==2.9.2 # via # feast (setup.py) # fastapi # great-expectations -pydantic-core==2.23.3 +pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via @@ -730,7 +734,7 @@ referencing==0.35.1 # jsonschema # jsonschema-specifications # jupyter-events -regex==2024.7.24 +regex==2024.9.11 # via # feast (setup.py) # parsimonious @@ -781,7 +785,7 @@ ruamel-yaml==0.17.40 # via great-expectations ruamel-yaml-clib==0.2.8 # via ruamel-yaml -ruff==0.6.4 +ruff==0.6.5 # via feast (setup.py) s3transfer==0.10.2 # via boto3 @@ -789,7 +793,7 @@ scipy==1.14.1 # via great-expectations send2trash==1.8.3 # via jupyter-server -setuptools==74.1.2 +setuptools==75.1.0 # via # grpcio-tools # jupyterlab @@ -817,7 +821,7 @@ sniffio==1.3.1 # httpx snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==3.12.1 +snowflake-connector-python[pandas]==3.12.2 # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python @@ -837,9 +841,9 @@ sphinxcontrib-qthelp==2.0.0 # via sphinx sphinxcontrib-serializinghtml==2.0.0 # via sphinx -sqlalchemy[mypy]==2.0.34 +sqlalchemy[mypy]==2.0.35 # via feast (setup.py) -sqlglot==25.18.0 +sqlglot==25.20.1 # via ibis-framework sqlite-vec==0.1.1 # via feast (setup.py) @@ -849,7 +853,7 @@ stack-data==0.6.3 # via ipython starlette==0.38.5 # via fastapi -substrait==0.22.0 +substrait==0.23.0 # via ibis-substrait tabulate==0.9.0 
# via feast (setup.py) @@ -867,6 +871,8 @@ tinycss2==1.3.0 # via nbconvert toml==0.10.2 # via feast (setup.py) +tomli==2.0.1 + # via coverage tomlkit==0.13.2 # via snowflake-connector-python toolz==0.12.1 @@ -920,15 +926,15 @@ types-python-dateutil==2.9.0.20240906 # via # feast (setup.py) # arrow -types-pytz==2024.1.0.20240417 +types-pytz==2024.2.0.20240913 # via feast (setup.py) -types-pyyaml==6.0.12.20240808 +types-pyyaml==6.0.12.20240917 # via feast (setup.py) types-redis==4.6.0.20240903 # via feast (setup.py) types-requests==2.30.0.0 # via feast (setup.py) -types-setuptools==74.1.0.20240907 +types-setuptools==75.1.0.20240917 # via # feast (setup.py) # types-cffi @@ -963,7 +969,7 @@ tzlocal==5.2 # trino uri-template==1.3.0 # via jsonschema -urllib3==2.2.2 +urllib3==2.2.3 # via # feast (setup.py) # botocore @@ -1015,5 +1021,5 @@ xmltodict==0.13.0 # via moto yarl==1.11.1 # via aiohttp -zipp==3.20.1 +zipp==3.20.2 # via importlib-metadata diff --git a/sdk/python/requirements/py3.11-requirements.txt b/sdk/python/requirements/py3.11-requirements.txt index 99a994e946..5c20e45f07 100644 --- a/sdk/python/requirements/py3.11-requirements.txt +++ b/sdk/python/requirements/py3.11-requirements.txt @@ -1,8 +1,8 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.11-requirements.txt +# uv pip compile -p 3.11 --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.11-requirements.txt annotated-types==0.7.0 # via pydantic -anyio==4.4.0 +anyio==4.5.0 # via # starlette # watchfiles @@ -25,15 +25,15 @@ cloudpickle==3.0.0 # via dask colorama==0.4.6 # via feast (setup.py) -dask[dataframe]==2024.8.2 +dask[dataframe]==2024.9.0 # via # feast (setup.py) # dask-expr -dask-expr==1.1.13 +dask-expr==1.1.14 # via dask dill==0.3.8 # via feast (setup.py) -fastapi==0.114.1 +fastapi==0.115.0 # via feast (setup.py) fsspec==2024.9.0 # via dask @@ -43,7 +43,7 @@ 
h11==0.14.0 # via uvicorn httptools==0.6.1 # via uvicorn -idna==3.8 +idna==3.10 # via # anyio # requests @@ -59,14 +59,12 @@ locket==1.0.0 # via partd markupsafe==2.1.5 # via jinja2 -mmh3==4.1.0 +mmh3==5.0.0 # via feast (setup.py) mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy -mypy-protobuf==3.6.0 - # via feast (setup.py) numpy==1.26.4 # via # feast (setup.py) @@ -86,19 +84,19 @@ partd==1.4.2 # via dask prometheus-client==0.20.0 # via feast (setup.py) -protobuf==4.25.4 - # via mypy-protobuf +protobuf==4.25.5 + # via feast (setup.py) psutil==6.0.0 # via feast (setup.py) pyarrow==17.0.0 # via # feast (setup.py) # dask-expr -pydantic==2.9.1 +pydantic==2.9.2 # via # feast (setup.py) # fastapi -pydantic-core==2.23.3 +pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via feast (setup.py) @@ -129,7 +127,7 @@ six==1.16.0 # via python-dateutil sniffio==1.3.1 # via anyio -sqlalchemy[mypy]==2.0.34 +sqlalchemy[mypy]==2.0.35 # via feast (setup.py) starlette==0.38.5 # via fastapi @@ -147,8 +145,6 @@ tqdm==4.66.5 # via feast (setup.py) typeguard==4.3.0 # via feast (setup.py) -types-protobuf==5.27.0.20240907 - # via mypy-protobuf typing-extensions==4.12.2 # via # fastapi @@ -159,7 +155,7 @@ typing-extensions==4.12.2 # typeguard tzdata==2024.1 # via pandas -urllib3==2.2.2 +urllib3==2.2.3 # via requests uvicorn[standard]==0.30.6 # via feast (setup.py) @@ -169,5 +165,5 @@ watchfiles==0.24.0 # via uvicorn websockets==13.0.1 # via uvicorn -zipp==3.20.1 +zipp==3.20.2 # via importlib-metadata diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 1015f46cff..bbdca890b6 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -1,5 +1,5 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.9-ci-requirements.txt +# uv pip 
compile -p 3.9 --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.9-ci-requirements.txt aiobotocore==2.15.0 # via feast (setup.py) aiohappyeyeballs==2.4.0 @@ -16,7 +16,7 @@ altair==4.2.2 # via great-expectations annotated-types==0.7.0 # via pydantic -anyio==4.4.0 +anyio==4.5.0 # via # httpx # jupyter-server @@ -51,13 +51,13 @@ attrs==24.2.0 # aiohttp # jsonschema # referencing -azure-core==1.30.2 +azure-core==1.31.0 # via # azure-identity # azure-storage-blob azure-identity==1.17.1 # via feast (setup.py) -azure-storage-blob==12.22.0 +azure-storage-blob==12.23.0 # via feast (setup.py) babel==2.16.0 # via @@ -160,7 +160,7 @@ decorator==5.1.1 # via ipython defusedxml==0.7.1 # via nbconvert -deltalake==0.19.2 +deltalake==0.20.0 # via feast (setup.py) deprecation==2.1.0 # via python-keycloak @@ -189,11 +189,11 @@ execnet==2.1.1 # via pytest-xdist executing==2.1.0 # via stack-data -fastapi==0.114.1 +fastapi==0.115.0 # via feast (setup.py) fastjsonschema==2.20.0 # via nbformat -filelock==3.16.0 +filelock==3.16.1 # via # snowflake-connector-python # virtualenv @@ -258,7 +258,7 @@ googleapis-common-protos[grpc]==1.65.0 # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.20 +great-expectations==0.18.21 # via feast (setup.py) grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable @@ -310,9 +310,9 @@ ibis-framework[duckdb]==9.0.0 # ibis-substrait ibis-substrait==4.0.1 # via feast (setup.py) -identify==2.6.0 +identify==2.6.1 # via pre-commit -idna==3.8 +idna==3.10 # via # anyio # httpx @@ -383,7 +383,7 @@ jsonschema[format-nongpl]==4.23.0 # nbformat jsonschema-specifications==2023.12.1 # via jsonschema -jupyter-client==8.6.2 +jupyter-client==8.6.3 # via # ipykernel # jupyter-server @@ -449,7 +449,7 @@ mistune==3.0.2 # via # great-expectations # nbconvert -mmh3==4.1.0 +mmh3==5.0.0 # via feast (setup.py) mock==2.0.0 # via feast (setup.py) @@ -581,8 +581,9 @@ proto-plus==1.24.0 # google-cloud-bigquery-storage # 
google-cloud-bigtable # google-cloud-datastore -protobuf==4.25.4 +protobuf==4.25.5 # via + # feast (setup.py) # google-api-core # google-cloud-bigquery-storage # google-cloud-bigtable @@ -605,7 +606,7 @@ psycopg[binary, pool]==3.1.18 # via feast (setup.py) psycopg-binary==3.1.18 # via psycopg -psycopg-pool==3.2.2 +psycopg-pool==3.2.3 # via psycopg ptyprocess==0.7.0 # via @@ -640,12 +641,12 @@ pybindgen==0.22.1 # via feast (setup.py) pycparser==2.22 # via cffi -pydantic==2.9.1 +pydantic==2.9.2 # via # feast (setup.py) # fastapi # great-expectations -pydantic-core==2.23.3 +pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via @@ -750,7 +751,7 @@ referencing==0.35.1 # jsonschema # jsonschema-specifications # jupyter-events -regex==2024.7.24 +regex==2024.9.11 # via # feast (setup.py) # parsimonious @@ -801,7 +802,7 @@ ruamel-yaml==0.17.40 # via great-expectations ruamel-yaml-clib==0.2.8 # via ruamel-yaml -ruff==0.6.4 +ruff==0.6.5 # via feast (setup.py) s3transfer==0.10.2 # via boto3 @@ -809,7 +810,7 @@ scipy==1.13.1 # via great-expectations send2trash==1.8.3 # via jupyter-server -setuptools==74.1.2 +setuptools==75.1.0 # via # grpcio-tools # jupyterlab @@ -837,7 +838,7 @@ sniffio==1.3.1 # httpx snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==3.12.1 +snowflake-connector-python[pandas]==3.12.2 # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python @@ -857,7 +858,7 @@ sphinxcontrib-qthelp==2.0.0 # via sphinx sphinxcontrib-serializinghtml==2.0.0 # via sphinx -sqlalchemy[mypy]==2.0.34 +sqlalchemy[mypy]==2.0.35 # via feast (setup.py) sqlglot==23.12.2 # via ibis-framework @@ -869,7 +870,7 @@ stack-data==0.6.3 # via ipython starlette==0.38.5 # via fastapi -substrait==0.22.0 +substrait==0.23.0 # via ibis-substrait tabulate==0.9.0 # via feast (setup.py) @@ -950,15 +951,15 @@ types-python-dateutil==2.9.0.20240906 # via # feast (setup.py) # arrow -types-pytz==2024.1.0.20240417 +types-pytz==2024.2.0.20240913 # via feast 
(setup.py) -types-pyyaml==6.0.12.20240808 +types-pyyaml==6.0.12.20240917 # via feast (setup.py) types-redis==4.6.0.20240903 # via feast (setup.py) types-requests==2.30.0.0 # via feast (setup.py) -types-setuptools==74.1.0.20240907 +types-setuptools==75.1.0.20240917 # via # feast (setup.py) # types-cffi @@ -1052,5 +1053,5 @@ xmltodict==0.13.0 # via moto yarl==1.11.1 # via aiohttp -zipp==3.20.1 +zipp==3.20.2 # via importlib-metadata diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 4bd6a44857..7ffef84b23 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -1,8 +1,8 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.9-requirements.txt +# uv pip compile -p 3.9 --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.9-requirements.txt annotated-types==0.7.0 # via pydantic -anyio==4.4.0 +anyio==4.5.0 # via # starlette # watchfiles @@ -35,7 +35,7 @@ dill==0.3.8 # via feast (setup.py) exceptiongroup==1.2.2 # via anyio -fastapi==0.114.1 +fastapi==0.115.0 # via feast (setup.py) fsspec==2024.9.0 # via dask @@ -45,7 +45,7 @@ h11==0.14.0 # via uvicorn httptools==0.6.1 # via uvicorn -idna==3.8 +idna==3.10 # via # anyio # requests @@ -63,14 +63,12 @@ locket==1.0.0 # via partd markupsafe==2.1.5 # via jinja2 -mmh3==4.1.0 +mmh3==5.0.0 # via feast (setup.py) mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy -mypy-protobuf==3.6.0 - # via feast (setup.py) numpy==1.26.4 # via # feast (setup.py) @@ -90,19 +88,19 @@ partd==1.4.2 # via dask prometheus-client==0.20.0 # via feast (setup.py) -protobuf==4.25.4 - # via mypy-protobuf +protobuf==4.25.5 + # via feast (setup.py) psutil==6.0.0 # via feast (setup.py) pyarrow==17.0.0 # via # feast (setup.py) # dask-expr -pydantic==2.9.1 +pydantic==2.9.2 # via # feast (setup.py) # 
fastapi -pydantic-core==2.23.3 +pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via feast (setup.py) @@ -133,7 +131,7 @@ six==1.16.0 # via python-dateutil sniffio==1.3.1 # via anyio -sqlalchemy[mypy]==2.0.34 +sqlalchemy[mypy]==2.0.35 # via feast (setup.py) starlette==0.38.5 # via fastapi @@ -153,8 +151,6 @@ tqdm==4.66.5 # via feast (setup.py) typeguard==4.3.0 # via feast (setup.py) -types-protobuf==5.27.0.20240907 - # via mypy-protobuf typing-extensions==4.12.2 # via # anyio @@ -168,7 +164,7 @@ typing-extensions==4.12.2 # uvicorn tzdata==2024.1 # via pandas -urllib3==2.2.2 +urllib3==2.2.3 # via requests uvicorn[standard]==0.30.6 # via feast (setup.py) @@ -178,5 +174,5 @@ watchfiles==0.24.0 # via uvicorn websockets==13.0.1 # via uvicorn -zipp==3.20.1 +zipp==3.20.2 # via importlib-metadata diff --git a/setup.py b/setup.py index f332c21f71..5a6f18db35 100644 --- a/setup.py +++ b/setup.py @@ -36,7 +36,7 @@ "click>=7.0.0,<9.0.0", "colorama>=0.3.9,<1", "dill~=0.3.0", - "mypy-protobuf>=3.1", + "protobuf<5", "Jinja2>=2,<4", "jsonschema", "mmh3", @@ -157,6 +157,8 @@ "virtualenv==20.23.0", "cryptography>=35.0,<43", "ruff>=0.3.3", + "protobuf<5", + "mypy-protobuf>=3.1", "grpcio-tools>=1.56.2,<2", "grpcio-testing>=1.56.2,<2", # FastAPI does not correctly pull starlette dependency on httpx see thread(https://github.com/tiangolo/fastapi/issues/5656). @@ -400,6 +402,9 @@ def run(self): entry_points={"console_scripts": ["feast=feast.cli:cli"]}, use_scm_version=use_scm_version, setup_requires=[ + # snowflake udf packages refer to conda packages, not pypi libraries. 
Conda stack is still on protobuf 4 + # So we are adding protobuf<5 as a requirement + "protobuf<5", "grpcio-tools>=1.56.2,<2", "mypy-protobuf>=3.1", "pybindgen==0.22.0", From 9688790a5e7a70f628a46021bde0201922c7e04d Mon Sep 17 00:00:00 2001 From: Bhargav Dodla <13788369+EXPEbdodla@users.noreply.github.com> Date: Thu, 19 Sep 2024 12:21:15 -0700 Subject: [PATCH 68/96] perf: Added indexes to sql tables to optimize query execution (#4538) minor: Added indexes to sql tables to optimize query execution Signed-off-by: Bhargav Dodla Co-authored-by: Bhargav Dodla --- sdk/python/feast/infra/registry/sql.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py index b049adc898..d6a716e082 100644 --- a/sdk/python/feast/infra/registry/sql.py +++ b/sdk/python/feast/infra/registry/sql.py @@ -10,6 +10,7 @@ from sqlalchemy import ( # type: ignore BigInteger, Column, + Index, LargeBinary, MetaData, String, @@ -82,6 +83,8 @@ Column("project_proto", LargeBinary, nullable=False), ) +Index("idx_projects_project_id", projects.c.project_id) + entities = Table( "entities", metadata, @@ -91,6 +94,8 @@ Column("entity_proto", LargeBinary, nullable=False), ) +Index("idx_entities_project_id", entities.c.project_id) + data_sources = Table( "data_sources", metadata, @@ -100,6 +105,8 @@ Column("data_source_proto", LargeBinary, nullable=False), ) +Index("idx_data_sources_project_id", data_sources.c.project_id) + feature_views = Table( "feature_views", metadata, @@ -111,6 +118,8 @@ Column("user_metadata", LargeBinary, nullable=True), ) +Index("idx_feature_views_project_id", feature_views.c.project_id) + stream_feature_views = Table( "stream_feature_views", metadata, @@ -121,6 +130,8 @@ Column("user_metadata", LargeBinary, nullable=True), ) +Index("idx_stream_feature_views_project_id", stream_feature_views.c.project_id) + on_demand_feature_views = Table( "on_demand_feature_views", metadata, @@ -131,6 
+142,8 @@ Column("user_metadata", LargeBinary, nullable=True), ) +Index("idx_on_demand_feature_views_project_id", on_demand_feature_views.c.project_id) + feature_services = Table( "feature_services", metadata, @@ -140,6 +153,8 @@ Column("feature_service_proto", LargeBinary, nullable=False), ) +Index("idx_feature_services_project_id", feature_services.c.project_id) + saved_datasets = Table( "saved_datasets", metadata, @@ -149,6 +164,8 @@ Column("saved_dataset_proto", LargeBinary, nullable=False), ) +Index("idx_saved_datasets_project_id", saved_datasets.c.project_id) + validation_references = Table( "validation_references", metadata, @@ -157,6 +174,7 @@ Column("last_updated_timestamp", BigInteger, nullable=False), Column("validation_reference_proto", LargeBinary, nullable=False), ) +Index("idx_validation_references_project_id", validation_references.c.project_id) managed_infra = Table( "managed_infra", @@ -167,6 +185,8 @@ Column("infra_proto", LargeBinary, nullable=False), ) +Index("idx_managed_infra_project_id", managed_infra.c.project_id) + permissions = Table( "permissions", metadata, @@ -176,6 +196,8 @@ Column("permission_proto", LargeBinary, nullable=False), ) +Index("idx_permissions_project_id", permissions.c.project_id) + class FeastMetadataKeys(Enum): LAST_UPDATED_TIMESTAMP = "last_updated_timestamp" @@ -191,6 +213,8 @@ class FeastMetadataKeys(Enum): Column("last_updated_timestamp", BigInteger, nullable=False), ) +Index("idx_feast_metadata_project_id", feast_metadata.c.project_id) + logger = logging.getLogger(__name__) From 1b9280302b525084d7e1d4f99b2a8fcd68361a99 Mon Sep 17 00:00:00 2001 From: Theodor Mihalache <84387487+tmihalac@users.noreply.github.com> Date: Thu, 19 Sep 2024 15:24:56 -0400 Subject: [PATCH 69/96] docs: Add docs example of how to use tags with feature views (#4536) * Add docs example of how to use tags with feature views Signed-off-by: Theodor Mihalache * Add docs example of how to use tags with feature views - changes following review 
Signed-off-by: Theodor Mihalache --------- Signed-off-by: Theodor Mihalache --- docs/getting-started/concepts/README.md | 4 ++ docs/getting-started/concepts/tags.md | 59 +++++++++++++++++++++++++ 2 files changed, 63 insertions(+) create mode 100644 docs/getting-started/concepts/tags.md diff --git a/docs/getting-started/concepts/README.md b/docs/getting-started/concepts/README.md index 9b967fb5af..a32c53b5f4 100644 --- a/docs/getting-started/concepts/README.md +++ b/docs/getting-started/concepts/README.md @@ -31,3 +31,7 @@ {% content-ref url="permission.md" %} [permission.md](permission.md) {% endcontent-ref %} + +{% content-ref url="tags.md" %} +[tags.md](tags.md) +{% endcontent-ref %} diff --git a/docs/getting-started/concepts/tags.md b/docs/getting-started/concepts/tags.md new file mode 100644 index 0000000000..d5b285f7c7 --- /dev/null +++ b/docs/getting-started/concepts/tags.md @@ -0,0 +1,59 @@ +# Tags + +## Overview + +Tags in Feast allow for efficient filtering of Feast objects when listing them in the UI, CLI, or querying the registry directly. + +The way to define tags on the feast objects is through the definition file or directly in the object that will be applied to the feature store. 
+ +## Examples + +In this example we define a Feature View in a definition file that has a tag: +```python +driver_stats_fv = FeatureView( + name="driver_hourly_stats", + entities=[driver], + ttl=timedelta(days=1), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64, description="Average daily trips"), + ], + online=True, + source=driver_stats_source, + # Tags are user defined key/value pairs that are attached to each + # feature view + tags={"team": "driver_performance"}, +) +``` + +In this example we define a Stream Feature View that has a tag, in the code: +```python + sfv = StreamFeatureView( + name="test kafka stream feature view", + entities=[entity], + schema=[], + description="desc", + timestamp_field="event_timestamp", + source=stream_source, + tags={"team": "driver_performance"}, +``` + +An example of filtering feature-views with the tag `team:driver_performance`: +```commandline +$ feast feature-views list --tags team:driver_performance +NAME ENTITIES TYPE +driver_hourly_stats {'driver'} FeatureView +driver_hourly_stats_fresh {'driver'} FeatureView +``` + +The same example of listing feature-views without tag filtering: +```commandline +$ feast feature-views list +NAME ENTITIES TYPE +driver_hourly_stats {'driver'} FeatureView +driver_hourly_stats_fresh {'driver'} FeatureView +transformed_conv_rate_fresh {'driver'} OnDemandFeatureView +transformed_conv_rate {'driver'} OnDemandFeatureView +``` + From 4e2eacc1beea8f8866b78968abadfd42eee63d6a Mon Sep 17 00:00:00 2001 From: Bhargav Dodla <13788369+EXPEbdodla@users.noreply.github.com> Date: Thu, 19 Sep 2024 20:38:50 -0700 Subject: [PATCH 70/96] fix: Fix for SQL registry initialization fails #4543 (#4544) * fix: Fix for SQL registry initialization fails #4543 Signed-off-by: Bhargav Dodla * fix: Removed combined_sql_fixtures Signed-off-by: Bhargav Dodla * fix: Added protobuf dependency to pyproject.toml Signed-off-by: Bhargav 
Dodla --------- Signed-off-by: Bhargav Dodla Co-authored-by: Bhargav Dodla --- pyproject.toml | 1 + .../feast/infra/registry/caching_registry.py | 38 +++++++----- sdk/python/feast/infra/registry/sql.py | 6 +- .../registration/test_universal_registry.py | 61 +++++++++++++++++++ 4 files changed, 89 insertions(+), 17 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 283338a838..c91608b6ce 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,6 @@ [build-system] requires = [ + "protobuf<5", "grpcio-tools>=1.56.2,<2", "mypy-protobuf>=3.1", "pybindgen==0.22.0", diff --git a/sdk/python/feast/infra/registry/caching_registry.py b/sdk/python/feast/infra/registry/caching_registry.py index 8f47fab077..042eee06ab 100644 --- a/sdk/python/feast/infra/registry/caching_registry.py +++ b/sdk/python/feast/infra/registry/caching_registry.py @@ -19,6 +19,7 @@ from feast.permissions.permission import Permission from feast.project import Project from feast.project_metadata import ProjectMetadata +from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.saved_dataset import SavedDataset, ValidationReference from feast.stream_feature_view import StreamFeatureView from feast.utils import _utc_now @@ -28,13 +29,14 @@ class CachingRegistry(BaseRegistry): def __init__(self, project: str, cache_ttl_seconds: int, cache_mode: str): - self.cached_registry_proto = self.proto() - self.cached_registry_proto_created = _utc_now() + self.cache_mode = cache_mode + self.cached_registry_proto = RegistryProto() self._refresh_lock = Lock() self.cached_registry_proto_ttl = timedelta( seconds=cache_ttl_seconds if cache_ttl_seconds is not None else 0 ) - self.cache_mode = cache_mode + self.cached_registry_proto = self.proto() + self.cached_registry_proto_created = _utc_now() if cache_mode == "thread": self._start_thread_async_refresh(cache_ttl_seconds) atexit.register(self._exit_handler) @@ -429,20 +431,26 @@ def refresh(self, project: Optional[str] = None): def 
_refresh_cached_registry_if_necessary(self): if self.cache_mode == "sync": with self._refresh_lock: - expired = ( - self.cached_registry_proto is None - or self.cached_registry_proto_created is None - ) or ( - self.cached_registry_proto_ttl.total_seconds() - > 0 # 0 ttl means infinity - and ( - _utc_now() - > ( - self.cached_registry_proto_created - + self.cached_registry_proto_ttl + if self.cached_registry_proto == RegistryProto(): + # Avoids the need to refresh the registry when cache is not populated yet + # Specially during the __init__ phase + # proto() will populate the cache with project metadata if no objects are registered + expired = False + else: + expired = ( + self.cached_registry_proto is None + or self.cached_registry_proto_created is None + ) or ( + self.cached_registry_proto_ttl.total_seconds() + > 0 # 0 ttl means infinity + and ( + _utc_now() + > ( + self.cached_registry_proto_created + + self.cached_registry_proto_ttl + ) ) ) - ) if expired: logger.info("Registry cache expired, so refreshing") self.refresh() diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py index d6a716e082..a6a2417c6e 100644 --- a/sdk/python/feast/infra/registry/sql.py +++ b/sdk/python/feast/infra/registry/sql.py @@ -251,6 +251,8 @@ def __init__( registry_config, SqlRegistryConfig ), "SqlRegistry needs a valid registry_config" + self.registry_config = registry_config + self.write_engine: Engine = create_engine( registry_config.path, **registry_config.sqlalchemy_config_kwargs ) @@ -281,7 +283,7 @@ def __init__( def _sync_feast_metadata_to_projects_table(self): feast_metadata_projects: set = [] projects_set: set = [] - with self.write_engine.begin() as conn: + with self.read_engine.begin() as conn: stmt = select(feast_metadata).where( feast_metadata.c.metadata_key == FeastMetadataKeys.PROJECT_UUID.value ) @@ -290,7 +292,7 @@ def _sync_feast_metadata_to_projects_table(self): feast_metadata_projects.append(row._mapping["project_id"]) if 
len(feast_metadata_projects) > 0: - with self.write_engine.begin() as conn: + with self.read_engine.begin() as conn: stmt = select(projects) rows = conn.execute(stmt).all() for row in rows: diff --git a/sdk/python/tests/integration/registration/test_universal_registry.py b/sdk/python/tests/integration/registration/test_universal_registry.py index 5dc2509333..0bed89ca16 100644 --- a/sdk/python/tests/integration/registration/test_universal_registry.py +++ b/sdk/python/tests/integration/registration/test_universal_registry.py @@ -1767,3 +1767,64 @@ def test_apply_entity_success_with_purge_feast_metadata(test_registry): assert len(entities) == 0 test_registry.teardown() + + +@pytest.mark.integration +@pytest.mark.parametrize( + "test_registry", + sql_fixtures + async_sql_fixtures, +) +def test_apply_entity_to_sql_registry_and_reinitialize_sql_registry(test_registry): + entity = Entity( + name="driver_car_id", + description="Car driver id", + tags={"team": "matchmaking"}, + ) + + project = "project" + + # Register Entity + test_registry.apply_entity(entity, project) + assert_project(project, test_registry) + + entities = test_registry.list_entities(project, tags=entity.tags) + assert_project(project, test_registry) + + entity = entities[0] + assert ( + len(entities) == 1 + and entity.name == "driver_car_id" + and entity.description == "Car driver id" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" + ) + + entity = test_registry.get_entity("driver_car_id", project) + assert ( + entity.name == "driver_car_id" + and entity.description == "Car driver id" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" + ) + + # After the first apply, the created_timestamp should be the same as the last_update_timestamp. 
+ assert entity.created_timestamp == entity.last_updated_timestamp + updated_test_registry = SqlRegistry(test_registry.registry_config, "project", None) + + # Update entity + updated_entity = Entity( + name="driver_car_id", + description="Car driver Id", + tags={"team": "matchmaking"}, + ) + updated_test_registry.apply_entity(updated_entity, project) + + updated_entity = updated_test_registry.get_entity("driver_car_id", project) + updated_test_registry.delete_entity("driver_car_id", project) + assert_project(project, updated_test_registry) + entities = updated_test_registry.list_entities(project) + assert_project(project, updated_test_registry) + assert len(entities) == 0 + + updated_test_registry.teardown() + test_registry.teardown() From 5f5caf0cac539ed779692e0ec819659cf5a33a0d Mon Sep 17 00:00:00 2001 From: Hao Xu Date: Fri, 20 Sep 2024 00:18:10 -0700 Subject: [PATCH 71/96] feat: Return entity key in the retrieval document api (#4511) * update entity retrieval and add duckdb Signed-off-by: cmuhao * lint Signed-off-by: cmuhao * fix lint Signed-off-by: cmuhao * fix lint Signed-off-by: cmuhao * fix lint Signed-off-by: cmuhao * fix lint Signed-off-by: cmuhao * fix lint Signed-off-by: cmuhao * fix lint Signed-off-by: cmuhao * fix lint Signed-off-by: cmuhao * fix lint Signed-off-by: cmuhao * fix typo Signed-off-by: cmuhao * fix typo Signed-off-by: cmuhao * fix typo Signed-off-by: cmuhao * fix typo Signed-off-by: cmuhao * fix typo Signed-off-by: cmuhao * fix typo Signed-off-by: cmuhao * fix typo Signed-off-by: cmuhao * fix typo Signed-off-by: cmuhao * fix lint Signed-off-by: cmuhao * fix test Signed-off-by: cmuhao * fix test Signed-off-by: cmuhao * fix test Signed-off-by: cmuhao * fix test Signed-off-by: cmuhao * fix test Signed-off-by: cmuhao * fix test Signed-off-by: cmuhao * fix test Signed-off-by: cmuhao --------- Signed-off-by: cmuhao --- sdk/python/feast/feature_store.py | 46 ++++++++++++----- .../online_stores/contrib/elasticsearch.py | 25 +++++---- 
.../infra/online_stores/contrib/postgres.py | 37 ++++++-------- .../feast/infra/online_stores/online_store.py | 1 + .../feast/infra/online_stores/sqlite.py | 22 ++++---- sdk/python/feast/infra/provider.py | 1 + sdk/python/feast/utils.py | 51 ++++++++++++++++++- sdk/python/tests/conftest.py | 20 ++++++++ .../online_store/test_universal_online.py | 7 ++- 9 files changed, 149 insertions(+), 61 deletions(-) diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 4f96cfb0fc..ab2bc6cec2 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -78,7 +78,9 @@ FieldStatus, GetOnlineFeaturesResponse, ) +from feast.protos.feast.types.EntityKey_pb2 import EntityKey from feast.protos.feast.types.Value_pb2 import RepeatedValue, Value +from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import RepoConfig, load_repo_config from feast.repo_contents import RepoContents from feast.saved_dataset import SavedDataset, SavedDatasetStorage, ValidationReference @@ -1666,20 +1668,29 @@ def retrieve_online_documents( distance_metric, ) - # TODO Refactor to better way of populating result - # TODO populate entity in the response after returning entity in document_features is supported # TODO currently not return the vector value since it is same as feature value, if embedding is supported, # the feature value can be raw text before embedded - document_feature_vals = [feature[2] for feature in document_features] - document_feature_distance_vals = [feature[4] for feature in document_features] + entity_key_vals = [feature[1] for feature in document_features] + join_key_values: Dict[str, List[ValueProto]] = {} + for entity_key_val in entity_key_vals: + if entity_key_val is not None: + for join_key, entity_value in zip( + entity_key_val.join_keys, entity_key_val.entity_values + ): + if join_key not in join_key_values: + join_key_values[join_key] = [] + 
join_key_values[join_key].append(entity_value) + + document_feature_vals = [feature[4] for feature in document_features] + document_feature_distance_vals = [feature[5] for feature in document_features] online_features_response = GetOnlineFeaturesResponse(results=[]) utils._populate_result_rows_from_columnar( online_features_response=online_features_response, - data={requested_feature: document_feature_vals}, - ) - utils._populate_result_rows_from_columnar( - online_features_response=online_features_response, - data={"distance": document_feature_distance_vals}, + data={ + **join_key_values, + requested_feature: document_feature_vals, + "distance": document_feature_distance_vals, + }, ) return OnlineResponse(online_features_response) @@ -1691,7 +1702,11 @@ def _retrieve_from_online_store( query: List[float], top_k: int, distance_metric: Optional[str], - ) -> List[Tuple[Timestamp, "FieldStatus.ValueType", Value, Value, Value]]: + ) -> List[ + Tuple[ + Timestamp, Optional[EntityKey], "FieldStatus.ValueType", Value, Value, Value + ] + ]: """ Search and return document features from the online document store. 
""" @@ -1707,7 +1722,7 @@ def _retrieve_from_online_store( read_row_protos = [] row_ts_proto = Timestamp() - for row_ts, feature_val, vector_value, distance_val in documents: + for row_ts, entity_key, feature_val, vector_value, distance_val in documents: # Reset timestamp to default or update if row_ts is not None if row_ts is not None: row_ts_proto.FromDatetime(row_ts) @@ -1721,7 +1736,14 @@ def _retrieve_from_online_store( status = FieldStatus.PRESENT read_row_protos.append( - (row_ts_proto, status, feature_val, vector_value, distance_val) + ( + row_ts_proto, + entity_key, + status, + feature_val, + vector_value, + distance_val, + ) ) return read_row_protos diff --git a/sdk/python/feast/infra/online_stores/contrib/elasticsearch.py b/sdk/python/feast/infra/online_stores/contrib/elasticsearch.py index c26b4199ae..a0c25b931a 100644 --- a/sdk/python/feast/infra/online_stores/contrib/elasticsearch.py +++ b/sdk/python/feast/infra/online_stores/contrib/elasticsearch.py @@ -9,12 +9,15 @@ from elasticsearch import Elasticsearch, helpers from feast import Entity, FeatureView, RepoConfig -from feast.infra.key_encoding_utils import get_list_val_str, serialize_entity_key +from feast.infra.key_encoding_utils import ( + get_list_val_str, + serialize_entity_key, +) from feast.infra.online_stores.online_store import OnlineStore from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import FeastConfigBaseModel -from feast.utils import to_naive_utc +from feast.utils import _build_retrieve_online_document_record, to_naive_utc class ElasticSearchOnlineStoreConfig(FeastConfigBaseModel): @@ -224,6 +227,7 @@ def retrieve_online_documents( ) -> List[ Tuple[ Optional[datetime], + Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], @@ -232,6 +236,7 @@ def retrieve_online_documents( result: List[ Tuple[ Optional[datetime], + 
Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], @@ -247,23 +252,21 @@ def retrieve_online_documents( ) rows = response["hits"]["hits"][0:top_k] for row in rows: + entity_key = row["_source"]["entity_key"] feature_value = row["_source"]["feature_value"] vector_value = row["_source"]["vector_value"] timestamp = row["_source"]["timestamp"] distance = row["_score"] timestamp = datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%S.%f") - feature_value_proto = ValueProto() - feature_value_proto.ParseFromString(base64.b64decode(feature_value)) - - vector_value_proto = ValueProto(string_val=str(vector_value)) - distance_value_proto = ValueProto(float_val=distance) result.append( - ( + _build_retrieve_online_document_record( + entity_key, + base64.b64decode(feature_value), + str(vector_value), + distance, timestamp, - feature_value_proto, - vector_value_proto, - distance_value_proto, + config.entity_key_serialization_version, ) ) return result diff --git a/sdk/python/feast/infra/online_stores/contrib/postgres.py b/sdk/python/feast/infra/online_stores/contrib/postgres.py index 8c6d3e0b99..8125da33be 100644 --- a/sdk/python/feast/infra/online_stores/contrib/postgres.py +++ b/sdk/python/feast/infra/online_stores/contrib/postgres.py @@ -37,6 +37,7 @@ from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import RepoConfig +from feast.utils import _build_retrieve_online_document_record SUPPORTED_DISTANCE_METRICS_DICT = { "cosine": "<=>", @@ -360,6 +361,7 @@ def retrieve_online_documents( ) -> List[ Tuple[ Optional[datetime], + Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], @@ -391,12 +393,11 @@ def retrieve_online_documents( ) distance_metric_sql = SUPPORTED_DISTANCE_METRICS_DICT[distance_metric] - # Convert the embedding to a string to be used in postgres vector search - 
query_embedding_str = f"[{','.join(str(el) for el in embedding)}]" result: List[ Tuple[ Optional[datetime], + Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], @@ -415,45 +416,37 @@ def retrieve_online_documents( feature_name, value, vector_value, - vector_value {distance_metric_sql} %s as distance, + vector_value {distance_metric_sql} %s::vector as distance, event_ts FROM {table_name} WHERE feature_name = {feature_name} ORDER BY distance LIMIT {top_k}; """ ).format( - distance_metric_sql=distance_metric_sql, + distance_metric_sql=sql.SQL(distance_metric_sql), table_name=sql.Identifier(table_name), feature_name=sql.Literal(requested_feature), top_k=sql.Literal(top_k), ), - (query_embedding_str,), + (embedding,), ) rows = cur.fetchall() - for ( entity_key, - feature_name, - value, + _, + feature_val, vector_value, - distance, + distance_val, event_ts, ) in rows: - # TODO Deserialize entity_key to return the entity in response - # entity_key_proto = EntityKeyProto() - # entity_key_proto_bin = bytes(entity_key) - - feature_value_proto = ValueProto() - feature_value_proto.ParseFromString(bytes(value)) - - vector_value_proto = ValueProto(string_val=vector_value) - distance_value_proto = ValueProto(float_val=distance) result.append( - ( + _build_retrieve_online_document_record( + entity_key, + feature_val, + vector_value, + distance_val, event_ts, - feature_value_proto, - vector_value_proto, - distance_value_proto, + config.entity_key_serialization_version, ) ) diff --git a/sdk/python/feast/infra/online_stores/online_store.py b/sdk/python/feast/infra/online_stores/online_store.py index 9cf2ef95f6..fdb5b055cf 100644 --- a/sdk/python/feast/infra/online_stores/online_store.py +++ b/sdk/python/feast/infra/online_stores/online_store.py @@ -349,6 +349,7 @@ def retrieve_online_documents( ) -> List[ Tuple[ Optional[datetime], + Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], diff --git 
a/sdk/python/feast/infra/online_stores/sqlite.py b/sdk/python/feast/infra/online_stores/sqlite.py index 9896b766d4..061a766b8c 100644 --- a/sdk/python/feast/infra/online_stores/sqlite.py +++ b/sdk/python/feast/infra/online_stores/sqlite.py @@ -33,10 +33,9 @@ from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.core.SqliteTable_pb2 import SqliteTable as SqliteTableProto from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto -from feast.protos.feast.types.Value_pb2 import FloatList as FloatListProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import FeastConfigBaseModel, RepoConfig -from feast.utils import to_naive_utc +from feast.utils import _build_retrieve_online_document_record, to_naive_utc class SqliteOnlineStoreConfig(FeastConfigBaseModel): @@ -303,6 +302,7 @@ def retrieve_online_documents( ) -> List[ Tuple[ Optional[datetime], + Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], @@ -385,6 +385,7 @@ def retrieve_online_documents( result: List[ Tuple[ Optional[datetime], + Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], @@ -392,19 +393,14 @@ def retrieve_online_documents( ] = [] for entity_key, _, string_value, distance, event_ts in rows: - feature_value_proto = ValueProto() - feature_value_proto.ParseFromString(string_value if string_value else b"") - vector_value_proto = ValueProto( - float_list_val=FloatListProto(val=embedding) - ) - distance_value_proto = ValueProto(float_val=distance) - result.append( - ( + _build_retrieve_online_document_record( + entity_key, + string_value if string_value else b"", + embedding, + distance, event_ts, - feature_value_proto, - vector_value_proto, - distance_value_proto, + config.entity_key_serialization_version, ) ) diff --git a/sdk/python/feast/infra/provider.py b/sdk/python/feast/infra/provider.py index 
9940af1d02..c0062dde02 100644 --- a/sdk/python/feast/infra/provider.py +++ b/sdk/python/feast/infra/provider.py @@ -364,6 +364,7 @@ def retrieve_online_documents( ) -> List[ Tuple[ Optional[datetime], + Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], diff --git a/sdk/python/feast/utils.py b/sdk/python/feast/utils.py index 992869557a..a6d7853e1b 100644 --- a/sdk/python/feast/utils.py +++ b/sdk/python/feast/utils.py @@ -33,11 +33,13 @@ FeatureViewNotFoundException, RequestDataNotFoundInEntityRowsException, ) +from feast.infra.key_encoding_utils import deserialize_entity_key from feast.protos.feast.serving.ServingService_pb2 import ( FieldStatus, GetOnlineFeaturesResponse, ) from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from feast.protos.feast.types.Value_pb2 import FloatList as FloatListProto from feast.protos.feast.types.Value_pb2 import RepeatedValue as RepeatedValueProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.type_map import python_values_to_proto_values @@ -49,7 +51,6 @@ from feast.feature_view import FeatureView from feast.on_demand_feature_view import OnDemandFeatureView - APPLICATION_NAME = "feast-dev/feast" USER_AGENT = "{}/{}".format(APPLICATION_NAME, get_version()) @@ -1050,3 +1051,51 @@ def tags_str_to_dict(tags: str = "") -> dict[str, str]: def _utc_now() -> datetime: return datetime.now(tz=timezone.utc) + + +def _build_retrieve_online_document_record( + entity_key: Union[str, bytes], + feature_value: Union[str, bytes], + vector_value: Union[str, List[float]], + distance_value: float, + event_timestamp: datetime, + entity_key_serialization_version: int, +) -> Tuple[ + Optional[datetime], + Optional[EntityKeyProto], + Optional[ValueProto], + Optional[ValueProto], + Optional[ValueProto], +]: + if entity_key_serialization_version < 3: + entity_key_proto = None + else: + if isinstance(entity_key, str): + entity_key_proto_bin = 
entity_key.encode("utf-8") + else: + entity_key_proto_bin = entity_key + entity_key_proto = deserialize_entity_key( + entity_key_proto_bin, + entity_key_serialization_version=entity_key_serialization_version, + ) + + feature_value_proto = ValueProto() + + if isinstance(feature_value, str): + feature_value_proto.ParseFromString(feature_value.encode("utf-8")) + else: + feature_value_proto.ParseFromString(feature_value) + + if isinstance(vector_value, str): + vector_value_proto = ValueProto(string_val=vector_value) + else: + vector_value_proto = ValueProto(float_list_val=FloatListProto(val=vector_value)) + + distance_value_proto = ValueProto(float_val=distance_value) + return ( + event_timestamp, + entity_key_proto, + feature_value_proto, + vector_value_proto, + distance_value_proto, + ) diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index a9bb9ba9c4..08b8757b95 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -197,6 +197,26 @@ def environment(request, worker_id): e.teardown() +@pytest.fixture +def vectordb_environment(request, worker_id): + e = construct_test_environment( + request.param, + worker_id=worker_id, + fixture_request=request, + entity_key_serialization_version=3, + ) + + e.setup() + + if hasattr(e.data_source_creator, "mock_environ"): + with mock.patch.dict(os.environ, e.data_source_creator.mock_environ): + yield e + else: + yield e + + e.teardown() + + _config_cache: Any = {} diff --git a/sdk/python/tests/integration/online_store/test_universal_online.py b/sdk/python/tests/integration/online_store/test_universal_online.py index 308201590d..1a0803acff 100644 --- a/sdk/python/tests/integration/online_store/test_universal_online.py +++ b/sdk/python/tests/integration/online_store/test_universal_online.py @@ -846,8 +846,8 @@ def assert_feature_service_entity_mapping_correctness( @pytest.mark.integration @pytest.mark.universal_online_stores(only=["pgvector", "elasticsearch"]) -def 
test_retrieve_online_documents(environment, fake_document_data): - fs = environment.feature_store +def test_retrieve_online_documents(vectordb_environment, fake_document_data): + fs = vectordb_environment.feature_store df, data_source = fake_document_data item_embeddings_feature_view = create_item_embeddings_feature_view(data_source) fs.apply([item_embeddings_feature_view, item()]) @@ -861,6 +861,9 @@ def test_retrieve_online_documents(environment, fake_document_data): ).to_dict() assert len(documents["embedding_float"]) == 2 + # assert returned the entity_id + assert len(documents["item_id"]) == 2 + documents = fs.retrieve_online_documents( feature="item_embeddings:embedding_float", query=[1.0, 2.0], From 163d34f23c5fcdf5b28599ee8556d2fb31d8ac79 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Fri, 20 Sep 2024 21:44:30 +0400 Subject: [PATCH 72/96] refactor: Use get_any_feature_view in online flow instead of listing them (#4545) --- sdk/python/feast/utils.py | 90 ++++++++++++++++++++------------------- 1 file changed, 47 insertions(+), 43 deletions(-) diff --git a/sdk/python/feast/utils.py b/sdk/python/feast/utils.py index a6d7853e1b..2ab73ae089 100644 --- a/sdk/python/feast/utils.py +++ b/sdk/python/feast/utils.py @@ -49,6 +49,7 @@ if typing.TYPE_CHECKING: from feast.feature_service import FeatureService from feast.feature_view import FeatureView + from feast.infra.registry.base_registry import BaseRegistry from feast.on_demand_feature_view import OnDemandFeatureView APPLICATION_NAME = "feast-dev/feast" @@ -756,61 +757,64 @@ def _list_feature_views( def _get_feature_views_to_use( - registry, + registry: "BaseRegistry", project, features: Optional[Union[List[str], "FeatureService"]], allow_cache=False, hide_dummy_entity: bool = True, ) -> Tuple[List["FeatureView"], List["OnDemandFeatureView"]]: from feast.feature_service import FeatureService - - fvs = { - fv.name: fv - for fv in [ - *_list_feature_views(registry, project, allow_cache, hide_dummy_entity), - 
*registry.list_stream_feature_views( - project=project, allow_cache=allow_cache - ), - ] - } - - od_fvs = { - fv.name: fv - for fv in registry.list_on_demand_feature_views( - project=project, allow_cache=allow_cache - ) - } + from feast.feature_view import DUMMY_ENTITY_NAME + from feast.on_demand_feature_view import OnDemandFeatureView if isinstance(features, FeatureService): - fvs_to_use, od_fvs_to_use = [], [] - for fv_name, projection in [ + feature_views = [ (projection.name, projection) for projection in features.feature_view_projections - ]: - if fv_name in fvs: - fvs_to_use.append(fvs[fv_name].with_projection(copy.copy(projection))) - elif fv_name in od_fvs: - odfv = od_fvs[fv_name].with_projection(copy.copy(projection)) - od_fvs_to_use.append(odfv) - # Let's make sure to include an FVs which the ODFV requires Features from. - for projection in odfv.source_feature_view_projections.values(): - fv = fvs[projection.name].with_projection(copy.copy(projection)) - if fv not in fvs_to_use: - fvs_to_use.append(fv) - else: - raise ValueError( - f"The provided feature service {features.name} contains a reference to a feature view" - f"{fv_name} which doesn't exist. Please make sure that you have created the feature view" - f'{fv_name} and that you have registered it by running "apply".' 
- ) - views_to_use = (fvs_to_use, od_fvs_to_use) + ] else: - views_to_use = ( - [*fvs.values()], - [*od_fvs.values()], - ) + assert features is not None + feature_views = [(feature.split(":")[0], None) for feature in features] # type: ignore[misc] + + fvs_to_use, od_fvs_to_use = [], [] + for name, projection in feature_views: + fv = registry.get_any_feature_view(name, project, allow_cache) + + if isinstance(fv, OnDemandFeatureView): + od_fvs_to_use.append( + fv.with_projection(copy.copy(projection)) if projection else fv + ) + + for source_projection in fv.source_feature_view_projections.values(): + source_fv = registry.get_any_feature_view( + source_projection.name, project, allow_cache + ) + # TODO better way to handler dummy entities + if ( + hide_dummy_entity + and source_fv.entities # type: ignore[attr-defined] + and source_fv.entities[0] == DUMMY_ENTITY_NAME # type: ignore[attr-defined] + ): + source_fv.entities = [] # type: ignore[attr-defined] + source_fv.entity_columns = [] # type: ignore[attr-defined] + + if source_fv not in fvs_to_use: + fvs_to_use.append( + source_fv.with_projection(copy.copy(source_projection)) + ) + else: + if ( + hide_dummy_entity + and fv.entities # type: ignore[attr-defined] + and fv.entities[0] == DUMMY_ENTITY_NAME # type: ignore[attr-defined] + ): + fv.entities = [] # type: ignore[attr-defined] + fv.entity_columns = [] # type: ignore[attr-defined] + fvs_to_use.append( + fv.with_projection(copy.copy(projection)) if projection else fv + ) - return views_to_use + return (fvs_to_use, od_fvs_to_use) def _get_online_request_context( From 9a0398e2e18585172d857cf3202a81551d31609b Mon Sep 17 00:00:00 2001 From: Julian Gesche <26026582+jgesche@users.noreply.github.com> Date: Sat, 21 Sep 2024 07:08:05 +0200 Subject: [PATCH 73/96] fix: Fixes validator field access for 'project_id' in BigQuery offline Store (#4509) Fixes validator field access for 'billing_project_id' in BigQuery Offline Store Signed-off-by: gesche 
<26026582+jgesche@users.noreply.github.com> --- sdk/python/feast/infra/offline_stores/bigquery.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py index ef12eba442..3ee1717461 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery.py +++ b/sdk/python/feast/infra/offline_stores/bigquery.py @@ -114,7 +114,7 @@ class BigQueryOfflineStoreConfig(FeastConfigBaseModel): @field_validator("billing_project_id") def project_id_exists(cls, v, values, **kwargs): - if v and not values["project_id"]: + if v and not values.data["project_id"]: raise ValueError( "please specify project_id if billing_project_id is specified" ) From 76b4576c2839d41a257948d0052cf11d7c5a921d Mon Sep 17 00:00:00 2001 From: Abdul Hameed Date: Sat, 21 Sep 2024 01:28:46 -0400 Subject: [PATCH 74/96] docs: Example to Deploy Feast Remote Server Components Using Podman Locally (#4516) * Added examples to deployed Feast remote server components podman container locally Signed-off-by: Abdul Hameed * removed the script and used composed files Signed-off-by: Abdul Hameed --------- Signed-off-by: Abdul Hameed --- examples/README.md | 16 ++ examples/podman_local/README.md | 72 +++++++++ examples/podman_local/__init__.py | 0 .../client/feature_repo/feature_store.yaml | 12 ++ .../podman_local/client/feature_repo/test.py | 123 +++++++++++++++ examples/podman_local/docker-compose.yml | 33 ++++ .../podman_local/feature_repo/__init__.py | 0 .../feature_repo/data/driver_stats.parquet | Bin 0 -> 35141 bytes .../podman_local/feature_repo/example_repo.py | 144 ++++++++++++++++++ .../feature_repo/feature_store.yaml | 9 ++ examples/podman_local/podman.png | Bin 0 -> 211897 bytes 11 files changed, 409 insertions(+) create mode 100644 examples/README.md create mode 100644 examples/podman_local/README.md create mode 100644 examples/podman_local/__init__.py create mode 100644 
examples/podman_local/client/feature_repo/feature_store.yaml create mode 100644 examples/podman_local/client/feature_repo/test.py create mode 100644 examples/podman_local/docker-compose.yml create mode 100644 examples/podman_local/feature_repo/__init__.py create mode 100644 examples/podman_local/feature_repo/data/driver_stats.parquet create mode 100644 examples/podman_local/feature_repo/example_repo.py create mode 100644 examples/podman_local/feature_repo/feature_store.yaml create mode 100644 examples/podman_local/podman.png diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 0000000000..91799864aa --- /dev/null +++ b/examples/README.md @@ -0,0 +1,16 @@ +# Feast Examples + +1. **[Quickstart Example](https://github.com/feast-dev/feast/tree/master/examples/quickstart)**: This is a step-by-step guide for getting started with Feast. + +2. **[Java Demo](https://github.com/feast-dev/feast/tree/master/examples/java-demo)**: Demonstrates how to use Feast with Java feature server and deployed with Kubernetes. + +3. **[Python Helm Demo](https://github.com/feast-dev/feast/tree/master/examples/python-helm-demo)**: Demonstrates Feast with Kubernetes using Helm charts and Python feature server. + +4. **[RBAC Local](https://github.com/feast-dev/feast/tree/master/examples/rbac-local)**: Demonstrates using notebooks how configure and test Role-Based Access Control (RBAC) for securing access in Feast using OIDC authorization type with in a local environment. + +5. **[RBAC Remote](https://github.com/feast-dev/feast/tree/master/examples/rbac-local)**: Demonstrates how to configure and test Role-Based Access Control (RBAC) for securing access in Feast using Kubernetes or OIDC Authentication type with in Kubernetes environment. + +6. **[Remote Offline Store](https://github.com/feast-dev/feast/tree/master/examples/remote-offline-store)**: Demonstrates how to set up and use remote offline server. + +7. 
**[Podman/Podman Compose_local](https://github.com/feast-dev/feast/tree/master/examples/podman_local)**: Demonstrates how to deploy Feast remote server components using Podman Compose locally. + diff --git a/examples/podman_local/README.md b/examples/podman_local/README.md new file mode 100644 index 0000000000..f5b6ad40d4 --- /dev/null +++ b/examples/podman_local/README.md @@ -0,0 +1,72 @@ + +# Feast example using Podman and Podman Compose + +This guide explains how to deploy Feast remote server components using Podman Compose locally and run an example using the client. + +## Prerequisites + +1. **Podman**: [Podman installation guide](https://podman.io/). +2. **Podman Compose**: [Podman Compose Installation guide](https://github.com/containers/podman-compose/tree/main?tab=readme-ov-file#installation]). +3. **Python 3.9+ environment** +4. **Feast CLI** + +## Setup + +### 1. **Feast Project Setup** + +- The project [feature_repo](feature_repo) already created using `feast init` command + +### 2. **Run the Podman Compose File** + +- Use the [docker-compose.yml](docker-compose.yml) file to install and run the Feast feature servers (online, offline, and registry) on podman. The docker-compose file uses the `feastdev/feature-server:latest` image. Each respective service has specific port mappings and maps the volume from the `./feature_repo` configuration. +- To start the feature servers, run the following command: + + ```bash + podman-compose up -d + ``` + +- This will launch the necessary containers for online, offline, and registry feature servers. + +### 3. **Verify the Installation** + +- Use the `podman ps` command to verify that the containers are running: + + ```bash + podman ps + ``` + + Example output: + + ``` + CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES + 61442d6d6ef3 docker.io/feastdev/feature-server:latest feast -c /feature... 
2 minutes ago Up 2 minutes 0.0.0.0:6566->6566/tcp online-feature-server + 1274c21716a6 docker.io/feastdev/feature-server:latest feast -c /feature... 2 minutes ago Up 2 minutes 0.0.0.0:8815->8815/tcp offline-feature-server + 4e38ca8c39db docker.io/feastdev/feature-server:latest feast -c /feature... 2 minutes ago Up 2 minutes 0.0.0.0:6570->6570/tcp registry-feature-server + ``` + +- Alternatively, you can verify the running containers through **Podman Desktop**: + ![podman.png](podman.png) + +### 4. **Run Feast Apply** + +- To apply the feature store definitions to the remote registry, run the following command: + + ```bash + podman exec registry-feature-server feast -c /feature_repo apply + ``` + +### 5. **Run Client Examples** + +- The [client](client) folder contains example client-side configurations and code: + - [feature_store.yaml](client/feature_repo/feature_store.yaml): Configuration for the feature store. + - [test.py](client/feature_repo/test.py): Example Python script to interact with the Feast server. + +### 6. **Cleanup** + +- To stop and remove the running containers, run the following command: + + ```bash + podman-compose down + ``` + +- This will stop all the feature server containers and clean up the environment. 
diff --git a/examples/podman_local/__init__.py b/examples/podman_local/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/examples/podman_local/client/feature_repo/feature_store.yaml b/examples/podman_local/client/feature_repo/feature_store.yaml new file mode 100644 index 0000000000..d4ad1ccb6f --- /dev/null +++ b/examples/podman_local/client/feature_repo/feature_store.yaml @@ -0,0 +1,12 @@ +project: my_project +registry: + registry_type: remote + path: localhost:6570 +offline_store: + type: remote + host: localhost + port: 8815 +online_store: + type: remote + path: http://localhost:6566 + diff --git a/examples/podman_local/client/feature_repo/test.py b/examples/podman_local/client/feature_repo/test.py new file mode 100644 index 0000000000..13ab2444aa --- /dev/null +++ b/examples/podman_local/client/feature_repo/test.py @@ -0,0 +1,123 @@ +import subprocess +from datetime import datetime +import pandas as pd +from feast import FeatureStore +from feast.data_source import PushMode + +def run_demo(): + try: + store = FeatureStore(repo_path=".") + + print("\n--- Historical features for training ---") + fetch_historical_features_entity_df(store, for_batch_scoring=False) + + print("\n--- Historical features for batch scoring ---") + fetch_historical_features_entity_df(store, for_batch_scoring=True) + + print("\n--- Load features into online store ---") + store.materialize_incremental(end_date=datetime.now()) + + print("\n--- Online features ---") + fetch_online_features(store) + + print("\n--- Online features retrieved (instead) through a feature service---") + fetch_online_features(store, source="feature_service") + + print( + "\n--- Online features retrieved (using feature service v3, which uses a feature view with a push source---" + ) + fetch_online_features(store, source="push") + + print("\n--- Simulate a stream event ingestion of the hourly stats df ---") + event_df = pd.DataFrame.from_dict( + { + "driver_id": [1001], + "event_timestamp": [ 
+ datetime.now(), + ], + "created": [ + datetime.now(), + ], + "conv_rate": [1.0], + "acc_rate": [1.0], + "avg_daily_trips": [1000], + } + ) + print(event_df) + store.push("driver_stats_push_source", event_df, to=PushMode.ONLINE_AND_OFFLINE) + + print("\n--- Online features again with updated values from a stream push---") + fetch_online_features(store, source="push") + except Exception as e: + print(f"An error occurred in run_demo: {e}") + +def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: bool): + try: + entity_df = pd.DataFrame.from_dict( + { + "driver_id": [1001, 1002, 1003], + "event_timestamp": [ + datetime(2021, 4, 12, 10, 59, 42), + datetime(2021, 4, 12, 8, 12, 10), + datetime(2021, 4, 12, 16, 40, 26), + ], + "label_driver_reported_satisfaction": [1, 5, 3], + "val_to_add": [1, 2, 3], + "val_to_add_2": [10, 20, 30], + } + ) + if for_batch_scoring: + entity_df["event_timestamp"] = pd.to_datetime("now", utc=True) + + training_df = store.get_historical_features( + entity_df=entity_df, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ], + ).to_df() + print(training_df.head()) + except Exception as e: + print(f"An error occurred in fetch_historical_features_entity_df: {e}") + +def fetch_online_features(store, source: str = ""): + try: + entity_rows = [ + { + "driver_id": 1001, + "val_to_add": 1000, + "val_to_add_2": 2000, + }, + { + "driver_id": 1002, + "val_to_add": 1001, + "val_to_add_2": 2002, + }, + ] + if source == "feature_service": + features_to_fetch = store.get_feature_service("driver_activity_v1") + elif source == "push": + features_to_fetch = store.get_feature_service("driver_activity_v3") + else: + features_to_fetch = [ + "driver_hourly_stats:acc_rate", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ] + 
returned_features = store.get_online_features( + features=features_to_fetch, + entity_rows=entity_rows, + ).to_dict() + for key, value in sorted(returned_features.items()): + print(key, " : ", value) + except Exception as e: + print(f"An error occurred in fetch_online_features: {e}") + +if __name__ == "__main__": + try: + run_demo() + except Exception as e: + print(f"An error occurred in the main block: {e}") diff --git a/examples/podman_local/docker-compose.yml b/examples/podman_local/docker-compose.yml new file mode 100644 index 0000000000..5bc1ae546a --- /dev/null +++ b/examples/podman_local/docker-compose.yml @@ -0,0 +1,33 @@ +version: '3.9' + +x-defaults: &default-settings + image: feastdev/feature-server:latest + restart: unless-stopped + +services: + online-feature-server: + <<: *default-settings + container_name: online-feature-server + command: feast -c /feature_repo serve -h 0.0.0.0 + ports: + - "6566:6566" + volumes: + - ./feature_repo:/feature_repo + + offline-feature-server: + <<: *default-settings + container_name: offline-feature-server + command: feast -c /feature_repo serve_offline -h 0.0.0.0 + ports: + - "8815:8815" + volumes: + - ./feature_repo:/feature_repo + + registry-feature-server: + <<: *default-settings + container_name: registry-feature-server + command: feast -c /feature_repo serve_registry + ports: + - "6570:6570" + volumes: + - ./feature_repo:/feature_repo diff --git a/examples/podman_local/feature_repo/__init__.py b/examples/podman_local/feature_repo/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/examples/podman_local/feature_repo/data/driver_stats.parquet b/examples/podman_local/feature_repo/data/driver_stats.parquet new file mode 100644 index 0000000000000000000000000000000000000000..7ea02b9a9f2a2c271115468e2e81be6ff6224cae GIT binary patch literal 35141 zcmb5#Wn5HIv@rZ31(Z-J5n(_?KtQBp&RRo#V)8(YML|aCq z-4%ZC$eelh=K1SSA~Ys1?K8X+p-uLx=o%1HcKM_j6X{yTeP+atcU?;?iQY#)*=-`y 
zd}Q|75$jesK6NBAhcef=5-0Oo!rWPyOAKevda*FyZ~i6n&6jNMxL^i>MDHz086iZP z?D2tcV%?JofhZ!gVzKj9;^ZdAfp}sH*Ouo=M0)1|`JKd$q}!1xL~qr<&FMtitM$LK z{^dX9ZIDM|ru*KU-NZ@3uG>Y#l);aJr9^tx>dh6z4sfWiB6@$#m_0zGooQC8C)Nee zY&}e5N{L)LLY#bHN^2sf6epUT`j>y>v4XQC?qHi3xLgCq3~uNpra1pexIv@~tiO7b*m1*){SMJPwa8+KNYm{s8u^$1345;L2m zS3M$5`q}$EC8mgH9GfE2M=rdZCU)$e)|e-Hn+WfCO{D!b>3T=3yO_ZBiO7sPYP~?5 zlzUM6otW~B`{A#D`KxrLXz$7E?ZSfCh~A51|nU?BtxCp@hE;ki|Ac;L_m*7vmSQ>VjU;<0evF#n$B}0;$%Xoycsb?Wq;(x zfBENcHd~XpqyE=VTcWp{l7SK%}3t zoed^-1g0s46TKzRZ;d3<#-F#v{L5b?NQ);i)7;o3i8#p;U$BFia;b4BnMmI{CYny{ zSjXj+Mf855-H=P99SC_&}1qpEi{CkeFFHI&qyiX|qV`CNYIuJLoo%-W7cE9QoV#o zi|2gKPpnhcl3z|_&IUya6DMmcn^zE1T>5{C6X{ET=+lWE{R%m&iQZ|OZ_5&CdIf?C z#JW#c+?0sS6L0ENh?D*jvuebYRhyKwi1g9qt-8dH!ZU3Y(cARdUnY@8TV`TJtZOwW zFeNghw+>klC*==|S`$;IhCOYG^!*$S4#W<7%~#Gu?I@>|?rtkXNO{6_IMO`P>?R}a)KxA473=9${S@i|(5mPQl zI}QKKAA7KRjKm#^_opU^-qURIPl&V{^~h($I;Vi<7epq1`OjJ6fn@^;VS$r)dcI??s6%)P9PNbI+X)KTX zEC1zh;}xhTF*637YKfByk<|x@DbH)C8i@4jyK={f9S$s!Cy3s>s?Dc~v_9XT=ZJN? zO7vTZOs$^mHsa*F&x2QpDM!}`cKpllvJ`8BY-DvXAl~6FZ(P8JZ+|S5Trah%}oB&smb+kgN9aOA<3X z@4S3NoJ^u^_&`ii-H`B^NPp$i{*~BqsF>|1(c8V-{11^P^mz{pDVypBWk%WgnB4C- zXE};LYoN~FCl%FZCxHFgeW6PaEgm**2FMOM2N5>xKl*XdQij_p~AJZ}WiL~>=Z8gNYFw?*F|MJTu8aI%5^3l=!qr{Z52ltwYbZZ{b z)5H!=JX)s=I+a0{D@5k}@AzxP$$I7XE@FzC7h4~ZzP!l1pM_a^ zvt!RK5_)HR7`a2FQPSe~iFIG>d`JInKQATA6DKT06Z$17ltot*suLFy5aJZ#6c^?C zUp2NU*Z+E&Fiq}%*h;QRfA~NDUc^SesS{`6PNuPPi8AX_QrWnc$+>jYrKT?ukT$Di z)~97I7F7)%@2F4D=BFFvi0U8A$XzaHchsfxU}nCEitmj|{X<#1S8B)1jdvc(-XqCO z<62>GIA`x_vtl{duEV({Yi$mi?K5b|D_iGsF8D!LLw?12@2;E`hK&XL)PjeOx^_42 z-meukbz`65k-`Idi67@4bRXGMiz%$!Vn#=c>J4+2$-DI&-FwKiNZP#0=vZ;X#!A(Y ziJoI6M>f?NCDoDY3G(A+e3+Pgb%)y9?+56%bQ=M+#$VY09-5|FVulcLYI zh@Wm$q>^gDu~Su zfxfXqC1B`6uKn6gwamRm z<(dU98~m;pt+8(@a8(PsQ)gVFx!X-6Z0te#bl2HBd$+;}|7#Z*oq-O^y&L#BIW z9WRxJ*dN(cZ&IdP7V6mKd^T|8QdyYuX|K*KA-(c&*YiR58hPp7v{Y73KDt?*H%mGv zp9C|9?!%hNBdqLwqt1sdn@<8#xTJj9bC=0l*O}G(a^wpudv@eV`El+R(~OwBQSY~C zj|A+ZaZCGi?Ugnuq?;e~=Pr@8u3_d%2k?|BIGytDKNzsMLfP|b3is;3CHqtZZ`YY0 
z3gq3d8S$hecXbfo0p0kwll_N+_-kR;9~zHLut2>*7N3;G;ozl*ObR9R^JGGnHCU7@ z`wScoS$@R222y#}gbE(BYqYMnXb2T*aysSNnYSiP_@rA)#Iu2hFp<-qS9h^2mJJs@ z>)Tf-wXrdL#reS7HTwCoTf~|}M^E|OY}~T)V#JfHsf*V}h_}Ve+^*kvBx2R&__t3w z^Vdd7v~OE@`|ReCNXcuv{`_HCA{RyPOylI2wmcdo)t$vBX;2^+E!~?Zyx#ZL(dgCJ z3zH=`b?eyHwRdVDnUz-|R_@+GlReT_ z$7AL1H(J*k>{f_V7&-2A+IR4H+`6$-p4ZZN*TpM7I2U;5pw&ud2aT0ui8!-}*~!Yu zE6zv}(C=dB5mI#TDAModjR9PtU?yUa&`@t2Mo$AMiiV*y0@nZZ5&l}YYunm)mJd_|2dk-cVXF!C(2)}!-Q`vfH6tEh*~uni{YX1z`qlx%3hT$Z@oyfr zr;BWQ!r1oZ_2ce}O;2Ih?_a;!L~SOSX&k%~MwK?t46=AvL58U9lu@3Ll4nn)?Q@gD zRn#u_6?QMoiq}}x811v0wkTKfxSFxTe#Wv&W6P7CefG1~HPp^O>|zdcHV2J&OBh!< z%-c0u9e_+R$CnPr9Z!1pRyn?MI_1&6i({qJYnO8YgEhwcHB(Yp*m*>?Q|PH_981Ks z6KPUu8Qg*jF0Cn2=~=wux{2Rt(iu5RWh^SSQ>8QWgcRLc6IoVg6|7JX9dAuton5$! zvF+P87MYwPsf|W?qB?0Zxy5TNtF)6?*W{JTI~;duOIwp)uH=3x@jI(*LFESjfl8fp z+1*td;S;S%Y-A2u&l zaJ`fvU(#q*t()|NU7_@-?O}_3x|s@P#~n|&wQb{ASAN3vLg<4_nd>S}d0g4{{Rf9) z*Xw^{ViJ_T}t}Nsa)L}{bFFBUbgaqOL4C! z+O{uRUvnkt^UDX9v)9*N-TCXs_n(VY>N--{7mG1+RO-7jm#oy?!L{LFPp;rPw<|dt z4)yI8*W31sOZD)Ly)qlC7`dtq17(VvFYVw~YrM5jJ#6AiuG*2?2N>Ib{Nh$Wdbi#v zUyRCAKQ`20xnFlDkH+!gV-8Jj?RgqaqbJ=jZ~M)od1Cym|II2YUvtYzH`!5ZaUu2< z8Z=IU6{7qB@c|LBUa=ut0=CBbY>5(L6|NH(zG^Le+gkRnk}TK%O?9zCk#qoQLaa3M zMSwtRSau;BA*Q7e&wsQ7<+$eAXdEoGH@sZ5*SuVeSO4k|@_6}Ql9%ZU!T+bSdGlXC z68-nm|9JeXZ2tGHO*0_>XBdutJI=_o;KD({TnJkcflXVM!+bTaz;^2-?0b8ZVO>tgs|L<^d>sp_$h+eo&-EyK zIt$+M^PyMkYP_|-9cXE75WH6q-v)}I2)h{8?HGclw@SF+FNA5?TOm`P9h#iiW4e(b z?q)Z`m&~^i2RqP=-yWAfamS56m&0wp{ZNr7fr?Xfoi92xl zyAo7rOXFXM4#sb7M6HwdSQ{;hJNRR<^PCk-8~%i|H<}@9S_*ra9%$jy07F&Xl;jIt zydS2GG3WZJ7r8w64FmA=WdRgovccwPRVw7}X=qNrLIr$$26xw_g7oAM_{_cvcYX~) zJ@a)?B<=}urFTH);Zl5>Tm?UB*We$NqMlia*9)BVezdxUzlw-!Rr}NG1(-kAaV#3T$q{Q}v|eehtp0Ddq2 z0NcODf=bdNI6G?(rxe#C^IZy@PxC-y&LmV!9fU58O4u-_gAOr0;JjrAeEYQnSFr9t z`#RTHFePeIbxNYGc~Lw{ig9uo1xuf|NMc$Nlg`scy9L=fql#4+hvBI-B_ zgPYM=uw*xYgQpW9U_=K~r$mqn3B_-BCSYtuCk!2SLARk5IOa)%0vB2AHdRIS3{`k5 zCxruFv|;cX2X^NBp_0BfC6)F9xcL5p#VtkrQ6`C+bDTJNHVB_6e}Ma}OOYnD9oUQ& 
z@Pd9Y#*1tM=huFeqTn;?bha&IZr*_6@2t?5UkyqR?FHZJb*OlB1%CU&3$>cJAyL}` z$E7sTaQ!;)Hj={&McVMl)&&nbK7j|Td%!qrCmufc9n>;3@f%AN9x#r^ZJ!-*_A)Dm zv?KNevck`+<*+SX1asetV(J+$#QjNB9H$~ykJZC;PZTP!oTj{%n&5!I0=y}uGcpF1 zu&KZYWh9GX$=x`R(W|DuTpIxQ#V$Cpt{)yU<#F*$APyNTK{2}y<83$_wy;#f@qRP( z=Tpa{yuYb;?{|^+!-W?L7~mYFje)t>0S)Z&Z9oL_L@j1$^Q?zs{g)|!CmnpQM#gG2 zThtHCg0Qrc)SA;749z}C92_u3r+zXPj;_X{SWdk0aTwg#yiky40e|#tQBU?Aq#8<~ z09z0~<=TaRrP85Wy%^rD+KjiFcH$;$JKXtK3pkFS)Prab_;_~{K3vNQe#Wb*Lm^%; zd)F0v6cS;-yAoC;egR9tKk$>sjWPT!3(EDRQKp~(Yz3{s`D7`?SoA;uhb{8Wa)Zj% zQ5Y>V1(uriSbJOu&OG(NZzr|zSAafBKjA^OlzJGfdk@8_+IakNEof;BQd}pvFfT3< z5B|9fl1GE_(h6_fEBKR9C>sT{mrW2yZBRZY7`b?^!46$V807Xsb&pW=joAT88Eklh z{|@}k(HgJ7{L9C;4}W6shZ`2BMZlq{dZtxFQrvoSklt763u(Td=^#+J(QG=d4Y zA8=`40lqJ{q39Z3*gQ}`^?RCP{@XDig4bV+rgS%m>4PSMV8@$NJXN8OH!?57QUx<$4K(pg35U;Gj0<|HXlZiGU zQ!xn%Az$Hbnhqw%d;`5lxggoa2_pQjpzXIlj-+nEZGD~;Px}R!GkZdnS_;5S=SF1C zvf|+j8nCxe0=3m5@QtA<$QY%;5pxB+p|t@51}srfp^6fCuo(Csg+L>oWbpn}#-6v| zL1BFp{B}7-z0UW>OX;roc8(31F@dmr)keJBXpad|58?3NCTj4hFRlX#jQ3gz8LK_8 ztKkC(KROJNoh{Vdd=TEY5kh}igjXyZv4dVooh>!To7sgh^;Hzvz4h>}rw$5UD~87s zpWw-&aP0mTiANTN;p@;fRE?7fUVd#0vr8X<)MEjZ*|!Szh}J=nWjQ2SzJSB$LZE!V z4f4kNpjNdCGW)dgdX_P&>A9hV<1oWGY9-P`#qa?c^UmzFz(VaAc;>}~tNS)%IJFpT zvML~XqbdC0JxX;QaKbN!b#P2q7>%Q3F(B6)x0&u?$k_1V@{kZ*|8fkJg;Y^zr!w&P z3L@vW<+ygi3x`DQacjU!imJQ`2I2D>?gBqkn@*pc0P!vO$3~395Hc0i9!Iak?T6 zWIUB1l5Z_YoatdCi@%3%*7caSzYG=~IzUZ^kHN`L3Yfa~1mOEHWZiWP*sMO)!+|ly}W2N zNUl9KAsDB&4ueO>sp+P@lLSY~; zE?ZE-8Iz0DqG)pL^hcveVlPselx0!)=#&hu>F`CmG#7ArhN9)Jd*2W_qBt`WfV(@CP|cSIUxe<&hr9u3@OBwS@VP*&QxK{s6~pdX zS-gMH9hCw4x_BhE>nL3${~FR3vwMaq{f)xIJ@K`*s~d-N>MI6{e6=9`F<6?u~$QF zXA%5;mmLkF4Joy84bZ8IW|aL|4-Tu|K$%V`zToCTtzX+Q+L6L-@dv2IT}<4za~Wv1 zDdX2nI!xrR!Q$Ri(8A$E?Oz%Jcrh4DGB?7Z+x5>Vo=8IJ9Y!N+fc@$H*NsL$rbi;erpwM+vyJWxTc z*mh8hX2sknPUJ5<49;#5c(v*m957st!c-H8GY*5ifq@>+)&dCrtPA_Zm*Qlk9!gvv zheN&d)RTNe98?Vj&2RgGzHk*{4t%Cqq=Ud!UIg1NQg}Ao8-fqpqKKU>GP6FyqP`6X z4$dI)eKi_=eGHw!`zRJjLQefd@Hasj`Y-%~wV!6-m*Qe*;FlqQp;mt>CSHu`;?9fvg{cA5tRTw%|GiUTl!` 
zLFcK|8qmitCSobej_fM6fWzI3#dw zn=e)-$l@k+!PyrSTE7a%n#|`^b^S6t7Pbt+G@e7jWft6gb{RT9jHW83pTX6u`(g6+ zB0Lng3Fle1;_Zhk@HdMk^lY*Rj{W;#^xPlNW-F$eo97w7eYKG(R6%*l{DP)3H@vb@ z8}cW%Q=EfQ=w2d(bDsl1>VrQOnyZegZqD#KLKC-3>S6YdVCZ|X8y;keV?u~Ai1Ec^ zf%y-Rt<^`Vk5?(N^J}s77pc!x{7@q8I8|%)S@-Qq5wy7P5A2d*nBlI4XYLq+QMo=w zaRj1*&r;k~5f0Ck4}g#7AxMfS1V784@M-&3_?MJ;SNrH*xqdU*Y=6Z#gN z0`I_$D0}Jwe3zaE*0594&|UkEmkkRc^Euvq#C=V)$*w3KDPbPH4H! zbMYRv{cIX!8rtDiF;eFqkbv9tY-;USSF|e%!X^BV!D`MO3tTQk=0_8FKcx)k2X28< z5g8ATU8GhV5Wvu2PNZCSpkNt=x$3^~o0SckWFqlsk_6}v>Hu4y8rJPE0`na{c*!jm zS?10I+Bo9x7!|NqP62Um3!Jw-1GBuzsB=ybr&MT^NGKngp7X;!K}B%A&;!3{WkJ@} z+hCkzg|mYljF@6C)O~jh-pOS_b+r%ro<0D}#1>%fGf9;3UknSMJ7M8GCl$MRC%$j7 zLpjK!o<_X^`%5A?vQ-6lzjlCYjxU8b*sHGfOi-cdvj5(NvGkfLktXL3hEiV>;ouI#%B%@c*%4%T)Pqpo)0xJ zH{m<@%;{i5wjiNey_`dNK>^Y>36RRxo6z2(OunI%J9)Hkfv_U{i93C5N zhSI^MnE8=|{GCp~*4-SGgu`+y=qiJL3sZdVQVdn8SAmx)1=pW=V|^_@{(U+J9%h^I zDW?$@mtLh(|5%{=-ZuF3jumYMuY>Om6BJxMK{Xyz!H3PIl!)scaGX9)rAIu4q|YOWJu6TqxZTJ~ReRwg>$`nhId4R7}8{54!0BmnUnoDxS3ExJxZ3tM=5a_ zPuIklJ`OaMybNu23GgMVmGoRzWBnszTqhubquoZ}##;#UySy>zIvZLzJ_66MIuH!J z3CBzJLmkdj%l5W1@|+6b_bF{S^OYO(;~&GS4Wan?4?kL}CSq-V1F)Ym$K4t~p>@_9 z=eL}ql&aoC0$mqJ#P5NX$6BOYk2 zq3YZ<(Cnq*r<{GTw?YrS)ooB!aSqnkUx&DJ+hE4=Exhm8hTRDP4DJ9iq)dJFgoP|| zCUXYb?^wc$&-R$z)BrW1$|xjE;Yz(=P-(voU-q7*bR|@9WvvuB@2|jyHv`m#QYn=0 z&xdyrtFXO|7pK-Cx}4TS-V-5Mua|_%iXxc$a~E1n@IXCJBuE`d#MZOQ_+DNTl{;hb zP|ri4y%s^KH6nOK_bSMn6hhbUWOSeF1d+gCRCJBQswN|dQ)&U11QW%1Y$@2L8{xLvbf}dU#4O9(E<1oJis0}fx`UeZ3bh->ADg`k*FbeC#K7ut184F_@;o+l5n0YWk z1@Z2{K&3}u|7Hs)`O0E)hzT4r$b?NNFN06$HWWACf&1?!!_pByU>j}*2h&NIGtk4c zfs&}!v;nMUuEV6xSx6+GsexJ&9&xb3a8osGUMGZKO80}qqAb`VxeLEqy1?!g9w-wL zfq~{*QN=PI!^j*C6ImucW&NRW=m{6*6K8ccYQG-ZdW4*cDYS0yuLElTl*dXYQf}1@t*D(fjeXNiv zo(^Z$8DjR&Z1{3+C4LKa2BES&(EM-|a*GUbty?;5x8_FXq(4gEEYjU_mK`t0>Ep#_ zIW($JM0NEh0PhpjF>+rUR7Z~Q3KNh$u7s>7-crT=#t<`I0Gh*m)UQN)a^E-sb4Qjz z)3sOBPN!Jp;QR#71m8pFr{#EmksE$krU?m3+_;2{w+9QguqR~%zLI-s-DliL?bJka z&cHJdQ{Xj^g}{5ZDBsEetAmoTb*7x!_q`gL9^2pnr#;l2g?R9J{0w}B#Zmg)Luy*} 
z0I&;6f~O-J&e`~5`VRw!bm}Z*@3{(0Hg(MDUy38gJR!;>5svJfh0|dGLDMO)?0Y%v z{yamaoh*cjPa+r+5|2<-4B}5xpleDLAL(qtJzHKve*F~aR9~fJMELOF`jeF2s<+gH zRv?w}WdfWP`9YzW3#-MVP;tByZf%i3=R`@2-^9c<(f6pDk$a&0#}0erzd_R6L0Fnt z249oE0p~R})bF|lVc~WZ2&OUmKPIAw;x$I#*Ij@$Ht4WZ2e-Z8Loykfu~w*X?SG#R zW&bykD9Zk?)kS6`|64>RKD(BG?&SWj8PLVaG_DjHCo_drbdttpmclCBk;17x$s&-G z!Y<29<#Cy06}^$dso9armpI8r=St-=VWussoMe|XOXYFuNE2?I~e9+w+w!Yv({ z%8Ad&g*jcckC~-f`D{tBS-RM0N0w&mGv272bnzKxw(j^dzQh~p5(^#K@a-9Y3Rec5 zQ$NQ*bV?w{EJIqjGsi@GYH3kUhK#I!u7%6gvdSA7vYMT_)`?Tg>$o!IO!V{YDyIaG znq?|Db>=y>P6?gK$y5x~&vzT25^lYbsT|*#@A+*?q=PF1+SM<5)4YMrO^3H-l z?dL1*=47ci>hBJ9c`i17BTKWTb9Y4I^Ocia+1h>jg)x=S#plejbw@i3<6EDvdY_Zc zn9<*}ZTz{!w;S28(79*Vx95^H?i?nkK~b9M3p$s1j)8DjQI_@#DS_M^BUyvJc`h%c zMf-D1G`sc|Ccaor=gu`VF(@vsd?6!eo@?RMRb1ZsVvR~}u4SM>N!9oZS?&H@>-esc znr|=GGP(0?vJ6TOicZU!ndjM+ca=73Ps`in<~cMPlpS}OR&eRhb86`-JC!)S&YL^m zrO%-JT;;T4uz9}QXjgel>$Fl-Zod1BLB*xa6AF+QW&+P^y@zI*?ZZ!=mQ+=WqDhSgJ|v)VVz3uDT=t7o)lb?)XC z#x@!rcq1c9jFAjhem3tdm~a*YT7cH8DPJx9_FHQH!$UPQAyS+Fm-I z$tyb%Xx!xX;H6XRK-sDI-X_oQFP%Gh%FkpOpYUDr%H@Vd`ML7m6M;IfT<_+UUuZNw z8S47VZG51-rKR^|MAECxlROozea5F^_PuhSv#7W<+IuR#?Ul#-yoxI`#;3PEc;)$R zpyKL6@9ACNUwP3MS9WlkoJm{p+M8=*WtVW@nXKL}AO3Wg9vRIId5*VyMY>)3)LS+b z#@+IhWOKb?q^Vk5cFTXQq3eJ{i)wlEt$_9EuDAR()v88r1!{G>-i~citNC&(2-)23 zW@xG(6dDXRHFO&)ZBcL37!28z?lyc_Q{%YfV5oDq+vtTBjZ<-hVP0&T$9pt2&y@{^ z2N`aj7;e#QX&&4XnZEha3r(#{BZCnM-J73$Zqd5>WiT?C&3%$xOS?OJ{#JK-9e=lcvT#+1;V#OsEhA4@MXW;E_3-AVWSd{~Qm z>3C<`ShwfGg^SeNxI5dQvUz>$(Snaa2abGEGxg8vDV0k$FbEwxaDr<`V5~X0osOQqj$5kdVKieS`8%@?q(yquRx}@ zk+kr=98)9TWo4~KvYPjDH)Z$=HfS3wINi&0?(r3FZZ%epznAaD?kC!-ZK7I!uOP_C zPi&;sM6>1I?#K*3@o8;S-O+o62|a!iUs_FJ;ohEPc7HmDj+ue*P*Jv#zqC-BnTh7m z-aQ%qGHZ0qEu4mmD|-B8HQLOrVSSva)}m7UHAPz=!7 z=r%f3ez7M&Ij(J^=fY6MHTFQ2OdU&K;ro@>jRIB6+AIS#@9(>l5vbmvV-@OjziO-} zP_wzsDkA>={-^9g+Pyl~G3EEGXN`h%N7}68Tkaotml4F6*4eae^nT6Po*?+rwrSVG z{o23m!AuTan>69!x<$sp211u?vNVV5`7?u!*67;iISn5a=?yl~xMW)xKYU1%BgD*D z*RHsH`0!fe5DUjkcI7R@4eK*QECY1yt44<#wR%IW<1X3PEDRq(j!>ISU5A6hBS%e* 
zL+#2gIW%gH9NUx`>d>I;c-(2^xN~o)Q}ZRqQ}H8BUL0XAy}C~4%12HF8Hc%zTykn@ z895o58RkB%>wIZ+wmr_pm2z2SixmtAkikDjmP*b;24=QdP6df|ximQcsbZlf)u&8IWBga_zto){f% zx!AiUBJT3$Ckvw&uW>{~W$L+436Hg2H;#xYyX-!rIo5V3Ga|M@&*P=j*rl=Fi1_Bq z9&h8vEnW{3O1PUq{ZEqD+Rrw8(=SrY14P zWmo)VwI1|u%8Ds%U<4>QKNxWCiz#ou5}=&$;HDSn*2-Q+plZc~TR|pUt46K_YF>OW z7@4)TdYTcWJNDpqLf_V!FIR%#>w`PVoUwHrRIq`_#NBL@*n>jt!6sT0_x5DP9$rI* zSU67%RrJL+YP5$~CrsS0<%~OOOoiH2Obj0}i6hG&LY*#7jGWGjI}t#Ixs6SXUhIoI z71ti-`E_FK8fW~OOe)-0rllXIG?csr1461Ht-Vx4OvS@TvnC0bM%p9dFFt(yE-T^6G!?mR?BSEIeF;~;v`6mx`ta#r&cqH5 zh)NTAG`Yw$u}kP`RMyJdQL;sESj2_6{%ds}w_jvLyjP4*Y`}l6Q+r#x zCgyh)!&<4|&~@FN+V=~ox^RDJmFCCF*Bsc}aD|dbHspB~0LJsbfK&Jx^|0FmSFx@I z|D`@?P@DoX0=_7u`5KBU*Wzl{2Fg9*3^>=bqV(7dR9w6Tk#;vJebV+El+(gZHx7V; ztrPy-XN0#_M#IhxInW{&3t@rP@cr>I>S50UgkBzp0lz6~?-PBvC+dr~vD%nNrggTT zDu>n!84#uvf?*<~@RP9%&MNLi&2rM-Q#Qkam&f6!`Y=@&;E1~`8)4|sI+PhQf-x^q zTzkcb;)_lyDcvK*a$ZP}q z#u}jO$)NX7dnn9I#!X-CaaEf!R+uctnne%b-sn>>GP1=dQ;N`FZH|B39q?9?DlQ^Z z7-u4Fu}HxkCf%&?aA*Oz9yG(@^X#~%JQ{}sYW0SmufeCALNNY;C>|Xaz;E4Osc7MK zSW#<==COBRl(ZlprM-sLhpgd%!+R=bLo=+_yFl$fat}^bDdOjM<~VIBj#=_{INc|Q zy5n(ZaUuy>xLNQb>sGujsE)%1AEDYl5FPZ^p=V<|CH{~DH}tci#)@AMV#Wz-N`M|p zEAf`JK5{A#P|LE;!rdD#q~)K0u-XpUl^F^I|;g8iKopR^xkCHf*##4YVS4lt}*xT=zHNpHETbICTVH zwI(oE;DDK3nsDFY8l~9X43Z9(xbOK#hQjtk)R#did{Q%@7Zm&wG`;*#H;M&5-p+4bPD?N<{0zJq<-{|zUaHSo7+;^u zrBX6CQ#Ha~c=Fg3T$xqExh`X5Yrg@R!$DYZ-VR#&UI9(P7b{LPAl63`%-zE9ib^tm zb;tun`zUIUvJ<~-UdEtz!lO>UJLIs`_JOX1_hidB@VI!GdXx{Xk@nlO7UVq|+_w{8# zTwWiffprBOSj2Lla+F?;aXZq$yCn~zWQ?92{n(bux`N&_lP%vtE~r~pbmj`V?9(1G0^L(JZkXVhHu4d zP;iAMcJ_H7d*3>&k=co&Y1NQ1ZHwYdwUGBn0`}`}1iw`l==dFw=^q3WoSNugACJvf zEO8}S)@oE@1V)$GF!ewnb!SZq*sop>r7WzlJ)0dR+P^@mhZml?MwWb?kVM7HuGFIg zpBX!Qi}idJNtp=Z?DSW=m7|;JK1&%L=0QQ8Uelb}Z z;HZcjuc?DzWgN!MJ%>%db--{pqF-(;ypb*X0X>|tE8v!W5 zuY;ZNJYa}R7SF?ShkmGjrHtDp7NDZDiXlus zC&6a`x;IVnvPeIyiipQ+rfHx;&dcQyWLb;#H_EQ_wcg<}4;)*rj+rl);YD92nI77K z&Zqk+^=cEm`O6E<-6(t^7K#GOhS+oF6nvOb!vo5acy6MKdf=mji{B?Bhpra<^&Evx 
z4=Z#z+dyqiYp2MthZR&ksD<6s`%O>A6d^mb)A&Y39ap9xejD<&+2i-^OK|9bHCaM< z8TL=PV~l$)98u@TV}=Fb^?ib|e5()kZI8szJMr-MuOzzrm|)^>F7!G(MYXW+r#{cj z!SvV7c(Gm^Pw$@t?F2nkBInMLn`3avoE?|{kwN<^4m`Qx6g+v!#IxIv0poTO^8fh* zpL%XH*pF~fv5_*El%|54tCjJ)tpi&14uDf&Jp5LnA(MSERwZS^^Drfx(0C1F?BkRH zr!N{mQ^tU|VR&xEO~`Q8MG5aHyjGG4uWxU`vCbe|c)kRaNL$!I?j2*bo*#rhQ9!;u z8n8`=0|sK8G2XHS6nI}l&=($f|7?o-`j88SSF_{TnHx}^?vAHUjR7CG3BF*-1yjjp zcsOH%d~7WLPkUb;Pu1FnjSOXKGE<364Kif5p4&!dLS-!3=9$bCsU$_xpiD`VsglZ^ zI;fB-NdrxaCY4Y!bgJ*(>YTSTy}$4G`@Z-6>s8mjuKT+0wbrwqz4lsb@BOU3w7yx+ zhRat);L@vDLJt`xRm{`0C$kQbiAE>r(usl6p>Xo<&}LZg9S9{y50Rv-M8fXSN3_K6 zP({2QL^FK>RQ)Q0!-1@{71D;tEUm>Pd)yed?lFPhUCV*BV>#q!$m8VzTu^k!2zu|@ zgSOdXc%3tg%&gZXdTjz=#$BNurWi@Jk8dXj+|8i!!Zs2kCJ1siAGO>cWl-K?GoXrj zI^~{zl{iT>62Zqa;I<|Wez|2#s+;nN)@K6B7Lt(jSq;MJGr`|>HPp;7C%c`ufLn_! z1hVZSfu*NNzsm>8@zel$qgzE?H58?ORTBk~EjSD&j|rl=+~D|KQ&3)p!-Mdv!8TR` zXH(|Fdz~xf$Qd5+bMc|NQ*d}*<8o4-Gz~r;rHH8OS!%PQ0k|kez_C;uCOvS5=zb`t z_S~OAYBf!@PxqJ+^#LYWs_qBb>+mbES$)LyJ|D=j93ze=`$%Rgnw!cH@AXix>+kz}In?+u?{v49<(gEk2mQbOZh#QNE6c)=$~ei@z(!#EsT zO@SXuef&t^+-`EXY%LMK76TlK3FPVZ2#}&J0a;26RL`s7(CB_LaA*-|U$TeXH7iJA z_c_XIfi}1>j}qO5kBL|le)V*;G4uFa*FOxCEUA4 z3@ui{V-GX1f7wj+v_Bzi`=VedSq|h4@qGt119;V>AxqQ&`sULiNofX=d9;#Dez{Fq z$OV9t)HJy17y=vi-z1YKKWbNtbrRcK%c&%)dln);DL^ zH0u~8oV1I?*X$unR{DT~LI4@flOzw1#KKa!GQ#q1CP=Pd4!tfHh$SC>?N0@PCQAvq zUh53DQQkm{FeWUQ$4F0yJf!Xm0*)iPR86-CJgToF1J^@g*xj1U&nly$Y+K2wqzFum zS^>8S3#{lEA!DK&ASPfF@#AzLt}ow{v*CpDDhr4B;5AShJVA0NxuMOK!2UggkjSATRA#toJr^H?*kOwvS)MSG< zJnYf~EiNhIe_EKB$H_y-a2V+O_mNKA-)_C54Xx{RN!m&W&}(}@4Bp7XF5hO#*@*|@ zuUdoDtzWdKZ4e+VRx9y&@_<|HrqJCc4WC!}kma0yBw+Ln>Bi#>pRYOieA0xSTuS8N zN@=Jl4kCVib}-ikhmx$iL4CXQf-2gx3fhGls7EX9VAq8(c-Zg@^*jT=eoO`^(z#5W zx3U72U_sP$9#Pje2odIsJIDl|7j)%YK;O~nAR5z7f-;40Xu22JxXOZ`o;Eo>W=Qosij=+BL!4gl!CeVX{Awzb?ATmDay^#7YsIzT(ZfaETQQ3`j0b?cQ6#XY zvqH0^9kCw}B}vO}lYI@mkgr`sRd%b9T^tWc@B%i-FVF)@)&=x7cvFE%4shgiAaHg* zBl`-L!|{g{+2CgeyZF`M+`$kStM>z@QEAv#;tE%^Qi-SCFxlSa4Ne*A-iNgadn&(1CNC7D;bA?Tg$0@Nd%c%X-d*bGCi5%Fuhm4O2!vp1ou)R+R 
z0yj8!a=%R<)x(u0DUQc@8#ge|`o>bUvUMSs!mt){p*T?Z|dYif&!ZUL& z+#9MV{P@0cZ0$JN^UeUa&qyHhM;&2AofA}Yr-6d(eiHdq72IcECi9jhXuXy(g<}yq za6GD=QfcoeEal;3^%WT~-^2$MFD?@SybNu9q6>I3>A+F0Qar~EAguvA2;Z;*Jdb}s zs>%U|<*G@=+WGKIzy#!vOo#A zb8s~XRLDZ{+N+fP$+NhveIXl4>11W-21r|%L)1(5QoZ9#US$rX=B+4fZ!Rb8 z=U0QR@(9`I@rp>^qu?~FBwXf?pxA>v$<_@%0DjQYsPG1EbhwR}&6*u^J zGeN<*bZYg9O7d%u9dJ5pQBPiFQx3L|NL-p7oMeh7)fYdKjpz74F*}3U;rZPKStV#V zR!sdGI7C{7@cn?G1(eB@lB$_;M0HRRFWz^e-2VYCpIFzCH6a~X+)5sgV zye`g8p7`A~1A)L=N}lPmHW<+%Y%mtMgwu#fu^ljv8iIv(46rDPgVnca7&Z_i^s~Zn z(0L{dS7-uj5FH+mPLQ6M5fW&H!;%*tChJPNi1O0u)Q-_?Qo_6mTv}BiU(OYt%uXPC z>*~nn%yX3H*BavJzW}sbDQdSIz7OmbqGs}L1UWiCtejMa!T}p?y_5lBGy4p=mJ>@_ z&gsBOc3XI|D1|cTkcZ+o9olARzEUQ)0*Up_>Cnydf`~n^f!ZrkuwnZ->NtxNWXq5cQef4S_RBGM~L^4CuHl6Sh&&ikSd;S3n$!cAv+z1lkW-v zPQ2X9q>T;yNATM7)ERDTOx`yY2)wnRc z+S*L^6v&X0x^itc6)}>%pqZ#S+Yntm=KFn%gu!EKFtFDj9yNTT+?35hq5L4_RGduK z1ihk6vt8gM9X~HwAp*TCPZO!f4Mb#;AGFWEAQKS^pk2uaHm9sfAN>*ORtbgZxCvrG z=cDS+@xXd2gAA2BV=h|9{;T(T3-|Y;R#Vc?Kz8qBeYC(6o1vn|Qk#2f2*~n)?Dz+ci3=v-jQ3|uE zVD24c&hAm7epa23p6m!FA#Iefg&PqnH-X_#9Izw|-z%jx5#C%Tc$BUSocs#pI`30*6=Z>{<}mpx z8Uf401_;M=br@myr-U!^L8XK@oW3v<7CJ}4G_OcXVark?a6TMX#$O_NH9+VIj?iBc zO*&!~Vf^9|@|c!NXf-QT0eG3O&U8rAZ6-CZW8uj964INm1s~1V!Qs4cIJ$T@8TxfANqt*Gm3sTb zkvCr{`ke-nR8>T_iphbG%N)2>d6K$#ii3I#=cvywRbfCxQj2rmL()^K4}5)QkP|md zo`&(j9bZ=1gqJ)v>inX8RObm^dd@|03-Xf%n=g?qPet%tB?j2(350Dk2mETjgVHNo z0t@cbVOfnQ#JwFN)@;@wfA9r~T;oYzbykqRCnKbDGM#X~T}n>c*b?34^IBWFZsL8s{msl2hDN(jm#PfgSz<25_ze_2dctZ%0}0@lOfemcx&;V1Cu47EmT zJEa_C0)1m4kh->t2n=yR@2ouPn4mat4UZ6`0eg68(@uQk_(_at0`*k=HPu`+PNdHH zljoXrN|rmH3g^MkJNIzF=E-SrK3<60o+k|!RZ)M~eni*zL7RCq5tO#}BF)tj9mJ4E55trF2bdXV@zM??RT_hh?pA+h8#g_avR z7us8nZwy9=ZVA|L9qDZ1Boi%v`nl%5Rn5W z5Kh)X?!BGFMU)5X**C$0(H7#ke}tGn@WszeL&3?pje5douHEtqhx^9#lg1^s;LD>2 z%e~Wx%eVbx@T?K|yr#j2Z9HUNUM)G%UQS}!a>!DilUmkYY;eWW6}Hqbg!qmvK(EjN zekVmp_nf2-r(Pj$PsFK^C3mRJ?-#-Xrv-34RtASVT7#*x0eGd1l1(`{)NDN+jHT2- z11|&fkora?Ds3S%+6|y<{Y|33+!q=T@k${Is5nOy_}z#YEr+~ZxdwttZV^Eot|`;E9-gY%ku8fBKvo7`YWO&e 
zc;LBqK}|0a9heWv6)GTElTN|S07`jrESyj`B7=htu;jfuFp6noZ`!y21P|FDSHGN0f{NsK$0r$P0f%YTUx$7G4Tyda$3Y zjOZh?+i=Lc3Jva=_<=|BFj-=GfUrm!fRuj^xh{E{m{~m_inns8`zsvb)fXnX9`B`n zXrTt!JX!_|(t@C}SPDkZ&9}+eJ2uccss<0~hp14UyP6k1>A?NH!P;6lWIa=KF)Z`5gZ;ZH_^@~{ z`8UJU7$4#P>)S|1^uOV0{~Mn6zu{^B8=m(6G(1h?Eqzu}`ZZ~T^SkZex(KzTH>ft8 zPl2cwAUPh*worfAq^EBRhTP9_8xhb!Hua&+5OUnRn7K;!Ram8FH{?=+M@Bw6T!%IEggW;7eM+Up$c_#cmvdJ`O#w(wy-p6biN;>-M3&UR~yWhu- zw;cWYsqvSu6YsY$apy8|7}c_f48=`1%ViO4s%6(4+Bz#amsQ57j?-ain^0>myINBn zZ}iZ1aqeT&4UOvOWDdp4njPb`ZK@Zj9@?Rhe2m-E=&DH1P=b2vG2V!#tMey^cIt2+ zpS9EInuN%3qLJD0Ift9BNox-8vP?eCmv3}^vBU6gyVm0Z6;0RWqlc56xlaf+8Z{_o z4kvq=oe;U#)Sy~DyeBC6gxE`?M$Mk#l*rZ-^T(PR$>i|fIPR0;9L7z$A|t7ZW+x>C zZ!{Tbj_gZKKDkK7_=btY$o}-!lhSH8ZdgQ*9LVK9C2MHhY@IoBu)yrpV%rvd<5m}u59v*2c}jGVfanidz1;cgjmGW4nIE!WndNKVyU`wA{o%+^a=!LUbxmRFCS4cc>h-Aj{u)jgxdk!`1)#+vV4nH)VA$8*M+!}NZ=$fuG-^D{1j zE%zHVKb=qAbH+`^w7c2i(}nc5GahO!-R;qzF6Q!_^)fW=>CF68T3~+G$F`;CZuO_K zl09erJWU_;^n5C>Y&#ne(emK&Sp5maLrVpQqj8!(77Y841dH6zetg2&AacI8j zqXCDp>iccQ;T0{92BXI=_41sHY&3m5k~vmm`pW!V^u3nHpQ^_$5A8X(`K9TT$)2$* z<89|+$6B6zogDjxiMJ$pG@bi8)DMaee7)?Rk4@w!xJ4&64Y9s z@@^WxD$aX8(a`MKoUHL{vKHre+qOOvxHNuUA?19sr&*uKgYgFS_VXzbt$p)9k2mV@ zUP#?()-NGC(PU(CVgKRQerc_V8GNb&E+tEr7Jqwk!VqR zT(Iqxf!5@$)RfYbGUl&M949-|+e`D*+Fo01n!KIMTUKCbK46_S*;QasR%F{YU~_5m zPDx7H8Bg;!4i6^pR<@TFN3^|h`aF5BmbbiQr}eJ^?*cY`e$dNXQQ2rd7@YO_;VX-Z>U(X2;g>!?8cM0Cd1?MW`oZVN z7=zH{2YhNn%ZySn}~I_(pV zPTP&6(|VtQJ%k0GQ4S*H(UZg64&*Nw_aN(F%!i1^*a>SJQAeR~fN>~lZp0PTj_4^M-@uxu z2odzH(NjcTi+&hF3ek%m;}>(~QSU?^!u&~OUyO4R^O#S5G~h--p&5hFK|ID-1Sgb0 zy&pLlC)$nr3~FA~OA##?uf)1rhzl71iaAR}0_sL&H$)2NI1zd1^&`(fEI_{%r~Qih z0P0laD2$yE9f+9-9;|o9TrP4Z#(Ky~h5o-&PgOJ}~{wiuFtVuy#il{>* zW1Nfra@2*WpP^oYP{Gor2w~Juk>ikiFi%5$7i({0ybN_Uaspx>q5!cQb5&Tk9rY5_ z>{ye7+8e!Hh*RiQGZ>MKK85iegdN60$QcMx)Y{0>n6Jd?1u?cqJqNaHjU1+9b2yLwMK(7Pw1mi-SUpn$Z#6$G1BZr}P0_!-D&m)eaKY+XiJuc*C zjGrS`qn?XiHtIN>ZY}Z|WIu#ILIHC&ShEwg0P3fx6S3w9>WipLP_MyU7shUgx9Dxg zcmv`E<`$zTj-Eg2U(q{+%!bT?UJU9)%;Pvzrc%TTL==LcUxr+W$i@5)tYt=Ci|9re 
zU@V1PigkTh!}#sTBJ}nkd=aw|E{NyoD9tUOncwq4q+r4`V>yfbkaecVf(oS^|-VaW%&4ka55< z(^9OvgeXDnhQ2rA8KNIOd4wJ60%U2#J)B+>>zYx&NB=#>cTw}Br-rD<_&hQj@^)lv ztcgdwz?zE~XQE$@Jc1sDIu8B&$X75vjx2=nO5^~nS%fe}JsUl7)MJQK7|S7JInzdb z=*GAiwKB2`@@m9%)T#&%oUR(@(W}hWmEQRg96ppHilN8cIIj?;feFA%jNYJJpeQ41o}5aH;VB8Q+q zh`5ch6UJt!b8tR1jE#^h(Mv<`74lr<&*6mAXkc7~@duob7lIW%U5po^ua4+KZG-+<)Vq=AA@iXpgW3mmD%LAvya(f1s86ES zW5kGPjPd4EOpK46st~I%-h}W(J%aVTs1G69aX!Jw>FBW`-$8Jru0(9boDuR>^c-+H zIVmyv!A)qSYoV&{1UkoK|`&M(|aQaBDhhf zVLd-W2V+-^MG<0HyAM4d^t3VlgzSmlHuPeVLorrGmce*2>bVQhxzL~Vq83=xdE5v)Cg+>bsT zF&`m@u@vGF*1W{{7vv9EcLDh>#;u6Ws1woqjCGF@2T&hHyvE!B@&U}tqyGdk7vm$y zO_;lgn1yj9Vgc6Rf2K*ytwH}V@*L!|XzoYo6U1B8vG(;9c7jv#SO&LN7^Q$o)L#>ZmANd8M4)r@jm3E zh#fesC;G3E&tZHWbMui~kXN8@fO-IR0HPRm1!6sFS)#ixYP#^KX}#mzalhxXMcP$9n*~8z2@A)#429P#LDMf%RGHiZY>M~Tng93uX3;Ga=yx= z-k*DwSNmh{)tNf13$M+hW?9A0HWc4Sn`07M!aCPneVCQc%IH=c$7)Nf9sIV=`*sL8 z2Hn~rxFK$Kf{<$}v%auL_ErNCwSx2PqW+b`>|z_66glRF-QUPDKkC(ajs-E}!yMvU zrz<7SjOTM{;@v5}KT#q{p)*l(ug)A};r(X1#!`pYZ8Mh6^s6_P$=S?eB6~bp*F-Ka zdmHE6!UC3Si_erkYhF@vjb-c7i=jHE%gP^aGnKD?TW`92+jxEb@>*8ORs~;uT{Fc7 zvF&C`%}cj)DMyBOCM)06nQP9~X{Kkca!bhBe9k@R1AA1@`rYPIdldPBNBwD{GOxz- z^s5aTT)78QG~bj+Hn6^P?rqodubrl^J@Qa#CN-Q{RQ-^uR zic}faBemuuY+Vr%2I=Y<8u374tmbkJj@KUT+`g6?ntvLAYLuVTZdza2OoEOE{ zW_T%%Z@-b`zH2*lrOfG_Jm%T))+US3u3$4=Ru$T5s&IYLZL<}3;^WQLo*q14p~b&w z+A3;dMe`~hHmNn1db3sZt(452yQ~ZrrggEKEW5sDwZ)7J9jk40?$Fj08$G|Z#?W$Y z+FDzBTAGcc_tmw_)<=!An=v5y)^mU}clxewcnJ!y{{>jiG1G`5|GiV_S8T zyiV-1ahRWXB*DR_=xl?-nzL1(MEuUzi9b-fm?qBQU)tsIz`yeOq5T1u?p${ayz+%z z4C-d6J*0U?>Sg<{;~#4ZXqY!UPC$eAoDd{nDsl z@|JJRLgLO|H4AeC>)aH?w=UxM%8lWWNYCBYt$neVjeD@cemn2BVe$A5ec5AdbFJ;@ zt^!VYD-dV*f~N`BHhQIp9}lgZxwJHDJVU+z{)dH$^N!DYwpzMM`dOk@s#g|Q;z@~} zrcxK&p6yCMCTqtlSCqbX){+bNk0<3_9v4hL`O1@?taQlAfS=DNxWTE=?OG>`a;L1( zo)7mU`&Cx1udHWB9uKoP0!zlBT_S99{i(~}DYl9Q z%oY~np7U-Y4((|>pP=2z-(4qbkPzPQ7kF=e@ekRPhD!q40VaQn}r2lJDa9j%JuJAx7Pc4nw>}C^FvE? 
z=f=4%29-|Fzk z8Jq_rrLEj^qMNH^j>cF=9Q8Avh`MGK`|E4b+{ZhF3Uc>4)2>E64O5mq))Q)3upyy- z9S?MTIeKl%xlKtuDe~wRXjQt#@#PH#2t4=v8Uj~z&;!ux-5)MZgDE- zY;V~zDhg%^z7{%qr(;n-g(cxIIrh9TyJ>2aLdVHn|!y( zNx6b!yETjErtQ%2Da`Yzd=SoL9J#otuqStbM}ViyNB2xZ=Bm@hJ<~GWo?Xt=IkCPr z)A&p&jh;9^;o+jSMoHtwr_NmTm}4YY8b0TR)4{tY$!Fg_YF;O|hQ8Z(x4hklYg$zU z)o+w)e9~NXPSkYyGS^q#Nk6Cat99ME+Hh6llD9M7@EhkYwc(ADxZb;RwUwe#M~UHV zN5ARm24PJcUxkxa&dU;8vgE>z1U^n``ekcb*JgR&1s6+|YE!4R9Z!tEc$3N!-N-UM zK0C0(ASq)@p3!Fm&LG~p z3ukqiHi~ANKZ?Heyn>nDYqGs}Qan$@J$Ao+<+GfO6Ebm?UZ?5iP71{-ECIKtS8*Ql zJyfM)mwIOQ%i~!A^X`bB8mzXz@IY5AmNY+$oqywM|FQ?H_vF6N9nnu-<*xET_$*00H=WeJw*eR1$$Cn^8Sf{>>b$R`w zltRb)?E`EbaZ*9Yhjwx)Yq#v+(Q0K$o~g#FNSpo3ZISH9CT3hQ+l;j~ZmuIb``9Du z^fq4Pl323bURA={J*C5Cz|i?(ncAyl{+YVFuUpS9u$?C8YUb7utzIOiVF{erR2V7WVF<5 z?h@bK(qEomu6jkAqg9f%xi?Airppc`mS*i8TpKo9ITtDKG!1Dhty0S^HeTJrlPw_)%mf!AJLJ#-c;70fM zi`ozx>>U*H;|?Bn0_AqnDVL{7Z`(BuKY9MwS<;`*nf~^ZXL0^TW{i86%)xg8+CJ8c z9$sI_&Q$R9{j54nZF;DxND#k3`(U0r$ z+uc_5;_e@JA?aim=wX&F@0%w5=^3uW-^}kHmhyM&B)nOLNr*kf)yv(V&d-f^ zVv%C`%gtW=l4pxC-u>ZuCnNo@yP`1hPiMrxUw!^P{{7>Ad~3^?+n=6@?=$&rm8AI0 zlIMsmotit|Qbt%z*zdo1;~DAy%H3Z6a~JFX=e}q855JiHOWU_V<-=*XRY^}5h}?HWj%~MKS9!_&&=GQ}cKKG3P(d$iv&+&uzmW zz3(mC5E>Q4SfWH&fVXR)+xJF<`3K>C>_;Nd#mC(>l+ieU_fWbUJ(P~+n`YU6=masz0Ru)E@7EP}LQGZOZTVPo2ehHvh25 z-*;huUF089e_Eu6Um!hHS?R9}{VDP97W>;5qkmoM&zb+<3;pAP@o$#>*MdJa0sHjt z*7|q*->vhHW7%I9`%~gii~DVB`fES`mci{T>@VBd)U!PGa%3Dn1B0htC4Sy6!SvuL z#)~8*Uw zr_h;M%Nc)>o`y6U%?iQzGo{g1&~!a%1~i%{P0w@6)7AY$K7wJc{5@~P&{OzPHI+A} z(IRL#9WrAky2hv(F^z^-6rfTw_V){OFtk}=r(_ZCVq_EQ5@7DV%3IgPJ<24QZmZ&J z>=Wo|9AM(*;_v2X>>c50?62bGV!M`66Xs?c8DgeK^LACX_HqrdLXQ^lBMx`fGgdX$ zU%A3u-!;To&m`K-&?bU0FLOIZH%5)Ul17M|tzwv+()Z;%*!{lTFne1?KSr%ffK7;t z9%GpfK@PT&zIIA~vur~HY`;OYowBuxt0ALZG;cRsl^~qIufu9%#NHt_Sf*H``h@q=}qNB9qg>Va2^WwtFg^SA>IzQDn2es)HDpa9ve#U48BCto^2zy~bX_R!Jq=?t35nWgWk_ zFDTr_|9hX|I_et-C~BDLF}8`H*2l;s$j#p-3ZEz6e_W40*Zgh!{leVz?G=7(Ly9hT zx+*_EXSl5?Som0dUoTr5>^H{xuEhTTzMXljj_|~7^7n1S*dMpo00$3Oe*=Xd&$B<> 
z)+WTkK;z%lMA+jtZEt7sbD#OU7;1RA>6=IWQ`;Q;Bg5_OtfI}m|7LqJR8e#>jQmT# z+1pz9*eRRfI;@>qCkKB&KNmx54;O!f&>zp0m%Wn3&+Y&3wav~Uz)s29i*6gqcn+fI zcDm93^xWG2Q|&)K$N$`qjOW-Kmo&BQ{@*>vQ4Y5F{MvrsF8-zueO&!*7_W=(+veZ4 z>0hs#e|&!NwU7INAl$DhSr~*Wx+*ICa~o|G82gkT%hiZt?5A+M458baMA_S!sC?gF z>Z+iQa#41jnh)dkhBlf8lfAAQ!bw>T+MjY_52a*(juqasrF8_Zz^OQmuR$~UMo|sLO4z} zwR}CRKTSV%B>ZW5ld0*IG{W5cG_e1z{cvtHJ!~C=zyGbIMgD1hrY8Dnee@WsX2^IQ zIQY5xX#_b;P3TF}pE_>3GUoT=4-d!qhyA&haXiK60ehX%P@LKCjr!5w-w)bTUC$_> z(HMQJ>xbJyn45vd)P9_C+;p*{G3Ianqut-T8oT&Me5)YyWZwzw} nUFI4Tv~21mMEu7|hWPi>4`DyCsnIS!KQa~L5FtyNwc`H)VtH10 literal 0 HcmV?d00001 diff --git a/examples/podman_local/feature_repo/example_repo.py b/examples/podman_local/feature_repo/example_repo.py new file mode 100644 index 0000000000..60ddd49f9c --- /dev/null +++ b/examples/podman_local/feature_repo/example_repo.py @@ -0,0 +1,144 @@ +# This is an example feature definition file + +from datetime import timedelta + +import pandas as pd + +from feast import ( + Entity, + FeatureService, + FeatureView, + Field, + FileSource, + PushSource, + RequestSource, +) +from feast.feature_logging import LoggingConfig +from feast.infra.offline_stores.file_source import FileLoggingDestination +from feast.on_demand_feature_view import on_demand_feature_view +from feast.types import Float32, Float64, Int64 + +# Define an entity for the driver. You can think of an entity as a primary key used to +# fetch features. +driver = Entity(name="driver", join_keys=["driver_id"]) + +# Read data from parquet files. Parquet is convenient for local development mode. For +# production, you can use your favorite DWH, such as BigQuery. See Feast documentation +# for more info. +driver_stats_source = FileSource( + name="driver_hourly_stats_source", + path="/feature_repo/data/driver_stats.parquet", + timestamp_field="event_timestamp", + created_timestamp_column="created", +) + +# Our parquet files contain sample data that includes a driver_id column, timestamps and +# three feature column. 
Here we define a Feature View that will allow us to serve this +# data to our model online. +driver_stats_fv = FeatureView( + # The unique name of this feature view. Two feature views in a single + # project cannot have the same name + name="driver_hourly_stats", + entities=[driver], + ttl=timedelta(days=1), + # The list of features defined below act as a schema to both define features + # for both materialization of features into a store, and are used as references + # during retrieval for building a training dataset or serving features + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64, description="Average daily trips"), + ], + online=True, + source=driver_stats_source, + # Tags are user defined key/value pairs that are attached to each + # feature view + tags={"team": "driver_performance"}, +) + +# Define a request data source which encodes features / information only +# available at request time (e.g. 
part of the user initiated HTTP request) +input_request = RequestSource( + name="vals_to_add", + schema=[ + Field(name="val_to_add", dtype=Int64), + Field(name="val_to_add_2", dtype=Int64), + ], +) + + +# Define an on demand feature view which can generate new features based on +# existing feature views and RequestSource features +@on_demand_feature_view( + sources=[driver_stats_fv, input_request], + schema=[ + Field(name="conv_rate_plus_val1", dtype=Float64), + Field(name="conv_rate_plus_val2", dtype=Float64), + ], +) +def transformed_conv_rate(inputs: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["conv_rate_plus_val1"] = inputs["conv_rate"] + inputs["val_to_add"] + df["conv_rate_plus_val2"] = inputs["conv_rate"] + inputs["val_to_add_2"] + return df + + +# This groups features into a model version +driver_activity_v1 = FeatureService( + name="driver_activity_v1", + features=[ + driver_stats_fv[["conv_rate"]], # Sub-selects a feature from a feature view + transformed_conv_rate, # Selects all features from the feature view + ], + logging_config=LoggingConfig( + destination=FileLoggingDestination(path="/feature_repo/data") + ), +) +driver_activity_v2 = FeatureService( + name="driver_activity_v2", features=[driver_stats_fv, transformed_conv_rate] +) + +# Defines a way to push data (to be available offline, online or both) into Feast. +driver_stats_push_source = PushSource( + name="driver_stats_push_source", + batch_source=driver_stats_source, +) + +# Defines a slightly modified version of the feature view from above, where the source +# has been changed to the push source. This allows fresh features to be directly pushed +# to the online store for this feature view. 
+driver_stats_fresh_fv = FeatureView( + name="driver_hourly_stats_fresh", + entities=[driver], + ttl=timedelta(days=1), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + online=True, + source=driver_stats_push_source, # Changed from above + tags={"team": "driver_performance"}, +) + + +# Define an on demand feature view which can generate new features based on +# existing feature views and RequestSource features +@on_demand_feature_view( + sources=[driver_stats_fresh_fv, input_request], # relies on fresh version of FV + schema=[ + Field(name="conv_rate_plus_val1", dtype=Float64), + Field(name="conv_rate_plus_val2", dtype=Float64), + ], +) +def transformed_conv_rate_fresh(inputs: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["conv_rate_plus_val1"] = inputs["conv_rate"] + inputs["val_to_add"] + df["conv_rate_plus_val2"] = inputs["conv_rate"] + inputs["val_to_add_2"] + return df + + +driver_activity_v3 = FeatureService( + name="driver_activity_v3", + features=[driver_stats_fresh_fv, transformed_conv_rate_fresh], +) diff --git a/examples/podman_local/feature_repo/feature_store.yaml b/examples/podman_local/feature_repo/feature_store.yaml new file mode 100644 index 0000000000..3e6a360316 --- /dev/null +++ b/examples/podman_local/feature_repo/feature_store.yaml @@ -0,0 +1,9 @@ +project: my_project +# By default, the registry is a file (but can be turned into a more scalable SQL-backed registry) +registry: data/registry.db +# The provider primarily specifies default offline / online stores & storing the registry in a given cloud +provider: local +online_store: + type: sqlite + path: data/online_store.db +entity_key_serialization_version: 2 diff --git a/examples/podman_local/podman.png b/examples/podman_local/podman.png new file mode 100644 index 0000000000000000000000000000000000000000..9aeb11f7f7f0c9c419cf8d744fd0fb812b602fe4 GIT binary patch literal 211897 
zcmeEubySqw`!66!sVE4FNJu-BAR*l`APqwdrGPYpbR#GtAw3}7-8u9qC>_$$C=A^& zbljJ7zV*AmJJbMCFlw@xbJRrcpz_^_W`#p76xV&OrKW($K zG~|`7uCt^c*}~(xwNTl?_O&^4sqg93ch26K4}#higO71lRlaxXZ(QSJ$l=$`lYV+* zv!$#1$&Jbyrz|cdBaQ?<;|TtX)`gfE&2&m4gX8Tb8KoYXjIM!Z1h4E7en3&!+?@5WZ$ej%y6!TB@1a+RKQhtNE z1j{~4YlB0BA7+{8cXq1JZo{_JZ#Km8KWVZpEpcJ0u!>)LGekqkTF$yGYaAawf0xDl zqWfLJ)7`a@L8jW^SBi=l0P!*KFt9NnVBi3+FoA~%CdI#A%V08NT>Im5EDVeQ3k>Xk zj8OufFaN@U$K^GDK3|J`kAVxkBL*I>saXFUjR#7-_RrTiw}9_3#MLFiVBlHZ*xuCC z*1;U+So<>L82A9+PEN-G1A~m=@_`9fqu&DN|6!q_?WnEzT)-G+!~Vhq_R^Hy)yD2} z9tquGH05k*E5aHzF=N9_og8%l_znA=v zOSS)fDc2MJC;xfre|+`7U#jU~YA*@10Ty)>`8UJFfSTrV%D3;i{$q`ncUsrH?UYH zA8Yo-z`~<>^SA$M#6Jg%;qWmJVqa7?{|9_wTa}Ft+An{QJKf_{Ust z#Bf^wcC&xnPmLll7NI(L`$`+Wo%{LzRd$PkiNL>|JIibNK;jDbhJlGidjHy0=u0!p z3yhtVQIx#WXt7i*<14L?J;^AR>Nk;Vf%o^E04BuKFuHLCX`p%YW&#*%N5OOR(cjAs zFrhC&$d%T=ZI=Lywav(W^tW5kZ~LoIe^%aCjWjpB&Z9oj5FtLI4IV+ICQQ1c}Ih* zs@mYde4;az{alY)mVz>VNZmO670w8p457&`Rr3A3FghR3mGv?;_Xu+zcd%GbHO|O#Cq=!|IfH0xVYm}#0X@qPg;UURU|33t>*BHL zV`NBhWJKhnY4F%UmVxrJK}l>S9&E1DR^{H`>z)`t88eSB!D7>yZZGSJnq_8|oS0py zoIim-vRWaQp6xMM30EU}#bugF6Sk@D>dmytcz%|ZIMFC3@wQjaC`+vC+;kDLVLI|* z1UW>QN1URkpD%-s4S#hIH|2P?Tz(cjzls!%-4(|tzE4a~s<9O&v?UP@Fpu;rywo0p{>=)He3sg=A^+#Z0awDVVX zIMHFyY+4O2PB<`5RaB9Zz5MlyzH-J`ZEo7v&7p4ocrtX%{?+OY3vE%UuUb%Xadg+A zaS_fi&s^WP=Yb-L%)=8Rk2Rsfwrq$&iBu2()_C6WlWMBy{ym_0t7EvK`ULqgG{cA@olKI}ciQ z362#rj(wCA9riXaxb`qq_z_QvYcGn0B^Q2I>1;cO+ge-tD-??Q897ozm1!i5_jxIP z{`$2Ky7y^UBc)_hVoR+I?T(apdERVJE)!HW%GobEo7rqdTcM{0Xy=7<^ zbj8mdtClbkLRy5e=2q$TrGH>8OXQ2R6cqbi)8W9Tp=$x=Ysa-GI=x>Y!Q{Ch&b zd83a-MUfj`4MRL67uKM)8oHgA+VhLz=|yXfeprqv7cJJ%VwQsa{GLT%;$bp2cVIc< zdco0v@_M+B*3Fn>G*%^V<@s{K<9R8}9~06UR_@eQAo`QYhoRu8abZ!ii&6+=qKfY| z>{N+-d+;`XNMWI-p?Jag2Njj*+C^T_;9JrVI4Q{00>utqiNKDVMAKP37$1x{>Bp{ccA2Gbb-u)}u?n@cgSToKn{PbF z9)h1C=13wWpJvJDR*%0WP;0-5ZC4qbZG^*iTddN^e|I~O_U3i)FeHqBAj#7W`Qt2) zRftPgqPak~Fxv8-8P43@%&h^~cVEi%W7jgT?>e6cF0YL|LrEg_Y{Xhm^ za{MwG7!83hj z5f_nyf9lz(Jr-%u?Mx#LOKg^0G5_w68~8#f4#Eus+sTEi6Y<4N5}#b7Zq?o_mRFTu 
zx(w$1yS2Uq`Nf||m@+%cZ?-1Uddc)zFx`A$F(G0;0bYWNnxo08g0sbNY*ulr*1n{@ z4IHqWjiE(%F*|nMqomJ#k?Wj?W0q_4SPuyFvZ`~wgXBX(W8p;BT=BoIxGg-5(u7Vf zI$r{8kaj3u@f9N5wL4os#Bi#;jaG3zu0L_R!WHtdZ`!Vd9;Q5z0LgBW%mk5EOmbByj(`)sM5sR@dh%i^RE zM8-t3vawV~Z}Ta2f#;d7BuRLelC=D7cYc5`MM190a(phA+!-A~Q9Aqi+KP^1)Ve{W z>wcle(pl=zlHlilUDlnHiS5Alv^X^eZYim6n0lSd2^sp_I3SJh>6oAoiDAljEuP5b zSG#wo3(Ime{aZQZonKYUNGI z=9PQ`&;zj(VEi^Pq30)1Sj32!GWu8TO?@~Jf=@kjAn<| zcWha;<_bQ%$~K>|&i4Bf>kHZ=yRHefTJc=hlSA$6EV=Ox*T|lKTzbW?B@-?Id6iB7Feo->yph?3w`nvQ zPI8s(=^^RXA)p;%Irn-Z4*gT)8sm8!v>+;ZZdha*k+1J#5{y>?AtEEYlEjHHBhLn< z`MlloVl8C_s(XZdptPx4+Fw^kT=AU3r?4vX*zB|)qW0mhqSuQ=IH_Y3mfzz&P14G9 zw>3eQwW>N7yQ;VGzVHD|0|PLY`*H5ep94hf5>(o1af%1 z*@yB}@xy+P4aTVwkp8c^0@RiIfP5cEUNhBiTVP;+Jti{=%3yOr1s1fI<_z3^-PhSZhZCbD@!V z8vu&`e%Jr+i1b!aU|_bp?$R(gm2s*GL738MflnD=b091_*nC0THl_UpDy_ z1TBmVFcyj>0l+figE#(OCf>*b!Zw>dDhVu;YH|GvQjq~DtKH!0BVd{D+;9Khg3b0{ zTE=BGunePly!hV>-j?~L8t%CR%aCyQT!D$Mm4C27`2VxvFC^;!fDOJc@ijht{0P4| zKR`xCN6+^Y8Rn@mxXt?F(hCVS{5S!pN|ccd{=Ede5qo@_IY!K1{OxtHyuAElSI>yq zS)-$+jIf}_5^T_2LC%|3;Bqw9<$jiCkjV3;w6rv}EP2u}lV?8X4fnrXBP3&G-WPdb z^O)q;6?6o1RZBfmTV3KjFO!|u|khUQBPbL0h zIAISH90~qjms4*L+|EsH)BqgWZ2LmQRScL~s`fR~I;T~}_3^6RcHEhA)8Im#3Ykp` zUjnM5pryNiZ*j!P?=p{w`49Y(PL;55Q{TG+Dw0}Bcvm5{c_>dERf0SRk@BujAY#)b z1IRoK=jP_nyb_X{!hlL))F92@-}xI{fZVg$u@e6`ZS6PW+$RnT&&O(BXXq16 zH+s0ZEu{sp?g*$gS&bHR+0BXt?>H>>!mF%Cbu&pF`)eJ{-1hoq3G3F&9kEFDWWdsY zA(<*l*ta(LIViM}!qwZ<==WXAGa1cdqL>yr_1SLUpQpRy-})3v<6>5zNshePh-mWqG!+gzH_gVp3B^ zZioKa#BQb^6sS&@dX~C^~?>H2mHaU=PXDhBbWGTQP{MaD7v-;*_2I@}|bp zXi3W6Qh$r(MGS`_yP+2v?hPBr1gq6M+nk+l{cPcKUei81o%Yy-PuSM?SRQRQohXs9 z>1NJQhUyJBhRf}KN}N&o5K=ojHD39@ez0$Vo4@Iov`>Vw( zAYX!bdS8O#2TDVK8RXp6pZ%D_*U`U3C|&X<67Q~f>Wjh7onO(;(#&qPc*Y0iIA+N# zvq5DW`8jkd;!ywrG+P4Q|B3*<>4Y~{&w?|vUXe9o$d7bQeL@_DHeE5KOcG@06$K}3fe%4?ym_1PXLb3prk&$7#Io){N z7)Zh-Z(@?^?Y+08O3q^$?LD0dCb1aGjTqkH(IIz8v7=b>Ult61Ox}Sd@>wvZv zlQZT}vCXb&y9~FGTBZ6mCY>J6g)uNQ%k_C5?^(EQ)Gn^EbP1 zQoP60cCbOuzDh5=y!)ffI6$MsKz{}B8X-k0Jij;3$&Z1h&6;9&zO$Gy!N}`TFQTJA 
znmGbwkx$dcB7N1D%(gmOf+z@^s&k6MymnoC-YL^_v{>JC_OmY;YOyDQ$!esqOpayg zS1Sc9x{b=CFm-}mAuKFxbd})dO(&fs4Gm}PTU0{yVWQ_X51(y?ovR4D?}h7Byb3}f zT*lnwlCgj9swuFZ?~h~raxTMvCg|4ngRNRi@*6wZA3PHTO#N(rFFeb=Q$AHfO)aw8 zw9T9U^{;miMZNXP%gccv?YZS}K7-A8Wke7uOH9`2LbVyW$f=u7gKL?zM~c^e+@z<2 zOE5S4@1>Es3s^q}g;*WyTUeWCGajv&;b^R91sH?Wi*hgnh`ng$vOGYQTk5iD5ckxq zm{q4-N=;q8NW{g+D3!O&yP_K1syvK6}d7ruOj9bk;AN}^4>qWCv9KS=h z^lu3-LEX&Pzi(&~(;+h|N3bIG`@DJo5R>FH@8u~|OtZFQRD7Z!OALU9emo&% zfh#F~R2ub+4nlyHc*tv z>cIWn@T4R*$$(pf5Y@wMIV!K?y$C?l$nvbgqyny6;f@=g8PdDx)zR3S6eG~g+GARF zb|slmPwA1uBh{hKyJsf{v1P_>HFe}GlD{?9pEGbit)Z7j)w2Z`FL{Z0P87z4w(AXb z(t2?jUKNxyfdXz(+G|qVjO>0TQEtDH=%|kEb|MEI{-qh1FUkqk_ouqOM7M7HA)J?Q zC|cKv1zh#S=S)9Q7!G0zW$cPUmzA4JCYofDo-^2mdHXu{+5)_(o4PsHuFVQyz7w+> zLMy`sOeBoYzbBj?%=lEnvcnv6k8lXdo<$>>;}oE&%>W5Atws~1ib5j}YK|5XOn;P` zWx4GxL`FnBs&`(G)-dI?;~%_p7TbT+c~;8~Q94VA=19WsBe8Sdsm^RG=IOsQ`y(C{ z;Dd`H5+nx9O6R%7vW3o0%V>ZpD%~6`-~A-ZL2Vy9k#e<|_%RSt+fLPifo5cUEYD@oi|K|VMvvVafIo=o&v2LybgL5(by|t`L=TUa1DbScY;xId zgS2i%j90D7MKLPQH=S%mjTDviLc1M>-F8g)eMG)E7D4Fl^KEE?6%?ZSL@)GWI4&Te z)S|^R$eqrDOp8IrogsB)`JQ+lMbn{YGRBC!1^=c}4dDTTi7PFMt@y z=}F-0f+5e?FHU#E)@ClkAci4Bc@F+m-}@Wfp02y36;;A!rZvKjIwBbqq~Uy`RNk&{ z^qht@-3I{EWZD`1IK6uANoae!969$A{@Ibp(Y(4%eH1!=5sXhjFj6g9D}OOXA>#NtSqmpi6M7ae0*9%$Id!Me|}$xo!VZ8_GtGHMQBFk;ALg)aI!Fqaza>m1&5?Rn+NC7S6$u4H;xS zCPb*>Bzx|F-HuQZzfdSsP>E8q+k%cx`PcH(LY=3m5h!eUm-3mNSe@gtr79x`B)`y` zlyMe{j(%#^t3COYePy(SBgK;oN~mXUO|Rp5es(akv(*x2E>7#uA)7UwwxSH=v22|M zgjSnIC;K)76=uq8Tl1OH=gWgRT|Vc7Q00@K0o3!gi|-dbe8_Mr33)bNdG4iAJ&QP? 
z8vSqf?4O3tR3SB!$z$fzgwD#4i+*Zz|FuA_<1$0*Ci*IFpWW&-d(+_joxa`*d(OGY zyyx-*()^X1iTkE9>cmXnXaH+x0}j)7h}>}D3qNm(PhjHr6gimE-x=JwAJ7$dZ)>}V zhHpw$d4;E#HSG>5O2ay6?eHvm?QkwLGpoGkT`gR&F9|RSD=dEohcPW+#jP0!Wy3D1 zb<6Ev(gT2%^ITrQhgMs|>o@AUgutRf~Cn`!bQoP6Khur*Qy1!4<5Wsqg6-}pd=!y-E`(?rjNCe+gNc-eXz-PjPO zsftB9L~zI3yNUN2>kt`b0i>w=-ovujb22vHf=+tsAxmyR(v7CM>oNU>4KYc}qE+0j zm2xL{t5WB$=aRv9a2g=jp6bj1s zxMD&)_dT0LoeQkbQSXFxc2RCy%L@aIE?#-FHb*FZy16DBGn0W`WLFkzU)bk7bx|ph z)^6;(ie>v#yYy|mto^|;AjrpM`ZLJ4f0k!+oALQ4^SE67v)w-LJngcVO?wUamW0=|1_$)% zz-&5ts@!?(#V&oX{-uN#F@;RUYhiamck4fB;oN&;u9-{Ycjs($jMUnA`qu}SlBir~ zp66go|8T;^r+{PEI=$AQxQuVcH9!1i6}6V9p&rka{9I>9(Q@d<&|ClaGxe<`^-C%# zfG@kn1Ce`$MaumUiPyElF!#QR1 zs6{?Y%saRy=ch*ygsm$O2WI(PcxP7=#?xyR+?;e!rq0viP#XdfRlMrqf^#Z$r&s`% z*#2iTPGBHEZA$=F$3*DUh1tg@*saVV!USqoE3w2JSt58jClL2-O;e3%f<|WG96U)^=K)F_- zG7klv^`KzmH*4cn3augJ#&<_Whxe%*7QGoCJ$iIpVB%2v=6RK1G&gbRA^YM=4&heNLhd!ciwV`E0~~Szh1Jn624kDTy9}UyWp3c0}-B>NVL}T(tFid*7GV! z=KTbfIlUJy&oD>(@AR5hl-1bjIC8G#GmAGG3>omzoSdr=^ohZHrbMh#F zf-T6W`k`jac`cG%UQU(2JJYH#^Pw$i+}CZwQix@TJFkz&52&lFqdN{-0@&!VCaP@; z6k9oXP!+u3V#c;u4#R9*g8Sq^2F3wkFkI}4DGN}w+^;~iviyZO`^I=+KCIyrR$Mgl?6&Til8^P|GJzIYyW+eS1*xXnab?#h?#qETZa#lmRVoHn?uncyBzM-F%PKlI zq@$IKB<7;$IZD?zolMHB#09<6K*tirOj}2>3G=8YH_YJ*&&bZ8Z;M`Zu{^x1ao91T zrJ$C+?y_Dm%;SM#%U-7v@v;gy-DyP6_~KWv1RCVc&4Pj8cweQ~emhfLPeA%v?NLpV*6K z>K~p0vTk#dz9YojQdWzTeC3p;oi=JD)a2nayVUC8udcz|PHENQH76U>M*}WvKhjFf zQ*E^q=zRcYRoT1fM5`B250%V(rd8C>Z35tcw&)GHRKL9Nk0e$$2_*}uL4b)BSDX*< z_t)8*1u}zCNYs*5#H;CKo>54VZbuO5j7~Hs`w;I<*)ZL6I(zMjZK=vt3;|qJ%we*_rO@Bp3V^8 z_ViLwxoHc2eb59+ZYn6gtRV+mYabrfrau1-*P+ROdf_;JXYK30yC8>frBX8ajM9qv zOtefAhpgW2Hi0!F;o%$)N2DJyXUz@XyXr%>mPz_L9}~Gb-G~F0c@*t z^NpMdMW%y;v@4KE1exPVpUe3jj?ISXit_rSPb}s_6!7Xz4TWRT3BCLwyzfz!I4xD{ zKutSXN*dBr3K&ibGz(r; z`Ry+D1&_ZMqr8lch0pd>bm|-frC%a!r=;0>HrM(Ac*FkJS0vf+CBD5;Gv^FQ<_jA? 
z!>kg$qsB_}eu*UAn;I$;Y~1E5hDg|Xi|z#SyiS#tj!tnb5nn^4K+JlgTJikcqa=45 z9vz)oq8lC_P9u66P7?Qu+l1;_>Oc@N;Ls03DJMRH6jRE|$_ARel%%pEa;ryv0F@Z; zBlPG+)Qgt)p;U$*nM@0jgAGjql-J7L|dg~xuve#w95cAHl4p`W!1Vs4#3tR zOVc30(t-~*rdT1ZPx{*Ue`|ZjNPuj_fMLAQDK{k1xoEH^>iyXxA6QphaJgn&qb~uB zZ3%S8>22lk4KCDnG7H+WWECR&KE$FZB}FiaQPY9RM$_+3?)Sz1Z2LCJk9$HyQ-?+8 z7Uz=ixO3!&`#>8Bq5~#MHQLv+3USJ-E}}$UxJrn`o7c*hA6Fj_mDG_QemvXNH4Ly) zI5#usPCBZBlu-=+nFF14R)!8_Iyoi0l*ZTi(TJ*i?mfK|+&=Qa9i2gu`Jy7R_UGc} z1^AaTJ+$kUe7zc>JAUyz(_&wC*H53`)#U}_;?(UzE}5#EQTJvVWri#%M{Zg95-aMC zgvaI>ni0^qLzQqIJ@+uJus#u1LTYN$w82Cl#)cUp8yH$C90V1Tj^R6yJi`L94-wbA zW1iW3!&QEEnV7r32a1;Wxy{2fGc#-Q!+2L~Q8CbYUtFs1yLq4eA4$Gn!o0ytxbO<7 z+N)Wf9^!LZjj#rP1qy7PK%#3I0f`P4a4K%sS_`0tC{(-}IdCuEw>htM;DD(b*+dEn z9H$A!&E5+<&H`O_ zy&4+D1YP%dTu}F??TCi$R*D7bha!eep0!lZQwelEa69>*J*)ZbZs=~2>3M#%h%?QH zOsl3;osgKcYq`15k@(vN*l2Oz$mBK6Oms;;ZAp*w`9+jBdi!Q}hZeM?b6U)Qtiblp zg&1|qZ{NCv~S}zm#|?K`!J%a>m@zSXQ;dI54rJa6?ILf zfCGm1g17d`lFH0qsC$*BWA=XVw7zF+PR9n(GZ{obLX?ziRyABG66NVlDH}Q&iN&5` za`Li#=lbz9*~+ru(ewCGotc-vUdmK^?v9BF*JO)F<=b!0zEax_3UXIhSa=xD;BXKZ z?`73QwnP#~VqM{RF0O-I`GkF_70&K55>@OXvm#EkPyoj^M_O5;!vj;Ma&a;8J_q)P zou-e*#z$HZl&iAD-#2!@*smyeJQpRVjs+#T8$3cBz%oY@vQgCIC$iu{zYzK3KfvA# z0PHQb8f9OnI8tDMo)u|y%L4cfs$Q>({afBL2XG0@AjbYJl%cRR4k2UwHmy{b2{vh^kHvB`Wd_E(_a3<+hx zLa~pOXQ~7?ZR(5H$@@5znjpG*B>7>xfyy?~*F1XTR`$2iF&5+URF5>z=o$Fcug|z2 zDIh_qpG7~*PK?tljj#(_gEEv_`&Q`D545Tum~fh^^5v;MRhfX$YF=hS>529UM_$Up zY{#3sJLBOu$qssa?vXv8p%e9XX#eF6)&mf5{ozHxu-IuXcB%9N6x2QQ zC6Zyr97<5h2jDbR_wF(AO%3(XPPNIN_|Mcy59L&W*EyOFB?#}AQN_8U;97y>YF|#D zYz9;vPXehTkebl=S4r9!e$Q2v%+sqa*T~lZYitD^sYdz|>@IRVu?c%Qo%6FLAi*%c zS2^+7qZcn;9G^TTruN*z#3v#e<)8$F!EJ7LZOozEdW`F6e`3=F$l5;6V~O%v|DXZz zYnCk$zTZ|GPufSiizaq@hA`5@3H&aj?A~sXK1J zGwPU^rJSDM$;I)?ilom9$SD(#NOl9K#$dLwH^&iQcc}nVSjyY+;NdT(r%mf=d}dVP zTisUcnO=){kInt+b+uDP;tC1kVLTCtnT-bLKR{PoaTZW+2U?5jd~AX5h{cj*qol7^ zH9r7%!MIQGX9|a;ZL^t`?V#3=XURF!PC$k6l%^LGP$i7yU3skN3mk_Ff)o|UynBZooTH|&d2xU=)XqdTetPmEy*rVA 
zkha$Q?6~}eWN*3f2!!W8pKYSh{*MeY)1hy9>N%C`IO7Wnd^5#gBEa@QtUH?Z;9u2* zgoQIcB&W@|quXMVOLtTz6tS@bz-jS}+rvFTg8(>~jE!vv4yb*;W&!tR8{6?<)v6+Z z5bFhVdvE|s??0T^I(2?_B8}6TI}5+e&&D*yz)_4~q&E>6vw2me2J?)mB*{XU?YwHs0f{D=VrV`S~@V*EPDYaX-Y@ zaJBwAJdig^>V7tIoNjyH_^i7_!>zak6?7vzw08UJB~Vlh6z@P_3UoAZmI%$<5+KTP zSy?#H0N{ogAb~_FXyGUD!A2l2U7ySl;91ssS6L$k47`s>bxMtVy=_l(p@`yY&9*SL zvX{N0+A_zVIE^nmCt9=4^S;nXs{ix103ggiY8?V{Z3KsYw$r<*UL(~IJUQ4LJWuZ& ztB}zznA@m#?^O0Ayu0{OaMrMWxul1 z9TEqrUvtWF181kFm0~fFY%AGmUr;7ws>XGDXGg-oz(6fWiBjFG6?}1iW;#`u@=@VT zJ*)9(fr89J@3&1}(Ug~Q0}nl+uFOmgY_%d4j!gj40`l4MR`H_bQlB<*o$6VmCL>G_ zMrXkCtEk+5$7x|rmAkbZ)>!!_i~vWtSo+vkc+s>uih^}O{#Wx@$3!GmeP75Zq^yRs znNi=Vc2;wR4C8t+z8LxN4!$C{OZ=9Brd z1SAJ*Fm!|e(l~>GUK$3AjN}2bT)WxNn9BoM1;aUYK+OO(W)=|77#jxEKiFoFdI@6Q zgO=xT7UU@8u6f$bM5RU4-o{yq=h5-rG99d8yN;?byy$ElJ>|k;(jNZkXy@krt6xXZtDOQVreK#zq<>C}ol%||JLS8+881#te`opA!Hi>mg6Nr%3U-9G3{ zdNVEEz2Cy)S!$Q?_{~b9n0@B036xB7v&b-FB;b6pt+^Zk?TIu#ZOzN@mo6rLJ8NdQ zR$R+3uf`&^94cg1C!5OTGj1Y%E9?X%)Of;J$YE;QHR879_MDj@KSVQ!SBbbSR4)7Y7(9;ixeOP$E+z zYvwv40dPgPl8C1$WL7SmORd})G-OEQ+JgvO7 z0zXvhS&Y?~_2tBiQS#54d6ciX1qrJB>--F^79BhDp#=DOfSr)K((p5Gea{<-Q7y`CvV@65Jb9MSe+@fFN7sW+o>iuSyj1e zZbcr%PO^S^#BcbVr!VTj^RUrs-ciqTdfsz%U2r(kjInZfFpRQ}Cgb{fB1c$6D#YN^ zzLQHo*>sGLb22=zZ{Cyt2Lw3tDmpij?2hc|B1e<$ja4X1y3NMDI$n2EckUWDjlsNi z7Em3&agmoZ<3g|UvlY79nZbc7lsYdYOLSBsRNL8WxwP_&Ko{5_p8TUPL0nuM(0@k< zVJff@D)xrrP_pXoWntsuLQS-w_>P;?Q@^0cWaI=j|8n9klyUS3Gie-6x6Ns<%Rq{4 zoD=zp+ahvr^&qtx=bKV3dcu_nT?EQT|zYBgc^!~G3_SI9$!jRW7kd-tha08rUS z==$$I;S8cFy|pGx5d1cq-!O=Ta}OcI9XsbQQb8cCd&BwHB-@a zU&6Cj-R9%~0yJ%Msk#rI{n_7P+*BQc4RnaK~wjp>ma%Y6uKr$>&T?&#~iTode7ImpuQ7t*hnpF~%sQK1pg50C-wH=YI;svmvmPjaPsaUSKHnUO(* zZjD0%1!L%)C?GLuBokomq9B=j^e~_mhI)$TW&#SE-K%~!weWB!wFwlMuSHqXx6G== zotv2g)1l7!c;K^V!_jPs+ZAOz{t1gKrkkHeVPRu%7d}t)2a>In>Q_K-Go-EyRQ%eG zfyy%~N?8OB6xb=i5K$Kb9MW5=UDANPa-0LEt6L|%wDt+Nh&J^yfrqZ25)M#2OG{{)&)tS>S^ 
z?i;l~cZRhL<5o17fZnjYNg&5}AOG3HY2R{_PkT~pWX8LG5`fU@85n>tpI2@2BE;Oa%{Y{GQD8*pE;ytzggnuaf5rXL<3Uv|`F0hz>6^~chBwR>jIm6h*Rk;;4n&ieaQ7G1oJ2b!92 zZ#@j(+q)4MQphq~u*e!Q|Eu^e)Cpua547Nndp!HO?~=Nfo{@n95aW0lj>ULo;4nty zOAlAjZ-2XF3lCE{ZjJIuVb-Lhi%Eo{_o-nBt83qJU&5qe!zz=hn?%5670 z4wwJN`k8UsjJMBu&#i%9@lS&B=YG(203FXZJ1cD$Ia#xrylj|1GJnN~p zN}oe{e)>4DBH6q(gDnR(K5spzTHHbX&wm0k4ZAc)^R;#XwNdZS}FS z7Vu*}T3+N^deQ2hBh_nmna%r2mSCh=g2M15oTR(!6;J_D6m9G59!0Et!CE+2BX^KF zzwEw9I2jt%l2n=CyDvLgcK66+fcMDkK!*-|apT4gT&v1PpO*IRb8!Js>TFmQlSyBU ztDCh6i||bN1VmxS|3+~6+p5_tt9qGF-Y(uJ+wZE6+ac=7KvS1V`;Y4J6?{!rzehtA z#fGmn7OZ@zbB%8P12Mk-58qNwoNF}4S^Jkiy#kbH)fqjCyyzw)0)RT|rrgo5-U5gq zUj95w8qp3>TERISJ-esG80OewoPR56DYqEcEwOc6Ka5<r=C+c|L)ch4~Q3jW!9p))HS0#)Zb9 z8-_~cxWH;LWzbe!IdXTQOZUS3KbQn);XQez@)85U!MU6j)`t5_-(C^}H-Y7epf3T= zK+XPjfGX%r-oIjk$7LsQql>O?a{Fb6wylNR;NE4+3rPG=c?AwNT4TJX_+7;lXiEi7 z5JxovCy1l(zp02qgiH~8IB~C~z5y)Vmj#0EjbY^Aj`eOm$pV5KIY0Rg3pnJ(UL3LV zTObMe!ngdJy#0e$1}2Y)cS0@&8tJ6ek0A<;nc5-o_y z=+QOBa_|WkKmlXQRyj)|Lw^A5Eu6@m-k87!{ZK^3hX98QTO)M;VlzO)%#$(b{Hlfl>3h9Tr!5zQ8zod{1 z+Ia@>2DYG46W|B|Pw3EJowPSqp8a;h+wT7^r2N20C_ef#^5X4L>6`{pf<*UN$+eCZpCO#TXFD|;MwP2 z3(?OxWL~?DiNv6`DMUE9|BooZj?;8DQcqlU^8PI(L45CM|UmKX>@-fZOiit@^`6K>;ChEE#5)G)mYeR4lK!A5~|AC947I9kjCD!9G`w~Qi4|4QYu z#AMuKSd&QDR-1$w$JVe^+1tcki&Vz)YE2wf3)mmfef-)tGm)jp>8#%gHdP|HyQu9_ zZUyw_RQTPB0bpqHC*{8__tCr&t0jF_v)FK2Edcoy%BZlmZ30&8Wkrw))i_}Jii;4d zGRZX;=Q3Z6R>W{y8hooIu!(-P_UCbi8jr$zXut(9=j+}1U+lekIF)T1H(Ij7G8QYP zOpRJHlri%{DH)cmg$R){WeCfZAxo%KG*Ac&h0OC9LW5F<%rljF%skuY-TOT6_kQ1g zANB3O_CCJdf6wt8weI`AuIs$c^ZX5GHT3k9CGY-$SHKfGL>s)y0UudJ^2;Cm1=lBt zHrNXQ`+_~s2R1o-f79k=R1t}G858^rgNEZKXX5X^2iGCQie`9t7?HZP6#{p?rwkzK z&VtMwV8@0_*a@z$uXFiXYMR3_Q@JDa2mj<1;+yYO7?4rSy}89H?OX+Xq=WqY)k!6Q z?Ok;14{@(k1n}>WuyCD~oaQ-*2Myh{Yk#1uwokaJLXEvxV2{hQ={Srq+Cg$~*4a5v zX*+aozfy49w6@ag8hhpf?(;Ebdg8Mhg)E(GNx#Vk`|FFoX`og;hVL6|%Z~586{ixk zYv_Ewt)XWG4dy200jM>rf&csivPhk69E7?GjPAu48i5?!F8w5^OfGdDy6&ss_XwY$ zzRCUb_d(L!VQ)=Qyml=mQGgS4b^=EIB%mW9BJo 
z=guZE@voiHdz4C$nNU?z6FM|x(eQ~5{^&&KLQC%}P@c7t4~t%F#W#jBrb*tLZ&Ibl zy7mEQUMD&A{L8ysITZ2%4pEEVtoc;&tD!)0+Os|35B6KX#+#Qu=)DrQPl(Ann1Waz z{dW|QJuFA-=UNq_3~Y|a=r}H}OqQKWowziP?>L75#c72D9M<`U;ouH;?&K?m-)|Ah z6S@LXya{MUbhwRCe4Wsw2AQSFTxs#n%a<=-sgFGcb>Zuhl7578H=hQdYwWG)--m3I zmw?r^1@263dY*H^x4ZnaO>FSdJ9ojSU~RmMzA-=c5NJGi+Bsq=mq7u5>T2%qKP^4| z>>eeoanz|;nKUq2%>b$$3LkET8IP>>dV=CL60r33~u_FjYS&~A)1)myUZ!Mi*7 zn<+c)q3$2&_Yu)LsE)jF!G`w)A+XN}WwphQGkq~of&xl7rg8vtl49us3VS%WSc zH2<{Ed0eO;aS>=s??IcT8^tVYRtajq-4~yX4};LW*j^Ff)RfcrIrdBhP!#=@Q~tdR z{t**{%e8`fS~QWx1B0j!L8kP~-vLNo=j^ro8rj4OiB##a@f4tGGqSs#d9#C}z{%SR*xhtbv93js-PlXH1pcpKOe( z=L%`m7((AAh}~h*zG^9AAZbUm$_6uI*|m8!k<#X-$)8gd@w|>2DSp2XsW%n&Q%@V> zW8kDUzdZL;`L%OCGZ{@6XQuK;FUS`xL`K8HmX^%aNKd?DAyiF;XHjiZQvCZ{9pw+6 zB%@1iEaasb79VDKa`%7ZM-}jWf8^BCiYT0`+IY<%Vh5HB!~kCk)Lv_o0sYf8(3Mic3+c30PGo8D0f3*CC3Z=AIwP?An!y3YQZU{X(~pmGf-6)ToKB~jeSFlM)PKh^#?NX`>L-WaA6_319%LIL z3@oJoszF9C^%`Qnvmv_D70^M9C#1^zH77zE5W;WJ{3(8o|3cO+o1V-{=ND@oS^YE} zVD(^8vYnAbKRf=uQf=7Z3DqXQTop~Vv_DapG(w1wam!+zXi9!MH`XR%(oP`2J9Tn$ z%D7ft{rt?tz!qA(_OD21#_-BIjC$H+GD={oqNN+oWT!g zThQmfy6ym!LHFqO*#^ycD{e8X#;UJ*nL1t|eX#+mf$epWTaBe;*WAs;&TLYFB43$L zl&Wwi0e@uH(4{Lx_-!Wxw$r7@GmkW?cj z94=~WcI3IeVyMw5_^^zXEUbIC-e%%#H^_|(s`)B+qImj9w1=sC*^*9_+TuqSKgh2ipM6bHV;i7UyuHIZo?R>F_|##jE{3k~1>~Tj#8-HQ1iPM6omb~y z-Wq!v`R`!*-(z)fG|J&nB2TbG|EDm2OACk(4?JNvJaSf9XUmUPW?U0Cui1y}Ux4Ah zjO)5+$6k}w*_b}pPKp@Hlf?>9 zH)NX!T^m2reHz*S_qk--Q@`d~>yCm5qNoW|;5t=KqfX=!l~E}3TKk&%-5jK=4UF7c zdDkw;^H-{j>sB)F3lV?oS0Qr2QqCzZ7@w2px#G;e>!^0TTFyILBgnC9&JtCyghUp{ z;*?|fD|cmY%$vD+OKt2tq^kGx+q-NBNnj}n=%hRgId)(OWze`*` zKZ-4nQt>`Vp(=pe>xbl()EMrpj>8(^*PuSmI`BZUVtH1Mzqt5Lp`m?i0HEt%ty^9aWy}R7FWL)bw&#!1F#sYh< zIrdawujQ?H-4s0&_|IHW?bpmtbd3)1TWBTg2#nUp1)TC%&`G;!2_GxTU^3Le_PTr^ z)U*B9M77h`o@i?z*>@pn_6ms4YK)>vmmaLdHTdry2KTLK8+Xp4((0!|%#lI6MxM_~ zL6L~j*JYKOWHr6W?M z^L>>^!_%V$^yL@ZSC@fumUE+aWbI%vXtn&it~6Sn-E?)k_fN9>3*3yzMD{G)$i+CPDgs1)F2p^nt{SJ zC!ln%)~>hi04?*^k+(yuwV%ZnL6m8ut*?^ge?@ZRwWCP(+0bUKko%C$ytj$FR8G`2 
z40$=83%W6RJIKw0iXv-PmPgZ>NjfVb?;mhl_js+OdWTPF_4!+;6&}8}Ub4_V8kHO; z-+F)Fn21+ROqw!_K3Vs_39$E_<2nn z-IC?{)0F)=e(H1xV{C39m|{aKfhF&Zynz_?*_8OHdy0JJ@iRKaJ0Mj1>gP$W_Q2!% z0$Pl22o&&L({I8c+BtSMw85L-83DFVD3iSwI}7iQ$UV?EUk0R)q-s5G<+)=T_m!y7MA_I_vZ(0 zI|0pmUV|Q2Z})iJcs<@xR;&85%f@RUtS_=XtW8d2l!04L#%ZBs!daK5s5^P2_e)~Q zT(iOaJ9cA$-ofRx$FMyB+^C%BL;BarW7Z zR_GW`TL~Alw)!f@O{ilfUaQMwE7H2HFnOJnB)`7+JwAgRZUhl*(HH#W+QE@#q*N$p zJ(yQXv2yz&jMCv74=n8wy%tz%4guM7VeQ40qEN^^3XKIOzK^Dt*+PTCadfE0GcpEBTS^P{P+6!HHHS?Io8UuOQ{g~@ieccI_6|ky{(p)Vlv1^0s6v;ltC#?8-CVE z-z2HUsdtbV9MNvY)rztC`KMmUv3Mw+4UB>=*-zEXVlmc`%5oPiC8^bwdfssHoRb18 zf_^IAa>{L;&wPo!&Y|P}SoyMDHxd@VPBpB&WST*uVyts|!He21;t5|5^tybgr7g=}}7CY61j!)ziw;oY@ zHk6kd+ML-QQ@P<%an(BDxcK?u8>HZsnrmg@+@*al9&6l{I`|e6KPzULshb9dSEj|B zlrxU*qi8sYb$y%+pv4asN`06sp_uGz)EbXa2uHZ$3W46A+sV9aVoJAoEUWrrLg9_+G9iwdiU71=;g6lk#$}}jI~lNi z3_JGa;rJ~IP-`+;^tK6u;pzQW(AOmel?$PdChDJFn(jAL;3-h6bbh_ieZw|19ZxwD zMu$}%UDZ8AM|^^(j|xmWH6)gV3W^)#2vx+0vJK$3DB&1-;3!tr1vb~$4nfbWunFkM zwn^fw*U&hN0#?5(vqt~fclmD(jbK)v7u1+lI;gg9o=_fztO10syiuuIL)q0eIi0?tg$?~Mj@oTc2oxr`Lx%@TG+gjet^@%oMCOv z@!<)@+7Xi8a{tr${-#nE)>LD&oGnuF0oH?v@su+ys2CFGO{D(H>3e zfL#Sp?XQhxs1R-HMq%K{x`!v8H|xaB9(CVV=Ja{79Tp}@Ta&XK_`ptE zx5*nvT9ldny2YGJjsrZ;N`MY-E69X^bgzpRWEO8fJQqiR@{X+Kpt{v&U_x%slb^Wx;B%>1|ox@*!QZj3ZoNG7FxqK>Z zug8Qh8fuVi5M7@vs8S!j@kLkSqZ*#KIC4Yo79~a^7WLTag!jfuk74iG%2O7tw7Hk? 
zI3-0wieCK}X0M+s6HvL)3ol&>lhtKx`3e2ya4w8TU9E^Azx?OZI%G6@Cm)`rO$U-C zBpr(Igc~-Z)UI*0&|0c}1tgg^F)^ZP1>exg+6fPI7*Uwni!*V#y>*B5(~|a}m4{^0 zywT}(ziDioxup;Vhv5#t9z{Bb#BOi@Y&$6}X0wx!aUM z^;LFGSFD`*NX&ygg0kqM-~=w*)&K+2T4z~su_nfH+&kg>n!+~YFz9r zVBM8OjL%p9ahK!0LZ!qaFWsk50qguuhd}AK=Y&drR4{~{jKZE^*mL4-YSO~by>_p6 zU^kR z-A=>CL9`y9MNE7!xdL6^4hLq^-;|cgk%a*})IP*p6Tp}gf5qhMqMWoALX#Mp(PWj#IW`;yH&9XJPxAyY5artp# zbnqU=f9kmYt4HAT9R0%*UmpEh>M-Usp5{Wv@Pj^Jx))1-5liva2Tf}#Xj^SPy#tDR zPM2>dUBeCqC#4E+$2bY-yd)lcCSg}ve2jw}XZiTO4%9#ePtUp})kbU-j^1m&tMY<0 zwJ=mKYuUtR8rgY!#{sn%lo(?&MWfTD+wFtg#;T))W~HLCHTv2RlRK$>rG@KtpkZXQ zyWOauv)?t<&|{YAucz-G{TvM6B0Z6X-=1ZBRaB@R=l5CXC~Z`b(Ilk8oyUlQ_<$QZ z;toH~NSNL7(m?DnQy)d(P(5ww+d__Nzc;J|pUeVYekXNE+D2=5^U3HWuDHHEjkDo# z#IJQHHsqj;(cw$%tCfwTkn^5yJhfCxO$@({RY|nEWryassAAywO|E=$$C*16viDXr z*U$qalKA!rDf=eN)cE3h;x74C4aq+02G`ko)fk$VbAH1W9E9K!Sj%IJI!DLT)()#j zv}##aGFT)s^)wzYx>C)Y2T5edOxftfQ!%?{(Z-LHgNH`0cNSc?4B0Cl?!QwKZuQF( z^F15u`2t|$Bhb<{AiP^?|47pG$v@*V=)V-hd-nbzlG#!|4 z*^!r;{3f=){QIG-*a~oYQ!A&`Fmd=R5_g%FXdcM#`$u8)U){f-)aS8@y_!xWee-$9w`{<0buK$rj_bx) zCep`wHnFcpC{>sS)7V#P@sm9d8Wv7Z&v7;$`YN^RGSPWuWq$HpzKrC7{0g*tt$=<` z73WBh_iEg!VjXwJkz+c8?)F0gmsFg=>SC;dUWg$oD?*3l1NlVZUAiI>!!OKA6jijY zk=x6N!Lp$=XGZ#%(5AcSB#jeZtFHMrImNxC{8dOXCqhS=+`Q%PFkus?Z&mEJN|LjO z{J+q0gv4e$s2(C;0Psbr#Usw7zB&!du(cb6@EqM`u zcE;l|-RJvha__+=(g)=717l=dALpl+egWh!%r-j(7GrHN6ZOKf4{ab9o>);fiK^OV zci~5ojLKh4!T;V=;nSl!x`#0Jw&P}g-ly51{DHZLB*C`vabq}d&&Mh8#p;go_cs;I?CMM;(O;DYB=Da=Qi zGHgEyUBw97f~~0yfVC~iHRd33kDDVt7JM9=A(L)7iG*zl(YMg@>~sfbFZ=4*x6JjF zVS2%WH>84Or!p$m5y=ICWE_gf;|82rr7J7aq!BY{Lpn5ZH-@EuI+PSoF_H9V*z?W* zMP24jPC}iW=U`F%Xmjf5MT|Z^pxEd;*r%nOa-M6_HGsB5SB_MtvB%F-5~)_q%U-(u zY$$Hgx9@4VJ$geeZ{I^o7{ZSIM{p<^UuzBNYco5 z(dP*h)^#2l0FCjQ(GybTP7>LwC4()e@qPLH2^zJzmYf^O4N2y(sE5WJcB$`8Q{ZeS zjWjY9Cwl^!wUA2(pD+XGez&t6T42b-SKTBmIj!guX?0}@y*u%{PGn1FI|6QvFVykQ$6Sd}{{CY3NXkAC~ zF$8xHlWN~e%Uc4AJ*=qRgrzsSjCEtrsWDDo)95@j23v61BFbOB?uM|w_TW0;Q(dvp zN*&sQK9#tv@;aX96j%o-C7ZDk>h6{++=)tdC#QkD72j|}YtqfQOOber%YBG9U(J|| 
zW0hT~47XVR@mMnt`h3L4Y3~dMTKaZudJ9r|(LVwyinwn_5`(jo!|r8}vD}FcWGT}F z5mMF$ITj`WKs~tlBWkiex0fW6OM2hAZB^vn-*(816BDgr(X0EYNnlkEu@Z0fKFUOJ@u#JANzwvSA#XIV$-hC z-Jcljs5JjiEAby6Ja_{I3pmU%kN{eCq5HWK-K#E-%q0IF(ZWXXp%;?uS)MP6rIgAT z{p0Lo1#$Ri%vc4JBl@3GsCfNSj4m|)l^OZCQr*IQ8_xJAE*9T&WFI+o&78yYp53?a z4`#hkg3+*5zxu(oa?pZlQycVuy=m~*Uq9gnZ%P+l`5$lpPao(-4zg7ZxnQf32ZaA* zs|vtYJ^K_`G><4J|NeNJ{E&hwJ@N|uy#I&3g0-&2#@}85|KDAarNfC+(6*7Pk|K?~ z5Vc5od?1nKod41v>=(bYJXd_tV0nOmbM2jljy4GA$)-cx#qfdD2%mL*tY5B8M>9Ba zn?VlgT3TR4jhTLT<=er{FU4OMpu+{8mt=I=>U5wd*d&SDb;}?)?RZ=Ek%?*{t=L;) z*3F;w^KHi~=ntI>y;tmIBzQ0}`04V#%`_5!-K;!B>S>w83_x2~AQQZ(&{r)~c5fv1 z6CL_M;wLhh0cxYbA|v1kjzNi%1G>UC;5M?7B43A}#q*~BI4StsTjiz#mhQ&|JYWG{ z83|GDp8yODtBG^{`7KRRF-4WG*kv*gfiTRYJ+Fc>MIL03@f|9V#f}5gl@AH;3xiO0 z5nAjIKdBGDAw(c`+Ziz*8Twz09$*{3bmBsKe1=|2{lfzv!ejdOce{RPghWZdh`78k zX$O>NH3@m4s}N%8Ne}*jF&k_}<+h9POp26rpHo^qN#)MYLn%OOvn`-u?8j%Beax66 zXqkXLsO|3CdKgR&4mv$4U@&RYG0?k}#Wqeq-9}X;o>bAsF9q;!u_9 zADju=gPO>zcnt-ZzPsB-FZ7ya{x0T(9vFau!FDawKL* z&0|cBDFxfjVn9SV!>Q;Y6L>&>2G1+VO#cU4fu%~s=U>z$NaD;VF--UV{pSC4(Lvt# z00q1t6DV2ZdAS=27h-^3a@|-GgZVa{!Ln(2_gAM=hP5i2QUL|lSpQSwyuX)@{U$~6 zBG<0Xi|oH{XFG(MWCD25d`sDSQw$TQR3m_&d)c4eMPGN*`-AlOra}}QY|a>q`dD!Q z1o??c1hre2@Vs#`+|XvdvTZwq{Yue%yLBqGjE*lvd`2O8RY3B^LWdK~)tP_AwoUsu zXO8;F&|$B&W|Y6bzt?cD4ZL`t(eFh(_`}6*I9wh>_pEbv5}}*%`uBd~;4p;& zF69^KA`d49<29T&Q>*>`>zXL|bwp+G_4pHh-5R169_5G3Q8M5h+-B+$GD@E#PaWe!G7sBMNkU%mDZe)R zxMY2K^faCqixK&gIC~}$;eo@@0&_H5;&@M44^d|-lbUM{W$*PC!Z6uT+F;5&knhgriApinmZJDO5OlvyR-HX>XpUs?$lsepNn+Zi=J~$&9LT)<~d+~ zr)${9(`EnncMq&%$&8u~2 zKPOYT592jfEmGlP&?)s;vIv}Rl3`Szd9#hVfKn0Z(!FQKT(#6)3)O1|cC}Uzx9+dx zLw+d@td&$`0vg~7_}0`duRqJU(E+I^bqx}|c)*;b2R@#($N#+_|4Z=Zdqzdah)w+J zle7;Rwvi?x$<+pI(tas&(^7qz_l9e7`%rzH&A{g{+sI3&{Pgz9&U_XT2eZgzurOof zloD;WvXw)wLAGap9NkJ=IdVH-NEw^L?G^-kGAMQi11vl5Y#@L8>)aQU&r#%!T&y-; zH|_CUF#N3R4Xmc3NtLy0cza&?yyKVOw*r~6^A#X}9fCzev8#(G(4S8x>(|1uw~mI< zCXFpGuu|Z>afjok_FN>SntEX;uBe50Sk$9d&5O9^+RklCOWt`1k`Pqa!xS_vaE3DE 
zSNoW+8*A$Lq7ed{C$#k^YT9p16dsuc|_+0_Cm*NAz4gbMLK%wY%^pQ>ce2N)@mX+q|;1LWh*lX2jn zSe#uajDveO6K0%*54|2Oj{cJx&qFI5tVM?RIJ_^k|p80Gg)h7F`Dn`MF0eg=_ z>{G}#=uA|)sBCRpeW=29oH&I)-nyu#rC$y|c? zDn=Zt!^_vV19ENxWKWkswtI9;KZ7GFVE@P$)1MCL>TTTR_hwobg?ozLQ&gT3Xx$U&_;OpQ@w+bmU4Afu*_CH6{-JI2LU zK&@v9!vr#c!d@(G{yj`fi#JN6T(3FL#-QaDG6c&24swe3#(G)2?Wb7t4FgxpR*U*1>*7#2dDGiegDSDOX19b!iIyBPnZ#?SXEA zm(kPyG=xH&k}Ajo-T|xL1Q-b9Vt=m$j;L?b)k68eysWQ9%gzI~i9*e`=k&lVly+eG zD^|~ymMxDY0~lu&<5o_WnryS{m>x^GAvmnLq(SZL9P^-#yrIt5IQ5A?y%mDA(&1^z zxkl}A;Hc#T%kw?)CH_(jz?%;vX7+8C%va?hWM$op*Zrg)A}|%`5q2%SSgm6Qa(O-w zKi?WkxE{%qI@f5qGhG(aF67n%fIk$7&7MDXNSwDuI_o3TN}srPGG-Olxk#cWN4^ZC zwH~9V8p+Y#eR>>H$1K3D@un7G6z(;&hmzbg#kltnb@YN&Pj=s zc5!Zi67R}#(`?D^wnpAma2k?d8`k`lU#tN)vK2@VojFuHiz?;xwU3NLYR#tqq-&gv zqeo$oU^O4swPyJpt~@DL(ry)NfLgC_^>+=~7HgwhH~7)mXB&!UqGZ8{UA6kN_4=ss zh83Dq*&0TVWJ1j;oHK-wwLuslJ1gU$xRsBa^0Rt%!KZ=�zK=I3=!M2Tj1Rf85XL zXjqpl$m&*zm0UzW<;kV0n<0vO5UkSB^}sdX4cU|Pt(PMoGhbOG5r|je@z*U|rRv$85VqW-z**PitkooiL{`9eYIh4^- z@M>xaOI2mW`lat;*fr#KHNP78OZ+R^Ta+TJOYf2^*otO0GAfxIIh&GL`nEApV-But zs7H=^4ej#7u({Wl!9H2vP}PbjU`zm4O56DXw!Gmp8_>tusuO3YIG2*9`;u=oLNFx` z;7@JyvjGvo55=ABXC@^W4}6cz%rDJv02%n_6g2kQCn~Cr&|PRqUBe08OW<&JW(VNMnOi{AnF>)CSwov@-4!`++R}$r*@}W>>ti`5Rx)2^G#X)aF1^ zxLvbnjiI6Tgcw{{q7xs>*5}gkM_wBzEj&qzhvehEl=GNHk^)OAgwD^5^^`3Qu+bxJ zLk#Rnniz|DT!?nGI>LBYm zc8qP=nzMvirQJENm1W3$i;{%hcQGsvW!IExie5YeWga=Aa8jDvp-#@@(xCrd+YE#d zvJk4sic2>3T6#wms&_yh^M%YN8aW?oB-^JH%QS@{h0|i!q}Sm4?F%!jaTBJ7`L>aJ z6Xf*qI9@7}vSsJIh5F@NqcvJxbR2D5-mBML7Lq<00+)I1Vc<9-==febns%XwVCq>% z!L3_XaPL{>?R&9j!v5RaJ2B@cu&k4ldFycZ7`}2PRdfNPIZa8>!TewG$=0|vibmSP zmop0BTLvP-)Zy~Jy`n-paHx>p7S8YGdEl?GQpvsEdleNBPP-kGm1TGpK*6?XMK-iH zU%^jkly29Q6&*LE!`?bmak%kkx`wOYmsqiqBZSNkx~fy7Z|Sj#ia!2A=L4H?+#W)F z47#9&$S*B3l8T~omV48?YkjgBsM>1LcFPRfJmnI=@UGoGekALt>q~{ec4)fTB5lhE zoqk55fBiya(rsn&wddM=xG&z5ud2N_ws*5woVt5=kuI7KaFm{oXU%HY{XM^ zdCJrnF1H^$kh0G{-?rUl9q#OwwS8YdLj#Dv10|h6dRpSm+13>;^$I2?KS`^UTHL>y zAwi8HV}}!vngfXZ8U58yi%s^d+H>e@6Nzk0wEmcHC=9vfq+TJ9Yoi|L+WLwDQ6I?% 
zI}eI-@>9O`B-dk)FW&dWPBX4l0qCTd0(`E5UA)-zOyCH-EM!T6#saOW7r)Ch{K0aC>mxSid>n^Y?43kRqr-25MS!>a+uo7T%kv4jfNuRVm zS-UkKc=2uO5?wz^HwfE}IhUYWHW%1+8KeY$8ergSTx8TxuDbmK9j%#5u`&&2HcA__ z#m|DvYtf|Sh0_<8L~SizPIZ5g*~0Guigklzf8OaY#lRY>oq(yRz*ZTVsnq>e?+ytO zlR?^%@;+Wd;VF!FBV;xH9HfHg<;mPEv~@ zQs-;;^ZKpFibbZAz1`C-3tk{{Ho{NdX6wf5tF?GCx}FfW8EN3;jw;}r${)Y~AywMf zbIDR(ep-t&3@wOsa$>SZ9*!!@>9+QP&m)P`>tXf&=xEn(cha&)kQ?~olhIl_><_8y zty>9qlrE-VX^R8C=xHF)M#(|~ z_TV%xFG`6#?#nDEj5^l{4v>XT7!5M#2h`C-3R>v<`}>aR()vqlh!_oK<1lHr`clZ3 z9gJs7l6a3d5}Fh$k11Q13*(?`(=~cV$07U6<`C)4lD3e&jc)}F#0lj%q;#@ zRy_i_NTE!r$o(xr9%(hn-fN@A z6B_kZSAdOnoJ7@>Y9QgD^)-r>aE2o{^y{Tz!vro;!ukmqAZ$}whm3Qm2J^M22S z!GC_jI26Y0+f!@zx3_nI08dQ4C=Z4<*BOqZgGBARJB1vA(8u{cQ*?sKXy^dZ6Qk?8 zrl8w;qN^`Go>xTl^)2qAoA4R08#jJT%Lt^}SN@4=qwKw%_VRi&u&guvz4$<^nK)8m zXRtO;4521yDG*(_pkL?I7Y2?lJy84%=;-j^%BgdES1uB7xGzkM53C4*U4=snu{Kw2 zbaB8J?ViS>3~lkX{R@c--5}ZR%Bf)bRTm;E0VM)_^dM!X%h_;0vkz$3;VKD>Ye~yw z|MTbZqcH(o=o|OZZdKPnxIVF)8nbVk@g_I`j}O&ZMhFu+G>;|@f>AXh?E-z2TZJNZ z(Tcvyvylf)koXoq;2~O8A$>$Q-Cme#Pm1i5icNBRyZ2ihfC2Fj97r6cfI`GP{#W6f zW5OFb#hC-r0`$?P^NS}MmrGHT9xb^XC(LfspGZ{F=gfxYd!BKw0#N&7k!dM6mWLCR z(lv=!nB-UOC30?F1;D9oHX8^sx2x3oZc=J@h?NYAu9E#lgL|Mub}{3fx4d;!1+~MR z4MH4*7+;hyatmmZC8LQgMOI08XIH+JF|W=5 zN54@+YWSe^x7!EQcu_=#`W3?uT9Opl3|>U`_!&*rCwzVL$y=hy+tCM*p*^;ld2YQv zG>Uz+0=g)FB<)*EQbc^Ay6SeKQ6tZ!?8F!)Vi%t1XT$~=#gWO&GR?J0oYpWK*H+e( zH<2aw(mzfOguGFEtPk;Uts)Q>S23NF$Wra1M9&0wI+jUF47=7Y2Y)Ble1lsxn zI-)fG$^*Ek0|f9_wuLPnRw3@W&5(>rW?3XbZF8*{B%-U@x_)nTg=xPqs122*VRuXU z+d*Ct;={%?HEB}TDc|kfVH1-i3j-nO!EKQ^$`={P+ zV#|{pP06mvNka*jpUeDM8>_y0x>B_%qsa?ONK(*{ zc{uk8<-NME&c7p`yS;CxVl`xxgIK}Ek)7cQ@iUVh`RR zy*rOSUy_HYzQy!7)dp+b!rf18$;u^Cqx4qV!hO(5B?)P|4658nEk&P#2s#E?+Ob5o zlU|1Mt5d$&gb$^P!pjLG=rBtT(;shqwh1!qOnJc4se_{>p1~)0=ZyYnVcd=ZBYY`| z8!PkUc`(bCT|w(zsNM&tMWEyx%4=F6V{L|R@OHC^uP?6GdovGvoC`fRZ2rN6aA-T~ z&M)f#*h&p3VN=_x%{rV4JQ2X7%A$MP^aaRX#Cvl)_*$vT(X80)bRM$mO7bg5cj%_i z!?>2h!6a-pCas-PyEem}mv$)j8;oKp*)`q;+}NCUB~P)i0ja+)vJ+(gP>X2J{1& zE4YG+0`t1Hc;j8k5QL!7@d!1lr 
zpV6bnn0`IzycOs7);&w&4-Rw+!srdxEj5-d0|ub1sL+2wI{$-a{_FqpyhJ4;OQwXp z+aEpP%U>kmX^5vgMFTcz$^9Lz&RG6hD4D-v_5b<+|MzD+um5X}n2|Md-2Dgd{bbj# zUHAX0SKv8BMnh?lhe&z{q-Pw@r9eK3Na*wcnsOXvIleGzY=D>&rELln{`+g8ixi@0 zFwjKd1b?B6=Oejqj2%zL$0(i;0=DFDuAsjOnWs=-#y2$(Vb_j4)r*EsxOWgUlNVf1 zDT?O{Pww6XW&P{kRJe-<#-$0c_{5=NWH|>chH>C3_?8T58*01u_v#KXgM=LT(obw7JD+D>sos^p0=8v?117B! zfAVi16$t){^ZWKLumeG|q0RmII9w?EShKeMK3IW&<+0!_oFCN07XSrI1>(U;uZQgr ze+D43VguCj7qyyq)j9sWKmbwF<X;}(Vdw0Cn! zyoki?PLHfU^E`%3e077$S_y=O{h#1z~c3ZRJzfax2 z?~MQDh@xW@qPGA03fX-FVs}?C6M_ddr5Q?JyYS;N0=HeQd2Wf2eQeruj$XSCtPm(r zxahxtS|Rq*ex9l73FGkz`TJQxS3S9&MPz>z9}jPzQnAbVdPTMWn##4`^B>t1d}J80 zozT9>0|VC&`Y%;62nay}wgYTucbZ8sAF%;<9KcRr*Ig+Zj*m!z?=f)_=f^_Ny49zw z`{!%*#mSU6&)=%ek2Q7s%Q_jFI=}zDoDa&K-~^oCXj*X%f}KbLL~r%W$R&%Fun5Eo zcwb8soFC$j<~~0*Jgpcn$S}$9>_? z`F9olI;*!$#V=1-NUu&+W|hc`|6WXhj#cVvhZWS?MLETZSMNE+MrCG0rN6B=Oo$X-rf1-#Ujx;cS9b0T794Ldwl^L zmCqA;EcF-brX&mp(|$>B5MPUf@lb?_`GfJUVhOOn3a2RUrqPT=2v_8a6dCqsCmln< zVahuNs2k2e-qd@z|J3yAb4ee2AE1GCB&U~JLH=^C__VG*a9AsKd-wcx=p`&Nvx&j=^)|MMk zI{RukxYmeUI=_t2YnO3;wsM4A{3}RS2M2mT}+X$S_hUI!eYuXQNDYX z-emg6Q9uGTpUuAn!m>;8c%A_*zu7qv7jYoggvf%gS{~q;E;la+$u@(~T~UwMrJ#); zUOo5`)!)=9&+bvj=+Jzs<;ocfPU-7{Gr-Xs1G@9M6?xN=*Rq*1GDYi}sSAn6 z9<3D@dj)we_NiQ3o?Uo;Q!!tn%PB4rcz$kt6)}G`=y-%sAT?ocn7jr!soGww#ER17 z)j_eTVpHeFRzwDOChexJYRi3^<>ytlJ*u&_l9=`R$G7DYl#36#d&4(G_{K!&0E6HB zxW9udoN%?s*q`AOW)SpB=nq&JKA1Qj<{=>0KN#3cS&L@TY3GaR7!7+zwx2ZRLQEk8mXoe%S*+f2*Ro zBi}B%INJ+4zR;|CkAI^e|9G|?@v)Jft~yYecW3V4i5I;s=dSAYLAx~@3_HJa7C|mL zA0WS49(}qs&Io9PJvIQwJEdzjWa?i{Gq*QDeeDLuyY?CN4L8I=Efucfa$-gN9DOB{ z!6H}8ist0egy*JDjn_Le1of}a4%H)Cg$$cm*^|RB9*+Sl>3$xcs1W=#JB!XQj)($| zr2s<9HyweQ^y7<5VGXrGbgYM;`8#QAf7k$)+?vT(fYy38qWhLVH&0SQj)k^iJsGo+keC#)9-Rg8A>&;VbxX3YP=h%>C%!fh z0D{6(z(~&rb-h@-Ri4L^!;L;a_RcKGTnl;*tlo9^T=sq-6-E3w2o1t}d(CEBlsTK1 z?=?=^l+OfS`4%+D&FU5j4rS&rp4GpH6whS`j;u~)cH1THtyX3mhu+RmJUw}|gqidl zu5ACciN0Fi#i5>U(k>HDEt6o_lns6^)Vg%)=5sDb%3`|X+@rq$nI3G1C(Mh{_~nub zumDwCC{EFto(!qme*N%l=^fp(SomBnr}WCtP5n!yp|H+cH8FX3xwsqpnl=cpb;$0n 
z``q$g&8{L62+o9@IL|-azrGM(we+xd4i3fY?)3hF$-6%vR3lm&iZGqof}>*jtvjqo z!o4OYL9(U{S#E9)_Riebq?t(e`I8SDhLh?gk7i z+h&j?an9#+d{Hxt)}AVOB^Bv_^W%B49xMjjpaq)ZUXL~2Gfk|(Z03AE9#4mTalqvC zJ}720ug_Lay-J8W;mQ5pYS}kVDy!*iZ-mzt8hOI%s$LL)dU*f{`~4y$A~3bdOca

V7G&?Sy{#8qBks*A$s+1r0&*P;6jl5p=Uv0FiN=Ky+4~L-I1qK_Tq`@v_Z3s&L<{ z3}DU&VxUTwj>z1HQSc{kgR+g?kyGF{=KR#&L;NWjK=sU#W|>?FAXRdJNL^&MQbovm@l;2?Q*{eT%489%+BlnyS5)@ z6D;TtHAn%$o=@S`htW36DNTc?%^D^RgJkEQMRaADC^by-m!gcL2ZmjqfLv_l-7pvC z(`+BJK;xvsvSC&J=VumO%0}iWey&fUStaWW7*Bgl9kQ@Bq3g4;#*uf3eBa^@~m9uqvQ<}7)ry=(i@HA9zf>Zjq^jYsp{fOJcGVr!Rf_~jOxqR#@7mxBLUes^yV>!s8(7JFnACoO>d53N^egs5-^D(xDPxM z$@L>ol-{3Z6c_uf%pA|KHQYMjdD1}Sdt=X8;-G}L@y z^8fv;@npOE?ArJ%Cbe7IbC-Yo5L72HVKccA=(V@H$X*=(WB+;V>uBn;eT~3-=LPaD z1xLDNA4qs?c0KbAH+1x%1Dj;evPW^Q^8x$yli^xiU}kN5F7-z{P$I~e$poJ9zlsmB zuo%Xcg$)1IMFgL)#1BqtvIbW77k!_6^xIQqT$SSFRJ$9a$xll|3TD{azLN8+ycNtd z3Hj5i4pT2t$!pk5rbBGvBsJUqhljX5u?%6e`w(@mt(S=w>d9*!SUN9TF);=NMcUUc zYTqYM9*%NRZJZyJpIM|QHJ`%vOD|<1z5t?-|A^DqL_k*P^0%q(;~rPk^|}Gv>-h}J zTCQCKoTPB70Ezi_UC;v@=PbLZoZr;ssC)ivTXX*Y`mHM|ip1h#NmEgJA{L&p+zfB? z&P_Y&cg$Cp@TWnr5x$7(Qrw zY-avVjtgv|0$U-7XAB0d{vT4l+*576ul?5{bEwBW68sif_K~QkfMaGy$y4bKU$OD- z%HX)Y_MF>prf$+316@^ACRr1~x#*?C9l0T=+c(dEEivPxYJ)gN*g$vz3|%OP@&uTjV}F1#m#+^D`tr+Ht@G=KIwDiq>0g=)s~8}wso1GcM76i> z7oaE$Bj*D>WBlSAn+lT-KQo+ePQEg-QV2`eeII*$UETMZoq2B($?MHfg)%gz_PhbUX zv2#qKUsK{oJP}Rh{*sFKF2xBugn}8og8R9qR^JiBy1Z@-CwI9{c8WGQu#nubb7d(E zZxPyXuA>smOt+G~*f-P^W+}^i^o=u*1NXKLwU*&yn51P%tLf4Y5M%_3$5^NA89>d) zTt3NQQ|MXvq*5UG5GC9=`*X7Bl0j_VPOhCK}4C59cm5-=!98yyNPgT7OVF<|H6 zQM>8A`=iSFiqqy{Ksl#L;ITcI25SX2je+&RQ^@uY$at{<`tA{5i^mSlO> zS+FpTDL)sqU`*vX;K7u|khx3IE`|4hxv*DRE-bvWvZnC+oq&4a5gTS{GB68xk{Mi@ z9<c3Wy zDSmq|>wMarr1h6UU>G9sM7Wc};lj84g+cr6xli-elX*T#$x+}V{`sPa4Jz&9TnJIB zynKg^maRMjls-I(BhnM@r*?AJkJ&Krp?5hh*X3`4H!yQkZtluiT@r8Mkl;g&w&M(@ zXmYIGViG;%Ef&IL{+tjDU_uNphK563Ayxv=ZFUU2BQ3E+B2A7P1HXM=rvS;`FQ>1R z9b#{ZE?*ATXh1iBqE0|1b8pD)R zp-Io0V1~ta$LpS^Nqc-=y$3wt%lO3nm_a+cBOND5+#BrZD>p!uS&bqHJR0z0`Jg8M zwv0)}tE>f6MJHqx9>qJhxx4FlNeoK{KZ8>XEd!>q_`6ZV>S=s>O4J2*1t>c*76z3H ze!8>2cNQFFsHm zOl!~pw0&EIE{Xh>r@1;RA@NI&uH4$JUKlJlJI1j^ODQc9kp^m>rDL}Bm>T(%R=>S( z%dRqV;fSk~KLc(Uiyp9HXhrRc!O5m=CtU6_duZ98byl7yO6m1b^JO2i7D{EOPVBh- 
zU!xwq5ni=tl|fQv7Y$=saXAQ2&${h0NncRC15xfO3Vv0H0??eD;gEgOg|;( zfvp znO~Ine)i-PJD<81_TIHecuAN~8x^k5BlU9kardJxI5n{M%O>pmjUPqakFoZ7aEdp{ z4oZY~mO*GD?7i@1{V>4P(*lszn<}f=MxZoxylxnH6$7Vnm|V$5w~7ijsNMsOY7$=4 zTo!xiO9sC{4FW9hv-ZbQhOtaDSW|hd-H8qpC!t1Wlkm&$DuaLSB`r3eadxCB^fyl} zZv*1bydbflq443piVw^%#<0OF>iy?0*NYHS7B!o_iu0))u)R_#bh>DdK8#sH1f~Q3! zY`h{|07)jTMaC>-a#E~wn`0PwP_9b7pDk~1NY?{QbcyE5#i6&h*e7S$Kdxk_b%gAZ*<=^M>V12}U3>FRwNZ8nUYyCt%`CjM|XGz^G*lM%BoYJFv2E-6>MtT5hR5*cm-+59 z2J2DXM=2|dIN92IbPftl%-LW5Ybr7aPa!uKe<;fzXN7B(d}(l~J^b~g{Blj22f~YR z=b9Yd6ML0>ap73p=9NGJ30$WP_KFarJN|R7-P1Mw7|^p_j`RYTTLP=muIVKk_5cul zy(D$%p_@zALYju$bXj^9ir|6_ip81`dozOATel3Eh=xFM0T@P<{xQAQ-yZ?kX@0$a zL;oigk8*oEJO(cuks%mL$)!~Q30!;Bnn96R4h$PgFXMFv<>gPspdpj9OUxJj3@4^J&~K{^Qx_sCWWltscLVC9px2T@ z|5;W;dBN0u-0_4B1D?_ruta&GHJEJ^snAQ9ZZMV#C|4u4y(8}k6H~jJ=l22ihjgJx z`Ztk39t4FboQMNWzE6N0UW&o(a z<{R+e!!XN%Lo{fSrD2sBThxP=D3+P7CFUKOx&cxq-FVD2Py|UW)7pN!?~Ia~Q3UUv z9E|QQq_n;N4l=@mV?zYAfuIv!`KCJ~YgfQpWbu7eA zlBKSaNT%DVnr&f!IOEG$^>Li|Uq|vEJI@w5m>zSz5|i$=t{;5EABho2yQ&a$ztMv9 z?S;Rk&WA6&dl$XA*_rlxD?aQiZaY_iZmG6p9)}&0uJ%f*-|OAC)#@6!-c;(meQobf za-38WFq!1-PCKHXVx=lU$r4k?3kZLaTHe|J{Sr)+;^&WVeFyFPO(!NoPs`72fTr#^ zNUV{=xnH&pzRHyTB73E@HuyNVIAl)N`XU=8eWR($iqp?fy8 zu!tqqRPidNeU*YBh`%gLEYmhX){15}uV;TyrJeH!<|=E01^yN;YQadJtlI}_DZohP zec}Bir;64I$&(DLerfs;DfGo`6bL1-0oNP?Fvfm<~voJe>!%5^U~mT;#K88k?U{ zFfpsh%sNETg2N@FZ>%$#Ka%-(v{#Lu!ZNZw=14yM3077#t74*{(Ez}-4^V8D{@clISdKf*lWyn$hx($S+UySHy;_L z=k9U|!>Rl2jhUe__)F@r;tcF{&yvxP$9%+pt^m$d|SkQY_lDH=3$G4UEJYHDyeSfV^WQWsIP(_>_%~ zVy2UI>cek~^UI=&;fOjN*I~S`!Qz31vs+OSbQf}3E)hfh9IUr5*wv(?0A%(2@Iw6$ zF7Sul6^=y_Y?MHXzl2Je6x41qv9dK9BRQ~~32PiayqGfW$9D!TfEf_R&Fh&FKvG6y zKiw}8lz6Q`O#S-bIK_wece;m-bGx<0P zJ8W^E?tb0_Q10+!6s}C^E5;BoYqYdHdLdwj*+~mSxGih%gi{H=H)M#^Hht*xdJ2d| z`5(y_9I-qPre?e!hHxdq!N5s- z&Qw6;lH;mC*)#k|){)Rd)KVOJ8aR_AjTo4onG?zMk7e^j@D}I-^?tuUL$b%UHX_G#!D#nrU>Dy5J zW-phvMmmK5fzj_91@Xw!Cy&^$f@{b^c_2ZRAnVxl1bX@*v90jg<5#-kktTrI?$T(Y z?D|KJwvuDpft0j4tJc>b_$z5;VrkZ$7o)!jM 
z*#9A;qm3dk6cR4IBdVRuXOxT1UJ~eH=3$WY%U$&z@0qQ9y1J7cv0^eao2L~)cN4dd z;9f{5?8IL@*!BxR$li#S$^W#{AV(SGPu(gAvZrz8`I$jBtG_3OmrD$)=0@2+pJ}dg z%j9vG)B^q`IoeB6>YIOTKml2}qv}0+8eCd_mR|hv&VJCrT7VzN;YUzkBMV$ebDwZM zxX+4lY_)t%^2-Ypr`T`@;h}??3UN_sO0|1*?p$lHbl%&E_22 z4HKNvfZ*+U2wA|_n)er^6h|Ht8^8(6x6LMtSiBzBbBp$*h^cdrlP|hAiw>l$uOP`( zC_zWigeG6rL0MN6;B9g0_svQX2=3QNG=bZILb=9@iONxA z;%Y?LlKmihHO@P(n#^wX+FqYlx?DxE(c7wNu!}Vy3S=2dxgO9S0}wvKZ>Aoe0_oHj+qZ1VknGxXou5F zba75oDm$4oHw!%{ITp6w=S`uJ8Nir^;SEx7~~!J;}I4P~W~aWP)Ida&#W z(d_#{N?+-RP^r8taoq5-7gm!IKOq+mMneT9ICgHVe7Z#82VaH+eh(jfK>Or|VV(P% zyMaSf_*#3GL)ix3UOEUVZiDL!ZW3xcKaHQXGK?|@qE34D>)3=t{~pkr^YdgWz%fEE zp&XfXX-8rLrw&c=j@cn^>N;EMstWP-WHnA-4%3Ar>^5Bg=*s=LEVWjMtf4<&97nkD zuQ%6_u~}|kwzdnya_aP6LhXc89?GOs>gU{1sUsEYyWeD>Kt+_DL%R1VfFTZIV0iGg zk5KC2!gC;SUut&=eVd1d5X-!5d8%1}mwmOEr1dirnsKvoka%rh2#f0<>DI)#HX5mU z_>3zJ!G-b~g-2G}NE^r<;=A84y-veYB{N~F3pTcj$`1l?!YL@U?>lV}t*lSw(Y3xo z0nFz94`SByD(Cye7VkYS=09xr2o`6-qRe&<1@ktrx-TPvU0RuK__WzqUS*lrqO;R> zwoCney;ofl@rdL9c!eP$fti{;i`r?Vd6)nF%$+Fr+`Bn|xyD+;_tf)H}3OV(q{I%CzZ?+l7TEIvI3+&NOww<3@Iyrud0EZZA5c*u3_JN!NSfesl9a*}D^vpK2a(%zxzQbj;rTveS>{ znuX-Ej{{PQei6sGZVaO{`XSz^B*SmWt;{sY_bJ_!IV7|~dW7I2*ngJkHAF+}E`Z~6 z?}fwMhhuWsCHR6ud81|qw%`fl8LTc-=c$zU##=Px5Zr4!8&S~CT&`3|%Xd9L-8V`+ z1ecH3ebh>48*B4Gy<+OD@Z9n-a9jbDrg>^=+l(S^j8N^-&M%k&rB{_okWdbQ!9VAO zMjxCr4LH-}XmqpNor0bWJu&q5Ctvmr`_c}CO0=@^f~+t1H2Jp3pHl{W4WohV{Ad}r zrz}Q>FBzI}g$G^g>&r_L&uxA9?C>NfJe}F<4`fGyuGLFlS@sXmBc6)5V_pto?qx4# zX5ZdM+Hod5B^T@#XoyrU>+4-|Klp8{bv4iPL;=^y{|rZ9@1qz-)oSI4bPFse&Syjx zbGu1j^u^Qg+>HcoX}U!PT4|Em`iSh=2a9{oV#48w6i#_f=>Bv6R*`ZF4sCe@i;1<7 z9+|n}z2WwNq_6D(`rhY;(rO|$8jA)}+3}y!s6GbSgBo(M7m9y~E&dqD(8Hw5kS`DB z+i_TxmZ;x>s}XRy9Q6<(iNLRsY6QO7C9QYs%kUihYPqefq55>_5>jkhOKuB*FZmv} z6^)%R%7cr4mqhx#D99L})xsqnw=_}0!u^fO(ir`KsPtk1=3oiyaLY)lewrPYc845d z2My7Hk?MUX)8l~#mQ)n3L#);Ro-+a58Psu9k9$7+m>`E=K*9e;W>0?U9JqpxysG`< z1iTd+1R$hI<9aVNe(+EBw{~P5r%Eqk`f$V_QOcMD0Hz|$htfho0R<+@JeKpUj4V6b zyYHhqJE1{ukoa?c6aDe_Esfl_^2`^*px{9~q!dco7w5``>OY@WVOYm^-@H)}RJp+8 
z{R^m^F(4u>NV7B?K|I)Z_Z66dpeOZiqzl?J$PL>5W45&cfQ-OJt!GhvCq=Apkcy{c z_s6F*)99*pGrFcsuP(aU#l;k#Lhl73}z+JwxRp; zY*&D0D>0Y!aT=0K-y0DcCZ6_(_AvpEPJK#UuK+pk?c7yK<#eRyilyB3eC^DM-7nd3 ztJ+vdgc!FrOH(Gb>|nRoAA8j=*o;qR%Z%0&TWmJCK6GT^E*5J`5t(@CA;b?|kvv`Q zPD5*K90bz>II)_9y5GF#N-Jwl6-BV*Lzmt&7p50dT+bN^i+26>j@yr-SC9S@IIa zPrE+uo=s!Ol%o#(hgmoYWWRxCz6nn4x*=lV-^wnlpPD)iSQr!yogFlm^Z9^N_f^>q zY*wHoeZcZ1$WT$<*P9m;G(s5)lg4hCOc-POz0>7p>oa>l`C5b{?%vnH?{_2T&boKM zgc}?YGHm?amcVQEV;)6W8|nq=Lak#qia8@4EjJc0C9U8sOyC!XlV@^@3pFRQ!5+#Am0EOv_qhN3ihDMca6OvlsrM;53(?wnW=rd97JAF%)Eusl0P`gS}drtdXdhOU!BV0o&Al;KN5&bRM7HhHTU zsqqYY`90us23sh=ifKOZDCAr6TyW#RyB@rxJ@B>*B&G;}73?VnSpzApiMh8DZ;bbQ zoToY}4Sa)&%t#9vkk=vJIV;3N)mi`C&%YCU@nTv&Zvpx9&FSdX)Ap#^@dPbE*|cup zN(E*2ob+e$zE2K0)HGu_2T}u3oR4B~%H##W5TT09bSv2lIwoJ=ui3c%R$OAk8AocC zIsWx-kM(;KH(wdaM$b-rSS3wVN+=A69a*EyYP`+&sNtNH=@ZMMO~*g{v_alX=NwSZ zdOF^C;-&@W7&UhmDzP@->9auAk4E2W3Q1;p+IF57q&0h$%W7*8Qpi57IzJgQtFi!) zbtV0IH_C~sq>Gub^<64m&qgWTM&(5o(JxYYQ|$U#sw|w=GB^He%D{dU-^YnmYO4rw zkX6j&{@7zFMV(CZtJUvO6_e&b=V;s|l5Z?zpU&dMEX4~QTH^P<`QnEiIuex zhu&>PJfgl3j%a0NV%)v=qsFA^+%B1Z#1oT~<8m;57hbo$(8r;f;j5sQR6D-$042;u z*di=^(SQ(1-Vz&;;DR7}0K6Ia|{b)v#;4m?kHFV40gCjczK0z9YHzb5vJ zym^4w^G?O@4@Pps>Q94<$@v^pb6FJVJ&#E;7iHn0zdDK5)qL^K^tW?j$IDV-0GZ2Zo$R zyi)3_fG?Xc^l9hbU%wmt`W9`dYo^<;5n`voHnV`_mV3eMcn|?Tt5#bRP^97;UF&B3 z*Z#r`$t2!1`f6{}^JjAopgt|;t0;m4uMa%)Xkw7m@!YcBG7HUu@yT5K_jSjA`wvxB zs3JK}OWF~Td~*Z#ns(PiJ-d-B;`a*t9un5GQ1QT0C0B81(m#8_Rd7EpY$l4RUi1IO zd%;`Y7=WXV;;z2ZUlaZ>EaG1u`ak>^`3;oq@Z0-n+-+O&lUaD*bX*AX{u|Je(b0>FI56-ESg2rOmrZ$Hn!8nAzVL)bM4c*}QL zFdD?yidg>J5B9HbfIC2O1L*tz58!_p+yAcyFiNK&9;nHqA|sKfQO<<_bP)sOn9X(e zV+I0%>)DfUP%PS(=hcqN??ew{D(U-_%^saw5%Af994k(3E$bKIWelwSi>J2BH3mBC)*_>yoI0x zz^F6XHs>+9=+?kTa`P8JlV&D&@cq4p#y{P!S=5=S%oK1tZ#M@3?<@0=xiKOCcz04j z!oyPZa37?9xq#I)3{jRB4`{^9I#&OK5o%?2UH--q>=BNJ<2I~IZ7P4PlUsnw9A8lO zKiJVNku?wIqfMIwWNGz27!!pf%2^Q_A+F>+{cB2=N2#y>+e!D=Hz-^shjZ+cfj?&X zGyGCEYUTSI4zR`)bK3p~+kJ>4GhhV?STxZ;DYSkYiV8O*C*V}9fm46Wy~RdOP97}f 
zkc|4Lqr{G+fVqs-uz))jL{t@Sv|Mn17!aL=Du?~!(ZVB!CAI)!XQ)wnXB}AVk(^~j z;B?^APX1Rt`(Jj-|M1_bE8sNpo2A}+1CYF9fw_OWbmA2Zap!b#ec%2^U0~%Zy6`_( zG4$YfLOVtH)N;@)e05Pz|Du`w>!kbJpA@XD4$f-jpYOkL-N+6Rz4S2w4_-W0r50c$ zqq4{NgYB4fEotXcU-&ibZwFD;U>Snf=n~YnVeiM=*>?})T?DWE`|{;)YJoo=5Hlzw zDC$YCVpOdez6|U}x4h!Tw=KSMbL3*Vy<*vkiv1j7i4Q$-HxfeQhEmQxm zvh(lDhXgqI;0dZXR|f!AnmENpJjPyT)puiAvLcaQ`jLX=@EO^zf} z2J9aUhr$dS_(o0b&p{TrQ-?b$RYiouui9_ItwXS&XmgHqw2KUKPn z8-qfb4M#L|UmF#}q$F9cVL_|~FqKyXtv3&7*R{$X9q76-kDdV?R5!R-er?el@D0$f z15;XM6xp+yo0$Sn%4L98tYYy?JzU5A_pcF916pz=bnSCvROc9a26m!aqTlk)Jw3bs z_-#FrHW!Hm@{Sz6QW1z``@nJrWF!f|!W@8Onz?{%>2{Ud&E^zX59RtF?3 zcMQ`pra$EXy~JU#FiH8w4Mdf0UT$|1x)~3c`*X#~Y)1lKj|%X*Wd;fx#4hJMe^Qu> za#Wf;+k?G1kJ?8Zs1h@3MS7*XD=;8`T z+{%6BonlVDB(rzt<$IgQ1`h4~I;Sfa=LQy-B23;XH8oQ4)OhNcavhJWldPMYjs_fV z+tvc&5zc}|1b~fiz~gki(l>g-xe1g?-QYI$HTck#(K8Ys!ajMOrgTNI4bT6If~&ub z7jm|I>&9&9VUkxYF;`{!iNiZu@_Bple^|zU%;%`bpd!^bSf6M#iDi`K0hpQoxpsyO zH*oPy0%;|m045Eiq&Sk_>jkEegrCx+-sP&l_(Gk|sd@Rm8cxZP*xd@Tg~dZo|gj~6}*q6*&mYe$% zS@U62EOFH-mU=Aq!SGv|KW+;M{oR~mcYxl~DkuY5L0r@z2+6p)w=pRQST-b|3F28* z5Zpk*{hUWn?Ns~fG!#@C%P#s9oG`~m|5DQiKVc2yRTELs*L{2siB6|Y@`O|`D*gbH zX!e`2^m6a_fDc%LoWIYlbm{lQhk${{Z2u+g8GvMODR=Y|Q(J@~y5g$ZdKzSvRaCfx zO@a9j!9LM_D9qiU)I16J82KcQ@%K)*M{t0M5J)C^-?m~hQ?BI}y?lVFaLCpEpHmlk zEk<2R_iGn35`mPnYd~9`AIPbzm>&I%=-(PCzq2B}T~(?R)Z7ri4_w9zo6}d?#h$MA zE!QcVYePb`_&a_-|H#dF=$g)@4lp^-`$v7|mjbSoSTr-P?&D8p0}@O(D1mapXn(78 z(Dl_BBNK*plHL5Y{EB7hsyNpdSjB=Cr%?fAwCGKmzuEt$HJHDWP=Cpk7X-e-z?{-Q?|#lbYxj($s=-Z43ta6k z3E#J7LodJo9JTg*?2G+jLEzY4y8}_tcdHZztBD405swv&tu+(nI^6Kn88Grx10c;{ zRdnTP>-}>8Kk@*twghm2`sNj=7!trW(B~MzrUK3f&PLzYfg7z2Fk&r{DJTNkmu;Ma z4yZl!d{eGDu!3E}!!{6zA8dhJuH(0NK%&cY4&MHK3uJWwEYoZbc(>h#)&f7$E-+r- zg`Yq5g>Y)bP;t-jXy3O?VC)p)F6m=PZ0gzp#-q1vY;v^<c`TY5uH73!Gkduqnzlunxqvb1 zGjO-q1-9DkF!HPCIAR2VgNZYoT$^f2c0B<37Xaj+xaNV*!^0`s`{GkZSz}y+_aR`L zb^Ia%&d9(h+(ho-A~1Q@T{{4%AI8KreYikJ>GVF}7tQWaXY?^2we0%?23Mh^OpM-} z+baHnmf4>fHygxkx{!480S&8{Bd%5g71cHna7hN_qaQyyM_0R;9fIVTUmqUGhr!dN 
z+>~P1@-rilD+xj39KKEPPtTtwQ$rC+;s5zuP@pC6pZ!8SJ_lDvBl~r_fMa)Iu*buY z7MV}Lg!N4n=rItVfWo>&|JaIo@&$FOo%ez`;g@m>3QDE25xilFgHoc2-b$Y<_^Vj= zhbaZ$9H3jb0H%Dk;zq&sS#0 zWiFv2n$@zBlE8=D8wgoVmwPU1{A#O%!r+gxp`+2_z(ZG)cs9+Y1nU*Cgjck>2ARQ+ z_htV|SW_~H8_vyy>Dz|Ee2DlvhM5F+2W791OOiZiR` zu8$54$^t&|E;Mo{)3yQHpsnKq_93zQTt~GamPqY@d@YN53%F%*x5mlkm9j)CPSN0U zn2zFqa>0;V)&y%b?fm8C_#}h}j93UOrQw^+-#w4zZU*A~5A4K&d*suji5iu&K%(Ju zaQ+N>4h7IdFN_l!j&6VW7Tw6V-pLe9Vt>DD+6>UkBAY)}*fbN*#XgC5r{mV&yuvJ+ z@E=cn1zJki6!lw?1H=VrNaz>Caz`>G7XYSpdGw2yj4wK9Q)V7x{aRs{^v6N%D{QP+ zHq~GIaM6pJ7Zku3ud^ix1zUo0I!}u?d|Sns^Hb?9*qPBn-#+~a$BPovXwU8GdpfOY zK?GNeEjxMcV((1?`1%RM^&v5Ck|G55-XNsJpV9xfj#z6Pv2Srp^J0eYbCO^%^5bGw zF~_Y~;X9^9^};_cy5YM#!Os2q{wC4*r9Uv$YF+Wl9c%^n0i9O!c)aZ_k8#`s$7=6) zD!xDsN32Js|1j>dn3{9@3JPpY-^I_C^d?6>s(DvlLe=1K`lL7!Fz%&%n#>nNGHjY^ zxg5o0Im=kOf?mUAy-B+%a1vv!h5h{ePXU3)crFxWGv?Z{q12<9BDQKAtgR~nk$7KA z%bsWRGaa^n*8O=WG`J~9k`P61s8B|v`wRLEPx+$PdeAPuP{ex(OeS9;D{@Lu@OU3K zM9vIr@d$Z@I?2I@Ju7$z>Yf$4T!4lOI1w#~Mx9=6^MtM`2N*6YFx^#b1q|U*KE@?! 
zOK#W@4bqZ$`u(uE?qcj>1z>>Gjp7F@0?u+<_Rn znxVBEcEbo-K!5)nx^b~7(e-|X*Os-dGtApkaSouHIs&|=Nk&&jkBDbBQ$d2S&W=#E zQdrCMxXy!VjmT?{dm|;F%OX*80%?2`pY2=!u7t@}Z%Mm;t4p5{QxK#V01#8pbTZB~ z%c?7RK}xoR^!jAh6~y>+dDjueG$&J`WTwt!zX`(;!JMDB+g>X85mSrgbzePvc&V%d z{=#;nb^L+sO5K$_s!z97Tm8a0JU(M8_WRAH0=LqBZYiv>(_}*f+U9Zd|W;;B0j>~}NBhA9osI(TLf+S*r(p~qbYZM*{0USFO zZo(*P`HepDCmc{}e--_*N@ire1~YW(w}wz#oqB^HA(a?*_j=b0l;t(@mTi|fS9*hv zVSK0&uI}DjcEz-vx$o&*1H-!T73#1-t4cqlhiXk6d` zH#vHb(yG1&mSLxE{o3kciz`^Autr2u;I7Pg2opEJiBuyU8x^<)?XWNhjFfc^*Ndxo zFEGfxyCjUrKucB&B3+CS;Mxa=bHjEnhYUsomr(r#BPcI}B{yP*Tt)#{c;1Y<@LL#u z!SVxsvf^fXcq%VLPEO7om<0ybL>)MjWIcfy79jiS!1g#%yW&E#idVl*BA|uX37~eM7Q~=vs$EBVJ@3$-{xdRBkpbGou1+AsF= zU0Jr;{ixdns_NR}EiXXG-^J%F@5K`);4CHIG_iQ^sJBxlQcuI@)nqP>Luk-{t!4@~%ofTd2AY361bc*|s85X;stTx7}k3e$3@_D-pSdk1`?Tl?T z4(KcOG$5&3*9bz49)Wak*t+V}45Uv%%<;R>JC&d2Ac%*8XBv;TMI~JJE(FOZ(Gn6$ z*HhKTgAb#*kgYQHyZp+76XtQbF#XaF3#W?tr|~^M*k($h$$y7kArt->RNfu;t7dhT zV}QQmsFxi}@>=`CXnRD(@=f}pOS8D6!NLY#cl~QSm(6k2>4VwF=S6Tf)`VNxZM7U0 z!Aq19!49@F+#E<$c$(2eZgu`+zHYePRkPwJV-{NhZw$5+KP7JsMLXt7JzA*gF%~vi z;LBuNtu7UPN32ZJsm(|hnG}vgUFY$NFH{rN3~-xN6-P6Q-^0rM?pGS}Tug>TO0MAI zC49x--@nA+l}fgL3DkL0Uvr@A@|k|yQ6}j^iDZ{9!m-yLJHbUkPp#Jk8lcWkHiof5 zzK}@7=CS`m58WGqyL+2gkOGSz~|itSn>SozD!wiktA*| zdr3zVl^+cBwVe0!7$Y&3=~F;l4jPlA8e3r%ZjeLhL2NQc@#E1|Mu(FGaorIR7pMZ+*!L2y>^9I7CuN})zh>r%5t@}sERqJ_0#TwEVNfdTO|t!( zG7jq|V_=ppG^?-bPJt_@*$J7cw1|c428mI_w1+A9M5b71C!2xTx~TGmQW{3O{v<(t z>(0Ha?pg`_Zse+|I_%nW{N4N|_+x*E*{FVpQ8c0z_Zd$N@1QRy8XImE8f31_wjOmv zn_{nj47||8B&UG2-f_3$TcDxIr+8sA z=4kcjwlGuyx)|;Dqs~6&N~STjW4BV$s(NpIq59}|hw<3ubdnc>U2PsQwjV2AR@l5f z4%=QMi1{4b_|yG%vpi)Cx^*DhA?R!3Q$V}bvzy*ZjUJqZ1HVu-=gMI08riq@=vGiu z^JCF_;!Hg~f<_e`uh76w!5mp*AQTG32=E`kx9Uw8xt;f(GvKiT+ngjP7GL0@c53ju z!v!2c4Ym#&_?6UcZJ@a;jJl)2?0e3Z6P_xnz0~i1S~SXl7mO|?SOcgA`<>1^lUBiR=-VO`{@+<$a^`pnXDfd z%$1CE72t-KO``ZzvQY|_70Ca5zW`^0-nvm>xseFEUvAN9aCty}(y1U$*p5$!>kUCj zM*;|pA|YI}5`*x%-65gkVVBKD4ezzve9o;cH@hsyRC?~pzkGUlU2ajGhYre(>53N! 
zO{+)xAKL$*n16lajpOb7dknP!rzt(>p22)Z%^E{4Mhq7p9#t;$t0NZYN!1I=(oqES zZIF0N3Sh>v{naa7)>q$Bj0lnp6M`^NQ*!{jtJ}P<>)n@GL40jwG8sAcAncRRoTB+$ zJuryT8^vxfN4aa4FhMymHLlYk543{`rzc>Pr)3NAT-uB{@S`r@54d6%HfD2THmk{8 zMMiT_z-wY2SlQfOy!4YP(CCmC)9>7&eA=I_G@i2fR1Z>pVMxo=>q9;ovKCe#VpOq| z7?DRp&d(kqGz&ULy?hB^Kl5T9M!j_|pVu7@m|1GTBfPiW(H9WeD(-$|oj+=?r2huw zUAT@ClLEj356Apr&~x|9k4 z`qn98zF)s4F4h9VFdW5n!n7}VS=71TiXzhjaM7nkE`tx&Z5Qmibjakk&Ot6$xgIxC zqs2l>!(FLAXv(2qcBrjERbiOSTtO{}{XOrsYGr==*9Yk;bG_o49Nuc@L{9fjn^|5p zW7(Yy?k8a0=zgx)rF)^FTn-L`QzKr)zpa;0AOF0szG%l_`w>uW_U8k;>fSnW z@FdS?eM;nbA@ZU7Gd?v_{87@%t;KRP+pWTIDjq{wTdfvgT4QGUsfAHPeitj*I2(H% zL*bmll$eFR;DNwZW~hodxg%BcfJ56P<FRUPw_~=p_%T9UAyx~F z3#+|C0ViI|AlWh+A(jojJb$0*!sTnKwO2u7TAflL-e+{b-4?Q5T4QjBb<{#1Bsh## z-xusM0SeidLRKLz$w$|?W~OPNA+)|h$M`b5Ez&?o(`sS9u&-QiDYP0>v`Sd)za9De~I3D?XT!zh2tT+NO(K^*bsYAbxcxH%pD*tG!hSx5bw= z%0xE?50(k}n*#3$Zqu4F#~UEMVh)(rI0D{cuDwq7uQlz0roi2&oZEIk?Y}=VJ=35S z)jje!zfCQ?zfsCzve)CrE%0^WVkJ6zqo~oeSS$TH$H4hZ?uUF|+a9m2z75Zoc=4F? z-LESzczmj5(94Pr=<*DkFRn%RCm1G@QQ_y5*SIWq=DWFYk$a-A;`EkqNf-~>gZLIc z--ll*yUYYipnOIbWRK9JUxqJy2{j}QiVbxb9y=W3f*9_|MdCt9-+ud8jfU*reMZ11 zZ_}4C?tzf>tYPR+tj-1(CDmk>ITj>0r%?DbJUy1tx`bqme84X5w*L$nZ(ukAS^k7x z>b8;Xg9lH8#9KoW20UVtPJrpk3s8>nsB4XK1UEPIY~1pLQv;_+!7_78LK&sjx>gXq z_8lhAdE}vR0_gm?fQbk-Kmw!auMywvfxnmyKnxvyL7Syk3<(ac(%8ZUSr+FmJ#p>D zG208wTzS!1K@_2FNtb{_Yy%6R7dMTHpHE$fRbO`9fRZQBX$(_Oz&U#w#z;xQJwiyk z)N@C`u6XVf>ii#A1i@1jv=S~2#)fx?>b!SojH-SyiSmp-+Fj9mZ796I708DQoDN?i zVR~h2r>u$uQ-J{6HIx^uIa(GFbc_F>bpr6JeoZ!80L`8aC-I25GBGVi5RiR<8%+`7+P{k4?wg6z3=4208 z?rnS?mi=a(vjc3W&_&zjWGq)q6`8SzmTL>RzLRriP;ph;k7W1V%Cnv^XNxP1d*!^1X$qm)2a2byqw-WD3lQ@EsoMzWIoHke*rL`;6n5oApIZBGF- zTM*^)$L1e|CCH(HeNPv@bL0KnbIX}ykcoD?e!-(2gr9lzI+n+9cWl0RaVQ%_mJQW{ zuIn-*+TH=diBm2>grQqpZ9w4xn}IS+=51y1ueA5G9_>{S)43U572_Xpjf zEx?sNi$KH3ke8H|m9Kgll_7$i=tJ220zHCW6Zi>|GuyQ6!<@q;r+50~4RYA%2VAVm z-r_gy*CZodwY>E2!1)0&BuqaL_DT#~afXVl{)bvDxSvhPdGrD}08A9F4ApyjzA@Ms zsoIA1G7S@AocYj|UOhY|(%jz}{H-Rxm6+1x+giuxT 
zzK#qrV3zT;*Uxm0V!Fu5JnLm6>B~dLz{v=BC`04Pqi}(9N z{o&-6&aEyYjFg#JJP$;fW6-3BMouSK)4Uty2vhBmSF#6Y-zA(Q`Gi`dkziOWy8#5N z=3jF$=trQo73ko%`<2Mvw4YUM8x-KJ-Ykm@!y6{6AzXt{9$@?TsQ|x%9Typ!`T-K? z^>Q*%frQ%itx^~S6092}IRz@fr;p&t0#iq=x_3bav~7ZOBn9izwY))C$Fwi8fysgx zUJ*%G5&qb{hJ%?H7Xehy2K-xV9YGkv>xO7(RYu6h*SSxLy-H)G*7r`CQWELIgb_h<aoT)EWxG&gyQ?(f0Vm2%!SnVmnog5V z>DQb`JNN~jd;U(liK-T!IXT>sSj!>G;xIgnm?}cE;OQNk!NL*iOII*`W{ zWArW$XTXpEwC&XP=b^+l8kH+DlI9?jY+eAmt9We-M2Xd!l1xjQ3iFxOVKmbMK)h_^ z0|JfT)*mF>z~7bR!7!}ogul#E6{OBwJTwehjP`9$lX_?hB{&hcPVxcM_cbtw<5>ni z8ti6u?v5tf6h93zS4CJH`|UPc2S2sNX|49=n=8gWYE5gBJ@`pfCbtP9td%JX0AOyu z)fVk5deF{ORh2sbv)XDNqK~wcYE3Mg2k5xI3ng?{noG? z6=jDGO0*3Ec_6yAn!&G;8Y8LG05ttv2%piobXAoo23GTUvoTa90T;?skyARIF$(= zDQiHu8hC@-fIpLp??I}?;=t;Tc!kHYbdrR2Nl1x^h+P-ECk-#}>y1Jl6-1KjhtA=T z%kQ}6#om|K))xSj|Hm2B|7Rrudx^)HEZj=}q8~uHPWO0^SaWKg zzl`_pj6Kz_^}VQk1z4BZJx!OmsI0?cK?q|+`d~qT?u{YZqnmaZ)%&Rr<%oZv5!A(> zt`>h&&RQj^3y55O@eC+i;e_f1cstjVO!R^ILcNR58;$G`bM63uInGxQ$*oOm-PHcl zsm~Y2iaq#^Zdpz| zbE}?!th(c4Y;(gC){}{A;fT>esObZs9nv)lY7tvvzvfzq)F+E4^Iu%sr}mzjf$1Y# zhFvbUzG3W8^nzS6Msjy_c!PRBIi)U`2bbA-8wAa3tBA-!aQx|-2fKAWsdaj1=9TI$ z%2E;|A}D4(N9t$CjNEpok84bZUB9^T?hT6X66}X!+bEwJceRs!ES*$BP+PA+{tz7h z@4EwY@Fpud|4Zl)YwduWqkA&nOWTPC6PMew!1W`7KNv)-n~=zK*1d1G?KS|Qu)+8Y z${SE4k;@Fm7}pIr_<~#O;fM)xl4SI>&v%bE-zo}~4bPL~&N}bRE*(wNTn6~T+gD=< zs$c*StAU=+Pk@v)9K_unrwNwZNCHN0MNsfg@K0!cc!aB*Ht2;VGvgZ9Tt7jE=^v4B z0vWKjA#?&xO}Z}2pZM`@GUd71W?{M1w$1aKnY{eoBzAi|vJ?*4ZKwO<$q3sGokM0* zNk5zB9l!eV~bGD z(|FmoKo8h2&q{L8t&MDdZG-=MG^4vkx8Lh|;C4cqByi3!S$<&Q4o94|UjboD7r>2Q z?Jmwu^PC{u@Jk0DCdihgCbOkouvn#~kwED#1<=+A9lz+UxOb+Nsg>op#(I0hZNp6L zZgyL$py#t_TPKyGqT`mKiaWzMKE23~ci;G}S2-&^GrH@P!0L@?Tpi4R*$J3I&pBeB zM3&_~6XLKUv3psi6kFevca!C8PEoEd0FU7Rcyt#iAV?V@JT(e zKO%eyEbx1_03!c&_+Au`?3mpcNJjZA_TgY__B9Y{M9rK~yhB5Vl$gIbOf=Nu_$xZU z4XkCOM#2!jRhiG-l#-+8e{|+s_D**5Iy-qe>#>Mux8=Ppdc6_9uXp7JX$s4m+qn0Am9R}r5gz)R3s#% zJES{ABn9afkVX(mDMh4Pq!&o%H6wniCaqJoLBAP-j1*9XuYX zikX^8G8-sQ5;*EhTj=mQA2$}Ncu7!U#x_5vNTcfkaCF>EB1L%y%LA5Z+sEC6q7^ucUJj 
z|7rHL(cQ)DaJ$p+t$Dc$3h2bX?K^~xM?NmvLYdLhpV4YMQM;4x-A-r84VF~)d_5*M zJImJ-4q^wsq3z*=fPA9+mRt)c?&nVT%iRuwFf^vWV%s&-L; z<%_z|_yo2M54B}YcgYwwh2mMh`wiUJr*iYElrk{5d3y9VUmRaMV;d+O>_%TK_D#?>k#~;l3Sn@Ya;)!F248{K8cF@My6byxcWXwn(`}!xHu~AN4!o%Mx6X zH{3hd`h8m6vJ^_RSvWbnc<4l{WfC+Js7tNz$UW2cDin{+{3R7_54Kop+!q95-Xm_V zxMaF69C2r~kZHf3l(o9my1goPx7@sDXKZJ|Uwmu0T*85*S~+WD)bqV##JT&MMWaF* zj)?tIi?iq2^^@MYMN{ola^J?vSJ)##kjYuP74zRGXTf0r`{|`UgeiTU0hMr0(5)&M z&253F@TXLM;_Q7WB%$^ci5V~d;c|B32xyn@?Iwt&ZB)pcOh<)0VAp~Mmfqpc3Ll^; zv;zR#75j~IxOV;AWj8>3kOZMnCe}sP3J-PFAMrpHETo3z6^jU%7L*AG`j~YHDW3kU z|Fr~RlM%9iteCA!YiA}Z7clE<_yvo^WdKzbxFZ9R&eoN zyUx8phf7(K>%MlITX#m>Y$KI31K^H-#0fb67oLvfx({sbH-{%IFLvA9&$h90CFiZS z)RLd164bS|vufznJ}3^mJglJ0L3QgWB~=ed~l}O!uHaf zz{7Py0h@tq@#_obYgy*;hfj{mSrnh1ZQ9%YSbZFyvk;mx-#fc7|Fg_*t9!e|Y-m)! zkvzp8kDb}A%3y19rGdq%*S*|*2XRTHI7eYyWp!6=aPWytLYS^u+p9YQ?ut&D8}`bL z9D7qoFGs~4Ow$~eClOY|1xH!eoTqhnUe}w<*=bTp6FFL5k6BC}FGsu$!0XUBYh)dq zKD8)Zj9V$U?NmH){3NXDRkTbf7y8ypv*zjvl=rMR|G`fBf44sBFlWRNG92PeNZ z_x|qhA>`fNKL=VH6lqU8tE|SXbt5Y&x((jv-op;90MFAR?#>t8BP{Y6E!})g+}gzk z(a>@11Pk3OgknfZ*}ZlY6ai+vT2}X zE^Fo?Jvhf5dVWYjGto_cI4;vAwCkPOm*0KAZxhLHQY5qMIJ>v-Z%puOaA-oF)Wog# zYBt(Q{)BTU>vr}F)0NYra@&aqx(||6#f_C{en#;sxpKTNDK&WSa9daJJ>%7}`nQ0p zoMZb9NR$*N4pPePuRGbWI~cs)bdSR$P-C!o@x*W;TrL-l^8EdycN$uxArsH49o#LV zIPJewT_1F^6|J1uIpK>N!>TAhZN1qMsKj+n3rBpNA3=LK?0T2#T4wC%`nYv28HYwo zJPYS?t;5=c+AjTQ(X1FQIkrO@yRP7rYa{)ZnKp~9x27L4re|y%hgxmd>WyuWk@sem z8hJYUdJX&0ArjA)bDvIDX&H-TAd>Wo-+iY#`$E*s(ne;cl(8sNJ*oaqWu{n^OSbOD z;roS4M<+W^4CFj&T|ob~ zGyV2SJ!)M5B#I622o+<&*Q^fX-r3L}WfE5_;=Ye%55NP3?HE80)ReZ+2!6x^y`ShY z>$=b6X;IXReQBuH7oId8oATsq0~t3w3T}1~QxB?s%c7Nm;SCC`h{HYlY1P6~vu>?R zV$P2`g30=y};?!yU;D1KVA(6QNC9Jo-f#UQ^GYl{@pUkCv|IB z<6tgrY9W1LO5|H!{T%&kv3TE6{uGDYxG24l<(l;S=WF>iW!$t+N6YW7N#P&| z-27O!&Srfde>*u$TzbgtmD;lPIEPR3@~^(b&)sItl(Z9t{Kq#Rg2#x+$El`rN>wxrTOu}D4XGRJSnaq6m$>S;xn z3Dd*vOTFhC!oZO;V_~@=G%>GLLU(Vy!E7+A)0z$bARWw8cdLG{GAS1a%r))xZ$-&) 
zG_qcH!$Fr%`Z7~}he9JwK2o6>&KG;F`(CqOWt1&0i@d>|?JTlIg*Le$7bsD(+S8kQJ3d6{My8^~=4SiV{bdKGS*dSs`2ulHlz9EG!qguVb(^Oy z-si2Wnk3mcu)7!CX4i4SMstfZv?_O+;~4+}7fz}VYjh>O&9HQ2bG4SlUDg632Fg=K zoa85$_On7ZCd>-wyP}PB)q^GbP86+}k-?xNO`@rJ83EcuU(X-3wBHW^zV6IDz$d{PGOr^oCLx|E2Sx$l@(q?$}W ze=_wUMgAR#Gxe_*iQ8#!;^liB0%h$?D<<5lyZ-twv(6^3&S%L% zlGS1ermRO957a6YmAcgs_c>CVULH5vML1l3X0K_MR(;szVYb}y!$Y6D`+`?jW@V^p z`{bHk>)|4q-FSjjeta>bW0xLNV2mbzIn7!h!5t`1yQ&*v|%) zVk4(UC)Wp20-AU(YbTduZfUzJ%Z<-Mg#4UXC)-wDIUeCMnsn`iD@oif^G)};`H^~$ z@a=>86iFO@*M7wn!J1=Xvv)nOfBChGDe!F0=00oCh~H$<*nfhM(5-)3)hz!}l&4Oi zj44}VOl4`d$)Zzx?#RvIL^8v7`SR%>#wF+_sEjIXDcOd#BmK5t3Zd(z3zo~^i?Snd zJvIQtN19KApOIeJRuZJ}xu9ts3*I96(i0QDFM{wbJDwcj@K0 zx*MO{DDU;#irVOM%1h!{r zjEJeorg&7PSzy!i5gnC({#OSg{$`ck8sgI70vo4t8t;_+?$dRbL&6mXYM~;H$8!p~ z{f%S4wx-IY>}3duF)$2zls@Xv^Wu`5H><&ZGOU+Vi+g z=?)J11l*C*n!U5vr{n{}uIgh2+XtBFNj?SjHkoGW)~PFrE@G`htz(^;niu9{wI$-Q z<5xuACO2#m9`wI@C;9W2IeksZ)BEiX>WNB;%8kpcj@(V(P7GJpUSdbT%bh01C;V&t z%8Y^PdjV|DRAY+8Za&?qZ%yQvX-oobe!)>~bW|l9eamhXz!(>^p z8sQnn9X9(gI5nTVVN-n3d3EN;mTZH0J_kV4qN`HU%2Usk$VIHW%=>-w(@5pgF6r2w zYVUrhJ5}?F#t0Gc;f-kbvR9(~+pwf;r$rlPtI=dFHuJNCH|`QHartS*^ac~n`2RX& zfSCg_8C5?1{L!1jwl+FpfLwabwENu$^GH7B<{lJ~(OJ#=I%lV8ojpkvD7`YZl1ab8 zx1YxnsDsE%>;vWWa*2c*$eIILuV*082HGrDl_icFpN_oya#<`giiE{RTY~*miwZPT zglxW>BuPcYxF6Ve+FS@c(AUW9qr}N*5jCo3^D5G1o-jO-zO3ch5uH?x2rx=ij9!0O ztFR{Tn0;pHLDP+34#1-uK>h_hmSkd>4p%|XaamY)OuAu}eB5B0qZ26+!N`;(8}G)H z-Fu~tyIQW3_KS1KwhV}B?b3Z`oj7cK98790ghzTz$(w4wrp>9XFzK8Zs$WT6T^!>n zHCi{@BUV~!ySh*8$ld6V$Hk+qA=R|{BcI+kI5y{0eKy$Pv0&~laNSy3|9lxGQ*IJ= z0R!WtH!=(`76~rDJR{XFWh5LxQ6=Ac5oJL&Mh8yRQ{O$@5V65mow|p~Xn` zySh8a>@ct3w$j^ipp`nAYoyOAy#2G|vL;>Wpl0-pk(2f}3P`Tx=wpVbY_bL$&z>7` z=oZ#<(NT}4+)3!sHB;H6pNJ4yBjd2*kf-{WhB4}|47fzi9|c%n&XK^hmeXpikvfqH z!Ml;bqrZ@3ks7I9=Xw0hmyE|%=5-fNPS5>1Pfmc{Ia%64$~8Y1DPb3SVSyHp98u#m zn_&eMWX!K<c|&n;66R$yz*LrkoKlQ zMTzgyPX<{E`gADmJL$gbYZP*ObmBOI7FKO~~d)=P0-} zokX&VhydmrY`*ap>~1 z=((sHhht<&2a^cc%@cJ3fSX1h-&zY$6WyX8EMaqfsPo3hVZ;D|-1_VDw7R<+Buj@| 
z?Sr{bq}nTfecNfxy3-_EV&f`TgjZ>1T;LgD#wfs1(JXleHJLo;oKRna;T=j6WAucSz6d^n zcy77kAHp#XAI8p#1QSrwHVX!zyBWXqh3zQ*#<-vr^zhZ*Dsn_e2ehthz#XXL4b}JK z&G0uT3wYe^8Eewy&Ult`ti13!>3U+l^}*Zc zBrt#QWedei-nEW7l^}?#)c+>CtdSQ#wfYEOfGHvJu>Jdmrq6Tj${o!$iW0{KqVHO|wGIceNbgU-NoF(B zz{{~+E^2-kj|gbCHRbq!U95kQ(1?1mD0mIHp;776{G^nR(Q5RXfU%@eo>ox^-^fcL zEJ)3WFj9zmSmFPQqlH{wQH@-7m!tdT4>z9IxbKwTj6W(6W7rVijF@laNnH6lNpl== z++ZE-;8If{rnY0EeH;_6U^X4;XS(NepR?F_5@B~T>L_A-kaw3|ZJNMkBhM-ONwEHn z-ZYAPG2&CZ5$38h)0=(*j|PpCs@;pbE=0GiX`KRQJl8|ntL1)ff76*HxI@%s$;ZnZ zy5`lyal7O+dksT0F&lqx1HEDSe2u)q2$u&$yqh+SBYL2sV15wcsstQ8>P7RFWH(@X z&lyl$K#lx;j8R1D?;lAU#qr2V8VSE+{9#qVFX9NH-%2E2bzR%J^BVM)18>C~`LIC) ze>$oBBa(#(ig1eraVu;N)8LC$>!VD=nZ3yHl2 z$5&5qEiy0)FTArFGfXjTk66nFVySh`;h2q}>GId5gr>Y}TL+7n6A>E)_I8?7x zJa~}I#>9j`PN^gBIqXkQ@6-0hiO0@& z$4|*O>1JB%kn-M@H4pDdWVg#%z4cC}_r7pW#Mgq;sQK8wyPy3g*If)tjnSby@Jg-0^?LhLuain?uvdIjC~5$3PB7`P z(sg3za9keeLQ!101x;F{yJuT3qev;t_CL6{jyAkH8a<9QUICLf&mOSvVO)J3(QnLv zP-_QxAoBAo#88eW0Dn>+5iV5h@bV|HmR=R=Rdy}lVJ>9QDoad!RKl{|UDZ99k6q`q zDKWfd(C%)PTwrJUaB0x2+-`w$eo*a>wjCCNHNgFNiSLvZFeF z%&Hh}LNc8@Tyop^Yw%jTPY_(uVVdWclF<@}v zaJ!~_Az?c~)?~49?Pk}SVCD5uPnnF&a*sZt#BQ8wtWN|6!sDu?xlbn=G>Z-GQ=>Wb za$mj{chieV6@DbhI+8qr5|fHXRUeh~#6OJ&yz6P&@{R0W)oDhgO-s{Biy)6KWF>uXGEoxl??ZFSt` z%dMa!R+)&^!G)|6jen-tryn$p1hL?1>^Lz4{}Hwu8^u__1aJByD*Zrsp7is8~O ze(5q)(8RS-ul|{8L++A@T^NZN11SxoR{v5c-z!;Q!0s?s!@U@YYR>-Bx9}n5kudVn z&3P<2X~RzG48*k*PlDe{y#KmC87`yZ;VUJuG8uT>oo*n*9(QPO$1LmeJ})|9^m z;OHoD)5|{r1;DP=5&I!ecoBR)T=I*2xN-|{EC$|q@wn?fh?Lu15-v?!(v%EESHWEa zBCq7o!O7kk_*5#yFJd_(fHs%@MEmR+`Jo9+G6VN~&gHX*?YsPHC#B&!s z)fRy`Ufes^oT7qmCbmW`Cajop6JImSZlO9Gnn7Jh=wf<3DWXOx0M-owTIKz%e^P!a9tz?_ElaB&K=LISrCOQnTh=AF>o=g}h0NPZEw-9lnR#zZ(m zM&F9{UZKx>*#11`mfQ16&?rTc@IzQrZ}wL7kMw54$>LCNjPdrSyn9I-5dePYmrXc% zFVo=}5`5e6Ji*Q-q5$xUx(R82m1u_V4G&BwB%j{9(hu2;uWpbc>?Pf!ndKf_*GkC< zz>BEoNe3TU+W3ySlRe9Ve&SlXmM456G;=PA zKKE^|XjvPpR*FL^_Sd@z%B6{?xb%+nqB4u6!G*2mg2xy8@ETIcD@3L;?{F=&fm%@z 
z`RW&MbUzK}7w}uMAd8_{MZwv;f?iLf^Cg_RI*bVaHsd9LPw>LnPN?EOE_n|kfz~TjE2^b&Vn@+DK@rIMw=*2eLfy{O&% zBavsbHY~Hb>NyEyyaqZhn*103>i1m4pj5t<=(Rx-&_rtd{k&J2NkrtImDFCt_WmG3 z-P8l9FTR|3zpZFkS>nE-%}@6GN1M*P3k@4!q*;?0HV0-ISHtIfYlR*Unu<95Bz%#I zPBR3XG=F=2LfdR46wl!R`a_)%Xxo-W0Kf^DPIgee|8{4HV&*brSS7oVMVmV_@%!4H zCB9=wclq@eu!-UxqvI_FANZOIZy>JF2(}oHYh&OEe)$echUw0n8pwAxIH3a-7W6A- zuh+q(PYVX18KAD8;c+As1B=SoaXS3LK2nv_A*5FkU{^r#mjq;jm4C2owz544MH7YN zig{A8FGf($&{Ltecx_;`mZ==lTxAw&;Mm`wu{ZtW`NTZ!3zva+djBB>fQy6+XktsS zINy2cuaA3`A*P0(Gd*lz#*OU?ENhbfR2(3m1|lC^fPmD-KElQqk0xOWse9^lydS)^ zm?i_r^9NgCW?IV~+5^h)-LN(^MGC6XEZab_ymNO=lo`Cv28-`g$L}C=kyVTuxn?}P zjdgv|oZyI04cq)COXs6oOk%fP0bs?5Ud^mwrT0ck$M=|@XI zXqQ4;PTIE{t!nQ{In&pH&kqGc(bkwv9m*)GR;04X=nd@`N~RdZiErc~K(iGgS^R3#s9R-&{7EP7Wl{7(~%c;%?jIb1P98^ELr@2elKf)4({hyMZ4e;%!(vXDbmx-Q=Plz z$jj}IH&M4217HZii(SfAebYA|uWfksNWkMzairnMiSHIC*5ZP!G&=U@WUsv9p$ZG9 z&M3wPm;2hHsW{|?JM`l2>|p6hr(5|D^Z+!0?DlY!V$`RLjHZ_WFe{DwXsMhlUY^twPKwzJqVS-_sIp`z!-=V z3(L7T^_Di2;RRTD(5p)LUgjsp@ScQ|!Bi&a-hE9sa?ZF|Lh;;!-Axcg$Cu~F_(aMY zc706ncMNCHBX!==3F9Z~TbQaFxGWV%=YIT=iQ~(ssPE4*xB0)j{JAV;@KFH`AOyQl z80TkLo0{e!Cl~oJVzm{cleoT?~`KtTRTp_W4@j?oJvsREk?jrm3*E zv2bS#ZXUjuiL}1!9O6*>z7G)q2mjSO=im2QmY7|=cu1>J2eJ=uo=(>5u3g~+QDJ#z zAJU(MA1O^1RMCe^H}qqeD;7%0w%OnpwS z?r{NimN572ffuzR)~I8>Ik35D4`X4#56#7PBT3ASgh3e}-H$jgkRyiJyY;nBAOoZM zs$_67gCq$vUex|4FV_EX(a=f$Qk2T<1jf@NfdQJg?O3U|yf}BTza@TIRCY076QJzb zf5NKUzbG1f=}HH_m+5)|n+MoDM1c@)BnUG&CeK0~X4+Zt;)blWQe_52J53PeeX*AASzUc1uW_LkGzS_>sB@V8Q#T5<9$a%-Y@;J=Y@G0CptBK+e%b3_A#Q8tg(C~7QZ4Hp zM(&~wWJu|^N=XyInT&mzqOH=>ivmZLPC|$~>?|9aclo8%aQg;w-lOb9|JVPJ>Hu}z zU;22H8qUw&4y3ySD-yXWl~igP!#$3Sr6MkU!yE$BgPY`&iB*+)3iZObO>i>jC0{#S z{e&)ufMksD_ zEFB}Rgxwi+^UYUYT`4?sE~31n?%sb#f&Mj=`k#NII%=QSF~MDQ#NYn?e~Op?`Z!T1 z>Uo{4%!PG;A=V)^Kl<0-_(x0c-+m~Dr%s1N(Ek9#p%<}TjDPQQ!WAJyg%`3{h5bJ{ z1Mo6#M_c{RZ}Pug(S3OUK^t}68E}P^dm#8>^?ypu|26k$Rz`hVHqxafexM7+k#+yw zPYaeA=YX5n;`@Zz8UT{AVj`=T|F=*0&)-EF6PB>FpIj9Iup_we{|oW*&%g1Xe)hkC 
zde3BMETc$708T2JVd|e#?|=Q8-dLF;@XX8+e7MBdl5GhR|L*gcF2VDdM*RI9QL_Nc zmZxcd_nETI&N5aa7Joe0>zYgfDs**5 z*C@eVTrQ41y8R)PD-Sief17IltyNEijm1={f^J9#XpSp=X%d8f8Ar;o$kHfgIg$ml z4->gSG>{28_*<{_dpDWLF{oW2U!Q8?gW&^C_vVq(7%mBOls_(MEiFJ%tclD1nvVXR zKP9P1g5Dv{Ijg^ST+yN;m^gdoxs;ta?y3oy(bn{=Z&4pIm8%K|FFW_F{|9 zwpTSn%(aEGL7n-0&l~MZk5sy2;SHO~ILHU}l%GxE`#^$- zThIAk9*A_Atz!S{=lSO|!l#s#=y3qvaPj@vW=ZNbgib}!)>?ZIu@od}Gj=raY9Cww zG=cfYg8Lso`0)RJwb69CPjh>9KVYUj$NziJ zQW^E7p!pyI_*2n@{@WJi|6UW*F{o;BQ@TMBw(sQYviAS`DSt&|q7+nQv#3zY@J&-< zfAP0<;63j`D3wSCBG>U#gb2;wy$MJ`)UiC`uLkc)+BNga@Aqu|6FlgTUswW#YJto6`#ot0 z{_a}eM+Hx~j2x)=+2!hZ?0; zuZ5pwJsHY}iDiTe)rEZ>Z4V zx+BK9qQnS%tcZvbSLO?(&dA4670}M)}xa5d>Ub`Ty}Al&JUc*#-z`PZS|wc;f&VSod@XWJ_Y2 z&QA5?Vc=Xo?e&NSek+uDEI0ydmu8A9)ayR01z%dU7jJ6((b;6^S1OWE%FIh&Oryf` z1m%lju4frnVMNz9tW0YV@l{d!d!^z2ZQwwg8Ey5+K< zdNBTN^Bw$hYRm%>66f2~GjlHlJUgXz-3wcpjD`Hns|IY*5l3p)&k(YUz=+O_}0V^ ze&_3o<{rb<6Ju$!V+Z;XDBpEX^6ef_a>g94}ZpHH|9l<|b7XQQg zL@n3mGuV+tEh4vIMJc9==~hC6@E#y}@?>~`x2p+*fy}OmOM;)?+)O0{ZXu~Ww-g2+ z&c#;}8VOdqTeJT3t3&MS3a&#N>*uco(?~VD?Lh071uCBCH&TYXO#J*RbP}H2-~zr= z&Kmmf%-nxnG?L$mU~=MbP;$SH70@24MX2#K^iFkqQCFj9&W1XW$3D1uoSmLz zgZiq8t>|H9RG|BliY1&vM0ax8dIM>t>3!2o& zkDYhDhqwGC!*rO+=CI_)_a7Wba%_?Ye+-$EQ%d+xkvE7Jb-x3W)`tobq1hr?`L)L0 z0(7FoyS1REqy$cQIA<(4P(B7})9$TFQ#fw$c@o0cvEG)XaHvywttC>u8n$Ls%D=YN zT^2Bu9zxE=3Na-HB?U`q;c@gFMwrT9J)w9tgANMn%USI~s8j$ICCP#W*wAdWHyvz8mqBzy(}m~h{`MDT7O&Ied);oy zw=;aJKkA6zBz4XibF*9cWY#1dCFJO;d($@BD1xUDdqFnWyOa>nsR^-c%JQ&5vVdOx z``b8y7{*|=jnNRR_?u}SCZNb=xZ*2QTMlLgqb!VE-o6y~sLdJD`E7@+h~Z?7)m3S) z!S+%zb1Q;TP4?i4H?!RBN`yx0-QAQwXZr|0R9r~khuGHvVub8_>z7U~csM}Jw#&*H zrqMhWcTSIgDeIOR`!vT3lZf#(V@f?@g+LX{I=1D6(e{cmF~@V6?`GqSrOV1ko>TFF zjGN*z5Vsp7g@v$7h@h*}0R3epP{vxct+zue0RHGTZXfIe=X=wKEUlp}Enep` zpYt?s=t-3s{Y0G3cC;(k%e# zw1klzQ!xD$4(qy!?@t0cHiN5o)Mq#}ylsh0SW(`N!NgbIqjPiXjLl(mW8sqIp6$9s zM0hMjRyCi2ke_egDQE@Vd@{LW@0{YT4n7AB#i3A7kl(?;fH|K-&EufI0CLE7fr`4sj(pfX4XGN@H(Kczq@;gll0D3zK88$k}Dn;6Hk z<|R0ZTiRL%Q1kWZGi;RBR!^FQ5yU?hx4m^u+FsyTOKN!UO$-}(-TKZj>AJ#$D&6oZ 
z?14;@bi*dK8IyqI;wDN{-eL9o)t9Q1Nh%qNNdjH_x15p2 z(07)DP9Mv&*^jlkHFsXVVU!B&fEoHcY_(;vutA%~gSXpoC{L@(=D>l`!Pj)m6mdA% z=;8LbKi!8@4{`z#kHdF5eM>^6w7~Xu--LnwO z3WsZM6}%cRXx{aR}iB* z1U48T4?mP+RYio=lX`3H$7?~*za!c&rMddRpG4-Te52v{9g{c^CCjt!GQZGz2E9oS zy#a#gRuIfG|B@>H2u@iC5CWr}6~xDWWbqVy3wU986$54e%5sMMfx}5Bke%sCk{qqp*u9aD_={DB!;Y`nE>5H@7bRdpN{#kZLyT1 z?584#qZI0_Sk8>1bRKJEYzg+Vce=^fVR%#Z`+AwK~gFGgEbWF;Y$)W}$*G(iyYe$@5--fmc!zTq{_MO6IL{|IU; zCBLyKqW2DmHDd2cV<>cPa1zNzNzcbCez-L)9ne@2`JYl<#w*4i33TNN2rbcOoE+l{ zQ@jG>R?ZLCuZ2;=^i!2LwrrGyHv-rv*LR{MKq*xy@sqqERV$o)rm5}nUr>6}Gfxc3 zd{^dSgXNOv@aq|ynL*Otb7)eDYVt*!)t$;Ra0h_BPFBLKi@bczv00XEZwrawUBv+8 z$S;env4M&5XH(`QE|2OA8a+jN`tO=$U`*P~97s~9n@AU_WcV^GzEr7tI>IFNfC`|P?&Bi2Kd>&z)p`J)RWeGb@a+l1CwiWa<_2I{X|Q~i z{E6jkgrU#utnd92y;J*CFE{=2)zj(PmqrGZGb%`<3&af1eTWztrrS8p{9iVo5GYe= zF}A`i=X0Y8ZzG7zJD$x{z8JnF#_hcUk(|ZyTjAlB(4^q>LI$`)bXA-~SOW=YfV-WR z1R2o$g^kG~Qfx1qb4B_##wEM9C>=hBXR5YXs{()C;8EUuSeYp*Ch#qv1h8E^*Q*9H z{MoQ2wvG?%7w*ge^0JB}ggn$%bFCx3Lg@pJFeH)~>AzxB6C@vjR|~UbxkWEW_OFDv zt%ELws&{-BU5n9ZMV$;5>cpO@niXgstV* z0r9Gds-7J2){ht&kSvTJF%iVN0hqOLpfboiZ#3*VB#bhB^`#(Vh{I^1X8iDzkeE*T z5`UCFsWip~3U1q z1atik&|J%?=V1JzeF~h`mMF=NhiMGWg^&Z^Y_Od{BqIr7`4O_nBES2HGtM0KoSq!T zzg&b&=iTP9H zPg%oovYf9=#=LcBEW~@$U8?)X-U0waW$!ZR;gU~(I)ohi2N|llS-MP8AP(vMTP=}g z#fHHzoR7Rk9~SZtJu7h>>3El|!YJ@LaL=R0ZYl7!u;)DtOb??wR7A#w=AXAivXssq z1k~)^3Z}b`;M9F~xZlu^KI-Z0P)K zGkXSZY5!f_LW-#x`<&JpOU(BTR}HCE*6<5ziK*;N`YZRSKt}3A@(Rk0W76x)V~E^& zx6IYI|D-@8FGjD8pbWi*g5d)L!N$2>67jz6MT#yMNh-b;Rt1OCD?~cYq5&SY&*lOj zPv4dG2H?*qCGPX58Lr_P2p<^GON;S8Bx+=kjJ0R8GT9_h^+ka zU~W_yoOH4ELYSs&Hv4qTBdqFSovTmMN@ssTYeCbxV(k~x7jUeOek>%m(cS7rFE=do z(5(9tL*jPou!CXwqfK{#Cw4M&ar=v`%SVZt40zj=?e)EQc?1;NrmSU=!37++@ z4?N`y%|Zu`XF@eDi-E(#Azg@bPM@rr`?=26+AfZI<2@A93LRr}=5Ee^?r%T^M`%(J zelLz7`lio4q_;PrLfjqN6!)55km=T-LVE|w?)VdNpDEI$WPuY*8YknML52O$0@vdV z%p-c9Y9r3k=A#8n?0iT)_mM;=DY6s;dGFa`FYS&O$Q%8rY}L<2f-6rK)RR3cD=Eka zR``5AmW$nq%KY^ElinMbaRQ8$;MO3{jXMzq3M))`9u?t@n9!fzo|>Adb4mnp#QbF~ 
z!izYOnGI4pW8e0!v`7H<^*gwoZJd=f9(HZd8W**Dkl^6@sC$Ve52{6Kb?nQVWNjaPLxPPxF21*O!h2}_Ewn8DHKxY3TSO6Y6GOm32Dn- zr^IUqFb&Yd3}e95c_6;=K5KXR9qRhr8QlTSjf4b~xiMEQ;mZkIU|lvy#2Lq{a+i~g zo$rnvRsazWmqT5|DJ%7!RZNjvbw{QCaDAhq1_dW@4DKQ)!}p9VuwCEh$?Rh@ui)8R zN#5D_1;7dRT%gd59K$<*RD=uka^^rX>*5b)aA@>Dy|v`0iG_CMde|!P?45M4*9YL? zT<}FQ>~hrW%+&(%L~{~vI;Ha!L*@Y(GA+a^i(gef>N2`OUPwpWbC*Z-8=jot^GUw7mf|r1|Sd7T(hWm+qctem_VD8b~CPvc@hnlw$ig7!+GjX_a}7* zy5BcNn72}9_*SMc2gh)3;E`WQNQ|)|OVqjgP`{#K((7m<6R?gKC(?N2jo?H4B@|QaL|7iFN5poI zh9#~WNYb2J(;qMwJ~XSkyl#EsWJ!gqO-Ifkz7H8M`3+}G+c*b$FlOeO4qj>6{6wA4 zUaVQ`@Oa@W17uAx*B+~pYdxNB_Tik;&cKjnBO)2i0fApm8)B3?vgJ=7kXGPOmhJV& zn`zVAqnr_e0w^01kHR3Fg3fzNwm;cVwiv!+)jstMj4sGHbfy$0bAWe0DIF8cw6a6qyQPd&w>~661~zy41*-UP=n;2p zz1^7dctg%kkno3sJQLSvta?LstcrF4XdDDGuWxK52YMKXG2eUoG0rrk94j+G1-;-I z7{C;;=E2BoYfyo&CWT+DbeQW3Kv6m1ly=l-6Ci_BK%mEW&!b~ck5+RjG{JG9J7TBa zrn&gjagwF!eEQ?%)xarp*^oH7HF+cvmKZsw2H$-tI&l}~v2db`duI(BM24sW;nC!* znz)e&M`Cr@$4Kfa6Op6XGx0-hA-eC}$rt-@>j>84UPSu6NWGonzey?|?4HeRxca`_ z-BRTd+ELYN>SBMqo2G$v@dw`sPGAq2zen@ekkrAmd|9y_+opSw@|)J#Bh=F=rLrbIAKlfLSPaKd_5mxZtI+eK)uC$q*;XgRJ>Y zD-@C_wO2b7f)i|W!gYQPypch*0*Cl;M1csBxqItiZr$;R95wX+dQnaMu&}e#9PoHt zOt|u=1I7uX}{uGpMNtF4{eTUi|0rjsOhsmZgUA^^{Wn6++GWZ~`Fpl&PJq&oO>$k!@R zJ13X5;Gpz`9gCq`jL}+uZ~bB}F-XFC997N3vEfM0^u9Mmbj3PZKZy4?^m7ax%5U_! 
zRm{ofqCGn=O(&~!KT>n|8`cIc|D!oiFUb7nKi`qKmz?kGrajw zzxzyN$RrLqgQWKj0^5}@IguBs_$6H?z>8%npb&Q!%Q*(a$oasVjl;5KyqQZAFhp67 zE8ol$>V}haqePjfF`={U?oH8F04?D*HvSMtdK0?M8tlm1Gbzx;ec=A>Q;av7YyAbA z-Sy%Z8}If@8f0p?`rQ93?jqySJPYRPBiIK>b4>#@-M&$svr5VDxvHGP@9sp|u9xuh_(1YBbZ5dkO>>`N+Qd6WZY zAfNa15_0Vj#$um%vMOPRZa*H>&ZqRa@qJ-8^*dkFsxF`Yo>g@BFyFcEVYaOw9c&AD zafv7dcfyJO{B#-SO|I`kN?qF7iar8rWs>JQ_w*)>54xR-;?Td`I_i&re1xhSN*0dx zNP6vy(+=Vfu4-F`-2YLc80Wvu6~RF%D=k2v2(!B;gYnQ8-ML}?=3xM z9}i~t#DNE~>4LYt7zu8F$7c?rm=2zSvm+76Ih->=>(w3}P9+uT-wuGWfEo<)RJRAF zKl^-0ANC^Y>ik5tgg{uqRjUj8ABdrYnD!Bc?L*wS{s?xNdlH#@i5XF%zvt2VP~Sx`O_Z4^8mrc6BimE?Yw>;cNzb|m($52iS$%NW&~Bdw zlX?|coCESgG>u64Wv)jR3YpTuHGs0gE%0Plos@$SRT4=-XQhT-i2L>7^~} zpEe$gd!l+SBVT4!tH$eHra*uU4$(sbpxsF;uH??qBw>xOK8Je)ZtMtBIYlW$9PWYjS&M zar;_sHKC>%j||B>ovFRwl%hQXege*N4h#s&Nw_4Vrnz}Qwsz}BCAq_U!SuVJ(F9l& zjMhp+lgP4}TYZN<7nQO36_08M`{Gs5hwD%(rMG_)$p~Ad9fC5tG^TJQUmtQE2{q%( zwnC4<#GsR-y#g_}4BsI?@8U2bLZxIO%@;3?oO?=pD=JZfr!Toq8JWJK8g5;onEv=b z3*L~?6`FLsu)s?}($0Yli1|Ldv#$3(R)&znR)WMb5Wv6JIhBxn_-P6Lg(@J^luKw; zb@OY;XSr_lLxSDWXd84@-*QpkT@zy)=ABYMd-fqNkMzJ5GF#ai9cjty)hZ}{Fb8VA$IL@ttj#M$QLS{? 
zja-w!!6ac-Bx=7{zO0&nrd%vEKq`ERA}q+VDZx1QLzzPBA+1Wvq`IjryOF&newE z{i|Dpxc4r3^4%T}h!a{XP<_&tR^6j3?toKh)O6eP@NsnOC*#5bwYD0@Wq?<24QU%u zBLXV&QDmWubDzOC4lgqH0>v>i-~iPU^MSmxIo^Fk`Blb%b`Xcn`1{9&&EbqNb-EDF zI)aTG<^2N7SSQd&gn0=veF2Kcl^5hlqx07bb?wj5*-2f=2D%T@^GNm4HS(n~?`BK{ zp=CZG)R%@!5_dGRCaXv9FqOYShnWW9yXVv38~3f1Xsy*7deHrNKcEYK&YsbFO89}9 zGL+vbft1*VLgAe2XFpXxcnZDw1eGeWpfVICr4iHPmA}e&_Z-Vdkg3Sw&MPF3jw{~6 z$osXf9mg}o0wSLIe%HHiPZN*+Kf2yJs>(I&-lZE^fQ#-BX#^z~jew$bcQ+E!-5??% zC7TxMmhKK|>F)0CI1hV&-x=fl&N*Y~pDMz7-uJoZJ?Az50-74cwT2HwcBku{3XZ8( z8w>+blRLwpO3Nr=1lUc6@}%<3;5uF@Hb-F&^imTrLjn&dWpAlLd;cDIAmm%MAc_DB zYX7OaP6UpOE21wCe2Qn|vz^uMC9qC02H`aBE+3*=+<5qm=VB+dqs2M`>lQyXD=oWl zPD&gTY^siOIlOKh{U(tH2#~?JlL^C}BIRp=V?LZL(X$O>qIxO93l|t7vW55yY^P=X zkw-MQ-4Mdm;w%1|k=aPU!6X39#V0p7SGmF&gi`zk=YKvS~ z>_j?IXkP>t4U8oa)d+c>gqI3pK{}-*S5Au!>m`D6BCn4CpMBR<`_rosS0cm`q90~z z7NBa^@{_&XPT=)DSmBKeemPc;slQ#+jK8H@VuhCr!iY?R#eD3G{9`*S`=yMx*8n5b zz}1*Bx)(5HqH5>(w^>@#W#ZM14CI}MZ43q{YX>~R>C?cN~vkAjWpFrBvTvCiu4 zAz^OFF+ieVj6>D9vs>>RVw6u3U*5^RP2~Xj_^kBC5l%ZKQg!;fW+e3 zVY6KClVO^`%MnAjWgxK!)F4q+*)GE8SA*&rj&H9(G%F!f>JiVi*8K0!#I@cyF6BdX z9#`w%=T^KGSR>ruPN{nF&}=fqjBAIE=A5QVsUoXEije#5u!8f?sbdcMQtN4lcG3|| zCu|r4c=ngyM0f*+8U2U^3X(bG=QM-o0oc~}Z3uWtU_wh@nvMJ_iQ`RIDTv2qTG1*q z%qrBbEUKN&Sn+^yi zYz#D)riP8(mI_D&KhFlY5=8ydkw0-ENa7sSp&{O<8y{83x{zxky4r7YqrkNYqx^Y8 zB%tP-;Zc}9U5h@AX<0tM{J8A7Kj?Jry^4_7Xqio-n0ZMe)966?KCDBuSig~c^EY{) z$Q6+JvSeP&+* z_OF?B!6%?6;-V%=9XpT@h+t*yAuV?6t~X~E>vg!)&mCRSBH5R+_0`L}5}42s^?i>z zJYo)tdG4H2N@onckCq@t(jb~_9{PS#`PSeVkfBS{ zcYJItSMddzBmJGzZC{VcDMK0FM!F8054?!@=lW_NLzI;12ooyBwH@ z_W7rZwzTlL9znnwy_mCtw(z^|5&U5osL4qH`=Ww6$ggD&SgM&sg*-Lcac$%C|8X}4 z68qgeM5l_!61- z36yLqWy_}Iw!x(0>?JTW5MCe_yuvlo%310B%AD{+wOwdVr9`_{aj%h&YGVAU2?Bgo z6XUe|6Z{lXDPu;T)dm&jk6@dF@&u_~=#f_7HbD^^px3Sh7z!Db{?!jF{b^>8u_%bb zN(>tpskF76Ywpq8>$;Aoxl8mE1pfg!jt~(*5NLu`h^d}C?aZ(WsHZY+|T7A}Q+vDE{f)0a&)3(GXH0*P~%O z8QWch?mgc!e3A@G{nwIr=Ak5wACKmcN@OomQyOff5}NH-<8izB^Q3+@_9c`^KVihi 
zQw8fu+h)!z7ZrHX=9@B$*jKw7;i?DN8)N%dOAS&dj=M4uW;*w?%)W>`ra=j7u8_Z`D z*7~ee^tzKSnZVQ3z&`EToqb7^E4*W3=%t`@W0n;1M}I9Bq(W+UU`-*nHt2k~M-A`% zqNKuAq**CXL1V8E#V`d(i=(ZppqpfHz82*HM?(n(cD(m@G|-4GHCfPkuSQe;2Y@fi zg-PXe{*}!2MP&w$Q>T|=E#ih)II|0c*s+9las zyN!;IiGuby$2#-%WvED@o(pAx-MY{@bZ7+GECcNeXVsJ+{LYmfU!dK4!%$kRu^U#I zwMbfz^_BJVu7J*~@As9kV6))wigo61%9EWo4@ae9bAl0l?q1+LG%|d_dl7w-RUor% zsFJ0WmVSg6BgMi+G5kV#-1)9amw@G3^S9|AR+Zk#Hy1fQdF7%QH~qxF`v|e?-t8oe z65p#c6?k9EewuH}P)#)Xkf1{1JqMt6f2><}4X06v|aK*J@I)E#PJ0H%|O&05^qWlWqE`PUrlR~E3o*n)83^=MK zIxon)lfdm)o!fT7u#UaUb^@%^@@m#W-J4o4ObWabke4~HLFdcs2AUL=^`lY&kg!** zInq+hl^31;dhZhG4tB|LHnZL;6w!7hlYx_&E(>`~Kp(2i+kBr3SO>yghsXAk0&=mF z86aJggwmV7%MuNOLWxEJQ8X?GJY!do=DB|&D21t4yFSY2pgbd^Lc?}i>rqhQNm#CH z)~V;7SmC~fJOdpVa}Z0qH>w6Gmx@(J-LkZMV-&4#mQ2u46l#8S{S4}jTXeRhs*LbD zS^Lt35UTRqd4Fibbr`)=9N;{Q!1h}OM8K<8C!v2Q!FWVE#96$n)Elb%t4B)`bT@DC z?G75yN)R4o0?b&!Eu0&NYV*A_@~Y3<@~x*CuwCdTJLW)mC8e#iN|qyvCbi}JE8~6rRc)(r}mIaTUMCHoq$Z5haPVvP@HR*1>Rlm%iZBcAqYsRw3W&4%D-V;H?obFrIrMFL3bK z%oH~8F8u^J2E2ymmBt$|Ywt8Z0&T+iDB=C122bO7+IG7T{jk zXa0Rq3`rB*;%YO}h+$6)O7-3>)eX6&`_E7ATD9m^9e+%IaM%_23oLi`0hZ#qaoC}_Q z-vH0Q>m9yr?(~eE?kHU@`j0#5&@rA*x-kpl!fJ+yQYoG#z1@npDP7zeo9rpkLgLLF z*dY`Blbp@PFrKAkmE5D#?4XnT6-IhKb<@ujsP?NJ zany}w?@x^L`Wq}xL;5asntyapkjEa*u3H&}6Zek}2sC9Dqc687erD)Dbw`y9iN0@& zS!2%{%SrwOuI^!7{l2#=HnU{)B^`(+`x}3CbK(5j0ShjUdeOT7)8?q$t_-*|Uz>Ho>W@r=tby1fS#UJUe_S*yb0{RZSg_nIBQ>u4CEu{wJXIFx=XMr2ck$d9&G7XqdfeO*;3F7_z-hDor!|QEa<*0 z&gMmuJjjv>SQrBG6Gi_n3hbNX&T~4rzy3%SL2nrWKT2UhjOsgBrWu*o$ap2{&w~2V z_dwDcb0cy9)3!k==S`r~d9hH=H=~;=@2{I5iPQ{@TCu)g{LRoTJeIF|unY6=*ix&wO3I6mB&3E#V%+EU!;K#7 z^x1G`1uwh)dm?~nEr%GquA-zg8f}XtgBiG6*9_Tx)H^^X3In-v(~jw;ta_XBPFFj2 zv)ogaOCyctObZOqKg{;@7#&TV~f2msp-@t!0_Ym6lNPDE0aN znYweeMBzsPr@gH=#`m7qvsIN15AJN@Ep?KBdS_5&F#ji+FXr3Jj)WmG+osRzB?De~ zp|VIPo9uW@?dR3QIOjWFmJe0uP>46><59rsrLo>@_}+P`h!?DwYI*1V z=N~<XRjl(go_qS1RBaC? 
z**00+T@@4rVsaD5Bfbn{*J~<#{uiK!%GLtsiys_T_b~n`sCO9L37K>Y_C_P#zUG?j z=@A`OrX}@GqGnWdr-UExBh2)7NUJJr-$jI82sX{gHP-_?+jEIqH<&zDvCqkH8P-11Gs@}0_@el2?7n2S$T$z-A@E8&v_ZMI2 z4UzpS)_S~W&oWHZw0vAy_xaVGo_@#_*VoKGzVM(<{NdT-b-g7fP3TG1VvO8wmCE|D z6qud$vcz=qRLfFfIG!)GTneMJ3d{<26WDZBNW8{y-)aMkt@cWpy^vscH>gTALCA#O6*#imO1wETXkGe&kl7gL>BlA^+^+Il<CQQ`%wfN2C9w$TL1xZ!3}qAJD9D|S0n+)ssnA- zRk~;-gql2f9&iSd4&sV%P84v1102?SiSDIybR!uR=_UtIXZi7&^xv$66PGKyg>Qle zEtLW)YX25Q_dskBf3={Q$^c!Soem#hw-Nu!!=dX6EX(#W`p7)tL`GfV%2{P4I&W_| zAAlo+*&dFh-O_|8Yk3{P5K4sd@NoO9?pZhukV}AB>KA1 z?w1F7Frk*yPA*#Ofgi6{;~+>w@A)0k-0w3Fi4aG|@)mm+8gIwEL0?q3+e<6ge(9nO z+v64bjb6*=r5MoRG+-u83jSQ^$|_IQOkL+*c%bGbr`}1eDvsG@-Tc$BJ@{jny0@Yi zKqUQO37v9O20?ShG zDg1?#-b^K?dr5~N3hndn2Qubp&|!)VvI2Yi#ebbdV$%t=B3cV{t`2{@wt+I3JDeX% zjwQbGbTw3flXwTg+4jKHCA_&HPlqM>nl!poGOdze-4tNS(MUx;WYy`jSB#*%HRdN_ zAGD%I2zTO`(ZEk#KHnbIOZkmK?CDnIexe(69YQ3^u)9mJNrnz>+YvfF*DWQ1y`rRl z|6Fas0*CTJX@Gqu=r5GH_PDP7hg=GOk%XhDk#!L)Yso3ZBlQbX12x>MQ9v{r+k6D^ zBJBn7n{txHH$pdC;*sn|UV47Vy8$>0Y73!D>z3=igck@6K3*5oIqz9Qp;NPqCb${ot8IdxT7kSnBi+z7sji96>3yCP%1^w^uwphtCq^9YOE6GgYk30xBC zq7)zX*Zf29>GXs!|5ouV@{I*ug&_T;1`9qixtq;2o&-S;e{Vfrw&e$FfVoig(lS0L%)LR0@+E*~)<90X#t z*WIy1QV5+}^q}K+M$3xp*6_HDhcag%h>@LY=?~Kf7nW4tGLxiUPtc7d}oRaG&RdXY*P>!5&)QwR9mvT%er7 z84TJsWBUbIey*+&AzRWD?i}WA<1j2~{#Nsu0yRpLB^J;o{o3*@vG+G2Q%ky#@N!HY#UXiYh_hYAMgD_5XfEXG|Gp@eLAn)j{*vLCO(#xI%}a?rd;Al+`xIE^4rB3Tp2PF!bG5zsJh{z8c`c=Ji?{JWU=V7yQ3hHB4h=TCXh*m zX?N7}>*P3CH^;W1_>(c4)pUitB1JKBrS-EXKxgh%E7Huo0C+PS45BggRh%|hOVCr| zTO#0gnOi4xl@EA-@VXse;P) zEHtPZ>dC$BIE|(TFN$*z15h+?1V5UWJ@3`*q{>A-fC#m)av;f1aInp!Z&^3wmSFFi zd$a|XaW4}bY5HD1YcRDxG}zs>8Xvu6sLOC9tnkABBv}Zqn9|*1-km0 z%Q5V3f|{&D(w_k*OnvT;0;jpan0&EJ=zRE)wyeOoRbO*nuD`2#D9E%Wv~&2@Rk*^v zedHP_MmIMp;(9S!c>m^^>|mn+;#P%4_iL0(N ztkgO-L35y|Bw+>=0NaS}mg$vw?M~8P6VFIsJ4jv8%9ocZPm=LiW2(_pYGIW#UHx6L8=Qe zeP8!F(O5r{>QHqe;=%sRh4@~@<=j`v!$7^xJ)T92YsezSrhcL){92Sb8N3C)(|=?$ zLcKR{3}?}Oqoi7xVoMu3jJ&~~oZBY4=G_?O__~3ESH{l|Kxjg6jQb)~PF6>3=@r`O 
zIPIql%WDXj#(bLNmO)XQ8-;+Db78E;VkEwlS2^R=BFN=Lsl^TL3t!Q(Q5 zlWAvs4OoCxk=vut0%%~*MfTFa4b;6NnkzG;w75E=wmdH%9MzKQQnPy86BONYAFR6Z zgWdcT7fz4V^PZMeDLW~zvIl}bRE>O7|8v7)1WtZ0hO>oF39lKuTJ=hC@B9>qCC9hXEfBPJ+-~vI(5dc4@(chQLQn*#A{k) zdWJIstn&i_!%Qs5p^Is$j(f7hujI4eBsReVuMAj6aO+_L!&fv|d8UWau%Jp%PfUKv z9w`{L-P9MLXf;Nu7#t;61Zr_1k&rj=R) zq2sz5s^=0Q%L;Sn)mJ^y2Yoa+RBzj8z+v*kqSoLjt3EN^DP)@)o?(AigL)mR43F_}$OS{oQu3l{>6 z?zu;S|7#ol`WeFgxmp*Q8D}58H++RdEbs0e_}4$Ss-*a^-LZNFb1ON4(=?JZz=Bxk zRd8~iMjw1-P)qGCG8J;L@2{$`+dvurA=-(QFsxC2`C^{*x8pbqpS7!2jrZ#BS`5v} z&o7j>uGTsJq?AXoWmnZ-ep01=@tsbVX*Go9_n*&>z`jDuB)Tb^3bPmn;W zIQW}c&1CeZVmD-ygMv?!J_I8od;m=aZgQu1C-3X@{a-QU4x!ahYuBP&)6%)y%~11fLU;wx$iHnTX{>eG=? zn5xS`gWVHx@&?ntp_V3f1-B9*IwGK3*8d{h_w-=nA;l#>FYe+c<|=Qv(3GZHj^Wz} ze`Mi259gH*`|Q6Y;dL%JC+bEzUSU;H$>1nvU?>7(3ocEo(6$Hu>LfM2BXNIcnh(XR zjZNG3fHoZ?-GhSK)^fUEp%A7g;%68X6K{Uo+i-VRJDgl#0jpWxM($7x+x-Y`7{CPI z*!G0v!Yv?zp@-47@jRYd@qKTla!T8-RpmD zi`n{E(Wmfv!4iVnfwVw&RglEs>rw3iTaa?@(iV^o4VT`aEP5PW8>Lk&jc8@G0Xt=f z!b|3R3bp13DaoxJ#T^F&dS$dfW*O{@e0$T!KalR88f_xXTsuPlO8$5=8$=W#!!P{x z2!K=@(2CX@e)+5cYw!SC8FX!0Dblx;b%*^oeYAbsm?rUqk1>&v$tAF_YNsu!a{?4CJaAj>k+RTeP#3VJ|^R`(>6% z0!jXi@4=$N@2TquEbE?06mKgHvs=4WAqQjmP~3v!p}PD94V_&# zu)>fdwhExeLgavKfgH4&QP71>o@V9mu|ndrn+dmn-!YWTB|cZnQmYyr1YX1;S{~qa zW@w0f12V1w=+XLHd3ujVe$~iUBVBWox|kF(PBnOeL?8lW3&h1@g_yALX0);r&u&dHRIH)ga8gEfhEr&V>+VtR2&$#_(Q+P2W{CHd{;U!baa zHWCi26@Jny)zA1xVBW?0*y|_A>I9p8m-_&Z3oPky7$1aVak~DO1<(W$A<72Rdcn}E zz~`bbQ5vO(bvzWn*SUB5%{vkk2P||Cxl}hMhQ#!M1~$7Qgf4*!g0Q@)>+fQ+i3~oO z@(-RJSpx$LR=t%Hj?Aspt2+X{D(l)~FXpP2Oo>EJQNyZSPJR$*=2g0K5CoccR+N;| z6L!^G-U<7K;&4JO2)Z2mn*0j05BUtH&{wLu#v{|z3yQfA!$8~-aK>=`b-=o>^s>X- zg8vTlO^+>hhL|^qDVVmosr^Lr`!BxKF3-*TEvskX0Id>fqJpEB(JLjO@9AqqIS z>R7)bXyzW7#0V+A)U$}(VbSE%Q7yh*Fz7*kZ8vr$%i-zCaK7{aWUg?V57+k7)xS;e z5ptZIA8Z3Wxjb^~yulkadx(;;`9RaGQ1UCgO%nF^Jq?F~rsbc_4^^t!L!|4k&k%z! 
zEG^siBHs+MhyNz#G)8k~El?O@Qf>(d8!wp5za2=F3KIUNvIyKL$~Mo4hSiG5aIhmW zfF&v}iN}^i7p< zK&*&KYITqZkmA(UPC%$~R=o&+O%QBu{v`n?QG-0u75_t;wlgEbuf1{Mbf*$)p;MQZ7F8lsGSla9| zVZ6rx$gDpu)ctO*H@}^)+f=z9=y~&HHQekJn5K^Rp<(gaB`lU1MQJH{l%S7h4@7}f zdfK<#R_mTA`xnZk#S@*LnK&2dY-RQ~cSA9#hrs)JwA{{V(59CEvMXM|WjNn~=r7={ z=+~6#WQE@vP%97nCjuY=(ZY+JAM@jqtY*O;dYcH#_}YT<9p$gWr-hairxqGTFDV-o zeQEKqB0!2#wcuybLD%we`Um3fwALGnC-K%I*O5n88xy_6qF=Z;t6g}jlyN*?C7R?- zJ}UI}!_AE*_8*V9rW#t1>6|xfTEMI;nQF5_TTok4xmf$7WJ=IbU17U80n3(em1rer zN7Dwi+F*5A2;*KBBuIU(nB_jbO7If|y>4_mNo3KN&Ms-B($<^22{aixk`Box+~*kl zww_9-yUSr_gPz0yW&H0h$Yq)#tpO76q5iFtCd#*95xP8G^m1E46cL!y61M37~IWd(0yOCsa3l3TQ={l(WX_EhX#s75KA_LmTtGWL<2k>}yZ=X=v zPSwSN)JduWdVjWa@2d+oV^^R7bG!n*)xw)!<>9HxAP?|3w~}W`%#6Cfod|_aZ~y*w z>Lz)Y^db{$=ar6*!;oJQr^O7MikWL#?RU-~o0R|2I+UkN@t7R9AM>;uofUdxU8P7S ztZV}ePr~6(Zks@x*OWYZbAVJN1@X#7I^Xr#=#AoUW4p_5x0jS0Se;5LIfY`#1+T2O zhN-(O*Nt2Gi**RJDCGfK_l9^SNV#=qx%CwA1Fw`hX?dqn zuX!Q3^)g7bb%$+ky0=xIkXtse<0^-;M@w87AZ|O*`iP80K6eo@^P*y7;9*fWAhyA% zz;#i^jj^REb8770S3tAJr7nUId-x_O_H4--K(fUo!VE73&uCKvBh>Y3+eK<1ma66K zH@*eONByuqLmV}+V7X|2DDANkSHL5Q-vr9UqJESKgjNlz3Zhpup#_zx^Ad3_f0bQ+3i2F#@!YRMLM9DEAJ{{`Yd^NT-ZeaLZD0h#c6L z0&@!^b}+rW0yj3~APo25+Vfbr1=QPfQhk`uqgs%LZ|MRKJAvc7qc)_Dn z81A(fsUs%>qYaoUl*#Rny60VIA+bOp*Gq$mbmr+d7k?d4bV8kt@76!E5I zJ>Y{M1Q|qFSpoj250&3pG&om7K2lb*uJqkRQHvJyfW4YU2mbk7R!V(W5so)m+&J}{ zxwy~DJg_0Y&lV)9UT8yB&|ZZ}*B@uvJ3bewjt8S09SN!Y-Wi^P%O2x{CAXXJe|EI0 zaRy$|cn^!XqC+o|rx*r;QEN$WvUjT$sjPi<&$$Ao(%k?RYMS`43} zPGFxqHaE{3p^G-v`N|v+F9i)31Hva{CPO^UJsAE{@7?`L!ARCQ(Ypu5_ePx?T+_cl z$Ms)Ro12Ifxsbj;?@h}s2IGhn|HXp{F@c_JqGVkqalsDE`$S#!0Vbzfct5!30A3s~ zJZH5o00ZF3!eP+3NmMWy&>&kldzE=tx<=~c73|HCLp|qGp+F!x4=To{VZOnlbv}=x z{)<^aoi&*T!=QpzaL>YsOs6Lm)cCN~>7 zf5oWE7^t+Y@3Ng*uzb6!Weeq0>#)l3S2?pg+tTX_`*EC4;mzJ|72xgbeYha_nI3S7 zP1f6S0x++Z=i&WBCUyI0o}}n~SHnB;S^WSj%^2DYgp(>gxXuhwSJ^H{W1D&LnDs-~Pc`~hx zvugaVS$@@~YMiZe*4{21P6W@CEE2~Qm!(=nV#-w`2RzdG5oO;=nd|mr05YMM`V8)% zvBE;{&B5g-?oYeX;M70Hs)Ye|Y9dW2@I&XTtVKxR|2TYd61=L#V|vb{EMIig{ig8U 
zkVg>4`nOKDL#^`I*?Rz9&ilLoZZ7-MoUf67@aPE41MPFbog6gRBp7&!(9Rmsjd6KPGP_0)ehtUhC>zi z4aSc24HU0aHGaaM(X?V}<}9G!}sW zoPx_VT*?W?xF+ihW@d)x?alomhtBcM5y{Fp8V*el|12-O&K^o}rMS|Lpm>^CK7+An*L!QM2Z3RE$j>HNE#zV2occ$N+iw0%_Xxkzdy++m+zHJ9`yB3m4{~X*Ul-3UFEPwIxN#RzA60WccQe8RucUcD^${&molE zUCfKev~*YJB;2>sDS8hiaP1vxzCl_Lz0`O{ej~oGmanYvK8`61OM^qQMIO8QURqT= zUSKtCT-~-P#2F0iPdOG~LV z3M;xWE@-Ay-j(on!ygpUybXJq@r64Z0E1+MI7Pg{ljR$#|2imVU?<*u_zoNsYN?b_ zA>i*335CN&3?Jjbqn&UD{4A9%jJX&N#ehaHL8rDW#7I0yg8FC_H6lI8k!~;+{2h<3 z#(eHbuRMIo)hk+xXygh~@^K<&;G#*ex`%1d`rCv#+>Y5w{_U3+NA3Y04n!u)=WF0} zGYv({i(-D4Z*n_j54YMVb*lm783>=tLGsG@Zxb#ku>LptJU%>@yAaqZs@M?uBz%Hg zp>2N>7168{KoUhp#}!42J~%j#|AmD7OJX}pvQAWb3E5VV#;(%VoK5U z=8@4s<7VpJqCnZD*WfLQz2~N8Dtj$xt=q1Y$H&+}whzACpDlU6d>`O})g|c~-dfq! z_|-VVHUfgcVQdru@qTUlkcX>DpX9FUaJKbo{ST5+cP)sdt9S4egB@iK!L~l-p9?Np zs>#MnOS?3cI-C2{U?Q8f9)tMWr3S$aG4pw|g9Xf)&*;S#n}d1Q zA?osZ;4?ovRCKWtU>nIVdazbtxDrD^f%lZfq7+gDS+U?1zf1F*Gx%l!Yxrf40o5^M zw$&mrWKUVK{@_@EW~Ju{;$bP(YWB{&yO<@xMA6FdpC&~NGzQiY)kC#=UB}J(;rT_f zcu~-``V-9$G#rs4urxykxB>2=Yf+z6Y)n$3puu)g^y|^seAz&%pg7f7Bon%i3-w(Z zANWh&f`)4I!LZ@m=S+b|Crgnz==cbq5MV4jK%`Y-^*;UBf&nG^B^6nvo4BNm_zVN! 
zlHooe4J}mP23A`(e|8E@<4`C1kdgjQM*31xcsU0K0LrL;TnYXvbh>!NrtOYCjHFOux zwli|^b@0tv;v<*EqlOP-?4wu0PU`h;?%nTCe#y7Dft{{Mt*!OfVKS@kHrV1y`hPnX z010wG|Q{ z?jh?o1}mi|pt^ovQzmZzX8h%!l=++LZ_mu4>FgDWg^j;tMP#P*A0idXx;Um@PxvEh zQJ{(2fn|N14YP7fyH#P+_@>S7N)0e>f^?usfs);jfcu#`{fAP11Af=Xdqv5;p-2|K z8v|~vp(B2MS<_aDZzSVc&iim`$w%&RMs!%E{24-BMfe@q96h@s8?D5TrS60UuF=+l zV&Dmuz6Ww^V~AdRvoa8*CHQ8F5VyF>{Qw^wy{~ZkEdgb7+_hXjJ2WQ#^c3o zA*@(^Ae*VAVVZ%!`NUvEM8Q%G_EBjW#O#1q$2DfhCg*RX`QfLI@)PW41zei&ezeev zHG5C|MA2b;2`&vHvE^>31Uj%`f?$)gB*2k!IslE-!IM$c@k3t5|CGpq6b;tuz7F&S z;xcmoI}ZrQ=K0oN6=JVwvuDD-MxX0zHl%CPw4qGty3!@cGiXderJP4o;_P=Q$zVZ3 z(X8<^I=v4wqC>-j@V6)+Ai8?~0WQL<&;mdlMifCKqpRRCXjA_>Nhp)q6(Mjqz9dvI zEVLj#6M!8tu)W+h&E6r|HL|W8mf7@c-Uk9{ zx6K&6zIP;w6uho81g}lu1l-0p>YqX<%>6I=#s(H~A>E=&A;z~J>jye|?N(9eF2*2| z6Mku?CZkpCO;~rE)ILJ(B25(r@iibu*@*fjCuH$c!k^lL_UHKh=}`Gdjr}MD+v?Bv zAk}6Z@iBF}+_~mQeLn`V&-tJM=eRole6#-AQ(`s;+!EA~4v=m^&QH^COMh!P4&7-<{pzfv=0RU-X5AL%(-9vWO^_#(V9SCNc_fdpDga1p>#$y`527Hy zo8_<`X4%jq&i{F%*$xkQb${1*;_xyDSNmZG@Ae&A_5*X-Yt_*^hF`7uVz7=cgzUw* z-`g}h0&b3U{jLpFnq`mZqS^H*jNW<)7C;YPUhY?ytCTS+y?EdM4hmMo#yamg{*%m#Er^9qEhZn>ftk{JbFl z)qnK+$Pa3E7Eob1&hGDab$eT|3RDe}S9(?jUJd%=uT=pF&!_c!hzFcua57&xX$B`E zfhiU#cgK-y)R-cQ03;W@V>F>+t=40Ai;v$)Q^iSMvSX@3KcLzfVY^I7AN9!BRaB;b z2ste!WD@1c48jmarvbPF24Ewss$<}2{!agu`5Ca#jtu~BXAcl26G(XwV9Qkam!u0x z;L6Pth@xGfy9v7%0BgmBK3 zV%QT(=<)~sd(3?~NI>#OZ2|v`sO)13f80qhyl9Gzzc2EFe}x)(avEO@g{TeR+w)bR z0}wBYCZ)9b-eMV0lhNOMGwCgO*Dy+*``>8))y-VOHajui9WOMO^C0sxW4enF@ppZ$ zOB2;kBV;DTNPKYzQ{?7*KE?- zvv)lI_dW0aVeY$#G8LS`)!LuYxCFNHA5)37?H=z}yRg?!hqN>##sPI| zH^V_x!KEsy>QNFl=wMlYICMYq>4SjE1bHNe)n!S71#@Uzj~h}YrhhHm1+m( zvB7RfPbT2Jg>DY0an^-5^~_HztSJ)T{ci&bsodsokb7#&tQBV1QgO$+ zjX+XovSl8&Q%1dZH7}wZ+dd+zT5lz3fSA(K9pcYApO-O|1brH92&!3M^hv{LaGk=x z`q8)JW!LI~M}r$0Qi?saw&U@prc5k%Wvl!}Ze503T85t5A#$kv(obQ)i0it1`#whq z^=P5(&YnH+tda6Lb1JQ0u9!%(EiOuX+z_bzvN?PU%-e4z!Rm+VekP_rAERF^oeO=uWnJJ2HJ&|B!hxM+Cgh~ z8N#|xsjymdp(F7<4%bf6yIJe$hg0>HF97;c@EF=b84Ainc_>=mCR{EsuN}b;nb2~d 
zpYKl60}3otYPkt#b_xhRk$h5$EExafI+X~F-kp$7cH*>toe|{q;jVvb!%?TVFWWBGxijymF z{&yWdQYpXVM*pGmpLa1`qgUHVTm=6Qg0WwyNV?_tjKk@M&%z&VV*FH>$D3UYwS3o) zV81cMxNHWx#AxS}!B23QUi>905w@ehVKZ=P^b!S5#c?$3%~Z+|hdCzlJMADjB;*a% z)&oGjjzb|K6upL7dAJZ`gNbhO^&TDH==!jRb2Wslf)e!XCe4`SKOa!&_{5juwleZA z@#vI}&tW5K(7}o{F$klo%*6(tjz>G$d2fn3%D%~J@_=c=qS@Q4Bbr*b8^imsu?=*` zv9f-&O7P@hbJ-;*Md))l85s0KglF?!wyaRfiHILf&9#bLFJ+YHfARz@b6>NhJwcGtL=w&bHs5^M0*2)uEULO;_x3Oe(LF>4qb}-t|_daeX9U zRP5lGcD+3TLcIbqdQuYYI;R2Tyd0gU{@q4696!qO>bA|Jle}XWS_~oJv`SyJ@ zbQ|2>y(s!YBk=4~%w$({)C*e(6CKDyg(hmPV24dgP9-ve`f9n0fw?8JhiKyp%9BnhOrmF<7ABD$5YO6S70Uq6LR`WOl~LK4tity>Zv zfb(a)r1?SBJ_)yv=x4-|_T9c z#e=F^Xm2ZWFbnXr5j(~gN)0ke1bAr|cqEWRYXRU;9GWyfL0=J(6s{h3fj8rqgY}sN z=K2HAK_3=Mp8_&j?J{j9w9aRiojJh7^t1_(i36dq_aF&2u1$ULMC?DId8$M{KcYYu zTz@t_!ssI1f(SzLM{pg|6G#BUx94wP1B7FRL?N$inpgHKHTpix5RECkr?y?79Z0=T z@YWN0sChT#Jh<@x#v&(!VkZC5Tz~e^@pSO!;+Ydd)m6x|YKOXkEt}xMJ5hj`{P#tR zB-es3hCDCh*#6e%T=7koSBmp@mS`v51i_q(UbWR`#kczaZXj^Wd5M?+flAp|z@zXd4+D{JJpiw$hE5i0%&R-;jylXJi6Ot{HJ5)B$-qd)WvbEQd6^*w zLcYdZ!=io-=wR?<(Z0$ApG|S%L23u-B<^oPxXw%KW_wfUOeG(ahQg1Kkc$RG)<8#Q z|EZ`41f|8U*VR**N5e&dk+diU$zDZ|XUVv{adBPpg%i&K zAV19k!Y*vH5uidFcZF-@zdS$=>=;HDivXXyf(y`?V)QD0>lW^E2(2V=t<&8gP7cUN%8my5G4u z_ra2rd_@+VbMl)}Dt}v~=lX1$Yh8g_;ip8j|C)s|kDwQ40D{+f90-On8~8mupoXwFU5%hI zMr(sKlCf6j!ErL*=WmMyx1d9ymuW1-CIg`9Nhx0Hl2FTys2^x}u>xdv=338jnlyqF zHJM)y=AGm_u&1#3413{cM{l>?&+N8MSuC!KBi0^8De4>9QXWUTa`%zI=JgeMf~k z!4}p1C^Vu1YdQxP8P$yvt?E{eCQT3Rqd@r#k4rXA`<3TL&Y~-<4x=Qn>%D4ti-qIdV2C1R|NRB_kv3kkeCyvv zLkcD7KnxJ4k3HYz=|QC&OTd8k7C;EWAA0lS1s>XSKtC}&c)w3ehX`W@eEacG+xs{` z#8d{_j#Xh#KNTSHVXyHa#(^IVn_989!m=#Y5x@?IQ@^e$Zjb}&@9fH3QJ3sa2wF6! 
z+X<35ubWH|LaonZDjsXPEt&y9oYLUc*dNEi+#tA80P}+4=h|hssZ4zB_MF6e`v0e+ zNx~Q$6N$Bg)vhV<@zi;a@VluKxr^n(9U_1{UDWX^qUyd?06+hKKLXSCri)xiJ2)U7 zzT8#az%vM8#j14OC}U8bsz&dX@HC8+E~EBx1*D~=)mk`_H$Bt^#9hJp zBckCf4ImuHEWc_`hR6`ViW@-Bpph>&>D%D+dBEwV^}qH6y2CHv&W|sA@~WhAqT~-h zL*py67&YzV+XH2spgQ+Y0(+W@5DxgpLTY?&E3Y2fr*LqWH3R=I_TDn8>V91trJ0mW zK)OrmPyuO}AgP3sN+Y63NeD&?{}@eea=~5 z&lqP6Kj1{vIsd=7@9VyzohH71g+2V)QP4I0<-k9$brlvYNH=1|GA7m}4hAxp)?Iq- zU^xQ>Wy`Z1F$86)O@qyl8evvhtcie09ScO$l+ip^j-s=lBWZ$rfF_n&;8+`MT389l z+mgW1S!MZq*(>sYTnbpktAGWfV`}L_=IX$|Ub4L{O$*Z;fsDo|IHDIhY+9rW;+kxa z_^sz5qvW{$l(Of}T<~mDnpX3~Sv3r|bo5G2bwMM+32LdN@p{i(`vAIevX7tdBlOY>_>3zv)WsQ%tIVS8rUD?%;^{kW6-QY zqRD~{;zJ|kBp0ogfX{gMkBZ+qGB^Zar=xwezWSwX#y67!p$B5{VwCdn!EeKMD%^k6 zdb;|q;BUxr+1lsr?nZzR1wt13pX(4t`I?S-b0s)fPKJUY=pv?~Le1vejf}qYoQUkc z2SMUbDLLPCtNw$3`#)n+FbI-?By1KiMFL6O?yb7B41Z+}nO_^(@p-rLL;DkCq|os8 z$m$9#f3lK-@P#kcV2U%nv{XRcA;cg#qm2pfyNh_7I!z2yHOv;cK{EHp>t37}G`YS^ zKN@R0BKO+wo{`=$DD!B9w zYqqV2&PnOA$QNRfA#TETkf4Rd#23rr1${=1#RGboYm}2j1d13~XGZRvXEKo(*i%SU zB1WrB5RZwd#-D?U$!%Rg(SFhzcE$^s_D8TcEZn&aiu6`MN(vLgyuzmsvRqfksv~vxO&9WQ zYQgc#$p}jdcTH9KDCk;erjpu-QLL!yx;Fzl< zlIT7ukp$_TFr$X!^1$wwS0AN9ipI^+_bZ8$8DNAx@NS`#r|&pZL1F%A9@LSSy!N?t zaactrhniCX1!0At<#KYd7v#-NWJE>%+{$wmIM>m5^&O)BOa1qZ`z5e7D zLb}8D#h*_WHX(GEv)=y5`aaO>6{1`-dHe}q@q7&I{I)S$AABuyKiq0-KPv9A#K^vF z?T9DLMLHIbCWh2b$k;;&iFbcpahx=(_Bmm09c}To-}>~s%t6o#V{;{(B)0MKiiBbH zCUvWo51*B72tI~&5MlARA-sEo&Kx>T)!_Q@vD(=;Z=C?C13uvTy{O=?Z~MAH!9M8T z8AZ_j#~Kf@_ovdHX{Ni0Tv%QI$58{sXW%S zgDZQgEGVSA<6h-hwNef$=mSlLP}uAPZw~F73PnwS8^_Iw4wSY^dWFIjtbLuTkxPDG z`h66`!Ioq0E}2Obb=O9**Qm!mcUR_VA4b}Ab1&d%rQCLf<_8-rCIWO#)t?{p!5tF^ zPzhhU_qTyXhI6G)84ET~j*p46+&o)v_}Z(}+A)ZF>h!yYoMU8#%fBqa6`U(iQ;Qyo zF5`3R$kF%#D94NUL=Ij2%m$Jf41wQWzJAG@2t&KZqg79IZm$IfRhrt%EDxFHN)l2m z8nG6`tCxN~?86Bs=S%>r+S@N(_DssIm@Nk>=Dw~sN>&g_9)&=nY@<4NRpd8tcAQ>3 zk0aK2*6{O#=nxQ57Jp4=;?g2*lY{9cmfMfkk`gp!%64-xzF^W=NIK0rf_9?Jroyt* zVbI&G51XJ(09pTi^Bo;AT$fnL4{lqPld{j;$lQ5zVkTuN{%3Xm&(m6NkQ4f?Bx6sA 
zmst*QlE_dq+m0_Qzx#DsYlXdz&Ov8GVPPcK`M*BeFY7Nw9_k{(!27)Gds!A=@y4Tb z0=wq@N;dtk({zfBR99wjGyQ?&(z7vhQD=PiBLztb_L&ELKi=az8CZlzhlgrw<0lEM zG@URaHOl+vXPW{YW*qJ1d*=F_6!u;r`ud%38|wES)!>-_ip&wDT!yJXd! zy(_(uDgN>|g;6bpET0b=lxs2K(cvmVkxzbytCeheTA%YmQ!>d z%5}wE)I1dl!q+8vq+<88aO%Xepa~nFyiO0Wh5-PHqCDLKHlJW>aNl~PbMQeKA~Ct4 z=)A>r8AQt&n;T9L*9-|Vxe`h$q(QrKD8?}&2>l@8kZ2O=sEmeL$o9&pJ|xX$E>51J zR49VJIC3m%;~t=}$byZM7g+~SQ-jD6q5c*`jP9brP;`w7xbwte;cng;`il1$I-v84VZqRo z7Dxai;2$gfBH$T-nZ<#DaKXlRLQ1o`G?67qaTl&8OBFDMq{R1HND+rPrH@Fw!CQ9j zi`Sg5^mV>?58p0=dO0g)m>2xn#}@4?rY95|5=wd@p}O6EgQY&vrEN4tzd z#hrWk;z530VAD3`Ka)?`?swfYjsM+S++NBw>FJIrUf)v>J&z+l#9L@9)CynId2eO5 zUY`_l{&gMA_*2++)EJVNL<&ug4-)LjgLM{~m!Y=ruf=npeoFrMiFhQ^~XXkpjB#olRc?*LY?!7a(4H49^FL@5O z8ZAHXb~W5HPm8G_0fFb|_2e=Q+Ap76S+3?8*S=5S)$d3x+EsW`@|tXn7c6{?o>K9% zp-JH@HHTOZOi=wjY?pvk_M2cyo~M&%@%k6?jX5yJsLH|x{5-cK1ZBilgIy$gY-;U) z;(--KjxJLWUeRrlB)E(2%*|lz5!5|wdKfI8qnU0pQswY)W7A9q5(dJ$a8A9qZ>LFb z#9fRr>F)V0v)O_aqIt>i2BFl{>6i?y35R0L!EKAqtvGiAFOK4k^zc95ywJj4fxGt# zf#46wp5vsB|BASbC1_e7+Yu=HJYrZN$S^(QhjvKS$bLYtbPQD+ zT|t*+KIOcy%jFEY;5oKN&n`lc*Phd1u&$yEX>xN_19B)OF*+u_G=O**+cR$ zpy#~tvZ@_luLO}c**pm#$n3YGI~~biRi2(dBYNDHyau@B9MsPty_N`@(U^K+-NSGIgGX|`p335Q1E)i|ef3|Ic)lLb=6no@$q^c%y^g-qNQa}L5C|JN@RMBoO;ZbUd%f~M9nDG2*N z4N1l%7edF`;O+9GUr4=>A^`h&b|2PQO9UX)V z*E?((_~EbrHaq$EH}voCRS<2jon#DQ#DCQWe-{PvKVP4;Lc_u3L@@eGqD z-v9kx>(Hsf@cF~TD<|q+`17Os*R}kQul$?5_(8FUT7MzEwWe)Nr zoPK|1+}PTJP_VSzGicPcvcQM6Qm5#)E_hJS@1n!p|C?y{pDd6dOj!CT+D#})XMsu+ z3(KXhBZ-W_F4A5h`ci(;OgnudODeixM z;I^Q%rI`Yv`_F;+n%vaXbjSjF{A|am7IgMvIJ2xzX+3ywo2<)1HtHsL58>Sz*M^Uk zA(Oq0fd4dKfxo zvGm}%LPqxM`St%SC>gyJZG&-C?CsYbLb94r#?sq(fKFGjA5@Nx7w7Hpgle3a}um@by6ZJz5cb3$_PB4In zZdRO-GN7&X*&kLKEVe)Wnz{~m2en`m49Bd%u8oQnh0d4*c^6d5DNr0HdYzr@IV}!y zC*qP4tS2CK=&m}x-aj}{1r&z^Je=eyNj8BRCqF%<`me{u|Kr#Gj_9cz5j93BWZ7{7 zCI4n6&}-QsA0(}nsyB@ij4WZ^s#AJrcd9jXpwZXY)gE#o$k~;MYv)owW@|Omq9d9A z_;&vABGLrz565_^HXKBsBe*o`W?A(-$8sQS=5*J)3K7sedg$zM=+R&Efd4=u@IF+~ 
z>lLz>{Xf^nLvSzeJA*$3_597lEJT9~IFDf6zYw*1X-0qf-t~j!iari7QP3D9=CYvT zg@>jLn5w0&8-xcrZO*C!g{vISq@l&s0(?M#)c(se@z4Llzi;`Lc-U>t{SA*-UJ~wM z#1L*ww?6~78{k`^n!F9q)c2&aZs*$_|EDLrqYmXBTlW&vAu84Xq940*G#4oWl2>lbOamQ+mI{gG zRwDQNJI^F)VPtDirZwI~}3Z@roE8 zXhw&G_N%5_XKyb(YQJmv{=nM9?l>ayqq5g*AR$T4t-xrAFYWERiW~hmNb0|B7@!A- z-2_JpQ&Sj7d#ZEk6((P?8SGlTiFOT#Nlfvkgv&e&;6(2v9Ax}{2f>-g9fJV!)_G=T z=AMPgAi@+;JLto{sh~rH;ArSh3CwXqJO(5MQ%Wx?9!JAOf>YXhH{N#@$6Y&o89C7S zN*=G0hRfs9Z1nQ2sr~=t%B)L}OCaMxFRx#H>4Inw6|gWBIPmAdORu7)5KyUD7`Nor zUAXZQN5Vn(`&GCQg1483HIZ1;Cwmh~=yd0@7GMF){!Wid{83`c8$*m9P2>aAWK0X0 zf^}X8H$iK~4!3-%1amw#)Gw$Q*DN;wFX|oL@i!vsZ5E?=e7KD1U3c@vgYRBCJv}Ao zzkOxrheHXYmtGce%h}auXrz0+gTj!a0pGIysTF5&M4|K4hyU02$9B=53k+DR>|fb_ zFO@gWfJk#y;25J3iuW}ZRrWvQ0mkIrz|fF0T5qQ|&0Qze!?ioNyEY*xd%B<14KmbX z9^bX`7e%$#|H}(N+BNl}CKIJz%bTay)1(Z6sKNqTw0Nk+zVEt(MRV&Q$Yk`QEpR|o z!LrifMf;#t`f8040$cd)UMkm$?i9?rE;-Gr{P^%F>#z>D0L(1ZVV1)G9k$#B_|z$E zH>?bRSo%W9JU~L$tO)}Qcf-+f-@tot$opDurH+ovV7c1|#7n*dGH-mQU?4s4q6+}< z+R2T*wts4YJ`4Q2lqPq0<2p$dnZ`Z21&_UF+nvc~g|G2p$$U{nhVv*b5626sx`d~z z;CNjB+cWGnfqSJE<}-kr!JFJu?~|BmgV!dQOZ_kwk$g0*SnaY9f18tt{ahWVTw(uv zyF0aWB67BFt7F`a$D0xPftEpjxe()vu6B#2ucc{l?ZIKHKHV0s+Z75I-~25mH0nVX zY0L2xIWOtvS2$YV!E1e|Qqh%m%*#*z^QRZ8^7sAvd|$C}+^KzBbhj&3-fC*A0m=`x z*ROl7z-b5@JnI}J`mlk47`g)L0pLcLNs0_^np@OKaO5i!mf#@mbV^K&(FyWB-@j(7 zW;N9E(VD-*?xRGPw2S=e&-uD56lh#O*~9K-6ZcONRP=IaNC<6E%B>uZ^SY${Xurcs zJz=}DF7Z;0VhkB2rsLCxI8NF8lH8*twNy58tPf#u30Ojg#? z>FspiQ#3l9>nvo`T!C^FXeK+Ig8zH|xjGR@w`I{Xy{M6@W!VxA2bJo#LPP9Cf(Lm2 z2#?nemwU#V-esDE9dvzMbZyLVv#V~^xz6skM&ECAa%lP)pf^e2)}aRTz`{kg^IB)8 zC%WT6Rs@$|K86ZFL#I@qUEtOvO7}U;duRW1B>z*`6UWstadZVhX99HUxgNnr9VW^a z%Q~QVB!OvIK{4xCpkM4?09F9|`hN4nYS#l1>5Dk{JmjgN`oZTHyf}O;6srGz+L1%x z4uAq5qG{+;B5%@cSDeChXN(}!3AgSP7Pp@Q$^S3R7Swt@N#K~3tEQ-mp0cbeCwT_hEHF7 z{%J<+$=p~#%6ZhmeBb7SuWz{f2;RHGU7pjjE7$Z|0sBP{P@j@}6rZ1fE1lX~jaP$? 
zWUH)@h&3hZ0ss|>wB!YU74^y0DE(%H(d2tyb-PTkF1 zcnh*!8496BHGarTklfypA6;aCnMOJ3+C1a}i0l4Dkn_JHA2Ju+?b%;(e0E<#szJ~PJ>XS$6I8sK-kSS$CsaCAxRt;I>0_>K-T+mMhhxu+y?cAGc z2`+bAGl{?;X~7P1x&OU11I>B4L_jlz&Z&gEj^bnvZ%tgng>+>9aH&N1jszc)r`0cJ z5J=PhewZKQv@j3g?9(%O|vRwyvA`I*W!6^ZPJ_qLc z3RYkr>vpjQJ&*&@X=2^k=}|!GH7Mxc@hmMjS55fvu5+0?6r)3*dsD>v2F~NK05U*@ zIC1fC48ea5e=Cs`(GT^;lid_LF)7*w96=?>HoJMs6}pYepr96MJV9p2-KGc%Ex%E`NowB!9ijg4Im_mH zf)cIw1Zxz%v{o!_S0Y4=t?o9UYw8jy75Rh!oEXmc8Tz)JEtce^%99^khn;4yt`Y8j zcfVzle_e)LkhPs~+~tNi_1`@I=YH?!uHKP(*L?sM5Z@P7IrPLs?zOa;N5as&5f_h* zgT?~jcjnwtbl`TOVTFzi)B73!y2}L9K~VEOEh#CvT}BXxMa-7*0*a#>HMXOph@eC* zR=|;OF29;4oEW$CoXtD?DkM+)Lz-vua*fZ)Ar>jYF1d^**iTvs2b^wnGI!_Nm+)<} zm<0;c&nLkHkzl`R_-Lszf^z)Iuq}50x+u}hXa$yKhU<~=3>d5Hu+5#OH804{vxSSQTt|Gl$=!S)Q$Ye z?HckQ#q^!(e{OZ5p|J`kyyp%bln(FXIB>UpTv^`l#dIW8Zsy8E&UBcf&y;^XB?*mv z4dsbBjO*BUo9`n&8&}_;uxEsuANrQ3nK-Chxv`fF)wttI5)wSCBF}#^RLoS4DCTPP z+PQgpwLREu!vv?;+BmVuVVq&E-_SU-_jX+SJb{9Y6s6hD2^L=;ZjK`Od|iV&hsKb^$5Irq?kzeMDsz?Kh8B z2`RN`L%a)f1#f1DT?%V0@ZUAT5H|Y#`IRoS`;ST>wQqjBxJZNd*nP!c8LRMqC&N@Y zM#qY&&%reSux#>v_nsTuwX8gbTr#LqSo0bA2jtvCpvK9)S9-{l<{LYyi{W;}8#q*lPb4!GXZ1N{7tZ*(H)!Pi_ zRF^*Vs1Q;1UtJxB16~D=(V>%g@WN+l1#M^>Ba9{7H;iUC7xdpStUkHyzg?ZFO5CX( zSm&{rO8>9T+W-Ap@d32tb=AlB%CPiaws?QA3$PA`2Cbl!Og@})QWeww%@ie;5@XnT zGex2ZMX(yuy2m_@YI=skW!{Kee&M9HaGaCv<-ZxvALXnxSE1kc@j3e)tIo1lPP;9% z(4%eXJnaPnEWh6#RyMl)skry4u(ZieRHkho9XUrC7jkSr|boZ>Pib`&V7LX-O2 z`Fce>aN|@-ue?{ry^GrvwdNgy96-i+P$Hr*$`+^jd$0Eb}WMe;=) zQlx%zr#Fn;{pYbh?vAiB$gPZ24vB{e!e#MxcPHW^FiP>8Qpno4f8lH(P~tx*eB=hd zynAqKj!tqUxH+iIWSopuJM*u;+`wSQ!ymm%#cLMK9jW{aClEug&!YW;UfA?h z;|hSRfu!XKFH@|Ai z8frA(3dq92mabEnKMx$|0PG-?T)@U?TJE_tib;9Q>2rlGZWo%}eE>}06!rg&^{NGx zTME{2oag(!7x{x|kRRZlR|H`9O$GlbAVO-u?HJsEmCSE~v3`WX1meQdZ<&adaBK^N zyE&o2-O$?OPbggFt#)_T^UV(Q6~j2Q4&!w{*fhR3`ZjK21$ep9KFGkT<6?P-3xvdk zrk$;l*@djw_brEU!E$3kYT_A|IopeNLj4kxM1Z!qhi9(h&%AgX6*W_;0&`4UtG>OB?Fs9F`d0TJ@2wg; z=>+vM{UTp{GbHCAcFbEX3#rd#bqgZ=7|)SJzLtw>`ujHV+Y4hZwWLBwQ!YDeCcrZf6qgw@vr;ZQctAkHH&&#P~ 
zfYi019C;ObaZWAiph~%bDSC4XGo)CkF+ykR1|ALi$uu+MH)(&smc|Qmm-BFjdjC1$ zp5yX&Vb1wYq15Psj0!={pCikmOd`rdBMJ$9I!ke?1SJ zG$`p%!`C^GY*^G9b@9miq6&jzyrI&V3YNStHo(x{5ctgE37>(-k{qXsWSh$O;3CIrv5^BGt; zlIOhX}mzB(1rFL z-^V0fHYY~(pf8qzT_aN~A`vmYWtjwq>PtOIuU@KbC$GvDdY^1odP%yzV378@ZP6ah z?WuXp^(9kywWPjaE?TR3Od}H~Jkc%qX>hlCo)jmC(l*9SR&^xrk)Vl865~a;o;QIh zR()TLHzzl+_4Dz0(4skXlZ~Er(Hff1RwY4o|F-QOtP1K6CBxk12+DKJGJH^|>$fU# z1ppepz-{n;eVYQHuW%;m8VGm3ZqXLT3Nyplhr`8&-QQQ6maz0Tm(C|B2cw}(m@4UL zhpj-R`W_%LPM~a1)2zfH^U*Rg8(C;@Y^;1>o`RMipxe6Uoj__VO~|>{BWRK1K=3u; za9?eePBu?fS%34o)i{^ZpuqB*&^FQSFy_FFfLG@IZ`QOQWtO3v0+?!eG|aY z1Tow#O6XM3a9(q~ZmF64!R#|lzRvIRk&bQJoji)!z~9V0bMz#}Cb2NK{`6A!6;S~- zwDeEkSs!u7=J?x`y4HqlTK(=|wfCAzt-SO^Mb16`ldqTH$M}oq9;J+b_p3k!kK`A>*CyIQrw5KnGt^Llmmbqta`poSQSi(o3E8g zZw{X2*^#+2$l7~x8%}zMQ(S(3TrSPR;P*4&O)u<88p+3nkY{|2cC5;7^xn%M-&&xr zQF`g=gH=|;$?lz=aCDTaemW><@*lODMLEi6#DRJtS%I?Y{wuJUfemRXiPCS%Lbh-u ze?`~06E9W~lzJs5{EQMVTA!Mo$s!kipMW@H5;#E9^AU?oMQd?m>Zz#%htFfg4&tMcl(%1VEfrN| z8&<^ZuvU?!$|X?pn9XrTVhNFZgR_pm0EHflg~+VrS{=L4bF-X`9~D@{SJu1Mfg(27 zLQA@^qxKcbP=uZp0AjkO1DE(HAhJ<))Y%(6Zp5D#znd-8%5aEixcOGmZf=!nie_@d z?Dv;f(@So{_HM$KZSeusg!EIa-D)`~N)GceMZLLWrh|`fT2rigGK!q$ds#hBTP7Ck zWK>>Wtxo7R9k`#BiS9v)3Qfx%CKA7rRHPNODoZNL9Gq=u^*d)lrU|Q;oyadq-F9O+<-20p;=W zF<+W5OL6PobGrZpPKSLMxy^8Kkw)tbhL4=r_7~+#F7sLi=uu#0(Dm{i5XN}JTvyiv zaWeU{DYXejB)ZOcCsUDN>w%TQaOQb1>Og!Hc3U;#?AV7?d0m=w&NK5zV56pI2sQ!w zEg0XgM5lVj+&HjH9iHhq!8Xp1`Zo?y|oe!ju+$! 
z$({28?1YrP=d0HU(3GG6i&>jMZ~$wY`>mbVG=Wf+k#B) z{^jFP{L2^q+2bQ_VC|-`rY^vHVAIe1~d)q4i-kMN^0INdr14mJp}sNq7>7JX`_#E0|0~` z0BAkQ1%O`K3ZdW+C^-O|4q%VR6O)8)qgS#7jLbrzCGT&~BpMGyb^H`I)H_1~e+K zj_tT_l}76D8ORgf{pcW*v+MigsDSw8%2?9lyHq@KI3bB!&@kTG3JDE{%AQwj$8&c% z4#Y4$XgkXFqk|Y?VC~(&vx}1@oU5x?&1JH zo3e1blhBicW}qyeNy#OQ;(|shS8mK*Dyl0E<3^x|MGzvRF(={>kNkGGV~D_v8(_d3 z7zyUrLz}0LhJ97=?6~A|BLERE*n-)MmN^l!151i!R>%{>5#&}ItSCD(TVRgFgR?Og zCflJ>7@oK0)EM|*{k+5i+-Jj|73Il3eWRxrM0iDnBMZJ15xF;$0gBxi z#wh8N)wcQRr8ir91b&bbp+`Y+WvS;L)6CJ6jE#Qj_hc7r3Lhr6=5FWf7Fd0HS=lWl zWHacfj^NOA@dcgd%-%J>s!VVlHEdCl_S)}vPg}1~mGzZ3W)B*CDdiDxu_kie4oaUN z2zg56JzF^a@F;%QxdnH$fkj`AQ3t524Q{`?Z@+-m7!Jc!4!2dDuzq^cm zVQ=8RYEWXrXsLbYPF0#I-ms5*>e*b9Y1HqwKY(+dSA^-urWBq!^wF_#_}?o1@X z>|B_NTr@JQwNmneoqDO~CF0B8#2ZZyoqfTaRh0RO3o~9AjErv}hwGsH*Rz}H#EN9a zxCJB6o<;3KtXJSCk;69~Y?*U$%yeo0^HqX1BBTh*LO=N;OQZ?FXYBnwT=|2V8Bw}LYeUv@JIex!9Fh{Q-mpo*7VmJ2TXZl0YOq6duOhNyH=RB&{Sf>;o?Ki^PtTJ zbpu0bxlLbMeMo1~fu6$QdTU6CTurL3i2(zP)B>nZBPWg>f)4_cN|Tw79AhvP?<4I% zS#E4I1Xd_5LwfAp-gqUINH^9T)vd?bw>N(CI6vPb_P>!L+B1;Tvn+~dX*`r4O`^TG zcJy&D?<+}0iHzhO8*Rz-B0)uCE?TNkCVs zeFpyYH-N+Z(DP*{%-S+kFhZ}*4zk0`%$Ap8uaIRd0ykaGBYmgsHK16NI%r-fAa!p!ZR9o^NpWhiO_a~2r$mb6Gu z(lC(&RVBb!eWcp??QPem9|jynqEmjRPNKkO@JlxmLu{Pchh|sy373wLX@d{$((&Jly+6+`~?hM*)i#ZA6llKctC5=?>7JlDtA=T^s z+Z-r%JD8i*qlR=S6AQeIUjwdD{^09{2QbTsq@ygAApR{xMRRDrK5T+?VYHpHXCB9Z z`k#GYpzH=1r)aN$6nNB$P-zReEj;oV#N(?MXtr6q73lYiE0WebxryiJ(Y~9{+1BZB z}5$s{D5_#@L2+Z_SMNV;J zjZ^dzq;X6>AYi&|`>f#D_(jPok$GMdF~J_o>NVw$x=PJI&!P{3jCYl7vQVq}ta|F> zTvEkmXEc!oUvznjS7!Km{#S-PgP-laqG|C3vcJ8`&tG?t*L|Q>5Ik7OQ1gfs#_)@T z>wkGKTp-&<+f9Xhe^mB_W+FquGp1M$RCeWMj#KZ*OfH3P)VUX>k>r0Jzj&PE-_uwsw z2icNX`x?z|uf!`sM+rHu51sXb#$%jN!HuVf>aaTAShxa)rc6MA#2&LP^y|Xd=-=qf z7nk=4PXs3dxn;8y$8%O-!Io!aI!fni+Y<_UaKa3Ek?m2iy4|K7U1~PWKuu72=V%b@ zo&W5{G?vI*#rNE!@sqZ~eeHsrD*}5N9O}t~btkZwE>v-J9mu z)*L8*HJem-rG_3%WsoqL&D3&8O;aBo9MJNToM~r7rIM*k9#IVAW#U3#n~N#>Y2?L5)tRT0%DK2JQEVSHjOLh) zJXyt=M}q4r6sZDODv2kUE!Nr%Crdi7;m=|D@kNLyPrF?i16Lk$uG&!YMbqou}Sg 
z-Eb1SM;S^U0}Q_cJ;K#2ebO4o#NAne(J>R*Va57{@k&3=-2que3D+mC0WXSUF)P? z`Uj#QJH5Ttd5Tn)F9EDu@@m{WVkS1iW*Em&!9s1+w)*FJa3aERRagcQbw~EvG4H(h z*O{3fT+F<3Gh0P<=EY(JlH}sOUK$t;6eBCiUhv4eugl4t9jmin888%ChHkXG_r)_# zv@OOxI&F+BW_}3N{>!lCV+L(6ZwQial!)!X%(mbz=0R>@ycY~pz|K(%W>Ns($?klaM zLj>&4Jw#h4hdvGjNIwdn%l>o@^*~-OFU`GN>(DB`83uHQE$ z+9tAOl?g>&95*x!>yr!DX&!oMwpHf;m+?*a!-+uhWL~J$Al&Us`DFjFfr$sAa?9g7LQ({D%SkDBmiHTb(~H6$8EQK%mV! zyrA(`Dx+*qoZMYB3HPKxS(9S7`sy=imc*_0J|}QzU2bRBdBMf0;|?EsQ$&Ec)#p^h z_Vq#8yS=}cS6WxDkdKKigkO+?8*nDgyN45V~?G;$~Vk}i|_Wu z_pSG8si{#H*t<>ono1Q(i7-fSgi0Ok#c_nCt8^z7^XEk9KNC2?YEi#VU)k+0M{hbb zQSbYV&qUa`{N86?-5#+kyVhO7J(z@t*B2_(m+nb(%$*I;%@}Yu+8J@|op=rLR1iO? zP}_=;P#+n5ffZ%hIbK=r)6?g)wJTz!qLv_3cK3R^!b|V(+VS;?1NmN!WJT?3{v|H= zr47z8PVgU{vrCk8bKt?g8RGIId#=-XdDncXD+S1S%i3k1TNTFBeN6;Fs{3iWfu;Zt zxY%!}pSvx8S0&g0*CDg>K|QmeS^o&+8(nd!<Q(%eQLVqL8rqO}|jHWNcw zeBW-`1lw+h3x$LA?6bdQnM7)kt-eVKS7gAK&(3qfd79~IRM=K>vb=Xk-!ne1mRE@r zb6^sYZ4(rG>-_ulk`$xHO+#>Jj|a-($Az!fIzNpa6|?8!gE_-$5L ztdke8(U_St4++Bw=FFB~Rb!YpG^Q->T%=JU)0kPGVwdO<(z=iUHh1)UYBmto8`;ea zXxwXDMi>u@NHHGU2CYiif))&{o(5Ti@{zDWo?EtTWXNifouB3z}W3mZE z^sym(ftP3KePRCAlVZ@2-}JDDfmYQfoBxuyt4-4p!pgXss>V*g3czPE)ZvO&hJiNq zTFH6t)3+AwmUBtr!>A#3IPaFVg3CgUR`YQ+*4nc>Rj0*uS6_cU&wUdN zf;SAkfA!Mt{O~<`EQe69!(YF%wACkDaX8lKG?!0ycsGG=YqA-e59{idbw?PPf>rxd zU1$undprpzWPzL`By+@*TXQ%UBfI!*c*YtpZqN!{B9l$cOTUi(v_#aDiBlmkr1@F4k$N(L&~ z?A)NLJA(4p?h9Uz%g1|@y#22OJ_3GABPVs^FD?gifdJ>CjzQ&m5W~St`JF93tt<6| z2UkmOxdu}3jMaJTJ|VU#9-;eY9i6Q~TN*I3H_o^qd3>2uv~*azD4$vNgpR*R!?^a5 z>LH1PoiM{o&3hjc?s(YCr5WaynlJTj&TP_R2{)rDN?-`?ED}yF=AXcIvl+=#Sl-Fde>~{1gNv?^;Q5VbT1^yqVF2xy#LF-ckLydd~Pw>-f>%4>BF(zM2tWA|4ic0@U)aLxWTu>WIq>oCt~ARrXeUSM;1)zNdj6 zYVJ8*NFqeI{NPWK^0>qs7gVsNoCUIT;jtI1RiiSOV5p%yQhC&}ShsS#&NLK)8kGRD z#jTKA1$g5OP5q>SvgE}JU}hI3WF|I|S~(6Etv&%$>_fEoIG1~d`}$;tYmS)m!a&Y5 za3JT!zEEy9Yf^n_a?7?iIqrEdd(9=OD3=9t7K|?eIH|3RJ774OXMqg6+nd~MU^uFw zPxS6&I--xnAq7`QhFDxZ>69Ti^0QFM_>jlQ)i*JARqI`T9-Lk?B8r|jOZh`*z*>VQ zWV|;dtZ6uxqI4dQjfCcf?JaO;N_>rpR|wN@1C6EXfVZN%GaSVCZ|6=fR+e5_Z8;|s 
z+M9my{hn!-5(x?E+quHqL99NrU0?L}@~Y+Lx+$ZQw|71nK7Y7$zX5pztj3Lo9t`E@ zuzusQ)n&$q6hCbXLPyaE3nT9mi6cW1vnCey?Qs&Tt{@>STNfZ=A)=0my=$BWo3x~W zjaLv`^W^xuh0KSf%lCS7+nicMqJ+}lkEDw`zZ?<QXW`TcvJPO zGwcC2oQvm)AY< zI(NbEU6(}O!mcN8Wz?;^B{*WRtbQ-{;0WC4N&q}G3h{F-0)v{7fe??2Fm$z`3jVNV zJm2kb&Upf&t#TD7jYE7&J*(kDu{jw1W{-LC(#GC~rtFWi>yI;^Wb-+J)=D9=RyC7; zT#8qZ&)TaB=ZxV3+l}*$^6aQhVWWA&qHwJ5{VG~(aCn+lM>xMd0rRFjS(njw4=i)p zoV(_~Wa;v)!MLsSa1@qA{zgZIUCE%_7ci=n9CbyjAmi7cr# zRC>($2RWcePJj5xMdNV=j4~9bj!aD&XCghqCJHk zt%+s7M>m#qQcn9$a^8}ye%u?k<2=8gdClSDiy}XBsT$sgqJqzhB(azf?+>#h5_oRw z4te}K)2SiZH(-+5zU4aNHKOh`=dN%6vs$3X_r&BJ<4NCDJI!1_rTB~L+!OB@S@UjY z$DM>$_4InQ@`b!wHr!OBw`5QyzmSNn{J3jdTtTOi*>^ohhwql=%9-=l(``EP^*cFg z2c+3ga$*viXJ_+H^7+G9py1-XLH}51ZX+|8@O#Paj0~NU?*=CIKHQ^jC5ed-f90L3 zC*@w?f9awW#{$zN`Dtxl0%|^kTtf3o4n98St;~WAsrAMwN=p zbllKEArYvCgA0=sR?{jBpS5KhXm;mcBvDGf6Rd5k%YqmPbd zNEJ~Kog5cM*drEO3<&ftxUDyZY74l8W~T!XQm`v@2F&iT_f!V6h zloA#0Kns3rr89zB|Mq)Z`!M}-I#e`$Ib=r8y~4p=0uqW8vzckk+)@d)4DX|uW<+py z=QKeE)<~;Jr;X8=7;stic@utCQ{C@L#L*NOxq5PX#F+zuBO$;2Xgca$uc5~YikL`F zd8zC*{veZ;3E$XFTde0%^uD=~4~6E>?zxkTJO9!Y!MT%>D9SaRU4Ou4N82IIL zs{MG~1H^s2e7=g<=2v_9m(z7WoFduh@A^!z^`(J<` zJGgVLR;b2;Q*7KVc`KNRIx$SZEZUS?liPh_D$SY=Qy(GM>vDRGyK^G@csZA4jrGKM z#bzFV>o;MfX%wCJ>pHULo}rS);xc0DbW^lZSU4`(B1R+cx+H>t?S@wpi#WTET(w2} zQ<@1^%Aq|TpDORYpA0*v^0G*gg}n$#h&mHI>l=U3)MHTp=y-#=+?eo3;wz4F4#mee zx$mlX>rsc>8`bvAjR*`^T7QhsSthSZxC`GX5P7(}e3$5KOLC^}bFa7cYOmzw;js+{ z=B)?A4&I62`=ie$7&aA; zTX2|ASbRD{%-aC8ec4@k-zeCt&XEfG3htMe+IJ;orUq0SaZY6({J6>YoXjLJ7D@Q1 zBa-{g^e?&kkx-4?$rU-^B7q^2gd*CBe4o|DPt;+Y&$t535p=ZrwIW`ZmmZ7ZA51vj zzhi1l#j_t%RPdbQMy}(en2mJXdnc#cB@5fOCVP5LFLz!^^CW9eNnL2dNyeZ+T)-kA zpbU9@6k>05);GZU_C`{C&AEoP@iAR?NYz6;>US{6-F~T+EB9#ehq#_-<--Y$r%YdU zSNDjR1zRu7|E3YnkUS}SDA`OTf3`|7mtlJL9aTo*`SGN|xKq2~Vg!vYa5HY4goF%C zTs-$t${H1%oGUoLL2MZL#8P(UBJMf&SaIQdxH;}^yQO5fAu&X6W%rJ+&vol$JYE~o zn@_8CeEE3o=-QLpT*jH@U(P+gCrHDKx(SWPdAz+B_ZV)Wf-SD8-x@yG8tJv%n=Bds z?POnM?qgyMTdn4M$$PsY!1L8v{hgOMAoPuOP#&DHqXON##;P2AA6&WiMnI>Z#LT@E 
zO`@jVDRl;W5bdh;D>&0r;!K(B3p-~0K?NQ(u{r@?-f5;IxRpGnf}IBQhiW%tv$Qbr zc3}Ldsvs0OjO5M@UCi}sa9ccoVF_HDfmB?WTYGUhA7LWKQp`EMyOWr z`-DFF2qFmq?n;~{5>iq@NR4R|goqSU=@oprsKMaiCi63FEKB)=(1$r)U4o1Y$nu#u z<<$}sef%PmGbp1K-Jdd+-zd!0$;5wG;7NMFf0Bf?Fqc!a3giM$!)ZD9gh_>%bpl>v zWm1MwAPKY0nvV*0va*y_rWAlvGb^R#S~?Vrotsr6)%l3Vx`_mjh{HJdbqs%`1Tt^_ z!Fb&PYPL-8UbB1Auc*y!j51sttS2qiEPRiad5NbG>i>tlw+@SP>*9tZC=2iI)XT7%DU6PVuMrv^*ZI!M$>pEk^uEkg&a`@^%nl{N(ElvpNX@VW7?okq(Q2}jVQ0B z#5!oEZN;g^4nk8Qp35Q(>no->NU)%_i>4xdL1B-cf$hH4Y)kG!Y*dh;>$auq_%kYo z=E>2sJIKJM#pL0dOewD?8LE_`z=+FDU`tyy;N~&&N|0gaSzZxVlVAWGPbM{Af-xCy zjK*jiO!Ll&HnXewP*}to$A!#{?U?tyU@OyaBX2iw-p3i9XY{!g-$SAGL!xFW0H29X z0`}84d~F(gZPaU1OZGtPg17ZT^C&1c%IJi*@-#(0?QNCXj0Iz}9QI!9<#KkhAWP)1Ro9 zx1R@pPNZZE$Kyu_AtCN=0343b>{*mopl@$js{k-I?pz!vy#%{}tLhg15KjZA?r=SR zD;IrXP9Xj$z1sZeN6}V-)>qvL7o4}p)_%DAockQ8xPz${KsUeQgo^LtF69B4VGHGSy;7o zRkQmRQQ~<;BPomW?EAbLAaR-{C!HBh@BLj_Z{TFb&b$oc_ebWIr}%GDRl2doH*w5E zic~;I)hjXkwDGdwJ_Y}!JeGeB0VTtxzrYt-z98^tr!dvY?^#C#=`zhY${BC?xm%jC z2GrHm&Vxg0ofK<6C{Wif5lEiDF{Nvz!hNcv{Tofv8X>l*hC<60g<~p3aUJTK6vjXB z<8NZLc4Yqm;oZI^Xsr})0`((+0*O4B9=dS3IQx_VErGW^U`f%ING8Mr((znHDZq!H zp6_|=LRy_=$g=z%>d^X7Lxt{#y)oZ!^LAAO5H@nIFHgE(dT++hYT`jtP55BT0vjZK zxoXLS3VYq-7$RMro%6`X5G0eQ6Wa5vrHZFEchl;2aI6* z#b3$u5I|E2S#0ZAGOrG0mFVFx{xiarNOvwB{=JVmzWOr~rwvMJ`aVG~hHL3RDr8)p z9_9P?;}SLSeb=#DksEZI@-Xt`YZ34R#`V4wh8N$E#s7AOHphhr`-~zzI7e%?8Sa_O zz}MttU)_a0=Kv<_?VDi?GoXk{F)?Lm0^21xy8=j5{Hp9)6eHCI?L$6eo{lT4{C*DmXxWO}@y_@^-}fd55VgthkP6)m%z<=w=I6=*=vA?5ym7U%W~qc}ekr5HG>C7mi=!vY3l5nfG;W)g}}o z?akUwjC=Ndk9i#S-1X&VTxjL&+jZ z&Z(^ceLK!0KGF>#Wn8)QeYqukJu3NMho=4*Gy>!rou2fztZte%oOTP=B26SEf_J{Y zOrU|iqe>z9h*msoGW?d^#a&Ad+d5w@S-V)L^AgAp5>d3em&6$kM;yWOD#h|XJJLqw zlmv~^&#&=b>eA{PeYsBoTfTv58)1T{kf)Xmlc0IYPN^~@B&YGu1L3zhfenaKCENS% zO726|48}f}s`@3DFRGJA06gN3xlFuqK=JAuPQ63s`9R)!&CW7cRvm-dSINf7o6plx zr36n^DPSD^xL-A%uA6z}78^f{F6WPGTYgj2CC=b@3?HOd3 zNIfu80-4wo71(QuZ3pP8cF;CqeUE0&It4gea`WX7EpM{nON+pNMDBSJEgPLg4=60X&Y^kXWO%%A284j+!37Rdd2UZIQ6E%By 
zWo0I;ir@xqX^DNx*K~ecBLqucV{g%cq@HJ4naX8vDVqMYCC#JUw%>GCsO?Gh00O_e zC%~82_6%2XATM%*E+j&BLjw(se?a-Ztd1bis)aC$RDCs=h8g3@MbL=7G6*XE-UHMM zLZ!}LCX(@sc-F}nf#Y$SDpLcG<9+9pZ_fZ?Z4`C~bZ(6sAOhdszE|i#6GV{+P-0z9 zf`VHcGIufiZ_ET@S;!x4Hf++WyJu4q4>xG`-+5uNx0umxe5%$*1ihREH0`*ZUk7JI ztA-#~xRhx7=pE5^o}r|#%P{{&OV_h{4}yiU4~{$lW|TbuS5d3#KpsND)^AYqT_gYrZ72oOCB}T=c9mA;zwb4@(o$_2HHAT7-V#d?u#}#|;UAGg`lMlU`bW&T)3-*4cK< zxxQlk_UTO$SO+`~wxP&miI*)+Uml}o%o;DBry=k!nbvA2+i$_v$C}oV`|*ksM!~xuc@H=c zTFwi#fYs?r*zqRAZ;Df|Z7VNL+>fiQFbYQ+bZZ$O8g=>sRWgZ z!$u|}+vZwS`I7x`3Pj_Cw;lx{1(9oCYGSsNJC5g!Vdrn~Q?#q>X+19vJU<8i9$vK_ zPVq_*t4Y>uG2821HP-a`LT)PWXm(BK+&;7eF6A>XVJpLVY-FsI;ch znLb+Jthy>s@9+_m8KEK<2p6<1FS%nW=XPo}`ug#RnH<7CmS?$FnteGptVFXX&*C)) zn;Nx@k)J7eEOA)q(ITL)41IWOrv|XI5BVA?r+on0unAXwhvg?H2+`CPKnaLjkIOb< z)M=sIz)!}G&vcouoj{|#Ah;>K#H@)iBJ2a9TTiVbHf1qm=JI$|(_P%wVK1N;i)cA& zl$CX(*nGKQUQZfM=n&zz8c4?- z9mrFD{Sn)b)_(?@uOoIOV9(JW+OfDBHGgWxT~kAulaY&aT+KyO1K}c3!SMkjI2STd zYAc8^R8X&NZKFSV4-t~qAp2xR4gqM|KLC#fPcD4Uw1(KFkOK3R9Os2};nnJ20Y^97on!=4eq<{| z#_o+iLI&Df;}b>FSJ5mzy5Xq-I!jU5eRe{#Qh_nFVUO!xZ~buTPl*3^utuQ_Xwq2} zrtaxL-el#FlMKhUm9T-4)jHriRv;Plu*FI0`ceP_{l#5U*zt?>6Hi`-hZk@>(#V)Q zpl5owkq_)*g?Ck(x0-!Q{jSiNdIyB4o}jn3zWiBUV`P#@04!!Uk=1+*E%Fgo)lid| z72pf7>V{ko=~9(z5>@-?NKT6T{EzmUse5%;1dX-t_ix7MlP5qKq4x3naBK{8au`-C z$8jm~=^J_=mxacHbbjJWdZ#~G^+)@@b& zL%duHPOIQBIlXIIMHNBBlBj8McA3HX>fN`=@AI3FP&(-2Z4_K;01O6wZXEp28}u!7 zKtMD=7ZvqnP80L(DpK`g^1?;DE@2=twAY0o3ITR2^!rVE#kX z1{~*K6K7h)L`6g}wZBjtMBs;*(UCtPkPQ@>MI!GJo2_C1@EHt_MY4prmOGiy`>JmUne+d!wmsxSmZW|RGED0~^ylgh#*P!tA^-Z((J z00i+4m16*t)qKkqX(M`pJw?ml{xw6IUboOdmd?5JIQVnRjin*b5W=- zVP(U3>po?*0t+-qW{I76!pn(Or_}QT46))1nq3c9Q)|D1>x7Js|D}8;x_;(1Mke4k z(cTybLPV6ofEzn?+tMsgjYyN2=FFdlS6>^u^ey~kNKkem85}p zaRyAqY$hqp3K=96#=?8!!+qT;Pk=FLmm>zi;pUwlm02(OT(V&9 z0={3WyvFkhFu+iVq;C2^A7_8Wu70Ys=mFE&e$Blu_IsX43J2J*W`Yc&b+XGhmN2-a z1^i7>&xe!do!~Yy?l(eGnV6Y5T%LH9kb@-03cbvceItxJ=RY5+DND7r?Qx?Da7RXl z&Vq*fKZ+m0em>g)vl)RE(e|B+XKXs#qY@Q@6b$=;ZqOV-8tw;E{iWcyl0w~ 
ztPakA^a~s(@@evx8Uq{gE&(Y73k(sPTpcw!B+&y~K-12%m(2iyUrm4W_TBrt%R?WS zTSz0*85H2b%sIz$h!R9q#=`EqWYloC8*+G>rkwP;>CN=Vsa)S`JTpBH+Oh!Waw3*F zJ90YSUQc=O%eME+_7E*W_+!A1!DHGeEhV12xVZI|qDu}TLq|*CqpIW&N;d*cQl!dT z{kCWQ#hAN{sTRl?x$zqjJ06d)G<@SBMY%)Tk z;id%lxyxa^#3CQE_0a4dZpqEqjuXarB7UOS( z=AUDn(IfmDTfv&LD8_snC_?1%^GnM5UAW2&8F5T{%^3hFK4M2e_;r^W%Ku!qpb_~c z-0s5fbR}z-QFJ%9BQiw>sI=5gl;{@R(EX^Re7oE{!XJ6t{w(&Dyq;W#^rS(_sJu9+ z{MSZ`viqeof|;;7h^R;M0YqsKSP9@N=(#_X2~;Gg{1E5qAKfVYa!kgC+6V*yvU2AO z`Cx_c5LY}?^}{YbQ0fr@8;XT654wmPzzcN&!w_y zu*CU;qyg(vJ9F>i=!MYSx{pN*!292?P7u+F&BF7Znn|Zoc)*%ScXhs=q~Pn8T*=gVO`bjq1(V(^9BsRvsB zkMT3;dj%+|X9aFpMb?DpbP6ltqEnVTMM8WlU!D}>7HMlrah(9VOfEa=K5b`KcJS@r z)EaPnk^EaL=JZ`V=@vbo4j-#8o96z~%Xk(t0DucGjR~Ss00mE|zpxK`p$5z9S8k8u z?o_B3mISOh{A==^tMzJ$BGOOina+aJ_WRBp)$R!{A1Q5 zg;%@!3r@KW|L9h?EwgO#2L!`qhChCSqpfA>>!%1<-PjQx6E^)s@-(69+{rZ05Fxkh(eU)id*) zb%=*-6kMv7?1wqs!>UH83r8Jg(0BXXF#ciE(P&~)d6G?QB`}v86SNX+UKz*!Gw0%> zyCLvboug<(G^8EG6s}Thxi8IPe{6m1WoQW%vcfkJJaG92&*mWSsCAPs7_sR6ZvM+%V%(`1W^5HhtEK zDtq`|Z@z7~q=v=D4eCzE%l^d<6<~5%VOjG&%UJZ(+Ax0qGr#9HlWWMY;3l@7AU{fV z4T|zByX@A-*1g*zUlhIxuZoO!9TnQS)>h1=I`D}&&Ij;gfw6Vs&nUnLs_$4ea-AsS zof<891Paxk`p|Jr7&p4;8?lB^o5$!AeM#l{cESC>0`-#H2#?ugUxq^#!dGAie6%TsW?{_jzb9i8N4t`X1& z%LRNwyf#mK-TPs>@k^KqC*53p;X7XHjbA)Y52(#Yi($b$ggu~__4aWsupY7PPtOHZ z1(miYZu4CXK(fM0&IRT+w*#p{Y#81T`~c(7+-58~5+Iy`ou*=9ARg<0zSH_bDc)&XF;7YgG%9X&j8_P%-Do9PR`!5=-}BtZO;RwMUzx?( zHg%w|ih`faf6zofsHY{UoQ2Vfzcz+*QoZaIibpid;0bS=W|X)`1G0%6j)LzEGN)MJRlUO6Y$scLb}9?vL3ue{)oOOtAG0j(?eItG5`q zDTMCR;oa_obwrUcAu{xkLV%^vFnAtj>L`Ll#&a!6uDUfEN@qy{~EH`$6Ci-aI z(*es_=#x)QE2nN)?M{$mM$t=CQLn?)?c3H@w8?yCraLEu!oPO+>%Ol#070$#HY~bZ z82sFA&Fku(XB$tnS(eqQ1N7VgWooGM89_;{kl!Uu-E^?2P$CxWXiYd~CR9@c_@cf) zHtM;&ET$)}^Z^C)uMqd2^q{qkw~ekheb6cU{*Ij}t)#d1IlAAdv&PJqJGPrI_w{;e zb?WCYq{n!KDchlwP?!YyuE-~kA|j!pThp{f;_jc<=%DowSg(nAMjmg9$`aqt%VwqJ z2_yD#y%tuTsOW!m&A}X+1tT3Fit%A9%BP0QSTa@OLf8h<#5$AQoCe<}xk{COpkdvm z3_v!{`C%G8sZGbkCKpHL5`Nu^+NYIvwvlB(+Bo^_gg?&WQ@vqAVHAU}SH%-`n-Clc 
zO8cpEo{z>S4_poGEpUx0pWrYZT`8sUE>rH|?NHY(C3s==0 z{#KE*ZZ6kl&=2DMrE9W~(A+x)hhBy75Ha&;j~3HY6Gv}rVqAc0Rgx2p4J2=7A!$t$ z_-|PI#RgY;9bGuy5)c>&>Tw&_x;1sD0lUJYMP3-Dua2W<6ExXKoM zzh!;gOPUWXb(>VSS+wfi17UmAVA7xky~8eJc{P$2iG+{OV^9LHM>ItXP6ym0?PnS*wq(vvkwZZXQ=0{ zf7VI0Bx_i^&(-QWQSb4n5x7Q1g`5~_Y|dd9>pj$+1Cj`DzfTU{2$b5r1 z>J}X_op84~4n_i$>$f#7_9QA9FxF;8bCCPx=K8W;n8G8DwvkxTgb7^Z1l>9!#1)!& zAE{V%99cs`!i|*2tFB%Ovpf1gxe}9T2J7>;vgV|`#t!Y{W<_ze z`2b1kg(Z^(ar*`;g3iyqQ$T`P*8O=h+ZT;${SWXnJi45Z-+5tXb^s5~2&9ve4#+5I zYi=>nSBDX%@_`{-32$JG~Tz<$+ zm--~-5$J+@mx2yc7*t?qm}UEfNX|aML*9vLZ50bWvn8o1F?GxU_@IEDNz0CbA6s$- zO`;V!-6YB-&H1a3~VQ=HFAQ58n%!~yWFIEl6)~oJL2LH?Q(Ee`Stiof2t@c?uCl8 z4`|NEQ44NvW3tQ?gBG1Y9Y5tONCATxuaS{W&8Nw7TP|110olY)zqJ_c*I@5U=Sjt! z7T*~yJwp7^q)PnP?Kr^v7L0{;leoMDsMus&eoLuZ-d}mN8=wP5FdKwRo0Wk$X#Bkk zUt`;bIP6AlM7%+{0Mu3>*R0NmbN0*{fNeH7a7(1~S{HD>iBeQ#(yt^W(qWicoUBW{<(T(h>NWmL#iAp`?GZ@nZSOQ`%DmGi!+9|`5D9TQ+y=;efe}YzrH*BUw+h} zk>DHe1H&xo9iQ0YqBArpqiWw?YzOJHbO2OVW1z%C_4BlvyE~h;EKy8npyt+(-sVpu zW?n1^EAmGXn300Z&L&B-56be($4%&K?dL!84t|?1HZ?s?OFay*Va^#j+wwDq&%a@? z8_X#S8m+<^5g>k8czL;#8D~dp! 
zzh4yyDaxGy?mNgeEYC*A$4LvrE#K<=vSRlX2LiIsbQ2ZFbGUW<C119^F-Gm~l){iXr{U7YTEu?FNdR~4=(T1r z-EI7{(I+@!VSOy|r&nR?kO@Odf-GIf`u1#FrQelVtN$y7j&fuP}80#0DKklmk=%QW^md!~}kMil# z#9XJvli3^ET*kl;?XJ!Gw0jfEc(gqMcv=`cBHQ^cYIp3ocxVhu&%8E7@%FC}dil8;V=?`mH%e zG_2t=R5&_a(~I3ZPPVr<1xGl4e;2z0DKKaO1ZX7CO{P}oO7iCCbz6I46ktPBUsYk3 z^6d+sabys}_1?|1tYW%oooJ83PN#bh-5qdvGiT{WdQEurM(+ctq|2j%Q9iRvY%2X< z$=arTYh8_&7J4|F5YQxjx%vxQu%DG}wQi}iW@l~;c{5Vhq_-I6oP?^{dMOQqX&0}% zyrwxZV1!VMzn^Iw5@^YN-IKW0ZggOid-E57j#8^qa;| zX2v0yrb>P3io_O0cR=7bzg0UgFs{XSwI2)JweGS}QKNuGtO-V$a#&v4cq|WD+^(>u zu^VfQ+6?oX$}@++;P4y@0T-?bd8Z_EI6@eUh4R_i!5Xvv#55O!Z@W96j*L;rtbVqF z>u_P2x)N$e-2_ZGD#0nxpi=y`{I%hF*io&3nDqw5-=NG*K9cP*f4&PK->+VL+WFkK zT`&m6%sBcfYRUD$6b}j^5YV7Y>BO&o_F!Erm2@S|ZR#^AS7sA`aT_%{;R+-$p8KTG zkV#fS7i8Hh(GVM?z_#AfEY}tcR=C% zNu?FIgu9Tfhm%M0Wd)WWei}6u`ZYt#1_=R!3rI-z?`bJe^)-3dxK5M$A7yDsC9_Zi zxgG1kCaCu`ru_gkK+1<{xi{Gl<-DLWRPS*||G^36f4@ZVMkQoGm&)CL`@-ot7*oH2 zRZ$gUc{A#0zKkQFxNtS|P7rIH3J^p{6qhf&{*jTg0-TQ*8bK3%J=aAplRr_q?GbTjAU!{Liam|M z(3Hcz@N{It#Z2Tri36HG%$C-<`M~a@Yi|zt1|);L^yKF>a0m@4#XHQR$Hj|J@&}!H zO`y>pQLBQQSuqy44H2LA-s1sinEUXbrAOt+B&W~B`qm^QvE^Hu8>?k-#?p|`(1f|` z>Qf$KncL@&t~F?dd7l)>`H+}GrQTYmm08)%p-%H9f`F`G$Vc5h-n^9O0D;3$hDUR( z9`e3^bdB|O)rmJb9t){O$DY)lxmBdmx|qIK2wwBqFz6$}Uwl(c4a)Zn*^$^*m3E#F z3Gwig_tXLNT#uQH*_(f0(f%_4`ip`#{yP`t@){6D%jG+1{m`(yIdwEAH4+4&LLaBz z{Ti{8U!haysz}3w78^IIyiL@|d+>sM;G2=K4yn}m4RDq6Z#=f7D>A8zqG`K(3{1M2 zPr-2Vfk7xchoGR`?abj!78afDykh$?eI)&i2^#VyLpMriI>#U&+YJ_k%(SO2bWyVTACyb9k?Nn?4R%1%s0;U z98H&G10x4zIv__QaI@|fEHkE}#_}%1>v|Gc^IRJkiEw%yz8q@ugHILKPrcrdit#8b zVki9s!5gr_HStmwXl`F2KdSN09v0GSJ@Dx1Z(w=>91b{2EUbWa<+n@dXcVsF=OG3f#$|nNSblX%w>=kY<&?{KWuhdZ#$`k=FRUwMz z0iZ!cm%M^}&oO?E>txT!4Jy0PSvjfMFQ{&@um(1~6%S zwL8!G%ztyN?2WZ0o6b9_i#xySPuM@2Hry7nKhNXNH)X&--fA*hEt+5>ZE1NignE*+ z^o<%sX>e)D8SwpCzVPaF|5^82cH)E0N%`~+T|e400iRPsCp#l+KWd?JlayO9cQO6_ z&kaymvwZ7du32adLBYY3h0D6F&zbnp?#`YnZAGEqDky&s=ilwhk1KUPETLoqFf6B1 zbASwq3Lhixv%s35lipA~EEw>77pqmA*cX)~!`dYyv53-^B1`I~==#g`%Oo*J2Hs$% 
zd<`0|gxOz5V+^j3yFrembhMz9ctJ84(>~UR>xHY0R)Na{>1bg&>Z{zp43KP_0?v`s|96#_v!CZ31PluTh|r zsaR-e#&(oOWtb3qk10!d0r*4hW28~#CIryVQqMrqSGJn2@&F*C=i+cf9w8if^|U+w zgHDc;1wO$V_ukI$@LGUvbEJc^q0eI7APUO@<@5kp3KW>=p#+pHxe`$2Y+Z# zA$}dm}6Cif8W;Cra4%18osA@dbB#M zAGH94{eDM&_~->JF@Uw3S|cqml8`)I&UT(&HP+g&T#MASX!hq2d%Z65>FGeKsdrh{ zRN+L1m}@OzohATEJ|F2_oNZO1ZLlZ+BGOp0sK`Jt;RGGUHT7L7Qd}jBq`s+Kzo;^8 zMh2WtoV4y#E-His?+||K|0b1xZCW}NP`~1qTi7DyVu(1*qRjB>DM(eskIvUeT_jK) zs^q>=d9RZ9Gaqgpc%)Hf07%P2FsgpbHAF4+TnPvsREdwCpAf9nZH;JxK0V5AeB^cK z_9CFt!gWt5_)N(ZwxV~B%eO)Mq8#?5>SF)Lvr5;sdtGSU-U(!i7oNAC zpS>X#>spf)A%K74R|WrZuJqN!14hPcl~xM{k^+}6Pi59YvGyHcu}JK*KVC1Vh?C&O z`QAeih1|6j9mW8Bz>1E1L^mjtpY((G&KGY(V7jya9DRU@oh^n8H&xm-X*=VQBZ)ir z3&B>uhyPl*|6NjIzH6w+&#?+k#;-Tx`8rFmg8HM8Ck0C<2QewfJ zsXW9@Cr>axKCTFY!K(I__#lB%TMJUH(D`pEqnp*j4*yt2pwG4;m~*3h)({~5*&;XG z{XqIA`E6N4Mmp0#cZj}K)CV2X?V?7uhC$7K=$rB61Ii@+7QKbS7^JcRG%i3TPbTUx z(}I>x_|MZXLJFJVhYJ9-0ZD5}5*#O(O+g5b42g=0^;sEQV20_SVw@t}-pOQ=R(tO7 zrb&4w-N#~hsGKSzIE%XyD$&JZhZ{~7cdY4rlzd$U{mHt|2!RE6kK`}C4cd$5kvK22c>SH>!0=5$5w0VrG|&W9mlXbL8&$?8FzQL)1&DZ&e-C5 zxogh`vRH$(#DS#tAZ~OiI+%>ztA~K{SOr*^d@m3sy2!+5Y;OfaqXi={XGP6CG?^^T zhyjcJF$QA{ybhUo0gv7MN~_>+R=5$%5atC*XmkTQt!7Pmow=!cg_9Q3x!at z+DJnN=RV}8$1Aw*>w~C+Kh+Bz20{}EpoTUO!#JSb8+Zu3Q`Ld124$LvQ{2N0DISfT z{T~lgjSx(@mxH?R!8xj!WA|R`j77 z11F@~_3eGi5b;+V&f3d7M*|TutNZtpfn|qw&Ry|tk3U(^KS+J%7{t_nY~+xCZR9O1 zHtrOqA4%$aXCAlqur_X2=HGKQWL@O+@#WR;uuWaRPA-ot7QgO+5E(I;PccHw)3SZO zQB|-q$m$iY2b{i^H>y$|4P-s29Q9(r1Ghi-&R7Ba39OY^^n z>7yFw{zqU8U`OP&jbB0xlnY|hdG{ON+-a4q-&L7(FM9kT`*w}vY!w#J|bNlIwhD6hX$}Qd-S;8217Zn`4gAhN=H-I@-$b~%3+c9enzzkt{r(k8Z(;7 z2(w&FjKmicl>|1bT;-$)?eMGrM?ik(4~huv2TV$zL0patnD0@#i&3?8uBeW4FJHd% zCfhI33y&;z7l{XZOj7ytS6=NI`O1QJZ6SWg+l(X%4A|gulcb$fO~1<6Weg`43xl2m z52L*KE_x*B2xu!Qg%BGaXprKTI01N%7#IUe$b#2nBZA2JjgD4G0=xFhd~Km@JW#IP zsq3{$iA4mCLl)3)?sC$A5$axf^!#PT_|M(bq6$hL*?ZTIUyi=eqfCY|>R_5LpxQCe zxefs!cl~w<9`lXn`3S}S(BZ5GMPdV4z zp!6Q=^VYbms;_}cum){jHha|f;TQTCr*eaN4;5=FmXH#UGjeY5H9pKGNW@>hbYA-3 
z;Qbe;0k;$c^F?$5Xy8kk6X(DE&p^DAy{`OZ?`8gT87%%Rfc*h{o%w8{w*!Gj^-#I_;tFfJS*^)e(>YrOU@3`Z)H7&Aq*_g3(rViQ;3FDTk;1*PF z=QM1F0R&KY4Tl^Lbo*W~3&bQ#cz~Ls5gn_{|D3Tmtd)9Z~EXvE%yW?CfopGk$G#L?(hEa3;`k zfq`8)!3ZQ9QM@0%SIQ&ho{E%1|L*I-6~6x0qRa}Ym^-!)yMP#CeBTe?z_S4OE^%z? zw}B0>e*7=|N@u`btk)BDU5)QM`p3}+-;hw|3B})jOagESsxc>Vq26h4z~q_Wa@wU| zvN22rOrUdt>Y7v!qTK(FK!D8o_W6I`&R>0k!777nrL_#eg#yX`c?w&6=y~h>XNV-M z`b%rt-+s6y;J*!-;e0v%xkv`Z?|rHYysGh zQ|X|ASe?)R_A@Mk1;X9N(n3KPN*Tf*0ZmO2y}xUq9l*~ngOu}U5BcY}1_`vK?DpS1 zN^nth06>zE1rLwQ{q;Ba1uts&uNM5jKWFIuW9{&OWWS8LEP1{rqmpBx!XJud|GQcL z=M#&E48UIDnt$}T-o0$1(MNpwhx1G`x`{C8RLfSya%eGqFsb`I4_Z3Q_atQQ$DDID)IkhYh{r5LREglQdbpr(`3vjEIEu$5* z4nZ@Frjrc(cy%=GM*zGzTEwM1b+-G0YX;=a_ZoQww}P>qkJ~ex=eOFN7p8j#erSal zswYb%0If=Sy1w&L3H`>#_^`d;3@_UwN#u8Y6&n!Ct0oFfn23pf zaqSN3dGOp~87CH0^@vml4r(A}9Dny`qA-R$C>YInGd7BpQhQz)T(GWuFEvS}b~C2j z23`*Hz<)eWz^bE~eK^q!lxzPxrvF>$R}#Tm)?#dSQIEz) z#~>&d0kv_U)hB=4^YSB#X9D{TBGj8LKU$aw+w)DNbM>(``~xTa!0qva;ir$+F5N&` zmIpL~JY#si6g|Iou$FyuqYLZ-KG1g@dSg~*i2xp_;`X$-iok6L!iPK2$m+b1^)=T; zc%f-6biRq2X{?jX6zkR`bD)xr@@50ihaaqo@z3s;M*qX5yb&+k}n=(T>br0411(VSTe9u$Dh$(4aiW&E$OzHX4OoyGJdGC+fjn^^kX@zrbU4|eV@$8*ur`dc}aldZ-?ys9lJr;XgXWS_g2^=f6VvY*jrf2zSP_RuQDF3i7* za=897^<*&b?G_wU5)c}UZn0`R^cZUc_s4lK5q}HVk6nO&XyQzpf(oB6mO8WwvIF`5 z+Cjb?U;@{jQzRnM5#R#3`8i5j+z0WBpHav=VK2XAnHyxuSA#a=LCstwCEb^8Uf-V7 z8%n)j5=b7|`Qh%WR_4Qxd$tYipnE_gI|t}FYd3nIa1gBk2Y}(7Bp^M{1jfPP+C~G9 zsiq^Z<@(t(ytTdJz9#XRy?VV=wgEr}x&Y>rzem#j0r-2+^$5PfQeyBWsdnD^oGRg+ zl1}=&Bg}6d7X(EEPbsRFZg$h@Svc zh27pe{l`+JPhql1xw`>26)ccNtUj71R*Hu~Ck&$X{&+cl+jPoDGR?yV0e=zf80bhs zM&%HoeS7U$?VVl>OtX|BUK446>WaX3-fB4Vw3`Fg#N|L*!=ytO6$1(hPTi03qo-db zz`PqwD-@HmH3Pskn1YZ14sRxPIBOJ^AJpO&+TBx3vHY;Kymkouuj7C|YfHtjP|F;8 zeTT$$GbWHmzw*rmy3nxet}7vxVD&w!p?h*VRgCkUjmhsH39 zUTx3FULV5Z&p?ukQ!~?llOT)H0dNHdQvztZ-=#;Bu}d_ifE)q#1RfN`iK0l$ zDPBTKQs8FtA-ozN3bdRV?S576=$R}5I}u`lj+AJ4hy>RqDLzVaTs*Kb6izfZBpI+y9-DeB zKnqfRuoZ@Y;dU1o@DzH2f#DCI#=SmqZ2;-zF|76bMiC=es>|;%@fqVe!{4T*)F%Su@sx9`0uwRm=yWYZylq-7{h@r|N5T*}k8V 
z_{?~=HW=n8kx&WS1DmYCO5m3fTBON5g6_mRf_v_=@|i|jDORp9&7X4hxWzb><-zkX zs3op&-Jjnr0!o?8*NMv{QTLzUEBKM3)Eh>0&j)O_S{MR97Vma3xPOPGQYm;*o-#%} zRFI%kQNx37s=m&M_c&KpcbX?2Z*_s8n<^pDzru z=L{>NTgFcT(;)}w{Uy4wj+R&Ff#x7{wB+HtM|zp(7ow&i#0-$Y87vIQ=>Emgv=K%S zVCVJS8pU#v{w>w{E{uYY7a}?aq$OY9#&fthMjGw*e}TJA;BACF;=oZRUaV(F(%y?s z`ir0_LsB}f1)LNS`1`1tR~NwN4E~0C;68&G0zdhVbz(W9M>WARA?H<#cHD7#`zeS0 zR4u3U#uN)zRdFIw(vwcmTEYN6>4^_1PVCBoVN5qo<{7!M$bv)E?NSZ{7ib0n640 zlt>IgbN-+m$~%3t_*b*R^ogL&tP*;Q^;H8XjJv2E>XD=0aN04SQsV`g>odOLT(b87 z+Nc}8^xQ<^?yu1NB8cbix2~T7UB)$68q9WT+*vd}91;5CSad$TZAxo6(RPd`ke)Tk z2NA|lDNfkI)Gp@|xD0(-BDB8E{E4au)I~hRRFK=v=3o&lm1CI@_FQCg*mudgU#JuJ z_7l5%09twpZ{!Q|bk-RyD?l4HBtuJl9eWcW1a4j(8S2fbQQrIhqp`peAq$ldA=1ig zjeF>O7yK7dTfQOBRe(fiNfO9NUPL&SL?K)|QO$nB_it|1LbsyAapu!yq7*k{S<;!W zs!Q4%{^}~{lny!vt7L-v^2a8?2Le7V8XC=vpR36j%My29gRk<1X(9Ldz{fbg7p6@y z854yofgw$5+iz+TH*KWiI}Fh@;2f;ka3~1*l7XH|+PXZR$<{ufz4xB`y>agn?v_F^6k_Txr^g zGjY_0@&!YJjR6Lq(|x`4okFy&n-JA%HfEjXTUl>_ziZ{AB&?(}{$*x0tk#EPZ@b!d zJoik^OXg~CBIN*t>A=3XFxcW3%ejBE07DC@KZ)PFb3mv!{EdgJ2ziM4Y;WrozHI0JXuTHb&$IkD|>ssOF0(x2G3Mnn~I;O;<%qx&-x{Vwfayv=a zFP>7ss!gqGEqqy&fYa-A;vGN4<{7`NT!cJM+AQ*Z+&!x-k-L^R6aM0$GLV6bwk8>n z(*2T4_Uqny(Ep+9ErYV~;;vn~TM-Z`N$KuZ36TZ?>F(}sK^mk}Lb|(Kx~01tZa_NU zjsNF8bLPx*J~^W!#?9XQ7i+DnYpnLoa!DG$*OyBOA8nmAqEINa{9BF%-W~rKj;1QY z>l4AVSprQZ0PWk8JB-FrXps0=@TIsgM=Fl-6QbSYnX6i>V_@-e?U&l)dHNYEjZg*m z(~lsSj3KP96uHON<(;s_9hj*!84UvM&@T{^@Ad?Q6c;Mx zSs7aH=g&yEqOK*ni_H67?wOrDPgX*N-3*yAtHsjwo`p2&q0gW%}i9d_=?w z`JBefp>Q1p;+Ry|;gH-`8Q|U}-eF&a$w6zq#G3c}4kx@?23xneZ&)co0~eF`sy`-l zjRXf|t=Psu{cqCD-dBRbM~I0{JK`*TGg*JO%KhlOrjg%^~SxCw3mZrU2+J~RrZ1ikzQsl1M# zcJ63*YNGM`KAVUJ7CmYGh$s>3OOA_BTg(*J$hD9s;B8II`E+CxN*uzup-$L=gv**U zNMlICL>(9UCFWBg-3PjuDZc5xf6E#V$h5T1b@9H#neasYMCvxr6#Z$8g8;7rezXKdEKEt{ z*-KjPbtS|u#?tA%n>80TX6KK6LE2R^9|^b%9s3r-na~l)mHP4nP;f(4V$6OQecFR* z{U{-?TVs1ATWNPdtJY)~o+{|gyPCURB?m2Lg>Um9-7{-_B@8u+eEb29`4Mzs+FVDG za?tQ(A?;`mNMsr>&>CS8(4xRdhtKeaPU&Rxrh|PN#M?8JRt0%8>yo@?Hz5Vs3eJx; z&af^#C-I=aA~k^T{CfREryJ~Ve=&wIGLAQ?z~v5Ji<+Om 
zeSGR*%7J+!a_(0GRw}#2tX$An=4D>LEl8OhG!bDWKGi*}Xtr?Jcaq5MbkT-!)7 zd#_G`?hi1eHmfRIY6DelIHDoea7vRwi-GTyg01u7df)%=|EsNF5uig3D81-n^kr1+ zktlIe>|d&^m?VA0?FQW~wWGTw{k_%P@3t+I_^nDURvM*ZmQs{BA1@mg?J+kV8=G?c zv1w-)B!AGBT$akUd{BP(q^c$vGh5f>?d~~q@np3e<;XWPcs5LD&~-_&wH_B*`VnS6 zVpKHn>G4kX8F}xj!`X%`vWkNAZ3?grkUv154A=mof^Xg%iXn-h;C7Cq#fE1yQMU6P z2r{%x1r-b7ruWSj9+6w8N52uE`hwDs>Yc*kg$)jECJa9gek203D0ubpe~g8Jd5Zd-en$(`~n9TfHG&l z+b>~pm%m5GO~iTWyBLZ1w7@0eyWa7?+-t%xWT5H)Xu8yx21wFfN7#7p&gy{xL!(X6 zN!^EU0}fjM{r95|sz$r5kU!1uUSyHhBEFirb>`xm^+NkrRAzU{TTzid7J%~cZ}U$U zF7t8WW%Db$*yzz)hYGas*Q#|@>v^&i?|Z7{t=I%PSIguwwSM2)UG#9=$Or1wpFwcz zX#RBs?-afizAAaE9HJDnc}3-{KH|93HuD%4x1cgeyTzmXU$3A%^VW^e@GMrL)C9hP zX|JWyrlc#ab-}MT?Nlw_r^{n45YJ&33cnWnvVFzCr~%Lf6$#xn59W*kFI%a+E)Wan zB#O;&;AP8#GCnr=WM7woep^@x3Ij=CJXf0VxlcBHxV?Zm6)Epek?SBXgc379KalmS z@B|ro3iUR+L@`}jJD^)rXm&XTJ_?}D53dIO2G`Xcq%HK|st^3q)|_$1)D0N%%aP5R zOFO^%jTua|It~Q`OMzrc>Vh2aO$ut;ya2GOF!x376l^{lpqps(_Rb?mjR*Kr8TSpzIuh!e zLp9b&nFPm=;>UVqtkN18?VuuMq2&IiY3hJxy@yD=wI&%|U2zOUQ(a&9Qz_q8O8B`c z+ozkiTS|mezd*+@z}6#hb`go1voeXK*nPm-3Q+1)Uf{UR3%7!n$#(|E-S6(cto8O2 z#B8qeS9(PxdK1>#QQb=XD~7MT&7pWqW=yCc-KaA8R6L*Ae*3d8HT)275cYEVVbwJv z`sV?Y54)W6*OV)YhUvcfz>K3}v#Y zsQrUlc3Kyj^i#e{SvqkL&+*d(#CxJlTPUvrFfZi^J1YK6W&17t`P~;y&cK9|3!Q2m zEE@$b0!kqGw1kFkQ^9jDll19pU>wzyCpD zA{X#}Y<|v_f2xhxJJo3OdfsT^P?DK`j^xQa4{EzO5t59dK;DXSP5rO3ulNAvt)j2c z4~i^2DRP~<=by#Wf&;BD-`;q(vjifCjZOgn!?_}vCEFXqt`+9@K@m#JX$cbR-QTBJ zW}<2>S0WX?XTT~cLgmZL9)PZYx%yjA8Ej4wTiZkfiLEbV-3;GkLLHJXReGm*w7E7E zFExtEr1Dse+I?kG_?fTtPD>ehwq>U+)XDiSt(*tytPT3Fqm*d1Bn$D%s6PIESz)|) z5lDz9nepz?)3nz)6J~Kk=;{9NybVgW*M|=Qu{83cZ4O$XN^$_)?FbT~QuId~HEVv< zb{e#eU68;|-+ChGQz?NYUT(c2*m4{x2PCk>dZHwx#n9a-NL;>5vswdP9SPcA1q-6w z!C=I;>*#^cghaq*^fH*7lvN^`y&&%SW6<{ovzwAeRWMaeZeR$9KQsY)uP<~+`U^1P=xbZKzFKYw74l}kTfCs2nV8oJKc@h%WE=U{=K#{ zz#Uu8vme>Q%+T}GHs2jL0kUG-{zf3u?;M@$=PN3dVvH#<8GwL%4a>s-=vZ;Hkqlsb z$ktS~RQ!5=54Opz=wC^sQi+MX~wC<4uAtR#G4Trc>b0@}{wR$ER1_X?0p7z7$%GpY6I)~t81$;A$w 
zRfu>!NP!|VqU1l`u_}en&cyEchQZ^#^;=i$6At=jK9A#u+cPi@{|agHoI<|32Pd<@ zxyAE6@8^5zFGbUT{+)*up1<}4oI^K|K};cwGAdY3q~5hgq8}BA^}}HuX=o2vZ)ZVo zjAVeu1*Y$4ux)?Rz?ra;ZtSlzJa8x4OwV>VoMT!sC+p1AM{J3A93BGk>Pn2cfKyz? zcvH1yKkPK0D-KWn21cY4e8k7ie`?{co|wOpFZv5~>@Jn4b6MKdcw++>q80+in|v)8 zfnVs`pq;{a!%YgRg9^06UBtJr_&3sa9F6*INxoJCI>_0~&6uR{Z6xiqVp$Kw6lX7L7YY}g7X1e+5Y9hcaVQWs)?Bqq#4fxcElBknlypm7|_DM|XUXEcJsbmjHQKz{z!c(g+lm zmQ@A634G5mmGXTOMC!eVcpg^$F(*kxA?o_&WpG`=Aj40jln@4d2@Dc$YIUwRaet^8 z51ve@^|zJwz0KZT8hq)GzVrIyAUp`2JwLGg(6-O3#=29!pDPn5M%=HhMC@H0w&_*> zvqq@#3!?@von<5YlME9Zy$n2|3`qPTrb1_!XG) zl52agsC)0oCa#{meEETG{Od23GQqQ}6%1RmyGs%pY@Ow(i!1Ob#&MOox@`~C+h#j9 zxj@eqD5~ToM7~p?z*Hv-qC;~*Y_i)Txk#Z16xJX#>4a>8Ad`JF7kT{8gua)11^0K5 zx^dBHFmRLsxbUIk`EuRtvoX%4f&nQe4mrPdTA4dNy$wLi`PA$#^_}O~IBY2qidg*C z2MN9N0Bi>&ml~s~W#QcL*X+{3M9+I4lG=8DQf1^xBQt!B8p|^n^sOCGz70lF8>|TZ zD@u)|N5AC*8F62|?Tc8swenJJ5Cd%3?AmXs&Qf6yV1{OT{j^I{Y@?G(WVA%f)cRT0 zGlHbK{6J}#?CTzBFc?3~k+J}uh4KI3#@R7io)@?r-;1S{l97rU74t^Mhz;Je(0qOc zmk|&}dg3$ImA*eyRzxM|R0TmfaQwT>Q;P_M6Tu^(9wCDv=o$uZMOQn-0q+x_Dk4Rs z3sOmWY0(zNT?GgrI`vy#jHZLt>~o9FAmh?#fZiWJ&ST))QheYh3I;}Z!u|*tQzZN( zE{aWE_U+&vRcbB_mtpcBOtT@hE`WV&-C161wVlbeM$BY`N~dx&J%s z&R6v)_^Tp6fr)!Q(eg>}V+szr~P!ZG?t& zpdg zAO8iD4u!SeF%|giB4KB8L1jix$HhW{$>gzlOyMxkoskxdcbOBgTI3!Kg}t1EC!6hQ zNCM3BWL)RhDZC;QFtB{hs-$FXN(BmT0=cCeOG%NDAi&W~7W*V!NA`rm*FNN6lmmW1 zC~AYl6C8F)wysOCn#6iH6!I>}@aGkIDJGI8X}u334iyd*G-$bJtZA(2QLhGM4=_TK z-)wPGHj3vs{9aTj(ek>WS0IGWF|ssAX{B!dg}2Wc&CBuHl@4c%AG`};%p&Y3jKDgb zwgXH`G#%>}53H@qMe$^5zU*d`_>?~vDZwy9u9Q>#;Iy4)wyvye5Qbl~J#qcWxkMWg z+dmM@)M>r%ZH0k~iXk!Xg$W8?Cd`L&K++oTf-JX$8ioBLP?)#qo8jRIdy}g4VNvnN&Kia`kE{-dQ z(hbR_E{e~Y@qzGREK{gP;swDomUufEV2KR}l=4>2D;Si7cgHI9sZDH!!E#7Xx%+CE zE7V{-3)1=3VBbxqK^o>XP&kg3WAbhnRh~B$3=L>8Yn%nj#0prxL{!0@r1?R3-OWJ2 zjO$LC(WRzti&b4-_qH$?t6>AJm?#|Uw-lgf`sTZ`Hv$GYS5!;CyyjM4j^r_=M~|wE z#G9^o6}nq0)D<*qY*F?{@p{T3w&*;F?%fr5TScwg?FQ+r>h#QmXuk8o-^FTGyn*uN zbwLDMpw@|e)E{$UD&GNWa4hhXg~d- zQ$Qxx`%O!Uk-o8c1k(#zDZ~rD`;b^h%}>dkmSLI|@k0WiGr`iyulz7kG_ahX;f)d+ 
zPJ2@faMn%=tXpqj5Y4}{Ra0m|ht&pay)A&QzO^al&;mlvFht5HHkE;l|uBi zbPtB=GJ2d3U*`|t-YV5Q9^}3{r1YahPB5_kLpgY4^mA)mg4t0nGLuBO;|b217b{>6 zKBA{@Pb%MMh0i5=E%N7>;M;oQr=5(V0frW1GCdkmp>`7w^#%#aG~SvP_qORZY2@{8 zIpmk%A5*rjns;IT?Ey@meu+nFxupWb?iatQL$@VOZX|2O7el8TUwsy~?;x@}b@k&b zyuL0Gig^4Zx^pelt1XW?a8a{m}#=3 z3Uvl=|C(<7qX308| ze*)YY9}kzBoHNqvlju}ryhDPN2|&ZX@9@Es8Pq#zx)#+yDrX^w_x)XFr?vNSY!7z$ zg@_D9P?~rcd(N||4QvNa5@L7koFb8F;6diu>%;k)!FLXDomlM*p4@rBHb3yJ-2uex z@}A>C4sUJ}%-(-`H_}z9{F9wWdsfRK1j8Y;FqNziZ>?+ScIs5%K~}S-d)>e)3(+|# zRrs|`45g$I`fd&iY>mh8%-KRI!`apMuKnM7cKsQ7Xjm0L3xm8OHVJf7PDoTLJ& z0vYUAjPk#UZR zym);f-|x7EagMX!cvTl1%(|VbRq9NbB!X|r#ln=17RSlxkG>TGShBjFf^AW^l6kc5& z%uvU;AAk#xMix=~UB!u05#xUQ!5r8l9T`Nu=ClYvfpwB=2p%oyO!L%1EAuPucJtCD9j#Be66n-k_Yu81&lbFoQ`;pUJ>?w4{%4E0X@C!R|J-#lQi6V z@(iB8Y`%FM=l+1t`iU~OudGwEW!u2QPbCQWbQ_3JJ1zO|Z7?iNAQQiuta=as5Y|S5 z>=8J>BoNKLYwXw0+Iu^#F74P?4KxB}EfT995rjMSk#d|-_PX;J4h=4+3eclqsk~oU z-$v|0`Xd4t(jU46wqsKqtSat=?4gpP1aM(c~OSBDUYdE<-g3GVM6~2SeusLp4wvTgDll zrEpJt7Z-uGHsNcDuerQ5v8Z1ouh}4=;TJL|)Z|L#5c7rFG#pI?Mx}Bgl~obDsluuGP*`5i zRbNE7Q5F88=b>}PL!Jyzr`$SmyLAbuiU;=^yf^#gac>REiS1WfTj-%vACg#Xbc8^5 zH$1P>y8wBK>qihBTI+VH{AXc%BQJv!h~3x(mfxRTo8kjjajS=`AwYN+KHONfxicFD z^j7A9EAioBSQip>KoJ%7>~eUtm?`+1dW-&$aQ9j0Y8}%1F|_rviaj(C_IZZ6Mjv2< z23m3~BQ(yBpe66Db;KnU4J=s7{gfF5X}zvQ?=1tvE1;*Y!uo&=bQ;Qx(U%`+L-96% zRW%+3_l;F6IWAH*5g@w77bxcKW9u4q0NK_Q>O&vXVW&yo8oogA2$d;3gj?ig;cwn(hg%GB9ukPv+0d z*KI#--posTTH2(-86p(>|%gD*_yvFzch<^ z>q`4hy)YSvmfmJQGTi;g65T_SH|^dZ0i7*Re)FjN`crrFg;u{a0io2N5iCJ9cWF*F zju}{j_pT0YH7mvKyUaN=#xakNVMYnPlpwcaT$R8^yV)uYu*ipD#C*!;C5ycrMJVsIpHfZpOKMa zvmCa#k|4?_0)vDf;9Fgl=~Nye3yvJ%i+8(%AcB}4$hbp8BF%>+9A@>33CvoeiEHmf z#>R{1Qmm?2QeO02f<$7Ix=H5ZXNkv6}@c6fLah+h5-LRTau`X~I3Di#7fPkvL8JA>fz4W(R5{&R&%CdOT@A=wT#W z7y-Q8aN&3tjJXWz(iv?K?vwMihE_(p6HCZs3@h(oQtKyxRnSsB{JqqiZn$u0#*)4` z;1Zs3{X3ldsaScUK#-Hy`8fCY%T)`!-*9eaw|B+hZiz(6aMmCsBMXBf{K1b5RDiti zGdeU-nO}mG&@r5ml!_F}2*eq?kyM@<)l%;#Un;r@yNR6B=u2BfWEXO%4Z+r`C>;&& 
zLzdod(#3=CI6SRVpC5DpPKXDkj6bntKiMXHqYh+{vT0PEbD|l6aFU8+%O>&Y;NWGw z<*kJbf@=YPOXs~LV>4P2JpnrNqj?g8>EhAz+9vnD$A{U*i@P(hpQ3?2&{Hd4B`iX} z`vhX6(>{=Oc;F!be$g`5j3g-;HcihMrj_g!0L>UZSRT~+knjO;F`jn4H`SfIfXu6B z{|3$mV?y^pit)w1@}zSp7X3q`+)t_)@&0e0Mb$#c*EqBu?3>~@cftBbMH$jxe^>4_YA4rv zPRw7{u7lfREdlpzyc*iAXLS=>%0Y3)S|eeEcJa@geO|TPY8sX1FP+w`0T&0cR&^Hj zFD3TPSRQ|;E)`lGq&0WuFN)4|%f#$XC*U>nZU{9uqhWq;Ztk@iEN96Y3|=<%m^uUP zPYla@qnp(w-?LR%wXK~JbHj=1$9P?8?{v`#jc{_!eZV5Bb!1&r6nFV4#?(qZMJJHg zdvf)ie1fv`RDEl&km*xN-eVp*6`%K$S{*$0hp7<9tr~<9M^_K9H=j2DVoq99*hXJx zwGvx%fH!y#$=!RTi}w^uZl1vS*i40j+q^!`hHMOAw5YnZt}SkkHC*{ZkJs79Z9@2X-yr}3k&{*Z0@fREb(`BX=LJPag(P^!350g%L+=1R_);rzxr{Y_lzdu z^O~wUKXpg{wgdp>>c??MOlR`g-~4HnX$x!({#@yQL)&w7KNbY1ftm31hivXXEq6Mo zuXfd4Ar3L_5R^Zx6x@X1NqR17*h7DjPCAk)&r;_je!a-AkMMQ2`b0pmQI2 zG-8TXUFbKfMBVofKD7WBFISiG_M;t$AFaSC+f){f*f;|$lWJ!i_UlFaQL|A*RW5zg zKRu_#-5dzyFJlO#XaZ17Fn8Wm+1f5q$fT-{%3*A4{45d1;cvj-a0o=%$^qc;0efK_ zM@u3gL=c+Df(niyC1#y@)y#N)I4ADT2|WmwHIjrgI%7{?Tyf@F7Rb1ralaDyzwOyM zcBxoF`VU^5E|bcg3CG@Y!Ppc)Qv&U?1J zL#vGJuV(nl>$2vvn3bj33_4~n4vXuoOEEYmmC#q7#x?#sGlT=?7y;f$jy@~6*F*tu z(NwKcQTN!iKQ+Kye9~Fd@u4MT#S*B(3R=C#Fh59ddm)Pf;3!VI+)0Yn-UMVk4ab+4 zKe@dR9+O%ETzpwa!hq-#aV-J^3Js81BYO)ZV&o}Ye2=X*+MQR+VC6r!*&nfT+A7ui zfOCD9W2?`hCgzGRDoWE`ph(?>%N2Am4xE0a##DW_H#nqV7)vp0QY!X5l(-!82t&rj zxTlpykt3iXC^HWcLyqmEFF_L$W z&Pyro=G|Z+Muy>rVOH8gwK=M;%3DXAnKEBvoq|TZq*Zox$Se~iU4F&l;2}QOU|!XZ zDpD5l!f`qxEybX_o6)Vs{jMVBZq4sY98fuZ8ucCC+Fc1c7nBtZ)Wyz_x~)zscGDL; zE&veWXdc_yx)}EF8;0a4Xaox)UMp3~pAw4cAC|& z*~9{KRDZU-JBM6;k24NHi4;%}ov*_;IO-jkWcoIkOw#t_&77SSB!Yxzt6|}}rHgC2 z8~SXDe}J`53Y2+YsI*u?K`TWlUGWDLyoBv1dJq1X^Qsr+b^v9qT$jfQx4y-rl|?>x z(D4&l{r zrynGbc-5>IZ+wDaUN|?p{;i88YpNfq_y-b3`*e4GXjxzCjzbGJoSLCb?f7J5Cw9FQ z_|wel_%JFt-(U3dTo`k`b>5#8Ay~9aDy01|V@G60CK=OIH6v|Q-isNj({^9^kJ-%$gxp7`_sgA%6DZf&Z%04Ja@5EFpJre}N$ z{C{b3`|w^S?eqoNA}s2Z3s<7z9`3vnBMMeJG`&1zitE#_S%%-JX|M{*Lkwmh#VV&h zhu8gnpsI@`u8N{DNW{n6d6DSGoI~1;_yLK-T+q_#fbzHq%qq=%(cyTUMr^_{^*Pac zPQ+j~=qJDoJ`UTnLJN?9vnX}O_$CoM`P&1XJN)2dowahR(I?HcTE 
z+~g4PfgExfB?JTo-vJE^p`EBJ?`(~6%JMWqET))LDc`!Eb^M5e7M5U~g__-`6-E;A z*BjPN-toE{ya#xxJ*ZuvUKsHE%_cCdR%ct`!8*v&)t@Fura6Dk`gUC^uXWq=uE3+p zApQqw=gY*4DSRS~tg&p$UukEZ$Hz`S|2GR@_1#jdr_^K1ZDg_9Q;7$UcV*6}B(WZ} zRAXEa*&@bNtCUWVUUR8$IDGq&_2O^zx|M|tJj4QYjS(UGBw>9F>bz*liO(3}Qe@(I zDrq|nYajz%k}%@JSvc_iJM3)6x%$>q`Zd?dr5n%|D^!(2z9F$V>XJ4)>}7G%CeLj; zWx^Q2cSRxjzvX`^e5eyPoQsaX$f8eTI`;F??Xs$$P0G1PLXLr{{aab;4up? zLhFYJX`!_s;~rMgAANguSHFr3=Mi-{Ly9KF%q-%-W}&*75bU%OVXGL?0xJGig zMV9L8(?dayQ65yx?3xZloyqUHeKvXZS0;adRtrO%;R--|qfyAgbh@MoQKmY+bbKxacNy;`T)G60}`-Uc?Qg9mV~eEEw=gYBs6 z?WmQ?5e{@IXaBv<0j(W8z~9#JlJW4)GiXUp3J`o>^e<^4&wAgH=E&yBkTkXg9fp5U z1|Z0ym>W%Bufx%|f65wy%qQy!3wyjC7TSc4M#Hv;At>z9>P}>PV2fVZl0eGT7=Y5@ z_3!w+p-!NPlKVF~_zbT2g?{z$JB8(|PXA}LPBUJJcmp;ztl{xuSy}`VOe>?Npu;@g z2q73D{GCJAvjDzfXa=-G@QU(LljR1WEIiIKTBAEOYa~s$hP>>5a2Z1cJBhJf)#!i9 zY|;p<6G{+0cxlX2f2AZR{)5h&%C%+dh1V1ZF)lxDE2MREB6ACi8WZUx3#&HoDx6*i+(vw$u`eu8> zoYG4*{~s{8Azc4eP5<5c!T0A;=Q^!Yz0F?n(34Z)p%MTFrwZr}`*B`?YInYcOAZB> z-ZW%yE0>|jG59-s!JmQrNBgw1Ma%z=#|lvU#$tQtFP|6;bsQcuYBnCQI=ydlb;a0- zZc?)YAHZyw9K21*aY_2|&{h``ZaJS?rGZ;6?{se%fnftyPpzcU7q(jI&!#_&;{h?) zAOqdnJKEa$)Oz1V{WaC|In!DFUcuGTx7n{48{|I>^B%cbZ1x9Nrf7Mu^}LY#W%J=e zok~NqU=F`os!FNgQ#75*W5wd<1%M3xlP4RH?rQ~*!A8%>U`-d@vx;*nB~{Qi3a~9L zIO=sW{$v13dl#{GFYTRHkh$lUD=^T>jggHRe>qo>7!AJs8_(eg0=5a(um18oBN&k! 
zmveb0sQo{l^?Rh*$s(axwx#_VDcoK!6gp!&8m=F%H23fCGx=#W6n3^k1i4PGeO`h| zQct)xa7H==9}jU?pj~Q&;j;{&wCPxn7B8T?oj{G%m}7b_M`6lCHzV?Abwcx>cE9+P z6FaR`6)$GvLv&nX zj?%Kp=x;jgn37MBejwMVqYk~>m_?K6|V#&RU#j zr8m_cio(0M^1%|Gh5N2(P53*aqJU{P7!G?aK*yjse*&f2DibunYws911NK>m_?{ zIoCd6>1~3Yx#KR1`y3b?^?{j9wNJ;?0{r|Lg;Kfp3z{9eIe50mNpxy(8h2oJaCc@D z3*|T7xvmB76kE9H0?)!XLD}gMK02)7_A2OlQjuA(XHVd3XvK5d$aH-*f7M{FyU~*| zS;H=^O-Oa$1tJI?o1K7{$I-tH}~O#2Lq^FhFBpD&|9>eD1&!#?J-3pgjmnz)KOD=Tz$JDJ2aMzk2JbpKi-SrgXBy zqmO9)#E7=d71Y8|`a@Wh6bRt%s`#A_Kj3a}D_$je9eds#am6`T*FDe^35Bd#p8^#_ z;flH>JTWD$vR&}p`oK8IrBE4(&DhA1>yVYx{Sqb`ILkxELa8(hq>Rl1`ALF({h#vX z{{8gY;eXy6Qkmx)p4)RhQFAb@!L}RBE((xabkL6@ojb!#z1jv`X6svkMo7EaPO$Wxt}kYX zXr9uq`X*Z%V+%w_9aw)8q?MS5zrO1}*jX`}@C4GFb>kWHsxO`8pXTp?||! z>Z$6x#a6Dh>zUJ&uKIx2&hUKKwvi^HM2@Vt7(jp(awIYrXT6HT@5afLnEjH#ob;Xb z3ntYr4lxGVZ+AMQ@SW5y?wN@|isd;pEqq*@D&YQwpoVCMq*=j-90x4|V3q$=WgQ*& zteyT;puAe?*nSOsqIrh-=Z?+0sK{6BBSN>VYb2j%W5Jl^6WDRRfuAxizcN!wSI)I} z1{>wZ=Z*4^YDK^d7Qc$q(fqIGmN(Uvt1Z7kR_8QlAj&J0Qs6lPujE9Xwa`%o@~End zg3GG!XRo>_xj>Z571~RiPf*5Qcnx=qn3qjsUZOBJ!@6a?jyMuXV9#sg9S`uqlwf=4 zl(Dy?b+383U*sIP<@H6sLX~3m2ojR`=F8hHl+iuujRM7v2K(@Sksy6-Q15K0oeF?4 zo(Ee<1P)j#BP!QQ{*QA*9t0@n`%KR;&M_6W{^kGiX9+32P(^Owi*voLQv#%s2@D8D zxk>L~wJy<*Io`7_LJk@4|BMFp&tHU6n#TuGW(~ev+S)!cH$3e1bVw5sFp}s&8*%&= z0ioe@wwyN120Lb5$js*CUfVB)hWUx>ty&qH9`6ZpujR&g;FF?dsk1$r=P;cbu$9qr zFNuz2BAN5t0LtIEOqUqqSj~JLE+cgOG40_mGI*w|J%$wz@HJH~nF%;d{8$CVTKw8A zZqfv}keWq?1>hjIL5Kbbkml7QSf$JE7yG4Xtgr=!hLbwOVB zwj1B7E%umyy6S}yq+(-ZbYOMzkq*I50DfOh0!(pJsPvI^KGxwWNOh6^$Nhbqh1pn( z)6B4{suOSl?p2f`+}Nkn-V0X!9{QGrDfIv@ponWEWG3xQMz^*G2Junl!$U{61kHSA z_say3@nq&Zrgx%)sNyc5eb7U*Qmza8aCpBSIyWHaIdkx>z*@sf=)nQq706?Ut<3i% z^KBk((G9zuG+qE{T>Qi@DK*FDl6H64W^U4krd%NHDFJcW!1_zB_;)*cBR*S5|0hsp zVollmw)#uEM35Q;>0XZTSxZ&ueRQ-3dad}4UfE|kC(xJocsc8GyWkkj60(ZELXswx zV9_PfDBID2Oy%BI|WagF-)@EzS*n*ymfm*}I6H zWYdQ0T)wwofxm2HON#k3!7UlSS@D0KC9DJ_psZ;}>x)DvOL*a}d44cP+!rI3>`Y&T z{?$uaF3=B~mbAJHQ9$bp!lE~&eQwZZ*Hw_z?HyYoK?;}Q@HcD2TfB%Q=+P-t`ipcg zWz4Phty=A$; 
zD;h;AF5H+x}xB>z1a z{{*`2M~BiGPSM{r^P%LSdoX&N9NHh3pGC}D%;*`1c95%#N??Tje0%`@rNzi z%f2O&`Drwqx7Dz-uRgrWYq`>zz1;KJYG~I<8g$4sL1VD>mz74I5}J^-)WvCX??1WP z)hZ!Cn5p(Q7>Ij!)i#emyx?9T;&qtl-IUIkt296s=pyl){lNnPcH+2i=-SoBrge8e zs630W;AxeD_Zw_p2M3`qA_DEwvk$xfhitqptzlaPK&4O?DEx{U5m!YBF!t#yaE1`a zuw+-Mgmwi*1|xf)-Txg$8G%&*-XHzut_L)$!FW%|^qpAwRiV%*t$@kJb4KV9Pxr;L z!+!(rM003SCKnYETD3}-9CuvN{JlTM`}eJ$?&?)5&*@M6CTP=l0?6GCR6-KM*zFL4 z`{@ogf>Agmget?q*UTx`v`X!N%eC8}lf{d^Fuv-tbs?dwOMPKRn%*}8I`4<)sx#v}%QW?Kp=>z-7B-s!S zwG2I$X`SVrS5BLD=M5=PQ0`Q{=$`dJ#sYz0FlJ~upk@RZ8^6wk9i^RQW7kM+{uV;L$O0yPwVX?aP;vzM=^05>dia98i8 zB%}kP*29B;{d99xS@ZCrB4i@V6f^JivDG;gjo@!)&8QY4@hh1Op3J{lV0q_y$GbI^ zSvMsF_hM%xW8&NL%YMbZQ^kCpweP2KAe8vs`!SCt>*Z$G&EI$#Dr~p*w0H~R)lzo& zdD38rT5kPpfJpH!28ZV($a%>zUWcr-X}8|nmA;WrhlvV1uy4r})cNpj90eHxl9%E2 zi6D_UO6ZaM=f3B!C3kWF_Zu0wP9!Q?m&x~e0AAuBu7BTbc%P4ppG(;tiogCJ8Jt@S z*byrLW3gqQ9}CPkv-%_PO2SGHf~UhCH+O;R4axj+qv6*;B{WC#R_DGGbyN}n!8bvO z(<^+nH6CbWVurC?$!&PSo=5O&DNEB6VAvn%$MXmV14RG>>_EaS!G9(p;#+~W_J%nd zhp8|MbRA7Qh=<}PPyug+S@pZ!eo$HG6CD1kDEyIR8-cE&4pc4EqA&D66A60cU>N$` z7b?>g;rS`Y8VBA@wFZ|}_ig*|)zI!M^0dOp#w#q_%88NXHsV`%!CxCKu>8r3whDnMiwKRzByf^*+p`v;RP+e2+ly;Um3P3W9YnuP$2qF0NM{TErze` zGnq;@rFE7Ix$D<5Q={hW#K6l{xFbpu6^YqRuw1=otch}kS^6Iec<1#DV~jv6k0?@m zRJeY?Nw)02;Ez&r=YbT9@lhFm@>O|gtq(v6P1Ns>A{r!I?+=%$ zve8qv_sg|*7Y)dG-umoxgLT6uq%G2%xL(|T)8_uz20j47p(6jI<+BHfbLT;3Ayfcb zhwAp1%jNCQ>!z&*>$WOp;9ovm#wVo!PJ6doFODlc=9R#X$L(^2eq0<0Qn*5;f=oD7 zyex-m5S^p!?avUZK)CkB6_^R*PuOQGq?1_XNhf7#kiDJB?lS~DuXWvBx8e|<_0lq# zqN*9`?TA1FyE@dF*^_(GCK*cAGBpT~X+7tUZ=lCf1vWt=3Kd_!lbTV7(Sld_+7M#o zY8*^{e4K+xE&1K!w?D%FccP|}uY<93;%i^&*u}!*xgK-ttp*uMClMXuMCB)PlExn zZ>#G90RsIYia_%f&bR{{YabTXYNo3(?5g3!AvE*>0j)GjuX?4-(}pT!6TOP zV|`l%-TH;q^VVET|FWdtxg_*xC*A#v*>v$*Ec{h7@Noa>gv&!)5Ci*E<-^e@HTR)G z3;gso$M*C?zwaQYO0vZbLnZaZ4N9K^t(+=Tu*VXo8=bi?C1JZY%sa6qRIOQ}l$Z=m z2IZWVo69axJ&T*3X~6Jp1VNthH3y;IiYviTI_>@yiyZLw_fD|<^HqOEg|0FEv%zbf z&U^@jGC0i??SZwrz~Yni%Y24$pgeu@<+vX1#!dTSvF06t*wDWIhXbz21IjDldgXt6zPmtc|AIwnLc|33fx%}(t1nm*KTTlF7J?p|-=>m5s 
z{Mhq~54Vm}D_=)Oo!!0aOThHa%9tW#1K1ftp!6$lOVbbyr>R3X`!af`NKWefL6nq> z(eUHX`!ZXGcN*u%B!?ng_>pwCbV~I?(r&I*$0=uaTYbkS4;@u=$MR$-qJd0byx%Fd zCcWKZNS z4*s(v(+8VWInW{rxybt@F__w9^R@KeUh*n~Ua~T(_NV|v{PIUPG?9q04#jg-#YkhU z*R`Du4DxovYN2tNoeP06W^@0v6n^LS$ir7Jpv1ML_$|}i-V@dh`XW^54NziKIwP>8Cd0+ zJc2Gtx@&GOaA#^p;@w3#L0SoA^ngAfe$ZLGl*SqsG45ax+ieJVL6xgak5NpSi?En% z!Jwg3RDnCxMUG^d&ZaqW${L4X@+ISaVzT^G>K`)TMFZErS)NJ+m zHI(!>u5<@H^LD<(Li;)hw?H46isp+c5c?6BAhL{Ryt9nc8{NFyd4$n!(FwimSabaC z0p}w&ArLQ(JDVg247pGuHh@jZtqCLu({7s!v_xPICHI8gz#?LNy|Y0V9+E59@gm?J z!)DF$enQ$hC2dhgpu@`dxI5A#678^>1bL*{XK&8@E-P1+I%=W+FB2G-ju&;{GdHGl zCeCLzF@Pq-ssIw5xn|d*KODXrbSlS^!(_mwl*`i+fbDvJjcj*c-}M5+Y2B@>#TA5~ zQhKQVGP*aCBc{6LonpIsXH>WDC0@BgJuCJrOL4o(HegvSC3GtO?F0&%VDBT2nK`NvC#+HWO`1d@yP-9DIelu3J)STu+Lb z$!{}kG`M~ExU{mtan!{Oq`4BmLq1~Y>J8`;?EcU5E6V6hW)Sut6Zmt;$N@d5rFGds zQd?;Ybs~#i66K)(JRt&{V<|GsD;HE&EypM!Qyc-mM!QeD3-;SX$}1vt_nkfmXu4$l zl=ec(CHsPlv3YLG>2y8jFPqoG2sZdraE}Ng$xl89u3$@GMRrQT#Q-MoC|g$mwB62w zA#mpLcKhG7irr!TANJk@D#|Qd12uqRnLsHJ$p*keKqTiRCIky90f{1#lSs}YA~_fU zC6}ZkOU|H3P!U0LMnEKk@E{#2d+oW1wi`wLE$ zen+GYnDWfJuZl_RAGeuzADN`q{*+a&-eY-Sq2ydgKb`(Hji=D#+@Z7bg8OK07-z|g zju>69<>z7A=1dk9n~A4iEhTAbu+6t?*S1ta?f09bV-l(xsfta{F`%ioOK~-BVp3b;HlH%Lt}!FhoSavF%61MOabG!w z^Ooz;wMql6&gQmqCBjcc2(2u!xk%i;Mz4phmJv(GH zGPT5d(EIGQV`~wCQXyc_zbVDeOm2)@`tAnK0w`30&0fSq!XL za$H#KJN%rn?wO?XsoT@)kKU4fD|)#8(-?ScSt%BbfoVTk1g_X?*(2JsTNH1s2pUD0 zE%XxB_}$~Y@mo|bZaI#l{B5>i3=CDDf#*Mrfw@E53sh-UchE;vxa}ew;gChxZORC{ z?P>ifG9*SHc;3@Ff7L6j`@_*yxo}ObFo~(oQ!@u$u7}a|yOuu3{CLzp$6_@!9dmaj z?Bwv9C!6{zl68f@83VUJqK*SRi6YNPPX*Rft_5azUvhLr%f3AzKQtSXbN9om!B5{` z7oW3XbedEQ;@Th=FU%!JKCD;mo!@$N zw76|9hJmK6daYIdp_=?`yBV!v{zVnB?t@$+R>L2Y!5P@x`qXRHg6T_Wu2qV~6Wz1f z2f6O!!Icf8{lTBwO(TwBtB_NxlrX&1mgjn|{p0s~b?dVR?Ep8{rAq82%`Q2w&!~Uz zMCj6oXGsfkOQH_B&}h3%p>@mF3N2E>I>5Ej=j3JRY2Kp;o5ty zIr~O}uflAEOWp_PBi;w0M^&|Fv11BUoI8da`aDj;RM2i}=PTpZH~RW}+G$NlrFT@g zvB z07%)P9&Q+5j3~TZhGE**sKZuQ-Y77s;vX8tt(-jl&0~*Q!tJG9`eEItN%nGh<+V5r 
zbp<_ZBE4F~VPLzpG@?MW+G^OD@8s8sZaL4nAFZJ~Q@ravK_fEXychG-vK%h*!iYhz zs!8rY5)MT7K9&H0Q-AtbhB~OFL!P)#`yk>Wqus`DSK%UavC57?Q=l zXNd*ov{D0HSa8w4FV-2zIMl-o_h6ZV z3{7r1iyvQg6u48S%)XW~?Ma`o_b5{KFS+2f9^H2Ggrmt9_ax`Z+5=ow7LN<7_;eDF z4N9Kij2C;`8dqRH0Ga1Wu?_vSCWQcXmXJ$=xQnB(k5o+{{mi$ktu2RJKS?(;gq5Fq-jSsGi-x7{>JWIC@{Y&pnEAl1^Kn>i8G(8)A z)H?;c)BZ@Z8(Iz*+Qyapl>0-s!!5hh*@wMh)E{t6QW~a|C)%-x+E{!1)R~=D`^jA577&21nnR4B#$@zQXAqlp#G7z>X7>h zN*mBkIR%KCO2P~7V|Zz%cGMk?cY63Mg&ANL96v+~TFRd$+vLb({gd{?O=ZuZF_-#M znXT0R&)&-iRrI`z9A^hl(?_(np3e1&*vR$0ImU^nxBn>lP{0#-&?W4Nz4t&CS8e3r z@fL0Jq(hK~H5kclanOHEHKORZ0|(?adu&^~C7zz;-w)kx*4Jj;wlB}=Jk1aU8oq1N zp1^ZzCe{;eGH{{hfSW|{#Q-gzw(<_af78v8`~feJELu~w4P>~_Uf;hLok$`Lqav?R z)uPeNo8^JIY5mgY<1M3w$_ADL=C-_-Z4_aw!$aMGaj!gfE88`zr_`pImu6VcnH)n2 zJg|mW1@FLMNw=2s>P-htP`F9Zk801#mN|`+^vK9*&T3$Up{otRd~Q2Ww`X2IWw8rh z91#9FP1-Y>A6t3T0VLR0=hnUfG)x|n3nB#E%q@_;K6_^@MbU< zm5rf665QI_lH9EK?s0%)DKvJUPIgAgN{}CseE$Fk&eF(8H@)~GfqXRY4qvP?)As9LO#3jUbj7*07=?4+|k zAkFjy#S@*1ovb;2n3{_2QV;Wv85Ica1^3fmR_WTO6k74lTEs(7QJH%t(rD(LR?$D;e2HGIt%-iN|0Vuxgmo47jSxtyVM9A5DQ3i_O{jP_r5 zj`fy!@v}rc1Kd8GD%*GE`!;vX?0P|QSP{8z+Yt8fP@RPl$k(V?S-<7Hfzn$!0tukT zyo7061IK5hFFT~~zDboPJY~i2>M95_dmrq6^>y&YpriO>!F|p-$v`{1ky+vj9Kws> zV4VLQ47Uu~yOD02s3yDeZsXG!Y0m&!x2M-7q7tjnoung;d{Hojgl^p{F}G9KqcsdX z-igeVCP`nt``p`GJmcm@-{bBvga-Za5hyION%_ir1wTZt^Ligy*N%e=Xqb`{=-|9Q zT9`3O7lpF)Ai4AMO-zZ!hx&;uQ8HUbU6QAL>`%)8Zp^(WlK#oRM!VnaJtSWsy7<== zE)X7r$Ts0}cGM$DCgDe@$Liyh$-}J%TF=^Jj893-be>JWD5x(SWF&I&Dis@Nb7=i@ z8b5>gA#Pj|7-sDn1HjLH=em`BO~c@e7q|ds={nKy8K;GN+T5(~bGr)v{VXg{{s^U* zJfT3?p$pnBW_n-WFgqSdSFhkAOTPUSJRYB%yc9CqJHga%iV0$G5sLmFfal zZ7&-^tMC5dPgY-V#FRPhEJ#V4-@E0p`~4NgZ;883geLQG^Cu(*6qFhieAy?@u|Fas z5$(_8{RhLG+a2Il-ujF zEM_@VmXp0MwAd098g8Q9AT&~Xw&&hdo=SUK|I$VKS4*+G;|dM)vjh-I zKV0RuHDcBo_H`siBD*b7L!_P*t(>O%gyUK8^l32_)Sc)0S1)UKSxmWtlJlHt_;4|g zEH5?Aa60EkyRP~}RJ-PK)C(R1^8<`qUrhBMhp?-((F>=_s6AyCGcj_SE*deeJk{^d z8zHMEAyofD(N50fOcGy6M0eA+JFU=tYpR~Yl|jF8jv33Lfd0&V5~2+1TO)D2T1LI! 
zzeV+!kznL+S7+B}Is4=>V$wOMY$ROv+WKg&*%)OG1jzldlD{n}7QgEu3a$1czb`aa zT75XOord%y1+F7HLzWCytUktm(2_>C{_vRY+lQ;ONfKgO>($dCDy}(wraK%+(M{Cv z3wrLGY#t`$N>&{^d-!&20OvRcdHc*j0sbn9&(+`y3}sc9V!q&if+qv6Jr2$#mdtYvfFg?w-Tn7(xvfY0vOqa-WoCOzlj_uEod=3><;OLPOJ`+!U@|r&(!Z)@yMJun z=G&7?cVjYHVpCNPGq^D`3Ha08A4;IRm6Twycg&TCUjCa8jX2ecNtF; zTxgVel@(j*wo&U=W8t=bo6coH(Xz`ZDN)I!puuH2;DLt?$GQ{(yd8(8Lp zShX7`IBbHaMa(Q5m2CWJ?GzWL)@zy$7WCRWZH%i_1TL2^H?DkRm3XpfG#)eKJW|rx zgks$Ix_3y1QTWV6TU*1`UK3L}EPi@E+OAJ%_VD(P!+A0dF%l9PrE}wxofB;Wq{nVd zEdhV$7>~YnStr$W>3l}`OPVcq=p3o%oT?el({1gAuD;5hg;DO#!Z3~diuIokc#Jb- z2+gR^0@1w2Kw{lucG|8YwJ&SoyDvLSwf45Kg!txI&JajGncmab?S7S9^}L%>nx^JH zyFrF7r8n7!r;<%f|1GxY?T6T6>!Rjj_YjrNVkON=MUEr%42G3ED`;C+%(z_A69B?Q z;tQeqq|U7o@nwtI3_HG!T}v}ko{zQrLZkDOxf0~G1(SwpZ*(x3jj7CrX^V4rZ&y-_ zhmn%I{_sf70>RH}m5R0DG_lQRwM8}+?-jH!1Rb}LS|YVoJAuy)3B5839rOCIFa3m< z_^(NwnM>D&hP4MD9Sv+K-fVZ<#F$rX23~YC+J2P$j~L&Y=I zIcXGyr$Z!9lci}F2vLbE|DXYVL&pu5SX}6D>^LsAM@?n0D_P>Yy*h*3o zG^SKCy-ZBXv(0y`|0QYQ>bPZ=vHa%ZH^0HG`M{DYm-db9cj5|Tbt9Wg-2xW_H?B@u z_1Jd%himuu$86r%R__ldAKSlP+?^1n#Q>U($uG_DV@C6|>f%fRifeQE=2+&uey5qV z!u4sVS0LNh8CGw!=Rqh5TFuE^?jg$PQ))&yU4!U+nCa>xO2SI-j4Pv!2kN@m;z;3A z*F^Su6xe`R&-95!wcU;Yvo8(1-h(go)TB>lrhijib_l0khu`a)rOWl%_@y+BdRwmb zv`J=}@_fREN=|`I#T$zWg9Ki4$6koSM#_FWckgL=q*ktF*I`8Py|1+R@Zu=ft$FP#O}k$D?Tv<+fSk{3_PsJs zb{)~)7%b{s{ZIm2r^P@gnb@Z!%4NAiS{kR4u~+y$%xkS3oIWD5oP347<6o04vi!mc z4`QX?e&i%zUq5t=v_-FcYK`%$jd4xW%*-m=<2K^DO(}8Arg^D&gI~KKnJWf7BEsr< zgI#UJomVPq03@t5qsF7(L$TbuoGLgPyL+gAYiiQ5wkcxawAtEII{<8Sa(!v$YLZhQ zs~h0(K#fK6hHQa%naX(AAjI>}>Fd?5N<5f&+RJrp{xnvKOLgl_2U`zfPDSw2%4Dy- zza&n{8nv)>qrfp!DRJW*gX_u@7Pk=XOe^-j=X4X|4Q&e7dC20|&8O=&nm&<;%=ECc z$QQq|eV^~DY`mAy*@>)os9$2J@yd6BOmEWtTcI2#PeKbWtD|~gD_a;FTiuvCLe=RW z?snDPpR&X7Y@woBm+n7OS5#BsuV<~RIN;JjAd)> z!==5c<1!6gFklaTlTl*hvtV%YpQ@RE?C;T1NWrA3Ww6euam%iox10P^b!*&u+cMfa zv^-)QCylt*S`@n{=DMAG6CoDu-kvmA)|ZpyT{>+Ven-LuAC>R6+5T(@Jc8<$nkCNg z-g|ej->)+#=2KTj#a1_8&Y=yLeD@~mN%cKwK#GQRW&(6vGuzKv&TeO)lM>dl>-1VE 
zPmc+)HaF^ql}D+@pT4B!Vq+%%<7-v;z4u;n@7g|X%}#`4E2lcwc}-;1KAtiI5&YDB z`!piz9iK$ncAFKXiMd>?ST2hg^kLm%jo==7(SCY< z$X~9(J0OBzc_|=&ZrXOVTxMf*kg>Y2BCePTS8k1ExsImZeut6k#uY8D z%@HX%Tm#LSEzOx-%|%A*kT9qrmF`SX%5I0(28GZKuiUE2itY74RKYc1r>?C=7sF!{J9(b~NsX3leE!RO>h>%66+YgXFgN$jDCnb+478Bqe6Q)n z$JVZEdTvH_Lp_#5H8s&rhdw`c9uDE-s}u15>}m3>Ru$&rsEn#!zD)iK@sL_QeqR)Voo)CCG2l_ z&~P*n&YU}E)>WUU^Qs>@~RfO&Mkybu8{y(1>h<2xUN_jrLAf-i$M%}m^q{#0M zC#9;EQv$je;oA8o`6Hf7#|3%KOTJ0OrI-{fm0t6r5pnU^E~wxj+`1U+RG-Gak8bfjWc1Ptx1P%@ zyS7c1y!D^H3UV=hT?TbYz8&fP^72^e&#{juD0fY?W%^CH&P5dtm3PPKlTIF5{iOLy zZexO*F@P(^*?eL0OOA}VM+D2iWr6>?<&M%pI?yA`?>;O_&SOxXHEk=$DFB0vP-~Oi zpjhd+v&Z-4UjF*UV2e`1;hz4+lxIb?m0WZHp9!>k9J*7R16xI^qn!@;FqcfBljxS7 z*e|R1=ardP+x+`&|v@^E4JLzJh+ntePTH|Q{6xF&z-^JtN&h`sa-33zpNag`Gm26unQYp_pjN}LZ zbhrP#+WYP4Y~ILIQ4aks%ZNsy8l3{c$p_qr;B9)0^Xsic4HuZ2QH<%P#(9r|RRF+4 z6WpKe`47A9pMTG2;Vuxs3Iw^?n^n85Z{_IMe=oKJE%NMQ3wHf&x9w%Op~tl5gt8bA zM}#9i4RetNf|-f=i1*1rZ!lfq8$Ch5#@zLyxAg|sls1cnz(0NL|9X$V{flt!8yMvd zf~8)nx54N+DNpu{!V_Bd1D*$_z=|ixq^_(9LC{ck-I*o6$V{H6qW zB)OC)Kri^f%qz^D9?c8K3$Om?x z;t1APx}1Z-RkkrdL|q4zH{1v1U#g(a#J{ctV^Cc{gK+TE@fqU|9OorSx$$6HSYhGi zKrbQ3HO}szm-^2u0lTdcPYOy9>fFoy@Nf{O3IwSPJwsp!c!Lr{p72j|*ilea-M#Yr zW`PkHtAJo%(_^rB5$Lu7fI;VM`l5+#@Ok>&4Vjm*nvef_{q@KR@p_C+L$=s8E(MKPquFZddD=^8YaAMa0?X+U@%Ej#10%p=U{l3#u_jFX|Mn5 zjom%)%xm-B;*L@WtLmQ8Sm~%6WS9;5cWEfMg4dUb{7nqjDCf+n+CvmLL-R4O@$vB& z=X9jwVtfEMQ3WiP<6>>9Lwm)4xYvZMx8&!YrWbB%Iy~myoQ8rOiR``n(N!B_Z}a1k zj$9+9(xonw5E~^vyJlE*GtD)8WS8uI%&8Je!SZL3ecPK1I%3w9DoxX zBKKMpFX`3hZ;U`<4gi88iwT3^z1sk~*RA8sg2b*`YH!(Qe$jJ}=N*}tC9-}kj- z#x67(XZYcn84dg?l-xj2jJwkqD|N6qDY){cie^aOehlx|FMs*IkZp4xZ5S3StrUGs zCE<510Q`fR>5b1KGyYx-j)W|kfc%h$D5b<}J|O$f-<6bA15<(;;IGosspT3f!d0>L zR!OY%ZU!Rhk4chA#eIqa_kPi+>)Z>-gpHBXe|b{i!*#JS5pG}f{Oh;{KdZ*| zy4M30>T2lnB~psmgN0KhOGwZcgBa+Ypgrq%ZMt|O!ye2>)LR5>j{obyGn#b(FAQHW z;pH2xu%8_)98XEj{_mnV`QE?Y){+k-_fiuGv6bK(LjwuYL=rH>O)tcpY}?B9u=tb| zOrY>MVb4hXV-OzXk9PR}aGO&X@+S=O6tyB5vVgtKDT&%QEo~GX)D-3(? 
z-w!ElEO_ja^y>u~rw_-&i2O9!a}`M}Gphv;<{OF8vH$eEfBFqJI2D!JNBfQ*=V$@}@NDq_2GjW2 z-Ha=eC(}7hj^GSmKGP!d{Kg_u;R}Df!BpHC!a2Q<&tHJaDilsUkXSkl(K8j>TZMqF z{+an9CexajD?&u|dtps1%`0$14`O6D`~eB}1KMZPOibA*P2bgAeXaBE}N z0b{HSMzDWBG$HE+j*`$X3h4BRfs=={;6;ARzDC?NzsJ`#F0E3*jNG)*jiXArUa zoGCa}(u)j(T*o-eR|Y5Ff?LgsAcz+Xfjx8pC>WPlAG<_nd0_$AvQlnvk2pm<^q1_j z_KuZyT!Tr}L>8hkl93Sz9#@w@6W&US0{7nY3lov|CmAcdi=Kt>W4(V{T>sMx#dR#$ z)oM@Xcbk8KMn+fuciT^d#>hZjqK^?p9uq7GPCe*~m1?f8Q#{5kgk!vhSPv^G)E`{f zP4w*vxF15}E~6U6*G-Xfb+$>4tNLRIyZQkh=anBw?d-8t7siztm8=7VxPyTOxp=qj zt$JI{|5pi2TS-2tFFG^G|2-KU|R{V;kZAFBGBzf$s71X^@u%P8E)5OKZ#XUZAWR4Uy6E1y%M(Ul?~_Y^E%2`e{>PEQ8vfeBKjYu85b;|~ z`V}Jnk?sGN#NgMx4^aTq3;??)W^ehgVB1 zwkQof*nP+w+5?vW+(nPl2n6RZcYVwlhgu^py|z`4C5NMbVO{%2Zkhkf^DkrUK&Kxz;waDc|yJ4 zc=BGGvZf|Ia9;zEPQBtKdNfc9jk0I*>Bw;%RAM7X{!@XDF`@f+{nh|_d?JE-z^Q*-%Wo~l>a!5{?Kck!zlWO)Hp(0h z01Qi6)=;2G{%B9S%@JHjc<>OhLXPRga%?4(UAl1VnT5e??IrQKCxUbDPY!{ZcmP~R z^W8z>t?Z=B+lp@U61^OT(#3R(0N^jzAUB8vx*OiHr{x6VJ87D1fQlVbmWX&fiG!cy za}IXJ;(Or^1MYhnyuog#poMZTy)~99@G=jVX&npUHHYKp(?k;e=M%vuSY}cZfY^Lg z1uL}g0`@ z53_TiWO;3xAEM+x82)t|3&Nrs^?N?Tk9Uk)=c1kGK6HKQpO0RyGD?b4UN1jl+m@-# zh+GzH8Jx}ng;3o|4!Y;6k@r8I$(vCxIZ0?#cmLaawc^~!%1aV! 
zUpnBu8FyohMqr2fh)frrgmVv*pVYwI}{|*_f?$bdUQ{0dcwiKRbC6{NS5qHADK1~pMR-ZbYHK>uHS{* zuram+ZUMUQgPA)`0I1kqy^ZwBkRZ@B0S8fccCdPOZ6foMTJ#`zV*A3`E_rK1ias7} z_}AwOUTP64R~@SZ^PnJ!?bQb4qEk5N%RbnqZV}+c=vw%;J}FlEze2~22Lta<1|cml zHkiayfS*nPyb%GD4Kj=;>~ea%%lxzb?{iw~ie_OswMl0XN*=iaZv z36GnvONhZV^g-dqTpfZe2xN_MJ}=S&UBW_eot5iV+l20=@-rcKA~z9XykrRg8YW53 zoCE&-L(uI|rP>7;#F2ulkq`F*9EMuGXB%LOeo6{Lr-4md)S8u=k}_Cz^t!)ppb!g; zC}^RRvG|!Mu2OlQ4bg?_J_Y9uUnBz4Bze*09;;4mHD4W=bZ0rQ&h1GmosXLF&-pCO zUOpRO!RzQ;cl40>%GZ^ST5&1>vt8kDmL9VYPhhUUf9~GbH^)Tn?=6Kbb(`?SN&k(8 zh%fS+2cQ{OXWDcgWu*Kl)bvbRaHtisKbE>OwJYqqyZfjr)><>OZyml`C-ywl>T;cA zyxxhXNirij2m{aG&A2X?bih%4Wzk=LOAvp$UBRX6enUV22chwW?|(P={I39n)n;l3 z1kI`^Os;CtDjJI0SJ7)3)(vM?&H-MYzaKD}5o3|W?toF3W1(IB!2)iq=!O-Dpw*t# zTB=Sl-S-%rOyLQ;xt`X%0_%BJCljMo#1}{TNTptO0=~ zV;xeo@tH{?_pKf7wiKf+Ql3a2U6~y+`P+Yq9Jf*tEB$*UdB3^7Y_Ze>UcGv?1#Ov~ zQgo;OYeoN?f1%fiRoaV2b|$r^wW4w*y=@0``D(pIeo>_QekL+Z?#nhGB1sF&j8nGU@$%@ZyI+o1)e30f7Q zr49g8x&t^jr1Lasu{hqE1=D879F}-X2;>a_ewm5lm=DgdtK7%>E&GCJth8K8{NX>B zSbr2eKSQ6jt$U9L>U1M-NhO%qMb_WAS4DBDs-R+X%JiZ~(`=%alJ{3A#R8D|c!VcD z*)?G)H?7J`Vq*ZQhmw;!+iz^cc%r7ee``s9J^yhYz+=IyIjW? zPlIks+Z(27dn;btrT_-rrZ2st0BT%pI~7iUHRl8!ytvsR#%;l-+1!>3YoOnyo$r64 zcfQ+78;cjS36THO(*8LD$WnB`mwvS)G+G9WKQ%yPc1S|T3+<-HT=~>MIFr6|21KIJ zXgt#FL2LbL_WX~!^}l)%Kf}PUUc|3n#J|L=|0TVMUk!I8nf+?G|5nBPFPUfnaHA5y zL%JZI6HqB1Eh=Qv58$Xsx2@HHC8LHKC>>t#8r_%Ba9=lS+}7DvWFi1Uz$vM?#Cc^Iv}Y=+pmo!$NfF|E@4i{ogz8n{WyEM6IS$gNv* z*Tr?#PrC|00mkd?y$wj`{djWH z9cb-za!YwCti|jIB6fYyLlXddfI0}O#3^q*k~$A(l&_6k?%;B z8-eV{Is4+-q1swNOvC|V3TZ=C9TJ(uicBuHYcs+{uAxucb#)l&fa}BB3OccV0G~!4w)moy$XI&kt|?9|;MKlGwLB z*VjT?Z27Q!Dj&p&+_K3H-P|il&_JSE1M6Z}7Enm*7)9=n6|fTuC$sC2YyB5c7Dc*~ z$kK|i5E{1_f(%I@4{LkjDM0O*Eb;PNqgY6>`%K<4QDl4sJs0!?gsiOsR=IZL|_ z2{MVVez@Ky4$Y&#(b59;ye+VTPE#H4hSsu1cMmS6>kvnW5cq}O8EwRa5<_C)z++lt zgPRBh1_;|{kPdf16POLJ0GanVw0JYks}T3@u=@iSRA2AyA+t zAk7>D=x5nbf&c;+1=*Cl7G6LG2RQCodgDct?XArSu_WG{cWrBYBHkWElcvXh=xyUY zN^;kh3aBK>afW=!M~QS9V;}sys^9#ZS9KHxj!^PQCRwe_anTpfbld{JQ!n7eMPW#! 
z7~Xpu;AL3=_G+7GZW91hc8C=Nkp;45ahE1R|Bk$zXq}4L8jBe`8nbf2tQ%?I6~cQs z;dy-A9Oh>}*WK2dpFBL+oxKlZ>KAQAH1d*J`^Hv63>*qK$?)Nglh*A91|7r6OM(8- zROPcRlm!$e$DJl0A^}IZND?6C3CB4hM?lN&d0DzhT&27~FC{`GXmfqDb8i(AQMoEM z?q37bBZRh|2M*(+hr|{do3QDwfY9Z8N%Tb=MNx|XSGQNWE4TDrHMO>eh*c$6bRX?s z?_18+1#2)j=cSw&v-wLvZ24P_OzcXmbnm&!lD@@^m6civt;|x0wmy*GYB@_Lj||lY zW!k0jSpXk6E`f{Kt5SBa+>8F%etdh_HMn*8$GgKp0KYEMO3#ED3WRd%SHBf9?ILaA z@cz@kG!X+o3a3gAc%W`RtjL8I8tsv-;8`Ozt z-2ZSotB$keZr&D*2gyMj`BGrf@3`4DcM*%{F#b#=J-UBp!21;Io(PC#tq_7=j zLxR-JtA8^{ArRnS;IUuWrr(B6|3B|10t&biL>H)*d+oGA5{4Q0tuel5p*)ujt(K?w z>!MB4b05wbs{H33INa52$W+La4AnERV2!qVu z%u8o$&cd_G(Rk{@k0<;7ZIiN zl*%i!L2h&iCgCa>I~j)e7G^Jz!ta4(x@0v88l+d25ZRBg-VW|sx}%IMZ}u8nWG6>m zVsJPWaf{g#-3R%}fQOy8wVGUJ;^>gSTlY&mf9&1)U` zZu2l!TI*>Iv2=4{KMse*pOo2;vHmt*WJm0`h9Mk_|5M$hJyM=wT8?LsCdH`N=k)=Y z6COtv%(mkn<9I4VL)*VaOEM(KAutQ*6O?;MC_7kG0O<&+bai5!6;3oV zAwC%4_R!A~`7ksx|H8ttIpPkG`%P_uC;Yb`0n}Oh)#0#zJfdW}7!j9D?|e&w4j;A* zCRVWdNO}M@!aN6)Te@b%;t9;6HhwA3^nF4KL{5E_RP2|Z{nVUWXnyma4w?g98~y-r zc%rW~3gUIjwE*A4JUK+<2sG{^IerlHJbFDuMau^+zoC7O=_eZjEj9|}V^rvOJ5qG` zvgLDP-@;2W2e_y|0O(W;7{EKgB7CB+dO3%~x`v}ZJXjw)+Lo>!A1n>}zUtL5p2cuR z5xWt^?ovO9p`S08RUKLrK&;NCL3pq~2K~4YRNNgf zl2AY00Z3Eb$&M_=n>X`X%R?HWEIg9(m4Qg$0^LKToe$NAC7?WO_j8p7znsi%R{S)aEb|Hg(pkXfwMXz#OOu^>`cV5=gN^a4C707Ppu~6IriYeD0sCAo_Rd2Ts1q>` zLF{jcUPnN;{tRNmz}cl06kQvQvz*Fn*Pk*P^Mc+d->8xgU~NNG(aXe$x5HF~7qOLq z>Y|3)@!rzLnfM<;+bcoxYAsVxtB=&Y-OJc>xBsdT#PKsB_ZW%1rq^Pm7#704dcO85 zWNz_k5J-Cg0u`(E^2I-UrY3D^lPe%&*eSU2<6Xq0`kB2Tcb9Nv+T)Ks{vQ`2Kj~*- zTDI7>(%Ik|dMol?<-S=cZ%Y-!5J?cu(jS$o;I?Y(ysNzs_(Aw!L*#uz96g6l}~vC)Y2+w_EVjG-v#th+IB4`v#HsuebDm! 
zHeM(|B#N{Be00<9e#=^VZjfr|GGGLNuExE$q{{nQ^<2#Sq_S&=;LeJEg<1>gAs)A@9vIS?Wxm>w#1mojOAEM|JZcIP> ztHp5N?(d7*d+TPj52G9QLMI8|L6$*bUvJ?DZ{sO?-n#`&|p*fRCro`Q%ZO zSLA%Vxd|r+$*bwB+5GOF<5v6l=+s4?7tEZNxSG?iVlAa;o>K^1^Zrf}RJp<`Q zF3QWxBNe1#yLrm9M=1Hz7Q`sU-%ep$u>)`<{1HuzHt5I6#;k(pz+=uf02vBytqcu8 z1Jq)4m_>pGXE=vm*-a#LFzt3v@%*Ww09<^xlpuSgaSS6zb-Mr{kyJw{!7i8L_X615 z5F|g+@L}N#FR=J4s|Se?72L0pq#VJBf(|Oe?4CPm4(&+GPu#UX<>Fj#%IHcg1G>HU zYr7C7z7jC*s#^Lm;X&~8wTPfjX4#TjZvq$@A4RUpz4LSoWa#@H4=ael(ZkhZ2l16~ z9aici9q>3T?tS7$A&_O_5)%{0V{!_e-a&^Px6F4GIY!RYRxzn-JvE2OF}%tj9f$;$ zAGfFo_JnhAh>-nm#E$@C&)t?hUjb0`>hyv>wa>IL?OK@Wm*||7KM3aMSKm(%dt^Hk zb{0E8U9lny2vV_x-ov0=Its>~=WAhH!3n+a?E6~%a7EP&J8yEo5&BjBKHOl%167p#2GHZ6?rTK80e|eB0goZ~*R?B#Tc#=5C z3ePqTK$_#6(RvMl+_?!RWOykYGpMk?nw}c7IuDERTuH$PemVd5hL~+1T_l1)qrH~3 zTq~LS5h~=VH8G62;b-Kl`JVgwfi>o=d^u2ns(}PDh@2DXegvG06XZ*W>^uOZO>#rbN-K?OOsscuX?L0^W=;P(D zh^*)dgIL2q(vd1Em@k~p(AP7hxq6@3M)$I~w zXyf6{zYt6NiLx`7(f$fI1eKjK?}@rOY-33zha0l&lBdFG zIF2REf>UhRwRTNA%}E%@pqbS{Ob>*GSdrK8vl#*EU~ds>k_ZST-S{jz{9I`e5cBEp zH?9|ZV+Wqyr6U%!D_D>f<|_Rm4GN_62*pq~RXP19bMM3+Lr{f@nrg($=tB7U0;mII zz&#%57QVMGl*11Of>s`KNUxV0)Dac~ufxz0vtbB5aQ#)bgz?#mC0dBohsl8D6dROpGp$id>3gL;jXx9qdgF1T_X1*+NRJyF-IQ!XxaIK?oA;XE=+#J!PA|gS|l3 zoOC@1VO<7}f?`%+PJ?)fvz9YjhT2W%Bj0-}%_Ff^GW zNx=UYZoksPKw!iy04fT!>Ti_est$t`RC0-*v}IYvW_LvZLUBW|66;GTTT~caNP9K0 z&-#)K$A?2WIY?93hLT++!@r=<(WFeb+K)&76I;bJ-d;mVNvUQpIINtBHvgeibwEYK zQZ`Mitk^l-o8EU6zmm*|Y3WnMI}dDcohg3|)gM|8eP;>fH__cHH%uVGASTC9~Swi`v6pj6C|+K;~UM;t`v)$8w(&SALgq>UkX7 zbjs_j7=*+El%P7LL!v+4KoY^NsuEsv^JNmLId-@wQbXZ%Zzw>Z1_(47uGvsio`Y6! 
z@rUcTdSrEwIPOrbB8v#h+4eb%?CDpjB5O{RXPa09b2cHTNBe#)c zQxJ>^=V03)_h^At9cs8#RTsgQ-bwMQ_UR^+=f1~XZk~;%ojyMGE9aJ z(43E-?n!k0n#vgNxcfU1l8odbjDfgXw==-SKj-yITpZYcwL!sCHrP_bKrdTm(4UI+vpdk&^Jr6)6zq5t+Lh=xJ~ zxpjSPu!WFa4b@N2XhRY3pU&S$1dfod0um!x5%I3MO@yhg0YHS|p^DXr3Lns#zVro- z^J0bzs6AKDfTZzU2f`tO{d8+)G12)1&hV;B4v}NSM2*x0vbWDcC4=07S3?ef-tSFH zI|;+dhF-gV`htE}CkMw(>;R(6&bYl&J3I?Za8^NLb77Hl6^2(;pkVaz4PHKiACa_z zJhSLH!aIBvFRe5lK@9a*LYg#+%e0p=q9z>+I>E`=a%52gnnMb1#paJeE(znXTy#?4uBZo{Rc^HBw{ z-az<`SPaz5?#LL<9-1KHtp+8dul()+&5tuP>;CXZ!TC=-x{v~B%f6eJOi=-XqI*bl zAde?X$h^9rSn-!k`UvGy<%#l%4ItKSPeXpR?Ii9pi5amj{#_0x+~r&byz~h}U1B$7 z6S(-iS|mdsk)kE`?QZRdA4|VP2qx699!UO*wL5qHN@FBs zzbXWfy#5ssegy;sr1UEw{0a!jZQ@rz0J-#E6~eE8fM7*_RS3TV0@Atp6%c+^2tUzH zzXHOqfbc6I{MO9=b!GTfA^f^B{9^?3zoJ6OK7ft}Atpp-qjdFq#4M-})WG;o7r;ZO zzXgg+AL};Fkp~R&p;=EZXJ&-61(VCV<3%e&k5HYN?-_{=;rEeb??Z#qZGUx7k<}2$ z1@ue?^=9kUA=tvfSdT-#Fm-j!>5mya_%|UD@(h?yfN9#A>fcH)wUTy8o67IzM9{`- zUi|i@051bXU%g|kp9rV(d_=!LFe9zYhU5Xo)7V0Lju?&pKDl8wRtH$)mh<%jh>ZG; zk;aspsR+xTh6XVV@)bF4I+?U`)f=8xPQY}X1Xq>f?d7H#0yZMdA-tBP>!Y- zwkC)y88Mty&p(Ukv><@{UcH|xHSHB!i^U`Ivyy~H%5i;SG<@@2S7ba9&M0u|ZXTz! 
zQ$%#10c%x)VL`NCu7Tng(7;r*wgG+?0Lsq^n4IHQig;<==aIxlr!h9}ckz|KPxG%f zz~uR-!4WdxPMLWn>o*88!hwij&u}ED5M8Kn-Aoalf5yp)TRBBjO9ZO$HS-G76~Yg| zxYP`XX=@cv145k}q-K0wTHhsx-gHJ+AwWGi3B$=C67LEo^DasjsVfuGp1#kDnTRdB zjC+a~%A$1kA`J>4LBhV7PHZPUIl&mfcN|0x{15=mV6|Udp~CDuaiz7)&z>%gW=Y@X`sVG>Hi}gy%@M%6NhfznjnY*35Pw z5DKc^KRFo(60!kvj&mPab;nzh5j4V{28;F#%>(qpxd*6@iw@}SMbLAFU|cx_$ZKB& zcLg#@n^`<5>o@{dQ(J`?is(BtVAAp4yEBTuhu;`Pq{#u8l&6RR$d;}sfRk-TY{y&9 zEP?v&+$zGE&D(CS5NrfxhV!Zf5r>n9UDujpY5=RP8qEz3O@iQ;C~)6aPdjBZ!o=7Z z)}m`8b+V`!oW{1t`zqI{=jnlH)9j$2X_kzed2K zm8ps8_Uf}>^RPNVvEc^Kv8O}v&lG%*DhTR4oeaVA%VPKgenZ~yey#Q-28RD z&ux2?tz7vP8M9iRF&csD@qRZaMHHEa;85B))c+JTUr6BMv){bRNQuFzwJiQ|*8d}9 zAXW&4;{e!}@;SGy4Yq0DbFLdJ9e~u}AF~3uZY_ezfx~O#Pb-k&kLF)WL>wd#l;Sq< z6x1J+%aUg$#{fgL22j{X5e*XbNu zfDu8VNI)tD+%E<#r$KmAexMyv4cn7;F&cvIdBjf;)aZA=T1oM#+1Qv1tMKpNoVL>= z|H&Q*JP_c}pCojN@WW`Kl$dynI6Lyo5sSQ|EZ5iq5M~YnPl+pie2D%yKw8WfdMSMU z6#fBEr2+W6AL4s!{2AU3BhZzN9HtS#-6+r@;`)}26pmn=kBiL^d+!atk~gJ25l+T5 z9Q8Q`Qb4eoikG8!e#@oz8SCDZ0kYf4A9wj=vabk zoWQsX;*SYm=294^=Bt%)@0u-yw1u#Z5aFIkyFP~imu{*UR$Rv&VGCkdq=sPRYa;uY zic5e+p0mRVD~FW5t%-O$Uv28e?PQl>08kVl<=+DFm-)Dewrr{V=B5S2@~Dm-L>_0j=byuwgzGGNKeEUIeuIen2%@im zoMnshRegsMk2Pv|BXSP)gVQKP0%;O#E*G`?z6}j3pz^er@4*IouC18$LBQqX(MPD5 zBaPj}>Lng#B+MXN45SaLA!0CsW`YK?^8+fw!BX?n3+|JE$lpp*9BGfnIagPKWBhS? 
zx*p&XaRb#7p$s6!U_OwGy5w$76=3nk|4)0@8q?$%g`EyioM4+V)B#14VL=L!4RF*z zEfmL`6DJ_6h-h^P1F?vXGNfKoLCq`^luS`l(a}m*KtWVGugp3ODiP;hv5Iw&R$66% zld|WqWPd;RAO4c?HRXNJdCvKsOMSo>41Nb_=ftNRs6yN`rP;W6hBZ$JyJ)oKX(W=-ZLld2zYVeo8&mf49(>ij-=yqH&b%XquiR{mqTiM{Ka90x z3WD>{|2qaa5sA$cNH1F%lFhwPG(0I_Xi-*(!{tC@lwQQ+xGx*J4l6*%Yt^sj0Og66 zG9^Fms}$T6DnMiA5`EUz%^?&?=Qrf!$3knw3j+cZW+Z5f2>nIapG55a@xop~I53s; z#{5vx%tjbt65xH_uDOv{M&+8spd`C1Gg0zRdDFv~_`GRxYPR+Y(-3f+trn|exfI#3 z2D(`G%5$6Ep`3`Yaw=Ui(*gH}p)o_i+PeVmPFi|-vYx7lLEi#fNkwVa1N{EU(Q2Le zMG&ae6ry0YrMw$Y_xbz%N6nnv%(RwDPIp_Fxk~4J(Mno`^-+# znb_+qL%IiridJKG5Citlk9Iif|&w#m7 z()~-LZmg9wj|Jw7WF504vmu^t2|W6ey=I(Qu>zg(2iS;~$>)h2Lu}K8ifG}dO!3Fz zH%7<1x;g(EVYe#jf4!&Je6LZ7nq2=zTx1kjdtIsD4~UQ00ku+%sCE85EL$73l{aI*3d^u29j0NeAabv zvF$v$j*fQ^&^E1P1)xjSSY%yL#IzfqF~|`FyJ&PB-9ZV@FoeAX50lT8G%8DRC+7mV z_!(b9rei>PNho*_DLtQg;*QVNux>56q=H+xg%j4%fp9qwc;UR5iwc)`$+ft~cDO}3R8M1PQtH>1UbaY_qNTS*nJY&E|CHyFu zG2MAzwEzm@hD6e<;6_&el_u-k3f*B89B&VO-)GsczanH!Cnb>;euf5Q&Ph-%*WU^Q zOljcf4HIGXM*;^= za+#v=ZS&*kLCftr39V=xT|6f7&Gm~aAC|nvteh@=$#k^lZq5)YUe6M#dmwQePRk;+ z1H0wJZ)&h}X#<>5Yc$)h9^e}w5T8sM#~g@o?e~u7gH)f*vvV1)#%KP;tJKA}y?>vj z60*OEWcoboh+6J2ZiHI?KUn+4t*f_<8s*R*6E!Dpguy?;;2)vmjL>oZrQ;N5s(WYx zKM2Qp5>D!Yyt=2JNWbZKI_FO|Z7tvcr0Wyw7C@`C=F%e(!#v{afOjT7uuFmYLt1HM zMfgS|RQ;>lPtBL@hK8xPl>wt}PI~v)csI-n?{UKQuhOu5rNL|=l%%Ex>=wB8sv&41 zCqi!OuUr5exMSTqJIn8>-c!zG%5#`~IUtU<5fWqb_D)lvr8M^bOqHqv`yNB7@1mBn z|73|_y!{fPA|v6kE0cQ4=Y9OmlaTNmDe*HV`{IPqs6wpHo_>wCTMqEp`7Ml-lvrwo z<|YR?DV$GbTzE8x;VYc)Tf9`gwoT+d?Ea5c6^S0R+J7GFwj)Ia_joCMn%BI7`f}l0 bzP0I>->T#r^_swjQTT}weHo<_rfm5KN?SH( literal 0 HcmV?d00001 From 5a09a7e4e11d533cfeaf21f553d086573e05f597 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Sat, 21 Sep 2024 07:49:37 -0400 Subject: [PATCH 75/96] chore: Adding unit test to test feature view dummy entity serialization after apply() (#4553) * chore: Adding unit test to test feature view dummy entity serialization after apply() Signed-off-by: Francisco Javier Arceo * updated Signed-off-by: Francisco 
Javier Arceo --------- Signed-off-by: Francisco Javier Arceo --- .../test_local_feature_store.py | 47 ++++++++++++++++++- 1 file changed, 46 insertions(+), 1 deletion(-) diff --git a/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py b/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py index c86441d56c..5ed16a8430 100644 --- a/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py +++ b/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py @@ -11,7 +11,7 @@ from feast.entity import Entity from feast.feast_object import ALL_RESOURCE_TYPES from feast.feature_store import FeatureStore -from feast.feature_view import FeatureView +from feast.feature_view import DUMMY_ENTITY_ID, FeatureView from feast.field import Field from feast.infra.offline_stores.file_source import FileSource from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig @@ -342,6 +342,51 @@ def test_apply_entities_and_feature_views(test_feature_store): test_feature_store.teardown() +@pytest.mark.parametrize( + "test_feature_store", + [lazy_fixture("feature_store_with_local_registry")], +) +def test_apply_dummuy_entity_and_feature_view_columns(test_feature_store): + assert isinstance(test_feature_store, FeatureStore) + # Create Feature Views + batch_source = FileSource( + file_format=ParquetFormat(), + path="file://feast/*", + timestamp_field="ts_col", + created_timestamp_column="timestamp", + ) + + e1 = Entity(name="fs1_my_entity_1", description="something") + + fv = FeatureView( + name="my_feature_view_no_entity", + schema=[ + Field(name="fs1_my_feature_1", dtype=Int64), + Field(name="fs1_my_feature_2", dtype=String), + Field(name="fs1_my_feature_3", dtype=Array(String)), + Field(name="fs1_my_feature_4", dtype=Array(Bytes)), + Field(name="fs1_my_entity_2", dtype=Int64), + ], + entities=[], + tags={"team": "matchmaking"}, + source=batch_source, + ttl=timedelta(minutes=5), + ) + + # Check that the entity_columns are empty before 
applying + assert fv.entity_columns == [] + + # Register Feature View + test_feature_store.apply([fv, e1]) + fv_actual = test_feature_store.get_feature_view("my_feature_view_no_entity") + + # Note that after the apply() the feature_view serializes the Dummy Entity ID + assert fv.entity_columns[0].name == DUMMY_ENTITY_ID + assert fv_actual.entity_columns[0].name == DUMMY_ENTITY_ID + + test_feature_store.teardown() + + @pytest.mark.parametrize( "test_feature_store", [lazy_fixture("feature_store_with_local_registry")], From 334e5d78855709d4ca56619f16eecb414f88ce2d Mon Sep 17 00:00:00 2001 From: Harri Lehtola <1781172+peruukki@users.noreply.github.com> Date: Sat, 21 Sep 2024 16:55:37 +0300 Subject: [PATCH 76/96] feat: Publish TypeScript types in Feast UI package (#4551) --- ui/package.json | 5 +++-- ui/tsconfig.build-lib.json | 11 +++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) create mode 100644 ui/tsconfig.build-lib.json diff --git a/ui/package.json b/ui/package.json index 3a609f3c83..c104c00ae5 100644 --- a/ui/package.json +++ b/ui/package.json @@ -6,6 +6,7 @@ "dist" ], "main": "./dist/feast-ui.cjs", + "types": "./dist/FeastUI.d.ts", "module": "./dist/feast-ui.module.js", "peerDependencies": { "@elastic/datemath": "^5.0.3", @@ -54,8 +55,8 @@ "scripts": { "start": "npm run generate-protos && react-scripts start", "build": "npm run generate-protos && react-scripts build", - "build:lib": "npm run generate-protos && rimraf ./dist && tsc && rollup -c", - "build:lib-dev": "npm run generate-protos && rimraf ./dist && tsc && rollup -c && yalc publish -f", + "build:lib": "npm run generate-protos && rimraf ./dist && tsc --project ./tsconfig.build-lib.json && rollup -c", + "build:lib-dev": "npm run build:lib && yalc publish -f", "test": "npm run generate-protos && react-scripts test", "eject": "react-scripts eject", "generate-protos": "pbjs --no-encode -o src/protos.js -w commonjs -t static-module `find ../protos/feast/ -iname *.proto` && pbts -n protos -o 
src/protos.d.ts src/protos.js" diff --git a/ui/tsconfig.build-lib.json b/ui/tsconfig.build-lib.json new file mode 100644 index 0000000000..c29bd063f0 --- /dev/null +++ b/ui/tsconfig.build-lib.json @@ -0,0 +1,11 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "declaration": true, + "declarationMap": true, + "emitDeclarationOnly": true, + "noEmit": false, + "outDir": "./dist", + "rootDir": "./src" + } +} From 351a2d0a7f9808178ab9d201083eb2894ce7384f Mon Sep 17 00:00:00 2001 From: Bhargav Dodla <13788369+EXPEbdodla@users.noreply.github.com> Date: Sat, 21 Sep 2024 15:15:20 -0700 Subject: [PATCH 77/96] fix: Deleting data from feast_metadata when we delete project (#4550) * fix: Deleting data from feast_metadata when we delete project Signed-off-by: Bhargav Dodla * fix: Deleting for snowflake Signed-off-by: Bhargav Dodla --------- Signed-off-by: Bhargav Dodla Co-authored-by: Bhargav Dodla --- sdk/python/feast/infra/registry/snowflake.py | 1 + sdk/python/feast/infra/registry/sql.py | 1 + 2 files changed, 2 insertions(+) diff --git a/sdk/python/feast/infra/registry/snowflake.py b/sdk/python/feast/infra/registry/snowflake.py index f9dd37e516..e68d9d64b5 100644 --- a/sdk/python/feast/infra/registry/snowflake.py +++ b/sdk/python/feast/infra/registry/snowflake.py @@ -1303,6 +1303,7 @@ def delete_project( "DATA_SOURCES", "ENTITIES", "PERMISSIONS", + "FEAST_METADATA", "PROJECTS", }: query = f""" diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py index a6a2417c6e..6ae27acf4e 100644 --- a/sdk/python/feast/infra/registry/sql.py +++ b/sdk/python/feast/infra/registry/sql.py @@ -1237,6 +1237,7 @@ def delete_project( data_sources, entities, permissions, + feast_metadata, projects, }: stmt = delete(t).where(t.c.project_id == name) From e781e1652cadc6576dbab369248d6e4afdb5f158 Mon Sep 17 00:00:00 2001 From: Harri Lehtola <1781172+peruukki@users.noreply.github.com> Date: Sun, 22 Sep 2024 06:44:45 +0300 Subject: [PATCH 78/96] fix: 
Remove Feast UI TypeScript dependencies from `peerDependencies` and `dependencies` (#4554) --- ui/package.json | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/ui/package.json b/ui/package.json index c104c00ae5..978be97b88 100644 --- a/ui/package.json +++ b/ui/package.json @@ -12,9 +12,6 @@ "@elastic/datemath": "^5.0.3", "@elastic/eui": "^55.0.1", "@emotion/react": "^11.7.1", - "@types/d3": "^7.1.0", - "@types/react": "^17.0.20", - "@types/react-dom": "^17.0.9", "d3": "^7.3.0", "inter-ui": "^3.19.3", "moment": "^2.29.1", @@ -25,7 +22,6 @@ "react-query": "^3.34.12", "react-router-dom": "6", "react-scripts": "^5.0.0", - "typescript": "^4.4.2", "use-query-params": "^1.2.3", "zod": "^3.11.6" }, @@ -33,11 +29,6 @@ "@elastic/datemath": "^5.0.3", "@elastic/eui": "^55.0.1", "@emotion/react": "^11.7.1", - "@types/d3": "^7.1.0", - "@types/jest": "^27.0.1", - "@types/node": "^16.7.13", - "@types/react": "^17.0.20", - "@types/react-dom": "^17.0.9", "d3": "^7.3.0", "inter-ui": "^3.19.3", "moment": "^2.29.1", @@ -96,6 +87,11 @@ "@testing-library/jest-dom": "^5.14.1", "@testing-library/react": "^12.0.0", "@testing-library/user-event": "^13.2.1", + "@types/d3": "^7.1.0", + "@types/jest": "^27.0.1", + "@types/node": "^16.7.13", + "@types/react": "^17.0.20", + "@types/react-dom": "^17.0.9", "msw": "^0.36.8", "protobufjs-cli": "^1.0.2", "react": "^17.0.2", From fcd1bd4c383ffa2794912fa93ec783baced85dea Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 21 Sep 2024 23:54:36 -0400 Subject: [PATCH 79/96] chore: Bump gopkg.in/yaml.v3 from 3.0.0-20200313102051-9f266ea9e77c to 3.0.0 (#3752) chore: Bump gopkg.in/yaml.v3 Bumps gopkg.in/yaml.v3 from 3.0.0-20200313102051-9f266ea9e77c to 3.0.0. --- updated-dependencies: - dependency-name: gopkg.in/yaml.v3 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/go.mod b/go.mod index 0f73328c72..61063a0cda 100644 --- a/go.mod +++ b/go.mod @@ -46,5 +46,5 @@ require ( google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect gopkg.in/yaml.v2 v2.4.0 // indirect - gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect + gopkg.in/yaml.v3 v3.0.0 // indirect ) diff --git a/go.sum b/go.sum index a793b09aec..83bbc041c5 100644 --- a/go.sum +++ b/go.sum @@ -1854,8 +1854,9 @@ gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0 h1:hjy8E9ON/egN1tAYqKb61G10WtihqetD4sz2H+8nIeA= +gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= From 00910bcdc8f642cfe83778b5ee22db97470e29ee Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Sun, 22 Sep 2024 20:46:10 +0400 Subject: [PATCH 80/96] chore: Commit generated python proto files (#4546) * chore: commit generated python files to repo Signed-off-by: tokoko * merge from master Signed-off-by: tokoko * chore: remove protos from gitignore 
Signed-off-by: tokoko --------- Signed-off-by: tokoko --- .gitignore | 1 - Makefile | 13 +- infra/scripts/generate_protos.py | 80 + pyproject.toml | 3 - sdk/python/feast/protos/__init__.py | 0 sdk/python/feast/protos/feast/__init__.py | 0 .../protos/feast/core/Aggregation_pb2.py | 28 + .../protos/feast/core/Aggregation_pb2.pyi | 42 + .../protos/feast/core/Aggregation_pb2_grpc.py | 4 + .../feast/protos/feast/core/DataFormat_pb2.py | 39 + .../protos/feast/core/DataFormat_pb2.pyi | 143 ++ .../protos/feast/core/DataFormat_pb2_grpc.py | 4 + .../feast/protos/feast/core/DataSource_pb2.py | 72 + .../protos/feast/core/DataSource_pb2.pyi | 559 ++++++ .../protos/feast/core/DataSource_pb2_grpc.py | 4 + .../protos/feast/core/DatastoreTable_pb2.py | 28 + .../protos/feast/core/DatastoreTable_pb2.pyi | 67 + .../feast/core/DatastoreTable_pb2_grpc.py | 4 + .../protos/feast/core/DynamoDBTable_pb2.py | 27 + .../protos/feast/core/DynamoDBTable_pb2.pyi | 50 + .../feast/core/DynamoDBTable_pb2_grpc.py | 4 + .../feast/protos/feast/core/Entity_pb2.py | 37 + .../feast/protos/feast/core/Entity_pb2.pyi | 130 ++ .../protos/feast/core/Entity_pb2_grpc.py | 4 + .../protos/feast/core/FeatureService_pb2.py | 55 + .../protos/feast/core/FeatureService_pb2.pyi | 266 +++ .../feast/core/FeatureService_pb2_grpc.py | 4 + .../protos/feast/core/FeatureTable_pb2.py | 39 + .../protos/feast/core/FeatureTable_pb2.pyi | 166 ++ .../feast/core/FeatureTable_pb2_grpc.py | 4 + .../feast/core/FeatureViewProjection_pb2.py | 32 + .../feast/core/FeatureViewProjection_pb2.pyi | 66 + .../core/FeatureViewProjection_pb2_grpc.py | 4 + .../protos/feast/core/FeatureView_pb2.py | 41 + .../protos/feast/core/FeatureView_pb2.pyi | 194 +++ .../protos/feast/core/FeatureView_pb2_grpc.py | 4 + .../feast/protos/feast/core/Feature_pb2.py | 32 + .../feast/protos/feast/core/Feature_pb2.pyi | 75 + .../protos/feast/core/Feature_pb2_grpc.py | 4 + .../protos/feast/core/InfraObject_pb2.py | 34 + .../protos/feast/core/InfraObject_pb2.pyi | 101 ++ 
.../protos/feast/core/InfraObject_pb2_grpc.py | 4 + .../feast/core/OnDemandFeatureView_pb2.py | 53 + .../feast/core/OnDemandFeatureView_pb2.pyi | 219 +++ .../core/OnDemandFeatureView_pb2_grpc.py | 4 + .../feast/protos/feast/core/Permission_pb2.py | 45 + .../protos/feast/core/Permission_pb2.pyi | 195 +++ .../protos/feast/core/Permission_pb2_grpc.py | 4 + .../feast/protos/feast/core/Policy_pb2.py | 29 + .../feast/protos/feast/core/Policy_pb2.pyi | 58 + .../protos/feast/core/Policy_pb2_grpc.py | 4 + .../feast/protos/feast/core/Project_pb2.py | 36 + .../feast/protos/feast/core/Project_pb2.pyi | 119 ++ .../protos/feast/core/Project_pb2_grpc.py | 4 + .../feast/protos/feast/core/Registry_pb2.py | 44 + .../feast/protos/feast/core/Registry_pb2.pyi | 140 ++ .../protos/feast/core/Registry_pb2_grpc.py | 4 + .../protos/feast/core/SavedDataset_pb2.py | 39 + .../protos/feast/core/SavedDataset_pb2.pyi | 192 ++ .../feast/core/SavedDataset_pb2_grpc.py | 4 + .../protos/feast/core/SqliteTable_pb2.py | 27 + .../protos/feast/core/SqliteTable_pb2.pyi | 50 + .../protos/feast/core/SqliteTable_pb2_grpc.py | 4 + .../feast/protos/feast/core/Store_pb2.py | 37 + .../feast/protos/feast/core/Store_pb2.pyi | 234 +++ .../feast/protos/feast/core/Store_pb2_grpc.py | 4 + .../feast/core/StreamFeatureView_pb2.py | 42 + .../feast/core/StreamFeatureView_pb2.pyi | 170 ++ .../feast/core/StreamFeatureView_pb2_grpc.py | 4 + .../protos/feast/core/Transformation_pb2.py | 31 + .../protos/feast/core/Transformation_pb2.pyi | 80 + .../feast/core/Transformation_pb2_grpc.py | 4 + .../feast/core/ValidationProfile_pb2.py | 37 + .../feast/core/ValidationProfile_pb2.pyi | 136 ++ .../feast/core/ValidationProfile_pb2_grpc.py | 4 + .../feast/protos/feast/core/__init__.py | 0 .../feast/registry/RegistryServer_pb2.py | 198 +++ .../feast/registry/RegistryServer_pb2.pyi | 1318 ++++++++++++++ .../feast/registry/RegistryServer_pb2_grpc.py | 1542 +++++++++++++++++ .../feast/protos/feast/registry/__init__.py | 0 
.../protos/feast/serving/Connector_pb2.py | 39 + .../protos/feast/serving/Connector_pb2.pyi | 97 ++ .../feast/serving/Connector_pb2_grpc.py | 66 + .../protos/feast/serving/GrpcServer_pb2.py | 43 + .../protos/feast/serving/GrpcServer_pb2.pyi | 120 ++ .../feast/serving/GrpcServer_pb2_grpc.py | 133 ++ .../feast/serving/ServingService_pb2.py | 63 + .../feast/serving/ServingService_pb2.pyi | 347 ++++ .../feast/serving/ServingService_pb2_grpc.py | 101 ++ .../serving/TransformationService_pb2.py | 39 + .../serving/TransformationService_pb2.pyi | 136 ++ .../serving/TransformationService_pb2_grpc.py | 99 ++ .../feast/protos/feast/serving/__init__.py | 0 .../feast/protos/feast/storage/Redis_pb2.py | 28 + .../feast/protos/feast/storage/Redis_pb2.pyi | 54 + .../protos/feast/storage/Redis_pb2_grpc.py | 4 + .../feast/protos/feast/storage/__init__.py | 0 .../feast/protos/feast/types/EntityKey_pb2.py | 28 + .../protos/feast/types/EntityKey_pb2.pyi | 51 + .../protos/feast/types/EntityKey_pb2_grpc.py | 4 + .../feast/protos/feast/types/Field_pb2.py | 32 + .../feast/protos/feast/types/Field_pb2.pyi | 73 + .../protos/feast/types/Field_pb2_grpc.py | 4 + .../feast/protos/feast/types/Value_pb2.py | 49 + .../feast/protos/feast/types/Value_pb2.pyi | 296 ++++ .../protos/feast/types/Value_pb2_grpc.py | 4 + .../feast/protos/feast/types/__init__.py | 0 .../requirements/py3.10-ci-requirements.txt | 163 +- .../requirements/py3.10-requirements.txt | 39 +- .../requirements/py3.11-ci-requirements.txt | 165 +- .../requirements/py3.11-requirements.txt | 39 +- .../requirements/py3.9-ci-requirements.txt | 167 +- .../requirements/py3.9-requirements.txt | 39 +- setup.py | 128 +- 114 files changed, 9568 insertions(+), 665 deletions(-) create mode 100644 infra/scripts/generate_protos.py create mode 100644 sdk/python/feast/protos/__init__.py create mode 100644 sdk/python/feast/protos/feast/__init__.py create mode 100644 sdk/python/feast/protos/feast/core/Aggregation_pb2.py create mode 100644 
sdk/python/feast/protos/feast/core/Aggregation_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/Aggregation_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/DataFormat_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/DataFormat_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/DataFormat_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/DataSource_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/DataSource_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/DataSource_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/DatastoreTable_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/DatastoreTable_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/DatastoreTable_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/DynamoDBTable_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/DynamoDBTable_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/DynamoDBTable_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/Entity_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/Entity_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/Entity_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/FeatureService_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/FeatureService_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/FeatureService_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/FeatureTable_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/FeatureTable_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/FeatureTable_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2_grpc.py create mode 100644 
sdk/python/feast/protos/feast/core/FeatureView_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/FeatureView_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/FeatureView_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/Feature_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/Feature_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/Feature_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/InfraObject_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/InfraObject_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/InfraObject_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/Permission_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/Permission_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/Permission_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/Policy_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/Policy_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/Policy_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/Project_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/Project_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/Project_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/Registry_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/Registry_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/Registry_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/SavedDataset_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/SavedDataset_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/SavedDataset_pb2_grpc.py create mode 
100644 sdk/python/feast/protos/feast/core/SqliteTable_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/SqliteTable_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/SqliteTable_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/Store_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/Store_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/Store_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/StreamFeatureView_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/Transformation_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/Transformation_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/Transformation_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/ValidationProfile_pb2.py create mode 100644 sdk/python/feast/protos/feast/core/ValidationProfile_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/core/ValidationProfile_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/core/__init__.py create mode 100644 sdk/python/feast/protos/feast/registry/RegistryServer_pb2.py create mode 100644 sdk/python/feast/protos/feast/registry/RegistryServer_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/registry/RegistryServer_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/registry/__init__.py create mode 100644 sdk/python/feast/protos/feast/serving/Connector_pb2.py create mode 100644 sdk/python/feast/protos/feast/serving/Connector_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/serving/Connector_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/serving/GrpcServer_pb2.py create mode 100644 sdk/python/feast/protos/feast/serving/GrpcServer_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/serving/GrpcServer_pb2_grpc.py create mode 100644 
sdk/python/feast/protos/feast/serving/ServingService_pb2.py create mode 100644 sdk/python/feast/protos/feast/serving/ServingService_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/serving/ServingService_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/serving/TransformationService_pb2.py create mode 100644 sdk/python/feast/protos/feast/serving/TransformationService_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/serving/TransformationService_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/serving/__init__.py create mode 100644 sdk/python/feast/protos/feast/storage/Redis_pb2.py create mode 100644 sdk/python/feast/protos/feast/storage/Redis_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/storage/Redis_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/storage/__init__.py create mode 100644 sdk/python/feast/protos/feast/types/EntityKey_pb2.py create mode 100644 sdk/python/feast/protos/feast/types/EntityKey_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/types/EntityKey_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/types/Field_pb2.py create mode 100644 sdk/python/feast/protos/feast/types/Field_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/types/Field_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/types/Value_pb2.py create mode 100644 sdk/python/feast/protos/feast/types/Value_pb2.pyi create mode 100644 sdk/python/feast/protos/feast/types/Value_pb2_grpc.py create mode 100644 sdk/python/feast/protos/feast/types/__init__.py diff --git a/.gitignore b/.gitignore index e4e82bfce4..d558463c65 100644 --- a/.gitignore +++ b/.gitignore @@ -185,7 +185,6 @@ dmypy.json # Protos sdk/python/docs/html -sdk/python/feast/protos/ sdk/go/protos/ go/protos/ diff --git a/Makefile b/Makefile index 8a9f643967..f4b34124f7 100644 --- a/Makefile +++ b/Makefile @@ -40,29 +40,20 @@ build: protos build-java build-docker install-python-ci-dependencies: python -m piptools sync 
sdk/python/requirements/py$(PYTHON_VERSION)-ci-requirements.txt pip install --no-deps -e . - python setup.py build_python_protos --inplace install-python-ci-dependencies-uv: uv pip sync --system sdk/python/requirements/py$(PYTHON_VERSION)-ci-requirements.txt uv pip install --system --no-deps -e . - python setup.py build_python_protos --inplace install-python-ci-dependencies-uv-venv: uv pip sync sdk/python/requirements/py$(PYTHON_VERSION)-ci-requirements.txt uv pip install --no-deps -e . - python setup.py build_python_protos --inplace - -install-protoc-dependencies: - pip install "protobuf<5" "grpcio-tools>=1.56.2,<2" "mypy-protobuf>=3.1" lock-python-ci-dependencies: uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py$(PYTHON_VERSION)-ci-requirements.txt -package-protos: - cp -r ${ROOT_DIR}/protos ${ROOT_DIR}/sdk/python/feast/protos - -compile-protos-python: install-protoc-dependencies - python setup.py build_python_protos --inplace +compile-protos-python: + python infra/scripts/generate_protos.py install-python: python -m piptools sync sdk/python/requirements/py$(PYTHON_VERSION)-requirements.txt diff --git a/infra/scripts/generate_protos.py b/infra/scripts/generate_protos.py new file mode 100644 index 0000000000..2ce7e29e12 --- /dev/null +++ b/infra/scripts/generate_protos.py @@ -0,0 +1,80 @@ +import os +import sys +import glob +import subprocess +from pathlib import Path + +repo_root = str(Path(__file__).resolve().parent) + +PROTO_SUBDIRS = ["core", "registry", "serving", "types", "storage"] +PYTHON_CODE_PREFIX = "sdk/python" + +class BuildPythonProtosCommand: + description = "Builds the proto files into Python files." 
+ user_options = [ + ("inplace", "i", "Write generated proto files to source directory."), + ] + + def __init__(self): + self.python_protoc = [ + sys.executable, + "-m", + "grpc_tools.protoc", + ] + self.proto_folder = "protos" + self.sub_folders = PROTO_SUBDIRS + self.inplace = 0 + + @property + def python_folder(self): + return "sdk/python/feast/protos" + + def _generate_python_protos(self, path: str): + proto_files = glob.glob(os.path.join(self.proto_folder, path)) + Path(self.python_folder).mkdir(parents=True, exist_ok=True) + subprocess.check_call( + self.python_protoc + + [ + "-I", + self.proto_folder, + "--python_out", + self.python_folder, + "--grpc_python_out", + self.python_folder, + "--mypy_out", + self.python_folder, + ] + + proto_files + ) + + def run(self): + for sub_folder in self.sub_folders: + self._generate_python_protos(f"feast/{sub_folder}/*.proto") + # We need the __init__ files for each of the generated subdirs + # so that they are regular packages, and don't need the `--namespace-packages` flags + # when being typechecked using mypy. 
+ with open(f"{self.python_folder}/feast/{sub_folder}/__init__.py", "w"): + pass + + with open(f"{self.python_folder}/__init__.py", "w"): + pass + with open(f"{self.python_folder}/feast/__init__.py", "w"): + pass + + for path in Path(self.python_folder).rglob("*.py"): + for folder in self.sub_folders: + # Read in the file + with open(path, "r") as file: + filedata = file.read() + + # Replace the target string + filedata = filedata.replace( + f"from feast.{folder}", f"from feast.protos.feast.{folder}" + ) + + # Write the file out again + with open(path, "w") as file: + file.write(filedata) + +if __name__ == "__main__": + BuildPythonProtosCommand().run() \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index c91608b6ce..d772bab9ea 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,8 +1,5 @@ [build-system] requires = [ - "protobuf<5", - "grpcio-tools>=1.56.2,<2", - "mypy-protobuf>=3.1", "pybindgen==0.22.0", "setuptools>=60", "setuptools_scm>=6.2", diff --git a/sdk/python/feast/protos/__init__.py b/sdk/python/feast/protos/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/protos/feast/__init__.py b/sdk/python/feast/protos/feast/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/protos/feast/core/Aggregation_pb2.py b/sdk/python/feast/protos/feast/core/Aggregation_pb2.py new file mode 100644 index 0000000000..922f8f40aa --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Aggregation_pb2.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/core/Aggregation.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66\x65\x61st/core/Aggregation.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/duration.proto\"\x92\x01\n\x0b\x41ggregation\x12\x0e\n\x06\x63olumn\x18\x01 \x01(\t\x12\x10\n\x08\x66unction\x18\x02 \x01(\t\x12.\n\x0btime_window\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0eslide_interval\x18\x04 \x01(\x0b\x32\x19.google.protobuf.DurationBU\n\x10\x66\x65\x61st.proto.coreB\x10\x41ggregationProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Aggregation_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\020AggregationProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_AGGREGATION']._serialized_start=77 + _globals['_AGGREGATION']._serialized_end=223 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Aggregation_pb2.pyi b/sdk/python/feast/protos/feast/core/Aggregation_pb2.pyi new file mode 100644 index 0000000000..ceb8b1f813 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Aggregation_pb2.pyi @@ -0,0 +1,42 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Aggregation(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + COLUMN_FIELD_NUMBER: builtins.int + FUNCTION_FIELD_NUMBER: builtins.int + TIME_WINDOW_FIELD_NUMBER: builtins.int + SLIDE_INTERVAL_FIELD_NUMBER: builtins.int + column: builtins.str + function: builtins.str + @property + def time_window(self) -> google.protobuf.duration_pb2.Duration: ... + @property + def slide_interval(self) -> google.protobuf.duration_pb2.Duration: ... + def __init__( + self, + *, + column: builtins.str = ..., + function: builtins.str = ..., + time_window: google.protobuf.duration_pb2.Duration | None = ..., + slide_interval: google.protobuf.duration_pb2.Duration | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["slide_interval", b"slide_interval", "time_window", b"time_window"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["column", b"column", "function", b"function", "slide_interval", b"slide_interval", "time_window", b"time_window"]) -> None: ... + +global___Aggregation = Aggregation diff --git a/sdk/python/feast/protos/feast/core/Aggregation_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Aggregation_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Aggregation_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/DataFormat_pb2.py b/sdk/python/feast/protos/feast/core/DataFormat_pb2.py new file mode 100644 index 0000000000..a3883dcec3 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DataFormat_pb2.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/DataFormat.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66\x65\x61st/core/DataFormat.proto\x12\nfeast.core\"\xb2\x01\n\nFileFormat\x12>\n\x0eparquet_format\x18\x01 \x01(\x0b\x32$.feast.core.FileFormat.ParquetFormatH\x00\x12:\n\x0c\x64\x65lta_format\x18\x02 \x01(\x0b\x32\".feast.core.FileFormat.DeltaFormatH\x00\x1a\x0f\n\rParquetFormat\x1a\r\n\x0b\x44\x65ltaFormatB\x08\n\x06\x66ormat\"\xb7\x02\n\x0cStreamFormat\x12:\n\x0b\x61vro_format\x18\x01 \x01(\x0b\x32#.feast.core.StreamFormat.AvroFormatH\x00\x12<\n\x0cproto_format\x18\x02 \x01(\x0b\x32$.feast.core.StreamFormat.ProtoFormatH\x00\x12:\n\x0bjson_format\x18\x03 \x01(\x0b\x32#.feast.core.StreamFormat.JsonFormatH\x00\x1a!\n\x0bProtoFormat\x12\x12\n\nclass_path\x18\x01 \x01(\t\x1a!\n\nAvroFormat\x12\x13\n\x0bschema_json\x18\x01 \x01(\t\x1a!\n\nJsonFormat\x12\x13\n\x0bschema_json\x18\x01 \x01(\tB\x08\n\x06\x66ormatBT\n\x10\x66\x65\x61st.proto.coreB\x0f\x44\x61taFormatProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.DataFormat_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\017DataFormatProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_FILEFORMAT']._serialized_start=44 + _globals['_FILEFORMAT']._serialized_end=222 + _globals['_FILEFORMAT_PARQUETFORMAT']._serialized_start=182 + _globals['_FILEFORMAT_PARQUETFORMAT']._serialized_end=197 + _globals['_FILEFORMAT_DELTAFORMAT']._serialized_start=199 + _globals['_FILEFORMAT_DELTAFORMAT']._serialized_end=212 + _globals['_STREAMFORMAT']._serialized_start=225 + _globals['_STREAMFORMAT']._serialized_end=536 + _globals['_STREAMFORMAT_PROTOFORMAT']._serialized_start=423 + _globals['_STREAMFORMAT_PROTOFORMAT']._serialized_end=456 + _globals['_STREAMFORMAT_AVROFORMAT']._serialized_start=458 + _globals['_STREAMFORMAT_AVROFORMAT']._serialized_end=491 + _globals['_STREAMFORMAT_JSONFORMAT']._serialized_start=493 + _globals['_STREAMFORMAT_JSONFORMAT']._serialized_end=526 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/DataFormat_pb2.pyi b/sdk/python/feast/protos/feast/core/DataFormat_pb2.pyi new file mode 100644 index 0000000000..1f904e9886 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DataFormat_pb2.pyi @@ -0,0 +1,143 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2020 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class FileFormat(google.protobuf.message.Message): + """Defines the file format encoding the features/entity data in files""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class ParquetFormat(google.protobuf.message.Message): + """Defines options for the Parquet data format""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + + class DeltaFormat(google.protobuf.message.Message): + """Defines options for delta data format""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + + PARQUET_FORMAT_FIELD_NUMBER: builtins.int + DELTA_FORMAT_FIELD_NUMBER: builtins.int + @property + def parquet_format(self) -> global___FileFormat.ParquetFormat: ... + @property + def delta_format(self) -> global___FileFormat.DeltaFormat: ... + def __init__( + self, + *, + parquet_format: global___FileFormat.ParquetFormat | None = ..., + delta_format: global___FileFormat.DeltaFormat | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["delta_format", b"delta_format", "format", b"format", "parquet_format", b"parquet_format"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["delta_format", b"delta_format", "format", b"format", "parquet_format", b"parquet_format"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["format", b"format"]) -> typing_extensions.Literal["parquet_format", "delta_format"] | None: ... 
+ +global___FileFormat = FileFormat + +class StreamFormat(google.protobuf.message.Message): + """Defines the data format encoding features/entity data in data streams""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class ProtoFormat(google.protobuf.message.Message): + """Defines options for the protobuf data format""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CLASS_PATH_FIELD_NUMBER: builtins.int + class_path: builtins.str + """Classpath to the generated Java Protobuf class that can be used to decode + Feature data from the obtained stream message + """ + def __init__( + self, + *, + class_path: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["class_path", b"class_path"]) -> None: ... + + class AvroFormat(google.protobuf.message.Message): + """Defines options for the avro data format""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SCHEMA_JSON_FIELD_NUMBER: builtins.int + schema_json: builtins.str + """Optional if used in a File DataSource as schema is embedded in avro file. + Specifies the schema of the Avro message as JSON string. + """ + def __init__( + self, + *, + schema_json: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["schema_json", b"schema_json"]) -> None: ... + + class JsonFormat(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SCHEMA_JSON_FIELD_NUMBER: builtins.int + schema_json: builtins.str + def __init__( + self, + *, + schema_json: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["schema_json", b"schema_json"]) -> None: ... + + AVRO_FORMAT_FIELD_NUMBER: builtins.int + PROTO_FORMAT_FIELD_NUMBER: builtins.int + JSON_FORMAT_FIELD_NUMBER: builtins.int + @property + def avro_format(self) -> global___StreamFormat.AvroFormat: ... + @property + def proto_format(self) -> global___StreamFormat.ProtoFormat: ... 
+ @property + def json_format(self) -> global___StreamFormat.JsonFormat: ... + def __init__( + self, + *, + avro_format: global___StreamFormat.AvroFormat | None = ..., + proto_format: global___StreamFormat.ProtoFormat | None = ..., + json_format: global___StreamFormat.JsonFormat | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["avro_format", b"avro_format", "format", b"format", "json_format", b"json_format", "proto_format", b"proto_format"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["avro_format", b"avro_format", "format", b"format", "json_format", b"json_format", "proto_format", b"proto_format"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["format", b"format"]) -> typing_extensions.Literal["avro_format", "proto_format", "json_format"] | None: ... + +global___StreamFormat = StreamFormat diff --git a/sdk/python/feast/protos/feast/core/DataFormat_pb2_grpc.py b/sdk/python/feast/protos/feast/core/DataFormat_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DataFormat_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/DataSource_pb2.py b/sdk/python/feast/protos/feast/core/DataSource_pb2.py new file mode 100644 index 0000000000..b58c33a383 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DataSource_pb2.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/core/DataSource.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import DataFormat_pb2 as feast_dot_core_dot_DataFormat__pb2 +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 +from feast.protos.feast.core import Feature_pb2 as feast_dot_core_dot_Feature__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66\x65\x61st/core/DataSource.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1b\x66\x65\x61st/core/DataFormat.proto\x1a\x17\x66\x65\x61st/types/Value.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\"\xc0\x16\n\nDataSource\x12\x0c\n\x04name\x18\x14 \x01(\t\x12\x0f\n\x07project\x18\x15 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x17 \x01(\t\x12.\n\x04tags\x18\x18 \x03(\x0b\x32 .feast.core.DataSource.TagsEntry\x12\r\n\x05owner\x18\x19 \x01(\t\x12/\n\x04type\x18\x01 \x01(\x0e\x32!.feast.core.DataSource.SourceType\x12?\n\rfield_mapping\x18\x02 \x03(\x0b\x32(.feast.core.DataSource.FieldMappingEntry\x12\x17\n\x0ftimestamp_field\x18\x03 \x01(\t\x12\x1d\n\x15\x64\x61te_partition_column\x18\x04 \x01(\t\x12 \n\x18\x63reated_timestamp_column\x18\x05 \x01(\t\x12\x1e\n\x16\x64\x61ta_source_class_type\x18\x11 \x01(\t\x12,\n\x0c\x62\x61tch_source\x18\x1a \x01(\x0b\x32\x16.feast.core.DataSource\x12/\n\x04meta\x18\x32 \x01(\x0b\x32!.feast.core.DataSource.SourceMeta\x12:\n\x0c\x66ile_options\x18\x0b 
\x01(\x0b\x32\".feast.core.DataSource.FileOptionsH\x00\x12\x42\n\x10\x62igquery_options\x18\x0c \x01(\x0b\x32&.feast.core.DataSource.BigQueryOptionsH\x00\x12<\n\rkafka_options\x18\r \x01(\x0b\x32#.feast.core.DataSource.KafkaOptionsH\x00\x12@\n\x0fkinesis_options\x18\x0e \x01(\x0b\x32%.feast.core.DataSource.KinesisOptionsH\x00\x12\x42\n\x10redshift_options\x18\x0f \x01(\x0b\x32&.feast.core.DataSource.RedshiftOptionsH\x00\x12I\n\x14request_data_options\x18\x12 \x01(\x0b\x32).feast.core.DataSource.RequestDataOptionsH\x00\x12\x44\n\x0e\x63ustom_options\x18\x10 \x01(\x0b\x32*.feast.core.DataSource.CustomSourceOptionsH\x00\x12\x44\n\x11snowflake_options\x18\x13 \x01(\x0b\x32\'.feast.core.DataSource.SnowflakeOptionsH\x00\x12:\n\x0cpush_options\x18\x16 \x01(\x0b\x32\".feast.core.DataSource.PushOptionsH\x00\x12<\n\rspark_options\x18\x1b \x01(\x0b\x32#.feast.core.DataSource.SparkOptionsH\x00\x12<\n\rtrino_options\x18\x1e \x01(\x0b\x32#.feast.core.DataSource.TrinoOptionsH\x00\x12>\n\x0e\x61thena_options\x18# \x01(\x0b\x32$.feast.core.DataSource.AthenaOptionsH\x00\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x33\n\x11\x46ieldMappingEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x82\x01\n\nSourceMeta\x12:\n\x16\x65\x61rliestEventTimestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x14latestEventTimestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x65\n\x0b\x46ileOptions\x12+\n\x0b\x66ile_format\x18\x01 \x01(\x0b\x32\x16.feast.core.FileFormat\x12\x0b\n\x03uri\x18\x02 \x01(\t\x12\x1c\n\x14s3_endpoint_override\x18\x03 \x01(\t\x1a/\n\x0f\x42igQueryOptions\x12\r\n\x05table\x18\x01 \x01(\t\x12\r\n\x05query\x18\x02 \x01(\t\x1a,\n\x0cTrinoOptions\x12\r\n\x05table\x18\x01 \x01(\t\x12\r\n\x05query\x18\x02 \x01(\t\x1a\xae\x01\n\x0cKafkaOptions\x12\x1f\n\x17kafka_bootstrap_servers\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 
\x01(\t\x12\x30\n\x0emessage_format\x18\x03 \x01(\x0b\x32\x18.feast.core.StreamFormat\x12<\n\x19watermark_delay_threshold\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x1a\x66\n\x0eKinesisOptions\x12\x0e\n\x06region\x18\x01 \x01(\t\x12\x13\n\x0bstream_name\x18\x02 \x01(\t\x12/\n\rrecord_format\x18\x03 \x01(\x0b\x32\x18.feast.core.StreamFormat\x1aQ\n\x0fRedshiftOptions\x12\r\n\x05table\x18\x01 \x01(\t\x12\r\n\x05query\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x04 \x01(\t\x1aT\n\rAthenaOptions\x12\r\n\x05table\x18\x01 \x01(\t\x12\r\n\x05query\x18\x02 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x61ta_source\x18\x04 \x01(\t\x1aX\n\x10SnowflakeOptions\x12\r\n\x05table\x18\x01 \x01(\t\x12\r\n\x05query\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x04 \x01(\tJ\x04\x08\x05\x10\x06\x1aO\n\x0cSparkOptions\x12\r\n\x05table\x18\x01 \x01(\t\x12\r\n\x05query\x18\x02 \x01(\t\x12\x0c\n\x04path\x18\x03 \x01(\t\x12\x13\n\x0b\x66ile_format\x18\x04 \x01(\t\x1a,\n\x13\x43ustomSourceOptions\x12\x15\n\rconfiguration\x18\x01 \x01(\x0c\x1a\xf7\x01\n\x12RequestDataOptions\x12Z\n\x11\x64\x65precated_schema\x18\x02 \x03(\x0b\x32?.feast.core.DataSource.RequestDataOptions.DeprecatedSchemaEntry\x12)\n\x06schema\x18\x03 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x1aT\n\x15\x44\x65precatedSchemaEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 
\x01(\x0e\x32\x1b.feast.types.ValueType.Enum:\x02\x38\x01J\x04\x08\x01\x10\x02\x1a\x13\n\x0bPushOptionsJ\x04\x08\x01\x10\x02\"\xf8\x01\n\nSourceType\x12\x0b\n\x07INVALID\x10\x00\x12\x0e\n\nBATCH_FILE\x10\x01\x12\x13\n\x0f\x42\x41TCH_SNOWFLAKE\x10\x08\x12\x12\n\x0e\x42\x41TCH_BIGQUERY\x10\x02\x12\x12\n\x0e\x42\x41TCH_REDSHIFT\x10\x05\x12\x10\n\x0cSTREAM_KAFKA\x10\x03\x12\x12\n\x0eSTREAM_KINESIS\x10\x04\x12\x11\n\rCUSTOM_SOURCE\x10\x06\x12\x12\n\x0eREQUEST_SOURCE\x10\x07\x12\x0f\n\x0bPUSH_SOURCE\x10\t\x12\x0f\n\x0b\x42\x41TCH_TRINO\x10\n\x12\x0f\n\x0b\x42\x41TCH_SPARK\x10\x0b\x12\x10\n\x0c\x42\x41TCH_ATHENA\x10\x0c\x42\t\n\x07optionsJ\x04\x08\x06\x10\x0b\x42T\n\x10\x66\x65\x61st.proto.coreB\x0f\x44\x61taSourceProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.DataSource_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\017DataSourceProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_DATASOURCE_TAGSENTRY']._options = None + _globals['_DATASOURCE_TAGSENTRY']._serialized_options = b'8\001' + _globals['_DATASOURCE_FIELDMAPPINGENTRY']._options = None + _globals['_DATASOURCE_FIELDMAPPINGENTRY']._serialized_options = b'8\001' + _globals['_DATASOURCE_REQUESTDATAOPTIONS_DEPRECATEDSCHEMAENTRY']._options = None + _globals['_DATASOURCE_REQUESTDATAOPTIONS_DEPRECATEDSCHEMAENTRY']._serialized_options = b'8\001' + _globals['_DATASOURCE']._serialized_start=189 + _globals['_DATASOURCE']._serialized_end=3069 + _globals['_DATASOURCE_TAGSENTRY']._serialized_start=1436 + _globals['_DATASOURCE_TAGSENTRY']._serialized_end=1479 + _globals['_DATASOURCE_FIELDMAPPINGENTRY']._serialized_start=1481 + _globals['_DATASOURCE_FIELDMAPPINGENTRY']._serialized_end=1532 + 
_globals['_DATASOURCE_SOURCEMETA']._serialized_start=1535 + _globals['_DATASOURCE_SOURCEMETA']._serialized_end=1665 + _globals['_DATASOURCE_FILEOPTIONS']._serialized_start=1667 + _globals['_DATASOURCE_FILEOPTIONS']._serialized_end=1768 + _globals['_DATASOURCE_BIGQUERYOPTIONS']._serialized_start=1770 + _globals['_DATASOURCE_BIGQUERYOPTIONS']._serialized_end=1817 + _globals['_DATASOURCE_TRINOOPTIONS']._serialized_start=1819 + _globals['_DATASOURCE_TRINOOPTIONS']._serialized_end=1863 + _globals['_DATASOURCE_KAFKAOPTIONS']._serialized_start=1866 + _globals['_DATASOURCE_KAFKAOPTIONS']._serialized_end=2040 + _globals['_DATASOURCE_KINESISOPTIONS']._serialized_start=2042 + _globals['_DATASOURCE_KINESISOPTIONS']._serialized_end=2144 + _globals['_DATASOURCE_REDSHIFTOPTIONS']._serialized_start=2146 + _globals['_DATASOURCE_REDSHIFTOPTIONS']._serialized_end=2227 + _globals['_DATASOURCE_ATHENAOPTIONS']._serialized_start=2229 + _globals['_DATASOURCE_ATHENAOPTIONS']._serialized_end=2313 + _globals['_DATASOURCE_SNOWFLAKEOPTIONS']._serialized_start=2315 + _globals['_DATASOURCE_SNOWFLAKEOPTIONS']._serialized_end=2403 + _globals['_DATASOURCE_SPARKOPTIONS']._serialized_start=2405 + _globals['_DATASOURCE_SPARKOPTIONS']._serialized_end=2484 + _globals['_DATASOURCE_CUSTOMSOURCEOPTIONS']._serialized_start=2486 + _globals['_DATASOURCE_CUSTOMSOURCEOPTIONS']._serialized_end=2530 + _globals['_DATASOURCE_REQUESTDATAOPTIONS']._serialized_start=2533 + _globals['_DATASOURCE_REQUESTDATAOPTIONS']._serialized_end=2780 + _globals['_DATASOURCE_REQUESTDATAOPTIONS_DEPRECATEDSCHEMAENTRY']._serialized_start=2690 + _globals['_DATASOURCE_REQUESTDATAOPTIONS_DEPRECATEDSCHEMAENTRY']._serialized_end=2774 + _globals['_DATASOURCE_PUSHOPTIONS']._serialized_start=2782 + _globals['_DATASOURCE_PUSHOPTIONS']._serialized_end=2801 + _globals['_DATASOURCE_SOURCETYPE']._serialized_start=2804 + _globals['_DATASOURCE_SOURCETYPE']._serialized_end=3052 +# @@protoc_insertion_point(module_scope) diff --git 
a/sdk/python/feast/protos/feast/core/DataSource_pb2.pyi b/sdk/python/feast/protos/feast/core/DataSource_pb2.pyi new file mode 100644 index 0000000000..94336638e1 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DataSource_pb2.pyi @@ -0,0 +1,559 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2020 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import feast.core.DataFormat_pb2 +import feast.core.Feature_pb2 +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class DataSource(google.protobuf.message.Message): + """Defines a Data Source that can be used source Feature data + Next available id: 28 + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _SourceType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _SourceTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DataSource._SourceType.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: 
google.protobuf.descriptor.EnumDescriptor + INVALID: DataSource._SourceType.ValueType # 0 + BATCH_FILE: DataSource._SourceType.ValueType # 1 + BATCH_SNOWFLAKE: DataSource._SourceType.ValueType # 8 + BATCH_BIGQUERY: DataSource._SourceType.ValueType # 2 + BATCH_REDSHIFT: DataSource._SourceType.ValueType # 5 + STREAM_KAFKA: DataSource._SourceType.ValueType # 3 + STREAM_KINESIS: DataSource._SourceType.ValueType # 4 + CUSTOM_SOURCE: DataSource._SourceType.ValueType # 6 + REQUEST_SOURCE: DataSource._SourceType.ValueType # 7 + PUSH_SOURCE: DataSource._SourceType.ValueType # 9 + BATCH_TRINO: DataSource._SourceType.ValueType # 10 + BATCH_SPARK: DataSource._SourceType.ValueType # 11 + BATCH_ATHENA: DataSource._SourceType.ValueType # 12 + + class SourceType(_SourceType, metaclass=_SourceTypeEnumTypeWrapper): + """Type of Data Source. + Next available id: 12 + """ + + INVALID: DataSource.SourceType.ValueType # 0 + BATCH_FILE: DataSource.SourceType.ValueType # 1 + BATCH_SNOWFLAKE: DataSource.SourceType.ValueType # 8 + BATCH_BIGQUERY: DataSource.SourceType.ValueType # 2 + BATCH_REDSHIFT: DataSource.SourceType.ValueType # 5 + STREAM_KAFKA: DataSource.SourceType.ValueType # 3 + STREAM_KINESIS: DataSource.SourceType.ValueType # 4 + CUSTOM_SOURCE: DataSource.SourceType.ValueType # 6 + REQUEST_SOURCE: DataSource.SourceType.ValueType # 7 + PUSH_SOURCE: DataSource.SourceType.ValueType # 9 + BATCH_TRINO: DataSource.SourceType.ValueType # 10 + BATCH_SPARK: DataSource.SourceType.ValueType # 11 + BATCH_ATHENA: DataSource.SourceType.ValueType # 12 + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + class FieldMappingEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + class SourceMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + EARLIESTEVENTTIMESTAMP_FIELD_NUMBER: builtins.int + LATESTEVENTTIMESTAMP_FIELD_NUMBER: builtins.int + @property + def earliestEventTimestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def latestEventTimestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + earliestEventTimestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + latestEventTimestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["earliestEventTimestamp", b"earliestEventTimestamp", "latestEventTimestamp", b"latestEventTimestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["earliestEventTimestamp", b"earliestEventTimestamp", "latestEventTimestamp", b"latestEventTimestamp"]) -> None: ... + + class FileOptions(google.protobuf.message.Message): + """Defines options for DataSource that sources features from a file""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FILE_FORMAT_FIELD_NUMBER: builtins.int + URI_FIELD_NUMBER: builtins.int + S3_ENDPOINT_OVERRIDE_FIELD_NUMBER: builtins.int + @property + def file_format(self) -> feast.core.DataFormat_pb2.FileFormat: ... + uri: builtins.str + """Target URL of file to retrieve and source features from. 
+ s3://path/to/file for AWS S3 storage + gs://path/to/file for GCP GCS storage + file:///path/to/file for local storage + """ + s3_endpoint_override: builtins.str + """override AWS S3 storage endpoint with custom S3 endpoint""" + def __init__( + self, + *, + file_format: feast.core.DataFormat_pb2.FileFormat | None = ..., + uri: builtins.str = ..., + s3_endpoint_override: builtins.str = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["file_format", b"file_format"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["file_format", b"file_format", "s3_endpoint_override", b"s3_endpoint_override", "uri", b"uri"]) -> None: ... + + class BigQueryOptions(google.protobuf.message.Message): + """Defines options for DataSource that sources features from a BigQuery Query""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_FIELD_NUMBER: builtins.int + QUERY_FIELD_NUMBER: builtins.int + table: builtins.str + """Full table reference in the form of [project:dataset.table]""" + query: builtins.str + """SQL query that returns a table containing feature data. Must contain an event_timestamp column, and respective + entity columns + """ + def __init__( + self, + *, + table: builtins.str = ..., + query: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["query", b"query", "table", b"table"]) -> None: ... + + class TrinoOptions(google.protobuf.message.Message): + """Defines options for DataSource that sources features from a Trino Query""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_FIELD_NUMBER: builtins.int + QUERY_FIELD_NUMBER: builtins.int + table: builtins.str + """Full table reference in the form of [project:dataset.table]""" + query: builtins.str + """SQL query that returns a table containing feature data. 
Must contain an event_timestamp column, and respective + entity columns + """ + def __init__( + self, + *, + table: builtins.str = ..., + query: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["query", b"query", "table", b"table"]) -> None: ... + + class KafkaOptions(google.protobuf.message.Message): + """Defines options for DataSource that sources features from Kafka messages. + Each message should be a Protobuf that can be decoded with the generated + Java Protobuf class at the given class path + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KAFKA_BOOTSTRAP_SERVERS_FIELD_NUMBER: builtins.int + TOPIC_FIELD_NUMBER: builtins.int + MESSAGE_FORMAT_FIELD_NUMBER: builtins.int + WATERMARK_DELAY_THRESHOLD_FIELD_NUMBER: builtins.int + kafka_bootstrap_servers: builtins.str + """Comma separated list of Kafka bootstrap servers. Used for feature tables without a defined source host[:port]]""" + topic: builtins.str + """Kafka topic to collect feature data from.""" + @property + def message_format(self) -> feast.core.DataFormat_pb2.StreamFormat: + """Defines the stream data format encoding feature/entity data in Kafka messages.""" + @property + def watermark_delay_threshold(self) -> google.protobuf.duration_pb2.Duration: + """Watermark delay threshold for stream data""" + def __init__( + self, + *, + kafka_bootstrap_servers: builtins.str = ..., + topic: builtins.str = ..., + message_format: feast.core.DataFormat_pb2.StreamFormat | None = ..., + watermark_delay_threshold: google.protobuf.duration_pb2.Duration | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["message_format", b"message_format", "watermark_delay_threshold", b"watermark_delay_threshold"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["kafka_bootstrap_servers", b"kafka_bootstrap_servers", "message_format", b"message_format", "topic", b"topic", "watermark_delay_threshold", b"watermark_delay_threshold"]) -> None: ... + + class KinesisOptions(google.protobuf.message.Message): + """Defines options for DataSource that sources features from Kinesis records. + Each record should be a Protobuf that can be decoded with the generated + Java Protobuf class at the given class path + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REGION_FIELD_NUMBER: builtins.int + STREAM_NAME_FIELD_NUMBER: builtins.int + RECORD_FORMAT_FIELD_NUMBER: builtins.int + region: builtins.str + """AWS region of the Kinesis stream""" + stream_name: builtins.str + """Name of the Kinesis stream to obtain feature data from.""" + @property + def record_format(self) -> feast.core.DataFormat_pb2.StreamFormat: + """Defines the data format encoding the feature/entity data in Kinesis records. + Kinesis Data Sources support Avro and Proto as data formats. + """ + def __init__( + self, + *, + region: builtins.str = ..., + stream_name: builtins.str = ..., + record_format: feast.core.DataFormat_pb2.StreamFormat | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["record_format", b"record_format"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["record_format", b"record_format", "region", b"region", "stream_name", b"stream_name"]) -> None: ... 
+ + class RedshiftOptions(google.protobuf.message.Message): + """Defines options for DataSource that sources features from a Redshift Query""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_FIELD_NUMBER: builtins.int + QUERY_FIELD_NUMBER: builtins.int + SCHEMA_FIELD_NUMBER: builtins.int + DATABASE_FIELD_NUMBER: builtins.int + table: builtins.str + """Redshift table name""" + query: builtins.str + """SQL query that returns a table containing feature data. Must contain an event_timestamp column, and respective + entity columns + """ + schema: builtins.str + """Redshift schema name""" + database: builtins.str + """Redshift database name""" + def __init__( + self, + *, + table: builtins.str = ..., + query: builtins.str = ..., + schema: builtins.str = ..., + database: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["database", b"database", "query", b"query", "schema", b"schema", "table", b"table"]) -> None: ... + + class AthenaOptions(google.protobuf.message.Message): + """Defines options for DataSource that sources features from a Athena Query""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_FIELD_NUMBER: builtins.int + QUERY_FIELD_NUMBER: builtins.int + DATABASE_FIELD_NUMBER: builtins.int + DATA_SOURCE_FIELD_NUMBER: builtins.int + table: builtins.str + """Athena table name""" + query: builtins.str + """SQL query that returns a table containing feature data. Must contain an event_timestamp column, and respective + entity columns + """ + database: builtins.str + """Athena database name""" + data_source: builtins.str + """Athena schema name""" + def __init__( + self, + *, + table: builtins.str = ..., + query: builtins.str = ..., + database: builtins.str = ..., + data_source: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data_source", b"data_source", "database", b"database", "query", b"query", "table", b"table"]) -> None: ... 
+ + class SnowflakeOptions(google.protobuf.message.Message): + """Defines options for DataSource that sources features from a Snowflake Query""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_FIELD_NUMBER: builtins.int + QUERY_FIELD_NUMBER: builtins.int + SCHEMA_FIELD_NUMBER: builtins.int + DATABASE_FIELD_NUMBER: builtins.int + table: builtins.str + """Snowflake table name""" + query: builtins.str + """SQL query that returns a table containing feature data. Must contain an event_timestamp column, and respective + entity columns + """ + schema: builtins.str + """Snowflake schema name""" + database: builtins.str + """Snowflake schema name""" + def __init__( + self, + *, + table: builtins.str = ..., + query: builtins.str = ..., + schema: builtins.str = ..., + database: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["database", b"database", "query", b"query", "schema", b"schema", "table", b"table"]) -> None: ... + + class SparkOptions(google.protobuf.message.Message): + """Defines options for DataSource that sources features from a spark table/query""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_FIELD_NUMBER: builtins.int + QUERY_FIELD_NUMBER: builtins.int + PATH_FIELD_NUMBER: builtins.int + FILE_FORMAT_FIELD_NUMBER: builtins.int + table: builtins.str + """Table name""" + query: builtins.str + """Spark SQl query that returns the table, this is an alternative to `table`""" + path: builtins.str + """Path from which spark can read the table, this is an alternative to `table`""" + file_format: builtins.str + """Format of files at `path` (e.g. parquet, avro, etc)""" + def __init__( + self, + *, + table: builtins.str = ..., + query: builtins.str = ..., + path: builtins.str = ..., + file_format: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["file_format", b"file_format", "path", b"path", "query", b"query", "table", b"table"]) -> None: ... 
+ + class CustomSourceOptions(google.protobuf.message.Message): + """Defines configuration for custom third-party data sources.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CONFIGURATION_FIELD_NUMBER: builtins.int + configuration: builtins.bytes + """Serialized configuration information for the data source. The implementer of the custom data source is + responsible for serializing and deserializing data from bytes + """ + def __init__( + self, + *, + configuration: builtins.bytes = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["configuration", b"configuration"]) -> None: ... + + class RequestDataOptions(google.protobuf.message.Message): + """Defines options for DataSource that sources features from request data""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class DeprecatedSchemaEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: feast.types.Value_pb2.ValueType.Enum.ValueType + def __init__( + self, + *, + key: builtins.str = ..., + value: feast.types.Value_pb2.ValueType.Enum.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + DEPRECATED_SCHEMA_FIELD_NUMBER: builtins.int + SCHEMA_FIELD_NUMBER: builtins.int + @property + def deprecated_schema(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, feast.types.Value_pb2.ValueType.Enum.ValueType]: + """Mapping of feature name to type""" + @property + def schema(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: ... 
+ def __init__( + self, + *, + deprecated_schema: collections.abc.Mapping[builtins.str, feast.types.Value_pb2.ValueType.Enum.ValueType] | None = ..., + schema: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["deprecated_schema", b"deprecated_schema", "schema", b"schema"]) -> None: ... + + class PushOptions(google.protobuf.message.Message): + """Defines options for DataSource that supports pushing data to it. This allows data to be pushed to + the online store on-demand, such as by stream consumers. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + OWNER_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + FIELD_MAPPING_FIELD_NUMBER: builtins.int + TIMESTAMP_FIELD_FIELD_NUMBER: builtins.int + DATE_PARTITION_COLUMN_FIELD_NUMBER: builtins.int + CREATED_TIMESTAMP_COLUMN_FIELD_NUMBER: builtins.int + DATA_SOURCE_CLASS_TYPE_FIELD_NUMBER: builtins.int + BATCH_SOURCE_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + FILE_OPTIONS_FIELD_NUMBER: builtins.int + BIGQUERY_OPTIONS_FIELD_NUMBER: builtins.int + KAFKA_OPTIONS_FIELD_NUMBER: builtins.int + KINESIS_OPTIONS_FIELD_NUMBER: builtins.int + REDSHIFT_OPTIONS_FIELD_NUMBER: builtins.int + REQUEST_DATA_OPTIONS_FIELD_NUMBER: builtins.int + CUSTOM_OPTIONS_FIELD_NUMBER: builtins.int + SNOWFLAKE_OPTIONS_FIELD_NUMBER: builtins.int + PUSH_OPTIONS_FIELD_NUMBER: builtins.int + SPARK_OPTIONS_FIELD_NUMBER: builtins.int + TRINO_OPTIONS_FIELD_NUMBER: builtins.int + ATHENA_OPTIONS_FIELD_NUMBER: builtins.int + name: builtins.str + """Unique name of data source within the project""" + project: builtins.str + """Name of Feast project that this data source belongs to.""" + description: builtins.str + @property + def 
tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + owner: builtins.str + type: global___DataSource.SourceType.ValueType + @property + def field_mapping(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """Defines mapping between fields in the sourced data + and fields in parent FeatureTable. + """ + timestamp_field: builtins.str + """Must specify event timestamp column name""" + date_partition_column: builtins.str + """(Optional) Specify partition column + useful for file sources + """ + created_timestamp_column: builtins.str + """Must specify creation timestamp column name""" + data_source_class_type: builtins.str + """This is an internal field that is represents the python class for the data source object a proto object represents. + This should be set by feast, and not by users. + The field is used primarily by custom data sources and is mandatory for them to set. Feast may set it for + first party sources as well. + """ + @property + def batch_source(self) -> global___DataSource: + """Optional batch source for streaming sources for historical features and materialization.""" + @property + def meta(self) -> global___DataSource.SourceMeta: ... + @property + def file_options(self) -> global___DataSource.FileOptions: ... + @property + def bigquery_options(self) -> global___DataSource.BigQueryOptions: ... + @property + def kafka_options(self) -> global___DataSource.KafkaOptions: ... + @property + def kinesis_options(self) -> global___DataSource.KinesisOptions: ... + @property + def redshift_options(self) -> global___DataSource.RedshiftOptions: ... + @property + def request_data_options(self) -> global___DataSource.RequestDataOptions: ... + @property + def custom_options(self) -> global___DataSource.CustomSourceOptions: ... + @property + def snowflake_options(self) -> global___DataSource.SnowflakeOptions: ... + @property + def push_options(self) -> global___DataSource.PushOptions: ... 
+ @property + def spark_options(self) -> global___DataSource.SparkOptions: ... + @property + def trino_options(self) -> global___DataSource.TrinoOptions: ... + @property + def athena_options(self) -> global___DataSource.AthenaOptions: ... + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + description: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + owner: builtins.str = ..., + type: global___DataSource.SourceType.ValueType = ..., + field_mapping: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + timestamp_field: builtins.str = ..., + date_partition_column: builtins.str = ..., + created_timestamp_column: builtins.str = ..., + data_source_class_type: builtins.str = ..., + batch_source: global___DataSource | None = ..., + meta: global___DataSource.SourceMeta | None = ..., + file_options: global___DataSource.FileOptions | None = ..., + bigquery_options: global___DataSource.BigQueryOptions | None = ..., + kafka_options: global___DataSource.KafkaOptions | None = ..., + kinesis_options: global___DataSource.KinesisOptions | None = ..., + redshift_options: global___DataSource.RedshiftOptions | None = ..., + request_data_options: global___DataSource.RequestDataOptions | None = ..., + custom_options: global___DataSource.CustomSourceOptions | None = ..., + snowflake_options: global___DataSource.SnowflakeOptions | None = ..., + push_options: global___DataSource.PushOptions | None = ..., + spark_options: global___DataSource.SparkOptions | None = ..., + trino_options: global___DataSource.TrinoOptions | None = ..., + athena_options: global___DataSource.AthenaOptions | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["athena_options", b"athena_options", "batch_source", b"batch_source", "bigquery_options", b"bigquery_options", "custom_options", b"custom_options", "file_options", b"file_options", "kafka_options", b"kafka_options", "kinesis_options", b"kinesis_options", "meta", b"meta", "options", b"options", "push_options", b"push_options", "redshift_options", b"redshift_options", "request_data_options", b"request_data_options", "snowflake_options", b"snowflake_options", "spark_options", b"spark_options", "trino_options", b"trino_options"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["athena_options", b"athena_options", "batch_source", b"batch_source", "bigquery_options", b"bigquery_options", "created_timestamp_column", b"created_timestamp_column", "custom_options", b"custom_options", "data_source_class_type", b"data_source_class_type", "date_partition_column", b"date_partition_column", "description", b"description", "field_mapping", b"field_mapping", "file_options", b"file_options", "kafka_options", b"kafka_options", "kinesis_options", b"kinesis_options", "meta", b"meta", "name", b"name", "options", b"options", "owner", b"owner", "project", b"project", "push_options", b"push_options", "redshift_options", b"redshift_options", "request_data_options", b"request_data_options", "snowflake_options", b"snowflake_options", "spark_options", b"spark_options", "tags", b"tags", "timestamp_field", b"timestamp_field", "trino_options", b"trino_options", "type", b"type"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["options", b"options"]) -> typing_extensions.Literal["file_options", "bigquery_options", "kafka_options", "kinesis_options", "redshift_options", "request_data_options", "custom_options", "snowflake_options", "push_options", "spark_options", "trino_options", "athena_options"] | None: ... 
+ +global___DataSource = DataSource diff --git a/sdk/python/feast/protos/feast/core/DataSource_pb2_grpc.py b/sdk/python/feast/protos/feast/core/DataSource_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DataSource_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.py b/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.py new file mode 100644 index 0000000000..c5dbc3ec64 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/DatastoreTable.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x66\x65\x61st/core/DatastoreTable.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/wrappers.proto\"\xc2\x01\n\x0e\x44\x61tastoreTable\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x30\n\nproject_id\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12/\n\tnamespace\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x08\x64\x61tabase\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.StringValueBX\n\x10\x66\x65\x61st.proto.coreB\x13\x44\x61tastoreTableProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = 
globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.DatastoreTable_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\023DatastoreTableProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_DATASTORETABLE']._serialized_start=80 + _globals['_DATASTORETABLE']._serialized_end=274 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.pyi b/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.pyi new file mode 100644 index 0000000000..6339a97536 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.pyi @@ -0,0 +1,67 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +* Copyright 2021 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+""" +import builtins +import google.protobuf.descriptor +import google.protobuf.message +import google.protobuf.wrappers_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class DatastoreTable(google.protobuf.message.Message): + """Represents a Datastore table""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + PROJECT_ID_FIELD_NUMBER: builtins.int + NAMESPACE_FIELD_NUMBER: builtins.int + DATABASE_FIELD_NUMBER: builtins.int + project: builtins.str + """Feast project of the table""" + name: builtins.str + """Name of the table""" + @property + def project_id(self) -> google.protobuf.wrappers_pb2.StringValue: + """GCP project id""" + @property + def namespace(self) -> google.protobuf.wrappers_pb2.StringValue: + """Datastore namespace""" + @property + def database(self) -> google.protobuf.wrappers_pb2.StringValue: + """Firestore database""" + def __init__( + self, + *, + project: builtins.str = ..., + name: builtins.str = ..., + project_id: google.protobuf.wrappers_pb2.StringValue | None = ..., + namespace: google.protobuf.wrappers_pb2.StringValue | None = ..., + database: google.protobuf.wrappers_pb2.StringValue | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["database", b"database", "namespace", b"namespace", "project_id", b"project_id"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["database", b"database", "name", b"name", "namespace", b"namespace", "project", b"project", "project_id", b"project_id"]) -> None: ... 
+ +global___DatastoreTable = DatastoreTable diff --git a/sdk/python/feast/protos/feast/core/DatastoreTable_pb2_grpc.py b/sdk/python/feast/protos/feast/core/DatastoreTable_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DatastoreTable_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2.py b/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2.py new file mode 100644 index 0000000000..34b813f39a --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/DynamoDBTable.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1e\x66\x65\x61st/core/DynamoDBTable.proto\x12\nfeast.core\"-\n\rDynamoDBTable\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x02 \x01(\tBW\n\x10\x66\x65\x61st.proto.coreB\x12\x44ynamoDBTableProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.DynamoDBTable_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = 
b'\n\020feast.proto.coreB\022DynamoDBTableProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_DYNAMODBTABLE']._serialized_start=46 + _globals['_DYNAMODBTABLE']._serialized_end=91 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2.pyi b/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2.pyi new file mode 100644 index 0000000000..cd9edd9a03 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2.pyi @@ -0,0 +1,50 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +* Copyright 2021 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class DynamoDBTable(google.protobuf.message.Message): + """Represents a DynamoDB table""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + REGION_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the table""" + region: builtins.str + """Region of the table""" + def __init__( + self, + *, + name: builtins.str = ..., + region: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "region", b"region"]) -> None: ... 
+ +global___DynamoDBTable = DynamoDBTable diff --git a/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2_grpc.py b/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Entity_pb2.py b/sdk/python/feast/protos/feast/core/Entity_pb2.py new file mode 100644 index 0000000000..5a192854ca --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Entity_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/Entity.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x66\x65\x61st/core/Entity.proto\x12\nfeast.core\x1a\x17\x66\x65\x61st/types/Value.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"V\n\x06\x45ntity\x12&\n\x04spec\x18\x01 \x01(\x0b\x32\x18.feast.core.EntitySpecV2\x12$\n\x04meta\x18\x02 \x01(\x0b\x32\x16.feast.core.EntityMeta\"\xf3\x01\n\x0c\x45ntitySpecV2\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\t \x01(\t\x12/\n\nvalue_type\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\x12\x13\n\x0b\x64\x65scription\x18\x03 
\x01(\t\x12\x10\n\x08join_key\x18\x04 \x01(\t\x12\x30\n\x04tags\x18\x08 \x03(\x0b\x32\".feast.core.EntitySpecV2.TagsEntry\x12\r\n\x05owner\x18\n \x01(\t\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x7f\n\nEntityMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampBP\n\x10\x66\x65\x61st.proto.coreB\x0b\x45ntityProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Entity_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\013EntityProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_ENTITYSPECV2_TAGSENTRY']._options = None + _globals['_ENTITYSPECV2_TAGSENTRY']._serialized_options = b'8\001' + _globals['_ENTITY']._serialized_start=97 + _globals['_ENTITY']._serialized_end=183 + _globals['_ENTITYSPECV2']._serialized_start=186 + _globals['_ENTITYSPECV2']._serialized_end=429 + _globals['_ENTITYSPECV2_TAGSENTRY']._serialized_start=386 + _globals['_ENTITYSPECV2_TAGSENTRY']._serialized_end=429 + _globals['_ENTITYMETA']._serialized_start=431 + _globals['_ENTITYMETA']._serialized_end=558 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Entity_pb2.pyi b/sdk/python/feast/protos/feast/core/Entity_pb2.pyi new file mode 100644 index 0000000000..732b3e1032 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Entity_pb2.pyi @@ -0,0 +1,130 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file + +* Copyright 2020 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +""" +import builtins +import collections.abc +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Entity(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___EntitySpecV2: + """User-specified specifications of this entity.""" + @property + def meta(self) -> global___EntityMeta: + """System-populated metadata for this entity.""" + def __init__( + self, + *, + spec: global___EntitySpecV2 | None = ..., + meta: global___EntityMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... 
+ +global___Entity = Entity + +class EntitySpecV2(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + VALUE_TYPE_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + JOIN_KEY_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + OWNER_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the entity.""" + project: builtins.str + """Name of Feast project that this feature table belongs to.""" + value_type: feast.types.Value_pb2.ValueType.Enum.ValueType + """Type of the entity.""" + description: builtins.str + """Description of the entity.""" + join_key: builtins.str + """Join key for the entity (i.e. name of the column the entity maps to).""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + owner: builtins.str + """Owner of the entity.""" + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + value_type: feast.types.Value_pb2.ValueType.Enum.ValueType = ..., + description: builtins.str = ..., + join_key: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + owner: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "join_key", b"join_key", "name", b"name", "owner", b"owner", "project", b"project", "tags", b"tags", "value_type", b"value_type"]) -> None: ... 
+ +global___EntitySpecV2 = EntitySpecV2 + +class EntityMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> None: ... + +global___EntityMeta = EntityMeta diff --git a/sdk/python/feast/protos/feast/core/Entity_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Entity_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Entity_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/FeatureService_pb2.py b/sdk/python/feast/protos/feast/core/FeatureService_pb2.py new file mode 100644 index 0000000000..cf6ac46ac5 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureService_pb2.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/core/FeatureService.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import FeatureViewProjection_pb2 as feast_dot_core_dot_FeatureViewProjection__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x66\x65\x61st/core/FeatureService.proto\x12\nfeast.core\x1a\x1fgoogle/protobuf/timestamp.proto\x1a&feast/core/FeatureViewProjection.proto\"l\n\x0e\x46\x65\x61tureService\x12,\n\x04spec\x18\x01 \x01(\x0b\x32\x1e.feast.core.FeatureServiceSpec\x12,\n\x04meta\x18\x02 \x01(\x0b\x32\x1e.feast.core.FeatureServiceMeta\"\xa4\x02\n\x12\x46\x65\x61tureServiceSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x33\n\x08\x66\x65\x61tures\x18\x03 \x03(\x0b\x32!.feast.core.FeatureViewProjection\x12\x36\n\x04tags\x18\x04 \x03(\x0b\x32(.feast.core.FeatureServiceSpec.TagsEntry\x12\x13\n\x0b\x64\x65scription\x18\x05 \x01(\t\x12\r\n\x05owner\x18\x06 \x01(\t\x12\x31\n\x0elogging_config\x18\x07 \x01(\x0b\x32\x19.feast.core.LoggingConfig\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x87\x01\n\x12\x46\x65\x61tureServiceMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x9a\x07\n\rLoggingConfig\x12\x13\n\x0bsample_rate\x18\x01 \x01(\x02\x12\x45\n\x10\x66ile_destination\x18\x03 \x01(\x0b\x32).feast.core.LoggingConfig.FileDestinationH\x00\x12M\n\x14\x62igquery_destination\x18\x04 
\x01(\x0b\x32-.feast.core.LoggingConfig.BigQueryDestinationH\x00\x12M\n\x14redshift_destination\x18\x05 \x01(\x0b\x32-.feast.core.LoggingConfig.RedshiftDestinationH\x00\x12O\n\x15snowflake_destination\x18\x06 \x01(\x0b\x32..feast.core.LoggingConfig.SnowflakeDestinationH\x00\x12I\n\x12\x63ustom_destination\x18\x07 \x01(\x0b\x32+.feast.core.LoggingConfig.CustomDestinationH\x00\x12I\n\x12\x61thena_destination\x18\x08 \x01(\x0b\x32+.feast.core.LoggingConfig.AthenaDestinationH\x00\x1aS\n\x0f\x46ileDestination\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x1c\n\x14s3_endpoint_override\x18\x02 \x01(\t\x12\x14\n\x0cpartition_by\x18\x03 \x03(\t\x1a(\n\x13\x42igQueryDestination\x12\x11\n\ttable_ref\x18\x01 \x01(\t\x1a)\n\x13RedshiftDestination\x12\x12\n\ntable_name\x18\x01 \x01(\t\x1a\'\n\x11\x41thenaDestination\x12\x12\n\ntable_name\x18\x01 \x01(\t\x1a*\n\x14SnowflakeDestination\x12\x12\n\ntable_name\x18\x01 \x01(\t\x1a\x99\x01\n\x11\x43ustomDestination\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12G\n\x06\x63onfig\x18\x02 \x03(\x0b\x32\x37.feast.core.LoggingConfig.CustomDestination.ConfigEntry\x1a-\n\x0b\x43onfigEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\r\n\x0b\x64\x65stinationBX\n\x10\x66\x65\x61st.proto.coreB\x13\x46\x65\x61tureServiceProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.FeatureService_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\023FeatureServiceProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_FEATURESERVICESPEC_TAGSENTRY']._options = None + _globals['_FEATURESERVICESPEC_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LOGGINGCONFIG_CUSTOMDESTINATION_CONFIGENTRY']._options = None + 
_globals['_LOGGINGCONFIG_CUSTOMDESTINATION_CONFIGENTRY']._serialized_options = b'8\001' + _globals['_FEATURESERVICE']._serialized_start=120 + _globals['_FEATURESERVICE']._serialized_end=228 + _globals['_FEATURESERVICESPEC']._serialized_start=231 + _globals['_FEATURESERVICESPEC']._serialized_end=523 + _globals['_FEATURESERVICESPEC_TAGSENTRY']._serialized_start=480 + _globals['_FEATURESERVICESPEC_TAGSENTRY']._serialized_end=523 + _globals['_FEATURESERVICEMETA']._serialized_start=526 + _globals['_FEATURESERVICEMETA']._serialized_end=661 + _globals['_LOGGINGCONFIG']._serialized_start=664 + _globals['_LOGGINGCONFIG']._serialized_end=1586 + _globals['_LOGGINGCONFIG_FILEDESTINATION']._serialized_start=1162 + _globals['_LOGGINGCONFIG_FILEDESTINATION']._serialized_end=1245 + _globals['_LOGGINGCONFIG_BIGQUERYDESTINATION']._serialized_start=1247 + _globals['_LOGGINGCONFIG_BIGQUERYDESTINATION']._serialized_end=1287 + _globals['_LOGGINGCONFIG_REDSHIFTDESTINATION']._serialized_start=1289 + _globals['_LOGGINGCONFIG_REDSHIFTDESTINATION']._serialized_end=1330 + _globals['_LOGGINGCONFIG_ATHENADESTINATION']._serialized_start=1332 + _globals['_LOGGINGCONFIG_ATHENADESTINATION']._serialized_end=1371 + _globals['_LOGGINGCONFIG_SNOWFLAKEDESTINATION']._serialized_start=1373 + _globals['_LOGGINGCONFIG_SNOWFLAKEDESTINATION']._serialized_end=1415 + _globals['_LOGGINGCONFIG_CUSTOMDESTINATION']._serialized_start=1418 + _globals['_LOGGINGCONFIG_CUSTOMDESTINATION']._serialized_end=1571 + _globals['_LOGGINGCONFIG_CUSTOMDESTINATION_CONFIGENTRY']._serialized_start=1526 + _globals['_LOGGINGCONFIG_CUSTOMDESTINATION_CONFIGENTRY']._serialized_end=1571 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/FeatureService_pb2.pyi b/sdk/python/feast/protos/feast/core/FeatureService_pb2.pyi new file mode 100644 index 0000000000..b3305b72df --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureService_pb2.pyi @@ -0,0 +1,266 @@ +""" +@generated by mypy-protobuf. 
Do not edit manually! +isort:skip_file +""" +import builtins +import collections.abc +import feast.core.FeatureViewProjection_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class FeatureService(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___FeatureServiceSpec: + """User-specified specifications of this feature service.""" + @property + def meta(self) -> global___FeatureServiceMeta: + """System-populated metadata for this feature service.""" + def __init__( + self, + *, + spec: global___FeatureServiceSpec | None = ..., + meta: global___FeatureServiceMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... + +global___FeatureService = FeatureService + +class FeatureServiceSpec(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + OWNER_FIELD_NUMBER: builtins.int + LOGGING_CONFIG_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the Feature Service. Must be unique. Not updated.""" + project: builtins.str + """Name of Feast project that this Feature Service belongs to.""" + @property + def features(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.FeatureViewProjection_pb2.FeatureViewProjection]: + """Represents a projection that's to be applied on top of the FeatureView. + Contains data such as the features to use from a FeatureView. + """ + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + description: builtins.str + """Description of the feature service.""" + owner: builtins.str + """Owner of the feature service.""" + @property + def logging_config(self) -> global___LoggingConfig: + """(optional) if provided logging will be enabled for this feature service.""" + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + features: collections.abc.Iterable[feast.core.FeatureViewProjection_pb2.FeatureViewProjection] | None = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + description: builtins.str = ..., + owner: builtins.str = ..., + logging_config: global___LoggingConfig | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["logging_config", b"logging_config"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "features", b"features", "logging_config", b"logging_config", "name", b"name", "owner", b"owner", "project", b"project", "tags", b"tags"]) -> None: ... 
+ +global___FeatureServiceSpec = FeatureServiceSpec + +class FeatureServiceMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature Service is created""" + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature Service is last updated""" + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> None: ... + +global___FeatureServiceMeta = FeatureServiceMeta + +class LoggingConfig(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class FileDestination(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PATH_FIELD_NUMBER: builtins.int + S3_ENDPOINT_OVERRIDE_FIELD_NUMBER: builtins.int + PARTITION_BY_FIELD_NUMBER: builtins.int + path: builtins.str + s3_endpoint_override: builtins.str + @property + def partition_by(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """column names to use for partitioning""" + def __init__( + self, + *, + path: builtins.str = ..., + s3_endpoint_override: builtins.str = ..., + partition_by: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["partition_by", b"partition_by", "path", b"path", "s3_endpoint_override", b"s3_endpoint_override"]) -> None: ... + + class BigQueryDestination(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_REF_FIELD_NUMBER: builtins.int + table_ref: builtins.str + """Full table reference in the form of [project:dataset.table]""" + def __init__( + self, + *, + table_ref: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["table_ref", b"table_ref"]) -> None: ... + + class RedshiftDestination(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_NAME_FIELD_NUMBER: builtins.int + table_name: builtins.str + """Destination table name. ClusterId and database will be taken from an offline store config""" + def __init__( + self, + *, + table_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["table_name", b"table_name"]) -> None: ... + + class AthenaDestination(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_NAME_FIELD_NUMBER: builtins.int + table_name: builtins.str + """Destination table name. data_source and database will be taken from an offline store config""" + def __init__( + self, + *, + table_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["table_name", b"table_name"]) -> None: ... + + class SnowflakeDestination(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_NAME_FIELD_NUMBER: builtins.int + table_name: builtins.str + """Destination table name. Schema and database will be taken from an offline store config""" + def __init__( + self, + *, + table_name: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["table_name", b"table_name"]) -> None: ... + + class CustomDestination(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class ConfigEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + KIND_FIELD_NUMBER: builtins.int + CONFIG_FIELD_NUMBER: builtins.int + kind: builtins.str + @property + def config(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + kind: builtins.str = ..., + config: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["config", b"config", "kind", b"kind"]) -> None: ... + + SAMPLE_RATE_FIELD_NUMBER: builtins.int + FILE_DESTINATION_FIELD_NUMBER: builtins.int + BIGQUERY_DESTINATION_FIELD_NUMBER: builtins.int + REDSHIFT_DESTINATION_FIELD_NUMBER: builtins.int + SNOWFLAKE_DESTINATION_FIELD_NUMBER: builtins.int + CUSTOM_DESTINATION_FIELD_NUMBER: builtins.int + ATHENA_DESTINATION_FIELD_NUMBER: builtins.int + sample_rate: builtins.float + @property + def file_destination(self) -> global___LoggingConfig.FileDestination: ... + @property + def bigquery_destination(self) -> global___LoggingConfig.BigQueryDestination: ... + @property + def redshift_destination(self) -> global___LoggingConfig.RedshiftDestination: ... + @property + def snowflake_destination(self) -> global___LoggingConfig.SnowflakeDestination: ... + @property + def custom_destination(self) -> global___LoggingConfig.CustomDestination: ... 
+ @property + def athena_destination(self) -> global___LoggingConfig.AthenaDestination: ... + def __init__( + self, + *, + sample_rate: builtins.float = ..., + file_destination: global___LoggingConfig.FileDestination | None = ..., + bigquery_destination: global___LoggingConfig.BigQueryDestination | None = ..., + redshift_destination: global___LoggingConfig.RedshiftDestination | None = ..., + snowflake_destination: global___LoggingConfig.SnowflakeDestination | None = ..., + custom_destination: global___LoggingConfig.CustomDestination | None = ..., + athena_destination: global___LoggingConfig.AthenaDestination | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["athena_destination", b"athena_destination", "bigquery_destination", b"bigquery_destination", "custom_destination", b"custom_destination", "destination", b"destination", "file_destination", b"file_destination", "redshift_destination", b"redshift_destination", "snowflake_destination", b"snowflake_destination"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["athena_destination", b"athena_destination", "bigquery_destination", b"bigquery_destination", "custom_destination", b"custom_destination", "destination", b"destination", "file_destination", b"file_destination", "redshift_destination", b"redshift_destination", "sample_rate", b"sample_rate", "snowflake_destination", b"snowflake_destination"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["destination", b"destination"]) -> typing_extensions.Literal["file_destination", "bigquery_destination", "redshift_destination", "snowflake_destination", "custom_destination", "athena_destination"] | None: ... 
+ +global___LoggingConfig = LoggingConfig diff --git a/sdk/python/feast/protos/feast/core/FeatureService_pb2_grpc.py b/sdk/python/feast/protos/feast/core/FeatureService_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureService_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/FeatureTable_pb2.py b/sdk/python/feast/protos/feast/core/FeatureTable_pb2.py new file mode 100644 index 0000000000..713e72b5d3 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureTable_pb2.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/FeatureTable.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 +from feast.protos.feast.core import Feature_pb2 as feast_dot_core_dot_Feature__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x66\x65\x61st/core/FeatureTable.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\"f\n\x0c\x46\x65\x61tureTable\x12*\n\x04spec\x18\x01 
\x01(\x0b\x32\x1c.feast.core.FeatureTableSpec\x12*\n\x04meta\x18\x02 \x01(\x0b\x32\x1c.feast.core.FeatureTableMeta\"\xe2\x02\n\x10\x46\x65\x61tureTableSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\t \x01(\t\x12\x10\n\x08\x65ntities\x18\x03 \x03(\t\x12+\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x38\n\x06labels\x18\x05 \x03(\x0b\x32(.feast.core.FeatureTableSpec.LabelsEntry\x12*\n\x07max_age\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12,\n\x0c\x62\x61tch_source\x18\x07 \x01(\x0b\x32\x16.feast.core.DataSource\x12-\n\rstream_source\x18\x08 \x01(\x0b\x32\x16.feast.core.DataSource\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa5\x01\n\x10\x46\x65\x61tureTableMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x10\n\x08revision\x18\x03 \x01(\x03\x12\x0c\n\x04hash\x18\x04 \x01(\tBV\n\x10\x66\x65\x61st.proto.coreB\x11\x46\x65\x61tureTableProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.FeatureTable_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\021FeatureTableProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_FEATURETABLESPEC_LABELSENTRY']._options = None + _globals['_FEATURETABLESPEC_LABELSENTRY']._serialized_options = b'8\001' + _globals['_FEATURETABLE']._serialized_start=165 + _globals['_FEATURETABLE']._serialized_end=267 + _globals['_FEATURETABLESPEC']._serialized_start=270 + _globals['_FEATURETABLESPEC']._serialized_end=624 + _globals['_FEATURETABLESPEC_LABELSENTRY']._serialized_start=579 + 
_globals['_FEATURETABLESPEC_LABELSENTRY']._serialized_end=624 + _globals['_FEATURETABLEMETA']._serialized_start=627 + _globals['_FEATURETABLEMETA']._serialized_end=792 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/FeatureTable_pb2.pyi b/sdk/python/feast/protos/feast/core/FeatureTable_pb2.pyi new file mode 100644 index 0000000000..dd41c2d214 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureTable_pb2.pyi @@ -0,0 +1,166 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2020 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" +import builtins +import collections.abc +import feast.core.DataSource_pb2 +import feast.core.Feature_pb2 +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class FeatureTable(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___FeatureTableSpec: + """User-specified specifications of this feature table.""" + @property + def meta(self) -> global___FeatureTableMeta: + """System-populated metadata for this feature table.""" + def __init__( + self, + *, + spec: global___FeatureTableSpec | None = ..., + meta: global___FeatureTableMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... + +global___FeatureTable = FeatureTable + +class FeatureTableSpec(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class LabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ENTITIES_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + LABELS_FIELD_NUMBER: builtins.int + MAX_AGE_FIELD_NUMBER: builtins.int + BATCH_SOURCE_FIELD_NUMBER: builtins.int + STREAM_SOURCE_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the feature table. Must be unique. Not updated.""" + project: builtins.str + """Name of Feast project that this feature table belongs to.""" + @property + def entities(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """List names of entities to associate with the Features defined in this + Feature Table. Not updatable. + """ + @property + def features(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """List of features specifications for each feature defined with this feature table.""" + @property + def labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + @property + def max_age(self) -> google.protobuf.duration_pb2.Duration: + """Features in this feature table can only be retrieved from online serving + younger than max age. Age is measured as the duration of time between + the feature's event timestamp and when the feature is retrieved + Feature values outside max age will be returned as unset values and indicated to end user + """ + @property + def batch_source(self) -> feast.core.DataSource_pb2.DataSource: + """Batch/Offline DataSource to source batch/offline feature data. + Only batch DataSource can be specified + (ie source type should start with 'BATCH_') + """ + @property + def stream_source(self) -> feast.core.DataSource_pb2.DataSource: + """Stream/Online DataSource to source stream/online feature data. 
+ Only stream DataSource can be specified + (ie source type should start with 'STREAM_') + """ + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + entities: collections.abc.Iterable[builtins.str] | None = ..., + features: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + max_age: google.protobuf.duration_pb2.Duration | None = ..., + batch_source: feast.core.DataSource_pb2.DataSource | None = ..., + stream_source: feast.core.DataSource_pb2.DataSource | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "max_age", b"max_age", "stream_source", b"stream_source"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "entities", b"entities", "features", b"features", "labels", b"labels", "max_age", b"max_age", "name", b"name", "project", b"project", "stream_source", b"stream_source"]) -> None: ... + +global___FeatureTableSpec = FeatureTableSpec + +class FeatureTableMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + REVISION_FIELD_NUMBER: builtins.int + HASH_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature Table is created""" + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature Table is last updated""" + revision: builtins.int + """Auto incrementing revision no. 
of this Feature Table""" + hash: builtins.str + """Hash entities, features, batch_source and stream_source to inform JobService if + jobs should be restarted should hash change + """ + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + revision: builtins.int = ..., + hash: builtins.str = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "hash", b"hash", "last_updated_timestamp", b"last_updated_timestamp", "revision", b"revision"]) -> None: ... + +global___FeatureTableMeta = FeatureTableMeta diff --git a/sdk/python/feast/protos/feast/core/FeatureTable_pb2_grpc.py b/sdk/python/feast/protos/feast/core/FeatureTable_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureTable_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.py b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.py new file mode 100644 index 0000000000..286f511658 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/core/FeatureViewProjection.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.core import Feature_pb2 as feast_dot_core_dot_Feature__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&feast/core/FeatureViewProjection.proto\x12\nfeast.core\x1a\x18\x66\x65\x61st/core/Feature.proto\"\x83\x02\n\x15\x46\x65\x61tureViewProjection\x12\x19\n\x11\x66\x65\x61ture_view_name\x18\x01 \x01(\t\x12\x1f\n\x17\x66\x65\x61ture_view_name_alias\x18\x03 \x01(\t\x12\x32\n\x0f\x66\x65\x61ture_columns\x18\x02 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12G\n\x0cjoin_key_map\x18\x04 \x03(\x0b\x32\x31.feast.core.FeatureViewProjection.JoinKeyMapEntry\x1a\x31\n\x0fJoinKeyMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42Z\n\x10\x66\x65\x61st.proto.coreB\x15\x46\x65\x61tureReferenceProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.FeatureViewProjection_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\025FeatureReferenceProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_FEATUREVIEWPROJECTION_JOINKEYMAPENTRY']._options = None + _globals['_FEATUREVIEWPROJECTION_JOINKEYMAPENTRY']._serialized_options = b'8\001' + _globals['_FEATUREVIEWPROJECTION']._serialized_start=81 + _globals['_FEATUREVIEWPROJECTION']._serialized_end=340 + 
_globals['_FEATUREVIEWPROJECTION_JOINKEYMAPENTRY']._serialized_start=291 + _globals['_FEATUREVIEWPROJECTION_JOINKEYMAPENTRY']._serialized_end=340 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.pyi b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.pyi new file mode 100644 index 0000000000..2c0a298e14 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.pyi @@ -0,0 +1,66 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import builtins +import collections.abc +import feast.core.Feature_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class FeatureViewProjection(google.protobuf.message.Message): + """A projection to be applied on top of a FeatureView. + Contains the modifications to a FeatureView such as the features subset to use. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class JoinKeyMapEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + FEATURE_VIEW_NAME_FIELD_NUMBER: builtins.int + FEATURE_VIEW_NAME_ALIAS_FIELD_NUMBER: builtins.int + FEATURE_COLUMNS_FIELD_NUMBER: builtins.int + JOIN_KEY_MAP_FIELD_NUMBER: builtins.int + feature_view_name: builtins.str + """The feature view name""" + feature_view_name_alias: builtins.str + """Alias for feature view name""" + @property + def feature_columns(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """The features of the feature view that are a part of the feature reference.""" + @property + def join_key_map(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """Map for entity join_key overrides of feature data entity join_key to entity data join_key""" + def __init__( + self, + *, + feature_view_name: builtins.str = ..., + feature_view_name_alias: builtins.str = ..., + feature_columns: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + join_key_map: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["feature_columns", b"feature_columns", "feature_view_name", b"feature_view_name", "feature_view_name_alias", b"feature_view_name_alias", "join_key_map", b"join_key_map"]) -> None: ... + +global___FeatureViewProjection = FeatureViewProjection diff --git a/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2_grpc.py b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/FeatureView_pb2.py b/sdk/python/feast/protos/feast/core/FeatureView_pb2.py new file mode 100644 index 0000000000..f1480593d9 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureView_pb2.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/FeatureView.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 +from feast.protos.feast.core import Feature_pb2 as feast_dot_core_dot_Feature__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66\x65\x61st/core/FeatureView.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\"c\n\x0b\x46\x65\x61tureView\x12)\n\x04spec\x18\x01 \x01(\x0b\x32\x1b.feast.core.FeatureViewSpec\x12)\n\x04meta\x18\x02 \x01(\x0b\x32\x1b.feast.core.FeatureViewMeta\"\xbd\x03\n\x0f\x46\x65\x61tureViewSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x10\n\x08\x65ntities\x18\x03 \x03(\t\x12+\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x31\n\x0e\x65ntity_columns\x18\x0c 
\x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x13\n\x0b\x64\x65scription\x18\n \x01(\t\x12\x33\n\x04tags\x18\x05 \x03(\x0b\x32%.feast.core.FeatureViewSpec.TagsEntry\x12\r\n\x05owner\x18\x0b \x01(\t\x12&\n\x03ttl\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12,\n\x0c\x62\x61tch_source\x18\x07 \x01(\x0b\x32\x16.feast.core.DataSource\x12-\n\rstream_source\x18\t \x01(\x0b\x32\x16.feast.core.DataSource\x12\x0e\n\x06online\x18\x08 \x01(\x08\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xcc\x01\n\x0f\x46\x65\x61tureViewMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x46\n\x19materialization_intervals\x18\x03 \x03(\x0b\x32#.feast.core.MaterializationInterval\"w\n\x17MaterializationInterval\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampBU\n\x10\x66\x65\x61st.proto.coreB\x10\x46\x65\x61tureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.FeatureView_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\020FeatureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_FEATUREVIEWSPEC_TAGSENTRY']._options = None + _globals['_FEATUREVIEWSPEC_TAGSENTRY']._serialized_options = b'8\001' + _globals['_FEATUREVIEW']._serialized_start=164 + _globals['_FEATUREVIEW']._serialized_end=263 + _globals['_FEATUREVIEWSPEC']._serialized_start=266 + _globals['_FEATUREVIEWSPEC']._serialized_end=711 + _globals['_FEATUREVIEWSPEC_TAGSENTRY']._serialized_start=668 + 
_globals['_FEATUREVIEWSPEC_TAGSENTRY']._serialized_end=711 + _globals['_FEATUREVIEWMETA']._serialized_start=714 + _globals['_FEATUREVIEWMETA']._serialized_end=918 + _globals['_MATERIALIZATIONINTERVAL']._serialized_start=920 + _globals['_MATERIALIZATIONINTERVAL']._serialized_end=1039 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/FeatureView_pb2.pyi b/sdk/python/feast/protos/feast/core/FeatureView_pb2.pyi new file mode 100644 index 0000000000..e1d4e2dfee --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureView_pb2.pyi @@ -0,0 +1,194 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2020 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" +import builtins +import collections.abc +import feast.core.DataSource_pb2 +import feast.core.Feature_pb2 +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class FeatureView(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___FeatureViewSpec: + """User-specified specifications of this feature view.""" + @property + def meta(self) -> global___FeatureViewMeta: + """System-populated metadata for this feature view.""" + def __init__( + self, + *, + spec: global___FeatureViewSpec | None = ..., + meta: global___FeatureViewMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... + +global___FeatureView = FeatureView + +class FeatureViewSpec(google.protobuf.message.Message): + """Next available id: 13 + TODO(adchia): refactor common fields from this and ODFV into separate metadata proto + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ENTITIES_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + ENTITY_COLUMNS_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + OWNER_FIELD_NUMBER: builtins.int + TTL_FIELD_NUMBER: builtins.int + BATCH_SOURCE_FIELD_NUMBER: builtins.int + STREAM_SOURCE_FIELD_NUMBER: builtins.int + ONLINE_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the feature view. Must be unique. Not updated.""" + project: builtins.str + """Name of Feast project that this feature view belongs to.""" + @property + def entities(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """List of names of entities associated with this feature view.""" + @property + def features(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """List of specifications for each feature defined as part of this feature view.""" + @property + def entity_columns(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """List of specifications for each entity defined as part of this feature view.""" + description: builtins.str + """Description of the feature view.""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + owner: builtins.str + """Owner of the feature view.""" + @property + def ttl(self) -> google.protobuf.duration_pb2.Duration: + """Features in this feature view can only be retrieved from online serving + younger than ttl. 
Ttl is measured as the duration of time between + the feature's event timestamp and when the feature is retrieved + Feature values outside ttl will be returned as unset values and indicated to end user + """ + @property + def batch_source(self) -> feast.core.DataSource_pb2.DataSource: + """Batch/Offline DataSource where this view can retrieve offline feature data.""" + @property + def stream_source(self) -> feast.core.DataSource_pb2.DataSource: + """Streaming DataSource from where this view can consume "online" feature data.""" + online: builtins.bool + """Whether these features should be served online or not""" + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + entities: collections.abc.Iterable[builtins.str] | None = ..., + features: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + entity_columns: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + description: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + owner: builtins.str = ..., + ttl: google.protobuf.duration_pb2.Duration | None = ..., + batch_source: feast.core.DataSource_pb2.DataSource | None = ..., + stream_source: feast.core.DataSource_pb2.DataSource | None = ..., + online: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "stream_source", b"stream_source", "ttl", b"ttl"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "description", b"description", "entities", b"entities", "entity_columns", b"entity_columns", "features", b"features", "name", b"name", "online", b"online", "owner", b"owner", "project", b"project", "stream_source", b"stream_source", "tags", b"tags", "ttl", b"ttl"]) -> None: ... 
+ +global___FeatureViewSpec = FeatureViewSpec + +class FeatureViewMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + MATERIALIZATION_INTERVALS_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature View is created""" + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature View is last updated""" + @property + def materialization_intervals(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MaterializationInterval]: + """List of pairs (start_time, end_time) for which this feature view has been materialized.""" + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + materialization_intervals: collections.abc.Iterable[global___MaterializationInterval] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp", "materialization_intervals", b"materialization_intervals"]) -> None: ... + +global___FeatureViewMeta = FeatureViewMeta + +class MaterializationInterval(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + START_TIME_FIELD_NUMBER: builtins.int + END_TIME_FIELD_NUMBER: builtins.int + @property + def start_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def end_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... 
+ def __init__( + self, + *, + start_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + end_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["end_time", b"end_time", "start_time", b"start_time"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["end_time", b"end_time", "start_time", b"start_time"]) -> None: ... + +global___MaterializationInterval = MaterializationInterval diff --git a/sdk/python/feast/protos/feast/core/FeatureView_pb2_grpc.py b/sdk/python/feast/protos/feast/core/FeatureView_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureView_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Feature_pb2.py b/sdk/python/feast/protos/feast/core/Feature_pb2.py new file mode 100644 index 0000000000..dd7c6008ef --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Feature_pb2.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/core/Feature.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18\x66\x65\x61st/core/Feature.proto\x12\nfeast.core\x1a\x17\x66\x65\x61st/types/Value.proto\"\xc3\x01\n\rFeatureSpecV2\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\nvalue_type\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\x12\x31\n\x04tags\x18\x03 \x03(\x0b\x32#.feast.core.FeatureSpecV2.TagsEntry\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42Q\n\x10\x66\x65\x61st.proto.coreB\x0c\x46\x65\x61tureProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Feature_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\014FeatureProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_FEATURESPECV2_TAGSENTRY']._options = None + _globals['_FEATURESPECV2_TAGSENTRY']._serialized_options = b'8\001' + _globals['_FEATURESPECV2']._serialized_start=66 + _globals['_FEATURESPECV2']._serialized_end=261 + _globals['_FEATURESPECV2_TAGSENTRY']._serialized_start=218 + _globals['_FEATURESPECV2_TAGSENTRY']._serialized_end=261 +# @@protoc_insertion_point(module_scope) diff --git 
a/sdk/python/feast/protos/feast/core/Feature_pb2.pyi b/sdk/python/feast/protos/feast/core/Feature_pb2.pyi new file mode 100644 index 0000000000..f4235b0965 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Feature_pb2.pyi @@ -0,0 +1,75 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2020 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class FeatureSpecV2(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + VALUE_TYPE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the feature. 
Not updatable.""" + value_type: feast.types.Value_pb2.ValueType.Enum.ValueType + """Value type of the feature. Not updatable.""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """Tags for user defined metadata on a feature""" + description: builtins.str + """Description of the feature.""" + def __init__( + self, + *, + name: builtins.str = ..., + value_type: feast.types.Value_pb2.ValueType.Enum.ValueType = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + description: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "name", b"name", "tags", b"tags", "value_type", b"value_type"]) -> None: ... + +global___FeatureSpecV2 = FeatureSpecV2 diff --git a/sdk/python/feast/protos/feast/core/Feature_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Feature_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Feature_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/InfraObject_pb2.py b/sdk/python/feast/protos/feast/core/InfraObject_pb2.py new file mode 100644 index 0000000000..0804aecbf6 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/InfraObject_pb2.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/core/InfraObject.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.core import DatastoreTable_pb2 as feast_dot_core_dot_DatastoreTable__pb2 +from feast.protos.feast.core import DynamoDBTable_pb2 as feast_dot_core_dot_DynamoDBTable__pb2 +from feast.protos.feast.core import SqliteTable_pb2 as feast_dot_core_dot_SqliteTable__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66\x65\x61st/core/InfraObject.proto\x12\nfeast.core\x1a\x1f\x66\x65\x61st/core/DatastoreTable.proto\x1a\x1e\x66\x65\x61st/core/DynamoDBTable.proto\x1a\x1c\x66\x65\x61st/core/SqliteTable.proto\"7\n\x05Infra\x12.\n\rinfra_objects\x18\x01 \x03(\x0b\x32\x17.feast.core.InfraObject\"\xb6\x02\n\x0bInfraObject\x12\x1f\n\x17infra_object_class_type\x18\x01 \x01(\t\x12\x33\n\x0e\x64ynamodb_table\x18\x02 \x01(\x0b\x32\x19.feast.core.DynamoDBTableH\x00\x12\x35\n\x0f\x64\x61tastore_table\x18\x03 \x01(\x0b\x32\x1a.feast.core.DatastoreTableH\x00\x12/\n\x0csqlite_table\x18\x04 \x01(\x0b\x32\x17.feast.core.SqliteTableH\x00\x12;\n\x0c\x63ustom_infra\x18\x64 \x01(\x0b\x32#.feast.core.InfraObject.CustomInfraH\x00\x1a\x1c\n\x0b\x43ustomInfra\x12\r\n\x05\x66ield\x18\x01 \x01(\x0c\x42\x0e\n\x0cinfra_objectBU\n\x10\x66\x65\x61st.proto.coreB\x10InfraObjectProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.InfraObject_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + 
_globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\020InfraObjectProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_INFRA']._serialized_start=139 + _globals['_INFRA']._serialized_end=194 + _globals['_INFRAOBJECT']._serialized_start=197 + _globals['_INFRAOBJECT']._serialized_end=507 + _globals['_INFRAOBJECT_CUSTOMINFRA']._serialized_start=463 + _globals['_INFRAOBJECT_CUSTOMINFRA']._serialized_end=491 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/InfraObject_pb2.pyi b/sdk/python/feast/protos/feast/core/InfraObject_pb2.pyi new file mode 100644 index 0000000000..38b31b7317 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/InfraObject_pb2.pyi @@ -0,0 +1,101 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +* Copyright 2021 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+""" +import builtins +import collections.abc +import feast.core.DatastoreTable_pb2 +import feast.core.DynamoDBTable_pb2 +import feast.core.SqliteTable_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Infra(google.protobuf.message.Message): + """Represents a set of infrastructure objects managed by Feast""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + INFRA_OBJECTS_FIELD_NUMBER: builtins.int + @property + def infra_objects(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InfraObject]: + """List of infrastructure objects managed by Feast""" + def __init__( + self, + *, + infra_objects: collections.abc.Iterable[global___InfraObject] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["infra_objects", b"infra_objects"]) -> None: ... + +global___Infra = Infra + +class InfraObject(google.protobuf.message.Message): + """Represents a single infrastructure object managed by Feast""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class CustomInfra(google.protobuf.message.Message): + """Allows for custom infra objects to be added""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FIELD_FIELD_NUMBER: builtins.int + field: builtins.bytes + def __init__( + self, + *, + field: builtins.bytes = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field"]) -> None: ... 
+ + INFRA_OBJECT_CLASS_TYPE_FIELD_NUMBER: builtins.int + DYNAMODB_TABLE_FIELD_NUMBER: builtins.int + DATASTORE_TABLE_FIELD_NUMBER: builtins.int + SQLITE_TABLE_FIELD_NUMBER: builtins.int + CUSTOM_INFRA_FIELD_NUMBER: builtins.int + infra_object_class_type: builtins.str + """Represents the Python class for the infrastructure object""" + @property + def dynamodb_table(self) -> feast.core.DynamoDBTable_pb2.DynamoDBTable: ... + @property + def datastore_table(self) -> feast.core.DatastoreTable_pb2.DatastoreTable: ... + @property + def sqlite_table(self) -> feast.core.SqliteTable_pb2.SqliteTable: ... + @property + def custom_infra(self) -> global___InfraObject.CustomInfra: ... + def __init__( + self, + *, + infra_object_class_type: builtins.str = ..., + dynamodb_table: feast.core.DynamoDBTable_pb2.DynamoDBTable | None = ..., + datastore_table: feast.core.DatastoreTable_pb2.DatastoreTable | None = ..., + sqlite_table: feast.core.SqliteTable_pb2.SqliteTable | None = ..., + custom_infra: global___InfraObject.CustomInfra | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["custom_infra", b"custom_infra", "datastore_table", b"datastore_table", "dynamodb_table", b"dynamodb_table", "infra_object", b"infra_object", "sqlite_table", b"sqlite_table"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["custom_infra", b"custom_infra", "datastore_table", b"datastore_table", "dynamodb_table", b"dynamodb_table", "infra_object", b"infra_object", "infra_object_class_type", b"infra_object_class_type", "sqlite_table", b"sqlite_table"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["infra_object", b"infra_object"]) -> typing_extensions.Literal["dynamodb_table", "datastore_table", "sqlite_table", "custom_infra"] | None: ... 
+ +global___InfraObject = InfraObject diff --git a/sdk/python/feast/protos/feast/core/InfraObject_pb2_grpc.py b/sdk/python/feast/protos/feast/core/InfraObject_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/InfraObject_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.py b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.py new file mode 100644 index 0000000000..4be551724c --- /dev/null +++ b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/OnDemandFeatureView.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import FeatureView_pb2 as feast_dot_core_dot_FeatureView__pb2 +from feast.protos.feast.core import FeatureViewProjection_pb2 as feast_dot_core_dot_FeatureViewProjection__pb2 +from feast.protos.feast.core import Feature_pb2 as feast_dot_core_dot_Feature__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 +from feast.protos.feast.core import Transformation_pb2 as feast_dot_core_dot_Transformation__pb2 + + +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n$feast/core/OnDemandFeatureView.proto\x12\nfeast.core\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1c\x66\x65\x61st/core/FeatureView.proto\x1a&feast/core/FeatureViewProjection.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x1f\x66\x65\x61st/core/Transformation.proto\"{\n\x13OnDemandFeatureView\x12\x31\n\x04spec\x18\x01 \x01(\x0b\x32#.feast.core.OnDemandFeatureViewSpec\x12\x31\n\x04meta\x18\x02 \x01(\x0b\x32#.feast.core.OnDemandFeatureViewMeta\"\x99\x04\n\x17OnDemandFeatureViewSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12+\n\x08\x66\x65\x61tures\x18\x03 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x41\n\x07sources\x18\x04 \x03(\x0b\x32\x30.feast.core.OnDemandFeatureViewSpec.SourcesEntry\x12\x42\n\x15user_defined_function\x18\x05 \x01(\x0b\x32\x1f.feast.core.UserDefinedFunctionB\x02\x18\x01\x12\x43\n\x16\x66\x65\x61ture_transformation\x18\n \x01(\x0b\x32#.feast.core.FeatureTransformationV2\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12;\n\x04tags\x18\x07 \x03(\x0b\x32-.feast.core.OnDemandFeatureViewSpec.TagsEntry\x12\r\n\x05owner\x18\x08 \x01(\t\x12\x0c\n\x04mode\x18\x0b \x01(\t\x1aJ\n\x0cSourcesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.feast.core.OnDemandSource:\x02\x38\x01\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x8c\x01\n\x17OnDemandFeatureViewMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xc8\x01\n\x0eOnDemandSource\x12/\n\x0c\x66\x65\x61ture_view\x18\x01 \x01(\x0b\x32\x17.feast.core.FeatureViewH\x00\x12\x44\n\x17\x66\x65\x61ture_view_projection\x18\x03 \x01(\x0b\x32!.feast.core.FeatureViewProjectionH\x00\x12\x35\n\x13request_data_source\x18\x02 
\x01(\x0b\x32\x16.feast.core.DataSourceH\x00\x42\x08\n\x06source\"H\n\x13UserDefinedFunction\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x62ody\x18\x02 \x01(\x0c\x12\x11\n\tbody_text\x18\x03 \x01(\t:\x02\x18\x01\x42]\n\x10\x66\x65\x61st.proto.coreB\x18OnDemandFeatureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.OnDemandFeatureView_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\030OnDemandFeatureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_ONDEMANDFEATUREVIEWSPEC_SOURCESENTRY']._options = None + _globals['_ONDEMANDFEATUREVIEWSPEC_SOURCESENTRY']._serialized_options = b'8\001' + _globals['_ONDEMANDFEATUREVIEWSPEC_TAGSENTRY']._options = None + _globals['_ONDEMANDFEATUREVIEWSPEC_TAGSENTRY']._serialized_options = b'8\001' + _globals['_ONDEMANDFEATUREVIEWSPEC'].fields_by_name['user_defined_function']._options = None + _globals['_ONDEMANDFEATUREVIEWSPEC'].fields_by_name['user_defined_function']._serialized_options = b'\030\001' + _globals['_USERDEFINEDFUNCTION']._options = None + _globals['_USERDEFINEDFUNCTION']._serialized_options = b'\030\001' + _globals['_ONDEMANDFEATUREVIEW']._serialized_start=243 + _globals['_ONDEMANDFEATUREVIEW']._serialized_end=366 + _globals['_ONDEMANDFEATUREVIEWSPEC']._serialized_start=369 + _globals['_ONDEMANDFEATUREVIEWSPEC']._serialized_end=906 + _globals['_ONDEMANDFEATUREVIEWSPEC_SOURCESENTRY']._serialized_start=787 + _globals['_ONDEMANDFEATUREVIEWSPEC_SOURCESENTRY']._serialized_end=861 + _globals['_ONDEMANDFEATUREVIEWSPEC_TAGSENTRY']._serialized_start=863 + _globals['_ONDEMANDFEATUREVIEWSPEC_TAGSENTRY']._serialized_end=906 + _globals['_ONDEMANDFEATUREVIEWMETA']._serialized_start=909 + 
_globals['_ONDEMANDFEATUREVIEWMETA']._serialized_end=1049 + _globals['_ONDEMANDSOURCE']._serialized_start=1052 + _globals['_ONDEMANDSOURCE']._serialized_end=1252 + _globals['_USERDEFINEDFUNCTION']._serialized_start=1254 + _globals['_USERDEFINEDFUNCTION']._serialized_end=1326 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.pyi b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.pyi new file mode 100644 index 0000000000..d72a8f9862 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.pyi @@ -0,0 +1,219 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2020 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" +import builtins +import collections.abc +import feast.core.DataSource_pb2 +import feast.core.FeatureViewProjection_pb2 +import feast.core.FeatureView_pb2 +import feast.core.Feature_pb2 +import feast.core.Transformation_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class OnDemandFeatureView(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___OnDemandFeatureViewSpec: + """User-specified specifications of this feature view.""" + @property + def meta(self) -> global___OnDemandFeatureViewMeta: ... + def __init__( + self, + *, + spec: global___OnDemandFeatureViewSpec | None = ..., + meta: global___OnDemandFeatureViewMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... + +global___OnDemandFeatureView = OnDemandFeatureView + +class OnDemandFeatureViewSpec(google.protobuf.message.Message): + """Next available id: 9""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class SourcesEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> global___OnDemandSource: ... + def __init__( + self, + *, + key: builtins.str = ..., + value: global___OnDemandSource | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + SOURCES_FIELD_NUMBER: builtins.int + USER_DEFINED_FUNCTION_FIELD_NUMBER: builtins.int + FEATURE_TRANSFORMATION_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + OWNER_FIELD_NUMBER: builtins.int + MODE_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the feature view. Must be unique. Not updated.""" + project: builtins.str + """Name of Feast project that this feature view belongs to.""" + @property + def features(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """List of features specifications for each feature defined with this feature view.""" + @property + def sources(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___OnDemandSource]: + """Map of sources for this feature view.""" + @property + def user_defined_function(self) -> global___UserDefinedFunction: ... 
+ @property + def feature_transformation(self) -> feast.core.Transformation_pb2.FeatureTransformationV2: + """Oneof with {user_defined_function, on_demand_substrait_transformation}""" + description: builtins.str + """Description of the on demand feature view.""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata.""" + owner: builtins.str + """Owner of the on demand feature view.""" + mode: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + features: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + sources: collections.abc.Mapping[builtins.str, global___OnDemandSource] | None = ..., + user_defined_function: global___UserDefinedFunction | None = ..., + feature_transformation: feast.core.Transformation_pb2.FeatureTransformationV2 | None = ..., + description: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + owner: builtins.str = ..., + mode: builtins.str = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["feature_transformation", b"feature_transformation", "user_defined_function", b"user_defined_function"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "feature_transformation", b"feature_transformation", "features", b"features", "mode", b"mode", "name", b"name", "owner", b"owner", "project", b"project", "sources", b"sources", "tags", b"tags", "user_defined_function", b"user_defined_function"]) -> None: ... 
+ +global___OnDemandFeatureViewSpec = OnDemandFeatureViewSpec + +class OnDemandFeatureViewMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature View is created""" + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature View is last updated""" + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> None: ... + +global___OnDemandFeatureViewMeta = OnDemandFeatureViewMeta + +class OnDemandSource(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_VIEW_FIELD_NUMBER: builtins.int + FEATURE_VIEW_PROJECTION_FIELD_NUMBER: builtins.int + REQUEST_DATA_SOURCE_FIELD_NUMBER: builtins.int + @property + def feature_view(self) -> feast.core.FeatureView_pb2.FeatureView: ... + @property + def feature_view_projection(self) -> feast.core.FeatureViewProjection_pb2.FeatureViewProjection: ... + @property + def request_data_source(self) -> feast.core.DataSource_pb2.DataSource: ... 
+ def __init__( + self, + *, + feature_view: feast.core.FeatureView_pb2.FeatureView | None = ..., + feature_view_projection: feast.core.FeatureViewProjection_pb2.FeatureViewProjection | None = ..., + request_data_source: feast.core.DataSource_pb2.DataSource | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["feature_view", b"feature_view", "feature_view_projection", b"feature_view_projection", "request_data_source", b"request_data_source", "source", b"source"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["feature_view", b"feature_view", "feature_view_projection", b"feature_view_projection", "request_data_source", b"request_data_source", "source", b"source"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["source", b"source"]) -> typing_extensions.Literal["feature_view", "feature_view_projection", "request_data_source"] | None: ... + +global___OnDemandSource = OnDemandSource + +class UserDefinedFunction(google.protobuf.message.Message): + """Serialized representation of python function.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + BODY_FIELD_NUMBER: builtins.int + BODY_TEXT_FIELD_NUMBER: builtins.int + name: builtins.str + """The function name""" + body: builtins.bytes + """The python-syntax function body (serialized by dill)""" + body_text: builtins.str + """The string representation of the udf""" + def __init__( + self, + *, + name: builtins.str = ..., + body: builtins.bytes = ..., + body_text: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["body", b"body", "body_text", b"body_text", "name", b"name"]) -> None: ... 
+ +global___UserDefinedFunction = UserDefinedFunction diff --git a/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2_grpc.py b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Permission_pb2.py b/sdk/python/feast/protos/feast/core/Permission_pb2.py new file mode 100644 index 0000000000..822ad0261b --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Permission_pb2.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/Permission.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.core import Policy_pb2 as feast_dot_core_dot_Policy__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66\x65\x61st/core/Permission.proto\x12\nfeast.core\x1a\x17\x66\x65\x61st/core/Policy.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"`\n\nPermission\x12(\n\x04spec\x18\x01 \x01(\x0b\x32\x1a.feast.core.PermissionSpec\x12(\n\x04meta\x18\x02 \x01(\x0b\x32\x1a.feast.core.PermissionMeta\"\x9f\x06\n\x0ePermissionSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12.\n\x05types\x18\x03 
\x03(\x0e\x32\x1f.feast.core.PermissionSpec.Type\x12\x14\n\x0cname_pattern\x18\x04 \x01(\t\x12\x43\n\rrequired_tags\x18\x05 \x03(\x0b\x32,.feast.core.PermissionSpec.RequiredTagsEntry\x12\x39\n\x07\x61\x63tions\x18\x06 \x03(\x0e\x32(.feast.core.PermissionSpec.AuthzedAction\x12\"\n\x06policy\x18\x07 \x01(\x0b\x32\x12.feast.core.Policy\x12\x32\n\x04tags\x18\x08 \x03(\x0b\x32$.feast.core.PermissionSpec.TagsEntry\x1a\x33\n\x11RequiredTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x89\x01\n\rAuthzedAction\x12\n\n\x06\x43REATE\x10\x00\x12\x0c\n\x08\x44\x45SCRIBE\x10\x01\x12\n\n\x06UPDATE\x10\x02\x12\n\n\x06\x44\x45LETE\x10\x03\x12\x0f\n\x0bREAD_ONLINE\x10\x04\x12\x10\n\x0cREAD_OFFLINE\x10\x05\x12\x10\n\x0cWRITE_ONLINE\x10\x06\x12\x11\n\rWRITE_OFFLINE\x10\x07\"\xe1\x01\n\x04Type\x12\x10\n\x0c\x46\x45\x41TURE_VIEW\x10\x00\x12\x1a\n\x16ON_DEMAND_FEATURE_VIEW\x10\x01\x12\x16\n\x12\x42\x41TCH_FEATURE_VIEW\x10\x02\x12\x17\n\x13STREAM_FEATURE_VIEW\x10\x03\x12\n\n\x06\x45NTITY\x10\x04\x12\x13\n\x0f\x46\x45\x41TURE_SERVICE\x10\x05\x12\x0f\n\x0b\x44\x41TA_SOURCE\x10\x06\x12\x18\n\x14VALIDATION_REFERENCE\x10\x07\x12\x11\n\rSAVED_DATASET\x10\x08\x12\x0e\n\nPERMISSION\x10\t\x12\x0b\n\x07PROJECT\x10\n\"\x83\x01\n\x0ePermissionMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampBT\n\x10\x66\x65\x61st.proto.coreB\x0fPermissionProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Permission_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = 
b'\n\020feast.proto.coreB\017PermissionProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_PERMISSIONSPEC_REQUIREDTAGSENTRY']._options = None + _globals['_PERMISSIONSPEC_REQUIREDTAGSENTRY']._serialized_options = b'8\001' + _globals['_PERMISSIONSPEC_TAGSENTRY']._options = None + _globals['_PERMISSIONSPEC_TAGSENTRY']._serialized_options = b'8\001' + _globals['_PERMISSION']._serialized_start=101 + _globals['_PERMISSION']._serialized_end=197 + _globals['_PERMISSIONSPEC']._serialized_start=200 + _globals['_PERMISSIONSPEC']._serialized_end=999 + _globals['_PERMISSIONSPEC_REQUIREDTAGSENTRY']._serialized_start=535 + _globals['_PERMISSIONSPEC_REQUIREDTAGSENTRY']._serialized_end=586 + _globals['_PERMISSIONSPEC_TAGSENTRY']._serialized_start=588 + _globals['_PERMISSIONSPEC_TAGSENTRY']._serialized_end=631 + _globals['_PERMISSIONSPEC_AUTHZEDACTION']._serialized_start=634 + _globals['_PERMISSIONSPEC_AUTHZEDACTION']._serialized_end=771 + _globals['_PERMISSIONSPEC_TYPE']._serialized_start=774 + _globals['_PERMISSIONSPEC_TYPE']._serialized_end=999 + _globals['_PERMISSIONMETA']._serialized_start=1002 + _globals['_PERMISSIONMETA']._serialized_end=1133 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Permission_pb2.pyi b/sdk/python/feast/protos/feast/core/Permission_pb2.pyi new file mode 100644 index 0000000000..1155c13188 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Permission_pb2.pyi @@ -0,0 +1,195 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" +import builtins +import collections.abc +import feast.core.Policy_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Permission(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___PermissionSpec: + """User-specified specifications of this permission.""" + @property + def meta(self) -> global___PermissionMeta: + """System-populated metadata for this permission.""" + def __init__( + self, + *, + spec: global___PermissionSpec | None = ..., + meta: global___PermissionMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... 
+ +global___Permission = Permission + +class PermissionSpec(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _AuthzedAction: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _AuthzedActionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[PermissionSpec._AuthzedAction.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + CREATE: PermissionSpec._AuthzedAction.ValueType # 0 + DESCRIBE: PermissionSpec._AuthzedAction.ValueType # 1 + UPDATE: PermissionSpec._AuthzedAction.ValueType # 2 + DELETE: PermissionSpec._AuthzedAction.ValueType # 3 + READ_ONLINE: PermissionSpec._AuthzedAction.ValueType # 4 + READ_OFFLINE: PermissionSpec._AuthzedAction.ValueType # 5 + WRITE_ONLINE: PermissionSpec._AuthzedAction.ValueType # 6 + WRITE_OFFLINE: PermissionSpec._AuthzedAction.ValueType # 7 + + class AuthzedAction(_AuthzedAction, metaclass=_AuthzedActionEnumTypeWrapper): ... 
+ CREATE: PermissionSpec.AuthzedAction.ValueType # 0 + DESCRIBE: PermissionSpec.AuthzedAction.ValueType # 1 + UPDATE: PermissionSpec.AuthzedAction.ValueType # 2 + DELETE: PermissionSpec.AuthzedAction.ValueType # 3 + READ_ONLINE: PermissionSpec.AuthzedAction.ValueType # 4 + READ_OFFLINE: PermissionSpec.AuthzedAction.ValueType # 5 + WRITE_ONLINE: PermissionSpec.AuthzedAction.ValueType # 6 + WRITE_OFFLINE: PermissionSpec.AuthzedAction.ValueType # 7 + + class _Type: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _TypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[PermissionSpec._Type.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + FEATURE_VIEW: PermissionSpec._Type.ValueType # 0 + ON_DEMAND_FEATURE_VIEW: PermissionSpec._Type.ValueType # 1 + BATCH_FEATURE_VIEW: PermissionSpec._Type.ValueType # 2 + STREAM_FEATURE_VIEW: PermissionSpec._Type.ValueType # 3 + ENTITY: PermissionSpec._Type.ValueType # 4 + FEATURE_SERVICE: PermissionSpec._Type.ValueType # 5 + DATA_SOURCE: PermissionSpec._Type.ValueType # 6 + VALIDATION_REFERENCE: PermissionSpec._Type.ValueType # 7 + SAVED_DATASET: PermissionSpec._Type.ValueType # 8 + PERMISSION: PermissionSpec._Type.ValueType # 9 + PROJECT: PermissionSpec._Type.ValueType # 10 + + class Type(_Type, metaclass=_TypeEnumTypeWrapper): ... 
+ FEATURE_VIEW: PermissionSpec.Type.ValueType # 0 + ON_DEMAND_FEATURE_VIEW: PermissionSpec.Type.ValueType # 1 + BATCH_FEATURE_VIEW: PermissionSpec.Type.ValueType # 2 + STREAM_FEATURE_VIEW: PermissionSpec.Type.ValueType # 3 + ENTITY: PermissionSpec.Type.ValueType # 4 + FEATURE_SERVICE: PermissionSpec.Type.ValueType # 5 + DATA_SOURCE: PermissionSpec.Type.ValueType # 6 + VALIDATION_REFERENCE: PermissionSpec.Type.ValueType # 7 + SAVED_DATASET: PermissionSpec.Type.ValueType # 8 + PERMISSION: PermissionSpec.Type.ValueType # 9 + PROJECT: PermissionSpec.Type.ValueType # 10 + + class RequiredTagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + TYPES_FIELD_NUMBER: builtins.int + NAME_PATTERN_FIELD_NUMBER: builtins.int + REQUIRED_TAGS_FIELD_NUMBER: builtins.int + ACTIONS_FIELD_NUMBER: builtins.int + POLICY_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the permission. Must be unique. 
Not updated.""" + project: builtins.str + """Name of Feast project.""" + @property + def types(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___PermissionSpec.Type.ValueType]: ... + name_pattern: builtins.str + @property + def required_tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + @property + def actions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___PermissionSpec.AuthzedAction.ValueType]: + """List of actions.""" + @property + def policy(self) -> feast.core.Policy_pb2.Policy: + """the policy.""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + types: collections.abc.Iterable[global___PermissionSpec.Type.ValueType] | None = ..., + name_pattern: builtins.str = ..., + required_tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + actions: collections.abc.Iterable[global___PermissionSpec.AuthzedAction.ValueType] | None = ..., + policy: feast.core.Policy_pb2.Policy | None = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["policy", b"policy"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["actions", b"actions", "name", b"name", "name_pattern", b"name_pattern", "policy", b"policy", "project", b"project", "required_tags", b"required_tags", "tags", b"tags", "types", b"types"]) -> None: ... 
+ +global___PermissionSpec = PermissionSpec + +class PermissionMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> None: ... + +global___PermissionMeta = PermissionMeta diff --git a/sdk/python/feast/protos/feast/core/Permission_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Permission_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Permission_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Policy_pb2.py b/sdk/python/feast/protos/feast/core/Policy_pb2.py new file mode 100644 index 0000000000..2fac866115 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Policy_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/core/Policy.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x66\x65\x61st/core/Policy.proto\x12\nfeast.core\"p\n\x06Policy\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x38\n\x11role_based_policy\x18\x03 \x01(\x0b\x32\x1b.feast.core.RoleBasedPolicyH\x00\x42\r\n\x0bpolicy_type\" \n\x0fRoleBasedPolicy\x12\r\n\x05roles\x18\x01 \x03(\tBP\n\x10\x66\x65\x61st.proto.coreB\x0bPolicyProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Policy_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\013PolicyProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_POLICY']._serialized_start=39 + _globals['_POLICY']._serialized_end=151 + _globals['_ROLEBASEDPOLICY']._serialized_start=153 + _globals['_ROLEBASEDPOLICY']._serialized_end=185 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Policy_pb2.pyi b/sdk/python/feast/protos/feast/core/Policy_pb2.pyi new file mode 100644 index 0000000000..f19b18fff4 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Policy_pb2.pyi @@ -0,0 +1,58 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Policy(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ROLE_BASED_POLICY_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the policy.""" + project: builtins.str + """Name of Feast project.""" + @property + def role_based_policy(self) -> global___RoleBasedPolicy: ... + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + role_based_policy: global___RoleBasedPolicy | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["policy_type", b"policy_type", "role_based_policy", b"role_based_policy"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "policy_type", b"policy_type", "project", b"project", "role_based_policy", b"role_based_policy"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["policy_type", b"policy_type"]) -> typing_extensions.Literal["role_based_policy"] | None: ... + +global___Policy = Policy + +class RoleBasedPolicy(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ROLES_FIELD_NUMBER: builtins.int + @property + def roles(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """List of roles in this policy.""" + def __init__( + self, + *, + roles: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["roles", b"roles"]) -> None: ... 
+ +global___RoleBasedPolicy = RoleBasedPolicy diff --git a/sdk/python/feast/protos/feast/core/Policy_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Policy_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Policy_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Project_pb2.py b/sdk/python/feast/protos/feast/core/Project_pb2.py new file mode 100644 index 0000000000..cfbf122014 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Project_pb2.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/Project.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18\x66\x65\x61st/core/Project.proto\x12\nfeast.core\x1a\x1fgoogle/protobuf/timestamp.proto\"W\n\x07Project\x12%\n\x04spec\x18\x01 \x01(\x0b\x32\x17.feast.core.ProjectSpec\x12%\n\x04meta\x18\x02 \x01(\x0b\x32\x17.feast.core.ProjectMeta\"\x9d\x01\n\x0bProjectSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12/\n\x04tags\x18\x03 \x03(\x0b\x32!.feast.core.ProjectSpec.TagsEntry\x12\r\n\x05owner\x18\x04 \x01(\t\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\"\x80\x01\n\x0bProjectMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampBQ\n\x10\x66\x65\x61st.proto.coreB\x0cProjectProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Project_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\014ProjectProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_PROJECTSPEC_TAGSENTRY']._options = None + _globals['_PROJECTSPEC_TAGSENTRY']._serialized_options = b'8\001' + _globals['_PROJECT']._serialized_start=73 + _globals['_PROJECT']._serialized_end=160 + _globals['_PROJECTSPEC']._serialized_start=163 + _globals['_PROJECTSPEC']._serialized_end=320 + _globals['_PROJECTSPEC_TAGSENTRY']._serialized_start=277 + _globals['_PROJECTSPEC_TAGSENTRY']._serialized_end=320 + _globals['_PROJECTMETA']._serialized_start=323 + _globals['_PROJECTMETA']._serialized_end=451 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Project_pb2.pyi b/sdk/python/feast/protos/feast/core/Project_pb2.pyi new file mode 100644 index 0000000000..e3cce2ec42 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Project_pb2.pyi @@ -0,0 +1,119 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +* Copyright 2020 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. 
+* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Project(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___ProjectSpec: + """User-specified specifications of this entity.""" + @property + def meta(self) -> global___ProjectMeta: + """System-populated metadata for this entity.""" + def __init__( + self, + *, + spec: global___ProjectSpec | None = ..., + meta: global___ProjectMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... + +global___Project = Project + +class ProjectSpec(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + OWNER_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the Project""" + description: builtins.str + """Description of the Project""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + owner: builtins.str + """Owner of the Project""" + def __init__( + self, + *, + name: builtins.str = ..., + description: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + owner: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "name", b"name", "owner", b"owner", "tags", b"tags"]) -> None: ... + +global___ProjectSpec = ProjectSpec + +class ProjectMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time when the Project is created""" + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time when the Project is last updated with registry changes (Apply stage)""" + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> None: ... + +global___ProjectMeta = ProjectMeta diff --git a/sdk/python/feast/protos/feast/core/Project_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Project_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Project_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Registry_pb2.py b/sdk/python/feast/protos/feast/core/Registry_pb2.py new file mode 100644 index 0000000000..671958d80c --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Registry_pb2.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/Registry.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.core import Entity_pb2 as feast_dot_core_dot_Entity__pb2 +from feast.protos.feast.core import FeatureService_pb2 as feast_dot_core_dot_FeatureService__pb2 +from feast.protos.feast.core import FeatureTable_pb2 as feast_dot_core_dot_FeatureTable__pb2 +from feast.protos.feast.core import FeatureView_pb2 as feast_dot_core_dot_FeatureView__pb2 +from feast.protos.feast.core import InfraObject_pb2 as feast_dot_core_dot_InfraObject__pb2 +from feast.protos.feast.core import OnDemandFeatureView_pb2 as feast_dot_core_dot_OnDemandFeatureView__pb2 +from 
feast.protos.feast.core import StreamFeatureView_pb2 as feast_dot_core_dot_StreamFeatureView__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 +from feast.protos.feast.core import SavedDataset_pb2 as feast_dot_core_dot_SavedDataset__pb2 +from feast.protos.feast.core import ValidationProfile_pb2 as feast_dot_core_dot_ValidationProfile__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import Permission_pb2 as feast_dot_core_dot_Permission__pb2 +from feast.protos.feast.core import Project_pb2 as feast_dot_core_dot_Project__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19\x66\x65\x61st/core/Registry.proto\x12\nfeast.core\x1a\x17\x66\x65\x61st/core/Entity.proto\x1a\x1f\x66\x65\x61st/core/FeatureService.proto\x1a\x1d\x66\x65\x61st/core/FeatureTable.proto\x1a\x1c\x66\x65\x61st/core/FeatureView.proto\x1a\x1c\x66\x65\x61st/core/InfraObject.proto\x1a$feast/core/OnDemandFeatureView.proto\x1a\"feast/core/StreamFeatureView.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x1d\x66\x65\x61st/core/SavedDataset.proto\x1a\"feast/core/ValidationProfile.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1b\x66\x65\x61st/core/Permission.proto\x1a\x18\x66\x65\x61st/core/Project.proto\"\xff\x05\n\x08Registry\x12$\n\x08\x65ntities\x18\x01 \x03(\x0b\x32\x12.feast.core.Entity\x12\x30\n\x0e\x66\x65\x61ture_tables\x18\x02 \x03(\x0b\x32\x18.feast.core.FeatureTable\x12.\n\rfeature_views\x18\x06 \x03(\x0b\x32\x17.feast.core.FeatureView\x12,\n\x0c\x64\x61ta_sources\x18\x0c \x03(\x0b\x32\x16.feast.core.DataSource\x12@\n\x17on_demand_feature_views\x18\x08 \x03(\x0b\x32\x1f.feast.core.OnDemandFeatureView\x12;\n\x14stream_feature_views\x18\x0e \x03(\x0b\x32\x1d.feast.core.StreamFeatureView\x12\x34\n\x10\x66\x65\x61ture_services\x18\x07 \x03(\x0b\x32\x1a.feast.core.FeatureService\x12\x30\n\x0esaved_datasets\x18\x0b 
\x03(\x0b\x32\x18.feast.core.SavedDataset\x12>\n\x15validation_references\x18\r \x03(\x0b\x32\x1f.feast.core.ValidationReference\x12 \n\x05infra\x18\n \x01(\x0b\x32\x11.feast.core.Infra\x12\x39\n\x10project_metadata\x18\x0f \x03(\x0b\x32\x1b.feast.core.ProjectMetadataB\x02\x18\x01\x12\x1f\n\x17registry_schema_version\x18\x03 \x01(\t\x12\x12\n\nversion_id\x18\x04 \x01(\t\x12\x30\n\x0clast_updated\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x0bpermissions\x18\x10 \x03(\x0b\x32\x16.feast.core.Permission\x12%\n\x08projects\x18\x11 \x03(\x0b\x32\x13.feast.core.Project\"8\n\x0fProjectMetadata\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x14\n\x0cproject_uuid\x18\x02 \x01(\tBR\n\x10\x66\x65\x61st.proto.coreB\rRegistryProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Registry_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\rRegistryProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_REGISTRY'].fields_by_name['project_metadata']._options = None + _globals['_REGISTRY'].fields_by_name['project_metadata']._serialized_options = b'\030\001' + _globals['_REGISTRY']._serialized_start=449 + _globals['_REGISTRY']._serialized_end=1216 + _globals['_PROJECTMETADATA']._serialized_start=1218 + _globals['_PROJECTMETADATA']._serialized_end=1274 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Registry_pb2.pyi b/sdk/python/feast/protos/feast/core/Registry_pb2.pyi new file mode 100644 index 0000000000..fca49c7548 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Registry_pb2.pyi @@ -0,0 +1,140 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file + +* Copyright 2020 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +""" +import builtins +import collections.abc +import feast.core.DataSource_pb2 +import feast.core.Entity_pb2 +import feast.core.FeatureService_pb2 +import feast.core.FeatureTable_pb2 +import feast.core.FeatureView_pb2 +import feast.core.InfraObject_pb2 +import feast.core.OnDemandFeatureView_pb2 +import feast.core.Permission_pb2 +import feast.core.Project_pb2 +import feast.core.SavedDataset_pb2 +import feast.core.StreamFeatureView_pb2 +import feast.core.ValidationProfile_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Registry(google.protobuf.message.Message): + """Next id: 18""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ENTITIES_FIELD_NUMBER: builtins.int + FEATURE_TABLES_FIELD_NUMBER: builtins.int + FEATURE_VIEWS_FIELD_NUMBER: builtins.int + DATA_SOURCES_FIELD_NUMBER: builtins.int + ON_DEMAND_FEATURE_VIEWS_FIELD_NUMBER: builtins.int + STREAM_FEATURE_VIEWS_FIELD_NUMBER: builtins.int + FEATURE_SERVICES_FIELD_NUMBER: builtins.int + SAVED_DATASETS_FIELD_NUMBER: builtins.int + VALIDATION_REFERENCES_FIELD_NUMBER: builtins.int + INFRA_FIELD_NUMBER: builtins.int + 
PROJECT_METADATA_FIELD_NUMBER: builtins.int + REGISTRY_SCHEMA_VERSION_FIELD_NUMBER: builtins.int + VERSION_ID_FIELD_NUMBER: builtins.int + LAST_UPDATED_FIELD_NUMBER: builtins.int + PERMISSIONS_FIELD_NUMBER: builtins.int + PROJECTS_FIELD_NUMBER: builtins.int + @property + def entities(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Entity_pb2.Entity]: ... + @property + def feature_tables(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.FeatureTable_pb2.FeatureTable]: ... + @property + def feature_views(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.FeatureView_pb2.FeatureView]: ... + @property + def data_sources(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.DataSource_pb2.DataSource]: ... + @property + def on_demand_feature_views(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView]: ... + @property + def stream_feature_views(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.StreamFeatureView_pb2.StreamFeatureView]: ... + @property + def feature_services(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.FeatureService_pb2.FeatureService]: ... + @property + def saved_datasets(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.SavedDataset_pb2.SavedDataset]: ... + @property + def validation_references(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.ValidationProfile_pb2.ValidationReference]: ... + @property + def infra(self) -> feast.core.InfraObject_pb2.Infra: ... 
+ @property + def project_metadata(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ProjectMetadata]: + """Tracking metadata of Feast by project""" + registry_schema_version: builtins.str + """to support migrations; incremented when schema is changed""" + version_id: builtins.str + """version id, random string generated on each update of the data; now used only for debugging purposes""" + @property + def last_updated(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def permissions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Permission_pb2.Permission]: ... + @property + def projects(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Project_pb2.Project]: ... + def __init__( + self, + *, + entities: collections.abc.Iterable[feast.core.Entity_pb2.Entity] | None = ..., + feature_tables: collections.abc.Iterable[feast.core.FeatureTable_pb2.FeatureTable] | None = ..., + feature_views: collections.abc.Iterable[feast.core.FeatureView_pb2.FeatureView] | None = ..., + data_sources: collections.abc.Iterable[feast.core.DataSource_pb2.DataSource] | None = ..., + on_demand_feature_views: collections.abc.Iterable[feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView] | None = ..., + stream_feature_views: collections.abc.Iterable[feast.core.StreamFeatureView_pb2.StreamFeatureView] | None = ..., + feature_services: collections.abc.Iterable[feast.core.FeatureService_pb2.FeatureService] | None = ..., + saved_datasets: collections.abc.Iterable[feast.core.SavedDataset_pb2.SavedDataset] | None = ..., + validation_references: collections.abc.Iterable[feast.core.ValidationProfile_pb2.ValidationReference] | None = ..., + infra: feast.core.InfraObject_pb2.Infra | None = ..., + project_metadata: collections.abc.Iterable[global___ProjectMetadata] | None = ..., + registry_schema_version: builtins.str = ..., + version_id: builtins.str = ..., + last_updated: 
google.protobuf.timestamp_pb2.Timestamp | None = ..., + permissions: collections.abc.Iterable[feast.core.Permission_pb2.Permission] | None = ..., + projects: collections.abc.Iterable[feast.core.Project_pb2.Project] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["infra", b"infra", "last_updated", b"last_updated"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["data_sources", b"data_sources", "entities", b"entities", "feature_services", b"feature_services", "feature_tables", b"feature_tables", "feature_views", b"feature_views", "infra", b"infra", "last_updated", b"last_updated", "on_demand_feature_views", b"on_demand_feature_views", "permissions", b"permissions", "project_metadata", b"project_metadata", "projects", b"projects", "registry_schema_version", b"registry_schema_version", "saved_datasets", b"saved_datasets", "stream_feature_views", b"stream_feature_views", "validation_references", b"validation_references", "version_id", b"version_id"]) -> None: ... + +global___Registry = Registry + +class ProjectMetadata(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + PROJECT_UUID_FIELD_NUMBER: builtins.int + project: builtins.str + project_uuid: builtins.str + def __init__( + self, + *, + project: builtins.str = ..., + project_uuid: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["project", b"project", "project_uuid", b"project_uuid"]) -> None: ... + +global___ProjectMetadata = ProjectMetadata diff --git a/sdk/python/feast/protos/feast/core/Registry_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Registry_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Registry_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/SavedDataset_pb2.py b/sdk/python/feast/protos/feast/core/SavedDataset_pb2.py new file mode 100644 index 0000000000..fe1e2d49ea --- /dev/null +++ b/sdk/python/feast/protos/feast/core/SavedDataset_pb2.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/SavedDataset.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x66\x65\x61st/core/SavedDataset.proto\x12\nfeast.core\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\"\xa5\x02\n\x10SavedDatasetSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x10\n\x08\x66\x65\x61tures\x18\x03 \x03(\t\x12\x11\n\tjoin_keys\x18\x04 \x03(\t\x12\x1a\n\x12\x66ull_feature_names\x18\x05 \x01(\x08\x12\x30\n\x07storage\x18\x06 \x01(\x0b\x32\x1f.feast.core.SavedDatasetStorage\x12\x1c\n\x14\x66\x65\x61ture_service_name\x18\x08 \x01(\t\x12\x34\n\x04tags\x18\x07 \x03(\x0b\x32&.feast.core.SavedDatasetSpec.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa9\x04\n\x13SavedDatasetStorage\x12:\n\x0c\x66ile_storage\x18\x04 \x01(\x0b\x32\".feast.core.DataSource.FileOptionsH\x00\x12\x42\n\x10\x62igquery_storage\x18\x05 
\x01(\x0b\x32&.feast.core.DataSource.BigQueryOptionsH\x00\x12\x42\n\x10redshift_storage\x18\x06 \x01(\x0b\x32&.feast.core.DataSource.RedshiftOptionsH\x00\x12\x44\n\x11snowflake_storage\x18\x07 \x01(\x0b\x32\'.feast.core.DataSource.SnowflakeOptionsH\x00\x12<\n\rtrino_storage\x18\x08 \x01(\x0b\x32#.feast.core.DataSource.TrinoOptionsH\x00\x12<\n\rspark_storage\x18\t \x01(\x0b\x32#.feast.core.DataSource.SparkOptionsH\x00\x12\x44\n\x0e\x63ustom_storage\x18\n \x01(\x0b\x32*.feast.core.DataSource.CustomSourceOptionsH\x00\x12>\n\x0e\x61thena_storage\x18\x0b \x01(\x0b\x32$.feast.core.DataSource.AthenaOptionsH\x00\x42\x06\n\x04kind\"\xf7\x01\n\x10SavedDatasetMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x37\n\x13min_event_timestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x37\n\x13max_event_timestamp\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"f\n\x0cSavedDataset\x12*\n\x04spec\x18\x01 \x01(\x0b\x32\x1c.feast.core.SavedDatasetSpec\x12*\n\x04meta\x18\x02 \x01(\x0b\x32\x1c.feast.core.SavedDatasetMetaBV\n\x10\x66\x65\x61st.proto.coreB\x11SavedDatasetProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.SavedDataset_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\021SavedDatasetProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_SAVEDDATASETSPEC_TAGSENTRY']._options = None + _globals['_SAVEDDATASETSPEC_TAGSENTRY']._serialized_options = b'8\001' + _globals['_SAVEDDATASETSPEC']._serialized_start=108 + _globals['_SAVEDDATASETSPEC']._serialized_end=401 + _globals['_SAVEDDATASETSPEC_TAGSENTRY']._serialized_start=358 + 
_globals['_SAVEDDATASETSPEC_TAGSENTRY']._serialized_end=401 + _globals['_SAVEDDATASETSTORAGE']._serialized_start=404 + _globals['_SAVEDDATASETSTORAGE']._serialized_end=957 + _globals['_SAVEDDATASETMETA']._serialized_start=960 + _globals['_SAVEDDATASETMETA']._serialized_end=1207 + _globals['_SAVEDDATASET']._serialized_start=1209 + _globals['_SAVEDDATASET']._serialized_end=1311 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/SavedDataset_pb2.pyi b/sdk/python/feast/protos/feast/core/SavedDataset_pb2.pyi new file mode 100644 index 0000000000..47525b64ed --- /dev/null +++ b/sdk/python/feast/protos/feast/core/SavedDataset_pb2.pyi @@ -0,0 +1,192 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2021 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" +import builtins +import collections.abc +import feast.core.DataSource_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class SavedDatasetSpec(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + JOIN_KEYS_FIELD_NUMBER: builtins.int + FULL_FEATURE_NAMES_FIELD_NUMBER: builtins.int + STORAGE_FIELD_NUMBER: builtins.int + FEATURE_SERVICE_NAME_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the dataset. Must be unique since it's possible to overwrite dataset by name""" + project: builtins.str + """Name of Feast project that this Dataset belongs to.""" + @property + def features(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """list of feature references with format ":" """ + @property + def join_keys(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """entity columns + request columns from all feature views used during retrieval""" + full_feature_names: builtins.bool + """Whether full feature names are used in stored data""" + @property + def storage(self) -> global___SavedDatasetStorage: ... 
+ feature_service_name: builtins.str + """Optional and only populated if generated from a feature service fetch""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + features: collections.abc.Iterable[builtins.str] | None = ..., + join_keys: collections.abc.Iterable[builtins.str] | None = ..., + full_feature_names: builtins.bool = ..., + storage: global___SavedDatasetStorage | None = ..., + feature_service_name: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["storage", b"storage"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["feature_service_name", b"feature_service_name", "features", b"features", "full_feature_names", b"full_feature_names", "join_keys", b"join_keys", "name", b"name", "project", b"project", "storage", b"storage", "tags", b"tags"]) -> None: ... + +global___SavedDatasetSpec = SavedDatasetSpec + +class SavedDatasetStorage(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FILE_STORAGE_FIELD_NUMBER: builtins.int + BIGQUERY_STORAGE_FIELD_NUMBER: builtins.int + REDSHIFT_STORAGE_FIELD_NUMBER: builtins.int + SNOWFLAKE_STORAGE_FIELD_NUMBER: builtins.int + TRINO_STORAGE_FIELD_NUMBER: builtins.int + SPARK_STORAGE_FIELD_NUMBER: builtins.int + CUSTOM_STORAGE_FIELD_NUMBER: builtins.int + ATHENA_STORAGE_FIELD_NUMBER: builtins.int + @property + def file_storage(self) -> feast.core.DataSource_pb2.DataSource.FileOptions: ... + @property + def bigquery_storage(self) -> feast.core.DataSource_pb2.DataSource.BigQueryOptions: ... + @property + def redshift_storage(self) -> feast.core.DataSource_pb2.DataSource.RedshiftOptions: ... 
+ @property + def snowflake_storage(self) -> feast.core.DataSource_pb2.DataSource.SnowflakeOptions: ... + @property + def trino_storage(self) -> feast.core.DataSource_pb2.DataSource.TrinoOptions: ... + @property + def spark_storage(self) -> feast.core.DataSource_pb2.DataSource.SparkOptions: ... + @property + def custom_storage(self) -> feast.core.DataSource_pb2.DataSource.CustomSourceOptions: ... + @property + def athena_storage(self) -> feast.core.DataSource_pb2.DataSource.AthenaOptions: ... + def __init__( + self, + *, + file_storage: feast.core.DataSource_pb2.DataSource.FileOptions | None = ..., + bigquery_storage: feast.core.DataSource_pb2.DataSource.BigQueryOptions | None = ..., + redshift_storage: feast.core.DataSource_pb2.DataSource.RedshiftOptions | None = ..., + snowflake_storage: feast.core.DataSource_pb2.DataSource.SnowflakeOptions | None = ..., + trino_storage: feast.core.DataSource_pb2.DataSource.TrinoOptions | None = ..., + spark_storage: feast.core.DataSource_pb2.DataSource.SparkOptions | None = ..., + custom_storage: feast.core.DataSource_pb2.DataSource.CustomSourceOptions | None = ..., + athena_storage: feast.core.DataSource_pb2.DataSource.AthenaOptions | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["athena_storage", b"athena_storage", "bigquery_storage", b"bigquery_storage", "custom_storage", b"custom_storage", "file_storage", b"file_storage", "kind", b"kind", "redshift_storage", b"redshift_storage", "snowflake_storage", b"snowflake_storage", "spark_storage", b"spark_storage", "trino_storage", b"trino_storage"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["athena_storage", b"athena_storage", "bigquery_storage", b"bigquery_storage", "custom_storage", b"custom_storage", "file_storage", b"file_storage", "kind", b"kind", "redshift_storage", b"redshift_storage", "snowflake_storage", b"snowflake_storage", "spark_storage", b"spark_storage", "trino_storage", b"trino_storage"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["kind", b"kind"]) -> typing_extensions.Literal["file_storage", "bigquery_storage", "redshift_storage", "snowflake_storage", "trino_storage", "spark_storage", "custom_storage", "athena_storage"] | None: ... + +global___SavedDatasetStorage = SavedDatasetStorage + +class SavedDatasetMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + MIN_EVENT_TIMESTAMP_FIELD_NUMBER: builtins.int + MAX_EVENT_TIMESTAMP_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time when this saved dataset is created""" + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time when this saved dataset is last updated""" + @property + def min_event_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Min timestamp in the dataset (needed for retrieval)""" + @property + def max_event_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Max timestamp in the dataset (needed for retrieval)""" + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + min_event_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + max_event_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp", "max_event_timestamp", b"max_event_timestamp", "min_event_timestamp", b"min_event_timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp", "max_event_timestamp", b"max_event_timestamp", "min_event_timestamp", b"min_event_timestamp"]) -> None: ... + +global___SavedDatasetMeta = SavedDatasetMeta + +class SavedDataset(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___SavedDatasetSpec: ... + @property + def meta(self) -> global___SavedDatasetMeta: ... + def __init__( + self, + *, + spec: global___SavedDatasetSpec | None = ..., + meta: global___SavedDatasetMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... + +global___SavedDataset = SavedDataset diff --git a/sdk/python/feast/protos/feast/core/SavedDataset_pb2_grpc.py b/sdk/python/feast/protos/feast/core/SavedDataset_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/SavedDataset_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/SqliteTable_pb2.py b/sdk/python/feast/protos/feast/core/SqliteTable_pb2.py new file mode 100644 index 0000000000..8cc14781c7 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/SqliteTable_pb2.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/SqliteTable.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66\x65\x61st/core/SqliteTable.proto\x12\nfeast.core\")\n\x0bSqliteTable\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\tBU\n\x10\x66\x65\x61st.proto.coreB\x10SqliteTableProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.SqliteTable_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\020SqliteTableProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_SQLITETABLE']._serialized_start=44 + _globals['_SQLITETABLE']._serialized_end=85 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/SqliteTable_pb2.pyi b/sdk/python/feast/protos/feast/core/SqliteTable_pb2.pyi new file mode 100644 index 0000000000..10ecebf362 --- /dev/null +++ 
b/sdk/python/feast/protos/feast/core/SqliteTable_pb2.pyi @@ -0,0 +1,50 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +* Copyright 2021 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class SqliteTable(google.protobuf.message.Message): + """Represents a Sqlite table""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PATH_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + path: builtins.str + """Absolute path of the table""" + name: builtins.str + """Name of the table""" + def __init__( + self, + *, + path: builtins.str = ..., + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "path", b"path"]) -> None: ... + +global___SqliteTable = SqliteTable diff --git a/sdk/python/feast/protos/feast/core/SqliteTable_pb2_grpc.py b/sdk/python/feast/protos/feast/core/SqliteTable_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/SqliteTable_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Store_pb2.py b/sdk/python/feast/protos/feast/core/Store_pb2.py new file mode 100644 index 0000000000..7d24e11947 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Store_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/Store.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x16\x66\x65\x61st/core/Store.proto\x12\nfeast.core\"\xfd\x06\n\x05Store\x12\x0c\n\x04name\x18\x01 \x01(\t\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.feast.core.Store.StoreType\x12\x35\n\rsubscriptions\x18\x04 \x03(\x0b\x32\x1e.feast.core.Store.Subscription\x12\x35\n\x0credis_config\x18\x0b \x01(\x0b\x32\x1d.feast.core.Store.RedisConfigH\x00\x12\x44\n\x14redis_cluster_config\x18\x0e \x01(\x0b\x32$.feast.core.Store.RedisClusterConfigH\x00\x1a\x88\x01\n\x0bRedisConfig\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\x12\x1a\n\x12initial_backoff_ms\x18\x03 \x01(\x05\x12\x13\n\x0bmax_retries\x18\x04 \x01(\x05\x12\x1f\n\x17\x66lush_frequency_seconds\x18\x05 \x01(\x05\x12\x0b\n\x03ssl\x18\x06 \x01(\x08\x1a\xdb\x02\n\x12RedisClusterConfig\x12\x19\n\x11\x63onnection_string\x18\x01 \x01(\t\x12\x1a\n\x12initial_backoff_ms\x18\x02 \x01(\x05\x12\x13\n\x0bmax_retries\x18\x03 \x01(\x05\x12\x1f\n\x17\x66lush_frequency_seconds\x18\x04 \x01(\x05\x12\x12\n\nkey_prefix\x18\x05 \x01(\t\x12\x17\n\x0f\x65nable_fallback\x18\x06 
\x01(\x08\x12\x17\n\x0f\x66\x61llback_prefix\x18\x07 \x01(\t\x12@\n\tread_from\x18\x08 \x01(\x0e\x32-.feast.core.Store.RedisClusterConfig.ReadFrom\"P\n\x08ReadFrom\x12\n\n\x06MASTER\x10\x00\x12\x14\n\x10MASTER_PREFERRED\x10\x01\x12\x0b\n\x07REPLICA\x10\x02\x12\x15\n\x11REPLICA_PREFERRED\x10\x03\x1a\x44\n\x0cSubscription\x12\x0f\n\x07project\x18\x03 \x01(\t\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07\x65xclude\x18\x04 \x01(\x08J\x04\x08\x02\x10\x03\"N\n\tStoreType\x12\x0b\n\x07INVALID\x10\x00\x12\t\n\x05REDIS\x10\x01\x12\x11\n\rREDIS_CLUSTER\x10\x04\"\x04\x08\x02\x10\x02\"\x04\x08\x03\x10\x03\"\x04\x08\x0c\x10\x0c\"\x04\x08\r\x10\rB\x08\n\x06\x63onfigBO\n\x10\x66\x65\x61st.proto.coreB\nStoreProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Store_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\nStoreProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_STORE']._serialized_start=39 + _globals['_STORE']._serialized_end=932 + _globals['_STORE_REDISCONFIG']._serialized_start=286 + _globals['_STORE_REDISCONFIG']._serialized_end=422 + _globals['_STORE_REDISCLUSTERCONFIG']._serialized_start=425 + _globals['_STORE_REDISCLUSTERCONFIG']._serialized_end=772 + _globals['_STORE_REDISCLUSTERCONFIG_READFROM']._serialized_start=692 + _globals['_STORE_REDISCLUSTERCONFIG_READFROM']._serialized_end=772 + _globals['_STORE_SUBSCRIPTION']._serialized_start=774 + _globals['_STORE_SUBSCRIPTION']._serialized_end=842 + _globals['_STORE_STORETYPE']._serialized_start=844 + _globals['_STORE_STORETYPE']._serialized_end=922 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Store_pb2.pyi b/sdk/python/feast/protos/feast/core/Store_pb2.pyi new file 
mode 100644 index 0000000000..5ee957d184 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Store_pb2.pyi @@ -0,0 +1,234 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +* Copyright 2019 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Store(google.protobuf.message.Message): + """Store provides a location where Feast reads and writes feature values. + Feature values will be written to the Store in the form of FeatureRow elements. + The way FeatureRow is encoded and decoded when it is written to and read from + the Store depends on the type of the Store. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _StoreType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _StoreTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Store._StoreType.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + INVALID: Store._StoreType.ValueType # 0 + REDIS: Store._StoreType.ValueType # 1 + """Redis stores a FeatureRow element as a key, value pair. + + The Redis data types used (https://redis.io/topics/data-types): + - key: STRING + - value: STRING + + Encodings: + - key: byte array of RedisKey (refer to feast.storage.RedisKeyV2) + - value: Redis hashmap + """ + REDIS_CLUSTER: Store._StoreType.ValueType # 4 + + class StoreType(_StoreType, metaclass=_StoreTypeEnumTypeWrapper): ... + INVALID: Store.StoreType.ValueType # 0 + REDIS: Store.StoreType.ValueType # 1 + """Redis stores a FeatureRow element as a key, value pair. + + The Redis data types used (https://redis.io/topics/data-types): + - key: STRING + - value: STRING + + Encodings: + - key: byte array of RedisKey (refer to feast.storage.RedisKeyV2) + - value: Redis hashmap + """ + REDIS_CLUSTER: Store.StoreType.ValueType # 4 + + class RedisConfig(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + HOST_FIELD_NUMBER: builtins.int + PORT_FIELD_NUMBER: builtins.int + INITIAL_BACKOFF_MS_FIELD_NUMBER: builtins.int + MAX_RETRIES_FIELD_NUMBER: builtins.int + FLUSH_FREQUENCY_SECONDS_FIELD_NUMBER: builtins.int + SSL_FIELD_NUMBER: builtins.int + host: builtins.str + port: builtins.int + initial_backoff_ms: builtins.int + """Optional. The number of milliseconds to wait before retrying failed Redis connection. + By default, Feast uses exponential backoff policy and "initial_backoff_ms" sets the initial wait duration. + """ + max_retries: builtins.int + """Optional. 
Maximum total number of retries for connecting to Redis. Default to zero retries.""" + flush_frequency_seconds: builtins.int + """Optional. How often flush data to redis""" + ssl: builtins.bool + """Optional. Connect over SSL.""" + def __init__( + self, + *, + host: builtins.str = ..., + port: builtins.int = ..., + initial_backoff_ms: builtins.int = ..., + max_retries: builtins.int = ..., + flush_frequency_seconds: builtins.int = ..., + ssl: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["flush_frequency_seconds", b"flush_frequency_seconds", "host", b"host", "initial_backoff_ms", b"initial_backoff_ms", "max_retries", b"max_retries", "port", b"port", "ssl", b"ssl"]) -> None: ... + + class RedisClusterConfig(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _ReadFrom: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _ReadFromEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Store.RedisClusterConfig._ReadFrom.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + MASTER: Store.RedisClusterConfig._ReadFrom.ValueType # 0 + MASTER_PREFERRED: Store.RedisClusterConfig._ReadFrom.ValueType # 1 + REPLICA: Store.RedisClusterConfig._ReadFrom.ValueType # 2 + REPLICA_PREFERRED: Store.RedisClusterConfig._ReadFrom.ValueType # 3 + + class ReadFrom(_ReadFrom, metaclass=_ReadFromEnumTypeWrapper): + """Optional. 
Priority of nodes when reading from cluster""" + + MASTER: Store.RedisClusterConfig.ReadFrom.ValueType # 0 + MASTER_PREFERRED: Store.RedisClusterConfig.ReadFrom.ValueType # 1 + REPLICA: Store.RedisClusterConfig.ReadFrom.ValueType # 2 + REPLICA_PREFERRED: Store.RedisClusterConfig.ReadFrom.ValueType # 3 + + CONNECTION_STRING_FIELD_NUMBER: builtins.int + INITIAL_BACKOFF_MS_FIELD_NUMBER: builtins.int + MAX_RETRIES_FIELD_NUMBER: builtins.int + FLUSH_FREQUENCY_SECONDS_FIELD_NUMBER: builtins.int + KEY_PREFIX_FIELD_NUMBER: builtins.int + ENABLE_FALLBACK_FIELD_NUMBER: builtins.int + FALLBACK_PREFIX_FIELD_NUMBER: builtins.int + READ_FROM_FIELD_NUMBER: builtins.int + connection_string: builtins.str + """List of Redis Uri for all the nodes in Redis Cluster, comma separated. Eg. host1:6379, host2:6379""" + initial_backoff_ms: builtins.int + max_retries: builtins.int + flush_frequency_seconds: builtins.int + """Optional. How often flush data to redis""" + key_prefix: builtins.str + """Optional. Append a prefix to the Redis Key""" + enable_fallback: builtins.bool + """Optional. Enable fallback to another key prefix if the original key is not present. + Useful for migrating key prefix without re-ingestion. Disabled by default. + """ + fallback_prefix: builtins.str + """Optional. This would be the fallback prefix to use if enable_fallback is true.""" + read_from: global___Store.RedisClusterConfig.ReadFrom.ValueType + def __init__( + self, + *, + connection_string: builtins.str = ..., + initial_backoff_ms: builtins.int = ..., + max_retries: builtins.int = ..., + flush_frequency_seconds: builtins.int = ..., + key_prefix: builtins.str = ..., + enable_fallback: builtins.bool = ..., + fallback_prefix: builtins.str = ..., + read_from: global___Store.RedisClusterConfig.ReadFrom.ValueType = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["connection_string", b"connection_string", "enable_fallback", b"enable_fallback", "fallback_prefix", b"fallback_prefix", "flush_frequency_seconds", b"flush_frequency_seconds", "initial_backoff_ms", b"initial_backoff_ms", "key_prefix", b"key_prefix", "max_retries", b"max_retries", "read_from", b"read_from"]) -> None: ... + + class Subscription(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + EXCLUDE_FIELD_NUMBER: builtins.int + project: builtins.str + """Name of project that the feature sets belongs to. This can be one of + - [project_name] + - * + If an asterisk is provided, filtering on projects will be disabled. All projects will + be matched. It is NOT possible to provide an asterisk with a string in order to do + pattern matching. + """ + name: builtins.str + """Name of the desired feature set. Asterisks can be used as wildcards in the name. + Matching on names is only permitted if a specific project is defined. It is disallowed + If the project name is set to "*" + e.g. + - * can be used to match all feature sets + - my-feature-set* can be used to match all features prefixed by "my-feature-set" + - my-feature-set-6 can be used to select a single feature set + """ + exclude: builtins.bool + """All matches with exclude enabled will be filtered out instead of added""" + def __init__( + self, + *, + project: builtins.str = ..., + name: builtins.str = ..., + exclude: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["exclude", b"exclude", "name", b"name", "project", b"project"]) -> None: ... 
+ + NAME_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + SUBSCRIPTIONS_FIELD_NUMBER: builtins.int + REDIS_CONFIG_FIELD_NUMBER: builtins.int + REDIS_CLUSTER_CONFIG_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the store.""" + type: global___Store.StoreType.ValueType + """Type of store.""" + @property + def subscriptions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Store.Subscription]: + """Feature sets to subscribe to.""" + @property + def redis_config(self) -> global___Store.RedisConfig: ... + @property + def redis_cluster_config(self) -> global___Store.RedisClusterConfig: ... + def __init__( + self, + *, + name: builtins.str = ..., + type: global___Store.StoreType.ValueType = ..., + subscriptions: collections.abc.Iterable[global___Store.Subscription] | None = ..., + redis_config: global___Store.RedisConfig | None = ..., + redis_cluster_config: global___Store.RedisClusterConfig | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["config", b"config", "redis_cluster_config", b"redis_cluster_config", "redis_config", b"redis_config"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["config", b"config", "name", b"name", "redis_cluster_config", b"redis_cluster_config", "redis_config", b"redis_config", "subscriptions", b"subscriptions", "type", b"type"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["config", b"config"]) -> typing_extensions.Literal["redis_config", "redis_cluster_config"] | None: ... + +global___Store = Store diff --git a/sdk/python/feast/protos/feast/core/Store_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Store_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Store_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.py b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.py new file mode 100644 index 0000000000..ba19088edd --- /dev/null +++ b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/StreamFeatureView.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from feast.protos.feast.core import OnDemandFeatureView_pb2 as feast_dot_core_dot_OnDemandFeatureView__pb2 +from feast.protos.feast.core import FeatureView_pb2 as feast_dot_core_dot_FeatureView__pb2 +from feast.protos.feast.core import Feature_pb2 as feast_dot_core_dot_Feature__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 +from feast.protos.feast.core import Aggregation_pb2 as feast_dot_core_dot_Aggregation__pb2 +from feast.protos.feast.core import Transformation_pb2 as feast_dot_core_dot_Transformation__pb2 + + +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\"feast/core/StreamFeatureView.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/duration.proto\x1a$feast/core/OnDemandFeatureView.proto\x1a\x1c\x66\x65\x61st/core/FeatureView.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x1c\x66\x65\x61st/core/Aggregation.proto\x1a\x1f\x66\x65\x61st/core/Transformation.proto\"o\n\x11StreamFeatureView\x12/\n\x04spec\x18\x01 \x01(\x0b\x32!.feast.core.StreamFeatureViewSpec\x12)\n\x04meta\x18\x02 \x01(\x0b\x32\x1b.feast.core.FeatureViewMeta\"\xa8\x05\n\x15StreamFeatureViewSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x10\n\x08\x65ntities\x18\x03 \x03(\t\x12+\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x31\n\x0e\x65ntity_columns\x18\x05 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x39\n\x04tags\x18\x07 \x03(\x0b\x32+.feast.core.StreamFeatureViewSpec.TagsEntry\x12\r\n\x05owner\x18\x08 \x01(\t\x12&\n\x03ttl\x18\t \x01(\x0b\x32\x19.google.protobuf.Duration\x12,\n\x0c\x62\x61tch_source\x18\n \x01(\x0b\x32\x16.feast.core.DataSource\x12-\n\rstream_source\x18\x0b \x01(\x0b\x32\x16.feast.core.DataSource\x12\x0e\n\x06online\x18\x0c \x01(\x08\x12\x42\n\x15user_defined_function\x18\r \x01(\x0b\x32\x1f.feast.core.UserDefinedFunctionB\x02\x18\x01\x12\x0c\n\x04mode\x18\x0e \x01(\t\x12-\n\x0c\x61ggregations\x18\x0f \x03(\x0b\x32\x17.feast.core.Aggregation\x12\x17\n\x0ftimestamp_field\x18\x10 \x01(\t\x12\x43\n\x16\x66\x65\x61ture_transformation\x18\x11 \x01(\x0b\x32#.feast.core.FeatureTransformationV2\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42[\n\x10\x66\x65\x61st.proto.coreB\x16StreamFeatureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.StreamFeatureView_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\026StreamFeatureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_STREAMFEATUREVIEWSPEC_TAGSENTRY']._options = None + _globals['_STREAMFEATUREVIEWSPEC_TAGSENTRY']._serialized_options = b'8\001' + _globals['_STREAMFEATUREVIEWSPEC'].fields_by_name['user_defined_function']._options = None + _globals['_STREAMFEATUREVIEWSPEC'].fields_by_name['user_defined_function']._serialized_options = b'\030\001' + _globals['_STREAMFEATUREVIEW']._serialized_start=268 + _globals['_STREAMFEATUREVIEW']._serialized_end=379 + _globals['_STREAMFEATUREVIEWSPEC']._serialized_start=382 + _globals['_STREAMFEATUREVIEWSPEC']._serialized_end=1062 + _globals['_STREAMFEATUREVIEWSPEC_TAGSENTRY']._serialized_start=1019 + _globals['_STREAMFEATUREVIEWSPEC_TAGSENTRY']._serialized_end=1062 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.pyi b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.pyi new file mode 100644 index 0000000000..70e897a2f2 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.pyi @@ -0,0 +1,170 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2020 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import feast.core.Aggregation_pb2 +import feast.core.DataSource_pb2 +import feast.core.FeatureView_pb2 +import feast.core.Feature_pb2 +import feast.core.OnDemandFeatureView_pb2 +import feast.core.Transformation_pb2 +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class StreamFeatureView(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___StreamFeatureViewSpec: + """User-specified specifications of this feature view.""" + @property + def meta(self) -> feast.core.FeatureView_pb2.FeatureViewMeta: ... + def __init__( + self, + *, + spec: global___StreamFeatureViewSpec | None = ..., + meta: feast.core.FeatureView_pb2.FeatureViewMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... + +global___StreamFeatureView = StreamFeatureView + +class StreamFeatureViewSpec(google.protobuf.message.Message): + """Next available id: 17""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ENTITIES_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + ENTITY_COLUMNS_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + OWNER_FIELD_NUMBER: builtins.int + TTL_FIELD_NUMBER: builtins.int + BATCH_SOURCE_FIELD_NUMBER: builtins.int + STREAM_SOURCE_FIELD_NUMBER: builtins.int + ONLINE_FIELD_NUMBER: builtins.int + USER_DEFINED_FUNCTION_FIELD_NUMBER: builtins.int + MODE_FIELD_NUMBER: builtins.int + AGGREGATIONS_FIELD_NUMBER: builtins.int + TIMESTAMP_FIELD_FIELD_NUMBER: builtins.int + FEATURE_TRANSFORMATION_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the feature view. Must be unique. Not updated.""" + project: builtins.str + """Name of Feast project that this feature view belongs to.""" + @property + def entities(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """List of names of entities associated with this feature view.""" + @property + def features(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """List of specifications for each feature defined as part of this feature view.""" + @property + def entity_columns(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """List of specifications for each entity defined as part of this feature view.""" + description: builtins.str + """Description of the feature view.""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + owner: builtins.str + """Owner of the feature view.""" + @property + def ttl(self) -> google.protobuf.duration_pb2.Duration: + """Features in this feature view can only be 
retrieved from online serving + younger than ttl. Ttl is measured as the duration of time between + the feature's event timestamp and when the feature is retrieved + Feature values outside ttl will be returned as unset values and indicated to end user + """ + @property + def batch_source(self) -> feast.core.DataSource_pb2.DataSource: + """Batch/Offline DataSource where this view can retrieve offline feature data.""" + @property + def stream_source(self) -> feast.core.DataSource_pb2.DataSource: + """Streaming DataSource from where this view can consume "online" feature data.""" + online: builtins.bool + """Whether these features should be served online or not""" + @property + def user_defined_function(self) -> feast.core.OnDemandFeatureView_pb2.UserDefinedFunction: + """Serialized function that is encoded in the streamfeatureview""" + mode: builtins.str + """Mode of execution""" + @property + def aggregations(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Aggregation_pb2.Aggregation]: + """Aggregation definitions""" + timestamp_field: builtins.str + """Timestamp field for aggregation""" + @property + def feature_transformation(self) -> feast.core.Transformation_pb2.FeatureTransformationV2: + """Oneof with {user_defined_function, on_demand_substrait_transformation}""" + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + entities: collections.abc.Iterable[builtins.str] | None = ..., + features: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + entity_columns: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + description: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + owner: builtins.str = ..., + ttl: google.protobuf.duration_pb2.Duration | None = ..., + batch_source: feast.core.DataSource_pb2.DataSource | None = ..., + stream_source: feast.core.DataSource_pb2.DataSource | None = ..., + 
online: builtins.bool = ..., + user_defined_function: feast.core.OnDemandFeatureView_pb2.UserDefinedFunction | None = ..., + mode: builtins.str = ..., + aggregations: collections.abc.Iterable[feast.core.Aggregation_pb2.Aggregation] | None = ..., + timestamp_field: builtins.str = ..., + feature_transformation: feast.core.Transformation_pb2.FeatureTransformationV2 | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "feature_transformation", b"feature_transformation", "stream_source", b"stream_source", "ttl", b"ttl", "user_defined_function", b"user_defined_function"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["aggregations", b"aggregations", "batch_source", b"batch_source", "description", b"description", "entities", b"entities", "entity_columns", b"entity_columns", "feature_transformation", b"feature_transformation", "features", b"features", "mode", b"mode", "name", b"name", "online", b"online", "owner", b"owner", "project", b"project", "stream_source", b"stream_source", "tags", b"tags", "timestamp_field", b"timestamp_field", "ttl", b"ttl", "user_defined_function", b"user_defined_function"]) -> None: ... + +global___StreamFeatureViewSpec = StreamFeatureViewSpec diff --git a/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2_grpc.py b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Transformation_pb2.py b/sdk/python/feast/protos/feast/core/Transformation_pb2.py new file mode 100644 index 0000000000..9fd11d3026 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Transformation_pb2.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/Transformation.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x66\x65\x61st/core/Transformation.proto\x12\nfeast.core\"F\n\x15UserDefinedFunctionV2\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x62ody\x18\x02 \x01(\x0c\x12\x11\n\tbody_text\x18\x03 \x01(\t\"\xba\x01\n\x17\x46\x65\x61tureTransformationV2\x12\x42\n\x15user_defined_function\x18\x01 \x01(\x0b\x32!.feast.core.UserDefinedFunctionV2H\x00\x12I\n\x18substrait_transformation\x18\x02 \x01(\x0b\x32%.feast.core.SubstraitTransformationV2H\x00\x42\x10\n\x0etransformation\"J\n\x19SubstraitTransformationV2\x12\x16\n\x0esubstrait_plan\x18\x01 \x01(\x0c\x12\x15\n\ribis_function\x18\x02 \x01(\x0c\x42_\n\x10\x66\x65\x61st.proto.coreB\x1a\x46\x65\x61tureTransformationProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Transformation_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + 
_globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\032FeatureTransformationProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_USERDEFINEDFUNCTIONV2']._serialized_start=47 + _globals['_USERDEFINEDFUNCTIONV2']._serialized_end=117 + _globals['_FEATURETRANSFORMATIONV2']._serialized_start=120 + _globals['_FEATURETRANSFORMATIONV2']._serialized_end=306 + _globals['_SUBSTRAITTRANSFORMATIONV2']._serialized_start=308 + _globals['_SUBSTRAITTRANSFORMATIONV2']._serialized_end=382 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Transformation_pb2.pyi b/sdk/python/feast/protos/feast/core/Transformation_pb2.pyi new file mode 100644 index 0000000000..1120c447e0 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Transformation_pb2.pyi @@ -0,0 +1,80 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class UserDefinedFunctionV2(google.protobuf.message.Message): + """Serialized representation of python function.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + BODY_FIELD_NUMBER: builtins.int + BODY_TEXT_FIELD_NUMBER: builtins.int + name: builtins.str + """The function name""" + body: builtins.bytes + """The python-syntax function body (serialized by dill)""" + body_text: builtins.str + """The string representation of the udf""" + def __init__( + self, + *, + name: builtins.str = ..., + body: builtins.bytes = ..., + body_text: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["body", b"body", "body_text", b"body_text", "name", b"name"]) -> None: ... 
+ +global___UserDefinedFunctionV2 = UserDefinedFunctionV2 + +class FeatureTransformationV2(google.protobuf.message.Message): + """A feature transformation executed as a user-defined function""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + USER_DEFINED_FUNCTION_FIELD_NUMBER: builtins.int + SUBSTRAIT_TRANSFORMATION_FIELD_NUMBER: builtins.int + @property + def user_defined_function(self) -> global___UserDefinedFunctionV2: ... + @property + def substrait_transformation(self) -> global___SubstraitTransformationV2: ... + def __init__( + self, + *, + user_defined_function: global___UserDefinedFunctionV2 | None = ..., + substrait_transformation: global___SubstraitTransformationV2 | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["substrait_transformation", b"substrait_transformation", "transformation", b"transformation", "user_defined_function", b"user_defined_function"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["substrait_transformation", b"substrait_transformation", "transformation", b"transformation", "user_defined_function", b"user_defined_function"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["transformation", b"transformation"]) -> typing_extensions.Literal["user_defined_function", "substrait_transformation"] | None: ... + +global___FeatureTransformationV2 = FeatureTransformationV2 + +class SubstraitTransformationV2(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SUBSTRAIT_PLAN_FIELD_NUMBER: builtins.int + IBIS_FUNCTION_FIELD_NUMBER: builtins.int + substrait_plan: builtins.bytes + ibis_function: builtins.bytes + def __init__( + self, + *, + substrait_plan: builtins.bytes = ..., + ibis_function: builtins.bytes = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["ibis_function", b"ibis_function", "substrait_plan", b"substrait_plan"]) -> None: ... 
+ +global___SubstraitTransformationV2 = SubstraitTransformationV2 diff --git a/sdk/python/feast/protos/feast/core/Transformation_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Transformation_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Transformation_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/ValidationProfile_pb2.py b/sdk/python/feast/protos/feast/core/ValidationProfile_pb2.py new file mode 100644 index 0000000000..0fb27ceab1 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/ValidationProfile_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/ValidationProfile.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"feast/core/ValidationProfile.proto\x12\nfeast.core\"\x83\x01\n\x14GEValidationProfiler\x12\x46\n\x08profiler\x18\x01 \x01(\x0b\x32\x34.feast.core.GEValidationProfiler.UserDefinedProfiler\x1a#\n\x13UserDefinedProfiler\x12\x0c\n\x04\x62ody\x18\x01 \x01(\x0c\"0\n\x13GEValidationProfile\x12\x19\n\x11\x65xpectation_suite\x18\x01 \x01(\x0c\"\xdd\x02\n\x13ValidationReference\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1e\n\x16reference_dataset_name\x18\x02 \x01(\t\x12\x0f\n\x07project\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x37\n\x04tags\x18\x05 
\x03(\x0b\x32).feast.core.ValidationReference.TagsEntry\x12\x37\n\x0bge_profiler\x18\x06 \x01(\x0b\x32 .feast.core.GEValidationProfilerH\x00\x12\x35\n\nge_profile\x18\x07 \x01(\x0b\x32\x1f.feast.core.GEValidationProfileH\x01\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08profilerB\x10\n\x0e\x63\x61\x63hed_profileBV\n\x10\x66\x65\x61st.proto.coreB\x11ValidationProfileZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.ValidationProfile_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\021ValidationProfileZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_VALIDATIONREFERENCE_TAGSENTRY']._options = None + _globals['_VALIDATIONREFERENCE_TAGSENTRY']._serialized_options = b'8\001' + _globals['_GEVALIDATIONPROFILER']._serialized_start=51 + _globals['_GEVALIDATIONPROFILER']._serialized_end=182 + _globals['_GEVALIDATIONPROFILER_USERDEFINEDPROFILER']._serialized_start=147 + _globals['_GEVALIDATIONPROFILER_USERDEFINEDPROFILER']._serialized_end=182 + _globals['_GEVALIDATIONPROFILE']._serialized_start=184 + _globals['_GEVALIDATIONPROFILE']._serialized_end=232 + _globals['_VALIDATIONREFERENCE']._serialized_start=235 + _globals['_VALIDATIONREFERENCE']._serialized_end=584 + _globals['_VALIDATIONREFERENCE_TAGSENTRY']._serialized_start=511 + _globals['_VALIDATIONREFERENCE_TAGSENTRY']._serialized_end=554 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/ValidationProfile_pb2.pyi b/sdk/python/feast/protos/feast/core/ValidationProfile_pb2.pyi new file mode 100644 index 0000000000..93da1e0f5e --- /dev/null +++ b/sdk/python/feast/protos/feast/core/ValidationProfile_pb2.pyi @@ -0,0 +1,136 @@ 
+""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2021 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class GEValidationProfiler(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class UserDefinedProfiler(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BODY_FIELD_NUMBER: builtins.int + body: builtins.bytes + """The python-syntax function body (serialized by dill)""" + def __init__( + self, + *, + body: builtins.bytes = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["body", b"body"]) -> None: ... + + PROFILER_FIELD_NUMBER: builtins.int + @property + def profiler(self) -> global___GEValidationProfiler.UserDefinedProfiler: ... + def __init__( + self, + *, + profiler: global___GEValidationProfiler.UserDefinedProfiler | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["profiler", b"profiler"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["profiler", b"profiler"]) -> None: ... 
+ +global___GEValidationProfiler = GEValidationProfiler + +class GEValidationProfile(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + EXPECTATION_SUITE_FIELD_NUMBER: builtins.int + expectation_suite: builtins.bytes + """JSON-serialized ExpectationSuite object""" + def __init__( + self, + *, + expectation_suite: builtins.bytes = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["expectation_suite", b"expectation_suite"]) -> None: ... + +global___GEValidationProfile = GEValidationProfile + +class ValidationReference(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + REFERENCE_DATASET_NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + GE_PROFILER_FIELD_NUMBER: builtins.int + GE_PROFILE_FIELD_NUMBER: builtins.int + name: builtins.str + """Unique name of validation reference within the project""" + reference_dataset_name: builtins.str + """Name of saved dataset used as reference dataset""" + project: builtins.str + """Name of Feast project that this object source belongs to""" + description: builtins.str + """Description of the validation reference""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + @property + def ge_profiler(self) -> global___GEValidationProfiler: ... 
+ @property + def ge_profile(self) -> global___GEValidationProfile: ... + def __init__( + self, + *, + name: builtins.str = ..., + reference_dataset_name: builtins.str = ..., + project: builtins.str = ..., + description: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ge_profiler: global___GEValidationProfiler | None = ..., + ge_profile: global___GEValidationProfile | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["cached_profile", b"cached_profile", "ge_profile", b"ge_profile", "ge_profiler", b"ge_profiler", "profiler", b"profiler"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["cached_profile", b"cached_profile", "description", b"description", "ge_profile", b"ge_profile", "ge_profiler", b"ge_profiler", "name", b"name", "profiler", b"profiler", "project", b"project", "reference_dataset_name", b"reference_dataset_name", "tags", b"tags"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing_extensions.Literal["cached_profile", b"cached_profile"]) -> typing_extensions.Literal["ge_profile"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing_extensions.Literal["profiler", b"profiler"]) -> typing_extensions.Literal["ge_profiler"] | None: ... + +global___ValidationReference = ValidationReference diff --git a/sdk/python/feast/protos/feast/core/ValidationProfile_pb2_grpc.py b/sdk/python/feast/protos/feast/core/ValidationProfile_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/ValidationProfile_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/__init__.py b/sdk/python/feast/protos/feast/core/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/protos/feast/registry/RegistryServer_pb2.py b/sdk/python/feast/protos/feast/registry/RegistryServer_pb2.py new file mode 100644 index 0000000000..e0cae3da4b --- /dev/null +++ b/sdk/python/feast/protos/feast/registry/RegistryServer_pb2.py @@ -0,0 +1,198 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/registry/RegistryServer.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import Registry_pb2 as feast_dot_core_dot_Registry__pb2 +from feast.protos.feast.core import Entity_pb2 as feast_dot_core_dot_Entity__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 +from feast.protos.feast.core import FeatureView_pb2 as feast_dot_core_dot_FeatureView__pb2 +from feast.protos.feast.core import StreamFeatureView_pb2 as feast_dot_core_dot_StreamFeatureView__pb2 +from feast.protos.feast.core import OnDemandFeatureView_pb2 as feast_dot_core_dot_OnDemandFeatureView__pb2 +from feast.protos.feast.core import FeatureService_pb2 as feast_dot_core_dot_FeatureService__pb2 +from feast.protos.feast.core import SavedDataset_pb2 as feast_dot_core_dot_SavedDataset__pb2 +from 
feast.protos.feast.core import ValidationProfile_pb2 as feast_dot_core_dot_ValidationProfile__pb2 +from feast.protos.feast.core import InfraObject_pb2 as feast_dot_core_dot_InfraObject__pb2 +from feast.protos.feast.core import Permission_pb2 as feast_dot_core_dot_Permission__pb2 +from feast.protos.feast.core import Project_pb2 as feast_dot_core_dot_Project__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n#feast/registry/RegistryServer.proto\x12\x0e\x66\x65\x61st.registry\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x19\x66\x65\x61st/core/Registry.proto\x1a\x17\x66\x65\x61st/core/Entity.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x1c\x66\x65\x61st/core/FeatureView.proto\x1a\"feast/core/StreamFeatureView.proto\x1a$feast/core/OnDemandFeatureView.proto\x1a\x1f\x66\x65\x61st/core/FeatureService.proto\x1a\x1d\x66\x65\x61st/core/SavedDataset.proto\x1a\"feast/core/ValidationProfile.proto\x1a\x1c\x66\x65\x61st/core/InfraObject.proto\x1a\x1b\x66\x65\x61st/core/Permission.proto\x1a\x18\x66\x65\x61st/core/Project.proto\"!\n\x0eRefreshRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\"W\n\x12UpdateInfraRequest\x12 \n\x05infra\x18\x01 \x01(\x0b\x32\x11.feast.core.Infra\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"7\n\x0fGetInfraRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\"B\n\x1aListProjectMetadataRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\"T\n\x1bListProjectMetadataResponse\x12\x35\n\x10project_metadata\x18\x01 \x03(\x0b\x32\x1b.feast.core.ProjectMetadata\"\xcb\x01\n\x1b\x41pplyMaterializationRequest\x12-\n\x0c\x66\x65\x61ture_view\x18\x01 \x01(\x0b\x32\x17.feast.core.FeatureView\x12\x0f\n\x07project\x18\x02 \x01(\t\x12.\n\nstart_date\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_date\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0e\n\x06\x63ommit\x18\x05 
\x01(\x08\"Y\n\x12\x41pplyEntityRequest\x12\"\n\x06\x65ntity\x18\x01 \x01(\x0b\x32\x12.feast.core.Entity\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"F\n\x10GetEntityRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xa5\x01\n\x13ListEntitiesRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12;\n\x04tags\x18\x03 \x03(\x0b\x32-.feast.registry.ListEntitiesRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"<\n\x14ListEntitiesResponse\x12$\n\x08\x65ntities\x18\x01 \x03(\x0b\x32\x12.feast.core.Entity\"D\n\x13\x44\x65leteEntityRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"f\n\x16\x41pplyDataSourceRequest\x12+\n\x0b\x64\x61ta_source\x18\x01 \x01(\x0b\x32\x16.feast.core.DataSource\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"J\n\x14GetDataSourceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xab\x01\n\x16ListDataSourcesRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12>\n\x04tags\x18\x03 \x03(\x0b\x32\x30.feast.registry.ListDataSourcesRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"G\n\x17ListDataSourcesResponse\x12,\n\x0c\x64\x61ta_sources\x18\x01 \x03(\x0b\x32\x16.feast.core.DataSource\"H\n\x17\x44\x65leteDataSourceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"\x81\x02\n\x17\x41pplyFeatureViewRequest\x12/\n\x0c\x66\x65\x61ture_view\x18\x01 \x01(\x0b\x32\x17.feast.core.FeatureViewH\x00\x12\x41\n\x16on_demand_feature_view\x18\x02 
\x01(\x0b\x32\x1f.feast.core.OnDemandFeatureViewH\x00\x12<\n\x13stream_feature_view\x18\x03 \x01(\x0b\x32\x1d.feast.core.StreamFeatureViewH\x00\x12\x0f\n\x07project\x18\x04 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x05 \x01(\x08\x42\x13\n\x11\x62\x61se_feature_view\"K\n\x15GetFeatureViewRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xad\x01\n\x17ListFeatureViewsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12?\n\x04tags\x18\x03 \x03(\x0b\x32\x31.feast.registry.ListFeatureViewsRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"J\n\x18ListFeatureViewsResponse\x12.\n\rfeature_views\x18\x01 \x03(\x0b\x32\x17.feast.core.FeatureView\"I\n\x18\x44\x65leteFeatureViewRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"\xd6\x01\n\x0e\x41nyFeatureView\x12/\n\x0c\x66\x65\x61ture_view\x18\x01 \x01(\x0b\x32\x17.feast.core.FeatureViewH\x00\x12\x41\n\x16on_demand_feature_view\x18\x02 \x01(\x0b\x32\x1f.feast.core.OnDemandFeatureViewH\x00\x12<\n\x13stream_feature_view\x18\x03 \x01(\x0b\x32\x1d.feast.core.StreamFeatureViewH\x00\x42\x12\n\x10\x61ny_feature_view\"N\n\x18GetAnyFeatureViewRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"U\n\x19GetAnyFeatureViewResponse\x12\x38\n\x10\x61ny_feature_view\x18\x01 \x01(\x0b\x32\x1e.feast.registry.AnyFeatureView\"\xb3\x01\n\x1aListAllFeatureViewsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12\x42\n\x04tags\x18\x03 \x03(\x0b\x32\x34.feast.registry.ListAllFeatureViewsRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"T\n\x1bListAllFeatureViewsResponse\x12\x35\n\rfeature_views\x18\x01 
\x03(\x0b\x32\x1e.feast.registry.AnyFeatureView\"Q\n\x1bGetStreamFeatureViewRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xb9\x01\n\x1dListStreamFeatureViewsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12\x45\n\x04tags\x18\x03 \x03(\x0b\x32\x37.feast.registry.ListStreamFeatureViewsRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x1eListStreamFeatureViewsResponse\x12;\n\x14stream_feature_views\x18\x01 \x03(\x0b\x32\x1d.feast.core.StreamFeatureView\"S\n\x1dGetOnDemandFeatureViewRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xbd\x01\n\x1fListOnDemandFeatureViewsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12G\n\x04tags\x18\x03 \x03(\x0b\x32\x39.feast.registry.ListOnDemandFeatureViewsRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"d\n ListOnDemandFeatureViewsResponse\x12@\n\x17on_demand_feature_views\x18\x01 \x03(\x0b\x32\x1f.feast.core.OnDemandFeatureView\"r\n\x1a\x41pplyFeatureServiceRequest\x12\x33\n\x0f\x66\x65\x61ture_service\x18\x01 \x01(\x0b\x32\x1a.feast.core.FeatureService\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"N\n\x18GetFeatureServiceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xb3\x01\n\x1aListFeatureServicesRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12\x42\n\x04tags\x18\x03 \x03(\x0b\x32\x34.feast.registry.ListFeatureServicesRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\"S\n\x1bListFeatureServicesResponse\x12\x34\n\x10\x66\x65\x61ture_services\x18\x01 \x03(\x0b\x32\x1a.feast.core.FeatureService\"L\n\x1b\x44\x65leteFeatureServiceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"l\n\x18\x41pplySavedDatasetRequest\x12/\n\rsaved_dataset\x18\x01 \x01(\x0b\x32\x18.feast.core.SavedDataset\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"L\n\x16GetSavedDatasetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xaf\x01\n\x18ListSavedDatasetsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12@\n\x04tags\x18\x03 \x03(\x0b\x32\x32.feast.registry.ListSavedDatasetsRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"M\n\x19ListSavedDatasetsResponse\x12\x30\n\x0esaved_datasets\x18\x01 \x03(\x0b\x32\x18.feast.core.SavedDataset\"J\n\x19\x44\x65leteSavedDatasetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"\x81\x01\n\x1f\x41pplyValidationReferenceRequest\x12=\n\x14validation_reference\x18\x01 \x01(\x0b\x32\x1f.feast.core.ValidationReference\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"S\n\x1dGetValidationReferenceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xbd\x01\n\x1fListValidationReferencesRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12G\n\x04tags\x18\x03 \x03(\x0b\x32\x39.feast.registry.ListValidationReferencesRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"b\n ListValidationReferencesResponse\x12>\n\x15validation_references\x18\x01 
\x03(\x0b\x32\x1f.feast.core.ValidationReference\"Q\n DeleteValidationReferenceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"e\n\x16\x41pplyPermissionRequest\x12*\n\npermission\x18\x01 \x01(\x0b\x32\x16.feast.core.Permission\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"J\n\x14GetPermissionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xab\x01\n\x16ListPermissionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12>\n\x04tags\x18\x03 \x03(\x0b\x32\x30.feast.registry.ListPermissionsRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"F\n\x17ListPermissionsResponse\x12+\n\x0bpermissions\x18\x01 \x03(\x0b\x32\x16.feast.core.Permission\"H\n\x17\x44\x65letePermissionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"K\n\x13\x41pplyProjectRequest\x12$\n\x07project\x18\x01 \x01(\x0b\x32\x13.feast.core.Project\x12\x0e\n\x06\x63ommit\x18\x02 \x01(\x08\"6\n\x11GetProjectRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\"\x94\x01\n\x13ListProjectsRequest\x12\x13\n\x0b\x61llow_cache\x18\x01 \x01(\x08\x12;\n\x04tags\x18\x02 \x03(\x0b\x32-.feast.registry.ListProjectsRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"=\n\x14ListProjectsResponse\x12%\n\x08projects\x18\x01 \x03(\x0b\x32\x13.feast.core.Project\"4\n\x14\x44\x65leteProjectRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x02 \x01(\x08\x32\xcb \n\x0eRegistryServer\x12K\n\x0b\x41pplyEntity\x12\".feast.registry.ApplyEntityRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x43\n\tGetEntity\x12 
.feast.registry.GetEntityRequest\x1a\x12.feast.core.Entity\"\x00\x12[\n\x0cListEntities\x12#.feast.registry.ListEntitiesRequest\x1a$.feast.registry.ListEntitiesResponse\"\x00\x12M\n\x0c\x44\x65leteEntity\x12#.feast.registry.DeleteEntityRequest\x1a\x16.google.protobuf.Empty\"\x00\x12S\n\x0f\x41pplyDataSource\x12&.feast.registry.ApplyDataSourceRequest\x1a\x16.google.protobuf.Empty\"\x00\x12O\n\rGetDataSource\x12$.feast.registry.GetDataSourceRequest\x1a\x16.feast.core.DataSource\"\x00\x12\x64\n\x0fListDataSources\x12&.feast.registry.ListDataSourcesRequest\x1a\'.feast.registry.ListDataSourcesResponse\"\x00\x12U\n\x10\x44\x65leteDataSource\x12\'.feast.registry.DeleteDataSourceRequest\x1a\x16.google.protobuf.Empty\"\x00\x12U\n\x10\x41pplyFeatureView\x12\'.feast.registry.ApplyFeatureViewRequest\x1a\x16.google.protobuf.Empty\"\x00\x12W\n\x11\x44\x65leteFeatureView\x12(.feast.registry.DeleteFeatureViewRequest\x1a\x16.google.protobuf.Empty\"\x00\x12j\n\x11GetAnyFeatureView\x12(.feast.registry.GetAnyFeatureViewRequest\x1a).feast.registry.GetAnyFeatureViewResponse\"\x00\x12p\n\x13ListAllFeatureViews\x12*.feast.registry.ListAllFeatureViewsRequest\x1a+.feast.registry.ListAllFeatureViewsResponse\"\x00\x12R\n\x0eGetFeatureView\x12%.feast.registry.GetFeatureViewRequest\x1a\x17.feast.core.FeatureView\"\x00\x12g\n\x10ListFeatureViews\x12\'.feast.registry.ListFeatureViewsRequest\x1a(.feast.registry.ListFeatureViewsResponse\"\x00\x12\x64\n\x14GetStreamFeatureView\x12+.feast.registry.GetStreamFeatureViewRequest\x1a\x1d.feast.core.StreamFeatureView\"\x00\x12y\n\x16ListStreamFeatureViews\x12-.feast.registry.ListStreamFeatureViewsRequest\x1a..feast.registry.ListStreamFeatureViewsResponse\"\x00\x12j\n\x16GetOnDemandFeatureView\x12-.feast.registry.GetOnDemandFeatureViewRequest\x1a\x1f.feast.core.OnDemandFeatureView\"\x00\x12\x7f\n\x18ListOnDemandFeatureViews\x12/.feast.registry.ListOnDemandFeatureViewsRequest\x1a\x30.feast.registry.ListOnDemandFeatureViewsResponse\"\x00\x12[\n\x13\x41pplyFeat
ureService\x12*.feast.registry.ApplyFeatureServiceRequest\x1a\x16.google.protobuf.Empty\"\x00\x12[\n\x11GetFeatureService\x12(.feast.registry.GetFeatureServiceRequest\x1a\x1a.feast.core.FeatureService\"\x00\x12p\n\x13ListFeatureServices\x12*.feast.registry.ListFeatureServicesRequest\x1a+.feast.registry.ListFeatureServicesResponse\"\x00\x12]\n\x14\x44\x65leteFeatureService\x12+.feast.registry.DeleteFeatureServiceRequest\x1a\x16.google.protobuf.Empty\"\x00\x12W\n\x11\x41pplySavedDataset\x12(.feast.registry.ApplySavedDatasetRequest\x1a\x16.google.protobuf.Empty\"\x00\x12U\n\x0fGetSavedDataset\x12&.feast.registry.GetSavedDatasetRequest\x1a\x18.feast.core.SavedDataset\"\x00\x12j\n\x11ListSavedDatasets\x12(.feast.registry.ListSavedDatasetsRequest\x1a).feast.registry.ListSavedDatasetsResponse\"\x00\x12Y\n\x12\x44\x65leteSavedDataset\x12).feast.registry.DeleteSavedDatasetRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x65\n\x18\x41pplyValidationReference\x12/.feast.registry.ApplyValidationReferenceRequest\x1a\x16.google.protobuf.Empty\"\x00\x12j\n\x16GetValidationReference\x12-.feast.registry.GetValidationReferenceRequest\x1a\x1f.feast.core.ValidationReference\"\x00\x12\x7f\n\x18ListValidationReferences\x12/.feast.registry.ListValidationReferencesRequest\x1a\x30.feast.registry.ListValidationReferencesResponse\"\x00\x12g\n\x19\x44\x65leteValidationReference\x12\x30.feast.registry.DeleteValidationReferenceRequest\x1a\x16.google.protobuf.Empty\"\x00\x12S\n\x0f\x41pplyPermission\x12&.feast.registry.ApplyPermissionRequest\x1a\x16.google.protobuf.Empty\"\x00\x12O\n\rGetPermission\x12$.feast.registry.GetPermissionRequest\x1a\x16.feast.core.Permission\"\x00\x12\x64\n\x0fListPermissions\x12&.feast.registry.ListPermissionsRequest\x1a\'.feast.registry.ListPermissionsResponse\"\x00\x12U\n\x10\x44\x65letePermission\x12\'.feast.registry.DeletePermissionRequest\x1a\x16.google.protobuf.Empty\"\x00\x12M\n\x0c\x41pplyProject\x12#.feast.registry.ApplyProjectRequest\x1a\x16.google.protobuf.Emp
ty\"\x00\x12\x46\n\nGetProject\x12!.feast.registry.GetProjectRequest\x1a\x13.feast.core.Project\"\x00\x12[\n\x0cListProjects\x12#.feast.registry.ListProjectsRequest\x1a$.feast.registry.ListProjectsResponse\"\x00\x12O\n\rDeleteProject\x12$.feast.registry.DeleteProjectRequest\x1a\x16.google.protobuf.Empty\"\x00\x12]\n\x14\x41pplyMaterialization\x12+.feast.registry.ApplyMaterializationRequest\x1a\x16.google.protobuf.Empty\"\x00\x12p\n\x13ListProjectMetadata\x12*.feast.registry.ListProjectMetadataRequest\x1a+.feast.registry.ListProjectMetadataResponse\"\x00\x12K\n\x0bUpdateInfra\x12\".feast.registry.UpdateInfraRequest\x1a\x16.google.protobuf.Empty\"\x00\x12@\n\x08GetInfra\x12\x1f.feast.registry.GetInfraRequest\x1a\x11.feast.core.Infra\"\x00\x12:\n\x06\x43ommit\x12\x16.google.protobuf.Empty\x1a\x16.google.protobuf.Empty\"\x00\x12\x43\n\x07Refresh\x12\x1e.feast.registry.RefreshRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x37\n\x05Proto\x12\x16.google.protobuf.Empty\x1a\x14.feast.core.Registry\"\x00\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.registry.RegistryServer_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_LISTENTITIESREQUEST_TAGSENTRY']._options = None + _globals['_LISTENTITIESREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTDATASOURCESREQUEST_TAGSENTRY']._options = None + _globals['_LISTDATASOURCESREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTFEATUREVIEWSREQUEST_TAGSENTRY']._options = None + _globals['_LISTFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTALLFEATUREVIEWSREQUEST_TAGSENTRY']._options = None + _globals['_LISTALLFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTSTREAMFEATUREVIEWSREQUEST_TAGSENTRY']._options = None + 
_globals['_LISTSTREAMFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTONDEMANDFEATUREVIEWSREQUEST_TAGSENTRY']._options = None + _globals['_LISTONDEMANDFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTFEATURESERVICESREQUEST_TAGSENTRY']._options = None + _globals['_LISTFEATURESERVICESREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTSAVEDDATASETSREQUEST_TAGSENTRY']._options = None + _globals['_LISTSAVEDDATASETSREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTVALIDATIONREFERENCESREQUEST_TAGSENTRY']._options = None + _globals['_LISTVALIDATIONREFERENCESREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTPERMISSIONSREQUEST_TAGSENTRY']._options = None + _globals['_LISTPERMISSIONSREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTPROJECTSREQUEST_TAGSENTRY']._options = None + _globals['_LISTPROJECTSREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_REFRESHREQUEST']._serialized_start=487 + _globals['_REFRESHREQUEST']._serialized_end=520 + _globals['_UPDATEINFRAREQUEST']._serialized_start=522 + _globals['_UPDATEINFRAREQUEST']._serialized_end=609 + _globals['_GETINFRAREQUEST']._serialized_start=611 + _globals['_GETINFRAREQUEST']._serialized_end=666 + _globals['_LISTPROJECTMETADATAREQUEST']._serialized_start=668 + _globals['_LISTPROJECTMETADATAREQUEST']._serialized_end=734 + _globals['_LISTPROJECTMETADATARESPONSE']._serialized_start=736 + _globals['_LISTPROJECTMETADATARESPONSE']._serialized_end=820 + _globals['_APPLYMATERIALIZATIONREQUEST']._serialized_start=823 + _globals['_APPLYMATERIALIZATIONREQUEST']._serialized_end=1026 + _globals['_APPLYENTITYREQUEST']._serialized_start=1028 + _globals['_APPLYENTITYREQUEST']._serialized_end=1117 + _globals['_GETENTITYREQUEST']._serialized_start=1119 + _globals['_GETENTITYREQUEST']._serialized_end=1189 + _globals['_LISTENTITIESREQUEST']._serialized_start=1192 + 
_globals['_LISTENTITIESREQUEST']._serialized_end=1357 + _globals['_LISTENTITIESREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTENTITIESREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTENTITIESRESPONSE']._serialized_start=1359 + _globals['_LISTENTITIESRESPONSE']._serialized_end=1419 + _globals['_DELETEENTITYREQUEST']._serialized_start=1421 + _globals['_DELETEENTITYREQUEST']._serialized_end=1489 + _globals['_APPLYDATASOURCEREQUEST']._serialized_start=1491 + _globals['_APPLYDATASOURCEREQUEST']._serialized_end=1593 + _globals['_GETDATASOURCEREQUEST']._serialized_start=1595 + _globals['_GETDATASOURCEREQUEST']._serialized_end=1669 + _globals['_LISTDATASOURCESREQUEST']._serialized_start=1672 + _globals['_LISTDATASOURCESREQUEST']._serialized_end=1843 + _globals['_LISTDATASOURCESREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTDATASOURCESREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTDATASOURCESRESPONSE']._serialized_start=1845 + _globals['_LISTDATASOURCESRESPONSE']._serialized_end=1916 + _globals['_DELETEDATASOURCEREQUEST']._serialized_start=1918 + _globals['_DELETEDATASOURCEREQUEST']._serialized_end=1990 + _globals['_APPLYFEATUREVIEWREQUEST']._serialized_start=1993 + _globals['_APPLYFEATUREVIEWREQUEST']._serialized_end=2250 + _globals['_GETFEATUREVIEWREQUEST']._serialized_start=2252 + _globals['_GETFEATUREVIEWREQUEST']._serialized_end=2327 + _globals['_LISTFEATUREVIEWSREQUEST']._serialized_start=2330 + _globals['_LISTFEATUREVIEWSREQUEST']._serialized_end=2503 + _globals['_LISTFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTFEATUREVIEWSRESPONSE']._serialized_start=2505 + _globals['_LISTFEATUREVIEWSRESPONSE']._serialized_end=2579 + _globals['_DELETEFEATUREVIEWREQUEST']._serialized_start=2581 + _globals['_DELETEFEATUREVIEWREQUEST']._serialized_end=2654 + _globals['_ANYFEATUREVIEW']._serialized_start=2657 + 
_globals['_ANYFEATUREVIEW']._serialized_end=2871 + _globals['_GETANYFEATUREVIEWREQUEST']._serialized_start=2873 + _globals['_GETANYFEATUREVIEWREQUEST']._serialized_end=2951 + _globals['_GETANYFEATUREVIEWRESPONSE']._serialized_start=2953 + _globals['_GETANYFEATUREVIEWRESPONSE']._serialized_end=3038 + _globals['_LISTALLFEATUREVIEWSREQUEST']._serialized_start=3041 + _globals['_LISTALLFEATUREVIEWSREQUEST']._serialized_end=3220 + _globals['_LISTALLFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTALLFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTALLFEATUREVIEWSRESPONSE']._serialized_start=3222 + _globals['_LISTALLFEATUREVIEWSRESPONSE']._serialized_end=3306 + _globals['_GETSTREAMFEATUREVIEWREQUEST']._serialized_start=3308 + _globals['_GETSTREAMFEATUREVIEWREQUEST']._serialized_end=3389 + _globals['_LISTSTREAMFEATUREVIEWSREQUEST']._serialized_start=3392 + _globals['_LISTSTREAMFEATUREVIEWSREQUEST']._serialized_end=3577 + _globals['_LISTSTREAMFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTSTREAMFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTSTREAMFEATUREVIEWSRESPONSE']._serialized_start=3579 + _globals['_LISTSTREAMFEATUREVIEWSRESPONSE']._serialized_end=3672 + _globals['_GETONDEMANDFEATUREVIEWREQUEST']._serialized_start=3674 + _globals['_GETONDEMANDFEATUREVIEWREQUEST']._serialized_end=3757 + _globals['_LISTONDEMANDFEATUREVIEWSREQUEST']._serialized_start=3760 + _globals['_LISTONDEMANDFEATUREVIEWSREQUEST']._serialized_end=3949 + _globals['_LISTONDEMANDFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTONDEMANDFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTONDEMANDFEATUREVIEWSRESPONSE']._serialized_start=3951 + _globals['_LISTONDEMANDFEATUREVIEWSRESPONSE']._serialized_end=4051 + _globals['_APPLYFEATURESERVICEREQUEST']._serialized_start=4053 + _globals['_APPLYFEATURESERVICEREQUEST']._serialized_end=4167 + 
_globals['_GETFEATURESERVICEREQUEST']._serialized_start=4169 + _globals['_GETFEATURESERVICEREQUEST']._serialized_end=4247 + _globals['_LISTFEATURESERVICESREQUEST']._serialized_start=4250 + _globals['_LISTFEATURESERVICESREQUEST']._serialized_end=4429 + _globals['_LISTFEATURESERVICESREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTFEATURESERVICESREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTFEATURESERVICESRESPONSE']._serialized_start=4431 + _globals['_LISTFEATURESERVICESRESPONSE']._serialized_end=4514 + _globals['_DELETEFEATURESERVICEREQUEST']._serialized_start=4516 + _globals['_DELETEFEATURESERVICEREQUEST']._serialized_end=4592 + _globals['_APPLYSAVEDDATASETREQUEST']._serialized_start=4594 + _globals['_APPLYSAVEDDATASETREQUEST']._serialized_end=4702 + _globals['_GETSAVEDDATASETREQUEST']._serialized_start=4704 + _globals['_GETSAVEDDATASETREQUEST']._serialized_end=4780 + _globals['_LISTSAVEDDATASETSREQUEST']._serialized_start=4783 + _globals['_LISTSAVEDDATASETSREQUEST']._serialized_end=4958 + _globals['_LISTSAVEDDATASETSREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTSAVEDDATASETSREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTSAVEDDATASETSRESPONSE']._serialized_start=4960 + _globals['_LISTSAVEDDATASETSRESPONSE']._serialized_end=5037 + _globals['_DELETESAVEDDATASETREQUEST']._serialized_start=5039 + _globals['_DELETESAVEDDATASETREQUEST']._serialized_end=5113 + _globals['_APPLYVALIDATIONREFERENCEREQUEST']._serialized_start=5116 + _globals['_APPLYVALIDATIONREFERENCEREQUEST']._serialized_end=5245 + _globals['_GETVALIDATIONREFERENCEREQUEST']._serialized_start=5247 + _globals['_GETVALIDATIONREFERENCEREQUEST']._serialized_end=5330 + _globals['_LISTVALIDATIONREFERENCESREQUEST']._serialized_start=5333 + _globals['_LISTVALIDATIONREFERENCESREQUEST']._serialized_end=5522 + _globals['_LISTVALIDATIONREFERENCESREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTVALIDATIONREFERENCESREQUEST_TAGSENTRY']._serialized_end=1357 + 
_globals['_LISTVALIDATIONREFERENCESRESPONSE']._serialized_start=5524 + _globals['_LISTVALIDATIONREFERENCESRESPONSE']._serialized_end=5622 + _globals['_DELETEVALIDATIONREFERENCEREQUEST']._serialized_start=5624 + _globals['_DELETEVALIDATIONREFERENCEREQUEST']._serialized_end=5705 + _globals['_APPLYPERMISSIONREQUEST']._serialized_start=5707 + _globals['_APPLYPERMISSIONREQUEST']._serialized_end=5808 + _globals['_GETPERMISSIONREQUEST']._serialized_start=5810 + _globals['_GETPERMISSIONREQUEST']._serialized_end=5884 + _globals['_LISTPERMISSIONSREQUEST']._serialized_start=5887 + _globals['_LISTPERMISSIONSREQUEST']._serialized_end=6058 + _globals['_LISTPERMISSIONSREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTPERMISSIONSREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTPERMISSIONSRESPONSE']._serialized_start=6060 + _globals['_LISTPERMISSIONSRESPONSE']._serialized_end=6130 + _globals['_DELETEPERMISSIONREQUEST']._serialized_start=6132 + _globals['_DELETEPERMISSIONREQUEST']._serialized_end=6204 + _globals['_APPLYPROJECTREQUEST']._serialized_start=6206 + _globals['_APPLYPROJECTREQUEST']._serialized_end=6281 + _globals['_GETPROJECTREQUEST']._serialized_start=6283 + _globals['_GETPROJECTREQUEST']._serialized_end=6337 + _globals['_LISTPROJECTSREQUEST']._serialized_start=6340 + _globals['_LISTPROJECTSREQUEST']._serialized_end=6488 + _globals['_LISTPROJECTSREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTPROJECTSREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTPROJECTSRESPONSE']._serialized_start=6490 + _globals['_LISTPROJECTSRESPONSE']._serialized_end=6551 + _globals['_DELETEPROJECTREQUEST']._serialized_start=6553 + _globals['_DELETEPROJECTREQUEST']._serialized_end=6605 + _globals['_REGISTRYSERVER']._serialized_start=6608 + _globals['_REGISTRYSERVER']._serialized_end=10779 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/registry/RegistryServer_pb2.pyi 
b/sdk/python/feast/protos/feast/registry/RegistryServer_pb2.pyi new file mode 100644 index 0000000000..f4507c02e2 --- /dev/null +++ b/sdk/python/feast/protos/feast/registry/RegistryServer_pb2.pyi @@ -0,0 +1,1318 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import builtins +import collections.abc +import feast.core.DataSource_pb2 +import feast.core.Entity_pb2 +import feast.core.FeatureService_pb2 +import feast.core.FeatureView_pb2 +import feast.core.InfraObject_pb2 +import feast.core.OnDemandFeatureView_pb2 +import feast.core.Permission_pb2 +import feast.core.Project_pb2 +import feast.core.Registry_pb2 +import feast.core.SavedDataset_pb2 +import feast.core.StreamFeatureView_pb2 +import feast.core.ValidationProfile_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class RefreshRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + project: builtins.str + def __init__( + self, + *, + project: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["project", b"project"]) -> None: ... + +global___RefreshRequest = RefreshRequest + +class UpdateInfraRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + INFRA_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def infra(self) -> feast.core.InfraObject_pb2.Infra: ... + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + infra: feast.core.InfraObject_pb2.Infra | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["infra", b"infra"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "infra", b"infra", "project", b"project"]) -> None: ... + +global___UpdateInfraRequest = UpdateInfraRequest + +class GetInfraRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project"]) -> None: ... + +global___GetInfraRequest = GetInfraRequest + +class ListProjectMetadataRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project"]) -> None: ... + +global___ListProjectMetadataRequest = ListProjectMetadataRequest + +class ListProjectMetadataResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_METADATA_FIELD_NUMBER: builtins.int + @property + def project_metadata(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Registry_pb2.ProjectMetadata]: ... + def __init__( + self, + *, + project_metadata: collections.abc.Iterable[feast.core.Registry_pb2.ProjectMetadata] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["project_metadata", b"project_metadata"]) -> None: ... 
+ +global___ListProjectMetadataResponse = ListProjectMetadataResponse + +class ApplyMaterializationRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_VIEW_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + START_DATE_FIELD_NUMBER: builtins.int + END_DATE_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def feature_view(self) -> feast.core.FeatureView_pb2.FeatureView: ... + project: builtins.str + @property + def start_date(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def end_date(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + commit: builtins.bool + def __init__( + self, + *, + feature_view: feast.core.FeatureView_pb2.FeatureView | None = ..., + project: builtins.str = ..., + start_date: google.protobuf.timestamp_pb2.Timestamp | None = ..., + end_date: google.protobuf.timestamp_pb2.Timestamp | None = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["end_date", b"end_date", "feature_view", b"feature_view", "start_date", b"start_date"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "end_date", b"end_date", "feature_view", b"feature_view", "project", b"project", "start_date", b"start_date"]) -> None: ... + +global___ApplyMaterializationRequest = ApplyMaterializationRequest + +class ApplyEntityRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ENTITY_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def entity(self) -> feast.core.Entity_pb2.Entity: ... + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + entity: feast.core.Entity_pb2.Entity | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["entity", b"entity"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "entity", b"entity", "project", b"project"]) -> None: ... + +global___ApplyEntityRequest = ApplyEntityRequest + +class GetEntityRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetEntityRequest = GetEntityRequest + +class ListEntitiesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListEntitiesRequest = ListEntitiesRequest + +class ListEntitiesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ENTITIES_FIELD_NUMBER: builtins.int + @property + def entities(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Entity_pb2.Entity]: ... + def __init__( + self, + *, + entities: collections.abc.Iterable[feast.core.Entity_pb2.Entity] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entities", b"entities"]) -> None: ... + +global___ListEntitiesResponse = ListEntitiesResponse + +class DeleteEntityRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name", "project", b"project"]) -> None: ... + +global___DeleteEntityRequest = DeleteEntityRequest + +class ApplyDataSourceRequest(google.protobuf.message.Message): + """DataSources""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DATA_SOURCE_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def data_source(self) -> feast.core.DataSource_pb2.DataSource: ... + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + data_source: feast.core.DataSource_pb2.DataSource | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["data_source", b"data_source"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "data_source", b"data_source", "project", b"project"]) -> None: ... + +global___ApplyDataSourceRequest = ApplyDataSourceRequest + +class GetDataSourceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetDataSourceRequest = GetDataSourceRequest + +class ListDataSourcesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListDataSourcesRequest = ListDataSourcesRequest + +class ListDataSourcesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DATA_SOURCES_FIELD_NUMBER: builtins.int + @property + def data_sources(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.DataSource_pb2.DataSource]: ... + def __init__( + self, + *, + data_sources: collections.abc.Iterable[feast.core.DataSource_pb2.DataSource] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data_sources", b"data_sources"]) -> None: ... + +global___ListDataSourcesResponse = ListDataSourcesResponse + +class DeleteDataSourceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name", "project", b"project"]) -> None: ... + +global___DeleteDataSourceRequest = DeleteDataSourceRequest + +class ApplyFeatureViewRequest(google.protobuf.message.Message): + """FeatureViews""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_VIEW_FIELD_NUMBER: builtins.int + ON_DEMAND_FEATURE_VIEW_FIELD_NUMBER: builtins.int + STREAM_FEATURE_VIEW_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def feature_view(self) -> feast.core.FeatureView_pb2.FeatureView: ... 
+ @property + def on_demand_feature_view(self) -> feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView: ... + @property + def stream_feature_view(self) -> feast.core.StreamFeatureView_pb2.StreamFeatureView: ... + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + feature_view: feast.core.FeatureView_pb2.FeatureView | None = ..., + on_demand_feature_view: feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView | None = ..., + stream_feature_view: feast.core.StreamFeatureView_pb2.StreamFeatureView | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["base_feature_view", b"base_feature_view", "feature_view", b"feature_view", "on_demand_feature_view", b"on_demand_feature_view", "stream_feature_view", b"stream_feature_view"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["base_feature_view", b"base_feature_view", "commit", b"commit", "feature_view", b"feature_view", "on_demand_feature_view", b"on_demand_feature_view", "project", b"project", "stream_feature_view", b"stream_feature_view"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["base_feature_view", b"base_feature_view"]) -> typing_extensions.Literal["feature_view", "on_demand_feature_view", "stream_feature_view"] | None: ... + +global___ApplyFeatureViewRequest = ApplyFeatureViewRequest + +class GetFeatureViewRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetFeatureViewRequest = GetFeatureViewRequest + +class ListFeatureViewsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListFeatureViewsRequest = ListFeatureViewsRequest + +class ListFeatureViewsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_VIEWS_FIELD_NUMBER: builtins.int + @property + def feature_views(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.FeatureView_pb2.FeatureView]: ... + def __init__( + self, + *, + feature_views: collections.abc.Iterable[feast.core.FeatureView_pb2.FeatureView] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["feature_views", b"feature_views"]) -> None: ... 
+ +global___ListFeatureViewsResponse = ListFeatureViewsResponse + +class DeleteFeatureViewRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name", "project", b"project"]) -> None: ... + +global___DeleteFeatureViewRequest = DeleteFeatureViewRequest + +class AnyFeatureView(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_VIEW_FIELD_NUMBER: builtins.int + ON_DEMAND_FEATURE_VIEW_FIELD_NUMBER: builtins.int + STREAM_FEATURE_VIEW_FIELD_NUMBER: builtins.int + @property + def feature_view(self) -> feast.core.FeatureView_pb2.FeatureView: ... + @property + def on_demand_feature_view(self) -> feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView: ... + @property + def stream_feature_view(self) -> feast.core.StreamFeatureView_pb2.StreamFeatureView: ... + def __init__( + self, + *, + feature_view: feast.core.FeatureView_pb2.FeatureView | None = ..., + on_demand_feature_view: feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView | None = ..., + stream_feature_view: feast.core.StreamFeatureView_pb2.StreamFeatureView | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["any_feature_view", b"any_feature_view", "feature_view", b"feature_view", "on_demand_feature_view", b"on_demand_feature_view", "stream_feature_view", b"stream_feature_view"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["any_feature_view", b"any_feature_view", "feature_view", b"feature_view", "on_demand_feature_view", b"on_demand_feature_view", "stream_feature_view", b"stream_feature_view"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["any_feature_view", b"any_feature_view"]) -> typing_extensions.Literal["feature_view", "on_demand_feature_view", "stream_feature_view"] | None: ... + +global___AnyFeatureView = AnyFeatureView + +class GetAnyFeatureViewRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetAnyFeatureViewRequest = GetAnyFeatureViewRequest + +class GetAnyFeatureViewResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ANY_FEATURE_VIEW_FIELD_NUMBER: builtins.int + @property + def any_feature_view(self) -> global___AnyFeatureView: ... + def __init__( + self, + *, + any_feature_view: global___AnyFeatureView | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["any_feature_view", b"any_feature_view"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["any_feature_view", b"any_feature_view"]) -> None: ... 
+ +global___GetAnyFeatureViewResponse = GetAnyFeatureViewResponse + +class ListAllFeatureViewsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListAllFeatureViewsRequest = ListAllFeatureViewsRequest + +class ListAllFeatureViewsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_VIEWS_FIELD_NUMBER: builtins.int + @property + def feature_views(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AnyFeatureView]: ... + def __init__( + self, + *, + feature_views: collections.abc.Iterable[global___AnyFeatureView] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["feature_views", b"feature_views"]) -> None: ... 
+ +global___ListAllFeatureViewsResponse = ListAllFeatureViewsResponse + +class GetStreamFeatureViewRequest(google.protobuf.message.Message): + """StreamFeatureView""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetStreamFeatureViewRequest = GetStreamFeatureViewRequest + +class ListStreamFeatureViewsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... 
+ +global___ListStreamFeatureViewsRequest = ListStreamFeatureViewsRequest + +class ListStreamFeatureViewsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STREAM_FEATURE_VIEWS_FIELD_NUMBER: builtins.int + @property + def stream_feature_views(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.StreamFeatureView_pb2.StreamFeatureView]: ... + def __init__( + self, + *, + stream_feature_views: collections.abc.Iterable[feast.core.StreamFeatureView_pb2.StreamFeatureView] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["stream_feature_views", b"stream_feature_views"]) -> None: ... + +global___ListStreamFeatureViewsResponse = ListStreamFeatureViewsResponse + +class GetOnDemandFeatureViewRequest(google.protobuf.message.Message): + """OnDemandFeatureView""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetOnDemandFeatureViewRequest = GetOnDemandFeatureViewRequest + +class ListOnDemandFeatureViewsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListOnDemandFeatureViewsRequest = ListOnDemandFeatureViewsRequest + +class ListOnDemandFeatureViewsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ON_DEMAND_FEATURE_VIEWS_FIELD_NUMBER: builtins.int + @property + def on_demand_feature_views(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView]: ... + def __init__( + self, + *, + on_demand_feature_views: collections.abc.Iterable[feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["on_demand_feature_views", b"on_demand_feature_views"]) -> None: ... + +global___ListOnDemandFeatureViewsResponse = ListOnDemandFeatureViewsResponse + +class ApplyFeatureServiceRequest(google.protobuf.message.Message): + """FeatureServices""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_SERVICE_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def feature_service(self) -> feast.core.FeatureService_pb2.FeatureService: ... 
+ project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + feature_service: feast.core.FeatureService_pb2.FeatureService | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["feature_service", b"feature_service"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "feature_service", b"feature_service", "project", b"project"]) -> None: ... + +global___ApplyFeatureServiceRequest = ApplyFeatureServiceRequest + +class GetFeatureServiceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetFeatureServiceRequest = GetFeatureServiceRequest + +class ListFeatureServicesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListFeatureServicesRequest = ListFeatureServicesRequest + +class ListFeatureServicesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_SERVICES_FIELD_NUMBER: builtins.int + @property + def feature_services(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.FeatureService_pb2.FeatureService]: ... + def __init__( + self, + *, + feature_services: collections.abc.Iterable[feast.core.FeatureService_pb2.FeatureService] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["feature_services", b"feature_services"]) -> None: ... + +global___ListFeatureServicesResponse = ListFeatureServicesResponse + +class DeleteFeatureServiceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name", "project", b"project"]) -> None: ... 
+ +global___DeleteFeatureServiceRequest = DeleteFeatureServiceRequest + +class ApplySavedDatasetRequest(google.protobuf.message.Message): + """SavedDataset""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SAVED_DATASET_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def saved_dataset(self) -> feast.core.SavedDataset_pb2.SavedDataset: ... + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + saved_dataset: feast.core.SavedDataset_pb2.SavedDataset | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["saved_dataset", b"saved_dataset"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "project", b"project", "saved_dataset", b"saved_dataset"]) -> None: ... + +global___ApplySavedDatasetRequest = ApplySavedDatasetRequest + +class GetSavedDatasetRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... 
+ +global___GetSavedDatasetRequest = GetSavedDatasetRequest + +class ListSavedDatasetsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListSavedDatasetsRequest = ListSavedDatasetsRequest + +class ListSavedDatasetsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SAVED_DATASETS_FIELD_NUMBER: builtins.int + @property + def saved_datasets(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.SavedDataset_pb2.SavedDataset]: ... + def __init__( + self, + *, + saved_datasets: collections.abc.Iterable[feast.core.SavedDataset_pb2.SavedDataset] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["saved_datasets", b"saved_datasets"]) -> None: ... 
+ +global___ListSavedDatasetsResponse = ListSavedDatasetsResponse + +class DeleteSavedDatasetRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name", "project", b"project"]) -> None: ... + +global___DeleteSavedDatasetRequest = DeleteSavedDatasetRequest + +class ApplyValidationReferenceRequest(google.protobuf.message.Message): + """ValidationReference""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALIDATION_REFERENCE_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def validation_reference(self) -> feast.core.ValidationProfile_pb2.ValidationReference: ... + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + validation_reference: feast.core.ValidationProfile_pb2.ValidationReference | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["validation_reference", b"validation_reference"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "project", b"project", "validation_reference", b"validation_reference"]) -> None: ... 
+ +global___ApplyValidationReferenceRequest = ApplyValidationReferenceRequest + +class GetValidationReferenceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetValidationReferenceRequest = GetValidationReferenceRequest + +class ListValidationReferencesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... 
+ +global___ListValidationReferencesRequest = ListValidationReferencesRequest + +class ListValidationReferencesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALIDATION_REFERENCES_FIELD_NUMBER: builtins.int + @property + def validation_references(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.ValidationProfile_pb2.ValidationReference]: ... + def __init__( + self, + *, + validation_references: collections.abc.Iterable[feast.core.ValidationProfile_pb2.ValidationReference] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["validation_references", b"validation_references"]) -> None: ... + +global___ListValidationReferencesResponse = ListValidationReferencesResponse + +class DeleteValidationReferenceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name", "project", b"project"]) -> None: ... + +global___DeleteValidationReferenceRequest = DeleteValidationReferenceRequest + +class ApplyPermissionRequest(google.protobuf.message.Message): + """Permissions""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PERMISSION_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def permission(self) -> feast.core.Permission_pb2.Permission: ... 
+ project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + permission: feast.core.Permission_pb2.Permission | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["permission", b"permission"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "permission", b"permission", "project", b"project"]) -> None: ... + +global___ApplyPermissionRequest = ApplyPermissionRequest + +class GetPermissionRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetPermissionRequest = GetPermissionRequest + +class ListPermissionsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListPermissionsRequest = ListPermissionsRequest + +class ListPermissionsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PERMISSIONS_FIELD_NUMBER: builtins.int + @property + def permissions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Permission_pb2.Permission]: ... + def __init__( + self, + *, + permissions: collections.abc.Iterable[feast.core.Permission_pb2.Permission] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["permissions", b"permissions"]) -> None: ... + +global___ListPermissionsResponse = ListPermissionsResponse + +class DeletePermissionRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name", "project", b"project"]) -> None: ... 
+ +global___DeletePermissionRequest = DeletePermissionRequest + +class ApplyProjectRequest(google.protobuf.message.Message): + """Projects""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def project(self) -> feast.core.Project_pb2.Project: ... + commit: builtins.bool + def __init__( + self, + *, + project: feast.core.Project_pb2.Project | None = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["project", b"project"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "project", b"project"]) -> None: ... + +global___ApplyProjectRequest = ApplyProjectRequest + +class GetProjectRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name"]) -> None: ... + +global___GetProjectRequest = GetProjectRequest + +class ListProjectsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "tags", b"tags"]) -> None: ... + +global___ListProjectsRequest = ListProjectsRequest + +class ListProjectsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECTS_FIELD_NUMBER: builtins.int + @property + def projects(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Project_pb2.Project]: ... + def __init__( + self, + *, + projects: collections.abc.Iterable[feast.core.Project_pb2.Project] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["projects", b"projects"]) -> None: ... + +global___ListProjectsResponse = ListProjectsResponse + +class DeleteProjectRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name"]) -> None: ... + +global___DeleteProjectRequest = DeleteProjectRequest diff --git a/sdk/python/feast/protos/feast/registry/RegistryServer_pb2_grpc.py b/sdk/python/feast/protos/feast/registry/RegistryServer_pb2_grpc.py new file mode 100644 index 0000000000..bab23c4394 --- /dev/null +++ b/sdk/python/feast/protos/feast/registry/RegistryServer_pb2_grpc.py @@ -0,0 +1,1542 @@ +# Generated by the gRPC Python protocol compiler plugin. 
DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 +from feast.protos.feast.core import Entity_pb2 as feast_dot_core_dot_Entity__pb2 +from feast.protos.feast.core import FeatureService_pb2 as feast_dot_core_dot_FeatureService__pb2 +from feast.protos.feast.core import FeatureView_pb2 as feast_dot_core_dot_FeatureView__pb2 +from feast.protos.feast.core import InfraObject_pb2 as feast_dot_core_dot_InfraObject__pb2 +from feast.protos.feast.core import OnDemandFeatureView_pb2 as feast_dot_core_dot_OnDemandFeatureView__pb2 +from feast.protos.feast.core import Permission_pb2 as feast_dot_core_dot_Permission__pb2 +from feast.protos.feast.core import Project_pb2 as feast_dot_core_dot_Project__pb2 +from feast.protos.feast.core import Registry_pb2 as feast_dot_core_dot_Registry__pb2 +from feast.protos.feast.core import SavedDataset_pb2 as feast_dot_core_dot_SavedDataset__pb2 +from feast.protos.feast.core import StreamFeatureView_pb2 as feast_dot_core_dot_StreamFeatureView__pb2 +from feast.protos.feast.core import ValidationProfile_pb2 as feast_dot_core_dot_ValidationProfile__pb2 +from feast.protos.feast.registry import RegistryServer_pb2 as feast_dot_registry_dot_RegistryServer__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class RegistryServerStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.ApplyEntity = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyEntity', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyEntityRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetEntity = channel.unary_unary( + '/feast.registry.RegistryServer/GetEntity', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetEntityRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_Entity__pb2.Entity.FromString, + ) + self.ListEntities = channel.unary_unary( + '/feast.registry.RegistryServer/ListEntities', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListEntitiesRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListEntitiesResponse.FromString, + ) + self.DeleteEntity = channel.unary_unary( + '/feast.registry.RegistryServer/DeleteEntity', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteEntityRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ApplyDataSource = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyDataSource', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyDataSourceRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetDataSource = channel.unary_unary( + '/feast.registry.RegistryServer/GetDataSource', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetDataSourceRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_DataSource__pb2.DataSource.FromString, + ) + self.ListDataSources = channel.unary_unary( + '/feast.registry.RegistryServer/ListDataSources', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListDataSourcesRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListDataSourcesResponse.FromString, + ) + 
self.DeleteDataSource = channel.unary_unary( + '/feast.registry.RegistryServer/DeleteDataSource', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteDataSourceRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ApplyFeatureView = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyFeatureView', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyFeatureViewRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.DeleteFeatureView = channel.unary_unary( + '/feast.registry.RegistryServer/DeleteFeatureView', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteFeatureViewRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetAnyFeatureView = channel.unary_unary( + '/feast.registry.RegistryServer/GetAnyFeatureView', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetAnyFeatureViewRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetAnyFeatureViewResponse.FromString, + ) + self.ListAllFeatureViews = channel.unary_unary( + '/feast.registry.RegistryServer/ListAllFeatureViews', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListAllFeatureViewsRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListAllFeatureViewsResponse.FromString, + ) + self.GetFeatureView = channel.unary_unary( + '/feast.registry.RegistryServer/GetFeatureView', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetFeatureViewRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_FeatureView__pb2.FeatureView.FromString, + ) + self.ListFeatureViews = channel.unary_unary( + '/feast.registry.RegistryServer/ListFeatureViews', + 
request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureViewsRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureViewsResponse.FromString, + ) + self.GetStreamFeatureView = channel.unary_unary( + '/feast.registry.RegistryServer/GetStreamFeatureView', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetStreamFeatureViewRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_StreamFeatureView__pb2.StreamFeatureView.FromString, + ) + self.ListStreamFeatureViews = channel.unary_unary( + '/feast.registry.RegistryServer/ListStreamFeatureViews', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListStreamFeatureViewsRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListStreamFeatureViewsResponse.FromString, + ) + self.GetOnDemandFeatureView = channel.unary_unary( + '/feast.registry.RegistryServer/GetOnDemandFeatureView', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetOnDemandFeatureViewRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_OnDemandFeatureView__pb2.OnDemandFeatureView.FromString, + ) + self.ListOnDemandFeatureViews = channel.unary_unary( + '/feast.registry.RegistryServer/ListOnDemandFeatureViews', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListOnDemandFeatureViewsRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListOnDemandFeatureViewsResponse.FromString, + ) + self.ApplyFeatureService = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyFeatureService', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyFeatureServiceRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetFeatureService = channel.unary_unary( + '/feast.registry.RegistryServer/GetFeatureService', + 
request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetFeatureServiceRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_FeatureService__pb2.FeatureService.FromString, + ) + self.ListFeatureServices = channel.unary_unary( + '/feast.registry.RegistryServer/ListFeatureServices', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureServicesRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureServicesResponse.FromString, + ) + self.DeleteFeatureService = channel.unary_unary( + '/feast.registry.RegistryServer/DeleteFeatureService', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteFeatureServiceRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ApplySavedDataset = channel.unary_unary( + '/feast.registry.RegistryServer/ApplySavedDataset', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplySavedDatasetRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetSavedDataset = channel.unary_unary( + '/feast.registry.RegistryServer/GetSavedDataset', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetSavedDatasetRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_SavedDataset__pb2.SavedDataset.FromString, + ) + self.ListSavedDatasets = channel.unary_unary( + '/feast.registry.RegistryServer/ListSavedDatasets', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListSavedDatasetsRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListSavedDatasetsResponse.FromString, + ) + self.DeleteSavedDataset = channel.unary_unary( + '/feast.registry.RegistryServer/DeleteSavedDataset', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteSavedDatasetRequest.SerializeToString, + 
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ApplyValidationReference = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyValidationReference', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyValidationReferenceRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetValidationReference = channel.unary_unary( + '/feast.registry.RegistryServer/GetValidationReference', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetValidationReferenceRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_ValidationProfile__pb2.ValidationReference.FromString, + ) + self.ListValidationReferences = channel.unary_unary( + '/feast.registry.RegistryServer/ListValidationReferences', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListValidationReferencesRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListValidationReferencesResponse.FromString, + ) + self.DeleteValidationReference = channel.unary_unary( + '/feast.registry.RegistryServer/DeleteValidationReference', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteValidationReferenceRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ApplyPermission = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyPermission', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyPermissionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetPermission = channel.unary_unary( + '/feast.registry.RegistryServer/GetPermission', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetPermissionRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_Permission__pb2.Permission.FromString, + ) + self.ListPermissions = channel.unary_unary( + 
'/feast.registry.RegistryServer/ListPermissions', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListPermissionsRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListPermissionsResponse.FromString, + ) + self.DeletePermission = channel.unary_unary( + '/feast.registry.RegistryServer/DeletePermission', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeletePermissionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ApplyProject = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyProject', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyProjectRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetProject = channel.unary_unary( + '/feast.registry.RegistryServer/GetProject', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetProjectRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_Project__pb2.Project.FromString, + ) + self.ListProjects = channel.unary_unary( + '/feast.registry.RegistryServer/ListProjects', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectsRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectsResponse.FromString, + ) + self.DeleteProject = channel.unary_unary( + '/feast.registry.RegistryServer/DeleteProject', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteProjectRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ApplyMaterialization = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyMaterialization', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyMaterializationRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + 
self.ListProjectMetadata = channel.unary_unary( + '/feast.registry.RegistryServer/ListProjectMetadata', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectMetadataRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectMetadataResponse.FromString, + ) + self.UpdateInfra = channel.unary_unary( + '/feast.registry.RegistryServer/UpdateInfra', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.UpdateInfraRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetInfra = channel.unary_unary( + '/feast.registry.RegistryServer/GetInfra', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetInfraRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_InfraObject__pb2.Infra.FromString, + ) + self.Commit = channel.unary_unary( + '/feast.registry.RegistryServer/Commit', + request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Refresh = channel.unary_unary( + '/feast.registry.RegistryServer/Refresh', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.RefreshRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Proto = channel.unary_unary( + '/feast.registry.RegistryServer/Proto', + request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + response_deserializer=feast_dot_core_dot_Registry__pb2.Registry.FromString, + ) + + +class RegistryServerServicer(object): + """Missing associated documentation comment in .proto file.""" + + def ApplyEntity(self, request, context): + """Entity RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetEntity(self, request, context): + """Missing 
associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListEntities(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteEntity(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplyDataSource(self, request, context): + """DataSource RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetDataSource(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListDataSources(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteDataSource(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplyFeatureView(self, request, context): + """FeatureView RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not 
implemented!') + + def DeleteFeatureView(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetAnyFeatureView(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListAllFeatureViews(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetFeatureView(self, request, context): + """plain FeatureView RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListFeatureViews(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetStreamFeatureView(self, request, context): + """StreamFeatureView RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListStreamFeatureViews(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetOnDemandFeatureView(self, request, context): + """OnDemandFeatureView RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + 
context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListOnDemandFeatureViews(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplyFeatureService(self, request, context): + """FeatureService RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetFeatureService(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListFeatureServices(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteFeatureService(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplySavedDataset(self, request, context): + """SavedDataset RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSavedDataset(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSavedDatasets(self, request, context): + """Missing 
associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSavedDataset(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplyValidationReference(self, request, context): + """ValidationReference RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetValidationReference(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListValidationReferences(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteValidationReference(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplyPermission(self, request, context): + """Permission RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetPermission(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + 
raise NotImplementedError('Method not implemented!') + + def ListPermissions(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeletePermission(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplyProject(self, request, context): + """Project RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetProject(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListProjects(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteProject(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplyMaterialization(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListProjectMetadata(self, request, context): + """Missing associated documentation comment in .proto file.""" 
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateInfra(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetInfra(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Commit(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Refresh(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Proto(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_RegistryServerServicer_to_server(servicer, server): + rpc_method_handlers = { + 'ApplyEntity': grpc.unary_unary_rpc_method_handler( + servicer.ApplyEntity, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyEntityRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetEntity': grpc.unary_unary_rpc_method_handler( + servicer.GetEntity, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetEntityRequest.FromString, + 
response_serializer=feast_dot_core_dot_Entity__pb2.Entity.SerializeToString, + ), + 'ListEntities': grpc.unary_unary_rpc_method_handler( + servicer.ListEntities, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListEntitiesRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListEntitiesResponse.SerializeToString, + ), + 'DeleteEntity': grpc.unary_unary_rpc_method_handler( + servicer.DeleteEntity, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteEntityRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ApplyDataSource': grpc.unary_unary_rpc_method_handler( + servicer.ApplyDataSource, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyDataSourceRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetDataSource': grpc.unary_unary_rpc_method_handler( + servicer.GetDataSource, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetDataSourceRequest.FromString, + response_serializer=feast_dot_core_dot_DataSource__pb2.DataSource.SerializeToString, + ), + 'ListDataSources': grpc.unary_unary_rpc_method_handler( + servicer.ListDataSources, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListDataSourcesRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListDataSourcesResponse.SerializeToString, + ), + 'DeleteDataSource': grpc.unary_unary_rpc_method_handler( + servicer.DeleteDataSource, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteDataSourceRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ApplyFeatureView': grpc.unary_unary_rpc_method_handler( + servicer.ApplyFeatureView, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyFeatureViewRequest.FromString, + 
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'DeleteFeatureView': grpc.unary_unary_rpc_method_handler( + servicer.DeleteFeatureView, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteFeatureViewRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetAnyFeatureView': grpc.unary_unary_rpc_method_handler( + servicer.GetAnyFeatureView, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetAnyFeatureViewRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetAnyFeatureViewResponse.SerializeToString, + ), + 'ListAllFeatureViews': grpc.unary_unary_rpc_method_handler( + servicer.ListAllFeatureViews, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListAllFeatureViewsRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListAllFeatureViewsResponse.SerializeToString, + ), + 'GetFeatureView': grpc.unary_unary_rpc_method_handler( + servicer.GetFeatureView, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetFeatureViewRequest.FromString, + response_serializer=feast_dot_core_dot_FeatureView__pb2.FeatureView.SerializeToString, + ), + 'ListFeatureViews': grpc.unary_unary_rpc_method_handler( + servicer.ListFeatureViews, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureViewsRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureViewsResponse.SerializeToString, + ), + 'GetStreamFeatureView': grpc.unary_unary_rpc_method_handler( + servicer.GetStreamFeatureView, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetStreamFeatureViewRequest.FromString, + response_serializer=feast_dot_core_dot_StreamFeatureView__pb2.StreamFeatureView.SerializeToString, + ), + 'ListStreamFeatureViews': grpc.unary_unary_rpc_method_handler( + servicer.ListStreamFeatureViews, + 
request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListStreamFeatureViewsRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListStreamFeatureViewsResponse.SerializeToString, + ), + 'GetOnDemandFeatureView': grpc.unary_unary_rpc_method_handler( + servicer.GetOnDemandFeatureView, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetOnDemandFeatureViewRequest.FromString, + response_serializer=feast_dot_core_dot_OnDemandFeatureView__pb2.OnDemandFeatureView.SerializeToString, + ), + 'ListOnDemandFeatureViews': grpc.unary_unary_rpc_method_handler( + servicer.ListOnDemandFeatureViews, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListOnDemandFeatureViewsRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListOnDemandFeatureViewsResponse.SerializeToString, + ), + 'ApplyFeatureService': grpc.unary_unary_rpc_method_handler( + servicer.ApplyFeatureService, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyFeatureServiceRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetFeatureService': grpc.unary_unary_rpc_method_handler( + servicer.GetFeatureService, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetFeatureServiceRequest.FromString, + response_serializer=feast_dot_core_dot_FeatureService__pb2.FeatureService.SerializeToString, + ), + 'ListFeatureServices': grpc.unary_unary_rpc_method_handler( + servicer.ListFeatureServices, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureServicesRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureServicesResponse.SerializeToString, + ), + 'DeleteFeatureService': grpc.unary_unary_rpc_method_handler( + servicer.DeleteFeatureService, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteFeatureServiceRequest.FromString, + 
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ApplySavedDataset': grpc.unary_unary_rpc_method_handler( + servicer.ApplySavedDataset, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplySavedDatasetRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetSavedDataset': grpc.unary_unary_rpc_method_handler( + servicer.GetSavedDataset, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetSavedDatasetRequest.FromString, + response_serializer=feast_dot_core_dot_SavedDataset__pb2.SavedDataset.SerializeToString, + ), + 'ListSavedDatasets': grpc.unary_unary_rpc_method_handler( + servicer.ListSavedDatasets, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListSavedDatasetsRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListSavedDatasetsResponse.SerializeToString, + ), + 'DeleteSavedDataset': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSavedDataset, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteSavedDatasetRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ApplyValidationReference': grpc.unary_unary_rpc_method_handler( + servicer.ApplyValidationReference, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyValidationReferenceRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetValidationReference': grpc.unary_unary_rpc_method_handler( + servicer.GetValidationReference, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetValidationReferenceRequest.FromString, + response_serializer=feast_dot_core_dot_ValidationProfile__pb2.ValidationReference.SerializeToString, + ), + 'ListValidationReferences': grpc.unary_unary_rpc_method_handler( + servicer.ListValidationReferences, + 
request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListValidationReferencesRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListValidationReferencesResponse.SerializeToString, + ), + 'DeleteValidationReference': grpc.unary_unary_rpc_method_handler( + servicer.DeleteValidationReference, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteValidationReferenceRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ApplyPermission': grpc.unary_unary_rpc_method_handler( + servicer.ApplyPermission, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyPermissionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetPermission': grpc.unary_unary_rpc_method_handler( + servicer.GetPermission, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetPermissionRequest.FromString, + response_serializer=feast_dot_core_dot_Permission__pb2.Permission.SerializeToString, + ), + 'ListPermissions': grpc.unary_unary_rpc_method_handler( + servicer.ListPermissions, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListPermissionsRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListPermissionsResponse.SerializeToString, + ), + 'DeletePermission': grpc.unary_unary_rpc_method_handler( + servicer.DeletePermission, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeletePermissionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ApplyProject': grpc.unary_unary_rpc_method_handler( + servicer.ApplyProject, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyProjectRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetProject': grpc.unary_unary_rpc_method_handler( + servicer.GetProject, 
+ request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetProjectRequest.FromString, + response_serializer=feast_dot_core_dot_Project__pb2.Project.SerializeToString, + ), + 'ListProjects': grpc.unary_unary_rpc_method_handler( + servicer.ListProjects, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectsRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectsResponse.SerializeToString, + ), + 'DeleteProject': grpc.unary_unary_rpc_method_handler( + servicer.DeleteProject, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteProjectRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ApplyMaterialization': grpc.unary_unary_rpc_method_handler( + servicer.ApplyMaterialization, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyMaterializationRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListProjectMetadata': grpc.unary_unary_rpc_method_handler( + servicer.ListProjectMetadata, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectMetadataRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectMetadataResponse.SerializeToString, + ), + 'UpdateInfra': grpc.unary_unary_rpc_method_handler( + servicer.UpdateInfra, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.UpdateInfraRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetInfra': grpc.unary_unary_rpc_method_handler( + servicer.GetInfra, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetInfraRequest.FromString, + response_serializer=feast_dot_core_dot_InfraObject__pb2.Infra.SerializeToString, + ), + 'Commit': grpc.unary_unary_rpc_method_handler( + servicer.Commit, + 
request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Refresh': grpc.unary_unary_rpc_method_handler( + servicer.Refresh, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.RefreshRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Proto': grpc.unary_unary_rpc_method_handler( + servicer.Proto, + request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + response_serializer=feast_dot_core_dot_Registry__pb2.Registry.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'feast.registry.RegistryServer', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class RegistryServer(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def ApplyEntity(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyEntity', + feast_dot_registry_dot_RegistryServer__pb2.ApplyEntityRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetEntity(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetEntity', + feast_dot_registry_dot_RegistryServer__pb2.GetEntityRequest.SerializeToString, + feast_dot_core_dot_Entity__pb2.Entity.FromString, + options, 
channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListEntities(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListEntities', + feast_dot_registry_dot_RegistryServer__pb2.ListEntitiesRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListEntitiesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteEntity(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/DeleteEntity', + feast_dot_registry_dot_RegistryServer__pb2.DeleteEntityRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplyDataSource(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyDataSource', + feast_dot_registry_dot_RegistryServer__pb2.ApplyDataSourceRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetDataSource(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + 
compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetDataSource', + feast_dot_registry_dot_RegistryServer__pb2.GetDataSourceRequest.SerializeToString, + feast_dot_core_dot_DataSource__pb2.DataSource.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListDataSources(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListDataSources', + feast_dot_registry_dot_RegistryServer__pb2.ListDataSourcesRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListDataSourcesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteDataSource(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/DeleteDataSource', + feast_dot_registry_dot_RegistryServer__pb2.DeleteDataSourceRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplyFeatureView(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyFeatureView', + 
feast_dot_registry_dot_RegistryServer__pb2.ApplyFeatureViewRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteFeatureView(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/DeleteFeatureView', + feast_dot_registry_dot_RegistryServer__pb2.DeleteFeatureViewRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetAnyFeatureView(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetAnyFeatureView', + feast_dot_registry_dot_RegistryServer__pb2.GetAnyFeatureViewRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.GetAnyFeatureViewResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListAllFeatureViews(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListAllFeatureViews', + feast_dot_registry_dot_RegistryServer__pb2.ListAllFeatureViewsRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListAllFeatureViewsResponse.FromString, + options, channel_credentials, + 
insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetFeatureView(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetFeatureView', + feast_dot_registry_dot_RegistryServer__pb2.GetFeatureViewRequest.SerializeToString, + feast_dot_core_dot_FeatureView__pb2.FeatureView.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListFeatureViews(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListFeatureViews', + feast_dot_registry_dot_RegistryServer__pb2.ListFeatureViewsRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListFeatureViewsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetStreamFeatureView(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetStreamFeatureView', + feast_dot_registry_dot_RegistryServer__pb2.GetStreamFeatureViewRequest.SerializeToString, + feast_dot_core_dot_StreamFeatureView__pb2.StreamFeatureView.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListStreamFeatureViews(request, + target, + options=(), + channel_credentials=None, + 
call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListStreamFeatureViews', + feast_dot_registry_dot_RegistryServer__pb2.ListStreamFeatureViewsRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListStreamFeatureViewsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetOnDemandFeatureView(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetOnDemandFeatureView', + feast_dot_registry_dot_RegistryServer__pb2.GetOnDemandFeatureViewRequest.SerializeToString, + feast_dot_core_dot_OnDemandFeatureView__pb2.OnDemandFeatureView.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListOnDemandFeatureViews(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListOnDemandFeatureViews', + feast_dot_registry_dot_RegistryServer__pb2.ListOnDemandFeatureViewsRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListOnDemandFeatureViewsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplyFeatureService(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + 
metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyFeatureService', + feast_dot_registry_dot_RegistryServer__pb2.ApplyFeatureServiceRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetFeatureService(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetFeatureService', + feast_dot_registry_dot_RegistryServer__pb2.GetFeatureServiceRequest.SerializeToString, + feast_dot_core_dot_FeatureService__pb2.FeatureService.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListFeatureServices(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListFeatureServices', + feast_dot_registry_dot_RegistryServer__pb2.ListFeatureServicesRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListFeatureServicesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteFeatureService(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/DeleteFeatureService', + 
feast_dot_registry_dot_RegistryServer__pb2.DeleteFeatureServiceRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplySavedDataset(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplySavedDataset', + feast_dot_registry_dot_RegistryServer__pb2.ApplySavedDatasetRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetSavedDataset(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetSavedDataset', + feast_dot_registry_dot_RegistryServer__pb2.GetSavedDatasetRequest.SerializeToString, + feast_dot_core_dot_SavedDataset__pb2.SavedDataset.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListSavedDatasets(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListSavedDatasets', + feast_dot_registry_dot_RegistryServer__pb2.ListSavedDatasetsRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListSavedDatasetsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, 
compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteSavedDataset(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/DeleteSavedDataset', + feast_dot_registry_dot_RegistryServer__pb2.DeleteSavedDatasetRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplyValidationReference(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyValidationReference', + feast_dot_registry_dot_RegistryServer__pb2.ApplyValidationReferenceRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetValidationReference(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetValidationReference', + feast_dot_registry_dot_RegistryServer__pb2.GetValidationReferenceRequest.SerializeToString, + feast_dot_core_dot_ValidationProfile__pb2.ValidationReference.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListValidationReferences(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + 
insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListValidationReferences', + feast_dot_registry_dot_RegistryServer__pb2.ListValidationReferencesRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListValidationReferencesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteValidationReference(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/DeleteValidationReference', + feast_dot_registry_dot_RegistryServer__pb2.DeleteValidationReferenceRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplyPermission(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyPermission', + feast_dot_registry_dot_RegistryServer__pb2.ApplyPermissionRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetPermission(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, 
'/feast.registry.RegistryServer/GetPermission', + feast_dot_registry_dot_RegistryServer__pb2.GetPermissionRequest.SerializeToString, + feast_dot_core_dot_Permission__pb2.Permission.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListPermissions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListPermissions', + feast_dot_registry_dot_RegistryServer__pb2.ListPermissionsRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListPermissionsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeletePermission(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/DeletePermission', + feast_dot_registry_dot_RegistryServer__pb2.DeletePermissionRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplyProject(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyProject', + feast_dot_registry_dot_RegistryServer__pb2.ApplyProjectRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, 
call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetProject(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetProject', + feast_dot_registry_dot_RegistryServer__pb2.GetProjectRequest.SerializeToString, + feast_dot_core_dot_Project__pb2.Project.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListProjects(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListProjects', + feast_dot_registry_dot_RegistryServer__pb2.ListProjectsRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListProjectsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteProject(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/DeleteProject', + feast_dot_registry_dot_RegistryServer__pb2.DeleteProjectRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplyMaterialization(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + 
timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyMaterialization', + feast_dot_registry_dot_RegistryServer__pb2.ApplyMaterializationRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListProjectMetadata(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListProjectMetadata', + feast_dot_registry_dot_RegistryServer__pb2.ListProjectMetadataRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListProjectMetadataResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def UpdateInfra(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/UpdateInfra', + feast_dot_registry_dot_RegistryServer__pb2.UpdateInfraRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetInfra(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetInfra', + feast_dot_registry_dot_RegistryServer__pb2.GetInfraRequest.SerializeToString, + 
feast_dot_core_dot_InfraObject__pb2.Infra.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Commit(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/Commit', + google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Refresh(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/Refresh', + feast_dot_registry_dot_RegistryServer__pb2.RefreshRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Proto(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/Proto', + google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + feast_dot_core_dot_Registry__pb2.Registry.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/sdk/python/feast/protos/feast/registry/__init__.py b/sdk/python/feast/protos/feast/registry/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/sdk/python/feast/protos/feast/serving/Connector_pb2.py b/sdk/python/feast/protos/feast/serving/Connector_pb2.py new file mode 100644 index 0000000000..b38471dea8 --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/Connector_pb2.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/serving/Connector.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 +from feast.protos.feast.types import EntityKey_pb2 as feast_dot_types_dot_EntityKey__pb2 +from feast.protos.feast.serving import ServingService_pb2 as feast_dot_serving_dot_ServingService__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x66\x65\x61st/serving/Connector.proto\x12\x0egrpc.connector\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17\x66\x65\x61st/types/Value.proto\x1a\x1b\x66\x65\x61st/types/EntityKey.proto\x1a\"feast/serving/ServingService.proto\"\x9a\x01\n\x10\x43onnectorFeature\x12\x34\n\treference\x18\x01 \x01(\x0b\x32!.feast.serving.FeatureReferenceV2\x12-\n\ttimestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12!\n\x05value\x18\x03 \x01(\x0b\x32\x12.feast.types.Value\"M\n\x14\x43onnectorFeatureList\x12\x35\n\x0b\x66\x65\x61tureList\x18\x01 \x03(\x0b\x32 .grpc.connector.ConnectorFeature\"_\n\x11OnlineReadRequest\x12*\n\nentityKeys\x18\x01 \x03(\x0b\x32\x16.feast.types.EntityKey\x12\x0c\n\x04view\x18\x02 \x01(\t\x12\x10\n\x08\x66\x65\x61tures\x18\x03 
\x03(\t\"K\n\x12OnlineReadResponse\x12\x35\n\x07results\x18\x01 \x03(\x0b\x32$.grpc.connector.ConnectorFeatureList2b\n\x0bOnlineStore\x12S\n\nOnlineRead\x12!.grpc.connector.OnlineReadRequest\x1a\".grpc.connector.OnlineReadResponseB4Z2github.com/feast-dev/feast/go/protos/feast/servingb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.serving.Connector_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'Z2github.com/feast-dev/feast/go/protos/feast/serving' + _globals['_CONNECTORFEATURE']._serialized_start=173 + _globals['_CONNECTORFEATURE']._serialized_end=327 + _globals['_CONNECTORFEATURELIST']._serialized_start=329 + _globals['_CONNECTORFEATURELIST']._serialized_end=406 + _globals['_ONLINEREADREQUEST']._serialized_start=408 + _globals['_ONLINEREADREQUEST']._serialized_end=503 + _globals['_ONLINEREADRESPONSE']._serialized_start=505 + _globals['_ONLINEREADRESPONSE']._serialized_end=580 + _globals['_ONLINESTORE']._serialized_start=582 + _globals['_ONLINESTORE']._serialized_end=680 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/serving/Connector_pb2.pyi b/sdk/python/feast/protos/feast/serving/Connector_pb2.pyi new file mode 100644 index 0000000000..f87109e0fa --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/Connector_pb2.pyi @@ -0,0 +1,97 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" +import builtins +import collections.abc +import feast.serving.ServingService_pb2 +import feast.types.EntityKey_pb2 +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class ConnectorFeature(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REFERENCE_FIELD_NUMBER: builtins.int + TIMESTAMP_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + @property + def reference(self) -> feast.serving.ServingService_pb2.FeatureReferenceV2: ... + @property + def timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def value(self) -> feast.types.Value_pb2.Value: ... + def __init__( + self, + *, + reference: feast.serving.ServingService_pb2.FeatureReferenceV2 | None = ..., + timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + value: feast.types.Value_pb2.Value | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["reference", b"reference", "timestamp", b"timestamp", "value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["reference", b"reference", "timestamp", b"timestamp", "value", b"value"]) -> None: ... + +global___ConnectorFeature = ConnectorFeature + +class ConnectorFeatureList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURELIST_FIELD_NUMBER: builtins.int + @property + def featureList(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ConnectorFeature]: ... + def __init__( + self, + *, + featureList: collections.abc.Iterable[global___ConnectorFeature] | None = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["featureList", b"featureList"]) -> None: ... + +global___ConnectorFeatureList = ConnectorFeatureList + +class OnlineReadRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ENTITYKEYS_FIELD_NUMBER: builtins.int + VIEW_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + @property + def entityKeys(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.types.EntityKey_pb2.EntityKey]: ... + view: builtins.str + @property + def features(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + entityKeys: collections.abc.Iterable[feast.types.EntityKey_pb2.EntityKey] | None = ..., + view: builtins.str = ..., + features: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entityKeys", b"entityKeys", "features", b"features", "view", b"view"]) -> None: ... + +global___OnlineReadRequest = OnlineReadRequest + +class OnlineReadResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + RESULTS_FIELD_NUMBER: builtins.int + @property + def results(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ConnectorFeatureList]: ... + def __init__( + self, + *, + results: collections.abc.Iterable[global___ConnectorFeatureList] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["results", b"results"]) -> None: ... 
+ +global___OnlineReadResponse = OnlineReadResponse diff --git a/sdk/python/feast/protos/feast/serving/Connector_pb2_grpc.py b/sdk/python/feast/protos/feast/serving/Connector_pb2_grpc.py new file mode 100644 index 0000000000..dfadf982dd --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/Connector_pb2_grpc.py @@ -0,0 +1,66 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from feast.protos.feast.serving import Connector_pb2 as feast_dot_serving_dot_Connector__pb2 + + +class OnlineStoreStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.OnlineRead = channel.unary_unary( + '/grpc.connector.OnlineStore/OnlineRead', + request_serializer=feast_dot_serving_dot_Connector__pb2.OnlineReadRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_Connector__pb2.OnlineReadResponse.FromString, + ) + + +class OnlineStoreServicer(object): + """Missing associated documentation comment in .proto file.""" + + def OnlineRead(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_OnlineStoreServicer_to_server(servicer, server): + rpc_method_handlers = { + 'OnlineRead': grpc.unary_unary_rpc_method_handler( + servicer.OnlineRead, + request_deserializer=feast_dot_serving_dot_Connector__pb2.OnlineReadRequest.FromString, + response_serializer=feast_dot_serving_dot_Connector__pb2.OnlineReadResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'grpc.connector.OnlineStore', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an 
EXPERIMENTAL API. +class OnlineStore(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def OnlineRead(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/grpc.connector.OnlineStore/OnlineRead', + feast_dot_serving_dot_Connector__pb2.OnlineReadRequest.SerializeToString, + feast_dot_serving_dot_Connector__pb2.OnlineReadResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/sdk/python/feast/protos/feast/serving/GrpcServer_pb2.py b/sdk/python/feast/protos/feast/serving/GrpcServer_pb2.py new file mode 100644 index 0000000000..8e40630cff --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/GrpcServer_pb2.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/serving/GrpcServer.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.serving import ServingService_pb2 as feast_dot_serving_dot_ServingService__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1e\x66\x65\x61st/serving/GrpcServer.proto\x1a\"feast/serving/ServingService.proto\"\xb3\x01\n\x0bPushRequest\x12,\n\x08\x66\x65\x61tures\x18\x01 \x03(\x0b\x32\x1a.PushRequest.FeaturesEntry\x12\x1b\n\x13stream_feature_view\x18\x02 \x01(\t\x12\x1c\n\x14\x61llow_registry_cache\x18\x03 \x01(\x08\x12\n\n\x02to\x18\x04 \x01(\t\x1a/\n\rFeaturesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x1e\n\x0cPushResponse\x12\x0e\n\x06status\x18\x01 \x01(\x08\"\xc1\x01\n\x19WriteToOnlineStoreRequest\x12:\n\x08\x66\x65\x61tures\x18\x01 \x03(\x0b\x32(.WriteToOnlineStoreRequest.FeaturesEntry\x12\x19\n\x11\x66\x65\x61ture_view_name\x18\x02 \x01(\t\x12\x1c\n\x14\x61llow_registry_cache\x18\x03 \x01(\x08\x1a/\n\rFeaturesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\",\n\x1aWriteToOnlineStoreResponse\x12\x0e\n\x06status\x18\x01 \x01(\x08\x32\xf1\x01\n\x11GrpcFeatureServer\x12%\n\x04Push\x12\x0c.PushRequest\x1a\r.PushResponse\"\x00\x12M\n\x12WriteToOnlineStore\x12\x1a.WriteToOnlineStoreRequest\x1a\x1b.WriteToOnlineStoreResponse\x12\x66\n\x11GetOnlineFeatures\x12\'.feast.serving.GetOnlineFeaturesRequest\x1a(.feast.serving.GetOnlineFeaturesResponseb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 
'feast.serving.GrpcServer_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_PUSHREQUEST_FEATURESENTRY']._options = None + _globals['_PUSHREQUEST_FEATURESENTRY']._serialized_options = b'8\001' + _globals['_WRITETOONLINESTOREREQUEST_FEATURESENTRY']._options = None + _globals['_WRITETOONLINESTOREREQUEST_FEATURESENTRY']._serialized_options = b'8\001' + _globals['_PUSHREQUEST']._serialized_start=71 + _globals['_PUSHREQUEST']._serialized_end=250 + _globals['_PUSHREQUEST_FEATURESENTRY']._serialized_start=203 + _globals['_PUSHREQUEST_FEATURESENTRY']._serialized_end=250 + _globals['_PUSHRESPONSE']._serialized_start=252 + _globals['_PUSHRESPONSE']._serialized_end=282 + _globals['_WRITETOONLINESTOREREQUEST']._serialized_start=285 + _globals['_WRITETOONLINESTOREREQUEST']._serialized_end=478 + _globals['_WRITETOONLINESTOREREQUEST_FEATURESENTRY']._serialized_start=203 + _globals['_WRITETOONLINESTOREREQUEST_FEATURESENTRY']._serialized_end=250 + _globals['_WRITETOONLINESTORERESPONSE']._serialized_start=480 + _globals['_WRITETOONLINESTORERESPONSE']._serialized_end=524 + _globals['_GRPCFEATURESERVER']._serialized_start=527 + _globals['_GRPCFEATURESERVER']._serialized_end=768 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/serving/GrpcServer_pb2.pyi b/sdk/python/feast/protos/feast/serving/GrpcServer_pb2.pyi new file mode 100644 index 0000000000..54964f46e5 --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/GrpcServer_pb2.pyi @@ -0,0 +1,120 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class PushRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class FeaturesEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + FEATURES_FIELD_NUMBER: builtins.int + STREAM_FEATURE_VIEW_FIELD_NUMBER: builtins.int + ALLOW_REGISTRY_CACHE_FIELD_NUMBER: builtins.int + TO_FIELD_NUMBER: builtins.int + @property + def features(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + stream_feature_view: builtins.str + allow_registry_cache: builtins.bool + to: builtins.str + def __init__( + self, + *, + features: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + stream_feature_view: builtins.str = ..., + allow_registry_cache: builtins.bool = ..., + to: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_registry_cache", b"allow_registry_cache", "features", b"features", "stream_feature_view", b"stream_feature_view", "to", b"to"]) -> None: ... + +global___PushRequest = PushRequest + +class PushResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STATUS_FIELD_NUMBER: builtins.int + status: builtins.bool + def __init__( + self, + *, + status: builtins.bool = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["status", b"status"]) -> None: ... + +global___PushResponse = PushResponse + +class WriteToOnlineStoreRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class FeaturesEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + FEATURES_FIELD_NUMBER: builtins.int + FEATURE_VIEW_NAME_FIELD_NUMBER: builtins.int + ALLOW_REGISTRY_CACHE_FIELD_NUMBER: builtins.int + @property + def features(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + feature_view_name: builtins.str + allow_registry_cache: builtins.bool + def __init__( + self, + *, + features: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + feature_view_name: builtins.str = ..., + allow_registry_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_registry_cache", b"allow_registry_cache", "feature_view_name", b"feature_view_name", "features", b"features"]) -> None: ... + +global___WriteToOnlineStoreRequest = WriteToOnlineStoreRequest + +class WriteToOnlineStoreResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STATUS_FIELD_NUMBER: builtins.int + status: builtins.bool + def __init__( + self, + *, + status: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["status", b"status"]) -> None: ... 
+ +global___WriteToOnlineStoreResponse = WriteToOnlineStoreResponse diff --git a/sdk/python/feast/protos/feast/serving/GrpcServer_pb2_grpc.py b/sdk/python/feast/protos/feast/serving/GrpcServer_pb2_grpc.py new file mode 100644 index 0000000000..b381cc0f41 --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/GrpcServer_pb2_grpc.py @@ -0,0 +1,133 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from feast.protos.feast.serving import GrpcServer_pb2 as feast_dot_serving_dot_GrpcServer__pb2 +from feast.protos.feast.serving import ServingService_pb2 as feast_dot_serving_dot_ServingService__pb2 + + +class GrpcFeatureServerStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.Push = channel.unary_unary( + '/GrpcFeatureServer/Push', + request_serializer=feast_dot_serving_dot_GrpcServer__pb2.PushRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_GrpcServer__pb2.PushResponse.FromString, + ) + self.WriteToOnlineStore = channel.unary_unary( + '/GrpcFeatureServer/WriteToOnlineStore', + request_serializer=feast_dot_serving_dot_GrpcServer__pb2.WriteToOnlineStoreRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_GrpcServer__pb2.WriteToOnlineStoreResponse.FromString, + ) + self.GetOnlineFeatures = channel.unary_unary( + '/GrpcFeatureServer/GetOnlineFeatures', + request_serializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesResponse.FromString, + ) + + +class GrpcFeatureServerServicer(object): + """Missing associated documentation comment in .proto file.""" + + def Push(self, request, context): + """Missing associated documentation comment in .proto file.""" + 
context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def WriteToOnlineStore(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetOnlineFeatures(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_GrpcFeatureServerServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Push': grpc.unary_unary_rpc_method_handler( + servicer.Push, + request_deserializer=feast_dot_serving_dot_GrpcServer__pb2.PushRequest.FromString, + response_serializer=feast_dot_serving_dot_GrpcServer__pb2.PushResponse.SerializeToString, + ), + 'WriteToOnlineStore': grpc.unary_unary_rpc_method_handler( + servicer.WriteToOnlineStore, + request_deserializer=feast_dot_serving_dot_GrpcServer__pb2.WriteToOnlineStoreRequest.FromString, + response_serializer=feast_dot_serving_dot_GrpcServer__pb2.WriteToOnlineStoreResponse.SerializeToString, + ), + 'GetOnlineFeatures': grpc.unary_unary_rpc_method_handler( + servicer.GetOnlineFeatures, + request_deserializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesRequest.FromString, + response_serializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'GrpcFeatureServer', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class GrpcFeatureServer(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def Push(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/GrpcFeatureServer/Push', + feast_dot_serving_dot_GrpcServer__pb2.PushRequest.SerializeToString, + feast_dot_serving_dot_GrpcServer__pb2.PushResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def WriteToOnlineStore(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/GrpcFeatureServer/WriteToOnlineStore', + feast_dot_serving_dot_GrpcServer__pb2.WriteToOnlineStoreRequest.SerializeToString, + feast_dot_serving_dot_GrpcServer__pb2.WriteToOnlineStoreResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetOnlineFeatures(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/GrpcFeatureServer/GetOnlineFeatures', + feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesRequest.SerializeToString, + feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/sdk/python/feast/protos/feast/serving/ServingService_pb2.py b/sdk/python/feast/protos/feast/serving/ServingService_pb2.py new file mode 100644 index 
0000000000..fa86664057 --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/ServingService_pb2.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/serving/ServingService.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"feast/serving/ServingService.proto\x12\rfeast.serving\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17\x66\x65\x61st/types/Value.proto\"\x1c\n\x1aGetFeastServingInfoRequest\".\n\x1bGetFeastServingInfoResponse\x12\x0f\n\x07version\x18\x01 \x01(\t\"E\n\x12\x46\x65\x61tureReferenceV2\x12\x19\n\x11\x66\x65\x61ture_view_name\x18\x01 \x01(\t\x12\x14\n\x0c\x66\x65\x61ture_name\x18\x02 \x01(\t\"\xfd\x02\n\x1aGetOnlineFeaturesRequestV2\x12\x33\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32!.feast.serving.FeatureReferenceV2\x12H\n\x0b\x65ntity_rows\x18\x02 \x03(\x0b\x32\x33.feast.serving.GetOnlineFeaturesRequestV2.EntityRow\x12\x0f\n\x07project\x18\x05 \x01(\t\x1a\xce\x01\n\tEntityRow\x12-\n\ttimestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12O\n\x06\x66ields\x18\x02 \x03(\x0b\x32?.feast.serving.GetOnlineFeaturesRequestV2.EntityRow.FieldsEntry\x1a\x41\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.feast.types.Value:\x02\x38\x01\"\x1a\n\x0b\x46\x65\x61tureList\x12\x0b\n\x03val\x18\x01 
\x03(\t\"\xc8\x03\n\x18GetOnlineFeaturesRequest\x12\x19\n\x0f\x66\x65\x61ture_service\x18\x01 \x01(\tH\x00\x12.\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32\x1a.feast.serving.FeatureListH\x00\x12G\n\x08\x65ntities\x18\x03 \x03(\x0b\x32\x35.feast.serving.GetOnlineFeaturesRequest.EntitiesEntry\x12\x1a\n\x12\x66ull_feature_names\x18\x04 \x01(\x08\x12T\n\x0frequest_context\x18\x05 \x03(\x0b\x32;.feast.serving.GetOnlineFeaturesRequest.RequestContextEntry\x1aK\n\rEntitiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.feast.types.RepeatedValue:\x02\x38\x01\x1aQ\n\x13RequestContextEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.feast.types.RepeatedValue:\x02\x38\x01\x42\x06\n\x04kind\"\xd2\x02\n\x19GetOnlineFeaturesResponse\x12\x42\n\x08metadata\x18\x01 \x01(\x0b\x32\x30.feast.serving.GetOnlineFeaturesResponseMetadata\x12G\n\x07results\x18\x02 \x03(\x0b\x32\x36.feast.serving.GetOnlineFeaturesResponse.FeatureVector\x12\x0e\n\x06status\x18\x03 \x01(\x08\x1a\x97\x01\n\rFeatureVector\x12\"\n\x06values\x18\x01 \x03(\x0b\x32\x12.feast.types.Value\x12,\n\x08statuses\x18\x02 \x03(\x0e\x32\x1a.feast.serving.FieldStatus\x12\x34\n\x10\x65vent_timestamps\x18\x03 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"V\n!GetOnlineFeaturesResponseMetadata\x12\x31\n\rfeature_names\x18\x01 \x01(\x0b\x32\x1a.feast.serving.FeatureList*[\n\x0b\x46ieldStatus\x12\x0b\n\x07INVALID\x10\x00\x12\x0b\n\x07PRESENT\x10\x01\x12\x0e\n\nNULL_VALUE\x10\x02\x12\r\n\tNOT_FOUND\x10\x03\x12\x13\n\x0fOUTSIDE_MAX_AGE\x10\x04\x32\xe6\x01\n\x0eServingService\x12l\n\x13GetFeastServingInfo\x12).feast.serving.GetFeastServingInfoRequest\x1a*.feast.serving.GetFeastServingInfoResponse\x12\x66\n\x11GetOnlineFeatures\x12\'.feast.serving.GetOnlineFeaturesRequest\x1a(.feast.serving.GetOnlineFeaturesResponseBZ\n\x13\x66\x65\x61st.proto.servingB\x0fServingAPIProtoZ2github.com/feast-dev/feast/go/protos/feast/servingb\x06proto3') + +_globals = globals() 
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.serving.ServingService_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\023feast.proto.servingB\017ServingAPIProtoZ2github.com/feast-dev/feast/go/protos/feast/serving' + _globals['_GETONLINEFEATURESREQUESTV2_ENTITYROW_FIELDSENTRY']._options = None + _globals['_GETONLINEFEATURESREQUESTV2_ENTITYROW_FIELDSENTRY']._serialized_options = b'8\001' + _globals['_GETONLINEFEATURESREQUEST_ENTITIESENTRY']._options = None + _globals['_GETONLINEFEATURESREQUEST_ENTITIESENTRY']._serialized_options = b'8\001' + _globals['_GETONLINEFEATURESREQUEST_REQUESTCONTEXTENTRY']._options = None + _globals['_GETONLINEFEATURESREQUEST_REQUESTCONTEXTENTRY']._serialized_options = b'8\001' + _globals['_FIELDSTATUS']._serialized_start=1560 + _globals['_FIELDSTATUS']._serialized_end=1651 + _globals['_GETFEASTSERVINGINFOREQUEST']._serialized_start=111 + _globals['_GETFEASTSERVINGINFOREQUEST']._serialized_end=139 + _globals['_GETFEASTSERVINGINFORESPONSE']._serialized_start=141 + _globals['_GETFEASTSERVINGINFORESPONSE']._serialized_end=187 + _globals['_FEATUREREFERENCEV2']._serialized_start=189 + _globals['_FEATUREREFERENCEV2']._serialized_end=258 + _globals['_GETONLINEFEATURESREQUESTV2']._serialized_start=261 + _globals['_GETONLINEFEATURESREQUESTV2']._serialized_end=642 + _globals['_GETONLINEFEATURESREQUESTV2_ENTITYROW']._serialized_start=436 + _globals['_GETONLINEFEATURESREQUESTV2_ENTITYROW']._serialized_end=642 + _globals['_GETONLINEFEATURESREQUESTV2_ENTITYROW_FIELDSENTRY']._serialized_start=577 + _globals['_GETONLINEFEATURESREQUESTV2_ENTITYROW_FIELDSENTRY']._serialized_end=642 + _globals['_FEATURELIST']._serialized_start=644 + _globals['_FEATURELIST']._serialized_end=670 + _globals['_GETONLINEFEATURESREQUEST']._serialized_start=673 + 
_globals['_GETONLINEFEATURESREQUEST']._serialized_end=1129 + _globals['_GETONLINEFEATURESREQUEST_ENTITIESENTRY']._serialized_start=963 + _globals['_GETONLINEFEATURESREQUEST_ENTITIESENTRY']._serialized_end=1038 + _globals['_GETONLINEFEATURESREQUEST_REQUESTCONTEXTENTRY']._serialized_start=1040 + _globals['_GETONLINEFEATURESREQUEST_REQUESTCONTEXTENTRY']._serialized_end=1121 + _globals['_GETONLINEFEATURESRESPONSE']._serialized_start=1132 + _globals['_GETONLINEFEATURESRESPONSE']._serialized_end=1470 + _globals['_GETONLINEFEATURESRESPONSE_FEATUREVECTOR']._serialized_start=1319 + _globals['_GETONLINEFEATURESRESPONSE_FEATUREVECTOR']._serialized_end=1470 + _globals['_GETONLINEFEATURESRESPONSEMETADATA']._serialized_start=1472 + _globals['_GETONLINEFEATURESRESPONSEMETADATA']._serialized_end=1558 + _globals['_SERVINGSERVICE']._serialized_start=1654 + _globals['_SERVINGSERVICE']._serialized_end=1884 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/serving/ServingService_pb2.pyi b/sdk/python/feast/protos/feast/serving/ServingService_pb2.pyi new file mode 100644 index 0000000000..3c5e57ae45 --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/ServingService_pb2.pyi @@ -0,0 +1,347 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2018 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" +import builtins +import collections.abc +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _FieldStatus: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _FieldStatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FieldStatus.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + INVALID: _FieldStatus.ValueType # 0 + """Status is unset for this field.""" + PRESENT: _FieldStatus.ValueType # 1 + """Field value is present for this field and age is within max age.""" + NULL_VALUE: _FieldStatus.ValueType # 2 + """Values could be found for entity key and age is within max age, but + this field value is not assigned a value on ingestion into feast. + """ + NOT_FOUND: _FieldStatus.ValueType # 3 + """Entity key did not return any values as they do not exist in Feast. + This could suggest that the feature values have not yet been ingested + into feast or the ingestion failed. + """ + OUTSIDE_MAX_AGE: _FieldStatus.ValueType # 4 + """Values could be found for entity key, but field values are outside the maximum + allowable range. + """ + +class FieldStatus(_FieldStatus, metaclass=_FieldStatusEnumTypeWrapper): ... + +INVALID: FieldStatus.ValueType # 0 +"""Status is unset for this field.""" +PRESENT: FieldStatus.ValueType # 1 +"""Field value is present for this field and age is within max age.""" +NULL_VALUE: FieldStatus.ValueType # 2 +"""Values could be found for entity key and age is within max age, but +this field value is not assigned a value on ingestion into feast. 
+""" +NOT_FOUND: FieldStatus.ValueType # 3 +"""Entity key did not return any values as they do not exist in Feast. +This could suggest that the feature values have not yet been ingested +into feast or the ingestion failed. +""" +OUTSIDE_MAX_AGE: FieldStatus.ValueType # 4 +"""Values could be found for entity key, but field values are outside the maximum +allowable range. +""" +global___FieldStatus = FieldStatus + +class GetFeastServingInfoRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___GetFeastServingInfoRequest = GetFeastServingInfoRequest + +class GetFeastServingInfoResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VERSION_FIELD_NUMBER: builtins.int + version: builtins.str + """Feast version of this serving deployment.""" + def __init__( + self, + *, + version: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["version", b"version"]) -> None: ... + +global___GetFeastServingInfoResponse = GetFeastServingInfoResponse + +class FeatureReferenceV2(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_VIEW_NAME_FIELD_NUMBER: builtins.int + FEATURE_NAME_FIELD_NUMBER: builtins.int + feature_view_name: builtins.str + """Name of the Feature View to retrieve the feature from.""" + feature_name: builtins.str + """Name of the Feature to retrieve the feature from.""" + def __init__( + self, + *, + feature_view_name: builtins.str = ..., + feature_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["feature_name", b"feature_name", "feature_view_name", b"feature_view_name"]) -> None: ... 
+ +global___FeatureReferenceV2 = FeatureReferenceV2 + +class GetOnlineFeaturesRequestV2(google.protobuf.message.Message): + """ToDo (oleksii): remove this message (since it's not used) and move EntityRow on package level""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class EntityRow(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class FieldsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> feast.types.Value_pb2.Value: ... + def __init__( + self, + *, + key: builtins.str = ..., + value: feast.types.Value_pb2.Value | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + TIMESTAMP_FIELD_NUMBER: builtins.int + FIELDS_FIELD_NUMBER: builtins.int + @property + def timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Request timestamp of this row. This value will be used, + together with maxAge, to determine feature staleness. + """ + @property + def fields(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, feast.types.Value_pb2.Value]: + """Map containing mapping of entity name to entity value.""" + def __init__( + self, + *, + timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + fields: collections.abc.Mapping[builtins.str, feast.types.Value_pb2.Value] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["timestamp", b"timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["fields", b"fields", "timestamp", b"timestamp"]) -> None: ... 
+ + FEATURES_FIELD_NUMBER: builtins.int + ENTITY_ROWS_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + @property + def features(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FeatureReferenceV2]: + """List of features that are being retrieved""" + @property + def entity_rows(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GetOnlineFeaturesRequestV2.EntityRow]: + """List of entity rows, containing entity id and timestamp data. + Used during retrieval of feature rows and for joining feature + rows into a final dataset + """ + project: builtins.str + """Optional field to specify project name override. If specified, uses the + given project for retrieval. Overrides the projects specified in + Feature References if both are specified. + """ + def __init__( + self, + *, + features: collections.abc.Iterable[global___FeatureReferenceV2] | None = ..., + entity_rows: collections.abc.Iterable[global___GetOnlineFeaturesRequestV2.EntityRow] | None = ..., + project: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entity_rows", b"entity_rows", "features", b"features", "project", b"project"]) -> None: ... + +global___GetOnlineFeaturesRequestV2 = GetOnlineFeaturesRequestV2 + +class FeatureList(google.protobuf.message.Message): + """In JSON "val" field can be omitted""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... 
+ +global___FeatureList = FeatureList + +class GetOnlineFeaturesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class EntitiesEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> feast.types.Value_pb2.RepeatedValue: ... + def __init__( + self, + *, + key: builtins.str = ..., + value: feast.types.Value_pb2.RepeatedValue | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + class RequestContextEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> feast.types.Value_pb2.RepeatedValue: ... + def __init__( + self, + *, + key: builtins.str = ..., + value: feast.types.Value_pb2.RepeatedValue | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + FEATURE_SERVICE_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + ENTITIES_FIELD_NUMBER: builtins.int + FULL_FEATURE_NAMES_FIELD_NUMBER: builtins.int + REQUEST_CONTEXT_FIELD_NUMBER: builtins.int + feature_service: builtins.str + @property + def features(self) -> global___FeatureList: ... 
+ @property + def entities(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, feast.types.Value_pb2.RepeatedValue]: + """The entity data is specified in a columnar format + A map of entity name -> list of values + """ + full_feature_names: builtins.bool + @property + def request_context(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, feast.types.Value_pb2.RepeatedValue]: + """Context for OnDemand Feature Transformation + (was moved to dedicated parameter to avoid unnecessary separation logic on serving side) + A map of variable name -> list of values + """ + def __init__( + self, + *, + feature_service: builtins.str = ..., + features: global___FeatureList | None = ..., + entities: collections.abc.Mapping[builtins.str, feast.types.Value_pb2.RepeatedValue] | None = ..., + full_feature_names: builtins.bool = ..., + request_context: collections.abc.Mapping[builtins.str, feast.types.Value_pb2.RepeatedValue] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["feature_service", b"feature_service", "features", b"features", "kind", b"kind"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["entities", b"entities", "feature_service", b"feature_service", "features", b"features", "full_feature_names", b"full_feature_names", "kind", b"kind", "request_context", b"request_context"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["kind", b"kind"]) -> typing_extensions.Literal["feature_service", "features"] | None: ... 
+ +global___GetOnlineFeaturesRequest = GetOnlineFeaturesRequest + +class GetOnlineFeaturesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class FeatureVector(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUES_FIELD_NUMBER: builtins.int + STATUSES_FIELD_NUMBER: builtins.int + EVENT_TIMESTAMPS_FIELD_NUMBER: builtins.int + @property + def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.types.Value_pb2.Value]: ... + @property + def statuses(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___FieldStatus.ValueType]: ... + @property + def event_timestamps(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.timestamp_pb2.Timestamp]: ... + def __init__( + self, + *, + values: collections.abc.Iterable[feast.types.Value_pb2.Value] | None = ..., + statuses: collections.abc.Iterable[global___FieldStatus.ValueType] | None = ..., + event_timestamps: collections.abc.Iterable[google.protobuf.timestamp_pb2.Timestamp] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["event_timestamps", b"event_timestamps", "statuses", b"statuses", "values", b"values"]) -> None: ... + + METADATA_FIELD_NUMBER: builtins.int + RESULTS_FIELD_NUMBER: builtins.int + STATUS_FIELD_NUMBER: builtins.int + @property + def metadata(self) -> global___GetOnlineFeaturesResponseMetadata: ... + @property + def results(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GetOnlineFeaturesResponse.FeatureVector]: + """Length of "results" array should match length of requested features. 
+ We also preserve the same order of features here as in metadata.feature_names + """ + status: builtins.bool + def __init__( + self, + *, + metadata: global___GetOnlineFeaturesResponseMetadata | None = ..., + results: collections.abc.Iterable[global___GetOnlineFeaturesResponse.FeatureVector] | None = ..., + status: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "results", b"results", "status", b"status"]) -> None: ... + +global___GetOnlineFeaturesResponse = GetOnlineFeaturesResponse + +class GetOnlineFeaturesResponseMetadata(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_NAMES_FIELD_NUMBER: builtins.int + @property + def feature_names(self) -> global___FeatureList: ... + def __init__( + self, + *, + feature_names: global___FeatureList | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["feature_names", b"feature_names"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["feature_names", b"feature_names"]) -> None: ... + +global___GetOnlineFeaturesResponseMetadata = GetOnlineFeaturesResponseMetadata diff --git a/sdk/python/feast/protos/feast/serving/ServingService_pb2_grpc.py b/sdk/python/feast/protos/feast/serving/ServingService_pb2_grpc.py new file mode 100644 index 0000000000..d3cd055f66 --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/ServingService_pb2_grpc.py @@ -0,0 +1,101 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from feast.protos.feast.serving import ServingService_pb2 as feast_dot_serving_dot_ServingService__pb2 + + +class ServingServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.GetFeastServingInfo = channel.unary_unary( + '/feast.serving.ServingService/GetFeastServingInfo', + request_serializer=feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoResponse.FromString, + ) + self.GetOnlineFeatures = channel.unary_unary( + '/feast.serving.ServingService/GetOnlineFeatures', + request_serializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesResponse.FromString, + ) + + +class ServingServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetFeastServingInfo(self, request, context): + """Get information about this Feast serving. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetOnlineFeatures(self, request, context): + """Get online features synchronously. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_ServingServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetFeastServingInfo': grpc.unary_unary_rpc_method_handler( + servicer.GetFeastServingInfo, + request_deserializer=feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoRequest.FromString, + response_serializer=feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoResponse.SerializeToString, + ), + 'GetOnlineFeatures': grpc.unary_unary_rpc_method_handler( + servicer.GetOnlineFeatures, + request_deserializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesRequest.FromString, + response_serializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'feast.serving.ServingService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class ServingService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetFeastServingInfo(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.serving.ServingService/GetFeastServingInfo', + feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoRequest.SerializeToString, + feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetOnlineFeatures(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.serving.ServingService/GetOnlineFeatures', + feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesRequest.SerializeToString, + feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/sdk/python/feast/protos/feast/serving/TransformationService_pb2.py b/sdk/python/feast/protos/feast/serving/TransformationService_pb2.py new file mode 100644 index 0000000000..bc060e9a77 --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/TransformationService_pb2.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/serving/TransformationService.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)feast/serving/TransformationService.proto\x12\rfeast.serving\"+\n\tValueType\x12\x15\n\x0b\x61rrow_value\x18\x01 \x01(\x0cH\x00\x42\x07\n\x05value\"%\n#GetTransformationServiceInfoRequest\"\x9c\x01\n$GetTransformationServiceInfoResponse\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x36\n\x04type\x18\x02 \x01(\x0e\x32(.feast.serving.TransformationServiceType\x12+\n#transformation_service_type_details\x18\x03 \x01(\t\"\x88\x01\n\x18TransformFeaturesRequest\x12#\n\x1bon_demand_feature_view_name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x36\n\x14transformation_input\x18\x03 \x01(\x0b\x32\x18.feast.serving.ValueType\"T\n\x19TransformFeaturesResponse\x12\x37\n\x15transformation_output\x18\x03 \x01(\x0b\x32\x18.feast.serving.ValueType*\x94\x01\n\x19TransformationServiceType\x12\'\n#TRANSFORMATION_SERVICE_TYPE_INVALID\x10\x00\x12&\n\"TRANSFORMATION_SERVICE_TYPE_PYTHON\x10\x01\x12&\n\"TRANSFORMATION_SERVICE_TYPE_CUSTOM\x10\x64\x32\x89\x02\n\x15TransformationService\x12\x87\x01\n\x1cGetTransformationServiceInfo\x12\x32.feast.serving.GetTransformationServiceInfoRequest\x1a\x33.feast.serving.GetTransformationServiceInfoResponse\x12\x66\n\x11TransformFeatures\x12\'.feast.serving.TransformFeaturesRequest\x1a(.feast.serving.TransformFeaturesResponseBh\n\x13\x66\x65\x61st.proto.servingB\x1dTransformationServiceAPIProtoZ2github.com/feast-dev/feast/go/protos/feast/servingb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, 
_globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.serving.TransformationService_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\023feast.proto.servingB\035TransformationServiceAPIProtoZ2github.com/feast-dev/feast/go/protos/feast/serving' + _globals['_TRANSFORMATIONSERVICETYPE']._serialized_start=529 + _globals['_TRANSFORMATIONSERVICETYPE']._serialized_end=677 + _globals['_VALUETYPE']._serialized_start=60 + _globals['_VALUETYPE']._serialized_end=103 + _globals['_GETTRANSFORMATIONSERVICEINFOREQUEST']._serialized_start=105 + _globals['_GETTRANSFORMATIONSERVICEINFOREQUEST']._serialized_end=142 + _globals['_GETTRANSFORMATIONSERVICEINFORESPONSE']._serialized_start=145 + _globals['_GETTRANSFORMATIONSERVICEINFORESPONSE']._serialized_end=301 + _globals['_TRANSFORMFEATURESREQUEST']._serialized_start=304 + _globals['_TRANSFORMFEATURESREQUEST']._serialized_end=440 + _globals['_TRANSFORMFEATURESRESPONSE']._serialized_start=442 + _globals['_TRANSFORMFEATURESRESPONSE']._serialized_end=526 + _globals['_TRANSFORMATIONSERVICE']._serialized_start=680 + _globals['_TRANSFORMATIONSERVICE']._serialized_end=945 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/serving/TransformationService_pb2.pyi b/sdk/python/feast/protos/feast/serving/TransformationService_pb2.pyi new file mode 100644 index 0000000000..3e0752b7bd --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/TransformationService_pb2.pyi @@ -0,0 +1,136 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2021 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _TransformationServiceType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _TransformationServiceTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_TransformationServiceType.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + TRANSFORMATION_SERVICE_TYPE_INVALID: _TransformationServiceType.ValueType # 0 + TRANSFORMATION_SERVICE_TYPE_PYTHON: _TransformationServiceType.ValueType # 1 + TRANSFORMATION_SERVICE_TYPE_CUSTOM: _TransformationServiceType.ValueType # 100 + +class TransformationServiceType(_TransformationServiceType, metaclass=_TransformationServiceTypeEnumTypeWrapper): ... 
+ +TRANSFORMATION_SERVICE_TYPE_INVALID: TransformationServiceType.ValueType # 0 +TRANSFORMATION_SERVICE_TYPE_PYTHON: TransformationServiceType.ValueType # 1 +TRANSFORMATION_SERVICE_TYPE_CUSTOM: TransformationServiceType.ValueType # 100 +global___TransformationServiceType = TransformationServiceType + +class ValueType(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ARROW_VALUE_FIELD_NUMBER: builtins.int + arrow_value: builtins.bytes + """Having a oneOf provides forward compatibility if we need to support compound types + that are not supported by arrow natively. + """ + def __init__( + self, + *, + arrow_value: builtins.bytes = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["arrow_value", b"arrow_value", "value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["arrow_value", b"arrow_value", "value", b"value"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["arrow_value"] | None: ... + +global___ValueType = ValueType + +class GetTransformationServiceInfoRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___GetTransformationServiceInfoRequest = GetTransformationServiceInfoRequest + +class GetTransformationServiceInfoResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VERSION_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + TRANSFORMATION_SERVICE_TYPE_DETAILS_FIELD_NUMBER: builtins.int + version: builtins.str + """Feast version of this transformation service deployment.""" + type: global___TransformationServiceType.ValueType + """Type of transformation service deployment. 
This is either Python, or custom""" + transformation_service_type_details: builtins.str + def __init__( + self, + *, + version: builtins.str = ..., + type: global___TransformationServiceType.ValueType = ..., + transformation_service_type_details: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["transformation_service_type_details", b"transformation_service_type_details", "type", b"type", "version", b"version"]) -> None: ... + +global___GetTransformationServiceInfoResponse = GetTransformationServiceInfoResponse + +class TransformFeaturesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ON_DEMAND_FEATURE_VIEW_NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + TRANSFORMATION_INPUT_FIELD_NUMBER: builtins.int + on_demand_feature_view_name: builtins.str + project: builtins.str + @property + def transformation_input(self) -> global___ValueType: ... + def __init__( + self, + *, + on_demand_feature_view_name: builtins.str = ..., + project: builtins.str = ..., + transformation_input: global___ValueType | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["transformation_input", b"transformation_input"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["on_demand_feature_view_name", b"on_demand_feature_view_name", "project", b"project", "transformation_input", b"transformation_input"]) -> None: ... + +global___TransformFeaturesRequest = TransformFeaturesRequest + +class TransformFeaturesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TRANSFORMATION_OUTPUT_FIELD_NUMBER: builtins.int + @property + def transformation_output(self) -> global___ValueType: ... + def __init__( + self, + *, + transformation_output: global___ValueType | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["transformation_output", b"transformation_output"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["transformation_output", b"transformation_output"]) -> None: ... + +global___TransformFeaturesResponse = TransformFeaturesResponse diff --git a/sdk/python/feast/protos/feast/serving/TransformationService_pb2_grpc.py b/sdk/python/feast/protos/feast/serving/TransformationService_pb2_grpc.py new file mode 100644 index 0000000000..30099e39ca --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/TransformationService_pb2_grpc.py @@ -0,0 +1,99 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from feast.protos.feast.serving import TransformationService_pb2 as feast_dot_serving_dot_TransformationService__pb2 + + +class TransformationServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.GetTransformationServiceInfo = channel.unary_unary( + '/feast.serving.TransformationService/GetTransformationServiceInfo', + request_serializer=feast_dot_serving_dot_TransformationService__pb2.GetTransformationServiceInfoRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_TransformationService__pb2.GetTransformationServiceInfoResponse.FromString, + ) + self.TransformFeatures = channel.unary_unary( + '/feast.serving.TransformationService/TransformFeatures', + request_serializer=feast_dot_serving_dot_TransformationService__pb2.TransformFeaturesRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_TransformationService__pb2.TransformFeaturesResponse.FromString, + ) + + +class TransformationServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetTransformationServiceInfo(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def TransformFeatures(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_TransformationServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetTransformationServiceInfo': grpc.unary_unary_rpc_method_handler( + servicer.GetTransformationServiceInfo, + request_deserializer=feast_dot_serving_dot_TransformationService__pb2.GetTransformationServiceInfoRequest.FromString, + response_serializer=feast_dot_serving_dot_TransformationService__pb2.GetTransformationServiceInfoResponse.SerializeToString, + ), + 'TransformFeatures': grpc.unary_unary_rpc_method_handler( + servicer.TransformFeatures, + 
request_deserializer=feast_dot_serving_dot_TransformationService__pb2.TransformFeaturesRequest.FromString, + response_serializer=feast_dot_serving_dot_TransformationService__pb2.TransformFeaturesResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'feast.serving.TransformationService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class TransformationService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetTransformationServiceInfo(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.serving.TransformationService/GetTransformationServiceInfo', + feast_dot_serving_dot_TransformationService__pb2.GetTransformationServiceInfoRequest.SerializeToString, + feast_dot_serving_dot_TransformationService__pb2.GetTransformationServiceInfoResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def TransformFeatures(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.serving.TransformationService/TransformFeatures', + feast_dot_serving_dot_TransformationService__pb2.TransformFeaturesRequest.SerializeToString, + feast_dot_serving_dot_TransformationService__pb2.TransformFeaturesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/sdk/python/feast/protos/feast/serving/__init__.py b/sdk/python/feast/protos/feast/serving/__init__.py new file mode 
100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/protos/feast/storage/Redis_pb2.py b/sdk/python/feast/protos/feast/storage/Redis_pb2.py new file mode 100644 index 0000000000..37d59c9df5 --- /dev/null +++ b/sdk/python/feast/protos/feast/storage/Redis_pb2.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/storage/Redis.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19\x66\x65\x61st/storage/Redis.proto\x12\rfeast.storage\x1a\x17\x66\x65\x61st/types/Value.proto\"^\n\nRedisKeyV2\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x14\n\x0c\x65ntity_names\x18\x02 \x03(\t\x12)\n\rentity_values\x18\x03 \x03(\x0b\x32\x12.feast.types.ValueBU\n\x13\x66\x65\x61st.proto.storageB\nRedisProtoZ2github.com/feast-dev/feast/go/protos/feast/storageb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.storage.Redis_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\023feast.proto.storageB\nRedisProtoZ2github.com/feast-dev/feast/go/protos/feast/storage' + _globals['_REDISKEYV2']._serialized_start=69 + _globals['_REDISKEYV2']._serialized_end=163 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/storage/Redis_pb2.pyi b/sdk/python/feast/protos/feast/storage/Redis_pb2.pyi new 
file mode 100644 index 0000000000..74cc2b07f0 --- /dev/null +++ b/sdk/python/feast/protos/feast/storage/Redis_pb2.pyi @@ -0,0 +1,54 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2019 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class RedisKeyV2(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + ENTITY_NAMES_FIELD_NUMBER: builtins.int + ENTITY_VALUES_FIELD_NUMBER: builtins.int + project: builtins.str + @property + def entity_names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + @property + def entity_values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.types.Value_pb2.Value]: ... + def __init__( + self, + *, + project: builtins.str = ..., + entity_names: collections.abc.Iterable[builtins.str] | None = ..., + entity_values: collections.abc.Iterable[feast.types.Value_pb2.Value] | None = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["entity_names", b"entity_names", "entity_values", b"entity_values", "project", b"project"]) -> None: ... + +global___RedisKeyV2 = RedisKeyV2 diff --git a/sdk/python/feast/protos/feast/storage/Redis_pb2_grpc.py b/sdk/python/feast/protos/feast/storage/Redis_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/storage/Redis_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/storage/__init__.py b/sdk/python/feast/protos/feast/storage/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/protos/feast/types/EntityKey_pb2.py b/sdk/python/feast/protos/feast/types/EntityKey_pb2.py new file mode 100644 index 0000000000..a6e1abf730 --- /dev/null +++ b/sdk/python/feast/protos/feast/types/EntityKey_pb2.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/types/EntityKey.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66\x65\x61st/types/EntityKey.proto\x12\x0b\x66\x65\x61st.types\x1a\x17\x66\x65\x61st/types/Value.proto\"I\n\tEntityKey\x12\x11\n\tjoin_keys\x18\x01 \x03(\t\x12)\n\rentity_values\x18\x02 \x03(\x0b\x32\x12.feast.types.ValueBU\n\x11\x66\x65\x61st.proto.typesB\x0e\x45ntityKeyProtoZ0github.com/feast-dev/feast/go/protos/feast/typesb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.types.EntityKey_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\021feast.proto.typesB\016EntityKeyProtoZ0github.com/feast-dev/feast/go/protos/feast/types' + _globals['_ENTITYKEY']._serialized_start=69 + _globals['_ENTITYKEY']._serialized_end=142 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/types/EntityKey_pb2.pyi b/sdk/python/feast/protos/feast/types/EntityKey_pb2.pyi new file mode 100644 index 0000000000..fe65e0c1b3 --- /dev/null +++ b/sdk/python/feast/protos/feast/types/EntityKey_pb2.pyi @@ -0,0 +1,51 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2018 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class EntityKey(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + JOIN_KEYS_FIELD_NUMBER: builtins.int + ENTITY_VALUES_FIELD_NUMBER: builtins.int + @property + def join_keys(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + @property + def entity_values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.types.Value_pb2.Value]: ... + def __init__( + self, + *, + join_keys: collections.abc.Iterable[builtins.str] | None = ..., + entity_values: collections.abc.Iterable[feast.types.Value_pb2.Value] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entity_values", b"entity_values", "join_keys", b"join_keys"]) -> None: ... + +global___EntityKey = EntityKey diff --git a/sdk/python/feast/protos/feast/types/EntityKey_pb2_grpc.py b/sdk/python/feast/protos/feast/types/EntityKey_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/types/EntityKey_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/types/Field_pb2.py b/sdk/python/feast/protos/feast/types/Field_pb2.py new file mode 100644 index 0000000000..973fdc6cde --- /dev/null +++ b/sdk/python/feast/protos/feast/types/Field_pb2.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/types/Field.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x66\x65\x61st/types/Field.proto\x12\x0b\x66\x65\x61st.types\x1a\x17\x66\x65\x61st/types/Value.proto\"\xaf\x01\n\x05\x46ield\x12\x0c\n\x04name\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\x12*\n\x04tags\x18\x03 \x03(\x0b\x32\x1c.feast.types.Field.TagsEntry\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42Q\n\x11\x66\x65\x61st.proto.typesB\nFieldProtoZ0github.com/feast-dev/feast/go/protos/feast/typesb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.types.Field_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\021feast.proto.typesB\nFieldProtoZ0github.com/feast-dev/feast/go/protos/feast/types' + 
_globals['_FIELD_TAGSENTRY']._options = None + _globals['_FIELD_TAGSENTRY']._serialized_options = b'8\001' + _globals['_FIELD']._serialized_start=66 + _globals['_FIELD']._serialized_end=241 + _globals['_FIELD_TAGSENTRY']._serialized_start=198 + _globals['_FIELD_TAGSENTRY']._serialized_end=241 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/types/Field_pb2.pyi b/sdk/python/feast/protos/feast/types/Field_pb2.pyi new file mode 100644 index 0000000000..28a2194237 --- /dev/null +++ b/sdk/python/feast/protos/feast/types/Field_pb2.pyi @@ -0,0 +1,73 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2018 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" +import builtins +import collections.abc +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Field(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + name: builtins.str + value: feast.types.Value_pb2.ValueType.Enum.ValueType + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """Tags for user defined metadata on a field""" + description: builtins.str + """Description of the field.""" + def __init__( + self, + *, + name: builtins.str = ..., + value: feast.types.Value_pb2.ValueType.Enum.ValueType = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + description: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "name", b"name", "tags", b"tags", "value", b"value"]) -> None: ... 
+ +global___Field = Field diff --git a/sdk/python/feast/protos/feast/types/Field_pb2_grpc.py b/sdk/python/feast/protos/feast/types/Field_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/types/Field_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/types/Value_pb2.py b/sdk/python/feast/protos/feast/types/Value_pb2.py new file mode 100644 index 0000000000..18ee331180 --- /dev/null +++ b/sdk/python/feast/protos/feast/types/Value_pb2.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/types/Value.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x66\x65\x61st/types/Value.proto\x12\x0b\x66\x65\x61st.types\"\x97\x02\n\tValueType\"\x89\x02\n\x04\x45num\x12\x0b\n\x07INVALID\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\n\n\x06STRING\x10\x02\x12\t\n\x05INT32\x10\x03\x12\t\n\x05INT64\x10\x04\x12\n\n\x06\x44OUBLE\x10\x05\x12\t\n\x05\x46LOAT\x10\x06\x12\x08\n\x04\x42OOL\x10\x07\x12\x12\n\x0eUNIX_TIMESTAMP\x10\x08\x12\x0e\n\nBYTES_LIST\x10\x0b\x12\x0f\n\x0bSTRING_LIST\x10\x0c\x12\x0e\n\nINT32_LIST\x10\r\x12\x0e\n\nINT64_LIST\x10\x0e\x12\x0f\n\x0b\x44OUBLE_LIST\x10\x0f\x12\x0e\n\nFLOAT_LIST\x10\x10\x12\r\n\tBOOL_LIST\x10\x11\x12\x17\n\x13UNIX_TIMESTAMP_LIST\x10\x12\x12\x08\n\x04NULL\x10\x13\"\x82\x05\n\x05Value\x12\x13\n\tbytes_val\x18\x01 
\x01(\x0cH\x00\x12\x14\n\nstring_val\x18\x02 \x01(\tH\x00\x12\x13\n\tint32_val\x18\x03 \x01(\x05H\x00\x12\x13\n\tint64_val\x18\x04 \x01(\x03H\x00\x12\x14\n\ndouble_val\x18\x05 \x01(\x01H\x00\x12\x13\n\tfloat_val\x18\x06 \x01(\x02H\x00\x12\x12\n\x08\x62ool_val\x18\x07 \x01(\x08H\x00\x12\x1c\n\x12unix_timestamp_val\x18\x08 \x01(\x03H\x00\x12\x30\n\x0e\x62ytes_list_val\x18\x0b \x01(\x0b\x32\x16.feast.types.BytesListH\x00\x12\x32\n\x0fstring_list_val\x18\x0c \x01(\x0b\x32\x17.feast.types.StringListH\x00\x12\x30\n\x0eint32_list_val\x18\r \x01(\x0b\x32\x16.feast.types.Int32ListH\x00\x12\x30\n\x0eint64_list_val\x18\x0e \x01(\x0b\x32\x16.feast.types.Int64ListH\x00\x12\x32\n\x0f\x64ouble_list_val\x18\x0f \x01(\x0b\x32\x17.feast.types.DoubleListH\x00\x12\x30\n\x0e\x66loat_list_val\x18\x10 \x01(\x0b\x32\x16.feast.types.FloatListH\x00\x12.\n\rbool_list_val\x18\x11 \x01(\x0b\x32\x15.feast.types.BoolListH\x00\x12\x39\n\x17unix_timestamp_list_val\x18\x12 \x01(\x0b\x32\x16.feast.types.Int64ListH\x00\x12%\n\x08null_val\x18\x13 \x01(\x0e\x32\x11.feast.types.NullH\x00\x42\x05\n\x03val\"\x18\n\tBytesList\x12\x0b\n\x03val\x18\x01 \x03(\x0c\"\x19\n\nStringList\x12\x0b\n\x03val\x18\x01 \x03(\t\"\x18\n\tInt32List\x12\x0b\n\x03val\x18\x01 \x03(\x05\"\x18\n\tInt64List\x12\x0b\n\x03val\x18\x01 \x03(\x03\"\x19\n\nDoubleList\x12\x0b\n\x03val\x18\x01 \x03(\x01\"\x18\n\tFloatList\x12\x0b\n\x03val\x18\x01 \x03(\x02\"\x17\n\x08\x42oolList\x12\x0b\n\x03val\x18\x01 \x03(\x08\"0\n\rRepeatedValue\x12\x1f\n\x03val\x18\x01 \x03(\x0b\x32\x12.feast.types.Value*\x10\n\x04Null\x12\x08\n\x04NULL\x10\x00\x42Q\n\x11\x66\x65\x61st.proto.typesB\nValueProtoZ0github.com/feast-dev/feast/go/protos/feast/typesb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.types.Value_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + 
_globals['DESCRIPTOR']._serialized_options = b'\n\021feast.proto.typesB\nValueProtoZ0github.com/feast-dev/feast/go/protos/feast/types' + _globals['_NULL']._serialized_start=1200 + _globals['_NULL']._serialized_end=1216 + _globals['_VALUETYPE']._serialized_start=41 + _globals['_VALUETYPE']._serialized_end=320 + _globals['_VALUETYPE_ENUM']._serialized_start=55 + _globals['_VALUETYPE_ENUM']._serialized_end=320 + _globals['_VALUE']._serialized_start=323 + _globals['_VALUE']._serialized_end=965 + _globals['_BYTESLIST']._serialized_start=967 + _globals['_BYTESLIST']._serialized_end=991 + _globals['_STRINGLIST']._serialized_start=993 + _globals['_STRINGLIST']._serialized_end=1018 + _globals['_INT32LIST']._serialized_start=1020 + _globals['_INT32LIST']._serialized_end=1044 + _globals['_INT64LIST']._serialized_start=1046 + _globals['_INT64LIST']._serialized_end=1070 + _globals['_DOUBLELIST']._serialized_start=1072 + _globals['_DOUBLELIST']._serialized_end=1097 + _globals['_FLOATLIST']._serialized_start=1099 + _globals['_FLOATLIST']._serialized_end=1123 + _globals['_BOOLLIST']._serialized_start=1125 + _globals['_BOOLLIST']._serialized_end=1148 + _globals['_REPEATEDVALUE']._serialized_start=1150 + _globals['_REPEATEDVALUE']._serialized_end=1198 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/types/Value_pb2.pyi b/sdk/python/feast/protos/feast/types/Value_pb2.pyi new file mode 100644 index 0000000000..15e4870e6a --- /dev/null +++ b/sdk/python/feast/protos/feast/types/Value_pb2.pyi @@ -0,0 +1,296 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2018 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _Null: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _NullEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Null.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + NULL: _Null.ValueType # 0 + +class Null(_Null, metaclass=_NullEnumTypeWrapper): ... 
+ +NULL: Null.ValueType # 0 +global___Null = Null + +class ValueType(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _Enum: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _EnumEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ValueType._Enum.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + INVALID: ValueType._Enum.ValueType # 0 + BYTES: ValueType._Enum.ValueType # 1 + STRING: ValueType._Enum.ValueType # 2 + INT32: ValueType._Enum.ValueType # 3 + INT64: ValueType._Enum.ValueType # 4 + DOUBLE: ValueType._Enum.ValueType # 5 + FLOAT: ValueType._Enum.ValueType # 6 + BOOL: ValueType._Enum.ValueType # 7 + UNIX_TIMESTAMP: ValueType._Enum.ValueType # 8 + BYTES_LIST: ValueType._Enum.ValueType # 11 + STRING_LIST: ValueType._Enum.ValueType # 12 + INT32_LIST: ValueType._Enum.ValueType # 13 + INT64_LIST: ValueType._Enum.ValueType # 14 + DOUBLE_LIST: ValueType._Enum.ValueType # 15 + FLOAT_LIST: ValueType._Enum.ValueType # 16 + BOOL_LIST: ValueType._Enum.ValueType # 17 + UNIX_TIMESTAMP_LIST: ValueType._Enum.ValueType # 18 + NULL: ValueType._Enum.ValueType # 19 + + class Enum(_Enum, metaclass=_EnumEnumTypeWrapper): ... 
+ INVALID: ValueType.Enum.ValueType # 0 + BYTES: ValueType.Enum.ValueType # 1 + STRING: ValueType.Enum.ValueType # 2 + INT32: ValueType.Enum.ValueType # 3 + INT64: ValueType.Enum.ValueType # 4 + DOUBLE: ValueType.Enum.ValueType # 5 + FLOAT: ValueType.Enum.ValueType # 6 + BOOL: ValueType.Enum.ValueType # 7 + UNIX_TIMESTAMP: ValueType.Enum.ValueType # 8 + BYTES_LIST: ValueType.Enum.ValueType # 11 + STRING_LIST: ValueType.Enum.ValueType # 12 + INT32_LIST: ValueType.Enum.ValueType # 13 + INT64_LIST: ValueType.Enum.ValueType # 14 + DOUBLE_LIST: ValueType.Enum.ValueType # 15 + FLOAT_LIST: ValueType.Enum.ValueType # 16 + BOOL_LIST: ValueType.Enum.ValueType # 17 + UNIX_TIMESTAMP_LIST: ValueType.Enum.ValueType # 18 + NULL: ValueType.Enum.ValueType # 19 + + def __init__( + self, + ) -> None: ... + +global___ValueType = ValueType + +class Value(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BYTES_VAL_FIELD_NUMBER: builtins.int + STRING_VAL_FIELD_NUMBER: builtins.int + INT32_VAL_FIELD_NUMBER: builtins.int + INT64_VAL_FIELD_NUMBER: builtins.int + DOUBLE_VAL_FIELD_NUMBER: builtins.int + FLOAT_VAL_FIELD_NUMBER: builtins.int + BOOL_VAL_FIELD_NUMBER: builtins.int + UNIX_TIMESTAMP_VAL_FIELD_NUMBER: builtins.int + BYTES_LIST_VAL_FIELD_NUMBER: builtins.int + STRING_LIST_VAL_FIELD_NUMBER: builtins.int + INT32_LIST_VAL_FIELD_NUMBER: builtins.int + INT64_LIST_VAL_FIELD_NUMBER: builtins.int + DOUBLE_LIST_VAL_FIELD_NUMBER: builtins.int + FLOAT_LIST_VAL_FIELD_NUMBER: builtins.int + BOOL_LIST_VAL_FIELD_NUMBER: builtins.int + UNIX_TIMESTAMP_LIST_VAL_FIELD_NUMBER: builtins.int + NULL_VAL_FIELD_NUMBER: builtins.int + bytes_val: builtins.bytes + string_val: builtins.str + int32_val: builtins.int + int64_val: builtins.int + double_val: builtins.float + float_val: builtins.float + bool_val: builtins.bool + unix_timestamp_val: builtins.int + @property + def bytes_list_val(self) -> global___BytesList: ... 
+ @property + def string_list_val(self) -> global___StringList: ... + @property + def int32_list_val(self) -> global___Int32List: ... + @property + def int64_list_val(self) -> global___Int64List: ... + @property + def double_list_val(self) -> global___DoubleList: ... + @property + def float_list_val(self) -> global___FloatList: ... + @property + def bool_list_val(self) -> global___BoolList: ... + @property + def unix_timestamp_list_val(self) -> global___Int64List: ... + null_val: global___Null.ValueType + def __init__( + self, + *, + bytes_val: builtins.bytes = ..., + string_val: builtins.str = ..., + int32_val: builtins.int = ..., + int64_val: builtins.int = ..., + double_val: builtins.float = ..., + float_val: builtins.float = ..., + bool_val: builtins.bool = ..., + unix_timestamp_val: builtins.int = ..., + bytes_list_val: global___BytesList | None = ..., + string_list_val: global___StringList | None = ..., + int32_list_val: global___Int32List | None = ..., + int64_list_val: global___Int64List | None = ..., + double_list_val: global___DoubleList | None = ..., + float_list_val: global___FloatList | None = ..., + bool_list_val: global___BoolList | None = ..., + unix_timestamp_list_val: global___Int64List | None = ..., + null_val: global___Null.ValueType = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["bool_list_val", b"bool_list_val", "bool_val", b"bool_val", "bytes_list_val", b"bytes_list_val", "bytes_val", b"bytes_val", "double_list_val", b"double_list_val", "double_val", b"double_val", "float_list_val", b"float_list_val", "float_val", b"float_val", "int32_list_val", b"int32_list_val", "int32_val", b"int32_val", "int64_list_val", b"int64_list_val", "int64_val", b"int64_val", "null_val", b"null_val", "string_list_val", b"string_list_val", "string_val", b"string_val", "unix_timestamp_list_val", b"unix_timestamp_list_val", "unix_timestamp_val", b"unix_timestamp_val", "val", b"val"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["bool_list_val", b"bool_list_val", "bool_val", b"bool_val", "bytes_list_val", b"bytes_list_val", "bytes_val", b"bytes_val", "double_list_val", b"double_list_val", "double_val", b"double_val", "float_list_val", b"float_list_val", "float_val", b"float_val", "int32_list_val", b"int32_list_val", "int32_val", b"int32_val", "int64_list_val", b"int64_list_val", "int64_val", b"int64_val", "null_val", b"null_val", "string_list_val", b"string_list_val", "string_val", b"string_val", "unix_timestamp_list_val", b"unix_timestamp_list_val", "unix_timestamp_val", b"unix_timestamp_val", "val", b"val"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["val", b"val"]) -> typing_extensions.Literal["bytes_val", "string_val", "int32_val", "int64_val", "double_val", "float_val", "bool_val", "unix_timestamp_val", "bytes_list_val", "string_list_val", "int32_list_val", "int64_list_val", "double_list_val", "float_list_val", "bool_list_val", "unix_timestamp_list_val", "null_val"] | None: ... + +global___Value = Value + +class BytesList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.bytes] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___BytesList = BytesList + +class StringList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___StringList = StringList + +class Int32List(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___Int32List = Int32List + +class Int64List(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___Int64List = Int64List + +class DoubleList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.float] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___DoubleList = DoubleList + +class FloatList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.float] | None = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___FloatList = FloatList + +class BoolList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.bool] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___BoolList = BoolList + +class RepeatedValue(google.protobuf.message.Message): + """This is to avoid an issue of being unable to specify `repeated value` in oneofs or maps + In JSON "val" field can be omitted + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Value]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[global___Value] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___RepeatedValue = RepeatedValue diff --git a/sdk/python/feast/protos/feast/types/Value_pb2_grpc.py b/sdk/python/feast/protos/feast/types/Value_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/types/Value_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/types/__init__.py b/sdk/python/feast/protos/feast/types/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 59e799ebab..0a46400007 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -1,7 +1,6 @@ # This file was autogenerated by uv via the following command: # uv pip compile -p 3.10 --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.10-ci-requirements.txt -aiobotocore==2.15.0 - # via feast (setup.py) +aiobotocore==2.15.1 aiohappyeyeballs==2.4.0 # via aiohttp aiohttp==3.10.5 @@ -22,8 +21,6 @@ anyio==4.5.0 # jupyter-server # starlette # watchfiles -appnope==0.1.4 - # via ipykernel argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 @@ -33,7 +30,6 @@ arrow==1.3.0 asn1crypto==1.5.1 # via snowflake-connector-python assertpy==1.1 - # via feast (setup.py) asttokens==2.4.1 # via stack-data async-lru==2.0.4 @@ -55,10 +51,8 @@ azure-core==1.31.0 # via # azure-identity # azure-storage-blob -azure-identity==1.17.1 - # via feast (setup.py) +azure-identity==1.18.0 azure-storage-blob==12.23.0 - # via feast (setup.py) babel==2.16.0 # via # jupyterlab-server @@ -66,14 +60,11 @@ babel==2.16.0 beautifulsoup4==4.12.3 # via nbconvert bigtree==0.21.1 - # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.35.16 - # via - # feast (setup.py) - # moto -botocore==1.35.16 +boto3==1.35.23 + # via moto +botocore==1.35.23 # via # aiobotocore # boto3 @@ -81,13 +72,11 @@ botocore==1.35.16 # s3transfer build==1.2.2 # via - # feast (setup.py) # pip-tools # singlestoredb cachetools==5.5.0 # via google-auth cassandra-driver==3.29.2 - # via feast (setup.py) certifi==2024.8.30 # via # elastic-transport @@ 
-110,7 +99,6 @@ charset-normalizer==3.3.2 # snowflake-connector-python click==8.1.7 # via - # feast (setup.py) # dask # geomet # great-expectations @@ -119,9 +107,7 @@ click==8.1.7 cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via - # feast (setup.py) - # great-expectations + # via great-expectations comm==0.2.2 # via # ipykernel @@ -130,7 +116,6 @@ coverage[toml]==7.6.1 # via pytest-cov cryptography==42.0.8 # via - # feast (setup.py) # azure-identity # azure-storage-blob # great-expectations @@ -145,9 +130,7 @@ cryptography==42.0.8 cython==3.0.11 # via thriftpy2 dask[dataframe]==2024.9.0 - # via - # feast (setup.py) - # dask-expr + # via dask-expr dask-expr==1.1.14 # via dask db-dtypes==1.3.0 @@ -159,11 +142,9 @@ decorator==5.1.1 defusedxml==0.7.1 # via nbconvert deltalake==0.20.0 - # via feast (setup.py) deprecation==2.1.0 # via python-keycloak dill==0.3.8 - # via feast (setup.py) distlib==0.3.8 # via virtualenv docker==7.1.0 @@ -175,7 +156,6 @@ duckdb==1.1.0 elastic-transport==8.15.0 # via elasticsearch elasticsearch==8.15.1 - # via feast (setup.py) entrypoints==0.4 # via altair exceptiongroup==1.2.2 @@ -188,7 +168,6 @@ execnet==2.1.1 executing==2.1.0 # via stack-data fastapi==0.115.0 - # via feast (setup.py) fastjsonschema==2.20.0 # via nbformat filelock==3.16.1 @@ -202,21 +181,18 @@ frozenlist==1.4.1 # aiohttp # aiosignal fsspec==2024.9.0 - # via - # feast (setup.py) - # dask + # via dask geomet==0.2.1.post1 # via cassandra-driver -google-api-core[grpc]==2.19.2 +google-api-core[grpc]==2.20.0 # via - # feast (setup.py) # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-core # google-cloud-datastore # google-cloud-storage -google-auth==2.34.0 +google-auth==2.35.0 # via # google-api-core # google-cloud-bigquery @@ -227,11 +203,8 @@ google-auth==2.34.0 # google-cloud-storage # kubernetes google-cloud-bigquery[pandas]==3.25.0 - # via feast (setup.py) google-cloud-bigquery-storage==2.26.0 - # via feast (setup.py) 
google-cloud-bigtable==2.26.0 - # via feast (setup.py) google-cloud-core==2.4.1 # via # google-cloud-bigquery @@ -239,9 +212,7 @@ google-cloud-core==2.4.1 # google-cloud-datastore # google-cloud-storage google-cloud-datastore==2.20.1 - # via feast (setup.py) google-cloud-storage==2.18.2 - # via feast (setup.py) google-crc32c==1.6.0 # via # google-cloud-storage @@ -252,17 +223,16 @@ google-resumable-media==2.7.2 # google-cloud-storage googleapis-common-protos[grpc]==1.65.0 # via - # feast (setup.py) # google-api-core # grpc-google-iam-v1 # grpcio-status great-expectations==0.18.21 - # via feast (setup.py) +greenlet==3.1.0 + # via sqlalchemy grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable grpcio==1.66.1 # via - # feast (setup.py) # google-api-core # googleapis-common-protos # grpc-google-iam-v1 @@ -272,42 +242,30 @@ grpcio==1.66.1 # grpcio-testing # grpcio-tools grpcio-health-checking==1.62.3 - # via feast (setup.py) grpcio-reflection==1.62.3 - # via feast (setup.py) grpcio-status==1.62.3 # via google-api-core grpcio-testing==1.62.3 - # via feast (setup.py) grpcio-tools==1.62.3 - # via feast (setup.py) gunicorn==23.0.0 - # via feast (setup.py) h11==0.14.0 # via # httpcore # uvicorn happybase==1.2.0 - # via feast (setup.py) hazelcast-python-client==5.5.0 - # via feast (setup.py) hiredis==2.4.0 - # via feast (setup.py) httpcore==1.0.5 # via httpx httptools==0.6.1 # via uvicorn httpx==0.27.2 # via - # feast (setup.py) # jupyterlab # python-keycloak ibis-framework[duckdb]==9.5.0 - # via - # feast (setup.py) - # ibis-substrait + # via ibis-substrait ibis-substrait==4.0.1 - # via feast (setup.py) identify==2.6.1 # via pre-commit idna==3.10 @@ -343,7 +301,6 @@ jedi==0.19.1 # via ipython jinja2==3.1.4 # via - # feast (setup.py) # altair # great-expectations # jupyter-server @@ -366,7 +323,6 @@ jsonpointer==3.0.0 # jsonschema jsonschema[format-nongpl]==4.23.0 # via - # feast (setup.py) # altair # great-expectations # jupyter-events @@ -414,7 +370,6 @@ 
jupyterlab-widgets==3.0.13 jwcrypto==1.5.6 # via python-keycloak kubernetes==20.13.0 - # via feast (setup.py) locket==1.0.0 # via partd makefun==1.15.4 @@ -435,17 +390,13 @@ matplotlib-inline==0.1.7 mdurl==0.1.2 # via markdown-it-py minio==7.1.0 - # via feast (setup.py) mistune==3.0.2 # via # great-expectations # nbconvert mmh3==5.0.0 - # via feast (setup.py) mock==2.0.0 - # via feast (setup.py) moto==4.2.14 - # via feast (setup.py) msal==1.31.0 # via # azure-identity @@ -457,13 +408,10 @@ multidict==6.1.0 # aiohttp # yarl mypy==1.11.2 - # via - # feast (setup.py) - # sqlalchemy + # via sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.3.0 - # via feast (setup.py) nbclient==0.10.0 # via nbconvert nbconvert==7.16.4 @@ -486,7 +434,6 @@ notebook-shim==0.2.4 # notebook numpy==1.26.4 # via - # feast (setup.py) # altair # dask # db-dtypes @@ -521,7 +468,6 @@ packaging==24.1 # sphinx pandas==2.2.2 # via - # feast (setup.py) # altair # dask # dask-expr @@ -547,7 +493,6 @@ pexpect==4.9.0 pip==24.2 # via pip-tools pip-tools==7.4.1 - # via feast (setup.py) platformdirs==3.11.0 # via # jupyter-core @@ -560,11 +505,8 @@ ply==3.11 portalocker==2.10.1 # via msal-extensions pre-commit==3.3.1 - # via feast (setup.py) prometheus-client==0.20.0 - # via - # feast (setup.py) - # jupyter-server + # via jupyter-server prompt-toolkit==3.0.47 # via ipython proto-plus==1.24.0 @@ -575,7 +517,6 @@ proto-plus==1.24.0 # google-cloud-datastore protobuf==4.25.5 # via - # feast (setup.py) # google-api-core # google-cloud-bigquery-storage # google-cloud-bigtable @@ -591,11 +532,8 @@ protobuf==4.25.5 # proto-plus # substrait psutil==5.9.0 - # via - # feast (setup.py) - # ipykernel + # via ipykernel psycopg[binary, pool]==3.2.2 - # via feast (setup.py) psycopg-binary==3.2.2 # via psycopg psycopg-pool==3.2.3 @@ -607,14 +545,12 @@ ptyprocess==0.7.0 pure-eval==0.2.3 # via stack-data py==1.11.0 - # via feast (setup.py) py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark 
pyarrow==17.0.0 # via - # feast (setup.py) # dask-expr # db-dtypes # deltalake @@ -630,35 +566,28 @@ pyasn1==0.6.1 pyasn1-modules==0.4.1 # via google-auth pybindgen==0.22.1 - # via feast (setup.py) pycparser==2.22 # via cffi pydantic==2.9.2 # via - # feast (setup.py) # fastapi # great-expectations pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via - # feast (setup.py) # ipython # nbconvert # rich # sphinx pyjwt[crypto]==2.9.0 # via - # feast (setup.py) # msal # singlestoredb # snowflake-connector-python pymssql==2.3.1 - # via feast (setup.py) pymysql==1.1.1 - # via feast (setup.py) pyodbc==5.1.0 - # via feast (setup.py) pyopenssl==24.2.1 # via snowflake-connector-python pyparsing==3.1.4 @@ -668,10 +597,8 @@ pyproject-hooks==1.1.0 # build # pip-tools pyspark==3.5.2 - # via feast (setup.py) pytest==7.4.4 # via - # feast (setup.py) # pytest-benchmark # pytest-cov # pytest-env @@ -681,21 +608,13 @@ pytest==7.4.4 # pytest-timeout # pytest-xdist pytest-benchmark==3.4.1 - # via feast (setup.py) pytest-cov==5.0.0 - # via feast (setup.py) pytest-env==1.1.3 - # via feast (setup.py) pytest-lazy-fixture==0.6.3 - # via feast (setup.py) pytest-mock==1.10.4 - # via feast (setup.py) pytest-ordering==0.6 - # via feast (setup.py) pytest-timeout==1.4.2 - # via feast (setup.py) pytest-xdist==3.6.1 - # via feast (setup.py) python-dateutil==2.9.0.post0 # via # arrow @@ -713,7 +632,6 @@ python-dotenv==1.0.1 python-json-logger==2.0.7 # via jupyter-events python-keycloak==4.2.2 - # via feast (setup.py) pytz==2024.2 # via # great-expectations @@ -723,7 +641,6 @@ pytz==2024.2 # trino pyyaml==6.0.2 # via - # feast (setup.py) # dask # ibis-substrait # jupyter-events @@ -737,19 +654,15 @@ pyzmq==26.2.0 # jupyter-client # jupyter-server redis==4.6.0 - # via feast (setup.py) referencing==0.35.1 # via # jsonschema # jsonschema-specifications # jupyter-events regex==2024.9.11 - # via - # feast (setup.py) - # parsimonious + # via parsimonious requests==2.32.3 # via - # feast (setup.py) # 
azure-core # docker # google-api-core @@ -794,8 +707,7 @@ ruamel-yaml==0.17.40 # via great-expectations ruamel-yaml-clib==0.2.8 # via ruamel-yaml -ruff==0.6.5 - # via feast (setup.py) +ruff==0.6.6 s3transfer==0.10.2 # via boto3 scipy==1.14.1 @@ -810,7 +722,6 @@ setuptools==75.1.0 # pip-tools # singlestoredb singlestoredb==1.6.3 - # via feast (setup.py) six==1.16.0 # via # asttokens @@ -831,13 +742,11 @@ sniffio==1.3.1 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.12.2 - # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.6 # via beautifulsoup4 sphinx==6.2.1 - # via feast (setup.py) sphinxcontrib-applehelp==2.0.0 # via sphinx sphinxcontrib-devhelp==2.0.0 @@ -851,11 +760,9 @@ sphinxcontrib-qthelp==2.0.0 sphinxcontrib-serializinghtml==2.0.0 # via sphinx sqlalchemy[mypy]==2.0.35 - # via feast (setup.py) sqlglot==25.20.1 # via ibis-framework sqlite-vec==0.1.1 - # via feast (setup.py) sqlparams==6.1.0 # via singlestoredb stack-data==0.6.3 @@ -865,21 +772,17 @@ starlette==0.38.5 substrait==0.23.0 # via ibis-substrait tabulate==0.9.0 - # via feast (setup.py) tenacity==8.5.0 - # via feast (setup.py) terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals testcontainers==4.4.0 - # via feast (setup.py) thriftpy2==0.5.2 # via happybase tinycss2==1.3.0 # via nbconvert toml==0.10.2 - # via feast (setup.py) tomli==2.0.1 # via # build @@ -907,9 +810,7 @@ tornado==6.4.1 # notebook # terminado tqdm==4.66.5 - # via - # feast (setup.py) - # great-expectations + # via great-expectations traitlets==5.14.3 # via # comm @@ -926,37 +827,23 @@ traitlets==5.14.3 # nbconvert # nbformat trino==0.329.0 - # via feast (setup.py) typeguard==4.3.0 - # via feast (setup.py) types-cffi==1.16.0.20240331 # via types-pyopenssl types-protobuf==3.19.22 - # via - # feast (setup.py) - # mypy-protobuf + # via mypy-protobuf types-pymysql==1.1.0.20240524 - # via feast (setup.py) types-pyopenssl==24.1.0.20240722 # via types-redis 
types-python-dateutil==2.9.0.20240906 - # via - # feast (setup.py) - # arrow + # via arrow types-pytz==2024.2.0.20240913 - # via feast (setup.py) types-pyyaml==6.0.12.20240917 - # via feast (setup.py) types-redis==4.6.0.20240903 - # via feast (setup.py) types-requests==2.30.0.0 - # via feast (setup.py) types-setuptools==75.1.0.20240917 - # via - # feast (setup.py) - # types-cffi + # via types-cffi types-tabulate==0.9.0.20240106 - # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests typing-extensions==4.12.2 @@ -992,7 +879,6 @@ uri-template==1.3.0 # via jsonschema urllib3==2.2.3 # via - # feast (setup.py) # botocore # docker # elastic-transport @@ -1003,13 +889,10 @@ urllib3==2.2.3 # responses # testcontainers uvicorn[standard]==0.30.6 - # via feast (setup.py) uvloop==0.20.0 # via uvicorn virtualenv==20.23.0 - # via - # feast (setup.py) - # pre-commit + # via pre-commit watchfiles==0.24.0 # via uvicorn wcwidth==0.2.13 diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 26eeca3529..9e5eb0be72 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -11,36 +11,30 @@ attrs==24.2.0 # jsonschema # referencing bigtree==0.21.1 - # via feast (setup.py) certifi==2024.8.30 # via requests charset-normalizer==3.3.2 # via requests click==8.1.7 # via - # feast (setup.py) # dask # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via feast (setup.py) dask[dataframe]==2024.9.0 - # via - # feast (setup.py) - # dask-expr + # via dask-expr dask-expr==1.1.14 # via dask dill==0.3.8 - # via feast (setup.py) exceptiongroup==1.2.2 # via anyio fastapi==0.115.0 - # via feast (setup.py) fsspec==2024.9.0 # via dask +greenlet==3.1.0 + # via sqlalchemy gunicorn==23.0.0 - # via feast (setup.py) h11==0.14.0 # via uvicorn httptools==0.6.1 @@ -52,9 +46,7 @@ idna==3.10 importlib-metadata==8.5.0 # via dask jinja2==3.1.4 - # via feast (setup.py) 
jsonschema==4.23.0 - # via feast (setup.py) jsonschema-specifications==2023.12.1 # via jsonschema locket==1.0.0 @@ -62,14 +54,12 @@ locket==1.0.0 markupsafe==2.1.5 # via jinja2 mmh3==5.0.0 - # via feast (setup.py) mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy numpy==1.26.4 # via - # feast (setup.py) # dask # pandas # pyarrow @@ -79,31 +69,21 @@ packaging==24.1 # gunicorn pandas==2.2.2 # via - # feast (setup.py) # dask # dask-expr partd==1.4.2 # via dask prometheus-client==0.20.0 - # via feast (setup.py) protobuf==4.25.5 - # via feast (setup.py) psutil==6.0.0 - # via feast (setup.py) pyarrow==17.0.0 - # via - # feast (setup.py) - # dask-expr + # via dask-expr pydantic==2.9.2 - # via - # feast (setup.py) - # fastapi + # via fastapi pydantic-core==2.23.4 # via pydantic pygments==2.18.0 - # via feast (setup.py) pyjwt==2.9.0 - # via feast (setup.py) python-dateutil==2.9.0.post0 # via pandas python-dotenv==1.0.1 @@ -112,7 +92,6 @@ pytz==2024.2 # via pandas pyyaml==6.0.2 # via - # feast (setup.py) # dask # uvicorn referencing==0.35.1 @@ -120,7 +99,6 @@ referencing==0.35.1 # jsonschema # jsonschema-specifications requests==2.32.3 - # via feast (setup.py) rpds-py==0.20.0 # via # jsonschema @@ -130,15 +108,11 @@ six==1.16.0 sniffio==1.3.1 # via anyio sqlalchemy[mypy]==2.0.35 - # via feast (setup.py) starlette==0.38.5 # via fastapi tabulate==0.9.0 - # via feast (setup.py) tenacity==8.5.0 - # via feast (setup.py) toml==0.10.2 - # via feast (setup.py) tomli==2.0.1 # via mypy toolz==0.12.1 @@ -146,9 +120,7 @@ toolz==0.12.1 # dask # partd tqdm==4.66.5 - # via feast (setup.py) typeguard==4.3.0 - # via feast (setup.py) typing-extensions==4.12.2 # via # anyio @@ -164,7 +136,6 @@ tzdata==2024.1 urllib3==2.2.3 # via requests uvicorn[standard]==0.30.6 - # via feast (setup.py) uvloop==0.20.0 # via uvicorn watchfiles==0.24.0 diff --git a/sdk/python/requirements/py3.11-ci-requirements.txt b/sdk/python/requirements/py3.11-ci-requirements.txt index 9f57ecd841..26ced829c6 
100644 --- a/sdk/python/requirements/py3.11-ci-requirements.txt +++ b/sdk/python/requirements/py3.11-ci-requirements.txt @@ -1,7 +1,6 @@ # This file was autogenerated by uv via the following command: # uv pip compile -p 3.11 --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.11-ci-requirements.txt -aiobotocore==2.15.0 - # via feast (setup.py) +aiobotocore==2.15.1 aiohappyeyeballs==2.4.0 # via aiohttp aiohttp==3.10.5 @@ -22,8 +21,6 @@ anyio==4.5.0 # jupyter-server # starlette # watchfiles -appnope==0.1.4 - # via ipykernel argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 @@ -33,7 +30,6 @@ arrow==1.3.0 asn1crypto==1.5.1 # via snowflake-connector-python assertpy==1.1 - # via feast (setup.py) asttokens==2.4.1 # via stack-data async-lru==2.0.4 @@ -53,10 +49,8 @@ azure-core==1.31.0 # via # azure-identity # azure-storage-blob -azure-identity==1.17.1 - # via feast (setup.py) +azure-identity==1.18.0 azure-storage-blob==12.23.0 - # via feast (setup.py) babel==2.16.0 # via # jupyterlab-server @@ -64,14 +58,11 @@ babel==2.16.0 beautifulsoup4==4.12.3 # via nbconvert bigtree==0.21.1 - # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.35.16 - # via - # feast (setup.py) - # moto -botocore==1.35.16 +boto3==1.35.23 + # via moto +botocore==1.35.23 # via # aiobotocore # boto3 @@ -79,13 +70,11 @@ botocore==1.35.16 # s3transfer build==1.2.2 # via - # feast (setup.py) # pip-tools # singlestoredb cachetools==5.5.0 # via google-auth cassandra-driver==3.29.2 - # via feast (setup.py) certifi==2024.8.30 # via # elastic-transport @@ -108,7 +97,6 @@ charset-normalizer==3.3.2 # snowflake-connector-python click==8.1.7 # via - # feast (setup.py) # dask # geomet # great-expectations @@ -117,9 +105,7 @@ click==8.1.7 cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via - # feast (setup.py) - # great-expectations + # via great-expectations comm==0.2.2 # via # ipykernel @@ -128,7 +114,6 @@ coverage[toml]==7.6.1 # via pytest-cov 
cryptography==42.0.8 # via - # feast (setup.py) # azure-identity # azure-storage-blob # great-expectations @@ -143,9 +128,7 @@ cryptography==42.0.8 cython==3.0.11 # via thriftpy2 dask[dataframe]==2024.9.0 - # via - # feast (setup.py) - # dask-expr + # via dask-expr dask-expr==1.1.14 # via dask db-dtypes==1.3.0 @@ -157,11 +140,9 @@ decorator==5.1.1 defusedxml==0.7.1 # via nbconvert deltalake==0.20.0 - # via feast (setup.py) deprecation==2.1.0 # via python-keycloak dill==0.3.8 - # via feast (setup.py) distlib==0.3.8 # via virtualenv docker==7.1.0 @@ -173,7 +154,6 @@ duckdb==1.1.0 elastic-transport==8.15.0 # via elasticsearch elasticsearch==8.15.1 - # via feast (setup.py) entrypoints==0.4 # via altair execnet==2.1.1 @@ -181,7 +161,6 @@ execnet==2.1.1 executing==2.1.0 # via stack-data fastapi==0.115.0 - # via feast (setup.py) fastjsonschema==2.20.0 # via nbformat filelock==3.16.1 @@ -195,21 +174,18 @@ frozenlist==1.4.1 # aiohttp # aiosignal fsspec==2024.9.0 - # via - # feast (setup.py) - # dask + # via dask geomet==0.2.1.post1 # via cassandra-driver -google-api-core[grpc]==2.19.2 +google-api-core[grpc]==2.20.0 # via - # feast (setup.py) # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-core # google-cloud-datastore # google-cloud-storage -google-auth==2.34.0 +google-auth==2.35.0 # via # google-api-core # google-cloud-bigquery @@ -220,11 +196,8 @@ google-auth==2.34.0 # google-cloud-storage # kubernetes google-cloud-bigquery[pandas]==3.25.0 - # via feast (setup.py) google-cloud-bigquery-storage==2.26.0 - # via feast (setup.py) google-cloud-bigtable==2.26.0 - # via feast (setup.py) google-cloud-core==2.4.1 # via # google-cloud-bigquery @@ -232,9 +205,7 @@ google-cloud-core==2.4.1 # google-cloud-datastore # google-cloud-storage google-cloud-datastore==2.20.1 - # via feast (setup.py) google-cloud-storage==2.18.2 - # via feast (setup.py) google-crc32c==1.6.0 # via # google-cloud-storage @@ -245,17 +216,16 @@ 
google-resumable-media==2.7.2 # google-cloud-storage googleapis-common-protos[grpc]==1.65.0 # via - # feast (setup.py) # google-api-core # grpc-google-iam-v1 # grpcio-status great-expectations==0.18.21 - # via feast (setup.py) +greenlet==3.1.0 + # via sqlalchemy grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable grpcio==1.66.1 # via - # feast (setup.py) # google-api-core # googleapis-common-protos # grpc-google-iam-v1 @@ -265,42 +235,30 @@ grpcio==1.66.1 # grpcio-testing # grpcio-tools grpcio-health-checking==1.62.3 - # via feast (setup.py) grpcio-reflection==1.62.3 - # via feast (setup.py) grpcio-status==1.62.3 # via google-api-core grpcio-testing==1.62.3 - # via feast (setup.py) grpcio-tools==1.62.3 - # via feast (setup.py) gunicorn==23.0.0 - # via feast (setup.py) h11==0.14.0 # via # httpcore # uvicorn happybase==1.2.0 - # via feast (setup.py) hazelcast-python-client==5.5.0 - # via feast (setup.py) hiredis==2.4.0 - # via feast (setup.py) httpcore==1.0.5 # via httpx httptools==0.6.1 # via uvicorn httpx==0.27.2 # via - # feast (setup.py) # jupyterlab # python-keycloak ibis-framework[duckdb]==9.5.0 - # via - # feast (setup.py) - # ibis-substrait + # via ibis-substrait ibis-substrait==4.0.1 - # via feast (setup.py) identify==2.6.1 # via pre-commit idna==3.10 @@ -334,7 +292,6 @@ jedi==0.19.1 # via ipython jinja2==3.1.4 # via - # feast (setup.py) # altair # great-expectations # jupyter-server @@ -357,7 +314,6 @@ jsonpointer==3.0.0 # jsonschema jsonschema[format-nongpl]==4.23.0 # via - # feast (setup.py) # altair # great-expectations # jupyter-events @@ -405,7 +361,6 @@ jupyterlab-widgets==3.0.13 jwcrypto==1.5.6 # via python-keycloak kubernetes==20.13.0 - # via feast (setup.py) locket==1.0.0 # via partd makefun==1.15.4 @@ -426,17 +381,13 @@ matplotlib-inline==0.1.7 mdurl==0.1.2 # via markdown-it-py minio==7.1.0 - # via feast (setup.py) mistune==3.0.2 # via # great-expectations # nbconvert mmh3==5.0.0 - # via feast (setup.py) mock==2.0.0 - # via feast (setup.py) 
moto==4.2.14 - # via feast (setup.py) msal==1.31.0 # via # azure-identity @@ -448,13 +399,10 @@ multidict==6.1.0 # aiohttp # yarl mypy==1.11.2 - # via - # feast (setup.py) - # sqlalchemy + # via sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.3.0 - # via feast (setup.py) nbclient==0.10.0 # via nbconvert nbconvert==7.16.4 @@ -477,7 +425,6 @@ notebook-shim==0.2.4 # notebook numpy==1.26.4 # via - # feast (setup.py) # altair # dask # db-dtypes @@ -512,7 +459,6 @@ packaging==24.1 # sphinx pandas==2.2.2 # via - # feast (setup.py) # altair # dask # dask-expr @@ -538,7 +484,6 @@ pexpect==4.9.0 pip==24.2 # via pip-tools pip-tools==7.4.1 - # via feast (setup.py) platformdirs==3.11.0 # via # jupyter-core @@ -551,11 +496,8 @@ ply==3.11 portalocker==2.10.1 # via msal-extensions pre-commit==3.3.1 - # via feast (setup.py) prometheus-client==0.20.0 - # via - # feast (setup.py) - # jupyter-server + # via jupyter-server prompt-toolkit==3.0.47 # via ipython proto-plus==1.24.0 @@ -566,7 +508,6 @@ proto-plus==1.24.0 # google-cloud-datastore protobuf==4.25.5 # via - # feast (setup.py) # google-api-core # google-cloud-bigquery-storage # google-cloud-bigtable @@ -582,11 +523,8 @@ protobuf==4.25.5 # proto-plus # substrait psutil==5.9.0 - # via - # feast (setup.py) - # ipykernel + # via ipykernel psycopg[binary, pool]==3.2.2 - # via feast (setup.py) psycopg-binary==3.2.2 # via psycopg psycopg-pool==3.2.3 @@ -598,14 +536,12 @@ ptyprocess==0.7.0 pure-eval==0.2.3 # via stack-data py==1.11.0 - # via feast (setup.py) py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark pyarrow==17.0.0 # via - # feast (setup.py) # dask-expr # db-dtypes # deltalake @@ -621,35 +557,28 @@ pyasn1==0.6.1 pyasn1-modules==0.4.1 # via google-auth pybindgen==0.22.1 - # via feast (setup.py) pycparser==2.22 # via cffi pydantic==2.9.2 # via - # feast (setup.py) # fastapi # great-expectations pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via - # feast (setup.py) # ipython # nbconvert # 
rich # sphinx pyjwt[crypto]==2.9.0 # via - # feast (setup.py) # msal # singlestoredb # snowflake-connector-python pymssql==2.3.1 - # via feast (setup.py) pymysql==1.1.1 - # via feast (setup.py) pyodbc==5.1.0 - # via feast (setup.py) pyopenssl==24.2.1 # via snowflake-connector-python pyparsing==3.1.4 @@ -659,10 +588,8 @@ pyproject-hooks==1.1.0 # build # pip-tools pyspark==3.5.2 - # via feast (setup.py) pytest==7.4.4 # via - # feast (setup.py) # pytest-benchmark # pytest-cov # pytest-env @@ -672,21 +599,13 @@ pytest==7.4.4 # pytest-timeout # pytest-xdist pytest-benchmark==3.4.1 - # via feast (setup.py) pytest-cov==5.0.0 - # via feast (setup.py) pytest-env==1.1.3 - # via feast (setup.py) pytest-lazy-fixture==0.6.3 - # via feast (setup.py) pytest-mock==1.10.4 - # via feast (setup.py) pytest-ordering==0.6 - # via feast (setup.py) pytest-timeout==1.4.2 - # via feast (setup.py) pytest-xdist==3.6.1 - # via feast (setup.py) python-dateutil==2.9.0.post0 # via # arrow @@ -704,7 +623,6 @@ python-dotenv==1.0.1 python-json-logger==2.0.7 # via jupyter-events python-keycloak==4.2.2 - # via feast (setup.py) pytz==2024.2 # via # great-expectations @@ -714,7 +632,6 @@ pytz==2024.2 # trino pyyaml==6.0.2 # via - # feast (setup.py) # dask # ibis-substrait # jupyter-events @@ -728,19 +645,15 @@ pyzmq==26.2.0 # jupyter-client # jupyter-server redis==4.6.0 - # via feast (setup.py) referencing==0.35.1 # via # jsonschema # jsonschema-specifications # jupyter-events regex==2024.9.11 - # via - # feast (setup.py) - # parsimonious + # via parsimonious requests==2.32.3 # via - # feast (setup.py) # azure-core # docker # google-api-core @@ -785,8 +698,7 @@ ruamel-yaml==0.17.40 # via great-expectations ruamel-yaml-clib==0.2.8 # via ruamel-yaml -ruff==0.6.5 - # via feast (setup.py) +ruff==0.6.6 s3transfer==0.10.2 # via boto3 scipy==1.14.1 @@ -801,7 +713,6 @@ setuptools==75.1.0 # pip-tools # singlestoredb singlestoredb==1.6.3 - # via feast (setup.py) six==1.16.0 # via # asttokens @@ -822,13 +733,11 @@ 
sniffio==1.3.1 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.12.2 - # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.6 # via beautifulsoup4 sphinx==6.2.1 - # via feast (setup.py) sphinxcontrib-applehelp==2.0.0 # via sphinx sphinxcontrib-devhelp==2.0.0 @@ -842,11 +751,9 @@ sphinxcontrib-qthelp==2.0.0 sphinxcontrib-serializinghtml==2.0.0 # via sphinx sqlalchemy[mypy]==2.0.35 - # via feast (setup.py) sqlglot==25.20.1 # via ibis-framework sqlite-vec==0.1.1 - # via feast (setup.py) sqlparams==6.1.0 # via singlestoredb stack-data==0.6.3 @@ -856,23 +763,17 @@ starlette==0.38.5 substrait==0.23.0 # via ibis-substrait tabulate==0.9.0 - # via feast (setup.py) tenacity==8.5.0 - # via feast (setup.py) terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals testcontainers==4.4.0 - # via feast (setup.py) thriftpy2==0.5.2 # via happybase tinycss2==1.3.0 # via nbconvert toml==0.10.2 - # via feast (setup.py) -tomli==2.0.1 - # via coverage tomlkit==0.13.2 # via snowflake-connector-python toolz==0.12.1 @@ -890,9 +791,7 @@ tornado==6.4.1 # notebook # terminado tqdm==4.66.5 - # via - # feast (setup.py) - # great-expectations + # via great-expectations traitlets==5.14.3 # via # comm @@ -909,37 +808,23 @@ traitlets==5.14.3 # nbconvert # nbformat trino==0.329.0 - # via feast (setup.py) typeguard==4.3.0 - # via feast (setup.py) types-cffi==1.16.0.20240331 # via types-pyopenssl types-protobuf==3.19.22 - # via - # feast (setup.py) - # mypy-protobuf + # via mypy-protobuf types-pymysql==1.1.0.20240524 - # via feast (setup.py) types-pyopenssl==24.1.0.20240722 # via types-redis types-python-dateutil==2.9.0.20240906 - # via - # feast (setup.py) - # arrow + # via arrow types-pytz==2024.2.0.20240913 - # via feast (setup.py) types-pyyaml==6.0.12.20240917 - # via feast (setup.py) types-redis==4.6.0.20240903 - # via feast (setup.py) types-requests==2.30.0.0 - # via feast (setup.py) types-setuptools==75.1.0.20240917 - # 
via - # feast (setup.py) - # types-cffi + # via types-cffi types-tabulate==0.9.0.20240106 - # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests typing-extensions==4.12.2 @@ -971,7 +856,6 @@ uri-template==1.3.0 # via jsonschema urllib3==2.2.3 # via - # feast (setup.py) # botocore # docker # elastic-transport @@ -982,13 +866,10 @@ urllib3==2.2.3 # responses # testcontainers uvicorn[standard]==0.30.6 - # via feast (setup.py) uvloop==0.20.0 # via uvicorn virtualenv==20.23.0 - # via - # feast (setup.py) - # pre-commit + # via pre-commit watchfiles==0.24.0 # via uvicorn wcwidth==0.2.13 diff --git a/sdk/python/requirements/py3.11-requirements.txt b/sdk/python/requirements/py3.11-requirements.txt index 5c20e45f07..1ce25e7d5b 100644 --- a/sdk/python/requirements/py3.11-requirements.txt +++ b/sdk/python/requirements/py3.11-requirements.txt @@ -11,34 +11,28 @@ attrs==24.2.0 # jsonschema # referencing bigtree==0.21.1 - # via feast (setup.py) certifi==2024.8.30 # via requests charset-normalizer==3.3.2 # via requests click==8.1.7 # via - # feast (setup.py) # dask # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via feast (setup.py) dask[dataframe]==2024.9.0 - # via - # feast (setup.py) - # dask-expr + # via dask-expr dask-expr==1.1.14 # via dask dill==0.3.8 - # via feast (setup.py) fastapi==0.115.0 - # via feast (setup.py) fsspec==2024.9.0 # via dask +greenlet==3.1.0 + # via sqlalchemy gunicorn==23.0.0 - # via feast (setup.py) h11==0.14.0 # via uvicorn httptools==0.6.1 @@ -50,9 +44,7 @@ idna==3.10 importlib-metadata==8.5.0 # via dask jinja2==3.1.4 - # via feast (setup.py) jsonschema==4.23.0 - # via feast (setup.py) jsonschema-specifications==2023.12.1 # via jsonschema locket==1.0.0 @@ -60,14 +52,12 @@ locket==1.0.0 markupsafe==2.1.5 # via jinja2 mmh3==5.0.0 - # via feast (setup.py) mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy numpy==1.26.4 # via - # feast (setup.py) # dask # pandas # pyarrow @@ -77,31 +67,21 @@ packaging==24.1 # 
gunicorn pandas==2.2.2 # via - # feast (setup.py) # dask # dask-expr partd==1.4.2 # via dask prometheus-client==0.20.0 - # via feast (setup.py) protobuf==4.25.5 - # via feast (setup.py) psutil==6.0.0 - # via feast (setup.py) pyarrow==17.0.0 - # via - # feast (setup.py) - # dask-expr + # via dask-expr pydantic==2.9.2 - # via - # feast (setup.py) - # fastapi + # via fastapi pydantic-core==2.23.4 # via pydantic pygments==2.18.0 - # via feast (setup.py) pyjwt==2.9.0 - # via feast (setup.py) python-dateutil==2.9.0.post0 # via pandas python-dotenv==1.0.1 @@ -110,7 +90,6 @@ pytz==2024.2 # via pandas pyyaml==6.0.2 # via - # feast (setup.py) # dask # uvicorn referencing==0.35.1 @@ -118,7 +97,6 @@ referencing==0.35.1 # jsonschema # jsonschema-specifications requests==2.32.3 - # via feast (setup.py) rpds-py==0.20.0 # via # jsonschema @@ -128,23 +106,17 @@ six==1.16.0 sniffio==1.3.1 # via anyio sqlalchemy[mypy]==2.0.35 - # via feast (setup.py) starlette==0.38.5 # via fastapi tabulate==0.9.0 - # via feast (setup.py) tenacity==8.5.0 - # via feast (setup.py) toml==0.10.2 - # via feast (setup.py) toolz==0.12.1 # via # dask # partd tqdm==4.66.5 - # via feast (setup.py) typeguard==4.3.0 - # via feast (setup.py) typing-extensions==4.12.2 # via # fastapi @@ -158,7 +130,6 @@ tzdata==2024.1 urllib3==2.2.3 # via requests uvicorn[standard]==0.30.6 - # via feast (setup.py) uvloop==0.20.0 # via uvicorn watchfiles==0.24.0 diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index bbdca890b6..c8c92969fc 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -1,7 +1,6 @@ # This file was autogenerated by uv via the following command: # uv pip compile -p 3.9 --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.9-ci-requirements.txt -aiobotocore==2.15.0 - # via feast (setup.py) +aiobotocore==2.15.1 aiohappyeyeballs==2.4.0 # via 
aiohttp aiohttp==3.10.5 @@ -22,8 +21,6 @@ anyio==4.5.0 # jupyter-server # starlette # watchfiles -appnope==0.1.4 - # via ipykernel argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 @@ -33,7 +30,6 @@ arrow==1.3.0 asn1crypto==1.5.1 # via snowflake-connector-python assertpy==1.1 - # via feast (setup.py) asttokens==2.4.1 # via stack-data async-lru==2.0.4 @@ -55,10 +51,8 @@ azure-core==1.31.0 # via # azure-identity # azure-storage-blob -azure-identity==1.17.1 - # via feast (setup.py) +azure-identity==1.18.0 azure-storage-blob==12.23.0 - # via feast (setup.py) babel==2.16.0 # via # jupyterlab-server @@ -68,14 +62,11 @@ beautifulsoup4==4.12.3 bidict==0.23.1 # via ibis-framework bigtree==0.21.1 - # via feast (setup.py) bleach==6.1.0 # via nbconvert -boto3==1.35.16 - # via - # feast (setup.py) - # moto -botocore==1.35.16 +boto3==1.35.23 + # via moto +botocore==1.35.23 # via # aiobotocore # boto3 @@ -83,13 +74,11 @@ botocore==1.35.16 # s3transfer build==1.2.2 # via - # feast (setup.py) # pip-tools # singlestoredb cachetools==5.5.0 # via google-auth cassandra-driver==3.29.2 - # via feast (setup.py) certifi==2024.8.30 # via # elastic-transport @@ -112,7 +101,6 @@ charset-normalizer==3.3.2 # snowflake-connector-python click==8.1.7 # via - # feast (setup.py) # dask # geomet # great-expectations @@ -121,9 +109,7 @@ click==8.1.7 cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via - # feast (setup.py) - # great-expectations + # via great-expectations comm==0.2.2 # via # ipykernel @@ -132,7 +118,6 @@ coverage[toml]==7.6.1 # via pytest-cov cryptography==42.0.8 # via - # feast (setup.py) # azure-identity # azure-storage-blob # great-expectations @@ -147,9 +132,7 @@ cryptography==42.0.8 cython==3.0.11 # via thriftpy2 dask[dataframe]==2024.8.0 - # via - # feast (setup.py) - # dask-expr + # via dask-expr dask-expr==1.1.10 # via dask db-dtypes==1.3.0 @@ -161,11 +144,9 @@ decorator==5.1.1 defusedxml==0.7.1 # via nbconvert deltalake==0.20.0 - # via feast (setup.py) 
deprecation==2.1.0 # via python-keycloak dill==0.3.8 - # via feast (setup.py) distlib==0.3.8 # via virtualenv docker==7.1.0 @@ -177,7 +158,6 @@ duckdb==0.10.3 elastic-transport==8.15.0 # via elasticsearch elasticsearch==8.15.1 - # via feast (setup.py) entrypoints==0.4 # via altair exceptiongroup==1.2.2 @@ -190,7 +170,6 @@ execnet==2.1.1 executing==2.1.0 # via stack-data fastapi==0.115.0 - # via feast (setup.py) fastjsonschema==2.20.0 # via nbformat filelock==3.16.1 @@ -204,21 +183,18 @@ frozenlist==1.4.1 # aiohttp # aiosignal fsspec==2024.9.0 - # via - # feast (setup.py) - # dask + # via dask geomet==0.2.1.post1 # via cassandra-driver -google-api-core[grpc]==2.19.2 +google-api-core[grpc]==2.20.0 # via - # feast (setup.py) # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-core # google-cloud-datastore # google-cloud-storage -google-auth==2.34.0 +google-auth==2.35.0 # via # google-api-core # google-cloud-bigquery @@ -229,11 +205,8 @@ google-auth==2.34.0 # google-cloud-storage # kubernetes google-cloud-bigquery[pandas]==3.25.0 - # via feast (setup.py) google-cloud-bigquery-storage==2.26.0 - # via feast (setup.py) google-cloud-bigtable==2.26.0 - # via feast (setup.py) google-cloud-core==2.4.1 # via # google-cloud-bigquery @@ -241,9 +214,7 @@ google-cloud-core==2.4.1 # google-cloud-datastore # google-cloud-storage google-cloud-datastore==2.20.1 - # via feast (setup.py) google-cloud-storage==2.18.2 - # via feast (setup.py) google-crc32c==1.6.0 # via # google-cloud-storage @@ -254,17 +225,16 @@ google-resumable-media==2.7.2 # google-cloud-storage googleapis-common-protos[grpc]==1.65.0 # via - # feast (setup.py) # google-api-core # grpc-google-iam-v1 # grpcio-status great-expectations==0.18.21 - # via feast (setup.py) +greenlet==3.1.0 + # via sqlalchemy grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable grpcio==1.66.1 # via - # feast (setup.py) # google-api-core # googleapis-common-protos # grpc-google-iam-v1 @@ -274,42 
+244,30 @@ grpcio==1.66.1 # grpcio-testing # grpcio-tools grpcio-health-checking==1.62.3 - # via feast (setup.py) grpcio-reflection==1.62.3 - # via feast (setup.py) grpcio-status==1.62.3 # via google-api-core grpcio-testing==1.62.3 - # via feast (setup.py) grpcio-tools==1.62.3 - # via feast (setup.py) gunicorn==23.0.0 - # via feast (setup.py) h11==0.14.0 # via # httpcore # uvicorn happybase==1.2.0 - # via feast (setup.py) hazelcast-python-client==5.5.0 - # via feast (setup.py) hiredis==2.4.0 - # via feast (setup.py) httpcore==1.0.5 # via httpx httptools==0.6.1 # via uvicorn httpx==0.27.2 # via - # feast (setup.py) # jupyterlab # python-keycloak ibis-framework[duckdb]==9.0.0 - # via - # feast (setup.py) - # ibis-substrait + # via ibis-substrait ibis-substrait==4.0.1 - # via feast (setup.py) identify==2.6.1 # via pre-commit idna==3.10 @@ -352,7 +310,6 @@ jedi==0.19.1 # via ipython jinja2==3.1.4 # via - # feast (setup.py) # altair # great-expectations # jupyter-server @@ -375,7 +332,6 @@ jsonpointer==3.0.0 # jsonschema jsonschema[format-nongpl]==4.23.0 # via - # feast (setup.py) # altair # great-expectations # jupyter-events @@ -423,7 +379,6 @@ jupyterlab-widgets==3.0.13 jwcrypto==1.5.6 # via python-keycloak kubernetes==20.13.0 - # via feast (setup.py) locket==1.0.0 # via partd makefun==1.15.4 @@ -444,17 +399,13 @@ matplotlib-inline==0.1.7 mdurl==0.1.2 # via markdown-it-py minio==7.1.0 - # via feast (setup.py) mistune==3.0.2 # via # great-expectations # nbconvert mmh3==5.0.0 - # via feast (setup.py) mock==2.0.0 - # via feast (setup.py) moto==4.2.14 - # via feast (setup.py) msal==1.31.0 # via # azure-identity @@ -466,13 +417,10 @@ multidict==6.1.0 # aiohttp # yarl mypy==1.11.2 - # via - # feast (setup.py) - # sqlalchemy + # via sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.3.0 - # via feast (setup.py) nbclient==0.10.0 # via nbconvert nbconvert==7.16.4 @@ -495,7 +443,6 @@ notebook-shim==0.2.4 # notebook numpy==1.26.4 # via - # feast (setup.py) # altair # 
dask # db-dtypes @@ -529,7 +476,6 @@ packaging==24.1 # sphinx pandas==2.2.2 # via - # feast (setup.py) # altair # dask # dask-expr @@ -555,7 +501,6 @@ pexpect==4.9.0 pip==24.2 # via pip-tools pip-tools==7.4.1 - # via feast (setup.py) platformdirs==3.11.0 # via # jupyter-core @@ -568,11 +513,8 @@ ply==3.11 portalocker==2.10.1 # via msal-extensions pre-commit==3.3.1 - # via feast (setup.py) prometheus-client==0.20.0 - # via - # feast (setup.py) - # jupyter-server + # via jupyter-server prompt-toolkit==3.0.47 # via ipython proto-plus==1.24.0 @@ -583,7 +525,6 @@ proto-plus==1.24.0 # google-cloud-datastore protobuf==4.25.5 # via - # feast (setup.py) # google-api-core # google-cloud-bigquery-storage # google-cloud-bigtable @@ -599,12 +540,9 @@ protobuf==4.25.5 # proto-plus # substrait psutil==5.9.0 - # via - # feast (setup.py) - # ipykernel -psycopg[binary, pool]==3.1.18 - # via feast (setup.py) -psycopg-binary==3.1.18 + # via ipykernel +psycopg[binary, pool]==3.2.2 +psycopg-binary==3.2.2 # via psycopg psycopg-pool==3.2.3 # via psycopg @@ -615,14 +553,12 @@ ptyprocess==0.7.0 pure-eval==0.2.3 # via stack-data py==1.11.0 - # via feast (setup.py) py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark pyarrow==16.1.0 # via - # feast (setup.py) # dask-expr # db-dtypes # deltalake @@ -638,35 +574,28 @@ pyasn1==0.6.1 pyasn1-modules==0.4.1 # via google-auth pybindgen==0.22.1 - # via feast (setup.py) pycparser==2.22 # via cffi pydantic==2.9.2 # via - # feast (setup.py) # fastapi # great-expectations pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via - # feast (setup.py) # ipython # nbconvert # rich # sphinx pyjwt[crypto]==2.9.0 # via - # feast (setup.py) # msal # singlestoredb # snowflake-connector-python pymssql==2.3.1 - # via feast (setup.py) pymysql==1.1.1 - # via feast (setup.py) pyodbc==5.1.0 - # via feast (setup.py) pyopenssl==24.2.1 # via snowflake-connector-python pyparsing==3.1.4 @@ -676,10 +605,8 @@ pyproject-hooks==1.1.0 # build # pip-tools 
pyspark==3.5.2 - # via feast (setup.py) pytest==7.4.4 # via - # feast (setup.py) # pytest-benchmark # pytest-cov # pytest-env @@ -689,21 +616,13 @@ pytest==7.4.4 # pytest-timeout # pytest-xdist pytest-benchmark==3.4.1 - # via feast (setup.py) pytest-cov==5.0.0 - # via feast (setup.py) pytest-env==1.1.3 - # via feast (setup.py) pytest-lazy-fixture==0.6.3 - # via feast (setup.py) pytest-mock==1.10.4 - # via feast (setup.py) pytest-ordering==0.6 - # via feast (setup.py) pytest-timeout==1.4.2 - # via feast (setup.py) pytest-xdist==3.6.1 - # via feast (setup.py) python-dateutil==2.9.0.post0 # via # arrow @@ -721,7 +640,6 @@ python-dotenv==1.0.1 python-json-logger==2.0.7 # via jupyter-events python-keycloak==4.2.2 - # via feast (setup.py) pytz==2024.2 # via # great-expectations @@ -731,7 +649,6 @@ pytz==2024.2 # trino pyyaml==6.0.2 # via - # feast (setup.py) # dask # ibis-substrait # jupyter-events @@ -745,19 +662,15 @@ pyzmq==26.2.0 # jupyter-client # jupyter-server redis==4.6.0 - # via feast (setup.py) referencing==0.35.1 # via # jsonschema # jsonschema-specifications # jupyter-events regex==2024.9.11 - # via - # feast (setup.py) - # parsimonious + # via parsimonious requests==2.32.3 # via - # feast (setup.py) # azure-core # docker # google-api-core @@ -802,8 +715,7 @@ ruamel-yaml==0.17.40 # via great-expectations ruamel-yaml-clib==0.2.8 # via ruamel-yaml -ruff==0.6.5 - # via feast (setup.py) +ruff==0.6.6 s3transfer==0.10.2 # via boto3 scipy==1.13.1 @@ -818,7 +730,6 @@ setuptools==75.1.0 # pip-tools # singlestoredb singlestoredb==1.6.3 - # via feast (setup.py) six==1.16.0 # via # asttokens @@ -839,13 +750,11 @@ sniffio==1.3.1 snowballstemmer==2.2.0 # via sphinx snowflake-connector-python[pandas]==3.12.2 - # via feast (setup.py) sortedcontainers==2.4.0 # via snowflake-connector-python soupsieve==2.6 # via beautifulsoup4 sphinx==6.2.1 - # via feast (setup.py) sphinxcontrib-applehelp==2.0.0 # via sphinx sphinxcontrib-devhelp==2.0.0 @@ -859,11 +768,9 @@ 
sphinxcontrib-qthelp==2.0.0 sphinxcontrib-serializinghtml==2.0.0 # via sphinx sqlalchemy[mypy]==2.0.35 - # via feast (setup.py) sqlglot==23.12.2 # via ibis-framework sqlite-vec==0.1.1 - # via feast (setup.py) sqlparams==6.1.0 # via singlestoredb stack-data==0.6.3 @@ -873,21 +780,17 @@ starlette==0.38.5 substrait==0.23.0 # via ibis-substrait tabulate==0.9.0 - # via feast (setup.py) tenacity==8.5.0 - # via feast (setup.py) terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals testcontainers==4.4.0 - # via feast (setup.py) thriftpy2==0.5.2 # via happybase tinycss2==1.3.0 # via nbconvert toml==0.10.2 - # via feast (setup.py) tomli==2.0.1 # via # build @@ -915,9 +818,7 @@ tornado==6.4.1 # notebook # terminado tqdm==4.66.5 - # via - # feast (setup.py) - # great-expectations + # via great-expectations traitlets==5.14.3 # via # comm @@ -934,37 +835,23 @@ traitlets==5.14.3 # nbconvert # nbformat trino==0.329.0 - # via feast (setup.py) typeguard==4.3.0 - # via feast (setup.py) types-cffi==1.16.0.20240331 # via types-pyopenssl types-protobuf==3.19.22 - # via - # feast (setup.py) - # mypy-protobuf + # via mypy-protobuf types-pymysql==1.1.0.20240524 - # via feast (setup.py) types-pyopenssl==24.1.0.20240722 # via types-redis types-python-dateutil==2.9.0.20240906 - # via - # feast (setup.py) - # arrow + # via arrow types-pytz==2024.2.0.20240913 - # via feast (setup.py) types-pyyaml==6.0.12.20240917 - # via feast (setup.py) types-redis==4.6.0.20240903 - # via feast (setup.py) types-requests==2.30.0.0 - # via feast (setup.py) types-setuptools==75.1.0.20240917 - # via - # feast (setup.py) - # types-cffi + # via types-cffi types-tabulate==0.9.0.20240106 - # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests typing-extensions==4.12.2 @@ -1002,7 +889,6 @@ uri-template==1.3.0 # via jsonschema urllib3==1.26.20 # via - # feast (setup.py) # botocore # docker # elastic-transport @@ -1014,13 +900,10 @@ urllib3==1.26.20 # snowflake-connector-python # 
testcontainers uvicorn[standard]==0.30.6 - # via feast (setup.py) uvloop==0.20.0 # via uvicorn virtualenv==20.23.0 - # via - # feast (setup.py) - # pre-commit + # via pre-commit watchfiles==0.24.0 # via uvicorn wcwidth==0.2.13 diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 7ffef84b23..857d7d72bf 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -11,36 +11,30 @@ attrs==24.2.0 # jsonschema # referencing bigtree==0.21.1 - # via feast (setup.py) certifi==2024.8.30 # via requests charset-normalizer==3.3.2 # via requests click==8.1.7 # via - # feast (setup.py) # dask # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via feast (setup.py) dask[dataframe]==2024.8.0 - # via - # feast (setup.py) - # dask-expr + # via dask-expr dask-expr==1.1.10 # via dask dill==0.3.8 - # via feast (setup.py) exceptiongroup==1.2.2 # via anyio fastapi==0.115.0 - # via feast (setup.py) fsspec==2024.9.0 # via dask +greenlet==3.1.0 + # via sqlalchemy gunicorn==23.0.0 - # via feast (setup.py) h11==0.14.0 # via uvicorn httptools==0.6.1 @@ -54,9 +48,7 @@ importlib-metadata==8.5.0 # dask # typeguard jinja2==3.1.4 - # via feast (setup.py) jsonschema==4.23.0 - # via feast (setup.py) jsonschema-specifications==2023.12.1 # via jsonschema locket==1.0.0 @@ -64,14 +56,12 @@ locket==1.0.0 markupsafe==2.1.5 # via jinja2 mmh3==5.0.0 - # via feast (setup.py) mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy numpy==1.26.4 # via - # feast (setup.py) # dask # pandas # pyarrow @@ -81,31 +71,21 @@ packaging==24.1 # gunicorn pandas==2.2.2 # via - # feast (setup.py) # dask # dask-expr partd==1.4.2 # via dask prometheus-client==0.20.0 - # via feast (setup.py) protobuf==4.25.5 - # via feast (setup.py) psutil==6.0.0 - # via feast (setup.py) pyarrow==17.0.0 - # via - # feast (setup.py) - # dask-expr + # via dask-expr pydantic==2.9.2 - # via - # feast (setup.py) - # 
fastapi + # via fastapi pydantic-core==2.23.4 # via pydantic pygments==2.18.0 - # via feast (setup.py) pyjwt==2.9.0 - # via feast (setup.py) python-dateutil==2.9.0.post0 # via pandas python-dotenv==1.0.1 @@ -114,7 +94,6 @@ pytz==2024.2 # via pandas pyyaml==6.0.2 # via - # feast (setup.py) # dask # uvicorn referencing==0.35.1 @@ -122,7 +101,6 @@ referencing==0.35.1 # jsonschema # jsonschema-specifications requests==2.32.3 - # via feast (setup.py) rpds-py==0.20.0 # via # jsonschema @@ -132,15 +110,11 @@ six==1.16.0 sniffio==1.3.1 # via anyio sqlalchemy[mypy]==2.0.35 - # via feast (setup.py) starlette==0.38.5 # via fastapi tabulate==0.9.0 - # via feast (setup.py) tenacity==8.5.0 - # via feast (setup.py) toml==0.10.2 - # via feast (setup.py) tomli==2.0.1 # via mypy toolz==0.12.1 @@ -148,9 +122,7 @@ toolz==0.12.1 # dask # partd tqdm==4.66.5 - # via feast (setup.py) typeguard==4.3.0 - # via feast (setup.py) typing-extensions==4.12.2 # via # anyio @@ -167,7 +139,6 @@ tzdata==2024.1 urllib3==2.2.3 # via requests uvicorn[standard]==0.30.6 - # via feast (setup.py) uvloop==0.20.0 # via uvicorn watchfiles==0.24.0 diff --git a/setup.py b/setup.py index 5a6f18db35..c62fb8c50f 100644 --- a/setup.py +++ b/setup.py @@ -11,20 +11,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import glob import os import pathlib import re import shutil -import subprocess -import sys -from pathlib import Path -from setuptools import Command, find_packages, setup -from setuptools.command.build_ext import build_ext as _build_ext -from setuptools.command.build_py import build_py -from setuptools.command.develop import develop -from setuptools.command.install import install +from setuptools import find_packages, setup NAME = "feast" DESCRIPTION = "Python SDK for Feast" @@ -157,7 +149,6 @@ "virtualenv==20.23.0", "cryptography>=35.0,<43", "ruff>=0.3.3", - "protobuf<5", "mypy-protobuf>=3.1", "grpcio-tools>=1.56.2,<2", "grpcio-testing>=1.56.2,<2", @@ -244,107 +235,8 @@ else: use_scm_version = None -PROTO_SUBDIRS = ["core", "registry", "serving", "types", "storage"] PYTHON_CODE_PREFIX = "sdk/python" - -class BuildPythonProtosCommand(Command): - description = "Builds the proto files into Python files." - user_options = [ - ("inplace", "i", "Write generated proto files to source directory."), - ] - - def initialize_options(self): - self.python_protoc = [ - sys.executable, - "-m", - "grpc_tools.protoc", - ] # find_executable("protoc") - self.proto_folder = os.path.join(repo_root, "protos") - self.sub_folders = PROTO_SUBDIRS - self.build_lib = None - self.inplace = 0 - - def finalize_options(self): - self.set_undefined_options("build", ("build_lib", "build_lib")) - - @property - def python_folder(self): - if self.inplace: - return os.path.join( - os.path.dirname(__file__) or os.getcwd(), "sdk/python/feast/protos" - ) - - return os.path.join(self.build_lib, "feast/protos") - - def _generate_python_protos(self, path: str): - proto_files = glob.glob(os.path.join(self.proto_folder, path)) - Path(self.python_folder).mkdir(parents=True, exist_ok=True) - subprocess.check_call( - self.python_protoc - + [ - "-I", - self.proto_folder, - "--python_out", - self.python_folder, - "--grpc_python_out", - self.python_folder, - "--mypy_out", - self.python_folder, - ] - + proto_files 
- ) - - def run(self): - for sub_folder in self.sub_folders: - self._generate_python_protos(f"feast/{sub_folder}/*.proto") - # We need the __init__ files for each of the generated subdirs - # so that they are regular packages, and don't need the `--namespace-packages` flags - # when being typechecked using mypy. - with open(f"{self.python_folder}/feast/{sub_folder}/__init__.py", "w"): - pass - - with open(f"{self.python_folder}/__init__.py", "w"): - pass - with open(f"{self.python_folder}/feast/__init__.py", "w"): - pass - - for path in Path(self.python_folder).rglob("*.py"): - for folder in self.sub_folders: - # Read in the file - with open(path, "r") as file: - filedata = file.read() - - # Replace the target string - filedata = filedata.replace( - f"from feast.{folder}", f"from feast.protos.feast.{folder}" - ) - - # Write the file out again - with open(path, "w") as file: - file.write(filedata) - - -class BuildCommand(build_py): - """Custom build command.""" - - def run(self): - self.run_command("build_python_protos") - - self.run_command("build_ext") - build_py.run(self) - - -class DevelopCommand(develop): - """Custom develop command.""" - - def run(self): - self.reinitialize_command("build_python_protos", inplace=1) - self.run_command("build_python_protos") - - develop.run(self) - - setup( name=NAME, author=AUTHOR, @@ -358,8 +250,6 @@ def run(self): ), package_dir={"": PYTHON_CODE_PREFIX}, install_requires=REQUIRED, - # https://stackoverflow.com/questions/28509965/setuptools-development-requirements - # Install dev requirements with: pip install -e .[dev] extras_require={ "dev": DEV_REQUIRED, "ci": CI_REQUIRED, @@ -402,17 +292,7 @@ def run(self): entry_points={"console_scripts": ["feast=feast.cli:cli"]}, use_scm_version=use_scm_version, setup_requires=[ - # snowflake udf packages refer to conda packages, not pypi libraries. 
Conda stack is still on protobuf 4 - # So we are adding protobuf<5 as a requirement - "protobuf<5", - "grpcio-tools>=1.56.2,<2", - "mypy-protobuf>=3.1", - "pybindgen==0.22.0", - "setuptools_scm>=6.2", - ], - cmdclass={ - "build_python_protos": BuildPythonProtosCommand, - "build_py": BuildCommand, - "develop": DevelopCommand, - }, + "pybindgen==0.22.0", #TODO do we need this? + "setuptools_scm>=6.2", #TODO do we need this? + ] ) From f308572715d0593951f71bb3da5c5be6de29a2f9 Mon Sep 17 00:00:00 2001 From: Tornike Gurgenidze Date: Sun, 22 Sep 2024 22:12:23 +0400 Subject: [PATCH 81/96] fix: Import grpc only for type checking in errors.py (#4533) * fix: grpc import error Signed-off-by: tokoko * fix: loosen protobuf build requirement Signed-off-by: tokoko * fix: pin grpcio-tools version Signed-off-by: tokoko * fix: revert build-system in pyproject Signed-off-by: tokoko * fix: add manual install of setuptools and grpcio-tools Signed-off-by: tokoko * fix: remove incorrect pixi call Signed-off-by: tokoko * fix: add in-function imports in errors.py Signed-off-by: tokoko * fix: formatting Signed-off-by: tokoko * fix: merge changes from master Signed-off-by: tokoko * fix: add line endings Signed-off-by: tokoko * fix: add line endings Signed-off-by: tokoko * chore: remove proto generation from make commands Signed-off-by: tokoko --------- Signed-off-by: tokoko --- .devcontainer/devcontainer.json | 4 ++-- .github/workflows/smoke_tests.yml | 38 +++++++++++++++++++++++++++++++ Makefile | 8 +++++++ pyproject.toml | 1 - sdk/python/feast/errors.py | 18 +++++++++++---- 5 files changed, 61 insertions(+), 8 deletions(-) create mode 100644 .github/workflows/smoke_tests.yml diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 04fcbb00aa..1b15dcf882 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -17,13 +17,13 @@ "ghcr.io/devcontainers-contrib/features/maven-sdkman:2": { "jdkVersion": "11.0.24-amzn" } - }, + } // Use 
'forwardPorts' to make a list of ports inside the container available locally. // "forwardPorts": [], // Uncomment the next line to run commands after the container is created. - "postCreateCommand": "make install-python-ci-dependencies-uv-venv" + // "postCreateCommand": "make install-python-ci-dependencies-uv-venv" // Configure tool-specific properties. // "customizations": {}, diff --git a/.github/workflows/smoke_tests.yml b/.github/workflows/smoke_tests.yml new file mode 100644 index 0000000000..782f8b3f51 --- /dev/null +++ b/.github/workflows/smoke_tests.yml @@ -0,0 +1,38 @@ +name: smoke-tests + +on: [pull_request] +jobs: + unit-test-python: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: [ "3.9", "3.10", "3.11"] + os: [ ubuntu-latest ] + env: + OS: ${{ matrix.os }} + PYTHON: ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v4 + - name: Setup Python + id: setup-python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + architecture: x64 + - name: Install uv + run: | + curl -LsSf https://astral.sh/uv/install.sh | sh + - name: Get uv cache dir + id: uv-cache + run: | + echo "::set-output name=dir::$(uv cache dir)" + - name: uv cache + uses: actions/cache@v4 + with: + path: ${{ steps.uv-cache.outputs.dir }} + key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-uv-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }} + - name: Install dependencies + run: make install-python-dependencies-uv + - name: Test Imports + run: python -c "from feast import cli" \ No newline at end of file diff --git a/Makefile b/Makefile index f4b34124f7..6831a58337 100644 --- a/Makefile +++ b/Makefile @@ -37,6 +37,14 @@ build: protos build-java build-docker # Python SDK +install-python-dependencies-uv: + uv pip sync --system sdk/python/requirements/py$(PYTHON_VERSION)-requirements.txt + uv pip install --system --no-deps . 
+ +install-python-dependencies-uv-venv: + uv pip sync sdk/python/requirements/py$(PYTHON_VERSION)-requirements.txt + uv pip install --no-deps . + install-python-ci-dependencies: python -m piptools sync sdk/python/requirements/py$(PYTHON_VERSION)-ci-requirements.txt pip install --no-deps -e . diff --git a/pyproject.toml b/pyproject.toml index d772bab9ea..2a051231e2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,6 @@ requires = [ "sphinx!=4.0.0", "wheel", ] -build-backend = "setuptools.build_meta" [tool.setuptools_scm] # Including this section is comparable to supplying use_scm_version=True in setup.py. diff --git a/sdk/python/feast/errors.py b/sdk/python/feast/errors.py index 4dbb220c1e..11ce9ebc62 100644 --- a/sdk/python/feast/errors.py +++ b/sdk/python/feast/errors.py @@ -1,11 +1,13 @@ import importlib import json import logging -from typing import Any, List, Optional, Set +from typing import TYPE_CHECKING, Any, List, Optional, Set from colorama import Fore, Style from fastapi import status as HttpStatusCode -from grpc import StatusCode as GrpcStatusCode + +if TYPE_CHECKING: + from grpc import StatusCode as GrpcStatusCode from feast.field import Field @@ -15,7 +17,9 @@ class FeastError(Exception): pass - def grpc_status_code(self) -> GrpcStatusCode: + def grpc_status_code(self) -> "GrpcStatusCode": + from grpc import StatusCode as GrpcStatusCode + return GrpcStatusCode.INTERNAL def http_status_code(self) -> int: @@ -89,7 +93,9 @@ def __init__(self, ds_name: str): class FeastObjectNotFoundException(FeastError): pass - def grpc_status_code(self) -> GrpcStatusCode: + def grpc_status_code(self) -> "GrpcStatusCode": + from grpc import StatusCode as GrpcStatusCode + return GrpcStatusCode.NOT_FOUND def http_status_code(self) -> int: @@ -504,7 +510,9 @@ class FeastPermissionError(FeastError, PermissionError): def __init__(self, details: str): super().__init__(f"Permission error:\n{details}") - def grpc_status_code(self) -> GrpcStatusCode: + def 
grpc_status_code(self) -> "GrpcStatusCode": + from grpc import StatusCode as GrpcStatusCode + return GrpcStatusCode.PERMISSION_DENIED def http_status_code(self) -> int: From 42936084a7d214d65faea5359ae70eefda8d23ad Mon Sep 17 00:00:00 2001 From: Harri Lehtola <1781172+peruukki@users.noreply.github.com> Date: Sun, 22 Sep 2024 21:21:25 +0300 Subject: [PATCH 82/96] fix: Update react-router-dom to 6.3.0 and restrict its version in Feast UI (#4556) As noted in #3794, Feast UI is not compatible with latest react-router-dom versions, more precisely from 6.4.0 onwards. Limit react-router-dom version to a compatible range to avoid the runtime errors mentioned in the issue when installing peer dependencies without specifying exact versions. After setting the restricted versions, `yarn install` updated `react-router-dom` to the latest compatible version 6.3.0. It should be a minor upgrade (https://github.com/remix-run/react-router/blob/main/CHANGELOG.md#v630), and I didn't notice anything not working in the UI after it. 
Signed-off-by: Harri Lehtola --- ui/package.json | 4 ++-- ui/yarn.lock | 18 +++++++++--------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/ui/package.json b/ui/package.json index 978be97b88..bc2a71378a 100644 --- a/ui/package.json +++ b/ui/package.json @@ -20,7 +20,7 @@ "react": "^17.0.2", "react-dom": "^17.0.2", "react-query": "^3.34.12", - "react-router-dom": "6", + "react-router-dom": "<6.4.0", "react-scripts": "^5.0.0", "use-query-params": "^1.2.3", "zod": "^3.11.6" @@ -37,7 +37,7 @@ "query-string": "^7.1.1", "react-code-blocks": "^0.0.9-0", "react-query": "^3.34.12", - "react-router-dom": "6", + "react-router-dom": "<6.4.0", "react-scripts": "^5.0.0", "tslib": "^2.3.1", "use-query-params": "^1.2.3", diff --git a/ui/yarn.lock b/ui/yarn.lock index 26c833fa11..02f6634914 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -9345,18 +9345,18 @@ react-remove-scroll@^2.5.2: use-callback-ref "^1.3.0" use-sidecar "^1.1.2" -react-router-dom@6: - version "6.2.1" - resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-6.2.1.tgz#32ec81829152fbb8a7b045bf593a22eadf019bec" - integrity sha512-I6Zax+/TH/cZMDpj3/4Fl2eaNdcvoxxHoH1tYOREsQ22OKDYofGebrNm6CTPUcvLvZm63NL/vzCYdjf9CUhqmA== +react-router-dom@<6.4.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-6.3.0.tgz#a0216da813454e521905b5fa55e0e5176123f43d" + integrity sha512-uaJj7LKytRxZNQV8+RbzJWnJ8K2nPsOOEuX7aQstlMZKQT0164C+X2w6bnkqU3sjtLvpd5ojrezAyfZ1+0sStw== dependencies: history "^5.2.0" - react-router "6.2.1" + react-router "6.3.0" -react-router@6.2.1: - version "6.2.1" - resolved "https://registry.yarnpkg.com/react-router/-/react-router-6.2.1.tgz#be2a97a6006ce1d9123c28934e604faef51448a3" - integrity sha512-2fG0udBtxou9lXtK97eJeET2ki5//UWfQSl1rlJ7quwe6jrktK9FCCc8dQb5QY6jAv3jua8bBQRhhDOM/kVRsg== +react-router@6.3.0: + version "6.3.0" + resolved 
"https://registry.yarnpkg.com/react-router/-/react-router-6.3.0.tgz#3970cc64b4cb4eae0c1ea5203a80334fdd175557" + integrity sha512-7Wh1DzVQ+tlFjkeo+ujvjSqSJmkt1+8JO+T5xklPlgrh70y7ogx75ODRW0ThWhY7S+6yEDks8TYrtQe/aoboBQ== dependencies: history "^5.2.0" From 6dee6887418d7f9a9bc22f045a087c048e63eb37 Mon Sep 17 00:00:00 2001 From: Harri Lehtola <1781172+peruukki@users.noreply.github.com> Date: Sun, 22 Sep 2024 21:21:44 +0300 Subject: [PATCH 83/96] docs: Fix small typos in Feast UI projects list documentation (#4555) I initially created a `project-list.json` file, then found out it should be `projects-list.json`. Then spotted another tiny typo, so fixed it too. Signed-off-by: Harri Lehtola --- docs/reference/alpha-web-ui.md | 2 +- ui/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/alpha-web-ui.md b/docs/reference/alpha-web-ui.md index 398c8de0ae..2caeed9e2a 100644 --- a/docs/reference/alpha-web-ui.md +++ b/docs/reference/alpha-web-ui.md @@ -70,7 +70,7 @@ ReactDOM.render( ); ``` -When you start the React app, it will look for `project-list.json` to find a list of your projects. The JSON should looks something like this. +When you start the React app, it will look for `projects-list.json` to find a list of your projects. The JSON should look something like this. ```json { diff --git a/ui/README.md b/ui/README.md index 12aacd329e..852bddc296 100644 --- a/ui/README.md +++ b/ui/README.md @@ -46,7 +46,7 @@ ReactDOM.render( ); ``` -When you start the React app, it will look for `projects-list.json` to find a list of your projects. The JSON should looks something like this. +When you start the React app, it will look for `projects-list.json` to find a list of your projects. The JSON should look something like this. 
```json { From 8ffb97ad2d368fb1403a6dbba9c5b4012bb69752 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 22:23:16 +0400 Subject: [PATCH 84/96] chore: Bump webpack from 5.76.1 to 5.94.0 in /ui (#4492) Bumps [webpack](https://github.com/webpack/webpack) from 5.76.1 to 5.94.0. - [Release notes](https://github.com/webpack/webpack/releases) - [Commits](https://github.com/webpack/webpack/compare/v5.76.1...v5.94.0) --- updated-dependencies: - dependency-name: webpack dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- ui/yarn.lock | 440 ++++++++++++++++++++++++++++++++------------------- 1 file changed, 281 insertions(+), 159 deletions(-) diff --git a/ui/yarn.lock b/ui/yarn.lock index 02f6634914..1f36143b67 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -1627,16 +1627,35 @@ "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping" "^0.3.9" +"@jridgewell/gen-mapping@^0.3.5": + version "0.3.5" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz#dcce6aff74bdf6dad1a95802b69b04a2fcb1fb36" + integrity sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg== + dependencies: + "@jridgewell/set-array" "^1.2.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.24" + "@jridgewell/resolve-uri@3.1.0", "@jridgewell/resolve-uri@^3.0.3": version "3.1.0" resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6" + integrity 
sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== + "@jridgewell/set-array@^1.0.1": version "1.1.2" resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== +"@jridgewell/set-array@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.2.1.tgz#558fb6472ed16a4c850b889530e6b36438c49280" + integrity sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A== + "@jridgewell/source-map@^0.3.2": version "0.3.2" resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" @@ -1645,11 +1664,24 @@ "@jridgewell/gen-mapping" "^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" +"@jridgewell/source-map@^0.3.3": + version "0.3.6" + resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.6.tgz#9d71ca886e32502eb9362c9a74a46787c36df81a" + integrity sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ== + dependencies: + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.25" + "@jridgewell/sourcemap-codec@1.4.14", "@jridgewell/sourcemap-codec@^1.4.10": version "1.4.14" resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== +"@jridgewell/sourcemap-codec@^1.4.14": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz#3188bcb273a414b0d215fd22a58540b989b9409a" + integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ== + 
"@jridgewell/trace-mapping@^0.3.0": version "0.3.4" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.4.tgz#f6a0832dffd5b8a6aaa633b7d9f8e8e94c83a0c3" @@ -1666,6 +1698,14 @@ "@jridgewell/resolve-uri" "3.1.0" "@jridgewell/sourcemap-codec" "1.4.14" +"@jridgewell/trace-mapping@^0.3.20", "@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25": + version "0.3.25" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0" + integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + "@jridgewell/trace-mapping@^0.3.9": version "0.3.14" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed" @@ -2362,22 +2402,6 @@ "@types/d3-transition" "*" "@types/d3-zoom" "*" -"@types/eslint-scope@^3.7.3": - version "3.7.4" - resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" - integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== - dependencies: - "@types/eslint" "*" - "@types/estree" "*" - -"@types/eslint@*": - version "8.4.1" - resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.4.1.tgz#c48251553e8759db9e656de3efc846954ac32304" - integrity sha512-GE44+DNEyxxh2Kc6ro/VkIj+9ma0pO0bwv9+uHSyBrikYOHr8zYcdPvnBOp1aw8s+CjRvuSx7CyWqRrNFQ59mA== - dependencies: - "@types/estree" "*" - "@types/json-schema" "*" - "@types/eslint@^7.28.2": version "7.29.0" resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-7.29.0.tgz#e56ddc8e542815272720bb0b4ccc2aff9c3e1c78" @@ -2396,10 +2420,10 @@ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" integrity 
sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== -"@types/estree@^0.0.51": - version "0.0.51" - resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40" - integrity sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ== +"@types/estree@^1.0.5": + version "1.0.5" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.5.tgz#a6ce3e556e00fd9895dd872dd172ad0d4bd687f4" + integrity sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw== "@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": version "4.17.28" @@ -2873,125 +2897,125 @@ "@typescript-eslint/types" "5.10.1" eslint-visitor-keys "^3.0.0" -"@webassemblyjs/ast@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" - integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== +"@webassemblyjs/ast@1.12.1", "@webassemblyjs/ast@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.12.1.tgz#bb16a0e8b1914f979f45864c23819cc3e3f0d4bb" + integrity sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg== dependencies: - "@webassemblyjs/helper-numbers" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/helper-numbers" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" -"@webassemblyjs/floating-point-hex-parser@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" - integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== +"@webassemblyjs/floating-point-hex-parser@1.11.6": 
+ version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz#dacbcb95aff135c8260f77fa3b4c5fea600a6431" + integrity sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw== -"@webassemblyjs/helper-api-error@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" - integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== +"@webassemblyjs/helper-api-error@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768" + integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q== -"@webassemblyjs/helper-buffer@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" - integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== +"@webassemblyjs/helper-buffer@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz#6df20d272ea5439bf20ab3492b7fb70e9bfcb3f6" + integrity sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw== -"@webassemblyjs/helper-numbers@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" - integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== +"@webassemblyjs/helper-numbers@1.11.6": + version "1.11.6" + resolved 
"https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz#cbce5e7e0c1bd32cf4905ae444ef64cea919f1b5" + integrity sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g== dependencies: - "@webassemblyjs/floating-point-hex-parser" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" + "@webassemblyjs/floating-point-hex-parser" "1.11.6" + "@webassemblyjs/helper-api-error" "1.11.6" "@xtuc/long" "4.2.2" -"@webassemblyjs/helper-wasm-bytecode@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" - integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== +"@webassemblyjs/helper-wasm-bytecode@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9" + integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA== -"@webassemblyjs/helper-wasm-section@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" - integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== +"@webassemblyjs/helper-wasm-section@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz#3da623233ae1a60409b509a52ade9bc22a37f7bf" + integrity sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g== dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/ast" "1.12.1" + 
"@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/wasm-gen" "1.12.1" -"@webassemblyjs/ieee754@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" - integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== +"@webassemblyjs/ieee754@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz#bb665c91d0b14fffceb0e38298c329af043c6e3a" + integrity sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg== dependencies: "@xtuc/ieee754" "^1.2.0" -"@webassemblyjs/leb128@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" - integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== +"@webassemblyjs/leb128@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.6.tgz#70e60e5e82f9ac81118bc25381a0b283893240d7" + integrity sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ== dependencies: "@xtuc/long" "4.2.2" -"@webassemblyjs/utf8@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" - integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== - -"@webassemblyjs/wasm-edit@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" - integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - 
"@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/helper-wasm-section" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-opt" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - "@webassemblyjs/wast-printer" "1.11.1" - -"@webassemblyjs/wasm-gen@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" - integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" "1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wasm-opt@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" - integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - -"@webassemblyjs/wasm-parser@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" - integrity sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" "1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wast-printer@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" - 
integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== - dependencies: - "@webassemblyjs/ast" "1.11.1" +"@webassemblyjs/utf8@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a" + integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA== + +"@webassemblyjs/wasm-edit@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz#9f9f3ff52a14c980939be0ef9d5df9ebc678ae3b" + integrity sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/helper-wasm-section" "1.12.1" + "@webassemblyjs/wasm-gen" "1.12.1" + "@webassemblyjs/wasm-opt" "1.12.1" + "@webassemblyjs/wasm-parser" "1.12.1" + "@webassemblyjs/wast-printer" "1.12.1" + +"@webassemblyjs/wasm-gen@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz#a6520601da1b5700448273666a71ad0a45d78547" + integrity sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/ieee754" "1.11.6" + "@webassemblyjs/leb128" "1.11.6" + "@webassemblyjs/utf8" "1.11.6" + +"@webassemblyjs/wasm-opt@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz#9e6e81475dfcfb62dab574ac2dda38226c232bc5" + integrity sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/wasm-gen" "1.12.1" + 
"@webassemblyjs/wasm-parser" "1.12.1" + +"@webassemblyjs/wasm-parser@1.12.1", "@webassemblyjs/wasm-parser@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz#c47acb90e6f083391e3fa61d113650eea1e95937" + integrity sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-api-error" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/ieee754" "1.11.6" + "@webassemblyjs/leb128" "1.11.6" + "@webassemblyjs/utf8" "1.11.6" + +"@webassemblyjs/wast-printer@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz#bcecf661d7d1abdaf989d8341a4833e33e2b31ac" + integrity sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA== + dependencies: + "@webassemblyjs/ast" "1.12.1" "@xtuc/long" "4.2.2" "@xmldom/xmldom@^0.7.2": @@ -3030,10 +3054,10 @@ acorn-globals@^6.0.0: acorn "^7.1.1" acorn-walk "^7.1.1" -acorn-import-assertions@^1.7.6: - version "1.8.0" - resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" - integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== +acorn-import-attributes@^1.9.5: + version "1.9.5" + resolved "https://registry.yarnpkg.com/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz#7eb1557b1ba05ef18b5ed0ec67591bfab04688ef" + integrity sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ== acorn-jsx@^5.3.1, acorn-jsx@^5.3.2: version "5.3.2" @@ -3074,6 +3098,11 @@ acorn@^8.8.0: resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8" integrity 
sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== +acorn@^8.8.2: + version "8.12.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.12.1.tgz#71616bdccbe25e27a54439e0046e89ca76df2248" + integrity sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg== + address@^1.0.1, address@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/address/-/address-1.1.2.tgz#bf1116c9c758c51b7a933d296b72c221ed9428b6" @@ -3629,7 +3658,7 @@ browser-process-hrtime@^1.0.0: resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== -browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4.17.5, browserslist@^4.18.1, browserslist@^4.19.1: +browserslist@^4.0.0, browserslist@^4.16.6, browserslist@^4.17.5, browserslist@^4.18.1, browserslist@^4.19.1: version "4.19.1" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.19.1.tgz#4ac0435b35ab655896c31d53018b6dd5e9e4c9a3" integrity sha512-u2tbbG5PdKRTUoctO3NBD8FQ5HdPh1ZXPHzp1rwaa5jTc+RV9/+RlWiAIKmjRPQF+xbGM9Kklj5bZQFa2s/38A== @@ -3640,6 +3669,16 @@ browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4 node-releases "^2.0.1" picocolors "^1.0.0" +browserslist@^4.21.10: + version "4.23.3" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.23.3.tgz#debb029d3c93ebc97ffbc8d9cbb03403e227c800" + integrity sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA== + dependencies: + caniuse-lite "^1.0.30001646" + electron-to-chromium "^1.5.4" + node-releases "^2.0.18" + update-browserslist-db "^1.1.0" + bser@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" @@ -3736,6 +3775,11 
@@ caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001286, caniuse-lite@^1.0.30001297, can resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001416.tgz" integrity sha512-06wzzdAkCPZO+Qm4e/eNghZBDfVNDsCgw33T27OwBH9unE9S478OYw//Q2L7Npf/zBzs7rjZOszIFQkwQKAEqA== +caniuse-lite@^1.0.30001646: + version "1.0.30001657" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001657.tgz#29fd504bffca719d1c6b63a1f6f840be1973a660" + integrity sha512-DPbJAlP8/BAXy3IgiWmZKItubb3TYGP0WscQQlVGIfT4s/YlFYVuJgyOsQNP7rJRChx/qdMeLJQJP0Sgg2yjNA== + case-sensitive-paths-webpack-plugin@^2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" @@ -4997,6 +5041,11 @@ electron-to-chromium@^1.4.17: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.57.tgz#2b2766df76ac8dbc0a1d41249bc5684a31849892" integrity sha512-FNC+P5K1n6pF+M0zIK+gFCoXcJhhzDViL3DRIGy2Fv5PohuSES1JHR7T+GlwxSxlzx4yYbsuzCZvHxcBSRCIOw== +electron-to-chromium@^1.5.4: + version "1.5.15" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.15.tgz#3c969a29b03682db7a3032283ec8be6e75effe50" + integrity sha512-Z4rIDoImwEJW+YYKnPul4DzqsWVqYetYVN3XqDmRpgV0mjz0hYTaeeh+8/9CL1bk3AHYmF4freW/NTiVoXA2gA== + emittery@^0.8.1: version "0.8.1" resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" @@ -5027,10 +5076,10 @@ encodeurl@~1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== -enhanced-resolve@^5.10.0: - version "5.12.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.12.0.tgz#300e1c90228f5b570c4d35babf263f6da7155634" - integrity 
sha512-QHTXI/sZQmko1cbDoNAa3mJ5qhWUUNAq3vR0/YiD379fWQrcfuoX1+HW2S0MTt7XmoPLapdaDKUtelUSPic7hQ== +enhanced-resolve@^5.17.1: + version "5.17.1" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz#67bfbbcc2f81d511be77d686a90267ef7f898a15" + integrity sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg== dependencies: graceful-fs "^4.2.4" tapable "^2.2.0" @@ -5085,10 +5134,10 @@ es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.19.1: string.prototype.trimstart "^1.0.4" unbox-primitive "^1.0.1" -es-module-lexer@^0.9.0: - version "0.9.3" - resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" - integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== +es-module-lexer@^1.2.1: + version "1.5.4" + resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.5.4.tgz#a8efec3a3da991e60efa6b633a7cad6ab8d26b78" + integrity sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw== es-to-primitive@^1.2.1: version "1.2.1" @@ -5104,6 +5153,11 @@ escalade@^3.1.1: resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== +escalade@^3.1.2: + version "3.2.0" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5" + integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== + escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" @@ -5970,7 +6024,7 @@ good-listener@^1.2.2: dependencies: delegate "^3.1.2" -graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, 
graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: +graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6: version "4.2.9" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.9.tgz#041b05df45755e587a24942279b9d113146e1c96" integrity sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ== @@ -5980,6 +6034,11 @@ graceful-fs@^4.1.9: resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== +graceful-fs@^4.2.11: + version "4.2.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + graphql@^15.5.1: version "15.8.0" resolved "https://registry.yarnpkg.com/graphql/-/graphql-15.8.0.tgz#33410e96b012fa3bdb1091cc99a94769db212b38" @@ -7228,6 +7287,15 @@ jest-worker@^27.0.2, jest-worker@^27.3.1, jest-worker@^27.4.1, jest-worker@^27.4 merge-stream "^2.0.0" supports-color "^8.0.0" +jest-worker@^27.4.5: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" + integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + jest@^27.4.3: version "27.4.7" resolved "https://registry.yarnpkg.com/jest/-/jest-27.4.7.tgz#87f74b9026a1592f2da05b4d258e57505f28eca4" @@ -7983,6 +8051,11 @@ node-releases@^2.0.1: resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.1.tgz#3d1d395f204f1f2f29a54358b9fb678765ad2fc5" integrity sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA== 
+node-releases@^2.0.18: + version "2.0.18" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.18.tgz#f010e8d35e2fe8d6b2944f03f70213ecedc4ca3f" + integrity sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g== + normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" @@ -8406,6 +8479,11 @@ picocolors@^1.0.0: resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== +picocolors@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.0.tgz#5358b76a78cde483ba5cef6a9dc9671440b27d59" + integrity sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw== + picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.0: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" @@ -9945,7 +10023,7 @@ schema-utils@^2.6.5: ajv "^6.12.4" ajv-keywords "^3.5.2" -schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: +schema-utils@^3.0.0, schema-utils@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== @@ -9954,6 +10032,15 @@ schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: ajv "^6.12.5" ajv-keywords "^3.5.2" +schema-utils@^3.2.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.3.0.tgz#f50a88877c3c01652a15b622ae9e9795df7a60fe" + integrity 
sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + schema-utils@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7" @@ -10031,6 +10118,13 @@ serialize-javascript@^6.0.0: dependencies: randombytes "^2.1.0" +serialize-javascript@^6.0.1: + version "6.0.2" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.2.tgz#defa1e055c83bf6d59ea805d8da862254eb6a6c2" + integrity sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g== + dependencies: + randombytes "^2.1.0" + serialize-query-params@^1.3.5: version "1.3.6" resolved "https://registry.yarnpkg.com/serialize-query-params/-/serialize-query-params-1.3.6.tgz#5dd5225db85ce747fe6fbc4897628504faafec6d" @@ -10626,7 +10720,7 @@ terminal-link@^2.0.0: ansi-escapes "^4.2.1" supports-hyperlinks "^2.0.0" -terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: +terser-webpack-plugin@^5.2.5: version "5.3.0" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.0.tgz#21641326486ecf91d8054161c816e464435bae9f" integrity sha512-LPIisi3Ol4chwAaPP8toUJ3L4qCM1G0wao7L3qNv57Drezxj6+VEyySpPw4B1HSO2Eg/hDY/MNF5XihCAoqnsQ== @@ -10637,6 +10731,17 @@ terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: source-map "^0.6.1" terser "^5.7.2" +terser-webpack-plugin@^5.3.10: + version "5.3.10" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz#904f4c9193c6fd2a03f693a2150c62a92f40d199" + integrity sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w== + dependencies: + "@jridgewell/trace-mapping" "^0.3.20" + jest-worker "^27.4.5" + schema-utils "^3.1.1" + serialize-javascript "^6.0.1" + terser "^5.26.0" + terser@^5.0.0, 
terser@^5.10.0, terser@^5.7.2: version "5.14.2" resolved "https://registry.yarnpkg.com/terser/-/terser-5.14.2.tgz#9ac9f22b06994d736174f4091aa368db896f1c10" @@ -10647,6 +10752,16 @@ terser@^5.0.0, terser@^5.10.0, terser@^5.7.2: commander "^2.20.0" source-map-support "~0.5.20" +terser@^5.26.0: + version "5.31.6" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.31.6.tgz#c63858a0f0703988d0266a82fcbf2d7ba76422b1" + integrity sha512-PQ4DAriWzKj+qgehQ7LK5bQqCFNMmlhjR2PFFLuqGCpuCAauxemVBWwWOxo3UIwWQx8+Pr61Df++r76wDmkQBg== + dependencies: + "@jridgewell/source-map" "^0.3.3" + acorn "^8.8.2" + commander "^2.20.0" + source-map-support "~0.5.20" + test-exclude@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" @@ -11062,6 +11177,14 @@ upath@^1.2.0: resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== +update-browserslist-db@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz#7ca61c0d8650766090728046e416a8cde682859e" + integrity sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ== + dependencies: + escalade "^3.1.2" + picocolors "^1.0.1" + uri-js@^4.2.2: version "4.4.1" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" @@ -11205,10 +11328,10 @@ walker@^1.0.7: dependencies: makeerror "1.0.12" -watchpack@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" - integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== +watchpack@^2.4.1: + version "2.4.2" + resolved 
"https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.2.tgz#2feeaed67412e7c33184e5a79ca738fbd38564da" + integrity sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw== dependencies: glob-to-regexp "^0.4.1" graceful-fs "^4.1.2" @@ -11328,33 +11451,32 @@ webpack-sources@^3.2.3: integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== webpack@^5.64.4: - version "5.76.1" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.76.1.tgz#7773de017e988bccb0f13c7d75ec245f377d295c" - integrity sha512-4+YIK4Abzv8172/SGqObnUjaIHjLEuUasz9EwQj/9xmPPkYJy2Mh03Q/lJfSD3YLzbxy5FeTq5Uw0323Oh6SJQ== - dependencies: - "@types/eslint-scope" "^3.7.3" - "@types/estree" "^0.0.51" - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/wasm-edit" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" + version "5.94.0" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.94.0.tgz#77a6089c716e7ab90c1c67574a28da518a20970f" + integrity sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg== + dependencies: + "@types/estree" "^1.0.5" + "@webassemblyjs/ast" "^1.12.1" + "@webassemblyjs/wasm-edit" "^1.12.1" + "@webassemblyjs/wasm-parser" "^1.12.1" acorn "^8.7.1" - acorn-import-assertions "^1.7.6" - browserslist "^4.14.5" + acorn-import-attributes "^1.9.5" + browserslist "^4.21.10" chrome-trace-event "^1.0.2" - enhanced-resolve "^5.10.0" - es-module-lexer "^0.9.0" + enhanced-resolve "^5.17.1" + es-module-lexer "^1.2.1" eslint-scope "5.1.1" events "^3.2.0" glob-to-regexp "^0.4.1" - graceful-fs "^4.2.9" + graceful-fs "^4.2.11" json-parse-even-better-errors "^2.3.1" loader-runner "^4.2.0" mime-types "^2.1.27" neo-async "^2.6.2" - schema-utils "^3.1.0" + schema-utils "^3.2.0" tapable "^2.1.1" - terser-webpack-plugin "^5.1.3" - watchpack "^2.4.0" + terser-webpack-plugin "^5.3.10" + watchpack "^2.4.1" webpack-sources "^3.2.3" websocket-driver@>=0.5.1, 
websocket-driver@^0.7.4: From cb2aa2c310203f395da027c7ddf8cda0e59f8359 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 22 Sep 2024 22:23:53 +0400 Subject: [PATCH 85/96] chore: Bump micromatch from 4.0.5 to 4.0.8 in /sdk/python/feast/ui (#4448) Bumps [micromatch](https://github.com/micromatch/micromatch) from 4.0.5 to 4.0.8. - [Release notes](https://github.com/micromatch/micromatch/releases) - [Changelog](https://github.com/micromatch/micromatch/blob/4.0.8/CHANGELOG.md) - [Commits](https://github.com/micromatch/micromatch/compare/4.0.5...4.0.8) --- updated-dependencies: - dependency-name: micromatch dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/feast/ui/yarn.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/sdk/python/feast/ui/yarn.lock b/sdk/python/feast/ui/yarn.lock index 452b6f9f31..1132d6ed0f 100644 --- a/sdk/python/feast/ui/yarn.lock +++ b/sdk/python/feast/ui/yarn.lock @@ -3593,7 +3593,7 @@ brace-expansion@^2.0.1: dependencies: balanced-match "^1.0.0" -braces@^3.0.2, braces@~3.0.2: +braces@^3.0.3, braces@~3.0.2: version "3.0.3" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789" integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA== @@ -7436,11 +7436,11 @@ methods@~1.1.2: integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= micromatch@^4.0.2, micromatch@^4.0.4: - version "4.0.5" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" - integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + version "4.0.8" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.8.tgz#d66fa18f3a47076789320b9b1af32bd86d9fa202" + integrity 
sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA== dependencies: - braces "^3.0.2" + braces "^3.0.3" picomatch "^2.3.1" microseconds@0.2.0: From 792fb7325661a1dcbd1100b681ccdd58f396a1c4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 09:51:38 +0400 Subject: [PATCH 86/96] chore: Bump express from 4.19.2 to 4.21.0 in /sdk/python/feast/ui (#4522) Bumps [express](https://github.com/expressjs/express) from 4.19.2 to 4.21.0. - [Release notes](https://github.com/expressjs/express/releases) - [Changelog](https://github.com/expressjs/express/blob/4.21.0/History.md) - [Commits](https://github.com/expressjs/express/compare/4.19.2...4.21.0) --- updated-dependencies: - dependency-name: express dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/feast/ui/yarn.lock | 194 ++++++++++++++++++++++++++-------- 1 file changed, 150 insertions(+), 44 deletions(-) diff --git a/sdk/python/feast/ui/yarn.lock b/sdk/python/feast/ui/yarn.lock index 1132d6ed0f..d9e9ce03c8 100644 --- a/sdk/python/feast/ui/yarn.lock +++ b/sdk/python/feast/ui/yarn.lock @@ -3545,10 +3545,10 @@ bluebird@^3.5.5: resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== -body-parser@1.20.2: - version "1.20.2" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd" - integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA== +body-parser@1.20.3: + version "1.20.3" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.3.tgz#1953431221c6fb5cd63c4b36d53fab0928e548c6" + integrity 
sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g== dependencies: bytes "3.1.2" content-type "~1.0.5" @@ -3558,7 +3558,7 @@ body-parser@1.20.2: http-errors "2.0.0" iconv-lite "0.4.24" on-finished "2.4.1" - qs "6.11.0" + qs "6.13.0" raw-body "2.5.2" type-is "~1.6.18" unpipe "1.0.0" @@ -3675,6 +3675,17 @@ call-bind@^1.0.0, call-bind@^1.0.2: function-bind "^1.1.1" get-intrinsic "^1.0.2" +call-bind@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.7.tgz#06016599c40c56498c18769d2730be242b6fa3b9" + integrity sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + set-function-length "^1.2.1" + callsites@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" @@ -4632,6 +4643,15 @@ default-gateway@^6.0.3: dependencies: execa "^5.0.0" +define-data-property@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.4.tgz#894dc141bb7d3060ae4366f6a0107e68fbe48c5e" + integrity sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + gopd "^1.0.1" + define-lazy-prop@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" @@ -4912,6 +4932,11 @@ encodeurl@~1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= +encodeurl@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-2.0.0.tgz#7b8ea898077d7e409d3ac45474ea38eaf0857a58" + integrity 
sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg== + enhanced-resolve@^5.17.1: version "5.17.1" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz#67bfbbcc2f81d511be77d686a90267ef7f898a15" @@ -4968,6 +4993,18 @@ es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19 string.prototype.trimstart "^1.0.5" unbox-primitive "^1.0.2" +es-define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845" + integrity sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ== + dependencies: + get-intrinsic "^1.2.4" + +es-errors@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f" + integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== + es-module-lexer@^1.2.1: version "1.5.4" resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.5.4.tgz#a8efec3a3da991e60efa6b633a7cad6ab8d26b78" @@ -5330,36 +5367,36 @@ expect@^27.5.1: jest-message-util "^27.5.1" express@^4.17.3: - version "4.19.2" - resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465" - integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q== + version "4.21.0" + resolved "https://registry.yarnpkg.com/express/-/express-4.21.0.tgz#d57cb706d49623d4ac27833f1cbc466b668eb915" + integrity sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng== dependencies: accepts "~1.3.8" array-flatten "1.1.1" - body-parser "1.20.2" + body-parser "1.20.3" content-disposition "0.5.4" content-type "~1.0.4" cookie "0.6.0" cookie-signature "1.0.6" debug "2.6.9" depd "2.0.0" - encodeurl "~1.0.2" + 
encodeurl "~2.0.0" escape-html "~1.0.3" etag "~1.8.1" - finalhandler "1.2.0" + finalhandler "1.3.1" fresh "0.5.2" http-errors "2.0.0" - merge-descriptors "1.0.1" + merge-descriptors "1.0.3" methods "~1.1.2" on-finished "2.4.1" parseurl "~1.3.3" - path-to-regexp "0.1.7" + path-to-regexp "0.1.10" proxy-addr "~2.0.7" - qs "6.11.0" + qs "6.13.0" range-parser "~1.2.1" safe-buffer "5.2.1" - send "0.18.0" - serve-static "1.15.0" + send "0.19.0" + serve-static "1.16.2" setprototypeof "1.2.0" statuses "2.0.1" type-is "~1.6.18" @@ -5471,13 +5508,13 @@ filter-obj@^1.1.0: resolved "https://registry.yarnpkg.com/filter-obj/-/filter-obj-1.1.0.tgz#9b311112bc6c6127a16e016c6c5d7f19e0805c5b" integrity sha1-mzERErxsYSehbgFsbF1/GeCAXFs= -finalhandler@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" - integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== +finalhandler@1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.3.1.tgz#0c575f1d1d324ddd1da35ad7ece3df7d19088019" + integrity sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ== dependencies: debug "2.6.9" - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" on-finished "2.4.1" parseurl "~1.3.3" @@ -5645,6 +5682,11 @@ function-bind@^1.1.1: resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + function.prototype.name@^1.1.5: version "1.1.5" resolved 
"https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" @@ -5684,6 +5726,17 @@ get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1: has "^1.0.3" has-symbols "^1.0.1" +get-intrinsic@^1.1.3, get-intrinsic@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd" + integrity sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ== + dependencies: + es-errors "^1.3.0" + function-bind "^1.1.2" + has-proto "^1.0.1" + has-symbols "^1.0.3" + hasown "^2.0.0" + get-nonce@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/get-nonce/-/get-nonce-1.0.1.tgz#fdf3f0278073820d2ce9426c18f07481b1e0cdf3" @@ -5790,6 +5843,13 @@ good-listener@^1.2.2: dependencies: delegate "^3.1.2" +gopd@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" + integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== + dependencies: + get-intrinsic "^1.1.3" + graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: version "4.2.10" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" @@ -5839,6 +5899,18 @@ has-property-descriptors@^1.0.0: dependencies: get-intrinsic "^1.1.1" +has-property-descriptors@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz#963ed7d071dc7bf5f084c5bfbe0d1b6222586854" + integrity sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg== + dependencies: + es-define-property "^1.0.0" + +has-proto@^1.0.1: + version "1.0.3" + resolved 
"https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.3.tgz#b31ddfe9b0e6e9914536a6ab286426d0214f77fd" + integrity sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q== + has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" @@ -5858,6 +5930,13 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" +hasown@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" + integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== + dependencies: + function-bind "^1.1.2" + hast-to-hyperscript@^9.0.0: version "9.0.1" resolved "https://registry.yarnpkg.com/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz#9b67fd188e4c81e8ad66f803855334173920218d" @@ -7415,10 +7494,10 @@ memfs@^3.4.3: resolved "https://registry.yarnpkg.com/memoize-one/-/memoize-one-5.2.1.tgz#8337aa3c4335581839ec01c3d594090cebe8f00e" integrity sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q== -merge-descriptors@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" - integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= +merge-descriptors@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.3.tgz#d80319a65f3c7935351e5cfdac8f9318504dbed5" + integrity sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ== merge-stream@^2.0.0: version "2.0.0" @@ -7671,6 +7750,11 @@ object-inspect@^1.12.0, object-inspect@^1.9.0: resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.0.tgz#6e2c120e868fd1fd18cb4f18c31741d0d6e776f0" integrity 
sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g== +object-inspect@^1.13.1: + version "1.13.2" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.2.tgz#dea0088467fb991e67af4058147a24824a3043ff" + integrity sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g== + object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" @@ -7958,10 +8042,10 @@ path-parse@^1.0.6, path-parse@^1.0.7: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= +path-to-regexp@0.1.10: + version "0.1.10" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.10.tgz#67e9108c5c0551b9e5326064387de4763c4d5f8b" + integrity sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w== path-type@^4.0.0: version "4.0.0" @@ -8682,12 +8766,12 @@ q@^1.1.2: resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= -qs@6.11.0: - version "6.11.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" - integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== +qs@6.13.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.13.0.tgz#6ca3bd58439f7e245655798997787b0d88a51906" + integrity sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg== 
dependencies: - side-channel "^1.0.4" + side-channel "^1.0.6" query-string@^7.1.1: version "7.1.1" @@ -9533,10 +9617,10 @@ semver@^7.3.2, semver@^7.3.5: dependencies: lru-cache "^6.0.0" -send@0.18.0: - version "0.18.0" - resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" - integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== +send@0.19.0: + version "0.19.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.19.0.tgz#bbc5a388c8ea6c048967049dbeac0e4a3f09d7f8" + integrity sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw== dependencies: debug "2.6.9" depd "2.0.0" @@ -9591,15 +9675,27 @@ serve-index@^1.9.1: mime-types "~2.1.17" parseurl "~1.3.2" -serve-static@1.15.0: - version "1.15.0" - resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" - integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== +serve-static@1.16.2: + version "1.16.2" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.16.2.tgz#b6a5343da47f6bdd2673848bf45754941e803296" + integrity sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw== dependencies: - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" parseurl "~1.3.3" - send "0.18.0" + send "0.19.0" + +set-function-length@^1.2.1: + version "1.2.2" + resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.2.tgz#aac72314198eaed975cf77b2c3b6b880695e5449" + integrity sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg== + dependencies: + define-data-property "^1.1.4" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + gopd "^1.0.1" + has-property-descriptors "^1.0.2" setprototypeof@1.1.0: version "1.1.0" @@ -9642,6 +9738,16 @@ 
side-channel@^1.0.4: get-intrinsic "^1.0.2" object-inspect "^1.9.0" +side-channel@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.6.tgz#abd25fb7cd24baf45466406b1096b7831c9215f2" + integrity sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA== + dependencies: + call-bind "^1.0.7" + es-errors "^1.3.0" + get-intrinsic "^1.2.4" + object-inspect "^1.13.1" + signal-exit@^3.0.2, signal-exit@^3.0.3: version "3.0.7" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" From d5ef57e0b52ba3439ddf69dbc818d57f15280e14 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 06:16:04 -0400 Subject: [PATCH 87/96] chore: Bump cryptography from 42.0.8 to 43.0.1 in /sdk/python/requirements (#4557) --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.11-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 0a46400007..6268de6ae1 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -114,7 +114,7 @@ comm==0.2.2 # ipywidgets coverage[toml]==7.6.1 # via pytest-cov -cryptography==42.0.8 +cryptography==43.0.1 # via # azure-identity # azure-storage-blob diff --git a/sdk/python/requirements/py3.11-ci-requirements.txt b/sdk/python/requirements/py3.11-ci-requirements.txt index 26ced829c6..946d4e0519 100644 --- a/sdk/python/requirements/py3.11-ci-requirements.txt +++ b/sdk/python/requirements/py3.11-ci-requirements.txt @@ -112,7 +112,7 @@ comm==0.2.2 # ipywidgets coverage[toml]==7.6.1 # via pytest-cov -cryptography==42.0.8 +cryptography==43.0.1 # via # azure-identity # 
azure-storage-blob diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index c8c92969fc..5ea2c58819 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -116,7 +116,7 @@ comm==0.2.2 # ipywidgets coverage[toml]==7.6.1 # via pytest-cov -cryptography==42.0.8 +cryptography==43.0.1 # via # azure-identity # azure-storage-blob From 07954960c5501e2ecc1f1285ddf4aa68f9ac880b Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Mon, 23 Sep 2024 14:21:43 -0400 Subject: [PATCH 88/96] feat: Updating FeatureViewProjection and OnDemandFeatureView to add batch_source and entities (#4530) * feat: Updating protos for Projections to include more info Signed-off-by: Francisco Javier Arceo * adding unit test Signed-off-by: Francisco Javier Arceo * adding type checking where batch source is already serialized into protobuf Signed-off-by: Francisco Javier Arceo * almost got everything working and type validation behaving Signed-off-by: Francisco Javier Arceo * cleaned up and have tests behaving Signed-off-by: Francisco Javier Arceo * removed comment Signed-off-by: Francisco Javier Arceo * updated FeatureViewProjection batch_source serialization Signed-off-by: Francisco Javier Arceo * trying to debug a test Signed-off-by: Francisco Javier Arceo * handling snowflake issue, cant confirm why it is happening so just going to put a workaround Signed-off-by: Francisco Javier Arceo * linter Signed-off-by: Francisco Javier Arceo * trying to handle it correctly Signed-off-by: Francisco Javier Arceo * handling the else case for from_feature_view_definition Signed-off-by: Francisco Javier Arceo * adding print Signed-off-by: Francisco Javier Arceo * adding test of issue Signed-off-by: Francisco Javier Arceo * think i got everything working now Signed-off-by: Francisco Javier Arceo * removing print Signed-off-by: Francisco Javier Arceo --------- Signed-off-by: Francisco 
Javier Arceo --- protos/feast/core/FeatureViewProjection.proto | 10 ++ protos/feast/core/OnDemandFeatureView.proto | 6 + sdk/python/feast/base_feature_view.py | 12 +- sdk/python/feast/feature_view.py | 5 +- sdk/python/feast/feature_view_projection.py | 86 +++++++++++- sdk/python/feast/inference.py | 110 +++++++++++++-- .../infra/materialization/snowflake_engine.py | 8 +- sdk/python/feast/on_demand_feature_view.py | 129 ++++++++++++++++-- sdk/python/feast/utils.py | 4 +- .../materialization/test_snowflake.py | 2 + .../test_local_feature_store.py | 3 +- sdk/python/tests/unit/test_feature_views.py | 22 ++- 12 files changed, 363 insertions(+), 34 deletions(-) diff --git a/protos/feast/core/FeatureViewProjection.proto b/protos/feast/core/FeatureViewProjection.proto index 36d17632e7..b0e697b656 100644 --- a/protos/feast/core/FeatureViewProjection.proto +++ b/protos/feast/core/FeatureViewProjection.proto @@ -6,6 +6,7 @@ option java_outer_classname = "FeatureReferenceProto"; option java_package = "feast.proto.core"; import "feast/core/Feature.proto"; +import "feast/core/DataSource.proto"; // A projection to be applied on top of a FeatureView. @@ -22,4 +23,13 @@ message FeatureViewProjection { // Map for entity join_key overrides of feature data entity join_key to entity data join_key map join_key_map = 4; + + string timestamp_field = 5; + string date_partition_column = 6; + string created_timestamp_column = 7; + // Batch/Offline DataSource where this view can retrieve offline feature data. + DataSource batch_source = 8; + // Streaming DataSource from where this view can consume "online" feature data. + DataSource stream_source = 9; + } diff --git a/protos/feast/core/OnDemandFeatureView.proto b/protos/feast/core/OnDemandFeatureView.proto index 7a5fec1650..c915e32e16 100644 --- a/protos/feast/core/OnDemandFeatureView.proto +++ b/protos/feast/core/OnDemandFeatureView.proto @@ -63,6 +63,12 @@ message OnDemandFeatureViewSpec { // Owner of the on demand feature view. 
string owner = 8; string mode = 11; + bool write_to_online_store = 12; + + // List of names of entities associated with this feature view. + repeated string entities = 13; + // List of specifications for each entity defined as part of this feature view. + repeated FeatureSpecV2 entity_columns = 14; } message OnDemandFeatureViewMeta { diff --git a/sdk/python/feast/base_feature_view.py b/sdk/python/feast/base_feature_view.py index 31140e2899..d7dc2237bd 100644 --- a/sdk/python/feast/base_feature_view.py +++ b/sdk/python/feast/base_feature_view.py @@ -18,6 +18,7 @@ from google.protobuf.json_format import MessageToJson from google.protobuf.message import Message +from feast.data_source import DataSource from feast.feature_view_projection import FeatureViewProjection from feast.field import Field from feast.protos.feast.core.FeatureView_pb2 import FeatureView as FeatureViewProto @@ -65,6 +66,7 @@ def __init__( description: str = "", tags: Optional[Dict[str, str]] = None, owner: str = "", + source: Optional[DataSource] = None, ): """ Creates a BaseFeatureView object. @@ -76,7 +78,8 @@ def __init__( tags (optional): A dictionary of key-value pairs to store arbitrary metadata. owner (optional): The owner of the base feature view, typically the email of the primary maintainer. - + source (optional): The source of data for this group of features. May be a stream source, or a batch source. + If a stream source, the source should contain a batch_source for backfills & batch materialization. Raises: ValueError: A field mapping conflicts with an Entity or a Feature. 
""" @@ -90,6 +93,9 @@ def __init__( self.created_timestamp = None self.last_updated_timestamp = None + if source: + self.source = source + @property @abstractmethod def proto_class(self) -> Type[Message]: @@ -156,6 +162,10 @@ def __eq__(self, other): or self.tags != other.tags or self.owner != other.owner ): + # This is meant to ignore the File Source change to Push Source + if isinstance(type(self.source), type(other.source)): + if self.source != other.source: + return False return False return True diff --git a/sdk/python/feast/feature_view.py b/sdk/python/feast/feature_view.py index dd01078e20..33ea761158 100644 --- a/sdk/python/feast/feature_view.py +++ b/sdk/python/feast/feature_view.py @@ -206,6 +206,7 @@ def __init__( description=description, tags=tags, owner=owner, + source=source, ) self.online = online self.materialization_intervals = [] @@ -429,7 +430,9 @@ def from_proto(cls, feature_view_proto: FeatureViewProto): # FeatureViewProjections are not saved in the FeatureView proto. # Create the default projection. 
- feature_view.projection = FeatureViewProjection.from_definition(feature_view) + feature_view.projection = FeatureViewProjection.from_feature_view_definition( + feature_view + ) if feature_view_proto.meta.HasField("created_timestamp"): feature_view.created_timestamp = ( diff --git a/sdk/python/feast/feature_view_projection.py b/sdk/python/feast/feature_view_projection.py index ff5b1b6e06..70415e9ed3 100644 --- a/sdk/python/feast/feature_view_projection.py +++ b/sdk/python/feast/feature_view_projection.py @@ -2,6 +2,7 @@ from attr import dataclass +from feast.data_source import DataSource from feast.field import Field from feast.protos.feast.core.FeatureViewProjection_pb2 import ( FeatureViewProjection as FeatureViewProjectionProto, @@ -9,6 +10,7 @@ if TYPE_CHECKING: from feast.base_feature_view import BaseFeatureView + from feast.feature_view import FeatureView @dataclass @@ -27,6 +29,13 @@ class FeatureViewProjection: is not ready to be projected, i.e. still needs to go through feature inference. join_key_map: A map to modify join key columns during retrieval of this feature view projection. + timestamp_field: The timestamp field of the feature view projection. + date_partition_column: The date partition column of the feature view projection. + created_timestamp_column: The created timestamp column of the feature view projection. + batch_source: The batch source of data where this group of features + is stored. This is optional ONLY if a push source is specified as the + stream_source, since push sources contain their own batch sources. 
+ """ name: str @@ -34,15 +43,29 @@ class FeatureViewProjection: desired_features: List[str] features: List[Field] join_key_map: Dict[str, str] = {} + timestamp_field: Optional[str] = None + date_partition_column: Optional[str] = None + created_timestamp_column: Optional[str] = None + batch_source: Optional[DataSource] = None def name_to_use(self): return self.name_alias or self.name def to_proto(self) -> FeatureViewProjectionProto: + batch_source = None + if getattr(self, "batch_source", None): + if isinstance(self.batch_source, DataSource): + batch_source = self.batch_source.to_proto() + else: + batch_source = self.batch_source feature_reference_proto = FeatureViewProjectionProto( feature_view_name=self.name, feature_view_name_alias=self.name_alias or "", join_key_map=self.join_key_map, + timestamp_field=self.timestamp_field or "", + date_partition_column=self.date_partition_column or "", + created_timestamp_column=self.created_timestamp_column or "", + batch_source=batch_source, ) for feature in self.features: feature_reference_proto.feature_columns.append(feature.to_proto()) @@ -50,27 +73,76 @@ def to_proto(self) -> FeatureViewProjectionProto: return feature_reference_proto @staticmethod - def from_proto(proto: FeatureViewProjectionProto): + def from_proto(proto: FeatureViewProjectionProto) -> "FeatureViewProjection": + batch_source = ( + DataSource.from_proto(proto.batch_source) + if str(getattr(proto, "batch_source")) + else None + ) feature_view_projection = FeatureViewProjection( name=proto.feature_view_name, name_alias=proto.feature_view_name_alias or None, features=[], join_key_map=dict(proto.join_key_map), desired_features=[], + timestamp_field=proto.timestamp_field or None, + date_partition_column=proto.date_partition_column or None, + created_timestamp_column=proto.created_timestamp_column or None, + batch_source=batch_source, ) for feature_column in proto.feature_columns: feature_view_projection.features.append(Field.from_proto(feature_column)) return 
feature_view_projection + @staticmethod + def from_feature_view_definition(feature_view: "FeatureView"): + # TODO need to implement this for StreamFeatureViews + if getattr(feature_view, "batch_source", None): + return FeatureViewProjection( + name=feature_view.name, + name_alias=None, + features=feature_view.features, + desired_features=[], + timestamp_field=feature_view.batch_source.created_timestamp_column + or None, + created_timestamp_column=feature_view.batch_source.created_timestamp_column + or None, + date_partition_column=feature_view.batch_source.date_partition_column + or None, + batch_source=feature_view.batch_source or None, + ) + else: + return FeatureViewProjection( + name=feature_view.name, + name_alias=None, + features=feature_view.features, + desired_features=[], + ) + @staticmethod def from_definition(base_feature_view: "BaseFeatureView"): - return FeatureViewProjection( - name=base_feature_view.name, - name_alias=None, - features=base_feature_view.features, - desired_features=[], - ) + if getattr(base_feature_view, "batch_source", None): + return FeatureViewProjection( + name=base_feature_view.name, + name_alias=None, + features=base_feature_view.features, + desired_features=[], + timestamp_field=base_feature_view.batch_source.created_timestamp_column # type:ignore[attr-defined] + or None, + created_timestamp_column=base_feature_view.batch_source.created_timestamp_column # type:ignore[attr-defined] + or None, + date_partition_column=base_feature_view.batch_source.date_partition_column # type:ignore[attr-defined] + or None, + batch_source=base_feature_view.batch_source or None, # type:ignore[attr-defined] + ) + else: + return FeatureViewProjection( + name=base_feature_view.name, + name_alias=None, + features=base_feature_view.features, + desired_features=[], + ) def get_feature(self, feature_name: str) -> Field: try: diff --git a/sdk/python/feast/inference.py b/sdk/python/feast/inference.py index 28a170172c..b9fb9b694d 100644 --- 
a/sdk/python/feast/inference.py +++ b/sdk/python/feast/inference.py @@ -13,6 +13,7 @@ from feast.infra.offline_stores.file_source import FileSource from feast.infra.offline_stores.redshift_source import RedshiftSource from feast.infra.offline_stores.snowflake_source import SnowflakeSource +from feast.on_demand_feature_view import OnDemandFeatureView from feast.repo_config import RepoConfig from feast.stream_feature_view import StreamFeatureView from feast.types import String @@ -94,7 +95,7 @@ def update_data_sources_with_inferred_event_timestamp_col( def update_feature_views_with_inferred_features_and_entities( - fvs: Union[List[FeatureView], List[StreamFeatureView]], + fvs: Union[List[FeatureView], List[StreamFeatureView], List[OnDemandFeatureView]], entities: List[Entity], config: RepoConfig, ) -> None: @@ -121,35 +122,37 @@ def update_feature_views_with_inferred_features_and_entities( join_keys = set( [ entity_name_to_join_key_map.get(entity_name) - for entity_name in fv.entities + for entity_name in getattr(fv, "entities", []) ] ) # Fields whose names match a join key are considered to be entity columns; all # other fields are considered to be feature columns. + entity_columns = fv.entity_columns if fv.entity_columns else [] for field in fv.schema: if field.name in join_keys: # Do not override a preexisting field with the same name. if field.name not in [ - entity_column.name for entity_column in fv.entity_columns + entity_column.name for entity_column in entity_columns ]: - fv.entity_columns.append(field) + entity_columns.append(field) else: if field.name not in [feature.name for feature in fv.features]: fv.features.append(field) # Respect the `value_type` attribute of the entity, if it is specified. - for entity_name in fv.entities: + fv_entities = getattr(fv, "entities", []) + for entity_name in fv_entities: entity = entity_name_to_entity_map.get(entity_name) # pass when entity does not exist. 
Entityless feature view case if entity is None: continue if ( entity.join_key - not in [entity_column.name for entity_column in fv.entity_columns] + not in [entity_column.name for entity_column in entity_columns] and entity.value_type != ValueType.UNKNOWN ): - fv.entity_columns.append( + entity_columns.append( Field( name=entity.join_key, dtype=from_value_type(entity.value_type), @@ -158,12 +161,13 @@ def update_feature_views_with_inferred_features_and_entities( # Infer a dummy entity column for entityless feature views. if ( - len(fv.entities) == 1 - and fv.entities[0] == DUMMY_ENTITY_NAME - and not fv.entity_columns + len(fv_entities) == 1 + and fv_entities[0] == DUMMY_ENTITY_NAME + and not entity_columns ): - fv.entity_columns.append(Field(name=DUMMY_ENTITY_ID, dtype=String)) + entity_columns.append(Field(name=DUMMY_ENTITY_ID, dtype=String)) + fv.entity_columns = entity_columns # Run inference for entity columns if there are fewer entity fields than expected. run_inference_for_entities = len(fv.entity_columns) < len(join_keys) @@ -186,7 +190,7 @@ def update_feature_views_with_inferred_features_and_entities( def _infer_features_and_entities( - fv: FeatureView, + fv: Union[FeatureView, OnDemandFeatureView], join_keys: Set[Optional[str]], run_inference_for_features, config, @@ -200,6 +204,11 @@ def _infer_features_and_entities( run_inference_for_features: Whether to run inference for features. config: The config for the current feature store. 
""" + if isinstance(fv, OnDemandFeatureView): + return _infer_on_demand_features_and_entities( + fv, join_keys, run_inference_for_features, config + ) + columns_to_exclude = { fv.batch_source.timestamp_field, fv.batch_source.created_timestamp_column, @@ -246,3 +255,80 @@ def _infer_features_and_entities( ) if field.name not in [feature.name for feature in fv.features]: fv.features.append(field) + + +def _infer_on_demand_features_and_entities( + fv: OnDemandFeatureView, + join_keys: Set[Optional[str]], + run_inference_for_features, + config, +) -> None: + """ + Updates the specific feature in place with inferred features and entities. + Args: + fv: The feature view on which to run inference. + join_keys: The set of join keys for the feature view's entities. + run_inference_for_features: Whether to run inference for features. + config: The config for the current feature store. + """ + entity_columns: list[Field] = [] + columns_to_exclude = set() + for ( + source_feature_view_name, + source_feature_view, + ) in fv.source_feature_view_projections.items(): + columns_to_exclude.add(source_feature_view.timestamp_field) + columns_to_exclude.add(source_feature_view.created_timestamp_column) + + batch_source = getattr(source_feature_view, "batch_source") + batch_field_mapping = getattr(batch_source or None, "field_mapping") + if batch_field_mapping: + for ( + original_col, + mapped_col, + ) in batch_field_mapping.items(): + if mapped_col in columns_to_exclude: + columns_to_exclude.remove(mapped_col) + columns_to_exclude.add(original_col) + + table_column_names_and_types = ( + batch_source.get_table_column_names_and_types(config) + ) + for col_name, col_datatype in table_column_names_and_types: + if col_name in columns_to_exclude: + continue + elif col_name in join_keys: + field = Field( + name=col_name, + dtype=from_value_type( + batch_source.source_datatype_to_feast_value_type()(col_datatype) + ), + ) + if field.name not in [ + entity_column.name + for entity_column in 
entity_columns + if hasattr(entity_column, "name") + ]: + entity_columns.append(field) + elif not re.match( + "^__|__$", col_name + ): # double underscores often signal an internal-use column + if run_inference_for_features: + feature_name = ( + batch_field_mapping[col_name] + if col_name in batch_field_mapping + else col_name + ) + field = Field( + name=feature_name, + dtype=from_value_type( + batch_source.source_datatype_to_feast_value_type()( + col_datatype + ) + ), + ) + if field.name not in [ + feature.name for feature in source_feature_view.features + ]: + source_feature_view.features.append(field) + fv.entity_columns = entity_columns diff --git a/sdk/python/feast/infra/materialization/snowflake_engine.py b/sdk/python/feast/infra/materialization/snowflake_engine.py index e8b0857e5d..600e1b20d8 100644 --- a/sdk/python/feast/infra/materialization/snowflake_engine.py +++ b/sdk/python/feast/infra/materialization/snowflake_engine.py @@ -285,8 +285,14 @@ def _materialize_one( fv_latest_values_sql = offline_job.to_sql() + if feature_view.entity_columns: + first_feature_view_entity_name = getattr( + feature_view.entity_columns[0], "name", None + ) + else: + first_feature_view_entity_name = None if ( - feature_view.entity_columns[0].name == DUMMY_ENTITY_ID + first_feature_view_entity_name == DUMMY_ENTITY_ID ): # entityless Feature View's placeholder entity entities_to_write = 1 else: diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index 47fcf29926..1b75d23ed4 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -3,7 +3,7 @@ import inspect import warnings from types import FunctionType -from typing import Any, Optional, Union, get_type_hints +from typing import Any, List, Optional, Union, get_type_hints import dill import pandas as pd @@ -12,8 +12,9 @@ from feast.base_feature_view import BaseFeatureView from feast.data_source import RequestSource +from 
feast.entity import Entity from feast.errors import RegistryInferenceFailure, SpecifiedFeaturesNotPresentError -from feast.feature_view import FeatureView +from feast.feature_view import DUMMY_ENTITY_NAME, FeatureView from feast.feature_view_projection import FeatureViewProjection from feast.field import Field, from_value_type from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( @@ -61,7 +62,8 @@ class OnDemandFeatureView(BaseFeatureView): """ name: str - features: list[Field] + entities: Optional[List[str]] + features: List[Field] source_feature_view_projections: dict[str, FeatureViewProjection] source_request_sources: dict[str, RequestSource] feature_transformation: Union[ @@ -71,13 +73,15 @@ class OnDemandFeatureView(BaseFeatureView): description: str tags: dict[str, str] owner: str + write_to_online_store: bool def __init__( # noqa: C901 self, *, name: str, - schema: list[Field], - sources: list[ + entities: Optional[List[Entity]] = None, + schema: Optional[List[Field]] = None, + sources: List[ Union[ FeatureView, RequestSource, @@ -93,12 +97,14 @@ def __init__( # noqa: C901 description: str = "", tags: Optional[dict[str, str]] = None, owner: str = "", + write_to_online_store: bool = False, ): """ Creates an OnDemandFeatureView object. Args: name: The unique name of the on demand feature view. + entities (optional): The list of names of entities that this feature view is associated with. schema: The list of features in the output of the on demand feature view, after the transformation has been applied. sources: A map from input source names to the actual input sources, which may be @@ -113,6 +119,8 @@ def __init__( # noqa: C901 tags (optional): A dictionary of key-value pairs to store arbitrary metadata. owner (optional): The owner of the on demand feature view, typically the email of the primary maintainer. 
+ write_to_online_store (optional): A boolean that indicates whether to write the on demand feature view to + the online store for faster retrieval. """ super().__init__( name=name, @@ -122,6 +130,8 @@ def __init__( # noqa: C901 owner=owner, ) + schema = schema or [] + self.entities = [e.name for e in entities] if entities else [DUMMY_ENTITY_NAME] self.mode = mode.lower() if self.mode not in {"python", "pandas", "substrait"}: @@ -152,12 +162,48 @@ def __init__( # noqa: C901 self.source_request_sources[odfv_source.name] = odfv_source elif isinstance(odfv_source, FeatureViewProjection): self.source_feature_view_projections[odfv_source.name] = odfv_source + else: self.source_feature_view_projections[odfv_source.name] = ( odfv_source.projection ) + features: List[Field] = [] + self.entity_columns = [] + + join_keys: List[str] = [] + if entities: + for entity in entities: + join_keys.append(entity.join_key) + # Ensure that entities have unique join keys. + if len(set(join_keys)) < len(join_keys): + raise ValueError( + "A feature view should not have entities that share a join key." + ) + + for field in schema: + if field.name in join_keys: + self.entity_columns.append(field) + + # Confirm that the inferred type matches the specified entity type, if it exists. + matching_entities = ( + [e for e in entities if e.join_key == field.name] + if entities + else [] + ) + assert len(matching_entities) == 1 + entity = matching_entities[0] + if entity.value_type != ValueType.UNKNOWN: + if from_value_type(entity.value_type) != field.dtype: + raise ValueError( + f"Entity {entity.name} has type {entity.value_type}, which does not match the inferred type {field.dtype}." 
+ ) + else: + features.append(field) + + self.features = features self.feature_transformation = feature_transformation + self.write_to_online_store = write_to_online_store @property def proto_class(self) -> type[OnDemandFeatureViewProto]: @@ -174,8 +220,13 @@ def __copy__(self): description=self.description, tags=self.tags, owner=self.owner, + write_to_online_store=self.write_to_online_store, ) + fv.entities = self.entities + fv.features = self.features fv.projection = copy.copy(self.projection) + fv.entity_columns = copy.copy(self.entity_columns) + return fv def __eq__(self, other): @@ -184,20 +235,46 @@ def __eq__(self, other): "Comparisons should only involve OnDemandFeatureView class objects." ) - if not super().__eq__(other): - return False - + # Note, no longer evaluating the base feature view layer as ODFVs can have + # multiple datasources and a base_feature_view only has one source + # though maybe that shouldn't be true if ( self.source_feature_view_projections != other.source_feature_view_projections + or self.description != other.description or self.source_request_sources != other.source_request_sources or self.mode != other.mode or self.feature_transformation != other.feature_transformation + or self.write_to_online_store != other.write_to_online_store + or sorted(self.entity_columns) != sorted(other.entity_columns) ): return False return True + @property + def join_keys(self) -> List[str]: + """Returns a list of all the join keys.""" + return [entity.name for entity in self.entity_columns] + + @property + def schema(self) -> List[Field]: + return list(set(self.entity_columns + self.features)) + + def ensure_valid(self): + """ + Validates the state of this feature view locally. + + Raises: + ValueError: The On Demand feature view does not have an entity when trying to use write_to_online_store. 
+ """ + super().ensure_valid() + + if self.write_to_online_store and not self.entities: + raise ValueError( + "On Demand Feature views require an entity if write_to_online_store=True" + ) + def __hash__(self): return super().__hash__() @@ -216,7 +293,7 @@ def to_proto(self) -> OnDemandFeatureViewProto: sources = {} for source_name, fv_projection in self.source_feature_view_projections.items(): sources[source_name] = OnDemandSource( - feature_view_projection=fv_projection.to_proto() + feature_view_projection=fv_projection.to_proto(), ) for ( source_name, @@ -239,6 +316,10 @@ def to_proto(self) -> OnDemandFeatureViewProto: ) spec = OnDemandFeatureViewSpec( name=self.name, + entities=self.entities if self.entities else None, + entity_columns=[ + field.to_proto() for field in self.entity_columns if self.entity_columns + ], features=[feature.to_proto() for feature in self.features], sources=sources, feature_transformation=feature_transformation, @@ -246,6 +327,7 @@ def to_proto(self) -> OnDemandFeatureViewProto: description=self.description, tags=self.tags, owner=self.owner, + write_to_online_store=self.write_to_online_store, ) return OnDemandFeatureViewProto(spec=spec, meta=meta) @@ -335,6 +417,24 @@ def from_proto( else: raise ValueError("At least one transformation type needs to be provided") + if hasattr(on_demand_feature_view_proto.spec, "write_to_online_store"): + write_to_online_store = ( + on_demand_feature_view_proto.spec.write_to_online_store + ) + else: + write_to_online_store = False + if hasattr(on_demand_feature_view_proto.spec, "entities"): + entities = list(on_demand_feature_view_proto.spec.entities) + else: + entities = [] + if hasattr(on_demand_feature_view_proto.spec, "entity_columns"): + entity_columns = [ + Field.from_proto(field_proto) + for field_proto in on_demand_feature_view_proto.spec.entity_columns + ] + else: + entity_columns = [] + on_demand_feature_view_obj = cls( name=on_demand_feature_view_proto.spec.name, schema=[ @@ -350,8 +450,12 @@ 
def from_proto( description=on_demand_feature_view_proto.spec.description, tags=dict(on_demand_feature_view_proto.spec.tags), owner=on_demand_feature_view_proto.spec.owner, + write_to_online_store=write_to_online_store, ) + on_demand_feature_view_obj.entities = entities + on_demand_feature_view_obj.entity_columns = entity_columns + # FeatureViewProjections are not saved in the OnDemandFeatureView proto. # Create the default projection. on_demand_feature_view_obj.projection = FeatureViewProjection.from_definition( @@ -595,6 +699,7 @@ def get_requested_odfvs( def on_demand_feature_view( *, + entities: Optional[List[Entity]] = None, schema: list[Field], sources: list[ Union[ @@ -607,11 +712,13 @@ def on_demand_feature_view( description: str = "", tags: Optional[dict[str, str]] = None, owner: str = "", + write_to_online_store: bool = False, ): """ Creates an OnDemandFeatureView object with the given user function as udf. Args: + entities (Optional): The list of names of entities that this feature view is associated with. schema: The list of features in the output of the on demand feature view, after the transformation has been applied. sources: A map from input source names to the actual input sources, which may be @@ -622,6 +729,8 @@ def on_demand_feature_view( tags (optional): A dictionary of key-value pairs to store arbitrary metadata. owner (optional): The owner of the on demand feature view, typically the email of the primary maintainer. + write_to_online_store (optional): A boolean that indicates whether to write the on demand feature view to + the online store for faster retrieval. 
""" def mainify(obj) -> None: @@ -664,6 +773,8 @@ def decorator(user_function): description=description, tags=tags, owner=owner, + write_to_online_store=write_to_online_store, + entities=entities, ) functools.update_wrapper( wrapper=on_demand_feature_view_obj, wrapped=user_function diff --git a/sdk/python/feast/utils.py b/sdk/python/feast/utils.py index 2ab73ae089..8a9f1fadae 100644 --- a/sdk/python/feast/utils.py +++ b/sdk/python/feast/utils.py @@ -344,7 +344,9 @@ def _group_feature_refs( # on demand view to on demand view proto on_demand_view_index = { - view.projection.name_to_use(): view for view in all_on_demand_feature_views + view.projection.name_to_use(): view + for view in all_on_demand_feature_views + if view.projection } # view name to feature names diff --git a/sdk/python/tests/integration/materialization/test_snowflake.py b/sdk/python/tests/integration/materialization/test_snowflake.py index f53c3ca753..dc9d684ab5 100644 --- a/sdk/python/tests/integration/materialization/test_snowflake.py +++ b/sdk/python/tests/integration/materialization/test_snowflake.py @@ -221,9 +221,11 @@ def test_snowflake_materialization_entityless_fv(): ttl=timedelta(weeks=52), source=ds, ) + assert overall_stats_fv.entity_columns == [] try: fs.apply([overall_stats_fv, driver]) + assert overall_stats_fv.entity_columns != [] # materialization is run in two steps and # we use timestamp from generated dataframe as a split point diff --git a/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py b/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py index 5ed16a8430..cc48295b20 100644 --- a/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py +++ b/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py @@ -209,8 +209,9 @@ def test_apply_feature_view_with_inline_batch_source( test_feature_store.apply([entity, driver_fv]) fvs = test_feature_store.list_batch_feature_views() + dfv = fvs[0] assert len(fvs) == 1 - assert fvs[0] == 
driver_fv + assert dfv == driver_fv ds = test_feature_store.list_data_sources() assert len(ds) == 1 diff --git a/sdk/python/tests/unit/test_feature_views.py b/sdk/python/tests/unit/test_feature_views.py index 981968df0d..ce789c706c 100644 --- a/sdk/python/tests/unit/test_feature_views.py +++ b/sdk/python/tests/unit/test_feature_views.py @@ -111,7 +111,27 @@ def test_hash(): assert len(s4) == 3 -# TODO(felixwang9817): Add tests for proto conversion. +def test_proto_conversion(): + file_source = FileSource(name="my-file-source", path="test.parquet") + feature_view_1 = FeatureView( + name="my-feature-view", + entities=[], + schema=[ + Field(name="feature1", dtype=Float32), + Field(name="feature2", dtype=Float32), + ], + source=file_source, + ) + + feature_view_proto = feature_view_1.to_proto() + assert ( + feature_view_proto.spec.name == "my-feature-view" + and feature_view_proto.spec.batch_source.file_options.uri == "test.parquet" + and feature_view_proto.spec.batch_source.name == "my-file-source" + and feature_view_proto.spec.batch_source.type == 1 + ) + + # TODO(felixwang9817): Add tests for field mapping logic. 
From e675cbdaf638c6208cb09a41fe8ed34216c9b87f Mon Sep 17 00:00:00 2001 From: Daniele Martinoli <86618610+dmartinol@users.noreply.github.com> Date: Mon, 23 Sep 2024 22:38:43 +0200 Subject: [PATCH 89/96] docs: Example to install feast on local computer using Kind (#4528) * Simple deployment on Kind Signed-off-by: Daniele Martinoli * updated notebooks output Signed-off-by: Daniele Martinoli * added missing README Signed-off-by: Daniele Martinoli * typo Signed-off-by: Daniele Martinoli --------- Signed-off-by: Daniele Martinoli --- examples/kind-quickstart/01-Install.ipynb | 932 ++++++++++++++++++ examples/kind-quickstart/02-Client.ipynb | 606 ++++++++++++ examples/kind-quickstart/03-Uninstall.ipynb | 120 +++ examples/kind-quickstart/README.md | 7 + examples/kind-quickstart/client/__init__.py | 0 .../kind-quickstart/client/feature_store.yaml | 14 + examples/kind-quickstart/init-job.yaml | 31 + .../kind-quickstart/postgres/postgres.yaml | 83 ++ examples/kind-quickstart/src/__init__.py | 0 examples/kind-quickstart/src/utils.py | 12 + 10 files changed, 1805 insertions(+) create mode 100644 examples/kind-quickstart/01-Install.ipynb create mode 100644 examples/kind-quickstart/02-Client.ipynb create mode 100644 examples/kind-quickstart/03-Uninstall.ipynb create mode 100644 examples/kind-quickstart/README.md create mode 100644 examples/kind-quickstart/client/__init__.py create mode 100644 examples/kind-quickstart/client/feature_store.yaml create mode 100644 examples/kind-quickstart/init-job.yaml create mode 100644 examples/kind-quickstart/postgres/postgres.yaml create mode 100644 examples/kind-quickstart/src/__init__.py create mode 100644 examples/kind-quickstart/src/utils.py diff --git a/examples/kind-quickstart/01-Install.ipynb b/examples/kind-quickstart/01-Install.ipynb new file mode 100644 index 0000000000..e5ece97fc2 --- /dev/null +++ b/examples/kind-quickstart/01-Install.ipynb @@ -0,0 +1,932 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + 
"metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: feast==0.40.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (0.40.1)\n", + "Requirement already satisfied: click<9.0.0,>=7.0.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (8.1.7)\n", + "Requirement already satisfied: colorama<1,>=0.3.9 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (0.4.6)\n", + "Requirement already satisfied: dill~=0.3.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (0.3.8)\n", + "Requirement already satisfied: mypy-protobuf>=3.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (3.6.0)\n", + "Requirement already satisfied: Jinja2<4,>=2 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (3.1.4)\n", + "Requirement already satisfied: jsonschema in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (4.22.0)\n", + "Requirement already satisfied: mmh3 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (4.1.0)\n", + "Requirement already satisfied: numpy<2,>=1.22 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (1.26.4)\n", + "Requirement already satisfied: pandas<3,>=1.4.3 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (2.2.2)\n", + "Requirement already satisfied: protobuf<5.0.0,>=4.24.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (4.25.4)\n", + "Requirement already satisfied: pyarrow>=4 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (15.0.2)\n", + "Requirement already satisfied: pydantic>=2.0.0 in 
/Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (2.7.4)\n", + "Requirement already satisfied: pygments<3,>=2.12.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (2.18.0)\n", + "Requirement already satisfied: PyYAML<7,>=5.4.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (6.0.1)\n", + "Requirement already satisfied: requests in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (2.32.3)\n", + "Requirement already satisfied: SQLAlchemy>1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from SQLAlchemy[mypy]>1->feast==0.40.1) (2.0.34)\n", + "Requirement already satisfied: tabulate<1,>=0.8.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (0.9.0)\n", + "Requirement already satisfied: tenacity<9,>=7 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (8.5.0)\n", + "Requirement already satisfied: toml<1,>=0.10.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (0.10.2)\n", + "Requirement already satisfied: tqdm<5,>=4 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (4.66.4)\n", + "Requirement already satisfied: typeguard>=4.0.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (4.3.0)\n", + "Requirement already satisfied: fastapi>=0.68.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (0.114.2)\n", + "Requirement already satisfied: uvicorn<1,>=0.14.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.30.6)\n", + "Requirement already satisfied: dask>=2024.2.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from 
dask[dataframe]>=2024.2.1->feast==0.40.1) (2024.6.2)\n", + "Requirement already satisfied: gunicorn in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (23.0.0)\n", + "Requirement already satisfied: cloudpickle>=1.5.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (3.0.0)\n", + "Requirement already satisfied: fsspec>=2021.09.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (2023.12.2)\n", + "Requirement already satisfied: packaging>=20.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (24.1)\n", + "Requirement already satisfied: partd>=1.2.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (1.4.2)\n", + "Requirement already satisfied: toolz>=0.10.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (0.12.1)\n", + "Requirement already satisfied: importlib-metadata>=4.13.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (8.0.0)\n", + "Requirement already satisfied: dask-expr<1.2,>=1.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from dask[dataframe]>=2024.2.1->feast==0.40.1) (1.1.6)\n", + "Requirement already satisfied: starlette<0.39.0,>=0.37.2 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (0.38.5)\n", + "Requirement already satisfied: typing-extensions>=4.8.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (4.12.2)\n", + "Requirement already satisfied: MarkupSafe>=2.0 
in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from Jinja2<4,>=2->feast==0.40.1) (2.1.5)\n", + "Requirement already satisfied: types-protobuf>=4.24 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from mypy-protobuf>=3.1->feast==0.40.1) (5.27.0.20240626)\n", + "Requirement already satisfied: python-dateutil>=2.8.2 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from pandas<3,>=1.4.3->feast==0.40.1) (2.9.0.post0)\n", + "Requirement already satisfied: pytz>=2020.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from pandas<3,>=1.4.3->feast==0.40.1) (2024.1)\n", + "Requirement already satisfied: tzdata>=2022.7 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from pandas<3,>=1.4.3->feast==0.40.1) (2024.1)\n", + "Requirement already satisfied: annotated-types>=0.4.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from pydantic>=2.0.0->feast==0.40.1) (0.7.0)\n", + "Requirement already satisfied: pydantic-core==2.18.4 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from pydantic>=2.0.0->feast==0.40.1) (2.18.4)\n", + "Requirement already satisfied: mypy>=0.910 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from SQLAlchemy[mypy]>1->feast==0.40.1) (1.10.1)\n", + "Requirement already satisfied: h11>=0.8 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from uvicorn<1,>=0.14.0->uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.14.0)\n", + "Requirement already satisfied: httptools>=0.5.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.6.1)\n", + "Requirement already satisfied: python-dotenv>=0.13 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (1.0.1)\n", + "Requirement already satisfied: uvloop!=0.15.0,!=0.15.1,>=0.14.0 
in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.19.0)\n", + "Requirement already satisfied: watchfiles>=0.13 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.22.0)\n", + "Requirement already satisfied: websockets>=10.4 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (12.0)\n", + "Requirement already satisfied: attrs>=22.2.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from jsonschema->feast==0.40.1) (23.2.0)\n", + "Requirement already satisfied: jsonschema-specifications>=2023.03.6 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from jsonschema->feast==0.40.1) (2023.12.1)\n", + "Requirement already satisfied: referencing>=0.28.4 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from jsonschema->feast==0.40.1) (0.35.1)\n", + "Requirement already satisfied: rpds-py>=0.7.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from jsonschema->feast==0.40.1) (0.18.1)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from requests->feast==0.40.1) (3.3.2)\n", + "Requirement already satisfied: idna<4,>=2.5 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from requests->feast==0.40.1) (3.7)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from requests->feast==0.40.1) (1.26.19)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from requests->feast==0.40.1) (2024.7.4)\n", + "Requirement already satisfied: zipp>=0.5 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from 
importlib-metadata>=4.13.0->dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (3.19.1)\n", + "Requirement already satisfied: mypy-extensions>=1.0.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from mypy>=0.910->SQLAlchemy[mypy]>1->feast==0.40.1) (1.0.0)\n", + "Requirement already satisfied: locket in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from partd>=1.2.0->dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (1.0.0)\n", + "Requirement already satisfied: six>=1.5 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from python-dateutil>=2.8.2->pandas<3,>=1.4.3->feast==0.40.1) (1.16.0)\n", + "Requirement already satisfied: anyio<5,>=3.4.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from starlette<0.39.0,>=0.37.2->fastapi>=0.68.0->feast==0.40.1) (4.4.0)\n", + "Requirement already satisfied: sniffio>=1.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from anyio<5,>=3.4.0->starlette<0.39.0,>=0.37.2->fastapi>=0.68.0->feast==0.40.1) (1.3.1)\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "# WE MUST ENSURE PYTHON CONSISTENCY BETWEEN NOTEBOOK AND FEAST SERVERS\n", + "# LAUNCH THIS NOTEBOOK FROM A CLEAN PYTHON ENVIRONMENT >3.9\n", + "%pip install feast==0.40.1" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Install Feast on Kind\n", + "## Objective\n", + "\n", + "Provide a reference implementation of a runbook to deploy a Feast development environment on a Kubernetes cluster using [Kind](https://kind.sigs.k8s.io/docs/user/quick-start).\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Prerequisites\n", + "* [Kind](https://kind.sigs.k8s.io/) cluster and a Docker runtime container\n", + "* [kubectl](https://kubernetes.io/docs/tasks/tools/#kubectl) Kubernetes CLI tool.\n", + "* [Helm](https://helm.sh/) Kubernetes package
manager.\n", + "* [yq](https://github.com/mikefarah/yq) YAML processor." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Install Prerequisites\n", + "The following commands install and configure all the prerequisites on MacOS environment. You can find the\n", + "equivalent instructions on the offical documentation pages:\n", + "* Install Kind and Docker runtime (e.g. [Colima](https://github.com/abiosoft/colima)).\n", + "* Create Kind cluster named `feast`.\n", + "* Install and setup the `kubectl` context.\n", + "* `Helm`.\n", + "* `yq`.\n", + "```bash\n", + "brew install colima\n", + "colima start\n", + "brew install kind\n", + "kind create cluster --name feast\n", + "kind start\n", + "brew install helm\n", + "brew install kubectl\n", + "kubectl config use-context kind-feast\n", + "brew install yq\n", + "```\n", + "\n", + "Additionally, we create a `feast` namespace and use it as the default for the `kubectl` CLI:" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "namespace/feast created\n", + "Context \"kind-feast\" modified.\n" + ] + } + ], + "source": [ + "!kubectl create ns feast\n", + "!kubectl config set-context --current --namespace feast" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Validate the cluster setup:" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME STATUS AGE\n", + "default Active 26h\n", + "feast Active 3s\n", + "kube-node-lease Active 26h\n", + "kube-public Active 26h\n", + "kube-system Active 26h\n", + "local-path-storage Active 26h\n" + ] + } + ], + "source": [ + "!kubectl get ns" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Deployment Architecture\n", + "The primary objective of this runbook is to guide the deployment of Feast 
services on a Kubernetes Kind cluster, using the default `postgres` template to set up a basic feature store.\n", + "\n", + "> πŸš€ We will also add instructions to repeat the example with a custom project, for a personalized experience.\n", + "\n", + "In this notebook, we will deploy a distributed topology of Feast services, which includes:\n", + "\n", + "* `Registry Server`: Exposes endpoints at the [default port 6570](https://github.com/feast-dev/feast/blob/89bc5512572130510dd18690309b5a392aaf73b1/sdk/python/feast/constants.py#L39) and handles metadata storage for feature definitions.\n", + "* `Online Store Server`: Exposes endpoints at the [default port 6566](https://github.com/feast-dev/feast/blob/4a6b663f80bc91d6de35ed2ec428d34811d17a18/sdk/python/feast/cli.py#L871-L872). This service uses the `Registry Server` to query metadata and is responsible for low-latency serving of features.\n", + "* `Offline Store Server`: Exposes endpoints at the [default port 8815](https://github.com/feast-dev/feast/blob/89bc5512572130510dd18690309b5a392aaf73b1/sdk/python/feast/constants.py#L42). It uses the `Registry Server` to query metadata and provides access to batch data for historical feature retrieval.\n", + "\n", + "Each service is backed by a `PostgreSQL` database, which is also deployed within the same Kind cluster.\n", + "\n", + "Finally, port forwarding will be configured to expose these Feast services locally. This will allow a local client, implemented in the accompanying client notebook, to interact with the deployed services." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Install PostgreSQL\n", + "Install the [reference deployment](./postgres/postgres.yaml) to install and configure a simple PostgreSQL database." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "secret/postgres-secret created\n", + "persistentvolume/postgres-volume created\n", + "persistentvolumeclaim/postgres-volume-claim created\n", + "deployment.apps/postgres created\n", + "service/postgres created\n", + "deployment.apps/postgres condition met\n" + ] + } + ], + "source": [ + "!kubectl apply -f postgres/postgres.yaml\n", + "!kubectl wait --for=condition=available deployment/postgres --timeout=2m" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME READY STATUS RESTARTS AGE\n", + "postgres-76c8d94d6-pngvm 1/1 Running 0 8s\n", + "NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\n", + "postgres NodePort 10.96.231.4 5432:30565/TCP 8s\n" + ] + } + ], + "source": [ + "!kubectl get pods\n", + "!kubectl get svc" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create the feature store project\n", + "Use the `feast init` command to create the default project.\n", + "\n", + "We also start port forwarding for the `postgres` service to populate the tables with default data." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "> πŸš€ If you want to use a custom configuration, replace it under the sample/feature_repo folder and skip this section" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Port-forwarding postgres with process ID: 9611\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Forwarding from 127.0.0.1:5432 -> 5432\n", + "Forwarding from [::1]:5432 -> 5432\n" + ] + } + ], + "source": [ + "from src.utils import port_forward\n", + "psql_process = port_forward(\"postgres\", 5432, 5432)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We are going to emulate the `feast init -t postgres sample` command using Python code. This is needed to mock the request of additional\n", + "parameters to configure the DB connection and also request the upload of example data to Postgres tables." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Handling connection for 5432\n", + "Handling connection for 5432\n", + "\n", + "Creating a new Feast repository in \u001b[1m\u001b[32m/Users/dmartino/projects/AI/feast/feast/examples/kind-quickstart/sample\u001b[0m.\n", + "\n" + ] + } + ], + "source": [ + "from feast.repo_operations import init_repo\n", + "from unittest import mock\n", + "from feast.templates.postgres.bootstrap import bootstrap\n", + "\n", + "project_directory = \"sample\"\n", + "template = \"postgres\"\n", + "\n", + "with mock.patch(\"click.prompt\", side_effect=[\"localhost\", \"5432\", \"feast\", \"public\", \"feast\", \"feast\"]):\n", + " with mock.patch(\"click.confirm\", side_effect=[True]):\n", + " init_repo(project_directory, template)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Verify that the DB includes the expected tables with pre-populated data." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " List of relations\n", + " Schema | Name | Type | Owner \n", + "--------+---------------------------+-------+-------\n", + " public | feast_driver_hourly_stats | table | feast\n", + "(1 row)\n", + "\n", + " count \n", + "-------\n", + " 1807\n", + "(1 row)\n", + "\n" + ] + } + ], + "source": [ + "!PSQL_POD=$(kubectl get pods -l app=postgres -oname) && kubectl exec $PSQL_POD -- psql -h localhost -U feast feast -c '\\dt'\n", + "!PSQL_POD=$(kubectl get pods -l app=postgres -oname) && kubectl exec $PSQL_POD -- psql -h localhost -U feast feast -c 'select count(*) from feast_driver_hourly_stats'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally, let's stop port forwarding." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " 501 10392 6947 0 1:12PM ttys051 0:00.12 /bin/zsh -c ps -ef | grep port-forward\n", + " 501 10394 10392 0 1:12PM ttys051 0:00.00 grep port-forward\n" + ] + } + ], + "source": [ + "psql_process.terminate()\n", + "!ps -ef | grep port-forward" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Generate server configurations\n", + "Each server has its own configuration that we generate from the one initialized before.\n", + "\n", + "We use `yq` to manipulate the original configuration and generate the server specifics.\n", + "\n", + "Note: from now on, we assume that the Feast service names will be as follows:\n", + "* For `Registry Server`: `registry-server`\n", + "* For `Online Store`: `online-server`\n", + "* For `Offline Store`: `offline-server`\n", + "\n", + "> πŸš€ If you used different service names, replace the `host` parameter in the following `yq` commands." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "env: FEATURE_REPO_DIR=sample/feature_repo\n", + "project: sample\n", + "provider: local\n", + "registry:\n", + " registry_type: sql\n", + " path: postgresql://feast:feast@postgres:5432/feast\n", + " cache_ttl_seconds: 60\n", + " sqlalchemy_config_kwargs:\n", + " echo: false\n", + " pool_pre_ping: true\n", + "online_store:\n", + " type: postgres\n", + " host: postgres\n", + " port: 5432\n", + " database: feast\n", + " db_schema: public\n", + " user: feast\n", + " password: feast\n", + "offline_store:\n", + " type: postgres\n", + " host: postgres\n", + " port: 5432\n", + " database: feast\n", + " db_schema: public\n", + " user: feast\n", + " password: feast\n", + "entity_key_serialization_version: 2\n" + ] + } + ], + "source": [ + "%env FEATURE_REPO_DIR=sample/feature_repo\n", + "# Adjust the database host to match the postgres service\n", + "!yq -i '.registry.path=\"postgresql://feast:feast@postgres:5432/feast\"' $FEATURE_REPO_DIR/feature_store.yaml\n", + "!yq -i '.online_store.host=\"postgres\"' $FEATURE_REPO_DIR/feature_store.yaml\n", + "!yq -i '.offline_store.host=\"postgres\"' $FEATURE_REPO_DIR/feature_store.yaml\n", + "!cat $FEATURE_REPO_DIR/feature_store.yaml" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "project: sample\n", + "registry:\n", + " registry_type: sql\n", + " path: postgresql://feast:feast@postgres:5432/feast\n", + " cache_ttl_seconds: 60\n", + " sqlalchemy_config_kwargs:\n", + " echo: false\n", + " pool_pre_ping: true\n", + "provider: local\n", + "entity_key_serialization_version: 2\n" + ] + } + ], + "source": [ + "# Registry server has only `registry` section\n", + "!cat $FEATURE_REPO_DIR/feature_store.yaml | yq '.project | {key: .}, .registry | {key: .}, .provider | {key: .}, 
.entity_key_serialization_version | {key: .}' > registry_feature_store.yaml\n", + "! cat registry_feature_store.yaml" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "project: sample\n", + "provider: local\n", + "online_store:\n", + " type: postgres\n", + " host: postgres\n", + " port: 5432\n", + " database: feast\n", + " db_schema: public\n", + " user: feast\n", + " password: feast\n", + "entity_key_serialization_version: 2\n", + "registry:\n", + " path: registry-server:80\n", + " registry_type: remote\n", + "offline_store:\n", + " type: remote\n", + " host: offline-server\n", + " port: 80\n" + ] + } + ], + "source": [ + "# Online server has `online_store` section, a remote `registry` and a remote `offline_store`\n", + "!cat $FEATURE_REPO_DIR/feature_store.yaml | yq '.project | {key: .}, .provider | {key: .}, .online_store | {key: .}, .entity_key_serialization_version | {key: .}' > online_feature_store.yaml\n", + "!yq -i '.registry.path=\"registry-server:80\"' online_feature_store.yaml\n", + "!yq -i '.registry.registry_type=\"remote\"' online_feature_store.yaml\n", + "!yq -i '.offline_store.type=\"remote\"' online_feature_store.yaml\n", + "!yq -i '.offline_store.host=\"offline-server\"' online_feature_store.yaml\n", + "!yq -i '.offline_store.port=80' online_feature_store.yaml\n", + "\n", + "!cat online_feature_store.yaml" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "project: sample\n", + "provider: local\n", + "offline_store:\n", + " type: postgres\n", + " host: postgres\n", + " port: 5432\n", + " database: feast\n", + " db_schema: public\n", + " user: feast\n", + " password: feast\n", + "entity_key_serialization_version: 2\n", + "registry:\n", + " path: registry-server:80\n", + " registry_type: remote\n" + ] + } + ], + "source": [ + "# 
Offline server has `offline_store` section and a remote `registry`\n", + "!cat $FEATURE_REPO_DIR/feature_store.yaml | yq '.project | {key: .}, .provider | {key: .}, .offline_store | {key: .}, .entity_key_serialization_version | {key: .}' > offline_feature_store.yaml\n", + "!yq -i '.registry.path=\"registry-server:80\"' offline_feature_store.yaml\n", + "!yq -i '.registry.registry_type=\"remote\"' offline_feature_store.yaml\n", + "!cat offline_feature_store.yaml" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Encode configuration files\n", + "Next step is to encode in base64 the configuration files for each server. We'll store the output in environment variables." + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "def base64_file(file):\n", + " import base64\n", + "\n", + " with open(file, 'rb') as file:\n", + " yaml_content = file.read()\n", + " return base64.b64encode(yaml_content).decode('utf-8')\n", + "\n", + "os.environ['REGISTRY_CONFIG_BASE64'] = base64_file('registry_feature_store.yaml')\n", + "os.environ['ONLINE_CONFIG_BASE64'] = base64_file('online_feature_store.yaml')\n", + "os.environ['OFFLINE_CONFIG_BASE64'] = base64_file('offline_feature_store.yaml')" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "REGISTRY_CONFIG_BASE64=cHJvamVjdDogc2FtcGxlCnJlZ2lzdHJ5OgogIHJlZ2lzdHJ5X3R5cGU6IHNxbAogIHBhdGg6IHBvc3RncmVzcWw6Ly9mZWFzdDpmZWFzdEBwb3N0Z3Jlczo1NDMyL2ZlYXN0CiAgY2FjaGVfdHRsX3NlY29uZHM6IDYwCiAgc3FsYWxjaGVteV9jb25maWdfa3dhcmdzOgogICAgZWNobzogZmFsc2UKICAgIHBvb2xfcHJlX3Bpbmc6IHRydWUKcHJvdmlkZXI6IGxvY2FsCmVudGl0eV9rZXlfc2VyaWFsaXphdGlvbl92ZXJzaW9uOiAyCg==\n", + 
"ONLINE_CONFIG_BASE64=cHJvamVjdDogc2FtcGxlCnByb3ZpZGVyOiBsb2NhbApvbmxpbmVfc3RvcmU6CiAgdHlwZTogcG9zdGdyZXMKICBob3N0OiBwb3N0Z3JlcwogIHBvcnQ6IDU0MzIKICBkYXRhYmFzZTogZmVhc3QKICBkYl9zY2hlbWE6IHB1YmxpYwogIHVzZXI6IGZlYXN0CiAgcGFzc3dvcmQ6IGZlYXN0CmVudGl0eV9rZXlfc2VyaWFsaXphdGlvbl92ZXJzaW9uOiAyCnJlZ2lzdHJ5OgogIHBhdGg6IHJlZ2lzdHJ5LXNlcnZlcjo4MAogIHJlZ2lzdHJ5X3R5cGU6IHJlbW90ZQpvZmZsaW5lX3N0b3JlOgogIHR5cGU6IHJlbW90ZQogIGhvc3Q6IG9mZmxpbmUtc2VydmVyCiAgcG9ydDogODAK\n", + "OFFLINE_CONFIG_BASE64=cHJvamVjdDogc2FtcGxlCnByb3ZpZGVyOiBsb2NhbApvZmZsaW5lX3N0b3JlOgogIHR5cGU6IHBvc3RncmVzCiAgaG9zdDogcG9zdGdyZXMKICBwb3J0OiA1NDMyCiAgZGF0YWJhc2U6IGZlYXN0CiAgZGJfc2NoZW1hOiBwdWJsaWMKICB1c2VyOiBmZWFzdAogIHBhc3N3b3JkOiBmZWFzdAplbnRpdHlfa2V5X3NlcmlhbGl6YXRpb25fdmVyc2lvbjogMgpyZWdpc3RyeToKICBwYXRoOiByZWdpc3RyeS1zZXJ2ZXI6ODAKICByZWdpc3RyeV90eXBlOiByZW1vdGUK\n" + ] + } + ], + "source": [ + "!env | grep BASE64" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Install servers\n", + "We'll use the charts defined in this local repository to install the servers.\n", + "\n", + "The installation order reflects the dependency between the deployments:\n", + "* `Registry Server` starts first because it has no dependencies\n", + "* Then `Offline Server` as it depends only on the `Registry Server`\n", + "* Last the `Online Server` that depends on both the other servers" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "env: FEAST_IMAGE_REPO=feastdev/feature-server\n", + "env: FEAST_IMAGE_VERSION=0.40.1\n" + ] + } + ], + "source": [ + "%env FEAST_IMAGE_REPO=feastdev/feature-server\n", + "%env FEAST_IMAGE_VERSION=0.40.1" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Release \"feast-registry\" does not exist. 
Installing it now.\n", + "NAME: feast-registry\n", + "LAST DEPLOYED: Tue Sep 17 13:14:05 2024\n", + "NAMESPACE: feast\n", + "STATUS: deployed\n", + "REVISION: 1\n", + "TEST SUITE: None\n", + "deployment.apps/registry-server condition met\n" + ] + } + ], + "source": [ + "# Registry\n", + "!helm upgrade --install feast-registry ../../infra/charts/feast-feature-server \\\n", + "--set fullnameOverride=registry-server --set feast_mode=registry \\\n", + "--set image.repository=${FEAST_IMAGE_REPO} --set image.tag=${FEAST_IMAGE_VERSION} \\\n", + "--set feature_store_yaml_base64=$REGISTRY_CONFIG_BASE64\n", + "\n", + "!kubectl wait --for=condition=available deployment/registry-server --timeout=2m" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Release \"feast-offline\" does not exist. Installing it now.\n", + "NAME: feast-offline\n", + "LAST DEPLOYED: Tue Sep 17 13:14:33 2024\n", + "NAMESPACE: feast\n", + "STATUS: deployed\n", + "REVISION: 1\n", + "TEST SUITE: None\n", + "deployment.apps/offline-server condition met\n" + ] + } + ], + "source": [ + "# Offline\n", + "!helm upgrade --install feast-offline ../../infra/charts/feast-feature-server \\\n", + "--set fullnameOverride=offline-server --set feast_mode=offline \\\n", + "--set image.repository=${FEAST_IMAGE_REPO} --set image.tag=${FEAST_IMAGE_VERSION} \\\n", + "--set feature_store_yaml_base64=$OFFLINE_CONFIG_BASE64\n", + "\n", + "!kubectl wait --for=condition=available deployment/offline-server --timeout=2m" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Release \"feast-online\" does not exist. 
Installing it now.\n", + "NAME: feast-online\n", + "LAST DEPLOYED: Tue Sep 17 13:14:55 2024\n", + "NAMESPACE: feast\n", + "STATUS: deployed\n", + "REVISION: 1\n", + "TEST SUITE: None\n", + "deployment.apps/online-server condition met\n" + ] + } + ], + "source": [ + "# Online\n", + "!helm upgrade --install feast-online ../../infra/charts/feast-feature-server \\\n", + "--set fullnameOverride=online-server --set feast_mode=online \\\n", + "--set image.repository=${FEAST_IMAGE_REPO} --set image.tag=${FEAST_IMAGE_VERSION} \\\n", + "--set feature_store_yaml_base64=$ONLINE_CONFIG_BASE64\n", + "\n", + "!kubectl wait --for=condition=available deployment/online-server --timeout=2m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Validate deployment\n", + "Fist validate application and service status:" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\n", + "offline-server ClusterIP 10.96.24.216 80/TCP 44s\n", + "online-server ClusterIP 10.96.36.113 80/TCP 22s\n", + "postgres NodePort 10.96.231.4 5432:30565/TCP 4m14s\n", + "registry-server ClusterIP 10.96.128.48 80/TCP 71s\n", + "NAME READY UP-TO-DATE AVAILABLE AGE\n", + "offline-server 1/1 1 1 44s\n", + "online-server 1/1 1 1 22s\n", + "postgres 1/1 1 1 4m14s\n", + "registry-server 1/1 1 1 71s\n", + "NAME READY STATUS RESTARTS AGE\n", + "offline-server-6c59467c75-9jvq7 1/1 Running 0 45s\n", + "online-server-76968bbc48-qlvvj 1/1 Running 0 23s\n", + "postgres-76c8d94d6-pngvm 1/1 Running 0 4m15s\n", + "registry-server-597c5cd445-nrm75 1/1 Running 0 72s\n" + ] + } + ], + "source": [ + "!kubectl get svc\n", + "!kubectl get deployments\n", + "!kubectl get pods" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Then verify the content of the local configuration file (it's stored in `/tmp/` folder with random subfolder)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "project: sample\n", + "registry:\n", + " registry_type: sql\n", + " path: postgresql://feast:feast@postgres:5432/feast\n", + " cache_ttl_seconds: 60\n", + " sqlalchemy_config_kwargs:\n", + " echo: false\n", + " pool_pre_ping: true\n", + "provider: local\n", + "entity_key_serialization_version: 2\n" + ] + } + ], + "source": [ + "!kubectl exec deployment/registry-server -- find /tmp -name feature_store.yaml -exec cat {} \\;" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "project: sample\n", + "provider: local\n", + "offline_store:\n", + " type: postgres\n", + " host: postgres\n", + " port: 5432\n", + " database: feast\n", + " db_schema: public\n", + " user: feast\n", + " password: feast\n", + "entity_key_serialization_version: 2\n", + "registry:\n", + " path: registry-server:80\n", + " registry_type: remote\n" + ] + } + ], + "source": [ + "!kubectl exec deployment/offline-server -- find /tmp -name feature_store.yaml -exec cat {} \\;" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "project: sample\n", + "provider: local\n", + "online_store:\n", + " type: postgres\n", + " host: postgres\n", + " port: 5432\n", + " database: feast\n", + " db_schema: public\n", + " user: feast\n", + " password: feast\n", + "entity_key_serialization_version: 2\n", + "registry:\n", + " path: registry-server:80\n", + " registry_type: remote\n", + "offline_store:\n", + " type: remote\n", + " host: offline-server\n", + " port: 80\n" + ] + } + ], + "source": [ + "!kubectl exec deployment/online-server -- find /tmp -name feature_store.yaml -exec cat {} \\;" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + 
"source": [ + "Finally, let's verify the `feast` version in each server" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + ": MADV_DONTNEED does not work (memset will be used instead)\n", + ": (This is the expected behaviour if you are running under QEMU)\n", + "Feast SDK Version: \"0.40.1\"\n", + ": MADV_DONTNEED does not work (memset will be used instead)\n", + ": (This is the expected behaviour if you are running under QEMU)\n", + "Feast SDK Version: \"0.40.1\"\n", + ": MADV_DONTNEED does not work (memset will be used instead)\n", + ": (This is the expected behaviour if you are running under QEMU)\n", + "Feast SDK Version: \"0.40.1\"\n" + ] + } + ], + "source": [ + "!kubectl exec deployment/registry-server -- feast version\n", + "!kubectl exec deployment/offline-server -- feast version\n", + "!kubectl exec deployment/online-server -- feast version" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "feast3.11", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/examples/kind-quickstart/02-Client.ipynb b/examples/kind-quickstart/02-Client.ipynb new file mode 100644 index 0000000000..322a95a61b --- /dev/null +++ b/examples/kind-quickstart/02-Client.ipynb @@ -0,0 +1,606 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: feast==0.40.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (0.40.1)\n", + 
"Requirement already satisfied: click<9.0.0,>=7.0.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (8.1.7)\n", + "Requirement already satisfied: colorama<1,>=0.3.9 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (0.4.6)\n", + "Requirement already satisfied: dill~=0.3.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (0.3.8)\n", + "Requirement already satisfied: mypy-protobuf>=3.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (3.3.0)\n", + "Requirement already satisfied: Jinja2<4,>=2 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (3.1.4)\n", + "Requirement already satisfied: jsonschema in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (4.22.0)\n", + "Requirement already satisfied: mmh3 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (4.1.0)\n", + "Requirement already satisfied: numpy<2,>=1.22 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (1.26.4)\n", + "Requirement already satisfied: pandas<3,>=1.4.3 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (2.2.2)\n", + "Requirement already satisfied: protobuf<5.0.0,>=4.24.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (4.25.3)\n", + "Requirement already satisfied: pyarrow>=4 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (15.0.2)\n", + "Requirement already satisfied: pydantic>=2.0.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (2.7.4)\n", + "Requirement already satisfied: pygments<3,>=2.12.0 in 
/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (2.18.0)\n", + "Requirement already satisfied: PyYAML<7,>=5.4.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (6.0.1)\n", + "Requirement already satisfied: requests in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (2.32.3)\n", + "Requirement already satisfied: SQLAlchemy>1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from SQLAlchemy[mypy]>1->feast==0.40.1) (2.0.31)\n", + "Requirement already satisfied: tabulate<1,>=0.8.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (0.9.0)\n", + "Requirement already satisfied: tenacity<9,>=7 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (8.4.2)\n", + "Requirement already satisfied: toml<1,>=0.10.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (0.10.2)\n", + "Requirement already satisfied: tqdm<5,>=4 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (4.66.4)\n", + "Requirement already satisfied: typeguard>=4.0.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (4.3.0)\n", + "Requirement already satisfied: fastapi>=0.68.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (0.111.0)\n", + "Requirement already satisfied: uvicorn<1,>=0.14.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.30.1)\n", + "Requirement already satisfied: dask>=2024.2.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask[dataframe]>=2024.2.1->feast==0.40.1) (2024.6.2)\n", + "Requirement already satisfied: gunicorn in 
/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (22.0.0)\n", + "Requirement already satisfied: cloudpickle>=1.5.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (3.0.0)\n", + "Requirement already satisfied: fsspec>=2021.09.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (2023.12.2)\n", + "Requirement already satisfied: packaging>=20.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (24.1)\n", + "Requirement already satisfied: partd>=1.2.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (1.4.2)\n", + "Requirement already satisfied: toolz>=0.10.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (0.12.1)\n", + "Requirement already satisfied: importlib-metadata>=4.13.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (8.0.0)\n", + "Requirement already satisfied: dask-expr<1.2,>=1.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask[dataframe]>=2024.2.1->feast==0.40.1) (1.1.6)\n", + "Requirement already satisfied: starlette<0.38.0,>=0.37.2 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (0.37.2)\n", + "Requirement already satisfied: typing-extensions>=4.8.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (4.12.2)\n", + "Requirement already satisfied: fastapi-cli>=0.0.2 in 
/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (0.0.4)\n", + "Requirement already satisfied: httpx>=0.23.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (0.27.0)\n", + "Requirement already satisfied: python-multipart>=0.0.7 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (0.0.9)\n", + "Requirement already satisfied: ujson!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,>=4.0.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (5.10.0)\n", + "Requirement already satisfied: orjson>=3.2.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (3.10.5)\n", + "Requirement already satisfied: email_validator>=2.0.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (2.2.0)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from Jinja2<4,>=2->feast==0.40.1) (2.1.5)\n", + "Requirement already satisfied: types-protobuf>=3.19.12 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from mypy-protobuf>=3.1->feast==0.40.1) (3.19.22)\n", + "Requirement already satisfied: python-dateutil>=2.8.2 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from pandas<3,>=1.4.3->feast==0.40.1) (2.9.0.post0)\n", + "Requirement already satisfied: pytz>=2020.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from pandas<3,>=1.4.3->feast==0.40.1) (2024.1)\n", + "Requirement already satisfied: tzdata>=2022.7 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from pandas<3,>=1.4.3->feast==0.40.1) (2024.1)\n", + "Requirement already satisfied: 
annotated-types>=0.4.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from pydantic>=2.0.0->feast==0.40.1) (0.7.0)\n", + "Requirement already satisfied: pydantic-core==2.18.4 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from pydantic>=2.0.0->feast==0.40.1) (2.18.4)\n", + "Requirement already satisfied: mypy>=0.910 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from SQLAlchemy[mypy]>1->feast==0.40.1) (1.10.1)\n", + "Requirement already satisfied: h11>=0.8 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from uvicorn<1,>=0.14.0->uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.14.0)\n", + "Requirement already satisfied: httptools>=0.5.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.6.1)\n", + "Requirement already satisfied: python-dotenv>=0.13 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (1.0.1)\n", + "Requirement already satisfied: uvloop!=0.15.0,!=0.15.1,>=0.14.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.19.0)\n", + "Requirement already satisfied: watchfiles>=0.13 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.22.0)\n", + "Requirement already satisfied: websockets>=10.4 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (12.0)\n", + "Requirement already satisfied: attrs>=22.2.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from jsonschema->feast==0.40.1) (23.2.0)\n", + "Requirement already satisfied: jsonschema-specifications>=2023.03.6 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from 
jsonschema->feast==0.40.1) (2023.12.1)\n", + "Requirement already satisfied: referencing>=0.28.4 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from jsonschema->feast==0.40.1) (0.35.1)\n", + "Requirement already satisfied: rpds-py>=0.7.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from jsonschema->feast==0.40.1) (0.18.1)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from requests->feast==0.40.1) (3.3.2)\n", + "Requirement already satisfied: idna<4,>=2.5 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from requests->feast==0.40.1) (3.7)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from requests->feast==0.40.1) (1.26.19)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from requests->feast==0.40.1) (2024.7.4)\n", + "Requirement already satisfied: dnspython>=2.0.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from email_validator>=2.0.0->fastapi>=0.68.0->feast==0.40.1) (2.6.1)\n", + "Requirement already satisfied: typer>=0.12.3 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi-cli>=0.0.2->fastapi>=0.68.0->feast==0.40.1) (0.12.3)\n", + "Requirement already satisfied: anyio in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from httpx>=0.23.0->fastapi>=0.68.0->feast==0.40.1) (4.4.0)\n", + "Requirement already satisfied: httpcore==1.* in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from httpx>=0.23.0->fastapi>=0.68.0->feast==0.40.1) (1.0.5)\n", + "Requirement already satisfied: sniffio in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from httpx>=0.23.0->fastapi>=0.68.0->feast==0.40.1) 
(1.3.1)\n", + "Requirement already satisfied: zipp>=0.5 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from importlib-metadata>=4.13.0->dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (3.19.1)\n", + "Requirement already satisfied: mypy-extensions>=1.0.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from mypy>=0.910->SQLAlchemy[mypy]>1->feast==0.40.1) (1.0.0)\n", + "Requirement already satisfied: locket in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from partd>=1.2.0->dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (1.0.0)\n", + "Requirement already satisfied: six>=1.5 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from python-dateutil>=2.8.2->pandas<3,>=1.4.3->feast==0.40.1) (1.16.0)\n", + "Requirement already satisfied: shellingham>=1.3.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from typer>=0.12.3->fastapi-cli>=0.0.2->fastapi>=0.68.0->feast==0.40.1) (1.5.4)\n", + "Requirement already satisfied: rich>=10.11.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from typer>=0.12.3->fastapi-cli>=0.0.2->fastapi>=0.68.0->feast==0.40.1) (13.7.1)\n", + "Requirement already satisfied: markdown-it-py>=2.2.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from rich>=10.11.0->typer>=0.12.3->fastapi-cli>=0.0.2->fastapi>=0.68.0->feast==0.40.1) (3.0.0)\n", + "Requirement already satisfied: mdurl~=0.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from markdown-it-py>=2.2.0->rich>=10.11.0->typer>=0.12.3->fastapi-cli>=0.0.2->fastapi>=0.68.0->feast==0.40.1) (0.1.2)\n", + "\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m24.1.1\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.2\u001b[0m\n", + 
"\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "# WE MUST ENSURE PYTHON CONSISTENCY BETWEEN NOTEBOOK AND FEAST SERVERS\n", + "# LAUNCH THIS NOTEBOOK FROM A CLEAN PYTHON ENVIRONMENT >3.9\n", + "%pip install feast==0.40.1" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Run a test client\n", + "\n", + "> πŸš€ This test is developer to work only with the default feature store generated by `feast init`. \n", + "> \n", + "> To test a custom feature store you need to run a custom test application, but still using the same client configuration that we've prepared." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Apply the feature store definitions" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The feature store cannot be initialized using remote services. \n", + "\n", + "We'll use the original `feature_store.yaml` from within a Kubernetes `Job` to run `feast apply`.\n", + "\n", + "For the same reason, we also run an initial materialization from the `Job`, otherwise it would fail because of uninmplemented APIs in the remote servers, like [online_write_batch](https://github.com/feast-dev/feast/blob/4a6b663f80bc91d6de35ed2ec428d34811d17a18/sdk/python/feast/infra/online_stores/remote.py#L50).\n", + "\n", + "First we create a `ConfigMap` holding the required code and configuration." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "env: FEATURE_REPO_DIR=sample/feature_repo\n", + "Error from server (NotFound): configmaps \"sample-repo\" not found\n", + "configmap/sample-repo created\n", + "\n", + "Inspect keys of sample-repo ConfigMap\n", + "example_repo.py\n", + "feature_store.yaml\n" + ] + } + ], + "source": [ + "%env FEATURE_REPO_DIR=sample/feature_repo\n", + "!kubectl delete configmap sample-repo\n", + "!kubectl create configmap sample-repo --from-file=${FEATURE_REPO_DIR}/example_repo.py,${FEATURE_REPO_DIR}/feature_store.yaml\n", + "!echo\n", + "!echo \"Inspect keys of sample-repo ConfigMap\"\n", + "!kubectl get configmaps sample-repo -oyaml | yq '.data[] | key'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Then we create the `Job` to apply the definitions, according to the [init-job.yaml](./init-job.yaml) manifest" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Error from server (NotFound): error when deleting \"init-job.yaml\": jobs.batch \"feast-apply-job\" not found\n", + "job.batch/feast-apply-job created\n" + ] + } + ], + "source": [ + "!kubectl delete -f init-job.yaml\n", + "!kubectl apply -f init-job.yaml" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Monitoring the log of the `Job`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "pod/feast-apply-job-tzscd condition met\n", + "Starting feast initialization job...\n", + ": MADV_DONTNEED does not work (memset will be used instead)\n", + ": (This is the expected behaviour if you are running under QEMU)\n", + "09/17/2024 11:18:10 AM feast.repo_config WARNING: The `path` of the `RegistryConfig` starts with a plain `postgresql` string. We are updating this to `postgresql+psycopg` to ensure that the `psycopg3` driver is used by `sqlalchemy`. If you want to use `psycopg2` pass `postgresql+psycopg2` explicitely to `path`. To silence this warning, pass `postgresql+psycopg` explicitely to `path`.\n", + "/usr/local/lib/python3.11/site-packages/feast/feature_store.py:590: RuntimeWarning: On demand feature view is an experimental feature. This API is stable, but the functionality does not scale well for offline retrieval\n", + " warnings.warn(\n", + "Deploying infrastructure for driver_hourly_stats_fresh\n", + "Deploying infrastructure for driver_hourly_stats\n", + ": MADV_DONTNEED does not work (memset will be used instead)\n", + ": (This is the expected behaviour if you are running under QEMU)\n", + "09/17/2024 11:18:21 AM feast.repo_config WARNING: The `path` of the `RegistryConfig` starts with a plain `postgresql` string. We are updating this to `postgresql+psycopg` to ensure that the `psycopg3` driver is used by `sqlalchemy`. If you want to use `psycopg2` pass `postgresql+psycopg2` explicitely to `path`. To silence this warning, pass `postgresql+psycopg` explicitely to `path`.\n", + "09/17/2024 11:18:21 AM root WARNING: _list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. 
_list_feature_views will behave like _list_all_feature_views in the future.\n", + "Materializing \u001b[1m\u001b[32m2\u001b[0m feature views to \u001b[1m\u001b[32m2024-09-17 11:18:11+00:00\u001b[0m into the \u001b[1m\u001b[32mpostgres\u001b[0m online store.\n", + "\n", + "\u001b[1m\u001b[32mdriver_hourly_stats_fresh\u001b[0m from \u001b[1m\u001b[32m2024-09-16 11:18:21+00:00\u001b[0m to \u001b[1m\u001b[32m2024-09-17 11:18:11+00:00\u001b[0m:\n", + "100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 5/5 [00:00<00:00, 72.23it/s]\n", + "\u001b[1m\u001b[32mdriver_hourly_stats\u001b[0m from \u001b[1m\u001b[32m2024-09-16 11:18:22+00:00\u001b[0m to \u001b[1m\u001b[32m2024-09-17 11:18:11+00:00\u001b[0m:\n", + "100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 5/5 [00:00<00:00, 654.75it/s]\n", + "Feast initialization completed successfully.\n" + ] + } + ], + "source": [ + "!INIT_JOB_POD=$(kubectl get pods -l job-name=feast-apply-job -oname) && kubectl wait --for=condition=podscheduled $INIT_JOB_POD --timeout=2m\n", + "!INIT_JOB_POD=$(kubectl get pods -l job-name=feast-apply-job -oname) && kubectl logs -f $INIT_JOB_POD\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Forwarding the feast service ports" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To run the test client from the notebook, we need to forward the service ports to ports on the current host." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\n", + "offline-server ClusterIP 10.96.24.216 80/TCP 3m58s\n", + "online-server ClusterIP 10.96.36.113 80/TCP 3m36s\n", + "postgres NodePort 10.96.231.4 5432:30565/TCP 7m28s\n", + "registry-server ClusterIP 10.96.128.48 80/TCP 4m25s\n" + ] + } + ], + "source": [ + "!kubectl get svc" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Port-forwarding registry-server with process ID: 15094\n", + "Port-forwarding offline-server with process ID: 15095\n", + "Port-forwarding online-server with process ID: 15096\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Forwarding from 127.0.0.1:8002 -> 8815\n", + "Forwarding from 127.0.0.1:8003 -> 6566\n", + "Forwarding from 127.0.0.1:8001 -> 6570\n", + "Forwarding from [::1]:8002 -> 8815\n", + "Forwarding from [::1]:8003 -> 6566\n", + "Forwarding from [::1]:8001 -> 6570\n" + ] + } + ], + "source": [ + "from src.utils import port_forward\n", + "registry_process = port_forward(\"registry-server\", 8001)\n", + "offline_process = port_forward(\"offline-server\", 8002)\n", + "online_process = port_forward(\"online-server\", 8003)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " 501 15094 13456 0 1:18PM ?? 0:00.06 kubectl port-forward service/registry-server 8001:80\n", + " 501 15095 13456 0 1:18PM ?? 0:00.05 kubectl port-forward service/offline-server 8002:80\n", + " 501 15096 13456 0 1:18PM ?? 
0:00.06 kubectl port-forward service/online-server 8003:80\n", + " 501 15170 13456 0 1:18PM ttys051 0:00.14 /bin/zsh -c ps -ef | grep port-forward\n", + " 501 15173 15170 0 1:18PM ttys051 0:00.00 grep port-forward\n" + ] + } + ], + "source": [ + "!ps -ef | grep port-forward" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Client configuration\n", + "The client configuration is using only remote clients connected to the forwarded ports, from 8001 to 8003." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "project: sample\n", + "registry:\n", + " path: localhost:8001\n", + " registry_type: remote\n", + "offline_store:\n", + " host: localhost\n", + " port: 8002\n", + " type: remote\n", + "online_store:\n", + " path: http://localhost:8003\n", + " type: remote\n", + "entity_key_serialization_version: 2\n", + "auth:\n", + " type: no_auth\n" + ] + } + ], + "source": [ + "!cat client/feature_store.yaml" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Install test code\n", + "First we copy the test code from `sample/feature_repo` to `client` folder." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "client/__init__.py client/test_workflow.py\n" + ] + } + ], + "source": [ + "!cp sample/feature_repo/test_workflow.py client\n", + "!ls client/*.py" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We update the original test to comment the `apply`, `teardown` and `materialize-incremental` commands." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "12,13c12,13\n", + "< # print(\"\\n--- Run feast apply to setup feature store on Postgres ---\")\n", + "< # subprocess.run([\"feast\", \"apply\"])\n", + "---\n", + "> print(\"\\n--- Run feast apply to setup feature store on Postgres ---\")\n", + "> subprocess.run([\"feast\", \"apply\"])\n", + "21,22c21,22\n", + "< # print(\"\\n--- Load features into online store ---\")\n", + "< # store.materialize_incremental(end_date=datetime.now())\n", + "---\n", + "> print(\"\\n--- Load features into online store ---\")\n", + "> store.materialize_incremental(end_date=datetime.now())\n", + "56,57c56,57\n", + "< # print(\"\\n--- Run feast teardown ---\")\n", + "< # subprocess.run([\"feast\", \"teardown\"])\n", + "---\n", + "> print(\"\\n--- Run feast teardown ---\")\n", + "> subprocess.run([\"feast\", \"teardown\"])\n" + ] + } + ], + "source": [ + "!sed -i.bk 's/subprocess.run/# subprocess.run/' client/test_workflow.py\n", + "!sed -i.bk 's/print(\"\\\\n--- Run feast/# print(\"\\\\n--- Run feast/' client/test_workflow.py\n", + "!sed -i.bk 's/store.materialize_incremental/# store.materialize_incremental/' client/test_workflow.py\n", + "!sed -i.bk 's/print(\"\\\\n--- Load features/# print(\"\\\\n--- Load features/' client/test_workflow.py\n", + "!diff client/test_workflow.py sample/feature_repo/test_workflow.py" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally, we run the full test suite from the client folder." + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Handling connection for 8001\n", + "\n", + "--- Historical features for training ---\n", + "WARNING:root:_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. 
_list_feature_views will behave like _list_all_feature_views in the future.\n", + "Handling connection for 8002\n", + " driver_id event_timestamp ... conv_rate_plus_val1 conv_rate_plus_val2\n", + "0 1001 2021-04-12 10:59:42 ... 1.302426 10.302426\n", + "1 1002 2021-04-12 08:12:10 ... 2.436384 20.436384\n", + "2 1003 2021-04-12 16:40:26 ... 3.954102 30.954102\n", + "\n", + "[3 rows x 10 columns]\n", + "\n", + "--- Historical features for batch scoring ---\n", + "WARNING:root:_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. _list_feature_views will behave like _list_all_feature_views in the future.\n", + "Handling connection for 8002\n", + " driver_id ... conv_rate_plus_val2\n", + "0 1001 ... 10.798974\n", + "1 1002 ... 20.316096\n", + "2 1003 ... 30.202964\n", + "\n", + "[3 rows x 10 columns]\n", + "\n", + "--- Online features ---\n", + "WARNING:root:_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. _list_feature_views will behave like _list_all_feature_views in the future.\n", + "Handling connection for 8003\n", + "acc_rate : [0.22748562693595886, 0.9316393733024597]\n", + "conv_rate_plus_val1 : [1000.7989742159843, 1001.3160955905914]Handling connection for 8003\n", + "\n", + "conv_rate_plus_val2 : [2000.7989742159843, 2002.3160955905914]\n", + "driver_id : [1001, 1002]\n", + "\n", + "--- Online features retrieved (instead) through a feature service---\n", + "WARNING:root:_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. 
_list_feature_views will behave like _list_all_feature_views in the future.\n", + "Handling connection for 8003\n", + "conv_rate : [0.7989742159843445, 0.31609559059143066]\n", + "conv_rate_plus_val1 : [1000.7989742159843, 1001.3160955905914]\n", + "conv_rate_plus_val2 : [2000.7989742159843, 2002.3160955905914]\n", + "driver_id : [1001, 1002]\n", + "\n", + "--- Online features retrieved (using feature service v3, which uses a feature view with a push source---\n", + "WARNING:root:_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. _list_feature_views will behave like _list_all_feature_views in the future.\n", + "acc_rate : [0.22748562693595886, 0.9316393733024597]\n", + "avg_daily_trips : [451, 417]\n", + "conv_rate : [0.7989742159843445, 0.31609559059143066]\n", + "conv_rate_plus_val1 : [1000.7989742159843, 1001.3160955905914]\n", + "conv_rate_plus_val2 : [2000.7989742159843, 2002.3160955905914]\n", + "driver_id : [1001, 1002]\n", + "\n", + "--- Simulate a stream event ingestion of the hourly stats df ---\n", + " driver_id event_timestamp ... acc_rate avg_daily_trips\n", + "0 1001 2024-09-17 13:19:54.105733 ... 1.0 1000\n", + "\n", + "[1 rows x 6 columns]\n", + "WARNING:root:list_feature_views will make breaking changes. Please use list_batch_feature_views instead. list_feature_views will behave like list_all_feature_views in the future.\n", + "WARNING:root:_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. 
_list_feature_views will behave like _list_all_feature_views in the future.\n", + "Traceback (most recent call last):\n", + " File \"/Users/dmartino/projects/AI/feast/feast/examples/kind-quickstart/client/test_workflow.py\", line 130, in \n", + " run_demo()\n", + " File \"/Users/dmartino/projects/AI/feast/feast/examples/kind-quickstart/client/test_workflow.py\", line 51, in run_demo\n", + " store.push(\"driver_stats_push_source\", event_df, to=PushMode.ONLINE_AND_OFFLINE)\n", + " File \"/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages/feast/feature_store.py\", line 1423, in push\n", + " self.write_to_online_store(\n", + " File \"/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages/feast/feature_store.py\", line 1449, in write_to_online_store\n", + " feature_view: FeatureView = self.get_stream_feature_view(\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages/feast/feature_store.py\", line 504, in get_stream_feature_view\n", + " return self._get_stream_feature_view(\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages/feast/feature_store.py\", line 514, in _get_stream_feature_view\n", + " stream_feature_view = self._registry.get_stream_feature_view(\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages/feast/infra/registry/remote.py\", line 209, in get_stream_feature_view\n", + " response = self.stub.GetStreamFeatureView(request)\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages/grpc/_channel.py\", line 1181, in __call__\n", + " return _end_unary_response_blocking(state, call, False, None)\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File 
\"/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages/grpc/_channel.py\", line 1006, in _end_unary_response_blocking\n", + " raise _InactiveRpcError(state) # pytype: disable=not-instantiable\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + "grpc._channel._InactiveRpcError: <_InactiveRpcError of RPC that terminated with:\n", + "\tstatus = StatusCode.UNKNOWN\n", + "\tdetails = \"Exception calling application: Feature view driver_hourly_stats_fresh does not exist in project sample\"\n", + "\tdebug_error_string = \"UNKNOWN:Error received from peer {grpc_message:\"Exception calling application: Feature view driver_hourly_stats_fresh does not exist in project sample\", grpc_status:2, created_time:\"2024-09-17T13:19:54.127834+02:00\"}\"\n", + ">\n" + ] + } + ], + "source": [ + "!cd client && python test_workflow.py" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note If you see the following error, it is likely due to the [issue #4392](https://github.com/feast-dev/feast/issues/4392):\n", + "Remote registry client does not map application errors:\n", + "\n", + "```\n", + "Feature view driver_hourly_stats_fresh does not exist in project sample\n", + "```" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Terminate port forwarding" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " 501 16434 13456 0 1:20PM ttys051 0:00.12 /bin/zsh -c ps -ef | grep port-forward\n", + " 501 16436 16434 0 1:20PM ttys051 0:00.00 grep port-forward\n" + ] + } + ], + "source": [ + "registry_process.terminate()\n", + "offline_process.terminate()\n", + "online_process.terminate()\n", + "!ps -ef | grep port-forward" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "feast3.11", + "language": "python", 
+ "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/examples/kind-quickstart/03-Uninstall.ipynb b/examples/kind-quickstart/03-Uninstall.ipynb new file mode 100644 index 0000000000..20874fc1b7 --- /dev/null +++ b/examples/kind-quickstart/03-Uninstall.ipynb @@ -0,0 +1,120 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Uninstall deployment\n", + "Use Helm to uninstall all the previous deployments" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "release \"feast-online\" uninstalled\n", + "release \"feast-offline\" uninstalled\n", + "release \"feast-registry\" uninstalled\n", + "NAME\tNAMESPACE\tREVISION\tUPDATED\tSTATUS\tCHART\tAPP VERSION\n" + ] + } + ], + "source": [ + "!helm uninstall feast-online\n", + "!helm uninstall feast-offline\n", + "!helm uninstall feast-registry\n", + "!helm list" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Delete the PostgreSQL deployment." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "secret \"postgres-secret\" deleted\n", + "persistentvolume \"postgres-volume\" deleted\n", + "persistentvolumeclaim \"postgres-volume-claim\" deleted\n", + "deployment.apps \"postgres\" deleted\n", + "service \"postgres\" deleted\n" + ] + } + ], + "source": [ + "!kubectl delete -f postgres/postgres.yaml" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "No resources found in feast namespace.\n", + "No resources found in feast namespace.\n", + "NAME READY STATUS RESTARTS AGE\n", + "feast-apply-job-tzscd 0/1 Completed 0 2m40s\n" + ] + } + ], + "source": [ + "!kubectl get svc\n", + "!kubectl get deployments\n", + "!kubectl get pods" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "feast3.11", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/examples/kind-quickstart/README.md b/examples/kind-quickstart/README.md new file mode 100644 index 0000000000..25ecfc8ecf --- /dev/null +++ b/examples/kind-quickstart/README.md @@ -0,0 +1,7 @@ +# Install and run Feast with Kind + +The following notebooks will guide you through an end-to-end journey to install and validate a simple Feast feature store in a +Kind Kubernetes cluster: +* [01-Install.ipynb](./01-Install.ipynb): 
Install and configure the cluster, then the Feast components. +* [02-Client.ipynb](./02-Client.ipynb): Validate the feature store with a remote test application runnning on the notebook. +* [03-Uninstall.ipynb](./03-Uninstall.ipynb): Clear the installed deployments. diff --git a/examples/kind-quickstart/client/__init__.py b/examples/kind-quickstart/client/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/examples/kind-quickstart/client/feature_store.yaml b/examples/kind-quickstart/client/feature_store.yaml new file mode 100644 index 0000000000..62acd3ead6 --- /dev/null +++ b/examples/kind-quickstart/client/feature_store.yaml @@ -0,0 +1,14 @@ +project: sample +registry: + path: localhost:8001 + registry_type: remote +offline_store: + host: localhost + port: 8002 + type: remote +online_store: + path: http://localhost:8003 + type: remote +entity_key_serialization_version: 2 +auth: + type: no_auth diff --git a/examples/kind-quickstart/init-job.yaml b/examples/kind-quickstart/init-job.yaml new file mode 100644 index 0000000000..68df35af73 --- /dev/null +++ b/examples/kind-quickstart/init-job.yaml @@ -0,0 +1,31 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: feast-apply-job +spec: + template: + spec: + containers: + - name: feast-apply + image: feastdev/feature-server:0.40.1 + command: ["/bin/sh", "-c"] + args: + - | + echo "Starting feast initialization job..."; + mkdir /tmp/sample; + cd /tmp/sample; + cp /sample/* .; + sed -i 's/localhost/postgres/' feature_store.yaml; + feast apply; + CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S"); + feast materialize-incremental $CURRENT_TIME; + echo "Feast initialization completed successfully."; + volumeMounts: + - name: sample-repo-files + mountPath: /sample + restartPolicy: Never + volumes: + - name: sample-repo-files + configMap: + name: sample-repo + backoffLimit: 1 diff --git a/examples/kind-quickstart/postgres/postgres.yaml b/examples/kind-quickstart/postgres/postgres.yaml new file mode 100644 
index 0000000000..c89a01f0f4 --- /dev/null +++ b/examples/kind-quickstart/postgres/postgres.yaml @@ -0,0 +1,83 @@ +#https://www.digitalocean.com/community/tutorials/how-to-deploy-postgres-to-kubernetes-cluster +apiVersion: v1 +kind: Secret +metadata: + name: postgres-secret + labels: + app: postgres +stringData: + POSTGRES_DB: feast + POSTGRES_USER: feast + POSTGRES_PASSWORD: feast +--- +apiVersion: v1 +kind: PersistentVolume +metadata: + name: postgres-volume + labels: + type: local + app: postgres +spec: + capacity: + storage: 1Gi + accessModes: + - ReadWriteOnce + hostPath: + path: /data/postgresql +--- +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: postgres-volume-claim + labels: + app: postgres +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 1Gi +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: postgres +spec: + replicas: 1 + selector: + matchLabels: + app: postgres + template: + metadata: + labels: + app: postgres + spec: + containers: + - name: postgres + image: 'postgres:15-alpine' + imagePullPolicy: IfNotPresent + ports: + - containerPort: 5432 + envFrom: + - secretRef: + name: postgres-secret + volumeMounts: + - mountPath: /var/lib/postgresql/data + name: postgresdata + volumes: + - name: postgresdata + persistentVolumeClaim: + claimName: postgres-volume-claim +--- +apiVersion: v1 +kind: Service +metadata: + name: postgres + labels: + app: postgres +spec: + type: NodePort + ports: + - port: 5432 + selector: + app: postgres \ No newline at end of file diff --git a/examples/kind-quickstart/src/__init__.py b/examples/kind-quickstart/src/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/examples/kind-quickstart/src/utils.py b/examples/kind-quickstart/src/utils.py new file mode 100644 index 0000000000..ea549d7ed8 --- /dev/null +++ b/examples/kind-quickstart/src/utils.py @@ -0,0 +1,12 @@ +import subprocess + +def port_forward(service, external_port, local_port=80) : + """ + Run a 
background process to forward port 80 of the given `service` service to the given `external_port` port. + + Returns: the process instance + """ + command = ["kubectl", "port-forward", f"service/{service}", f"{external_port}:{local_port}"] + process = subprocess.Popen(command) + print(f"Port-forwarding {service} with process ID: {process.pid}") + return process From 0192b2eb245c8e0ea9a913195ddf28382dc23982 Mon Sep 17 00:00:00 2001 From: lokeshrangineni <19699092+lokeshrangineni@users.noreply.github.com> Date: Tue, 24 Sep 2024 01:29:07 -0400 Subject: [PATCH 90/96] fix: Fixing the master branch build failure. (#4563) Fixing the master branch build failure. Signed-off-by: Lokesh Rangineni <19699092+lokeshrangineni@users.noreply.github.com> --- .../feast/core/FeatureViewProjection_pb2.py | 11 +++++---- .../feast/core/FeatureViewProjection_pb2.pyi | 23 +++++++++++++++++- .../feast/core/OnDemandFeatureView_pb2.py | 24 +++++++++---------- .../feast/core/OnDemandFeatureView_pb2.pyi | 15 +++++++++++- 4 files changed, 54 insertions(+), 19 deletions(-) diff --git a/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.py b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.py index 286f511658..b47d4fe392 100644 --- a/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.py +++ b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.py @@ -13,9 +13,10 @@ from feast.protos.feast.core import Feature_pb2 as feast_dot_core_dot_Feature__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&feast/core/FeatureViewProjection.proto\x12\nfeast.core\x1a\x18\x66\x65\x61st/core/Feature.proto\"\x83\x02\n\x15\x46\x65\x61tureViewProjection\x12\x19\n\x11\x66\x65\x61ture_view_name\x18\x01 \x01(\t\x12\x1f\n\x17\x66\x65\x61ture_view_name_alias\x18\x03 \x01(\t\x12\x32\n\x0f\x66\x65\x61ture_columns\x18\x02 
\x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12G\n\x0cjoin_key_map\x18\x04 \x03(\x0b\x32\x31.feast.core.FeatureViewProjection.JoinKeyMapEntry\x1a\x31\n\x0fJoinKeyMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42Z\n\x10\x66\x65\x61st.proto.coreB\x15\x46\x65\x61tureReferenceProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&feast/core/FeatureViewProjection.proto\x12\nfeast.core\x1a\x18\x66\x65\x61st/core/Feature.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\"\xba\x03\n\x15\x46\x65\x61tureViewProjection\x12\x19\n\x11\x66\x65\x61ture_view_name\x18\x01 \x01(\t\x12\x1f\n\x17\x66\x65\x61ture_view_name_alias\x18\x03 \x01(\t\x12\x32\n\x0f\x66\x65\x61ture_columns\x18\x02 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12G\n\x0cjoin_key_map\x18\x04 \x03(\x0b\x32\x31.feast.core.FeatureViewProjection.JoinKeyMapEntry\x12\x17\n\x0ftimestamp_field\x18\x05 \x01(\t\x12\x1d\n\x15\x64\x61te_partition_column\x18\x06 \x01(\t\x12 \n\x18\x63reated_timestamp_column\x18\x07 \x01(\t\x12,\n\x0c\x62\x61tch_source\x18\x08 \x01(\x0b\x32\x16.feast.core.DataSource\x12-\n\rstream_source\x18\t \x01(\x0b\x32\x16.feast.core.DataSource\x1a\x31\n\x0fJoinKeyMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42Z\n\x10\x66\x65\x61st.proto.coreB\x15\x46\x65\x61tureReferenceProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -25,8 +26,8 @@ _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\025FeatureReferenceProtoZ/github.com/feast-dev/feast/go/protos/feast/core' _globals['_FEATUREVIEWPROJECTION_JOINKEYMAPENTRY']._options = None _globals['_FEATUREVIEWPROJECTION_JOINKEYMAPENTRY']._serialized_options = b'8\001' - _globals['_FEATUREVIEWPROJECTION']._serialized_start=81 - _globals['_FEATUREVIEWPROJECTION']._serialized_end=340 - 
_globals['_FEATUREVIEWPROJECTION_JOINKEYMAPENTRY']._serialized_start=291 - _globals['_FEATUREVIEWPROJECTION_JOINKEYMAPENTRY']._serialized_end=340 + _globals['_FEATUREVIEWPROJECTION']._serialized_start=110 + _globals['_FEATUREVIEWPROJECTION']._serialized_end=552 + _globals['_FEATUREVIEWPROJECTION_JOINKEYMAPENTRY']._serialized_start=503 + _globals['_FEATUREVIEWPROJECTION_JOINKEYMAPENTRY']._serialized_end=552 # @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.pyi b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.pyi index 2c0a298e14..6b44ad4a93 100644 --- a/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.pyi +++ b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.pyi @@ -4,6 +4,7 @@ isort:skip_file """ import builtins import collections.abc +import feast.core.DataSource_pb2 import feast.core.Feature_pb2 import google.protobuf.descriptor import google.protobuf.internal.containers @@ -43,6 +44,11 @@ class FeatureViewProjection(google.protobuf.message.Message): FEATURE_VIEW_NAME_ALIAS_FIELD_NUMBER: builtins.int FEATURE_COLUMNS_FIELD_NUMBER: builtins.int JOIN_KEY_MAP_FIELD_NUMBER: builtins.int + TIMESTAMP_FIELD_FIELD_NUMBER: builtins.int + DATE_PARTITION_COLUMN_FIELD_NUMBER: builtins.int + CREATED_TIMESTAMP_COLUMN_FIELD_NUMBER: builtins.int + BATCH_SOURCE_FIELD_NUMBER: builtins.int + STREAM_SOURCE_FIELD_NUMBER: builtins.int feature_view_name: builtins.str """The feature view name""" feature_view_name_alias: builtins.str @@ -53,6 +59,15 @@ class FeatureViewProjection(google.protobuf.message.Message): @property def join_key_map(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """Map for entity join_key overrides of feature data entity join_key to entity data join_key""" + timestamp_field: builtins.str + date_partition_column: builtins.str + created_timestamp_column: builtins.str + @property + def batch_source(self) -> 
feast.core.DataSource_pb2.DataSource: + """Batch/Offline DataSource where this view can retrieve offline feature data.""" + @property + def stream_source(self) -> feast.core.DataSource_pb2.DataSource: + """Streaming DataSource from where this view can consume "online" feature data.""" def __init__( self, *, @@ -60,7 +75,13 @@ class FeatureViewProjection(google.protobuf.message.Message): feature_view_name_alias: builtins.str = ..., feature_columns: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., join_key_map: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + timestamp_field: builtins.str = ..., + date_partition_column: builtins.str = ..., + created_timestamp_column: builtins.str = ..., + batch_source: feast.core.DataSource_pb2.DataSource | None = ..., + stream_source: feast.core.DataSource_pb2.DataSource | None = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["feature_columns", b"feature_columns", "feature_view_name", b"feature_view_name", "feature_view_name_alias", b"feature_view_name_alias", "join_key_map", b"join_key_map"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "stream_source", b"stream_source"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "created_timestamp_column", b"created_timestamp_column", "date_partition_column", b"date_partition_column", "feature_columns", b"feature_columns", "feature_view_name", b"feature_view_name", "feature_view_name_alias", b"feature_view_name_alias", "join_key_map", b"join_key_map", "stream_source", b"stream_source", "timestamp_field", b"timestamp_field"]) -> None: ... 
global___FeatureViewProjection = FeatureViewProjection diff --git a/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.py b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.py index 4be551724c..a27c4fba3b 100644 --- a/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.py +++ b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.py @@ -20,7 +20,7 @@ from feast.protos.feast.core import Transformation_pb2 as feast_dot_core_dot_Transformation__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$feast/core/OnDemandFeatureView.proto\x12\nfeast.core\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1c\x66\x65\x61st/core/FeatureView.proto\x1a&feast/core/FeatureViewProjection.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x1f\x66\x65\x61st/core/Transformation.proto\"{\n\x13OnDemandFeatureView\x12\x31\n\x04spec\x18\x01 \x01(\x0b\x32#.feast.core.OnDemandFeatureViewSpec\x12\x31\n\x04meta\x18\x02 \x01(\x0b\x32#.feast.core.OnDemandFeatureViewMeta\"\x99\x04\n\x17OnDemandFeatureViewSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12+\n\x08\x66\x65\x61tures\x18\x03 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x41\n\x07sources\x18\x04 \x03(\x0b\x32\x30.feast.core.OnDemandFeatureViewSpec.SourcesEntry\x12\x42\n\x15user_defined_function\x18\x05 \x01(\x0b\x32\x1f.feast.core.UserDefinedFunctionB\x02\x18\x01\x12\x43\n\x16\x66\x65\x61ture_transformation\x18\n \x01(\x0b\x32#.feast.core.FeatureTransformationV2\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12;\n\x04tags\x18\x07 \x03(\x0b\x32-.feast.core.OnDemandFeatureViewSpec.TagsEntry\x12\r\n\x05owner\x18\x08 \x01(\t\x12\x0c\n\x04mode\x18\x0b \x01(\t\x1aJ\n\x0cSourcesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.feast.core.OnDemandSource:\x02\x38\x01\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\"\x8c\x01\n\x17OnDemandFeatureViewMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xc8\x01\n\x0eOnDemandSource\x12/\n\x0c\x66\x65\x61ture_view\x18\x01 \x01(\x0b\x32\x17.feast.core.FeatureViewH\x00\x12\x44\n\x17\x66\x65\x61ture_view_projection\x18\x03 \x01(\x0b\x32!.feast.core.FeatureViewProjectionH\x00\x12\x35\n\x13request_data_source\x18\x02 \x01(\x0b\x32\x16.feast.core.DataSourceH\x00\x42\x08\n\x06source\"H\n\x13UserDefinedFunction\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x62ody\x18\x02 \x01(\x0c\x12\x11\n\tbody_text\x18\x03 \x01(\t:\x02\x18\x01\x42]\n\x10\x66\x65\x61st.proto.coreB\x18OnDemandFeatureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$feast/core/OnDemandFeatureView.proto\x12\nfeast.core\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1c\x66\x65\x61st/core/FeatureView.proto\x1a&feast/core/FeatureViewProjection.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x1f\x66\x65\x61st/core/Transformation.proto\"{\n\x13OnDemandFeatureView\x12\x31\n\x04spec\x18\x01 \x01(\x0b\x32#.feast.core.OnDemandFeatureViewSpec\x12\x31\n\x04meta\x18\x02 \x01(\x0b\x32#.feast.core.OnDemandFeatureViewMeta\"\xfd\x04\n\x17OnDemandFeatureViewSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12+\n\x08\x66\x65\x61tures\x18\x03 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x41\n\x07sources\x18\x04 \x03(\x0b\x32\x30.feast.core.OnDemandFeatureViewSpec.SourcesEntry\x12\x42\n\x15user_defined_function\x18\x05 \x01(\x0b\x32\x1f.feast.core.UserDefinedFunctionB\x02\x18\x01\x12\x43\n\x16\x66\x65\x61ture_transformation\x18\n \x01(\x0b\x32#.feast.core.FeatureTransformationV2\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12;\n\x04tags\x18\x07 
\x03(\x0b\x32-.feast.core.OnDemandFeatureViewSpec.TagsEntry\x12\r\n\x05owner\x18\x08 \x01(\t\x12\x0c\n\x04mode\x18\x0b \x01(\t\x12\x1d\n\x15write_to_online_store\x18\x0c \x01(\x08\x12\x10\n\x08\x65ntities\x18\r \x03(\t\x12\x31\n\x0e\x65ntity_columns\x18\x0e \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x1aJ\n\x0cSourcesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.feast.core.OnDemandSource:\x02\x38\x01\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x8c\x01\n\x17OnDemandFeatureViewMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xc8\x01\n\x0eOnDemandSource\x12/\n\x0c\x66\x65\x61ture_view\x18\x01 \x01(\x0b\x32\x17.feast.core.FeatureViewH\x00\x12\x44\n\x17\x66\x65\x61ture_view_projection\x18\x03 \x01(\x0b\x32!.feast.core.FeatureViewProjectionH\x00\x12\x35\n\x13request_data_source\x18\x02 \x01(\x0b\x32\x16.feast.core.DataSourceH\x00\x42\x08\n\x06source\"H\n\x13UserDefinedFunction\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x62ody\x18\x02 \x01(\x0c\x12\x11\n\tbody_text\x18\x03 \x01(\t:\x02\x18\x01\x42]\n\x10\x66\x65\x61st.proto.coreB\x18OnDemandFeatureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -39,15 +39,15 @@ _globals['_ONDEMANDFEATUREVIEW']._serialized_start=243 _globals['_ONDEMANDFEATUREVIEW']._serialized_end=366 _globals['_ONDEMANDFEATUREVIEWSPEC']._serialized_start=369 - _globals['_ONDEMANDFEATUREVIEWSPEC']._serialized_end=906 - _globals['_ONDEMANDFEATUREVIEWSPEC_SOURCESENTRY']._serialized_start=787 - _globals['_ONDEMANDFEATUREVIEWSPEC_SOURCESENTRY']._serialized_end=861 - _globals['_ONDEMANDFEATUREVIEWSPEC_TAGSENTRY']._serialized_start=863 - _globals['_ONDEMANDFEATUREVIEWSPEC_TAGSENTRY']._serialized_end=906 - 
_globals['_ONDEMANDFEATUREVIEWMETA']._serialized_start=909 - _globals['_ONDEMANDFEATUREVIEWMETA']._serialized_end=1049 - _globals['_ONDEMANDSOURCE']._serialized_start=1052 - _globals['_ONDEMANDSOURCE']._serialized_end=1252 - _globals['_USERDEFINEDFUNCTION']._serialized_start=1254 - _globals['_USERDEFINEDFUNCTION']._serialized_end=1326 + _globals['_ONDEMANDFEATUREVIEWSPEC']._serialized_end=1006 + _globals['_ONDEMANDFEATUREVIEWSPEC_SOURCESENTRY']._serialized_start=887 + _globals['_ONDEMANDFEATUREVIEWSPEC_SOURCESENTRY']._serialized_end=961 + _globals['_ONDEMANDFEATUREVIEWSPEC_TAGSENTRY']._serialized_start=963 + _globals['_ONDEMANDFEATUREVIEWSPEC_TAGSENTRY']._serialized_end=1006 + _globals['_ONDEMANDFEATUREVIEWMETA']._serialized_start=1009 + _globals['_ONDEMANDFEATUREVIEWMETA']._serialized_end=1149 + _globals['_ONDEMANDSOURCE']._serialized_start=1152 + _globals['_ONDEMANDSOURCE']._serialized_end=1352 + _globals['_USERDEFINEDFUNCTION']._serialized_start=1354 + _globals['_USERDEFINEDFUNCTION']._serialized_end=1426 # @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.pyi b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.pyi index d72a8f9862..b2ec15b186 100644 --- a/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.pyi +++ b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.pyi @@ -104,6 +104,9 @@ class OnDemandFeatureViewSpec(google.protobuf.message.Message): TAGS_FIELD_NUMBER: builtins.int OWNER_FIELD_NUMBER: builtins.int MODE_FIELD_NUMBER: builtins.int + WRITE_TO_ONLINE_STORE_FIELD_NUMBER: builtins.int + ENTITIES_FIELD_NUMBER: builtins.int + ENTITY_COLUMNS_FIELD_NUMBER: builtins.int name: builtins.str """Name of the feature view. Must be unique. 
Not updated.""" project: builtins.str @@ -127,6 +130,13 @@ class OnDemandFeatureViewSpec(google.protobuf.message.Message): owner: builtins.str """Owner of the on demand feature view.""" mode: builtins.str + write_to_online_store: builtins.bool + @property + def entities(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """List of names of entities associated with this feature view.""" + @property + def entity_columns(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """List of specifications for each entity defined as part of this feature view.""" def __init__( self, *, @@ -140,9 +150,12 @@ class OnDemandFeatureViewSpec(google.protobuf.message.Message): tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., owner: builtins.str = ..., mode: builtins.str = ..., + write_to_online_store: builtins.bool = ..., + entities: collections.abc.Iterable[builtins.str] | None = ..., + entity_columns: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., ) -> None: ... def HasField(self, field_name: typing_extensions.Literal["feature_transformation", b"feature_transformation", "user_defined_function", b"user_defined_function"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "feature_transformation", b"feature_transformation", "features", b"features", "mode", b"mode", "name", b"name", "owner", b"owner", "project", b"project", "sources", b"sources", "tags", b"tags", "user_defined_function", b"user_defined_function"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "entities", b"entities", "entity_columns", b"entity_columns", "feature_transformation", b"feature_transformation", "features", b"features", "mode", b"mode", "name", b"name", "owner", b"owner", "project", b"project", "sources", b"sources", "tags", b"tags", "user_defined_function", b"user_defined_function", "write_to_online_store", b"write_to_online_store"]) -> None: ... global___OnDemandFeatureViewSpec = OnDemandFeatureViewSpec From 5850adc1290aa6e93f36fc6df4214cd9fdee39d4 Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Tue, 24 Sep 2024 11:32:40 -0400 Subject: [PATCH 91/96] chore: Adding docs, community, and examples to ignore during PR tests Signed-off-by: Francisco Javier Arceo --- .github/workflows/pr_integration_tests.yml | 6 +++++- .github/workflows/pr_local_integration_tests.yml | 5 ++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml index f4a9132d29..98dc06bcc1 100644 --- a/.github/workflows/pr_integration_tests.yml +++ b/.github/workflows/pr_integration_tests.yml @@ -2,6 +2,10 @@ name: pr-integration-tests on: pull_request_target: + paths-ignore: + - 'community/**' + - 'docs/**' + - 'examples/**' types: - opened - synchronize @@ -96,4 +100,4 @@ jobs: SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }} SNOWFLAKE_CI_ROLE: ${{ secrets.SNOWFLAKE_CI_ROLE }} SNOWFLAKE_CI_WAREHOUSE: ${{ secrets.SNOWFLAKE_CI_WAREHOUSE }} - run: make test-python-integration \ No newline at end of file + run: make test-python-integration diff --git a/.github/workflows/pr_local_integration_tests.yml b/.github/workflows/pr_local_integration_tests.yml index 3de7262193..0a85361e3c 100644 --- a/.github/workflows/pr_local_integration_tests.yml +++ b/.github/workflows/pr_local_integration_tests.yml @@ -3,11 +3,14 @@ name: pr-local-integration-tests on: pull_request_target: + 
paths-ignore: + - 'community/**' + - 'docs/**' + - 'examples/**' types: - opened - synchronize - labeled - jobs: integration-test-python-local: # when using pull_request_target, all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. From 626c94f755fa8f04a5672be40abdd7bc0e1502b2 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Tue, 24 Sep 2024 23:29:12 -0400 Subject: [PATCH 92/96] chore: Test FeatureView apply to expose entity_columns behavior pre and post apply() (#4571) * chore: Adding docs, community, and examples to ignore during PR tests Signed-off-by: Francisco Javier Arceo * chore: Updating test to confirm feature view apply behavior for entity columns Signed-off-by: Francisco Javier Arceo * removing changes from other PR Signed-off-by: Francisco Javier Arceo --------- Signed-off-by: Francisco Javier Arceo --- .github/workflows/pr_integration_tests.yml | 6 +----- .github/workflows/pr_local_integration_tests.yml | 5 +---- .../tests/unit/test_on_demand_python_transformation.py | 7 +++++++ 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml index 98dc06bcc1..f4a9132d29 100644 --- a/.github/workflows/pr_integration_tests.yml +++ b/.github/workflows/pr_integration_tests.yml @@ -2,10 +2,6 @@ name: pr-integration-tests on: pull_request_target: - paths-ignore: - - 'community/**' - - 'docs/**' - - 'examples/**' types: - opened - synchronize @@ -100,4 +96,4 @@ jobs: SNOWFLAKE_CI_PASSWORD: ${{ secrets.SNOWFLAKE_CI_PASSWORD }} SNOWFLAKE_CI_ROLE: ${{ secrets.SNOWFLAKE_CI_ROLE }} SNOWFLAKE_CI_WAREHOUSE: ${{ secrets.SNOWFLAKE_CI_WAREHOUSE }} - run: make test-python-integration + run: make test-python-integration \ No newline at end of file diff --git a/.github/workflows/pr_local_integration_tests.yml b/.github/workflows/pr_local_integration_tests.yml index 0a85361e3c..3de7262193 100644 --- a/.github/workflows/pr_local_integration_tests.yml 
+++ b/.github/workflows/pr_local_integration_tests.yml @@ -3,14 +3,11 @@ name: pr-local-integration-tests on: pull_request_target: - paths-ignore: - - 'community/**' - - 'docs/**' - - 'examples/**' types: - opened - synchronize - labeled + jobs: integration-test-python-local: # when using pull_request_target, all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. diff --git a/sdk/python/tests/unit/test_on_demand_python_transformation.py b/sdk/python/tests/unit/test_on_demand_python_transformation.py index c5bd68d6a8..ff7ad494ca 100644 --- a/sdk/python/tests/unit/test_on_demand_python_transformation.py +++ b/sdk/python/tests/unit/test_on_demand_python_transformation.py @@ -307,6 +307,8 @@ def setUp(self): online=True, source=driver_stats_source, ) + assert driver_stats_fv.entities == [driver.name] + assert driver_stats_fv.entity_columns == [] request_source = RequestSource( name="request_source", @@ -373,6 +375,11 @@ def python_view(inputs: dict[str, Any]) -> dict[str, Any]: feature_view_name="driver_hourly_stats", df=driver_df ) + fv_applied = self.store.get_feature_view("driver_hourly_stats") + assert fv_applied.entities == [driver.name] + # Note here that after apply() is called, the entity_columns are populated with the join_key + assert fv_applied.entity_columns[0].name == driver.join_key + def test_python_transformation_returning_all_data_types(self): entity_rows = [ { From 354c059e5475f9c3927d9180a421118507a22cf0 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Tue, 24 Sep 2024 23:31:14 -0400 Subject: [PATCH 93/96] feat: Adding registry cache support for get_on_demand_feature_view (#4572) Signed-off-by: Francisco Javier Arceo --- sdk/python/feast/feature_store.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index ab2bc6cec2..52556eda15 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ 
-492,20 +492,24 @@ def _get_stream_feature_view( stream_feature_view.entities = [] return stream_feature_view - def get_on_demand_feature_view(self, name: str) -> OnDemandFeatureView: + def get_on_demand_feature_view( + self, name: str, allow_registry_cache: bool = False + ) -> OnDemandFeatureView: """ Retrieves a feature view. Args: name: Name of feature view. - + allow_registry_cache: (Optional) Whether to allow returning this entity from a cached registry Returns: The specified feature view. Raises: FeatureViewNotFoundException: The feature view could not be found. """ - return self._registry.get_on_demand_feature_view(name, self.project) + return self._registry.get_on_demand_feature_view( + name, self.project, allow_cache=allow_registry_cache + ) def get_data_source(self, name: str) -> DataSource: """ From 3198371fc0e07f6b51b62c7e3abbc48729078bb9 Mon Sep 17 00:00:00 2001 From: Theodor Mihalache <84387487+tmihalac@users.noreply.github.com> Date: Wed, 25 Sep 2024 13:01:49 -0400 Subject: [PATCH 94/96] fix: Removed usage of pull_request_target as much as possible to prevent security concerns (#4549) * Test workflow changes: on from pull_request_target to pull_request Signed-off-by: Theodor Mihalache * fix: Removed usage of pull_request_target as much as possible to prevent security concerns Signed-off-by: Theodor Mihalache --------- Signed-off-by: Theodor Mihalache --- .github/workflows/java_pr.yml | 7 +++++++ .github/workflows/lint_pr.yml | 8 ++------ .github/workflows/pr_integration_tests.yml | 4 ++++ .github/workflows/pr_local_integration_tests.yml | 12 +++++------- 4 files changed, 18 insertions(+), 13 deletions(-) diff --git a/.github/workflows/java_pr.yml b/.github/workflows/java_pr.yml index fa373fea23..caf31ab47f 100644 --- a/.github/workflows/java_pr.yml +++ b/.github/workflows/java_pr.yml @@ -7,6 +7,9 @@ on: - synchronize - labeled +permissions: + pull-requests: read + jobs: lint-java: # when using pull_request_target, all jobs MUST have this if check for 
'ok-to-test' or 'approved' for security purposes. @@ -23,6 +26,7 @@ jobs: # code from the PR. ref: refs/pull/${{ github.event.pull_request.number }}/merge submodules: recursive + persist-credentials: false - name: Lint java run: make lint-java @@ -42,6 +46,7 @@ jobs: # code from the PR. ref: refs/pull/${{ github.event.pull_request.number }}/merge submodules: recursive + persist-credentials: false - name: Set up JDK 11 uses: actions/setup-java@v1 with: @@ -84,6 +89,7 @@ jobs: - uses: actions/checkout@v4 with: submodules: 'true' + persist-credentials: false - name: Setup Python uses: actions/setup-python@v5 id: setup-python @@ -120,6 +126,7 @@ jobs: # code from the PR. ref: refs/pull/${{ github.event.pull_request.number }}/merge submodules: recursive + persist-credentials: false - name: Set up JDK 11 uses: actions/setup-java@v1 with: diff --git a/.github/workflows/lint_pr.yml b/.github/workflows/lint_pr.yml index d1aa7d16a3..8173225845 100644 --- a/.github/workflows/lint_pr.yml +++ b/.github/workflows/lint_pr.yml @@ -1,20 +1,16 @@ name: lint-pr on: - pull_request_target: + pull_request: types: - opened - edited - synchronize -permissions: - # read-only perms specified due to use of pull_request_target in lieu of security label check - pull-requests: read - jobs: validate-title: if: - github.repository == 'feast-dev/feast' + github.event.pull_request.base.repo.full_name == 'feast-dev/feast' name: Validate PR title runs-on: ubuntu-latest steps: diff --git a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml index f4a9132d29..59de3ce958 100644 --- a/.github/workflows/pr_integration_tests.yml +++ b/.github/workflows/pr_integration_tests.yml @@ -11,6 +11,9 @@ on: #concurrency: # group: pr-integration-tests-${{ github.event.pull_request.number }} # cancel-in-progress: true +permissions: + actions: write + pull-requests: read jobs: integration-test-python: @@ -46,6 +49,7 @@ jobs: # code from the PR. 
ref: refs/pull/${{ github.event.pull_request.number }}/merge submodules: recursive + persist-credentials: false - name: Setup Python uses: actions/setup-python@v5 id: setup-python diff --git a/.github/workflows/pr_local_integration_tests.yml b/.github/workflows/pr_local_integration_tests.yml index 3de7262193..6515d411f0 100644 --- a/.github/workflows/pr_local_integration_tests.yml +++ b/.github/workflows/pr_local_integration_tests.yml @@ -2,7 +2,7 @@ name: pr-local-integration-tests # This runs local tests with containerized stubs of online stores. This is the main dev workflow on: - pull_request_target: + pull_request: types: - opened - synchronize @@ -10,11 +10,10 @@ on: jobs: integration-test-python-local: - # when using pull_request_target, all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. if: ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) && - github.repository == 'feast-dev/feast' + github.event.pull_request.base.repo.full_name == 'feast-dev/feast' runs-on: ${{ matrix.os }} strategy: fail-fast: false @@ -27,10 +26,9 @@ jobs: steps: - uses: actions/checkout@v4 with: - # pull_request_target runs the workflow in the context of the base repo - # as such actions/checkout needs to be explicit configured to retrieve - # code from the PR. 
- ref: refs/pull/${{ github.event.pull_request.number }}/merge + repository: ${{ github.event.repository.full_name }} # Uses the full repository name + ref: ${{ github.ref }} # Uses the ref from the event + token: ${{ secrets.GITHUB_TOKEN }} # Automatically provided token submodules: recursive - name: Setup Python uses: actions/setup-python@v5 From 1c68dc9b0de698d6a74b626a3f38dc813fd28886 Mon Sep 17 00:00:00 2001 From: "devin-ai-integration[bot]" <158243242+devin-ai-integration[bot]@users.noreply.github.com> Date: Wed, 25 Sep 2024 20:11:48 +0000 Subject: [PATCH 95/96] Fix deprecation warnings for datetime adapter and timestamp converter in SQLite --- sdk/python/feast/infra/online_stores/sqlite.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/sdk/python/feast/infra/online_stores/sqlite.py b/sdk/python/feast/infra/online_stores/sqlite.py index 061a766b8c..10ba0c06cf 100644 --- a/sdk/python/feast/infra/online_stores/sqlite.py +++ b/sdk/python/feast/infra/online_stores/sqlite.py @@ -109,6 +109,11 @@ def online_write_batch( project = config.project + def adapt_datetime(dt): + return dt.isoformat() + + sqlite3.register_adapter(datetime, adapt_datetime) + with conn: for entity_key, values, timestamp, created_ts in data: entity_key_bin = serialize_entity_key( @@ -200,6 +205,11 @@ def online_read( conn = self._get_conn(config) cur = conn.cursor() + def convert_timestamp(ts_str): + return datetime.fromisoformat(ts_str) + + sqlite3.register_converter("timestamp", convert_timestamp) + result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] # Fetch all entities in one go From 3c3ceb267dd198ee2fcedcb292d49ba3a6f5613d Mon Sep 17 00:00:00 2001 From: "devin-ai-integration[bot]" <158243242+devin-ai-integration[bot]@users.noreply.github.com> Date: Wed, 25 Sep 2024 20:16:46 +0000 Subject: [PATCH 96/96] Fix TypeError in convert_timestamp by decoding bytes to string --- sdk/python/feast/infra/online_stores/sqlite.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/online_stores/sqlite.py b/sdk/python/feast/infra/online_stores/sqlite.py index 10ba0c06cf..c54a336aad 100644 --- a/sdk/python/feast/infra/online_stores/sqlite.py +++ b/sdk/python/feast/infra/online_stores/sqlite.py @@ -206,7 +206,7 @@ def online_read( cur = conn.cursor() def convert_timestamp(ts_str): - return datetime.fromisoformat(ts_str) + return datetime.fromisoformat(ts_str.decode('utf-8') if isinstance(ts_str, bytes) else ts_str) sqlite3.register_converter("timestamp", convert_timestamp)