diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 04fcbb00aa..1b15dcf882 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -17,13 +17,13 @@ "ghcr.io/devcontainers-contrib/features/maven-sdkman:2": { "jdkVersion": "11.0.24-amzn" } - }, + } // Use 'forwardPorts' to make a list of ports inside the container available locally. // "forwardPorts": [], // Uncomment the next line to run commands after the container is created. - "postCreateCommand": "make install-python-ci-dependencies-uv-venv" + // "postCreateCommand": "make install-python-ci-dependencies-uv-venv" // Configure tool-specific properties. // "customizations": {}, diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index b7d630e8bc..40986a87db 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,11 +1,11 @@ diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index f04015a989..14d0b7d5ae 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -160,7 +160,7 @@ jobs: with: python-version: ${{ matrix.python-version }} architecture: x64 - - uses: actions/download-artifact@v2 + - uses: actions/download-artifact@v4.1.7 with: name: wheels path: dist diff --git a/.github/workflows/java_pr.yml b/.github/workflows/java_pr.yml index fa373fea23..caf31ab47f 100644 --- a/.github/workflows/java_pr.yml +++ b/.github/workflows/java_pr.yml @@ -7,6 +7,9 @@ on: - synchronize - labeled +permissions: + pull-requests: read + jobs: lint-java: # when using pull_request_target, all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. @@ -23,6 +26,7 @@ jobs: # code from the PR. ref: refs/pull/${{ github.event.pull_request.number }}/merge submodules: recursive + persist-credentials: false - name: Lint java run: make lint-java @@ -42,6 +46,7 @@ jobs: # code from the PR. ref: refs/pull/${{ github.event.pull_request.number }}/merge submodules: recursive + persist-credentials: false - name: Set up JDK 11 uses: actions/setup-java@v1 with: @@ -84,6 +89,7 @@ jobs: - uses: actions/checkout@v4 with: submodules: 'true' + persist-credentials: false - name: Setup Python uses: actions/setup-python@v5 id: setup-python @@ -120,6 +126,7 @@ jobs: # code from the PR. 
ref: refs/pull/${{ github.event.pull_request.number }}/merge submodules: recursive + persist-credentials: false - name: Set up JDK 11 uses: actions/setup-java@v1 with: diff --git a/.github/workflows/lint_pr.yml b/.github/workflows/lint_pr.yml index d1aa7d16a3..8173225845 100644 --- a/.github/workflows/lint_pr.yml +++ b/.github/workflows/lint_pr.yml @@ -1,20 +1,16 @@ name: lint-pr on: - pull_request_target: + pull_request: types: - opened - edited - synchronize -permissions: - # read-only perms specified due to use of pull_request_target in lieu of security label check - pull-requests: read - jobs: validate-title: if: - github.repository == 'feast-dev/feast' + github.event.pull_request.base.repo.full_name == 'feast-dev/feast' name: Validate PR title runs-on: ubuntu-latest steps: diff --git a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml index f4a9132d29..59de3ce958 100644 --- a/.github/workflows/pr_integration_tests.yml +++ b/.github/workflows/pr_integration_tests.yml @@ -11,6 +11,9 @@ on: #concurrency: # group: pr-integration-tests-${{ github.event.pull_request.number }} # cancel-in-progress: true +permissions: + actions: write + pull-requests: read jobs: integration-test-python: @@ -46,6 +49,7 @@ jobs: # code from the PR. ref: refs/pull/${{ github.event.pull_request.number }}/merge submodules: recursive + persist-credentials: false - name: Setup Python uses: actions/setup-python@v5 id: setup-python diff --git a/.github/workflows/pr_local_integration_tests.yml b/.github/workflows/pr_local_integration_tests.yml index 3de7262193..6515d411f0 100644 --- a/.github/workflows/pr_local_integration_tests.yml +++ b/.github/workflows/pr_local_integration_tests.yml @@ -2,7 +2,7 @@ name: pr-local-integration-tests # This runs local tests with containerized stubs of online stores. This is the main dev workflow on: - pull_request_target: + pull_request: types: - opened - synchronize @@ -10,11 +10,10 @@ on: jobs: integration-test-python-local: - # when using pull_request_target, all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. if: ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) && - github.repository == 'feast-dev/feast' + github.event.pull_request.base.repo.full_name == 'feast-dev/feast' runs-on: ${{ matrix.os }} strategy: fail-fast: false @@ -27,10 +26,9 @@ jobs: steps: - uses: actions/checkout@v4 with: - # pull_request_target runs the workflow in the context of the base repo - # as such actions/checkout needs to be explicit configured to retrieve - # code from the PR. 
- ref: refs/pull/${{ github.event.pull_request.number }}/merge + repository: ${{ github.event.repository.full_name }} # Uses the full repository name + ref: ${{ github.ref }} # Uses the ref from the event + token: ${{ secrets.GITHUB_TOKEN }} # Automatically provided token submodules: recursive - name: Setup Python uses: actions/setup-python@v5 diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index e56296ec4b..0342943313 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -134,7 +134,7 @@ jobs: runs-on: ubuntu-latest needs: [build_wheels] steps: - - uses: actions/download-artifact@v2 + - uses: actions/download-artifact@v4.1.7 with: name: wheels path: dist diff --git a/.github/workflows/smoke_tests.yml b/.github/workflows/smoke_tests.yml new file mode 100644 index 0000000000..782f8b3f51 --- /dev/null +++ b/.github/workflows/smoke_tests.yml @@ -0,0 +1,38 @@ +name: smoke-tests + +on: [pull_request] +jobs: + unit-test-python: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: [ "3.9", "3.10", "3.11"] + os: [ ubuntu-latest ] + env: + OS: ${{ matrix.os }} + PYTHON: ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v4 + - name: Setup Python + id: setup-python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + architecture: x64 + - name: Install uv + run: | + curl -LsSf https://astral.sh/uv/install.sh | sh + - name: Get uv cache dir + id: uv-cache + run: | + echo "dir=$(uv cache dir)" >> "$GITHUB_OUTPUT" + - name: uv cache + uses: actions/cache@v4 + with: + path: ${{ steps.uv-cache.outputs.dir }} + key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-uv-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }} + - name: Install dependencies + run: make install-python-dependencies-uv + - name: Test Imports + run: python -c "from feast import cli" \ No newline at end of file diff --git a/.gitignore b/.gitignore index e4e82bfce4..d558463c65 100644 --- a/.gitignore +++ b/.gitignore @@ -185,7 +185,6 @@ dmypy.json # Protos sdk/python/docs/html -sdk/python/feast/protos/ sdk/go/protos/ go/protos/ diff --git a/.releaserc.js b/.releaserc.js index c4ad52c9b2..ee9c62a04d 100644 --- a/.releaserc.js +++ b/.releaserc.js @@ -66,7 +66,7 @@ module.exports = { "CHANGELOG.md", "java/pom.xml", "infra/charts/**/*.*", - "infra/feast-operator/**/*.*", + "infra/feast-operator/**/*", "ui/package.json", "sdk/python/feast/ui/package.json", "sdk/python/feast/ui/yarn.lock" diff --git a/CODEOWNERS b/CODEOWNERS index 18914d9f5d..75ede8b6aa 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -2,13 +2,13 @@ # for more info about CODEOWNERS file # Core Interfaces -/sdk/python/feast/infra/offline_stores/offline_store.py @feast-dev/maintainers @sfc-gh-madkins -/sdk/python/feast/infra/online_stores/online_store.py @feast-dev/maintainers @DvirDukhan -/sdk/python/feast/infra/materialization_engine/batch_materialization_engine.py @feast-dev/maintainers @whoahbot @sfc-gh-madkins +/sdk/python/feast/infra/offline_stores/offline_store.py @feast-dev/maintainers +/sdk/python/feast/infra/online_stores/online_store.py @feast-dev/maintainers +/sdk/python/feast/infra/materialization_engine/batch_materialization_engine.py @feast-dev/maintainers # ==== Offline Stores ==== # Core utils -/sdk/python/feast/infra/offline_stores/offline_utils.py @feast-dev/maintainers @sfc-gh-madkins +/sdk/python/feast/infra/offline_stores/offline_utils.py @feast-dev/maintainers # Offline interfaces
/sdk/python/feast/infra/offline_stores/offline_store.py @feast-dev/maintainers @@ -18,38 +18,10 @@ /sdk/python/feast/infra/offline_stores/bigquery_source.py @sudohainguyen /sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py @sudohainguyen -# Snowflake -/sdk/python/feast/infra/offline_stores/snowflake* @sfc-gh-madkins - -# Athena (contrib) -/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/ @toping4445 - -# Azure SQL (contrib) -/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/ @kevjumba - -# Spark (contrib) -/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/ @niklasvm @kevjumba - # ==== Online Stores ==== # HBase /sdk/python/feast/infra/online_stores/hbase.py @sudohainguyen /sdk/python/feast/infra/online_stores/contrib/hbase_online_store @sudohainguyen -# Redis -/sdk/python/feast/infra/online_stores/redis.py @DvirDukhan -/java/feast/serving/connectors/redis/ @DvirDukhan - -# Snowflake -/sdk/python/feast/infra/online_stores/snowflake.py @sfc-gh-madkins - -# Cassandra (contrib) -/sdk/python/feast/infra/online_stores/cassandra_online_store/ @hemidactylus - # ==== Batch Materialization Engines ==== - -# Snowflake -/sdk/python/feast/infra/materialization/snowflake* @sfc-gh-madkins - -# AWS Lambda -/sdk/python/feast/infra/materialization/contrib/aws_lambda/ @achals diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2bc0915002..eae34fe0c3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,3 +1,3 @@

Development Guide: Main Feast Repository

-> Please see [Development Guide](https://docs.feast.dev/project/development-guide) for project level development instructions, including instructions for Maintainers. +> Please see [Development Guide](docs/project/development-guide.md) for project level development instructions, including instructions for Maintainers. diff --git a/Makefile b/Makefile index 5e3bd0d913..6831a58337 100644 --- a/Makefile +++ b/Makefile @@ -21,6 +21,7 @@ ifeq ($(shell uname -s), Darwin) OS = osx endif TRINO_VERSION ?= 376 +PYTHON_VERSION = ${shell python --version | grep -Eo '[0-9]\.[0-9]+'} # General @@ -36,44 +37,48 @@ build: protos build-java build-docker # Python SDK +install-python-dependencies-uv: + uv pip sync --system sdk/python/requirements/py$(PYTHON_VERSION)-requirements.txt + uv pip install --system --no-deps . + +install-python-dependencies-uv-venv: + uv pip sync sdk/python/requirements/py$(PYTHON_VERSION)-requirements.txt + uv pip install --no-deps . + install-python-ci-dependencies: - python -m piptools sync sdk/python/requirements/py$(PYTHON)-ci-requirements.txt + python -m piptools sync sdk/python/requirements/py$(PYTHON_VERSION)-ci-requirements.txt pip install --no-deps -e . - python setup.py build_python_protos --inplace install-python-ci-dependencies-uv: - uv pip sync --system sdk/python/requirements/py$(PYTHON)-ci-requirements.txt + uv pip sync --system sdk/python/requirements/py$(PYTHON_VERSION)-ci-requirements.txt uv pip install --system --no-deps -e . - python setup.py build_python_protos --inplace install-python-ci-dependencies-uv-venv: - uv pip sync sdk/python/requirements/py$(PYTHON)-ci-requirements.txt + uv pip sync sdk/python/requirements/py$(PYTHON_VERSION)-ci-requirements.txt uv pip install --no-deps -e . - python setup.py build_python_protos --inplace lock-python-ci-dependencies: - uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py$(PYTHON)-ci-requirements.txt - -package-protos: - cp -r ${ROOT_DIR}/protos ${ROOT_DIR}/sdk/python/feast/protos + uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py$(PYTHON_VERSION)-ci-requirements.txt compile-protos-python: - python setup.py build_python_protos --inplace + python infra/scripts/generate_protos.py install-python: - python -m piptools sync sdk/python/requirements/py$(PYTHON)-requirements.txt + python -m piptools sync sdk/python/requirements/py$(PYTHON_VERSION)-requirements.txt python setup.py develop lock-python-dependencies: - uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py$(PYTHON)-requirements.txt + uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py$(PYTHON_VERSION)-requirements.txt lock-python-dependencies-all: - pixi run --environment py39 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.9-requirements.txt" - pixi run --environment py39 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.9-ci-requirements.txt" - pixi run --environment py310 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.10-requirements.txt" - pixi run --environment py310 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile --system --no-strip-extras setup.py --extra ci --output-file 
sdk/python/requirements/py3.10-ci-requirements.txt" - pixi run --environment py311 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.11-requirements.txt" - pixi run --environment py311 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.11-ci-requirements.txt" + # Remove all existing requirements because we noticed the lock file is not always updated correctly. Removing and running the command again ensures that the lock file is always up to date. + rm -r sdk/python/requirements/* + pixi run --environment py39 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile -p 3.9 --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.9-requirements.txt" + pixi run --environment py39 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile -p 3.9 --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.9-ci-requirements.txt" + pixi run --environment py310 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile -p 3.10 --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.10-requirements.txt" + pixi run --environment py310 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile -p 3.10 --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.10-ci-requirements.txt" + pixi run --environment py311 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile -p 3.11 --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.11-requirements.txt" + pixi run --environment py311 --manifest-path infra/scripts/pixi/pixi.toml "uv pip compile -p 3.11 --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.11-ci-requirements.txt" benchmark-python: IS_TEST=True python -m pytest --integration --benchmark --benchmark-autosave --benchmark-save-data sdk/python/tests @@ -85,14 +90,14 @@ test-python-unit: python -m pytest -n 8 --color=yes sdk/python/tests test-python-integration: - python -m pytest -n 8 --integration --color=yes --durations=10 --timeout=1200 --timeout_method=thread \ + python -m pytest -n 8 --integration --color=yes --durations=10 --timeout=1200 --timeout_method=thread --dist loadgroup \ -k "(not snowflake or not test_historical_features_main)" \ sdk/python/tests test-python-integration-local: FEAST_IS_LOCAL_TEST=True \ FEAST_LOCAL_ONLINE_CONTAINER=True \ - python -m pytest -n 8 --color=yes --integration --durations=5 --dist loadgroup \ + python -m pytest -n 8 --color=yes --integration --durations=10 --timeout=1200 --timeout_method=thread --dist loadgroup \ -k "not test_lambda_materialization and not test_snowflake_materialization" \ sdk/python/tests @@ -394,9 +399,6 @@ test-trino-plugin-locally: kill-trino-locally: cd ${ROOT_DIR}; docker stop trino -install-protoc-dependencies: - pip install --ignore-installed protobuf==4.24.0 "grpcio-tools>=1.56.2,<2" mypy-protobuf==3.1.0 - # Docker build-docker: build-feature-server-python-aws-docker build-feature-transformation-server-docker build-feature-server-java-docker diff --git a/README.md b/README.md index ede28c4c95..6f17e7fa6c 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ [![GitHub Release](https://img.shields.io/github/v/release/feast-dev/feast.svg?style=flat&sort=semver&color=blue)](https://github.com/feast-dev/feast/releases) ## Join us on Slack! 
-👋👋👋 [Come say hi on Slack!](https://join.slack.com/t/feastopensource/signup) +👋👋👋 [Come say hi on Slack!](https://communityinviter.com/apps/feastopensource/feast-the-open-source-feature-store) ## Overview @@ -187,6 +187,7 @@ The list below contains the functionality that contributors are planning to deve * [x] On-demand Transformations (Beta release. See [RFC](https://docs.google.com/document/d/1lgfIw0Drc65LpaxbUu49RCeJgMew547meSJttnUqz7c/edit#)) * [x] Streaming Transformations (Alpha release. See [RFC](https://docs.google.com/document/d/1UzEyETHUaGpn0ap4G82DHluiCj7zEbrQLkJJkKSv4e8/edit)) * [ ] Batch transformation (In progress. See [RFC](https://docs.google.com/document/d/1964OkzuBljifDvkV-0fakp2uaijnVzdwWNGdz7Vz50A/edit)) + * [ ] Persistent On-demand Transformations (Beta release. See [GitHub Issue](https://github.com/feast-dev/feast/issues/4376)) * **Streaming** * [x] [Custom streaming ingestion job support](https://docs.feast.dev/how-to-guides/customizing-feast/creating-a-custom-provider) * [x] [Push based streaming data ingestion to online store](https://docs.feast.dev/reference/data-sources/push) @@ -208,6 +209,9 @@ The list below contains the functionality that contributors are planning to deve * [x] Amundsen integration (see [Feast extractor](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/feast_extractor.py)) * [x] DataHub integration (see [DataHub Feast docs](https://datahubproject.io/docs/generated/ingestion/sources/feast/)) * [x] Feast Web UI (Beta release. See [docs](https://docs.feast.dev/reference/alpha-web-ui)) + * [ ] Feast Lineage Explorer +* **Natural Language Processing** + * [x] Vector Search (Alpha release. See [RFC](https://docs.google.com/document/d/18IWzLEA9i2lDWnbfbwXnMCg3StlqaLVI-uRpQjr_Vos/edit#heading=h.9gaqqtox9jg6)) ## 🎓 Important Resources diff --git a/community/ADOPTERS.md b/community/ADOPTERS.md new file mode 100644 index 0000000000..a16fbef379 --- /dev/null +++ b/community/ADOPTERS.md @@ -0,0 +1,15 @@ +# Adopters of Feast + +Below are the adopters of Feast. If you are using Feast please add +yourself to the following list by a pull request. Please keep the list in +alphabetical order. + +| Organization | Contact | GitHub Username | +| ------------ | ------- | ------- | +| Affirm | Francisco Javier Arceo | franciscojavierarceo | +| Bank of Georgia | Tornike Gurgenidze | tokoko | +| Get Ground | Zhiling Chen | zhilingc | +| Gojek | Pradithya Aria Pura | pradithya | +| Shopify | Matt Delacour | MattDelac | +| Snowflake | Miles Adkins | sfc-gh-madkins | +| Twitter | David Liu | mavysavydav | diff --git a/docs/README.md b/docs/README.md index d391069429..6652eaddc8 100644 --- a/docs/README.md +++ b/docs/README.md @@ -2,7 +2,16 @@ ## What is Feast? -Feast (**Fea**ture **St**ore) is a customizable operational data system that re-uses existing infrastructure to manage and serve machine learning features to realtime models. +Feast (**Fea**ture **St**ore) is an [open-source](https://github.com/feast-dev/feast) feature store that helps teams +operate production ML systems at scale by allowing them to define, manage, validate, and serve features for production +AI/ML. + +Feast's feature store is composed of two foundational components: (1) an [offline store](getting-started/components/offline-store.md) +for historical feature extraction used in model training and (2) an [online store](getting-started/components/online-store.md) +for serving features at low latency in production systems and applications.
+ +Feast is a configurable operational data system that re-uses existing infrastructure to manage and serve machine learning +features to realtime models. For more details please review our [architecture](getting-started/architecture/overview.md). Feast allows ML platform teams to: @@ -20,38 +29,31 @@ Feast allows ML platform teams to: **Note:** Feast uses a push model for online serving. This means that the feature store pushes feature values to the online store, which reduces the latency of feature retrieval. This is more efficient than a pull model, where the model serving system must make a request to the feature store to retrieve feature values. See -[this document](getting-started/architecture-and-components/push-vs-pull-model.md) for a more detailed discussion. -{% endhint %} - -{% hint style="info" %} +[this document](getting-started/architecture/push-vs-pull-model.md) for a more detailed discussion. {% endhint %} ## Who is Feast for? -Feast helps ML platform teams with DevOps experience productionize real-time models. Feast can also help these teams build towards a feature platform that improves collaboration between engineers and data scientists. +Feast helps ML platform/MLOps teams with DevOps experience productionize real-time models. Feast also helps these teams +build a feature platform that improves collaboration between data engineers, software engineers, machine learning +engineers, and data scientists. Feast is likely **not** the right tool if you - * are in an organization that's just getting started with ML and is not yet sure what the business impact of ML is -* rely primarily on unstructured data -* need very low latency feature retrieval (e.g. p99 feature retrieval << 10ms) -* have a small team to support a large number of use cases ## What Feast is not? ### Feast is not -* **an** [**ETL**](https://en.wikipedia.org/wiki/Extract,\_transform,\_load) / [**ELT**](https://en.wikipedia.org/wiki/Extract,\_load,\_transform) **system:** Feast is not (and does not plan to become) a general purpose data transformation or pipelining system. Users often leverage tools like [dbt](https://www.getdbt.com) to manage upstream data transformations. +* **an** [**ETL**](https://en.wikipedia.org/wiki/Extract,\_transform,\_load) / [**ELT**](https://en.wikipedia.org/wiki/Extract,\_load,\_transform) **system.** Feast is not a general purpose data pipelining system. Users often leverage tools like [dbt](https://www.getdbt.com) to manage upstream data transformations. Feast does support some [transformations](getting-started/architecture/feature-transformation.md). * **a data orchestration tool:** Feast does not manage or orchestrate complex workflow DAGs. It relies on upstream data pipelines to produce feature values and integrations with tools like [Airflow](https://airflow.apache.org) to make features consistently available. * **a data warehouse:** Feast is not a replacement for your data warehouse or the source of truth for all transformed data in your organization. Rather, Feast is a light-weight downstream layer that can serve data from an existing data warehouse (or other data sources) to models in production. * **a database:** Feast is not a database, but helps manage data stored in other systems (e.g.
BigQuery, Snowflake, DynamoDB, Redis) to make features consistently available at training / serving time ### Feast does not _fully_ solve - * **reproducible model training / model backtesting / experiment management**: Feast captures feature and model metadata, but does not version-control datasets / labels or manage train / test splits. Other tools like [DVC](https://dvc.org/), [MLflow](https://www.mlflow.org/), and [Kubeflow](https://www.kubeflow.org/) are better suited for this. -* **batch + streaming feature engineering**: Feast primarily processes already transformed feature values but is investing in supporting batch and streaming transformations. +* **batch feature engineering**: Feast supports on demand and streaming transformations. Feast is also investing in supporting batch transformations. * **native streaming feature integration:** Feast enables users to push streaming features, but does not pull from streaming sources or manage streaming pipelines. -* **feature sharing**: Feast has experimental functionality to enable discovery and cataloguing of feature metadata with a [Feast web UI (alpha)](https://docs.feast.dev/reference/alpha-web-ui). Feast also has community contributed plugins with [DataHub](https://datahubproject.io/docs/generated/ingestion/sources/feast/) and [Amundsen](https://github.com/amundsen-io/amundsen/blob/4a9d60176767c4d68d1cad5b093320ea22e26a49/databuilder/databuilder/extractor/feast\_extractor.py). * **lineage:** Feast helps tie feature values to model versions, but is not a complete solution for capturing end-to-end lineage from raw data sources to model versions. Feast also has community contributed plugins with [DataHub](https://datahubproject.io/docs/generated/ingestion/sources/feast/) and [Amundsen](https://github.com/amundsen-io/amundsen/blob/4a9d60176767c4d68d1cad5b093320ea22e26a49/databuilder/databuilder/extractor/feast\_extractor.py). * **data quality / drift detection**: Feast has experimental integrations with [Great Expectations](https://greatexpectations.io/), but is not purpose built to solve data drift / data quality issues. This requires more sophisticated monitoring across data pipelines, served feature values, labels, and model versions. @@ -74,7 +76,7 @@ Explore the following resources to get started with Feast: * [Quickstart](getting-started/quickstart.md) is the fastest way to get started with Feast * [Concepts](getting-started/concepts/) describes all important Feast API concepts -* [Architecture](getting-started/architecture-and-components/) describes Feast's overall architecture. +* [Architecture](getting-started/architecture/) describes Feast's overall architecture. * [Tutorials](tutorials/tutorials-overview/) shows full examples of using Feast in machine learning applications. * [Running Feast with Snowflake/GCP/AWS](how-to-guides/feast-snowflake-gcp-aws/) provides a more in-depth guide to using Feast. * [Reference](reference/feast-cli-commands.md) contains detailed API and design documents. 
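To make the offline/online split described in the docs/README.md changes above concrete, here is a minimal sketch of online retrieval with the Feast Python SDK; the repo path and the feature/entity names ("driver_hourly_stats", "conv_rate", "driver_id") are hypothetical placeholders:

```python
from feast import FeatureStore

# Minimal sketch of online retrieval. The repo path, feature view name,
# feature name, and entity key below are hypothetical placeholders and
# must match definitions in your own feature repository.
store = FeatureStore(repo_path=".")
online_features = store.get_online_features(
    features=["driver_hourly_stats:conv_rate"],
    entity_rows=[{"driver_id": 1001}],
).to_dict()
print(online_features)
```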
diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index 87c3626254..1c4cece799 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -8,6 +8,14 @@ ## Getting started * [Quickstart](getting-started/quickstart.md) +* [Architecture](getting-started/architecture/README.md) + * [Overview](getting-started/architecture/overview.md) + * [Language](getting-started/architecture/language.md) + * [Push vs Pull Model](getting-started/architecture/push-vs-pull-model.md) + * [Write Patterns](getting-started/architecture/write-patterns.md) + * [Feature Transformation](getting-started/architecture/feature-transformation.md) + * [Feature Serving and Model Inference](getting-started/architecture/model-inference.md) + * [Role-Based Access Control (RBAC)](getting-started/architecture/rbac.md) * [Concepts](getting-started/concepts/README.md) * [Overview](getting-started/concepts/overview.md) * [Data ingestion](getting-started/concepts/data-ingestion.md) @@ -15,17 +23,16 @@ * [Feature view](getting-started/concepts/feature-view.md) * [Feature retrieval](getting-started/concepts/feature-retrieval.md) * [Point-in-time joins](getting-started/concepts/point-in-time-joins.md) - * [Registry](getting-started/concepts/registry.md) + * [Permission](getting-started/concepts/permission.md) * [\[Alpha\] Saved dataset](getting-started/concepts/dataset.md) -* [Architecture](getting-started/architecture-and-components/README.md) - * [Overview](getting-started/architecture-and-components/overview.md) - * [Language](getting-started/architecture-and-components/language.md) - * [Push vs Pull Model](getting-started/architecture-and-components/push-vs-pull-model.md) - * [Registry](getting-started/architecture-and-components/registry.md) - * [Offline store](getting-started/architecture-and-components/offline-store.md) - * [Online store](getting-started/architecture-and-components/online-store.md) - * [Batch Materialization Engine](getting-started/architecture-and-components/batch-materialization-engine.md) - * [Provider](getting-started/architecture-and-components/provider.md) +* [Components](getting-started/components/README.md) + * [Overview](getting-started/components/overview.md) + * [Registry](getting-started/components/registry.md) + * [Offline store](getting-started/components/offline-store.md) + * [Online store](getting-started/components/online-store.md) + * [Batch Materialization Engine](getting-started/components/batch-materialization-engine.md) + * [Provider](getting-started/components/provider.md) + * [Authorization Manager](getting-started/components/authz_manager.md) * [Third party integrations](getting-started/third-party-integrations.md) * [FAQ](getting-started/faq.md) @@ -37,7 +44,6 @@ * [Real-time credit scoring on AWS](tutorials/tutorials-overview/real-time-credit-scoring-on-aws.md) * [Driver stats on Snowflake](tutorials/tutorials-overview/driver-stats-on-snowflake.md) * [Validating historical features with Great Expectations](tutorials/validating-historical-features.md) -* [Using Scalable Registry](tutorials/using-scalable-registry.md) * [Building streaming features](tutorials/building-streaming-features.md) ## How-to Guides @@ -102,10 +108,15 @@ * [PostgreSQL (contrib)](reference/online-stores/postgres.md) * [Cassandra + Astra DB (contrib)](reference/online-stores/cassandra.md) * [MySQL (contrib)](reference/online-stores/mysql.md) - * [Rockset (contrib)](reference/online-stores/rockset.md) * [Hazelcast (contrib)](reference/online-stores/hazelcast.md) * [ScyllaDB (contrib)](reference/online-stores/scylladb.md) * 
[SingleStore (contrib)](reference/online-stores/singlestore.md) +* [Registries](reference/registries/README.md) + * [Local](reference/registries/local.md) + * [S3](reference/registries/s3.md) + * [GCS](reference/registries/gcs.md) + * [SQL](reference/registries/sql.md) + * [Snowflake](reference/registries/snowflake.md) * [Providers](reference/providers/README.md) * [Local](reference/providers/local.md) * [Google Cloud Platform](reference/providers/google-cloud-platform.md) diff --git a/docs/community.md b/docs/community.md index 21cca702bf..640b5238b8 100644 --- a/docs/community.md +++ b/docs/community.md @@ -2,6 +2,8 @@ ## Links & Resources +* [Come say hi on Slack!](https://communityinviter.com/apps/feastopensource/feast-the-open-source-feature-store) + * As a part of the Linux Foundation, we ask community members to adhere to the [Linux Foundation Code of Conduct](https://events.linuxfoundation.org/about/code-of-conduct/) * [GitHub Repository](https://github.com/feast-dev/feast/): Find the complete Feast codebase on GitHub. * [Community Governance Doc](https://github.com/feast-dev/feast/blob/master/community): See the governance model of Feast, including who the maintainers are and how decisions are made. * [Google Folder](https://drive.google.com/drive/u/0/folders/1jgMHOPDT2DvBlJeO9LCM79DP4lm4eOrR): This folder is used as a central repository for all Feast resources. For example: diff --git a/docs/getting-started/architecture-and-components/registry.md b/docs/getting-started/architecture-and-components/registry.md deleted file mode 100644 index 0939fb53fc..0000000000 --- a/docs/getting-started/architecture-and-components/registry.md +++ /dev/null @@ -1,31 +0,0 @@ -# Registry - -The Feast feature registry is a central catalog of all the feature definitions and their related metadata. It allows data scientists to search, discover, and collaborate on new features. - -Each Feast deployment has a single feature registry. Feast only supports file-based registries today, but supports four different backends. - -* `Local`: Used as a local backend for storing the registry during development -* `S3`: Used as a centralized backend for storing the registry on AWS -* `GCS`: Used as a centralized backend for storing the registry on GCP -* `[Alpha] Azure`: Used as centralized backend for storing the registry on Azure Blob storage. - -The feature registry is updated during different operations when using Feast. More specifically, objects within the registry \(entities, feature views, feature services\) are updated when running `apply` from the Feast CLI, but metadata about objects can also be updated during operations like materialization. - -Users interact with a feature registry through the Feast SDK. Listing all feature views: - -```python -fs = FeatureStore("my_feature_repo/") -print(fs.list_feature_views()) -``` - -Or retrieving a specific feature view: - -```python -fs = FeatureStore("my_feature_repo/") -fv = fs.get_feature_view(“my_fv1”) -``` - -{% hint style="info" %} -The feature registry is a [Protobuf representation](https://github.com/feast-dev/feast/blob/master/protos/feast/core/Registry.proto) of Feast metadata. This Protobuf file can be read programmatically from other programming languages, but no compatibility guarantees are made on the internal structure of the registry.
-{% endhint %} - diff --git a/docs/getting-started/architecture/README.md b/docs/getting-started/architecture/README.md new file mode 100644 index 0000000000..030bc62f4c --- /dev/null +++ b/docs/getting-started/architecture/README.md @@ -0,0 +1,29 @@ +# Architecture + +{% content-ref url="overview.md" %} +[overview.md](overview.md) +{% endcontent-ref %} + +{% content-ref url="language.md" %} +[language.md](language.md) +{% endcontent-ref %} + +{% content-ref url="push-vs-pull-model.md" %} +[push-vs-pull-model.md](push-vs-pull-model.md) +{% endcontent-ref %} + +{% content-ref url="write-patterns.md" %} +[write-patterns.md](write-patterns.md) +{% endcontent-ref %} + +{% content-ref url="feature-transformation.md" %} +[feature-transformation.md](feature-transformation.md) +{% endcontent-ref %} + +{% content-ref url="model-inference.md" %} +[model-inference.md](model-inference.md) +{% endcontent-ref %} + +{% content-ref url="rbac.md" %} +[rbac.md](rbac.md) +{% endcontent-ref %} \ No newline at end of file diff --git a/docs/getting-started/architecture/feature-transformation.md b/docs/getting-started/architecture/feature-transformation.md new file mode 100644 index 0000000000..1a15d4c3a5 --- /dev/null +++ b/docs/getting-started/architecture/feature-transformation.md @@ -0,0 +1,21 @@ +# Feature Transformation + +A *feature transformation* is a function that takes some set of input data and +returns some set of output data. Feature transformations can happen on either raw data or derived data. + +## Feature Transformation Engines +Feature transformations can be executed by three types of "transformation engines": + +1. The Feast Feature Server +2. An Offline Store (e.g., Snowflake, BigQuery, DuckDB, Spark, etc.) +3. A Stream processor (e.g., Flink or Spark Streaming) + +The three transformation engines are coupled with the [communication pattern used for writes](write-patterns.md). + +Importantly, this implies that different feature transformation code may be +used under different transformation engines, so understanding the tradeoffs of +when to use which transformation engine/communication pattern is critical to +the success of your implementation. + +In general, we recommend choosing the transformation engine and network call pattern that is most +appropriate for the data producer, feature/model usage, and overall product. \ No newline at end of file diff --git a/docs/getting-started/architecture-and-components/language.md b/docs/getting-started/architecture/language.md similarity index 98% rename from docs/getting-started/architecture-and-components/language.md rename to docs/getting-started/architecture/language.md index 916dff28d7..cff0fc467b 100644 --- a/docs/getting-started/architecture-and-components/language.md +++ b/docs/getting-started/architecture/language.md @@ -1,10 +1,10 @@ # Python: The Language of Production Machine Learning -Use Python to serve your features online. +Use Python to serve your features. ## Why should you use Python to Serve features for Machine Learning? -Python has emerged as the primary language for machine learning, and this extends to feature serving and there are five main reasons Feast recommends using a microservice in Feast. +Python has emerged as the primary language for machine learning, and this extends to feature serving; there are five main reasons Feast recommends using a microservice written in Python. ## 1.
Python is the language of Machine Learning diff --git a/docs/getting-started/architecture/model-inference.md b/docs/getting-started/architecture/model-inference.md new file mode 100644 index 0000000000..3a061603c1 --- /dev/null +++ b/docs/getting-started/architecture/model-inference.md @@ -0,0 +1,97 @@ +# Feature Serving and Model Inference + +Production machine learning systems can choose from four approaches to serving machine learning predictions (the output +of model inference): +1. Online model inference with online features +2. Offline model inference without online features +3. Online model inference with online features and cached predictions +4. Online model inference without features + +*Note: online features can be sourced from batch, streaming, or request data sources.* + +These four approaches have different tradeoffs but, in general, have significant implementation differences. + +## 1. Online Model Inference with Online Features +Online model inference with online features is a powerful approach to serving data-driven machine learning applications. +This requires a feature store to serve online features and a model server to serve model predictions (e.g., KServe). +This approach is particularly useful for applications where request-time data is required to run inference. +```python +features = store.get_online_features( + feature_refs=[ + "user_data:click_through_rate", + "user_data:number_of_clicks", + "user_data:average_page_duration", + ], + entity_rows=[{"user_id": 1}], +) +model_predictions = model_server.predict(features) +``` + +## 2. Offline Model Inference without Online Features +Typically, Machine Learning teams find serving precomputed model predictions to be the most straightforward to implement. +This approach simply treats the model predictions as a feature and serves them from the feature store using the standard +Feast SDK. These model predictions are typically generated through some batch process where the model scores are precomputed. +As a concrete example, the batch process can be as simple as a script that runs model inference locally for a set of users that +can output a CSV. This output file could be used for materialization so that the model could be served online as shown in the +code below. +```python +model_predictions = store.get_online_features( + feature_refs=[ + "user_data:model_predictions", + ], + entity_rows=[{"user_id": 1}], +) +``` +Notice that the model server is not involved in this approach. Instead, the model predictions are precomputed and +materialized to the online store. + +While this approach can lead to quick impact for different business use cases, it suffers from stale data as well +as only serving users/entities that were available at the time of the batch computation. In some cases, this tradeoff +may be tolerable. + +## 3. Online Model Inference with Online Features and Cached Predictions +This approach is the most sophisticated: inference is optimized for low latency by caching predictions and running +model inference when data producers write features to the online store. This approach is particularly useful for +applications where features are coming from multiple data sources, the model is computationally expensive to run, or +latency is a significant constraint.
+ +```python +# Client Reads +features = store.get_online_features( + feature_refs=[ + "user_data:click_through_rate", + "user_data:number_of_clicks", + "user_data:average_page_duration", + "user_data:model_predictions", + ], + entity_rows=[{"user_id": 1}], +) +if features.to_dict().get('user_data:model_predictions') is None: + model_predictions = model_server.predict(features) + store.write_to_online_store(feature_view_name="user_data", df=pd.DataFrame(model_predictions)) +``` +Note that in this case a separate call to `write_to_online_store` is required when the underlying data changes and +predictions change along with it. + +```python +# Client Writes from the Data Producer +user_data = request.POST.get('user_data') +model_predictions = model_server.predict(user_data) # assume this includes `user_data` in the Data Frame +store.write_to_online_store(feature_view_name="user_data", df=pd.DataFrame(model_predictions)) +``` +While this requires additional writes for every data producer, this approach will result in the lowest latency for +model inference. + +## 4. Online Model Inference without Features +This approach does not require Feast. The model server can directly serve predictions without any features. This +approach is common in Large Language Models (LLMs) and other models that do not require features to make predictions. + +Note that generative models using Retrieval Augmented Generation (RAG) do require features where the +[document embeddings](../../reference/alpha-vector-database.md) are treated as features, which Feast supports +(this would fall under "Online Model Inference with Online Features"). + +### Client Orchestration +Implicit in the code examples above is a design choice about how clients orchestrate calls to get features and run model inference. +The examples had a Feast-centric pattern because they are inputs to the model, so the sequencing is fairly obvious. +An alternative approach is inference-centric, where a client calls an inference endpoint and the inference +service is responsible for orchestration. diff --git a/docs/getting-started/architecture/overview.md b/docs/getting-started/architecture/overview.md new file mode 100644 index 0000000000..86ee75aaa6 --- /dev/null +++ b/docs/getting-started/architecture/overview.md @@ -0,0 +1,23 @@ +# Overview + +![Feast Architecture Diagram](<../../assets/feast_marchitecture.png>) + +Feast's architecture is designed to be flexible and scalable. It is composed of several components that work together to provide a feature store that can be used to serve features for training and inference. + +* Feast uses a [Push Model](push-vs-pull-model.md) to ingest data from different sources and store feature values in the +online store. +This allows Feast to serve features in real-time with low latency. + +* Feast supports [feature transformation](feature-transformation.md) for On Demand and Streaming data sources and + will support Batch transformations in the future. For Streaming and Batch data sources, Feast requires a separate +[Feature Transformation Engine](feature-transformation.md#feature-transformation-engines) (in the batch case, this is +typically your Offline Store). We are exploring adding a default streaming engine to Feast. + +* Domain expertise is recommended when integrating a data source with Feast to understand the [tradeoffs from different + write patterns](write-patterns.md) for your application. + +* We recommend [using Python](language.md) for your Feature Store microservice.
As mentioned in the document, precomputing features is the recommended optimal path to ensure low latency performance. Reducing feature serving to a lightweight database lookup is the ideal pattern, which means the marginal overhead of Python should be tolerable. Because of this we believe the pros of Python outweigh the costs, as reimplementing feature logic is undesirable. Java and Go Clients are also available for online feature retrieval. + +* [Role-Based Access Control (RBAC)](rbac.md) is a security mechanism that restricts access to resources based on the roles of individual users within an organization. In the context of Feast, RBAC ensures that only authorized users or groups can access or modify specific resources, thereby maintaining data security and operational integrity. + + diff --git a/docs/getting-started/architecture-and-components/push-vs-pull-model.md b/docs/getting-started/architecture/push-vs-pull-model.md similarity index 66% rename from docs/getting-started/architecture-and-components/push-vs-pull-model.md rename to docs/getting-started/architecture/push-vs-pull-model.md index a1f404221b..b205e97fc5 100644 --- a/docs/getting-started/architecture-and-components/push-vs-pull-model.md +++ b/docs/getting-started/architecture/push-vs-pull-model.md @@ -6,15 +6,23 @@ in the online store, to serve features in real-time. In a [Pull Model](https://en.wikipedia.org/wiki/Pull_technology), Feast would pull data from the data producers at request time and store the feature values in -the online store before serving them (storing them would actually be unneccessary). +the online store before serving them (storing them would actually be unnecessary). This approach would incur additional network latency as Feast would need to orchestrate a request to each data producer, which would mean the latency would be at least as long as your slowest call. So, in order to serve features as fast as possible, we push data to Feast and store the feature values in the online store. -The trade-off with the Push Model is that strong consistency is not gauranteed out -of the box. Instead, stong consistency has to be explicitly designed for in orchestrating +The trade-off with the Push Model is that strong consistency is not guaranteed out +of the box. Instead, strong consistency has to be explicitly designed for in orchestrating the updates to Feast and the client usage. The significant advantage with this approach is that Feast is read-optimized for low-latency -feature retrieval. \ No newline at end of file +feature retrieval. + +# How to Push + +Implicit in the Push model are decisions about _how_ and _when_ to push feature values to the online store. + +From a developer's perspective, there are three ways to push feature values to the online store with different tradeoffs. + +They are discussed further in the [Write Patterns](write-patterns.md) section.
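The push model described in push-vs-pull-model.md reduces to a single SDK call at produce time; a minimal sketch, assuming a hypothetical `PushSource` named "user_data_push_source" (with matching columns) registered in the feature repo:

```python
import pandas as pd

from feast import FeatureStore
from feast.data_source import PushMode

# Sketch of a synchronous push. The push source name and the columns are
# hypothetical and must match a PushSource defined in your feature repo.
store = FeatureStore(repo_path=".")
event_df = pd.DataFrame(
    {
        "user_id": [1],
        "click_through_rate": [0.42],
        "event_timestamp": [pd.Timestamp.now(tz="UTC")],
    }
)
# Write the new feature values to the online store at produce time.
store.push("user_data_push_source", event_df, to=PushMode.ONLINE)
```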
diff --git a/docs/getting-started/architecture/rbac.jpg b/docs/getting-started/architecture/rbac.jpg new file mode 100644 index 0000000000..0de87d1718 Binary files /dev/null and b/docs/getting-started/architecture/rbac.jpg differ diff --git a/docs/getting-started/architecture/rbac.md b/docs/getting-started/architecture/rbac.md new file mode 100644 index 0000000000..9a51fba6ac --- /dev/null +++ b/docs/getting-started/architecture/rbac.md @@ -0,0 +1,56 @@ +# Role-Based Access Control (RBAC) in Feast + +## Introduction + +Role-Based Access Control (RBAC) is a security mechanism that restricts access to resources based on the roles of individual users within an organization. In the context of Feast, RBAC ensures that only authorized users or groups can access or modify specific resources, thereby maintaining data security and operational integrity. + +## Functional Requirements + +The RBAC implementation in Feast is designed to: + +- **Assign Permissions**: Allow administrators to assign permissions for various operations and resources to users or groups based on their roles. +- **Seamless Integration**: Integrate smoothly with existing business code without requiring significant modifications. +- **Backward Compatibility**: Maintain support for non-authorized models as the default to ensure backward compatibility. + +## Business Goals + +The primary business goals of implementing RBAC in Feast are: + +1. **Feature Sharing**: Enable multiple teams to share the feature store while ensuring controlled access. This allows for collaborative work without compromising data security. +2. **Access Control Management**: Prevent unauthorized access to team-specific resources and spaces, governing the operations that each user or group can perform. + +## Reference Architecture + +Feast operates as a collection of connected services, each enforcing authorization permissions. The architecture is designed as a distributed microservices system with the following key components: + +- **Service Endpoints**: These enforce authorization permissions, ensuring that only authorized requests are processed. +- **Client Integration**: Clients authenticate with feature servers by attaching an authorization token to each request. +- **Service-to-Service Communication**: This is always granted. + +![rbac.jpg](rbac.jpg) + +## Permission Model + +The RBAC system in Feast uses a permission model that defines the following concepts: + +- **Resource**: An object within Feast that needs to be secured against unauthorized access. +- **Action**: A logical operation performed on a resource, such as Create, Describe, Update, Delete, Read, or Write operations. +- **Policy**: A set of rules that enforce authorization decisions on resources. The default implementation uses role-based policies. + + + +## Authorization Architecture + +The authorization architecture in Feast is built with the following components: + +- **Token Extractor**: Extracts the authorization token from the request header. +- **Token Parser**: Parses the token to retrieve user details. +- **Policy Enforcer**: Validates the secured endpoint against the retrieved user details. +- **Token Injector**: Adds the authorization token to each secured request header.
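The Resource/Action/Policy model above corresponds to Feast's `Permission` API; a hedged sketch of a role-based permission (import paths and enum members are assumed from recent Feast releases and may need adjusting for your version):

```python
from feast import FeatureView
from feast.permissions.action import AuthzedAction
from feast.permissions.permission import Permission
from feast.permissions.policy import RoleBasedPolicy

# Sketch only: grants holders of the "reader" role describe and online-read
# access to feature views. The permission name and role name are illustrative.
reader_permission = Permission(
    name="feature-view-reader",
    types=[FeatureView],
    policy=RoleBasedPolicy(roles=["reader"]),
    actions=[AuthzedAction.DESCRIBE, AuthzedAction.READ_ONLINE],
)
```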
+ + + + + + + diff --git a/docs/getting-started/architecture/write-patterns.md b/docs/getting-started/architecture/write-patterns.md new file mode 100644 index 0000000000..4674b5504d --- /dev/null +++ b/docs/getting-started/architecture/write-patterns.md @@ -0,0 +1,67 @@ +# Writing Data to Feast + +Feast uses a [Push Model](push-vs-pull-model.md) to push features to the online store. + +This has two important consequences: (1) communication patterns between the Data Producer (i.e., the client) and Feast (i.e., the server) and (2) feature computation and +_feature value_ write patterns to Feast's online store. + +Data Producers (i.e., services that generate data) send data to Feast so that Feast can write feature values to the online store. That data can +be either raw data where Feast computes and stores the feature values or precomputed feature values. + +## Communication Patterns + +There are two ways a client (or Data Producer) can _send_ data to the online store: + +1. Synchronously + - Using a synchronous API call for a small number of entities or a single entity (e.g., using the [`push` or `write_to_online_store` methods](../../reference/data-sources/push.md#pushing-data) or the Feature Server's [`push` endpoint](../../reference/feature-servers/python-feature-server.md#pushing-features-to-the-online-and-offline-stores)) +2. Asynchronously + - Using an asynchronous API call for a small number of entities or a single entity (e.g., using the [`push` or `write_to_online_store` methods](../../reference/data-sources/push.md#pushing-data) or the Feature Server's [`push` endpoint](../../reference/feature-servers/python-feature-server.md#pushing-features-to-the-online-and-offline-stores)) + - Using a "batch job" for a large number of entities (e.g., using a [batch materialization engine](../components/batch-materialization-engine.md)) + +Note, in some contexts, developers may "batch" a group of entities together and write them to the online store in a +single API call. This is a common pattern when writing data to the online store to reduce write loads but we would +not qualify this as a batch job. + +## Feature Value Write Patterns + +Writing feature values to the online store (i.e., the server) can be done in two ways: Precomputing the transformations client-side or Computing the transformations On Demand server-side. + +### Combining Approaches + +In some scenarios, a combination of Precomputed and On Demand transformations may be optimal. + +When selecting feature value write patterns, one must consider the specific requirements of your application, the acceptable correctness of the data, the latency tolerance, and the computational resources available. Making deliberate choices can help the performance and reliability of your service. + +There are three ways the client can write *feature values* to the online store: + +1. Precomputing transformations +2. Computing transformations On Demand +3. Hybrid (Precomputed + On Demand) + +### 1. Precomputing Transformations +Precomputed transformations can happen outside of Feast (e.g., via some batch job or streaming application) or inside of the Feast feature server when writing to the online store via the `push` or `write_to_online_store` API. + +### 2. Computing Transformations On Demand +On Demand transformations can only happen inside of Feast at either (1) the time of the client's request or (2) when the data producer writes to the online store. + +### 3.
Hybrid (Precomputed + On Demand) +The hybrid approach allows precomputed transformations to happen inside or outside of Feast, while the On Demand transformations happen at client request time. This is particularly convenient for "Time Since Last" types of features (e.g., time since purchase). + +## Tradeoffs + +When deciding between synchronous and asynchronous data writes, several tradeoffs should be considered: + +- **Data Consistency**: Asynchronous writes allow Data Producers to send data without waiting for the write operation to complete, which can lead to situations where the data in the online store is stale. This might be acceptable in scenarios where absolute freshness is not critical. However, for critical operations, such as calculating loan amounts in financial applications, stale data can lead to incorrect decisions, making synchronous writes essential. +- **Correctness**: The risk of data being out-of-date must be weighed against the operational requirements. For instance, in a lending application, having up-to-date feature data can be crucial for correctness (depending upon the features and raw data), thus favoring synchronous writes. In less sensitive contexts, the eventual consistency offered by asynchronous writes might be sufficient. +- **Service Coupling**: Synchronous writes result in tighter coupling between services. If a write operation fails, it can cause the dependent service operation to fail as well, which might be a significant drawback in systems requiring high reliability and independence between services. +- **Application Latency**: Asynchronous writes typically reduce the perceived latency from the client's perspective because the client does not wait for the write operation to complete. This can enhance the user experience and efficiency in environments where operations are not critically dependent on immediate data freshness. + +The table below can help guide the most appropriate data write and feature computation strategies based on specific application needs and data sensitivity. + +| Data Write Type | Feature Computation | Scenario | Recommended Approach | +|----------|-----------------|---------------------|----------------------| +| Asynchronous | On Demand | Data-intensive applications tolerant to staleness | Opt for asynchronous writes with on-demand computation to balance load and manage resource usage efficiently. | +| Asynchronous | Precomputed | High volume, non-critical data processing | Use asynchronous batch jobs with precomputed transformations for efficiency and scalability. | +| Synchronous | On Demand | High-stakes decision making | Use synchronous writes with on-demand feature computation to ensure data freshness and correctness. | +| Synchronous | Precomputed | User-facing applications requiring quick feedback | Use synchronous writes with precomputed features to reduce latency and improve user experience. | +| Synchronous | Hybrid (Precomputed + On Demand) | High-stakes decision making that wants to optimize for latency under constraints | Use synchronous writes with precomputed features where possible and a select set of on demand computations to reduce latency and improve user experience. |
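For the synchronous communication pattern in the table above, a minimal sketch of a client posting precomputed feature values to a running feature server's `/push` endpoint; the server address, port, and push source name are hypothetical:

```python
import requests

# Sketch of a synchronous write via a feature server's /push endpoint.
# Assumes `feast serve` is running on localhost:6566 and a hypothetical
# push source named "user_data_push_source"; "to" selects the target store.
payload = {
    "push_source_name": "user_data_push_source",
    "df": {
        "user_id": [1],
        "click_through_rate": [0.42],
        "event_timestamp": ["2024-08-01T00:00:00"],
    },
    "to": "online",
}
response = requests.post("http://localhost:6566/push", json=payload, timeout=10)
response.raise_for_status()
```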
diff --git a/docs/getting-started/architecture-and-components/README.md b/docs/getting-started/components/README.md
similarity index 65%
rename from docs/getting-started/architecture-and-components/README.md
rename to docs/getting-started/components/README.md
index 050a430c97..e1c000abce 100644
--- a/docs/getting-started/architecture-and-components/README.md
+++ b/docs/getting-started/components/README.md
@@ -1,16 +1,4 @@
-# Architecture
-
-{% content-ref url="language.md" %}
-[language.md](language.md)
-{% endcontent-ref %}
-
-{% content-ref url="overview.md" %}
-[overview.md](overview.md)
-{% endcontent-ref %}
-
-{% content-ref url="push-vs-pull-model.md" %}
-[push-vs-pull-model.md](push-vs-pull-model.md)
-{% endcontent-ref %}
+# Components
 
 {% content-ref url="registry.md" %}
 [registry.md](registry.md)
@@ -31,3 +19,7 @@
 {% content-ref url="provider.md" %}
 [provider.md](provider.md)
 {% endcontent-ref %}
+
+{% content-ref url="authz_manager.md" %}
+[authz_manager.md](authz_manager.md)
+{% endcontent-ref %}
diff --git a/docs/getting-started/components/authz_manager.md b/docs/getting-started/components/authz_manager.md
new file mode 100644
index 0000000000..20fcdca107
--- /dev/null
+++ b/docs/getting-started/components/authz_manager.md
@@ -0,0 +1,117 @@
+# Authorization Manager
+An Authorization Manager is an instance of the `AuthManager` class that is plugged into one of the Feast servers to extract user details from the current request and inject them into the [permission](../../getting-started/concepts/permission.md) framework.
+
+{% hint style="info" %}
+**Note**: Feast does not provide authentication capabilities; it is the client's responsibility to manage the authentication token and pass it to
+the Feast server, which then validates the token and extracts user details from the configured authentication server.
+{% endhint %}
+
+Two authorization managers are supported out-of-the-box:
+* One using a configurable OIDC server to extract the user details.
+* One using the Kubernetes RBAC resources to extract the user details.
+
+These instances are created when the Feast servers are initialized, according to the authorization configuration defined in
+their own `feature_store.yaml`.
+
+Feast servers and clients must have consistent authorization configuration, so that the client proxies can automatically inject
+authorization tokens that the server can properly identify and use to enforce permission validations.
+
+
+## Design notes
+The server-side implementation of the authorization functionality is defined [here](./../../../sdk/python/feast/permissions/server).
+A few of the key models and classes for understanding the client-side authorization implementation can be found [here](./../../../sdk/python/feast/permissions/client).
+
+## Configuring Authorization
+The authorization is configured using a dedicated `auth` section in the `feature_store.yaml` configuration.
+
+**Note**: As a consequence, when deploying the Feast servers with the Helm [charts](../../../infra/charts/feast-feature-server/README.md),
+the `feature_store_yaml_base64` value must include the `auth` section to specify the authorization configuration.
+
+### No Authorization
+This configuration applies the default `no_auth` authorization:
+```yaml
+project: my-project
+auth:
+  type: no_auth
+...
+```
+
+### OIDC Authorization
+With OIDC authorization, the Feast client proxies retrieve the JWT token from an OIDC server (or [Identity Provider](https://openid.net/developers/how-connect-works/))
+and append it to every request to a Feast server, using an [Authorization Bearer Token](https://developer.mozilla.org/en-US/docs/Web/HTTP/Authentication#bearer).
+
+The server, in turn, uses the same OIDC server to validate the token and extract the user roles from the token itself.
+
+Some assumptions are made in the OIDC server configuration:
+* The OIDC token refers to a client with roles matching the RBAC roles of the configured `Permission`s (*)
+* The roles are exposed in the access token that is passed to the server
+* The JWT token is expected to have a verified signature and not be expired. The Feast OIDC token parser logic validates `verify_signature` and `verify_exp`, so make sure that the given OIDC provider is configured to meet these requirements.
+* `preferred_username` should be part of the JWT token claims.
+
+
+(*) Please note that **the role match is case-sensitive**, i.e. the name of the role in the OIDC server and in the `Permission` configuration
+must be exactly the same.
+
+For example, the access token for a client `app` of a user with the `reader` role should have the following `resource_access` section:
+```json
+{
+  "resource_access": {
+    "app": {
+      "roles": [
+        "reader"
+      ]
+    }
+  }
+}
+```
+
+An example of a Feast OIDC authorization configuration on the server side is the following:
+```yaml
+project: my-project
+auth:
+  type: oidc
+  client_id: _CLIENT_ID_
+  auth_discovery_url: _OIDC_SERVER_URL_/realms/master/.well-known/openid-configuration
+...
+```
+
+For the client configuration, the settings `username`, `password`, and `client_secret` must be added to specify the current user:
+```yaml
+auth:
+  type: oidc
+  ...
+  username: _USERNAME_
+  password: _PASSWORD_
+  client_secret: _CLIENT_SECRET_
+```
+
+Below is a full example of a Feast OIDC client authorization configuration:
+```yaml
+project: my-project
+auth:
+  type: oidc
+  client_id: test_client_id
+  client_secret: test_client_secret
+  username: test_user_name
+  password: test_password
+  auth_discovery_url: http://localhost:8080/realms/master/.well-known/openid-configuration
+```
+
+### Kubernetes RBAC Authorization
+With Kubernetes RBAC Authorization, the client uses the service account token as the authorization bearer token, and the
+server fetches the associated roles from the Kubernetes RBAC resources.
+
+An example of a Kubernetes RBAC authorization configuration is the following:
+{% hint style="info" %}
+**NOTE**: This configuration will only work if you deploy Feast on OpenShift or another Kubernetes platform.
+{% endhint %}
+```yaml
+project: my-project
+auth:
+  type: kubernetes
+...
+```
+
+If the client cannot run on the same cluster as the servers, the client token can be injected using the `LOCAL_K8S_TOKEN`
+environment variable on the client side. The value must refer to the token of a service account created on the servers' cluster
+and linked to the desired RBAC roles.
\ No newline at end of file diff --git a/docs/getting-started/architecture-and-components/batch-materialization-engine.md b/docs/getting-started/components/batch-materialization-engine.md similarity index 100% rename from docs/getting-started/architecture-and-components/batch-materialization-engine.md rename to docs/getting-started/components/batch-materialization-engine.md diff --git a/docs/getting-started/architecture-and-components/offline-store.md b/docs/getting-started/components/offline-store.md similarity index 100% rename from docs/getting-started/architecture-and-components/offline-store.md rename to docs/getting-started/components/offline-store.md diff --git a/docs/getting-started/architecture-and-components/online-store.md b/docs/getting-started/components/online-store.md similarity index 100% rename from docs/getting-started/architecture-and-components/online-store.md rename to docs/getting-started/components/online-store.md diff --git a/docs/getting-started/architecture-and-components/overview.md b/docs/getting-started/components/overview.md similarity index 85% rename from docs/getting-started/architecture-and-components/overview.md rename to docs/getting-started/components/overview.md index f4d543cd5a..ac0b99de8a 100644 --- a/docs/getting-started/architecture-and-components/overview.md +++ b/docs/getting-started/components/overview.md @@ -28,11 +28,4 @@ A complete Feast deployment contains the following components: * **Batch Materialization Engine:** The [Batch Materialization Engine](batch-materialization-engine.md) component launches a process which loads data into the online store from the offline store. By default, Feast uses a local in-process engine implementation to materialize data. However, additional infrastructure can be used for a more scalable materialization process. * **Online Store:** The online store is a database that stores only the latest feature values for each entity. The online store is either populated through materialization jobs or through [stream ingestion](../../reference/data-sources/push.md). * **Offline Store:** The offline store persists batch data that has been ingested into Feast. This data is used for producing training datasets. For feature retrieval and materialization, Feast does not manage the offline store directly, but runs queries against it. However, offline stores can be configured to support writes if Feast configures logging functionality of served features. - -{% hint style="info" %} -Java and Go Clients are also available for online feature retrieval. - -In general, we recommend [using Python](language.md) for your Feature Store microservice. - -As mentioned in the document, precomputing features is the recommended optimal path to ensure low latency performance. Reducing feature serving to a lightweight database lookup is the ideal pattern, which means the marginal overhead of Python should be tolerable. Because of this we believe the pros of Python outweigh the costs, as reimplementing feature logic is undesirable. -{% endhint %} +* **Authorization Manager**: The authorization manager detects authentication tokens from client requests to Feast servers and uses this information to enforce permission policies on the requested services. 
diff --git a/docs/getting-started/architecture-and-components/provider.md b/docs/getting-started/components/provider.md
similarity index 100%
rename from docs/getting-started/architecture-and-components/provider.md
rename to docs/getting-started/components/provider.md
diff --git a/docs/getting-started/components/registry.md b/docs/getting-started/components/registry.md
new file mode 100644
index 0000000000..0c85c5ad36
--- /dev/null
+++ b/docs/getting-started/components/registry.md
@@ -0,0 +1,51 @@
+# Registry
+
+The Feast feature registry is a central catalog of all feature definitions and their related metadata. Feast uses the registry to store all applied Feast objects (e.g. feature views, entities, etc). It allows data scientists to search, discover, and collaborate on new features. The registry exposes methods to apply, list, retrieve and delete these objects, and is an abstraction with multiple implementations.
+
+Feast comes with built-in file-based and SQL-based registry implementations. By default, Feast uses a file-based registry, which stores the protobuf representation of the registry as a serialized file in the local file system. For more details on which registries are supported, please see [Registries](../../reference/registries/).
+
+## Updating the registry
+
+We recommend users store their Feast feature definitions in a version-controlled repository, which then stays synced with the registry
+automatically via CI/CD. Users will often also want multiple registries corresponding to
+different environments (e.g. dev vs staging vs prod), with write access to the staging and production registries locked down,
+since changes to them can impact real user traffic. See [Running Feast in Production](../../how-to-guides/running-feast-in-production.md#1.-automatically-deploying-changes-to-your-feature-definitions) for details on how to set this up.
+
+## Accessing the registry from clients
+
+Users can specify the registry through a `feature_store.yaml` config file, or programmatically. We often see teams
+preferring the programmatic approach because it makes notebook-driven development very easy:
+
+### Option 1: programmatically specifying the registry
+
+```python
+from feast import FeatureStore
+from feast.repo_config import RegistryConfig, RepoConfig
+
+repo_config = RepoConfig(
+    registry=RegistryConfig(path="gs://feast-test-gcs-bucket/registry.pb"),
+    project="feast_demo_gcp",
+    provider="gcp",
+    offline_store="file",  # Could also be the OfflineStoreConfig e.g. FileOfflineStoreConfig
+    online_store="null",  # Could also be the OnlineStoreConfig e.g. RedisOnlineStoreConfig
+)
+store = FeatureStore(config=repo_config)
+```
+
+### Option 2: specifying the registry in the project's `feature_store.yaml` file
+
+```yaml
+project: feast_demo_aws
+provider: aws
+registry: s3://feast-test-s3-bucket/registry.pb
+online_store: null
+offline_store:
+  type: file
+```
+
+Instantiating a `FeatureStore` object can then point to this:
+
+```python
+store = FeatureStore(repo_path=".")
+```
+
+{% hint style="info" %}
+The file-based feature registry is a [Protobuf representation](https://github.com/feast-dev/feast/blob/master/protos/feast/core/Registry.proto) of Feast metadata. This Protobuf file can be read programmatically from other programming languages, but no compatibility guarantees are made on the internal structure of the registry.
+{% endhint %}
\ No newline at end of file
diff --git a/docs/getting-started/architecture-and-components/stream-processor.md b/docs/getting-started/components/stream-processor.md
similarity index 100%
rename from docs/getting-started/architecture-and-components/stream-processor.md
rename to docs/getting-started/components/stream-processor.md
diff --git a/docs/getting-started/concepts/README.md b/docs/getting-started/concepts/README.md
index e805e3b486..a32c53b5f4 100644
--- a/docs/getting-started/concepts/README.md
+++ b/docs/getting-started/concepts/README.md
@@ -24,10 +24,14 @@
 [point-in-time-joins.md](point-in-time-joins.md)
 {% endcontent-ref %}
 
-{% content-ref url="registry.md" %}
-[registry.md](registry.md)
-{% endcontent-ref %}
-
 {% content-ref url="dataset.md" %}
 [dataset.md](dataset.md)
 {% endcontent-ref %}
+
+{% content-ref url="permission.md" %}
+[permission.md](permission.md)
+{% endcontent-ref %}
+
+{% content-ref url="tags.md" %}
+[tags.md](tags.md)
+{% endcontent-ref %}
diff --git a/docs/getting-started/concepts/dataset.md b/docs/getting-started/concepts/dataset.md
index d55adb4703..829ad4284e 100644
--- a/docs/getting-started/concepts/dataset.md
+++ b/docs/getting-started/concepts/dataset.md
@@ -2,7 +2,7 @@
 Feast datasets allow for conveniently saving dataframes that include both features and entities to be subsequently used for data analysis and model training. [Data Quality Monitoring](https://docs.google.com/document/d/110F72d4NTv80p35wDSONxhhPBqWRwbZXG4f9mNEMd98) was the primary motivation for creating dataset concept.
 
-Dataset's metadata is stored in the Feast registry and raw data (features, entities, additional input keys and timestamp) is stored in the [offline store](../architecture-and-components/offline-store.md).
+Dataset's metadata is stored in the Feast registry and raw data (features, entities, additional input keys and timestamp) is stored in the [offline store](../components/offline-store.md).
 
 Dataset can be created from:
diff --git a/docs/getting-started/concepts/permission.md b/docs/getting-started/concepts/permission.md
new file mode 100644
index 0000000000..5bca1bd568
--- /dev/null
+++ b/docs/getting-started/concepts/permission.md
@@ -0,0 +1,112 @@
+# Permission
+
+## Overview
+
+The Feast permissions model allows you to configure granular permission policies for all the resources defined in a feature store.
+
+The configured permissions are stored in the Feast registry and are accessible through the CLI and the registry APIs.
+
+The permission authorization enforcement is performed when requests are executed through one of the Feast (Python) servers:
+- The online feature server (REST)
+- The offline feature server (Arrow Flight)
+- The registry server (gRPC)
+
+Note that there is no permission enforcement when accessing the Feast API with a local provider.
+
+## Concepts
+
+The permission model is based on the following components:
+- A `resource` is a Feast object that we want to secure against unauthorized access.
+  - We assume that the resource has a `name` attribute and an optional dictionary of associated key-value `tags`.
+- An `action` is a logical operation executed on the secured resource, like:
+  - `create`: Create an instance.
+  - `describe`: Access the instance state.
+  - `update`: Update the instance state.
+  - `delete`: Delete an instance.
+  - `read`: Read both online and offline stores.
+  - `read_online`: Read the online store.
+  - `read_offline`: Read the offline store.
+  - `write`: Write on any store.
+  - `write_online`: Write to the online store.
+  - `write_offline`: Write to the offline store.
+- A `policy` identifies the rule for enforcing authorization decisions on secured resources, based on the current user.
+  - A default implementation is provided for role-based policies, using the user roles to grant or deny access to the requested actions
+    on the secured resources.
+
+The `Permission` class identifies a single permission configured on the feature store and is defined by these attributes:
+- `name`: The permission name.
+- `types`: The list of protected resource types. Defaults to all managed types, e.g. the `ALL_RESOURCE_TYPES` alias. All sub-classes are included in the resource match.
+- `name_pattern`: A regex to match the resource name. Defaults to `None`, meaning that no name filtering is applied.
+- `required_tags`: Dictionary of key-value pairs that must match the resource tags. Defaults to `None`, meaning that no tag filtering is applied.
+- `actions`: The actions authorized by this permission. Defaults to `ALL_VALUES`, an alias defined in the `action` module.
+- `policy`: The policy to be applied to validate a client request.
+
+Several constants are defined to streamline the permissions setup:
+- In module `feast.feast_object`:
+  - `ALL_RESOURCE_TYPES` is the list of all the `FeastObject` types.
+  - `ALL_FEATURE_VIEW_TYPES` is the list of all the feature view types, including those, like `OnDemandFeatureView`, that do not inherit from the `FeatureView` type.
+- In module `feast.permissions.action`:
+  - `ALL_ACTIONS` is the list of all managed actions.
+  - `READ` includes all the read actions for the online and offline stores.
+  - `WRITE` includes all the write actions for the online and offline stores.
+  - `CRUD` includes all the state management actions to create, describe, update or delete a Feast resource.
+
+Given the above definitions, the feature store can be configured with granular control over each resource, enabling partitioned access by
+teams to meet organizational requirements for service and data sharing, and the protection of sensitive information.
+
+The `feast` CLI includes a new `permissions` command to list the registered permissions, with options to identify the matching resources for each configured permission and the existing resources that are not covered by any permission.
+
+{% hint style="info" %}
+**Note**: Feast resources that do not match any of the configured permissions are not secured by any authorization policy, meaning any user can execute any action on such resources.
+{% endhint %}
+
+## Definition examples
+This permission definition grants access to the resource state and the ability to read all of the stores for any feature view or
+feature service to all users with the role `super-reader`:
+```py
+Permission(
+    name="feature-reader",
+    types=[FeatureView, FeatureService],
+    policy=RoleBasedPolicy(roles=["super-reader"]),
+    actions=[AuthzedAction.DESCRIBE, *READ],
+)
+```
+
+This example grants permission to write on all the data sources with the `risk_level` tag set to `high` only to users with the role `admin` or `data_team`:
+```py
+Permission(
+    name="ds-writer",
+    types=[DataSource],
+    required_tags={"risk_level": "high"},
+    policy=RoleBasedPolicy(roles=["admin", "data_team"]),
+    actions=WRITE,
+)
+```
+
+{% hint style="info" %}
+**Note**: When using multiple roles in a role-based policy, the user must be granted at least one of the specified roles.
+{% endhint %} + + +The following permission grants authorization to read the offline store of all the feature views including `risky` in the name, to users with role `trusted`: + +```py +Permission( + name="reader", + types=[FeatureView], + name_pattern=".*risky.*", + policy=RoleBasedPolicy(roles=["trusted"]), + actions=[AuthzedAction.READ_OFFLINE], +) +``` + +## Authorization configuration +In order to leverage the permission functionality, the `auth` section is needed in the `feature_store.yaml` configuration. +Currently, Feast supports OIDC and Kubernetes RBAC authorization protocols. + +The default configuration, if you don't specify the `auth` configuration section, is `no_auth`, indicating that no permission +enforcement is applied. + +The `auth` section includes a `type` field specifying the actual authorization protocol, and protocol-specific fields that +are specified in [Authorization Manager](../components/authz_manager.md). diff --git a/docs/getting-started/concepts/registry.md b/docs/getting-started/concepts/registry.md deleted file mode 100644 index 8ac32ce87b..0000000000 --- a/docs/getting-started/concepts/registry.md +++ /dev/null @@ -1,107 +0,0 @@ -# Registry - -Feast uses a registry to store all applied Feast objects (e.g. Feature views, entities, etc). The registry exposes -methods to apply, list, retrieve and delete these objects, and is an abstraction with multiple implementations. - -### Options for registry implementations - -#### File-based registry -By default, Feast uses a file-based registry implementation, which stores the protobuf representation of the registry as -a serialized file. This registry file can be stored in a local file system, or in cloud storage (in, say, S3 or GCS, or Azure). - -The quickstart guides that use `feast init` will use a registry on a local file system. To allow Feast to configure -a remote file registry, you need to create a GCS / S3 bucket that Feast can understand: -{% tabs %} -{% tab title="Example S3 file registry" %} -```yaml -project: feast_demo_aws -provider: aws -registry: - path: s3://[YOUR BUCKET YOU CREATED]/registry.pb - cache_ttl_seconds: 60 -online_store: null -offline_store: - type: file -``` -{% endtab %} - -{% tab title="Example GCS file registry" %} -```yaml -project: feast_demo_gcp -provider: gcp -registry: - path: gs://[YOUR BUCKET YOU CREATED]/registry.pb - cache_ttl_seconds: 60 -online_store: null -offline_store: - type: file -``` -{% endtab %} -{% endtabs %} - -However, there are inherent limitations with a file-based registry, since changing a single field in the registry -requires re-writing the whole registry file. With multiple concurrent writers, this presents a risk of data loss, or -bottlenecks writes to the registry since all changes have to be serialized (e.g. when running materialization for -multiple feature views or time ranges concurrently). - -#### SQL Registry -Alternatively, a [SQL Registry](../../tutorials/using-scalable-registry.md) can be used for a more scalable registry. - -The configuration roughly looks like: -```yaml -project: -provider: -online_store: redis -offline_store: file -registry: - registry_type: sql - path: postgresql://postgres:mysecretpassword@127.0.0.1:55001/feast - cache_ttl_seconds: 60 - sqlalchemy_config_kwargs: - echo: false - pool_pre_ping: true -``` - -This supports any SQLAlchemy compatible database as a backend. 
The exact schema can be seen in [sql.py](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/infra/registry/sql.py)
-
-### Updating the registry
-
-We recommend users store their Feast feature definitions in a version controlled repository, which then via CI/CD
-automatically stays synced with the registry. Users will often also want multiple registries to correspond to
-different environments (e.g. dev vs staging vs prod), with staging and production registries with locked down write
-access since they can impact real user traffic. See [Running Feast in Production](../../how-to-guides/running-feast-in-production.md#1.-automatically-deploying-changes-to-your-feature-definitions) for details on how to set this up.
-
-### Accessing the registry from clients
-
-Users can specify the registry through a `feature_store.yaml` config file, or programmatically. We often see teams
-preferring the programmatic approach because it makes notebook driven development very easy:
-
-#### Option 1: programmatically specifying the registry
-
-```python
-repo_config = RepoConfig(
-    registry=RegistryConfig(path="gs://feast-test-gcs-bucket/registry.pb"),
-    project="feast_demo_gcp",
-    provider="gcp",
-    offline_store="file",  # Could also be the OfflineStoreConfig e.g. FileOfflineStoreConfig
-    online_store="null",  # Could also be the OnlineStoreConfig e.g. RedisOnlineStoreConfig
-)
-store = FeatureStore(config=repo_config)
-```
-
-#### Option 2: specifying the registry in the project's `feature_store.yaml` file
-
-```yaml
-project: feast_demo_aws
-provider: aws
-registry: s3://feast-test-s3-bucket/registry.pb
-online_store: null
-offline_store:
-  type: file
-```
-
-Instantiating a `FeatureStore` object can then point to this:
-
-```python
-store = FeatureStore(repo_path=".")
-```
\ No newline at end of file
diff --git a/docs/getting-started/concepts/tags.md b/docs/getting-started/concepts/tags.md
new file mode 100644
index 0000000000..d5b285f7c7
--- /dev/null
+++ b/docs/getting-started/concepts/tags.md
@@ -0,0 +1,59 @@
+# Tags
+
+## Overview
+
+Tags in Feast allow for efficient filtering of Feast objects when listing them in the UI, CLI, or querying the registry directly.
+
+Tags can be defined on Feast objects either in the definition file or directly on the object that will be applied to the feature store.
+
+## Examples
+
+In this example we define a Feature View in a definition file that has a tag:
+```python
+driver_stats_fv = FeatureView(
+    name="driver_hourly_stats",
+    entities=[driver],
+    ttl=timedelta(days=1),
+    schema=[
+        Field(name="conv_rate", dtype=Float32),
+        Field(name="acc_rate", dtype=Float32),
+        Field(name="avg_daily_trips", dtype=Int64, description="Average daily trips"),
+    ],
+    online=True,
+    source=driver_stats_source,
+    # Tags are user defined key/value pairs that are attached to each
+    # feature view
+    tags={"team": "driver_performance"},
+)
+```
+
+In this example we define a Stream Feature View with a tag directly in code:
+```python
+sfv = StreamFeatureView(
+    name="test kafka stream feature view",
+    entities=[entity],
+    schema=[],
+    description="desc",
+    timestamp_field="event_timestamp",
+    source=stream_source,
+    tags={"team": "driver_performance"},
+)
+```
+
+An example of filtering feature views with the tag `team:driver_performance`:
+```commandline
+$ feast feature-views list --tags team:driver_performance
+NAME                       ENTITIES    TYPE
+driver_hourly_stats        {'driver'}  FeatureView
+driver_hourly_stats_fresh  {'driver'}  FeatureView
+```
+
+The same example, listing feature views without tag filtering:
+```commandline
+$ feast feature-views list
+NAME                         ENTITIES    TYPE
+driver_hourly_stats          {'driver'}  FeatureView
+driver_hourly_stats_fresh    {'driver'}  FeatureView
+transformed_conv_rate_fresh  {'driver'}  OnDemandFeatureView
+transformed_conv_rate        {'driver'}  OnDemandFeatureView
+```
+
diff --git a/docs/getting-started/faq.md b/docs/getting-started/faq.md
index d603e12ab6..6567ae181d 100644
--- a/docs/getting-started/faq.md
+++ b/docs/getting-started/faq.md
@@ -29,7 +29,7 @@
 Feature views once they are used by a feature service are intended to be immutable.
 
 ### What is the difference between data sources and the offline store?
 
-The data source itself defines the underlying data warehouse table in which the features are stored. The offline store interface defines the APIs required to make an arbitrary compute layer work for Feast (e.g. pulling features given a set of feature views from their sources, exporting the data set results to different formats). Please see [data sources](concepts/data-ingestion.md) and [offline store](architecture-and-components/offline-store.md) for more details.
+The data source itself defines the underlying data warehouse table in which the features are stored. The offline store interface defines the APIs required to make an arbitrary compute layer work for Feast (e.g. pulling features given a set of feature views from their sources, exporting the data set results to different formats). Please see [data sources](concepts/data-ingestion.md) and [offline store](components/offline-store.md) for more details.
 
 ### Is it possible to have offline and online stores from different providers?
diff --git a/docs/getting-started/quickstart.md b/docs/getting-started/quickstart.md
index 01c039e9c5..7169989e7e 100644
--- a/docs/getting-started/quickstart.md
+++ b/docs/getting-started/quickstart.md
@@ -103,12 +103,17 @@
 from feast import (
     FeatureView,
     Field,
     FileSource,
+    Project,
     PushSource,
     RequestSource,
 )
 from feast.on_demand_feature_view import on_demand_feature_view
 from feast.types import Float32, Float64, Int64
 
+# Define a project for the feature repo
+project = Project(name="my_project", description="A project for driver statistics")
+
+
 # Define an entity for the driver. You can think of an entity as a primary key used to
 # fetch features.
 driver = Entity(name="driver", join_keys=["driver_id"])
@@ -623,6 +628,6 @@
 show up in the upcoming concepts + architecture + tutorial pages as well.
 
 ## Next steps
 
 * Read the [Concepts](concepts/) page to understand the Feast data model.
-* Read the [Architecture](architecture-and-components/) page.
+* Read the [Architecture](architecture/) page.
 * Check out our [Tutorials](../tutorials/tutorials-overview/) section for more examples on how to use Feast.
 * Follow our [Running Feast with Snowflake/GCP/AWS](../how-to-guides/feast-snowflake-gcp-aws/) guide for a more in-depth tutorial on using Feast.
diff --git a/docs/how-to-guides/scaling-feast.md b/docs/how-to-guides/scaling-feast.md
index ce63f027c9..7e4f27b1dd 100644
--- a/docs/how-to-guides/scaling-feast.md
+++ b/docs/how-to-guides/scaling-feast.md
@@ -20,7 +20,7 @@
 The recommended solution in this case is to use the [SQL based registry](../tutorials/using-scalable-registry.md).
 
 The default Feast materialization process is an in-memory process, which pulls data from the offline store before writing it to the online store. However, this process does not scale for large data sets, since it's executed on a single-process.
 
-Feast supports pluggable [Materialization Engines](../getting-started/architecture-and-components/batch-materialization-engine.md), that allow the materialization process to be scaled up.
+Feast supports pluggable [Materialization Engines](../getting-started/components/batch-materialization-engine.md) that allow the materialization process to be scaled up.
 Aside from the local process, Feast supports a [Lambda-based materialization engine](https://rtd.feast.dev/en/master/#alpha-lambda-based-engine), and a [Bytewax-based materialization engine](https://rtd.feast.dev/en/master/#bytewax-engine).
 Users may also be able to build an engine to scale up materialization using existing infrastructure in their organizations.
\ No newline at end of file
diff --git a/docs/project/development-guide.md b/docs/project/development-guide.md
index e3b09294bc..b613774190 100644
--- a/docs/project/development-guide.md
+++ b/docs/project/development-guide.md
@@ -14,9 +14,9 @@
   - [Pre-commit Hooks](#pre-commit-hooks)
   - [Signing off commits](#signing-off-commits)
   - [Incorporating upstream changes from master](#incorporating-upstream-changes-from-master)
-  - [Feast Python SDK / CLI](#feast-python-sdk--cli)
+  - [Feast Python SDK and CLI](#feast-python-sdk-and-cli)
     - [Environment Setup](#environment-setup)
-    - [Code Style \& Linting](#code-style--linting)
+    - [Code Style and Linting](#code-style-and-linting)
     - [Unit Tests](#unit-tests)
     - [Integration Tests](#integration-tests)
       - [Local integration tests](#local-integration-tests)
@@ -39,7 +39,7 @@
 ## Overview
 This guide is targeted at developers looking to contribute to Feast components in the main Feast repository:
-- [Feast Python SDK / CLI](#feast-python-sdk--cli)
+- [Feast Python SDK and CLI](#feast-python-sdk-and-cli)
 - [Feast Java Serving](#feast-java-serving)
 
 Please see [this page](../reference/codebase-structure.md) for more details on the structure of the entire codebase.
@@ -118,9 +118,9 @@
 Our preference is the use of `git rebase [master]` instead of `git merge` : `git pull -r`.
 
 Note that this means if you are midway through working through a PR and rebase, you'll have to force push:
 `git push --force-with-lease origin [branch name]`
 
-## Feast Python SDK / CLI
+## Feast Python SDK and CLI
 ### Environment Setup
-Setting up your development environment for Feast Python SDK / CLI:
+Setting up your development environment for Feast Python SDK and CLI:
 1. Ensure that you have Docker installed in your environment. Docker is used to provision service dependencies during testing, and build images for feature servers and other components.
    - Please note that we use [Docker with BuiltKit](https://docs.docker.com/develop/develop-images/build_enhancements/).
    - _Alternatively_ - To use [podman](https://podman.io/) on a Fedora or RHEL machine, follow this [guide](https://github.com/feast-dev/feast/issues/4190)
@@ -132,8 +132,7 @@ Setting up your development environment for Feast Python SDK / CLI:
    source venv/bin/activate
    ```
 4. (M1 Mac only): Follow the [dev guide](https://github.com/feast-dev/feast/issues/2105)
-5. Install uv
-It is recommended to use uv for managing python dependencies.
+5. Install uv. It is recommended to use uv for managing python dependencies.
 ```sh
 curl -LsSf https://astral.sh/uv/install.sh | sh
 ```
@@ -145,28 +144,33 @@
 pip install uv
 ```
 
 ```
 make build-ui
 ```
-7. (Optional) install pixi
-pixi is necessary to run step 8 for all python versions at once.
+7. (Optional) Install pixi. pixi is necessary to run step 8 for all python versions at once.
 ```sh
 curl -fsSL https://pixi.sh/install.sh | bash
 ```
-8. (Optional): Recompile python lock files
-If you make changes to requirements or simply want to update python lock files to reflect latest versioons.
+8. (Optional): Recompile python lock files. Only needed when you make changes to requirements or want to update the python lock files to reflect the latest versions.
 ```sh
 make lock-python-dependencies-all
 ```
-9. Install development dependencies for Feast Python SDK / CLI
-This will install package versions from the lock file, install editable version of feast and compile protobufs.
+9. Install development dependencies for Feast Python SDK and CLI. This will install package versions from the lock file, install an editable version of feast, and compile protobufs.
+
+If running inside a virtual environment:
+```sh
+make install-python-ci-dependencies-uv-venv
+```
+
+Otherwise:
 ```sh
 make install-python-ci-dependencies-uv
 ```
+
 10. Spin up Docker Image
 ```sh
 docker build -t docker-whale -f ./sdk/python/feast/infra/feature_servers/multicloud/Dockerfile .
 ```
-### Code Style & Linting
-Feast Python SDK / CLI codebase:
+### Code Style and Linting
+Feast Python SDK and CLI codebase:
 - Conforms to [Black code style](https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html)
 - Has type annotations as enforced by `mypy`
 - Has imports sorted by `ruff` (see [isort (I) rules](https://docs.astral.sh/ruff/rules/#isort-i))
@@ -186,7 +190,7 @@
 make lint-python
 
 > Setup [pre-commit hooks](#pre-commit-hooks) to automatically format and lint on commit.
 ### Unit Tests
-Unit tests (`pytest`) for the Feast Python SDK / CLI can run as follows:
+Unit tests (`pytest`) for the Feast Python SDK and CLI can be run as follows:
 ```sh
 make test-python-unit
 ```
 
 > :warning: Local configuration can interfere with Unit tests and cause them to fail:
 > - Ensure [no AWS configuration is present](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html)
 >   and [no AWS credentials can be accessed](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#configuring-credentials) by `boto3`
-> - Ensure Feast Python SDK / CLI is not configured with configuration overrides (ie `~/.feast/config` should be empty).
+> - Ensure Feast Python SDK and CLI is not configured with configuration overrides (i.e. `~/.feast/config` should be empty).
 
 ### Integration Tests
 There are two sets of tests you can run:
diff --git a/docs/reference/alpha-web-ui.md b/docs/reference/alpha-web-ui.md
index 398c8de0ae..2caeed9e2a 100644
--- a/docs/reference/alpha-web-ui.md
+++ b/docs/reference/alpha-web-ui.md
@@ -70,7 +70,7 @@
 ReactDOM.render(
 );
 ```
 
-When you start the React app, it will look for `project-list.json` to find a list of your projects. The JSON should looks something like this.
+When you start the React app, it will look for `projects-list.json` to find a list of your projects. The JSON should look something like this.
 
 ```json
 {
diff --git a/docs/reference/batch-materialization/README.md b/docs/reference/batch-materialization/README.md
index 8511fd81d0..a05d6d75e5 100644
--- a/docs/reference/batch-materialization/README.md
+++ b/docs/reference/batch-materialization/README.md
@@ -1,6 +1,6 @@
 # Batch materialization
 
-Please see [Batch Materialization Engine](../../getting-started/architecture-and-components/batch-materialization-engine.md) for an explanation of batch materialization engines.
+Please see [Batch Materialization Engine](../../getting-started/components/batch-materialization-engine.md) for an explanation of batch materialization engines.
 
 {% page-ref page="snowflake.md" %}
diff --git a/docs/reference/codebase-structure.md b/docs/reference/codebase-structure.md
index 8eb5572679..7077e48fef 100644
--- a/docs/reference/codebase-structure.md
+++ b/docs/reference/codebase-structure.md
@@ -34,7 +34,7 @@
 There are also several important submodules:
 * `ui/` contains the embedded Web UI, to be launched on the `feast ui` command.
 
 Of these submodules, `infra/` is the most important.
-It contains the interfaces for the [provider](getting-started/architecture-and-components/provider.md), [offline store](getting-started/architecture-and-components/offline-store.md), [online store](getting-started/architecture-and-components/online-store.md), [batch materialization engine](getting-started/architecture-and-components/batch-materialization-engine.md), and [registry](getting-started/architecture-and-components/registry.md), as well as all of their individual implementations.
+It contains the interfaces for the [provider](getting-started/components/provider.md), [offline store](getting-started/components/offline-store.md), [online store](getting-started/components/online-store.md), [batch materialization engine](getting-started/components/batch-materialization-engine.md), and [registry](getting-started/components/registry.md), as well as all of their individual implementations.
```
$ tree --dirsfirst -L 1 infra
diff --git a/docs/reference/feast-cli-commands.md b/docs/reference/feast-cli-commands.md
index afcfcfef64..b32db3215a 100644
--- a/docs/reference/feast-cli-commands.md
+++ b/docs/reference/feast-cli-commands.md
@@ -24,6 +24,7 @@ Commands:
   init                     Create a new Feast repository
   materialize              Run a (non-incremental) materialization job to...
   materialize-incremental  Run an incremental materialization job to ingest...
+  permissions              Access permissions
   registry-dump            Print contents of the metadata registry
   teardown                 Tear down deployed feature store infrastructure
   version                  Display Feast SDK version
@@ -155,6 +156,185 @@ Load data from feature views into the online store, beginning from either the pr
 feast materialize-incremental 2022-01-01T00:00:00
 ```
+
+## Permissions
+
+### List permissions
+List all registered permissions.
+
+```text
+feast permissions list
+
+Options:
+  --tags TEXT    Filter by tags (e.g. --tags 'key:value' --tags 'key:value,
+                 key:value, ...'). Items return when ALL tags match.
+  -v, --verbose  Print the resources matching each configured permission
+```
+
+```text
++-----------------------+-------------+-----------------------+-----------+----------------+-------------------------+
+| NAME                  | TYPES       | NAME_PATTERN          | ACTIONS   | ROLES          | REQUIRED_TAGS           |
++=======================+=============+=======================+===========+================+=========================+
+| reader_permission1234 | FeatureView | transformed_conv_rate | DESCRIBE  | reader         | -                       |
++-----------------------+-------------+-----------------------+-----------+----------------+-------------------------+
+| writer_permission1234 | FeatureView | transformed_conv_rate | CREATE    | writer         | -                       |
++-----------------------+-------------+-----------------------+-----------+----------------+-------------------------+
+| special               | FeatureView | special.*             | DESCRIBE  | admin          | test-key2 : test-value2 |
+|                       |             |                       | UPDATE    | special-reader | test-key : test-value   |
++-----------------------+-------------+-----------------------+-----------+----------------+-------------------------+
+```
+
+The `verbose` option describes the resources matching each configured permission:
+
+```text
+feast permissions list -v
+```
+
+```text
+Permissions:
+
+permissions
+β”œβ”€β”€ reader_permission1234 ['reader']
+β”‚   └── FeatureView: none
+└── writer_permission1234 ['writer']
+    β”œβ”€β”€ FeatureView: none
+    β”œβ”€β”€ OnDemandFeatureView: ['transformed_conv_rate_fresh', 'transformed_conv_rate']
+    └── BatchFeatureView: ['driver_hourly_stats', 'driver_hourly_stats_fresh']
+```
+
+### Describe a permission
+Describes the provided permission.
+
+```text
+feast permissions describe permission-name
+name: permission-name
+types:
+- FEATURE_VIEW
+namePattern: transformed_conv_rate
+requiredTags:
+  required1: required-value1
+  required2: required-value2
+actions:
+- DESCRIBE
+policy:
+  roleBasedPolicy:
+    roles:
+    - reader
+tags:
+  key1: value1
+  key2: value2
+```
+
+### Permission check
+The `permissions check` command is used to identify resources that lack the appropriate permissions based on their type, name, or tags.
+
+This command is particularly useful for administrators when roles, actions, or permissions have been modified or newly configured. By running this command, administrators can easily verify which resources and actions are not protected by any permission configuration, ensuring that proper security measures are in place.
+
+```text
+> feast permissions check
+
+The following resources are not secured by any permission configuration:
+NAME                       TYPE
+driver                     Entity
+driver_hourly_stats_fresh  FeatureView
+The following actions are not secured by any permission configuration (Note: this might not be a security concern, depending on the used APIs):
+NAME                       TYPE         UNSECURED ACTIONS
+driver                     Entity       CREATE
+                                        DESCRIBE
+                                        UPDATE
+                                        DELETE
+                                        READ_ONLINE
+                                        READ_OFFLINE
+                                        WRITE_ONLINE
+                                        WRITE_OFFLINE
+driver_hourly_stats_fresh  FeatureView  CREATE
+                                        DESCRIBE
+                                        UPDATE
+                                        DELETE
+                                        READ_ONLINE
+                                        READ_OFFLINE
+                                        WRITE_ONLINE
+                                        WRITE_OFFLINE
+```
+
+Based on the above results, the administrator can reassess the permissions configuration and make any necessary adjustments to meet their security requirements.
+
+If every resource is secured by some permission configuration, the `permissions check` command returns empty lists:
+
+```text
+> feast permissions check
+The following resources are not secured by any permission configuration:
+NAME  TYPE
+The following actions are not secured by any permission configuration (Note: this might not be a security concern, depending on the used APIs):
+NAME  TYPE  UNSECURED ACTIONS
+```
+
+
+### List of the configured roles
+List all the configured roles.
+
+```text
+feast permissions list-roles
+
+Options:
+  --verbose  Print the resources and actions permitted to each configured
+             role
+```
+
+```text
+ROLE NAME
+admin
+reader
+writer
+```
+
+The `verbose` option describes the resources and actions permitted to each managed role:
+
+```text
+feast permissions list-roles -v
+```
+
+```text
+ROLE NAME  RESOURCE NAME               RESOURCE TYPE  PERMITTED ACTIONS
+admin      driver_hourly_stats_source  FileSource     CREATE
+                                                      DELETE
+                                                      QUERY_OFFLINE
+                                                      QUERY_ONLINE
+                                                      DESCRIBE
+                                                      UPDATE
+admin      vals_to_add                 RequestSource  CREATE
+                                                      DELETE
+                                                      QUERY_OFFLINE
+                                                      QUERY_ONLINE
+                                                      DESCRIBE
+                                                      UPDATE
+admin      driver_stats_push_source    PushSource     CREATE
+                                                      DELETE
+                                                      QUERY_OFFLINE
+                                                      QUERY_ONLINE
+                                                      DESCRIBE
+                                                      UPDATE
+admin      driver_hourly_stats_source  FileSource     CREATE
+                                                      DELETE
+                                                      QUERY_OFFLINE
+                                                      QUERY_ONLINE
+                                                      DESCRIBE
+                                                      UPDATE
+admin      vals_to_add                 RequestSource  CREATE
+                                                      DELETE
+                                                      QUERY_OFFLINE
+                                                      QUERY_ONLINE
+                                                      DESCRIBE
+                                                      UPDATE
+admin      driver_stats_push_source    PushSource     CREATE
+                                                      DELETE
+                                                      QUERY_OFFLINE
+                                                      QUERY_ONLINE
+                                                      DESCRIBE
+                                                      UPDATE
+reader     driver_hourly_stats         FeatureView    DESCRIBE
+reader     driver_hourly_stats_fresh   FeatureView    DESCRIBE
+...
+```
+
+
 ## Teardown
 
 Tear down deployed feature store infrastructure
diff --git a/docs/reference/feature-servers/offline-feature-server.md b/docs/reference/feature-servers/offline-feature-server.md
index 6c2fdf7a25..1db5adacd8 100644
--- a/docs/reference/feature-servers/offline-feature-server.md
+++ b/docs/reference/feature-servers/offline-feature-server.md
@@ -33,3 +33,20 @@ Please see the detail how to configure offline store client [remote-offline-stor
 
 The set of functionalities supported by remote offline stores is the same as those supported by offline stores with the SDK, which are described in detail [here](../offline-stores/overview.md#functionality).
+# Offline Feature Server Permissions and Access Control
+
+## API Endpoints and Permissions
+
+| Endpoint                        | Resource Type  | Permission    | Description                                        |
+| ------------------------------- |----------------|---------------|----------------------------------------------------|
+| offline_write_batch             | FeatureView    | Write Offline | Write a batch of data to the offline store         |
+| write_logged_features           | FeatureService | Write Offline | Write logged features to the offline store        |
+| persist                         | DataSource     | Write Offline | Persist the result of a read in the offline store |
+| get_historical_features         | FeatureView    | Read Offline  | Retrieve historical features                       |
+| pull_all_from_table_or_query    | DataSource     | Read Offline  | Pull all data from a table or a query              |
+| pull_latest_from_table_or_query | DataSource     | Read Offline  | Pull the latest data from a table or a query       |
+
+
+## How to configure Authentication and Authorization?
+
+Please refer to the [page](./../../../docs/getting-started/concepts/permission.md) for more details on how to configure authentication and authorization.
\ No newline at end of file
diff --git a/docs/reference/feature-servers/python-feature-server.md b/docs/reference/feature-servers/python-feature-server.md
index 0d8a0aef75..255b85e606 100644
--- a/docs/reference/feature-servers/python-feature-server.md
+++ b/docs/reference/feature-servers/python-feature-server.md
@@ -153,7 +153,7 @@
 ### Pushing features to the online and offline stores
 
-The Python feature server also exposes an endpoint for [push sources](../../data-sources/push.md). This endpoint allows you to push data to the online and/or offline store.
+The Python feature server also exposes an endpoint for [push sources](../data-sources/push.md). This endpoint allows you to push data to the online and/or offline store.
 
 The request definition for `PushMode` is a string parameter `to` where the options are: \[`"online"`, `"offline"`, `"online_and_offline"`].
@@ -199,3 +199,19 @@ requests.post(
     "http://localhost:6566/push",
     data=json.dumps(push_data))
 ```
+
+# Online Feature Server Permissions and Access Control
+
+## API Endpoints and Permissions
+
+| Endpoint                 | Resource Type                   | Permission                                             | Description                                                     |
+| ------------------------ |---------------------------------|--------------------------------------------------------| --------------------------------------------------------------- |
+| /get-online-features     | FeatureView,OnDemandFeatureView | Read Online                                            | Get online features from the feature store                     |
+| /push                    | FeatureView                     | Write Online, Write Offline, Write Online and Offline  | Push features to the feature store (online, offline, or both)  |
+| /write-to-online-store   | FeatureView                     | Write Online                                           | Write features to the online store                             |
+| /materialize             | FeatureView                     | Write Online                                           | Materialize features within a specified time range             |
+| /materialize-incremental | FeatureView                     | Write Online                                           | Incrementally materialize features up to a specified timestamp |
+
+## How to configure Authentication and Authorization?
+
+Please refer to the [page](./../../../docs/getting-started/concepts/permission.md) for more details on how to configure authentication and authorization.
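+
+As an illustrative sketch (the server address mirrors the push example above; the token value and the way it is obtained are hypothetical, since in practice the Feast client proxies inject the configured credentials automatically), an authenticated call to the `/push` endpoint might look like:
+
+```python
+import json
+
+import requests
+
+# Hypothetical bearer token, e.g. a JWT from the configured OIDC provider
+# or a Kubernetes service account token.
+token = "<AUTH_TOKEN>"
+
+push_data = {
+    "push_source_name": "driver_stats_push_source",
+    "df": {
+        "driver_id": [1001],
+        "event_timestamp": ["2024-01-01 00:00:00"],
+        "created": ["2024-01-01 00:00:00"],
+        "conv_rate": [0.85],
+        "acc_rate": [0.91],
+        "avg_daily_trips": [14],
+    },
+    "to": "online",
+}
+# The Authorization header carries the token that the server validates
+# before enforcing the Write Online permission on the target feature view.
+requests.post(
+    "http://localhost:6566/push",
+    headers={"Authorization": f"Bearer {token}"},
+    data=json.dumps(push_data),
+)
+```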
\ No newline at end of file
diff --git a/docs/reference/offline-stores/README.md b/docs/reference/offline-stores/README.md
index 33eca6d426..87c92bfcf8 100644
--- a/docs/reference/offline-stores/README.md
+++ b/docs/reference/offline-stores/README.md
@@ -1,6 +1,6 @@
 # Offline stores
 
-Please see [Offline Store](../../getting-started/architecture-and-components/offline-store.md) for a conceptual explanation of offline stores.
+Please see [Offline Store](../../getting-started/components/offline-store.md) for a conceptual explanation of offline stores.
 
 {% content-ref url="overview.md" %}
 [overview.md](overview.md)
diff --git a/docs/reference/offline-stores/remote-offline-store.md b/docs/reference/offline-stores/remote-offline-store.md
index 0179e0f06f..8057ae3284 100644
--- a/docs/reference/offline-stores/remote-offline-store.md
+++ b/docs/reference/offline-stores/remote-offline-store.md
@@ -25,4 +25,7 @@ The complete example can be find under [remote-offline-store-example](../../../e
 ## How to configure the server
 
-Please see the detail how to configure offline feature server [offline-feature-server.md](../feature-servers/offline-feature-server.md)
\ No newline at end of file
+Please see the details on how to configure the offline feature server in [offline-feature-server.md](../feature-servers/offline-feature-server.md)
+
+## How to configure Authentication and Authorization
+Please refer to the [page](./../../../docs/getting-started/concepts/permission.md) for more details on how to configure authentication and authorization.
diff --git a/docs/reference/online-stores/README.md b/docs/reference/online-stores/README.md
index 0acf6701f9..cdb9c37c1d 100644
--- a/docs/reference/online-stores/README.md
+++ b/docs/reference/online-stores/README.md
@@ -1,6 +1,6 @@
 # Online stores
 
-Please see [Online Store](../../getting-started/architecture-and-components/online-store.md) for an explanation of online stores.
+Please see [Online Store](../../getting-started/components/online-store.md) for an explanation of online stores.
 
 {% content-ref url="overview.md" %}
 [overview.md](overview.md)
@@ -50,10 +50,6 @@
 [mysql.md](mysql.md)
 {% endcontent-ref %}
 
-{% content-ref url="rockset.md" %}
-[rockset.md](rockset.md)
-{% endcontent-ref %}
-
 {% content-ref url="hazelcast.md" %}
 [hazelcast.md](hazelcast.md)
 {% endcontent-ref %}
diff --git a/docs/reference/online-stores/remote.md b/docs/reference/online-stores/remote.md
index c560fa6f22..4dd4fb65b5 100644
--- a/docs/reference/online-stores/remote.md
+++ b/docs/reference/online-stores/remote.md
@@ -11,11 +11,17 @@ The registry is pointing to registry of remote feature store. If it is not acces
 {% code title="feature_store.yaml" %}
 ```yaml
 project: my-local-project
-    registry: /remote/data/registry.db
-    provider: local
-    online_store:
-        path: http://localhost:6566
-        type: remote
-    entity_key_serialization_version: 2
+registry: /remote/data/registry.db
+provider: local
+online_store:
+  path: http://localhost:6566
+  type: remote
+entity_key_serialization_version: 2
+auth:
+  type: no_auth
 ```
-{% endcode %}
\ No newline at end of file
+{% endcode %}
+
+## How to configure Authentication and Authorization
+Please refer to the [page](./../../../docs/getting-started/concepts/permission.md) for more details on how to configure authentication and authorization.
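+
+As a sketch of how a client consumes this configuration (assuming the remote feature server is reachable at `localhost:6566` as configured above, and that a `driver_hourly_stats` feature view like the quickstart's exists in the registry), online reads go through the usual SDK call:
+
+```python
+from feast import FeatureStore
+
+# With the remote online store configured in feature_store.yaml, this read is
+# forwarded over HTTP to the feature server instead of querying a database directly.
+store = FeatureStore(repo_path=".")
+features = store.get_online_features(
+    features=["driver_hourly_stats:conv_rate"],
+    entity_rows=[{"driver_id": 1001}],
+).to_dict()
+print(features)
+```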
+ diff --git a/docs/reference/online-stores/rockset.md b/docs/reference/online-stores/rockset.md deleted file mode 100644 index 082bddf37b..0000000000 --- a/docs/reference/online-stores/rockset.md +++ /dev/null @@ -1,84 +0,0 @@ -# Rockset (contrib) - -## Description - -In Alpha Development. - -The [Rockset](https://rockset.com/demo-signup/) online store provides support for materializing feature values within a Rockset collection in order to serve features in real-time. - -* Each document is uniquely identified by its '_id' value. Repeated inserts into the same document '_id' will result in an upsert. - -Rockset indexes all columns allowing for quick per feature look up and also allows for a dynamic typed schema that can change based on any new requirements. API Keys can be found in the Rockset console. -You can also find host urls on the same tab by clicking "View Region Endpoint Urls". - -Data Model Used Per Doc - -``` -{ - "_id": (STRING) Unique Identifier for the feature document. - : (STRING) Feature Values Mapped by Feature Name. Feature - values stored as a serialized hex string. - .... - "event_ts": (STRING) ISO Stringified Timestamp. - "created_ts": (STRING) ISO Stringified Timestamp. -} -``` - - -## Example - -```yaml -project: my_feature_app -registry: data/registry.db -provider: local -online_store: - ## Basic Configs ## - - # If apikey or host is left blank the driver will try to pull - # these values from environment variables ROCKSET_APIKEY and - # ROCKSET_APISERVER respectively. - type: rockset - api_key: - host: - - ## Advanced Configs ## - - # Batch size of records that will be turned per page when - # paginating a batched read. - # - # read_pagination_batch_size: 100 - - # The amount of time, in seconds, we will wait for the - # collection to become visible to the API. - # - # collection_created_timeout_secs: 60 - - # The amount of time, in seconds, we will wait for the - # collection to enter READY state. - # - # collection_ready_timeout_secs: 1800 - - # Whether to wait for all writes to be flushed from log - # and queryable before returning write as completed. If - # False, documents that are written may not be seen - # immediately in subsequent reads. - # - # fence_all_writes: True - - # The amount of time we will wait, in seconds, for the - # write fence to be passed - # - # fence_timeout_secs: 600 - - # Initial backoff, in seconds, we will wait between - # requests when polling for a response. - # - # initial_request_backoff_secs: 2 - - # Initial backoff, in seconds, we will wait between - # requests when polling for a response. - # max_request_backoff_secs: 30 - - # The max amount of times we will retry a failed request. - # max_request_attempts: 10000 -``` diff --git a/docs/reference/providers/README.md b/docs/reference/providers/README.md index 20686a1e14..925ae8ebc1 100644 --- a/docs/reference/providers/README.md +++ b/docs/reference/providers/README.md @@ -1,6 +1,6 @@ # Providers -Please see [Provider](../../getting-started/architecture-and-components/provider.md) for an explanation of providers. +Please see [Provider](../../getting-started/components/provider.md) for an explanation of providers. 
{% page-ref page="local.md" %}
diff --git a/docs/reference/registries/README.md b/docs/reference/registries/README.md
new file mode 100644
index 0000000000..1310506f1d
--- /dev/null
+++ b/docs/reference/registries/README.md
@@ -0,0 +1,23 @@
+# Registries
+
+Please see [Registry](../../getting-started/components/registry.md) for a conceptual explanation of registries.
+
+{% content-ref url="local.md" %}
+[local.md](local.md)
+{% endcontent-ref %}
+
+{% content-ref url="s3.md" %}
+[s3.md](s3.md)
+{% endcontent-ref %}
+
+{% content-ref url="gcs.md" %}
+[gcs.md](gcs.md)
+{% endcontent-ref %}
+
+{% content-ref url="sql.md" %}
+[sql.md](sql.md)
+{% endcontent-ref %}
+
+{% content-ref url="snowflake.md" %}
+[snowflake.md](snowflake.md)
+{% endcontent-ref %}
diff --git a/docs/reference/registries/gcs.md b/docs/reference/registries/gcs.md
new file mode 100644
index 0000000000..13c9657aa1
--- /dev/null
+++ b/docs/reference/registries/gcs.md
@@ -0,0 +1,23 @@
+# GCS Registry
+
+## Description
+
+The GCS registry provides support for storing the protobuf representation of your feature store objects (data sources, feature views, feature services, etc.) using Google Cloud Storage.
+
+While it can be used in production, there are still inherent limitations with file-based registries, since changing a single field in the registry requires re-writing the whole registry file. With multiple concurrent writers, this presents a risk of data loss, or bottlenecks writes to the registry since all changes have to be serialized (e.g. when running materialization for multiple feature views or time ranges concurrently).
+
+An example of how to configure this would be:
+
+## Example
+
+{% code title="feature_store.yaml" %}
+```yaml
+project: feast_gcp
+registry:
+  path: gs://[YOUR BUCKET YOU CREATED]/registry.pb
+  cache_ttl_seconds: 60
+online_store: null
+offline_store:
+  type: dask
+```
+{% endcode %}
\ No newline at end of file
diff --git a/docs/reference/registries/local.md b/docs/reference/registries/local.md
new file mode 100644
index 0000000000..ad1d98cea9
--- /dev/null
+++ b/docs/reference/registries/local.md
@@ -0,0 +1,23 @@
+# Local Registry
+
+## Description
+
+The local registry provides support for storing the protobuf representation of your feature store objects (data sources, feature views, feature services, etc.) in the local file system. It is only intended to be used for experimentation with Feast and should not be used in production.
+
+There are inherent limitations with file-based registries, since changing a single field in the registry requires re-writing the whole registry file. With multiple concurrent writers, this presents a risk of data loss, or bottlenecks writes to the registry since all changes have to be serialized (e.g. when running materialization for multiple feature views or time ranges concurrently).
+
+An example of how to configure this would be:
+
+## Example
+
+{% code title="feature_store.yaml" %}
+```yaml
+project: feast_local
+registry:
+  path: registry.pb
+  cache_ttl_seconds: 60
+online_store: null
+offline_store:
+  type: dask
+```
+{% endcode %}
\ No newline at end of file
diff --git a/docs/reference/registries/s3.md b/docs/reference/registries/s3.md
new file mode 100644
index 0000000000..65069c415c
--- /dev/null
+++ b/docs/reference/registries/s3.md
@@ -0,0 +1,23 @@
+# S3 Registry
+
+## Description
+
+The S3 registry provides support for storing the protobuf representation of your feature store objects (data sources, feature views, feature services, etc.) in S3.
in Amazon S3. + +While it can be used in production, there are still inherent limitations with file-based registries, since changing a single field in the registry requires re-writing the whole registry file. With multiple concurrent writers, this presents a risk of data loss, or can bottleneck writes to the registry, since all changes have to be serialized (e.g. when running materialization for multiple feature views or time ranges concurrently). + +An example of how to configure this would be: + +## Example + +{% code title="feature_store.yaml" %} +```yaml +project: feast_aws_s3 +registry: + path: s3://[YOUR BUCKET YOU CREATED]/registry.pb + cache_ttl_seconds: 60 +online_store: null +offline_store: + type: dask +``` +{% endcode %} \ No newline at end of file diff --git a/docs/reference/registry/snowflake.md b/docs/reference/registries/snowflake.md similarity index 97% rename from docs/reference/registry/snowflake.md rename to docs/reference/registries/snowflake.md index 31b0db9582..00d87b1977 100644 --- a/docs/reference/registry/snowflake.md +++ b/docs/reference/registries/snowflake.md @@ -1,4 +1,4 @@ -# Snowflake registry +# Snowflake Registry ## Description diff --git a/docs/tutorials/using-scalable-registry.md b/docs/reference/registries/sql.md similarity index 97% rename from docs/tutorials/using-scalable-registry.md rename to docs/reference/registries/sql.md index 25746f60e2..631a20cbe3 100644 --- a/docs/tutorials/using-scalable-registry.md +++ b/docs/reference/registries/sql.md @@ -1,9 +1,4 @@ ---- -description: >- - Tutorial on how to use the SQL registry for scalable registry updates ---- - -# Using Scalable Registry +# SQL Registry ## Overview diff --git a/docs/reference/registry/registry-permissions.md b/docs/reference/registry/registry-permissions.md new file mode 100644 index 0000000000..65508ef5b2 --- /dev/null +++ b/docs/reference/registry/registry-permissions.md @@ -0,0 +1,45 @@ +# Registry Permissions and Access Control + + +## API Endpoints and Permissions + +| Endpoint | Resource Type | Permission | Description | +| ------------------------ |---------------------|------------------------| -------------------------------------------------------------- | +| ApplyEntity | Entity | Create, Update, Delete | Apply an entity to the registry | +| GetEntity | Entity | Read | Get an entity from the registry | +| ListEntities | Entity | Read | List entities in the registry | +| DeleteEntity | Entity | Delete | Delete an entity from the registry | +| ApplyDataSource | DataSource | Create, Update, Delete | Apply a data source to the registry | +| GetDataSource | DataSource | Read | Get a data source from the registry | +| ListDataSources | DataSource | Read | List data sources in the registry | +| DeleteDataSource | DataSource | Delete | Delete a data source from the registry | +| ApplyFeatureView | FeatureView | Create, Update, Delete | Apply a feature view to the registry | +| GetFeatureView | FeatureView | Read | Get a feature view from the registry | +| ListFeatureViews | FeatureView | Read | List feature views in the registry | +| DeleteFeatureView | FeatureView | Delete | Delete a feature view from the registry | +| GetStreamFeatureView | StreamFeatureView | Read | Get a stream feature view from the registry | +| ListStreamFeatureViews | StreamFeatureView | Read | List stream feature views in the registry | +| GetOnDemandFeatureView | OnDemandFeatureView | Read | Get an on-demand feature view from the registry | +| ListOnDemandFeatureViews | OnDemandFeatureView | Read |
List on-demand feature views in the registry | +| ApplyFeatureService | FeatureService | Create, Update, Delete | Apply a feature service to the registry | +| GetFeatureService | FeatureService | Read | Get a feature service from the registry | +| ListFeatureServices | FeatureService | Read | List feature services in the registry | +| DeleteFeatureService | FeatureService | Delete | Delete a feature service from the registry | +| ApplySavedDataset | SavedDataset | Create, Update, Delete | Apply a saved dataset to the registry | +| GetSavedDataset | SavedDataset | Read | Get a saved dataset from the registry | +| ListSavedDatasets | SavedDataset | Read | List saved datasets in the registry | +| DeleteSavedDataset | SavedDataset | Delete | Delete a saved dataset from the registry | +| ApplyValidationReference | ValidationReference | Create, Update, Delete | Apply a validation reference to the registry | +| GetValidationReference | ValidationReference | Read | Get a validation reference from the registry | +| ListValidationReferences | ValidationReference | Read | List validation references in the registry | +| DeleteValidationReference| ValidationReference | Delete | Delete a validation reference from the registry | +| ApplyPermission | Permission | Create, Update, Delete | Apply a permission to the registry | +| GetPermission | Permission | Read | Get a permission from the registry | +| ListPermissions | Permission | Read | List permissions in the registry | +| DeletePermission | Permission | Delete | Delete a permission from the registry | +| Commit | | None | Commit changes to the registry | +| Refresh | | None | Refresh the registry | +| Proto | | None | Get the proto representation of the registry | + +## How to configure Authentication and Authorization +Please refer to the [page](./../../../docs/getting-started/concepts/permission.md) for more details on how to configure authentication and authorization. diff --git a/docs/roadmap.md b/docs/roadmap.md index e1ba6f3333..ff6549a3cb 100644 --- a/docs/roadmap.md +++ b/docs/roadmap.md @@ -42,6 +42,7 @@ The list below contains the functionality that contributors are planning to deve * [x] On-demand Transformations (Beta release. See [RFC](https://docs.google.com/document/d/1lgfIw0Drc65LpaxbUu49RCeJgMew547meSJttnUqz7c/edit#)) * [x] Streaming Transformations (Alpha release. See [RFC](https://docs.google.com/document/d/1UzEyETHUaGpn0ap4G82DHluiCj7zEbrQLkJJkKSv4e8/edit)) * [ ] Batch transformation (In progress. See [RFC](https://docs.google.com/document/d/1964OkzuBljifDvkV-0fakp2uaijnVzdwWNGdz7Vz50A/edit)) + * [ ] Persistent On-demand Transformations (Beta release. See [GitHub Issue](https://github.com/feast-dev/feast/issues/4376)) * **Streaming** * [x] [Custom streaming ingestion job support](https://docs.feast.dev/how-to-guides/customizing-feast/creating-a-custom-provider) * [x] [Push based streaming data ingestion to online store](https://docs.feast.dev/reference/data-sources/push) @@ -63,3 +64,6 @@ The list below contains the functionality that contributors are planning to deve * [x] Amundsen integration (see [Feast extractor](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/feast_extractor.py)) * [x] DataHub integration (see [DataHub Feast docs](https://datahubproject.io/docs/generated/ingestion/sources/feast/)) * [x] Feast Web UI (Beta release. See [docs](https://docs.feast.dev/reference/alpha-web-ui)) + * [ ] Feast Lineage Explorer +* **Natural Language Processing** + * [x] Vector Search (Alpha release.
See [RFC](https://docs.google.com/document/d/18IWzLEA9i2lDWnbfbwXnMCg3StlqaLVI-uRpQjr_Vos/edit#heading=h.9gaqqtox9jg6)) diff --git a/environment-setup.md b/environment-setup.md index 5dde9dfd94..581dc35f77 100644 --- a/environment-setup.md +++ b/environment-setup.md @@ -13,11 +13,10 @@ pip install cryptography -U conda install protobuf conda install pymssql pip install -e ".[dev]" -make install-protoc-dependencies PYTHON=3.9 make install-python-ci-dependencies PYTHON=3.9 ``` 4. start the docker daemon 5. run unit tests: ```bash make test-python-unit -``` \ No newline at end of file +``` diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 0000000000..91799864aa --- /dev/null +++ b/examples/README.md @@ -0,0 +1,16 @@ +# Feast Examples + +1. **[Quickstart Example](https://github.com/feast-dev/feast/tree/master/examples/quickstart)**: This is a step-by-step guide for getting started with Feast (a minimal sketch of the client code these examples build toward follows this list). + +2. **[Java Demo](https://github.com/feast-dev/feast/tree/master/examples/java-demo)**: Demonstrates how to use Feast with a Java feature server deployed on Kubernetes. + +3. **[Python Helm Demo](https://github.com/feast-dev/feast/tree/master/examples/python-helm-demo)**: Demonstrates Feast on Kubernetes using Helm charts and a Python feature server. + +4. **[RBAC Local](https://github.com/feast-dev/feast/tree/master/examples/rbac-local)**: Demonstrates, using notebooks, how to configure and test Role-Based Access Control (RBAC) for securing access in Feast with the OIDC authorization type in a local environment. + +5. **[RBAC Remote](https://github.com/feast-dev/feast/tree/master/examples/rbac-remote)**: Demonstrates how to configure and test Role-Based Access Control (RBAC) for securing access in Feast with the Kubernetes or OIDC authentication type in a Kubernetes environment. + +6. **[Remote Offline Store](https://github.com/feast-dev/feast/tree/master/examples/remote-offline-store)**: Demonstrates how to set up and use a remote offline server. + +7. **[Podman/Podman Compose Local](https://github.com/feast-dev/feast/tree/master/examples/podman_local)**: Demonstrates how to deploy Feast remote server components locally using Podman Compose.
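For orientation, here is a minimal sketch of the kind of client code the examples above build toward, assuming a feature repo has already been created and applied with `feast init` / `feast apply`; the feature references and the `driver_id` join key follow the quickstart template and are assumptions for illustration, not part of this change:

```python
from feast import FeatureStore

# Load the feature store from a repo directory containing feature_store.yaml.
store = FeatureStore(repo_path=".")

# Read features for a single driver entity from the online store; the
# feature view and feature names below come from the quickstart template.
features = store.get_online_features(
    features=[
        "driver_hourly_stats:conv_rate",
        "driver_hourly_stats:acc_rate",
    ],
    entity_rows=[{"driver_id": 1001}],
).to_dict()
print(features)
```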
+ diff --git a/examples/kind-quickstart/01-Install.ipynb b/examples/kind-quickstart/01-Install.ipynb new file mode 100644 index 0000000000..e5ece97fc2 --- /dev/null +++ b/examples/kind-quickstart/01-Install.ipynb @@ -0,0 +1,932 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: feast==0.40.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (0.40.1)\n", + "Requirement already satisfied: click<9.0.0,>=7.0.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (8.1.7)\n", + "Requirement already satisfied: colorama<1,>=0.3.9 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (0.4.6)\n", + "Requirement already satisfied: dill~=0.3.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (0.3.8)\n", + "Requirement already satisfied: mypy-protobuf>=3.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (3.6.0)\n", + "Requirement already satisfied: Jinja2<4,>=2 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (3.1.4)\n", + "Requirement already satisfied: jsonschema in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (4.22.0)\n", + "Requirement already satisfied: mmh3 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (4.1.0)\n", + "Requirement already satisfied: numpy<2,>=1.22 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (1.26.4)\n", + "Requirement already satisfied: pandas<3,>=1.4.3 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (2.2.2)\n", + "Requirement already satisfied: protobuf<5.0.0,>=4.24.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (4.25.4)\n", + "Requirement already satisfied: pyarrow>=4 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (15.0.2)\n", + "Requirement already satisfied: pydantic>=2.0.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (2.7.4)\n", + "Requirement already satisfied: pygments<3,>=2.12.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (2.18.0)\n", + "Requirement already satisfied: PyYAML<7,>=5.4.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (6.0.1)\n", + "Requirement already satisfied: requests in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (2.32.3)\n", + "Requirement already satisfied: SQLAlchemy>1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from SQLAlchemy[mypy]>1->feast==0.40.1) (2.0.34)\n", + "Requirement already satisfied: tabulate<1,>=0.8.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (0.9.0)\n", + "Requirement already satisfied: tenacity<9,>=7 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (8.5.0)\n", + "Requirement already satisfied: toml<1,>=0.10.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (0.10.2)\n", + "Requirement already satisfied: tqdm<5,>=4 in 
/Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (4.66.4)\n", + "Requirement already satisfied: typeguard>=4.0.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (4.3.0)\n", + "Requirement already satisfied: fastapi>=0.68.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (0.114.2)\n", + "Requirement already satisfied: uvicorn<1,>=0.14.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.30.6)\n", + "Requirement already satisfied: dask>=2024.2.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from dask[dataframe]>=2024.2.1->feast==0.40.1) (2024.6.2)\n", + "Requirement already satisfied: gunicorn in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from feast==0.40.1) (23.0.0)\n", + "Requirement already satisfied: cloudpickle>=1.5.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (3.0.0)\n", + "Requirement already satisfied: fsspec>=2021.09.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (2023.12.2)\n", + "Requirement already satisfied: packaging>=20.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (24.1)\n", + "Requirement already satisfied: partd>=1.2.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (1.4.2)\n", + "Requirement already satisfied: toolz>=0.10.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (0.12.1)\n", + "Requirement already satisfied: importlib-metadata>=4.13.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (8.0.0)\n", + "Requirement already satisfied: dask-expr<1.2,>=1.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from dask[dataframe]>=2024.2.1->feast==0.40.1) (1.1.6)\n", + "Requirement already satisfied: starlette<0.39.0,>=0.37.2 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (0.38.5)\n", + "Requirement already satisfied: typing-extensions>=4.8.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (4.12.2)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from Jinja2<4,>=2->feast==0.40.1) (2.1.5)\n", + "Requirement already satisfied: types-protobuf>=4.24 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from mypy-protobuf>=3.1->feast==0.40.1) (5.27.0.20240626)\n", + "Requirement already satisfied: python-dateutil>=2.8.2 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from pandas<3,>=1.4.3->feast==0.40.1) (2.9.0.post0)\n", + "Requirement already satisfied: pytz>=2020.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from pandas<3,>=1.4.3->feast==0.40.1) (2024.1)\n", + "Requirement already satisfied: tzdata>=2022.7 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from pandas<3,>=1.4.3->feast==0.40.1) 
(2024.1)\n", + "Requirement already satisfied: annotated-types>=0.4.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from pydantic>=2.0.0->feast==0.40.1) (0.7.0)\n", + "Requirement already satisfied: pydantic-core==2.18.4 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from pydantic>=2.0.0->feast==0.40.1) (2.18.4)\n", + "Requirement already satisfied: mypy>=0.910 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from SQLAlchemy[mypy]>1->feast==0.40.1) (1.10.1)\n", + "Requirement already satisfied: h11>=0.8 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from uvicorn<1,>=0.14.0->uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.14.0)\n", + "Requirement already satisfied: httptools>=0.5.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.6.1)\n", + "Requirement already satisfied: python-dotenv>=0.13 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (1.0.1)\n", + "Requirement already satisfied: uvloop!=0.15.0,!=0.15.1,>=0.14.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.19.0)\n", + "Requirement already satisfied: watchfiles>=0.13 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.22.0)\n", + "Requirement already satisfied: websockets>=10.4 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (12.0)\n", + "Requirement already satisfied: attrs>=22.2.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from jsonschema->feast==0.40.1) (23.2.0)\n", + "Requirement already satisfied: jsonschema-specifications>=2023.03.6 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from jsonschema->feast==0.40.1) (2023.12.1)\n", + "Requirement already satisfied: referencing>=0.28.4 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from jsonschema->feast==0.40.1) (0.35.1)\n", + "Requirement already satisfied: rpds-py>=0.7.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from jsonschema->feast==0.40.1) (0.18.1)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from requests->feast==0.40.1) (3.3.2)\n", + "Requirement already satisfied: idna<4,>=2.5 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from requests->feast==0.40.1) (3.7)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from requests->feast==0.40.1) (1.26.19)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from requests->feast==0.40.1) (2024.7.4)\n", + "Requirement already satisfied: zipp>=0.5 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from importlib-metadata>=4.13.0->dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (3.19.1)\n", + "Requirement already satisfied: mypy-extensions>=1.0.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from mypy>=0.910->SQLAlchemy[mypy]>1->feast==0.40.1) (1.0.0)\n", + "Requirement already satisfied: locket in 
/Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from partd>=1.2.0->dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (1.0.0)\n", + "Requirement already satisfied: six>=1.5 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from python-dateutil>=2.8.2->pandas<3,>=1.4.3->feast==0.40.1) (1.16.0)\n", + "Requirement already satisfied: anyio<5,>=3.4.0 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from starlette<0.39.0,>=0.37.2->fastapi>=0.68.0->feast==0.40.1) (4.4.0)\n", + "Requirement already satisfied: sniffio>=1.1 in /Users/dmartino/.pyenv/versions/3.11.9/lib/python3.11/site-packages (from anyio<5,>=3.4.0->starlette<0.39.0,>=0.37.2->fastapi>=0.68.0->feast==0.40.1) (1.3.1)\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "# WE MUST ENSURE PYTHON CONSISTENCY BETWEEN NOTEBOOK AND FEAST SERVERS\n", + "# LAUNCH THIS NOTEBOOK FROM A CLEAN PYTHON ENVIRONMENT >3.9\n", + "%pip install feast==0.40.1" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Install Feast on Kind\n", + "## Objective\n", + "\n", + "Provide a reference implementation of a runbook to deploy a Feast development environment on a Kubernetes cluster using [Kind](https://kind.sigs.k8s.io/docs/user/quick-start).\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Prerequisites\n", + "* [Kind](https://kind.sigs.k8s.io/) cluster and a Docker runtime container\n", + "* [kubectl](https://kubernetes.io/docs/tasks/tools/#kubectl) Kubernetes CLI tool.\n", + "* [Helm](https://helm.sh/) Kubernetes package manager.\n", + "* [yq](https://github.com/mikefarah/yq) YAML processor." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Install Prerequisites\n", + "The following commands install and configure all the prerequisites on a macOS environment. You can find the\n", + "equivalent instructions on the official documentation pages:\n", + "* Install Kind and Docker runtime (e.g.
[Colima](https://github.com/abiosoft/colima)).\n", + "* Create Kind cluster named `feast`.\n", + "* Install and set up the `kubectl` context.\n", + "* Install `Helm`.\n", + "* Install `yq`.\n", + "```bash\n", + "brew install colima\n", + "colima start\n", + "brew install kind\n", + "kind create cluster --name feast\n", + "brew install helm\n", + "brew install kubectl\n", + "kubectl config use-context kind-feast\n", + "brew install yq\n", + "```\n", + "\n", + "Additionally, we create a `feast` namespace and use it as the default for the `kubectl` CLI:" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "namespace/feast created\n", + "Context \"kind-feast\" modified.\n" + ] + } + ], + "source": [ + "!kubectl create ns feast\n", + "!kubectl config set-context --current --namespace feast" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Validate the cluster setup:" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME STATUS AGE\n", + "default Active 26h\n", + "feast Active 3s\n", + "kube-node-lease Active 26h\n", + "kube-public Active 26h\n", + "kube-system Active 26h\n", + "local-path-storage Active 26h\n" + ] + } + ], + "source": [ + "!kubectl get ns" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Deployment Architecture\n", + "The primary objective of this runbook is to guide the deployment of Feast services on a Kubernetes Kind cluster, using the default `postgres` template to set up a basic feature store.\n", + "\n", + "> πŸš€ We will also add instructions to repeat the example with a custom project, for a personalized experience.\n", + "\n", + "In this notebook, we will deploy a distributed topology of Feast services, which includes:\n", + "\n", + "* `Registry Server`: Exposes endpoints at the [default port 6570](https://github.com/feast-dev/feast/blob/89bc5512572130510dd18690309b5a392aaf73b1/sdk/python/feast/constants.py#L39) and handles metadata storage for feature definitions.\n", + "* `Online Store Server`: Exposes endpoints at the [default port 6566](https://github.com/feast-dev/feast/blob/4a6b663f80bc91d6de35ed2ec428d34811d17a18/sdk/python/feast/cli.py#L871-L872). This service uses the `Registry Server` to query metadata and is responsible for low-latency serving of features.\n", + "* `Offline Store Server`: Exposes endpoints at the [default port 8815](https://github.com/feast-dev/feast/blob/89bc5512572130510dd18690309b5a392aaf73b1/sdk/python/feast/constants.py#L42). It uses the `Registry Server` to query metadata and provides access to batch data for historical feature retrieval.\n", + "\n", + "Each service is backed by a `PostgreSQL` database, which is also deployed within the same Kind cluster.\n", + "\n", + "Finally, port forwarding will be configured to expose these Feast services locally. This will allow a local client, implemented in the accompanying client notebook, to interact with the deployed services." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Install PostgreSQL\n", + "Apply the [reference deployment](./postgres/postgres.yaml) to install and configure a simple PostgreSQL database."
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "secret/postgres-secret created\n", + "persistentvolume/postgres-volume created\n", + "persistentvolumeclaim/postgres-volume-claim created\n", + "deployment.apps/postgres created\n", + "service/postgres created\n", + "deployment.apps/postgres condition met\n" + ] + } + ], + "source": [ + "!kubectl apply -f postgres/postgres.yaml\n", + "!kubectl wait --for=condition=available deployment/postgres --timeout=2m" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME READY STATUS RESTARTS AGE\n", + "postgres-76c8d94d6-pngvm 1/1 Running 0 8s\n", + "NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\n", + "postgres NodePort 10.96.231.4 5432:30565/TCP 8s\n" + ] + } + ], + "source": [ + "!kubectl get pods\n", + "!kubectl get svc" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create the feature store project\n", + "Use the `feast init` command to create the default project.\n", + "\n", + "We also start port forwarding for the `postgres` service to populate the tables with default data." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "> πŸš€ If you want to use a custom configuration, replace it under the sample/feature_repo folder and skip this section" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Port-forwarding postgres with process ID: 9611\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Forwarding from 127.0.0.1:5432 -> 5432\n", + "Forwarding from [::1]:5432 -> 5432\n" + ] + } + ], + "source": [ + "from src.utils import port_forward\n", + "psql_process = port_forward(\"postgres\", 5432, 5432)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We are going to emulate the `feast init -t postgres sample` command using Python code. This is needed to mock the request of additional\n", + "parameters to configure the DB connection and also request the upload of example data to Postgres tables." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Handling connection for 5432\n", + "Handling connection for 5432\n", + "\n", + "Creating a new Feast repository in \u001b[1m\u001b[32m/Users/dmartino/projects/AI/feast/feast/examples/kind-quickstart/sample\u001b[0m.\n", + "\n" + ] + } + ], + "source": [ + "from feast.repo_operations import init_repo\n", + "from unittest import mock\n", + "from feast.templates.postgres.bootstrap import bootstrap\n", + "\n", + "project_directory = \"sample\"\n", + "template = \"postgres\"\n", + "\n", + "with mock.patch(\"click.prompt\", side_effect=[\"localhost\", \"5432\", \"feast\", \"public\", \"feast\", \"feast\"]):\n", + " with mock.patch(\"click.confirm\", side_effect=[True]):\n", + " init_repo(project_directory, template)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Verify that the DB includes the expected tables with pre-populated data." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " List of relations\n", + " Schema | Name | Type | Owner \n", + "--------+---------------------------+-------+-------\n", + " public | feast_driver_hourly_stats | table | feast\n", + "(1 row)\n", + "\n", + " count \n", + "-------\n", + " 1807\n", + "(1 row)\n", + "\n" + ] + } + ], + "source": [ + "!PSQL_POD=$(kubectl get pods -l app=postgres -oname) && kubectl exec $PSQL_POD -- psql -h localhost -U feast feast -c '\\dt'\n", + "!PSQL_POD=$(kubectl get pods -l app=postgres -oname) && kubectl exec $PSQL_POD -- psql -h localhost -U feast feast -c 'select count(*) from feast_driver_hourly_stats'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally, let's stop port forwarding." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " 501 10392 6947 0 1:12PM ttys051 0:00.12 /bin/zsh -c ps -ef | grep port-forward\n", + " 501 10394 10392 0 1:12PM ttys051 0:00.00 grep port-forward\n" + ] + } + ], + "source": [ + "psql_process.terminate()\n", + "!ps -ef | grep port-forward" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Generate server configurations\n", + "Each server has its own configuration that we generate from the one initialized before.\n", + "\n", + "We use `yq` to manipulate the original configuration and generate the server specifics.\n", + "\n", + "Note: from now on, we assume that the Feast service names will be as follows:\n", + "* For `Registry Server`: `registry-server`\n", + "* For `Online Store`: `online-server`\n", + "* For `Offline Store`: `offline-server`\n", + "\n", + "> πŸš€ If you used different service names, replace the `host` parameter in the following `yq` commands." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "env: FEATURE_REPO_DIR=sample/feature_repo\n", + "project: sample\n", + "provider: local\n", + "registry:\n", + " registry_type: sql\n", + " path: postgresql://feast:feast@postgres:5432/feast\n", + " cache_ttl_seconds: 60\n", + " sqlalchemy_config_kwargs:\n", + " echo: false\n", + " pool_pre_ping: true\n", + "online_store:\n", + " type: postgres\n", + " host: postgres\n", + " port: 5432\n", + " database: feast\n", + " db_schema: public\n", + " user: feast\n", + " password: feast\n", + "offline_store:\n", + " type: postgres\n", + " host: postgres\n", + " port: 5432\n", + " database: feast\n", + " db_schema: public\n", + " user: feast\n", + " password: feast\n", + "entity_key_serialization_version: 2\n" + ] + } + ], + "source": [ + "%env FEATURE_REPO_DIR=sample/feature_repo\n", + "# Adjust the database host to match the postgres service\n", + "!yq -i '.registry.path=\"postgresql://feast:feast@postgres:5432/feast\"' $FEATURE_REPO_DIR/feature_store.yaml\n", + "!yq -i '.online_store.host=\"postgres\"' $FEATURE_REPO_DIR/feature_store.yaml\n", + "!yq -i '.offline_store.host=\"postgres\"' $FEATURE_REPO_DIR/feature_store.yaml\n", + "!cat $FEATURE_REPO_DIR/feature_store.yaml" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "project: sample\n", + "registry:\n", + " registry_type: sql\n", + " path: postgresql://feast:feast@postgres:5432/feast\n", + " cache_ttl_seconds: 60\n", + " sqlalchemy_config_kwargs:\n", + " echo: false\n", + " pool_pre_ping: true\n", + "provider: local\n", + "entity_key_serialization_version: 2\n" + ] + } + ], + "source": [ + "# Registry server has only `registry` section\n", + "!cat $FEATURE_REPO_DIR/feature_store.yaml | yq '.project | {key: .}, .registry | {key: .}, .provider | {key: .}, .entity_key_serialization_version | {key: .}' > registry_feature_store.yaml\n", + "! 
cat registry_feature_store.yaml" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "project: sample\n", + "provider: local\n", + "online_store:\n", + " type: postgres\n", + " host: postgres\n", + " port: 5432\n", + " database: feast\n", + " db_schema: public\n", + " user: feast\n", + " password: feast\n", + "entity_key_serialization_version: 2\n", + "registry:\n", + " path: registry-server:80\n", + " registry_type: remote\n", + "offline_store:\n", + " type: remote\n", + " host: offline-server\n", + " port: 80\n" + ] + } + ], + "source": [ + "# Online server has `online_store` section, a remote `registry` and a remote `offline_store`\n", + "!cat $FEATURE_REPO_DIR/feature_store.yaml | yq '.project | {key: .}, .provider | {key: .}, .online_store | {key: .}, .entity_key_serialization_version | {key: .}' > online_feature_store.yaml\n", + "!yq -i '.registry.path=\"registry-server:80\"' online_feature_store.yaml\n", + "!yq -i '.registry.registry_type=\"remote\"' online_feature_store.yaml\n", + "!yq -i '.offline_store.type=\"remote\"' online_feature_store.yaml\n", + "!yq -i '.offline_store.host=\"offline-server\"' online_feature_store.yaml\n", + "!yq -i '.offline_store.port=80' online_feature_store.yaml\n", + "\n", + "!cat online_feature_store.yaml" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "project: sample\n", + "provider: local\n", + "offline_store:\n", + " type: postgres\n", + " host: postgres\n", + " port: 5432\n", + " database: feast\n", + " db_schema: public\n", + " user: feast\n", + " password: feast\n", + "entity_key_serialization_version: 2\n", + "registry:\n", + " path: registry-server:80\n", + " registry_type: remote\n" + ] + } + ], + "source": [ + "# Offline server has `offline_store` section and a remote `registry`\n", + "!cat $FEATURE_REPO_DIR/feature_store.yaml | yq '.project | {key: .}, .provider | {key: .}, .offline_store | {key: .}, .entity_key_serialization_version | {key: .}' > offline_feature_store.yaml\n", + "!yq -i '.registry.path=\"registry-server:80\"' offline_feature_store.yaml\n", + "!yq -i '.registry.registry_type=\"remote\"' offline_feature_store.yaml\n", + "!cat offline_feature_store.yaml" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Encode configuration files\n", + "Next step is to encode in base64 the configuration files for each server. We'll store the output in environment variables." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "def base64_file(file):\n", + " import base64\n", + "\n", + " with open(file, 'rb') as file:\n", + " yaml_content = file.read()\n", + " return base64.b64encode(yaml_content).decode('utf-8')\n", + "\n", + "os.environ['REGISTRY_CONFIG_BASE64'] = base64_file('registry_feature_store.yaml')\n", + "os.environ['ONLINE_CONFIG_BASE64'] = base64_file('online_feature_store.yaml')\n", + "os.environ['OFFLINE_CONFIG_BASE64'] = base64_file('offline_feature_store.yaml')" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "REGISTRY_CONFIG_BASE64=cHJvamVjdDogc2FtcGxlCnJlZ2lzdHJ5OgogIHJlZ2lzdHJ5X3R5cGU6IHNxbAogIHBhdGg6IHBvc3RncmVzcWw6Ly9mZWFzdDpmZWFzdEBwb3N0Z3Jlczo1NDMyL2ZlYXN0CiAgY2FjaGVfdHRsX3NlY29uZHM6IDYwCiAgc3FsYWxjaGVteV9jb25maWdfa3dhcmdzOgogICAgZWNobzogZmFsc2UKICAgIHBvb2xfcHJlX3Bpbmc6IHRydWUKcHJvdmlkZXI6IGxvY2FsCmVudGl0eV9rZXlfc2VyaWFsaXphdGlvbl92ZXJzaW9uOiAyCg==\n", + "ONLINE_CONFIG_BASE64=cHJvamVjdDogc2FtcGxlCnByb3ZpZGVyOiBsb2NhbApvbmxpbmVfc3RvcmU6CiAgdHlwZTogcG9zdGdyZXMKICBob3N0OiBwb3N0Z3JlcwogIHBvcnQ6IDU0MzIKICBkYXRhYmFzZTogZmVhc3QKICBkYl9zY2hlbWE6IHB1YmxpYwogIHVzZXI6IGZlYXN0CiAgcGFzc3dvcmQ6IGZlYXN0CmVudGl0eV9rZXlfc2VyaWFsaXphdGlvbl92ZXJzaW9uOiAyCnJlZ2lzdHJ5OgogIHBhdGg6IHJlZ2lzdHJ5LXNlcnZlcjo4MAogIHJlZ2lzdHJ5X3R5cGU6IHJlbW90ZQpvZmZsaW5lX3N0b3JlOgogIHR5cGU6IHJlbW90ZQogIGhvc3Q6IG9mZmxpbmUtc2VydmVyCiAgcG9ydDogODAK\n", + "OFFLINE_CONFIG_BASE64=cHJvamVjdDogc2FtcGxlCnByb3ZpZGVyOiBsb2NhbApvZmZsaW5lX3N0b3JlOgogIHR5cGU6IHBvc3RncmVzCiAgaG9zdDogcG9zdGdyZXMKICBwb3J0OiA1NDMyCiAgZGF0YWJhc2U6IGZlYXN0CiAgZGJfc2NoZW1hOiBwdWJsaWMKICB1c2VyOiBmZWFzdAogIHBhc3N3b3JkOiBmZWFzdAplbnRpdHlfa2V5X3NlcmlhbGl6YXRpb25fdmVyc2lvbjogMgpyZWdpc3RyeToKICBwYXRoOiByZWdpc3RyeS1zZXJ2ZXI6ODAKICByZWdpc3RyeV90eXBlOiByZW1vdGUK\n" + ] + } + ], + "source": [ + "!env | grep BASE64" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Install servers\n", + "We'll use the charts defined in this local repository to install the servers.\n", + "\n", + "The installation order reflects the dependency between the deployments:\n", + "* `Registry Server` starts first because it has no dependencies\n", + "* Then `Offline Server` as it depends only on the `Registry Server`\n", + "* Last the `Online Server` that depends on both the other servers" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "env: FEAST_IMAGE_REPO=feastdev/feature-server\n", + "env: FEAST_IMAGE_VERSION=0.40.1\n" + ] + } + ], + "source": [ + "%env FEAST_IMAGE_REPO=feastdev/feature-server\n", + "%env FEAST_IMAGE_VERSION=0.40.1" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Release \"feast-registry\" does not exist. 
Installing it now.\n", + "NAME: feast-registry\n", + "LAST DEPLOYED: Tue Sep 17 13:14:05 2024\n", + "NAMESPACE: feast\n", + "STATUS: deployed\n", + "REVISION: 1\n", + "TEST SUITE: None\n", + "deployment.apps/registry-server condition met\n" + ] + } + ], + "source": [ + "# Registry\n", + "!helm upgrade --install feast-registry ../../infra/charts/feast-feature-server \\\n", + "--set fullnameOverride=registry-server --set feast_mode=registry \\\n", + "--set image.repository=${FEAST_IMAGE_REPO} --set image.tag=${FEAST_IMAGE_VERSION} \\\n", + "--set feature_store_yaml_base64=$REGISTRY_CONFIG_BASE64\n", + "\n", + "!kubectl wait --for=condition=available deployment/registry-server --timeout=2m" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Release \"feast-offline\" does not exist. Installing it now.\n", + "NAME: feast-offline\n", + "LAST DEPLOYED: Tue Sep 17 13:14:33 2024\n", + "NAMESPACE: feast\n", + "STATUS: deployed\n", + "REVISION: 1\n", + "TEST SUITE: None\n", + "deployment.apps/offline-server condition met\n" + ] + } + ], + "source": [ + "# Offline\n", + "!helm upgrade --install feast-offline ../../infra/charts/feast-feature-server \\\n", + "--set fullnameOverride=offline-server --set feast_mode=offline \\\n", + "--set image.repository=${FEAST_IMAGE_REPO} --set image.tag=${FEAST_IMAGE_VERSION} \\\n", + "--set feature_store_yaml_base64=$OFFLINE_CONFIG_BASE64\n", + "\n", + "!kubectl wait --for=condition=available deployment/offline-server --timeout=2m" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Release \"feast-online\" does not exist. 
Installing it now.\n", + "NAME: feast-online\n", + "LAST DEPLOYED: Tue Sep 17 13:14:55 2024\n", + "NAMESPACE: feast\n", + "STATUS: deployed\n", + "REVISION: 1\n", + "TEST SUITE: None\n", + "deployment.apps/online-server condition met\n" + ] + } + ], + "source": [ + "# Online\n", + "!helm upgrade --install feast-online ../../infra/charts/feast-feature-server \\\n", + "--set fullnameOverride=online-server --set feast_mode=online \\\n", + "--set image.repository=${FEAST_IMAGE_REPO} --set image.tag=${FEAST_IMAGE_VERSION} \\\n", + "--set feature_store_yaml_base64=$ONLINE_CONFIG_BASE64\n", + "\n", + "!kubectl wait --for=condition=available deployment/online-server --timeout=2m" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Validate deployment\n", + "First, validate the application and service status:" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\n", + "offline-server ClusterIP 10.96.24.216 80/TCP 44s\n", + "online-server ClusterIP 10.96.36.113 80/TCP 22s\n", + "postgres NodePort 10.96.231.4 5432:30565/TCP 4m14s\n", + "registry-server ClusterIP 10.96.128.48 80/TCP 71s\n", + "NAME READY UP-TO-DATE AVAILABLE AGE\n", + "offline-server 1/1 1 1 44s\n", + "online-server 1/1 1 1 22s\n", + "postgres 1/1 1 1 4m14s\n", + "registry-server 1/1 1 1 71s\n", + "NAME READY STATUS RESTARTS AGE\n", + "offline-server-6c59467c75-9jvq7 1/1 Running 0 45s\n", + "online-server-76968bbc48-qlvvj 1/1 Running 0 23s\n", + "postgres-76c8d94d6-pngvm 1/1 Running 0 4m15s\n", + "registry-server-597c5cd445-nrm75 1/1 Running 0 72s\n" + ] + } + ], + "source": [ + "!kubectl get svc\n", + "!kubectl get deployments\n", + "!kubectl get pods" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Then verify the content of the local configuration file (it's stored in the `/tmp/` folder under a random subfolder)."
+ ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "project: sample\n", + "registry:\n", + " registry_type: sql\n", + " path: postgresql://feast:feast@postgres:5432/feast\n", + " cache_ttl_seconds: 60\n", + " sqlalchemy_config_kwargs:\n", + " echo: false\n", + " pool_pre_ping: true\n", + "provider: local\n", + "entity_key_serialization_version: 2\n" + ] + } + ], + "source": [ + "!kubectl exec deployment/registry-server -- find /tmp -name feature_store.yaml -exec cat {} \\;" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "project: sample\n", + "provider: local\n", + "offline_store:\n", + " type: postgres\n", + " host: postgres\n", + " port: 5432\n", + " database: feast\n", + " db_schema: public\n", + " user: feast\n", + " password: feast\n", + "entity_key_serialization_version: 2\n", + "registry:\n", + " path: registry-server:80\n", + " registry_type: remote\n" + ] + } + ], + "source": [ + "!kubectl exec deployment/offline-server -- find /tmp -name feature_store.yaml -exec cat {} \\;" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "project: sample\n", + "provider: local\n", + "online_store:\n", + " type: postgres\n", + " host: postgres\n", + " port: 5432\n", + " database: feast\n", + " db_schema: public\n", + " user: feast\n", + " password: feast\n", + "entity_key_serialization_version: 2\n", + "registry:\n", + " path: registry-server:80\n", + " registry_type: remote\n", + "offline_store:\n", + " type: remote\n", + " host: offline-server\n", + " port: 80\n" + ] + } + ], + "source": [ + "!kubectl exec deployment/online-server -- find /tmp -name feature_store.yaml -exec cat {} \\;" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally, let's verify the `feast` version in each server" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + ": MADV_DONTNEED does not work (memset will be used instead)\n", + ": (This is the expected behaviour if you are running under QEMU)\n", + "Feast SDK Version: \"0.40.1\"\n", + ": MADV_DONTNEED does not work (memset will be used instead)\n", + ": (This is the expected behaviour if you are running under QEMU)\n", + "Feast SDK Version: \"0.40.1\"\n", + ": MADV_DONTNEED does not work (memset will be used instead)\n", + ": (This is the expected behaviour if you are running under QEMU)\n", + "Feast SDK Version: \"0.40.1\"\n" + ] + } + ], + "source": [ + "!kubectl exec deployment/registry-server -- feast version\n", + "!kubectl exec deployment/offline-server -- feast version\n", + "!kubectl exec deployment/online-server -- feast version" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "feast3.11", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/examples/kind-quickstart/02-Client.ipynb 
b/examples/kind-quickstart/02-Client.ipynb new file mode 100644 index 0000000000..322a95a61b --- /dev/null +++ b/examples/kind-quickstart/02-Client.ipynb @@ -0,0 +1,606 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: feast==0.40.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (0.40.1)\n", + "Requirement already satisfied: click<9.0.0,>=7.0.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (8.1.7)\n", + "Requirement already satisfied: colorama<1,>=0.3.9 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (0.4.6)\n", + "Requirement already satisfied: dill~=0.3.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (0.3.8)\n", + "Requirement already satisfied: mypy-protobuf>=3.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (3.3.0)\n", + "Requirement already satisfied: Jinja2<4,>=2 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (3.1.4)\n", + "Requirement already satisfied: jsonschema in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (4.22.0)\n", + "Requirement already satisfied: mmh3 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (4.1.0)\n", + "Requirement already satisfied: numpy<2,>=1.22 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (1.26.4)\n", + "Requirement already satisfied: pandas<3,>=1.4.3 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (2.2.2)\n", + "Requirement already satisfied: protobuf<5.0.0,>=4.24.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (4.25.3)\n", + "Requirement already satisfied: pyarrow>=4 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (15.0.2)\n", + "Requirement already satisfied: pydantic>=2.0.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (2.7.4)\n", + "Requirement already satisfied: pygments<3,>=2.12.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (2.18.0)\n", + "Requirement already satisfied: PyYAML<7,>=5.4.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (6.0.1)\n", + "Requirement already satisfied: requests in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (2.32.3)\n", + "Requirement already satisfied: SQLAlchemy>1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from SQLAlchemy[mypy]>1->feast==0.40.1) (2.0.31)\n", + "Requirement already satisfied: tabulate<1,>=0.8.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (0.9.0)\n", + "Requirement already satisfied: tenacity<9,>=7 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (8.4.2)\n", + "Requirement already satisfied: toml<1,>=0.10.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (0.10.2)\n", + "Requirement already satisfied: tqdm<5,>=4 in 
/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (4.66.4)\n", + "Requirement already satisfied: typeguard>=4.0.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (4.3.0)\n", + "Requirement already satisfied: fastapi>=0.68.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (0.111.0)\n", + "Requirement already satisfied: uvicorn<1,>=0.14.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.30.1)\n", + "Requirement already satisfied: dask>=2024.2.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask[dataframe]>=2024.2.1->feast==0.40.1) (2024.6.2)\n", + "Requirement already satisfied: gunicorn in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from feast==0.40.1) (22.0.0)\n", + "Requirement already satisfied: cloudpickle>=1.5.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (3.0.0)\n", + "Requirement already satisfied: fsspec>=2021.09.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (2023.12.2)\n", + "Requirement already satisfied: packaging>=20.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (24.1)\n", + "Requirement already satisfied: partd>=1.2.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (1.4.2)\n", + "Requirement already satisfied: toolz>=0.10.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (0.12.1)\n", + "Requirement already satisfied: importlib-metadata>=4.13.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (8.0.0)\n", + "Requirement already satisfied: dask-expr<1.2,>=1.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from dask[dataframe]>=2024.2.1->feast==0.40.1) (1.1.6)\n", + "Requirement already satisfied: starlette<0.38.0,>=0.37.2 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (0.37.2)\n", + "Requirement already satisfied: typing-extensions>=4.8.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (4.12.2)\n", + "Requirement already satisfied: fastapi-cli>=0.0.2 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (0.0.4)\n", + "Requirement already satisfied: httpx>=0.23.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (0.27.0)\n", + "Requirement already satisfied: python-multipart>=0.0.7 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (0.0.9)\n", + "Requirement already satisfied: ujson!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,>=4.0.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (5.10.0)\n", + "Requirement already satisfied: orjson>=3.2.1 in 
/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (3.10.5)\n", + "Requirement already satisfied: email_validator>=2.0.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi>=0.68.0->feast==0.40.1) (2.2.0)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from Jinja2<4,>=2->feast==0.40.1) (2.1.5)\n", + "Requirement already satisfied: types-protobuf>=3.19.12 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from mypy-protobuf>=3.1->feast==0.40.1) (3.19.22)\n", + "Requirement already satisfied: python-dateutil>=2.8.2 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from pandas<3,>=1.4.3->feast==0.40.1) (2.9.0.post0)\n", + "Requirement already satisfied: pytz>=2020.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from pandas<3,>=1.4.3->feast==0.40.1) (2024.1)\n", + "Requirement already satisfied: tzdata>=2022.7 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from pandas<3,>=1.4.3->feast==0.40.1) (2024.1)\n", + "Requirement already satisfied: annotated-types>=0.4.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from pydantic>=2.0.0->feast==0.40.1) (0.7.0)\n", + "Requirement already satisfied: pydantic-core==2.18.4 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from pydantic>=2.0.0->feast==0.40.1) (2.18.4)\n", + "Requirement already satisfied: mypy>=0.910 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from SQLAlchemy[mypy]>1->feast==0.40.1) (1.10.1)\n", + "Requirement already satisfied: h11>=0.8 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from uvicorn<1,>=0.14.0->uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.14.0)\n", + "Requirement already satisfied: httptools>=0.5.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.6.1)\n", + "Requirement already satisfied: python-dotenv>=0.13 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (1.0.1)\n", + "Requirement already satisfied: uvloop!=0.15.0,!=0.15.1,>=0.14.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.19.0)\n", + "Requirement already satisfied: watchfiles>=0.13 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (0.22.0)\n", + "Requirement already satisfied: websockets>=10.4 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from uvicorn[standard]<1,>=0.14.0->feast==0.40.1) (12.0)\n", + "Requirement already satisfied: attrs>=22.2.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from jsonschema->feast==0.40.1) (23.2.0)\n", + "Requirement already satisfied: jsonschema-specifications>=2023.03.6 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from jsonschema->feast==0.40.1) (2023.12.1)\n", + "Requirement already satisfied: referencing>=0.28.4 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from jsonschema->feast==0.40.1) (0.35.1)\n", + "Requirement already satisfied: rpds-py>=0.7.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages 
(from jsonschema->feast==0.40.1) (0.18.1)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from requests->feast==0.40.1) (3.3.2)\n", + "Requirement already satisfied: idna<4,>=2.5 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from requests->feast==0.40.1) (3.7)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from requests->feast==0.40.1) (1.26.19)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from requests->feast==0.40.1) (2024.7.4)\n", + "Requirement already satisfied: dnspython>=2.0.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from email_validator>=2.0.0->fastapi>=0.68.0->feast==0.40.1) (2.6.1)\n", + "Requirement already satisfied: typer>=0.12.3 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from fastapi-cli>=0.0.2->fastapi>=0.68.0->feast==0.40.1) (0.12.3)\n", + "Requirement already satisfied: anyio in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from httpx>=0.23.0->fastapi>=0.68.0->feast==0.40.1) (4.4.0)\n", + "Requirement already satisfied: httpcore==1.* in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from httpx>=0.23.0->fastapi>=0.68.0->feast==0.40.1) (1.0.5)\n", + "Requirement already satisfied: sniffio in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from httpx>=0.23.0->fastapi>=0.68.0->feast==0.40.1) (1.3.1)\n", + "Requirement already satisfied: zipp>=0.5 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from importlib-metadata>=4.13.0->dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (3.19.1)\n", + "Requirement already satisfied: mypy-extensions>=1.0.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from mypy>=0.910->SQLAlchemy[mypy]>1->feast==0.40.1) (1.0.0)\n", + "Requirement already satisfied: locket in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from partd>=1.2.0->dask>=2024.2.1->dask[dataframe]>=2024.2.1->feast==0.40.1) (1.0.0)\n", + "Requirement already satisfied: six>=1.5 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from python-dateutil>=2.8.2->pandas<3,>=1.4.3->feast==0.40.1) (1.16.0)\n", + "Requirement already satisfied: shellingham>=1.3.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from typer>=0.12.3->fastapi-cli>=0.0.2->fastapi>=0.68.0->feast==0.40.1) (1.5.4)\n", + "Requirement already satisfied: rich>=10.11.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from typer>=0.12.3->fastapi-cli>=0.0.2->fastapi>=0.68.0->feast==0.40.1) (13.7.1)\n", + "Requirement already satisfied: markdown-it-py>=2.2.0 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from rich>=10.11.0->typer>=0.12.3->fastapi-cli>=0.0.2->fastapi>=0.68.0->feast==0.40.1) (3.0.0)\n", + "Requirement already satisfied: mdurl~=0.1 in /Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages (from markdown-it-py>=2.2.0->rich>=10.11.0->typer>=0.12.3->fastapi-cli>=0.0.2->fastapi>=0.68.0->feast==0.40.1) (0.1.2)\n", + "\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: 
\u001b[0m\u001b[31;49m24.1.1\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.2\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "# WE MUST ENSURE PYTHON CONSISTENCY BETWEEN NOTEBOOK AND FEAST SERVERS\n", + "# LAUNCH THIS NOTEBOOK FROM A CLEAN PYTHON ENVIRONMENT >3.9\n", + "%pip install feast==0.40.1" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Run a test client\n", + "\n", + "> πŸš€ This test is designed to work only with the default feature store generated by `feast init`. \n", + "> \n", + "> To test a custom feature store, you need to run a custom test application, still using the same client configuration that we've prepared." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Apply the feature store definitions" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The feature store cannot be initialized through the remote services. \n", + "\n", + "We'll use the original `feature_store.yaml` from within a Kubernetes `Job` to run `feast apply`.\n", + "\n", + "For the same reason, we also run an initial materialization from the `Job`; otherwise it would fail because of unimplemented APIs in the remote servers, such as [online_write_batch](https://github.com/feast-dev/feast/blob/4a6b663f80bc91d6de35ed2ec428d34811d17a18/sdk/python/feast/infra/online_stores/remote.py#L50).\n", + "\n", + "First we create a `ConfigMap` holding the required code and configuration." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "env: FEATURE_REPO_DIR=sample/feature_repo\n", + "Error from server (NotFound): configmaps \"sample-repo\" not found\n", + "configmap/sample-repo created\n", + "\n", + "Inspect keys of sample-repo ConfigMap\n", + "example_repo.py\n", + "feature_store.yaml\n" + ] + } + ], + "source": [ + "%env FEATURE_REPO_DIR=sample/feature_repo\n", + "!kubectl delete configmap sample-repo\n", + "!kubectl create configmap sample-repo --from-file=${FEATURE_REPO_DIR}/example_repo.py,${FEATURE_REPO_DIR}/feature_store.yaml\n", + "!echo\n", + "!echo \"Inspect keys of sample-repo ConfigMap\"\n", + "!kubectl get configmaps sample-repo -oyaml | yq '.data[] | key'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Then we create the `Job` to apply the definitions, according to the [init-job.yaml](./init-job.yaml) manifest." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Error from server (NotFound): error when deleting \"init-job.yaml\": jobs.batch \"feast-apply-job\" not found\n", + "job.batch/feast-apply-job created\n" + ] + } + ], + "source": [ + "!kubectl delete -f init-job.yaml\n", + "!kubectl apply -f init-job.yaml" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Monitor the logs of the `Job`.",
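+ "\n",
+ "As an alternative to streaming the logs, you can block until the `Job` completes (a sketch; the 5-minute timeout is an arbitrary assumption):\n",
+ "\n",
+ "```bash\n",
+ "kubectl wait --for=condition=complete job/feast-apply-job --timeout=5m\n",
+ "```"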
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "pod/feast-apply-job-tzscd condition met\n", + "Starting feast initialization job...\n", + ": MADV_DONTNEED does not work (memset will be used instead)\n", + ": (This is the expected behaviour if you are running under QEMU)\n", + "09/17/2024 11:18:10 AM feast.repo_config WARNING: The `path` of the `RegistryConfig` starts with a plain `postgresql` string. We are updating this to `postgresql+psycopg` to ensure that the `psycopg3` driver is used by `sqlalchemy`. If you want to use `psycopg2` pass `postgresql+psycopg2` explicitely to `path`. To silence this warning, pass `postgresql+psycopg` explicitely to `path`.\n", + "/usr/local/lib/python3.11/site-packages/feast/feature_store.py:590: RuntimeWarning: On demand feature view is an experimental feature. This API is stable, but the functionality does not scale well for offline retrieval\n", + " warnings.warn(\n", + "Deploying infrastructure for driver_hourly_stats_fresh\n", + "Deploying infrastructure for driver_hourly_stats\n", + ": MADV_DONTNEED does not work (memset will be used instead)\n", + ": (This is the expected behaviour if you are running under QEMU)\n", + "09/17/2024 11:18:21 AM feast.repo_config WARNING: The `path` of the `RegistryConfig` starts with a plain `postgresql` string. We are updating this to `postgresql+psycopg` to ensure that the `psycopg3` driver is used by `sqlalchemy`. If you want to use `psycopg2` pass `postgresql+psycopg2` explicitely to `path`. To silence this warning, pass `postgresql+psycopg` explicitely to `path`.\n", + "09/17/2024 11:18:21 AM root WARNING: _list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. _list_feature_views will behave like _list_all_feature_views in the future.\n", + "Materializing \u001b[1m\u001b[32m2\u001b[0m feature views to \u001b[1m\u001b[32m2024-09-17 11:18:11+00:00\u001b[0m into the \u001b[1m\u001b[32mpostgres\u001b[0m online store.\n", + "\n", + "\u001b[1m\u001b[32mdriver_hourly_stats_fresh\u001b[0m from \u001b[1m\u001b[32m2024-09-16 11:18:21+00:00\u001b[0m to \u001b[1m\u001b[32m2024-09-17 11:18:11+00:00\u001b[0m:\n", + "100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 5/5 [00:00<00:00, 72.23it/s]\n", + "\u001b[1m\u001b[32mdriver_hourly_stats\u001b[0m from \u001b[1m\u001b[32m2024-09-16 11:18:22+00:00\u001b[0m to \u001b[1m\u001b[32m2024-09-17 11:18:11+00:00\u001b[0m:\n", + "100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 5/5 [00:00<00:00, 654.75it/s]\n", + "Feast initialization completed successfully.\n" + ] + } + ], + "source": [ + "!INIT_JOB_POD=$(kubectl get pods -l job-name=feast-apply-job -oname) && kubectl wait --for=condition=podscheduled $INIT_JOB_POD --timeout=2m\n", + "!INIT_JOB_POD=$(kubectl get pods -l job-name=feast-apply-job -oname) && kubectl logs -f $INIT_JOB_POD\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Forwarding the feast service ports" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To run the test client from the notebook, we need to forward the service ports to ports on the current host." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\n", + "offline-server ClusterIP 10.96.24.216 80/TCP 3m58s\n", + "online-server ClusterIP 10.96.36.113 80/TCP 3m36s\n", + "postgres NodePort 10.96.231.4 5432:30565/TCP 7m28s\n", + "registry-server ClusterIP 10.96.128.48 80/TCP 4m25s\n" + ] + } + ], + "source": [ + "!kubectl get svc" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Port-forwarding registry-server with process ID: 15094\n", + "Port-forwarding offline-server with process ID: 15095\n", + "Port-forwarding online-server with process ID: 15096\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Forwarding from 127.0.0.1:8002 -> 8815\n", + "Forwarding from 127.0.0.1:8003 -> 6566\n", + "Forwarding from 127.0.0.1:8001 -> 6570\n", + "Forwarding from [::1]:8002 -> 8815\n", + "Forwarding from [::1]:8003 -> 6566\n", + "Forwarding from [::1]:8001 -> 6570\n" + ] + } + ], + "source": [ + "from src.utils import port_forward\n", + "registry_process = port_forward(\"registry-server\", 8001)\n", + "offline_process = port_forward(\"offline-server\", 8002)\n", + "online_process = port_forward(\"online-server\", 8003)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " 501 15094 13456 0 1:18PM ?? 0:00.06 kubectl port-forward service/registry-server 8001:80\n", + " 501 15095 13456 0 1:18PM ?? 0:00.05 kubectl port-forward service/offline-server 8002:80\n", + " 501 15096 13456 0 1:18PM ?? 0:00.06 kubectl port-forward service/online-server 8003:80\n", + " 501 15170 13456 0 1:18PM ttys051 0:00.14 /bin/zsh -c ps -ef | grep port-forward\n", + " 501 15173 15170 0 1:18PM ttys051 0:00.00 grep port-forward\n" + ] + } + ], + "source": [ + "!ps -ef | grep port-forward" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Client configuration\n", + "The client configuration uses only remote clients, connected to the forwarded ports 8001 through 8003." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "project: sample\n", + "registry:\n", + " path: localhost:8001\n", + " registry_type: remote\n", + "offline_store:\n", + " host: localhost\n", + " port: 8002\n", + " type: remote\n", + "online_store:\n", + " path: http://localhost:8003\n", + " type: remote\n", + "entity_key_serialization_version: 2\n", + "auth:\n", + " type: no_auth\n" + ] + } + ], + "source": [ + "!cat client/feature_store.yaml" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Install test code\n", + "First we copy the test code from `sample/feature_repo` to the `client` folder." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "client/__init__.py client/test_workflow.py\n" + ] + } + ], + "source": [ + "!cp sample/feature_repo/test_workflow.py client\n", + "!ls client/*.py" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We update the original test to comment out the `apply`, `teardown`, and `materialize-incremental` commands."
+ ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "12,13c12,13\n", + "< # print(\"\\n--- Run feast apply to setup feature store on Postgres ---\")\n", + "< # subprocess.run([\"feast\", \"apply\"])\n", + "---\n", + "> print(\"\\n--- Run feast apply to setup feature store on Postgres ---\")\n", + "> subprocess.run([\"feast\", \"apply\"])\n", + "21,22c21,22\n", + "< # print(\"\\n--- Load features into online store ---\")\n", + "< # store.materialize_incremental(end_date=datetime.now())\n", + "---\n", + "> print(\"\\n--- Load features into online store ---\")\n", + "> store.materialize_incremental(end_date=datetime.now())\n", + "56,57c56,57\n", + "< # print(\"\\n--- Run feast teardown ---\")\n", + "< # subprocess.run([\"feast\", \"teardown\"])\n", + "---\n", + "> print(\"\\n--- Run feast teardown ---\")\n", + "> subprocess.run([\"feast\", \"teardown\"])\n" + ] + } + ], + "source": [ + "!sed -i.bk 's/subprocess.run/# subprocess.run/' client/test_workflow.py\n", + "!sed -i.bk 's/print(\"\\\\n--- Run feast/# print(\"\\\\n--- Run feast/' client/test_workflow.py\n", + "!sed -i.bk 's/store.materialize_incremental/# store.materialize_incremental/' client/test_workflow.py\n", + "!sed -i.bk 's/print(\"\\\\n--- Load features/# print(\"\\\\n--- Load features/' client/test_workflow.py\n", + "!diff client/test_workflow.py sample/feature_repo/test_workflow.py" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally, we run the full test suite from the client folder." + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Handling connection for 8001\n", + "\n", + "--- Historical features for training ---\n", + "WARNING:root:_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. _list_feature_views will behave like _list_all_feature_views in the future.\n", + "Handling connection for 8002\n", + " driver_id event_timestamp ... conv_rate_plus_val1 conv_rate_plus_val2\n", + "0 1001 2021-04-12 10:59:42 ... 1.302426 10.302426\n", + "1 1002 2021-04-12 08:12:10 ... 2.436384 20.436384\n", + "2 1003 2021-04-12 16:40:26 ... 3.954102 30.954102\n", + "\n", + "[3 rows x 10 columns]\n", + "\n", + "--- Historical features for batch scoring ---\n", + "WARNING:root:_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. _list_feature_views will behave like _list_all_feature_views in the future.\n", + "Handling connection for 8002\n", + " driver_id ... conv_rate_plus_val2\n", + "0 1001 ... 10.798974\n", + "1 1002 ... 20.316096\n", + "2 1003 ... 30.202964\n", + "\n", + "[3 rows x 10 columns]\n", + "\n", + "--- Online features ---\n", + "WARNING:root:_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. _list_feature_views will behave like _list_all_feature_views in the future.\n", + "Handling connection for 8003\n", + "acc_rate : [0.22748562693595886, 0.9316393733024597]\n", + "conv_rate_plus_val1 : [1000.7989742159843, 1001.3160955905914]Handling connection for 8003\n", + "\n", + "conv_rate_plus_val2 : [2000.7989742159843, 2002.3160955905914]\n", + "driver_id : [1001, 1002]\n", + "\n", + "--- Online features retrieved (instead) through a feature service---\n", + "WARNING:root:_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. 
_list_feature_views will behave like _list_all_feature_views in the future.\n", + "Handling connection for 8003\n", + "conv_rate : [0.7989742159843445, 0.31609559059143066]\n", + "conv_rate_plus_val1 : [1000.7989742159843, 1001.3160955905914]\n", + "conv_rate_plus_val2 : [2000.7989742159843, 2002.3160955905914]\n", + "driver_id : [1001, 1002]\n", + "\n", + "--- Online features retrieved (using feature service v3, which uses a feature view with a push source---\n", + "WARNING:root:_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. _list_feature_views will behave like _list_all_feature_views in the future.\n", + "acc_rate : [0.22748562693595886, 0.9316393733024597]\n", + "avg_daily_trips : [451, 417]\n", + "conv_rate : [0.7989742159843445, 0.31609559059143066]\n", + "conv_rate_plus_val1 : [1000.7989742159843, 1001.3160955905914]\n", + "conv_rate_plus_val2 : [2000.7989742159843, 2002.3160955905914]\n", + "driver_id : [1001, 1002]\n", + "\n", + "--- Simulate a stream event ingestion of the hourly stats df ---\n", + " driver_id event_timestamp ... acc_rate avg_daily_trips\n", + "0 1001 2024-09-17 13:19:54.105733 ... 1.0 1000\n", + "\n", + "[1 rows x 6 columns]\n", + "WARNING:root:list_feature_views will make breaking changes. Please use list_batch_feature_views instead. list_feature_views will behave like list_all_feature_views in the future.\n", + "WARNING:root:_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. _list_feature_views will behave like _list_all_feature_views in the future.\n", + "Traceback (most recent call last):\n", + " File \"/Users/dmartino/projects/AI/feast/feast/examples/kind-quickstart/client/test_workflow.py\", line 130, in \n", + " run_demo()\n", + " File \"/Users/dmartino/projects/AI/feast/feast/examples/kind-quickstart/client/test_workflow.py\", line 51, in run_demo\n", + " store.push(\"driver_stats_push_source\", event_df, to=PushMode.ONLINE_AND_OFFLINE)\n", + " File \"/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages/feast/feature_store.py\", line 1423, in push\n", + " self.write_to_online_store(\n", + " File \"/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages/feast/feature_store.py\", line 1449, in write_to_online_store\n", + " feature_view: FeatureView = self.get_stream_feature_view(\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages/feast/feature_store.py\", line 504, in get_stream_feature_view\n", + " return self._get_stream_feature_view(\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages/feast/feature_store.py\", line 514, in _get_stream_feature_view\n", + " stream_feature_view = self._registry.get_stream_feature_view(\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages/feast/infra/registry/remote.py\", line 209, in get_stream_feature_view\n", + " response = self.stub.GetStreamFeatureView(request)\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages/grpc/_channel.py\", line 1181, in __call__\n", + " return _end_unary_response_blocking(state, call, False, None)\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/Users/dmartino/miniconda3/envs/feast3.11/lib/python3.11/site-packages/grpc/_channel.py\", line 
1006, in _end_unary_response_blocking\n", + " raise _InactiveRpcError(state) # pytype: disable=not-instantiable\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + "grpc._channel._InactiveRpcError: <_InactiveRpcError of RPC that terminated with:\n", + "\tstatus = StatusCode.UNKNOWN\n", + "\tdetails = \"Exception calling application: Feature view driver_hourly_stats_fresh does not exist in project sample\"\n", + "\tdebug_error_string = \"UNKNOWN:Error received from peer {grpc_message:\"Exception calling application: Feature view driver_hourly_stats_fresh does not exist in project sample\", grpc_status:2, created_time:\"2024-09-17T13:19:54.127834+02:00\"}\"\n", + ">\n" + ] + } + ], + "source": [ + "!cd client && python test_workflow.py" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note: if you see the following error, it is likely due to [issue #4392](https://github.com/feast-dev/feast/issues/4392),\n", + "*Remote registry client does not map application errors*:\n", + "\n", + "```\n", + "Feature view driver_hourly_stats_fresh does not exist in project sample\n", + "```" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Terminate port forwarding" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " 501 16434 13456 0 1:20PM ttys051 0:00.12 /bin/zsh -c ps -ef | grep port-forward\n", + " 501 16436 16434 0 1:20PM ttys051 0:00.00 grep port-forward\n" + ] + } + ], + "source": [ + "registry_process.terminate()\n", + "offline_process.terminate()\n", + "online_process.terminate()\n", + "!ps -ef | grep port-forward" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "feast3.11", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/examples/kind-quickstart/03-Uninstall.ipynb b/examples/kind-quickstart/03-Uninstall.ipynb new file mode 100644 index 0000000000..20874fc1b7 --- /dev/null +++ b/examples/kind-quickstart/03-Uninstall.ipynb @@ -0,0 +1,120 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Uninstall deployment\n", + "Use Helm to uninstall all the previous deployments." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "release \"feast-online\" uninstalled\n", + "release \"feast-offline\" uninstalled\n", + "release \"feast-registry\" uninstalled\n", + "NAME\tNAMESPACE\tREVISION\tUPDATED\tSTATUS\tCHART\tAPP VERSION\n" + ] + } + ], + "source": [ + "!helm uninstall feast-online\n", + "!helm uninstall feast-offline\n", + "!helm uninstall feast-registry\n", + "!helm list" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Delete the PostgreSQL deployment."
+ ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "secret \"postgres-secret\" deleted\n", + "persistentvolume \"postgres-volume\" deleted\n", + "persistentvolumeclaim \"postgres-volume-claim\" deleted\n", + "deployment.apps \"postgres\" deleted\n", + "service \"postgres\" deleted\n" + ] + } + ], + "source": [ + "!kubectl delete -f postgres/postgres.yaml" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "No resources found in feast namespace.\n", + "No resources found in feast namespace.\n", + "NAME READY STATUS RESTARTS AGE\n", + "feast-apply-job-tzscd 0/1 Completed 0 2m40s\n" + ] + } + ], + "source": [ + "!kubectl get svc\n", + "!kubectl get deployments\n", + "!kubectl get pods" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "feast3.11", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/examples/kind-quickstart/README.md b/examples/kind-quickstart/README.md new file mode 100644 index 0000000000..25ecfc8ecf --- /dev/null +++ b/examples/kind-quickstart/README.md @@ -0,0 +1,16 @@ +# Install and run Feast with Kind + +The following notebooks will guide you through an end-to-end journey to install and validate a simple Feast feature store in a +Kind Kubernetes cluster: +* [01-Install.ipynb](./01-Install.ipynb): Install and configure the cluster, then the Feast components. +* [02-Client.ipynb](./02-Client.ipynb): Validate the feature store with a remote test application running from the notebook. +* [03-Uninstall.ipynb](./03-Uninstall.ipynb): Clean up the installed deployments.
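+
+A minimal sketch of the client-side tooling these notebooks assume (the install commands are illustrative, not version-pinned):
+
+```bash
+# CLI tools used by the notebooks, plus Jupyter to run them
+brew install kind kubectl helm yq   # or your platform's package manager
+pip install jupyterlab
+jupyter lab                         # then open 01-Install.ipynb
+```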
diff --git a/sdk/python/feast/infra/online_stores/contrib/rockset_online_store/__init__.py b/examples/kind-quickstart/client/__init__.py similarity index 100% rename from sdk/python/feast/infra/online_stores/contrib/rockset_online_store/__init__.py rename to examples/kind-quickstart/client/__init__.py diff --git a/examples/kind-quickstart/client/feature_store.yaml b/examples/kind-quickstart/client/feature_store.yaml new file mode 100644 index 0000000000..62acd3ead6 --- /dev/null +++ b/examples/kind-quickstart/client/feature_store.yaml @@ -0,0 +1,14 @@ +project: sample +registry: + path: localhost:8001 + registry_type: remote +offline_store: + host: localhost + port: 8002 + type: remote +online_store: + path: http://localhost:8003 + type: remote +entity_key_serialization_version: 2 +auth: + type: no_auth diff --git a/examples/kind-quickstart/init-job.yaml b/examples/kind-quickstart/init-job.yaml new file mode 100644 index 0000000000..68df35af73 --- /dev/null +++ b/examples/kind-quickstart/init-job.yaml @@ -0,0 +1,31 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: feast-apply-job +spec: + template: + spec: + containers: + - name: feast-apply + image: feastdev/feature-server:0.40.1 + command: ["/bin/sh", "-c"] + args: + - | + echo "Starting feast initialization job..."; + mkdir /tmp/sample; + cd /tmp/sample; + cp /sample/* .; + sed -i 's/localhost/postgres/' feature_store.yaml; + feast apply; + CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S"); + feast materialize-incremental $CURRENT_TIME; + echo "Feast initialization completed successfully."; + volumeMounts: + - name: sample-repo-files + mountPath: /sample + restartPolicy: Never + volumes: + - name: sample-repo-files + configMap: + name: sample-repo + backoffLimit: 1 diff --git a/examples/kind-quickstart/postgres/postgres.yaml b/examples/kind-quickstart/postgres/postgres.yaml new file mode 100644 index 0000000000..c89a01f0f4 --- /dev/null +++ b/examples/kind-quickstart/postgres/postgres.yaml @@ -0,0 +1,83 @@ +#https://www.digitalocean.com/community/tutorials/how-to-deploy-postgres-to-kubernetes-cluster +apiVersion: v1 +kind: Secret +metadata: + name: postgres-secret + labels: + app: postgres +stringData: + POSTGRES_DB: feast + POSTGRES_USER: feast + POSTGRES_PASSWORD: feast +--- +apiVersion: v1 +kind: PersistentVolume +metadata: + name: postgres-volume + labels: + type: local + app: postgres +spec: + capacity: + storage: 1Gi + accessModes: + - ReadWriteOnce + hostPath: + path: /data/postgresql +--- +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: postgres-volume-claim + labels: + app: postgres +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 1Gi +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: postgres +spec: + replicas: 1 + selector: + matchLabels: + app: postgres + template: + metadata: + labels: + app: postgres + spec: + containers: + - name: postgres + image: 'postgres:15-alpine' + imagePullPolicy: IfNotPresent + ports: + - containerPort: 5432 + envFrom: + - secretRef: + name: postgres-secret + volumeMounts: + - mountPath: /var/lib/postgresql/data + name: postgresdata + volumes: + - name: postgresdata + persistentVolumeClaim: + claimName: postgres-volume-claim +--- +apiVersion: v1 +kind: Service +metadata: + name: postgres + labels: + app: postgres +spec: + type: NodePort + ports: + - port: 5432 + selector: + app: postgres \ No newline at end of file diff --git a/sdk/python/feast/templates/rockset/__init__.py b/examples/kind-quickstart/src/__init__.py similarity index 100% rename 
from sdk/python/feast/templates/rockset/__init__.py rename to examples/kind-quickstart/src/__init__.py diff --git a/examples/kind-quickstart/src/utils.py b/examples/kind-quickstart/src/utils.py new file mode 100644 index 0000000000..ea549d7ed8 --- /dev/null +++ b/examples/kind-quickstart/src/utils.py @@ -0,0 +1,12 @@ +import subprocess + +def port_forward(service, external_port, local_port=80): + """ + Run a background process to forward port `local_port` of the given `service` to the local `external_port`. + + Returns: the process instance + """ + command = ["kubectl", "port-forward", f"service/{service}", f"{external_port}:{local_port}"] + process = subprocess.Popen(command) + print(f"Port-forwarding {service} with process ID: {process.pid}") + return process diff --git a/examples/podman_local/README.md b/examples/podman_local/README.md new file mode 100644 index 0000000000..f5b6ad40d4 --- /dev/null +++ b/examples/podman_local/README.md @@ -0,0 +1,72 @@ + +# Feast example using Podman and Podman Compose + +This guide explains how to deploy Feast remote server components using Podman Compose locally and run an example using the client. + +## Prerequisites + +1. **Podman**: [Podman installation guide](https://podman.io/). +2. **Podman Compose**: [Podman Compose installation guide](https://github.com/containers/podman-compose/tree/main?tab=readme-ov-file#installation). +3. **Python 3.9+ environment** +4. **Feast CLI** + +## Setup + +### 1. **Feast Project Setup** + +- The project [feature_repo](feature_repo) was already created using the `feast init` command. + +### 2. **Run the Podman Compose File** + +- Use the [docker-compose.yml](docker-compose.yml) file to install and run the Feast feature servers (online, offline, and registry) on Podman. The compose file uses the `feastdev/feature-server:latest` image. Each service has its own port mapping and mounts the `./feature_repo` directory as a volume. +- To start the feature servers, run the following command: + + ```bash + podman-compose up -d + ``` + +- This will launch the necessary containers for online, offline, and registry feature servers. + +### 3. **Verify the Installation** + +- Use the `podman ps` command to verify that the containers are running: + + ```bash + podman ps + ``` + + Example output: + + ``` + CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES + 61442d6d6ef3 docker.io/feastdev/feature-server:latest feast -c /feature... 2 minutes ago Up 2 minutes 0.0.0.0:6566->6566/tcp online-feature-server + 1274c21716a6 docker.io/feastdev/feature-server:latest feast -c /feature... 2 minutes ago Up 2 minutes 0.0.0.0:8815->8815/tcp offline-feature-server + 4e38ca8c39db docker.io/feastdev/feature-server:latest feast -c /feature... 2 minutes ago Up 2 minutes 0.0.0.0:6570->6570/tcp registry-feature-server + ``` + +- Alternatively, you can verify the running containers through **Podman Desktop**: + ![podman.png](podman.png) + +### 4. **Run Feast Apply** + +- To apply the feature store definitions to the remote registry, run the following command: + + ```bash + podman exec registry-feature-server feast -c /feature_repo apply + ``` + +### 5. **Run Client Examples** + +- The [client](client) folder contains example client-side configurations and code: + - [feature_store.yaml](client/feature_repo/feature_store.yaml): Configuration for the feature store. + - [test.py](client/feature_repo/test.py): Example Python script to interact with the Feast server. + +### 6. 
**Cleanup** + +- To stop and remove the running containers, run the following command: + + ```bash + podman-compose down + ``` + +- This will stop all the feature server containers and clean up the environment. diff --git a/examples/podman_local/__init__.py b/examples/podman_local/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/examples/podman_local/client/feature_repo/feature_store.yaml b/examples/podman_local/client/feature_repo/feature_store.yaml new file mode 100644 index 0000000000..d4ad1ccb6f --- /dev/null +++ b/examples/podman_local/client/feature_repo/feature_store.yaml @@ -0,0 +1,12 @@ +project: my_project +registry: + registry_type: remote + path: localhost:6570 +offline_store: + type: remote + host: localhost + port: 8815 +online_store: + type: remote + path: http://localhost:6566 + diff --git a/examples/podman_local/client/feature_repo/test.py b/examples/podman_local/client/feature_repo/test.py new file mode 100644 index 0000000000..13ab2444aa --- /dev/null +++ b/examples/podman_local/client/feature_repo/test.py @@ -0,0 +1,123 @@ +import subprocess +from datetime import datetime +import pandas as pd +from feast import FeatureStore +from feast.data_source import PushMode + +def run_demo(): + try: + store = FeatureStore(repo_path=".") + + print("\n--- Historical features for training ---") + fetch_historical_features_entity_df(store, for_batch_scoring=False) + + print("\n--- Historical features for batch scoring ---") + fetch_historical_features_entity_df(store, for_batch_scoring=True) + + print("\n--- Load features into online store ---") + store.materialize_incremental(end_date=datetime.now()) + + print("\n--- Online features ---") + fetch_online_features(store) + + print("\n--- Online features retrieved (instead) through a feature service---") + fetch_online_features(store, source="feature_service") + + print( + "\n--- Online features retrieved (using feature service v3, which uses a feature view with a push source---" + ) + fetch_online_features(store, source="push") + + print("\n--- Simulate a stream event ingestion of the hourly stats df ---") + event_df = pd.DataFrame.from_dict( + { + "driver_id": [1001], + "event_timestamp": [ + datetime.now(), + ], + "created": [ + datetime.now(), + ], + "conv_rate": [1.0], + "acc_rate": [1.0], + "avg_daily_trips": [1000], + } + ) + print(event_df) + store.push("driver_stats_push_source", event_df, to=PushMode.ONLINE_AND_OFFLINE) + + print("\n--- Online features again with updated values from a stream push---") + fetch_online_features(store, source="push") + except Exception as e: + print(f"An error occurred in run_demo: {e}") + +def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: bool): + try: + entity_df = pd.DataFrame.from_dict( + { + "driver_id": [1001, 1002, 1003], + "event_timestamp": [ + datetime(2021, 4, 12, 10, 59, 42), + datetime(2021, 4, 12, 8, 12, 10), + datetime(2021, 4, 12, 16, 40, 26), + ], + "label_driver_reported_satisfaction": [1, 5, 3], + "val_to_add": [1, 2, 3], + "val_to_add_2": [10, 20, 30], + } + ) + if for_batch_scoring: + entity_df["event_timestamp"] = pd.to_datetime("now", utc=True) + + training_df = store.get_historical_features( + entity_df=entity_df, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ], + ).to_df() + print(training_df.head()) + except Exception as e: + print(f"An error 
occurred in fetch_historical_features_entity_df: {e}") + +def fetch_online_features(store, source: str = ""): + try: + entity_rows = [ + { + "driver_id": 1001, + "val_to_add": 1000, + "val_to_add_2": 2000, + }, + { + "driver_id": 1002, + "val_to_add": 1001, + "val_to_add_2": 2002, + }, + ] + if source == "feature_service": + features_to_fetch = store.get_feature_service("driver_activity_v1") + elif source == "push": + features_to_fetch = store.get_feature_service("driver_activity_v3") + else: + features_to_fetch = [ + "driver_hourly_stats:acc_rate", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ] + returned_features = store.get_online_features( + features=features_to_fetch, + entity_rows=entity_rows, + ).to_dict() + for key, value in sorted(returned_features.items()): + print(key, " : ", value) + except Exception as e: + print(f"An error occurred in fetch_online_features: {e}") + +if __name__ == "__main__": + try: + run_demo() + except Exception as e: + print(f"An error occurred in the main block: {e}") diff --git a/examples/podman_local/docker-compose.yml b/examples/podman_local/docker-compose.yml new file mode 100644 index 0000000000..5bc1ae546a --- /dev/null +++ b/examples/podman_local/docker-compose.yml @@ -0,0 +1,33 @@ +version: '3.9' + +x-defaults: &default-settings + image: feastdev/feature-server:latest + restart: unless-stopped + +services: + online-feature-server: + <<: *default-settings + container_name: online-feature-server + command: feast -c /feature_repo serve -h 0.0.0.0 + ports: + - "6566:6566" + volumes: + - ./feature_repo:/feature_repo + + offline-feature-server: + <<: *default-settings + container_name: offline-feature-server + command: feast -c /feature_repo serve_offline -h 0.0.0.0 + ports: + - "8815:8815" + volumes: + - ./feature_repo:/feature_repo + + registry-feature-server: + <<: *default-settings + container_name: registry-feature-server + command: feast -c /feature_repo serve_registry + ports: + - "6570:6570" + volumes: + - ./feature_repo:/feature_repo diff --git a/examples/podman_local/feature_repo/__init__.py b/examples/podman_local/feature_repo/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/examples/podman_local/feature_repo/data/driver_stats.parquet b/examples/podman_local/feature_repo/data/driver_stats.parquet new file mode 100644 index 0000000000..7ea02b9a9f Binary files /dev/null and b/examples/podman_local/feature_repo/data/driver_stats.parquet differ diff --git a/examples/podman_local/feature_repo/example_repo.py b/examples/podman_local/feature_repo/example_repo.py new file mode 100644 index 0000000000..60ddd49f9c --- /dev/null +++ b/examples/podman_local/feature_repo/example_repo.py @@ -0,0 +1,144 @@ +# This is an example feature definition file + +from datetime import timedelta + +import pandas as pd + +from feast import ( + Entity, + FeatureService, + FeatureView, + Field, + FileSource, + PushSource, + RequestSource, +) +from feast.feature_logging import LoggingConfig +from feast.infra.offline_stores.file_source import FileLoggingDestination +from feast.on_demand_feature_view import on_demand_feature_view +from feast.types import Float32, Float64, Int64 + +# Define an entity for the driver. You can think of an entity as a primary key used to +# fetch features. +driver = Entity(name="driver", join_keys=["driver_id"]) + +# Read data from parquet files. Parquet is convenient for local development mode. For +# production, you can use your favorite DWH, such as BigQuery. 
See Feast documentation +# for more info. +driver_stats_source = FileSource( + name="driver_hourly_stats_source", + path="/feature_repo/data/driver_stats.parquet", + timestamp_field="event_timestamp", + created_timestamp_column="created", +) + +# Our parquet files contain sample data that includes a driver_id column, timestamps, and +# three feature columns. Here we define a Feature View that will allow us to serve this +# data to our model online. +driver_stats_fv = FeatureView( + # The unique name of this feature view. Two feature views in a single + # project cannot have the same name + name="driver_hourly_stats", + entities=[driver], + ttl=timedelta(days=1), + # The list of fields defined below acts as a schema: it both defines the features + # to materialize into a store and serves as references during retrieval, + # for building a training dataset or serving features + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64, description="Average daily trips"), + ], + online=True, + source=driver_stats_source, + # Tags are user-defined key/value pairs that are attached to each + # feature view + tags={"team": "driver_performance"}, +) + +# Define a request data source which encodes features / information only +# available at request time (e.g. part of the user-initiated HTTP request) +input_request = RequestSource( + name="vals_to_add", + schema=[ + Field(name="val_to_add", dtype=Int64), + Field(name="val_to_add_2", dtype=Int64), + ], +) + + +# Define an on demand feature view which can generate new features based on +# existing feature views and RequestSource features +@on_demand_feature_view( + sources=[driver_stats_fv, input_request], + schema=[ + Field(name="conv_rate_plus_val1", dtype=Float64), + Field(name="conv_rate_plus_val2", dtype=Float64), + ], +) +def transformed_conv_rate(inputs: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["conv_rate_plus_val1"] = inputs["conv_rate"] + inputs["val_to_add"] + df["conv_rate_plus_val2"] = inputs["conv_rate"] + inputs["val_to_add_2"] + return df + + +# This groups features into a model version +driver_activity_v1 = FeatureService( + name="driver_activity_v1", + features=[ + driver_stats_fv[["conv_rate"]], # Sub-selects a feature from a feature view + transformed_conv_rate, # Selects all features from the feature view + ], + logging_config=LoggingConfig( + destination=FileLoggingDestination(path="/feature_repo/data") + ), +) +driver_activity_v2 = FeatureService( + name="driver_activity_v2", features=[driver_stats_fv, transformed_conv_rate] +) + +# Defines a way to push data (to be available offline, online or both) into Feast. +driver_stats_push_source = PushSource( + name="driver_stats_push_source", + batch_source=driver_stats_source, +) + +# Defines a slightly modified version of the feature view from above, where the source +# has been changed to the push source. This allows fresh features to be directly pushed +# to the online store for this feature view. 
+driver_stats_fresh_fv = FeatureView( + name="driver_hourly_stats_fresh", + entities=[driver], + ttl=timedelta(days=1), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + online=True, + source=driver_stats_push_source, # Changed from above + tags={"team": "driver_performance"}, +) + + +# Define an on demand feature view which can generate new features based on +# existing feature views and RequestSource features +@on_demand_feature_view( + sources=[driver_stats_fresh_fv, input_request], # relies on fresh version of FV + schema=[ + Field(name="conv_rate_plus_val1", dtype=Float64), + Field(name="conv_rate_plus_val2", dtype=Float64), + ], +) +def transformed_conv_rate_fresh(inputs: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["conv_rate_plus_val1"] = inputs["conv_rate"] + inputs["val_to_add"] + df["conv_rate_plus_val2"] = inputs["conv_rate"] + inputs["val_to_add_2"] + return df + + +driver_activity_v3 = FeatureService( + name="driver_activity_v3", + features=[driver_stats_fresh_fv, transformed_conv_rate_fresh], +) diff --git a/examples/podman_local/feature_repo/feature_store.yaml b/examples/podman_local/feature_repo/feature_store.yaml new file mode 100644 index 0000000000..3e6a360316 --- /dev/null +++ b/examples/podman_local/feature_repo/feature_store.yaml @@ -0,0 +1,9 @@ +project: my_project +# By default, the registry is a file (but can be turned into a more scalable SQL-backed registry) +registry: data/registry.db +# The provider primarily specifies default offline / online stores & storing the registry in a given cloud +provider: local +online_store: + type: sqlite + path: data/online_store.db +entity_key_serialization_version: 2 diff --git a/examples/podman_local/podman.png b/examples/podman_local/podman.png new file mode 100644 index 0000000000..9aeb11f7f7 Binary files /dev/null and b/examples/podman_local/podman.png differ diff --git a/examples/rbac-local/01.1-start-keycloak.ipynb b/examples/rbac-local/01.1-start-keycloak.ipynb new file mode 100644 index 0000000000..f73e699833 --- /dev/null +++ b/examples/rbac-local/01.1-start-keycloak.ipynb @@ -0,0 +1,94 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "e46a65b1-7cf0-4cc2-8aca-529d659630a4", + "metadata": {}, + "source": [ + "# Start Keycloak server" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "374e8693-7e47-4985-b7f6-a9b818b0b4d0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Updating the configuration and installing your custom providers, if any. 
Please wait.\n", + "2024-09-09 06:37:54,515 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index org.springframework.core.io.Resource: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,518 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index org.springframework.core.io.DefaultResourceLoader: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,519 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index org.springframework.core.io.ResourceLoader: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,525 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index org.apache.tools.ant.Task: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,568 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index org.apache.activemq.artemis.core.journal.RecordInfo: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,568 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index org.apache.activemq.artemis.core.journal.Journal: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,569 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index io.mashona.logwriting.ArrayStore: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,573 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index jakarta.jms.XAConnection: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,574 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index jakarta.jms.XASession: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,574 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index jakarta.jms.XAConnectionFactory: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:54,657 WARN [io.qua.dep.ind.IndexWrapper] (build-5) Failed to index jakarta.jms.Connection: Class does not exist in ClassLoader QuarkusClassLoader:Deployment Class Loader: PROD for keycloak@6d91790b\n", + "2024-09-09 06:37:58,410 INFO [io.qua.dep.QuarkusAugmentor] (main) Quarkus augmentation completed in 7235ms\n", + "2024-09-09 06:37:59,697 INFO [org.keycloak.quarkus.runtime.hostname.DefaultHostnameProvider] (main) Hostname settings: Base URL: , Hostname: , Strict HTTPS: false, Path: , Strict BackChannel: false, Admin URL: , Admin: , Port: -1, Proxied: false\n", + "2024-09-09 06:37:59,903 WARN [org.infinispan.CONFIG] (keycloak-cache-init) ISPN000569: Unable to persist Infinispan internal caches as no global state enabled\n", + "2024-09-09 06:37:59,949 INFO [org.infinispan.CONTAINER] (keycloak-cache-init) ISPN000556: Starting user marshaller 'org.infinispan.jboss.marshalling.core.JBossUserMarshaller'\n", + "2024-09-09 06:38:01,394 WARN [io.quarkus.agroal.runtime.DataSources] (JPA Startup Thread) Datasource enables XA but transaction recovery is not enabled. 
Please enable transaction recovery by setting quarkus.transaction-manager.enable-recovery=true, otherwise data may be lost if the application is terminated abruptly\n", + "2024-09-09 06:38:02,119 INFO [org.keycloak.connections.infinispan.DefaultInfinispanConnectionProviderFactory] (main) Node name: node_693934, Site name: null\n", + "2024-09-09 06:38:02,122 INFO [org.keycloak.broker.provider.AbstractIdentityProviderMapper] (main) Registering class org.keycloak.broker.provider.mappersync.ConfigSyncEventListener\n", + "2024-09-09 06:38:03,086 INFO [org.keycloak.quarkus.runtime.storage.legacy.liquibase.QuarkusJpaUpdaterProvider] (main) Initializing database schema. Using changelog META-INF/jpa-changelog-master.xml\n", + "\n", + "UPDATE SUMMARY\n", + "Run: 124\n", + "Previously run: 0\n", + "Filtered out: 0\n", + "-------------------------------\n", + "Total change sets: 124\n", + "\n", + "2024-09-09 06:38:05,143 INFO [org.keycloak.services] (main) KC-SERVICES0050: Initializing master realm\n", + "2024-09-09 06:38:06,418 INFO [org.keycloak.services] (main) KC-SERVICES0009: Added user 'admin' to realm 'master'\n", + "2024-09-09 06:38:06,492 INFO [io.quarkus] (main) Keycloak 24.0.4 on JVM (powered by Quarkus 3.8.4) started in 7.761s. Listening on: http://0.0.0.0:8080\n", + "2024-09-09 06:38:06,492 INFO [io.quarkus] (main) Profile dev activated. \n", + "2024-09-09 06:38:06,492 INFO [io.quarkus] (main) Installed features: [agroal, cdi, hibernate-orm, jdbc-h2, keycloak, logging-gelf, narayana-jta, reactive-routes, resteasy-reactive, resteasy-reactive-jackson, smallrye-context-propagation, vertx]\n", + "2024-09-09 06:38:06,495 WARN [org.keycloak.quarkus.runtime.KeycloakMain] (main) Running the server in development mode. DO NOT use this configuration in production.\n" + ] + } + ], + "source": [ + "!docker run --rm -p 9999:8080 --name my-keycloak \\\n", + "-e KEYCLOAK_ADMIN=admin -e KEYCLOAK_ADMIN_PASSWORD=admin \\\n", + "quay.io/keycloak/keycloak:24.0.4 start-dev" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d2d1e035-85b3-4d77-abb3-13af5e31ef37", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/rbac-local/01.2-setup-keycloak.ipynb b/examples/rbac-local/01.2-setup-keycloak.ipynb new file mode 100644 index 0000000000..d896bd82df --- /dev/null +++ b/examples/rbac-local/01.2-setup-keycloak.ipynb @@ -0,0 +1,416 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "e8952066-7a10-4c9b-a4b7-27be074ae269", + "metadata": {}, + "source": [ + "## Create Keycloak resources" + ] + }, + { + "cell_type": "markdown", + "id": "7252812d-90eb-4752-91a7-d46b400bacd8", + "metadata": {}, + "source": [ + "Wait until Keycloak is running" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "e5d13f76-f184-44f6-8542-54a61060e531", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\u001b[36m\"Status\"\u001b[0m:\u001b[32m \"running\"\u001b[0m,\u001b[36m \"Running\"\u001b[0m:\u001b[95m true\u001b[0m,\u001b[36m \"Paused\"\u001b[0m:\u001b[95m false\u001b[0m,\u001b[36m 
\"Restarting\"\u001b[0m:\u001b[95m false\u001b[0m,\u001b[36m \"OOMKilled\"\u001b[0m:\u001b[95m false\u001b[0m,\u001b[36m \"Dead\"\u001b[0m:\u001b[95m false\u001b[0m,\u001b[36m \"Pid\"\u001b[0m:\u001b[95m 2838024\u001b[0m,\u001b[36m \"ExitCode\"\u001b[0m:\u001b[95m 0\u001b[0m,\u001b[36m \"Error\"\u001b[0m:\u001b[32m \"\"\u001b[0m,\u001b[36m \"StartedAt\"\u001b[0m:\u001b[32m \"2024-09-09T06:37:49.055739669Z\"\u001b[0m,\u001b[36m \"FinishedAt\"\u001b[0m:\u001b[32m \"0001-01-01T00:00:00Z\"\u001b[0m}\n" + ] + } + ], + "source": [ + "!docker inspect --format='json' my-keycloak | yq '.[0].State'" + ] + }, + { + "cell_type": "markdown", + "id": "cc9a6329-4e89-464c-ac48-dbbadaf72a2b", + "metadata": {}, + "source": [ + "Then create a sample realm and client with some roles and users matching the test environment." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "d5c60591-f41d-4a5e-9b18-93385a889495", + "metadata": {}, + "outputs": [], + "source": [ + "import requests\n", + "import json\n", + "from dotenv import set_key\n", + "\n", + "OIDC_SERVER_URL = \"http://0.0.0.0:9999\"\n", + "ADMIN_USERNAME = \"admin\"\n", + "ADMIN_PASSWORD = \"admin\"\n", + "\n", + "access_token: str = \"\"" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "d16969bc-423a-4d18-afa3-97a791b84b13", + "metadata": {}, + "outputs": [], + "source": [ + "def get_token():\n", + " token_url = f\"{OIDC_SERVER_URL}/realms/master/protocol/openid-connect/token\"\n", + "\n", + " token_data = {\n", + " \"grant_type\": \"password\",\n", + " \"client_id\": \"admin-cli\",\n", + " \"username\": ADMIN_USERNAME,\n", + " \"password\": ADMIN_PASSWORD,\n", + " }\n", + "\n", + " token_response = requests.post(token_url, data=token_data)\n", + " if token_response.status_code == 200:\n", + " global access_token\n", + " access_token = token_response.json()[\"access_token\"]\n", + " return access_token\n", + " else:\n", + " print(\n", + " f\"Failed to obtain access token: {token_response.status_code} - {token_response.text}\"\n", + " )\n", + " raise Exception(\"Not authenticated\")\n", + "\n", + "\n", + "def keycloak_post(endpoint, data=None):\n", + " url = f\"{OIDC_SERVER_URL}/admin/{endpoint}\"\n", + " print(f\"Creating {endpoint}\")\n", + " global access_token\n", + " headers = {\n", + " \"Content-Type\": \"application/json\",\n", + " \"Authorization\": f\"Bearer {access_token}\",\n", + " }\n", + " response = requests.request(\"POST\", url, headers=headers, data=json.dumps(data))\n", + " print(f\"POST response.status_code is {response.status_code}\")\n", + " return response.status_code\n", + "\n", + "\n", + "def keycloak_get(endpoint):\n", + " url = f\"{OIDC_SERVER_URL}/admin/{endpoint}\"\n", + " global access_token\n", + " headers = {\n", + " \"Content-Type\": \"application/json\",\n", + " \"Authorization\": f\"Bearer {access_token}\",\n", + " }\n", + " response = requests.request(\"GET\", url, headers=headers)\n", + " print(f\"GET response.status_code is {response.status_code}\")\n", + " return response.json()\n", + "\n", + "\n", + "def create_realm(realm_name):\n", + " data = {\"realm\": realm_name, \"enabled\": \"true\"}\n", + " keycloak_post(\"realms\", data=data)\n", + " response = keycloak_get(f\"realms/{realm_name}\")\n", + " return response[\"id\"]\n", + "\n", + "\n", + "def create_client(realm_name, client_name):\n", + " data = {\n", + " \"clientId\": client_name,\n", + " \"enabled\": \"true\",\n", + " \"redirectUris\": [\n", + " \"http://localhost:8000/*\",\n", + " \"http://127.0.0.1:8000/*\",\n", + " 
\"http://0.0.0.0:8000/*\",\n", + " ],\n", + " \"publicClient\": False,\n", + " \"authorizationServicesEnabled\": True,\n", + " \"protocol\": \"openid-connect\",\n", + " \"standardFlowEnabled\": True,\n", + " \"directAccessGrantsEnabled\": True,\n", + " \"serviceAccountsEnabled\": True,\n", + " }\n", + " keycloak_post(f\"realms/{realm_name}/clients\", data=data)\n", + " response = keycloak_get(f\"realms/{realm_name}/clients\")\n", + " client = None\n", + " for c in response:\n", + " if c[\"clientId\"] == client_name:\n", + " client = c\n", + " break\n", + " client_id = client[\"id\"]\n", + " client_secret = client[\"secret\"]\n", + " return client_id, client_secret\n", + "\n", + "\n", + "def create_client_roles(realm_name, client_id, roles):\n", + " for role_name in roles:\n", + " data = {\"name\": role_name, \"clientRole\": True}\n", + " keycloak_post(f\"realms/{realm_name}/clients/{client_id}/roles\", data=data)\n", + "\n", + " response = keycloak_get(f\"realms/{realm_name}/clients/{client_id}/roles\")\n", + " roles_by_name = dict((role[\"name\"], role[\"id\"]) for role in response)\n", + " print(roles_by_name)\n", + " return roles_by_name\n", + "\n", + "\n", + "def create_user_with_roles(\n", + " realm_name, username, password, client_id, roles_by_name, roles\n", + "):\n", + " data = {\n", + " \"username\": username,\n", + " \"enabled\": True,\n", + " \"email\": f\"{username}@poc.com\",\n", + " \"emailVerified\": True,\n", + " \"firstName\": \"user\",\n", + " \"lastName\": f\"{username}\",\n", + " \"credentials\": [{\"type\": \"password\", \"value\": password}],\n", + " \"realmRoles\": [],\n", + " }\n", + " keycloak_post(f\"realms/{realm_name}/users\", data=data)\n", + " response = keycloak_get(f\"realms/{realm_name}/users\")\n", + " user = None\n", + " for u in response:\n", + " if u[\"username\"] == username:\n", + " user = u\n", + " break\n", + " user_id = user[\"id\"]\n", + "\n", + " data = [\n", + " {\n", + " \"id\": roles_by_name[role_name],\n", + " \"name\": role_name,\n", + " }\n", + " for role_name in roles\n", + " ]\n", + " keycloak_post(\n", + " f\"realms/{realm_name}/users/{user_id}/role-mappings/clients/{client_id}\",\n", + " data=data,\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e097fac1-f2c3-4afe-b78c-2c8279e3a84e", + "metadata": {}, + "outputs": [], + "source": [ + "get_token()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "f114fa41-8cea-486f-baf4-998cbf69fea4", + "metadata": {}, + "outputs": [], + "source": [ + "realm_name = \"rbac_example\"\n", + "client_name = \"app\"\n", + "password = \"password\"" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "0f889548-9b60-448b-beed-ac3fc1890b13", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Creating realms\n", + "POST response.status_code is 201\n", + "GET response.status_code is 200\n", + "Creating realms/rbac_example/clients\n", + "POST response.status_code is 201\n", + "GET response.status_code is 200\n", + "Creating realms/rbac_example/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b/roles\n", + "POST response.status_code is 201\n", + "Creating realms/rbac_example/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b/roles\n", + "POST response.status_code is 201\n", + "Creating realms/rbac_example/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b/roles\n", + "POST response.status_code is 201\n", + "Creating realms/rbac_example/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b/roles\n", + "POST response.status_code 
is 201\n", + "GET response.status_code is 200\n", + "{'store_admin': '2d7a675f-031d-42b1-aba6-eb28a95561af', 'batch_admin': '8664084a-4e3c-42b0-8e37-70a8fea012b3', 'reader': '6cbf4473-c165-48bd-b572-d20133ae2b2b', 'uma_protection': '172d464d-92c7-4055-95af-3e048d8077b2', 'fresh_writer': '9e2abf47-a7af-414e-bf14-2c9897933532'}\n" + ] + } + ], + "source": [ + "realm_id = create_realm(realm_name)\n", + "client_id, client_secret = create_client(realm_name, client_name)\n", + "\n", + "roles_by_name = create_client_roles(\n", + " realm_name, client_id, [\"reader\", \"fresh_writer\", \"store_admin\", \"batch_admin\"]\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "a3430d83-107d-44ad-acf2-0df810dff0ff", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Creating realms/rbac_example/users\n", + "POST response.status_code is 201\n", + "GET response.status_code is 200\n", + "Creating realms/rbac_example/users/a87b4ca8-e1a9-40f7-a166-f48fe45beec2/role-mappings/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b\n", + "POST response.status_code is 204\n", + "Creating realms/rbac_example/users\n", + "POST response.status_code is 201\n", + "GET response.status_code is 200\n", + "Creating realms/rbac_example/users/eb343a9b-d800-4fff-96b6-4588c7db08de/role-mappings/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b\n", + "POST response.status_code is 204\n", + "Creating realms/rbac_example/users\n", + "POST response.status_code is 201\n", + "GET response.status_code is 200\n", + "Creating realms/rbac_example/users/91bfbaae-e1fd-4167-9432-2d1d8ca8c838/role-mappings/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b\n", + "POST response.status_code is 204\n", + "Creating realms/rbac_example/users\n", + "POST response.status_code is 201\n", + "GET response.status_code is 200\n", + "Creating realms/rbac_example/users/4d67e8ca-6c2a-48b7-b511-c3f6197aa5ae/role-mappings/clients/c3475e89-27c3-41ac-a3d1-0bbcaf68083b\n", + "POST response.status_code is 204\n" + ] + } + ], + "source": [ + "create_user_with_roles(\n", + " realm_name, \"reader\", password, client_id, roles_by_name, [\"reader\"]\n", + ")\n", + "create_user_with_roles(\n", + " realm_name,\n", + " \"writer\",\n", + " password,\n", + " client_id,\n", + " roles_by_name,\n", + " [\"fresh_writer\"],\n", + ")\n", + "create_user_with_roles(\n", + " realm_name,\n", + " \"batch_admin\",\n", + " password,\n", + " client_id,\n", + " roles_by_name,\n", + " [\"batch_admin\"],\n", + ")\n", + "create_user_with_roles(\n", + " realm_name,\n", + " \"admin\",\n", + " password,\n", + " client_id,\n", + " roles_by_name,\n", + " [\"store_admin\"],\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "54317f9e-476b-4b8e-864a-a07c54b549f4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Realm rbac_example setup completed.\n", + "Client app created with ID c3475e89-27c3-41ac-a3d1-0bbcaf68083b and secret REDACTED\n", + "Settings configured in .env\n" + ] + } + ], + "source": [ + "print(f\"Realm {realm_name} setup completed.\")\n", + "print(\n", + " f\"Client {client_name} created with ID {client_id} and secret {client_secret}\"\n", + ")\n", + "\n", + "env_file = \".env\"\n", + "with open(env_file, \"w\") as file:\n", + " pass\n", + "\n", + "# Write property P=1 to the .env file\n", + "set_key(env_file, \"OIDC_SERVER_URL\", OIDC_SERVER_URL)\n", + "set_key(env_file, \"REALM\", realm_name)\n", + "set_key(env_file, \"CLIENT_ID\", client_name)\n", + 
"set_key(env_file, \"CLIENT_SECRET\", client_secret)\n", + "set_key(env_file, \"PASSWORD\", password)\n", + "print(f\"Settings configured in {env_file}\")" + ] + }, + { + "cell_type": "markdown", + "id": "35dcd5ed-4004-4570-965f-0f68668605d8", + "metadata": {}, + "source": [ + "The [.env](.env) file contains the settings of the created realm, including the client secret to be used to connect the server." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "46a1e2c7-e379-461d-b0bf-82354378e830", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OIDC_SERVER_URL='http://0.0.0.0:9999'\n", + "REALM='rbac_example'\n", + "CLIENT_ID='app'\n", + "CLIENT_SECRET='REDACTED'\n", + "PASSWORD='password'\n" + ] + } + ], + "source": [ + "!cat .env" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d035826b-64d6-47cc-a48e-26eb29b31fc7", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/rbac-local/01.3-setup-feast.ipynb b/examples/rbac-local/01.3-setup-feast.ipynb new file mode 100644 index 0000000000..e7e0943094 --- /dev/null +++ b/examples/rbac-local/01.3-setup-feast.ipynb @@ -0,0 +1,1029 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a7b2570a-bdf1-477a-8799-0aefe81a0e28", + "metadata": {}, + "source": [ + "## Setup Feast\n", + "Create a sample `rbac` project with local storage." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "74c1ee91-1816-4338-aabf-7851b655b061", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Creating a new Feast repository in \u001b[1m\u001b[32m/Users/dmartino/projects/AI/feast/feast/examples/rbac-local/rbac\u001b[0m.\n", + "\n" + ] + } + ], + "source": [ + "!rm -rf rbac\n", + "!feast init rbac" + ] + }, + { + "cell_type": "markdown", + "id": "e3215797-198a-49af-a241-7e0117634897", + "metadata": {}, + "source": [ + "Update the `feature_store.yaml` with an `auth` section derived from the Keycloak setup file [.env](.env)" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "a09d2198-9e3a-48f6-8c9d-72d62d20cd57", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "OIDC_SERVER_URL='http://0.0.0.0:9999'\n", + "REALM='rbac_example'\n", + "CLIENT_ID='app'\n", + "CLIENT_SECRET='REDACTED'\n", + "PASSWORD='password'\n" + ] + } + ], + "source": [ + "!cat .env" + ] + }, + { + "cell_type": "markdown", + "id": "6cd89872-a6c6-4be0-a6e3-8fd60d448b7b", + "metadata": {}, + "source": [ + "### Update the server YAML\n", + "Update the server YAML to use OIDC authorization" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "e16d5a44-ab0c-4ca8-8491-e7d9073469f8", + "metadata": {}, + "outputs": [], + "source": [ + "from dotenv import load_dotenv\n", + "import os\n", + "import yaml\n", + "\n", + "def load_config_file(path):\n", + " load_dotenv()\n", + "\n", + " with open(path, 'r') as file:\n", + " config = yaml.safe_load(file) or {}\n", + " return config" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "cd30523b-4e1c-4d56-9c72-84aacb46b29d", + "metadata": {}, + "outputs": [], + "source": [ + "def update_config_with_auth(config, is_client=False):\n", + " config['auth']={}\n", + " config['auth']['type']='oidc'\n", + " config['auth']['auth_discovery_url']=f\"{os.getenv('OIDC_SERVER_URL')}/realms/{os.getenv('REALM')}/.well-known/openid-configuration\"\n", + " config['auth']['client_id']=os.getenv('CLIENT_ID')\n", + " if is_client:\n", + " config['auth']['client_secret']=os.getenv('CLIENT_SECRET')\n", + " config['auth']['username']=''\n", + " config['auth']['password']='password'" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "1631a8c8-f635-4970-8653-06c147b1c128", + "metadata": {}, + "outputs": [], + "source": [ + "def update_config_file(path):\n", + " with open(path, 'w') as file:\n", + " yaml.safe_dump(config, file, default_flow_style=False)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "78898d46-1185-4528-8f08-b137dd49246a", + "metadata": {}, + "outputs": [], + "source": [ + "config = load_config_file('rbac/feature_repo/feature_store.yaml')\n", + "update_config_with_auth(config)\n", + "update_config_file('rbac/feature_repo/feature_store.yaml')" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "e2437286-2907-4818-87ad-a2293f21311e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "auth:\n", + " auth_discovery_url: http://0.0.0.0:9999/realms/rbac_example/.well-known/openid-configuration\n", + " client_id: app\n", + " type: oidc\n", + "entity_key_serialization_version: 2\n", + "online_store:\n", + " path: data/online_store.db\n", + " type: sqlite\n", + "project: rbac\n", + "provider: local\n", + "registry: data/registry.db\n" + ] + } + ], + "source": [ + "!cat 
rbac/feature_repo/feature_store.yaml" + ] + }, + { + "cell_type": "markdown", + "id": "fa715453-8c41-4f57-8cf2-c96f6a211cde", + "metadata": {}, + "source": [ + "### Update the client YAML\n", + "Update the client YAML to use OIDC authorization" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "886a558a-1746-44fa-9e38-0e381b3b3deb", + "metadata": {}, + "outputs": [], + "source": [ + "config = load_config_file('client/feature_store.yaml')\n", + "update_config_with_auth(config, is_client=True)\n", + "update_config_file('client/feature_store.yaml')" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "267a72e4-443a-4b08-bd59-84d475a29e2a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "auth:\n", + " auth_discovery_url: http://0.0.0.0:9999/realms/rbac_example/.well-known/openid-configuration\n", + " client_id: app\n", + " client_secret: REDACTED\n", + " password: password\n", + " type: oidc\n", + " username: ''\n", + "entity_key_serialization_version: 2\n", + "offline_store:\n", + " host: localhost\n", + " port: 8815\n", + " type: remote\n", + "online_store:\n", + " path: http://localhost:6566\n", + " type: remote\n", + "project: rbac\n", + "registry:\n", + " path: localhost:6570\n", + " registry_type: remote\n" + ] + } + ], + "source": [ + "!cat client/feature_store.yaml" + ] + }, + { + "cell_type": "markdown", + "id": "f71f5189-4423-4720-bbd2-fcb9b778a26b", + "metadata": {}, + "source": [ + "### Apply the configuration" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "e0c24e05-6e38-4ff1-9c39-73818fe41f18", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Applying changes for project rbac\n", + "/Users/dmartino/projects/AI/feast/feast/sdk/python/feast/feature_store.py:562: RuntimeWarning: On demand feature view is an experimental feature. 
This API is stable, but the functionality does not scale well for offline retrieval\n", + " warnings.warn(\n", + "Created project \u001b[1m\u001b[32mrbac\u001b[0m\n", + "Created entity \u001b[1m\u001b[32mdriver\u001b[0m\n", + "Created feature view \u001b[1m\u001b[32mdriver_hourly_stats\u001b[0m\n", + "Created feature view \u001b[1m\u001b[32mdriver_hourly_stats_fresh\u001b[0m\n", + "Created on demand feature view \u001b[1m\u001b[32mtransformed_conv_rate_fresh\u001b[0m\n", + "Created on demand feature view \u001b[1m\u001b[32mtransformed_conv_rate\u001b[0m\n", + "Created feature service \u001b[1m\u001b[32mdriver_activity_v1\u001b[0m\n", + "Created feature service \u001b[1m\u001b[32mdriver_activity_v3\u001b[0m\n", + "Created feature service \u001b[1m\u001b[32mdriver_activity_v2\u001b[0m\n", + "\n", + "Created sqlite table \u001b[1m\u001b[32mrbac_driver_hourly_stats_fresh\u001b[0m\n", + "Created sqlite table \u001b[1m\u001b[32mrbac_driver_hourly_stats\u001b[0m\n", + "\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo apply" + ] + }, + { + "cell_type": "markdown", + "id": "69b9857a-e32b-47ed-a120-57919ecb6b5d", + "metadata": {}, + "source": [ + "### Validate permissions" + ] + }, + { + "cell_type": "markdown", + "id": "867f565d-9740-4790-8d11-31001d920358", + "metadata": {}, + "source": [ + "There are no permissions after applying the example:" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "004f16bf-d125-4aec-b683-3e9653815a27", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME TYPES NAME_PATTERN ACTIONS ROLES REQUIRED_TAGS\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo permissions list" + ] + }, + { + "cell_type": "markdown", + "id": "f2276488-39ec-4ae8-bb69-08dce7ad1bd4", + "metadata": {}, + "source": [ + "The `permissions check` command identifies the resources that have no permissions matching their type, name or tags." 
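To make the check's matching rule concrete, here is a minimal, self-contained sketch of how a permission can be matched against a resource by type, name pattern, and required tags. This is not Feast's internal code; `DemoResource` and `matches` are invented purely for illustration:

```python
# Illustrative toy model of permission matching; not Feast's implementation.
import re
from dataclasses import dataclass, field

@dataclass
class DemoResource:
    name: str
    kind: str  # e.g. "FeatureView"
    tags: dict = field(default_factory=dict)

def matches(types, name_pattern, required_tags, resource):
    # A permission applies only to resources of a listed type...
    if resource.kind not in types:
        return False
    # ...whose name matches the optional pattern...
    if name_pattern and not re.fullmatch(name_pattern, resource.name):
        return False
    # ...and that carry all the required tags.
    return all(resource.tags.get(k) == v for k, v in required_tags.items())

print(matches({"FeatureView"}, ".*_fresh", {}, DemoResource("driver_hourly_stats_fresh", "FeatureView")))  # True
print(matches({"FeatureView"}, ".*_fresh", {}, DemoResource("driver", "Entity")))  # False
```

A resource is reported as unsecured when no registered permission matches it under a rule of this shape.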
+ ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "9fdd2660-c0f5-4dc9-a2da-d45751dcfa01", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[1m\u001b[31mThe following resources are not secured by any permission configuration:\u001b[0m\n", + "NAME TYPE\n", + "driver Entity\n", + "driver_hourly_stats FeatureView\n", + "driver_hourly_stats_fresh FeatureView\n", + "transformed_conv_rate_fresh OnDemandFeatureView\n", + "transformed_conv_rate OnDemandFeatureView\n", + "driver_activity_v1 FeatureService\n", + "driver_activity_v3 FeatureService\n", + "driver_activity_v2 FeatureService\n", + "vals_to_add RequestSource\n", + "driver_stats_push_source PushSource\n", + "driver_hourly_stats_source FileSource\n", + "\u001b[1m\u001b[31mThe following actions are not secured by any permission configuration (Note: this might not be a security concern, depending on the used APIs):\u001b[0m\n", + "NAME TYPE UNSECURED ACTIONS\n", + "driver Entity CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "driver_hourly_stats FeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "driver_hourly_stats_fresh FeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "transformed_conv_rate_fresh OnDemandFeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "transformed_conv_rate OnDemandFeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "driver_activity_v1 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "driver_activity_v3 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "driver_activity_v2 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "vals_to_add RequestSource CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "driver_stats_push_source PushSource CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "driver_hourly_stats_source FileSource CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo permissions check" + ] + }, + { + "cell_type": "markdown", + "id": "eb65649d-7ba7-494f-9e01-772842304ca1", + "metadata": {}, + "source": [ + "### Applying permissions\n", + "Let's create some Permissions to cover basic scenarios.\n", + "\n", + "First a simple permission to read the status of all the objects." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "3e910c5d-2f27-4f19-b324-c00347133da7", + "metadata": {}, + "outputs": [], + "source": [ + "from feast import FeatureStore\n", + "from feast.feast_object import ALL_RESOURCE_TYPES\n", + "from feast.permissions.action import CRUD, AuthzedAction, ALL_ACTIONS\n", + "from feast.permissions.permission import Permission\n", + "from feast.permissions.policy import RoleBasedPolicy" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "9e85bb35-cf12-4860-90d6-d1cd4830049c", + "metadata": {}, + "outputs": [], + "source": [ + "store = FeatureStore(\"rbac/feature_repo\")" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "87cc7c4f-48af-4158-adee-b1ccd8a72ea7", + "metadata": {}, + "outputs": [], + "source": [ + "read_permission = Permission(\n", + "    name=\"read_permission\",\n", + "    types=ALL_RESOURCE_TYPES,\n", + "    policy=RoleBasedPolicy(roles=[\"reader\"]),\n", + "    actions=AuthzedAction.DESCRIBE\n", + ")\n", + "store.registry.apply_permission(read_permission, store.project)" + ] + }, + { + "cell_type": "markdown", + "id": "e1dcb0d3-21e3-44b7-9ad5-c6b2b1e45b33", + "metadata": {}, + "source": [ + "Now a specific permission to write online data (e.g. via `materialize`) to the `FeatureView`s whose name ends with `fresh`." + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "1c2fecdd-056e-4462-b1ad-eec123e282dd", + "metadata": {}, + "outputs": [], + "source": [ + "from feast.feature_view import FeatureView\n", + "write_fresh_permission = Permission(\n", + "    name=\"write_fresh_permission\",\n", + "    types=FeatureView,\n", + "    name_pattern=\".*_fresh\",\n", + "    policy=RoleBasedPolicy(roles=[\"fresh_writer\"]),\n", + "    actions=AuthzedAction.WRITE_ONLINE\n", + ")\n", + "store.registry.apply_permission(write_fresh_permission, store.project)" + ] + }, + { + "cell_type": "markdown", + "id": "71edd0ea-67b5-4845-b8ae-602ed3883bb7", + "metadata": {}, + "source": [ + "Another one to allow access to OFFLINE functions."
+ ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "c74e677c-3959-4963-b683-a5289c8238c9", + "metadata": {}, + "outputs": [], + "source": [ + "from feast.feature_view import FeatureView\n", + "from feast.feature_service import FeatureService\n", + "from feast.on_demand_feature_view import OnDemandFeatureView\n", + "offline_permission = Permission(\n", + "    name=\"offline_permission\",\n", + "    types=[FeatureView, OnDemandFeatureView, FeatureService],\n", + "    policy=RoleBasedPolicy(roles=[\"batch_admin\"]),\n", + "    actions=CRUD + [AuthzedAction.WRITE_OFFLINE, AuthzedAction.READ_OFFLINE]\n", + ")\n", + "store.registry.apply_permission(offline_permission, store.project)" + ] + }, + { + "cell_type": "markdown", + "id": "3edc08f5-40e1-488a-b749-9b1f5fc31061", + "metadata": {}, + "source": [ + "Finally, an `admin` permission to manage all the resources." + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "739a26ee-e08e-461a-9f75-59158328fc90", + "metadata": {}, + "outputs": [], + "source": [ + "admin_permission = Permission(\n", + "    name=\"admin_permission\",\n", + "    types=ALL_RESOURCE_TYPES,\n", + "    policy=RoleBasedPolicy(roles=[\"store_admin\"]),\n", + "    actions=ALL_ACTIONS\n", + ")\n", + "store.registry.apply_permission(admin_permission, store.project)" + ] + }, + { + "cell_type": "markdown", + "id": "916c9399-866e-4796-9858-a890ceb29e48", + "metadata": {}, + "source": [ + "## Validate registered permissions" + ] + }, + { + "cell_type": "markdown", + "id": "aed869b3-c567-428f-8a69-9c322b62f7c6", + "metadata": {}, + "source": [ + "List all the permissions." + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "id": "cd284369-1cef-4cf6-859f-ea79d1450ed2", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME TYPES NAME_PATTERN ACTIONS ROLES REQUIRED_TAGS\n", + "read_permission Project DESCRIBE reader -\n", + " FeatureView\n", + " OnDemandFeatureView\n", + " BatchFeatureView\n", + " StreamFeatureView\n", + " Entity\n", + " FeatureService\n", + " DataSource\n", + " ValidationReference\n", + " SavedDataset\n", + " Permission\n", + "write_fresh_permission FeatureView .*_fresh WRITE_ONLINE fresh_writer -\n", + "offline_permission FeatureView CREATE batch_admin -\n", + " OnDemandFeatureView DESCRIBE\n", + " FeatureService UPDATE\n", + " DELETE\n", + " WRITE_OFFLINE\n", + " READ_OFFLINE\n", + "admin_permission Project CREATE store_admin -\n", + " FeatureView DESCRIBE\n", + " OnDemandFeatureView UPDATE\n", + " BatchFeatureView DELETE\n", + " StreamFeatureView READ_ONLINE\n", + " Entity READ_OFFLINE\n", + " FeatureService WRITE_ONLINE\n", + " DataSource WRITE_OFFLINE\n", + " ValidationReference\n", + " SavedDataset\n", + " Permission\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo permissions list" + ] + }, + { + "cell_type": "markdown", + "id": "be3873ee-2514-4aec-8fe8-8b54a3602651", + "metadata": {}, + "source": [ + "List all the resources matching each configured permission."
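Besides the CLI, the same information can be read programmatically from the registry we used above for `apply_permission`. A minimal sketch, assuming the registry exposes a `list_permissions` call symmetric to the `apply_permission` call shown earlier:

```python
# Sketch: enumerate the permissions stored in the registry (API symmetry assumed).
for p in store.registry.list_permissions(project=store.project):
    print(p.name)
```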
+ ] + }, + { + "cell_type": "code", + "execution_count": 29, + "id": "419df226-36df-4d19-be0d-ba82813fef80", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\u001b[1m\u001b[32mThe structure of the \u001b[1m\u001b[37mfeast-permissions list --verbose \u001b[1m\u001b[32mcommand will be as in the following example:\n", + "\n", + "\u001b[2mFor example: \u001b[0m\u001b[1m\u001b[32m\n", + "\n", + "permissions\n", + "β”œβ”€β”€ permission_1 ['role names list']\n", + "β”‚ β”œβ”€β”€ FeatureView: ['feature view names']\n", + "β”‚ β”œβ”€β”€ FeatureService: none\n", + "β”‚ └── ..\n", + "β”œβ”€β”€ permission_2 ['role names list']\n", + "└── ..\n", + "\n", + "-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------\u001b[0m\n", + " \n", + "Permissions:\n", + "\n", + "permissions\n", + "β”œβ”€β”€ read_permission ['reader']\n", + "β”‚ β”œβ”€β”€ FeatureView ['driver_hourly_stats_fresh', 'transformed_conv_rate_fresh', 'transformed_conv_rate', 'driver_hourly_stats']\n", + "β”‚ β”œβ”€β”€ OnDemandFeatureView ['transformed_conv_rate_fresh', 'transformed_conv_rate']\n", + "β”‚ β”œβ”€β”€ BatchFeatureView ['driver_hourly_stats_fresh', 'driver_hourly_stats']\n", + "β”‚ β”œβ”€β”€ StreamFeatureView: none\n", + "β”‚ β”œβ”€β”€ Entity: ['driver']\n", + "β”‚ β”œβ”€β”€ FeatureService: ['driver_activity_v3', 'driver_activity_v2', 'driver_activity_v1']\n", + "β”‚ β”œβ”€β”€ DataSource: ['driver_stats_push_source', 'driver_hourly_stats_source', 'vals_to_add']\n", + "β”‚ β”œβ”€β”€ ValidationReference: none\n", + "β”‚ └── SavedDataset: none\n", + "β”œβ”€β”€ write_fresh_permission ['fresh_writer']\n", + "β”‚ └── FeatureView ['driver_hourly_stats_fresh']\n", + "β”œβ”€β”€ offline_permission ['batch_admin']\n", + "β”‚ β”œβ”€β”€ FeatureView ['driver_hourly_stats_fresh', 'transformed_conv_rate_fresh', 'transformed_conv_rate', 'driver_hourly_stats']\n", + "β”‚ β”œβ”€β”€ OnDemandFeatureView ['transformed_conv_rate_fresh', 'transformed_conv_rate']\n", + "β”‚ └── FeatureService: ['driver_activity_v3', 'driver_activity_v2', 'driver_activity_v1']\n", + "└── admin_permission ['store_admin']\n", + " β”œβ”€β”€ FeatureView ['driver_hourly_stats_fresh', 'transformed_conv_rate_fresh', 'transformed_conv_rate', 'driver_hourly_stats']\n", + " β”œβ”€β”€ OnDemandFeatureView ['transformed_conv_rate_fresh', 'transformed_conv_rate']\n", + " β”œβ”€β”€ BatchFeatureView ['driver_hourly_stats_fresh', 'driver_hourly_stats']\n", + " β”œβ”€β”€ StreamFeatureView: none\n", + " β”œβ”€β”€ Entity: ['driver']\n", + " β”œβ”€β”€ FeatureService: ['driver_activity_v3', 'driver_activity_v2', 'driver_activity_v1']\n", + " β”œβ”€β”€ DataSource: ['driver_stats_push_source', 'driver_hourly_stats_source', 'vals_to_add']\n", + " β”œβ”€β”€ ValidationReference: none\n", + " └── SavedDataset: none\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo permissions list -v" + ] + }, + { + "cell_type": "markdown", + "id": "90319f10-abce-4a18-9891-7428c8781187", + "metadata": {}, + "source": [ + "Describe one of the permissions." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 30, + "id": "cec436ce-5d1c-455e-a6d7-80f84380e83a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "spec:\n", + " name: admin_permission\n", + " types:\n", + " - PROJECT\n", + " - FEATURE_VIEW\n", + " - ON_DEMAND_FEATURE_VIEW\n", + " - BATCH_FEATURE_VIEW\n", + " - STREAM_FEATURE_VIEW\n", + " - ENTITY\n", + " - FEATURE_SERVICE\n", + " - DATA_SOURCE\n", + " - VALIDATION_REFERENCE\n", + " - SAVED_DATASET\n", + " - PERMISSION\n", + " actions:\n", + " - CREATE\n", + " - DESCRIBE\n", + " - UPDATE\n", + " - DELETE\n", + " - READ_ONLINE\n", + " - READ_OFFLINE\n", + " - WRITE_ONLINE\n", + " - WRITE_OFFLINE\n", + " policy:\n", + " roleBasedPolicy:\n", + " roles:\n", + " - store_admin\n", + "meta:\n", + " createdTimestamp: '2024-09-09T06:41:28.335684Z'\n", + " lastUpdatedTimestamp: '2024-09-09T06:41:28.335684Z'\n", + "\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo permissions describe admin_permission" + ] + }, + { + "cell_type": "markdown", + "id": "a267a3bb-9861-43eb-9f7b-33f5d5a23e81", + "metadata": {}, + "source": [ + "List the roles specified by these permissions." + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "id": "b6a3f4a6-e3ab-4aaa-9a15-69ea63246b45", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+--------------+\n", + "| ROLE NAME |\n", + "+==============+\n", + "| batch_admin |\n", + "+--------------+\n", + "| fresh_writer |\n", + "+--------------+\n", + "| reader |\n", + "+--------------+\n", + "| store_admin |\n", + "+--------------+\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo permissions list-roles" + ] + }, + { + "cell_type": "markdown", + "id": "0dbb31d3-edc9-4146-a46c-146d7f59532a", + "metadata": {}, + "source": [ + "For each configured role, list all the resources and operations that are allowed to a user impersonating this role." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 32, + "id": "45832f21-43c6-4784-ba88-1e65fa8479b5", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ROLE NAME RESOURCE NAME RESOURCE TYPE PERMITTED ACTIONS\n", + "batch_admin driver Entity -\n", + "batch_admin driver_hourly_stats FeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_OFFLINE\n", + " WRITE_OFFLINE\n", + "batch_admin driver_hourly_stats_fresh FeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_OFFLINE\n", + " WRITE_OFFLINE\n", + "batch_admin transformed_conv_rate_fresh OnDemandFeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_OFFLINE\n", + " WRITE_OFFLINE\n", + "batch_admin transformed_conv_rate OnDemandFeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_OFFLINE\n", + " WRITE_OFFLINE\n", + "batch_admin driver_activity_v1 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_OFFLINE\n", + " WRITE_OFFLINE\n", + "batch_admin driver_activity_v3 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_OFFLINE\n", + " WRITE_OFFLINE\n", + "batch_admin driver_activity_v2 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_OFFLINE\n", + " WRITE_OFFLINE\n", + "batch_admin vals_to_add RequestSource -\n", + "batch_admin driver_stats_push_source PushSource -\n", + "batch_admin driver_hourly_stats_source FileSource -\n", + "batch_admin read_permission Permission -\n", + "batch_admin write_fresh_permission Permission -\n", + "batch_admin offline_permission Permission -\n", + "batch_admin admin_permission Permission -\n", + "fresh_writer driver Entity -\n", + "fresh_writer driver_hourly_stats FeatureView -\n", + "fresh_writer driver_hourly_stats_fresh FeatureView WRITE_ONLINE\n", + "fresh_writer transformed_conv_rate_fresh OnDemandFeatureView -\n", + "fresh_writer transformed_conv_rate OnDemandFeatureView -\n", + "fresh_writer driver_activity_v1 FeatureService -\n", + "fresh_writer driver_activity_v3 FeatureService -\n", + "fresh_writer driver_activity_v2 FeatureService -\n", + "fresh_writer vals_to_add RequestSource -\n", + "fresh_writer driver_stats_push_source PushSource -\n", + "fresh_writer driver_hourly_stats_source FileSource -\n", + "fresh_writer read_permission Permission -\n", + "fresh_writer write_fresh_permission Permission -\n", + "fresh_writer offline_permission Permission -\n", + "fresh_writer admin_permission Permission -\n", + "reader driver Entity DESCRIBE\n", + "reader driver_hourly_stats FeatureView DESCRIBE\n", + "reader driver_hourly_stats_fresh FeatureView DESCRIBE\n", + "reader transformed_conv_rate_fresh OnDemandFeatureView DESCRIBE\n", + "reader transformed_conv_rate OnDemandFeatureView DESCRIBE\n", + "reader driver_activity_v1 FeatureService DESCRIBE\n", + "reader driver_activity_v3 FeatureService DESCRIBE\n", + "reader driver_activity_v2 FeatureService DESCRIBE\n", + "reader vals_to_add RequestSource DESCRIBE\n", + "reader driver_stats_push_source PushSource DESCRIBE\n", + "reader driver_hourly_stats_source FileSource DESCRIBE\n", + "reader read_permission Permission DESCRIBE\n", + "reader write_fresh_permission Permission DESCRIBE\n", + "reader offline_permission Permission DESCRIBE\n", + "reader admin_permission Permission DESCRIBE\n", + "store_admin driver Entity CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " 
WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin driver_hourly_stats FeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin driver_hourly_stats_fresh FeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin transformed_conv_rate_fresh OnDemandFeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin transformed_conv_rate OnDemandFeatureView CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin driver_activity_v1 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin driver_activity_v3 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin driver_activity_v2 FeatureService CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin vals_to_add RequestSource CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin driver_stats_push_source PushSource CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin driver_hourly_stats_source FileSource CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin read_permission Permission CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin write_fresh_permission Permission CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin offline_permission Permission CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n", + "store_admin admin_permission Permission CREATE\n", + " DESCRIBE\n", + " UPDATE\n", + " DELETE\n", + " READ_ONLINE\n", + " READ_OFFLINE\n", + " WRITE_ONLINE\n", + " WRITE_OFFLINE\n" + ] + } + ], + "source": [ + "!feast -c rbac/feature_repo permissions list-roles -v" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c7960d2c-e43f-46b4-8cb3-5c6fc9dbaba8", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/rbac-local/02-registry_server.ipynb b/examples/rbac-local/02-registry_server.ipynb new file mode 100644 index 0000000000..43a5ead908 
--- /dev/null +++ b/examples/rbac-local/02-registry_server.ipynb @@ -0,0 +1,73 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "70df4877-177b-441a-a745-f0cd091e0a3a", + "metadata": {}, + "source": [ + "## Registry server" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "ef9f796d-f9d7-47d0-96c2-03b38a219d83", + "metadata": {}, + "outputs": [], + "source": [ + "!lsof -i :6570\n", + "# !kill -9 64859 98087" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "bd303508-9f32-4bdb-87c2-729e3ab62b4f", + "metadata": {}, + "outputs": [], + "source": [ + "from feast import FeatureStore\n", + "store = FeatureStore(repo_path=\"rbac/feature_repo\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "29127952-f9d5-44c4-b7c3-437e0b55c4b0", + "metadata": {}, + "outputs": [], + "source": [ + "store.serve_registry(6570)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5c285fb3-442b-4bb4-bf34-2a61ae5fe76a", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/rbac-local/03-online_server.ipynb b/examples/rbac-local/03-online_server.ipynb new file mode 100644 index 0000000000..f80ef35a17 --- /dev/null +++ b/examples/rbac-local/03-online_server.ipynb @@ -0,0 +1,111 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "d75bb824-a6cf-493e-87a8-2ae1095cf918", + "metadata": {}, + "source": [ + "## Online server" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "27a17dd4-08f5-4f01-b5a4-a76aa99952a1", + "metadata": {}, + "outputs": [], + "source": [ + "!lsof -i :6566\n", + "# !kill -9 64859 98087" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "897f5979-da53-4441-ac31-f5cd40abf6cd", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "YES\n" + ] + } + ], + "source": [ + "# This must be YES on MacOS\n", + "!echo $OBJC_DISABLE_INITIALIZE_FORK_SAFETY" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "73b219c3-7782-4e09-9897-d01f44ccae2d", + "metadata": {}, + "outputs": [], + "source": [ + "# from feast import FeatureStore\n", + "# store = FeatureStore(repo_path=\"rbac/feature_repo\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d1619739-f763-45bb-a1f1-53f6452bc60a", + "metadata": {}, + "outputs": [], + "source": [ + "# store.serve(\n", + "# host=\"localhost\",\n", + "# port=6566,\n", + "# type_=\"http\",\n", + "# no_access_log=False,\n", + "# workers=1,\n", + "# metrics=False,\n", + "# keep_alive_timeout=5,\n", + "# registry_ttl_sec=5,\n", + "# )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bc804040-9cd0-4dbc-a63d-a81de9422605", + "metadata": {}, + "outputs": [], + "source": [ + "!feast -c rbac/feature_repo serve" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c3bc63e7-cf7c-4132-b39b-3cd75a1d6755", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": 
{ + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/rbac-local/04-offline_server.ipynb b/examples/rbac-local/04-offline_server.ipynb new file mode 100644 index 0000000000..62ad8b1a78 --- /dev/null +++ b/examples/rbac-local/04-offline_server.ipynb @@ -0,0 +1,99 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "36f5d04f-b456-4e65-91a8-482c91f854c1", + "metadata": {}, + "source": [ + "## Offline server" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "86924e3b-d7dc-46e1-a9f4-05c8abee4da8", + "metadata": {}, + "outputs": [], + "source": [ + "!lsof -i :8815\n", + "# !kill -9 64859 98087" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "73b219c3-7782-4e09-9897-d01f44ccae2d", + "metadata": {}, + "outputs": [], + "source": [ + "from feast import FeatureStore\n", + "store = FeatureStore(repo_path=\"rbac/feature_repo\")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "75967678-0573-410e-b9dd-09743b67eac3", + "metadata": {}, + "outputs": [], + "source": [ + "import logging\n", + "import sys\n", + "from io import StringIO\n", + "logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s')\n", + "logger = logging.getLogger() " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5400ee1b-de0a-4fe9-9003-83d0af0863e6", + "metadata": {}, + "outputs": [], + "source": [ + "store.serve_offline(\"localhost\", 8815)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8b822d5c-41d9-477a-8b42-c4701784bac2", + "metadata": {}, + "outputs": [], + "source": [ + "# Run this in case it's needed to force materialize from offline server\n", + "from datetime import datetime\n", + "store.materialize_incremental(end_date=datetime.now())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ff854a14-4649-4d40-94fa-b6e2b8577afa", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/rbac-local/README.md b/examples/rbac-local/README.md new file mode 100644 index 0000000000..dd1128e94a --- /dev/null +++ b/examples/rbac-local/README.md @@ -0,0 +1,57 @@ +# RBAC demo +RBAC demo with local environment. + +## System Requirements +* Clone of the Feast repo +* Docker +* yq + +## Architecture +The demo creates the following components: +* An OIDC authorization server using a Keycloak docker container and initialized for demo purposes with a sample realm. +* A sample feature store using `feast init`, later adapted to use the `oidc` authorization against the sample realm. +* Three servers running the registry, online and offline stores. +* A client application connected to the servers to run test code. + +## Setup the environment +Run the sample notebooks to setup the environment: +* [01.1-startkeycloak](./01.1-startkeycloak.ipynb) to start a Keycloak container. 
+* [01.2-setup-keycloak.ipynb](./01.2-setup-keycloak.ipynb) to configure Keycloak with all the needed resources for the next steps. +* [01.3-setup-feast.ipynb](./01.3-setup-feast.ipynb) to create the sample Feast store and inject the authorization settings. +* [02-registry_server.ipynb](./02-registry_server.ipynb) to start the Registry server +* [03-online_server.ipynb](./03-online_server.ipynb) to start the Online store server +* [04-offline_server.ipynb](04-offline_server.ipynb) to start the Offline store server + +**Note**: For macOS users, you must set this environment variable before launching the notebook server: +```bash +OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES +``` + +## Goal +Once the environment is defined, we can use the [client.ipynb](./client.ipynb) notebook to verify how the behavior changes +according to the configured user. + +In particular, given the configured permissions: +| Permission | Types | Name pattern | Actions | Roles | +|------------|-------|--------------|---------|-------| +| read_permission | ALL | | DESCRIBE | reader | +| write_fresh_permission | FeatureView | .*_fresh | WRITE_ONLINE | fresh_writer | +| offline_permission | FeatureView, OnDemandFeatureView, FeatureService | | CRUD, WRITE_OFFLINE, READ_OFFLINE | batch_admin | +| admin_permission | ALL | | ALL | store_admin | + +and the user roles defined in Keycloak: +| User | Roles | +|------|-------| +| reader | reader | +| writer | fresh_writer | +| batch_admin | batch_admin | +| admin | store_admin | + +We should expect the following behavior for each test section of the [client notebook](./client.ipynb): +| User | Basic validation | Historical | Materialization | Online | Stream push | +|------|------------------|------------|-------------------|--------|-------------| +| reader | Ok | Denied | Denied | Denied | Denied | +| writer | Empty | Denied | Ok | Denied | Denied | +| batch_admin | No Entities and Permissions | Ok | Denied | Denied | Denied | +| admin | Ok | Ok | Ok | Ok | Ok | + diff --git a/examples/rbac-local/client.ipynb b/examples/rbac-local/client.ipynb new file mode 100644 index 0000000000..7e5561f5f7 --- /dev/null +++ b/examples/rbac-local/client.ipynb @@ -0,0 +1,607 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "bee9388f-8ffc-4fcd-930f-197ec3c2dd96", + "metadata": {}, + "source": [ + "# Test client" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "id": "eceb50b4-c516-4224-a0b1-efd31bb78c29", + "metadata": {}, + "outputs": [], + "source": [ + "import yaml\n", + "def update_username(username):\n", + "    path = 'client/feature_store.yaml'\n", + "    with open(path, 'r') as file:\n", + "        config = yaml.safe_load(file) or {}\n", + "    config['auth']['username'] = username\n", + "    with open(path, 'w') as file:\n", + "        yaml.safe_dump(config, file, default_flow_style=False)" + ] + }, + { + "cell_type": "markdown", + "id": "08a4020a-10ad-476a-af25-26a09d3d4786", + "metadata": {}, + "source": [ + "# Update test user\n", + "Use one of `reader`, `writer`, `batch_admin` or `admin` (password is fixed) as the current `username`."
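Before stepping through the notebook with a single user, note that the whole check can also be scripted. A hedged sketch that reuses the `update_username` helper above and rebuilds the `FeatureStore` on each pass so the new credentials are picked up (the output handling is illustrative):

```python
from feast.feature_store import FeatureStore

# Sketch: cycle through the demo users and see which registry reads succeed.
for user in ["reader", "writer", "batch_admin", "admin"]:
    update_username(user)
    store = FeatureStore(repo_path="client")
    try:
        print(user, "->", [e.name for e in store.list_entities()])
    except Exception as e:
        print(user, "-> denied:", e)
```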
+ ] + }, + { + "cell_type": "code", + "execution_count": 45, + "id": "564849f9-c95a-4278-9fa7-fa09694e5d93", + "metadata": {}, + "outputs": [], + "source": [ + "username = 'reader'\n", + "update_username(username)" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "id": "6ffb2c42-5a5d-495c-92c5-0729f0144fb8", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "auth:\n", + "  auth_discovery_url: http://0.0.0.0:9999/realms/rbac_example/.well-known/openid-configuration\n", + "  client_id: app\n", + "  client_secret: REDACTED\n", + "  password: password\n", + "  type: oidc\n", + "  username: reader\n", + "entity_key_serialization_version: 2\n", + "offline_store:\n", + "  host: localhost\n", + "  port: 8815\n", + "  type: remote\n", + "online_store:\n", + "  path: http://localhost:6566\n", + "  type: remote\n", + "project: rbac\n", + "registry:\n", + "  path: localhost:6570\n", + "  registry_type: remote\n" + ] + } + ], + "source": [ + "!cat client/feature_store.yaml" + ] + }, + { + "cell_type": "markdown", + "id": "664b6f52-d8cf-4145-bf7a-fcce111a34da", + "metadata": {}, + "source": [ + "## Updating the logger\n", + "The following is needed to show, in the notebook output, the messages logged by the Feast application."
+ ] + }, + { + "cell_type": "code", + "execution_count": 50, + "id": "a59979af-a438-436d-918c-3174d94ade5b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Authorization config is: {'auth_discovery_url': 'http://0.0.0.0:9999/realms/rbac_example/.well-known/openid-configuration', 'client_id': 'app', 'client_secret': 'REDACTED', 'password': 'password', 'type': 'oidc', 'username': 'reader'}\n" + ] + } + ], + "source": [ + "print(f\"Authorization config is: {store.config.auth}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "id": "bf0af19c-6609-4cb4-86f3-a976528c3966", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Entity found driver\n" + ] + } + ], + "source": [ + "for e in store.list_entities():\n", + " print(f\"Entity found {e.name}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "id": "0494a65f-64bf-45f0-a772-ee6d8b89c91e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "FeatureView found driver_hourly_stats of type FeatureView\n", + "FeatureView found driver_hourly_stats_fresh of type FeatureView\n", + "FeatureView found transformed_conv_rate_fresh of type OnDemandFeatureView\n", + "FeatureView found transformed_conv_rate of type OnDemandFeatureView\n" + ] + } + ], + "source": [ + "for fv in store.list_all_feature_views():\n", + " print(f\"FeatureView found {fv.name} of type {type(fv).__name__}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "id": "0832822f-e954-4d43-a96f-de5cf05acb2b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "FeatureService found driver_activity_v1 of type FeatureService\n", + "FeatureService found driver_activity_v3 of type FeatureService\n", + "FeatureService found driver_activity_v2 of type FeatureService\n" + ] + } + ], + "source": [ + "for fs in store.list_feature_services():\n", + " print(f\"FeatureService found {fs.name} of type {type(fs).__name__}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 54, + "id": "98fd0767-4305-4b18-a50b-298fa7103815", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NAME TYPES NAME_PATTERN ACTIONS ROLES REQUIRED_TAGS\n", + "read_permission Project DESCRIBE reader -\n", + " FeatureView\n", + " OnDemandFeatureView\n", + " BatchFeatureView\n", + " StreamFeatureView\n", + " Entity\n", + " FeatureService\n", + " DataSource\n", + " ValidationReference\n", + " SavedDataset\n", + " Permission\n", + "write_fresh_permission FeatureView .*_fresh WRITE_ONLINE fresh_writer -\n", + "offline_permission FeatureView CREATE batch_admin -\n", + " OnDemandFeatureView DESCRIBE\n", + " FeatureService UPDATE\n", + " DELETE\n", + " WRITE_OFFLINE\n", + " READ_OFFLINE\n", + "admin_permission Project CREATE store_admin -\n", + " FeatureView DESCRIBE\n", + " OnDemandFeatureView UPDATE\n", + " BatchFeatureView DELETE\n", + " StreamFeatureView READ_ONLINE\n", + " Entity READ_OFFLINE\n", + " FeatureService WRITE_ONLINE\n", + " DataSource WRITE_OFFLINE\n", + " ValidationReference\n", + " SavedDataset\n", + " Permission\n" + ] + } + ], + "source": [ + "!feast -c client permissions list" + ] + }, + { + "cell_type": "markdown", + "id": "ad2d56ee-e7a9-463e-a597-932c10f8df1c", + "metadata": {}, + "source": [ + "## Validating with test_workflow.py\n", + "The following test functions were copied from the `test_workflow.py` template but we added 
`try` blocks to print only \n", + "the relevant error messages, since we expect to receive errors from the permission enforcement modules." + ] + }, + { + "cell_type": "code", + "execution_count": 55, + "id": "930f7e8c-c2a0-4425-99c2-c9958a5a7632", + "metadata": {}, + "outputs": [], + "source": [ + "import subprocess\n", + "from datetime import datetime\n", + "\n", + "import pandas as pd\n", + "\n", + "from feast import FeatureStore\n", + "from feast.data_source import PushMode\n", + "\n", + "def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: bool):\n", + " # Note: see https://docs.feast.dev/getting-started/concepts/feature-retrieval for more details on how to retrieve\n", + " # for all entities in the offline store instead\n", + " entity_df = pd.DataFrame.from_dict(\n", + " {\n", + " # entity's join key -> entity values\n", + " \"driver_id\": [1001, 1002, 1003],\n", + " # \"event_timestamp\" (reserved key) -> timestamps\n", + " \"event_timestamp\": [\n", + " datetime(2021, 4, 12, 10, 59, 42),\n", + " datetime(2021, 4, 12, 8, 12, 10),\n", + " datetime(2021, 4, 12, 16, 40, 26),\n", + " ],\n", + " # (optional) label name -> label values. Feast does not process these\n", + " \"label_driver_reported_satisfaction\": [1, 5, 3],\n", + " # values we're using for an on-demand transformation\n", + " \"val_to_add\": [1, 2, 3],\n", + " \"val_to_add_2\": [10, 20, 30],\n", + " }\n", + " )\n", + " # For batch scoring, we want the latest timestamps\n", + " if for_batch_scoring:\n", + " entity_df[\"event_timestamp\"] = pd.to_datetime(\"now\", utc=True)\n", + "\n", + " try:\n", + " training_df = store.get_historical_features(\n", + " entity_df=entity_df,\n", + " features=[\n", + " \"driver_hourly_stats:conv_rate\",\n", + " \"driver_hourly_stats:acc_rate\",\n", + " \"driver_hourly_stats:avg_daily_trips\",\n", + " \"transformed_conv_rate:conv_rate_plus_val1\",\n", + " \"transformed_conv_rate:conv_rate_plus_val2\",\n", + " ],\n", + " ).to_df()\n", + " print(training_df.head())\n", + " except Exception as e:\n", + " print(f\"Failed to run `store.get_historical_features`: {e}\")\n", + "\n", + "\n", + "def fetch_online_features(store, source: str = \"\"):\n", + " entity_rows = [\n", + " # {join_key: entity_value}\n", + " {\n", + " \"driver_id\": 1001,\n", + " \"val_to_add\": 1000,\n", + " \"val_to_add_2\": 2000,\n", + " },\n", + " {\n", + " \"driver_id\": 1002,\n", + " \"val_to_add\": 1001,\n", + " \"val_to_add_2\": 2002,\n", + " },\n", + " ]\n", + " if source == \"feature_service\":\n", + " try:\n", + " features_to_fetch = store.get_feature_service(\"driver_activity_v1\")\n", + " except Exception as e:\n", + " print(f\"Failed to run `store.get_feature_service`: {e}\")\n", + " elif source == \"push\":\n", + " try:\n", + " features_to_fetch = store.get_feature_service(\"driver_activity_v3\")\n", + " except Exception as e:\n", + " print(f\"Failed to run `store.get_feature_service`: {e}\")\n", + " else:\n", + " features_to_fetch = [\n", + " \"driver_hourly_stats:acc_rate\",\n", + " \"transformed_conv_rate:conv_rate_plus_val1\",\n", + " \"transformed_conv_rate:conv_rate_plus_val2\",\n", + " ]\n", + " try:\n", + " returned_features = store.get_online_features(\n", + " features=features_to_fetch,\n", + " entity_rows=entity_rows,\n", + " ).to_dict()\n", + " for key, value in sorted(returned_features.items()):\n", + " print(key, \" : \", value)\n", + " except Exception as e:\n", + " print(f\"Failed to run `store.get_online_features`: {e}\")" + ] + }, + { + "cell_type": "code", + 
"execution_count": 56, + "id": "86359ae5-e723-4976-89bb-e772f597ed60", + "metadata": {}, + "outputs": [], + "source": [ + "store = FeatureStore(repo_path=\"client\")" + ] + }, + { + "cell_type": "markdown", + "id": "c0fed355-a1ac-4515-ae27-9d0feca886f4", + "metadata": {}, + "source": [ + "### Historical features" + ] + }, + { + "cell_type": "code", + "execution_count": 57, + "id": "e18dba03-6199-4b48-a9cb-23e3fa51a505", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "--- Historical features for training ---\n", + "Failed to run `store.get_historical_features`: Permission error:\n", + "Permission offline_permission denied execution of ['READ_OFFLINE'] to FeatureView:driver_hourly_stats: Requires roles ['batch_admin'],Permission admin_permission denied execution of ['READ_OFFLINE'] to FeatureView:driver_hourly_stats: Requires roles ['store_admin']. Detail: Python exception: FeastPermissionError. gRPC client debug context: UNKNOWN:Error received from peer ipv6:%5B::1%5D:8815 {grpc_message:\"Permission error:\\nPermission offline_permission denied execution of [\\'READ_OFFLINE\\'] to FeatureView:driver_hourly_stats: Requires roles [\\'batch_admin\\'],Permission admin_permission denied execution of [\\'READ_OFFLINE\\'] to FeatureView:driver_hourly_stats: Requires roles [\\'store_admin\\']. Detail: Python exception: FeastPermissionError\", grpc_status:2, created_time:\"2024-09-09T08:52:22.529654+02:00\"}. Client context: IOError: Server never sent a data message. Detail: Internal\n", + "\n", + "--- Historical features for batch scoring ---\n", + "Failed to run `store.get_historical_features`: Permission error:\n", + "Permission offline_permission denied execution of ['READ_OFFLINE'] to FeatureView:driver_hourly_stats: Requires roles ['batch_admin'],Permission admin_permission denied execution of ['READ_OFFLINE'] to FeatureView:driver_hourly_stats: Requires roles ['store_admin']. Detail: Python exception: FeastPermissionError. gRPC client debug context: UNKNOWN:Error received from peer ipv6:%5B::1%5D:8815 {created_time:\"2024-09-09T08:52:23.51953+02:00\", grpc_status:2, grpc_message:\"Permission error:\\nPermission offline_permission denied execution of [\\'READ_OFFLINE\\'] to FeatureView:driver_hourly_stats: Requires roles [\\'batch_admin\\'],Permission admin_permission denied execution of [\\'READ_OFFLINE\\'] to FeatureView:driver_hourly_stats: Requires roles [\\'store_admin\\']. Detail: Python exception: FeastPermissionError\"}. Client context: IOError: Server never sent a data message. 
Detail: Internal\n" + ] + } + ], + "source": [ + "print(\"\\n--- Historical features for training ---\")\n", + "fetch_historical_features_entity_df(store, for_batch_scoring=False)\n", + "\n", + "print(\"\\n--- Historical features for batch scoring ---\")\n", + "fetch_historical_features_entity_df(store, for_batch_scoring=True)" + ] + }, + { + "cell_type": "markdown", + "id": "83bdd1a1-7071-4c51-bf69-9b2bade572a1", + "metadata": {}, + "source": [ + "### Materialization" + ] + }, + { + "cell_type": "code", + "execution_count": 58, + "id": "baeed80c-d2bf-4ac2-ae97-dc689c32e797", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "--- Load features into online store ---\n", + "Materializing \u001b[1m\u001b[32m2\u001b[0m feature views to \u001b[1m\u001b[32m2024-09-09 08:52:23+02:00\u001b[0m into the \u001b[1m\u001b[32mremote\u001b[0m online store.\n", + "\n", + "\u001b[1m\u001b[32mdriver_hourly_stats\u001b[0m from \u001b[1m\u001b[32m2024-09-09 10:50:53+02:00\u001b[0m to \u001b[1m\u001b[32m2024-09-09 08:52:23+02:00\u001b[0m:\n", + "Failed to run `store.materialize_incremental`: Permission error:\n", + "Permission admin_permission denied execution of ['READ_OFFLINE'] to FileSource:driver_hourly_stats_source: Requires roles ['store_admin']. Detail: Python exception: FeastPermissionError. gRPC client debug context: UNKNOWN:Error received from peer ipv6:%5B::1%5D:8815 {created_time:\"2024-09-09T08:52:24.551895+02:00\", grpc_status:2, grpc_message:\"Permission error:\\nPermission admin_permission denied execution of [\\'READ_OFFLINE\\'] to FileSource:driver_hourly_stats_source: Requires roles [\\'store_admin\\']. Detail: Python exception: FeastPermissionError\"}. Client context: IOError: Server never sent a data message. 
Detail: Internal\n" + ] + } + ], + "source": [ + "print(\"\\n--- Load features into online store ---\")\n", + "try:\n", + " store.materialize_incremental(end_date=datetime.now())\n", + "except Exception as e:\n", + " print(f\"Failed to run `store.materialize_incremental`: {e}\")" + ] + }, + { + "cell_type": "markdown", + "id": "f3ef1e87-a98e-447e-893a-d10e205d87c5", + "metadata": {}, + "source": [ + "### Online features" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "id": "feb552de-77da-4177-bc4e-4c882ca91fe8", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "--- Online features ---\n", + "Failed to run `store.get_online_features`: Permission error:\n", + "Permission admin_permission denied execution of ['READ_ONLINE'] to FeatureView:driver_hourly_stats: Requires roles ['store_admin']\n", + "\n", + "--- Online features retrieved (instead) through a feature service---\n", + "Failed to run `store.get_online_features`: Permission error:\n", + "Permission admin_permission denied execution of ['READ_ONLINE'] to FeatureView:driver_hourly_stats: Requires roles ['store_admin']\n", + "\n", + "--- Online features retrieved (using feature service v3, which uses a feature view with a push source---\n", + "Failed to run `store.get_online_features`: Permission error:\n", + "Permission admin_permission denied execution of ['READ_ONLINE'] to FeatureView:driver_hourly_stats: Requires roles ['store_admin']\n" + ] + } + ], + "source": [ + "print(\"\\n--- Online features ---\")\n", + "fetch_online_features(store)\n", + "\n", + "print(\"\\n--- Online features retrieved (instead) through a feature service---\")\n", + "fetch_online_features(store, source=\"feature_service\")\n", + "\n", + "print(\n", + " \"\\n--- Online features retrieved (using feature service v3, which uses a feature view with a push source---\"\n", + ")\n", + "fetch_online_features(store, source=\"push\")" + ] + }, + { + "cell_type": "markdown", + "id": "7ce5704c-86ef-4d00-a111-b86e853f2cca", + "metadata": {}, + "source": [ + "### Stream push" + ] + }, + { + "cell_type": "code", + "execution_count": 60, + "id": "e53317fc-8e6b-4dc3-89ca-28d6be04b98a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "--- Simulate a stream event ingestion of the hourly stats df ---\n", + " driver_id event_timestamp created conv_rate \\\n", + "0 1001 2024-09-09 08:52:33.038542 2024-09-09 08:52:33.038547 1.0 \n", + "\n", + " acc_rate avg_daily_trips \n", + "0 1.0 1000 \n", + "Failed to run `store.push`: \n", + "\n", + "--- Online features again with updated values from a stream push---\n", + "Failed to run `store.get_online_features`: Permission error:\n", + "Permission admin_permission denied execution of ['READ_ONLINE'] to FeatureView:driver_hourly_stats: Requires roles ['store_admin']\n" + ] + } + ], + "source": [ + "print(\"\\n--- Simulate a stream event ingestion of the hourly stats df ---\")\n", + "event_df = pd.DataFrame.from_dict(\n", + " {\n", + " \"driver_id\": [1001],\n", + " \"event_timestamp\": [\n", + " datetime.now(),\n", + " ],\n", + " \"created\": [\n", + " datetime.now(),\n", + " ],\n", + " \"conv_rate\": [1.0],\n", + " \"acc_rate\": [1.0],\n", + " \"avg_daily_trips\": [1000],\n", + " }\n", + ")\n", + "print(event_df)\n", + "try:\n", + " store.push(\"driver_stats_push_source\", event_df, to=PushMode.ONLINE_AND_OFFLINE)\n", + "except Exception as e:\n", + " print(f\"Failed to run `store.push`: {e}\") \n", + "\n", + 
"print(\"\\n--- Online features again with updated values from a stream push---\")\n", + "fetch_online_features(store, source=\"push\")" + ] + }, + { + "cell_type": "markdown", + "id": "5709f71b-ddff-4048-9db1-98d4090326e1", + "metadata": {}, + "source": [ + "**Note** If you see the following error, it is likely due to the issue [#4392: Remote registry client does not map application errors](https://github.com/feast-dev/feast/issues/4392):\n", + "```\n", + "Feature view driver_hourly_stats_fresh does not exist in project rbac\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "573d9e29-4ba8-41f4-b6a1-82a24d4550b5", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/rbac-local/client/feature_store.yaml b/examples/rbac-local/client/feature_store.yaml new file mode 100644 index 0000000000..d428adf671 --- /dev/null +++ b/examples/rbac-local/client/feature_store.yaml @@ -0,0 +1,12 @@ +entity_key_serialization_version: 2 +offline_store: + host: localhost + port: 8815 + type: remote +online_store: + path: http://localhost:6566 + type: remote +project: rbac +registry: + path: localhost:6570 + registry_type: remote diff --git a/examples/rbac-remote/README.md b/examples/rbac-remote/README.md new file mode 100644 index 0000000000..118800db55 --- /dev/null +++ b/examples/rbac-remote/README.md @@ -0,0 +1,171 @@ +# Feast Deployment with RBAC + +## Demo Summary +This demo showcases how to enable Role-Based Access Control (RBAC) for Feast using Kubernetes or [OIDC](https://openid.net/developers/how-connect-works/) Authentication type. +The demo steps involve deploying server components (registry, offline, online) and client examples within a Kubernetes environment. +The goal is to ensure secure access control based on user roles and permissions. For understanding the Feast RBAC framework +Please read these reference documents. +- [RBAC Architecture](https://docs.feast.dev/v/master/getting-started/architecture/rbac) +- [RBAC Permission](https://docs.feast.dev/v/master/getting-started/concepts/permission). +- [RBAC Authorization Manager](https://docs.feast.dev/v/master/getting-started/components/authz_manager) + +## Tools and Projects +- Kubernetes +- Feast +- PostgreSQL Database +- [Keycloak](https://www.keycloak.org) (if OIDC) + +## Application Environment + +This demo contains the following components: + +1. Feast Remote Server components (online, offline, registry). +2. Feast Remote Client RBAC example. +3. Yaml Configuration and installation related scripts files. + +![demo.jpg](demo.jpg) + +## Setup Instructions + +The application works with Kubernetes or OpenShift and the instructions assume that you are using a Kubernetes or OpenShift cluster. + +### Prerequisites + +1. Kubernetes Cluster and Kubernetes CLI (kubectl). +2. Helm: Ensure you have Helm installed for deploying the Feast components. +3. Python environment. +4. Feast CLI latest version. + +## 1. 
## 1. Prerequisite Steps + + - **Step 1: Create the Feast project with PostgreSQL.** + + * Install PostgreSQL on your Kubernetes cluster. If you are using OpenShift, you can install it using the [OpenShift Template](https://github.com/RHEcosystemAppEng/feast-workshop-team-share/tree/main/feast_postgres#1-install-postgresql-on-openshift-using-openshift-template). + * Port forward the PostgreSQL database to your local machine. Since we are setting up the Feast project locally using the Feast CLI, we need to port forward PostgreSQL: ``` kubectl port-forward svc/postgresql 5432:5432``` + * Create a feature repository/project using the CLI with PostgreSQL. See the instructions for more details [here](https://docs.feast.dev/reference/offline-stores/postgres#getting-started). For this (local) example setup, we create a project named `server` using these settings for the [feature_store.yaml](server/feature_repo/feature_store.yaml). + +## 2. Authorization Setup + ### A. Kubernetes Authorization - **Step 1: Create Remote Configuration Files** - Set the auth type to `kubernetes` in the respective `feature_store` files + + ```yaml + auth: + type: kubernetes + ``` + - For each server, a feature store YAML file can be created, for example, like the ones below: + + **Registry Server:** [feature_store_registry.yaml](server/k8s/feature_store_registry.yaml) + + **Offline Server:** [feature_store_offline.yaml](server/k8s/feature_store_offline.yaml) + + **Online Server:** [feature_store_online.yaml](server/k8s/feature_store_online.yaml) + +- **Step 2: Deploy the Server Components** - Run the installation script. The setup script deploys the server components after the user's confirmation; enter `k8s` for the Kubernetes authentication deployment. The script deploys all the components in the `feast-dev` namespace. + + ```sh + ./install_feast.sh + ``` + +### B. OIDC Authorization - **Step 1: Setup Keycloak** - See the documentation [here](https://www.keycloak.org/getting-started/getting-started-kube) and install Keycloak. - Create a new realm with the name `feast-rbac` from the admin console. - Under the `feast-rbac` realm, create a new client with the name `feast-client`. - Generate the secret for the `feast-client`. - **Step 2: Create the Server Feature Store Files** - Set the auth type to `oidc` in the respective `feature_store` files + + ```yaml + auth: + type: oidc + client_id: _CLIENT_ID__ + auth_discovery_url: _OIDC_SERVER_URL_/realms/feast-rbac/.well-known/openid-configuration + ``` + - For each server, a feature store YAML file can be created, for example, like the ones below: + + **Registry Server:** [feature_store_registry.yaml](server/oidc/feature_store_registry.yaml) + + **Offline Server:** [feature_store_offline.yaml](server/oidc/feature_store_offline.yaml) + + **Online Server:** [feature_store_online.yaml](server/oidc/feature_store_online.yaml) + +- **Step 3: Deploy the Server Components** - Run the installation script. Enter `oidc` for the Keycloak authentication deployment. The script deploys all of the components in the `feast-dev` namespace. + + ```sh + ./install_feast.sh + ``` + +## 3. Client Setup + ### A. Kubernetes Authorization - **Step 1: Create the Client Feature Store YAML** - Set up the client feature store with remote connection details for the registry, online, and offline store with auth type `kubernetes`. See the client remote settings example here: [feature_store.yaml](client/k8s/feature_repo/feature_store.yaml). A minimal smoke test for a client configured this way is sketched below.
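Before deploying the client pods, you can optionally smoke-test a client configured this way from your own machine (an illustrative check, not one of the example files; it assumes the port-forwards from section 6 below and a service-account token exported in `LOCAL_K8S_TOKEN`, the same variable the bundled `test.py` uses):

```python
import os

from feast import FeatureStore

# Token of the client's service account; inside the cluster, test.py leaves
# this empty and the pod's mounted token is used instead.
os.environ["LOCAL_K8S_TOKEN"] = "<service-account-token>"

# Directory containing the client feature_store.yaml shown above.
store = FeatureStore(repo_path="client/k8s/feature_repo")

# Any registry call exercises the authenticated path; a permission error
# here means the role bindings are not set up as expected.
print([fv.name for fv in store.list_feature_views()])
```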
- **Step 2: Deploy the Client Examples** + - As an example, we created 3 different users: 1. [admin_user](client/k8s/admin_user_resources.yaml), 2. [readonly_user](client/k8s/readonly_user_resources.yaml), and 3. [unauthorized_user](client/k8s/unauthorized_user_resources.yaml). + - Each user is assigned their own service account and roles, as shown in the table below. + ##### Roles and Permissions for Examples (Admin and User) + | **User** | **Service Account** | **Roles** | **Permission** | **Feast Resources** | **Actions** | + |-----------------|----------------------------|------------------|--------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------| + | admin | feast-admin-sa | feast-admin-role | feast_admin_permission | FeatureView, OnDemandFeatureView, BatchFeatureView, StreamFeatureView, Entity, FeatureService, DataSource, ValidationReference, SavedDataset, Permission | CREATE, DESCRIBE, UPDATE, DELETE, READ_ONLINE, READ_OFFLINE, WRITE_ONLINE, WRITE_OFFLINE | + | user | feast-user-sa | feast-user-role | feast_user_permission | FeatureView, OnDemandFeatureView, BatchFeatureView, StreamFeatureView, Entity, FeatureService, DataSource, ValidationReference, SavedDataset, Permission | DESCRIBE, READ_OFFLINE, READ_ONLINE | + | unauthorized-user | feast-unauthorized-user-sa | | | | | + - To deploy the clients, confirm `Apply client creation examples` with `Y`. + - The deployment of the overall setup looks like this: + + ![Deployment.png](deployment.png) + +### B. OIDC Authorization - **Step 1: Create the Client Feature Store YAML** - Set up the client feature store with the remote connection details for the registry, online, and offline store. - Set the auth type to `oidc`. - Update the client secret (and, if required, any other settings) in the client-side `feature_store.yaml`, as shown below. + ``` + auth_discovery_url: https://keycloak-feast-dev.apps.com/realms/feast-rbac/.well-known/openid-configuration + client_id: feast-client + client_secret: update-this-value + username: ${FEAST_USERNAME} + password: ${FEAST_PASSWORD} + ``` + - See the client remote settings example here: [feature_store.yaml](client/oidc/feature_repo/feature_store.yaml) +- **Step 2: Create the Roles and Users** - Under the `feast-client` client, create the two roles `feast-admin-role` and `feast-user-role`. - Under the `feast-rbac` realm, create 3 different users: `admin-user`, `readonly-user`, and `unauthorized-user`. Assign the password `feast` to each user. - Map the roles to users: select the `admin-user`, go to `Role mapping`, and assign the `feast-admin-role`. Select the `readonly-user` and assign the `feast-user-role`. For the `unauthorized-user`, do not assign any roles. +- **Step 3: Deploy the Client Examples** - For OIDC, similar to the k8s examples, create different deployments and add the username and password as environment variables: 1. [admin_user](client/oidc/admin_user_resources.yaml), 2. [readonly_user](client/oidc/readonly_user_resources.yaml), and 3. [unauthorized_user](client/oidc/unauthorized_user_resources.yaml). A sketch of how these credentials reach the client configuration follows below. + - To deploy the clients, confirm `Apply client creation examples` with `Y`.
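Feast resolves `${FEAST_USERNAME}`-style placeholders in `feature_store.yaml` from the process environment, so the client only needs those variables set before the store is created. A minimal sketch (illustrative, not part of the example files; the credentials are the demo users created in Step 2):

```python
import os

from feast import FeatureStore

# The OIDC client deployments inject these via the pod spec; for a local
# run, export them so the ${FEAST_USERNAME} and ${FEAST_PASSWORD}
# placeholders in feature_store.yaml can be resolved.
os.environ.setdefault("FEAST_USERNAME", "readonly-user")
os.environ.setdefault("FEAST_PASSWORD", "feast")

store = FeatureStore(repo_path="client/oidc/feature_repo")
print([fv.name for fv in store.list_feature_views()])
```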
## 4. Permissions Management +- **Step 1: Apply the Permissions** + - See the code example in [permissions_apply.py](server/feature_repo/permissions_apply.py) for applying the permissions in both the Kubernetes and OIDC setups. + - The `install_feast.sh` script can also apply the permissions from the pod after the user's confirmation: `Apply 'feast apply' in the remote registry? (y/n)`. +- **Step 2: Validate the Permissions** + - Use the Feast CLI to validate the permissions with the command `feast permissions list`; for more details, use `feast permissions list -v`. Additionally, there are other commands such as `feast permissions check / describe / list-roles`. +## 5. Validating the Permissions/RBAC Results +- **Run the Examples** + - As outlined in the [test.py](client/k8s/feature_repo/test.py) script, the example attempts to fetch historical features, perform materialization, fetch online features, and push to the online/offline store, based on user roles. + - The `admin-user` can perform all actions on all objects. + - The `readonly-user` can only read or query all objects. + - The `unauthorized-user` should not be able to read or write any resources, as no role is defined for this user. + - From each user's pod, run the example with `python feature_repo/test.py`. A condensed sketch of this per-role check follows the cleanup script below. + +## 6. Local Testing and Cleanup +- **Local Testing** + - For local testing, port forward the PostgreSQL service and the Feast servers with the commands below: + ``` + kubectl port-forward svc/postgresql 5432:5432 + kubectl port-forward svc/feast-offline-server-feast-feature-server 8815:80 + kubectl port-forward svc/feast-registry-server-feast-feature-server 6570:80 + kubectl port-forward svc/feast-feature-server 6566:80 + ``` + - When testing in Kubernetes, users can set the environment variable `LOCAL_K8S_TOKEN` in each example. The token can be obtained from the service account. +- **Cleanup** + - Run the command + - ```./cleanup_feast.sh``` \ No newline at end of file diff --git a/examples/rbac-remote/cleanup_feast.sh b/examples/rbac-remote/cleanup_feast.sh new file mode 100755 index 0000000000..18acf6727c --- /dev/null +++ b/examples/rbac-remote/cleanup_feast.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +DEFAULT_HELM_RELEASES=("feast-feature-server" "feast-offline-server" "feast-registry-server") +NAMESPACE="feast-dev" + +HELM_RELEASES=(${1:-${DEFAULT_HELM_RELEASES[@]}}) +NAMESPACE=${2:-$NAMESPACE} + +echo "Deleting Helm releases..." +for release in "${HELM_RELEASES[@]}"; do + helm uninstall $release -n $NAMESPACE +done + +echo "Deleting Kubernetes roles, role bindings, and service accounts for clients" +kubectl delete -f client/k8s/admin_user_resources.yaml +kubectl delete -f client/k8s/readonly_user_resources.yaml +kubectl delete -f client/k8s/unauthorized_user_resources.yaml +kubectl delete -f client/oidc/admin_user_resources.yaml +kubectl delete -f client/oidc/readonly_user_resources.yaml +kubectl delete -f client/oidc/unauthorized_user_resources.yaml +kubectl delete -f server/k8s/server_resources.yaml +kubectl delete configmap client-feature-repo-config + +echo "Cleanup completed."
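The condensed per-role check referenced in section 5, as a standalone sketch (a hypothetical helper, not one of the example files; it assumes the client stores and feature views defined in this example):

```python
from feast import FeatureStore


def check_read_online(repo_path: str) -> bool:
    """Return True if the current identity may read online features.

    Expected outcomes per section 5: admin-user and readonly-user succeed;
    unauthorized-user fails with a permission error.
    """
    store = FeatureStore(repo_path=repo_path)
    try:
        store.get_online_features(
            features=["driver_hourly_stats:acc_rate"],
            entity_rows=[{"driver_id": 1001}],
        ).to_dict()
        return True
    except Exception as e:  # Feast surfaces RBAC denials as errors
        print(f"READ_ONLINE denied: {e}")
        return False


if __name__ == "__main__":
    check_read_online("/feature_repo")
```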
diff --git a/examples/rbac-remote/client/k8s/admin_user_resources.yaml b/examples/rbac-remote/client/k8s/admin_user_resources.yaml new file mode 100644 index 0000000000..d5df8bcbf2 --- /dev/null +++ b/examples/rbac-remote/client/k8s/admin_user_resources.yaml @@ -0,0 +1,56 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: feast-admin-sa + namespace: feast-dev +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: feast-admin-role + namespace: feast-dev +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: feast-admin-rolebinding + namespace: feast-dev +subjects: + - kind: ServiceAccount + name: feast-admin-sa + namespace: feast-dev +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: feast-admin-role +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: client-admin-user + namespace: feast-dev + labels: + app: client-admin +spec: + replicas: 1 + selector: + matchLabels: + app: client-admin + template: + metadata: + labels: + app: client-admin + spec: + serviceAccountName: feast-admin-sa + containers: + - name: client-admin-container + image: feastdev/feature-server:latest + imagePullPolicy: Always + command: ["sleep", "infinity"] + volumeMounts: + - name: client-feature-repo-config + mountPath: /feature_repo + volumes: + - name: client-feature-repo-config + configMap: + name: client-feature-repo-config diff --git a/examples/rbac-remote/client/k8s/feature_repo/feature_store.yaml b/examples/rbac-remote/client/k8s/feature_repo/feature_store.yaml new file mode 100644 index 0000000000..d316005098 --- /dev/null +++ b/examples/rbac-remote/client/k8s/feature_repo/feature_store.yaml @@ -0,0 +1,14 @@ +project: server +registry: + registry_type: remote + path: feast-registry-server-feast-feature-server.feast-dev.svc.cluster.local:80 +offline_store: + type: remote + host: feast-offline-server-feast-feature-server.feast-dev.svc.cluster.local + port: 80 +online_store: + type: remote + path: http://feast-feature-server.feast-dev.svc.cluster.local:80 +auth: + type: kubernetes
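As an aside, the client `feature_store.yaml` above is parsed directly into a `RepoConfig`, so the same client can be configured in code. A hypothetical equivalent of that YAML (field names mirror the YAML keys; this snippet is illustrative and assumes `RepoConfig` accepts the same dictionaries the YAML loader produces):

```python
from feast import FeatureStore
from feast.repo_config import RepoConfig

# Mirrors client/k8s/feature_repo/feature_store.yaml above.
config = RepoConfig(
    project="server",
    provider="local",
    registry={
        "registry_type": "remote",
        "path": "feast-registry-server-feast-feature-server.feast-dev.svc.cluster.local:80",
    },
    offline_store={
        "type": "remote",
        "host": "feast-offline-server-feast-feature-server.feast-dev.svc.cluster.local",
        "port": 80,
    },
    online_store={
        "type": "remote",
        "path": "http://feast-feature-server.feast-dev.svc.cluster.local:80",
    },
    auth={"type": "kubernetes"},
    entity_key_serialization_version=2,
)
store = FeatureStore(config=config)
```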
diff --git a/examples/rbac-remote/client/k8s/feature_repo/test.py b/examples/rbac-remote/client/k8s/feature_repo/test.py new file mode 100644 index 0000000000..6e1480bc94 --- /dev/null +++ b/examples/rbac-remote/client/k8s/feature_repo/test.py @@ -0,0 +1,140 @@ +import os +from datetime import datetime + +import pandas as pd +from feast import FeatureStore +from feast.data_source import PushMode + + +def run_demo(): + try: + # Empty in-cluster: the pod's mounted service-account token is used. + # Set it only when running locally against port-forwarded services. + os.environ["LOCAL_K8S_TOKEN"] = "" + + store = FeatureStore(repo_path="/feature_repo") + + print("\n--- Historical features for training ---") + fetch_historical_features_entity_df(store, for_batch_scoring=False) + + print("\n--- Historical features for batch scoring ---") + fetch_historical_features_entity_df(store, for_batch_scoring=True) + + try: + print("\n--- Load features into online store/materialize_incremental ---") + feature_views = store.list_feature_views() + if not feature_views: + raise PermissionError("No access to feature-views or no feature-views available.") + store.materialize_incremental(end_date=datetime.now()) + except PermissionError as pe: + print(f"Permission error: {pe}") + except Exception as e: + print(f"An error occurred while performing materialize_incremental: {e}") + + print("\n--- Online features ---") + fetch_online_features(store) + + print("\n--- Online features retrieved (instead) through a feature service ---") + fetch_online_features(store, source="feature_service") + + print( + "\n--- Online features retrieved (using feature service v3, which uses a feature view with a push source) ---" + ) + fetch_online_features(store, source="push") + + print("\n--- Simulate a stream event ingestion of the hourly stats df ---") + event_df = pd.DataFrame.from_dict( + { + "driver_id": [1001], + "event_timestamp": [datetime.now()], + "created": [datetime.now()], + "conv_rate": [1.0], + "acc_rate": [1.0], + "avg_daily_trips": [1000], + } + ) + store.push("driver_stats_push_source", event_df, to=PushMode.ONLINE_AND_OFFLINE) + + print("\n--- Online features again with updated values from a stream push ---") + fetch_online_features(store, source="push") + + except Exception as e: + print(f"An error occurred: {e}") + + +def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: bool): + try: + entity_df = pd.DataFrame.from_dict( + { + "driver_id": [1001, 1002, 1003], + "event_timestamp": [ + datetime(2021, 4, 12, 10, 59, 42), + datetime(2021, 4, 12, 8, 12, 10), + datetime(2021, 4, 12, 16, 40, 26), + ], + "label_driver_reported_satisfaction": [1, 5, 3], + # values we're using for an on-demand transformation + "val_to_add": [1, 2, 3], + "val_to_add_2": [10, 20, 30], + } + ) + if for_batch_scoring: + entity_df["event_timestamp"] = pd.to_datetime("now", utc=True) + + training_df = store.get_historical_features( + entity_df=entity_df, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ], + ).to_df() + print(training_df.head()) + + except Exception as e: + print(f"An error occurred while fetching historical features: {e}") + + +def fetch_online_features(store, source: str = ""): + try: + entity_rows = [ + # {join_key: entity_value} + { + "driver_id": 1001, + "val_to_add": 1000, + "val_to_add_2": 2000, + }, + { + "driver_id": 1002, + "val_to_add": 1001, + "val_to_add_2": 2002, + }, + ] + if source == "feature_service": + features_to_fetch = store.get_feature_service("driver_activity_v1") + elif source == "push": + features_to_fetch = store.get_feature_service("driver_activity_v3") + else: + features_to_fetch = [ + "driver_hourly_stats:acc_rate", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ] + returned_features = store.get_online_features( + features=features_to_fetch, + entity_rows=entity_rows, + ).to_dict() + for key, value in sorted(returned_features.items()): + print(key, " : ", value) + + except Exception as e: + print(f"An error occurred while fetching online features: {e}") + + +if __name__ == "__main__": + try: + run_demo() + except Exception as e: + print(f"An error occurred in the main execution: {e}") diff --git a/examples/rbac-remote/client/k8s/readonly_user_resources.yaml b/examples/rbac-remote/client/k8s/readonly_user_resources.yaml new file mode 100644 index 0000000000..c9094e7f2f --- /dev/null +++ b/examples/rbac-remote/client/k8s/readonly_user_resources.yaml @@ -0,0 +1,57 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: feast-user-sa + namespace: feast-dev +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: feast-user-role + namespace: feast-dev +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: feast-user-rolebinding + namespace: feast-dev +subjects: + - kind: ServiceAccount + name: feast-user-sa + namespace: feast-dev +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role
name: feast-user-role +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: client-readonly-user + namespace: feast-dev + labels: + app: client-user +spec: + replicas: 1 + selector: + matchLabels: + app: client-user + template: + metadata: + labels: + app: client-user + spec: + serviceAccountName: feast-user-sa + containers: + - name: client-user-container + image: feastdev/feature-server:latest + imagePullPolicy: Always + command: ["sleep", "infinity"] + volumeMounts: + - name: client-feature-repo-config + mountPath: /feature_repo + volumes: + - name: client-feature-repo-config + configMap: + name: client-feature-repo-config + diff --git a/examples/rbac-remote/client/k8s/unauthorized_user_resources.yaml b/examples/rbac-remote/client/k8s/unauthorized_user_resources.yaml new file mode 100644 index 0000000000..5068c94fd9 --- /dev/null +++ b/examples/rbac-remote/client/k8s/unauthorized_user_resources.yaml @@ -0,0 +1,36 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: feast-unauthorized-user-sa + namespace: feast-dev +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: client-unauthorized-user + namespace: feast-dev + labels: + app: client-unauthorized-user +spec: + replicas: 1 + selector: + matchLabels: + app: client-unauthorized-user + template: + metadata: + labels: + app: client-unauthorized-user + spec: + serviceAccountName: feast-unauthorized-user-sa + containers: + - name: client-unauthorized-user-container + image: feastdev/feature-server:latest + imagePullPolicy: Always + command: ["sleep", "infinity"] + volumeMounts: + - name: client-feature-repo-config + mountPath: /feature_repo + volumes: + - name: client-feature-repo-config + configMap: + name: client-feature-repo-config diff --git a/examples/rbac-remote/client/oidc/admin_user_resources.yaml b/examples/rbac-remote/client/oidc/admin_user_resources.yaml new file mode 100644 index 0000000000..7843ce3c9d --- /dev/null +++ b/examples/rbac-remote/client/oidc/admin_user_resources.yaml @@ -0,0 +1,34 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: client-admin-user + namespace: feast-dev + labels: + app: client-admin +spec: + replicas: 1 + selector: + matchLabels: + app: client-admin + template: + metadata: + labels: + app: client-admin + spec: + containers: + - name: client-admin-container + image: feastdev/feature-server:latest + imagePullPolicy: Always + command: ["sleep", "infinity"] + env: + - name: FEAST_USERNAME + value: admin-user + - name: FEAST_PASSWORD + value: feast + volumeMounts: + - name: client-feature-repo-config + mountPath: /feature_repo + volumes: + - name: client-feature-repo-config + configMap: + name: client-feature-repo-config diff --git a/examples/rbac-remote/client/oidc/feature_repo/feature_store.yaml b/examples/rbac-remote/client/oidc/feature_repo/feature_store.yaml new file mode 100644 index 0000000000..1454e16df9 --- /dev/null +++ b/examples/rbac-remote/client/oidc/feature_repo/feature_store.yaml @@ -0,0 +1,19 @@ +project: server +registry: + registry_type: remote + path: feast-registry-server-feast-feature-server.feast-dev.svc.cluster.local:80 +offline_store: + type: remote + host: feast-offline-server-feast-feature-server.feast-dev.svc.cluster.local + port: 80 +online_store: + type: remote + path: http://feast-feature-server.feast-dev.svc.cluster.local:80 +auth: + type: oidc + auth_discovery_url: https://keycloak-feast-dev.apps.com/realms/feast-rbac/.well-known/openid-configuration + client_id: feast-client + client_secret: update-this-value + username: 
${FEAST_USERNAME} + password: ${FEAST_PASSWORD} +entity_key_serialization_version: 2 diff --git a/examples/rbac-remote/client/oidc/feature_repo/test.py b/examples/rbac-remote/client/oidc/feature_repo/test.py new file mode 100644 index 0000000000..6e1480bc94 --- /dev/null +++ b/examples/rbac-remote/client/oidc/feature_repo/test.py @@ -0,0 +1,140 @@ +import os +from datetime import datetime + +import pandas as pd +from feast import FeatureStore +from feast.data_source import PushMode + + +def run_demo(): + try: + os.environ["LOCAL_K8S_TOKEN"] = "" + + store = FeatureStore(repo_path="/feature_repo") + + print("\n--- Historical features for training ---") + fetch_historical_features_entity_df(store, for_batch_scoring=False) + + print("\n--- Historical features for batch scoring ---") + fetch_historical_features_entity_df(store, for_batch_scoring=True) + + try: + print("\n--- Load features into online store/materialize_incremental ---") + feature_views = store.list_feature_views() + if not feature_views: + raise PermissionError("No access to feature-views or no feature-views available.") + store.materialize_incremental(end_date=datetime.now()) + except PermissionError as pe: + print(f"Permission error: {pe}") + except Exception as e: + print(f"An error occurred while performing materialize_incremental: {e}") + + print("\n--- Online features ---") + fetch_online_features(store) + + print("\n--- Online features retrieved (instead) through a feature service ---") + fetch_online_features(store, source="feature_service") + + print( + "\n--- Online features retrieved (using feature service v3, which uses a feature view with a push source) ---" + ) + fetch_online_features(store, source="push") + + print("\n--- Simulate a stream event ingestion of the hourly stats df ---") + event_df = pd.DataFrame.from_dict( + { + "driver_id": [1001], + "event_timestamp": [datetime.now()], + "created": [datetime.now()], + "conv_rate": [1.0], + "acc_rate": [1.0], + "avg_daily_trips": [1000], + } + ) + store.push("driver_stats_push_source", event_df, to=PushMode.ONLINE_AND_OFFLINE) + + print("\n--- Online features again with updated values from a stream push ---") + fetch_online_features(store, source="push") + + except Exception as e: + print(f"An error occurred: {e}") + + +def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: bool): + try: + entity_df = pd.DataFrame.from_dict( + { + "driver_id": [1001, 1002, 1003], + "event_timestamp": [ + datetime(2021, 4, 12, 10, 59, 42), + datetime(2021, 4, 12, 8, 12, 10), + datetime(2021, 4, 12, 16, 40, 26), + ], + "label_driver_reported_satisfaction": [1, 5, 3], + # values we're using for an on-demand transformation + "val_to_add": [1, 2, 3], + "val_to_add_2": [10, 20, 30], + } + ) + if for_batch_scoring: + entity_df["event_timestamp"] = pd.to_datetime("now", utc=True) + + training_df = store.get_historical_features( + entity_df=entity_df, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ], + ).to_df() + print(training_df.head()) + + except Exception as e: + print(f"An error occurred while fetching historical features: {e}") + + +def fetch_online_features(store, source: str = ""): + try: + entity_rows = [ + # {join_key: entity_value} + { + "driver_id": 1001, + "val_to_add": 1000, + "val_to_add_2": 2000, + }, + { + "driver_id": 1002, + "val_to_add": 1001, + "val_to_add_2": 2002, + }, + ] + if source == "feature_service":
features_to_fetch = store.get_feature_service("driver_activity_v1") + elif source == "push": + features_to_fetch = store.get_feature_service("driver_activity_v3") + else: + features_to_fetch = [ + "driver_hourly_stats:acc_rate", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ] + returned_features = store.get_online_features( + features=features_to_fetch, + entity_rows=entity_rows, + ).to_dict() + for key, value in sorted(returned_features.items()): + print(key, " : ", value) + + except Exception as e: + print(f"An error occurred while fetching online features: {e}") + + +if __name__ == "__main__": + try: + run_demo() + except Exception as e: + print(f"An error occurred in the main execution: {e}") diff --git a/examples/rbac-remote/client/oidc/readonly_user_resources.yaml b/examples/rbac-remote/client/oidc/readonly_user_resources.yaml new file mode 100644 index 0000000000..c43137bfba --- /dev/null +++ b/examples/rbac-remote/client/oidc/readonly_user_resources.yaml @@ -0,0 +1,34 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: client-readonly-user + namespace: feast-dev + labels: + app: client-user +spec: + replicas: 1 + selector: + matchLabels: + app: client-user + template: + metadata: + labels: + app: client-user + spec: + containers: + - name: client-user-container + image: feastdev/feature-server:latest + imagePullPolicy: Always + command: ["sleep", "infinity"] + env: + - name: FEAST_USERNAME + value: readonly-user + - name: FEAST_PASSWORD + value: feast + volumeMounts: + - name: client-feature-repo-config + mountPath: /feature_repo + volumes: + - name: client-feature-repo-config + configMap: + name: client-feature-repo-config diff --git a/examples/rbac-remote/client/oidc/unauthorized_user_resources.yaml b/examples/rbac-remote/client/oidc/unauthorized_user_resources.yaml new file mode 100644 index 0000000000..f99bb3e987 --- /dev/null +++ b/examples/rbac-remote/client/oidc/unauthorized_user_resources.yaml @@ -0,0 +1,35 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: client-unauthorized-user + namespace: feast-dev + labels: + app: client-unauthorized-user +spec: + replicas: 1 + selector: + matchLabels: + app: client-unauthorized-user + template: + metadata: + labels: + app: client-unauthorized-user + spec: + containers: + - name: client-unauthorized-user-container + image: feastdev/feature-server:latest + imagePullPolicy: Always + command: ["sleep", "infinity"] + env: + - name: FEAST_USERNAME + value: unauthorized-user + - name: FEAST_PASSWORD + value: feast + volumeMounts: + - name: client-feature-repo-config + mountPath: /feature_repo + volumes: + - name: client-feature-repo-config + configMap: + name: client-feature-repo-config + diff --git a/examples/rbac-remote/demo.jpg b/examples/rbac-remote/demo.jpg new file mode 100644 index 0000000000..718e49dde6 Binary files /dev/null and b/examples/rbac-remote/demo.jpg differ diff --git a/examples/rbac-remote/deployment.png b/examples/rbac-remote/deployment.png new file mode 100644 index 0000000000..9b9a0d7b2a Binary files /dev/null and b/examples/rbac-remote/deployment.png differ diff --git a/examples/rbac-remote/install_feast.sh b/examples/rbac-remote/install_feast.sh new file mode 100755 index 0000000000..b87d44b335 --- /dev/null +++ b/examples/rbac-remote/install_feast.sh @@ -0,0 +1,109 @@ +#!/bin/bash + +# Specify the RBAC type (folder) +read -p "Enter RBAC type (e.g., k8s or oidc): " FOLDER + +echo "You have selected the RBAC type: $FOLDER"
# feature_store file names for the servers +OFFLINE_YAML="feature_store_offline.yaml" +ONLINE_YAML="feature_store_online.yaml" +REGISTRY_YAML="feature_store_registry.yaml" + +# Helm chart path and service account +HELM_CHART_PATH="../../infra/charts/feast-feature-server" +SERVICE_ACCOUNT_NAME="feast-sa" +CLIENT_REPO_DIR="client/$FOLDER/feature_repo" + +# Function to check if a file exists and encode it to base64 +encode_to_base64() { + local file_path=$1 + if [ ! -f "$file_path" ]; then + echo "Error: File not found at $file_path" + exit 1 + fi + base64 < "$file_path" +} + +FEATURE_STORE_OFFLINE_YAML_PATH="server/$FOLDER/$OFFLINE_YAML" +FEATURE_STORE_ONLINE_YAML_PATH="server/$FOLDER/$ONLINE_YAML" +FEATURE_STORE_REGISTRY_YAML_PATH="server/$FOLDER/$REGISTRY_YAML" + +# Encode the YAML files to base64 +FEATURE_STORE_OFFLINE_YAML_BASE64=$(encode_to_base64 "$FEATURE_STORE_OFFLINE_YAML_PATH") +FEATURE_STORE_ONLINE_YAML_BASE64=$(encode_to_base64 "$FEATURE_STORE_ONLINE_YAML_PATH") +FEATURE_STORE_REGISTRY_YAML_BASE64=$(encode_to_base64 "$FEATURE_STORE_REGISTRY_YAML_PATH") + +# Check if base64 encoding was successful +if [ -z "$FEATURE_STORE_OFFLINE_YAML_BASE64" ] || [ -z "$FEATURE_STORE_ONLINE_YAML_BASE64" ] || [ -z "$FEATURE_STORE_REGISTRY_YAML_BASE64" ]; then + echo "Error: Failed to base64 encode one or more feature_store.yaml files in folder $FOLDER." + exit 1 +fi + +# Upgrade or install Feast components for the specified folder +read -p "Deploy Feast server components for $FOLDER? (y/n) " confirm_server +if [[ $confirm_server == [yY] ]]; then + # Apply the server service accounts and role bindings + kubectl apply -f "server/k8s/server_resources.yaml" + + # Upgrade or install Feast components + echo "Upgrading or installing Feast server components for $FOLDER" + + helm upgrade --install feast-registry-server $HELM_CHART_PATH \ --set feast_mode=registry \ --set feature_store_yaml_base64=$FEATURE_STORE_REGISTRY_YAML_BASE64 \ --set serviceAccount.name=$SERVICE_ACCOUNT_NAME + + helm upgrade --install feast-feature-server $HELM_CHART_PATH \ --set feature_store_yaml_base64=$FEATURE_STORE_ONLINE_YAML_BASE64 \ --set serviceAccount.name=$SERVICE_ACCOUNT_NAME + + helm upgrade --install feast-offline-server $HELM_CHART_PATH \ --set feast_mode=offline \ --set feature_store_yaml_base64=$FEATURE_STORE_OFFLINE_YAML_BASE64 \ --set serviceAccount.name=$SERVICE_ACCOUNT_NAME + + echo "Server components deployed for $FOLDER." +else + echo "Server components not deployed for $FOLDER." +fi + +read -p "Apply client creation examples? (y/n) " confirm_clients +if [[ $confirm_clients == [yY] ]]; then + kubectl delete configmap client-feature-repo-config --ignore-not-found + kubectl create configmap client-feature-repo-config --from-file=$CLIENT_REPO_DIR + + kubectl apply -f "client/$FOLDER/admin_user_resources.yaml" + kubectl apply -f "client/$FOLDER/readonly_user_resources.yaml" + kubectl apply -f "client/$FOLDER/unauthorized_user_resources.yaml" + + echo "Client resources applied." +else + echo "Client resources not applied." +fi + +read -p "Apply 'feast apply' in the remote registry? (y/n) " confirm_apply
+if [[ $confirm_apply == [yY] ]]; then + + POD_NAME=$(kubectl get pods --no-headers -o custom-columns=":metadata.name" | grep '^feast-registry-server-feast-feature-server') + + if [ -z "$POD_NAME" ]; then + echo "No pod found with the prefix feast-registry-server-feast-feature-server" + exit 1 + fi + + LOCAL_DIR="./server/feature_repo/" + REMOTE_DIR="/app/" + + echo "Copying files from $LOCAL_DIR to $POD_NAME:$REMOTE_DIR" + kubectl cp $LOCAL_DIR $POD_NAME:$REMOTE_DIR + + echo "Files copied successfully!" + + kubectl exec $POD_NAME -- feast -c feature_repo apply + echo "'feast apply' executed successfully for the remote registry." +else + echo "'feast apply' not performed." +fi + +echo "Setup completed."
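The Helm chart consumes the configuration through the `feature_store_yaml_base64` value, which is what `encode_to_base64` produces above. A quick way to generate (or verify) that value outside the script, as a standalone sketch (not part of the example files; the path is one of the server configs from this example):

```python
import base64
from pathlib import Path


def feature_store_yaml_base64(path: str) -> str:
    """Base64-encode a feature_store.yaml for the chart's feature_store_yaml_base64 value."""
    return base64.b64encode(Path(path).read_bytes()).decode("ascii")


if __name__ == "__main__":
    print(feature_store_yaml_base64("server/k8s/feature_store_registry.yaml"))
```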
diff --git a/examples/rbac-remote/server/feature_repo/example_repo.py b/examples/rbac-remote/server/feature_repo/example_repo.py new file mode 100644 index 0000000000..5b8105bb94 --- /dev/null +++ b/examples/rbac-remote/server/feature_repo/example_repo.py @@ -0,0 +1,130 @@ +# This is an example feature definition file + +from datetime import timedelta + +import pandas as pd + +from feast import Entity, FeatureService, FeatureView, Field, PushSource, RequestSource +from feast.infra.offline_stores.contrib.postgres_offline_store.postgres_source import PostgreSQLSource + +from feast.on_demand_feature_view import on_demand_feature_view +from feast.types import Float32, Float64, Int64 + +# Define an entity for the driver. You can think of an entity as a primary key used to +# fetch features. +driver = Entity(name="driver", join_keys=["driver_id"]) + +driver_stats_source = PostgreSQLSource( + name="driver_hourly_stats_source", + query="SELECT * FROM feast_driver_hourly_stats", + timestamp_field="event_timestamp", + created_timestamp_column="created", +) + +# The feast_driver_hourly_stats table contains sample data that includes a driver_id +# column, timestamps, and three feature columns. Here we define a Feature View that +# will allow us to serve this data to our model online. +driver_stats_fv = FeatureView( + # The unique name of this feature view. Two feature views in a single + # project cannot have the same name + name="driver_hourly_stats", + entities=[driver], + ttl=timedelta(days=1), + # The list of fields defined below acts as a schema: it is used both when + # materializing features into a store and as a reference during retrieval + # for building a training dataset or serving features + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + online=True, + source=driver_stats_source, + # Tags are user defined key/value pairs that are attached to each + # feature view + tags={"team": "driver_performance"}, +) + +# Define a request data source which encodes features / information only +# available at request time (e.g. part of the user-initiated HTTP request) +input_request = RequestSource( + name="vals_to_add", + schema=[ + Field(name="val_to_add", dtype=Int64), + Field(name="val_to_add_2", dtype=Int64), + ], +) + + +# Define an on demand feature view which can generate new features based on +# existing feature views and RequestSource features +@on_demand_feature_view( + sources=[driver_stats_fv, input_request], + schema=[ + Field(name="conv_rate_plus_val1", dtype=Float64), + Field(name="conv_rate_plus_val2", dtype=Float64), + ], +) +def transformed_conv_rate(inputs: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["conv_rate_plus_val1"] = inputs["conv_rate"] + inputs["val_to_add"] + df["conv_rate_plus_val2"] = inputs["conv_rate"] + inputs["val_to_add_2"] + return df + + +# This groups features into a model version +driver_activity_v1 = FeatureService( + name="driver_activity_v1", + features=[ + driver_stats_fv[["conv_rate"]], # Sub-selects a feature from a feature view + transformed_conv_rate, # Selects all features from the feature view + ], +) +driver_activity_v2 = FeatureService( + name="driver_activity_v2", features=[driver_stats_fv, transformed_conv_rate] +) + +# Defines a way to push data (to be available offline, online or both) into Feast. +driver_stats_push_source = PushSource( + name="driver_stats_push_source", + batch_source=driver_stats_source, +) + +# Defines a slightly modified version of the feature view from above, where the source +# has been changed to the push source. This allows fresh features to be directly pushed +# to the online store for this feature view. +driver_stats_fresh_fv = FeatureView( + name="driver_hourly_stats_fresh", + entities=[driver], + ttl=timedelta(days=1), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + online=True, + source=driver_stats_push_source, # Changed from above + tags={"team": "driver_performance"}, +) + + +# Define an on demand feature view which can generate new features based on +# existing feature views and RequestSource features +@on_demand_feature_view( + sources=[driver_stats_fresh_fv, input_request], # relies on fresh version of FV + schema=[ + Field(name="conv_rate_plus_val1", dtype=Float64), + Field(name="conv_rate_plus_val2", dtype=Float64), + ], +) +def transformed_conv_rate_fresh(inputs: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["conv_rate_plus_val1"] = inputs["conv_rate"] + inputs["val_to_add"] + df["conv_rate_plus_val2"] = inputs["conv_rate"] + inputs["val_to_add_2"] + return df + + +driver_activity_v3 = FeatureService( + name="driver_activity_v3", + features=[driver_stats_fresh_fv, transformed_conv_rate_fresh], +) diff --git a/examples/rbac-remote/server/feature_repo/feature_store.yaml b/examples/rbac-remote/server/feature_repo/feature_store.yaml new file mode 100644 index 0000000000..78b13c660b --- /dev/null +++ b/examples/rbac-remote/server/feature_repo/feature_store.yaml @@ -0,0 +1,26 @@ +project: server +provider: local +registry: + registry_type: sql + path: postgresql+psycopg://feast:feast@postgresql.feast-dev.svc.cluster.local:5432/feast + cache_ttl_seconds: 60 + sqlalchemy_config_kwargs: + echo: false + pool_pre_ping: true +online_store: + type: postgres + host: postgresql.feast-dev.svc.cluster.local + port: 5432 + database: feast + db_schema: public + user: feast + password: feast +offline_store: + type: postgres + host: postgresql.feast-dev.svc.cluster.local + port: 5432 + database: feast + db_schema:
public + user: feast + password: feast +entity_key_serialization_version: 2 diff --git a/examples/rbac-remote/server/feature_repo/permissions_apply.py b/examples/rbac-remote/server/feature_repo/permissions_apply.py new file mode 100644 index 0000000000..93bdf2ffc6 --- /dev/null +++ b/examples/rbac-remote/server/feature_repo/permissions_apply.py @@ -0,0 +1,21 @@ +from feast.feast_object import ALL_RESOURCE_TYPES +from feast.permissions.action import READ, AuthzedAction, ALL_ACTIONS +from feast.permissions.permission import Permission +from feast.permissions.policy import RoleBasedPolicy + +admin_roles = ["feast-admin-role"] +user_roles = ["feast-user-role"] + +user_perm = Permission( + name="feast_user_permission", + types=ALL_RESOURCE_TYPES, + policy=RoleBasedPolicy(roles=user_roles), + actions=[AuthzedAction.DESCRIBE] + READ +) + +admin_perm = Permission( + name="feast_admin_permission", + types=ALL_RESOURCE_TYPES, + policy=RoleBasedPolicy(roles=admin_roles), + actions=ALL_ACTIONS +) diff --git a/examples/rbac-remote/server/k8s/feature_store_offline.yaml b/examples/rbac-remote/server/k8s/feature_store_offline.yaml new file mode 100644 index 0000000000..4fc01508bd --- /dev/null +++ b/examples/rbac-remote/server/k8s/feature_store_offline.yaml @@ -0,0 +1,16 @@ +project: server +provider: local +registry: + registry_type: remote + path: feast-registry-server-feast-feature-server.feast-dev.svc.cluster.local:80 +offline_store: + type: postgres + host: postgresql.feast-dev.svc.cluster.local + port: 5432 + database: feast + db_schema: public + user: feast + password: feast +auth: + type: kubernetes +entity_key_serialization_version: 2 diff --git a/examples/rbac-remote/server/k8s/feature_store_online.yaml b/examples/rbac-remote/server/k8s/feature_store_online.yaml new file mode 100644 index 0000000000..aa167731b2 --- /dev/null +++ b/examples/rbac-remote/server/k8s/feature_store_online.yaml @@ -0,0 +1,20 @@ +project: server +provider: local +registry: + registry_type: remote + path: feast-registry-server-feast-feature-server.feast-dev.svc.cluster.local:80 +online_store: + type: postgres + host: postgresql.feast-dev.svc.cluster.local + port: 5432 + database: feast + db_schema: public + user: feast + password: feast +offline_store: + type: remote + host: feast-offline-server-feast-feature-server.feast-dev.svc.cluster.local + port: 80 +auth: + type: kubernetes +entity_key_serialization_version: 2 diff --git a/examples/rbac-remote/server/k8s/feature_store_registry.yaml b/examples/rbac-remote/server/k8s/feature_store_registry.yaml new file mode 100644 index 0000000000..579141fb01 --- /dev/null +++ b/examples/rbac-remote/server/k8s/feature_store_registry.yaml @@ -0,0 +1,12 @@ +project: server +provider: local +registry: + registry_type: sql + path: postgresql+psycopg://feast:feast@postgresql.feast-dev.svc.cluster.local:5432/feast + cache_ttl_seconds: 60 + sqlalchemy_config_kwargs: + echo: false + pool_pre_ping: true +auth: + type: kubernetes +entity_key_serialization_version: 2 diff --git a/examples/rbac-remote/server/k8s/server_resources.yaml b/examples/rbac-remote/server/k8s/server_resources.yaml new file mode 100644 index 0000000000..03e35495d6 --- /dev/null +++ b/examples/rbac-remote/server/k8s/server_resources.yaml @@ -0,0 +1,27 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: feast-sa + namespace: feast-dev +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRole +metadata: + name: feast-cluster-role +rules: + - apiGroups: ["rbac.authorization.k8s.io"] + resources: ["roles", 
"rolebindings", "clusterrolebindings"] + verbs: ["get", "list", "watch"] +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + name: feast-cluster-rolebinding +subjects: + - kind: ServiceAccount + name: feast-sa + namespace: feast-dev +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: feast-cluster-role diff --git a/examples/rbac-remote/server/oidc/feature_store_offline.yaml b/examples/rbac-remote/server/oidc/feature_store_offline.yaml new file mode 100644 index 0000000000..8ed4cc1ff3 --- /dev/null +++ b/examples/rbac-remote/server/oidc/feature_store_offline.yaml @@ -0,0 +1,18 @@ +project: server +provider: local +registry: + registry_type: remote + path: feast-registry-server-feast-feature-server.feast-dev.svc.cluster.local:80 +offline_store: + type: postgres + host: postgresql.feast-dev.svc.cluster.local + port: 5432 + database: feast + db_schema: public + user: feast + password: feast +auth: + type: oidc + auth_discovery_url: https://keycloak-feast-dev.apps.com/realms/feast-rbac/.well-known/openid-configuration + client_id: feast-client +entity_key_serialization_version: 2 diff --git a/examples/rbac-remote/server/oidc/feature_store_online.yaml b/examples/rbac-remote/server/oidc/feature_store_online.yaml new file mode 100644 index 0000000000..c47c3a0662 --- /dev/null +++ b/examples/rbac-remote/server/oidc/feature_store_online.yaml @@ -0,0 +1,22 @@ +project: server +provider: local +registry: + registry_type: remote + path: feast-registry-server-feast-feature-server.feast-dev.svc.cluster.local:80 +online_store: + type: postgres + host: postgresql.feast-dev.svc.cluster.local + port: 5432 + database: feast + db_schema: public + user: feast + password: feast +offline_store: + type: remote + host: feast-offline-server-feast-feature-server.feast-dev.svc.cluster.local + port: 80 +auth: + type: oidc + auth_discovery_url: https://keycloak-feast-dev.apps.com/realms/feast-rbac/.well-known/openid-configuration + client_id: feast-client +entity_key_serialization_version: 2 diff --git a/examples/rbac-remote/server/oidc/feature_store_registry.yaml b/examples/rbac-remote/server/oidc/feature_store_registry.yaml new file mode 100644 index 0000000000..a661d9dc56 --- /dev/null +++ b/examples/rbac-remote/server/oidc/feature_store_registry.yaml @@ -0,0 +1,14 @@ +project: server +provider: local +registry: + registry_type: sql + path: postgresql+psycopg://feast:feast@postgresql.feast-dev.svc.cluster.local:5432/feast + cache_ttl_seconds: 60 + sqlalchemy_config_kwargs: + echo: false + pool_pre_ping: true +auth: + type: oidc + auth_discovery_url: https://keycloak-feast-dev.apps.com/realms/feast-rbac/.well-known/openid-configuration + client_id: feast-client +entity_key_serialization_version: 2 diff --git a/go.mod b/go.mod index 0f73328c72..61063a0cda 100644 --- a/go.mod +++ b/go.mod @@ -46,5 +46,5 @@ require ( google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect gopkg.in/yaml.v2 v2.4.0 // indirect - gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect + gopkg.in/yaml.v3 v3.0.0 // indirect ) diff --git a/go.sum b/go.sum index a793b09aec..83bbc041c5 100644 --- a/go.sum +++ b/go.sum @@ -1854,8 +1854,9 @@ gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod 
h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0 h1:hjy8E9ON/egN1tAYqKb61G10WtihqetD4sz2H+8nIeA= +gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/infra/charts/feast-feature-server/templates/deployment.yaml b/infra/charts/feast-feature-server/templates/deployment.yaml index a550433db5..1f673280fe 100644 --- a/infra/charts/feast-feature-server/templates/deployment.yaml +++ b/infra/charts/feast-feature-server/templates/deployment.yaml @@ -21,6 +21,7 @@ spec: labels: {{- include "feast-feature-server.selectorLabels" . | nindent 8 }} spec: + serviceAccountName: {{ .Values.serviceAccount.name | default "default" }} {{- with .Values.imagePullSecrets }} imagePullSecrets: {{- toYaml . | nindent 8 }} @@ -36,23 +37,33 @@ spec: env: - name: FEATURE_STORE_YAML_BASE64 value: {{ .Values.feature_store_yaml_base64 }} + - name: INTRA_COMMUNICATION_BASE64 + value: {{ "intra-server-communication" | b64enc }} command: {{- if eq .Values.feast_mode "offline" }} - "feast" + - "--log-level" + - "{{ .Values.logLevel }}" - "serve_offline" - "-h" - "0.0.0.0" {{- else if eq .Values.feast_mode "ui" }} - "feast" + - "--log-level" + - "{{ .Values.logLevel }}" - "ui" - "-h" - "0.0.0.0" {{- else if eq .Values.feast_mode "registry" }} - "feast" + - "--log-level" + - "{{ .Values.logLevel }}" - "serve_registry" {{- else }} {{- if .Values.metrics.enabled }} - "feast" + - "--log-level" + - "{{ .Values.logLevel }}" - "serve" - "--metrics" - "-h" diff --git a/infra/charts/feast-feature-server/values.yaml b/infra/charts/feast-feature-server/values.yaml index 64d805a66c..f0bc55a646 100644 --- a/infra/charts/feast-feature-server/values.yaml +++ b/infra/charts/feast-feature-server/values.yaml @@ -11,6 +11,8 @@ image: # image.tag -- The Docker image tag (can be overwritten if custom feature server deps are needed for on demand transforms) tag: 0.40.0 +logLevel: "WARNING" # Set the log level to DEBUG, INFO, WARNING, ERROR, or CRITICAL (case-insensitive) + imagePullSecrets: [] nameOverride: "" fullnameOverride: "" @@ -44,6 +46,9 @@ service: type: ClusterIP port: 80 +serviceAccount: + name: "" + resources: {} # We usually recommend not to specify default resources and to leave this as a conscious # choice for the user. This also increases chances charts run on environments with little diff --git a/infra/scripts/generate_protos.py b/infra/scripts/generate_protos.py new file mode 100644 index 0000000000..2ce7e29e12 --- /dev/null +++ b/infra/scripts/generate_protos.py @@ -0,0 +1,80 @@ +import os +import sys +import glob +import subprocess +from pathlib import Path + +repo_root = str(Path(__file__).resolve().parent) + +PROTO_SUBDIRS = ["core", "registry", "serving", "types", "storage"] +PYTHON_CODE_PREFIX = "sdk/python" + +class BuildPythonProtosCommand: + description = "Builds the proto files into Python files."
+ user_options = [ + ("inplace", "i", "Write generated proto files to source directory."), + ] + + def __init__(self): + self.python_protoc = [ + sys.executable, + "-m", + "grpc_tools.protoc", + ] + self.proto_folder = "protos" + self.sub_folders = PROTO_SUBDIRS + self.inplace = 0 + + @property + def python_folder(self): + return "sdk/python/feast/protos" + + def _generate_python_protos(self, path: str): + proto_files = glob.glob(os.path.join(self.proto_folder, path)) + Path(self.python_folder).mkdir(parents=True, exist_ok=True) + subprocess.check_call( + self.python_protoc + + [ + "-I", + self.proto_folder, + "--python_out", + self.python_folder, + "--grpc_python_out", + self.python_folder, + "--mypy_out", + self.python_folder, + ] + + proto_files + ) + + def run(self): + for sub_folder in self.sub_folders: + self._generate_python_protos(f"feast/{sub_folder}/*.proto") + # We need the __init__ files for each of the generated subdirs + # so that they are regular packages, and don't need the `--namespace-packages` flags + # when being typechecked using mypy. + with open(f"{self.python_folder}/feast/{sub_folder}/__init__.py", "w"): + pass + + with open(f"{self.python_folder}/__init__.py", "w"): + pass + with open(f"{self.python_folder}/feast/__init__.py", "w"): + pass + + for path in Path(self.python_folder).rglob("*.py"): + for folder in self.sub_folders: + # Read in the file + with open(path, "r") as file: + filedata = file.read() + + # Replace the target string + filedata = filedata.replace( + f"from feast.{folder}", f"from feast.protos.feast.{folder}" + ) + + # Write the file out again + with open(path, "w") as file: + file.write(filedata) + +if __name__ == "__main__": + BuildPythonProtosCommand().run() \ No newline at end of file diff --git a/infra/templates/README.md.jinja2 b/infra/templates/README.md.jinja2 index 1cce08ecfa..9c7df17da9 100644 --- a/infra/templates/README.md.jinja2 +++ b/infra/templates/README.md.jinja2 @@ -14,6 +14,9 @@ [![License](https://img.shields.io/badge/License-Apache%202.0-blue)](https://github.com/feast-dev/feast/blob/master/LICENSE) [![GitHub Release](https://img.shields.io/github/v/release/feast-dev/feast.svg?style=flat&sort=semver&color=blue)](https://github.com/feast-dev/feast/releases) +## Join us on Slack! +πŸ‘‹πŸ‘‹πŸ‘‹ [Come say hi on Slack!](https://communityinviter.com/apps/feastopensource/feast-the-open-source-feature-store) + ## Overview Feast (**Fea**ture **St**ore) is an open source feature store for machine learning. Feast is the fastest path to manage existing infrastructure to productionize analytic data for model training and online inference. diff --git a/protos/feast/core/FeatureViewProjection.proto b/protos/feast/core/FeatureViewProjection.proto index 36d17632e7..b0e697b656 100644 --- a/protos/feast/core/FeatureViewProjection.proto +++ b/protos/feast/core/FeatureViewProjection.proto @@ -6,6 +6,7 @@ option java_outer_classname = "FeatureReferenceProto"; option java_package = "feast.proto.core"; import "feast/core/Feature.proto"; +import "feast/core/DataSource.proto"; // A projection to be applied on top of a FeatureView. @@ -22,4 +23,13 @@ message FeatureViewProjection { // Map for entity join_key overrides of feature data entity join_key to entity data join_key map<string, string> join_key_map = 4; + + string timestamp_field = 5; + string date_partition_column = 6; + string created_timestamp_column = 7; + // Batch/Offline DataSource where this view can retrieve offline feature data.
+ DataSource batch_source = 8; + // Streaming DataSource from where this view can consume "online" feature data. + DataSource stream_source = 9; + } diff --git a/protos/feast/core/OnDemandFeatureView.proto b/protos/feast/core/OnDemandFeatureView.proto index 7a5fec1650..c915e32e16 100644 --- a/protos/feast/core/OnDemandFeatureView.proto +++ b/protos/feast/core/OnDemandFeatureView.proto @@ -63,6 +63,12 @@ message OnDemandFeatureViewSpec { // Owner of the on demand feature view. string owner = 8; string mode = 11; + bool write_to_online_store = 12; + + // List of names of entities associated with this feature view. + repeated string entities = 13; + // List of specifications for each entity defined as part of this feature view. + repeated FeatureSpecV2 entity_columns = 14; } message OnDemandFeatureViewMeta { diff --git a/protos/feast/core/Permission.proto b/protos/feast/core/Permission.proto new file mode 100644 index 0000000000..400f70a11b --- /dev/null +++ b/protos/feast/core/Permission.proto @@ -0,0 +1,70 @@ +syntax = "proto3"; +package feast.core; + +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; +option java_outer_classname = "PermissionProto"; +option java_package = "feast.proto.core"; + +import "feast/core/Policy.proto"; +import "google/protobuf/timestamp.proto"; + +message Permission { + // User-specified specifications of this permission. + PermissionSpec spec = 1; + + // System-populated metadata for this permission. + PermissionMeta meta = 2; +} + +message PermissionSpec { + enum AuthzedAction { + CREATE = 0; + DESCRIBE = 1; + UPDATE = 2; + DELETE = 3; + READ_ONLINE = 4; + READ_OFFLINE = 5; + WRITE_ONLINE = 6; + WRITE_OFFLINE = 7; + } + + // Name of the permission. Must be unique. Not updated. + string name = 1; + + // Name of Feast project. + string project = 2; + + enum Type { + FEATURE_VIEW = 0; + ON_DEMAND_FEATURE_VIEW = 1; + BATCH_FEATURE_VIEW = 2; + STREAM_FEATURE_VIEW = 3; + ENTITY = 4; + FEATURE_SERVICE = 5; + DATA_SOURCE = 6; + VALIDATION_REFERENCE = 7; + SAVED_DATASET = 8; + PERMISSION = 9; + PROJECT = 10; + } + + repeated Type types = 3; + + string name_pattern = 4; + + map<string, string> required_tags = 5; + + // List of actions. + repeated AuthzedAction actions = 6; + + // The policy. + Policy policy = 7; + + // User defined metadata + map<string, string> tags = 8; +} + +message PermissionMeta { + google.protobuf.Timestamp created_timestamp = 1; + google.protobuf.Timestamp last_updated_timestamp = 2; +} diff --git a/protos/feast/core/Policy.proto b/protos/feast/core/Policy.proto new file mode 100644 index 0000000000..7ad42b9797 --- /dev/null +++ b/protos/feast/core/Policy.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; +package feast.core; + +option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; +option java_outer_classname = "PolicyProto"; +option java_package = "feast.proto.core"; + +message Policy { + // Name of the policy. + string name = 1; + + // Name of Feast project. + string project = 2; + + oneof policy_type { + RoleBasedPolicy role_based_policy = 3; + } +} + +message RoleBasedPolicy { + // List of roles in this policy. + repeated string roles = 1; +}
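For reference, these two messages surface in the Python SDK as the `Permission` and `RoleBasedPolicy` objects used by this PR's `permissions_apply.py`. A minimal sketch of how a role-based permission maps onto the proto shape (the permission name here is hypothetical; the imports are the ones the example file uses):

```python
from feast.feast_object import ALL_RESOURCE_TYPES
from feast.permissions.action import AuthzedAction
from feast.permissions.permission import Permission
from feast.permissions.policy import RoleBasedPolicy

# PermissionSpec.types / actions / policy correspond to the `types`,
# `actions`, and `policy` arguments below; RoleBasedPolicy fills the
# role_based_policy branch of the Policy.policy_type oneof.
reader_perm = Permission(
    name="online_reader_permission",
    types=ALL_RESOURCE_TYPES,
    policy=RoleBasedPolicy(roles=["feast-user-role"]),
    actions=[AuthzedAction.READ_ONLINE],
)
```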
diff --git a/protos/feast/core/Project.proto b/protos/feast/core/Project.proto
new file mode 100644
index 0000000000..08e8b38f23
--- /dev/null
+++ b/protos/feast/core/Project.proto
@@ -0,0 +1,52 @@
+//
+// * Copyright 2020 The Feast Authors
+// *
+// * Licensed under the Apache License, Version 2.0 (the "License");
+// * you may not use this file except in compliance with the License.
+// * You may obtain a copy of the License at
+// *
+// * https://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing, software
+// * distributed under the License is distributed on an "AS IS" BASIS,
+// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// * See the License for the specific language governing permissions and
+// * limitations under the License.
+//
+
+syntax = "proto3";
+
+package feast.core;
+option java_package = "feast.proto.core";
+option java_outer_classname = "ProjectProto";
+option go_package = "github.com/feast-dev/feast/go/protos/feast/core";
+
+import "google/protobuf/timestamp.proto";
+
+message Project {
+  // User-specified specifications of this entity.
+  ProjectSpec spec = 1;
+  // System-populated metadata for this entity.
+  ProjectMeta meta = 2;
+}
+
+message ProjectSpec {
+  // Name of the Project
+  string name = 1;
+
+  // Description of the Project
+  string description = 2;
+
+  // User defined metadata
+  map<string, string> tags = 3;
+
+  // Owner of the Project
+  string owner = 4;
+}
+
+message ProjectMeta {
+  // Time when the Project is created
+  google.protobuf.Timestamp created_timestamp = 1;
+  // Time when the Project is last updated with registry changes (Apply stage)
+  google.protobuf.Timestamp last_updated_timestamp = 2;
+}
diff --git a/protos/feast/core/Registry.proto b/protos/feast/core/Registry.proto
index 0c3f8a53f9..45ecd2c173 100644
--- a/protos/feast/core/Registry.proto
+++ b/protos/feast/core/Registry.proto
@@ -32,8 +32,10 @@ import "feast/core/DataSource.proto";
 import "feast/core/SavedDataset.proto";
 import "feast/core/ValidationProfile.proto";
 import "google/protobuf/timestamp.proto";
+import "feast/core/Permission.proto";
+import "feast/core/Project.proto";
 
-// Next id: 16
+// Next id: 18
 message Registry {
   repeated Entity entities = 1;
   repeated FeatureTable feature_tables = 2;
@@ -46,11 +48,13 @@ message Registry {
   repeated ValidationReference validation_references = 13;
   Infra infra = 10;
   // Tracking metadata of Feast by project
-  repeated ProjectMetadata project_metadata = 15;
+  repeated ProjectMetadata project_metadata = 15 [deprecated = true];
   string registry_schema_version = 3; // to support migrations; incremented when schema is changed
   string version_id = 4; // version id, random string generated on each update of the data; now used only for debugging purposes
   google.protobuf.Timestamp last_updated = 5;
+  repeated Permission permissions = 16;
+  repeated Project projects = 17;
 }
 
 message ProjectMetadata {
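The Project object is exported from `feast/__init__.py` later in this diff. A minimal usage sketch, assuming the Python class mirrors `ProjectSpec` (name, description, tags, owner); all values are hypothetical:

```python
from feast import Project

project = Project(
    name="credit_scoring",
    description="Features backing the credit scoring models",
    owner="mlops-team@example.com",
    tags={"team": "mlops"},
)
```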
diff --git a/protos/feast/registry/RegistryServer.proto b/protos/feast/registry/RegistryServer.proto
index 44529f5409..6685bc0baa 100644
--- a/protos/feast/registry/RegistryServer.proto
+++ b/protos/feast/registry/RegistryServer.proto
@@ -14,6 +14,8 @@ import "feast/core/FeatureService.proto";
 import "feast/core/SavedDataset.proto";
 import "feast/core/ValidationProfile.proto";
 import "feast/core/InfraObject.proto";
+import "feast/core/Permission.proto";
+import "feast/core/Project.proto";
 
 service RegistryServer{
   // Entity RPCs
@@ -30,9 +32,13 @@ service RegistryServer{
 
   // FeatureView RPCs
   rpc ApplyFeatureView (ApplyFeatureViewRequest) returns (google.protobuf.Empty) {}
+  rpc DeleteFeatureView (DeleteFeatureViewRequest) returns (google.protobuf.Empty) {}
+  rpc GetAnyFeatureView (GetAnyFeatureViewRequest) returns (GetAnyFeatureViewResponse) {}
+  rpc ListAllFeatureViews (ListAllFeatureViewsRequest) returns (ListAllFeatureViewsResponse) {}
+
+  // plain FeatureView RPCs
   rpc GetFeatureView (GetFeatureViewRequest) returns (feast.core.FeatureView) {}
   rpc ListFeatureViews (ListFeatureViewsRequest) returns (ListFeatureViewsResponse) {}
-  rpc DeleteFeatureView (DeleteFeatureViewRequest) returns (google.protobuf.Empty) {}
 
   // StreamFeatureView RPCs
   rpc GetStreamFeatureView (GetStreamFeatureViewRequest) returns (feast.core.StreamFeatureView) {}
@@ -59,7 +65,19 @@ service RegistryServer{
   rpc GetValidationReference (GetValidationReferenceRequest) returns (feast.core.ValidationReference) {}
   rpc ListValidationReferences (ListValidationReferencesRequest) returns (ListValidationReferencesResponse) {}
   rpc DeleteValidationReference (DeleteValidationReferenceRequest) returns (google.protobuf.Empty) {}
-
+
+  // Permission RPCs
+  rpc ApplyPermission (ApplyPermissionRequest) returns (google.protobuf.Empty) {}
+  rpc GetPermission (GetPermissionRequest) returns (feast.core.Permission) {}
+  rpc ListPermissions (ListPermissionsRequest) returns (ListPermissionsResponse) {}
+  rpc DeletePermission (DeletePermissionRequest) returns (google.protobuf.Empty) {}
+
+  // Project RPCs
+  rpc ApplyProject (ApplyProjectRequest) returns (google.protobuf.Empty) {}
+  rpc GetProject (GetProjectRequest) returns (feast.core.Project) {}
+  rpc ListProjects (ListProjectsRequest) returns (ListProjectsResponse) {}
+  rpc DeleteProject (DeleteProjectRequest) returns (google.protobuf.Empty) {}
+
   rpc ApplyMaterialization (ApplyMaterializationRequest) returns (google.protobuf.Empty) {}
   rpc ListProjectMetadata (ListProjectMetadataRequest) returns (ListProjectMetadataResponse) {}
   rpc UpdateInfra (UpdateInfraRequest) returns (google.protobuf.Empty) {}
@@ -194,6 +212,35 @@ message DeleteFeatureViewRequest {
   bool commit = 3;
 }
 
+message AnyFeatureView {
+  oneof any_feature_view {
+    feast.core.FeatureView feature_view = 1;
+    feast.core.OnDemandFeatureView on_demand_feature_view = 2;
+    feast.core.StreamFeatureView stream_feature_view = 3;
+  }
+}
+
+message GetAnyFeatureViewRequest {
+  string name = 1;
+  string project = 2;
+  bool allow_cache = 3;
+}
+
+message GetAnyFeatureViewResponse {
+  AnyFeatureView any_feature_view = 1;
+}
+
+message ListAllFeatureViewsRequest {
+  string project = 1;
+  bool allow_cache = 2;
+  map<string, string> tags = 3;
+}
+
+message ListAllFeatureViewsResponse {
+  repeated AnyFeatureView feature_views = 1;
+}
+
+
 // StreamFeatureView
 
 message GetStreamFeatureViewRequest {
@@ -277,6 +324,7 @@ message GetSavedDatasetRequest {
 message ListSavedDatasetsRequest {
   string project = 1;
   bool allow_cache = 2;
+  map<string, string> tags = 3;
 }
 
 message ListSavedDatasetsResponse {
@@ -306,6 +354,7 @@ message GetValidationReferenceRequest {
 message ListValidationReferencesRequest {
   string project = 1;
   bool allow_cache = 2;
+  map<string, string> tags = 3;
 }
 
 message ListValidationReferencesResponse {
@@ -316,4 +365,60 @@ message DeleteValidationReferenceRequest {
   string name = 1;
   string project = 2;
   bool commit = 3;
-}
\ No newline at end of file
+}
+
+// Permissions
+
+message ApplyPermissionRequest {
+  feast.core.Permission permission = 1;
+  string project = 2;
+  bool commit = 3;
+}
+
+message GetPermissionRequest {
+  string name = 1;
+  string project = 2;
+  bool allow_cache = 3;
+}
+
+message ListPermissionsRequest {
+  string project = 1;
+  bool allow_cache = 2;
+  map<string, string> tags = 3;
+}
+
+message ListPermissionsResponse {
+  repeated feast.core.Permission permissions = 1;
+}
+
+message DeletePermissionRequest {
+  string name = 1;
+  string project = 2;
+  bool commit = 3;
+}
+
+// Projects
+
+message ApplyProjectRequest {
+  feast.core.Project project = 1;
+  bool commit = 2;
+}
+
+message GetProjectRequest {
+  string name = 1;
+  bool allow_cache = 2;
+}
+
+message ListProjectsRequest {
+  bool allow_cache = 1;
+  map<string, string> tags = 2;
+}
+
+message ListProjectsResponse {
+  repeated feast.core.Project projects = 1;
+}
+
+message DeleteProjectRequest {
+  string name = 1;
+  bool commit = 2;
+}
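A minimal sketch of calling one of the new Project RPCs through the generated gRPC stub. The module path and the server address are assumptions: the generated code is presumed to live under `feast.protos.feast.registry`, following the layout used elsewhere in this diff, and the port is hypothetical.

```python
import grpc

from feast.protos.feast.registry import RegistryServer_pb2, RegistryServer_pb2_grpc

channel = grpc.insecure_channel("localhost:6570")  # hypothetical registry address
stub = RegistryServer_pb2_grpc.RegistryServerStub(channel)

# ListProjects takes only allow_cache and an optional tags filter (see above).
response = stub.ListProjects(RegistryServer_pb2.ListProjectsRequest(allow_cache=True))
for project in response.projects:
    print(project.spec.name)
```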
diff --git a/pyproject.toml b/pyproject.toml
index 00170ab443..2a051231e2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,11 @@
 [build-system]
-requires = ["setuptools>=60", "wheel", "setuptools_scm>=6.2", "grpcio", "grpcio-tools>=1.47.0", "mypy-protobuf==3.1", "sphinx!=4.0.0"]
-build-backend = "setuptools.build_meta"
+requires = [
+    "pybindgen==0.22.0",
+    "setuptools>=60",
+    "setuptools_scm>=6.2",
+    "sphinx!=4.0.0",
+    "wheel",
+]
 
 [tool.setuptools_scm]
 # Including this section is comparable to supplying use_scm_version=True in setup.py.
diff --git a/sdk/python/docs/index.rst b/sdk/python/docs/index.rst
index 1ef6bd16c8..86354f80c7 100644
--- a/sdk/python/docs/index.rst
+++ b/sdk/python/docs/index.rst
@@ -453,4 +453,60 @@ Snowflake Engine
    :members:
 
 .. autoclass:: feast.infra.materialization.contrib.spark.spark_materialization_engine.SparkMaterializationJob
-   :members:
\ No newline at end of file
+   :members:
+
+Permission
+============================
+
+.. autoclass:: feast.permissions.permission.Permission
+   :members:
+
+.. autoclass:: feast.permissions.action.AuthzedAction
+   :members:
+
+.. autoclass:: feast.permissions.policy.Policy
+   :members:
+
+.. autofunction:: feast.permissions.enforcer.enforce_policy
+
+Auth Config
+---------------------------
+
+.. autoclass:: feast.permissions.auth_model.AuthConfig
+   :members:
+
+.. autoclass:: feast.permissions.auth_model.KubernetesAuthConfig
+   :members:
+
+.. autoclass:: feast.permissions.auth_model.OidcAuthConfig
+   :members:
+
+Auth Manager
+---------------------------
+
+.. autoclass:: feast.permissions.auth.AuthManager
+   :members:
+
+.. autoclass:: feast.permissions.auth.token_parser.TokenParser
+   :members:
+
+.. autoclass:: feast.permissions.auth.token_extractor.TokenExtractor
+   :members:
+
+.. autoclass:: feast.permissions.auth.kubernetes_token_parser.KubernetesTokenParser
+   :members:
+
+.. autoclass:: feast.permissions.auth.oidc_token_parser.OidcTokenParser
+   :members:
+
+Auth Client Manager
+---------------------------
+
+.. autoclass:: feast.permissions.client.auth_client_manager.AuthenticationClientManager
+   :members:
+
+.. autoclass:: feast.permissions.client.kubernetes_auth_client_manager.KubernetesAuthClientManager
+   :members:
+
+.. autoclass:: feast.permissions.client.oidc_authentication_client_manager.OidcAuthClientManager
+   :members:
diff --git a/sdk/python/docs/source/feast.infra.feature_servers.aws_lambda.rst b/sdk/python/docs/source/feast.infra.feature_servers.aws_lambda.rst
deleted file mode 100644
index de90bfc000..0000000000
--- a/sdk/python/docs/source/feast.infra.feature_servers.aws_lambda.rst
+++ /dev/null
@@ -1,29 +0,0 @@
-feast.infra.feature\_servers.aws\_lambda package
-================================================
-
-Submodules
-----------
-
-feast.infra.feature\_servers.aws\_lambda.app module
----------------------------------------------------
-
-.. automodule:: feast.infra.feature_servers.aws_lambda.app
-   :members:
-   :undoc-members:
-   :show-inheritance:
-
-feast.infra.feature\_servers.aws\_lambda.config module
-------------------------------------------------------
-
-.. 
automodule:: feast.infra.feature_servers.aws_lambda.config - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. automodule:: feast.infra.feature_servers.aws_lambda - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.feature_servers.gcp_cloudrun.rst b/sdk/python/docs/source/feast.infra.feature_servers.gcp_cloudrun.rst deleted file mode 100644 index f7fdaf5b36..0000000000 --- a/sdk/python/docs/source/feast.infra.feature_servers.gcp_cloudrun.rst +++ /dev/null @@ -1,29 +0,0 @@ -feast.infra.feature\_servers.gcp\_cloudrun package -================================================== - -Submodules ----------- - -feast.infra.feature\_servers.gcp\_cloudrun.app module ------------------------------------------------------ - -.. automodule:: feast.infra.feature_servers.gcp_cloudrun.app - :members: - :undoc-members: - :show-inheritance: - -feast.infra.feature\_servers.gcp\_cloudrun.config module --------------------------------------------------------- - -.. automodule:: feast.infra.feature_servers.gcp_cloudrun.config - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. automodule:: feast.infra.feature_servers.gcp_cloudrun - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.materialization.contrib.bytewax.rst b/sdk/python/docs/source/feast.infra.materialization.contrib.bytewax.rst deleted file mode 100644 index 86fbaa6151..0000000000 --- a/sdk/python/docs/source/feast.infra.materialization.contrib.bytewax.rst +++ /dev/null @@ -1,29 +0,0 @@ -feast.infra.materialization.contrib.bytewax package -================================================================= - -Submodules ----------- - -feast.infra.materialization.contrib.bytewax.bytewax\_materialization\_engine ----------------------------------------------------------------------- - -.. automodule:: feast.infra.materialization.contrib.bytewax.bytewax_materialization_engine - :members: - :undoc-members: - :show-inheritance: - -feast.infra.materialization.contrib.bytewax.bytewax\_materialization\_job ----------------------------------------------------------------------- - -.. automodule:: feast.infra.materialization.contrib.bytewax.bytewax_materialization_job - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. automodule:: feast.infra.materialization.contrib.bytewax - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.materialization.lambda.rst b/sdk/python/docs/source/feast.infra.materialization.lambda.rst deleted file mode 100644 index 7ca1d44314..0000000000 --- a/sdk/python/docs/source/feast.infra.materialization.lambda.rst +++ /dev/null @@ -1,29 +0,0 @@ -feast.infra.materialization.lambda package -========================================== - -Submodules ----------- - -feast.infra.materialization.lambda.app module ---------------------------------------------- - -.. automodule:: feast.infra.materialization.lambda.app - :members: - :undoc-members: - :show-inheritance: - -feast.infra.materialization.lambda.lambda\_engine module --------------------------------------------------------- - -.. automodule:: feast.infra.materialization.lambda.lambda_engine - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. 
automodule:: feast.infra.materialization.lambda - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.online_stores.contrib.rockset_online_store.rst b/sdk/python/docs/source/feast.infra.online_stores.contrib.rockset_online_store.rst deleted file mode 100644 index b3de7479a0..0000000000 --- a/sdk/python/docs/source/feast.infra.online_stores.contrib.rockset_online_store.rst +++ /dev/null @@ -1,21 +0,0 @@ -feast.infra.online\_stores.contrib.rockset\_online\_store package -================================================================= - -Submodules ----------- - -feast.infra.online\_stores.contrib.rockset\_online\_store.rockset module ------------------------------------------------------------------------- - -.. automodule:: feast.infra.online_stores.contrib.rockset_online_store.rockset - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. automodule:: feast.infra.online_stores.contrib.rockset_online_store - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.online_stores.contrib.rst b/sdk/python/docs/source/feast.infra.online_stores.contrib.rst index 9d301fcd0d..8c9dd7e549 100644 --- a/sdk/python/docs/source/feast.infra.online_stores.contrib.rst +++ b/sdk/python/docs/source/feast.infra.online_stores.contrib.rst @@ -12,7 +12,6 @@ Subpackages feast.infra.online_stores.contrib.hbase_online_store feast.infra.online_stores.contrib.ikv_online_store feast.infra.online_stores.contrib.mysql_online_store - feast.infra.online_stores.contrib.rockset_online_store Submodules ---------- diff --git a/sdk/python/docs/source/feast.infra.registry.contrib.postgres.rst b/sdk/python/docs/source/feast.infra.registry.contrib.postgres.rst deleted file mode 100644 index 3f31990805..0000000000 --- a/sdk/python/docs/source/feast.infra.registry.contrib.postgres.rst +++ /dev/null @@ -1,21 +0,0 @@ -feast.infra.registry.contrib.postgres package -============================================= - -Submodules ----------- - -feast.infra.registry.contrib.postgres.postgres\_registry\_store module ----------------------------------------------------------------------- - -.. automodule:: feast.infra.registry.contrib.postgres.postgres_registry_store - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. automodule:: feast.infra.registry.contrib.postgres - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.registry_stores.rst b/sdk/python/docs/source/feast.infra.registry_stores.rst deleted file mode 100644 index cff02fa338..0000000000 --- a/sdk/python/docs/source/feast.infra.registry_stores.rst +++ /dev/null @@ -1,21 +0,0 @@ -feast.infra.registry\_stores package -==================================== - -Submodules ----------- - -feast.infra.registry\_stores.sql module ---------------------------------------- - -.. automodule:: feast.infra.registry_stores.sql - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. 
automodule:: feast.infra.registry_stores - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.permissions.auth.rst b/sdk/python/docs/source/feast.permissions.auth.rst new file mode 100644 index 0000000000..3826bfc217 --- /dev/null +++ b/sdk/python/docs/source/feast.permissions.auth.rst @@ -0,0 +1,61 @@ +feast.permissions.auth package +============================== + +Submodules +---------- + +feast.permissions.auth.auth\_manager module +------------------------------------------- + +.. automodule:: feast.permissions.auth.auth_manager + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.auth.auth\_type module +---------------------------------------- + +.. automodule:: feast.permissions.auth.auth_type + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.auth.kubernetes\_token\_parser module +------------------------------------------------------- + +.. automodule:: feast.permissions.auth.kubernetes_token_parser + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.auth.oidc\_token\_parser module +------------------------------------------------- + +.. automodule:: feast.permissions.auth.oidc_token_parser + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.auth.token\_extractor module +---------------------------------------------- + +.. automodule:: feast.permissions.auth.token_extractor + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.auth.token\_parser module +------------------------------------------- + +.. automodule:: feast.permissions.auth.token_parser + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.permissions.auth + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.permissions.client.rst b/sdk/python/docs/source/feast.permissions.client.rst new file mode 100644 index 0000000000..f346801210 --- /dev/null +++ b/sdk/python/docs/source/feast.permissions.client.rst @@ -0,0 +1,69 @@ +feast.permissions.client package +================================ + +Submodules +---------- + +feast.permissions.client.arrow\_flight\_auth\_interceptor module +---------------------------------------------------------------- + +.. automodule:: feast.permissions.client.arrow_flight_auth_interceptor + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.client.auth\_client\_manager module +----------------------------------------------------- + +.. automodule:: feast.permissions.client.auth_client_manager + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.client.auth\_client\_manager\_factory module +-------------------------------------------------------------- + +.. automodule:: feast.permissions.client.auth_client_manager_factory + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.client.grpc\_client\_auth\_interceptor module +--------------------------------------------------------------- + +.. automodule:: feast.permissions.client.grpc_client_auth_interceptor + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.client.http\_auth\_requests\_wrapper module +------------------------------------------------------------- + +.. automodule:: feast.permissions.client.http_auth_requests_wrapper + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.client.kubernetes\_auth\_client\_manager module +----------------------------------------------------------------- + +.. 
automodule:: feast.permissions.client.kubernetes_auth_client_manager + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.client.oidc\_authentication\_client\_manager module +--------------------------------------------------------------------- + +.. automodule:: feast.permissions.client.oidc_authentication_client_manager + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.permissions.client + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.permissions.rst b/sdk/python/docs/source/feast.permissions.rst new file mode 100644 index 0000000000..d8731111e1 --- /dev/null +++ b/sdk/python/docs/source/feast.permissions.rst @@ -0,0 +1,111 @@ +feast.permissions package +========================= + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + feast.permissions.auth + feast.permissions.client + feast.permissions.server + +Submodules +---------- + +feast.permissions.action module +------------------------------- + +.. automodule:: feast.permissions.action + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.auth\_model module +------------------------------------ + +.. automodule:: feast.permissions.auth_model + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.decision module +--------------------------------- + +.. automodule:: feast.permissions.decision + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.decorator module +---------------------------------- + +.. automodule:: feast.permissions.decorator + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.enforcer module +--------------------------------- + +.. automodule:: feast.permissions.enforcer + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.matcher module +-------------------------------- + +.. automodule:: feast.permissions.matcher + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.oidc\_service module +-------------------------------------- + +.. automodule:: feast.permissions.oidc_service + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.permission module +----------------------------------- + +.. automodule:: feast.permissions.permission + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.policy module +------------------------------- + +.. automodule:: feast.permissions.policy + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.security\_manager module +------------------------------------------ + +.. automodule:: feast.permissions.security_manager + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.user module +----------------------------- + +.. automodule:: feast.permissions.user + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.permissions + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.permissions.server.rst b/sdk/python/docs/source/feast.permissions.server.rst new file mode 100644 index 0000000000..33a9d8df64 --- /dev/null +++ b/sdk/python/docs/source/feast.permissions.server.rst @@ -0,0 +1,69 @@ +feast.permissions.server package +================================ + +Submodules +---------- + +feast.permissions.server.arrow module +------------------------------------- + +.. 
automodule:: feast.permissions.server.arrow + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.server.arrow\_flight\_token\_extractor module +--------------------------------------------------------------- + +.. automodule:: feast.permissions.server.arrow_flight_token_extractor + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.server.grpc module +------------------------------------ + +.. automodule:: feast.permissions.server.grpc + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.server.grpc\_token\_extractor module +------------------------------------------------------ + +.. automodule:: feast.permissions.server.grpc_token_extractor + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.server.rest module +------------------------------------ + +.. automodule:: feast.permissions.server.rest + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.server.rest\_token\_extractor module +------------------------------------------------------ + +.. automodule:: feast.permissions.server.rest_token_extractor + :members: + :undoc-members: + :show-inheritance: + +feast.permissions.server.utils module +------------------------------------- + +.. automodule:: feast.permissions.server.utils + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.permissions.server + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.protos.feast.core.rst b/sdk/python/docs/source/feast.protos.feast.core.rst index a8691c20fe..9d079953c1 100644 --- a/sdk/python/docs/source/feast.protos.feast.core.rst +++ b/sdk/python/docs/source/feast.protos.feast.core.rst @@ -212,6 +212,38 @@ feast.protos.feast.core.OnDemandFeatureView\_pb2\_grpc module :undoc-members: :show-inheritance: +feast.protos.feast.core.Permission\_pb2 module +---------------------------------------------- + +.. automodule:: feast.protos.feast.core.Permission_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.Permission\_pb2\_grpc module +---------------------------------------------------- + +.. automodule:: feast.protos.feast.core.Permission_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.Policy\_pb2 module +------------------------------------------ + +.. automodule:: feast.protos.feast.core.Policy_pb2 + :members: + :undoc-members: + :show-inheritance: + +feast.protos.feast.core.Policy\_pb2\_grpc module +------------------------------------------------ + +.. automodule:: feast.protos.feast.core.Policy_pb2_grpc + :members: + :undoc-members: + :show-inheritance: + feast.protos.feast.core.Registry\_pb2 module -------------------------------------------- diff --git a/sdk/python/docs/source/feast.rst b/sdk/python/docs/source/feast.rst index 95fbea8d7a..b8c04ebde6 100644 --- a/sdk/python/docs/source/feast.rst +++ b/sdk/python/docs/source/feast.rst @@ -12,6 +12,7 @@ Subpackages feast.embedded_go feast.infra feast.loaders + feast.permissions feast.protos feast.transformation feast.ui @@ -51,6 +52,14 @@ feast.cli module :undoc-members: :show-inheritance: +feast.cli\_utils module +----------------------- + +.. 
automodule:: feast.cli_utils + :members: + :undoc-members: + :show-inheritance: + feast.constants module ---------------------- diff --git a/sdk/python/docs/source/index.rst b/sdk/python/docs/source/index.rst index 1ef6bd16c8..86354f80c7 100644 --- a/sdk/python/docs/source/index.rst +++ b/sdk/python/docs/source/index.rst @@ -453,4 +453,60 @@ Snowflake Engine :members: .. autoclass:: feast.infra.materialization.contrib.spark.spark_materialization_engine.SparkMaterializationJob - :members: \ No newline at end of file + :members: + +Permission +============================ + +.. autoclass:: feast.permissions.permission.Permission + :members: + +.. autoclass:: feast.permissions.action.AuthzedAction + :members: + +.. autoclass:: feast.permissions.policy.Policy + :members: + +.. autofunction:: feast.permissions.enforcer.enforce_policy + +Auth Config +--------------------------- + +.. autoclass:: feast.permissions.auth_model.AuthConfig + :members: + +.. autoclass:: feast.permissions.auth_model.KubernetesAuthConfig + :members: + +.. autoclass:: feast.permissions.auth_model.OidcAuthConfig + :members: + +Auth Manager +--------------------------- + +.. autoclass:: feast.permissions.auth.AuthManager + :members: + +.. autoclass:: feast.permissions.auth.token_parser.TokenParser + :members: + +.. autoclass:: feast.permissions.auth.token_extractor.TokenExtractor + :members: + +.. autoclass:: feast.permissions.auth.kubernetes_token_parser.KubernetesTokenParser + :members: + +.. autoclass:: feast.permissions.auth.oidc_token_parser.OidcTokenParser + :members: + +Auth Client Manager +--------------------------- + +.. autoclass:: feast.permissions.client.auth_client_manager.AuthenticationClientManager + :members: + +.. autoclass:: feast.permissions.client.kubernetes_auth_client_manager.KubernetesAuthClientManager + :members: + +.. 
autoclass:: feast.permissions.client.oidc_authentication_client_manager.OidcAuthClientManager
+   :members:
diff --git a/sdk/python/feast/__init__.py b/sdk/python/feast/__init__.py
index 52734bc71e..71122b7047 100644
--- a/sdk/python/feast/__init__.py
+++ b/sdk/python/feast/__init__.py
@@ -18,6 +18,7 @@
 from .feature_view import FeatureView
 from .field import Field
 from .on_demand_feature_view import OnDemandFeatureView
+from .project import Project
 from .repo_config import RepoConfig
 from .stream_feature_view import StreamFeatureView
 from .value_type import ValueType
@@ -49,4 +50,5 @@
     "PushSource",
     "RequestSource",
     "AthenaSource",
+    "Project",
 ]
diff --git a/sdk/python/feast/arrow_error_handler.py b/sdk/python/feast/arrow_error_handler.py
new file mode 100644
index 0000000000..e873592bd5
--- /dev/null
+++ b/sdk/python/feast/arrow_error_handler.py
@@ -0,0 +1,49 @@
+import logging
+from functools import wraps
+
+import pyarrow.flight as fl
+
+from feast.errors import FeastError
+
+logger = logging.getLogger(__name__)
+
+
+def arrow_client_error_handling_decorator(func):
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        try:
+            return func(*args, **kwargs)
+        except Exception as e:
+            mapped_error = FeastError.from_error_detail(_get_exception_data(e.args[0]))
+            if mapped_error is not None:
+                raise mapped_error
+            raise e
+
+    return wrapper
+
+
+def arrow_server_error_handling_decorator(func):
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        try:
+            return func(*args, **kwargs)
+        except Exception as e:
+            if isinstance(e, FeastError):
+                raise fl.FlightError(e.to_error_detail())
+            # Re-raise non-Feast errors unchanged so they are not silently swallowed.
+            raise e
+
+    return wrapper
+
+
+def _get_exception_data(except_str) -> str:
+    substring = "Flight error: "
+
+    # Find the starting index of the substring
+    position = except_str.find(substring)
+    end_json_index = except_str.find("}")
+
+    if position != -1 and end_json_index != -1:
+        # Extract the part of the string after the substring
+        result = except_str[position + len(substring) : end_json_index + 1]
+        return result
+
+    return ""
diff --git a/sdk/python/feast/base_feature_view.py b/sdk/python/feast/base_feature_view.py
index 31140e2899..d7dc2237bd 100644
--- a/sdk/python/feast/base_feature_view.py
+++ b/sdk/python/feast/base_feature_view.py
@@ -18,6 +18,7 @@
 from google.protobuf.json_format import MessageToJson
 from google.protobuf.message import Message
 
+from feast.data_source import DataSource
 from feast.feature_view_projection import FeatureViewProjection
 from feast.field import Field
 from feast.protos.feast.core.FeatureView_pb2 import FeatureView as FeatureViewProto
@@ -65,6 +66,7 @@ def __init__(
         description: str = "",
         tags: Optional[Dict[str, str]] = None,
         owner: str = "",
+        source: Optional[DataSource] = None,
     ):
         """
         Creates a BaseFeatureView object.
@@ -76,7 +78,8 @@
             tags (optional): A dictionary of key-value pairs to store arbitrary metadata.
             owner (optional): The owner of the base feature view, typically the email of the
                 primary maintainer.
-
+            source (optional): The source of data for this group of features. May be a stream source, or a batch source.
+                If a stream source, the source should contain a batch_source for backfills & batch materialization.
         Raises:
             ValueError: A field mapping conflicts with an Entity or a Feature.
""" @@ -90,6 +93,9 @@ def __init__( self.created_timestamp = None self.last_updated_timestamp = None + if source: + self.source = source + @property @abstractmethod def proto_class(self) -> Type[Message]: @@ -156,6 +162,10 @@ def __eq__(self, other): or self.tags != other.tags or self.owner != other.owner ): + # This is meant to ignore the File Source change to Push Source + if isinstance(type(self.source), type(other.source)): + if self.source != other.source: + return False return False return True diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py index f4e3e97d27..499788101e 100644 --- a/sdk/python/feast/cli.py +++ b/sdk/python/feast/cli.py @@ -16,23 +16,27 @@ from datetime import datetime from importlib.metadata import version as importlib_version from pathlib import Path -from typing import List, Optional +from typing import Any, List, Optional import click import yaml +from bigtree import Node from colorama import Fore, Style from dateutil import parser from pygments import formatters, highlight, lexers -from feast import utils +import feast.cli_utils as cli_utils +from feast import BatchFeatureView, Entity, FeatureService, StreamFeatureView, utils from feast.constants import ( DEFAULT_FEATURE_TRANSFORMATION_SERVER_PORT, DEFAULT_OFFLINE_SERVER_PORT, DEFAULT_REGISTRY_SERVER_PORT, ) +from feast.data_source import DataSource from feast.errors import FeastObjectNotFoundException, FeastProviderLoginError from feast.feature_view import FeatureView from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.policy import RoleBasedPolicy from feast.repo_config import load_repo_config from feast.repo_operations import ( apply_total, @@ -44,6 +48,7 @@ registry_dump, teardown, ) +from feast.saved_dataset import SavedDataset, ValidationReference from feast.utils import maybe_local_tz _logger = logging.getLogger(__name__) @@ -249,6 +254,79 @@ def data_source_list(ctx: click.Context, tags: list[str]): print(tabulate(table, headers=["NAME", "CLASS"], tablefmt="plain")) +@cli.group(name="projects") +def projects_cmd(): + """ + Access projects + """ + pass + + +@projects_cmd.command("describe") +@click.argument("name", type=click.STRING) +@click.pass_context +def project_describe(ctx: click.Context, name: str): + """ + Describe a project + """ + store = create_feature_store(ctx) + + try: + project = store.get_project(name) + except FeastObjectNotFoundException as e: + print(e) + exit(1) + + print( + yaml.dump( + yaml.safe_load(str(project)), default_flow_style=False, sort_keys=False + ) + ) + + +@projects_cmd.command("current_project") +@click.pass_context +def project_current(ctx: click.Context): + """ + Returns the current project configured with FeatureStore object + """ + store = create_feature_store(ctx) + + try: + project = store.get_project(name=None) + except FeastObjectNotFoundException as e: + print(e) + exit(1) + + print( + yaml.dump( + yaml.safe_load(str(project)), default_flow_style=False, sort_keys=False + ) + ) + + +@projects_cmd.command(name="list") +@tagsOption +@click.pass_context +def project_list(ctx: click.Context, tags: list[str]): + """ + List all projects + """ + store = create_feature_store(ctx) + table = [] + tags_filter = utils.tags_list_to_dict(tags) + for project in store.list_projects(tags=tags_filter): + table.append([project.name, project.description, project.tags, project.owner]) + + from tabulate import tabulate + + print( + tabulate( + table, headers=["NAME", "DESCRIPTION", "TAGS", "OWNER"], tablefmt="plain" + ) + ) + + 
@cli.group(name="entities") def entities_cmd(): """ @@ -465,6 +543,156 @@ def on_demand_feature_view_list(ctx: click.Context, tags: list[str]): print(tabulate(table, headers=["NAME"], tablefmt="plain")) +@cli.group(name="saved-datasets") +def saved_datasets_cmd(): + """ + [Experimental] Access saved datasets + """ + pass + + +@saved_datasets_cmd.command("describe") +@click.argument("name", type=click.STRING) +@click.pass_context +def saved_datasets_describe(ctx: click.Context, name: str): + """ + [Experimental] Describe a saved dataset + """ + store = create_feature_store(ctx) + + try: + saved_dataset = store.get_saved_dataset(name) + except FeastObjectNotFoundException as e: + print(e) + exit(1) + + print( + yaml.dump( + yaml.safe_load(str(saved_dataset)), + default_flow_style=False, + sort_keys=False, + ) + ) + + +@saved_datasets_cmd.command(name="list") +@tagsOption +@click.pass_context +def saved_datasets_list(ctx: click.Context, tags: list[str]): + """ + [Experimental] List all saved datasets + """ + store = create_feature_store(ctx) + table = [] + tags_filter = utils.tags_list_to_dict(tags) + for saved_dataset in store.list_saved_datasets(tags=tags_filter): + table.append([saved_dataset.name]) + + from tabulate import tabulate + + print(tabulate(table, headers=["NAME"], tablefmt="plain")) + + +@cli.group(name="stream-feature-views") +def stream_feature_views_cmd(): + """ + [Experimental] Access stream feature views + """ + pass + + +@stream_feature_views_cmd.command("describe") +@click.argument("name", type=click.STRING) +@click.pass_context +def stream_feature_views_describe(ctx: click.Context, name: str): + """ + [Experimental] Describe a stream feature view + """ + store = create_feature_store(ctx) + + try: + stream_feature_view = store.get_stream_feature_view(name) + except FeastObjectNotFoundException as e: + print(e) + exit(1) + + print( + yaml.dump( + yaml.safe_load(str(stream_feature_view)), + default_flow_style=False, + sort_keys=False, + ) + ) + + +@stream_feature_views_cmd.command(name="list") +@tagsOption +@click.pass_context +def stream_feature_views_list(ctx: click.Context, tags: list[str]): + """ + [Experimental] List all stream feature views + """ + store = create_feature_store(ctx) + table = [] + tags_filter = utils.tags_list_to_dict(tags) + for stream_feature_view in store.list_stream_feature_views(tags=tags_filter): + table.append([stream_feature_view.name]) + + from tabulate import tabulate + + print(tabulate(table, headers=["NAME"], tablefmt="plain")) + + +@cli.group(name="validation-references") +def validation_references_cmd(): + """ + [Experimental] Access validation references + """ + pass + + +@validation_references_cmd.command("describe") +@click.argument("name", type=click.STRING) +@click.pass_context +def validation_references_describe(ctx: click.Context, name: str): + """ + [Experimental] Describe a validation reference + """ + store = create_feature_store(ctx) + + try: + validation_reference = store.get_validation_reference(name) + except FeastObjectNotFoundException as e: + print(e) + exit(1) + + print( + yaml.dump( + yaml.safe_load(str(validation_reference)), + default_flow_style=False, + sort_keys=False, + ) + ) + + +@validation_references_cmd.command(name="list") +@tagsOption +@click.pass_context +def validation_references_list(ctx: click.Context, tags: list[str]): + """ + [Experimental] List all validation references + """ + store = create_feature_store(ctx) + table = [] + tags_filter = utils.tags_list_to_dict(tags) + for validation_reference in 
store.list_validation_references(tags=tags_filter): + table.append([validation_reference.name]) + + from tabulate import tabulate + + print(tabulate(table, headers=["NAME"], tablefmt="plain")) + + @cli.command("plan", cls=NoOptionDefaultFormat) @click.option( "--skip-source-validation", @@ -609,7 +837,6 @@ def materialize_incremental_command(ctx: click.Context, end_ts: str, views: List "postgres", "hbase", "cassandra", - "rockset", "hazelcast", "ikv", ], @@ -879,5 +1106,253 @@ def validate( exit(1) +@cli.group(name="permissions") +def feast_permissions_cmd(): + """ + Access permissions + """ + pass + + +@feast_permissions_cmd.command(name="list") +@click.option( + "--verbose", + "-v", + is_flag=True, + help="Print the resources matching each configured permission", +) +@tagsOption +@click.pass_context +def feast_permissions_list_command(ctx: click.Context, verbose: bool, tags: list[str]): + from tabulate import tabulate + + table: list[Any] = [] + tags_filter = utils.tags_list_to_dict(tags) + + store = create_feature_store(ctx) + + permissions = store.list_permissions(tags=tags_filter) + + root_node = Node("permissions") + roles: set[str] = set() + + for p in permissions: + policy = p.policy + if not verbose: + cli_utils.handle_not_verbose_permissions_command(p, policy, table) + else: + if isinstance(policy, RoleBasedPolicy) and len(policy.get_roles()) > 0: + roles = set(policy.get_roles()) + permission_node = Node( + p.name + " " + str(list(roles)), parent=root_node + ) + else: + permission_node = Node(p.name, parent=root_node) + + for feast_type in p.types: + if feast_type in [ + FeatureView, + OnDemandFeatureView, + BatchFeatureView, + StreamFeatureView, + ]: + cli_utils.handle_fv_verbose_permissions_command( + feast_type, # type: ignore[arg-type] + p, + permission_node, + store, + tags_filter, + ) + elif feast_type == Entity: + cli_utils.handle_entity_verbose_permissions_command( + feast_type, # type: ignore[arg-type] + p, + permission_node, + store, + tags_filter, + ) + elif feast_type == FeatureService: + cli_utils.handle_fs_verbose_permissions_command( + feast_type, # type: ignore[arg-type] + p, + permission_node, + store, + tags_filter, + ) + elif feast_type == DataSource: + cli_utils.handle_ds_verbose_permissions_command( + feast_type, # type: ignore[arg-type] + p, + permission_node, + store, + tags_filter, + ) + elif feast_type == ValidationReference: + cli_utils.handle_vr_verbose_permissions_command( + feast_type, # type: ignore[arg-type] + p, + permission_node, + store, + tags_filter, + ) + elif feast_type == SavedDataset: + cli_utils.handle_sd_verbose_permissions_command( + feast_type, # type: ignore[arg-type] + p, + permission_node, + store, + tags_filter, + ) + + if not verbose: + print( + tabulate( + table, + headers=[ + "NAME", + "TYPES", + "NAME_PATTERN", + "ACTIONS", + "ROLES", + "REQUIRED_TAGS", + ], + tablefmt="plain", + ) + ) + else: + cli_utils.print_permission_verbose_example() + + print("Permissions:") + print("") + root_node.show() + + +@feast_permissions_cmd.command("describe") +@click.argument("name", type=click.STRING) +@click.pass_context +def permission_describe(ctx: click.Context, name: str): + """ + Describe a permission + """ + store = create_feature_store(ctx) + + try: + permission = store.get_permission(name) + except FeastObjectNotFoundException as e: + print(e) + exit(1) + + print( + yaml.dump( + yaml.safe_load(str(permission)), default_flow_style=False, sort_keys=False + ) + ) + + +@feast_permissions_cmd.command(name="check") +@click.pass_context +def 
feast_permissions_check_command(ctx: click.Context): + """ + Validate the permissions configuration + """ + from tabulate import tabulate + + all_unsecured_table: list[Any] = [] + store = create_feature_store(ctx) + permissions = store.list_permissions() + objects = cli_utils.fetch_all_feast_objects( + store=store, + ) + + print( + f"{Style.BRIGHT + Fore.RED}The following resources are not secured by any permission configuration:{Style.RESET_ALL}" + ) + for o in objects: + cli_utils.handle_permissions_check_command( + object=o, permissions=permissions, table=all_unsecured_table + ) + print( + tabulate( + all_unsecured_table, + headers=[ + "NAME", + "TYPE", + ], + tablefmt="plain", + ) + ) + + all_unsecured_actions_table: list[Any] = [] + print( + f"{Style.BRIGHT + Fore.RED}The following actions are not secured by any permission configuration (Note: this might not be a security concern, depending on the used APIs):{Style.RESET_ALL}" + ) + for o in objects: + cli_utils.handle_permissions_check_command_with_actions( + object=o, permissions=permissions, table=all_unsecured_actions_table + ) + print( + tabulate( + all_unsecured_actions_table, + headers=[ + "NAME", + "TYPE", + "UNSECURED ACTIONS", + ], + tablefmt="plain", + ) + ) + + +@feast_permissions_cmd.command(name="list-roles") +@click.option( + "--verbose", + "-v", + is_flag=True, + help="Print the resources and actions permitted to each configured role", +) +@click.pass_context +def feast_permissions_list_roles_command(ctx: click.Context, verbose: bool): + """ + List all the configured roles + """ + from tabulate import tabulate + + table: list[Any] = [] + store = create_feature_store(ctx) + permissions = store.list_permissions() + if not verbose: + cli_utils.handler_list_all_permissions_roles( + permissions=permissions, table=table + ) + print( + tabulate( + table, + headers=[ + "ROLE NAME", + ], + tablefmt="grid", + ) + ) + else: + objects = cli_utils.fetch_all_feast_objects( + store=store, + ) + cli_utils.handler_list_all_permissions_roles_verbose( + objects=objects, permissions=permissions, table=table + ) + print( + tabulate( + table, + headers=[ + "ROLE NAME", + "RESOURCE NAME", + "RESOURCE TYPE", + "PERMITTED ACTIONS", + ], + tablefmt="plain", + ) + ) + + if __name__ == "__main__": cli() diff --git a/sdk/python/feast/cli_utils.py b/sdk/python/feast/cli_utils.py new file mode 100644 index 0000000000..4152eb219b --- /dev/null +++ b/sdk/python/feast/cli_utils.py @@ -0,0 +1,328 @@ +from typing import Any, Optional + +from bigtree import Node +from colorama import Fore, Style + +from feast import ( + BatchFeatureView, + FeatureService, + FeatureStore, + FeatureView, + OnDemandFeatureView, + StreamFeatureView, +) +from feast.feast_object import FeastObject +from feast.permissions.action import ALL_ACTIONS +from feast.permissions.decision import DecisionEvaluator +from feast.permissions.permission import Permission +from feast.permissions.policy import Policy, RoleBasedPolicy +from feast.permissions.user import User + + +def print_permission_verbose_example(): + print("") + print( + f"{Style.BRIGHT + Fore.GREEN}The structure of the {Style.BRIGHT + Fore.WHITE}feast-permissions list --verbose {Style.BRIGHT + Fore.GREEN}command will be as in the following example:" + ) + print("") + print(f"{Style.DIM}For example: {Style.RESET_ALL}{Style.BRIGHT + Fore.GREEN}") + print("") + explanation_root_node = Node("permissions") + explanation_permission_node = Node( + "permission_1" + " " + str(["role names list"]), + parent=explanation_root_node, + ) 
+ Node( + FeatureView.__name__ + ": " + str(["feature view names"]), + parent=explanation_permission_node, + ) + Node(FeatureService.__name__ + ": none", parent=explanation_permission_node) + Node("..", parent=explanation_permission_node) + Node( + "permission_2" + " " + str(["role names list"]), + parent=explanation_root_node, + ) + Node("..", parent=explanation_root_node) + explanation_root_node.show() + print( + f""" +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------{Style.RESET_ALL} + """ + ) + + +def handle_sd_verbose_permissions_command( + feast_type: list[FeastObject], + p: Permission, + policy_node: Node, + store: FeatureStore, + tags_filter: Optional[dict[str, str]], +): + saved_datasets = store.list_saved_datasets(tags=tags_filter) + saved_datasets_names = set() + for sd in saved_datasets: + if p.match_resource(sd): + saved_datasets_names.add(sd.name) + if len(saved_datasets_names) > 0: + Node( + feast_type.__name__ + ": " + str(list(saved_datasets_names)), # type: ignore[union-attr, attr-defined] + parent=policy_node, + ) + else: + Node(feast_type.__name__ + ": none", parent=policy_node) # type: ignore[union-attr, attr-defined] + + +def handle_vr_verbose_permissions_command( + feast_type: list[FeastObject], + p: Permission, + policy_node: Node, + store: FeatureStore, + tags_filter: Optional[dict[str, str]], +): + validation_references = store.list_validation_references(tags=tags_filter) + validation_references_names = set() + for vr in validation_references: + if p.match_resource(vr): + validation_references_names.add(vr.name) + if len(validation_references_names) > 0: + Node( + feast_type.__name__ + ": " + str(list(validation_references_names)), # type: ignore[union-attr, attr-defined] + parent=policy_node, + ) + else: + Node(feast_type.__name__ + ": none", parent=policy_node) # type: ignore[union-attr, attr-defined] + + +def handle_ds_verbose_permissions_command( + feast_type: list[FeastObject], + p: Permission, + policy_node: Node, + store: FeatureStore, + tags_filter: Optional[dict[str, str]], +): + data_sources = store.list_data_sources(tags=tags_filter) + data_sources_names = set() + for ds in data_sources: + if p.match_resource(ds): + data_sources_names.add(ds.name) + if len(data_sources_names) > 0: + Node( + feast_type.__name__ + ": " + str(list(data_sources_names)), # type: ignore[union-attr, attr-defined] + parent=policy_node, + ) + else: + Node(feast_type.__name__ + ": none", parent=policy_node) # type: ignore[union-attr, attr-defined] + + +def handle_fs_verbose_permissions_command( + feast_type: list[FeastObject], + p: Permission, + policy_node: Node, + store: FeatureStore, + tags_filter: Optional[dict[str, str]], +): + feature_services = store.list_feature_services(tags=tags_filter) + feature_services_names = set() + for fs in feature_services: + if p.match_resource(fs): + feature_services_names.add(fs.name) + if len(feature_services_names) > 0: + Node( + feast_type.__name__ + ": " + str(list(feature_services_names)), # type: ignore[union-attr, attr-defined] + parent=policy_node, + ) + else: + Node(feast_type.__name__ + ": none", parent=policy_node) # type: ignore[union-attr, attr-defined] + + +def handle_entity_verbose_permissions_command( + feast_type: list[FeastObject], + p: Permission, + policy_node: Node, + store: FeatureStore, + tags_filter: Optional[dict[str, str]], +): + entities = 
store.list_entities(tags=tags_filter) + entities_names = set() + for e in entities: + if p.match_resource(e): + entities_names.add(e.name) + if len(entities_names) > 0: + Node(feast_type.__name__ + ": " + str(list(entities_names)), parent=policy_node) # type: ignore[union-attr, attr-defined] + else: + Node(feast_type.__name__ + ": none", parent=policy_node) # type: ignore[union-attr, attr-defined] + + +def handle_fv_verbose_permissions_command( + feast_type: list[FeastObject], + p: Permission, + policy_node: Node, + store: FeatureStore, + tags_filter: Optional[dict[str, str]], +): + feature_views = [] + feature_views_names = set() + if feast_type == FeatureView: + feature_views = store.list_all_feature_views(tags=tags_filter) # type: ignore[assignment] + elif feast_type == OnDemandFeatureView: + feature_views = store.list_on_demand_feature_views( + tags=tags_filter # type: ignore[assignment] + ) + elif feast_type == BatchFeatureView: + feature_views = store.list_batch_feature_views(tags=tags_filter) # type: ignore[assignment] + elif feast_type == StreamFeatureView: + feature_views = store.list_stream_feature_views( + tags=tags_filter # type: ignore[assignment] + ) + for fv in feature_views: + if p.match_resource(fv): # type: ignore[arg-type] + feature_views_names.add(fv.name) + if len(feature_views_names) > 0: + Node( + feast_type.__name__ + " " + str(list(feature_views_names)), # type: ignore[union-attr, attr-defined] + parent=policy_node, + ) + else: + Node(feast_type.__name__ + ": none", parent=policy_node) # type: ignore[union-attr, attr-defined] + + +def handle_not_verbose_permissions_command( + p: Permission, policy: Policy, table: list[Any] +): + roles: set[str] = set() + if isinstance(policy, RoleBasedPolicy): + roles = set(policy.get_roles()) + table.append( + [ + p.name, + _to_multi_line([t.__name__ for t in p.types]), # type: ignore[union-attr, attr-defined] + p.name_pattern, + _to_multi_line([a.value.upper() for a in p.actions]), + _to_multi_line(sorted(roles)), + _dict_to_multi_line(p.required_tags), + ], + ) + + +def fetch_all_feast_objects(store: FeatureStore) -> list[FeastObject]: + objects: list[FeastObject] = [] + objects.extend(store.list_entities()) + objects.extend(store.list_all_feature_views()) # type: ignore[arg-type] + objects.extend(store.list_feature_services()) + objects.extend(store.list_data_sources()) + objects.extend(store.list_validation_references()) + objects.extend(store.list_saved_datasets()) + objects.extend(store.list_permissions()) + return objects + + +def handle_permissions_check_command( + object: FeastObject, permissions: list[Permission], table: list[Any] +): + for p in permissions: + if p.match_resource(object): + return + table.append( + [ + object.name, + type(object).__name__, + ] + ) + + +def handle_permissions_check_command_with_actions( + object: FeastObject, permissions: list[Permission], table: list[Any] +): + unmatched_actions = ALL_ACTIONS.copy() + for p in permissions: + if p.match_resource(object): + for action in ALL_ACTIONS: + if p.match_actions([action]) and action in unmatched_actions: + unmatched_actions.remove(action) + + if unmatched_actions: + table.append( + [ + object.name, + type(object).__name__, + _to_multi_line([a.value.upper() for a in unmatched_actions]), + ] + ) + + +def fetch_all_permission_roles(permissions: list[Permission]) -> list[str]: + all_roles = set() + for p in permissions: + if isinstance(p.policy, RoleBasedPolicy) and len(p.policy.get_roles()) > 0: + all_roles.update(p.policy.get_roles()) + + return 
sorted(all_roles) + + +def handler_list_all_permissions_roles(permissions: list[Permission], table: list[Any]): + all_roles = fetch_all_permission_roles(permissions) + for role in all_roles: + table.append( + [ + role, + ] + ) + + +def handler_list_all_permissions_roles_verbose( + objects: list[FeastObject], permissions: list[Permission], table: list[Any] +): + all_roles = fetch_all_permission_roles(permissions) + + for role in all_roles: + for o in objects: + permitted_actions = ALL_ACTIONS.copy() + for action in ALL_ACTIONS: + # Following code is derived from enforcer.enforce_policy but has a different return type and does not raise FeastPermissionError + matching_permissions = [ + p + for p in permissions + if p.match_resource(o) and p.match_actions([action]) + ] + + if matching_permissions: + evaluator = DecisionEvaluator( + len(matching_permissions), + ) + for p in matching_permissions: + permission_grant, permission_explanation = ( + p.policy.validate_user(user=User(username="", roles=[role])) + ) + evaluator.add_grant( + permission_grant, + f"Permission {p.name} denied access: {permission_explanation}", + ) + + if evaluator.is_decided(): + grant, explanations = evaluator.grant() + if not grant: + permitted_actions.remove(action) + break + else: + permitted_actions.remove(action) + + table.append( + [ + role, + o.name, + type(o).__name__, + _to_multi_line([a.value.upper() for a in permitted_actions]), + ] + ) + + +def _to_multi_line(values: list[str]) -> str: + if not values: + return "-" + return "\n".join(values) + + +def _dict_to_multi_line(values: dict[str, str]) -> str: + if not values: + return "-" + return "\n".join([f"{key} : {value}" for key, value in values.items()]) diff --git a/sdk/python/feast/data_source.py b/sdk/python/feast/data_source.py index 17fbfd5fcf..f7881c5045 100644 --- a/sdk/python/feast/data_source.py +++ b/sdk/python/feast/data_source.py @@ -524,12 +524,19 @@ def __init__( *, name: str, schema: List[Field], + timestamp_field: Optional[str] = None, description: Optional[str] = "", tags: Optional[Dict[str, str]] = None, owner: Optional[str] = "", ): """Creates a RequestSource object.""" - super().__init__(name=name, description=description, tags=tags, owner=owner) + super().__init__( + name=name, + timestamp_field=timestamp_field, + description=description, + tags=tags, + owner=owner, + ) self.schema = schema def validate(self, config: RepoConfig): @@ -570,6 +577,7 @@ def from_proto(data_source: DataSourceProto): return RequestSource( name=data_source.name, schema=list_schema, + timestamp_field=data_source.timestamp_field, description=data_source.description, tags=dict(data_source.tags), owner=data_source.owner, @@ -593,6 +601,7 @@ def to_proto(self) -> DataSourceProto: tags=self.tags, owner=self.owner, ) + data_source_proto.timestamp_field = self.timestamp_field data_source_proto.request_data_options.schema.extend(schema_pb) return data_source_proto diff --git a/sdk/python/feast/diff/registry_diff.py b/sdk/python/feast/diff/registry_diff.py index 9236b087d4..272c4590d8 100644 --- a/sdk/python/feast/diff/registry_diff.py +++ b/sdk/python/feast/diff/registry_diff.py @@ -10,6 +10,8 @@ from feast.feature_view import DUMMY_ENTITY_NAME from feast.infra.registry.base_registry import BaseRegistry from feast.infra.registry.registry import FEAST_OBJECT_TYPES, FeastObjectType +from feast.permissions.permission import Permission +from feast.project import Project from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from 
feast.protos.feast.core.Entity_pb2 import Entity as EntityProto from feast.protos.feast.core.FeatureService_pb2 import ( @@ -20,6 +22,7 @@ OnDemandFeatureView as OnDemandFeatureViewProto, ) from feast.protos.feast.core.OnDemandFeatureView_pb2 import OnDemandFeatureViewSpec +from feast.protos.feast.core.Permission_pb2 import Permission as PermissionProto from feast.protos.feast.core.SavedDataset_pb2 import SavedDataset as SavedDatasetProto from feast.protos.feast.core.StreamFeatureView_pb2 import ( StreamFeatureView as StreamFeatureViewProto, @@ -111,6 +114,7 @@ def tag_objects_for_keep_delete_update_add( StreamFeatureViewProto, ValidationReferenceProto, SavedDatasetProto, + PermissionProto, ) @@ -354,11 +358,25 @@ def apply_diff_to_registry( project, commit=False, ) + elif feast_object_diff.feast_object_type == FeastObjectType.PERMISSION: + permission_obj = cast( + Permission, feast_object_diff.current_feast_object + ) + registry.delete_permission( + permission_obj.name, + project, + commit=False, + ) if feast_object_diff.transition_type in [ TransitionType.CREATE, TransitionType.UPDATE, ]: + if feast_object_diff.feast_object_type == FeastObjectType.PROJECT: + registry.apply_project( + cast(Project, feast_object_diff.new_feast_object), + commit=False, + ) if feast_object_diff.feast_object_type == FeastObjectType.DATA_SOURCE: registry.apply_data_source( cast(DataSource, feast_object_diff.new_feast_object), @@ -387,6 +405,12 @@ def apply_diff_to_registry( project, commit=False, ) + elif feast_object_diff.feast_object_type == FeastObjectType.PERMISSION: + registry.apply_permission( + cast(Permission, feast_object_diff.new_feast_object), + project, + commit=False, + ) if commit: registry.commit() diff --git a/sdk/python/feast/driver_test_data.py b/sdk/python/feast/driver_test_data.py index defeb404a3..23f1f12477 100644 --- a/sdk/python/feast/driver_test_data.py +++ b/sdk/python/feast/driver_test_data.py @@ -1,10 +1,11 @@ # This module generates dummy data to be used for tests and examples. 
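As context for the pytz-to-stdlib migration below: a minimal sketch, assuming only pandas and the standard library, of the replacement idioms (the sample timestamp is invented for illustration):

    from datetime import timedelta, timezone
    from zoneinfo import ZoneInfo
    import pandas as pd

    ts = pd.Timestamp("2024-01-01 12:00:00")   # hypothetical naive timestamp
    utc_ts = ts.replace(tzinfo=timezone.utc)   # was: tzinfo=pytz.utc
    fixed = utc_ts.astimezone(timezone(timedelta(minutes=60)))   # was: pytz.FixedOffset(60)
    pacific = utc_ts.astimezone(ZoneInfo("US/Pacific"))          # was: pytz.timezone("US/Pacific")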
import itertools +from datetime import timedelta, timezone from enum import Enum import numpy as np import pandas as pd -from pytz import FixedOffset, timezone, utc +from zoneinfo import ZoneInfo from feast.infra.offline_stores.offline_utils import ( DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL, @@ -22,11 +23,15 @@ def _convert_event_timestamp(event_timestamp: pd.Timestamp, t: EventTimestampTyp if t == EventTimestampType.TZ_NAIVE: return event_timestamp elif t == EventTimestampType.TZ_AWARE_UTC: - return event_timestamp.replace(tzinfo=utc) + return event_timestamp.replace(tzinfo=timezone.utc) elif t == EventTimestampType.TZ_AWARE_FIXED_OFFSET: - return event_timestamp.replace(tzinfo=utc).astimezone(FixedOffset(60)) + return event_timestamp.replace(tzinfo=timezone.utc).astimezone( + tz=timezone(timedelta(minutes=60)) + ) elif t == EventTimestampType.TZ_AWARE_US_PACIFIC: - return event_timestamp.replace(tzinfo=utc).astimezone(timezone("US/Pacific")) + return event_timestamp.replace(tzinfo=timezone.utc).astimezone( + tz=ZoneInfo("US/Pacific") + ) def create_orders_df( diff --git a/sdk/python/feast/embedded_go/type_map.py b/sdk/python/feast/embedded_go/type_map.py index e70dc3be86..8f467c57ca 100644 --- a/sdk/python/feast/embedded_go/type_map.py +++ b/sdk/python/feast/embedded_go/type_map.py @@ -1,12 +1,12 @@ +from datetime import timezone from typing import List import pyarrow as pa -import pytz from feast.protos.feast.types import Value_pb2 from feast.types import Array, PrimitiveFeastType -PA_TIMESTAMP_TYPE = pa.timestamp("s", tz=pytz.UTC) +PA_TIMESTAMP_TYPE = pa.timestamp("s", tz=timezone.utc) ARROW_TYPE_TO_PROTO_FIELD = { pa.int32(): "int32_val", diff --git a/sdk/python/feast/errors.py b/sdk/python/feast/errors.py index c4c1157626..11ce9ebc62 100644 --- a/sdk/python/feast/errors.py +++ b/sdk/python/feast/errors.py @@ -1,34 +1,106 @@ -from typing import Any, List, Set +import importlib +import json +import logging +from typing import TYPE_CHECKING, Any, List, Optional, Set from colorama import Fore, Style +from fastapi import status as HttpStatusCode + +if TYPE_CHECKING: + from grpc import StatusCode as GrpcStatusCode from feast.field import Field +logger = logging.getLogger(__name__) + + +class FeastError(Exception): + pass -class DataSourceNotFoundException(Exception): + def grpc_status_code(self) -> "GrpcStatusCode": + from grpc import StatusCode as GrpcStatusCode + + return GrpcStatusCode.INTERNAL + + def http_status_code(self) -> int: + return HttpStatusCode.HTTP_500_INTERNAL_SERVER_ERROR + + def __str__(self) -> str: + if hasattr(self, "__overridden_message__"): + return str(getattr(self, "__overridden_message__")) + return super().__str__() + + def __repr__(self) -> str: + if hasattr(self, "__overridden_message__"): + return f"{type(self).__name__}('{getattr(self,'__overridden_message__')}')" + return super().__repr__() + + def to_error_detail(self) -> str: + """ + Returns a JSON representation of the error for serialization purposes. + + Returns: + str: a string representation of a JSON document including `module`, `class` and `message` fields. 
+ """ + + m = { + "module": f"{type(self).__module__}", + "class": f"{type(self).__name__}", + "message": f"{str(self)}", + } + return json.dumps(m) + + @staticmethod + def from_error_detail(detail: str) -> Optional["FeastError"]: + try: + m = json.loads(detail) + if all(f in m for f in ["module", "class", "message"]): + module_name = m["module"] + class_name = m["class"] + message = m["message"] + module = importlib.import_module(module_name) + class_reference = getattr(module, class_name) + + instance = class_reference.__new__(class_reference) + setattr(instance, "__overridden_message__", message) + return instance + except Exception as e: + logger.warning(f"Invalid error detail: {detail}: {e}") + return None + + +class DataSourceNotFoundException(FeastError): def __init__(self, path): super().__init__( f"Unable to find table at '{path}'. Please check that table exists." ) -class DataSourceNoNameException(Exception): +class DataSourceNoNameException(FeastError): def __init__(self): super().__init__( "Unable to infer a name for this data source. Either table or name must be specified." ) -class DataSourceRepeatNamesException(Exception): +class DataSourceRepeatNamesException(FeastError): def __init__(self, ds_name: str): super().__init__( f"Multiple data sources share the same case-insensitive name {ds_name}." ) -class FeastObjectNotFoundException(Exception): +class FeastObjectNotFoundException(FeastError): pass + def grpc_status_code(self) -> "GrpcStatusCode": + from grpc import StatusCode as GrpcStatusCode + + return GrpcStatusCode.NOT_FOUND + + def http_status_code(self) -> int: + return HttpStatusCode.HTTP_404_NOT_FOUND + class EntityNotFoundException(FeastObjectNotFoundException): def __init__(self, name, project=None): @@ -110,49 +182,49 @@ def __init__(self, name: str, project: str): ) -class FeastProviderLoginError(Exception): +class FeastProviderLoginError(FeastError): """Error class that indicates a user has not authenticated with their provider.""" -class FeastProviderNotImplementedError(Exception): +class FeastProviderNotImplementedError(FeastError): def __init__(self, provider_name): super().__init__(f"Provider '{provider_name}' is not implemented") -class FeastRegistryNotSetError(Exception): +class FeastRegistryNotSetError(FeastError): def __init__(self): super().__init__("Registry is not set, but is required") -class FeastFeatureServerTypeInvalidError(Exception): +class FeastFeatureServerTypeInvalidError(FeastError): def __init__(self, feature_server_type: str): super().__init__( f"Feature server type was set to {feature_server_type}, but this type is invalid" ) -class FeastRegistryTypeInvalidError(Exception): +class FeastRegistryTypeInvalidError(FeastError): def __init__(self, registry_type: str): super().__init__( f"Feature server type was set to {registry_type}, but this type is invalid" ) -class FeastModuleImportError(Exception): +class FeastModuleImportError(FeastError): def __init__(self, module_name: str, class_name: str): super().__init__( f"Could not import module '{module_name}' while attempting to load class '{class_name}'" ) -class FeastClassImportError(Exception): +class FeastClassImportError(FeastError): def __init__(self, module_name: str, class_name: str): super().__init__( f"Could not import class '{class_name}' from module '{module_name}'" ) -class FeastExtrasDependencyImportError(Exception): +class FeastExtrasDependencyImportError(FeastError): def __init__(self, extras_type: str, nested_error: str): message = ( nested_error @@ -162,14 +234,14 @@ def 
__init__(self, extras_type: str, nested_error: str): super().__init__(message) -class FeastOfflineStoreUnsupportedDataSource(Exception): +class FeastOfflineStoreUnsupportedDataSource(FeastError): def __init__(self, offline_store_name: str, data_source_name: str): super().__init__( f"Offline Store '{offline_store_name}' does not support data source '{data_source_name}'" ) -class FeatureNameCollisionError(Exception): +class FeatureNameCollisionError(FeastError): def __init__(self, feature_refs_collisions: List[str], full_feature_names: bool): if full_feature_names: collisions = [ref.replace(":", "__") for ref in feature_refs_collisions] @@ -191,7 +263,7 @@ def __init__(self, feature_refs_collisions: List[str], full_feature_names: bool) ) -class SpecifiedFeaturesNotPresentError(Exception): +class SpecifiedFeaturesNotPresentError(FeastError): def __init__( self, specified_features: List[Field], @@ -204,40 +276,47 @@ def __init__( ) -class SavedDatasetLocationAlreadyExists(Exception): +class SavedDatasetLocationAlreadyExists(FeastError): def __init__(self, location: str): super().__init__(f"Saved dataset location {location} already exists.") -class FeastOfflineStoreInvalidName(Exception): +class FeastOfflineStoreInvalidName(FeastError): def __init__(self, offline_store_class_name: str): super().__init__( f"Offline Store Class '{offline_store_class_name}' should end with the string `OfflineStore`.'" ) -class FeastOnlineStoreInvalidName(Exception): +class FeastOnlineStoreInvalidName(FeastError): def __init__(self, online_store_class_name: str): super().__init__( f"Online Store Class '{online_store_class_name}' should end with the string `OnlineStore`.'" ) -class FeastInvalidBaseClass(Exception): +class FeastInvalidAuthConfigClass(FeastError): + def __init__(self, auth_config_class_name: str): + super().__init__( + f"Auth Config Class '{auth_config_class_name}' should end with the string `AuthConfig`.'" + ) + + +class FeastInvalidBaseClass(FeastError): def __init__(self, class_name: str, class_type: str): super().__init__( f"Class '{class_name}' should have `{class_type}` as a base class." ) -class FeastOnlineStoreUnsupportedDataSource(Exception): +class FeastOnlineStoreUnsupportedDataSource(FeastError): def __init__(self, online_store_name: str, data_source_name: str): super().__init__( f"Online Store '{online_store_name}' does not support data source '{data_source_name}'" ) -class FeastEntityDFMissingColumnsError(Exception): +class FeastEntityDFMissingColumnsError(FeastError): def __init__(self, expected, missing): super().__init__( f"The entity dataframe you have provided must contain columns {expected}, " @@ -245,7 +324,7 @@ def __init__(self, expected, missing): ) -class FeastJoinKeysDuringMaterialization(Exception): +class FeastJoinKeysDuringMaterialization(FeastError): def __init__( self, source: str, join_key_columns: Set[str], source_columns: Set[str] ): @@ -255,7 +334,7 @@ def __init__( ) -class DockerDaemonNotRunning(Exception): +class DockerDaemonNotRunning(FeastError): def __init__(self): super().__init__( "The Docker Python sdk cannot connect to the Docker daemon. Please make sure you have" @@ -263,7 +342,7 @@ def __init__(self): ) -class RegistryInferenceFailure(Exception): +class RegistryInferenceFailure(FeastError): def __init__(self, repo_obj_type: str, specific_issue: str): super().__init__( f"Inference to fill in missing information for {repo_obj_type} failed. {specific_issue}. 
" @@ -271,58 +350,58 @@ def __init__(self, repo_obj_type: str, specific_issue: str): ) -class BigQueryJobStillRunning(Exception): +class BigQueryJobStillRunning(FeastError): def __init__(self, job_id): super().__init__(f"The BigQuery job with ID '{job_id}' is still running.") -class BigQueryJobCancelled(Exception): +class BigQueryJobCancelled(FeastError): def __init__(self, job_id): super().__init__(f"The BigQuery job with ID '{job_id}' was cancelled") -class RedshiftCredentialsError(Exception): +class RedshiftCredentialsError(FeastError): def __init__(self): super().__init__("Redshift API failed due to incorrect credentials") -class RedshiftQueryError(Exception): +class RedshiftQueryError(FeastError): def __init__(self, details): super().__init__(f"Redshift SQL Query failed to finish. Details: {details}") -class RedshiftTableNameTooLong(Exception): +class RedshiftTableNameTooLong(FeastError): def __init__(self, table_name: str): super().__init__( f"Redshift table names have a maximum length of 127 characters, but the table name {table_name} has length {len(table_name)} characters." ) -class SnowflakeCredentialsError(Exception): +class SnowflakeCredentialsError(FeastError): def __init__(self): super().__init__("Snowflake Connector failed due to incorrect credentials") -class SnowflakeQueryError(Exception): +class SnowflakeQueryError(FeastError): def __init__(self, details): super().__init__(f"Snowflake SQL Query failed to finish. Details: {details}") -class EntityTimestampInferenceException(Exception): +class EntityTimestampInferenceException(FeastError): def __init__(self, expected_column_name: str): super().__init__( f"Please provide an entity_df with a column named {expected_column_name} representing the time of events." ) -class FeatureViewMissingDuringFeatureServiceInference(Exception): +class FeatureViewMissingDuringFeatureServiceInference(FeastError): def __init__(self, feature_view_name: str, feature_service_name: str): super().__init__( f"Missing {feature_view_name} feature view during inference for {feature_service_name} feature service." ) -class InvalidEntityType(Exception): +class InvalidEntityType(FeastError): def __init__(self, entity_type: type): super().__init__( f"The entity dataframe you have provided must be a Pandas DataFrame or a SQL query, " @@ -330,7 +409,7 @@ def __init__(self, entity_type: type): ) -class ConflictingFeatureViewNames(Exception): +class ConflictingFeatureViewNames(FeastError): # TODO: print file location of conflicting feature views def __init__(self, feature_view_name: str): super().__init__( @@ -338,64 +417,103 @@ def __init__(self, feature_view_name: str): ) -class FeastInvalidInfraObjectType(Exception): +class FeastInvalidInfraObjectType(FeastError): def __init__(self): super().__init__("Could not identify the type of the InfraObject.") -class SnowflakeIncompleteConfig(Exception): +class SnowflakeIncompleteConfig(FeastError): def __init__(self, e: KeyError): super().__init__(f"{e} not defined in a config file or feature_store.yaml file") -class SnowflakeQueryUnknownError(Exception): +class SnowflakeQueryUnknownError(FeastError): def __init__(self, query: str): super().__init__(f"Snowflake query failed: {query}") -class InvalidFeaturesParameterType(Exception): +class InvalidFeaturesParameterType(FeastError): def __init__(self, features: Any): super().__init__( f"Invalid `features` parameter type {type(features)}. Expected one of List[str] and FeatureService." 
) -class EntitySQLEmptyResults(Exception): +class EntitySQLEmptyResults(FeastError): def __init__(self, entity_sql: str): super().__init__( f"No entity values found from the specified SQL query to generate the entity dataframe: {entity_sql}." ) -class EntityDFNotDateTime(Exception): +class EntityDFNotDateTime(FeastError): def __init__(self): super().__init__( "The entity dataframe specified does not have the timestamp field as a datetime." ) -class PushSourceNotFoundException(Exception): +class PushSourceNotFoundException(FeastError): def __init__(self, push_source_name: str): super().__init__(f"Unable to find push source '{push_source_name}'.") + def http_status_code(self) -> int: + return HttpStatusCode.HTTP_422_UNPROCESSABLE_ENTITY + -class ReadOnlyRegistryException(Exception): +class ReadOnlyRegistryException(FeastError): def __init__(self): super().__init__("Registry implementation is read-only.") -class DataFrameSerializationError(Exception): +class DataFrameSerializationError(FeastError): def __init__(self, input_dict: dict): super().__init__( f"Failed to serialize the provided dictionary into a pandas DataFrame: {input_dict.keys()}" ) -class ZeroRowsQueryResult(Exception): +class PermissionNotFoundException(FeastError): + def __init__(self, name, project): + super().__init__(f"Permission {name} does not exist in project {project}") + + +class PermissionObjectNotFoundException(FeastObjectNotFoundException): + def __init__(self, name, project=None): + if project: + super().__init__(f"Permission {name} does not exist in project {project}") + else: + super().__init__(f"Permission {name} does not exist") + + +class ProjectNotFoundException(FeastError): + def __init__(self, project): + super().__init__(f"Project {project} does not exist in registry") + + +class ProjectObjectNotFoundException(FeastObjectNotFoundException): + def __init__(self, name, project=None): + super().__init__(f"Project {name} does not exist") + + +class ZeroRowsQueryResult(FeastError): def __init__(self, query: str): super().__init__(f"This query returned zero rows:\n{query}") -class ZeroColumnQueryResult(Exception): +class ZeroColumnQueryResult(FeastError): def __init__(self, query: str): super().__init__(f"This query returned zero columns:\n{query}") + + +class FeastPermissionError(FeastError, PermissionError): + def __init__(self, details: str): + super().__init__(f"Permission error:\n{details}") + + def grpc_status_code(self) -> "GrpcStatusCode": + from grpc import StatusCode as GrpcStatusCode + + return GrpcStatusCode.PERMISSION_DENIED + + def http_status_code(self) -> int: + return HttpStatusCode.HTTP_403_FORBIDDEN diff --git a/sdk/python/feast/feast_object.py b/sdk/python/feast/feast_object.py index d9505dcb9f..63fa1e913b 100644 --- a/sdk/python/feast/feast_object.py +++ b/sdk/python/feast/feast_object.py @@ -1,4 +1,7 @@ -from typing import Union +from typing import Union, get_args + +from feast.project import Project +from feast.protos.feast.core.Project_pb2 import ProjectSpec from .batch_feature_view import BatchFeatureView from .data_source import DataSource @@ -6,11 +9,13 @@ from .feature_service import FeatureService from .feature_view import FeatureView from .on_demand_feature_view import OnDemandFeatureView +from .permissions.permission import Permission from .protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from .protos.feast.core.Entity_pb2 import EntitySpecV2 from .protos.feast.core.FeatureService_pb2 import FeatureServiceSpec from .protos.feast.core.FeatureView_pb2 import 
FeatureViewSpec from .protos.feast.core.OnDemandFeatureView_pb2 import OnDemandFeatureViewSpec +from .protos.feast.core.Permission_pb2 import PermissionSpec as PermissionSpec from .protos.feast.core.SavedDataset_pb2 import SavedDatasetSpec from .protos.feast.core.StreamFeatureView_pb2 import StreamFeatureViewSpec from .protos.feast.core.ValidationProfile_pb2 import ( @@ -21,6 +26,7 @@ # Convenience type representing all Feast objects FeastObject = Union[ + Project, FeatureView, OnDemandFeatureView, BatchFeatureView, @@ -30,9 +36,11 @@ DataSource, ValidationReference, SavedDataset, + Permission, ] FeastObjectSpecProto = Union[ + ProjectSpec, FeatureViewSpec, OnDemandFeatureViewSpec, StreamFeatureViewSpec, @@ -41,4 +49,13 @@ DataSourceProto, ValidationReferenceProto, SavedDatasetSpec, + PermissionSpec, +] + +ALL_RESOURCE_TYPES = list(get_args(FeastObject)) +ALL_FEATURE_VIEW_TYPES = [ + FeatureView, + OnDemandFeatureView, + BatchFeatureView, + StreamFeatureView, ] diff --git a/sdk/python/feast/feature_logging.py b/sdk/python/feast/feature_logging.py index 2843f87121..9bd5d8a91c 100644 --- a/sdk/python/feast/feature_logging.py +++ b/sdk/python/feast/feature_logging.py @@ -1,8 +1,8 @@ import abc +from datetime import timezone from typing import TYPE_CHECKING, Dict, Optional, Type, cast import pyarrow as pa -from pytz import UTC from feast.data_source import DataSource from feast.embedded_go.type_map import FEAST_TYPE_TO_ARROW_TYPE, PA_TIMESTAMP_TYPE @@ -97,7 +97,7 @@ def get_schema(self, registry: "BaseRegistry") -> pa.Schema: ) # system columns - fields[LOG_TIMESTAMP_FIELD] = pa.timestamp("us", tz=UTC) + fields[LOG_TIMESTAMP_FIELD] = pa.timestamp("us", tz=timezone.utc) fields[LOG_DATE_FIELD] = pa.date32() fields[REQUEST_ID_FIELD] = pa.string() diff --git a/sdk/python/feast/feature_server.py b/sdk/python/feast/feature_server.py index 908c9741c2..9757e95143 100644 --- a/sdk/python/feast/feature_server.py +++ b/sdk/python/feast/feature_server.py @@ -9,9 +9,9 @@ import pandas as pd import psutil from dateutil import parser -from fastapi import FastAPI, HTTPException, Request, Response, status +from fastapi import Depends, FastAPI, Request, Response, status from fastapi.logger import logger -from fastapi.params import Depends +from fastapi.responses import JSONResponse from google.protobuf.json_format import MessageToDict from prometheus_client import Gauge, start_http_server from pydantic import BaseModel @@ -20,7 +20,19 @@ from feast import proto_json, utils from feast.constants import DEFAULT_FEATURE_SERVER_REGISTRY_TTL from feast.data_source import PushMode -from feast.errors import PushSourceNotFoundException +from feast.errors import ( + FeastError, + FeatureViewNotFoundException, +) +from feast.permissions.action import WRITE, AuthzedAction +from feast.permissions.security_manager import assert_permissions +from feast.permissions.server.rest import inject_user_details +from feast.permissions.server.utils import ( + ServerType, + init_auth_manager, + init_security_manager, + str_to_auth_manager_type, +) # Define prometheus metrics cpu_usage_gauge = Gauge( @@ -78,9 +90,11 @@ def async_refresh(): registry_proto = store.registry.proto() if shutting_down: return - nonlocal active_timer - active_timer = threading.Timer(registry_ttl_sec, async_refresh) - active_timer.start() + + if registry_ttl_sec: + nonlocal active_timer + active_timer = threading.Timer(registry_ttl_sec, async_refresh) + active_timer.start() @asynccontextmanager async def lifespan(app: FastAPI): @@ -93,115 +107,163 @@ async def 
lifespan(app: FastAPI): async def get_body(request: Request): return await request.body() - @app.post("/get-online-features") + @app.post( + "/get-online-features", + dependencies=[Depends(inject_user_details)], + ) def get_online_features(body=Depends(get_body)): - try: - body = json.loads(body) - # Initialize parameters for FeatureStore.get_online_features(...) call - if "feature_service" in body: - features = store.get_feature_service( - body["feature_service"], allow_cache=True + body = json.loads(body) + full_feature_names = body.get("full_feature_names", False) + entity_rows = body["entities"] + # Initialize parameters for FeatureStore.get_online_features(...) call + if "feature_service" in body: + feature_service = store.get_feature_service( + body["feature_service"], allow_cache=True + ) + assert_permissions( + resource=feature_service, actions=[AuthzedAction.READ_ONLINE] + ) + features = feature_service + else: + features = body["features"] + all_feature_views, all_on_demand_feature_views = ( + utils._get_feature_views_to_use( + store.registry, + store.project, + features, + allow_cache=True, + hide_dummy_entity=False, + ) + ) + for feature_view in all_feature_views: + assert_permissions( + resource=feature_view, actions=[AuthzedAction.READ_ONLINE] + ) + for od_feature_view in all_on_demand_feature_views: + assert_permissions( + resource=od_feature_view, actions=[AuthzedAction.READ_ONLINE] ) - else: - features = body["features"] - full_feature_names = body.get("full_feature_names", False) + response_proto = store.get_online_features( + features=features, + entity_rows=entity_rows, + full_feature_names=full_feature_names, + ).proto - response_proto = store.get_online_features( - features=features, - entity_rows=body["entities"], - full_feature_names=full_feature_names, - ).proto + # Convert the Protobuf object to JSON and return it + return MessageToDict( + response_proto, preserving_proto_field_name=True, float_precision=18 + ) - # Convert the Protobuf object to JSON and return it - return MessageToDict( - response_proto, preserving_proto_field_name=True, float_precision=18 + @app.post("/push", dependencies=[Depends(inject_user_details)]) + def push(body=Depends(get_body)): + request = PushFeaturesRequest(**json.loads(body)) + df = pd.DataFrame(request.df) + actions = [] + if request.to == "offline": + to = PushMode.OFFLINE + actions = [AuthzedAction.WRITE_OFFLINE] + elif request.to == "online": + to = PushMode.ONLINE + actions = [AuthzedAction.WRITE_ONLINE] + elif request.to == "online_and_offline": + to = PushMode.ONLINE_AND_OFFLINE + actions = WRITE + else: + raise ValueError( + f"{request.to} is not a supported push format. Please specify one of these ['online', 'offline', 'online_and_offline']." ) - except Exception as e: - # Print the original exception on the server side - logger.exception(traceback.format_exc()) - # Raise HTTPException to return the error message to the client - raise HTTPException(status_code=500, detail=str(e)) - @app.post("/push") - def push(body=Depends(get_body)): - try: - request = PushFeaturesRequest(**json.loads(body)) - df = pd.DataFrame(request.df) - if request.to == "offline": - to = PushMode.OFFLINE - elif request.to == "online": - to = PushMode.ONLINE - elif request.to == "online_and_offline": - to = PushMode.ONLINE_AND_OFFLINE - else: - raise ValueError( - f"{request.to} is not a supported push format. Please specify one of these ['online', 'offline', 'online_and_offline']." 
- ) - store.push( - push_source_name=request.push_source_name, - df=df, - allow_registry_cache=request.allow_registry_cache, - to=to, + from feast.data_source import PushSource + + all_fvs = store.list_feature_views( + allow_cache=request.allow_registry_cache + ) + store.list_stream_feature_views(allow_cache=request.allow_registry_cache) + fvs_with_push_sources = { + fv + for fv in all_fvs + if ( + fv.stream_source is not None + and isinstance(fv.stream_source, PushSource) + and fv.stream_source.name == request.push_source_name ) - except PushSourceNotFoundException as e: - # Print the original exception on the server side - logger.exception(traceback.format_exc()) - # Raise HTTPException to return the error message to the client - raise HTTPException(status_code=422, detail=str(e)) - except Exception as e: - # Print the original exception on the server side - logger.exception(traceback.format_exc()) - # Raise HTTPException to return the error message to the client - raise HTTPException(status_code=500, detail=str(e)) - - @app.post("/write-to-online-store") + } + + for feature_view in fvs_with_push_sources: + assert_permissions(resource=feature_view, actions=actions) + + store.push( + push_source_name=request.push_source_name, + df=df, + allow_registry_cache=request.allow_registry_cache, + to=to, + ) + + @app.post("/write-to-online-store", dependencies=[Depends(inject_user_details)]) def write_to_online_store(body=Depends(get_body)): + request = WriteToFeatureStoreRequest(**json.loads(body)) + df = pd.DataFrame(request.df) + feature_view_name = request.feature_view_name + allow_registry_cache = request.allow_registry_cache try: - request = WriteToFeatureStoreRequest(**json.loads(body)) - df = pd.DataFrame(request.df) - store.write_to_online_store( - feature_view_name=request.feature_view_name, - df=df, - allow_registry_cache=request.allow_registry_cache, + feature_view = store.get_stream_feature_view( + feature_view_name, allow_registry_cache=allow_registry_cache + ) + except FeatureViewNotFoundException: + feature_view = store.get_feature_view( + feature_view_name, allow_registry_cache=allow_registry_cache ) - except Exception as e: - # Print the original exception on the server side - logger.exception(traceback.format_exc()) - # Raise HTTPException to return the error message to the client - raise HTTPException(status_code=500, detail=str(e)) + + assert_permissions(resource=feature_view, actions=[AuthzedAction.WRITE_ONLINE]) + store.write_to_online_store( + feature_view_name=feature_view_name, + df=df, + allow_registry_cache=allow_registry_cache, + ) @app.get("/health") def health(): return Response(status_code=status.HTTP_200_OK) - @app.post("/materialize") + @app.post("/materialize", dependencies=[Depends(inject_user_details)]) def materialize(body=Depends(get_body)): - try: - request = MaterializeRequest(**json.loads(body)) - store.materialize( - utils.make_tzaware(parser.parse(request.start_ts)), - utils.make_tzaware(parser.parse(request.end_ts)), - request.feature_views, + request = MaterializeRequest(**json.loads(body)) + for feature_view in request.feature_views: + assert_permissions( + resource=feature_view, actions=[AuthzedAction.WRITE_ONLINE] ) - except Exception as e: - # Print the original exception on the server side - logger.exception(traceback.format_exc()) - # Raise HTTPException to return the error message to the client - raise HTTPException(status_code=500, detail=str(e)) + store.materialize( + utils.make_tzaware(parser.parse(request.start_ts)), + 
utils.make_tzaware(parser.parse(request.end_ts)), + request.feature_views, + ) - @app.post("/materialize-incremental") + @app.post("/materialize-incremental", dependencies=[Depends(inject_user_details)]) def materialize_incremental(body=Depends(get_body)): - try: - request = MaterializeIncrementalRequest(**json.loads(body)) - store.materialize_incremental( - utils.make_tzaware(parser.parse(request.end_ts)), request.feature_views + request = MaterializeIncrementalRequest(**json.loads(body)) + for feature_view in request.feature_views: + assert_permissions( + resource=feature_view, actions=[AuthzedAction.WRITE_ONLINE] + ) + store.materialize_incremental( + utils.make_tzaware(parser.parse(request.end_ts)), request.feature_views + ) + + @app.exception_handler(Exception) + async def rest_exception_handler(request: Request, exc: Exception): + # Print the original exception on the server side + logger.exception(traceback.format_exc()) + + if isinstance(exc, FeastError): + return JSONResponse( + status_code=exc.http_status_code(), + content=exc.to_error_detail(), + ) + else: + return JSONResponse( + status_code=500, + content=str(exc), ) - except Exception as e: - # Print the original exception on the server side - logger.exception(traceback.format_exc()) - # Raise HTTPException to return the error message to the client - raise HTTPException(status_code=500, detail=str(e)) return app @@ -231,15 +293,15 @@ def load(self): def monitor_resources(self, interval: int = 5): """Function to monitor and update CPU and memory usage metrics.""" - print(f"Start monitor_resources({interval})") + logger.debug(f"Starting resource monitoring with interval {interval} seconds") p = psutil.Process() - print(f"PID is {p.pid}") + logger.debug(f"PID is {p.pid}") while True: with p.oneshot(): cpu_usage = p.cpu_percent() memory_usage = p.memory_percent() - print(f"cpu_usage is {cpu_usage}") - print(f"memory_usage is {memory_usage}") + logger.debug(f"CPU usage: {cpu_usage}%, Memory usage: {memory_usage}%") cpu_usage_gauge.set(cpu_usage) memory_usage_gauge.set(memory_usage) time.sleep(interval) @@ -256,15 +318,27 @@ def start_server( metrics: bool, ): if metrics: - print("Start Prometheus Server") + logger.info("Starting Prometheus Server") start_http_server(8000) - print("Start a background thread to monitor CPU and memory usage") + logger.debug("Starting background thread to monitor CPU and memory usage") monitoring_thread = threading.Thread( target=monitor_resources, args=(5,), daemon=True ) monitoring_thread.start() + logger.debug("start_server called") + auth_type = str_to_auth_manager_type(store.config.auth_config.type) + logger.info(f"Auth type: {auth_type}") + init_security_manager(auth_type=auth_type, fs=store) + logger.debug("Security manager initialized successfully") + init_auth_manager( + auth_type=auth_type, + server_type=ServerType.REST, + auth_config=store.config.auth_config, + ) + logger.debug("Auth manager initialized successfully") + if sys.platform != "win32": FeastServeApplication( store=store, diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 77638f5a62..52556eda15 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License.
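A short sketch of the error round-trip that the rest_exception_handler above enables (illustrative; the feature view and project names are invented): to_error_detail() is what the handler serializes for clients, and FeastError.from_error_detail() rebuilds the typed exception on the caller's side:

    from feast.errors import FeastError, FeatureViewNotFoundException

    err = FeatureViewNotFoundException("driver_hourly_stats", "demo_project")
    detail = err.to_error_detail()   # JSON with "module", "class", "message"
    rebuilt = FeastError.from_error_detail(detail)
    assert isinstance(rebuilt, FeatureViewNotFoundException)
    assert rebuilt.http_status_code() == 404   # FeastObjectNotFoundException maps to 404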
import itertools -import logging import os import warnings from datetime import datetime, timedelta @@ -60,11 +59,7 @@ ) from feast.feast_object import FeastObject from feast.feature_service import FeatureService -from feast.feature_view import ( - DUMMY_ENTITY, - DUMMY_ENTITY_NAME, - FeatureView, -) +from feast.feature_view import DUMMY_ENTITY, DUMMY_ENTITY_NAME, FeatureView from feast.inference import ( update_data_sources_with_inferred_event_timestamp_col, update_feature_views_with_inferred_features_and_entities, @@ -76,12 +71,16 @@ from feast.infra.registry.sql import SqlRegistry from feast.on_demand_feature_view import OnDemandFeatureView from feast.online_response import OnlineResponse +from feast.permissions.permission import Permission +from feast.project import Project from feast.protos.feast.core.InfraObject_pb2 import Infra as InfraProto from feast.protos.feast.serving.ServingService_pb2 import ( FieldStatus, GetOnlineFeaturesResponse, ) +from feast.protos.feast.types.EntityKey_pb2 import EntityKey from feast.protos.feast.types.Value_pb2 import RepeatedValue, Value +from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import RepoConfig, load_repo_config from feast.repo_contents import RepoContents from feast.saved_dataset import SavedDataset, SavedDatasetStorage, ValidationReference @@ -157,11 +156,16 @@ def __init__( elif registry_config and registry_config.registry_type == "remote": from feast.infra.registry.remote import RemoteRegistry - self._registry = RemoteRegistry(registry_config, self.config.project, None) + self._registry = RemoteRegistry( + registry_config, self.config.project, None, self.config.auth_config + ) else: - r = Registry(self.config.project, registry_config, repo_path=self.repo_path) - r._initialize_registry(self.config.project) - self._registry = r + self._registry = Registry( + self.config.project, + registry_config, + repo_path=self.repo_path, + auth_config=self.config.auth_config, + ) self._provider = get_provider(self.config) @@ -197,13 +201,8 @@ def refresh_registry(self): greater than 0, then once the cache becomes stale (more time than the TTL has passed), a new cache will be downloaded synchronously, which may increase latencies if the triggering method is get_online_features(). """ - registry_config = self.config.registry - registry = Registry( - self.config.project, registry_config, repo_path=self.repo_path - ) - registry.refresh(self.config.project) - self._registry = registry + self._registry.refresh(self.project) def list_entities( self, allow_cache: bool = False, tags: Optional[dict[str, str]] = None @@ -249,9 +248,26 @@ def list_feature_services( """ return self._registry.list_feature_services(self.project, tags=tags) + def _list_all_feature_views( + self, allow_cache: bool = False, tags: Optional[dict[str, str]] = None + ) -> List[BaseFeatureView]: + feature_views = [] + for fv in self.registry.list_all_feature_views( + self.project, allow_cache=allow_cache, tags=tags + ): + if ( + isinstance(fv, FeatureView) + and fv.entities + and fv.entities[0] == DUMMY_ENTITY_NAME + ): + fv.entities = [] + fv.entity_columns = [] + feature_views.append(fv) + return feature_views + def list_all_feature_views( self, allow_cache: bool = False, tags: Optional[dict[str, str]] = None - ) -> List[Union[FeatureView, StreamFeatureView, OnDemandFeatureView]]: + ) -> List[BaseFeatureView]: """ Retrieves the list of feature views from the registry. @@ -276,10 +292,6 @@ def list_feature_views( Returns: A list of feature views. 
""" - logging.warning( - "list_feature_views will make breaking changes. Please use list_batch_feature_views instead. " - "list_feature_views will behave like list_all_feature_views in the future." - ) return utils._list_feature_views( self._registry, self.project, allow_cache, tags=tags ) @@ -299,44 +311,6 @@ def list_batch_feature_views( """ return self._list_batch_feature_views(allow_cache=allow_cache, tags=tags) - def _list_all_feature_views( - self, - allow_cache: bool = False, - tags: Optional[dict[str, str]] = None, - ) -> List[Union[FeatureView, StreamFeatureView, OnDemandFeatureView]]: - all_feature_views = ( - utils._list_feature_views( - self._registry, self.project, allow_cache, tags=tags - ) - + self._list_stream_feature_views(allow_cache, tags=tags) - + self.list_on_demand_feature_views(allow_cache, tags=tags) - ) - return all_feature_views - - def _list_feature_views( - self, - allow_cache: bool = False, - hide_dummy_entity: bool = True, - tags: Optional[dict[str, str]] = None, - ) -> List[FeatureView]: - logging.warning( - "_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. " - "_list_feature_views will behave like _list_all_feature_views in the future." - ) - feature_views = [] - for fv in self._registry.list_feature_views( - self.project, allow_cache=allow_cache, tags=tags - ): - if ( - hide_dummy_entity - and fv.entities - and fv.entities[0] == DUMMY_ENTITY_NAME - ): - fv.entities = [] - fv.entity_columns = [] - feature_views.append(fv) - return feature_views - def _list_batch_feature_views( self, allow_cache: bool = False, @@ -518,20 +492,24 @@ def _get_stream_feature_view( stream_feature_view.entities = [] return stream_feature_view - def get_on_demand_feature_view(self, name: str) -> OnDemandFeatureView: + def get_on_demand_feature_view( + self, name: str, allow_registry_cache: bool = False + ) -> OnDemandFeatureView: """ Retrieves a feature view. Args: name: Name of feature view. - + allow_registry_cache: (Optional) Whether to allow returning this entity from a cached registry Returns: The specified feature view. Raises: FeatureViewNotFoundException: The feature view could not be found. """ - return self._registry.get_on_demand_feature_view(name, self.project) + return self._registry.get_on_demand_feature_view( + name, self.project, allow_cache=allow_registry_cache + ) def get_data_source(self, name: str) -> DataSource: """ @@ -729,12 +707,14 @@ def plan( ... source=driver_hourly_stats, ... ) >>> registry_diff, infra_diff, new_infra = fs.plan(RepoContents( + ... projects=[Project(name="project")], ... data_sources=[driver_hourly_stats], ... feature_views=[driver_hourly_stats_view], ... on_demand_feature_views=list(), ... stream_feature_views=list(), ... entities=[driver], - ... feature_services=list())) # register entity and feature view + ... feature_services=list(), + ... permissions=list())) # register entity and feature view """ # Validate and run inference on all the objects to be registered. self._validate_all_feature_views( @@ -790,6 +770,7 @@ def _apply_diffs( def apply( self, objects: Union[ + Project, DataSource, Entity, FeatureView, @@ -798,6 +779,7 @@ def apply( StreamFeatureView, FeatureService, ValidationReference, + Permission, List[FeastObject], ], objects_to_delete: Optional[List[FeastObject]] = None, @@ -849,6 +831,9 @@ def apply( objects_to_delete = [] # Separate all objects into entities, feature services, and different feature view types. 
+ projects_to_update = [ob for ob in objects if isinstance(ob, Project)] + if len(projects_to_update) > 1: + raise ValueError("Only one project can be applied at a time.") entities_to_update = [ob for ob in objects if isinstance(ob, Entity)] views_to_update = [ ob @@ -869,6 +854,7 @@ def apply( validation_references_to_update = [ ob for ob in objects if isinstance(ob, ValidationReference) ] + permissions_to_update = [ob for ob in objects if isinstance(ob, Permission)] batch_sources_to_add: List[DataSource] = [] for data_source in data_sources_set_to_update: @@ -910,6 +896,8 @@ def apply( ) # Add all objects to the registry and update the provider's infrastructure. + for project in projects_to_update: + self._registry.apply_project(project, commit=False) for ds in data_sources_to_update: self._registry.apply_data_source(ds, project=self.project, commit=False) for view in itertools.chain(views_to_update, odfvs_to_update, sfvs_to_update): @@ -924,10 +912,15 @@ def apply( self._registry.apply_validation_reference( validation_references, project=self.project, commit=False ) + for permission in permissions_to_update: + self._registry.apply_permission( + permission, project=self.project, commit=False + ) entities_to_delete = [] views_to_delete = [] sfvs_to_delete = [] + permissions_to_delete = [] if not partial: # Delete all registry objects that should not exist. entities_to_delete = [ @@ -956,6 +949,9 @@ def apply( validation_references_to_delete = [ ob for ob in objects_to_delete if isinstance(ob, ValidationReference) ] + permissions_to_delete = [ + ob for ob in objects_to_delete if isinstance(ob, Permission) + ] for data_source in data_sources_to_delete: self._registry.delete_data_source( @@ -985,6 +981,10 @@ def apply( self._registry.delete_validation_reference( validation_references.name, project=self.project, commit=False ) + for permission in permissions_to_delete: + self._registry.delete_permission( + permission.name, project=self.project, commit=False + ) tables_to_delete: List[FeatureView] = ( views_to_delete + sfvs_to_delete if not partial else [] # type: ignore @@ -1672,20 +1672,29 @@ def retrieve_online_documents( distance_metric, ) - # TODO Refactor to better way of populating result - # TODO populate entity in the response after returning entity in document_features is supported # TODO currently not return the vector value since it is same as feature value, if embedding is supported, # the feature value can be raw text before embedded - document_feature_vals = [feature[2] for feature in document_features] - document_feature_distance_vals = [feature[4] for feature in document_features] + entity_key_vals = [feature[1] for feature in document_features] + join_key_values: Dict[str, List[ValueProto]] = {} + for entity_key_val in entity_key_vals: + if entity_key_val is not None: + for join_key, entity_value in zip( + entity_key_val.join_keys, entity_key_val.entity_values + ): + if join_key not in join_key_values: + join_key_values[join_key] = [] + join_key_values[join_key].append(entity_value) + + document_feature_vals = [feature[4] for feature in document_features] + document_feature_distance_vals = [feature[5] for feature in document_features] online_features_response = GetOnlineFeaturesResponse(results=[]) utils._populate_result_rows_from_columnar( online_features_response=online_features_response, - data={requested_feature: document_feature_vals}, - ) - utils._populate_result_rows_from_columnar( - online_features_response=online_features_response, - data={"distance": 
document_feature_distance_vals}, + data={ + **join_key_values, + requested_feature: document_feature_vals, + "distance": document_feature_distance_vals, + }, ) return OnlineResponse(online_features_response) @@ -1697,7 +1706,11 @@ def _retrieve_from_online_store( query: List[float], top_k: int, distance_metric: Optional[str], - ) -> List[Tuple[Timestamp, "FieldStatus.ValueType", Value, Value, Value]]: + ) -> List[ + Tuple[ + Timestamp, Optional[EntityKey], "FieldStatus.ValueType", Value, Value, Value + ] + ]: """ Search and return document features from the online document store. """ @@ -1713,7 +1726,7 @@ def _retrieve_from_online_store( read_row_protos = [] row_ts_proto = Timestamp() - for row_ts, feature_val, vector_value, distance_val in documents: + for row_ts, entity_key, feature_val, vector_value, distance_val in documents: # Reset timestamp to default or update if row_ts is not None if row_ts is not None: row_ts_proto.FromDatetime(row_ts) @@ -1727,7 +1740,14 @@ def _retrieve_from_online_store( status = FieldStatus.PRESENT read_row_protos.append( - (row_ts_proto, status, feature_val, vector_value, distance_val) + ( + row_ts_proto, + entity_key, + status, + feature_val, + vector_value, + distance_val, + ) ) return read_row_protos @@ -1915,6 +1935,102 @@ def get_validation_reference( ref._dataset = self.get_saved_dataset(ref.dataset_name) return ref + def list_validation_references( + self, allow_cache: bool = False, tags: Optional[dict[str, str]] = None + ) -> List[ValidationReference]: + """ + Retrieves the list of validation references from the registry. + + Args: + allow_cache: Whether to allow returning validation references from a cached registry. + tags: Filter by tags. + + Returns: + A list of validation references. + """ + return self._registry.list_validation_references( + self.project, allow_cache=allow_cache, tags=tags + ) + + def list_permissions( + self, allow_cache: bool = False, tags: Optional[dict[str, str]] = None + ) -> List[Permission]: + """ + Retrieves the list of permissions from the registry. + + Args: + allow_cache: Whether to allow returning permissions from a cached registry. + tags: Filter by tags. + + Returns: + A list of permissions. + """ + return self._registry.list_permissions( + self.project, allow_cache=allow_cache, tags=tags + ) + + def get_permission(self, name: str) -> Permission: + """ + Retrieves a permission from the registry. + + Args: + name: Name of the permission. + + Returns: + The specified permission. + + Raises: + PermissionObjectNotFoundException: The permission could not be found. + """ + return self._registry.get_permission(name, self.project) + + def list_projects( + self, allow_cache: bool = False, tags: Optional[dict[str, str]] = None + ) -> List[Project]: + """ + Retrieves the list of projects from the registry. + + Args: + allow_cache: Whether to allow returning projects from a cached registry. + tags: Filter by tags. + + Returns: + A list of projects. + """ + return self._registry.list_projects(allow_cache=allow_cache, tags=tags) + + def get_project(self, name: Optional[str]) -> Project: + """ + Retrieves a project from the registry. + + Args: + name: Name of the project. + + Returns: + The specified project. + + Raises: + ProjectObjectNotFoundException: The project could not be found. 
+ """ + return self._registry.get_project(name or self.project) + + def list_saved_datasets( + self, allow_cache: bool = False, tags: Optional[dict[str, str]] = None + ) -> List[SavedDataset]: + """ + Retrieves the list of saved datasets from the registry. + + Args: + allow_cache: Whether to allow returning saved datasets from a cached registry. + tags: Filter by tags. + + Returns: + A list of saved datasets. + """ + return self._registry.list_saved_datasets( + self.project, allow_cache=allow_cache, tags=tags + ) + def _print_materialization_log( start_date, end_date, num_feature_views: int, online_store: str diff --git a/sdk/python/feast/feature_view.py b/sdk/python/feast/feature_view.py index 1a85a4b90c..33ea761158 100644 --- a/sdk/python/feast/feature_view.py +++ b/sdk/python/feast/feature_view.py @@ -206,6 +206,7 @@ def __init__( description=description, tags=tags, owner=owner, + source=source, ) self.online = online self.materialization_intervals = [] @@ -423,13 +424,15 @@ def from_proto(cls, feature_view_proto: FeatureViewProto): if len(feature_view.entities) != len(feature_view.entity_columns): warnings.warn( - f"There are some mismatches in your feature view's registered entities. Please check if you have applied your entities correctly." + f"There are some mismatches in your feature view: {feature_view.name} registered entities. Please check if you have applied your entities correctly." f"Entities: {feature_view.entities} vs Entity Columns: {feature_view.entity_columns}" ) # FeatureViewProjections are not saved in the FeatureView proto. # Create the default projection. - feature_view.projection = FeatureViewProjection.from_definition(feature_view) + feature_view.projection = FeatureViewProjection.from_feature_view_definition( + feature_view + ) if feature_view_proto.meta.HasField("created_timestamp"): feature_view.created_timestamp = ( diff --git a/sdk/python/feast/feature_view_projection.py b/sdk/python/feast/feature_view_projection.py index ff5b1b6e06..70415e9ed3 100644 --- a/sdk/python/feast/feature_view_projection.py +++ b/sdk/python/feast/feature_view_projection.py @@ -2,6 +2,7 @@ from attr import dataclass +from feast.data_source import DataSource from feast.field import Field from feast.protos.feast.core.FeatureViewProjection_pb2 import ( FeatureViewProjection as FeatureViewProjectionProto, @@ -9,6 +10,7 @@ if TYPE_CHECKING: from feast.base_feature_view import BaseFeatureView + from feast.feature_view import FeatureView @dataclass @@ -27,6 +29,13 @@ class FeatureViewProjection: is not ready to be projected, i.e. still needs to go through feature inference. join_key_map: A map to modify join key columns during retrieval of this feature view projection. + timestamp_field: The timestamp field of the feature view projection. + date_partition_column: The date partition column of the feature view projection. + created_timestamp_column: The created timestamp column of the feature view projection. + batch_source: The batch source of data where this group of features + is stored. This is optional ONLY if a push source is specified as the + stream_source, since push sources contain their own batch sources. 
+ """ name: str @@ -34,15 +43,29 @@ class FeatureViewProjection: desired_features: List[str] features: List[Field] join_key_map: Dict[str, str] = {} + timestamp_field: Optional[str] = None + date_partition_column: Optional[str] = None + created_timestamp_column: Optional[str] = None + batch_source: Optional[DataSource] = None def name_to_use(self): return self.name_alias or self.name def to_proto(self) -> FeatureViewProjectionProto: + batch_source = None + if getattr(self, "batch_source", None): + if isinstance(self.batch_source, DataSource): + batch_source = self.batch_source.to_proto() + else: + batch_source = self.batch_source feature_reference_proto = FeatureViewProjectionProto( feature_view_name=self.name, feature_view_name_alias=self.name_alias or "", join_key_map=self.join_key_map, + timestamp_field=self.timestamp_field or "", + date_partition_column=self.date_partition_column or "", + created_timestamp_column=self.created_timestamp_column or "", + batch_source=batch_source, ) for feature in self.features: feature_reference_proto.feature_columns.append(feature.to_proto()) @@ -50,27 +73,76 @@ def to_proto(self) -> FeatureViewProjectionProto: return feature_reference_proto @staticmethod - def from_proto(proto: FeatureViewProjectionProto): + def from_proto(proto: FeatureViewProjectionProto) -> "FeatureViewProjection": + batch_source = ( + DataSource.from_proto(proto.batch_source) + if str(getattr(proto, "batch_source")) + else None + ) feature_view_projection = FeatureViewProjection( name=proto.feature_view_name, name_alias=proto.feature_view_name_alias or None, features=[], join_key_map=dict(proto.join_key_map), desired_features=[], + timestamp_field=proto.timestamp_field or None, + date_partition_column=proto.date_partition_column or None, + created_timestamp_column=proto.created_timestamp_column or None, + batch_source=batch_source, ) for feature_column in proto.feature_columns: feature_view_projection.features.append(Field.from_proto(feature_column)) return feature_view_projection + @staticmethod + def from_feature_view_definition(feature_view: "FeatureView"): + # TODO need to implement this for StreamFeatureViews + if getattr(feature_view, "batch_source", None): + return FeatureViewProjection( + name=feature_view.name, + name_alias=None, + features=feature_view.features, + desired_features=[], + timestamp_field=feature_view.batch_source.created_timestamp_column + or None, + created_timestamp_column=feature_view.batch_source.created_timestamp_column + or None, + date_partition_column=feature_view.batch_source.date_partition_column + or None, + batch_source=feature_view.batch_source or None, + ) + else: + return FeatureViewProjection( + name=feature_view.name, + name_alias=None, + features=feature_view.features, + desired_features=[], + ) + @staticmethod def from_definition(base_feature_view: "BaseFeatureView"): - return FeatureViewProjection( - name=base_feature_view.name, - name_alias=None, - features=base_feature_view.features, - desired_features=[], - ) + if getattr(base_feature_view, "batch_source", None): + return FeatureViewProjection( + name=base_feature_view.name, + name_alias=None, + features=base_feature_view.features, + desired_features=[], + timestamp_field=base_feature_view.batch_source.created_timestamp_column # type:ignore[attr-defined] + or None, + created_timestamp_column=base_feature_view.batch_source.created_timestamp_column # type:ignore[attr-defined] + or None, + date_partition_column=base_feature_view.batch_source.date_partition_column # 
type:ignore[attr-defined] + or None, + batch_source=base_feature_view.batch_source or None, # type:ignore[attr-defined] + ) + else: + return FeatureViewProjection( + name=base_feature_view.name, + name_alias=None, + features=base_feature_view.features, + desired_features=[], + ) def get_feature(self, feature_name: str) -> Field: try: diff --git a/sdk/python/feast/grpc_error_interceptor.py b/sdk/python/feast/grpc_error_interceptor.py new file mode 100644 index 0000000000..c638d461ed --- /dev/null +++ b/sdk/python/feast/grpc_error_interceptor.py @@ -0,0 +1,48 @@ +import grpc + +from feast.errors import FeastError + + +def exception_wrapper(behavior, request, context): + try: + return behavior(request, context) + except grpc.RpcError as e: + context.abort(e.code(), e.details()) + except FeastError as e: + context.abort( + e.grpc_status_code(), + e.to_error_detail(), + ) + + +class ErrorInterceptor(grpc.ServerInterceptor): + def intercept_service(self, continuation, handler_call_details): + handler = continuation(handler_call_details) + if handler is None: + return None + + if handler.unary_unary: + return grpc.unary_unary_rpc_method_handler( + lambda req, ctx: exception_wrapper(handler.unary_unary, req, ctx), + request_deserializer=handler.request_deserializer, + response_serializer=handler.response_serializer, + ) + elif handler.unary_stream: + return grpc.unary_stream_rpc_method_handler( + lambda req, ctx: exception_wrapper(handler.unary_stream, req, ctx), + request_deserializer=handler.request_deserializer, + response_serializer=handler.response_serializer, + ) + elif handler.stream_unary: + return grpc.stream_unary_rpc_method_handler( + lambda req, ctx: exception_wrapper(handler.stream_unary, req, ctx), + request_deserializer=handler.request_deserializer, + response_serializer=handler.response_serializer, + ) + elif handler.stream_stream: + return grpc.stream_stream_rpc_method_handler( + lambda req, ctx: exception_wrapper(handler.stream_stream, req, ctx), + request_deserializer=handler.request_deserializer, + response_serializer=handler.response_serializer, + ) + return handler diff --git a/sdk/python/feast/inference.py b/sdk/python/feast/inference.py index 28a170172c..b9fb9b694d 100644 --- a/sdk/python/feast/inference.py +++ b/sdk/python/feast/inference.py @@ -13,6 +13,7 @@ from feast.infra.offline_stores.file_source import FileSource from feast.infra.offline_stores.redshift_source import RedshiftSource from feast.infra.offline_stores.snowflake_source import SnowflakeSource +from feast.on_demand_feature_view import OnDemandFeatureView from feast.repo_config import RepoConfig from feast.stream_feature_view import StreamFeatureView from feast.types import String @@ -94,7 +95,7 @@ def update_data_sources_with_inferred_event_timestamp_col( def update_feature_views_with_inferred_features_and_entities( - fvs: Union[List[FeatureView], List[StreamFeatureView]], + fvs: Union[List[FeatureView], List[StreamFeatureView], List[OnDemandFeatureView]], entities: List[Entity], config: RepoConfig, ) -> None: @@ -121,35 +122,37 @@ def update_feature_views_with_inferred_features_and_entities( join_keys = set( [ entity_name_to_join_key_map.get(entity_name) - for entity_name in fv.entities + for entity_name in getattr(fv, "entities", []) ] ) # Fields whose names match a join key are considered to be entity columns; all # other fields are considered to be feature columns. 
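For example (illustrative values): with join_keys == {"driver_id"} and a schema such as

    schema = [Field(name="driver_id", dtype=Int64), Field(name="conv_rate", dtype=Float32)]

the loop below routes "driver_id" into fv.entity_columns and "conv_rate" into fv.features.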
+ entity_columns = fv.entity_columns if fv.entity_columns else [] for field in fv.schema: if field.name in join_keys: # Do not override a preexisting field with the same name. if field.name not in [ - entity_column.name for entity_column in fv.entity_columns + entity_column.name for entity_column in entity_columns ]: - fv.entity_columns.append(field) + entity_columns.append(field) else: if field.name not in [feature.name for feature in fv.features]: fv.features.append(field) # Respect the `value_type` attribute of the entity, if it is specified. - for entity_name in fv.entities: + fv_entities = getattr(fv, "entities", []) + for entity_name in fv_entities: entity = entity_name_to_entity_map.get(entity_name) # pass when entity does not exist. Entityless feature view case if entity is None: continue if ( entity.join_key - not in [entity_column.name for entity_column in fv.entity_columns] + not in [entity_column.name for entity_column in entity_columns] and entity.value_type != ValueType.UNKNOWN ): - fv.entity_columns.append( + entity_columns.append( Field( name=entity.join_key, dtype=from_value_type(entity.value_type), @@ -158,12 +161,13 @@ def update_feature_views_with_inferred_features_and_entities( # Infer a dummy entity column for entityless feature views. if ( - len(fv.entities) == 1 - and fv.entities[0] == DUMMY_ENTITY_NAME - and not fv.entity_columns + len(fv_entities) == 1 + and fv_entities[0] == DUMMY_ENTITY_NAME + and not entity_columns ): - fv.entity_columns.append(Field(name=DUMMY_ENTITY_ID, dtype=String)) + entity_columns.append(Field(name=DUMMY_ENTITY_ID, dtype=String)) + fv.entity_columns = entity_columns # Run inference for entity columns if there are fewer entity fields than expected. run_inference_for_entities = len(fv.entity_columns) < len(join_keys) @@ -186,7 +190,7 @@ def update_feature_views_with_inferred_features_and_entities( def _infer_features_and_entities( - fv: FeatureView, + fv: Union[FeatureView, OnDemandFeatureView], join_keys: Set[Optional[str]], run_inference_for_features, config, @@ -200,6 +204,11 @@ def _infer_features_and_entities( run_inference_for_features: Whether to run inference for features. config: The config for the current feature store. """ + if isinstance(fv, OnDemandFeatureView): + return _infer_on_demand_features_and_entities( + fv, join_keys, run_inference_for_features, config + ) + columns_to_exclude = { fv.batch_source.timestamp_field, fv.batch_source.created_timestamp_column, @@ -246,3 +255,80 @@ def _infer_features_and_entities( ) if field.name not in [feature.name for feature in fv.features]: fv.features.append(field) + + +def _infer_on_demand_features_and_entities( + fv: OnDemandFeatureView, + join_keys: Set[Optional[str]], + run_inference_for_features, + config, +) -> None: + """ + Updates the specific feature in place with inferred features and entities. + Args: + fv: The feature view on which to run inference. + join_keys: The set of join keys for the feature view's entities. + run_inference_for_features: Whether to run inference for features. + config: The config for the current feature store. 
+ """ + entity_columns: list[Field] = [] + columns_to_exclude = set() + for ( + source_feature_view_name, + source_feature_view, + ) in fv.source_feature_view_projections.items(): + columns_to_exclude.add(source_feature_view.timestamp_field) + columns_to_exclude.add(source_feature_view.created_timestamp_column) + + batch_source = getattr(source_feature_view, "batch_source") + batch_field_mapping = getattr(batch_source or None, "field_mapping") + if batch_field_mapping: + for ( + original_col, + mapped_col, + ) in batch_field_mapping.items(): + if mapped_col in columns_to_exclude: + columns_to_exclude.remove(mapped_col) + columns_to_exclude.add(original_col) + + table_column_names_and_types = ( + batch_source.get_table_column_names_and_types(config) + ) + for col_name, col_datatype in table_column_names_and_types: + if col_name in columns_to_exclude: + continue + elif col_name in join_keys: + field = Field( + name=col_name, + dtype=from_value_type( + batch_source.source_datatype_to_feast_value_type()(col_datatype) + ), + ) + if field.name not in [ + entity_column.name + for entity_column in entity_columns + if hasattr(entity_column, "name") + ]: + entity_columns.append(field) + elif not re.match( + "^__|__$", col_name + ): # double underscores often signal an internal-use column + if run_inference_for_features: + feature_name = ( + batch_field_mapping[col_name] + if col_name in batch_field_mapping + else col_name + ) + field = Field( + name=feature_name, + dtype=from_value_type( + batch_source.source_datatype_to_feast_value_type()( + col_datatype + ) + ), + ) + if field.name not in [ + feature.name for feature in source_feature_view.features + ]: + source_feature_view.features.append(field) + fv.entity_columns = entity_columns diff --git a/sdk/python/feast/infra/materialization/kubernetes/Dockerfile b/sdk/python/feast/infra/materialization/kubernetes/Dockerfile index 510bb72285..38d4f5f188 100644 --- a/sdk/python/feast/infra/materialization/kubernetes/Dockerfile +++ b/sdk/python/feast/infra/materialization/kubernetes/Dockerfile @@ -5,7 +5,7 @@ RUN apt-get update && \ WORKDIR /app -COPY sdk/python/feast/infra/materialization/kuberentes/main.py /app +COPY sdk/python/feast/infra/materialization/kubernetes/main.py /app # Copy necessary parts of the Feast codebase COPY sdk/python sdk/python diff --git a/sdk/python/feast/infra/materialization/snowflake_engine.py b/sdk/python/feast/infra/materialization/snowflake_engine.py index f77239398e..600e1b20d8 100644 --- a/sdk/python/feast/infra/materialization/snowflake_engine.py +++ b/sdk/python/feast/infra/materialization/snowflake_engine.py @@ -1,14 +1,13 @@ import os import shutil from dataclasses import dataclass -from datetime import datetime +from datetime import datetime, timezone from typing import Callable, List, Literal, Optional, Sequence, Union import click import pandas as pd from colorama import Fore, Style from pydantic import ConfigDict, Field, StrictStr -from pytz import utc from tqdm import tqdm import feast @@ -70,6 +69,9 @@ class SnowflakeMaterializationEngineConfig(FeastConfigBaseModel): private_key: Optional[str] = None """ Snowflake private key file path""" + private_key_content: Optional[bytes] = None + """ Snowflake private key stored as bytes""" + private_key_passphrase: Optional[str] = None """ Snowflake private key file passphrase""" @@ -126,16 +128,16 @@ def update( stage_context = f'"{self.repo_config.batch_engine.database}"."{self.repo_config.batch_engine.schema_}"' stage_path = f'{stage_context}."feast_{project}"' with 
GetSnowflakeConnection(self.repo_config.batch_engine) as conn: - query = f"SHOW STAGES IN {stage_context}" + query = f"SHOW USER FUNCTIONS LIKE 'FEAST_{project.upper()}%' IN SCHEMA {stage_context}" cursor = execute_snowflake_statement(conn, query) - stage_list = pd.DataFrame( + function_list = pd.DataFrame( cursor.fetchall(), columns=[column.name for column in cursor.description], ) - # if the stage already exists, + # if the SHOW FUNCTIONS query returns results, # assumes that the materialization functions have been deployed - if f"feast_{project}" in stage_list["name"].tolist(): + if len(function_list.index) > 0: click.echo( f"Materialization functions for {Style.BRIGHT + Fore.GREEN}{project}{Style.RESET_ALL} already detected." ) @@ -147,7 +149,7 @@ def update( ) click.echo() - query = f"CREATE STAGE {stage_path}" + query = f"CREATE STAGE IF NOT EXISTS {stage_path}" execute_snowflake_statement(conn, query) copy_path, zip_path = package_snowpark_zip(project) @@ -273,15 +275,24 @@ def _materialize_one( execute_snowflake_statement(conn, query).fetchall()[0][0] / 1_000_000_000 ) - if last_commit_change_time < start_date.astimezone(tz=utc).timestamp(): + if ( + last_commit_change_time + < start_date.astimezone(tz=timezone.utc).timestamp() + ): return SnowflakeMaterializationJob( job_id=job_id, status=MaterializationJobStatus.SUCCEEDED ) fv_latest_values_sql = offline_job.to_sql() + if feature_view.entity_columns: + first_feature_view_entity_name = getattr( + feature_view.entity_columns[0], "name", None + ) + else: + first_feature_view_entity_name = None if ( - feature_view.entity_columns[0].name == DUMMY_ENTITY_ID + first_feature_view_entity_name == DUMMY_ENTITY_ID ): # entityless Feature View's placeholder entity entities_to_write = 1 else: diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py index ef12eba442..3ee1717461 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery.py +++ b/sdk/python/feast/infra/offline_stores/bigquery.py @@ -114,7 +114,7 @@ class BigQueryOfflineStoreConfig(FeastConfigBaseModel): @field_validator("billing_project_id") def project_id_exists(cls, v, values, **kwargs): - if v and not values["project_id"]: + if v and not values.data["project_id"]: raise ValueError( "please specify project_id if billing_project_id is specified" ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py index ce731f0198..ea0d6386cb 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py +++ b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py @@ -1,6 +1,6 @@ import contextlib import uuid -from datetime import datetime +from datetime import datetime, timezone from pathlib import Path from typing import ( Callable, @@ -19,7 +19,6 @@ import pyarrow import pyarrow as pa from pydantic import StrictStr -from pytz import utc from feast import OnDemandFeatureView from feast.data_source import DataSource @@ -100,8 +99,8 @@ def pull_latest_from_table_or_query( athena_client = aws_utils.get_athena_data_client(config.offline_store.region) s3_resource = aws_utils.get_s3_resource(config.offline_store.region) - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) query = f""" SELECT @@ -151,7 +150,7 @@ def 
pull_all_from_table_or_query( query = f""" SELECT {field_string} FROM {from_expression} - WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date.astimezone(tz=utc).strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]}' AND TIMESTAMP '{end_date.astimezone(tz=utc).strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]}' + WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date.astimezone(tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]}' AND TIMESTAMP '{end_date.astimezone(tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]}' {"AND "+date_partition_column+" >= '"+start_date.strftime('%Y-%m-%d')+"' AND "+date_partition_column+" <= '"+end_date.strftime('%Y-%m-%d')+"' " if date_partition_column != "" and date_partition_column is not None else ''} """ diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py index c4740a960e..5239cfb474 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py @@ -1,6 +1,6 @@ import contextlib from dataclasses import asdict -from datetime import datetime +from datetime import datetime, timezone from typing import ( Any, Callable, @@ -20,7 +20,6 @@ import pyarrow as pa from jinja2 import BaseLoader, Environment from psycopg import sql -from pytz import utc from feast.data_source import DataSource from feast.errors import InvalidEntityType, ZeroColumnQueryResult, ZeroRowsQueryResult @@ -214,8 +213,8 @@ def pull_all_from_table_or_query( join_key_columns + feature_name_columns + [timestamp_field] ) - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) query = f""" SELECT {field_string} diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py index 2d5a00c296..2896d565d3 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py @@ -2,7 +2,7 @@ import tempfile import uuid import warnings -from datetime import datetime +from datetime import datetime, timezone from typing import Any, Callable, Dict, List, Optional, Tuple, Union import numpy as np @@ -14,7 +14,6 @@ from pydantic import StrictStr from pyspark import SparkConf from pyspark.sql import SparkSession -from pytz import utc from feast import FeatureView, OnDemandFeatureView from feast.data_source import DataSource @@ -284,8 +283,8 @@ def pull_all_from_table_or_query( fields = ", ".join(join_key_columns + feature_name_columns + [timestamp_field]) from_expression = data_source.get_table_query_string() - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) query = f""" SELECT {fields} @@ -520,13 +519,10 @@ def _upload_entity_df( entity_df[event_timestamp_col], utc=True ) spark_session.createDataFrame(entity_df).createOrReplaceTempView(table_name) - return elif isinstance(entity_df, str): spark_session.sql(entity_df).createOrReplaceTempView(table_name) - return elif isinstance(entity_df, pyspark.sql.DataFrame): entity_df.createOrReplaceTempView(table_name) - return else: raise InvalidEntityType(type(entity_df)) @@ -534,7 
+530,7 @@ def _upload_entity_df( def _format_datetime(t: datetime) -> str: # Since Hive does not support timezone, need to transform to utc. if t.tzinfo: - t = t.astimezone(tz=utc) + t = t.astimezone(tz=timezone.utc) dt = t.strftime("%Y-%m-%d %H:%M:%S.%f") return dt diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/upload.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/upload.py index 9e2ea3708d..1b55199193 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/upload.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/connectors/upload.py @@ -18,13 +18,12 @@ ``` """ -from datetime import datetime +from datetime import datetime, timezone from typing import Any, Dict, Iterator, Optional, Set import numpy as np import pandas as pd import pyarrow -from pytz import utc from feast.infra.offline_stores.contrib.trino_offline_store.trino_queries import Trino from feast.infra.offline_stores.contrib.trino_offline_store.trino_type_map import ( @@ -141,7 +140,7 @@ def _format_value(row: pd.Series, schema: Dict[str, Any]) -> str: def format_datetime(t: datetime) -> str: if t.tzinfo: - t = t.astimezone(tz=utc) + t = t.astimezone(tz=timezone.utc) return t.strftime("%Y-%m-%d %H:%M:%S.%f") diff --git a/sdk/python/feast/infra/offline_stores/dask.py b/sdk/python/feast/infra/offline_stores/dask.py index 4a63baf646..52ad88d299 100644 --- a/sdk/python/feast/infra/offline_stores/dask.py +++ b/sdk/python/feast/infra/offline_stores/dask.py @@ -1,6 +1,6 @@ import os import uuid -from datetime import datetime +from datetime import datetime, timezone from pathlib import Path from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Union @@ -178,6 +178,8 @@ def evaluate_historical_retrieval(): entity_df_event_timestamp_col_type = entity_df_with_features.dtypes[ entity_df_event_timestamp_col ] + + # TODO: need to figure out why the value of entity_df_event_timestamp_col_type.tz is pytz.UTC if ( not hasattr(entity_df_event_timestamp_col_type, "tz") or entity_df_event_timestamp_col_type.tz != pytz.UTC @@ -189,7 +191,7 @@ def evaluate_historical_retrieval(): ].apply( lambda x: x if x.tzinfo is not None - else x.replace(tzinfo=pytz.utc) + else x.replace(tzinfo=timezone.utc) ) ) @@ -616,6 +618,7 @@ def _normalize_timestamp( if created_timestamp_column: created_timestamp_column_type = df_to_join_types[created_timestamp_column] + # TODO: need to figure out why the value of timestamp_field_type.tz is pytz.UTC if not hasattr(timestamp_field_type, "tz") or timestamp_field_type.tz != pytz.UTC: # if you are querying for the event timestamp field, we have to deduplicate if len(df_to_join[timestamp_field].shape) > 1: @@ -624,10 +627,11 @@ def _normalize_timestamp( # Make sure all timestamp fields are tz-aware. 
We default tz-naive fields to UTC df_to_join[timestamp_field] = df_to_join[timestamp_field].apply( - lambda x: x if x.tzinfo is not None else x.replace(tzinfo=pytz.utc), + lambda x: x if x.tzinfo else x.replace(tzinfo=timezone.utc), meta=(timestamp_field, "datetime64[ns, UTC]"), ) + # TODO: need to figure out why the value of created_timestamp_column_type.tz is pytz.UTC if created_timestamp_column and ( not hasattr(created_timestamp_column_type, "tz") or created_timestamp_column_type.tz != pytz.UTC @@ -640,7 +644,7 @@ def _normalize_timestamp( df_to_join[created_timestamp_column] = df_to_join[ created_timestamp_column ].apply( - lambda x: x if x.tzinfo is not None else x.replace(tzinfo=pytz.utc), + lambda x: x if x.tzinfo else x.replace(tzinfo=timezone.utc), meta=(timestamp_field, "datetime64[ns, UTC]"), ) diff --git a/sdk/python/feast/infra/offline_stores/ibis.py b/sdk/python/feast/infra/offline_stores/ibis.py index 4de16cbda3..61c477baec 100644 --- a/sdk/python/feast/infra/offline_stores/ibis.py +++ b/sdk/python/feast/infra/offline_stores/ibis.py @@ -1,7 +1,7 @@ import random import string import uuid -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from pathlib import Path from typing import Any, Callable, Dict, List, Optional, Tuple, Union @@ -12,7 +12,6 @@ import pyarrow from ibis.expr import datatypes as dt from ibis.expr.types import Table -from pytz import utc from feast.data_source import DataSource from feast.feature_logging import LoggingConfig, LoggingSource @@ -55,8 +54,8 @@ def pull_latest_from_table_or_query_ibis( fields = join_key_columns + feature_name_columns + [timestamp_field] if created_timestamp_column: fields.append(created_timestamp_column) - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) table = data_source_reader(data_source) @@ -265,8 +264,8 @@ def pull_all_from_table_or_query_ibis( staging_location_endpoint_override: Optional[str] = None, ) -> RetrievalJob: fields = join_key_columns + feature_name_columns + [timestamp_field] - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) table = data_source_reader(data_source) diff --git a/sdk/python/feast/infra/offline_stores/redshift.py b/sdk/python/feast/infra/offline_stores/redshift.py index cec21c35c1..ed76f830f3 100644 --- a/sdk/python/feast/infra/offline_stores/redshift.py +++ b/sdk/python/feast/infra/offline_stores/redshift.py @@ -1,6 +1,6 @@ import contextlib import uuid -from datetime import datetime +from datetime import datetime, timezone from pathlib import Path from typing import ( Any, @@ -21,7 +21,6 @@ import pyarrow as pa from dateutil import parser from pydantic import StrictStr, model_validator -from pytz import utc from feast import OnDemandFeatureView, RedshiftSource from feast.data_source import DataSource @@ -127,8 +126,8 @@ def pull_latest_from_table_or_query( ) s3_resource = aws_utils.get_s3_resource(config.offline_store.region) - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) query = f""" SELECT @@ -174,8 +173,8 @@ def pull_all_from_table_or_query( ) s3_resource = aws_utils.get_s3_resource(config.offline_store.region) - start_date = 
start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) query = f""" SELECT {field_string} diff --git a/sdk/python/feast/infra/offline_stores/remote.py b/sdk/python/feast/infra/offline_stores/remote.py index dc657017d9..8154f75f87 100644 --- a/sdk/python/feast/infra/offline_stores/remote.py +++ b/sdk/python/feast/infra/offline_stores/remote.py @@ -10,9 +10,12 @@ import pyarrow as pa import pyarrow.flight as fl import pyarrow.parquet +from pyarrow import Schema +from pyarrow._flight import FlightCallOptions, FlightDescriptor, Ticket from pydantic import StrictInt, StrictStr from feast import OnDemandFeatureView +from feast.arrow_error_handler import arrow_client_error_handling_decorator from feast.data_source import DataSource from feast.feature_logging import ( FeatureServiceLoggingSource, @@ -27,12 +30,54 @@ RetrievalMetadata, ) from feast.infra.registry.base_registry import BaseRegistry +from feast.permissions.auth.auth_type import AuthType +from feast.permissions.auth_model import AuthConfig +from feast.permissions.client.arrow_flight_auth_interceptor import ( + FlightAuthInterceptorFactory, +) from feast.repo_config import FeastConfigBaseModel, RepoConfig from feast.saved_dataset import SavedDatasetStorage logger = logging.getLogger(__name__) +class FeastFlightClient(fl.FlightClient): + @arrow_client_error_handling_decorator + def get_flight_info( + self, descriptor: FlightDescriptor, options: FlightCallOptions = None + ): + return super().get_flight_info(descriptor, options) + + @arrow_client_error_handling_decorator + def do_get(self, ticket: Ticket, options: FlightCallOptions = None): + return super().do_get(ticket, options) + + @arrow_client_error_handling_decorator + def do_put( + self, + descriptor: FlightDescriptor, + schema: Schema, + options: FlightCallOptions = None, + ): + return super().do_put(descriptor, schema, options) + + @arrow_client_error_handling_decorator + def list_flights(self, criteria: bytes = b"", options: FlightCallOptions = None): + return super().list_flights(criteria, options) + + @arrow_client_error_handling_decorator + def list_actions(self, options: FlightCallOptions = None): + return super().list_actions(options) + + +def build_arrow_flight_client(host: str, port, auth_config: AuthConfig): + if auth_config.type != AuthType.NONE.value: + middlewares = [FlightAuthInterceptorFactory(auth_config)] + return FeastFlightClient(f"grpc://{host}:{port}", middleware=middlewares) + + return FeastFlightClient(f"grpc://{host}:{port}") + + class RemoteOfflineStoreConfig(FeastConfigBaseModel): type: Literal["remote"] = "remote" host: StrictStr @@ -45,7 +90,7 @@ class RemoteOfflineStoreConfig(FeastConfigBaseModel): class RemoteRetrievalJob(RetrievalJob): def __init__( self, - client: fl.FlightClient, + client: FeastFlightClient, api: str, api_parameters: Dict[str, Any], entity_df: Union[pd.DataFrame, str] = None, @@ -69,7 +114,11 @@ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: # This is where do_get service is invoked def _to_arrow_internal(self, timeout: Optional[int] = None) -> pa.Table: return _send_retrieve_remote( - self.api, self.api_parameters, self.entity_df, self.table, self.client + self.api, + self.api_parameters, + self.entity_df, + self.table, + self.client, ) @property @@ -128,8 +177,9 @@ def get_historical_features( ) -> RemoteRetrievalJob: assert isinstance(config.offline_store, RemoteOfflineStoreConfig) - # 
Initialize the client connection - client = RemoteOfflineStore.init_client(config) + client = build_arrow_flight_client( + config.offline_store.host, config.offline_store.port, config.auth_config + ) feature_view_names = [fv.name for fv in feature_views] name_aliases = [fv.projection.name_alias for fv in feature_views] @@ -163,7 +213,9 @@ def pull_all_from_table_or_query( assert isinstance(config.offline_store, RemoteOfflineStoreConfig) # Initialize the client connection - client = RemoteOfflineStore.init_client(config) + client = build_arrow_flight_client( + config.offline_store.host, config.offline_store.port, config.auth_config + ) api_parameters = { "data_source_name": data_source.name, @@ -194,7 +246,9 @@ def pull_latest_from_table_or_query( assert isinstance(config.offline_store, RemoteOfflineStoreConfig) # Initialize the client connection - client = RemoteOfflineStore.init_client(config) + client = build_arrow_flight_client( + config.offline_store.host, config.offline_store.port, config.auth_config + ) api_parameters = { "data_source_name": data_source.name, @@ -227,7 +281,9 @@ def write_logged_features( data = pyarrow.parquet.read_table(data, use_threads=False, pre_buffer=False) # Initialize the client connection - client = RemoteOfflineStore.init_client(config) + client = build_arrow_flight_client( + config.offline_store.host, config.offline_store.port, config.auth_config + ) api_parameters = { "feature_service_name": source._feature_service.name, @@ -251,7 +307,9 @@ def offline_write_batch( assert isinstance(config.offline_store, RemoteOfflineStoreConfig) # Initialize the client connection - client = RemoteOfflineStore.init_client(config) + client = build_arrow_flight_client( + config.offline_store.host, config.offline_store.port, config.auth_config + ) feature_view_names = [feature_view.name] name_aliases = [feature_view.projection.name_alias] @@ -270,13 +328,6 @@ def offline_write_batch( entity_df=None, ) - @staticmethod - def init_client(config): - location = f"grpc://{config.offline_store.host}:{config.offline_store.port}" - client = fl.connect(location=location) - logger.info(f"Connecting FlightClient at {location}") - return client - def _create_retrieval_metadata(feature_refs: List[str], entity_df: pd.DataFrame): entity_schema = _get_entity_schema( @@ -329,23 +380,32 @@ def _send_retrieve_remote( api_parameters: Dict[str, Any], entity_df: Union[pd.DataFrame, str], table: pa.Table, - client: fl.FlightClient, + client: FeastFlightClient, ): - command_descriptor = _call_put(api, api_parameters, client, entity_df, table) + command_descriptor = _call_put( + api, + api_parameters, + client, + entity_df, + table, + ) return _call_get(client, command_descriptor) -def _call_get(client: fl.FlightClient, command_descriptor: fl.FlightDescriptor): +def _call_get( + client: FeastFlightClient, + command_descriptor: fl.FlightDescriptor, +): flight = client.get_flight_info(command_descriptor) ticket = flight.endpoints[0].ticket reader = client.do_get(ticket) - return reader.read_all() + return read_all(reader) def _call_put( api: str, api_parameters: Dict[str, Any], - client: fl.FlightClient, + client: FeastFlightClient, entity_df: Union[pd.DataFrame, str], table: pa.Table, ): @@ -373,7 +433,7 @@ def _put_parameters( command_descriptor: fl.FlightDescriptor, entity_df: Union[pd.DataFrame, str], table: pa.Table, - client: fl.FlightClient, + client: FeastFlightClient, ): updatedTable: pa.Table @@ -384,15 +444,22 @@ def _put_parameters( else: updatedTable = _create_empty_table() - writer, _ = 
client.do_put( - command_descriptor, - updatedTable.schema, - ) + writer, _ = client.do_put(command_descriptor, updatedTable.schema) + + write_table(writer, updatedTable) + - writer.write_table(updatedTable) +@arrow_client_error_handling_decorator +def write_table(writer, updated_table: pa.Table): + writer.write_table(updated_table) writer.close() +@arrow_client_error_handling_decorator +def read_all(reader): + return reader.read_all() + + def _create_empty_table(): schema = pa.schema( { diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index 96552ff87e..9418171a96 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -3,7 +3,7 @@ import os import uuid import warnings -from datetime import datetime +from datetime import datetime, timezone from functools import reduce from pathlib import Path from typing import ( @@ -25,7 +25,6 @@ import pandas as pd import pyarrow from pydantic import ConfigDict, Field, StrictStr -from pytz import utc from feast import OnDemandFeatureView from feast.data_source import DataSource @@ -107,6 +106,9 @@ class SnowflakeOfflineStoreConfig(FeastConfigBaseModel): private_key: Optional[str] = None """ Snowflake private key file path""" + private_key_content: Optional[bytes] = None + """ Snowflake private key stored as bytes""" + private_key_passphrase: Optional[str] = None """ Snowflake private key file passphrase""" @@ -193,8 +195,8 @@ def pull_latest_from_table_or_query( with GetSnowflakeConnection(config.offline_store) as conn: snowflake_conn = conn - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) query = f""" SELECT @@ -245,8 +247,8 @@ def pull_all_from_table_or_query( with GetSnowflakeConnection(config.offline_store) as conn: snowflake_conn = conn - start_date = start_date.astimezone(tz=utc) - end_date = end_date.astimezone(tz=utc) + start_date = start_date.astimezone(tz=timezone.utc) + end_date = end_date.astimezone(tz=timezone.utc) query = f""" SELECT {field_string} diff --git a/sdk/python/feast/infra/online_stores/contrib/elasticsearch.py b/sdk/python/feast/infra/online_stores/contrib/elasticsearch.py index 429327e651..a0c25b931a 100644 --- a/sdk/python/feast/infra/online_stores/contrib/elasticsearch.py +++ b/sdk/python/feast/infra/online_stores/contrib/elasticsearch.py @@ -6,15 +6,18 @@ from datetime import datetime from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple -import pytz from elasticsearch import Elasticsearch, helpers from feast import Entity, FeatureView, RepoConfig -from feast.infra.key_encoding_utils import get_list_val_str, serialize_entity_key +from feast.infra.key_encoding_utils import ( + get_list_val_str, + serialize_entity_key, +) from feast.infra.online_stores.online_store import OnlineStore from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import FeastConfigBaseModel +from feast.utils import _build_retrieve_online_document_record, to_naive_utc class ElasticSearchOnlineStoreConfig(FeastConfigBaseModel): @@ -96,9 +99,9 @@ def online_write_batch( entity_key_serialization_version=config.entity_key_serialization_version, ) encoded_entity_key = base64.b64encode(entity_key_bin).decode("utf-8") - timestamp = _to_naive_utc(timestamp) + 
timestamp = to_naive_utc(timestamp) if created_ts is not None: - created_ts = _to_naive_utc(created_ts) + created_ts = to_naive_utc(created_ts) for feature_name, value in values.items(): encoded_value = base64.b64encode(value.SerializeToString()).decode( "utf-8" @@ -224,6 +227,7 @@ def retrieve_online_documents( ) -> List[ Tuple[ Optional[datetime], + Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], @@ -232,6 +236,7 @@ def retrieve_online_documents( result: List[ Tuple[ Optional[datetime], + Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], @@ -247,30 +252,21 @@ def retrieve_online_documents( ) rows = response["hits"]["hits"][0:top_k] for row in rows: + entity_key = row["_source"]["entity_key"] feature_value = row["_source"]["feature_value"] vector_value = row["_source"]["vector_value"] timestamp = row["_source"]["timestamp"] distance = row["_score"] timestamp = datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%S.%f") - feature_value_proto = ValueProto() - feature_value_proto.ParseFromString(base64.b64decode(feature_value)) - - vector_value_proto = ValueProto(string_val=str(vector_value)) - distance_value_proto = ValueProto(float_val=distance) result.append( - ( + _build_retrieve_online_document_record( + entity_key, + base64.b64decode(feature_value), + str(vector_value), + distance, timestamp, - feature_value_proto, - vector_value_proto, - distance_value_proto, + config.entity_key_serialization_version, ) ) return result - - -def _to_naive_utc(ts: datetime): - if ts.tzinfo is None: - return ts - else: - return ts.astimezone(pytz.utc).replace(tzinfo=None) diff --git a/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py index 497d8909af..c56d394c21 100644 --- a/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py +++ b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py @@ -23,7 +23,6 @@ from datetime import datetime, timezone from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple -import pytz from hazelcast.client import HazelcastClient from hazelcast.core import HazelcastJsonValue from hazelcast.discovery import HazelcastCloudDiscovery @@ -167,10 +166,10 @@ def online_write_batch( entity_key_serialization_version=2, ) ).decode("utf-8") - event_ts_utc = pytz.utc.localize(event_ts, is_dst=None).timestamp() + event_ts_utc = event_ts.astimezone(tz=timezone.utc).timestamp() created_ts_utc = 0.0 if created_ts is not None: - created_ts_utc = pytz.utc.localize(created_ts, is_dst=None).timestamp() + created_ts_utc = created_ts.astimezone(tz=timezone.utc).timestamp() for feature_name, value in values.items(): feature_value = base64.b64encode(value.SerializeToString()).decode( "utf-8" diff --git a/sdk/python/feast/infra/online_stores/contrib/ikv_online_store/ikv.py b/sdk/python/feast/infra/online_stores/contrib/ikv_online_store/ikv.py index 6b721bddf8..c8f0ad65c9 100644 --- a/sdk/python/feast/infra/online_stores/contrib/ikv_online_store/ikv.py +++ b/sdk/python/feast/infra/online_stores/contrib/ikv_online_store/ikv.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timezone from typing import ( Any, Callable, @@ -11,7 +11,6 @@ Tuple, ) -import pytz from google.protobuf.timestamp_pb2 import Timestamp from ikvpy.client import IKVReader, IKVWriter 
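# Side note on the hazelcast hunk above: for timezone-aware datetimes the old
# and new expressions agree, but for *naive* ones they differ --
# pytz.utc.localize() stamps UTC onto the naive value, while
# datetime.astimezone() first interprets it in the system local timezone. A
# small self-contained demonstration (the result depends on the machine's
# local timezone):
from datetime import datetime, timezone

import pytz

naive = datetime(2024, 1, 1, 12, 0)
localized = pytz.utc.localize(naive).timestamp()        # 12:00 read as UTC
converted = naive.astimezone(timezone.utc).timestamp()  # 12:00 read as local time
# localized == converted only when the local timezone is UTC.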
from ikvpy.clientoptions import ClientOptions, ClientOptionsBuilder @@ -163,7 +162,7 @@ def _decode_fields_for_primary_key( if dt_bytes: proto_timestamp = Timestamp() proto_timestamp.ParseFromString(dt_bytes) - dt = datetime.fromtimestamp(proto_timestamp.seconds, tz=pytz.utc) + dt = datetime.fromtimestamp(proto_timestamp.seconds, tz=timezone.utc) # decode other features features = {} diff --git a/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py b/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py index 26916a9fcb..64111ca42c 100644 --- a/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py +++ b/sdk/python/feast/infra/online_stores/contrib/mysql_online_store/mysql.py @@ -4,7 +4,6 @@ from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple import pymysql -import pytz from pydantic import StrictStr from pymysql.connections import Connection from pymysql.cursors import Cursor @@ -15,6 +14,7 @@ from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import FeastConfigBaseModel +from feast.utils import to_naive_utc class MySQLOnlineStoreConfig(FeastConfigBaseModel): @@ -74,9 +74,9 @@ def online_write_batch( entity_key, entity_key_serialization_version=2, ).hex() - timestamp = _to_naive_utc(timestamp) + timestamp = to_naive_utc(timestamp) if created_ts is not None: - created_ts = _to_naive_utc(created_ts) + created_ts = to_naive_utc(created_ts) for feature_name, val in values.items(): self.write_to_table( @@ -223,10 +223,3 @@ def _drop_table_and_index(cur: Cursor, project: str, table: FeatureView) -> None def _table_id(project: str, table: FeatureView) -> str: return f"{project}_{table.name}" - - -def _to_naive_utc(ts: datetime) -> datetime: - if ts.tzinfo is None: - return ts - else: - return ts.astimezone(pytz.utc).replace(tzinfo=None) diff --git a/sdk/python/feast/infra/online_stores/contrib/postgres.py b/sdk/python/feast/infra/online_stores/contrib/postgres.py index ff73a4a347..8125da33be 100644 --- a/sdk/python/feast/infra/online_stores/contrib/postgres.py +++ b/sdk/python/feast/infra/online_stores/contrib/postgres.py @@ -16,7 +16,6 @@ Union, ) -import pytz from psycopg import AsyncConnection, sql from psycopg.connection import Connection from psycopg_pool import AsyncConnectionPool, ConnectionPool @@ -24,6 +23,9 @@ from feast import Entity from feast.feature_view import FeatureView from feast.infra.key_encoding_utils import get_list_val_str, serialize_entity_key +from feast.infra.online_stores.contrib.singlestore_online_store.singlestore import ( + _to_naive_utc, +) from feast.infra.online_stores.online_store import OnlineStore from feast.infra.utils.postgres.connection_utils import ( _get_conn, @@ -35,6 +37,7 @@ from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import RepoConfig +from feast.utils import _build_retrieve_online_document_record SUPPORTED_DISTANCE_METRICS_DICT = { "cosine": "<=>", @@ -358,6 +361,7 @@ def retrieve_online_documents( ) -> List[ Tuple[ Optional[datetime], + Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], @@ -389,12 +393,11 @@ def retrieve_online_documents( ) distance_metric_sql = SUPPORTED_DISTANCE_METRICS_DICT[distance_metric] - # Convert the embedding to a string to be used in postgres vector search - 
query_embedding_str = f"[{','.join(str(el) for el in embedding)}]" result: List[ Tuple[ Optional[datetime], + Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], @@ -413,45 +416,37 @@ def retrieve_online_documents( feature_name, value, vector_value, - vector_value {distance_metric_sql} %s as distance, + vector_value {distance_metric_sql} %s::vector as distance, event_ts FROM {table_name} WHERE feature_name = {feature_name} ORDER BY distance LIMIT {top_k}; """ ).format( - distance_metric_sql=distance_metric_sql, + distance_metric_sql=sql.SQL(distance_metric_sql), table_name=sql.Identifier(table_name), feature_name=sql.Literal(requested_feature), top_k=sql.Literal(top_k), ), - (query_embedding_str,), + (embedding,), ) rows = cur.fetchall() - for ( entity_key, - feature_name, - value, + _, + feature_val, vector_value, - distance, + distance_val, event_ts, ) in rows: - # TODO Deserialize entity_key to return the entity in response - # entity_key_proto = EntityKeyProto() - # entity_key_proto_bin = bytes(entity_key) - - feature_value_proto = ValueProto() - feature_value_proto.ParseFromString(bytes(value)) - - vector_value_proto = ValueProto(string_val=vector_value) - distance_value_proto = ValueProto(float_val=distance) result.append( - ( + _build_retrieve_online_document_record( + entity_key, + feature_val, + vector_value, + distance_val, event_ts, - feature_value_proto, - vector_value_proto, - distance_value_proto, + config.entity_key_serialization_version, ) ) @@ -472,10 +467,3 @@ def _drop_table_and_index(table_name): sql.Identifier(table_name), sql.Identifier(f"{table_name}_ek"), ) - - -def _to_naive_utc(ts: datetime): - if ts.tzinfo is None: - return ts - else: - return ts.astimezone(pytz.utc).replace(tzinfo=None) diff --git a/sdk/python/feast/infra/online_stores/contrib/rockset_online_store/rockset.py b/sdk/python/feast/infra/online_stores/contrib/rockset_online_store/rockset.py deleted file mode 100644 index 31de7f9e9b..0000000000 --- a/sdk/python/feast/infra/online_stores/contrib/rockset_online_store/rockset.py +++ /dev/null @@ -1,520 +0,0 @@ -# Copyright 2022 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import json -import logging -import os -import random -import time -from datetime import datetime -from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple, cast - -import requests -from rockset.exceptions import BadRequestException, RocksetException -from rockset.models import QueryRequestSql -from rockset.query_paginator import QueryPaginator -from rockset.rockset_client import RocksetClient - -from feast.entity import Entity -from feast.feature_view import FeatureView -from feast.infra.online_stores.helpers import compute_entity_id -from feast.infra.online_stores.online_store import OnlineStore -from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto -from feast.protos.feast.types.Value_pb2 import Value as ValueProto -from feast.repo_config import FeastConfigBaseModel, RepoConfig - -logger = logging.getLogger(__name__) - - -class RocksetOnlineStoreConfig(FeastConfigBaseModel): - """Online store config for Rockset store""" - - type: Literal["rockset"] = "rockset" - """Online store type selector""" - - api_key: Optional[str] = None - """Api Key to be used for Rockset Account. If not set the env var ROCKSET_APIKEY will be used.""" - - host: Optional[str] = None - """The Host Url for Rockset requests. If not set the env var ROCKSET_APISERVER will be used.""" - - read_pagination_batch_size: int = 100 - """Batch size of records that will be turned per page when paginating a batched read""" - - collection_created_timeout_secs: int = 60 - """The amount of time, in seconds, we will wait for the collection to become visible to the API""" - - collection_ready_timeout_secs: int = 30 * 60 - """The amount of time, in seconds, we will wait for the collection to enter READY state""" - - fence_all_writes: bool = True - """Whether to wait for all writes to be flushed from log and queryable. If False, documents that are written may not be seen immediately in subsequent reads""" - - fence_timeout_secs: int = 10 * 60 - """The amount of time we will wait, in seconds, for the write fence to be passed""" - - initial_request_backoff_secs: int = 2 - """Initial backoff, in seconds, we will wait between requests when polling for a response""" - - max_request_backoff_secs: int = 30 - """Initial backoff, in seconds, we will wait between requests when polling for a response""" - - max_request_attempts: int = 10 * 1000 - """The max amount of times we will retry a failed request""" - - -class RocksetOnlineStore(OnlineStore): - """ - Rockset implementation of the online store interface. - - Attributes: - _rockset_client: Rockset openapi client. - """ - - _rockset_client = None - - def online_write_batch( - self, - config: RepoConfig, - table: FeatureView, - data: List[ - Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] - ], - progress: Optional[Callable[[int], Any]], - ) -> None: - """ - Write a batch of feature rows to online Rockset store. - - Args: - config: The RepoConfig for the current FeatureStore. - table: Feast FeatureView. - data: a list of quadruplets containing Feature data. Each quadruplet contains an Entity Key, - a dict containing feature values, an event timestamp for the row, and - the created timestamp for the row if it exists. - progress: Optional function to be called once every mini-batch of rows is written to - the online store. Can be used to display progress. 
- """ - - online_config = config.online_store - assert isinstance(online_config, RocksetOnlineStoreConfig) - - rs = self.get_rockset_client(online_config) - collection_name = self.get_collection_name(config, table) - - # We need to deduplicate on entity_id and we will save the latest timestamp version. - dedup_dict = {} - for feature_vals in data: - entity_key, features, timestamp, created_ts = feature_vals - serialized_key = compute_entity_id( - entity_key=entity_key, - entity_key_serialization_version=config.entity_key_serialization_version, - ) - - if serialized_key not in dedup_dict: - dedup_dict[serialized_key] = feature_vals - continue - - # If the entity already existings in the dictionary ignore the entry if it has a lower timestamp. - if timestamp <= dedup_dict[serialized_key][2]: - continue - - dedup_dict[serialized_key] = feature_vals - - request_batch = [] - for serialized_key, feature_vals in dedup_dict.items(): - document = {} - entity_key, features, timestamp, created_ts = feature_vals - document["_id"] = serialized_key - - # Rockset python client currently does not handle datetime correctly and will convert - # to string instead of native Rockset DATETIME. This will be fixed, but until then we - # use isoformat. - document["event_ts"] = timestamp.isoformat() - document["created_ts"] = ( - "" if created_ts is None else created_ts.isoformat() - ) - for k, v in features.items(): - # Rockset client currently does not support bytes type. - document[k] = v.SerializeToString().hex() - - # TODO: Implement async batching with retries. - request_batch.append(document) - - if progress: - progress(1) - - resp = rs.Documents.add_documents( - collection=collection_name, data=request_batch - ) - if online_config.fence_all_writes: - self.wait_for_fence(rs, collection_name, resp["last_offset"], online_config) - - return None - - def online_read( - self, - config: RepoConfig, - table: FeatureView, - entity_keys: List[EntityKeyProto], - requested_features: Optional[List[str]] = None, - ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: - """ - Retrieve feature values from the online Rockset store. - - Args: - config: The RepoConfig for the current FeatureStore. - table: Feast FeatureView. - entity_keys: a list of entity keys that should be read from the FeatureStore. - """ - online_config = config.online_store - assert isinstance(online_config, RocksetOnlineStoreConfig) - - rs = self.get_rockset_client(online_config) - collection_name = self.get_collection_name(config, table) - - feature_list = "" - if requested_features is not None: - feature_list = ",".join(requested_features) - - entity_serialized_key_list = [ - compute_entity_id( - k, - entity_key_serialization_version=config.entity_key_serialization_version, - ) - for k in entity_keys - ] - - entity_query_str = ",".join( - "'{id}'".format(id=s) for s in entity_serialized_key_list - ) - - query_str = f""" - SELECT - "_id", - "event_ts", - {feature_list} - FROM - {collection_name} - WHERE - "_id" IN ({entity_query_str}) - """ - - feature_set = set() - if requested_features: - feature_set.update(requested_features) - - result_map = {} - for page in QueryPaginator( - rs, - rs.Queries.query( - sql=QueryRequestSql( - query=query_str, - paginate=True, - initial_paginate_response_doc_count=online_config.read_pagination_batch_size, - ) - ), - ): - for doc in page: - result = {} - for k, v in doc.items(): - if k not in feature_set: - # We want to skip deserializing values that are not feature values like bookeeping values. 
- continue - - val = ValueProto() - - # TODO: Remove bytes <-> string parsing once client supports bytes. - val.ParseFromString(bytes.fromhex(v)) - result[k] = val - result_map[doc["_id"]] = ( - datetime.fromisoformat(doc["event_ts"]), - result, - ) - - results_list: List[ - Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]] - ] = [] - for key in entity_serialized_key_list: - if key not in result_map: - # If not found, we add a gap to let the client know. - results_list.append((None, None)) - continue - - results_list.append(result_map[key]) - - return results_list - - def update( - self, - config: RepoConfig, - tables_to_delete: Sequence[FeatureView], - tables_to_keep: Sequence[FeatureView], - entities_to_delete: Sequence[Entity], - entities_to_keep: Sequence[Entity], - partial: bool, - ): - """ - Update tables from the Rockset Online Store. - - Args: - config: The RepoConfig for the current FeatureStore. - tables_to_delete: Tables to delete from the Rockset Online Store. - tables_to_keep: Tables to keep in the Rockset Online Store. - """ - online_config = config.online_store - assert isinstance(online_config, RocksetOnlineStoreConfig) - rs = self.get_rockset_client(online_config) - - created_collections = [] - for table_instance in tables_to_keep: - try: - collection_name = self.get_collection_name(config, table_instance) - rs.Collections.create_file_upload_collection(name=collection_name) - created_collections.append(collection_name) - except BadRequestException as e: - if self.parse_request_error_type(e) == "AlreadyExists": - # Table already exists nothing to do. We should still make sure it is ready though. - created_collections.append(collection_name) - continue - raise - - for table_to_delete in tables_to_delete: - self.delete_collection( - rs, collection_name=self.get_collection_name(config, table_to_delete) - ) - - # Now wait for all collections to be READY. - self.wait_for_ready_collections( - rs, created_collections, online_config=online_config - ) - - def teardown( - self, - config: RepoConfig, - tables: Sequence[FeatureView], - entities: Sequence[Entity], - ): - """ - Delete all collections from the Rockset Online Store. - - Args: - config: The RepoConfig for the current FeatureStore. - tables: Tables to delete from the feature repo. - """ - online_config = config.online_store - assert isinstance(online_config, RocksetOnlineStoreConfig) - rs = self.get_rockset_client(online_config) - for table in tables: - self.delete_collection( - rs, collection_name=self.get_collection_name(config, table) - ) - - def get_rockset_client( - self, onlineConfig: RocksetOnlineStoreConfig - ) -> RocksetClient: - """ - Fetches the RocksetClient to be used for all requests for this online store based on the api - configuration in the provided config. If no configuration provided local ENV vars will be used. - - Args: - onlineConfig: The RocksetOnlineStoreConfig associated with this online store. 
- """ - if self._rockset_client is not None: - return self._rockset_client - - _api_key = ( - os.getenv("ROCKSET_APIKEY") - if isinstance(onlineConfig.api_key, type(None)) - else onlineConfig.api_key - ) - _host = ( - os.getenv("ROCKSET_APISERVER") - if isinstance(onlineConfig.host, type(None)) - else onlineConfig.host - ) - self._rockset_client = RocksetClient(host=_host, api_key=_api_key) - return self._rockset_client - - @staticmethod - def delete_collection(rs: RocksetClient, collection_name: str): - """ - Deletes the collection whose name was provided - - Args: - rs: The RocksetClient to be used for the deletion. - collection_name: The name of the collection to be deleted. - """ - - try: - rs.Collections.delete(collection=collection_name) - except RocksetException as e: - if RocksetOnlineStore.parse_request_error_type(e) == "NotFound": - logger.warning( - f"Trying to delete collection that does not exist {collection_name}" - ) - return - raise - - @staticmethod - def get_collection_name(config: RepoConfig, feature_view: FeatureView) -> str: - """ - Returns the collection name based on the provided config and FeatureView. - - Args: - config: RepoConfig for the online store. - feature_view: FeatureView that is backed by the returned collection name. - - Returns: - The collection name as a string. - """ - project_val = config.project if config.project else "feast" - table_name = feature_view.name if feature_view.name else "feature_store" - return f"{project_val}_{table_name}" - - @staticmethod - def parse_request_error_type(e: RocksetException) -> str: - """ - Parse a throw RocksetException. Will return a string representing the type of error that was thrown. - - Args: - e: The RockException that is being parsed. - - Returns: - Error type parsed as a string. - """ - - body_dict = json.loads(e.body) - return body_dict["type"] - - @staticmethod - def wait_for_fence( - rs: RocksetClient, - collection_name: str, - last_offset: str, - online_config: RocksetOnlineStoreConfig, - ): - """ - Waits until 'last_offset' is flushed and values are ready to be read. If wait lasts longer than the timeout specified in config - a timeout exception will be throw. - - Args: - rs: Rockset client that will be used to make all requests. - collection_name: Collection associated with the offsets we are waiting for. - last_offset: The actual offsets we are waiting to be flushed. - online_config: The config that will be used to determine timeouts and backout configurations. 
- """ - - resource_path = ( - f"/v1/orgs/self/ws/commons/collections/{collection_name}/offsets/commit" - ) - request = {"name": [last_offset]} - - headers = {} - headers["Content-Type"] = "application/json" - headers["Authorization"] = f"ApiKey {rs.api_client.configuration.api_key}" - - t_start = time.time() - for num_attempts in range(online_config.max_request_attempts): - delay = time.time() - t_start - resp = requests.post( - url=f"{rs.api_client.configuration.host}{resource_path}", - json=request, - headers=headers, - ) - - if resp.status_code == 200 and resp.json()["data"]["passed"] is True: - break - - if delay > online_config.fence_timeout_secs: - raise TimeoutError( - f"Write to collection {collection_name} at offset {last_offset} was not available for read after {delay} secs" - ) - - if resp.status_code == 429: - RocksetOnlineStore.backoff_sleep(num_attempts, online_config) - continue - elif resp.status_code != 200: - raise Exception(f"[{resp.status_code}]: {resp.reason}") - - RocksetOnlineStore.backoff_sleep(num_attempts, online_config) - - @staticmethod - def wait_for_ready_collections( - rs: RocksetClient, - collection_names: List[str], - online_config: RocksetOnlineStoreConfig, - ): - """ - Waits until all collections provided have entered READY state and can accept new documents. If wait - lasts longer than timeout a TimeoutError exception will be thrown. - - Args: - rs: Rockset client that will be used to make all requests. - collection_names: All collections that we will wait for. - timeout: The max amount of time we will wait for the collections to become READY. - """ - - t_start = time.time() - for cname in collection_names: - # We will wait until the provided timeout for all collections to become READY. - for num_attempts in range(online_config.max_request_attempts): - resp = None - delay = time.time() - t_start - try: - resp = rs.Collections.get(collection=cname) - except RocksetException as e: - error_type = RocksetOnlineStore.parse_request_error_type(e) - if error_type == "NotFound": - if delay > online_config.collection_created_timeout_secs: - raise TimeoutError( - f"Collection {cname} failed to become visible after {delay} seconds" - ) - elif error_type == "RateLimitExceeded": - RocksetOnlineStore.backoff_sleep(num_attempts, online_config) - continue - else: - raise - - if ( - resp is not None - and cast(Dict[str, dict], resp)["data"]["status"] == "READY" - ): - break - - if delay > online_config.collection_ready_timeout_secs: - raise TimeoutError( - f"Collection {cname} failed to become ready after {delay} seconds" - ) - - RocksetOnlineStore.backoff_sleep(num_attempts, online_config) - - @staticmethod - def backoff_sleep(attempts: int, online_config: RocksetOnlineStoreConfig): - """ - Sleep for the needed amount of time based on the number of request attempts. - - Args: - backoff: The amount of time we will sleep for - max_backoff: The max amount of time we should ever backoff for. - rate_limited: Whether this method is being called as part of a rate limited request. - """ - - default_backoff = online_config.initial_request_backoff_secs - - # Full jitter, exponential backoff. 
- backoff = random.uniform( - default_backoff, - min(default_backoff << attempts, online_config.max_request_backoff_secs), - ) - time.sleep(backoff) diff --git a/sdk/python/feast/infra/online_stores/contrib/singlestore_online_store/singlestore.py b/sdk/python/feast/infra/online_stores/contrib/singlestore_online_store/singlestore.py index e17a059c1a..3e921afcea 100644 --- a/sdk/python/feast/infra/online_stores/contrib/singlestore_online_store/singlestore.py +++ b/sdk/python/feast/infra/online_stores/contrib/singlestore_online_store/singlestore.py @@ -1,10 +1,9 @@ from __future__ import absolute_import from collections import defaultdict -from datetime import datetime +from datetime import datetime, timezone from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple -import pytz import singlestoredb from pydantic import StrictStr from singlestoredb.connection import Connection, Cursor @@ -232,4 +231,4 @@ def _to_naive_utc(ts: datetime) -> datetime: if ts.tzinfo is None: return ts else: - return ts.astimezone(pytz.utc).replace(tzinfo=None) + return ts.astimezone(tz=timezone.utc).replace(tzinfo=None) diff --git a/sdk/python/feast/infra/online_stores/online_store.py b/sdk/python/feast/infra/online_stores/online_store.py index 9cf2ef95f6..fdb5b055cf 100644 --- a/sdk/python/feast/infra/online_stores/online_store.py +++ b/sdk/python/feast/infra/online_stores/online_store.py @@ -349,6 +349,7 @@ def retrieve_online_documents( ) -> List[ Tuple[ Optional[datetime], + Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], diff --git a/sdk/python/feast/infra/online_stores/redis.py b/sdk/python/feast/infra/online_stores/redis.py index 5f0156f620..59892fcbe0 100644 --- a/sdk/python/feast/infra/online_stores/redis.py +++ b/sdk/python/feast/infra/online_stores/redis.py @@ -13,7 +13,7 @@ # limitations under the License. 
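# Several stores above (mysql, elasticsearch) now import to_naive_utc from
# feast.utils instead of keeping private copies. Its contract, mirrored by the
# singlestore helper kept above: aware datetimes are converted to UTC and
# stripped of tzinfo, naive ones pass through unchanged. A minimal equivalent:
from datetime import datetime, timezone

def to_naive_utc(ts: datetime) -> datetime:
    if ts.tzinfo is None:
        return ts
    return ts.astimezone(tz=timezone.utc).replace(tzinfo=None)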
import json import logging -from datetime import datetime +from datetime import datetime, timezone from enum import Enum from typing import ( Any, @@ -28,7 +28,6 @@ Union, ) -import pytz from google.protobuf.timestamp_pb2 import Timestamp from pydantic import StrictStr @@ -457,5 +456,5 @@ def _get_features_for_entity( if not res: return None, None else: - timestamp = datetime.fromtimestamp(res_ts.seconds, tz=pytz.utc) + timestamp = datetime.fromtimestamp(res_ts.seconds, tz=timezone.utc) return timestamp, res diff --git a/sdk/python/feast/infra/online_stores/remote.py b/sdk/python/feast/infra/online_stores/remote.py index 19e1b7d515..8a7e299516 100644 --- a/sdk/python/feast/infra/online_stores/remote.py +++ b/sdk/python/feast/infra/online_stores/remote.py @@ -24,6 +24,7 @@ from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import FeastConfigBaseModel +from feast.rest_error_handler import rest_error_handling_decorator from feast.type_map import python_values_to_proto_values from feast.value_type import ValueType @@ -70,9 +71,7 @@ def online_read( req_body = self._construct_online_read_api_json_request( entity_keys, table, requested_features ) - response = requests.post( - f"{config.online_store.path}/get-online-features", data=req_body - ) + response = get_remote_online_features(config=config, req_body=req_body) if response.status_code == 200: logger.debug("Able to retrieve the online features from feature server.") response_json = json.loads(response.text) @@ -110,7 +109,7 @@ def online_read( result_tuples.append((event_ts, feature_values_dict)) return result_tuples else: - error_msg = f"Unable to retrieve the online store data using feature server API. Error_code={response.status_code}, error_message={response.reason}" + error_msg = f"Unable to retrieve the online store data using feature server API. 
Error_code={response.status_code}, error_message={response.text}" logger.error(error_msg) raise RuntimeError(error_msg) @@ -165,3 +164,12 @@ def teardown( entities: Sequence[Entity], ): pass + + +@rest_error_handling_decorator +def get_remote_online_features( + session: requests.Session, config: RepoConfig, req_body: str +) -> requests.Response: + return session.post( + f"{config.online_store.path}/get-online-features", data=req_body + ) diff --git a/sdk/python/feast/infra/online_stores/snowflake.py b/sdk/python/feast/infra/online_stores/snowflake.py index fef804a377..6f39bdd0f6 100644 --- a/sdk/python/feast/infra/online_stores/snowflake.py +++ b/sdk/python/feast/infra/online_stores/snowflake.py @@ -53,6 +53,9 @@ class SnowflakeOnlineStoreConfig(FeastConfigBaseModel): private_key: Optional[str] = None """ Snowflake private key file path""" + private_key_content: Optional[bytes] = None + """ Snowflake private key stored as bytes""" + private_key_passphrase: Optional[str] = None """ Snowflake private key file passphrase""" diff --git a/sdk/python/feast/infra/online_stores/sqlite.py b/sdk/python/feast/infra/online_stores/sqlite.py index 9896b766d4..c54a336aad 100644 --- a/sdk/python/feast/infra/online_stores/sqlite.py +++ b/sdk/python/feast/infra/online_stores/sqlite.py @@ -33,10 +33,9 @@ from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.core.SqliteTable_pb2 import SqliteTable as SqliteTableProto from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto -from feast.protos.feast.types.Value_pb2 import FloatList as FloatListProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import FeastConfigBaseModel, RepoConfig -from feast.utils import to_naive_utc +from feast.utils import _build_retrieve_online_document_record, to_naive_utc class SqliteOnlineStoreConfig(FeastConfigBaseModel): @@ -110,6 +109,11 @@ def online_write_batch( project = config.project + def adapt_datetime(dt): + return dt.isoformat() + + sqlite3.register_adapter(datetime, adapt_datetime) + with conn: for entity_key, values, timestamp, created_ts in data: entity_key_bin = serialize_entity_key( @@ -201,6 +205,11 @@ def online_read( conn = self._get_conn(config) cur = conn.cursor() + def convert_timestamp(ts_str): + return datetime.fromisoformat(ts_str.decode('utf-8') if isinstance(ts_str, bytes) else ts_str) + + sqlite3.register_converter("timestamp", convert_timestamp) + result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] # Fetch all entities in one go @@ -303,6 +312,7 @@ def retrieve_online_documents( ) -> List[ Tuple[ Optional[datetime], + Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], @@ -385,6 +395,7 @@ def retrieve_online_documents( result: List[ Tuple[ Optional[datetime], + Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], @@ -392,19 +403,14 @@ def retrieve_online_documents( ] = [] for entity_key, _, string_value, distance, event_ts in rows: - feature_value_proto = ValueProto() - feature_value_proto.ParseFromString(string_value if string_value else b"") - vector_value_proto = ValueProto( - float_list_val=FloatListProto(val=embedding) - ) - distance_value_proto = ValueProto(float_val=distance) - result.append( - ( + _build_retrieve_online_document_record( + entity_key, + string_value if string_value else b"", + embedding, + distance, event_ts, - feature_value_proto, - vector_value_proto, - 
distance_value_proto, + config.entity_key_serialization_version, ) ) diff --git a/sdk/python/feast/infra/provider.py b/sdk/python/feast/infra/provider.py index 9940af1d02..c0062dde02 100644 --- a/sdk/python/feast/infra/provider.py +++ b/sdk/python/feast/infra/provider.py @@ -364,6 +364,7 @@ def retrieve_online_documents( ) -> List[ Tuple[ Optional[datetime], + Optional[EntityKeyProto], Optional[ValueProto], Optional[ValueProto], Optional[ValueProto], diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index 03bec64830..f2374edf1b 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -28,6 +28,8 @@ from feast.feature_view import FeatureView from feast.infra.infra_object import Infra from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.permission import Permission +from feast.project import Project from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.Entity_pb2 import Entity as EntityProto from feast.protos.feast.core.FeatureService_pb2 import ( @@ -37,6 +39,8 @@ from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( OnDemandFeatureView as OnDemandFeatureViewProto, ) +from feast.protos.feast.core.Permission_pb2 import Permission as PermissionProto +from feast.protos.feast.core.Project_pb2 import Project as ProjectProto from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.core.SavedDataset_pb2 import SavedDataset as SavedDatasetProto from feast.protos.feast.core.StreamFeatureView_pb2 import ( @@ -387,6 +391,44 @@ def list_feature_views( """ raise NotImplementedError + @abstractmethod + def get_any_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> BaseFeatureView: + """ + Retrieves a feature view of any type. + + Args: + name: Name of feature view + project: Feast project that this feature view belongs to + allow_cache: Allow returning feature view from the cached registry + + Returns: + Returns either the specified feature view, or raises an exception if + none is found + """ + raise NotImplementedError + + @abstractmethod + def list_all_feature_views( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[BaseFeatureView]: + """ + Retrieve a list of feature views of all types from the registry + + Args: + allow_cache: Allow returning feature views from the cached registry + project: Filter feature views based on project name + tags: Filter by tags + + Returns: + List of feature views + """ + raise NotImplementedError + @abstractmethod def apply_materialization( self, @@ -457,7 +499,10 @@ def delete_saved_dataset(self, name: str, project: str, commit: bool = True): @abstractmethod def list_saved_datasets( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[SavedDataset]: """ Retrieves a list of all saved datasets in specified project @@ -465,6 +510,7 @@ def list_saved_datasets( Args: project: Feast project allow_cache: Whether to allow returning this dataset from a cached registry + tags: Filter by tags Returns: Returns the list of SavedDatasets @@ -521,17 +567,21 @@ def get_validation_reference( # TODO: Needs to be implemented. 
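# Several list_* registry APIs in this patch gain an optional `tags` argument.
# The contract, as used with utils.has_all_tags elsewhere in this diff,
# appears to be: return an object only if every requested tag key/value pair
# is present on it, and treat an empty filter as match-all. Sketch of that
# predicate with illustrative names (not the real feast.utils code):
from typing import Mapping, Optional

def matches_all_tags(obj_tags: Mapping[str, str], wanted: Optional[Mapping[str, str]]) -> bool:
    # No filter (or an empty one) matches everything.
    return not wanted or all(obj_tags.get(k) == v for k, v in wanted.items())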
def list_validation_references( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[ValidationReference]: """ Retrieve a list of validation references from the registry Args: - allow_cache: Allow returning feature views from the cached registry - project: Filter feature views based on project name + project: Filter validation references based on project name + allow_cache: Allow returning validation references from the cached registry + tags: Filter by tags Returns: - List of request feature views + List of validation references """ raise NotImplementedError @@ -590,6 +640,134 @@ def get_user_metadata( self, project: str, feature_view: BaseFeatureView ) -> Optional[bytes]: ... + # Permission operations + @abstractmethod + def apply_permission( + self, permission: Permission, project: str, commit: bool = True + ): + """ + Registers a single permission with Feast + + Args: + permission: A permission that will be registered + project: Feast project that this permission belongs to + commit: Whether to immediately commit to the registry + """ + raise NotImplementedError + + @abstractmethod + def delete_permission(self, name: str, project: str, commit: bool = True): + """ + Deletes a permission or raises an exception if not found. + + Args: + name: Name of permission + project: Feast project that this permission belongs to + commit: Whether the change should be persisted immediately + """ + raise NotImplementedError + + @abstractmethod + def get_permission( + self, name: str, project: str, allow_cache: bool = False + ) -> Permission: + """ + Retrieves a permission. + + Args: + name: Name of permission + project: Feast project that this permission belongs to + allow_cache: Whether to allow returning this permission from a cached registry + + Returns: + Returns either the specified permission, or raises an exception if none is found + """ + raise NotImplementedError + + @abstractmethod + def list_permissions( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Permission]: + """ + Retrieve a list of permissions from the registry + + Args: + project: Filter permissions based on project name + allow_cache: Whether to allow returning permissions from a cached registry + + Returns: + List of permissions + """ + raise NotImplementedError + + @abstractmethod + def apply_project( + self, + project: Project, + commit: bool = True, + ): + """ + Registers a project with Feast + + Args: + project: A project that will be registered + commit: Whether to immediately commit to the registry + """ + raise NotImplementedError + + @abstractmethod + def delete_project( + self, + name: str, + commit: bool = True, + ): + """ + Deletes a project or raises a ProjectNotFoundException if not found. + + Args: + name: Feast project name that needs to be deleted + commit: Whether the change should be persisted immediately + """ + raise NotImplementedError + + @abstractmethod + def get_project( + self, + name: str, + allow_cache: bool = False, + ) -> Project: + """ + Retrieves a project. 
+ + Args: + name: Feast project name + allow_cache: Whether to allow returning this project from a cached registry + + Returns: + Returns either the specified project, or raises a ProjectObjectNotFoundException if none is found + """ + raise NotImplementedError + + @abstractmethod + def list_projects( + self, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Project]: + """ + Retrieve a list of projects from the registry + + Args: + allow_cache: Whether to allow returning projects from a cached registry + + Returns: + List of projects + """ + raise NotImplementedError + @abstractmethod def proto(self) -> RegistryProto: """ @@ -716,6 +894,13 @@ def to_dict(self, project: str) -> Dict[str, List[Any]]: registry_dict["infra"].append( self._message_to_sorted_dict(infra_object.to_proto()) ) + for permission in sorted( + self.list_permissions(project=project), key=lambda ds: ds.name + ): + registry_dict["permissions"].append( + self._message_to_sorted_dict(permission.to_proto()) + ) + return registry_dict @staticmethod @@ -732,4 +917,8 @@ def deserialize_registry_values(serialized_proto, feast_obj_type) -> Any: return OnDemandFeatureViewProto.FromString(serialized_proto) if feast_obj_type == FeatureService: return FeatureServiceProto.FromString(serialized_proto) + if feast_obj_type == Permission: + return PermissionProto.FromString(serialized_proto) + if feast_obj_type == Project: + return ProjectProto.FromString(serialized_proto) return None diff --git a/sdk/python/feast/infra/registry/caching_registry.py b/sdk/python/feast/infra/registry/caching_registry.py index 298639028d..042eee06ab 100644 --- a/sdk/python/feast/infra/registry/caching_registry.py +++ b/sdk/python/feast/infra/registry/caching_registry.py @@ -1,11 +1,13 @@ import atexit import logging import threading +import warnings from abc import abstractmethod from datetime import timedelta from threading import Lock from typing import List, Optional +from feast.base_feature_view import BaseFeatureView from feast.data_source import DataSource from feast.entity import Entity from feast.feature_service import FeatureService @@ -14,7 +16,10 @@ from feast.infra.registry import proto_registry_utils from feast.infra.registry.base_registry import BaseRegistry from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.permission import Permission +from feast.project import Project from feast.project_metadata import ProjectMetadata +from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.saved_dataset import SavedDataset, ValidationReference from feast.stream_feature_view import StreamFeatureView from feast.utils import _utc_now @@ -24,14 +29,14 @@ class CachingRegistry(BaseRegistry): def __init__(self, project: str, cache_ttl_seconds: int, cache_mode: str): - self.cached_registry_proto = self.proto() - proto_registry_utils.init_project_metadata(self.cached_registry_proto, project) - self.cached_registry_proto_created = _utc_now() + self.cache_mode = cache_mode + self.cached_registry_proto = RegistryProto() self._refresh_lock = Lock() self.cached_registry_proto_ttl = timedelta( seconds=cache_ttl_seconds if cache_ttl_seconds is not None else 0 ) - self.cache_mode = cache_mode + self.cached_registry_proto = self.proto() + self.cached_registry_proto_created = _utc_now() if cache_mode == "thread": self._start_thread_async_refresh(cache_ttl_seconds) atexit.register(self._exit_handler) @@ -100,6 +105,39 @@ def list_entities( ) return 
self._list_entities(project, tags) + @abstractmethod + def _get_any_feature_view(self, name: str, project: str) -> BaseFeatureView: + pass + + def get_any_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> BaseFeatureView: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_any_feature_view( + self.cached_registry_proto, name, project + ) + return self._get_any_feature_view(name, project) + + @abstractmethod + def _list_all_feature_views( + self, project: str, tags: Optional[dict[str, str]] + ) -> List[BaseFeatureView]: + pass + + def list_all_feature_views( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[BaseFeatureView]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_all_feature_views( + self.cached_registry_proto, project, tags + ) + return self._list_all_feature_views(project, tags) + @abstractmethod def _get_feature_view(self, name: str, project: str) -> FeatureView: pass @@ -249,18 +287,23 @@ def get_saved_dataset( return self._get_saved_dataset(name, project) @abstractmethod - def _list_saved_datasets(self, project: str) -> List[SavedDataset]: + def _list_saved_datasets( + self, project: str, tags: Optional[dict[str, str]] = None + ) -> List[SavedDataset]: pass def list_saved_datasets( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[SavedDataset]: if allow_cache: self._refresh_cached_registry_if_necessary() return proto_registry_utils.list_saved_datasets( - self.cached_registry_proto, project + self.cached_registry_proto, project, tags ) - return self._list_saved_datasets(project) + return self._list_saved_datasets(project, tags) @abstractmethod def _get_validation_reference(self, name: str, project: str) -> ValidationReference: @@ -277,18 +320,23 @@ def get_validation_reference( return self._get_validation_reference(name, project) @abstractmethod - def _list_validation_references(self, project: str) -> List[ValidationReference]: + def _list_validation_references( + self, project: str, tags: Optional[dict[str, str]] = None + ) -> List[ValidationReference]: pass def list_validation_references( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[ValidationReference]: if allow_cache: self._refresh_cached_registry_if_necessary() return proto_registry_utils.list_validation_references( - self.cached_registry_proto, project + self.cached_registry_proto, project, tags ) - return self._list_validation_references(project) + return self._list_validation_references(project, tags) @abstractmethod def _list_project_metadata(self, project: str) -> List[ProjectMetadata]: @@ -297,6 +345,10 @@ def _list_project_metadata(self, project: str) -> List[ProjectMetadata]: def list_project_metadata( self, project: str, allow_cache: bool = False ) -> List[ProjectMetadata]: + warnings.warn( + "list_project_metadata is deprecated and will be removed in a future version. 
Use list_projects() and get_project() methods instead.", + DeprecationWarning, + ) if allow_cache: self._refresh_cached_registry_if_necessary() return proto_registry_utils.list_project_metadata( @@ -311,35 +363,94 @@ def _get_infra(self, project: str) -> Infra: pass def get_infra(self, project: str, allow_cache: bool = False) -> Infra: return self._get_infra(project) - def refresh(self, project: Optional[str] = None): - if project: - project_metadata = proto_registry_utils.get_project_metadata( - registry_proto=self.cached_registry_proto, project=project + @abstractmethod + def _get_permission(self, name: str, project: str) -> Permission: + pass + + def get_permission( + self, name: str, project: str, allow_cache: bool = False + ) -> Permission: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_permission( + self.cached_registry_proto, name, project + ) + return self._get_permission(name, project) + + @abstractmethod + def _list_permissions( + self, project: str, tags: Optional[dict[str, str]] + ) -> List[Permission]: + pass + + def list_permissions( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Permission]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_permissions( + self.cached_registry_proto, project, tags ) - if not project_metadata: - proto_registry_utils.init_project_metadata( - self.cached_registry_proto, project - ) + return self._list_permissions(project, tags) + + @abstractmethod + def _get_project(self, name: str) -> Project: + pass + + def get_project( + self, + name: str, + allow_cache: bool = False, + ) -> Project: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_project(self.cached_registry_proto, name) + return self._get_project(name) + + @abstractmethod + def _list_projects(self, tags: Optional[dict[str, str]]) -> List[Project]: + pass + + def list_projects( + self, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Project]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_projects(self.cached_registry_proto, tags) + return self._list_projects(tags) + + def refresh(self, project: Optional[str] = None): self.cached_registry_proto = self.proto() self.cached_registry_proto_created = _utc_now() def _refresh_cached_registry_if_necessary(self): if self.cache_mode == "sync": with self._refresh_lock: - expired = ( - self.cached_registry_proto is None - or self.cached_registry_proto_created is None - ) or ( - self.cached_registry_proto_ttl.total_seconds() - > 0 # 0 ttl means infinity - and ( - _utc_now() - > ( - self.cached_registry_proto_created - + self.cached_registry_proto_ttl + if self.cached_registry_proto == RegistryProto(): + # Avoids the need to refresh the registry when cache is not populated yet + # Especially during the __init__ phase + # proto() will populate the cache with project metadata if no objects are registered + expired = False + else: + expired = ( + self.cached_registry_proto is None + or self.cached_registry_proto_created is None + ) or ( + self.cached_registry_proto_ttl.total_seconds() + > 0 # 0 ttl means infinity + and ( + _utc_now() + > ( + self.cached_registry_proto_created + + self.cached_registry_proto_ttl + ) ) ) - ) if expired: logger.info("Registry cache expired, so refreshing") self.refresh() @@ -351,7 +462,7 @@ def _start_thread_async_refresh(self, 
cache_ttl_seconds): self.registry_refresh_thread = threading.Timer( cache_ttl_seconds, self._start_thread_async_refresh, [cache_ttl_seconds] ) - self.registry_refresh_thread.setDaemon(True) + self.registry_refresh_thread.daemon = True self.registry_refresh_thread.start() def _exit_handler(self): diff --git a/sdk/python/feast/infra/registry/proto_registry_utils.py b/sdk/python/feast/infra/registry/proto_registry_utils.py index 0e85f5b0a9..fc5c3f6671 100644 --- a/sdk/python/feast/infra/registry/proto_registry_utils.py +++ b/sdk/python/feast/infra/registry/proto_registry_utils.py @@ -1,8 +1,8 @@ -import uuid from functools import wraps from typing import List, Optional from feast import utils +from feast.base_feature_view import BaseFeatureView from feast.data_source import DataSource from feast.entity import Entity from feast.errors import ( @@ -10,12 +10,16 @@ EntityNotFoundException, FeatureServiceNotFoundException, FeatureViewNotFoundException, + PermissionObjectNotFoundException, + ProjectObjectNotFoundException, SavedDatasetNotFound, ValidationReferenceNotFound, ) from feast.feature_service import FeatureService from feast.feature_view import FeatureView from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.permission import Permission +from feast.project import Project from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.Registry_pb2 import ProjectMetadata as ProjectMetadataProto from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto @@ -67,13 +71,6 @@ def wrapper( return wrapper -def init_project_metadata(cached_registry_proto: RegistryProto, project: str): - new_project_uuid = f"{uuid.uuid4()}" - cached_registry_proto.project_metadata.append( - ProjectMetadata(project_name=project, project_uuid=new_project_uuid).to_proto() - ) - - def get_project_metadata( registry_proto: Optional[RegistryProto], project: str ) -> Optional[ProjectMetadataProto]: @@ -97,6 +94,33 @@ def get_feature_service( raise FeatureServiceNotFoundException(name, project=project) +def get_any_feature_view( + registry_proto: RegistryProto, name: str, project: str +) -> BaseFeatureView: + for feature_view_proto in registry_proto.feature_views: + if ( + feature_view_proto.spec.name == name + and feature_view_proto.spec.project == project + ): + return FeatureView.from_proto(feature_view_proto) + + for feature_view_proto in registry_proto.stream_feature_views: + if ( + feature_view_proto.spec.name == name + and feature_view_proto.spec.project == project + ): + return StreamFeatureView.from_proto(feature_view_proto) + + for on_demand_feature_view in registry_proto.on_demand_feature_views: + if ( + on_demand_feature_view.spec.project == project + and on_demand_feature_view.spec.name == name + ): + return OnDemandFeatureView.from_proto(on_demand_feature_view) + + raise FeatureViewNotFoundException(name, project) + + def get_feature_view( registry_proto: RegistryProto, name: str, project: str ) -> FeatureView: @@ -183,6 +207,17 @@ def list_feature_services( return feature_services +@registry_proto_cache_with_tags +def list_all_feature_views( + registry_proto: RegistryProto, project: str, tags: Optional[dict[str, str]] +) -> List[BaseFeatureView]: + return ( + list_feature_views(registry_proto, project, tags) + + list_stream_feature_views(registry_proto, project, tags) + + list_on_demand_feature_views(registry_proto, project, tags) + ) + + @registry_proto_cache_with_tags def list_feature_views( registry_proto: RegistryProto, project: str, tags: 
Optional[dict[str, str]] @@ -252,24 +287,28 @@ def list_data_sources( return data_sources -@registry_proto_cache +@registry_proto_cache_with_tags def list_saved_datasets( - registry_proto: RegistryProto, project: str + registry_proto: RegistryProto, project: str, tags: Optional[dict[str, str]] ) -> List[SavedDataset]: saved_datasets = [] for saved_dataset in registry_proto.saved_datasets: - if saved_dataset.spec.project == project: + if saved_dataset.spec.project == project and utils.has_all_tags( + saved_dataset.tags, tags + ): saved_datasets.append(SavedDataset.from_proto(saved_dataset)) return saved_datasets -@registry_proto_cache +@registry_proto_cache_with_tags def list_validation_references( - registry_proto: RegistryProto, project: str + registry_proto: RegistryProto, project: str, tags: Optional[dict[str, str]] ) -> List[ValidationReference]: validation_references = [] for validation_reference in registry_proto.validation_references: - if validation_reference.project == project: + if validation_reference.project == project and utils.has_all_tags( + validation_reference.tags, tags + ): validation_references.append( ValidationReference.from_proto(validation_reference) ) @@ -285,3 +324,46 @@ def list_project_metadata( for project_metadata in registry_proto.project_metadata if project_metadata.project == project ] + + +@registry_proto_cache_with_tags +def list_permissions( + registry_proto: RegistryProto, project: str, tags: Optional[dict[str, str]] +) -> List[Permission]: + permissions = [] + for permission_proto in registry_proto.permissions: + if permission_proto.spec.project == project and utils.has_all_tags( + permission_proto.spec.tags, tags + ): + permissions.append(Permission.from_proto(permission_proto)) + return permissions + + +def get_permission( + registry_proto: RegistryProto, name: str, project: str +) -> Permission: + for permission_proto in registry_proto.permissions: + if ( + permission_proto.spec.project == project + and permission_proto.spec.name == name + ): + return Permission.from_proto(permission_proto) + raise PermissionObjectNotFoundException(name=name, project=project) + + +def list_projects( + registry_proto: RegistryProto, + tags: Optional[dict[str, str]], +) -> List[Project]: + projects = [] + for project_proto in registry_proto.projects: + if utils.has_all_tags(project_proto.spec.tags, tags): + projects.append(Project.from_proto(project_proto)) + return projects + + +def get_project(registry_proto: RegistryProto, name: str) -> Project: + for projects_proto in registry_proto.projects: + if projects_proto.spec.name == name: + return Project.from_proto(projects_proto) + raise ProjectObjectNotFoundException(name=name) diff --git a/sdk/python/feast/infra/registry/registry.py b/sdk/python/feast/infra/registry/registry.py index fe44e6253a..bf5dfbe24f 100644 --- a/sdk/python/feast/infra/registry/registry.py +++ b/sdk/python/feast/infra/registry/registry.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
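# The registry.py changes below add a one-time sync from the legacy
# feast_metadata project list to the new projects table. The core of that
# sync is a set difference; a sketch with made-up project names:
legacy_names = {"fraud", "ranking"}  # names recorded in project_metadata
current_names = {"fraud"}            # names already in the projects table
for missing in legacy_names - current_names:
    # For each missing name the real code appends Project(name=missing).to_proto()
    # to cached_registry_proto.projects (see _sync_feast_metadata_to_projects_table).
    print(f"would create Project({missing!r})")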
import logging -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from enum import Enum from pathlib import Path from threading import Lock @@ -31,6 +31,9 @@ EntityNotFoundException, FeatureServiceNotFoundException, FeatureViewNotFoundException, + PermissionNotFoundException, + ProjectNotFoundException, + ProjectObjectNotFoundException, ValidationReferenceNotFound, ) from feast.feature_service import FeatureService @@ -41,6 +44,9 @@ from feast.infra.registry.base_registry import BaseRegistry from feast.infra.registry.registry_store import NoopRegistryStore from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.auth_model import AuthConfig, NoAuthConfig +from feast.permissions.permission import Permission +from feast.project import Project from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.repo_config import RegistryConfig @@ -67,18 +73,25 @@ class FeastObjectType(Enum): + PROJECT = "project" DATA_SOURCE = "data source" ENTITY = "entity" FEATURE_VIEW = "feature view" ON_DEMAND_FEATURE_VIEW = "on demand feature view" STREAM_FEATURE_VIEW = "stream feature view" FEATURE_SERVICE = "feature service" + PERMISSION = "permission" @staticmethod def get_objects_from_registry( registry: "BaseRegistry", project: str ) -> Dict["FeastObjectType", List[Any]]: return { + FeastObjectType.PROJECT: [ + project_obj + for project_obj in registry.list_projects() + if project_obj.name == project + ], FeastObjectType.DATA_SOURCE: registry.list_data_sources(project=project), FeastObjectType.ENTITY: registry.list_entities(project=project), FeastObjectType.FEATURE_VIEW: registry.list_feature_views(project=project), @@ -91,6 +104,7 @@ def get_objects_from_registry( FeastObjectType.FEATURE_SERVICE: registry.list_feature_services( project=project ), + FeastObjectType.PERMISSION: registry.list_permissions(project=project), } @staticmethod @@ -98,12 +112,14 @@ def get_objects_from_repo_contents( repo_contents: RepoContents, ) -> Dict["FeastObjectType", List[Any]]: return { + FeastObjectType.PROJECT: repo_contents.projects, FeastObjectType.DATA_SOURCE: repo_contents.data_sources, FeastObjectType.ENTITY: repo_contents.entities, FeastObjectType.FEATURE_VIEW: repo_contents.feature_views, FeastObjectType.ON_DEMAND_FEATURE_VIEW: repo_contents.on_demand_feature_views, FeastObjectType.STREAM_FEATURE_VIEW: repo_contents.stream_feature_views, FeastObjectType.FEATURE_SERVICE: repo_contents.feature_services, + FeastObjectType.PERMISSION: repo_contents.permissions, } @@ -151,38 +167,16 @@ def get_user_metadata( # The cached_registry_proto object is used for both reads and writes. In particular, # all write operations refresh the cache and modify it in memory; the write must # then be persisted to the underlying RegistryStore with a call to commit(). - cached_registry_proto: Optional[RegistryProto] = None - cached_registry_proto_created: Optional[datetime] = None + cached_registry_proto: RegistryProto + cached_registry_proto_created: datetime cached_registry_proto_ttl: timedelta - def __new__( - cls, - project: str, - registry_config: Optional[RegistryConfig], - repo_path: Optional[Path], - ): - # We override __new__ so that we can inspect registry_config and create a SqlRegistry without callers - # needing to make any changes. 
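# Context for the removal below: overriding __new__ like this is a factory
# pattern. Python only invokes __init__ automatically when __new__ returns an
# instance of cls, so the sql/snowflake/remote registries returned here were
# constructed entirely by their own constructors, bypassing Registry.__init__.
# A minimal sketch of the pattern (class names are illustrative):
class Backend:
    def __new__(cls, kind: str):
        if kind == "special":
            return SpecialBackend(kind)  # Backend.__init__ is NOT called on this
        return super().__new__(cls)

    def __init__(self, kind: str):
        self.kind = kind

class SpecialBackend:
    def __init__(self, kind: str):
        self.kind = kind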
- if registry_config and registry_config.registry_type == "sql": - from feast.infra.registry.sql import SqlRegistry - - return SqlRegistry(registry_config, project, repo_path) - elif registry_config and registry_config.registry_type == "snowflake.registry": - from feast.infra.registry.snowflake import SnowflakeRegistry - - return SnowflakeRegistry(registry_config, project, repo_path) - elif registry_config and registry_config.registry_type == "remote": - from feast.infra.registry.remote import RemoteRegistry - - return RemoteRegistry(registry_config, project, repo_path) - else: - return super(Registry, cls).__new__(cls) - def __init__( self, project: str, registry_config: Optional[RegistryConfig], repo_path: Optional[Path], + auth_config: AuthConfig = NoAuthConfig(), ): """ Create the Registry object. @@ -194,6 +188,18 @@ def __init__( """ self._refresh_lock = Lock() + self._auth_config = auth_config + + registry_proto = RegistryProto() + registry_proto.registry_schema_version = REGISTRY_SCHEMA_VERSION + self.cached_registry_proto = registry_proto + self.cached_registry_proto_created = _utc_now() + + self.purge_feast_metadata = ( + registry_config.purge_feast_metadata + if registry_config is not None + else False + ) if registry_config: registry_store_type = registry_config.registry_store_type @@ -205,13 +211,54 @@ def __init__( self._registry_store = cls(registry_config, repo_path) self.cached_registry_proto_ttl = timedelta( - seconds=registry_config.cache_ttl_seconds - if registry_config.cache_ttl_seconds is not None - else 0 + seconds=( + registry_config.cache_ttl_seconds + if registry_config.cache_ttl_seconds is not None + else 0 + ) ) + try: + registry_proto = self._registry_store.get_registry_proto() + self.cached_registry_proto = registry_proto + self.cached_registry_proto_created = _utc_now() + # Sync feast_metadata to projects table + # when purge_feast_metadata is set to True, Delete data from + # feast_metadata table and list_project_metadata will not return any data + self._sync_feast_metadata_to_projects_table() + except FileNotFoundError: + logger.info("Registry file not found. 
Creating new registry.") + finally: + self.commit() + + def _sync_feast_metadata_to_projects_table(self): + """ + Sync feast_metadata to projects table + """ + feast_metadata_projects = [] + projects_set = [] + # List of project in project_metadata + for project_metadata in self.cached_registry_proto.project_metadata: + project = ProjectMetadata.from_proto(project_metadata) + feast_metadata_projects.append(project.project_name) + if len(feast_metadata_projects) > 0: + # List of project in projects + for project_metadata in self.cached_registry_proto.projects: + project = Project.from_proto(project_metadata) + projects_set.append(project.name) + + # Find object in feast_metadata_projects but not in projects + projects_to_sync = set(feast_metadata_projects) - set(projects_set) + # Sync feast_metadata to projects table + for project_name in projects_to_sync: + project = Project(name=project_name) + self.cached_registry_proto.projects.append(project.to_proto()) + + if self.purge_feast_metadata: + self.cached_registry_proto.project_metadata = [] + def clone(self) -> "Registry": - new_registry = Registry("project", None, None) + new_registry = Registry("project", None, None, self._auth_config) new_registry.cached_registry_proto_ttl = timedelta(seconds=0) new_registry.cached_registry_proto = ( self.cached_registry_proto.__deepcopy__() @@ -222,16 +269,6 @@ def clone(self) -> "Registry": new_registry._registry_store = NoopRegistryStore() return new_registry - def _initialize_registry(self, project: str): - """Explicitly initializes the registry with an empty proto if it doesn't exist.""" - try: - self._get_registry_proto(project=project) - except FileNotFoundError: - registry_proto = RegistryProto() - registry_proto.registry_schema_version = REGISTRY_SCHEMA_VERSION - proto_registry_utils.init_project_metadata(registry_proto, project) - self._registry_store.update_registry_proto(registry_proto) - def update_infra(self, infra: Infra, project: str, commit: bool = True): self._prepare_registry_for_changes(project) assert self.cached_registry_proto @@ -307,14 +344,11 @@ def apply_data_source( if existing_data_source_proto.name == data_source.name: del registry.data_sources[idx] data_source_proto = data_source.to_proto() - data_source_proto.data_source_class_type = ( - f"{data_source.__class__.__module__}.{data_source.__class__.__name__}" - ) data_source_proto.project = project data_source_proto.data_source_class_type = ( f"{data_source.__class__.__module__}.{data_source.__class__.__name__}" ) - registry.data_sources.append(data_source_proto) + self.cached_registry_proto.data_sources.append(data_source_proto) if commit: self.commit() @@ -357,7 +391,7 @@ def apply_feature_service( feature_service_proto = feature_service.to_proto() feature_service_proto.spec.project = project del registry.feature_services[idx] - registry.feature_services.append(feature_service_proto) + self.cached_registry_proto.feature_services.append(feature_service_proto) if commit: self.commit() @@ -551,7 +585,26 @@ def apply_materialization( self.commit() return - raise FeatureViewNotFoundException(feature_view.name, project) + def list_all_feature_views( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[BaseFeatureView]: + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) + return proto_registry_utils.list_all_feature_views( + registry_proto, project, tags + ) + + def get_any_feature_view( + self, name: str, project: str, allow_cache: bool 
= False + ) -> BaseFeatureView: + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) + return proto_registry_utils.get_any_feature_view(registry_proto, name, project) def list_feature_views( self, @@ -709,12 +762,15 @@ def get_saved_dataset( return proto_registry_utils.get_saved_dataset(registry_proto, name, project) def list_saved_datasets( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[SavedDataset]: registry_proto = self._get_registry_proto( project=project, allow_cache=allow_cache ) - return proto_registry_utils.list_saved_datasets(registry_proto, project) + return proto_registry_utils.list_saved_datasets(registry_proto, project, tags) def apply_validation_reference( self, @@ -751,23 +807,29 @@ def get_validation_reference( ) def list_validation_references( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[ValidationReference]: registry_proto = self._get_registry_proto( project=project, allow_cache=allow_cache ) - return proto_registry_utils.list_validation_references(registry_proto, project) + return proto_registry_utils.list_validation_references( + registry_proto, project, tags + ) def delete_validation_reference(self, name: str, project: str, commit: bool = True): - registry_proto = self._prepare_registry_for_changes(project) + self._prepare_registry_for_changes(project) + assert self.cached_registry_proto for idx, existing_validation_reference in enumerate( - registry_proto.validation_references + self.cached_registry_proto.validation_references ): if ( existing_validation_reference.name == name and existing_validation_reference.project == project ): - del registry_proto.validation_references[idx] + del self.cached_registry_proto.validation_references[idx] if commit: self.commit() return @@ -797,37 +859,36 @@ def teardown(self): def proto(self) -> RegistryProto: return self.cached_registry_proto or RegistryProto() - def _prepare_registry_for_changes(self, project: str): + def _prepare_registry_for_changes(self, project_name: str): """Prepares the Registry for changes by refreshing the cache if necessary.""" + + assert self.cached_registry_proto is not None + try: - self._get_registry_proto(project=project, allow_cache=True) - if ( - proto_registry_utils.get_project_metadata( - self.cached_registry_proto, project - ) - is None - ): - # Project metadata not initialized yet. 
Try pulling without cache - self._get_registry_proto(project=project, allow_cache=False) - except FileNotFoundError: - registry_proto = RegistryProto() - registry_proto.registry_schema_version = REGISTRY_SCHEMA_VERSION + # Check if the project exists in the registry cache + self.get_project(name=project_name, allow_cache=True) + return self.cached_registry_proto + except ProjectObjectNotFoundException: + # If the project does not exist in cache, refresh cache from store + registry_proto = self._registry_store.get_registry_proto() self.cached_registry_proto = registry_proto self.cached_registry_proto_created = _utc_now() - # Initialize project metadata if needed - assert self.cached_registry_proto - if ( - proto_registry_utils.get_project_metadata( - self.cached_registry_proto, project - ) - is None - ): - proto_registry_utils.init_project_metadata( - self.cached_registry_proto, project - ) + try: + # Check if the project exists in the registry cache after refresh from store + self.get_project(name=project_name) + except ProjectObjectNotFoundException: + # If the project still does not exist, create it + project_proto = Project(name=project_name).to_proto() + self.cached_registry_proto.projects.append(project_proto) + if not self.purge_feast_metadata: + project_metadata_proto = ProjectMetadata( + project_name=project_name + ).to_proto() + self.cached_registry_proto.project_metadata.append( + project_metadata_proto + ) self.commit() - return self.cached_registry_proto def _get_registry_proto( @@ -842,10 +903,7 @@ def _get_registry_proto( Returns: Returns a RegistryProto object which represents the state of the registry """ with self._refresh_lock: - expired = ( - self.cached_registry_proto is None - or self.cached_registry_proto_created is None - ) or ( + expired = (self.cached_registry_proto_created is None) or ( self.cached_registry_proto_ttl.total_seconds() > 0 # 0 ttl means infinity and ( @@ -857,33 +915,12 @@ def _get_registry_proto( ) ) - if project: - old_project_metadata = proto_registry_utils.get_project_metadata( - registry_proto=self.cached_registry_proto, project=project - ) - - if allow_cache and not expired and old_project_metadata is not None: - assert isinstance(self.cached_registry_proto, RegistryProto) - return self.cached_registry_proto - elif allow_cache and not expired: - assert isinstance(self.cached_registry_proto, RegistryProto) + if allow_cache and not expired: return self.cached_registry_proto - logger.info("Registry cache expired, so refreshing") registry_proto = self._registry_store.get_registry_proto() self.cached_registry_proto = registry_proto self.cached_registry_proto_created = _utc_now() - - if not project: - return registry_proto - - project_metadata = proto_registry_utils.get_project_metadata( - registry_proto=registry_proto, project=project - ) - if not project_metadata: - proto_registry_utils.init_project_metadata(registry_proto, project) - self.commit() - return registry_proto def _check_conflicting_feature_view_names(self, feature_view: BaseFeatureView): @@ -905,3 +942,150 @@ def _existing_feature_view_names_to_fvs(self) -> Dict[str, Message]: fv.spec.name: fv for fv in self.cached_registry_proto.stream_feature_views } return {**odfvs, **fvs, **sfv} + + def get_permission( + self, name: str, project: str, allow_cache: bool = False + ) -> Permission: + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) + return proto_registry_utils.get_permission(registry_proto, name, project) + + def list_permissions( + self, + 
project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Permission]: + registry_proto = self._get_registry_proto( + project=project, allow_cache=allow_cache + ) + return proto_registry_utils.list_permissions(registry_proto, project, tags) + + def apply_permission( + self, permission: Permission, project: str, commit: bool = True + ): + now = _utc_now() + if not permission.created_timestamp: + permission.created_timestamp = now + permission.last_updated_timestamp = now + + registry = self._prepare_registry_for_changes(project) + for idx, existing_permission_proto in enumerate(registry.permissions): + if ( + existing_permission_proto.spec.name == permission.name + and existing_permission_proto.spec.project == project + ): + permission.created_timestamp = ( + existing_permission_proto.meta.created_timestamp.ToDatetime() + ) + del registry.permissions[idx] + + permission_proto = permission.to_proto() + permission_proto.spec.project = project + self.cached_registry_proto.permissions.append(permission_proto) + if commit: + self.commit() + + def delete_permission(self, name: str, project: str, commit: bool = True): + self._prepare_registry_for_changes(project) + assert self.cached_registry_proto + + for idx, permission_proto in enumerate(self.cached_registry_proto.permissions): + if ( + permission_proto.spec.name == name + and permission_proto.spec.project == project + ): + del self.cached_registry_proto.permissions[idx] + if commit: + self.commit() + return + raise PermissionNotFoundException(name, project) + + def apply_project( + self, + project: Project, + commit: bool = True, + ): + registry = self.cached_registry_proto + + for idx, existing_project_proto in enumerate(registry.projects): + if existing_project_proto.spec.name == project.name: + project.created_timestamp = ( + existing_project_proto.meta.created_timestamp.ToDatetime().replace( + tzinfo=timezone.utc + ) + ) + del registry.projects[idx] + + project_proto = project.to_proto() + self.cached_registry_proto.projects.append(project_proto) + if commit: + self.commit() + + def get_project( + self, + name: str, + allow_cache: bool = False, + ) -> Project: + registry_proto = self._get_registry_proto(project=name, allow_cache=allow_cache) + return proto_registry_utils.get_project(registry_proto, name) + + def list_projects( + self, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Project]: + registry_proto = self._get_registry_proto(project=None, allow_cache=allow_cache) + return proto_registry_utils.list_projects( + registry_proto=registry_proto, tags=tags + ) + + def delete_project( + self, + name: str, + commit: bool = True, + ): + assert self.cached_registry_proto + + for idx, project_proto in enumerate(self.cached_registry_proto.projects): + if project_proto.spec.name == name: + list_validation_references = self.list_validation_references(name) + for validation_reference in list_validation_references: + self.delete_validation_reference(validation_reference.name, name) + + list_saved_datasets = self.list_saved_datasets(name) + for saved_dataset in list_saved_datasets: + self.delete_saved_dataset(saved_dataset.name, name) + + list_feature_services = self.list_feature_services(name) + for feature_service in list_feature_services: + self.delete_feature_service(feature_service.name, name) + + list_on_demand_feature_views = self.list_on_demand_feature_views(name) + for on_demand_feature_view in list_on_demand_feature_views: + 
self.delete_feature_view(on_demand_feature_view.name, name) + + list_stream_feature_views = self.list_stream_feature_views(name) + for stream_feature_view in list_stream_feature_views: + self.delete_feature_view(stream_feature_view.name, name) + + list_feature_views = self.list_feature_views(name) + for feature_view in list_feature_views: + self.delete_feature_view(feature_view.name, name) + + list_data_sources = self.list_data_sources(name) + for data_source in list_data_sources: + self.delete_data_source(data_source.name, name) + + list_entities = self.list_entities(name) + for entity in list_entities: + self.delete_entity(entity.name, name) + list_permissions = self.list_permissions(name) + for permission in list_permissions: + self.delete_permission(permission.name, name) + del self.cached_registry_proto.projects[idx] + if commit: + self.commit() + return + raise ProjectNotFoundException(name) diff --git a/sdk/python/feast/infra/registry/remote.py b/sdk/python/feast/infra/registry/remote.py index 9fa6d8ebee..cdb45f0363 100644 --- a/sdk/python/feast/infra/registry/remote.py +++ b/sdk/python/feast/infra/registry/remote.py @@ -15,6 +15,13 @@ from feast.infra.infra_object import Infra from feast.infra.registry.base_registry import BaseRegistry from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.auth.auth_type import AuthType +from feast.permissions.auth_model import AuthConfig, NoAuthConfig +from feast.permissions.client.grpc_client_auth_interceptor import ( + GrpcClientAuthHeaderInterceptor, +) +from feast.permissions.permission import Permission +from feast.project import Project from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.registry import RegistryServer_pb2, RegistryServer_pb2_grpc @@ -23,6 +30,24 @@ from feast.stream_feature_view import StreamFeatureView +def extract_base_feature_view( + any_feature_view: RegistryServer_pb2.AnyFeatureView, +) -> BaseFeatureView: + feature_view_type = any_feature_view.WhichOneof("any_feature_view") + if feature_view_type == "feature_view": + feature_view = FeatureView.from_proto(any_feature_view.feature_view) + elif feature_view_type == "on_demand_feature_view": + feature_view = OnDemandFeatureView.from_proto( + any_feature_view.on_demand_feature_view + ) + elif feature_view_type == "stream_feature_view": + feature_view = StreamFeatureView.from_proto( + any_feature_view.stream_feature_view + ) + + return feature_view + + class RemoteRegistryConfig(RegistryConfig): registry_type: StrictStr = "remote" """ str: Provider name or a class name that implements Registry.""" @@ -38,31 +63,39 @@ def __init__( registry_config: Union[RegistryConfig, RemoteRegistryConfig], project: str, repo_path: Optional[Path], + auth_config: AuthConfig = NoAuthConfig(), ): + self.auth_config = auth_config self.channel = grpc.insecure_channel(registry_config.path) + if self.auth_config.type != AuthType.NONE.value: + auth_header_interceptor = GrpcClientAuthHeaderInterceptor(auth_config) + self.channel = grpc.intercept_channel(self.channel, auth_header_interceptor) self.stub = RegistryServer_pb2_grpc.RegistryServerStub(self.channel) + def close(self): + if self.channel: + self.channel.close() + + def __del__(self): + self.close() + def apply_entity(self, entity: Entity, project: str, commit: bool = True): request = RegistryServer_pb2.ApplyEntityRequest( entity=entity.to_proto(), project=project, commit=commit ) - self.stub.ApplyEntity(request) def 
delete_entity(self, name: str, project: str, commit: bool = True): request = RegistryServer_pb2.DeleteEntityRequest( name=name, project=project, commit=commit ) - self.stub.DeleteEntity(request) def get_entity(self, name: str, project: str, allow_cache: bool = False) -> Entity: request = RegistryServer_pb2.GetEntityRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetEntity(request) - return Entity.from_proto(response) def list_entities( @@ -74,9 +107,7 @@ def list_entities( request = RegistryServer_pb2.ListEntitiesRequest( project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListEntities(request) - return [Entity.from_proto(entity) for entity in response.entities] def apply_data_source( @@ -85,14 +116,12 @@ def apply_data_source( request = RegistryServer_pb2.ApplyDataSourceRequest( data_source=data_source.to_proto(), project=project, commit=commit ) - self.stub.ApplyDataSource(request) def delete_data_source(self, name: str, project: str, commit: bool = True): request = RegistryServer_pb2.DeleteDataSourceRequest( name=name, project=project, commit=commit ) - self.stub.DeleteDataSource(request) def get_data_source( @@ -101,9 +130,7 @@ def get_data_source( request = RegistryServer_pb2.GetDataSourceRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetDataSource(request) - return DataSource.from_proto(response) def list_data_sources( @@ -115,9 +142,7 @@ def list_data_sources( request = RegistryServer_pb2.ListDataSourcesRequest( project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListDataSources(request) - return [ DataSource.from_proto(data_source) for data_source in response.data_sources ] @@ -128,14 +153,12 @@ def apply_feature_service( request = RegistryServer_pb2.ApplyFeatureServiceRequest( feature_service=feature_service.to_proto(), project=project, commit=commit ) - self.stub.ApplyFeatureService(request) def delete_feature_service(self, name: str, project: str, commit: bool = True): request = RegistryServer_pb2.DeleteFeatureServiceRequest( name=name, project=project, commit=commit ) - self.stub.DeleteFeatureService(request) def get_feature_service( @@ -144,9 +167,7 @@ def get_feature_service( request = RegistryServer_pb2.GetFeatureServiceRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetFeatureService(request) - return FeatureService.from_proto(response) def list_feature_services( @@ -158,9 +179,7 @@ def list_feature_services( request = RegistryServer_pb2.ListFeatureServicesRequest( project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListFeatureServices(request) - return [ FeatureService.from_proto(feature_service) for feature_service in response.feature_services @@ -177,15 +196,17 @@ def apply_feature_view( arg_name = "on_demand_feature_view" request = RegistryServer_pb2.ApplyFeatureViewRequest( - feature_view=feature_view.to_proto() - if arg_name == "feature_view" - else None, - stream_feature_view=feature_view.to_proto() - if arg_name == "stream_feature_view" - else None, - on_demand_feature_view=feature_view.to_proto() - if arg_name == "on_demand_feature_view" - else None, + feature_view=( + feature_view.to_proto() if arg_name == "feature_view" else None + ), + stream_feature_view=( + feature_view.to_proto() if arg_name == "stream_feature_view" else None + ), + on_demand_feature_view=( + feature_view.to_proto() + if arg_name == "on_demand_feature_view" + else None + ), project=project, commit=commit, 
) @@ -196,7 +217,6 @@ def delete_feature_view(self, name: str, project: str, commit: bool = True): request = RegistryServer_pb2.DeleteFeatureViewRequest( name=name, project=project, commit=commit ) - self.stub.DeleteFeatureView(request) def get_stream_feature_view( @@ -205,9 +225,7 @@ def get_stream_feature_view( request = RegistryServer_pb2.GetStreamFeatureViewRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetStreamFeatureView(request) - return StreamFeatureView.from_proto(response) def list_stream_feature_views( @@ -219,9 +237,7 @@ def list_stream_feature_views( request = RegistryServer_pb2.ListStreamFeatureViewsRequest( project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListStreamFeatureViews(request) - return [ StreamFeatureView.from_proto(stream_feature_view) for stream_feature_view in response.stream_feature_views @@ -233,9 +249,7 @@ def get_on_demand_feature_view( request = RegistryServer_pb2.GetOnDemandFeatureViewRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetOnDemandFeatureView(request) - return OnDemandFeatureView.from_proto(response) def list_on_demand_feature_views( @@ -247,23 +261,50 @@ def list_on_demand_feature_views( request = RegistryServer_pb2.ListOnDemandFeatureViewsRequest( project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListOnDemandFeatureViews(request) - return [ OnDemandFeatureView.from_proto(on_demand_feature_view) for on_demand_feature_view in response.on_demand_feature_views ] + def get_any_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> BaseFeatureView: + request = RegistryServer_pb2.GetAnyFeatureViewRequest( + name=name, project=project, allow_cache=allow_cache + ) + + response: RegistryServer_pb2.GetAnyFeatureViewResponse = ( + self.stub.GetAnyFeatureView(request) + ) + any_feature_view = response.any_feature_view + return extract_base_feature_view(any_feature_view) + + def list_all_feature_views( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[BaseFeatureView]: + request = RegistryServer_pb2.ListAllFeatureViewsRequest( + project=project, allow_cache=allow_cache, tags=tags + ) + + response: RegistryServer_pb2.ListAllFeatureViewsResponse = ( + self.stub.ListAllFeatureViews(request) + ) + return [ + extract_base_feature_view(any_feature_view) + for any_feature_view in response.feature_views + ] + def get_feature_view( self, name: str, project: str, allow_cache: bool = False ) -> FeatureView: request = RegistryServer_pb2.GetFeatureViewRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetFeatureView(request) - return FeatureView.from_proto(response) def list_feature_views( @@ -275,7 +316,6 @@ def list_feature_views( request = RegistryServer_pb2.ListFeatureViewsRequest( project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListFeatureViews(request) return [ @@ -304,7 +344,6 @@ def apply_materialization( end_date=end_date_timestamp, commit=commit, ) - self.stub.ApplyMaterialization(request) def apply_saved_dataset( @@ -316,14 +355,12 @@ def apply_saved_dataset( request = RegistryServer_pb2.ApplySavedDatasetRequest( saved_dataset=saved_dataset.to_proto(), project=project, commit=commit ) - self.stub.ApplyFeatureService(request) def delete_saved_dataset(self, name: str, project: str, commit: bool = True): request = RegistryServer_pb2.DeleteSavedDatasetRequest( name=name, project=project, 
def delete_saved_dataset(self, name: str, project: str, commit: bool = True): request = RegistryServer_pb2.DeleteSavedDatasetRequest( name=name, project=project, commit=commit ) - self.stub.DeleteSavedDataset(request) def get_saved_dataset( @@ -332,20 +369,19 @@ def get_saved_dataset( request = RegistryServer_pb2.GetSavedDatasetRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetSavedDataset(request) - return SavedDataset.from_proto(response) def list_saved_datasets( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[SavedDataset]: request = RegistryServer_pb2.ListSavedDatasetsRequest( - project=project, allow_cache=allow_cache + project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListSavedDatasets(request) - return [ SavedDataset.from_proto(saved_dataset) for saved_dataset in response.saved_datasets @@ -362,14 +398,12 @@ def apply_validation_reference( project=project, commit=commit, ) - self.stub.ApplyValidationReference(request) def delete_validation_reference(self, name: str, project: str, commit: bool = True): request = RegistryServer_pb2.DeleteValidationReferenceRequest( name=name, project=project, commit=commit ) - self.stub.DeleteValidationReference(request) def get_validation_reference( @@ -378,20 +412,19 @@ def get_validation_reference( request = RegistryServer_pb2.GetValidationReferenceRequest( name=name, project=project, allow_cache=allow_cache ) - response = self.stub.GetValidationReference(request) - return ValidationReference.from_proto(response) def list_validation_references( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[ValidationReference]: request = RegistryServer_pb2.ListValidationReferencesRequest( - project=project, allow_cache=allow_cache + project=project, allow_cache=allow_cache, tags=tags ) - response = self.stub.ListValidationReferences(request) - return [ ValidationReference.from_proto(validation_reference) for validation_reference in response.validation_references @@ -403,25 +436,20 @@ def list_project_metadata( request = RegistryServer_pb2.ListProjectMetadataRequest( project=project, allow_cache=allow_cache ) - response = self.stub.ListProjectMetadata(request) - return [ProjectMetadata.from_proto(pm) for pm in response.project_metadata] def update_infra(self, infra: Infra, project: str, commit: bool = True): request = RegistryServer_pb2.UpdateInfraRequest( infra=infra.to_proto(), project=project, commit=commit ) - self.stub.UpdateInfra(request) def get_infra(self, project: str, allow_cache: bool = False) -> Infra: request = RegistryServer_pb2.GetInfraRequest( project=project, allow_cache=allow_cache ) - response = self.stub.GetInfra(request) - return Infra.from_proto(response) def apply_user_metadata( @@ -437,6 +465,90 @@ def get_user_metadata( ) -> Optional[bytes]: pass + def apply_permission( + self, permission: Permission, project: str, commit: bool = True + ): + permission_proto = permission.to_proto() + permission_proto.spec.project = project + + request = RegistryServer_pb2.ApplyPermissionRequest( + permission=permission_proto, project=project, commit=commit + ) + self.stub.ApplyPermission(request) + + def delete_permission(self, name: str, project: str, commit: bool = True): + request = RegistryServer_pb2.DeletePermissionRequest( + name=name, project=project, commit=commit + ) + self.stub.DeletePermission(request) + + def get_permission( + self, name: str, project: str, allow_cache: bool = False + ) -> Permission: + request =
RegistryServer_pb2.GetPermissionRequest( + name=name, project=project, allow_cache=allow_cache + ) + response = self.stub.GetPermission(request) + + return Permission.from_proto(response) + + def list_permissions( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Permission]: + request = RegistryServer_pb2.ListPermissionsRequest( + project=project, allow_cache=allow_cache, tags=tags + ) + response = self.stub.ListPermissions(request) + return [ + Permission.from_proto(permission) for permission in response.permissions + ] + + def apply_project( + self, + project: Project, + commit: bool = True, + ): + project_proto = project.to_proto() + + request = RegistryServer_pb2.ApplyProjectRequest( + project=project_proto, commit=commit + ) + self.stub.ApplyProject(request) + + def delete_project( + self, + name: str, + commit: bool = True, + ): + request = RegistryServer_pb2.DeleteProjectRequest(name=name, commit=commit) + self.stub.DeleteProject(request) + + def get_project( + self, + name: str, + allow_cache: bool = False, + ) -> Project: + request = RegistryServer_pb2.GetProjectRequest( + name=name, allow_cache=allow_cache + ) + response = self.stub.GetProject(request) + + return Project.from_proto(response) + + def list_projects( + self, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Project]: + request = RegistryServer_pb2.ListProjectsRequest( + allow_cache=allow_cache, tags=tags + ) + response = self.stub.ListProjects(request) + return [Project.from_proto(project) for project in response.projects] + def proto(self) -> RegistryProto: return self.stub.Proto(Empty()) @@ -445,7 +557,6 @@ def commit(self): def refresh(self, project: Optional[str] = None): request = RegistryServer_pb2.RefreshRequest(project=str(project)) - self.stub.Refresh(request) def teardown(self): diff --git a/sdk/python/feast/infra/registry/snowflake.py b/sdk/python/feast/infra/registry/snowflake.py index f2bc09e7e4..e68d9d64b5 100644 --- a/sdk/python/feast/infra/registry/snowflake.py +++ b/sdk/python/feast/infra/registry/snowflake.py @@ -5,7 +5,7 @@ from datetime import datetime, timedelta, timezone from enum import Enum from threading import Lock -from typing import Any, Callable, List, Literal, Optional, Set, Union +from typing import Any, Callable, List, Literal, Optional, Union, cast from pydantic import ConfigDict, Field, StrictStr @@ -18,6 +18,9 @@ EntityNotFoundException, FeatureServiceNotFoundException, FeatureViewNotFoundException, + PermissionNotFoundException, + ProjectNotFoundException, + ProjectObjectNotFoundException, SavedDatasetNotFound, ValidationReferenceNotFound, ) @@ -31,6 +34,8 @@ execute_snowflake_statement, ) from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.permission import Permission +from feast.project import Project from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from feast.protos.feast.core.Entity_pb2 import Entity as EntityProto @@ -42,6 +47,8 @@ from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( OnDemandFeatureView as OnDemandFeatureViewProto, ) +from feast.protos.feast.core.Permission_pb2 import Permission as PermissionProto +from feast.protos.feast.core.Project_pb2 import Project as ProjectProto from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.core.SavedDataset_pb2 import SavedDataset as SavedDatasetProto from 
feast.protos.feast.core.StreamFeatureView_pb2 import ( @@ -96,6 +103,9 @@ class SnowflakeRegistryConfig(RegistryConfig): private_key: Optional[str] = None """ Snowflake private key file path""" + private_key_content: Optional[bytes] = None + """ Snowflake private key stored as bytes""" + private_key_passphrase: Optional[str] = None """ Snowflake private key file passphrase""" @@ -132,26 +142,57 @@ def __init__( query = command.replace("REGISTRY_PATH", f"{self.registry_path}") execute_snowflake_statement(conn, query) + self.purge_feast_metadata = registry_config.purge_feast_metadata + self._sync_feast_metadata_to_projects_table() + if not self.purge_feast_metadata: + self._maybe_init_project_metadata(project) + self.cached_registry_proto = self.proto() - proto_registry_utils.init_project_metadata(self.cached_registry_proto, project) self.cached_registry_proto_created = _utc_now() self._refresh_lock = Lock() self.cached_registry_proto_ttl = timedelta( - seconds=registry_config.cache_ttl_seconds - if registry_config.cache_ttl_seconds is not None - else 0 + seconds=( + registry_config.cache_ttl_seconds + if registry_config.cache_ttl_seconds is not None + else 0 + ) ) self.project = project - def refresh(self, project: Optional[str] = None): - if project: - project_metadata = proto_registry_utils.get_project_metadata( - registry_proto=self.cached_registry_proto, project=project + def _sync_feast_metadata_to_projects_table(self): + feast_metadata_projects: set = set() + projects_set: set = set() + + with GetSnowflakeConnection(self.registry_config) as conn: + query = ( + f'SELECT DISTINCT project_id FROM {self.registry_path}."FEAST_METADATA"' ) - if not project_metadata: - proto_registry_utils.init_project_metadata( - self.cached_registry_proto, project - ) + df = execute_snowflake_statement(conn, query).fetch_pandas_all() + + for row in df.iterrows(): + feast_metadata_projects.add(row[1]["PROJECT_ID"]) + + if len(feast_metadata_projects) > 0: + with GetSnowflakeConnection(self.registry_config) as conn: + query = f'SELECT project_id FROM {self.registry_path}."PROJECTS"' + df = execute_snowflake_statement(conn, query).fetch_pandas_all() + + for row in df.iterrows(): + projects_set.add(row[1]["PROJECT_ID"]) + + # Find objects in feast_metadata_projects but not in projects + projects_to_sync = set(feast_metadata_projects) - set(projects_set) + for project_name in projects_to_sync: + self.apply_project(Project(name=project_name), commit=True) + + if self.purge_feast_metadata: + with GetSnowflakeConnection(self.registry_config) as conn: + query = f""" + DELETE FROM {self.registry_path}."FEAST_METADATA" + """ + execute_snowflake_statement(conn, query) + + def refresh(self, project: Optional[str] = None): self.cached_registry_proto = self.proto() self.cached_registry_proto_created = _utc_now() @@ -265,6 +306,17 @@ def update_infra(self, infra: Infra, project: str, commit: bool = True): name="infra_obj", ) + def _initialize_project_if_not_exists(self, project_name: str): + try: + self.get_project(project_name, allow_cache=True) + return + except ProjectObjectNotFoundException: + try: + self.get_project(project_name, allow_cache=False) + return + except ProjectObjectNotFoundException: + self.apply_project(Project(name=project_name), commit=True) + def _apply_object( self, table: str, @@ -274,7 +326,11 @@ def _apply_object( proto_field_name: str, name: Optional[str] = None, ): - self._maybe_init_project_metadata(project) + if not self.purge_feast_metadata: + self._maybe_init_project_metadata(project) + #
Initialize project is necessary because FeatureStore object can apply objects individually without "feast apply" cli option + if not isinstance(obj, Project): + self._initialize_project_if_not_exists(project_name=project) name = name or (obj.name if hasattr(obj, "name") else None) assert name, f"name needs to be provided for {obj}" @@ -337,7 +393,24 @@ def _apply_object( """ execute_snowflake_statement(conn, query) - self._set_last_updated_metadata(update_datetime, project) + if not isinstance(obj, Project): + self.apply_project( + self.get_project(name=project, allow_cache=False), commit=True + ) + + if not self.purge_feast_metadata: + self._set_last_updated_metadata(update_datetime, project) + + def apply_permission( + self, permission: Permission, project: str, commit: bool = True + ): + return self._apply_object( + "PERMISSIONS", + project, + "PERMISSION_NAME", + permission, + "PERMISSION_PROTO", + ) # delete operations def delete_data_source(self, name: str, project: str, commit: bool = True): @@ -418,6 +491,15 @@ def _delete_object( return cursor.rowcount + def delete_permission(self, name: str, project: str, commit: bool = True): + return self._delete_object( + "PERMISSIONS", + name, + project, + "PERMISSION_NAME", + PermissionNotFoundException, + ) + # get operations def get_data_source( self, name: str, project: str, allow_cache: bool = False @@ -493,6 +575,76 @@ def get_feature_view( FeatureViewNotFoundException, ) + def get_any_feature_view( + self, name: str, project: str, allow_cache: bool = False + ) -> BaseFeatureView: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_any_feature_view( + self.cached_registry_proto, name, project + ) + fv = self._get_object( + "FEATURE_VIEWS", + name, + project, + FeatureViewProto, + FeatureView, + "FEATURE_VIEW_NAME", + "FEATURE_VIEW_PROTO", + None, + ) + + if not fv: + fv = self._get_object( + "STREAM_FEATURE_VIEWS", + name, + project, + StreamFeatureViewProto, + StreamFeatureView, + "STREAM_FEATURE_VIEW_NAME", + "STREAM_FEATURE_VIEW_PROTO", + None, + ) + if not fv: + fv = self._get_object( + "ON_DEMAND_FEATURE_VIEWS", + name, + project, + OnDemandFeatureViewProto, + OnDemandFeatureView, + "ON_DEMAND_FEATURE_VIEW_NAME", + "ON_DEMAND_FEATURE_VIEW_PROTO", + FeatureViewNotFoundException, + ) + return fv + + def list_all_feature_views( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[BaseFeatureView]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_all_feature_views( + self.cached_registry_proto, project, tags + ) + + return ( + cast( + list[BaseFeatureView], + self.list_feature_views(project, allow_cache, tags), + ) + + cast( + list[BaseFeatureView], + self.list_stream_feature_views(project, allow_cache, tags), + ) + + cast( + list[BaseFeatureView], + self.list_on_demand_feature_views(project, allow_cache, tags), + ) + ) + def get_infra(self, project: str, allow_cache: bool = False) -> Infra: infra_object = self._get_object( "MANAGED_INFRA", @@ -594,7 +746,6 @@ def _get_object( proto_field_name: str, not_found_exception: Optional[Callable], ): - self._maybe_init_project_metadata(project) with GetSnowflakeConnection(self.registry_config) as conn: query = f""" SELECT @@ -616,6 +767,25 @@ def _get_object( else: return None + def get_permission( + self, name: str, project: str, allow_cache: bool = False + ) -> Permission: + if allow_cache: + self._refresh_cached_registry_if_necessary() + 
return proto_registry_utils.get_permission( + self.cached_registry_proto, name, project + ) + return self._get_object( + "PERMISSIONS", + name, + project, + PermissionProto, + Permission, + "PERMISSION_NAME", + "PERMISSION_PROTO", + PermissionNotFoundException, + ) + # list operations def list_data_sources( self, @@ -713,12 +883,15 @@ def list_on_demand_feature_views( ) def list_saved_datasets( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[SavedDataset]: if allow_cache: self._refresh_cached_registry_if_necessary() return proto_registry_utils.list_saved_datasets( - self.cached_registry_proto, project + self.cached_registry_proto, project, tags ) return self._list_objects( "SAVED_DATASETS", @@ -726,6 +899,7 @@ def list_saved_datasets( SavedDatasetProto, SavedDataset, "SAVED_DATASET_PROTO", + tags=tags, ) def list_stream_feature_views( @@ -749,7 +923,10 @@ def list_stream_feature_views( ) def list_validation_references( - self, project: str, allow_cache: bool = False + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, ) -> List[ValidationReference]: return self._list_objects( "VALIDATION_REFERENCES", @@ -757,6 +934,7 @@ def list_validation_references( ValidationReferenceProto, ValidationReference, "VALIDATION_REFERENCE_PROTO", + tags=tags, ) def _list_objects( @@ -768,7 +946,6 @@ def _list_objects( proto_field_name: str, tags: Optional[dict[str, str]] = None, ): - self._maybe_init_project_metadata(project) with GetSnowflakeConnection(self.registry_config) as conn: query = f""" SELECT @@ -790,6 +967,26 @@ def _list_objects( return objects return [] + def list_permissions( + self, + project: str, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Permission]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_permissions( + self.cached_registry_proto, project + ) + return self._list_objects( + "PERMISSIONS", + project, + PermissionProto, + Permission, + "PERMISSION_PROTO", + tags, + ) + def apply_materialization( self, feature_view: FeatureView, @@ -919,8 +1116,27 @@ def get_user_metadata( def proto(self) -> RegistryProto: r = RegistryProto() last_updated_timestamps = [] - projects = self._get_all_projects() - for project in projects: + + def process_project(project: Project): + nonlocal r, last_updated_timestamps + project_name = project.name + last_updated_timestamp = project.last_updated_timestamp + + try: + cached_project = self.get_project(project_name, True) + except ProjectObjectNotFoundException: + cached_project = None + + allow_cache = False + + if cached_project is not None: + allow_cache = ( + last_updated_timestamp <= cached_project.last_updated_timestamp + ) + + r.projects.extend([project.to_proto()]) + last_updated_timestamps.append(last_updated_timestamp) + for lister, registry_proto_field in [ (self.list_entities, r.entities), (self.list_feature_views, r.feature_views), @@ -930,51 +1146,31 @@ def proto(self) -> RegistryProto: (self.list_feature_services, r.feature_services), (self.list_saved_datasets, r.saved_datasets), (self.list_validation_references, r.validation_references), - (self.list_project_metadata, r.project_metadata), + (self.list_permissions, r.permissions), ]: - objs: List[Any] = lister(project) # type: ignore + objs: List[Any] = lister(project_name, allow_cache) # type: ignore if objs: obj_protos = [obj.to_proto() for obj in objs] for 
obj_proto in obj_protos: if "spec" in obj_proto.DESCRIPTOR.fields_by_name: - obj_proto.spec.project = project + obj_proto.spec.project = project_name else: - obj_proto.project = project + obj_proto.project = project_name registry_proto_field.extend(obj_protos) # This is suuuper jank. Because of https://github.com/feast-dev/feast/issues/2783, # the registry proto only has a single infra field, which we're currently setting as the "last" project. - r.infra.CopyFrom(self.get_infra(project).to_proto()) - last_updated_timestamps.append(self._get_last_updated_metadata(project)) + r.infra.CopyFrom(self.get_infra(project_name).to_proto()) + + projects_list = self.list_projects(allow_cache=False) + for project in projects_list: + process_project(project) if last_updated_timestamps: r.last_updated.FromDatetime(max(last_updated_timestamps)) return r - def _get_all_projects(self) -> Set[str]: - projects = set() - - base_tables = [ - "DATA_SOURCES", - "ENTITIES", - "FEATURE_VIEWS", - "ON_DEMAND_FEATURE_VIEWS", - "STREAM_FEATURE_VIEWS", - ] - - with GetSnowflakeConnection(self.registry_config) as conn: - for table in base_tables: - query = ( - f'SELECT DISTINCT project_id FROM {self.registry_path}."{table}"' - ) - df = execute_snowflake_statement(conn, query).fetch_pandas_all() - - for row in df.iterrows(): - projects.add(row[1]["PROJECT_ID"]) - - return projects - def _get_last_updated_metadata(self, project: str): with GetSnowflakeConnection(self.registry_config) as conn: query = f""" @@ -1078,3 +1274,99 @@ def _set_last_updated_metadata(self, last_updated: datetime, project: str): def commit(self): pass + + def apply_project( + self, + project: Project, + commit: bool = True, + ): + return self._apply_object( + "PROJECTS", project.name, "project_name", project, "project_proto" + ) + + def delete_project( + self, + name: str, + commit: bool = True, + ): + project = self.get_project(name, allow_cache=False) + if project: + with GetSnowflakeConnection(self.registry_config) as conn: + for table in { + "MANAGED_INFRA", + "SAVED_DATASETS", + "VALIDATION_REFERENCES", + "FEATURE_SERVICES", + "FEATURE_VIEWS", + "ON_DEMAND_FEATURE_VIEWS", + "STREAM_FEATURE_VIEWS", + "DATA_SOURCES", + "ENTITIES", + "PERMISSIONS", + "FEAST_METADATA", + "PROJECTS", + }: + query = f""" + DELETE FROM {self.registry_path}."{table}" + WHERE + project_id = '{name}' + """ + execute_snowflake_statement(conn, query) + return + + raise ProjectNotFoundException(name) + + def _get_project( + self, + name: str, + ) -> Project: + return self._get_object( + table="PROJECTS", + name=name, + project=name, + proto_class=ProjectProto, + python_class=Project, + id_field_name="project_name", + proto_field_name="project_proto", + not_found_exception=ProjectObjectNotFoundException, + ) + + def get_project( + self, + name: str, + allow_cache: bool = False, + ) -> Project: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.get_project(self.cached_registry_proto, name) + return self._get_project(name)
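The _list_projects implementation just below filters on tags via has_all_tags (the sql.py counterpart later in this diff calls it as utils.has_all_tags). The helper itself is not part of this diff; a plausible sketch of its contract, assuming an empty or missing filter matches every object:

    from typing import Optional

    def has_all_tags(
        object_tags: dict[str, str], filter_tags: Optional[dict[str, str]] = None
    ) -> bool:
        # No filter requested: everything matches.
        if not filter_tags:
            return True
        # Every requested key must be present with exactly the requested value.
        return all(object_tags.get(key) == value for key, value in filter_tags.items())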
+ + def _list_projects( + self, + tags: Optional[dict[str, str]], + ) -> List[Project]: + with GetSnowflakeConnection(self.registry_config) as conn: + query = f""" + SELECT project_proto FROM {self.registry_path}."PROJECTS" + """ + df = execute_snowflake_statement(conn, query).fetch_pandas_all() + if not df.empty: + objects = [] + for row in df.iterrows(): + obj = Project.from_proto( + ProjectProto.FromString(row[1]["project_proto"]) + ) + if has_all_tags(obj.tags, tags): + objects.append(obj) + return objects + return [] + + def list_projects( + self, + allow_cache: bool = False, + tags: Optional[dict[str, str]] = None, + ) -> List[Project]: + if allow_cache: + self._refresh_cached_registry_if_necessary() + return proto_registry_utils.list_projects(self.cached_registry_proto, tags) + return self._list_projects(tags) diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py index a2b16a3a09..6ae27acf4e 100644 --- a/sdk/python/feast/infra/registry/sql.py +++ b/sdk/python/feast/infra/registry/sql.py @@ -1,14 +1,16 @@ import logging import uuid +from concurrent.futures import ThreadPoolExecutor from datetime import datetime, timezone from enum import Enum from pathlib import Path -from typing import Any, Callable, Dict, List, Optional, Set, Union +from typing import Any, Callable, Dict, List, Optional, Union, cast -from pydantic import StrictStr +from pydantic import StrictInt, StrictStr from sqlalchemy import ( # type: ignore BigInteger, Column, + Index, LargeBinary, MetaData, String, @@ -30,6 +32,9 @@ EntityNotFoundException, FeatureServiceNotFoundException, FeatureViewNotFoundException, + PermissionNotFoundException, + ProjectNotFoundException, + ProjectObjectNotFoundException, SavedDatasetNotFound, ValidationReferenceNotFound, ) @@ -38,6 +43,8 @@ from feast.infra.infra_object import Infra from feast.infra.registry.caching_registry import CachingRegistry from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.permission import Permission +from feast.project import Project from feast.project_metadata import ProjectMetadata from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from feast.protos.feast.core.Entity_pb2 import Entity as EntityProto @@ -49,6 +56,8 @@ from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( OnDemandFeatureView as OnDemandFeatureViewProto, ) +from feast.protos.feast.core.Permission_pb2 import Permission as PermissionProto +from feast.protos.feast.core.Project_pb2 import Project as ProjectProto from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.protos.feast.core.SavedDataset_pb2 import SavedDataset as SavedDatasetProto from feast.protos.feast.core.StreamFeatureView_pb2 import ( @@ -64,91 +73,131 @@ metadata = MetaData() + +projects = Table( + "projects", + metadata, + Column("project_id", String(255), primary_key=True), + Column("project_name", String(255), nullable=False), + Column("last_updated_timestamp", BigInteger, nullable=False), + Column("project_proto", LargeBinary, nullable=False), +) + +Index("idx_projects_project_id", projects.c.project_id) + entities = Table( "entities", metadata, - Column("entity_name", String(50), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("entity_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("entity_proto", LargeBinary, nullable=False), ) +Index("idx_entities_project_id", entities.c.project_id) + data_sources = Table( "data_sources", metadata, Column("data_source_name", String(255), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("data_source_proto", LargeBinary, nullable=False), ) +Index("idx_data_sources_project_id", data_sources.c.project_id) + feature_views = Table( "feature_views", metadata, -
Column("feature_view_name", String(50), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("feature_view_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("materialized_intervals", LargeBinary, nullable=True), Column("feature_view_proto", LargeBinary, nullable=False), Column("user_metadata", LargeBinary, nullable=True), ) +Index("idx_feature_views_project_id", feature_views.c.project_id) + stream_feature_views = Table( "stream_feature_views", metadata, - Column("feature_view_name", String(50), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("feature_view_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("feature_view_proto", LargeBinary, nullable=False), Column("user_metadata", LargeBinary, nullable=True), ) +Index("idx_stream_feature_views_project_id", stream_feature_views.c.project_id) + on_demand_feature_views = Table( "on_demand_feature_views", metadata, - Column("feature_view_name", String(50), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("feature_view_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("feature_view_proto", LargeBinary, nullable=False), Column("user_metadata", LargeBinary, nullable=True), ) +Index("idx_on_demand_feature_views_project_id", on_demand_feature_views.c.project_id) + feature_services = Table( "feature_services", metadata, - Column("feature_service_name", String(50), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("feature_service_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("feature_service_proto", LargeBinary, nullable=False), ) +Index("idx_feature_services_project_id", feature_services.c.project_id) + saved_datasets = Table( "saved_datasets", metadata, - Column("saved_dataset_name", String(50), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("saved_dataset_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("saved_dataset_proto", LargeBinary, nullable=False), ) +Index("idx_saved_datasets_project_id", saved_datasets.c.project_id) + validation_references = Table( "validation_references", metadata, - Column("validation_reference_name", String(50), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("validation_reference_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("validation_reference_proto", LargeBinary, nullable=False), ) +Index("idx_validation_references_project_id", validation_references.c.project_id) managed_infra = Table( "managed_infra", metadata, - Column("infra_name", String(50), primary_key=True), - Column("project_id", String(50), primary_key=True), + Column("infra_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("last_updated_timestamp", BigInteger, nullable=False), Column("infra_proto", LargeBinary, nullable=False), ) 
+Index("idx_managed_infra_project_id", managed_infra.c.project_id) + +permissions = Table( + "permissions", + metadata, + Column("permission_name", String(255), primary_key=True), + Column("project_id", String(255), primary_key=True), + Column("last_updated_timestamp", BigInteger, nullable=False), + Column("permission_proto", LargeBinary, nullable=False), +) + +Index("idx_permissions_project_id", permissions.c.project_id) + class FeastMetadataKeys(Enum): LAST_UPDATED_TIMESTAMP = "last_updated_timestamp" @@ -158,12 +207,14 @@ class FeastMetadataKeys(Enum): feast_metadata = Table( "feast_metadata", metadata, - Column("project_id", String(50), primary_key=True), + Column("project_id", String(255), primary_key=True), Column("metadata_key", String(50), primary_key=True), Column("metadata_value", String(50), nullable=False), Column("last_updated_timestamp", BigInteger, nullable=False), ) +Index("idx_feast_metadata_project_id", feast_metadata.c.project_id) + logger = logging.getLogger(__name__) @@ -175,29 +226,91 @@ class SqlRegistryConfig(RegistryConfig): """ str: Path to metadata store. If registry_type is 'sql', then this is a database URL as expected by SQLAlchemy """ + read_path: Optional[StrictStr] = None + """ str: Read Path to metadata store if different from path. + If registry_type is 'sql', then this is a Read Endpoint for database URL. If not set, path will be used for read and write. """ + sqlalchemy_config_kwargs: Dict[str, Any] = {"echo": False} """ Dict[str, Any]: Extra arguments to pass to SQLAlchemy.create_engine. """ + cache_mode: StrictStr = "sync" + """ str: Cache mode type, Possible options are sync and thread(asynchronous caching using threading library)""" + + thread_pool_executor_worker_count: StrictInt = 0 + """ int: Number of worker threads to use for asynchronous caching in SQL Registry. If set to 0, it doesn't use ThreadPoolExecutor. 
""" + class SqlRegistry(CachingRegistry): def __init__( self, - registry_config: Optional[Union[RegistryConfig, SqlRegistryConfig]], + registry_config, project: str, repo_path: Optional[Path], ): - assert registry_config is not None, "SqlRegistry needs a valid registry_config" + assert registry_config is not None and isinstance( + registry_config, SqlRegistryConfig + ), "SqlRegistry needs a valid registry_config" - self.engine: Engine = create_engine( + self.registry_config = registry_config + + self.write_engine: Engine = create_engine( registry_config.path, **registry_config.sqlalchemy_config_kwargs ) - metadata.create_all(self.engine) + if registry_config.read_path: + self.read_engine: Engine = create_engine( + registry_config.read_path, + **registry_config.sqlalchemy_config_kwargs, + ) + else: + self.read_engine = self.write_engine + metadata.create_all(self.write_engine) + self.thread_pool_executor_worker_count = ( + registry_config.thread_pool_executor_worker_count + ) + self.purge_feast_metadata = registry_config.purge_feast_metadata + # Sync feast_metadata to projects table + # when purge_feast_metadata is set to True, Delete data from + # feast_metadata table and list_project_metadata will not return any data + self._sync_feast_metadata_to_projects_table() + if not self.purge_feast_metadata: + self._maybe_init_project_metadata(project) super().__init__( project=project, cache_ttl_seconds=registry_config.cache_ttl_seconds, cache_mode=registry_config.cache_mode, ) + def _sync_feast_metadata_to_projects_table(self): + feast_metadata_projects: set = [] + projects_set: set = [] + with self.read_engine.begin() as conn: + stmt = select(feast_metadata).where( + feast_metadata.c.metadata_key == FeastMetadataKeys.PROJECT_UUID.value + ) + rows = conn.execute(stmt).all() + for row in rows: + feast_metadata_projects.append(row._mapping["project_id"]) + + if len(feast_metadata_projects) > 0: + with self.read_engine.begin() as conn: + stmt = select(projects) + rows = conn.execute(stmt).all() + for row in rows: + projects_set.append(row._mapping["project_id"]) + + # Find object in feast_metadata_projects but not in projects + projects_to_sync = set(feast_metadata_projects) - set(projects_set) + for project_name in projects_to_sync: + self.apply_project(Project(name=project_name), commit=True) + + if self.purge_feast_metadata: + with self.write_engine.begin() as conn: + for project_name in feast_metadata_projects: + stmt = delete(feast_metadata).where( + feast_metadata.c.project_id == project_name + ) + conn.execute(stmt) + def teardown(self): for t in { entities, @@ -207,8 +320,9 @@ def teardown(self): on_demand_feature_views, saved_datasets, validation_references, + permissions, }: - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: stmt = delete(t) conn.execute(stmt) @@ -257,6 +371,61 @@ def _get_entity(self, name: str, project: str) -> Entity: not_found_exception=EntityNotFoundException, ) + def _get_any_feature_view(self, name: str, project: str) -> BaseFeatureView: + fv = self._get_object( + table=feature_views, + name=name, + project=project, + proto_class=FeatureViewProto, + python_class=FeatureView, + id_field_name="feature_view_name", + proto_field_name="feature_view_proto", + not_found_exception=None, + ) + + if not fv: + fv = self._get_object( + table=on_demand_feature_views, + name=name, + project=project, + proto_class=OnDemandFeatureViewProto, + python_class=OnDemandFeatureView, + id_field_name="feature_view_name", + proto_field_name="feature_view_proto", + 
not_found_exception=None, + ) + + if not fv: + fv = self._get_object( + table=stream_feature_views, + name=name, + project=project, + proto_class=StreamFeatureViewProto, + python_class=StreamFeatureView, + id_field_name="feature_view_name", + proto_field_name="feature_view_proto", + not_found_exception=FeatureViewNotFoundException, + ) + return fv + + def _list_all_feature_views( + self, project: str, tags: Optional[dict[str, str]] + ) -> List[BaseFeatureView]: + return ( + cast( + list[BaseFeatureView], + self._list_feature_views(project=project, tags=tags), + ) + + cast( + list[BaseFeatureView], + self._list_stream_feature_views(project=project, tags=tags), + ) + + cast( + list[BaseFeatureView], + self._list_on_demand_feature_views(project=project, tags=tags), + ) + ) + def _get_feature_view(self, name: str, project: str) -> FeatureView: return self._get_object( table=feature_views, @@ -319,13 +488,16 @@ def _get_validation_reference(self, name: str, project: str) -> ValidationRefere not_found_exception=ValidationReferenceNotFound, ) - def _list_validation_references(self, project: str) -> List[ValidationReference]: + def _list_validation_references( + self, project: str, tags: Optional[dict[str, str]] = None + ) -> List[ValidationReference]: return self._list_objects( table=validation_references, project=project, proto_class=ValidationReferenceProto, python_class=ValidationReference, proto_field_name="validation_reference_proto", + tags=tags, ) def _list_entities( @@ -414,7 +586,7 @@ def apply_feature_service( ) def delete_data_source(self, name: str, project: str, commit: bool = True): - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: stmt = delete(data_sources).where( data_sources.c.data_source_name == name, data_sources.c.project_id == project, @@ -447,13 +619,16 @@ def _list_feature_views( tags=tags, ) - def _list_saved_datasets(self, project: str) -> List[SavedDataset]: + def _list_saved_datasets( + self, project: str, tags: Optional[dict[str, str]] = None + ) -> List[SavedDataset]: return self._list_objects( saved_datasets, project, SavedDatasetProto, SavedDataset, "saved_dataset_proto", + tags=tags, ) def _list_on_demand_feature_views( @@ -469,7 +644,7 @@ def _list_on_demand_feature_views( ) def _list_project_metadata(self, project: str) -> List[ProjectMetadata]: - with self.engine.begin() as conn: + with self.read_engine.begin() as conn: stmt = select(feast_metadata).where( feast_metadata.c.project_id == project, ) @@ -588,7 +763,7 @@ def apply_user_metadata( table = self._infer_fv_table(feature_view) name = feature_view.name - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: stmt = select(table).where( getattr(table.c, "feature_view_name") == name, table.c.project_id == project, @@ -643,7 +818,7 @@ def get_user_metadata( table = self._infer_fv_table(feature_view) name = feature_view.name - with self.engine.begin() as conn: + with self.read_engine.begin() as conn: stmt = select(table).where(getattr(table.c, "feature_view_name") == name) row = conn.execute(stmt).first() if row: @@ -654,8 +829,27 @@ def get_user_metadata( def proto(self) -> RegistryProto: r = RegistryProto() last_updated_timestamps = [] - projects = self._get_all_projects() - for project in projects: + + def process_project(project: Project): + nonlocal r, last_updated_timestamps + project_name = project.name + last_updated_timestamp = project.last_updated_timestamp + + try: + cached_project = self.get_project(project_name, True) + except 
ProjectObjectNotFoundException: + cached_project = None + + allow_cache = False + + if cached_project is not None: + allow_cache = ( + last_updated_timestamp <= cached_project.last_updated_timestamp + ) + + r.projects.extend([project.to_proto()]) + last_updated_timestamps.append(last_updated_timestamp) + for lister, registry_proto_field in [ (self.list_entities, r.entities), (self.list_feature_views, r.feature_views), @@ -665,22 +859,31 @@ def proto(self) -> RegistryProto: (self.list_feature_services, r.feature_services), (self.list_saved_datasets, r.saved_datasets), (self.list_validation_references, r.validation_references), - (self.list_project_metadata, r.project_metadata), + (self.list_permissions, r.permissions), ]: - objs: List[Any] = lister(project) # type: ignore + objs: List[Any] = lister(project_name, allow_cache) # type: ignore if objs: obj_protos = [obj.to_proto() for obj in objs] for obj_proto in obj_protos: if "spec" in obj_proto.DESCRIPTOR.fields_by_name: - obj_proto.spec.project = project + obj_proto.spec.project = project_name else: - obj_proto.project = project + obj_proto.project = project_name registry_proto_field.extend(obj_protos) # This is suuuper jank. Because of https://github.com/feast-dev/feast/issues/2783, # the registry proto only has a single infra field, which we're currently setting as the "last" project. - r.infra.CopyFrom(self.get_infra(project).to_proto()) - last_updated_timestamps.append(self._get_last_updated_metadata(project)) + r.infra.CopyFrom(self.get_infra(project_name).to_proto()) + + projects_list = self.list_projects(allow_cache=False) + if self.thread_pool_executor_worker_count == 0: + for project in projects_list: + process_project(project) + else: + with ThreadPoolExecutor( + max_workers=self.thread_pool_executor_worker_count + ) as executor: + executor.map(process_project, projects_list) if last_updated_timestamps: r.last_updated.FromDatetime(max(last_updated_timestamps)) @@ -691,6 +894,17 @@ def commit(self): # This method is a no-op since we're always writing values eagerly to the db. 
pass + def _initialize_project_if_not_exists(self, project_name: str): + try: + self.get_project(project_name, allow_cache=True) + return + except ProjectObjectNotFoundException: + try: + self.get_project(project_name, allow_cache=False) + return + except ProjectObjectNotFoundException: + self.apply_project(Project(name=project_name), commit=True) + def _apply_object( self, table: Table, @@ -700,12 +914,15 @@ def _apply_object( proto_field_name: str, name: Optional[str] = None, ): - self._maybe_init_project_metadata(project) - + if not self.purge_feast_metadata: + self._maybe_init_project_metadata(project) + # Initialize project is necessary because FeatureStore object can apply objects individually without "feast apply" cli option + if not isinstance(obj, Project): + self._initialize_project_if_not_exists(project_name=project) name = name or (obj.name if hasattr(obj, "name") else None) assert name, f"name needs to be provided for {obj}" - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: update_datetime = _utc_now() update_time = int(update_datetime.timestamp()) stmt = select(table).where( @@ -721,12 +938,16 @@ def _apply_object( "saved_dataset_proto", "feature_view_proto", "feature_service_proto", + "permission_proto", + "project_proto", ]: deserialized_proto = self.deserialize_registry_values( row._mapping[proto_field_name], type(obj) ) obj.created_timestamp = ( - deserialized_proto.meta.created_timestamp.ToDatetime() + deserialized_proto.meta.created_timestamp.ToDatetime().replace( + tzinfo=timezone.utc + ) ) if isinstance(obj, (FeatureView, StreamFeatureView)): obj.update_materialization_intervals( @@ -768,11 +989,16 @@ def _apply_object( ) conn.execute(insert_stmt) - self._set_last_updated_metadata(update_datetime, project) + if not isinstance(obj, Project): + self.apply_project( + self.get_project(name=project, allow_cache=False), commit=True + ) + if not self.purge_feast_metadata: + self._set_last_updated_metadata(update_datetime, project) def _maybe_init_project_metadata(self, project): # Initialize project metadata if needed - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: update_datetime = _utc_now() update_time = int(update_datetime.timestamp()) stmt = select(feast_metadata).where( @@ -799,14 +1025,18 @@ def _delete_object( id_field_name: str, not_found_exception: Optional[Callable], ): - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: stmt = delete(table).where( getattr(table.c, id_field_name) == name, table.c.project_id == project ) rows = conn.execute(stmt) if rows.rowcount < 1 and not_found_exception: raise not_found_exception(name, project) - self._set_last_updated_metadata(_utc_now(), project) + self.apply_project( + self.get_project(name=project, allow_cache=False), commit=True + ) + if not self.purge_feast_metadata: + self._set_last_updated_metadata(_utc_now(), project) return rows.rowcount @@ -821,9 +1051,7 @@ def _get_object( proto_field_name: str, not_found_exception: Optional[Callable], ): - self._maybe_init_project_metadata(project) - - with self.engine.begin() as conn: + with self.read_engine.begin() as conn: stmt = select(table).where( getattr(table.c, id_field_name) == name, table.c.project_id == project ) @@ -845,8 +1073,7 @@ def _list_objects( proto_field_name: str, tags: Optional[dict[str, str]] = None, ): - self._maybe_init_project_metadata(project) - with self.engine.begin() as conn: + with self.read_engine.begin() as conn: stmt = select(table).where(table.c.project_id == 
project) rows = conn.execute(stmt).all() if rows: @@ -861,7 +1088,7 @@ def _list_objects( return [] def _set_last_updated_metadata(self, last_updated: datetime, project: str): - with self.engine.begin() as conn: + with self.write_engine.begin() as conn: stmt = select(feast_metadata).where( feast_metadata.c.metadata_key == FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value, @@ -895,7 +1122,7 @@ def _set_last_updated_metadata(self, last_updated: datetime, project: str): conn.execute(insert_stmt) def _get_last_updated_metadata(self, project: str): - with self.engine.begin() as conn: + with self.read_engine.begin() as conn: stmt = select(feast_metadata).where( feast_metadata.c.metadata_key == FeastMetadataKeys.LAST_UPDATED_TIMESTAMP.value, @@ -908,19 +1135,113 @@ def _get_last_updated_metadata(self, project: str): return datetime.fromtimestamp(update_time, tz=timezone.utc) - def _get_all_projects(self) -> Set[str]: - projects = set() - with self.engine.begin() as conn: - for table in { - entities, - data_sources, - feature_views, - on_demand_feature_views, - stream_feature_views, - }: - stmt = select(table) - rows = conn.execute(stmt).all() + def _get_permission(self, name: str, project: str) -> Permission: + return self._get_object( + table=permissions, + name=name, + project=project, + proto_class=PermissionProto, + python_class=Permission, + id_field_name="permission_name", + proto_field_name="permission_proto", + not_found_exception=PermissionNotFoundException, + ) + + def _list_permissions( + self, project: str, tags: Optional[dict[str, str]] + ) -> List[Permission]: + return self._list_objects( + permissions, + project, + PermissionProto, + Permission, + "permission_proto", + tags=tags, + ) + + def apply_permission( + self, permission: Permission, project: str, commit: bool = True + ): + return self._apply_object( + permissions, project, "permission_name", permission, "permission_proto" + ) + + def delete_permission(self, name: str, project: str, commit: bool = True): + with self.write_engine.begin() as conn: + stmt = delete(permissions).where( + permissions.c.permission_name == name, + permissions.c.project_id == project, + ) + rows = conn.execute(stmt) + if rows.rowcount < 1: + raise PermissionNotFoundException(name, project) + + def _list_projects( + self, + tags: Optional[dict[str, str]], + ) -> List[Project]: + with self.read_engine.begin() as conn: + stmt = select(projects) + rows = conn.execute(stmt).all() + if rows: + objects = [] for row in rows: - projects.add(row._mapping["project_id"]) + obj = Project.from_proto( + ProjectProto.FromString(row._mapping["project_proto"]) + ) + if utils.has_all_tags(obj.tags, tags): + objects.append(obj) + return objects + return [] - return projects + def _get_project( + self, + name: str, + ) -> Project: + return self._get_object( + table=projects, + name=name, + project=name, + proto_class=ProjectProto, + python_class=Project, + id_field_name="project_name", + proto_field_name="project_proto", + not_found_exception=ProjectObjectNotFoundException, + ) + + def apply_project( + self, + project: Project, + commit: bool = True, + ): + return self._apply_object( + projects, project.name, "project_name", project, "project_proto" + ) + + def delete_project( + self, + name: str, + commit: bool = True, + ): + project = self.get_project(name, allow_cache=False) + if project: + with self.write_engine.begin() as conn: + for t in { + managed_infra, + saved_datasets, + validation_references, + feature_services, + feature_views, + on_demand_feature_views, + 
stream_feature_views, + data_sources, + entities, + permissions, + feast_metadata, + projects, + }: + stmt = delete(t).where(t.c.project_id == name) + conn.execute(stmt) + return + + raise ProjectNotFoundException(name) diff --git a/sdk/python/feast/infra/utils/postgres/connection_utils.py b/sdk/python/feast/infra/utils/postgres/connection_utils.py index 3749fc2fc1..70148f3ee0 100644 --- a/sdk/python/feast/infra/utils/postgres/connection_utils.py +++ b/sdk/python/feast/infra/utils/postgres/connection_utils.py @@ -56,14 +56,14 @@ async def _get_connection_pool_async(config: PostgreSQLConfig) -> AsyncConnectio def _get_conninfo(config: PostgreSQLConfig) -> str: """Get the `conninfo` argument required for connection objects.""" - psycopg_config = { - "user": config.user, - "password": config.password, - "host": config.host, - "port": int(config.port), - "dbname": config.database, - } - return make_conninfo(conninfo="", **psycopg_config) + return make_conninfo( + conninfo="", + user=config.user, + password=config.password, + host=config.host, + port=int(config.port), + dbname=config.database, + ) def _get_conn_kwargs(config: PostgreSQLConfig) -> Dict[str, Any]: diff --git a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql index aa35caeac4..fc13332e4b 100644 --- a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql +++ b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_creation.sql @@ -1,3 +1,11 @@ +CREATE TABLE IF NOT EXISTS REGISTRY_PATH."PROJECTS" ( + project_id VARCHAR, + project_name VARCHAR NOT NULL, + last_updated_timestamp TIMESTAMP_LTZ NOT NULL, + project_proto BINARY NOT NULL, + PRIMARY KEY (project_id) +); + CREATE TABLE IF NOT EXISTS REGISTRY_PATH."DATA_SOURCES" ( data_source_name VARCHAR, project_id VARCHAR, @@ -80,4 +88,12 @@ CREATE TABLE IF NOT EXISTS REGISTRY_PATH."VALIDATION_REFERENCES" ( last_updated_timestamp TIMESTAMP_LTZ NOT NULL, validation_reference_proto BINARY NOT NULL, PRIMARY KEY (validation_reference_name, project_id) -) +); + +CREATE TABLE IF NOT EXISTS REGISTRY_PATH."PERMISSIONS" ( + permission_name VARCHAR, + project_id VARCHAR, + last_updated_timestamp TIMESTAMP_LTZ NOT NULL, + permission_proto BINARY NOT NULL, + PRIMARY KEY (permission_name, project_id) +); diff --git a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_deletion.sql b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_deletion.sql index a355c72062..780424abd1 100644 --- a/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_deletion.sql +++ b/sdk/python/feast/infra/utils/snowflake/registry/snowflake_table_deletion.sql @@ -17,3 +17,5 @@ DROP TABLE IF EXISTS REGISTRY_PATH."SAVED_DATASETS"; DROP TABLE IF EXISTS REGISTRY_PATH."STREAM_FEATURE_VIEWS"; DROP TABLE IF EXISTS REGISTRY_PATH."VALIDATION_REFERENCES" + +DROP TABLE IF EXISTS REGISTRY_PATH."PERMISSIONS" diff --git a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py index dd965c4bed..b9035b40db 100644 --- a/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py +++ b/sdk/python/feast/infra/utils/snowflake/snowflake_utils.py @@ -84,9 +84,11 @@ def __enter__(self): # https://docs.snowflake.com/en/user-guide/python-connector-example.html#using-key-pair-authentication-key-pair-rotation # https://docs.snowflake.com/en/user-guide/key-pair-auth.html#configuring-key-pair-authentication - if 
"private_key" in kwargs: + if "private_key" in kwargs or "private_key_content" in kwargs: kwargs["private_key"] = parse_private_key_path( - kwargs["private_key"], kwargs["private_key_passphrase"] + kwargs.get("private_key_passphrase"), + kwargs.get("private_key"), + kwargs.get("private_key_content"), ) try: @@ -510,13 +512,27 @@ def chunk_helper(lst: pd.DataFrame, n: int) -> Iterator[Tuple[int, pd.DataFrame] yield int(i / n), lst[i : i + n] -def parse_private_key_path(key_path: str, private_key_passphrase: str) -> bytes: - with open(key_path, "rb") as key: +def parse_private_key_path( + private_key_passphrase: str, + key_path: Optional[str] = None, + private_key_content: Optional[bytes] = None, +) -> bytes: + """Returns snowflake pkb by parsing and reading either from key path or private_key_content as byte string.""" + if private_key_content: p_key = serialization.load_pem_private_key( - key.read(), + private_key_content, password=private_key_passphrase.encode(), backend=default_backend(), ) + elif key_path: + with open(key_path, "rb") as key: + p_key = serialization.load_pem_private_key( + key.read(), + password=private_key_passphrase.encode(), + backend=default_backend(), + ) + else: + raise ValueError("Please provide key_path or private_key_content.") pkb = p_key.private_bytes( encoding=serialization.Encoding.DER, diff --git a/sdk/python/feast/offline_server.py b/sdk/python/feast/offline_server.py index be92620d68..ff3db579d0 100644 --- a/sdk/python/feast/offline_server.py +++ b/sdk/python/feast/offline_server.py @@ -3,29 +3,69 @@ import logging import traceback from datetime import datetime -from typing import Any, Dict, List +from typing import Any, Dict, List, cast import pyarrow as pa import pyarrow.flight as fl from feast import FeatureStore, FeatureView, utils +from feast.arrow_error_handler import arrow_server_error_handling_decorator from feast.feature_logging import FeatureServiceLoggingSource from feast.feature_view import DUMMY_ENTITY_NAME from feast.infra.offline_stores.offline_utils import get_offline_store_from_config +from feast.permissions.action import AuthzedAction +from feast.permissions.security_manager import assert_permissions +from feast.permissions.server.arrow import ( + AuthorizationMiddlewareFactory, + inject_user_details_decorator, +) +from feast.permissions.server.utils import ( + AuthManagerType, + ServerType, + init_auth_manager, + init_security_manager, + str_to_auth_manager_type, +) from feast.saved_dataset import SavedDatasetStorage logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) class OfflineServer(fl.FlightServerBase): def __init__(self, store: FeatureStore, location: str, **kwargs): - super(OfflineServer, self).__init__(location, **kwargs) + super(OfflineServer, self).__init__( + location, + middleware=self.arrow_flight_auth_middleware( + str_to_auth_manager_type(store.config.auth_config.type) + ), + **kwargs, + ) self._location = location # A dictionary of configured flights, e.g. API calls received and not yet served self.flights: Dict[str, Any] = {} self.store = store self.offline_store = get_offline_store_from_config(store.config.offline_store) + def arrow_flight_auth_middleware( + self, + auth_type: AuthManagerType, + ) -> dict[str, fl.ServerMiddlewareFactory]: + """ + A dictionary with the configured middlewares to support extracting the user details when the authorization manager is defined. + The authorization middleware key is `auth`. 
+ + Returns: + dict[str, fl.ServerMiddlewareFactory]: Optional dictionary of middlewares. If the authorization type is set to `NONE`, it returns an empty dict. + """ + + if auth_type == AuthManagerType.NONE: + return {} + + return { + "auth": AuthorizationMiddlewareFactory(), + } + @classmethod def descriptor_to_key(self, descriptor: fl.FlightDescriptor): return ( @@ -41,14 +81,8 @@ def _make_flight_info(self, key: Any, descriptor: fl.FlightDescriptor): return fl.FlightInfo(schema, descriptor, endpoints, -1, -1) - def get_flight_info( - self, context: fl.ServerCallContext, descriptor: fl.FlightDescriptor - ): - key = OfflineServer.descriptor_to_key(descriptor) - if key in self.flights: - return self._make_flight_info(key, descriptor) - raise KeyError("Flight not found.") - + @inject_user_details_decorator + @arrow_server_error_handling_decorator def list_flights(self, context: fl.ServerCallContext, criteria: bytes): for key, table in self.flights.items(): if key[1] is not None: @@ -58,8 +92,20 @@ def list_flights(self, context: fl.ServerCallContext, criteria: bytes): yield self._make_flight_info(key, descriptor) + @inject_user_details_decorator + @arrow_server_error_handling_decorator + def get_flight_info( + self, context: fl.ServerCallContext, descriptor: fl.FlightDescriptor + ): + key = OfflineServer.descriptor_to_key(descriptor) + if key in self.flights: + return self._make_flight_info(key, descriptor) + raise KeyError("Flight not found.") + # Expects to receive request parameters and stores them in the flights dictionary # Indexed by the unique command + @inject_user_details_decorator + @arrow_server_error_handling_decorator def do_put( self, context: fl.ServerCallContext, @@ -156,6 +202,8 @@ def _validate_do_get_parameters(self, command: dict): # Extracts the API parameters from the flights dictionary, delegates the execution to the FeatureStore instance # and returns the stream of data + @inject_user_details_decorator + @arrow_server_error_handling_decorator def do_get(self, context: fl.ServerCallContext, ticket: fl.Ticket): key = ast.literal_eval(ticket.ticket.decode()) if key not in self.flights: @@ -217,7 +265,15 @@ def offline_write_batch(self, command: dict, key: str): assert len(feature_views) == 1, "incorrect feature view" table = self.flights[key] self.offline_store.offline_write_batch( - self.store.config, feature_views[0], table, command["progress"] + self.store.config, + cast( + FeatureView, + assert_permissions( + feature_views[0], actions=[AuthzedAction.WRITE_OFFLINE] + ), + ), + table, + command["progress"], ) def _validate_write_logged_features_parameters(self, command: dict): @@ -234,6 +290,10 @@ def write_logged_features(self, command: dict, key: str): feature_service.logging_config is not None ), "feature service must have logging_config set" + assert_permissions( + resource=feature_service, + actions=[AuthzedAction.WRITE_OFFLINE], + ) self.offline_store.write_logged_features( config=self.store.config, data=table, @@ -260,10 +320,12 @@ def _validate_pull_all_from_table_or_query_parameters(self, command: dict): def pull_all_from_table_or_query(self, command: dict): self._validate_pull_all_from_table_or_query_parameters(command) + data_source = self.store.get_data_source(command["data_source_name"]) + assert_permissions(data_source, actions=[AuthzedAction.READ_OFFLINE]) return self.offline_store.pull_all_from_table_or_query( self.store.config, - self.store.get_data_source(command["data_source_name"]), + data_source, command["join_key_columns"], 
command["feature_name_columns"], command["timestamp_field"], @@ -287,10 +349,11 @@ def _validate_pull_latest_from_table_or_query_parameters(self, command: dict): def pull_latest_from_table_or_query(self, command: dict): self._validate_pull_latest_from_table_or_query_parameters(command) - + data_source = self.store.get_data_source(command["data_source_name"]) + assert_permissions(resource=data_source, actions=[AuthzedAction.READ_OFFLINE]) return self.offline_store.pull_latest_from_table_or_query( self.store.config, - self.store.get_data_source(command["data_source_name"]), + data_source, command["join_key_columns"], command["feature_name_columns"], command["timestamp_field"], @@ -299,6 +362,7 @@ def pull_latest_from_table_or_query(self, command: dict): utils.make_tzaware(datetime.fromisoformat(command["end_date"])), ) + @arrow_server_error_handling_decorator def list_actions(self, context): return [ ( @@ -343,6 +407,11 @@ def get_historical_features(self, command: dict, key: str): project=project, ) + for feature_view in feature_views: + assert_permissions( + resource=feature_view, actions=[AuthzedAction.READ_OFFLINE] + ) + retJob = self.offline_store.get_historical_features( config=self.store.config, feature_views=feature_views, @@ -377,6 +446,10 @@ def persist(self, command: dict, key: str): raise NotImplementedError data_source = self.store.get_data_source(command["data_source_name"]) + assert_permissions( + resource=data_source, + actions=[AuthzedAction.WRITE_OFFLINE], + ) storage = SavedDatasetStorage.from_data_source(data_source) ret_job.persist(storage, command["allow_overwrite"], command["timeout"]) except Exception as e: @@ -384,12 +457,6 @@ def persist(self, command: dict, key: str): traceback.print_exc() raise e - def do_action(self, context: fl.ServerCallContext, action: fl.Action): - pass - - def do_drop_dataset(self, dataset): - pass - def remove_dummies(fv: FeatureView) -> FeatureView: """ @@ -401,11 +468,23 @@ def remove_dummies(fv: FeatureView) -> FeatureView: return fv +def _init_auth_manager(store: FeatureStore): + auth_type = str_to_auth_manager_type(store.config.auth_config.type) + init_security_manager(auth_type=auth_type, fs=store) + init_auth_manager( + auth_type=auth_type, + server_type=ServerType.ARROW, + auth_config=store.config.auth_config, + ) + + def start_server( store: FeatureStore, host: str, port: int, ): + _init_auth_manager(store) + location = "grpc+tcp://{}:{}".format(host, port) server = OfflineServer(store, location) logger.info(f"Offline store server serving on {location}") diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index aeb1cc207a..1b75d23ed4 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -3,7 +3,7 @@ import inspect import warnings from types import FunctionType -from typing import Any, Optional, Union, get_type_hints +from typing import Any, List, Optional, Union, get_type_hints import dill import pandas as pd @@ -12,8 +12,9 @@ from feast.base_feature_view import BaseFeatureView from feast.data_source import RequestSource +from feast.entity import Entity from feast.errors import RegistryInferenceFailure, SpecifiedFeaturesNotPresentError -from feast.feature_view import FeatureView +from feast.feature_view import DUMMY_ENTITY_NAME, FeatureView from feast.feature_view_projection import FeatureViewProjection from feast.field import Field, from_value_type from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( @@ -61,7 +62,8 @@ 
class OnDemandFeatureView(BaseFeatureView): """ name: str - features: list[Field] + entities: Optional[List[str]] + features: List[Field] source_feature_view_projections: dict[str, FeatureViewProjection] source_request_sources: dict[str, RequestSource] feature_transformation: Union[ @@ -71,13 +73,15 @@ class OnDemandFeatureView(BaseFeatureView): description: str tags: dict[str, str] owner: str + write_to_online_store: bool def __init__( # noqa: C901 self, *, name: str, - schema: list[Field], - sources: list[ + entities: Optional[List[Entity]] = None, + schema: Optional[List[Field]] = None, + sources: List[ Union[ FeatureView, RequestSource, @@ -93,12 +97,14 @@ def __init__( # noqa: C901 description: str = "", tags: Optional[dict[str, str]] = None, owner: str = "", + write_to_online_store: bool = False, ): """ Creates an OnDemandFeatureView object. Args: name: The unique name of the on demand feature view. + entities (optional): The list of entities that this feature view is associated with. schema: The list of features in the output of the on demand feature view, after the transformation has been applied. sources: A map from input source names to the actual input sources, which may be @@ -113,6 +119,8 @@ def __init__( # noqa: C901 tags (optional): A dictionary of key-value pairs to store arbitrary metadata. owner (optional): The owner of the on demand feature view, typically the email of the primary maintainer. + write_to_online_store (optional): A boolean that indicates whether to write the on demand feature view to + the online store for faster retrieval. """ super().__init__( name=name, @@ -122,6 +130,8 @@ def __init__( # noqa: C901 owner=owner, ) + schema = schema or [] + self.entities = [e.name for e in entities] if entities else [DUMMY_ENTITY_NAME] self.mode = mode.lower() if self.mode not in {"python", "pandas", "substrait"}: @@ -152,12 +162,48 @@ def __init__( # noqa: C901 self.source_request_sources[odfv_source.name] = odfv_source elif isinstance(odfv_source, FeatureViewProjection): self.source_feature_view_projections[odfv_source.name] = odfv_source + else: self.source_feature_view_projections[odfv_source.name] = ( odfv_source.projection ) + features: List[Field] = [] + self.entity_columns = [] + + join_keys: List[str] = [] + if entities: + for entity in entities: + join_keys.append(entity.join_key) + # Ensure that entities have unique join keys. + if len(set(join_keys)) < len(join_keys): + raise ValueError( + "A feature view should not have entities that share a join key." + ) + + for field in schema: + if field.name in join_keys: + self.entity_columns.append(field) + + # Confirm that the inferred type matches the specified entity type, if it exists. + matching_entities = ( + [e for e in entities if e.join_key == field.name] + if entities + else [] + ) + assert len(matching_entities) == 1 + entity = matching_entities[0] + if entity.value_type != ValueType.UNKNOWN: + if from_value_type(entity.value_type) != field.dtype: + raise ValueError( + f"Entity {entity.name} has type {entity.value_type}, which does not match the inferred type {field.dtype}."
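+                            # Illustrative example: an Entity declared as ValueType.INT64 whose
+                            # schema Field was inferred as a string type fails this check.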
+ ) + else: + features.append(field) + + self.features = features self.feature_transformation = feature_transformation + self.write_to_online_store = write_to_online_store @property def proto_class(self) -> type[OnDemandFeatureViewProto]: @@ -174,8 +220,13 @@ def __copy__(self): description=self.description, tags=self.tags, owner=self.owner, + write_to_online_store=self.write_to_online_store, ) + fv.entities = self.entities + fv.features = self.features fv.projection = copy.copy(self.projection) + fv.entity_columns = copy.copy(self.entity_columns) + return fv def __eq__(self, other): @@ -184,20 +235,46 @@ def __eq__(self, other): "Comparisons should only involve OnDemandFeatureView class objects." ) - if not super().__eq__(other): - return False - + # Note, no longer evaluating the base feature view layer as ODFVs can have + # multiple datasources and a base_feature_view only has one source + # though maybe that shouldn't be true if ( self.source_feature_view_projections != other.source_feature_view_projections + or self.description != other.description or self.source_request_sources != other.source_request_sources or self.mode != other.mode or self.feature_transformation != other.feature_transformation + or self.write_to_online_store != other.write_to_online_store + or sorted(self.entity_columns) != sorted(other.entity_columns) ): return False return True + @property + def join_keys(self) -> List[str]: + """Returns a list of all the join keys.""" + return [entity.name for entity in self.entity_columns] + + @property + def schema(self) -> List[Field]: + return list(set(self.entity_columns + self.features)) + + def ensure_valid(self): + """ + Validates the state of this feature view locally. + + Raises: + ValueError: The On Demand feature view does not have an entity when trying to use write_to_online_store. 
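+
+        Illustrative example: an on-demand feature view created with
+        `write_to_online_store=True` but no `entities` fails this validation.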
+ """ + super().ensure_valid() + + if self.write_to_online_store and not self.entities: + raise ValueError( + "On Demand Feature views require an entity if write_to_online_store=True" + ) + def __hash__(self): return super().__hash__() @@ -216,7 +293,7 @@ def to_proto(self) -> OnDemandFeatureViewProto: sources = {} for source_name, fv_projection in self.source_feature_view_projections.items(): sources[source_name] = OnDemandSource( - feature_view_projection=fv_projection.to_proto() + feature_view_projection=fv_projection.to_proto(), ) for ( source_name, @@ -239,6 +316,10 @@ def to_proto(self) -> OnDemandFeatureViewProto: ) spec = OnDemandFeatureViewSpec( name=self.name, + entities=self.entities if self.entities else None, + entity_columns=[ + field.to_proto() for field in self.entity_columns if self.entity_columns + ], features=[feature.to_proto() for feature in self.features], sources=sources, feature_transformation=feature_transformation, @@ -246,6 +327,7 @@ def to_proto(self) -> OnDemandFeatureViewProto: description=self.description, tags=self.tags, owner=self.owner, + write_to_online_store=self.write_to_online_store, ) return OnDemandFeatureViewProto(spec=spec, meta=meta) @@ -335,6 +417,24 @@ def from_proto( else: raise ValueError("At least one transformation type needs to be provided") + if hasattr(on_demand_feature_view_proto.spec, "write_to_online_store"): + write_to_online_store = ( + on_demand_feature_view_proto.spec.write_to_online_store + ) + else: + write_to_online_store = False + if hasattr(on_demand_feature_view_proto.spec, "entities"): + entities = list(on_demand_feature_view_proto.spec.entities) + else: + entities = [] + if hasattr(on_demand_feature_view_proto.spec, "entity_columns"): + entity_columns = [ + Field.from_proto(field_proto) + for field_proto in on_demand_feature_view_proto.spec.entity_columns + ] + else: + entity_columns = [] + on_demand_feature_view_obj = cls( name=on_demand_feature_view_proto.spec.name, schema=[ @@ -346,12 +446,16 @@ def from_proto( ], sources=sources, feature_transformation=transformation, - mode=on_demand_feature_view_proto.spec.mode, + mode=on_demand_feature_view_proto.spec.mode or "pandas", description=on_demand_feature_view_proto.spec.description, tags=dict(on_demand_feature_view_proto.spec.tags), owner=on_demand_feature_view_proto.spec.owner, + write_to_online_store=write_to_online_store, ) + on_demand_feature_view_obj.entities = entities + on_demand_feature_view_obj.entity_columns = entity_columns + # FeatureViewProjections are not saved in the OnDemandFeatureView proto. # Create the default projection. on_demand_feature_view_obj.projection = FeatureViewProjection.from_definition( @@ -595,6 +699,7 @@ def get_requested_odfvs( def on_demand_feature_view( *, + entities: Optional[List[Entity]] = None, schema: list[Field], sources: list[ Union[ @@ -607,11 +712,13 @@ def on_demand_feature_view( description: str = "", tags: Optional[dict[str, str]] = None, owner: str = "", + write_to_online_store: bool = False, ): """ Creates an OnDemandFeatureView object with the given user function as udf. Args: + entities (Optional): The list of names of entities that this feature view is associated with. schema: The list of features in the output of the on demand feature view, after the transformation has been applied. sources: A map from input source names to the actual input sources, which may be @@ -622,6 +729,8 @@ def on_demand_feature_view( tags (optional): A dictionary of key-value pairs to store arbitrary metadata. 
owner (optional): The owner of the on demand feature view, typically the email of the primary maintainer. + write_to_online_store (optional): A boolean that indicates whether to write the on demand feature view to + the online store for faster retrieval. """ def mainify(obj) -> None: @@ -664,6 +773,8 @@ def decorator(user_function): description=description, tags=tags, owner=owner, + write_to_online_store=write_to_online_store, + entities=entities, ) functools.update_wrapper( wrapper=on_demand_feature_view_obj, wrapped=user_function diff --git a/sdk/python/feast/permissions/__init__.py b/sdk/python/feast/permissions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/permissions/action.py b/sdk/python/feast/permissions/action.py new file mode 100644 index 0000000000..0e85c1685f --- /dev/null +++ b/sdk/python/feast/permissions/action.py @@ -0,0 +1,40 @@ +import enum + + +class AuthzedAction(enum.Enum): + """ + Identify the type of action being secured by the permissions framework, according to the familiar CRUD and Feast terminology. + """ + + CREATE = "create" # Create an instance + DESCRIBE = "describe" # Access the instance state + UPDATE = "update" # Update the instance state + DELETE = "delete" # Delete an instance + READ_ONLINE = "read_online" # Read the online store only + READ_OFFLINE = "read_offline" # Read the offline store only + WRITE_ONLINE = "write_online" # Write to the online store only + WRITE_OFFLINE = "write_offline" # Write to the offline store only + + +# Alias for all available actions
ALL_ACTIONS = [a for a in AuthzedAction.__members__.values()] + +# Alias for all read actions +READ = [ + AuthzedAction.READ_OFFLINE, + AuthzedAction.READ_ONLINE, +] +# Alias for all write actions +WRITE = [ + AuthzedAction.WRITE_OFFLINE, + AuthzedAction.WRITE_ONLINE, +] + + +# Alias for CRUD actions +CRUD = [ + AuthzedAction.CREATE, + AuthzedAction.DESCRIBE, + AuthzedAction.UPDATE, + AuthzedAction.DELETE, +] diff --git a/sdk/python/feast/permissions/auth/__init__.py b/sdk/python/feast/permissions/auth/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/permissions/auth/auth_manager.py b/sdk/python/feast/permissions/auth/auth_manager.py new file mode 100644 index 0000000000..e608904567 --- /dev/null +++ b/sdk/python/feast/permissions/auth/auth_manager.py @@ -0,0 +1,68 @@ +from abc import ABC +from typing import Optional + +from .token_extractor import NoAuthTokenExtractor, TokenExtractor +from .token_parser import NoAuthTokenParser, TokenParser + + +class AuthManager(ABC): + """ + The authorization manager offers services to manage authorization tokens from client requests + to extract user details before injecting them in the security context. + """ + + _token_parser: TokenParser + _token_extractor: TokenExtractor + + def __init__(self, token_parser: TokenParser, token_extractor: TokenExtractor): + self._token_parser = token_parser + self._token_extractor = token_extractor + + @property + def token_parser(self) -> TokenParser: + return self._token_parser + + @property + def token_extractor(self) -> TokenExtractor: + return self._token_extractor + + +""" +The possibly empty global instance of `AuthManager`. +""" +_auth_manager: Optional[AuthManager] = None + + +def get_auth_manager() -> AuthManager: + """ + Return the global instance of `AuthManager`. + + Raises: + RuntimeError if the global instance is not set.
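+
+    A minimal usage sketch (illustrative, assuming the `AllowAll` manager defined below):
+
+        set_auth_manager(AllowAll())
+        manager = get_auth_manager()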
+ """ + global _auth_manager + if _auth_manager is None: + raise RuntimeError( + "AuthManager is not initialized. Call 'set_auth_manager' first." + ) + return _auth_manager + + +def set_auth_manager(auth_manager: AuthManager): + """ + Initialize the global instance of `AuthManager`. + """ + + global _auth_manager + _auth_manager = auth_manager + + +class AllowAll(AuthManager): + """ + An AuthManager not extracting nor parsing the authorization token. + """ + + def __init__(self): + super().__init__( + token_extractor=NoAuthTokenExtractor(), token_parser=NoAuthTokenParser() + ) diff --git a/sdk/python/feast/permissions/auth/auth_type.py b/sdk/python/feast/permissions/auth/auth_type.py new file mode 100644 index 0000000000..3fa34f97bd --- /dev/null +++ b/sdk/python/feast/permissions/auth/auth_type.py @@ -0,0 +1,11 @@ +import enum + + +class AuthType(enum.Enum): + """ + Identify the type of authorization. + """ + + NONE = "no_auth" + OIDC = "oidc" + KUBERNETES = "kubernetes" diff --git a/sdk/python/feast/permissions/auth/kubernetes_token_parser.py b/sdk/python/feast/permissions/auth/kubernetes_token_parser.py new file mode 100644 index 0000000000..c34ebf386d --- /dev/null +++ b/sdk/python/feast/permissions/auth/kubernetes_token_parser.py @@ -0,0 +1,114 @@ +import logging +import os + +import jwt +from kubernetes import client, config +from starlette.authentication import ( + AuthenticationError, +) + +from feast.permissions.auth.token_parser import TokenParser +from feast.permissions.user import User + +logger = logging.getLogger(__name__) + + +class KubernetesTokenParser(TokenParser): + """ + A `TokenParser` implementation to use Kubernetes RBAC resources to retrieve the user details. + The assumption is that the request header includes an authorization bearer with the token of the + client `ServiceAccount`. + By inspecting the role bindings, this `TokenParser` extracts the associated `Role`s. + + The client `ServiceAccount` is instead used as the user name, together with the current namespace. + """ + + def __init__(self): + config.load_incluster_config() + self.v1 = client.CoreV1Api() + self.rbac_v1 = client.RbacAuthorizationV1Api() + + async def user_details_from_access_token(self, access_token: str) -> User: + """ + Extract the service account from the token and search the roles associated with it. + + Returns: + User: Current user, with associated roles. The `username` is the `:` separated concatenation of `namespace` and `service account name`. + + Raises: + AuthenticationError if any error happens. + """ + sa_namespace, sa_name = _decode_token(access_token) + current_user = f"{sa_namespace}:{sa_name}" + logger.info( + f"Request received from ServiceAccount: {sa_name} in namespace: {sa_namespace}" + ) + + intra_communication_base64 = os.getenv("INTRA_COMMUNICATION_BASE64") + if sa_name is not None and sa_name == intra_communication_base64: + return User(username=sa_name, roles=[]) + else: + roles = self.get_roles(sa_namespace, sa_name) + logger.info(f"Roles for ServiceAccount {sa_name}: {roles}") + + return User(username=current_user, roles=roles) + + def get_roles(self, namespace: str, service_account_name: str) -> list[str]: + """ + Fetches the Kubernetes `Role`s associated to the given `ServiceAccount` in the given `namespace`. + + The research also includes the `ClusterRole`s, so the running deployment must be granted enough permissions to query + for such instances in all the namespaces. 
+ + Returns: + list[str]: Names of the `Role`s and `ClusterRole`s associated with the service account. No string manipulation is performed on the role name. + """ + role_bindings = self.rbac_v1.list_namespaced_role_binding(namespace) + cluster_role_bindings = self.rbac_v1.list_cluster_role_binding() + + roles: set[str] = set() + + for binding in role_bindings.items: + if binding.subjects is not None: + for subject in binding.subjects: + if ( + subject.kind == "ServiceAccount" + and subject.name == service_account_name + ): + roles.add(binding.role_ref.name) + + for binding in cluster_role_bindings.items: + if binding.subjects is not None: + for subject in binding.subjects: + if ( + subject.kind == "ServiceAccount" + and subject.name == service_account_name + and subject.namespace == namespace + ): + roles.add(binding.role_ref.name) + + return list(roles) + + +def _decode_token(access_token: str) -> tuple[str, str]: + """ + The `sub` portion of the decoded token includes the service account name in the format: `system:serviceaccount:NAMESPACE:SA_NAME` + + Returns: + str: the namespace name. + str: the `ServiceAccount` name. + """ + try: + decoded_token = jwt.decode(access_token, options={"verify_signature": False}) + if "sub" in decoded_token: + subject: str = decoded_token["sub"] + if len(subject.split(":")) != 4: + raise AuthenticationError( + f"Expecting 4 elements separated by : in the subject section, instead of {len(subject.split(':'))}." + ) + _, _, sa_namespace, sa_name = subject.split(":") + return (sa_namespace, sa_name) + else: + raise AuthenticationError("Missing sub section in received token.") + except jwt.DecodeError as e: + raise AuthenticationError(f"Error decoding JWT token: {e}") diff --git a/sdk/python/feast/permissions/auth/oidc_token_parser.py b/sdk/python/feast/permissions/auth/oidc_token_parser.py new file mode 100644 index 0000000000..ffff7e7ad3 --- /dev/null +++ b/sdk/python/feast/permissions/auth/oidc_token_parser.py @@ -0,0 +1,134 @@ +import logging +import os +from typing import Optional +from unittest.mock import Mock + +import jwt +from fastapi import Request +from fastapi.security import OAuth2AuthorizationCodeBearer +from jwt import PyJWKClient +from starlette.authentication import ( + AuthenticationError, +) + +from feast.permissions.auth.token_parser import TokenParser +from feast.permissions.auth_model import OidcAuthConfig +from feast.permissions.oidc_service import OIDCDiscoveryService +from feast.permissions.user import User + +logger = logging.getLogger(__name__) + + +class OidcTokenParser(TokenParser): + """ + A `TokenParser` to use an OIDC server to retrieve the user details. + Server settings are retrieved from the `auth` configuration of the feature store. + """ + + _auth_config: OidcAuthConfig + + def __init__(self, auth_config: OidcAuthConfig): + self._auth_config = auth_config + self.oidc_discovery_service = OIDCDiscoveryService( + self._auth_config.auth_discovery_url + ) + + async def _validate_token(self, access_token: str): + """ + Validate the token extracted from the header of the user request against the OAuth2 server.
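+        The parsing of the bearer token is delegated to FastAPI's `OAuth2AuthorizationCodeBearer`,
+        which rejects requests whose `Authorization` header is missing or malformed.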
+ """ + # FastAPI's OAuth2AuthorizationCodeBearer requires a Request type but actually uses only the headers field + # https://github.com/tiangolo/fastapi/blob/eca465f4c96acc5f6a22e92fd2211675ca8a20c8/fastapi/security/oauth2.py#L380 + request = Mock(spec=Request) + request.headers = {"Authorization": f"Bearer {access_token}"} + + oauth_2_scheme = OAuth2AuthorizationCodeBearer( + tokenUrl=self.oidc_discovery_service.get_token_url(), + authorizationUrl=self.oidc_discovery_service.get_authorization_url(), + refreshUrl=self.oidc_discovery_service.get_refresh_url(), + ) + + await oauth_2_scheme(request=request) + + async def user_details_from_access_token(self, access_token: str) -> User: + """ + Validate the access token then decode it to extract the user credential and roles. + + Returns: + User: Current user, with associated roles. + + Raises: + AuthenticationError if any error happens. + """ + + # check if intra server communication + user = self._get_intra_comm_user(access_token) + if user: + return user + + try: + await self._validate_token(access_token) + logger.debug("Token successfully validated.") + except Exception as e: + logger.error(f"Token validation failed: {e}") + raise AuthenticationError(f"Invalid token: {e}") + + optional_custom_headers = {"User-agent": "custom-user-agent"} + jwks_client = PyJWKClient( + self.oidc_discovery_service.get_jwks_url(), headers=optional_custom_headers + ) + + try: + signing_key = jwks_client.get_signing_key_from_jwt(access_token) + data = jwt.decode( + access_token, + signing_key.key, + algorithms=["RS256"], + audience="account", + options={ + "verify_aud": False, + "verify_signature": True, + "verify_exp": True, + }, + leeway=10, # accepts tokens generated up to 10 seconds in the past, in case of clock skew + ) + + if "preferred_username" not in data: + raise AuthenticationError( + "Missing preferred_username field in access token." + ) + current_user = data["preferred_username"] + + if "resource_access" not in data: + logger.warning("Missing resource_access field in access token.") + client_id = self._auth_config.client_id + if client_id not in data["resource_access"]: + logger.warning( + f"Missing resource_access.{client_id} field in access token. Defaulting to empty roles." 
+ ) + roles = [] + else: + roles = data["resource_access"][client_id]["roles"] + + logger.info(f"Extracted user {current_user} and roles {roles}") + return User(username=current_user, roles=roles) + except jwt.exceptions.InvalidTokenError: + logger.exception("Exception while parsing the token:") + raise AuthenticationError("Invalid token.") + + def _get_intra_comm_user(self, access_token: str) -> Optional[User]: + intra_communication_base64 = os.getenv("INTRA_COMMUNICATION_BASE64") + + if intra_communication_base64: + decoded_token = jwt.decode( + access_token, options={"verify_signature": False} + ) + if "preferred_username" in decoded_token: + preferred_username: str = decoded_token["preferred_username"] + if ( + preferred_username is not None + and preferred_username == intra_communication_base64 + ): + return User(username=preferred_username, roles=[]) + + return None diff --git a/sdk/python/feast/permissions/auth/token_extractor.py b/sdk/python/feast/permissions/auth/token_extractor.py new file mode 100644 index 0000000000..37779d7640 --- /dev/null +++ b/sdk/python/feast/permissions/auth/token_extractor.py @@ -0,0 +1,51 @@ +import re +from abc import ABC + +from starlette.authentication import ( + AuthenticationError, +) + + +class TokenExtractor(ABC): + """ + A class to extract the authorization token from a user request. + """ + + def extract_access_token(self, **kwargs) -> str: + """ + Extract the authorization token from a user request. + + The actual implementation has to specify what arguments have to be defined in the keyword args `kwargs`. + + Returns: + The extracted access token. + """ + raise NotImplementedError() + + def _extract_bearer_token(self, auth_header: str) -> str: + """ + Extract the bearer token from the authorization header value. + + Args: + auth_header: The full value of the authorization header. + + Returns: + str: The token value, without the `Bearer` part. + + Raises: + AuthenticationError if the authorization token does not match the `Bearer` scheme. + """ + pattern = r"(?i)Bearer .+" + if not bool(re.match(pattern, auth_header)): + raise AuthenticationError(f"Expected Bearer scheme, found {auth_header}") + _, access_token = auth_header.split() + return access_token + + +class NoAuthTokenExtractor(TokenExtractor): + """ + A `TokenExtractor` always returning an empty token + """ + + def extract_access_token(self, **kwargs) -> str: + return "" diff --git a/sdk/python/feast/permissions/auth/token_parser.py b/sdk/python/feast/permissions/auth/token_parser.py new file mode 100644 index 0000000000..f8f2aee44a --- /dev/null +++ b/sdk/python/feast/permissions/auth/token_parser.py @@ -0,0 +1,28 @@ +from abc import ABC, abstractmethod + +from feast.permissions.user import User + + +class TokenParser(ABC): + """ + A class to parse an access token to extract the user credential and roles. + """ + + @abstractmethod + async def user_details_from_access_token(self, access_token: str) -> User: + """ + Parse the access token and return the current user and the list of associated roles. + + Returns: + User: Current user, with associated roles.
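+
+        Illustrative example: a token carrying `preferred_username: alice` and roles `["reader"]`
+        would yield `User(username="alice", roles=["reader"])`.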
+ """ + raise NotImplementedError() + + +class NoAuthTokenParser(TokenParser): + """ + A `TokenParser` always returning an empty token + """ + + async def user_details_from_access_token(self, access_token: str, **kwargs) -> User: + return User(username="", roles=[]) diff --git a/sdk/python/feast/permissions/auth_model.py b/sdk/python/feast/permissions/auth_model.py new file mode 100644 index 0000000000..a3a3b32a4b --- /dev/null +++ b/sdk/python/feast/permissions/auth_model.py @@ -0,0 +1,26 @@ +from typing import Literal + +from feast.repo_config import FeastConfigBaseModel + + +class AuthConfig(FeastConfigBaseModel): + type: Literal["oidc", "kubernetes", "no_auth"] = "no_auth" + + +class OidcAuthConfig(AuthConfig): + auth_discovery_url: str + client_id: str + + +class OidcClientAuthConfig(OidcAuthConfig): + username: str + password: str + client_secret: str + + +class NoAuthConfig(AuthConfig): + pass + + +class KubernetesAuthConfig(AuthConfig): + pass diff --git a/sdk/python/feast/permissions/client/__init__.py b/sdk/python/feast/permissions/client/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/permissions/client/arrow_flight_auth_interceptor.py b/sdk/python/feast/permissions/client/arrow_flight_auth_interceptor.py new file mode 100644 index 0000000000..c3281bfa51 --- /dev/null +++ b/sdk/python/feast/permissions/client/arrow_flight_auth_interceptor.py @@ -0,0 +1,29 @@ +import pyarrow.flight as fl + +from feast.permissions.auth_model import AuthConfig +from feast.permissions.client.client_auth_token import get_auth_token + + +class FlightBearerTokenInterceptor(fl.ClientMiddleware): + def __init__(self, auth_config: AuthConfig): + super().__init__() + self.auth_config = auth_config + + def call_completed(self, exception): + pass + + def received_headers(self, headers): + pass + + def sending_headers(self): + access_token = get_auth_token(self.auth_config) + return {b"authorization": b"Bearer " + access_token.encode("utf-8")} + + +class FlightAuthInterceptorFactory(fl.ClientMiddlewareFactory): + def __init__(self, auth_config: AuthConfig): + super().__init__() + self.auth_config = auth_config + + def start_call(self, info): + return FlightBearerTokenInterceptor(self.auth_config) diff --git a/sdk/python/feast/permissions/client/auth_client_manager.py b/sdk/python/feast/permissions/client/auth_client_manager.py new file mode 100644 index 0000000000..2151cfb409 --- /dev/null +++ b/sdk/python/feast/permissions/client/auth_client_manager.py @@ -0,0 +1,49 @@ +import os +from abc import ABC, abstractmethod + +from feast.permissions.auth.auth_type import AuthType +from feast.permissions.auth_model import ( + AuthConfig, + KubernetesAuthConfig, + OidcClientAuthConfig, +) + + +class AuthenticationClientManager(ABC): + @abstractmethod + def get_token(self) -> str: + """Retrieves the token based on the authentication type configuration""" + pass + + +class AuthenticationClientManagerFactory(ABC): + def __init__(self, auth_config: AuthConfig): + self.auth_config = auth_config + + def get_auth_client_manager(self) -> AuthenticationClientManager: + from feast.permissions.client.intra_comm_authentication_client_manager import ( + IntraCommAuthClientManager, + ) + from feast.permissions.client.kubernetes_auth_client_manager import ( + KubernetesAuthClientManager, + ) + from feast.permissions.client.oidc_authentication_client_manager import ( + OidcAuthClientManager, + ) + + intra_communication_base64 = os.getenv("INTRA_COMMUNICATION_BASE64") + if 
intra_communication_base64: + return IntraCommAuthClientManager( + self.auth_config, intra_communication_base64 + ) + + if self.auth_config.type == AuthType.OIDC.value: + assert isinstance(self.auth_config, OidcClientAuthConfig) + return OidcAuthClientManager(self.auth_config) + elif self.auth_config.type == AuthType.KUBERNETES.value: + assert isinstance(self.auth_config, KubernetesAuthConfig) + return KubernetesAuthClientManager(self.auth_config) + else: + raise RuntimeError( + f"No Auth client manager implemented for the auth type:{self.auth_config.type}" + ) diff --git a/sdk/python/feast/permissions/client/client_auth_token.py b/sdk/python/feast/permissions/client/client_auth_token.py new file mode 100644 index 0000000000..68821e3f9c --- /dev/null +++ b/sdk/python/feast/permissions/client/client_auth_token.py @@ -0,0 +1,14 @@ +from feast.permissions.auth_model import ( + AuthConfig, +) +from feast.permissions.client.auth_client_manager import ( + AuthenticationClientManagerFactory, +) + + +def get_auth_token(auth_config: AuthConfig) -> str: + return ( + AuthenticationClientManagerFactory(auth_config) + .get_auth_client_manager() + .get_token() + ) diff --git a/sdk/python/feast/permissions/client/grpc_client_auth_interceptor.py b/sdk/python/feast/permissions/client/grpc_client_auth_interceptor.py new file mode 100644 index 0000000000..121735e351 --- /dev/null +++ b/sdk/python/feast/permissions/client/grpc_client_auth_interceptor.py @@ -0,0 +1,58 @@ +import logging + +import grpc + +from feast.errors import FeastError +from feast.permissions.auth_model import AuthConfig +from feast.permissions.client.client_auth_token import get_auth_token + +logger = logging.getLogger(__name__) + + +class GrpcClientAuthHeaderInterceptor( + grpc.UnaryUnaryClientInterceptor, + grpc.UnaryStreamClientInterceptor, + grpc.StreamUnaryClientInterceptor, + grpc.StreamStreamClientInterceptor, +): + def __init__(self, auth_type: AuthConfig): + self._auth_type = auth_type + + def intercept_unary_unary( + self, continuation, client_call_details, request_iterator + ): + return self._handle_call(continuation, client_call_details, request_iterator) + + def intercept_unary_stream( + self, continuation, client_call_details, request_iterator + ): + return self._handle_call(continuation, client_call_details, request_iterator) + + def intercept_stream_unary( + self, continuation, client_call_details, request_iterator + ): + return self._handle_call(continuation, client_call_details, request_iterator) + + def intercept_stream_stream( + self, continuation, client_call_details, request_iterator + ): + return self._handle_call(continuation, client_call_details, request_iterator) + + def _handle_call(self, continuation, client_call_details, request_iterator): + client_call_details = self._append_auth_header_metadata(client_call_details) + result = continuation(client_call_details, request_iterator) + if result.exception() is not None: + mapped_error = FeastError.from_error_detail(result.exception().details()) + if mapped_error is not None: + raise mapped_error + return result + + def _append_auth_header_metadata(self, client_call_details): + logger.debug( + "Intercepted the grpc api method call to inject Authorization header " + ) + metadata = client_call_details.metadata or [] + access_token = get_auth_token(self._auth_type) + metadata.append((b"authorization", b"Bearer " + access_token.encode("utf-8"))) + client_call_details = client_call_details._replace(metadata=metadata) + return client_call_details diff --git
a/sdk/python/feast/permissions/client/http_auth_requests_wrapper.py b/sdk/python/feast/permissions/client/http_auth_requests_wrapper.py new file mode 100644 index 0000000000..ba02fab8d8 --- /dev/null +++ b/sdk/python/feast/permissions/client/http_auth_requests_wrapper.py @@ -0,0 +1,22 @@ +import requests +from requests import Session + +from feast.permissions.auth.auth_type import AuthType +from feast.permissions.auth_model import ( + AuthConfig, +) +from feast.permissions.client.client_auth_token import get_auth_token + + +class AuthenticatedRequestsSession(Session): + def __init__(self, auth_token: str): + super().__init__() + self.headers.update({"Authorization": f"Bearer {auth_token}"}) + + +def get_http_auth_requests_session(auth_config: AuthConfig) -> Session: + if auth_config.type == AuthType.NONE.value: + request_session = requests.session() + else: + request_session = AuthenticatedRequestsSession(get_auth_token(auth_config)) + return request_session diff --git a/sdk/python/feast/permissions/client/intra_comm_authentication_client_manager.py b/sdk/python/feast/permissions/client/intra_comm_authentication_client_manager.py new file mode 100644 index 0000000000..30476316c1 --- /dev/null +++ b/sdk/python/feast/permissions/client/intra_comm_authentication_client_manager.py @@ -0,0 +1,32 @@ +import logging + +import jwt + +from feast.permissions.auth.auth_type import AuthType +from feast.permissions.auth_model import AuthConfig +from feast.permissions.client.auth_client_manager import AuthenticationClientManager + +logger = logging.getLogger(__name__) + + +class IntraCommAuthClientManager(AuthenticationClientManager): + def __init__(self, auth_config: AuthConfig, intra_communication_base64: str): + self.auth_config = auth_config + self.intra_communication_base64 = intra_communication_base64 + logger.debug(f"AuthConfig type set to {self.auth_config.type}") + + def get_token(self): + if self.auth_config.type == AuthType.OIDC.value: + payload = { + "preferred_username": f"{self.intra_communication_base64}", # Subject claim + } + elif self.auth_config.type == AuthType.KUBERNETES.value: + payload = { + "sub": f":::{self.intra_communication_base64}", # Subject claim + } + else: + raise RuntimeError( + f"No Auth client manager implemented for the auth type:{self.auth_config.type}" + ) + + return jwt.encode(payload, "") diff --git a/sdk/python/feast/permissions/client/kubernetes_auth_client_manager.py b/sdk/python/feast/permissions/client/kubernetes_auth_client_manager.py new file mode 100644 index 0000000000..9957ff93a7 --- /dev/null +++ b/sdk/python/feast/permissions/client/kubernetes_auth_client_manager.py @@ -0,0 +1,54 @@ +import logging +import os + +import jwt + +from feast.permissions.auth_model import KubernetesAuthConfig +from feast.permissions.client.auth_client_manager import AuthenticationClientManager + +logger = logging.getLogger(__name__) + + +class KubernetesAuthClientManager(AuthenticationClientManager): + def __init__(self, auth_config: KubernetesAuthConfig): + self.auth_config = auth_config + self.token_file_path = "/var/run/secrets/kubernetes.io/serviceaccount/token" + + def get_token(self): + intra_communication_base64 = os.getenv("INTRA_COMMUNICATION_BASE64") + # If intra server communication call + if intra_communication_base64: + payload = { + "sub": f":::{intra_communication_base64}", # Subject claim + } + + return jwt.encode(payload, "") + + try: + token = self._read_token_from_file() + return token + except Exception as e: + logger.info(f"Error reading token from 
file: {e}") + logger.info("Attempting to read token from environment variable.") + try: + token = self._read_token_from_env() + return token + except Exception as env_e: + logger.exception( + f"Error reading token from environment variable: {env_e}" + ) + raise env_e + + def _read_token_from_file(self): + try: + with open(self.token_file_path, "r") as file: + token = file.read().strip() + return token + except Exception as e: + raise e + + def _read_token_from_env(self): + token = os.getenv("LOCAL_K8S_TOKEN") + if not token: + raise KeyError("LOCAL_K8S_TOKEN environment variable is not set.") + return token diff --git a/sdk/python/feast/permissions/client/oidc_authentication_client_manager.py b/sdk/python/feast/permissions/client/oidc_authentication_client_manager.py new file mode 100644 index 0000000000..3ba1c1b6a7 --- /dev/null +++ b/sdk/python/feast/permissions/client/oidc_authentication_client_manager.py @@ -0,0 +1,56 @@ +import logging +import os + +import jwt +import requests + +from feast.permissions.auth_model import OidcClientAuthConfig +from feast.permissions.client.auth_client_manager import AuthenticationClientManager +from feast.permissions.oidc_service import OIDCDiscoveryService + +logger = logging.getLogger(__name__) + + +class OidcAuthClientManager(AuthenticationClientManager): + def __init__(self, auth_config: OidcClientAuthConfig): + self.auth_config = auth_config + + def get_token(self): + intra_communication_base64 = os.getenv("INTRA_COMMUNICATION_BASE64") + # If intra server communication call + if intra_communication_base64: + payload = { + "preferred_username": f"{intra_communication_base64}", # Subject claim + } + + return jwt.encode(payload, "") + + # Fetch the token endpoint from the discovery URL + token_endpoint = OIDCDiscoveryService( + self.auth_config.auth_discovery_url + ).get_token_url() + + token_request_body = { + "grant_type": "password", + "client_id": self.auth_config.client_id, + "client_secret": self.auth_config.client_secret, + "username": self.auth_config.username, + "password": self.auth_config.password, + } + headers = {"Content-Type": "application/x-www-form-urlencoded"} + + token_response = requests.post( + token_endpoint, data=token_request_body, headers=headers + ) + if token_response.status_code == 200: + access_token = token_response.json()["access_token"] + if not access_token: + logger.debug( + f"access_token is empty for the client_id=${self.auth_config.client_id}" + ) + raise RuntimeError("access token is empty") + return access_token + else: + raise RuntimeError( + f"""Failed to obtain oidc access token:url=[{token_endpoint}] {token_response.status_code} - {token_response.text}""" + ) diff --git a/sdk/python/feast/permissions/decision.py b/sdk/python/feast/permissions/decision.py new file mode 100644 index 0000000000..963befe831 --- /dev/null +++ b/sdk/python/feast/permissions/decision.py @@ -0,0 +1,114 @@ +import enum +import logging +from typing import Optional + +logger = logging.getLogger(__name__) + + +class DecisionStrategy(enum.Enum): + """ + The strategy to be adopted in case multiple permissions match an execution request. + """ + + UNANIMOUS = "unanimous" # All policies must evaluate to a positive decision for the final decision to be also positive. + AFFIRMATIVE = ( + "affirmative" # At least one policy must evaluate to a positive decision + ) + # The number of positive decisions must be greater than the number of negative decisions. 
+ # If the number of positive and negative decisions is the same, the final decision will be negative. + CONSENSUS = "consensus" + + +class DecisionEvaluator: + """ + A class to implement the decision logic, according to the selected strategy. + + Args: + num_of_voters: The expected number of votes to complete the decision. + + Examples: + Create the instance and specify the number of voters: + `evaluator = DecisionEvaluator(3)` + + For each vote that you receive, add a decision grant: `evaluator.add_grant(vote, message)` + and check if the decision process ended: `if evaluator.is_decided():` + Once decided, get the result and the failure explanations using: + `grant, explanations = evaluator.grant()` + """ + + def __init__( + self, + num_of_voters: int, + ): + # Only the AFFIRMATIVE strategy is currently available + decision_strategy = DecisionStrategy.AFFIRMATIVE + self.num_of_voters = num_of_voters + + self.grant_count = 0 + self.deny_count = 0 + + self.grant_quorum = ( + 1 + if decision_strategy == DecisionStrategy.AFFIRMATIVE + else num_of_voters + if decision_strategy == DecisionStrategy.UNANIMOUS + else num_of_voters // 2 + 1 + ) + self.deny_quorum = ( + num_of_voters + if decision_strategy == DecisionStrategy.AFFIRMATIVE + else 1 + if decision_strategy == DecisionStrategy.UNANIMOUS + else num_of_voters // 2 + (num_of_voters % 2) + ) + self.grant_decision: Optional[bool] = None + self.explanations: list[str] = [] + logger.info( + f"Decision evaluation started with grant_quorum={self.grant_quorum}, deny_quorum={self.deny_quorum}" + ) + + def is_decided(self) -> bool: + """ + Returns: + bool: `True` when the decision process completed (e.g. we added as many votes as specified in the `num_of_voters` creation argument). + """ + return self.grant_decision is not None + + def grant(self) -> tuple[bool, list[str]]: + """ + Returns: + tuple[bool, list[str]]: The tuple of decision computation: a `bool` with the computation decision and a `list[str]` with the + denial explanations (possibly empty). + """ + logger.info( + f"Decided grant is {self.grant_decision}, explanations={self.explanations}" + ) + return bool(self.grant_decision), self.explanations + + def add_grant(self, grant: bool, explanation: str): + """ + Add a single vote to the decision computation, with a possible denial reason. + If the evaluation process already ended, additional votes are discarded. + + Args: + grant: `True` if the decision is accepted, `False` otherwise. + explanation: Denial reason (not considered when `grant` is `True`).
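+
+        Illustrative example: `evaluator.add_grant(False, "role 'admin' is required")`
+        records a denial and its explanation.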
+ """ + + if self.is_decided(): + logger.warning("Grant decision already decided, discarding vote") + return + if grant: + self.grant_count += 1 + else: + self.deny_count += 1 + self.explanations.append(explanation) + + if self.grant_count >= self.grant_quorum: + self.grant_decision = True + if self.deny_count >= self.deny_quorum: + self.grant_decision = False + logger.debug( + f"After new grant: grants={self.grant_count}, deny_count={self.deny_count}, grant_decision={self.grant_decision}" + ) diff --git a/sdk/python/feast/permissions/decorator.py b/sdk/python/feast/permissions/decorator.py new file mode 100644 index 0000000000..3b9f7a4ae3 --- /dev/null +++ b/sdk/python/feast/permissions/decorator.py @@ -0,0 +1,42 @@ +import logging +from typing import Union + +from feast.permissions.action import AuthzedAction +from feast.permissions.matcher import is_a_feast_object +from feast.permissions.security_manager import assert_permissions + +logger = logging.getLogger(__name__) + + +def require_permissions(actions: Union[list[AuthzedAction], AuthzedAction]): + """ + A decorator to define the actions that are executed from the decorated class method and that must be protected + against unauthorized access. + + The first parameter of the protected method must be `self` + Args: + actions: The list of actions that must be permitted to the current user. + """ + + def require_permissions_decorator(func): + def permission_checker(*args, **kwargs): + logger.debug(f"permission_checker for {args}, {kwargs}") + resource = args[0] + if not is_a_feast_object(resource): + raise NotImplementedError( + f"The first argument is not of a managed type but {type(resource)}" + ) + + return assert_permissions( + resource=resource, + actions=actions, + ) + logger.debug( + f"Current User can invoke {actions} on {resource.name}:{type(resource)} " + ) + result = func(*args, **kwargs) + return result + + return permission_checker + + return require_permissions_decorator diff --git a/sdk/python/feast/permissions/enforcer.py b/sdk/python/feast/permissions/enforcer.py new file mode 100644 index 0000000000..d9855fef8c --- /dev/null +++ b/sdk/python/feast/permissions/enforcer.py @@ -0,0 +1,82 @@ +import logging + +from feast.errors import FeastPermissionError +from feast.feast_object import FeastObject +from feast.permissions.decision import DecisionEvaluator +from feast.permissions.permission import ( + AuthzedAction, + Permission, +) +from feast.permissions.user import User + +logger = logging.getLogger(__name__) + + +def enforce_policy( + permissions: list[Permission], + user: User, + resources: list[FeastObject], + actions: list[AuthzedAction], + filter_only: bool = False, +) -> list[FeastObject]: + """ + Define the logic to apply the configured permissions when a given action is requested on + a protected resource. + + If no permissions are defined, the result is to deny the execution. + + Args: + permissions: The configured set of `Permission`. + user: The current user. + resources: The resources for which we need to enforce authorized permission. + actions: The requested actions to be authorized. + filter_only: If `True`, it removes unauthorized resources from the returned value, otherwise it raises a `FeastPermissionError` the + first unauthorized resource. Defaults to `False`. + + Returns: + list[FeastObject]: A filtered list of the permitted resources. + + Raises: + FeastPermissionError: If the current user is not authorized to eecute the requested actions on the given resources (and `filter_only` is `False`). 
+ """ + if not permissions: + return resources + + _permitted_resources: list[FeastObject] = [] + for resource in resources: + logger.debug( + f"Enforcing permission policies for {type(resource).__name__}:{resource.name} to execute {actions}" + ) + matching_permissions = [ + p + for p in permissions + if p.match_resource(resource) and p.match_actions(actions) + ] + + if matching_permissions: + evaluator = DecisionEvaluator(len(matching_permissions)) + for p in matching_permissions: + permission_grant, permission_explanation = p.policy.validate_user( + user=user + ) + evaluator.add_grant( + permission_grant, + f"Permission {p.name} denied execution of {[a.value.upper() for a in actions]} to {type(resource).__name__}:{resource.name}: {permission_explanation}", + ) + + if evaluator.is_decided(): + grant, explanations = evaluator.grant() + if not grant and not filter_only: + logger.error(f"Permission denied: {','.join(explanations)}") + raise FeastPermissionError(",".join(explanations)) + if grant: + logger.debug( + f"Permission granted for {type(resource).__name__}:{resource.name}" + ) + _permitted_resources.append(resource) + break + else: + message = f"No permissions defined to manage {actions} on {type(resource)}/{resource.name}." + logger.exception(f"**PERMISSION NOT GRANTED**: {message}") + raise FeastPermissionError(message) + return _permitted_resources diff --git a/sdk/python/feast/permissions/matcher.py b/sdk/python/feast/permissions/matcher.py new file mode 100644 index 0000000000..337bfd5c57 --- /dev/null +++ b/sdk/python/feast/permissions/matcher.py @@ -0,0 +1,129 @@ +""" +This module provides utility matching functions. +""" + +import logging +import re +from typing import TYPE_CHECKING, Any, Optional +from unittest.mock import Mock + +from feast.permissions.action import AuthzedAction + +if TYPE_CHECKING: + from feast.feast_object import FeastObject + +logger = logging.getLogger(__name__) + + +def is_a_feast_object(resource: Any): + """ + A matcher to verify that a given object is one of the Feast objects defined in the `FeastObject` type. + + Args: + resource: An object instance to verify. + Returns: + `True` if the given object is one of the types in the FeastObject alias or a subclass of one of them. + """ + from feast.feast_object import ALL_RESOURCE_TYPES + + for t in ALL_RESOURCE_TYPES: + # Use isinstance to pass Mock validation + if isinstance(resource, t): + return True + return False + + +def _get_type(resource: "FeastObject") -> Any: + is_mock = isinstance(resource, Mock) + if not is_mock: + return type(resource) + else: + return getattr(resource, "_spec_class", None) + + +def resource_match_config( + resource: "FeastObject", + expected_types: list["FeastObject"], + name_pattern: Optional[str] = None, + required_tags: Optional[dict[str, str]] = None, +) -> bool: + """ + Match a given Feast object against the configured type, name and tags in a permission configuration. + + Args: + resource: A FeastObject instance to match agains the permission. + expected_types: The list of object types configured in the permission. Type match also includes all the sub-classes. + name_pattern: The optional name pattern filter configured in the permission. + required_tags: The optional dictionary of required tags configured in the permission. + + Returns: + bool: `True` if the resource matches the configured permission filters. 
+ """ + if resource is None: + logger.warning(f"None passed to {resource_match_config.__name__}") + return False + + _type = _get_type(resource) + if not is_a_feast_object(resource): + logger.warning(f"Given resource is not of a managed type but {_type}") + return False + + # mypy check ignored because of https://github.com/python/mypy/issues/11673, or it raises "Argument 2 to "isinstance" has incompatible type "tuple[Featu ..." + if not isinstance(resource, tuple(expected_types)): # type: ignore + logger.info( + f"Resource does not match any of the expected type {expected_types}" + ) + return False + + if name_pattern is not None: + if hasattr(resource, "name"): + if isinstance(resource.name, str): + match = bool(re.fullmatch(name_pattern, resource.name)) + if not match: + logger.info( + f"Resource name {resource.name} does not match pattern {name_pattern}" + ) + return False + else: + logger.warning( + f"Resource {resource} has no `name` attribute of unexpected type {type(resource.name)}" + ) + else: + logger.warning(f"Resource {resource} has no `name` attribute") + + if required_tags: + if hasattr(resource, "required_tags"): + if isinstance(resource.required_tags, dict): + for tag in required_tags.keys(): + required_value = required_tags.get(tag) + actual_value = resource.required_tags.get(tag) + if required_value != actual_value: + logger.info( + f"Unmatched value {actual_value} for required tag {tag}: expected {required_value}" + ) + return False + else: + logger.warning( + f"Resource {resource} has no `required_tags` attribute of unexpected type {type(resource.required_tags)}" + ) + else: + logger.warning(f"Resource {resource} has no `required_tags` attribute") + + return True + + +def actions_match_config( + requested_actions: list[AuthzedAction], + allowed_actions: list[AuthzedAction], +) -> bool: + """ + Match a list of actions against the actions defined in a permission configuration. + + Args: + requested_actions: A list of actions to be executed. + allowed_actions: The list of actions configured in the permission. + + Returns: + bool: `True` if all the given `requested_actions` are defined in the `allowed_actions`. + """ + return all(a in allowed_actions for a in requested_actions) diff --git a/sdk/python/feast/permissions/oidc_service.py b/sdk/python/feast/permissions/oidc_service.py new file mode 100644 index 0000000000..73d0ec8f1b --- /dev/null +++ b/sdk/python/feast/permissions/oidc_service.py @@ -0,0 +1,40 @@ +import requests + + +class OIDCDiscoveryService: + def __init__(self, discovery_url: str): + self.discovery_url = discovery_url + self._discovery_data = None # Initialize it lazily. 
+ + @property + def discovery_data(self): + """Lazily fetches and caches the OIDC discovery data.""" + if self._discovery_data is None: + self._discovery_data = self._fetch_discovery_data() + return self._discovery_data + + def _fetch_discovery_data(self) -> dict: + try: + response = requests.get(self.discovery_url) + response.raise_for_status() + return response.json() + except requests.RequestException as e: + raise RuntimeError( + f"Error fetching OIDC discovery response, discovery url - {self.discovery_url}, exception - {e} " + ) + + def get_authorization_url(self) -> str: + """Returns the authorization endpoint URL.""" + return self.discovery_data.get("authorization_endpoint") + + def get_token_url(self) -> str: + """Returns the token endpoint URL.""" + return self.discovery_data.get("token_endpoint") + + def get_jwks_url(self) -> str: + """Returns the jwks endpoint URL.""" + return self.discovery_data.get("jwks_uri") + + def get_refresh_url(self) -> str: + """Returns the refresh token URL (usually same as token URL).""" + return self.get_token_url() diff --git a/sdk/python/feast/permissions/permission.py b/sdk/python/feast/permissions/permission.py new file mode 100644 index 0000000000..9046abbfa9 --- /dev/null +++ b/sdk/python/feast/permissions/permission.py @@ -0,0 +1,270 @@ +import logging +import re +from abc import ABC +from datetime import datetime +from typing import TYPE_CHECKING, Any, Dict, Optional, Union + +from google.protobuf.json_format import MessageToJson + +from feast.importer import import_class +from feast.permissions.action import ALL_ACTIONS, AuthzedAction +from feast.permissions.matcher import actions_match_config, resource_match_config +from feast.permissions.policy import AllowAll, Policy +from feast.protos.feast.core.Permission_pb2 import Permission as PermissionProto +from feast.protos.feast.core.Permission_pb2 import PermissionMeta as PermissionMetaProto +from feast.protos.feast.core.Permission_pb2 import PermissionSpec as PermissionSpecProto + +if TYPE_CHECKING: + from feast.feast_object import FeastObject + +logger = logging.getLogger(__name__) + +""" +Constant to refer to all the managed types. +""" + + +class Permission(ABC): + """ + The Permission class defines the authorization policy to be validated whenever the identified actions are + requested on the matching resources. + + Attributes: + name: The permission name (can be duplicated, used for logging troubleshooting). + types: The list of protected resource types as defined by the `FeastObject` type. The match includes all the sub-classes of the given types. + Defaults to all managed types (e.g. the `ALL_RESOURCE_TYPES` constant) + name_pattern: A regex to match the resource name. Defaults to None, meaning that no name filtering is applied. + actions: The actions authorized by this permission. Defaults to `ALL_ACTIONS`. + policy: The policy to be applied to validate a client request. + tags: A dictionary of key-value pairs to store arbitrary metadata. + required_tags: Dictionary of key-value pairs that must match the resource tags. All these tags must
+    """
+
+    _name: str
+    _types: list["FeastObject"]
+    _name_pattern: Optional[str]
+    _actions: list[AuthzedAction]
+    _policy: Policy
+    _tags: Dict[str, str]
+    _required_tags: dict[str, str]
+    created_timestamp: Optional[datetime]
+    last_updated_timestamp: Optional[datetime]
+
+    def __init__(
+        self,
+        name: str,
+        types: Optional[Union[list["FeastObject"], "FeastObject"]] = None,
+        name_pattern: Optional[str] = None,
+        actions: Union[list[AuthzedAction], AuthzedAction] = ALL_ACTIONS,
+        policy: Policy = AllowAll,
+        tags: Optional[dict[str, str]] = None,
+        required_tags: Optional[dict[str, str]] = None,
+    ):
+        from feast.feast_object import ALL_RESOURCE_TYPES
+
+        if not types:
+            types = ALL_RESOURCE_TYPES
+        for t in types if isinstance(types, list) else [types]:
+            if t not in ALL_RESOURCE_TYPES:
+                raise ValueError(f"{t} is not one of the managed types")
+        if actions is None or not actions:
+            raise ValueError("The list 'actions' must be non-empty.")
+        if not policy:
+            raise ValueError("A 'policy' must be specified.")
+        self._name = name
+        self._types = types if isinstance(types, list) else [types]
+        self._name_pattern = _normalize_name_pattern(name_pattern)
+        self._actions = actions if isinstance(actions, list) else [actions]
+        self._policy = policy
+        self._tags = _normalize_tags(tags)
+        self._required_tags = _normalize_tags(required_tags)
+        self.created_timestamp = None
+        self.last_updated_timestamp = None
+
+    def __eq__(self, other):
+        if not isinstance(other, Permission):
+            raise TypeError("Comparisons should only involve Permission class objects.")
+
+        if (
+            self.name != other.name
+            or self.name_pattern != other.name_pattern
+            or self.tags != other.tags
+            or self.policy != other.policy
+            or self.actions != other.actions
+            or self.required_tags != other.required_tags
+        ):
+            return False
+
+        if set(self.types) != set(other.types):
+            return False
+
+        return True
+
+    def __hash__(self):
+        return hash(self.name)
+
+    def __str__(self):
+        return str(MessageToJson(self.to_proto()))
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    @property
+    def types(self) -> list["FeastObject"]:
+        return self._types
+
+    @property
+    def name_pattern(self) -> Optional[str]:
+        return self._name_pattern
+
+    @property
+    def actions(self) -> list[AuthzedAction]:
+        return self._actions
+
+    @property
+    def policy(self) -> Policy:
+        return self._policy
+
+    @property
+    def tags(self) -> Dict[str, str]:
+        return self._tags
+
+    @property
+    def required_tags(self) -> Dict[str, str]:
+        return self._required_tags
+
+    def match_resource(self, resource: "FeastObject") -> bool:
+        """
+        Returns:
+            `True` when the given resource matches the type, name and tags filters defined in the permission.
+        """
+        return resource_match_config(
+            resource=resource,
+            expected_types=self.types,
+            name_pattern=self.name_pattern,
+            required_tags=self.required_tags,
+        )
+
+    def match_actions(self, requested_actions: list[AuthzedAction]) -> bool:
+        """
+        Returns:
+            `True` when the given actions are included in the permitted actions.
+        """
+        return actions_match_config(
+            allowed_actions=self.actions,
+            requested_actions=requested_actions,
+        )
+
+    @staticmethod
+    def from_proto(permission_proto: PermissionProto) -> Any:
+        """
+        Converts permission config in protobuf spec to a Permission class object.
+
+        Args:
+            permission_proto: A protobuf representation of a Permission.
+
+        Returns:
+            A Permission class object.
+ """ + + types = [ + get_type_class_from_permission_type( + _PERMISSION_TYPES[PermissionSpecProto.Type.Name(t)] + ) + for t in permission_proto.spec.types + ] + actions = [ + AuthzedAction[PermissionSpecProto.AuthzedAction.Name(action)] + for action in permission_proto.spec.actions + ] + + permission = Permission( + permission_proto.spec.name, + types, + permission_proto.spec.name_pattern or None, + actions, + Policy.from_proto(permission_proto.spec.policy), + dict(permission_proto.spec.tags) or None, + dict(permission_proto.spec.required_tags) or None, + ) + + if permission_proto.meta.HasField("created_timestamp"): + permission.created_timestamp = ( + permission_proto.meta.created_timestamp.ToDatetime() + ) + if permission_proto.meta.HasField("last_updated_timestamp"): + permission.last_updated_timestamp = ( + permission_proto.meta.last_updated_timestamp.ToDatetime() + ) + + return permission + + def to_proto(self) -> PermissionProto: + """ + Converts a PermissionProto object to its protobuf representation. + """ + types = [ + PermissionSpecProto.Type.Value( + re.sub(r"([a-z])([A-Z])", r"\1_\2", t.__name__).upper() # type: ignore[union-attr] + ) + for t in self.types + ] + + actions = [ + PermissionSpecProto.AuthzedAction.Value(action.name) + for action in self.actions + ] + + permission_spec = PermissionSpecProto( + name=self.name, + types=types, + name_pattern=self.name_pattern if self.name_pattern is not None else "", + actions=actions, + policy=self.policy.to_proto(), + tags=self.tags, + required_tags=self.required_tags, + ) + + meta = PermissionMetaProto() + if self.created_timestamp: + meta.created_timestamp.FromDatetime(self.created_timestamp) + if self.last_updated_timestamp: + meta.last_updated_timestamp.FromDatetime(self.last_updated_timestamp) + + return PermissionProto(spec=permission_spec, meta=meta) + + +def _normalize_name_pattern(name_pattern: Optional[str]): + if name_pattern is not None: + return name_pattern.strip() + return None + + +def _normalize_tags(tags: Optional[dict[str, str]]): + if tags: + return { + k.strip(): v.strip() if isinstance(v, str) else v for k, v in tags.items() + } + return None + + +def get_type_class_from_permission_type(permission_type: str): + module_name, config_class_name = permission_type.rsplit(".", 1) + return import_class(module_name, config_class_name) + + +_PERMISSION_TYPES = { + "PROJECT": "feast.project.Project", + "FEATURE_VIEW": "feast.feature_view.FeatureView", + "ON_DEMAND_FEATURE_VIEW": "feast.on_demand_feature_view.OnDemandFeatureView", + "BATCH_FEATURE_VIEW": "feast.batch_feature_view.BatchFeatureView", + "STREAM_FEATURE_VIEW": "feast.stream_feature_view.StreamFeatureView", + "ENTITY": "feast.entity.Entity", + "FEATURE_SERVICE": "feast.feature_service.FeatureService", + "DATA_SOURCE": "feast.data_source.DataSource", + "VALIDATION_REFERENCE": "feast.saved_dataset.ValidationReference", + "SAVED_DATASET": "feast.saved_dataset.SavedDataset", + "PERMISSION": "feast.permissions.permission.Permission", +} diff --git a/sdk/python/feast/permissions/policy.py b/sdk/python/feast/permissions/policy.py new file mode 100644 index 0000000000..271448422f --- /dev/null +++ b/sdk/python/feast/permissions/policy.py @@ -0,0 +1,129 @@ +from abc import ABC, abstractmethod +from typing import Any + +from feast.permissions.user import User +from feast.protos.feast.core.Policy_pb2 import Policy as PolicyProto +from feast.protos.feast.core.Policy_pb2 import RoleBasedPolicy as RoleBasedPolicyProto + + +class Policy(ABC): + """ + An abstract class to 
+    """
+
+    @abstractmethod
+    def validate_user(self, user: User) -> tuple[bool, str]:
+        """
+        Validate the given user against the configured policy.
+
+        Args:
+            user: The current user.
+
+        Returns:
+            bool: `True` if the user matches the policy criteria, `False` otherwise.
+            str: A possibly empty explanation of the reason for not matching the configured policy.
+        """
+        raise NotImplementedError
+
+    @staticmethod
+    def from_proto(policy_proto: PolicyProto) -> Any:
+        """
+        Converts policy config in protobuf spec to a Policy class object.
+
+        Args:
+            policy_proto: A protobuf representation of a Policy.
+
+        Returns:
+            A Policy class object.
+        """
+        policy_type = policy_proto.WhichOneof("policy_type")
+        if policy_type == "role_based_policy":
+            return RoleBasedPolicy.from_proto(policy_proto)
+        if policy_type is None:
+            return None
+        raise NotImplementedError(f"policy_type is unsupported: {policy_type}")
+
+    @abstractmethod
+    def to_proto(self) -> PolicyProto:
+        """
+        Converts a Policy object to its protobuf representation.
+        """
+        raise NotImplementedError
+
+
+class RoleBasedPolicy(Policy):
+    """
+    A `Policy` implementation where the user roles must be enforced to grant access to the requested action.
+    At least one of the configured roles must be granted to the current user in order to allow the execution of the secured operation.
+
+    E.g., if the policy enforces roles `a` and `b`, the user must have at least one of them in order to satisfy the policy.
+    """
+
+    def __init__(
+        self,
+        roles: list[str],
+    ):
+        self.roles = roles
+
+    def __eq__(self, other):
+        if not isinstance(other, RoleBasedPolicy):
+            raise TypeError(
+                "Comparisons should only involve RoleBasedPolicy class objects."
+            )
+
+        if sorted(self.roles) != sorted(other.roles):
+            return False
+
+        return True
+
+    def get_roles(self) -> list[str]:
+        return self.roles
+
+    def validate_user(self, user: User) -> tuple[bool, str]:
+        """
+        Validate the given `user` against the configured roles.
+        """
+        result = user.has_matching_role(self.roles)
+        explain = "" if result else f"Requires roles {self.roles}"
+        return (result, explain)
+
+    @staticmethod
+    def from_proto(policy_proto: PolicyProto) -> Any:
+        """
+        Converts policy config in protobuf spec to a Policy class object.
+
+        Args:
+            policy_proto: A protobuf representation of a Policy.
+
+        Returns:
+            A RoleBasedPolicy class object.
+        """
+        return RoleBasedPolicy(roles=list(policy_proto.role_based_policy.roles))
+
+    def to_proto(self) -> PolicyProto:
+        """
+        Converts a RoleBasedPolicy object to its protobuf representation.
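+
+        Example (illustrative): `RoleBasedPolicy(roles=["admin"]).to_proto()` yields a
+        `PolicyProto` whose `role_based_policy.roles` contains `"admin"`.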
+ """ + + role_based_policy_proto = RoleBasedPolicyProto(roles=self.roles) + policy_proto = PolicyProto(role_based_policy=role_based_policy_proto) + + return policy_proto + + +def allow_all(self, user: User) -> tuple[bool, str]: + return True, "" + + +def empty_policy(self) -> PolicyProto: + return PolicyProto() + + +""" +A `Policy` instance to allow execution of any action to each user +""" +AllowAll = type( + "AllowAll", + (Policy,), + {Policy.validate_user.__name__: allow_all, Policy.to_proto.__name__: empty_policy}, +)() diff --git a/sdk/python/feast/permissions/security_manager.py b/sdk/python/feast/permissions/security_manager.py new file mode 100644 index 0000000000..cb8cafd5b9 --- /dev/null +++ b/sdk/python/feast/permissions/security_manager.py @@ -0,0 +1,229 @@ +import logging +import os +from contextvars import ContextVar +from typing import Callable, List, Optional, Union + +from feast.errors import FeastObjectNotFoundException +from feast.feast_object import FeastObject +from feast.infra.registry.base_registry import BaseRegistry +from feast.permissions.action import AuthzedAction +from feast.permissions.enforcer import enforce_policy +from feast.permissions.permission import Permission +from feast.permissions.user import User +from feast.project import Project + +logger = logging.getLogger(__name__) + + +class SecurityManager: + """ + The security manager it's the entry point to validate the configuration of the current user against the configured permission policies. + It is accessed and defined using the global functions `get_security_manager` and `set_security_manager` + """ + + def __init__( + self, + project: str, + registry: BaseRegistry, + ): + self._project = project + self._registry = registry + self._current_user: ContextVar[Optional[User]] = ContextVar( + "current_user", default=None + ) + + def set_current_user(self, current_user: User): + """ + Init the user for the current context. + """ + self._current_user.set(current_user) + + @property + def current_user(self) -> Optional[User]: + """ + Returns: + str: the possibly empty instance of the current user. `contextvars` module is used to ensure that each concurrent request has its own + individual user. + """ + return self._current_user.get() + + @property + def permissions(self) -> list[Permission]: + """ + Returns: + list[Permission]: the list of `Permission` configured in the Feast registry. + """ + return self._registry.list_permissions(project=self._project) + + def assert_permissions( + self, + resources: list[FeastObject], + actions: Union[AuthzedAction, List[AuthzedAction]], + filter_only: bool = False, + ) -> list[FeastObject]: + """ + Verify if the current user is authorized to execute the requested actions on the given resources. + + If no permissions are defined, the result is to deny the execution. + + Args: + resources: The resources for which we need to enforce authorized permission. + actions: The requested actions to be authorized. + filter_only: If `True`, it removes unauthorized resources from the returned value, otherwise it raises a `FeastPermissionError` the + first unauthorized resource. Defaults to `False`. + + Returns: + list[FeastObject]: A filtered list of the permitted resources, possibly empty. + + Raises: + FeastPermissionError: If the current user is not authorized to execute all the requested actions on the given resources. 
+ """ + return enforce_policy( + permissions=self.permissions, + user=self.current_user if self.current_user is not None else User("", []), + resources=resources, + actions=actions if isinstance(actions, list) else [actions], + filter_only=filter_only, + ) + + +def assert_permissions_to_update( + resource: FeastObject, + getter: Union[ + Callable[[str, str, bool], FeastObject], Callable[[str, bool], FeastObject] + ], + project: str, + allow_cache: bool = True, +) -> FeastObject: + """ + Verify if the current user is authorized to create or update the given resource. + If the resource already exists, the user must be granted permission to execute DESCRIBE and UPDATE actions. + If the resource does not exist, the user must be granted permission to execute the CREATE action. + + If no permissions are defined, the result is to deny the execution. + + Args: + resource: The resources for which we need to enforce authorized permission. + getter: The getter function used to retrieve the existing resource instance by name. + The signature must be `get_permission(self, name: str, project: str, allow_cache: bool)` + project: The project nane used in the getter function. + allow_cache: Whether to use cached data. Defaults to `True`. + Returns: + FeastObject: The original `resource`, if permitted. + + Raises: + FeastPermissionError: If the current user is not authorized to execute all the requested actions on the given resource or on the existing one. + """ + sm = get_security_manager() + if not is_auth_necessary(sm): + return resource + + actions = [AuthzedAction.DESCRIBE, AuthzedAction.UPDATE] + try: + if isinstance(resource, Project): + existing_resource = getter( + name=resource.name, + allow_cache=allow_cache, + ) # type: ignore[call-arg] + else: + existing_resource = getter( + name=resource.name, + project=project, + allow_cache=allow_cache, + ) # type: ignore[call-arg] + assert_permissions(resource=existing_resource, actions=actions) + except FeastObjectNotFoundException: + actions = [AuthzedAction.CREATE] + resource_to_update = assert_permissions(resource=resource, actions=actions) + return resource_to_update + + +def assert_permissions( + resource: FeastObject, + actions: Union[AuthzedAction, List[AuthzedAction]], +) -> FeastObject: + """ + A utility function to invoke the `assert_permissions` method on the global security manager. + + If no global `SecurityManager` is defined, the execution is permitted. + + Args: + resource: The resource for which we need to enforce authorized permission. + actions: The requested actions to be authorized. + Returns: + FeastObject: The original `resource`, if permitted. + + Raises: + FeastPermissionError: If the current user is not authorized to execute the requested actions on the given resources. + """ + + sm = get_security_manager() + if not is_auth_necessary(sm): + return resource + return sm.assert_permissions( # type: ignore[union-attr] + resources=[resource], actions=actions, filter_only=False + )[0] + + +def permitted_resources( + resources: list[FeastObject], + actions: Union[AuthzedAction, List[AuthzedAction]], +) -> list[FeastObject]: + """ + A utility function to invoke the `assert_permissions` method on the global security manager. + + If no global `SecurityManager` is defined, the execution is permitted. + + Args: + resources: The resources for which we need to enforce authorized permission. + actions: The requested actions to be authorized. + Returns: + list[FeastObject]]: A filtered list of the permitted resources, possibly empty. 
+ """ + + sm = get_security_manager() + if not is_auth_necessary(sm): + return resources + return sm.assert_permissions(resources=resources, actions=actions, filter_only=True) # type: ignore[union-attr] + + +""" +The possibly empty global instance of `SecurityManager`. +""" +_sm: Optional[SecurityManager] = None + + +def get_security_manager() -> Optional[SecurityManager]: + """ + Return the global instance of `SecurityManager`. + """ + global _sm + return _sm + + +def set_security_manager(sm: SecurityManager): + """ + Initialize the global instance of `SecurityManager`. + """ + + global _sm + _sm = sm + + +def no_security_manager(): + """ + Initialize the empty global instance of `SecurityManager`. + """ + + global _sm + _sm = None + + +def is_auth_necessary(sm: Optional[SecurityManager]) -> bool: + intra_communication_base64 = os.getenv("INTRA_COMMUNICATION_BASE64") + + return ( + sm is not None + and sm.current_user is not None + and sm.current_user.username != intra_communication_base64 + ) diff --git a/sdk/python/feast/permissions/server/__init__.py b/sdk/python/feast/permissions/server/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/permissions/server/arrow.py b/sdk/python/feast/permissions/server/arrow.py new file mode 100644 index 0000000000..bf517d94ac --- /dev/null +++ b/sdk/python/feast/permissions/server/arrow.py @@ -0,0 +1,91 @@ +""" +A module with utility functions and classes to support authorizing the Arrow Flight servers. +""" + +import asyncio +import functools +import logging +from typing import cast + +import pyarrow.flight as fl +from pyarrow.flight import ServerCallContext + +from feast.permissions.auth.auth_manager import ( + get_auth_manager, +) +from feast.permissions.security_manager import get_security_manager +from feast.permissions.user import User + +logger = logging.getLogger(__name__) + + +class AuthorizationMiddlewareFactory(fl.ServerMiddlewareFactory): + """ + A middleware factory to intercept the authorization header and propagate it to the authorization middleware. + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def start_call(self, info, headers): + """ + Intercept the authorization header and propagate it to the authorization middleware. + """ + access_token = get_auth_manager().token_extractor.extract_access_token( + headers=headers + ) + return AuthorizationMiddleware(access_token=access_token) + + +class AuthorizationMiddleware(fl.ServerMiddleware): + """ + A server middleware holding the authorization header and offering a method to extract the user credentials. + """ + + def __init__(self, access_token: str, *args, **kwargs): + super().__init__(*args, **kwargs) + self.access_token = access_token + + def call_completed(self, exception): + if exception: + logger.exception( + f"{AuthorizationMiddleware.__name__} encountered an exception: {exception}" + ) + + async def extract_user(self) -> User: + """ + Use the configured `TokenParser` to extract the user credentials. + """ + return await get_auth_manager().token_parser.user_details_from_access_token( + self.access_token + ) + + +def inject_user_details(context: ServerCallContext): + """ + Function to use in Arrow Flight endpoints (e.g. `do_get`, `do_put` and so on) to access the token extracted from the header, + extract the user details out of it and propagate them to the current security manager, if any. + + Args: + context: The endpoint context. 
+ """ + if context.get_middleware("auth") is None: + logger.warning("No `auth` middleware.") + return + + sm = get_security_manager() + if sm is not None: + auth_middleware = cast(AuthorizationMiddleware, context.get_middleware("auth")) + current_user = asyncio.run(auth_middleware.extract_user()) + logger.debug(f"User extracted: {current_user}") + + sm.set_current_user(current_user) + + +def inject_user_details_decorator(func): + @functools.wraps(func) + def wrapper(self, context, *args, **kwargs): + inject_user_details(context) + return func(self, context, *args, **kwargs) + + return wrapper diff --git a/sdk/python/feast/permissions/server/arrow_flight_token_extractor.py b/sdk/python/feast/permissions/server/arrow_flight_token_extractor.py new file mode 100644 index 0000000000..2378fa8b19 --- /dev/null +++ b/sdk/python/feast/permissions/server/arrow_flight_token_extractor.py @@ -0,0 +1,42 @@ +from starlette.authentication import ( + AuthenticationError, +) + +from feast.permissions.auth.token_extractor import TokenExtractor + + +class ArrowFlightTokenExtractor(TokenExtractor): + def extract_access_token(self, **kwargs) -> str: + """ + Token extractor for Arrow Flight requests. + + Requires a keyword argument called `headers` of type `dict`. + + Returns: + The extracted access token. + """ + + if "headers" not in kwargs: + raise ValueError("Missing keywork argument 'headers'") + if not isinstance(kwargs["headers"], dict): + raise ValueError( + f"The keywork argument 'headers' is not of the expected type {dict.__name__}" + ) + + access_token = None + headers = kwargs["headers"] + if isinstance(headers, dict): + for header in headers: + if header.lower() == "authorization": + # With Arrow Flight, the header value is a list and we take the 0-th element + if not isinstance(headers[header], list): + raise AuthenticationError( + f"Authorization header must be of type list, found {type(headers[header])}" + ) + + return self._extract_bearer_token(headers[header][0]) + + if access_token is None: + raise AuthenticationError("Missing authorization header") + + return access_token diff --git a/sdk/python/feast/permissions/server/grpc.py b/sdk/python/feast/permissions/server/grpc.py new file mode 100644 index 0000000000..9feea47a6c --- /dev/null +++ b/sdk/python/feast/permissions/server/grpc.py @@ -0,0 +1,33 @@ +import asyncio +import logging + +import grpc + +from feast.permissions.auth.auth_manager import ( + get_auth_manager, +) +from feast.permissions.security_manager import get_security_manager + +logger = logging.getLogger(__name__) + + +class AuthInterceptor(grpc.ServerInterceptor): + def intercept_service(self, continuation, handler_call_details): + sm = get_security_manager() + + if sm is not None: + auth_manager = get_auth_manager() + access_token = auth_manager.token_extractor.extract_access_token( + metadata=dict(handler_call_details.invocation_metadata) + ) + + logger.debug( + f"Fetching user details for token of length: {len(access_token)}" + ) + current_user = asyncio.run( + auth_manager.token_parser.user_details_from_access_token(access_token) + ) + logger.debug(f"User is: {current_user}") + sm.set_current_user(current_user) + + return continuation(handler_call_details) diff --git a/sdk/python/feast/permissions/server/grpc_token_extractor.py b/sdk/python/feast/permissions/server/grpc_token_extractor.py new file mode 100644 index 0000000000..d75a18ded5 --- /dev/null +++ b/sdk/python/feast/permissions/server/grpc_token_extractor.py @@ -0,0 +1,36 @@ +from starlette.authentication import ( 
+    AuthenticationError,
+)
+
+from feast.permissions.auth.token_extractor import TokenExtractor
+
+
+class GrpcTokenExtractor(TokenExtractor):
+    def extract_access_token(self, **kwargs) -> str:
+        """
+        Token extractor for gRPC server requests.
+
+        Requires a keyword argument called `metadata` of type `dict`.
+
+        Returns:
+            The extracted access token.
+        """
+
+        if "metadata" not in kwargs:
+            raise ValueError("Missing keyword argument 'metadata'")
+        if not isinstance(kwargs["metadata"], dict):
+            raise ValueError(
+                f"The keyword argument 'metadata' is not of the expected type {dict.__name__} but {type(kwargs['metadata'])}"
+            )
+
+        access_token = None
+        metadata = kwargs["metadata"]
+        if isinstance(metadata, dict):
+            for header in metadata:
+                if header.lower() == "authorization":
+                    return self._extract_bearer_token(metadata[header])
+
+        if access_token is None:
+            raise AuthenticationError("Missing authorization header")
+
+        return access_token
diff --git a/sdk/python/feast/permissions/server/rest.py b/sdk/python/feast/permissions/server/rest.py
new file mode 100644
index 0000000000..ecced3b34a
--- /dev/null
+++ b/sdk/python/feast/permissions/server/rest.py
@@ -0,0 +1,33 @@
+"""
+A module with utility functions to support authorizing the REST servers using the FastAPI framework.
+"""
+
+from typing import Any
+
+from fastapi.requests import Request
+
+from feast.permissions.auth.auth_manager import (
+    get_auth_manager,
+)
+from feast.permissions.security_manager import get_security_manager
+
+
+async def inject_user_details(request: Request) -> Any:
+    """
+    A function to extract the authorization token from a user request, extract the user details and propagate them to the
+    current security manager, if any.
+    """
+    sm = get_security_manager()
+    current_user = None
+    if sm is not None:
+        auth_manager = get_auth_manager()
+        access_token = auth_manager.token_extractor.extract_access_token(
+            request=request
+        )
+        current_user = await auth_manager.token_parser.user_details_from_access_token(
+            access_token=access_token
+        )
+
+        sm.set_current_user(current_user)
+
+    return current_user
diff --git a/sdk/python/feast/permissions/server/rest_token_extractor.py b/sdk/python/feast/permissions/server/rest_token_extractor.py
new file mode 100644
index 0000000000..894c18eedb
--- /dev/null
+++ b/sdk/python/feast/permissions/server/rest_token_extractor.py
@@ -0,0 +1,38 @@
+from fastapi.requests import Request
+from starlette.authentication import (
+    AuthenticationError,
+)
+
+from feast.permissions.auth.token_extractor import TokenExtractor
+
+
+class RestTokenExtractor(TokenExtractor):
+    def extract_access_token(self, **kwargs) -> str:
+        """
+        Token extractor for REST requests.
+
+        Requires a keyword argument called `request` of type `Request`.
+
+        Returns:
+            The extracted access token.
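+
+        Example (illustrative): for a request carrying the header `Authorization: Bearer <token>`,
+        the returned value is `<token>`.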
+ """ + + if "request" not in kwargs: + raise ValueError("Missing keywork argument 'request'") + if not isinstance(kwargs["request"], Request): + raise ValueError( + f"The keywork argument 'request' is not of the expected type {Request.__name__}" + ) + + access_token = None + request = kwargs["request"] + if isinstance(request, Request): + headers = request.headers + for header in headers: + if header.lower() == "authorization": + return self._extract_bearer_token(headers[header]) + + if access_token is None: + raise AuthenticationError("Missing authorization header") + + return access_token diff --git a/sdk/python/feast/permissions/server/utils.py b/sdk/python/feast/permissions/server/utils.py new file mode 100644 index 0000000000..cd72ae5820 --- /dev/null +++ b/sdk/python/feast/permissions/server/utils.py @@ -0,0 +1,131 @@ +""" +A module with utility functions to support the authorization management in Feast servers. +""" + +import enum +import logging + +import feast +from feast.permissions.auth.auth_manager import ( + AllowAll, + AuthManager, + set_auth_manager, +) +from feast.permissions.auth.oidc_token_parser import OidcTokenParser +from feast.permissions.auth.token_extractor import TokenExtractor +from feast.permissions.auth.token_parser import TokenParser +from feast.permissions.auth_model import ( + AuthConfig, + OidcAuthConfig, +) +from feast.permissions.security_manager import ( + SecurityManager, + no_security_manager, + set_security_manager, +) +from feast.permissions.server.arrow_flight_token_extractor import ( + ArrowFlightTokenExtractor, +) +from feast.permissions.server.grpc_token_extractor import GrpcTokenExtractor +from feast.permissions.server.rest_token_extractor import RestTokenExtractor + +logger = logging.getLogger(__name__) + + +class ServerType(enum.Enum): + """ + Identify the server type. + """ + + REST = "rest" + ARROW = "arrow" + GRPC = "grpc" # TODO RBAC: to be completed + + +class AuthManagerType(enum.Enum): + """ + Identify the type of authorization manager. + """ + + NONE = "no_auth" + OIDC = "oidc" + KUBERNETES = "kubernetes" + + +def str_to_auth_manager_type(value: str) -> AuthManagerType: + for t in AuthManagerType.__members__.values(): + if t.value.lower() == value.lower(): + return t + + logger.warning( + f"Requested unmanaged AuthManagerType of value {value}. Using NONE instead." + ) + return AuthManagerType.NONE + + +def init_security_manager(auth_type: AuthManagerType, fs: "feast.FeatureStore"): + """ + Initialize the global security manager. + Must be invoked at Feast server initialization time to create the `SecurityManager` instance. + + Args: + auth_type: The authorization manager type. + registry: The feature store registry. + """ + if auth_type == AuthManagerType.NONE: + no_security_manager() + else: + # TODO permissions from registry + set_security_manager( + SecurityManager( + project=fs.project, + registry=fs.registry, + ) + ) + + +def init_auth_manager( + server_type: ServerType, auth_type: AuthManagerType, auth_config: AuthConfig +): + """ + Initialize the global authorization manager. + Must be invoked at Feast server initialization time to create the `AuthManager` instance. + + Args: + server_type: The server type. + auth_type: The authorization manager type. + + Raises: + ValueError: If any input argument has an unmanaged value. 
+ """ + if auth_type == AuthManagerType.NONE: + set_auth_manager(AllowAll()) + else: + token_extractor: TokenExtractor + token_parser: TokenParser + + if server_type == ServerType.REST: + token_extractor = RestTokenExtractor() + elif server_type == ServerType.ARROW: + token_extractor = ArrowFlightTokenExtractor() + elif server_type == ServerType.GRPC: + token_extractor = GrpcTokenExtractor() + else: + raise ValueError(f"Unmanaged server type {server_type}") + + if auth_type == AuthManagerType.KUBERNETES: + from feast.permissions.auth.kubernetes_token_parser import ( + KubernetesTokenParser, + ) + + token_parser = KubernetesTokenParser() + elif auth_type == AuthManagerType.OIDC: + assert isinstance(auth_config, OidcAuthConfig) + token_parser = OidcTokenParser(auth_config=auth_config) + else: + raise ValueError(f"Unmanaged authorization manager type {auth_type}") + + auth_manager = AuthManager( + token_extractor=token_extractor, token_parser=token_parser + ) + set_auth_manager(auth_manager) diff --git a/sdk/python/feast/permissions/user.py b/sdk/python/feast/permissions/user.py new file mode 100644 index 0000000000..783b683de6 --- /dev/null +++ b/sdk/python/feast/permissions/user.py @@ -0,0 +1,38 @@ +import logging + +logger = logging.getLogger(__name__) + + +class User: + _username: str + _roles: list[str] + + def __init__(self, username: str, roles: list[str]): + self._username = username + self._roles = roles + + @property + def username(self): + return self._username + + @property + def roles(self): + return self._roles + + def has_matching_role(self, requested_roles: list[str]) -> bool: + """ + Verify that the user has at least one of the requested roles. + + Args: + requested_roles: The list of requested roles. + + Returns: + bool: `True` only if the user has any registered role and all the given roles are registered. + """ + logger.debug( + f"Check {self.username} has all {requested_roles}: currently {self.roles}" + ) + return any(role in self.roles for role in requested_roles) + + def __str__(self): + return f"{self.username} ({self.roles})" diff --git a/sdk/python/feast/project.py b/sdk/python/feast/project.py new file mode 100644 index 0000000000..d9ec45dcc9 --- /dev/null +++ b/sdk/python/feast/project.py @@ -0,0 +1,175 @@ +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from datetime import datetime, timezone +from typing import Dict, Optional + +from google.protobuf.json_format import MessageToJson +from typeguard import typechecked + +from feast.protos.feast.core.Project_pb2 import Project as ProjectProto +from feast.protos.feast.core.Project_pb2 import ProjectMeta as ProjectMetaProto +from feast.protos.feast.core.Project_pb2 import ProjectSpec as ProjectSpecProto +from feast.utils import _utc_now + + +@typechecked +class Project: + """ + Project is a collection of Feast Objects. Projects provide complete isolation of + feature stores at the infrastructure level. + + Attributes: + name: The unique name of the project. 
+        description: A human-readable description.
+        tags: A dictionary of key-value pairs to store arbitrary metadata.
+        owner: The owner of the project, typically the email of the primary maintainer.
+        created_timestamp: The time when the project was created.
+        last_updated_timestamp: The time when the project was last updated.
+    """
+
+    name: str
+    description: str
+    tags: Dict[str, str]
+    owner: str
+    created_timestamp: datetime
+    last_updated_timestamp: datetime
+
+    def __init__(
+        self,
+        *,
+        name: str,
+        description: str = "",
+        tags: Optional[Dict[str, str]] = None,
+        owner: str = "",
+        created_timestamp: Optional[datetime] = None,
+        last_updated_timestamp: Optional[datetime] = None,
+    ):
+        """
+        Creates a Project object.
+
+        Args:
+            name: The unique name of the project.
+            description (optional): A human-readable description.
+            tags (optional): A dictionary of key-value pairs to store arbitrary metadata.
+            owner (optional): The owner of the project, typically the email of the primary maintainer.
+            created_timestamp (optional): The time when the project was created. Defaults to the current UTC time.
+            last_updated_timestamp (optional): The time when the project was last updated. Defaults to the current UTC time.
+
+        Raises:
+            ValueError: Parameters are specified incorrectly.
+        """
+        self.name = name
+        self.description = description
+        self.tags = tags if tags is not None else {}
+        self.owner = owner
+        updated_time = _utc_now()
+        self.created_timestamp = created_timestamp or updated_time
+        self.last_updated_timestamp = last_updated_timestamp or updated_time
+
+    def __hash__(self) -> int:
+        return hash((self.name))
+
+    def __eq__(self, other):
+        if not isinstance(other, Project):
+            raise TypeError("Comparisons should only involve Project class objects.")
+
+        if (
+            self.name != other.name
+            or self.description != other.description
+            or self.tags != other.tags
+            or self.owner != other.owner
+            or self.created_timestamp != other.created_timestamp
+            or self.last_updated_timestamp != other.last_updated_timestamp
+        ):
+            return False
+
+        return True
+
+    def __str__(self):
+        return str(MessageToJson(self.to_proto()))
+
+    def __lt__(self, other):
+        return self.name < other.name
+
+    def is_valid(self):
+        """
+        Validates the state of this project locally.
+
+        Raises:
+            ValueError: The project does not have a name or the name is not valid.
+        """
+        if not self.name:
+            raise ValueError("The project does not have a name.")
+
+        from feast.repo_operations import is_valid_name
+
+        if not is_valid_name(self.name):
+            raise ValueError(
+                f"Project name, {self.name}, should only have "
+                f"alphanumerical values and underscores but not start with an underscore."
+            )
+
+    @classmethod
+    def from_proto(cls, project_proto: ProjectProto):
+        """
+        Creates a project from a protobuf representation of a project.
+
+        Args:
+            project_proto: A protobuf representation of a project.
+
+        Returns:
+            A Project object based on the project protobuf.
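+
+        Example (illustrative): `Project.from_proto(proto)` restores the name, description,
+        tags, and owner from `proto.spec`, and the UTC timestamps from `proto.meta`.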
+ """ + project = cls( + name=project_proto.spec.name, + description=project_proto.spec.description, + tags=dict(project_proto.spec.tags), + owner=project_proto.spec.owner, + ) + if project_proto.meta.HasField("created_timestamp"): + project.created_timestamp = ( + project_proto.meta.created_timestamp.ToDatetime().replace( + tzinfo=timezone.utc + ) + ) + if project_proto.meta.HasField("last_updated_timestamp"): + project.last_updated_timestamp = ( + project_proto.meta.last_updated_timestamp.ToDatetime().replace( + tzinfo=timezone.utc + ) + ) + + return project + + def to_proto(self) -> ProjectProto: + """ + Converts an project object to its protobuf representation. + + Returns: + An ProjectProto protobuf. + """ + meta = ProjectMetaProto() + if self.created_timestamp: + meta.created_timestamp.FromDatetime(self.created_timestamp) + if self.last_updated_timestamp: + meta.last_updated_timestamp.FromDatetime(self.last_updated_timestamp) + + spec = ProjectSpecProto( + name=self.name, + description=self.description, + tags=self.tags, + owner=self.owner, + ) + + return ProjectProto(spec=spec, meta=meta) diff --git a/sdk/python/feast/protos/__init__.py b/sdk/python/feast/protos/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/protos/feast/__init__.py b/sdk/python/feast/protos/feast/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/protos/feast/core/Aggregation_pb2.py b/sdk/python/feast/protos/feast/core/Aggregation_pb2.py new file mode 100644 index 0000000000..922f8f40aa --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Aggregation_pb2.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/Aggregation.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66\x65\x61st/core/Aggregation.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/duration.proto\"\x92\x01\n\x0b\x41ggregation\x12\x0e\n\x06\x63olumn\x18\x01 \x01(\t\x12\x10\n\x08\x66unction\x18\x02 \x01(\t\x12.\n\x0btime_window\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0eslide_interval\x18\x04 \x01(\x0b\x32\x19.google.protobuf.DurationBU\n\x10\x66\x65\x61st.proto.coreB\x10\x41ggregationProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Aggregation_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\020AggregationProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_AGGREGATION']._serialized_start=77 + _globals['_AGGREGATION']._serialized_end=223 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Aggregation_pb2.pyi b/sdk/python/feast/protos/feast/core/Aggregation_pb2.pyi new file mode 100644 index 0000000000..ceb8b1f813 --- /dev/null +++ 
b/sdk/python/feast/protos/feast/core/Aggregation_pb2.pyi @@ -0,0 +1,42 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Aggregation(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + COLUMN_FIELD_NUMBER: builtins.int + FUNCTION_FIELD_NUMBER: builtins.int + TIME_WINDOW_FIELD_NUMBER: builtins.int + SLIDE_INTERVAL_FIELD_NUMBER: builtins.int + column: builtins.str + function: builtins.str + @property + def time_window(self) -> google.protobuf.duration_pb2.Duration: ... + @property + def slide_interval(self) -> google.protobuf.duration_pb2.Duration: ... + def __init__( + self, + *, + column: builtins.str = ..., + function: builtins.str = ..., + time_window: google.protobuf.duration_pb2.Duration | None = ..., + slide_interval: google.protobuf.duration_pb2.Duration | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["slide_interval", b"slide_interval", "time_window", b"time_window"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["column", b"column", "function", b"function", "slide_interval", b"slide_interval", "time_window", b"time_window"]) -> None: ... + +global___Aggregation = Aggregation diff --git a/sdk/python/feast/protos/feast/core/Aggregation_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Aggregation_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Aggregation_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/DataFormat_pb2.py b/sdk/python/feast/protos/feast/core/DataFormat_pb2.py new file mode 100644 index 0000000000..a3883dcec3 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DataFormat_pb2.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/core/DataFormat.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66\x65\x61st/core/DataFormat.proto\x12\nfeast.core\"\xb2\x01\n\nFileFormat\x12>\n\x0eparquet_format\x18\x01 \x01(\x0b\x32$.feast.core.FileFormat.ParquetFormatH\x00\x12:\n\x0c\x64\x65lta_format\x18\x02 \x01(\x0b\x32\".feast.core.FileFormat.DeltaFormatH\x00\x1a\x0f\n\rParquetFormat\x1a\r\n\x0b\x44\x65ltaFormatB\x08\n\x06\x66ormat\"\xb7\x02\n\x0cStreamFormat\x12:\n\x0b\x61vro_format\x18\x01 \x01(\x0b\x32#.feast.core.StreamFormat.AvroFormatH\x00\x12<\n\x0cproto_format\x18\x02 \x01(\x0b\x32$.feast.core.StreamFormat.ProtoFormatH\x00\x12:\n\x0bjson_format\x18\x03 \x01(\x0b\x32#.feast.core.StreamFormat.JsonFormatH\x00\x1a!\n\x0bProtoFormat\x12\x12\n\nclass_path\x18\x01 \x01(\t\x1a!\n\nAvroFormat\x12\x13\n\x0bschema_json\x18\x01 \x01(\t\x1a!\n\nJsonFormat\x12\x13\n\x0bschema_json\x18\x01 \x01(\tB\x08\n\x06\x66ormatBT\n\x10\x66\x65\x61st.proto.coreB\x0f\x44\x61taFormatProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.DataFormat_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\017DataFormatProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_FILEFORMAT']._serialized_start=44 + _globals['_FILEFORMAT']._serialized_end=222 + _globals['_FILEFORMAT_PARQUETFORMAT']._serialized_start=182 + _globals['_FILEFORMAT_PARQUETFORMAT']._serialized_end=197 + _globals['_FILEFORMAT_DELTAFORMAT']._serialized_start=199 + _globals['_FILEFORMAT_DELTAFORMAT']._serialized_end=212 + _globals['_STREAMFORMAT']._serialized_start=225 + _globals['_STREAMFORMAT']._serialized_end=536 + _globals['_STREAMFORMAT_PROTOFORMAT']._serialized_start=423 + _globals['_STREAMFORMAT_PROTOFORMAT']._serialized_end=456 + _globals['_STREAMFORMAT_AVROFORMAT']._serialized_start=458 + _globals['_STREAMFORMAT_AVROFORMAT']._serialized_end=491 + _globals['_STREAMFORMAT_JSONFORMAT']._serialized_start=493 + _globals['_STREAMFORMAT_JSONFORMAT']._serialized_end=526 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/DataFormat_pb2.pyi b/sdk/python/feast/protos/feast/core/DataFormat_pb2.pyi new file mode 100644 index 0000000000..1f904e9886 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DataFormat_pb2.pyi @@ -0,0 +1,143 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2020 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class FileFormat(google.protobuf.message.Message): + """Defines the file format encoding the features/entity data in files""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class ParquetFormat(google.protobuf.message.Message): + """Defines options for the Parquet data format""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + + class DeltaFormat(google.protobuf.message.Message): + """Defines options for delta data format""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + + PARQUET_FORMAT_FIELD_NUMBER: builtins.int + DELTA_FORMAT_FIELD_NUMBER: builtins.int + @property + def parquet_format(self) -> global___FileFormat.ParquetFormat: ... + @property + def delta_format(self) -> global___FileFormat.DeltaFormat: ... + def __init__( + self, + *, + parquet_format: global___FileFormat.ParquetFormat | None = ..., + delta_format: global___FileFormat.DeltaFormat | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["delta_format", b"delta_format", "format", b"format", "parquet_format", b"parquet_format"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["delta_format", b"delta_format", "format", b"format", "parquet_format", b"parquet_format"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["format", b"format"]) -> typing_extensions.Literal["parquet_format", "delta_format"] | None: ... + +global___FileFormat = FileFormat + +class StreamFormat(google.protobuf.message.Message): + """Defines the data format encoding features/entity data in data streams""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class ProtoFormat(google.protobuf.message.Message): + """Defines options for the protobuf data format""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CLASS_PATH_FIELD_NUMBER: builtins.int + class_path: builtins.str + """Classpath to the generated Java Protobuf class that can be used to decode + Feature data from the obtained stream message + """ + def __init__( + self, + *, + class_path: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["class_path", b"class_path"]) -> None: ... + + class AvroFormat(google.protobuf.message.Message): + """Defines options for the avro data format""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SCHEMA_JSON_FIELD_NUMBER: builtins.int + schema_json: builtins.str + """Optional if used in a File DataSource as schema is embedded in avro file. + Specifies the schema of the Avro message as JSON string. + """ + def __init__( + self, + *, + schema_json: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["schema_json", b"schema_json"]) -> None: ... + + class JsonFormat(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SCHEMA_JSON_FIELD_NUMBER: builtins.int + schema_json: builtins.str + def __init__( + self, + *, + schema_json: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["schema_json", b"schema_json"]) -> None: ... + + AVRO_FORMAT_FIELD_NUMBER: builtins.int + PROTO_FORMAT_FIELD_NUMBER: builtins.int + JSON_FORMAT_FIELD_NUMBER: builtins.int + @property + def avro_format(self) -> global___StreamFormat.AvroFormat: ... + @property + def proto_format(self) -> global___StreamFormat.ProtoFormat: ... + @property + def json_format(self) -> global___StreamFormat.JsonFormat: ... + def __init__( + self, + *, + avro_format: global___StreamFormat.AvroFormat | None = ..., + proto_format: global___StreamFormat.ProtoFormat | None = ..., + json_format: global___StreamFormat.JsonFormat | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["avro_format", b"avro_format", "format", b"format", "json_format", b"json_format", "proto_format", b"proto_format"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["avro_format", b"avro_format", "format", b"format", "json_format", b"json_format", "proto_format", b"proto_format"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["format", b"format"]) -> typing_extensions.Literal["avro_format", "proto_format", "json_format"] | None: ... + +global___StreamFormat = StreamFormat diff --git a/sdk/python/feast/protos/feast/core/DataFormat_pb2_grpc.py b/sdk/python/feast/protos/feast/core/DataFormat_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DataFormat_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/DataSource_pb2.py b/sdk/python/feast/protos/feast/core/DataSource_pb2.py new file mode 100644 index 0000000000..b58c33a383 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DataSource_pb2.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/core/DataSource.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import DataFormat_pb2 as feast_dot_core_dot_DataFormat__pb2 +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 +from feast.protos.feast.core import Feature_pb2 as feast_dot_core_dot_Feature__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66\x65\x61st/core/DataSource.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1b\x66\x65\x61st/core/DataFormat.proto\x1a\x17\x66\x65\x61st/types/Value.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\"\xc0\x16\n\nDataSource\x12\x0c\n\x04name\x18\x14 \x01(\t\x12\x0f\n\x07project\x18\x15 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x17 \x01(\t\x12.\n\x04tags\x18\x18 \x03(\x0b\x32 .feast.core.DataSource.TagsEntry\x12\r\n\x05owner\x18\x19 \x01(\t\x12/\n\x04type\x18\x01 \x01(\x0e\x32!.feast.core.DataSource.SourceType\x12?\n\rfield_mapping\x18\x02 \x03(\x0b\x32(.feast.core.DataSource.FieldMappingEntry\x12\x17\n\x0ftimestamp_field\x18\x03 \x01(\t\x12\x1d\n\x15\x64\x61te_partition_column\x18\x04 \x01(\t\x12 \n\x18\x63reated_timestamp_column\x18\x05 \x01(\t\x12\x1e\n\x16\x64\x61ta_source_class_type\x18\x11 \x01(\t\x12,\n\x0c\x62\x61tch_source\x18\x1a \x01(\x0b\x32\x16.feast.core.DataSource\x12/\n\x04meta\x18\x32 \x01(\x0b\x32!.feast.core.DataSource.SourceMeta\x12:\n\x0c\x66ile_options\x18\x0b \x01(\x0b\x32\".feast.core.DataSource.FileOptionsH\x00\x12\x42\n\x10\x62igquery_options\x18\x0c \x01(\x0b\x32&.feast.core.DataSource.BigQueryOptionsH\x00\x12<\n\rkafka_options\x18\r \x01(\x0b\x32#.feast.core.DataSource.KafkaOptionsH\x00\x12@\n\x0fkinesis_options\x18\x0e \x01(\x0b\x32%.feast.core.DataSource.KinesisOptionsH\x00\x12\x42\n\x10redshift_options\x18\x0f \x01(\x0b\x32&.feast.core.DataSource.RedshiftOptionsH\x00\x12I\n\x14request_data_options\x18\x12 \x01(\x0b\x32).feast.core.DataSource.RequestDataOptionsH\x00\x12\x44\n\x0e\x63ustom_options\x18\x10 \x01(\x0b\x32*.feast.core.DataSource.CustomSourceOptionsH\x00\x12\x44\n\x11snowflake_options\x18\x13 \x01(\x0b\x32\'.feast.core.DataSource.SnowflakeOptionsH\x00\x12:\n\x0cpush_options\x18\x16 \x01(\x0b\x32\".feast.core.DataSource.PushOptionsH\x00\x12<\n\rspark_options\x18\x1b \x01(\x0b\x32#.feast.core.DataSource.SparkOptionsH\x00\x12<\n\rtrino_options\x18\x1e \x01(\x0b\x32#.feast.core.DataSource.TrinoOptionsH\x00\x12>\n\x0e\x61thena_options\x18# \x01(\x0b\x32$.feast.core.DataSource.AthenaOptionsH\x00\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x33\n\x11\x46ieldMappingEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x82\x01\n\nSourceMeta\x12:\n\x16\x65\x61rliestEventTimestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x14latestEventTimestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x65\n\x0b\x46ileOptions\x12+\n\x0b\x66ile_format\x18\x01 
\x01(\x0b\x32\x16.feast.core.FileFormat\x12\x0b\n\x03uri\x18\x02 \x01(\t\x12\x1c\n\x14s3_endpoint_override\x18\x03 \x01(\t\x1a/\n\x0f\x42igQueryOptions\x12\r\n\x05table\x18\x01 \x01(\t\x12\r\n\x05query\x18\x02 \x01(\t\x1a,\n\x0cTrinoOptions\x12\r\n\x05table\x18\x01 \x01(\t\x12\r\n\x05query\x18\x02 \x01(\t\x1a\xae\x01\n\x0cKafkaOptions\x12\x1f\n\x17kafka_bootstrap_servers\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x30\n\x0emessage_format\x18\x03 \x01(\x0b\x32\x18.feast.core.StreamFormat\x12<\n\x19watermark_delay_threshold\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x1a\x66\n\x0eKinesisOptions\x12\x0e\n\x06region\x18\x01 \x01(\t\x12\x13\n\x0bstream_name\x18\x02 \x01(\t\x12/\n\rrecord_format\x18\x03 \x01(\x0b\x32\x18.feast.core.StreamFormat\x1aQ\n\x0fRedshiftOptions\x12\r\n\x05table\x18\x01 \x01(\t\x12\r\n\x05query\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x04 \x01(\t\x1aT\n\rAthenaOptions\x12\r\n\x05table\x18\x01 \x01(\t\x12\r\n\x05query\x18\x02 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x61ta_source\x18\x04 \x01(\t\x1aX\n\x10SnowflakeOptions\x12\r\n\x05table\x18\x01 \x01(\t\x12\r\n\x05query\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x04 \x01(\tJ\x04\x08\x05\x10\x06\x1aO\n\x0cSparkOptions\x12\r\n\x05table\x18\x01 \x01(\t\x12\r\n\x05query\x18\x02 \x01(\t\x12\x0c\n\x04path\x18\x03 \x01(\t\x12\x13\n\x0b\x66ile_format\x18\x04 \x01(\t\x1a,\n\x13\x43ustomSourceOptions\x12\x15\n\rconfiguration\x18\x01 \x01(\x0c\x1a\xf7\x01\n\x12RequestDataOptions\x12Z\n\x11\x64\x65precated_schema\x18\x02 \x03(\x0b\x32?.feast.core.DataSource.RequestDataOptions.DeprecatedSchemaEntry\x12)\n\x06schema\x18\x03 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x1aT\n\x15\x44\x65precatedSchemaEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum:\x02\x38\x01J\x04\x08\x01\x10\x02\x1a\x13\n\x0bPushOptionsJ\x04\x08\x01\x10\x02\"\xf8\x01\n\nSourceType\x12\x0b\n\x07INVALID\x10\x00\x12\x0e\n\nBATCH_FILE\x10\x01\x12\x13\n\x0f\x42\x41TCH_SNOWFLAKE\x10\x08\x12\x12\n\x0e\x42\x41TCH_BIGQUERY\x10\x02\x12\x12\n\x0e\x42\x41TCH_REDSHIFT\x10\x05\x12\x10\n\x0cSTREAM_KAFKA\x10\x03\x12\x12\n\x0eSTREAM_KINESIS\x10\x04\x12\x11\n\rCUSTOM_SOURCE\x10\x06\x12\x12\n\x0eREQUEST_SOURCE\x10\x07\x12\x0f\n\x0bPUSH_SOURCE\x10\t\x12\x0f\n\x0b\x42\x41TCH_TRINO\x10\n\x12\x0f\n\x0b\x42\x41TCH_SPARK\x10\x0b\x12\x10\n\x0c\x42\x41TCH_ATHENA\x10\x0c\x42\t\n\x07optionsJ\x04\x08\x06\x10\x0b\x42T\n\x10\x66\x65\x61st.proto.coreB\x0f\x44\x61taSourceProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.DataSource_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\017DataSourceProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_DATASOURCE_TAGSENTRY']._options = None + _globals['_DATASOURCE_TAGSENTRY']._serialized_options = b'8\001' + _globals['_DATASOURCE_FIELDMAPPINGENTRY']._options = None + _globals['_DATASOURCE_FIELDMAPPINGENTRY']._serialized_options = b'8\001' + _globals['_DATASOURCE_REQUESTDATAOPTIONS_DEPRECATEDSCHEMAENTRY']._options = None + _globals['_DATASOURCE_REQUESTDATAOPTIONS_DEPRECATEDSCHEMAENTRY']._serialized_options = b'8\001' + _globals['_DATASOURCE']._serialized_start=189 + 
_globals['_DATASOURCE']._serialized_end=3069 + _globals['_DATASOURCE_TAGSENTRY']._serialized_start=1436 + _globals['_DATASOURCE_TAGSENTRY']._serialized_end=1479 + _globals['_DATASOURCE_FIELDMAPPINGENTRY']._serialized_start=1481 + _globals['_DATASOURCE_FIELDMAPPINGENTRY']._serialized_end=1532 + _globals['_DATASOURCE_SOURCEMETA']._serialized_start=1535 + _globals['_DATASOURCE_SOURCEMETA']._serialized_end=1665 + _globals['_DATASOURCE_FILEOPTIONS']._serialized_start=1667 + _globals['_DATASOURCE_FILEOPTIONS']._serialized_end=1768 + _globals['_DATASOURCE_BIGQUERYOPTIONS']._serialized_start=1770 + _globals['_DATASOURCE_BIGQUERYOPTIONS']._serialized_end=1817 + _globals['_DATASOURCE_TRINOOPTIONS']._serialized_start=1819 + _globals['_DATASOURCE_TRINOOPTIONS']._serialized_end=1863 + _globals['_DATASOURCE_KAFKAOPTIONS']._serialized_start=1866 + _globals['_DATASOURCE_KAFKAOPTIONS']._serialized_end=2040 + _globals['_DATASOURCE_KINESISOPTIONS']._serialized_start=2042 + _globals['_DATASOURCE_KINESISOPTIONS']._serialized_end=2144 + _globals['_DATASOURCE_REDSHIFTOPTIONS']._serialized_start=2146 + _globals['_DATASOURCE_REDSHIFTOPTIONS']._serialized_end=2227 + _globals['_DATASOURCE_ATHENAOPTIONS']._serialized_start=2229 + _globals['_DATASOURCE_ATHENAOPTIONS']._serialized_end=2313 + _globals['_DATASOURCE_SNOWFLAKEOPTIONS']._serialized_start=2315 + _globals['_DATASOURCE_SNOWFLAKEOPTIONS']._serialized_end=2403 + _globals['_DATASOURCE_SPARKOPTIONS']._serialized_start=2405 + _globals['_DATASOURCE_SPARKOPTIONS']._serialized_end=2484 + _globals['_DATASOURCE_CUSTOMSOURCEOPTIONS']._serialized_start=2486 + _globals['_DATASOURCE_CUSTOMSOURCEOPTIONS']._serialized_end=2530 + _globals['_DATASOURCE_REQUESTDATAOPTIONS']._serialized_start=2533 + _globals['_DATASOURCE_REQUESTDATAOPTIONS']._serialized_end=2780 + _globals['_DATASOURCE_REQUESTDATAOPTIONS_DEPRECATEDSCHEMAENTRY']._serialized_start=2690 + _globals['_DATASOURCE_REQUESTDATAOPTIONS_DEPRECATEDSCHEMAENTRY']._serialized_end=2774 + _globals['_DATASOURCE_PUSHOPTIONS']._serialized_start=2782 + _globals['_DATASOURCE_PUSHOPTIONS']._serialized_end=2801 + _globals['_DATASOURCE_SOURCETYPE']._serialized_start=2804 + _globals['_DATASOURCE_SOURCETYPE']._serialized_end=3052 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/DataSource_pb2.pyi b/sdk/python/feast/protos/feast/core/DataSource_pb2.pyi new file mode 100644 index 0000000000..94336638e1 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DataSource_pb2.pyi @@ -0,0 +1,559 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2020 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" +import builtins +import collections.abc +import feast.core.DataFormat_pb2 +import feast.core.Feature_pb2 +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class DataSource(google.protobuf.message.Message): + """Defines a Data Source that can be used source Feature data + Next available id: 28 + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _SourceType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _SourceTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DataSource._SourceType.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + INVALID: DataSource._SourceType.ValueType # 0 + BATCH_FILE: DataSource._SourceType.ValueType # 1 + BATCH_SNOWFLAKE: DataSource._SourceType.ValueType # 8 + BATCH_BIGQUERY: DataSource._SourceType.ValueType # 2 + BATCH_REDSHIFT: DataSource._SourceType.ValueType # 5 + STREAM_KAFKA: DataSource._SourceType.ValueType # 3 + STREAM_KINESIS: DataSource._SourceType.ValueType # 4 + CUSTOM_SOURCE: DataSource._SourceType.ValueType # 6 + REQUEST_SOURCE: DataSource._SourceType.ValueType # 7 + PUSH_SOURCE: DataSource._SourceType.ValueType # 9 + BATCH_TRINO: DataSource._SourceType.ValueType # 10 + BATCH_SPARK: DataSource._SourceType.ValueType # 11 + BATCH_ATHENA: DataSource._SourceType.ValueType # 12 + + class SourceType(_SourceType, metaclass=_SourceTypeEnumTypeWrapper): + """Type of Data Source. + Next available id: 12 + """ + + INVALID: DataSource.SourceType.ValueType # 0 + BATCH_FILE: DataSource.SourceType.ValueType # 1 + BATCH_SNOWFLAKE: DataSource.SourceType.ValueType # 8 + BATCH_BIGQUERY: DataSource.SourceType.ValueType # 2 + BATCH_REDSHIFT: DataSource.SourceType.ValueType # 5 + STREAM_KAFKA: DataSource.SourceType.ValueType # 3 + STREAM_KINESIS: DataSource.SourceType.ValueType # 4 + CUSTOM_SOURCE: DataSource.SourceType.ValueType # 6 + REQUEST_SOURCE: DataSource.SourceType.ValueType # 7 + PUSH_SOURCE: DataSource.SourceType.ValueType # 9 + BATCH_TRINO: DataSource.SourceType.ValueType # 10 + BATCH_SPARK: DataSource.SourceType.ValueType # 11 + BATCH_ATHENA: DataSource.SourceType.ValueType # 12 + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + class FieldMappingEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + class SourceMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + EARLIESTEVENTTIMESTAMP_FIELD_NUMBER: builtins.int + LATESTEVENTTIMESTAMP_FIELD_NUMBER: builtins.int + @property + def earliestEventTimestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def latestEventTimestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + earliestEventTimestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + latestEventTimestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["earliestEventTimestamp", b"earliestEventTimestamp", "latestEventTimestamp", b"latestEventTimestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["earliestEventTimestamp", b"earliestEventTimestamp", "latestEventTimestamp", b"latestEventTimestamp"]) -> None: ... + + class FileOptions(google.protobuf.message.Message): + """Defines options for DataSource that sources features from a file""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FILE_FORMAT_FIELD_NUMBER: builtins.int + URI_FIELD_NUMBER: builtins.int + S3_ENDPOINT_OVERRIDE_FIELD_NUMBER: builtins.int + @property + def file_format(self) -> feast.core.DataFormat_pb2.FileFormat: ... + uri: builtins.str + """Target URL of file to retrieve and source features from. + s3://path/to/file for AWS S3 storage + gs://path/to/file for GCP GCS storage + file:///path/to/file for local storage + """ + s3_endpoint_override: builtins.str + """override AWS S3 storage endpoint with custom S3 endpoint""" + def __init__( + self, + *, + file_format: feast.core.DataFormat_pb2.FileFormat | None = ..., + uri: builtins.str = ..., + s3_endpoint_override: builtins.str = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["file_format", b"file_format"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["file_format", b"file_format", "s3_endpoint_override", b"s3_endpoint_override", "uri", b"uri"]) -> None: ... + + class BigQueryOptions(google.protobuf.message.Message): + """Defines options for DataSource that sources features from a BigQuery Query""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_FIELD_NUMBER: builtins.int + QUERY_FIELD_NUMBER: builtins.int + table: builtins.str + """Full table reference in the form of [project:dataset.table]""" + query: builtins.str + """SQL query that returns a table containing feature data. Must contain an event_timestamp column, and respective + entity columns + """ + def __init__( + self, + *, + table: builtins.str = ..., + query: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["query", b"query", "table", b"table"]) -> None: ... + + class TrinoOptions(google.protobuf.message.Message): + """Defines options for DataSource that sources features from a Trino Query""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_FIELD_NUMBER: builtins.int + QUERY_FIELD_NUMBER: builtins.int + table: builtins.str + """Full table reference in the form of [project:dataset.table]""" + query: builtins.str + """SQL query that returns a table containing feature data. Must contain an event_timestamp column, and respective + entity columns + """ + def __init__( + self, + *, + table: builtins.str = ..., + query: builtins.str = ..., + ) -> None: ... 
+        def ClearField(self, field_name: typing_extensions.Literal["query", b"query", "table", b"table"]) -> None: ...
+
+    class KafkaOptions(google.protobuf.message.Message):
+        """Defines options for DataSource that sources features from Kafka messages.
+        Each message should be a Protobuf that can be decoded with the generated
+        Java Protobuf class at the given class path
+        """
+
+        DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+        KAFKA_BOOTSTRAP_SERVERS_FIELD_NUMBER: builtins.int
+        TOPIC_FIELD_NUMBER: builtins.int
+        MESSAGE_FORMAT_FIELD_NUMBER: builtins.int
+        WATERMARK_DELAY_THRESHOLD_FIELD_NUMBER: builtins.int
+        kafka_bootstrap_servers: builtins.str
+        """Comma separated list of Kafka bootstrap servers. Used for feature tables without a defined source host[:port]"""
+        topic: builtins.str
+        """Kafka topic to collect feature data from."""
+        @property
+        def message_format(self) -> feast.core.DataFormat_pb2.StreamFormat:
+            """Defines the stream data format encoding feature/entity data in Kafka messages."""
+        @property
+        def watermark_delay_threshold(self) -> google.protobuf.duration_pb2.Duration:
+            """Watermark delay threshold for stream data"""
+        def __init__(
+            self,
+            *,
+            kafka_bootstrap_servers: builtins.str = ...,
+            topic: builtins.str = ...,
+            message_format: feast.core.DataFormat_pb2.StreamFormat | None = ...,
+            watermark_delay_threshold: google.protobuf.duration_pb2.Duration | None = ...,
+        ) -> None: ...
+        def HasField(self, field_name: typing_extensions.Literal["message_format", b"message_format", "watermark_delay_threshold", b"watermark_delay_threshold"]) -> builtins.bool: ...
+        def ClearField(self, field_name: typing_extensions.Literal["kafka_bootstrap_servers", b"kafka_bootstrap_servers", "message_format", b"message_format", "topic", b"topic", "watermark_delay_threshold", b"watermark_delay_threshold"]) -> None: ...
+
+    class KinesisOptions(google.protobuf.message.Message):
+        """Defines options for DataSource that sources features from Kinesis records.
+        Each record should be a Protobuf that can be decoded with the generated
+        Java Protobuf class at the given class path
+        """
+
+        DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+        REGION_FIELD_NUMBER: builtins.int
+        STREAM_NAME_FIELD_NUMBER: builtins.int
+        RECORD_FORMAT_FIELD_NUMBER: builtins.int
+        region: builtins.str
+        """AWS region of the Kinesis stream"""
+        stream_name: builtins.str
+        """Name of the Kinesis stream to obtain feature data from."""
+        @property
+        def record_format(self) -> feast.core.DataFormat_pb2.StreamFormat:
+            """Defines the data format encoding the feature/entity data in Kinesis records.
+            Kinesis Data Sources support Avro and Proto as data formats.
+            """
+        def __init__(
+            self,
+            *,
+            region: builtins.str = ...,
+            stream_name: builtins.str = ...,
+            record_format: feast.core.DataFormat_pb2.StreamFormat | None = ...,
+        ) -> None: ...
+        def HasField(self, field_name: typing_extensions.Literal["record_format", b"record_format"]) -> builtins.bool: ...
+        def ClearField(self, field_name: typing_extensions.Literal["record_format", b"record_format", "region", b"region", "stream_name", b"stream_name"]) -> None: ...
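Because `message_format` and `watermark_delay_threshold` above are message-typed fields, they are populated from their own generated types rather than from scalars. A short sketch of building `KafkaOptions`, assuming only this module and the well-known `Duration` type:

from google.protobuf import duration_pb2

from feast.protos.feast.core import DataSource_pb2

kafka = DataSource_pb2.DataSource.KafkaOptions(
    kafka_bootstrap_servers="broker-a:9092,broker-b:9092",  # comma separated, per the field comment
    topic="driver_events",
    # Tolerate up to five minutes of late-arriving stream data.
    watermark_delay_threshold=duration_pb2.Duration(seconds=300),
)
assert kafka.HasField("watermark_delay_threshold")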
+
+    class RedshiftOptions(google.protobuf.message.Message):
+        """Defines options for DataSource that sources features from a Redshift Query"""
+
+        DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+        TABLE_FIELD_NUMBER: builtins.int
+        QUERY_FIELD_NUMBER: builtins.int
+        SCHEMA_FIELD_NUMBER: builtins.int
+        DATABASE_FIELD_NUMBER: builtins.int
+        table: builtins.str
+        """Redshift table name"""
+        query: builtins.str
+        """SQL query that returns a table containing feature data. Must contain an event_timestamp column, and respective
+        entity columns
+        """
+        schema: builtins.str
+        """Redshift schema name"""
+        database: builtins.str
+        """Redshift database name"""
+        def __init__(
+            self,
+            *,
+            table: builtins.str = ...,
+            query: builtins.str = ...,
+            schema: builtins.str = ...,
+            database: builtins.str = ...,
+        ) -> None: ...
+        def ClearField(self, field_name: typing_extensions.Literal["database", b"database", "query", b"query", "schema", b"schema", "table", b"table"]) -> None: ...
+
+    class AthenaOptions(google.protobuf.message.Message):
+        """Defines options for DataSource that sources features from an Athena Query"""
+
+        DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+        TABLE_FIELD_NUMBER: builtins.int
+        QUERY_FIELD_NUMBER: builtins.int
+        DATABASE_FIELD_NUMBER: builtins.int
+        DATA_SOURCE_FIELD_NUMBER: builtins.int
+        table: builtins.str
+        """Athena table name"""
+        query: builtins.str
+        """SQL query that returns a table containing feature data. Must contain an event_timestamp column, and respective
+        entity columns
+        """
+        database: builtins.str
+        """Athena database name"""
+        data_source: builtins.str
+        """Athena data source name"""
+        def __init__(
+            self,
+            *,
+            table: builtins.str = ...,
+            query: builtins.str = ...,
+            database: builtins.str = ...,
+            data_source: builtins.str = ...,
+        ) -> None: ...
+        def ClearField(self, field_name: typing_extensions.Literal["data_source", b"data_source", "database", b"database", "query", b"query", "table", b"table"]) -> None: ...
+
+    class SnowflakeOptions(google.protobuf.message.Message):
+        """Defines options for DataSource that sources features from a Snowflake Query"""
+
+        DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+        TABLE_FIELD_NUMBER: builtins.int
+        QUERY_FIELD_NUMBER: builtins.int
+        SCHEMA_FIELD_NUMBER: builtins.int
+        DATABASE_FIELD_NUMBER: builtins.int
+        table: builtins.str
+        """Snowflake table name"""
+        query: builtins.str
+        """SQL query that returns a table containing feature data. Must contain an event_timestamp column, and respective
+        entity columns
+        """
+        schema: builtins.str
+        """Snowflake schema name"""
+        database: builtins.str
+        """Snowflake database name"""
+        def __init__(
+            self,
+            *,
+            table: builtins.str = ...,
+            query: builtins.str = ...,
+            schema: builtins.str = ...,
+            database: builtins.str = ...,
+        ) -> None: ...
+        def ClearField(self, field_name: typing_extensions.Literal["database", b"database", "query", b"query", "schema", b"schema", "table", b"table"]) -> None: ...
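The warehouse-backed option messages above (Redshift, Athena, Snowflake, plus BigQuery and Trino earlier) share one shape: either `table` or `query` names the data, and `schema`/`database` (or `data_source` for Athena) scope it. A sketch for Redshift, under the same import-path assumption:

from feast.protos.feast.core import DataSource_pb2

redshift = DataSource_pb2.DataSource.RedshiftOptions(
    table="driver_stats",  # alternatively pass query="SELECT ..." returning an event_timestamp column
    schema="analytics",
    database="prod",
)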
+
+    class SparkOptions(google.protobuf.message.Message):
+        """Defines options for DataSource that sources features from a Spark table/query"""
+
+        DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+        TABLE_FIELD_NUMBER: builtins.int
+        QUERY_FIELD_NUMBER: builtins.int
+        PATH_FIELD_NUMBER: builtins.int
+        FILE_FORMAT_FIELD_NUMBER: builtins.int
+        table: builtins.str
+        """Table name"""
+        query: builtins.str
+        """Spark SQL query that returns the table; this is an alternative to `table`"""
+        path: builtins.str
+        """Path from which Spark can read the table; this is an alternative to `table`"""
+        file_format: builtins.str
+        """Format of files at `path` (e.g. parquet, avro, etc)"""
+        def __init__(
+            self,
+            *,
+            table: builtins.str = ...,
+            query: builtins.str = ...,
+            path: builtins.str = ...,
+            file_format: builtins.str = ...,
+        ) -> None: ...
+        def ClearField(self, field_name: typing_extensions.Literal["file_format", b"file_format", "path", b"path", "query", b"query", "table", b"table"]) -> None: ...
+
+    class CustomSourceOptions(google.protobuf.message.Message):
+        """Defines configuration for custom third-party data sources."""
+
+        DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+        CONFIGURATION_FIELD_NUMBER: builtins.int
+        configuration: builtins.bytes
+        """Serialized configuration information for the data source. The implementer of the custom data source is
+        responsible for serializing and deserializing data from bytes
+        """
+        def __init__(
+            self,
+            *,
+            configuration: builtins.bytes = ...,
+        ) -> None: ...
+        def ClearField(self, field_name: typing_extensions.Literal["configuration", b"configuration"]) -> None: ...
+
+    class RequestDataOptions(google.protobuf.message.Message):
+        """Defines options for DataSource that sources features from request data"""
+
+        DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+        class DeprecatedSchemaEntry(google.protobuf.message.Message):
+            DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+            KEY_FIELD_NUMBER: builtins.int
+            VALUE_FIELD_NUMBER: builtins.int
+            key: builtins.str
+            value: feast.types.Value_pb2.ValueType.Enum.ValueType
+            def __init__(
+                self,
+                *,
+                key: builtins.str = ...,
+                value: feast.types.Value_pb2.ValueType.Enum.ValueType = ...,
+            ) -> None: ...
+            def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
+
+        DEPRECATED_SCHEMA_FIELD_NUMBER: builtins.int
+        SCHEMA_FIELD_NUMBER: builtins.int
+        @property
+        def deprecated_schema(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, feast.types.Value_pb2.ValueType.Enum.ValueType]:
+            """Mapping of feature name to type"""
+        @property
+        def schema(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: ...
+        def __init__(
+            self,
+            *,
+            deprecated_schema: collections.abc.Mapping[builtins.str, feast.types.Value_pb2.ValueType.Enum.ValueType] | None = ...,
+            schema: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ...,
+        ) -> None: ...
+        def ClearField(self, field_name: typing_extensions.Literal["deprecated_schema", b"deprecated_schema", "schema", b"schema"]) -> None: ...
+
+    class PushOptions(google.protobuf.message.Message):
+        """Defines options for DataSource that supports pushing data to it. This allows data to be pushed to
+        the online store on-demand, such as by stream consumers.
+        """
+
+        DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+        def __init__(
+            self,
+        ) -> None: ...
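All of the option messages above feed a single `options` oneof on `DataSource` (declared just below), so assigning one member implicitly clears the others. A behavioral sketch, hedged on the same import path:

from feast.protos.feast.core import DataSource_pb2

ds = DataSource_pb2.DataSource(type=DataSource_pb2.DataSource.BATCH_FILE)
ds.file_options.uri = "s3://bucket/driver_stats.parquet"
assert ds.WhichOneof("options") == "file_options"

# Switching to another member of the oneof clears file_options automatically.
ds.bigquery_options.table = "project:dataset.driver_stats"
assert ds.WhichOneof("options") == "bigquery_options"
assert not ds.HasField("file_options")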
+
+    NAME_FIELD_NUMBER: builtins.int
+    PROJECT_FIELD_NUMBER: builtins.int
+    DESCRIPTION_FIELD_NUMBER: builtins.int
+    TAGS_FIELD_NUMBER: builtins.int
+    OWNER_FIELD_NUMBER: builtins.int
+    TYPE_FIELD_NUMBER: builtins.int
+    FIELD_MAPPING_FIELD_NUMBER: builtins.int
+    TIMESTAMP_FIELD_FIELD_NUMBER: builtins.int
+    DATE_PARTITION_COLUMN_FIELD_NUMBER: builtins.int
+    CREATED_TIMESTAMP_COLUMN_FIELD_NUMBER: builtins.int
+    DATA_SOURCE_CLASS_TYPE_FIELD_NUMBER: builtins.int
+    BATCH_SOURCE_FIELD_NUMBER: builtins.int
+    META_FIELD_NUMBER: builtins.int
+    FILE_OPTIONS_FIELD_NUMBER: builtins.int
+    BIGQUERY_OPTIONS_FIELD_NUMBER: builtins.int
+    KAFKA_OPTIONS_FIELD_NUMBER: builtins.int
+    KINESIS_OPTIONS_FIELD_NUMBER: builtins.int
+    REDSHIFT_OPTIONS_FIELD_NUMBER: builtins.int
+    REQUEST_DATA_OPTIONS_FIELD_NUMBER: builtins.int
+    CUSTOM_OPTIONS_FIELD_NUMBER: builtins.int
+    SNOWFLAKE_OPTIONS_FIELD_NUMBER: builtins.int
+    PUSH_OPTIONS_FIELD_NUMBER: builtins.int
+    SPARK_OPTIONS_FIELD_NUMBER: builtins.int
+    TRINO_OPTIONS_FIELD_NUMBER: builtins.int
+    ATHENA_OPTIONS_FIELD_NUMBER: builtins.int
+    name: builtins.str
+    """Unique name of data source within the project"""
+    project: builtins.str
+    """Name of Feast project that this data source belongs to."""
+    description: builtins.str
+    @property
+    def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ...
+    owner: builtins.str
+    type: global___DataSource.SourceType.ValueType
+    @property
+    def field_mapping(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
+        """Defines mapping between fields in the sourced data
+        and fields in parent FeatureTable.
+        """
+    timestamp_field: builtins.str
+    """Must specify event timestamp column name"""
+    date_partition_column: builtins.str
+    """(Optional) Specify partition column; useful for file sources"""
+    created_timestamp_column: builtins.str
+    """Must specify creation timestamp column name"""
+    data_source_class_type: builtins.str
+    """This is an internal field that represents the Python class for the data source object that a proto object represents.
+    This should be set by Feast, and not by users.
+    The field is used primarily by custom data sources and is mandatory for them to set. Feast may set it for
+    first party sources as well.
+    """
+    @property
+    def batch_source(self) -> global___DataSource:
+        """Optional batch source for streaming sources for historical features and materialization."""
+    @property
+    def meta(self) -> global___DataSource.SourceMeta: ...
+    @property
+    def file_options(self) -> global___DataSource.FileOptions: ...
+    @property
+    def bigquery_options(self) -> global___DataSource.BigQueryOptions: ...
+    @property
+    def kafka_options(self) -> global___DataSource.KafkaOptions: ...
+    @property
+    def kinesis_options(self) -> global___DataSource.KinesisOptions: ...
+    @property
+    def redshift_options(self) -> global___DataSource.RedshiftOptions: ...
+    @property
+    def request_data_options(self) -> global___DataSource.RequestDataOptions: ...
+    @property
+    def custom_options(self) -> global___DataSource.CustomSourceOptions: ...
+    @property
+    def snowflake_options(self) -> global___DataSource.SnowflakeOptions: ...
+    @property
+    def push_options(self) -> global___DataSource.PushOptions: ...
+    @property
+    def spark_options(self) -> global___DataSource.SparkOptions: ...
+    @property
+    def trino_options(self) -> global___DataSource.TrinoOptions: ...
+    @property
+    def athena_options(self) -> global___DataSource.AthenaOptions: ...
+ def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + description: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + owner: builtins.str = ..., + type: global___DataSource.SourceType.ValueType = ..., + field_mapping: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + timestamp_field: builtins.str = ..., + date_partition_column: builtins.str = ..., + created_timestamp_column: builtins.str = ..., + data_source_class_type: builtins.str = ..., + batch_source: global___DataSource | None = ..., + meta: global___DataSource.SourceMeta | None = ..., + file_options: global___DataSource.FileOptions | None = ..., + bigquery_options: global___DataSource.BigQueryOptions | None = ..., + kafka_options: global___DataSource.KafkaOptions | None = ..., + kinesis_options: global___DataSource.KinesisOptions | None = ..., + redshift_options: global___DataSource.RedshiftOptions | None = ..., + request_data_options: global___DataSource.RequestDataOptions | None = ..., + custom_options: global___DataSource.CustomSourceOptions | None = ..., + snowflake_options: global___DataSource.SnowflakeOptions | None = ..., + push_options: global___DataSource.PushOptions | None = ..., + spark_options: global___DataSource.SparkOptions | None = ..., + trino_options: global___DataSource.TrinoOptions | None = ..., + athena_options: global___DataSource.AthenaOptions | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["athena_options", b"athena_options", "batch_source", b"batch_source", "bigquery_options", b"bigquery_options", "custom_options", b"custom_options", "file_options", b"file_options", "kafka_options", b"kafka_options", "kinesis_options", b"kinesis_options", "meta", b"meta", "options", b"options", "push_options", b"push_options", "redshift_options", b"redshift_options", "request_data_options", b"request_data_options", "snowflake_options", b"snowflake_options", "spark_options", b"spark_options", "trino_options", b"trino_options"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["athena_options", b"athena_options", "batch_source", b"batch_source", "bigquery_options", b"bigquery_options", "created_timestamp_column", b"created_timestamp_column", "custom_options", b"custom_options", "data_source_class_type", b"data_source_class_type", "date_partition_column", b"date_partition_column", "description", b"description", "field_mapping", b"field_mapping", "file_options", b"file_options", "kafka_options", b"kafka_options", "kinesis_options", b"kinesis_options", "meta", b"meta", "name", b"name", "options", b"options", "owner", b"owner", "project", b"project", "push_options", b"push_options", "redshift_options", b"redshift_options", "request_data_options", b"request_data_options", "snowflake_options", b"snowflake_options", "spark_options", b"spark_options", "tags", b"tags", "timestamp_field", b"timestamp_field", "trino_options", b"trino_options", "type", b"type"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["options", b"options"]) -> typing_extensions.Literal["file_options", "bigquery_options", "kafka_options", "kinesis_options", "redshift_options", "request_data_options", "custom_options", "snowflake_options", "push_options", "spark_options", "trino_options", "athena_options"] | None: ... 
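With the full `DataSource` stub now in view, the usual protobuf binary round-trip applies unchanged. A minimal sketch, again assuming the import path shown in this diff:

from feast.protos.feast.core import DataSource_pb2

src = DataSource_pb2.DataSource(name="orders", timestamp_field="event_ts")
payload = src.SerializeToString()  # wire-format bytes
restored = DataSource_pb2.DataSource.FromString(payload)
assert restored.name == "orders"
assert restored.timestamp_field == "event_ts"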
+ +global___DataSource = DataSource diff --git a/sdk/python/feast/protos/feast/core/DataSource_pb2_grpc.py b/sdk/python/feast/protos/feast/core/DataSource_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DataSource_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.py b/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.py new file mode 100644 index 0000000000..c5dbc3ec64 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/DatastoreTable.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x66\x65\x61st/core/DatastoreTable.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/wrappers.proto\"\xc2\x01\n\x0e\x44\x61tastoreTable\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x30\n\nproject_id\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12/\n\tnamespace\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x08\x64\x61tabase\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.StringValueBX\n\x10\x66\x65\x61st.proto.coreB\x13\x44\x61tastoreTableProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.DatastoreTable_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\023DatastoreTableProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_DATASTORETABLE']._serialized_start=80 + _globals['_DATASTORETABLE']._serialized_end=274 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.pyi b/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.pyi new file mode 100644 index 0000000000..6339a97536 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.pyi @@ -0,0 +1,67 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +* Copyright 2021 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+""" +import builtins +import google.protobuf.descriptor +import google.protobuf.message +import google.protobuf.wrappers_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class DatastoreTable(google.protobuf.message.Message): + """Represents a Datastore table""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + PROJECT_ID_FIELD_NUMBER: builtins.int + NAMESPACE_FIELD_NUMBER: builtins.int + DATABASE_FIELD_NUMBER: builtins.int + project: builtins.str + """Feast project of the table""" + name: builtins.str + """Name of the table""" + @property + def project_id(self) -> google.protobuf.wrappers_pb2.StringValue: + """GCP project id""" + @property + def namespace(self) -> google.protobuf.wrappers_pb2.StringValue: + """Datastore namespace""" + @property + def database(self) -> google.protobuf.wrappers_pb2.StringValue: + """Firestore database""" + def __init__( + self, + *, + project: builtins.str = ..., + name: builtins.str = ..., + project_id: google.protobuf.wrappers_pb2.StringValue | None = ..., + namespace: google.protobuf.wrappers_pb2.StringValue | None = ..., + database: google.protobuf.wrappers_pb2.StringValue | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["database", b"database", "namespace", b"namespace", "project_id", b"project_id"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["database", b"database", "name", b"name", "namespace", b"namespace", "project", b"project", "project_id", b"project_id"]) -> None: ... + +global___DatastoreTable = DatastoreTable diff --git a/sdk/python/feast/protos/feast/core/DatastoreTable_pb2_grpc.py b/sdk/python/feast/protos/feast/core/DatastoreTable_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DatastoreTable_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2.py b/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2.py new file mode 100644 index 0000000000..34b813f39a --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/core/DynamoDBTable.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1e\x66\x65\x61st/core/DynamoDBTable.proto\x12\nfeast.core\"-\n\rDynamoDBTable\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x02 \x01(\tBW\n\x10\x66\x65\x61st.proto.coreB\x12\x44ynamoDBTableProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.DynamoDBTable_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\022DynamoDBTableProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_DYNAMODBTABLE']._serialized_start=46 + _globals['_DYNAMODBTABLE']._serialized_end=91 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2.pyi b/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2.pyi new file mode 100644 index 0000000000..cd9edd9a03 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2.pyi @@ -0,0 +1,50 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +* Copyright 2021 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class DynamoDBTable(google.protobuf.message.Message): + """Represents a DynamoDB table""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + REGION_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the table""" + region: builtins.str + """Region of the table""" + def __init__( + self, + *, + name: builtins.str = ..., + region: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "region", b"region"]) -> None: ... + +global___DynamoDBTable = DynamoDBTable diff --git a/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2_grpc.py b/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/DynamoDBTable_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Entity_pb2.py b/sdk/python/feast/protos/feast/core/Entity_pb2.py new file mode 100644 index 0000000000..5a192854ca --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Entity_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/Entity.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x66\x65\x61st/core/Entity.proto\x12\nfeast.core\x1a\x17\x66\x65\x61st/types/Value.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"V\n\x06\x45ntity\x12&\n\x04spec\x18\x01 \x01(\x0b\x32\x18.feast.core.EntitySpecV2\x12$\n\x04meta\x18\x02 \x01(\x0b\x32\x16.feast.core.EntityMeta\"\xf3\x01\n\x0c\x45ntitySpecV2\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\t \x01(\t\x12/\n\nvalue_type\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x10\n\x08join_key\x18\x04 \x01(\t\x12\x30\n\x04tags\x18\x08 \x03(\x0b\x32\".feast.core.EntitySpecV2.TagsEntry\x12\r\n\x05owner\x18\n \x01(\t\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x7f\n\nEntityMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampBP\n\x10\x66\x65\x61st.proto.coreB\x0b\x45ntityProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Entity_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\013EntityProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_ENTITYSPECV2_TAGSENTRY']._options = None + _globals['_ENTITYSPECV2_TAGSENTRY']._serialized_options = b'8\001' + _globals['_ENTITY']._serialized_start=97 + _globals['_ENTITY']._serialized_end=183 + _globals['_ENTITYSPECV2']._serialized_start=186 + _globals['_ENTITYSPECV2']._serialized_end=429 + _globals['_ENTITYSPECV2_TAGSENTRY']._serialized_start=386 + _globals['_ENTITYSPECV2_TAGSENTRY']._serialized_end=429 + _globals['_ENTITYMETA']._serialized_start=431 + _globals['_ENTITYMETA']._serialized_end=558 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Entity_pb2.pyi b/sdk/python/feast/protos/feast/core/Entity_pb2.pyi new file mode 100644 index 0000000000..732b3e1032 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Entity_pb2.pyi @@ -0,0 +1,130 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file + +* Copyright 2020 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +""" +import builtins +import collections.abc +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Entity(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___EntitySpecV2: + """User-specified specifications of this entity.""" + @property + def meta(self) -> global___EntityMeta: + """System-populated metadata for this entity.""" + def __init__( + self, + *, + spec: global___EntitySpecV2 | None = ..., + meta: global___EntityMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... + +global___Entity = Entity + +class EntitySpecV2(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + VALUE_TYPE_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + JOIN_KEY_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + OWNER_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the entity.""" + project: builtins.str + """Name of Feast project that this feature table belongs to.""" + value_type: feast.types.Value_pb2.ValueType.Enum.ValueType + """Type of the entity.""" + description: builtins.str + """Description of the entity.""" + join_key: builtins.str + """Join key for the entity (i.e. name of the column the entity maps to).""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + owner: builtins.str + """Owner of the entity.""" + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + value_type: feast.types.Value_pb2.ValueType.Enum.ValueType = ..., + description: builtins.str = ..., + join_key: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + owner: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "join_key", b"join_key", "name", b"name", "owner", b"owner", "project", b"project", "tags", b"tags", "value_type", b"value_type"]) -> None: ... + +global___EntitySpecV2 = EntitySpecV2 + +class EntityMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> None: ... + +global___EntityMeta = EntityMeta diff --git a/sdk/python/feast/protos/feast/core/Entity_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Entity_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Entity_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/FeatureService_pb2.py b/sdk/python/feast/protos/feast/core/FeatureService_pb2.py new file mode 100644 index 0000000000..cf6ac46ac5 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureService_pb2.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/core/FeatureService.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import FeatureViewProjection_pb2 as feast_dot_core_dot_FeatureViewProjection__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x66\x65\x61st/core/FeatureService.proto\x12\nfeast.core\x1a\x1fgoogle/protobuf/timestamp.proto\x1a&feast/core/FeatureViewProjection.proto\"l\n\x0e\x46\x65\x61tureService\x12,\n\x04spec\x18\x01 \x01(\x0b\x32\x1e.feast.core.FeatureServiceSpec\x12,\n\x04meta\x18\x02 \x01(\x0b\x32\x1e.feast.core.FeatureServiceMeta\"\xa4\x02\n\x12\x46\x65\x61tureServiceSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x33\n\x08\x66\x65\x61tures\x18\x03 \x03(\x0b\x32!.feast.core.FeatureViewProjection\x12\x36\n\x04tags\x18\x04 \x03(\x0b\x32(.feast.core.FeatureServiceSpec.TagsEntry\x12\x13\n\x0b\x64\x65scription\x18\x05 \x01(\t\x12\r\n\x05owner\x18\x06 \x01(\t\x12\x31\n\x0elogging_config\x18\x07 \x01(\x0b\x32\x19.feast.core.LoggingConfig\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x87\x01\n\x12\x46\x65\x61tureServiceMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x9a\x07\n\rLoggingConfig\x12\x13\n\x0bsample_rate\x18\x01 \x01(\x02\x12\x45\n\x10\x66ile_destination\x18\x03 \x01(\x0b\x32).feast.core.LoggingConfig.FileDestinationH\x00\x12M\n\x14\x62igquery_destination\x18\x04 \x01(\x0b\x32-.feast.core.LoggingConfig.BigQueryDestinationH\x00\x12M\n\x14redshift_destination\x18\x05 \x01(\x0b\x32-.feast.core.LoggingConfig.RedshiftDestinationH\x00\x12O\n\x15snowflake_destination\x18\x06 \x01(\x0b\x32..feast.core.LoggingConfig.SnowflakeDestinationH\x00\x12I\n\x12\x63ustom_destination\x18\x07 \x01(\x0b\x32+.feast.core.LoggingConfig.CustomDestinationH\x00\x12I\n\x12\x61thena_destination\x18\x08 \x01(\x0b\x32+.feast.core.LoggingConfig.AthenaDestinationH\x00\x1aS\n\x0f\x46ileDestination\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x1c\n\x14s3_endpoint_override\x18\x02 \x01(\t\x12\x14\n\x0cpartition_by\x18\x03 \x03(\t\x1a(\n\x13\x42igQueryDestination\x12\x11\n\ttable_ref\x18\x01 \x01(\t\x1a)\n\x13RedshiftDestination\x12\x12\n\ntable_name\x18\x01 \x01(\t\x1a\'\n\x11\x41thenaDestination\x12\x12\n\ntable_name\x18\x01 \x01(\t\x1a*\n\x14SnowflakeDestination\x12\x12\n\ntable_name\x18\x01 \x01(\t\x1a\x99\x01\n\x11\x43ustomDestination\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12G\n\x06\x63onfig\x18\x02 \x03(\x0b\x32\x37.feast.core.LoggingConfig.CustomDestination.ConfigEntry\x1a-\n\x0b\x43onfigEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\r\n\x0b\x64\x65stinationBX\n\x10\x66\x65\x61st.proto.coreB\x13\x46\x65\x61tureServiceProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.FeatureService_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS 
== False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\023FeatureServiceProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_FEATURESERVICESPEC_TAGSENTRY']._options = None + _globals['_FEATURESERVICESPEC_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LOGGINGCONFIG_CUSTOMDESTINATION_CONFIGENTRY']._options = None + _globals['_LOGGINGCONFIG_CUSTOMDESTINATION_CONFIGENTRY']._serialized_options = b'8\001' + _globals['_FEATURESERVICE']._serialized_start=120 + _globals['_FEATURESERVICE']._serialized_end=228 + _globals['_FEATURESERVICESPEC']._serialized_start=231 + _globals['_FEATURESERVICESPEC']._serialized_end=523 + _globals['_FEATURESERVICESPEC_TAGSENTRY']._serialized_start=480 + _globals['_FEATURESERVICESPEC_TAGSENTRY']._serialized_end=523 + _globals['_FEATURESERVICEMETA']._serialized_start=526 + _globals['_FEATURESERVICEMETA']._serialized_end=661 + _globals['_LOGGINGCONFIG']._serialized_start=664 + _globals['_LOGGINGCONFIG']._serialized_end=1586 + _globals['_LOGGINGCONFIG_FILEDESTINATION']._serialized_start=1162 + _globals['_LOGGINGCONFIG_FILEDESTINATION']._serialized_end=1245 + _globals['_LOGGINGCONFIG_BIGQUERYDESTINATION']._serialized_start=1247 + _globals['_LOGGINGCONFIG_BIGQUERYDESTINATION']._serialized_end=1287 + _globals['_LOGGINGCONFIG_REDSHIFTDESTINATION']._serialized_start=1289 + _globals['_LOGGINGCONFIG_REDSHIFTDESTINATION']._serialized_end=1330 + _globals['_LOGGINGCONFIG_ATHENADESTINATION']._serialized_start=1332 + _globals['_LOGGINGCONFIG_ATHENADESTINATION']._serialized_end=1371 + _globals['_LOGGINGCONFIG_SNOWFLAKEDESTINATION']._serialized_start=1373 + _globals['_LOGGINGCONFIG_SNOWFLAKEDESTINATION']._serialized_end=1415 + _globals['_LOGGINGCONFIG_CUSTOMDESTINATION']._serialized_start=1418 + _globals['_LOGGINGCONFIG_CUSTOMDESTINATION']._serialized_end=1571 + _globals['_LOGGINGCONFIG_CUSTOMDESTINATION_CONFIGENTRY']._serialized_start=1526 + _globals['_LOGGINGCONFIG_CUSTOMDESTINATION_CONFIGENTRY']._serialized_end=1571 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/FeatureService_pb2.pyi b/sdk/python/feast/protos/feast/core/FeatureService_pb2.pyi new file mode 100644 index 0000000000..b3305b72df --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureService_pb2.pyi @@ -0,0 +1,266 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import builtins +import collections.abc +import feast.core.FeatureViewProjection_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class FeatureService(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___FeatureServiceSpec: + """User-specified specifications of this feature service.""" + @property + def meta(self) -> global___FeatureServiceMeta: + """System-populated metadata for this feature service.""" + def __init__( + self, + *, + spec: global___FeatureServiceSpec | None = ..., + meta: global___FeatureServiceMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... + +global___FeatureService = FeatureService + +class FeatureServiceSpec(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + OWNER_FIELD_NUMBER: builtins.int + LOGGING_CONFIG_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the Feature Service. Must be unique. Not updated.""" + project: builtins.str + """Name of Feast project that this Feature Service belongs to.""" + @property + def features(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.FeatureViewProjection_pb2.FeatureViewProjection]: + """Represents a projection that's to be applied on top of the FeatureView. + Contains data such as the features to use from a FeatureView. + """ + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + description: builtins.str + """Description of the feature service.""" + owner: builtins.str + """Owner of the feature service.""" + @property + def logging_config(self) -> global___LoggingConfig: + """(optional) if provided logging will be enabled for this feature service.""" + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + features: collections.abc.Iterable[feast.core.FeatureViewProjection_pb2.FeatureViewProjection] | None = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + description: builtins.str = ..., + owner: builtins.str = ..., + logging_config: global___LoggingConfig | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["logging_config", b"logging_config"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "features", b"features", "logging_config", b"logging_config", "name", b"name", "owner", b"owner", "project", b"project", "tags", b"tags"]) -> None: ... + +global___FeatureServiceSpec = FeatureServiceSpec + +class FeatureServiceMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature Service is created""" + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature Service is last updated""" + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> None: ... + +global___FeatureServiceMeta = FeatureServiceMeta + +class LoggingConfig(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class FileDestination(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PATH_FIELD_NUMBER: builtins.int + S3_ENDPOINT_OVERRIDE_FIELD_NUMBER: builtins.int + PARTITION_BY_FIELD_NUMBER: builtins.int + path: builtins.str + s3_endpoint_override: builtins.str + @property + def partition_by(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """column names to use for partitioning""" + def __init__( + self, + *, + path: builtins.str = ..., + s3_endpoint_override: builtins.str = ..., + partition_by: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["partition_by", b"partition_by", "path", b"path", "s3_endpoint_override", b"s3_endpoint_override"]) -> None: ... + + class BigQueryDestination(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_REF_FIELD_NUMBER: builtins.int + table_ref: builtins.str + """Full table reference in the form of [project:dataset.table]""" + def __init__( + self, + *, + table_ref: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["table_ref", b"table_ref"]) -> None: ... + + class RedshiftDestination(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_NAME_FIELD_NUMBER: builtins.int + table_name: builtins.str + """Destination table name. ClusterId and database will be taken from an offline store config""" + def __init__( + self, + *, + table_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["table_name", b"table_name"]) -> None: ... + + class AthenaDestination(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_NAME_FIELD_NUMBER: builtins.int + table_name: builtins.str + """Destination table name. data_source and database will be taken from an offline store config""" + def __init__( + self, + *, + table_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["table_name", b"table_name"]) -> None: ... + + class SnowflakeDestination(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TABLE_NAME_FIELD_NUMBER: builtins.int + table_name: builtins.str + """Destination table name. Schema and database will be taken from an offline store config""" + def __init__( + self, + *, + table_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["table_name", b"table_name"]) -> None: ... 
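Each destination message above (together with `CustomDestination` just below) occupies the `destination` oneof on `LoggingConfig`, paired with a `sample_rate`. A sketch of wiring one into a feature service spec, assuming the import path used in this diff:

from feast.protos.feast.core import FeatureService_pb2

cfg = FeatureService_pb2.LoggingConfig(sample_rate=0.1)
cfg.file_destination.path = "s3://feast-logs/ranking/"
assert cfg.WhichOneof("destination") == "file_destination"

spec = FeatureService_pb2.FeatureServiceSpec(name="ranking_v1", logging_config=cfg)
assert spec.HasField("logging_config")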
+ + class CustomDestination(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class ConfigEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + KIND_FIELD_NUMBER: builtins.int + CONFIG_FIELD_NUMBER: builtins.int + kind: builtins.str + @property + def config(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + kind: builtins.str = ..., + config: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["config", b"config", "kind", b"kind"]) -> None: ... + + SAMPLE_RATE_FIELD_NUMBER: builtins.int + FILE_DESTINATION_FIELD_NUMBER: builtins.int + BIGQUERY_DESTINATION_FIELD_NUMBER: builtins.int + REDSHIFT_DESTINATION_FIELD_NUMBER: builtins.int + SNOWFLAKE_DESTINATION_FIELD_NUMBER: builtins.int + CUSTOM_DESTINATION_FIELD_NUMBER: builtins.int + ATHENA_DESTINATION_FIELD_NUMBER: builtins.int + sample_rate: builtins.float + @property + def file_destination(self) -> global___LoggingConfig.FileDestination: ... + @property + def bigquery_destination(self) -> global___LoggingConfig.BigQueryDestination: ... + @property + def redshift_destination(self) -> global___LoggingConfig.RedshiftDestination: ... + @property + def snowflake_destination(self) -> global___LoggingConfig.SnowflakeDestination: ... + @property + def custom_destination(self) -> global___LoggingConfig.CustomDestination: ... + @property + def athena_destination(self) -> global___LoggingConfig.AthenaDestination: ... + def __init__( + self, + *, + sample_rate: builtins.float = ..., + file_destination: global___LoggingConfig.FileDestination | None = ..., + bigquery_destination: global___LoggingConfig.BigQueryDestination | None = ..., + redshift_destination: global___LoggingConfig.RedshiftDestination | None = ..., + snowflake_destination: global___LoggingConfig.SnowflakeDestination | None = ..., + custom_destination: global___LoggingConfig.CustomDestination | None = ..., + athena_destination: global___LoggingConfig.AthenaDestination | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["athena_destination", b"athena_destination", "bigquery_destination", b"bigquery_destination", "custom_destination", b"custom_destination", "destination", b"destination", "file_destination", b"file_destination", "redshift_destination", b"redshift_destination", "snowflake_destination", b"snowflake_destination"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["athena_destination", b"athena_destination", "bigquery_destination", b"bigquery_destination", "custom_destination", b"custom_destination", "destination", b"destination", "file_destination", b"file_destination", "redshift_destination", b"redshift_destination", "sample_rate", b"sample_rate", "snowflake_destination", b"snowflake_destination"]) -> None: ... 
+ def WhichOneof(self, oneof_group: typing_extensions.Literal["destination", b"destination"]) -> typing_extensions.Literal["file_destination", "bigquery_destination", "redshift_destination", "snowflake_destination", "custom_destination", "athena_destination"] | None: ... + +global___LoggingConfig = LoggingConfig diff --git a/sdk/python/feast/protos/feast/core/FeatureService_pb2_grpc.py b/sdk/python/feast/protos/feast/core/FeatureService_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureService_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/FeatureTable_pb2.py b/sdk/python/feast/protos/feast/core/FeatureTable_pb2.py new file mode 100644 index 0000000000..713e72b5d3 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureTable_pb2.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/FeatureTable.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 +from feast.protos.feast.core import Feature_pb2 as feast_dot_core_dot_Feature__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x66\x65\x61st/core/FeatureTable.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\"f\n\x0c\x46\x65\x61tureTable\x12*\n\x04spec\x18\x01 \x01(\x0b\x32\x1c.feast.core.FeatureTableSpec\x12*\n\x04meta\x18\x02 \x01(\x0b\x32\x1c.feast.core.FeatureTableMeta\"\xe2\x02\n\x10\x46\x65\x61tureTableSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\t \x01(\t\x12\x10\n\x08\x65ntities\x18\x03 \x03(\t\x12+\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x38\n\x06labels\x18\x05 \x03(\x0b\x32(.feast.core.FeatureTableSpec.LabelsEntry\x12*\n\x07max_age\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12,\n\x0c\x62\x61tch_source\x18\x07 \x01(\x0b\x32\x16.feast.core.DataSource\x12-\n\rstream_source\x18\x08 \x01(\x0b\x32\x16.feast.core.DataSource\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa5\x01\n\x10\x46\x65\x61tureTableMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x10\n\x08revision\x18\x03 \x01(\x03\x12\x0c\n\x04hash\x18\x04 \x01(\tBV\n\x10\x66\x65\x61st.proto.coreB\x11\x46\x65\x61tureTableProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 
'feast.core.FeatureTable_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\021FeatureTableProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_FEATURETABLESPEC_LABELSENTRY']._options = None + _globals['_FEATURETABLESPEC_LABELSENTRY']._serialized_options = b'8\001' + _globals['_FEATURETABLE']._serialized_start=165 + _globals['_FEATURETABLE']._serialized_end=267 + _globals['_FEATURETABLESPEC']._serialized_start=270 + _globals['_FEATURETABLESPEC']._serialized_end=624 + _globals['_FEATURETABLESPEC_LABELSENTRY']._serialized_start=579 + _globals['_FEATURETABLESPEC_LABELSENTRY']._serialized_end=624 + _globals['_FEATURETABLEMETA']._serialized_start=627 + _globals['_FEATURETABLEMETA']._serialized_end=792 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/FeatureTable_pb2.pyi b/sdk/python/feast/protos/feast/core/FeatureTable_pb2.pyi new file mode 100644 index 0000000000..dd41c2d214 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureTable_pb2.pyi @@ -0,0 +1,166 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2020 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import feast.core.DataSource_pb2 +import feast.core.Feature_pb2 +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class FeatureTable(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___FeatureTableSpec: + """User-specified specifications of this feature table.""" + @property + def meta(self) -> global___FeatureTableMeta: + """System-populated metadata for this feature table.""" + def __init__( + self, + *, + spec: global___FeatureTableSpec | None = ..., + meta: global___FeatureTableMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... + +global___FeatureTable = FeatureTable + +class FeatureTableSpec(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class LabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ENTITIES_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + LABELS_FIELD_NUMBER: builtins.int + MAX_AGE_FIELD_NUMBER: builtins.int + BATCH_SOURCE_FIELD_NUMBER: builtins.int + STREAM_SOURCE_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the feature table. Must be unique. Not updated.""" + project: builtins.str + """Name of Feast project that this feature table belongs to.""" + @property + def entities(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """List names of entities to associate with the Features defined in this + Feature Table. Not updatable. + """ + @property + def features(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """List of features specifications for each feature defined with this feature table.""" + @property + def labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + @property + def max_age(self) -> google.protobuf.duration_pb2.Duration: + """Features in this feature table can only be retrieved from online serving + younger than max age. Age is measured as the duration of time between + the feature's event timestamp and when the feature is retrieved + Feature values outside max age will be returned as unset values and indicated to end user + """ + @property + def batch_source(self) -> feast.core.DataSource_pb2.DataSource: + """Batch/Offline DataSource to source batch/offline feature data. + Only batch DataSource can be specified + (ie source type should start with 'BATCH_') + """ + @property + def stream_source(self) -> feast.core.DataSource_pb2.DataSource: + """Stream/Online DataSource to source stream/online feature data. + Only stream DataSource can be specified + (ie source type should start with 'STREAM_') + """ + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + entities: collections.abc.Iterable[builtins.str] | None = ..., + features: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + max_age: google.protobuf.duration_pb2.Duration | None = ..., + batch_source: feast.core.DataSource_pb2.DataSource | None = ..., + stream_source: feast.core.DataSource_pb2.DataSource | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "max_age", b"max_age", "stream_source", b"stream_source"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "entities", b"entities", "features", b"features", "labels", b"labels", "max_age", b"max_age", "name", b"name", "project", b"project", "stream_source", b"stream_source"]) -> None: ... 
+ +global___FeatureTableSpec = FeatureTableSpec + +class FeatureTableMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + REVISION_FIELD_NUMBER: builtins.int + HASH_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature Table is created""" + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature Table is last updated""" + revision: builtins.int + """Auto incrementing revision no. of this Feature Table""" + hash: builtins.str + """Hash entities, features, batch_source and stream_source to inform JobService if + jobs should be restarted should hash change + """ + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + revision: builtins.int = ..., + hash: builtins.str = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "hash", b"hash", "last_updated_timestamp", b"last_updated_timestamp", "revision", b"revision"]) -> None: ... + +global___FeatureTableMeta = FeatureTableMeta diff --git a/sdk/python/feast/protos/feast/core/FeatureTable_pb2_grpc.py b/sdk/python/feast/protos/feast/core/FeatureTable_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureTable_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.py b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.py new file mode 100644 index 0000000000..b47d4fe392 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/core/FeatureViewProjection.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.core import Feature_pb2 as feast_dot_core_dot_Feature__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&feast/core/FeatureViewProjection.proto\x12\nfeast.core\x1a\x18\x66\x65\x61st/core/Feature.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\"\xba\x03\n\x15\x46\x65\x61tureViewProjection\x12\x19\n\x11\x66\x65\x61ture_view_name\x18\x01 \x01(\t\x12\x1f\n\x17\x66\x65\x61ture_view_name_alias\x18\x03 \x01(\t\x12\x32\n\x0f\x66\x65\x61ture_columns\x18\x02 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12G\n\x0cjoin_key_map\x18\x04 \x03(\x0b\x32\x31.feast.core.FeatureViewProjection.JoinKeyMapEntry\x12\x17\n\x0ftimestamp_field\x18\x05 \x01(\t\x12\x1d\n\x15\x64\x61te_partition_column\x18\x06 \x01(\t\x12 \n\x18\x63reated_timestamp_column\x18\x07 \x01(\t\x12,\n\x0c\x62\x61tch_source\x18\x08 \x01(\x0b\x32\x16.feast.core.DataSource\x12-\n\rstream_source\x18\t \x01(\x0b\x32\x16.feast.core.DataSource\x1a\x31\n\x0fJoinKeyMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42Z\n\x10\x66\x65\x61st.proto.coreB\x15\x46\x65\x61tureReferenceProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.FeatureViewProjection_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\025FeatureReferenceProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_FEATUREVIEWPROJECTION_JOINKEYMAPENTRY']._options = None + _globals['_FEATUREVIEWPROJECTION_JOINKEYMAPENTRY']._serialized_options = b'8\001' + _globals['_FEATUREVIEWPROJECTION']._serialized_start=110 + _globals['_FEATUREVIEWPROJECTION']._serialized_end=552 + _globals['_FEATUREVIEWPROJECTION_JOINKEYMAPENTRY']._serialized_start=503 + _globals['_FEATUREVIEWPROJECTION_JOINKEYMAPENTRY']._serialized_end=552 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.pyi b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.pyi new file mode 100644 index 0000000000..6b44ad4a93 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.pyi @@ -0,0 +1,87 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import builtins +import collections.abc +import feast.core.DataSource_pb2 +import feast.core.Feature_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class FeatureViewProjection(google.protobuf.message.Message): + """A projection to be applied on top of a FeatureView. 
+ Contains the modifications to a FeatureView such as the features subset to use. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class JoinKeyMapEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + FEATURE_VIEW_NAME_FIELD_NUMBER: builtins.int + FEATURE_VIEW_NAME_ALIAS_FIELD_NUMBER: builtins.int + FEATURE_COLUMNS_FIELD_NUMBER: builtins.int + JOIN_KEY_MAP_FIELD_NUMBER: builtins.int + TIMESTAMP_FIELD_FIELD_NUMBER: builtins.int + DATE_PARTITION_COLUMN_FIELD_NUMBER: builtins.int + CREATED_TIMESTAMP_COLUMN_FIELD_NUMBER: builtins.int + BATCH_SOURCE_FIELD_NUMBER: builtins.int + STREAM_SOURCE_FIELD_NUMBER: builtins.int + feature_view_name: builtins.str + """The feature view name""" + feature_view_name_alias: builtins.str + """Alias for feature view name""" + @property + def feature_columns(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """The features of the feature view that are a part of the feature reference.""" + @property + def join_key_map(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """Map for entity join_key overrides of feature data entity join_key to entity data join_key""" + timestamp_field: builtins.str + date_partition_column: builtins.str + created_timestamp_column: builtins.str + @property + def batch_source(self) -> feast.core.DataSource_pb2.DataSource: + """Batch/Offline DataSource where this view can retrieve offline feature data.""" + @property + def stream_source(self) -> feast.core.DataSource_pb2.DataSource: + """Streaming DataSource from where this view can consume "online" feature data.""" + def __init__( + self, + *, + feature_view_name: builtins.str = ..., + feature_view_name_alias: builtins.str = ..., + feature_columns: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + join_key_map: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + timestamp_field: builtins.str = ..., + date_partition_column: builtins.str = ..., + created_timestamp_column: builtins.str = ..., + batch_source: feast.core.DataSource_pb2.DataSource | None = ..., + stream_source: feast.core.DataSource_pb2.DataSource | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "stream_source", b"stream_source"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "created_timestamp_column", b"created_timestamp_column", "date_partition_column", b"date_partition_column", "feature_columns", b"feature_columns", "feature_view_name", b"feature_view_name", "feature_view_name_alias", b"feature_view_name_alias", "join_key_map", b"join_key_map", "stream_source", b"stream_source", "timestamp_field", b"timestamp_field"]) -> None: ... 
+ +global___FeatureViewProjection = FeatureViewProjection diff --git a/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2_grpc.py b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/FeatureView_pb2.py b/sdk/python/feast/protos/feast/core/FeatureView_pb2.py new file mode 100644 index 0000000000..f1480593d9 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureView_pb2.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/FeatureView.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 +from feast.protos.feast.core import Feature_pb2 as feast_dot_core_dot_Feature__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66\x65\x61st/core/FeatureView.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\"c\n\x0b\x46\x65\x61tureView\x12)\n\x04spec\x18\x01 \x01(\x0b\x32\x1b.feast.core.FeatureViewSpec\x12)\n\x04meta\x18\x02 \x01(\x0b\x32\x1b.feast.core.FeatureViewMeta\"\xbd\x03\n\x0f\x46\x65\x61tureViewSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x10\n\x08\x65ntities\x18\x03 \x03(\t\x12+\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x31\n\x0e\x65ntity_columns\x18\x0c \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x13\n\x0b\x64\x65scription\x18\n \x01(\t\x12\x33\n\x04tags\x18\x05 \x03(\x0b\x32%.feast.core.FeatureViewSpec.TagsEntry\x12\r\n\x05owner\x18\x0b \x01(\t\x12&\n\x03ttl\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12,\n\x0c\x62\x61tch_source\x18\x07 \x01(\x0b\x32\x16.feast.core.DataSource\x12-\n\rstream_source\x18\t \x01(\x0b\x32\x16.feast.core.DataSource\x12\x0e\n\x06online\x18\x08 \x01(\x08\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xcc\x01\n\x0f\x46\x65\x61tureViewMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x46\n\x19materialization_intervals\x18\x03 \x03(\x0b\x32#.feast.core.MaterializationInterval\"w\n\x17MaterializationInterval\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampBU\n\x10\x66\x65\x61st.proto.coreB\x10\x46\x65\x61tureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = 
globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.FeatureView_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\020FeatureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_FEATUREVIEWSPEC_TAGSENTRY']._options = None + _globals['_FEATUREVIEWSPEC_TAGSENTRY']._serialized_options = b'8\001' + _globals['_FEATUREVIEW']._serialized_start=164 + _globals['_FEATUREVIEW']._serialized_end=263 + _globals['_FEATUREVIEWSPEC']._serialized_start=266 + _globals['_FEATUREVIEWSPEC']._serialized_end=711 + _globals['_FEATUREVIEWSPEC_TAGSENTRY']._serialized_start=668 + _globals['_FEATUREVIEWSPEC_TAGSENTRY']._serialized_end=711 + _globals['_FEATUREVIEWMETA']._serialized_start=714 + _globals['_FEATUREVIEWMETA']._serialized_end=918 + _globals['_MATERIALIZATIONINTERVAL']._serialized_start=920 + _globals['_MATERIALIZATIONINTERVAL']._serialized_end=1039 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/FeatureView_pb2.pyi b/sdk/python/feast/protos/feast/core/FeatureView_pb2.pyi new file mode 100644 index 0000000000..e1d4e2dfee --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureView_pb2.pyi @@ -0,0 +1,194 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2020 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import feast.core.DataSource_pb2 +import feast.core.Feature_pb2 +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class FeatureView(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___FeatureViewSpec: + """User-specified specifications of this feature view.""" + @property + def meta(self) -> global___FeatureViewMeta: + """System-populated metadata for this feature view.""" + def __init__( + self, + *, + spec: global___FeatureViewSpec | None = ..., + meta: global___FeatureViewMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... 
+ +global___FeatureView = FeatureView + +class FeatureViewSpec(google.protobuf.message.Message): + """Next available id: 13 + TODO(adchia): refactor common fields from this and ODFV into separate metadata proto + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ENTITIES_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + ENTITY_COLUMNS_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + OWNER_FIELD_NUMBER: builtins.int + TTL_FIELD_NUMBER: builtins.int + BATCH_SOURCE_FIELD_NUMBER: builtins.int + STREAM_SOURCE_FIELD_NUMBER: builtins.int + ONLINE_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the feature view. Must be unique. Not updated.""" + project: builtins.str + """Name of Feast project that this feature view belongs to.""" + @property + def entities(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """List of names of entities associated with this feature view.""" + @property + def features(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """List of specifications for each feature defined as part of this feature view.""" + @property + def entity_columns(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """List of specifications for each entity defined as part of this feature view.""" + description: builtins.str + """Description of the feature view.""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + owner: builtins.str + """Owner of the feature view.""" + @property + def ttl(self) -> google.protobuf.duration_pb2.Duration: + """Features in this feature view can only be retrieved from online serving + younger than ttl. 
Ttl is measured as the duration of time between + the feature's event timestamp and when the feature is retrieved + Feature values outside ttl will be returned as unset values and indicated to end user + """ + @property + def batch_source(self) -> feast.core.DataSource_pb2.DataSource: + """Batch/Offline DataSource where this view can retrieve offline feature data.""" + @property + def stream_source(self) -> feast.core.DataSource_pb2.DataSource: + """Streaming DataSource from where this view can consume "online" feature data.""" + online: builtins.bool + """Whether these features should be served online or not""" + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + entities: collections.abc.Iterable[builtins.str] | None = ..., + features: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + entity_columns: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + description: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + owner: builtins.str = ..., + ttl: google.protobuf.duration_pb2.Duration | None = ..., + batch_source: feast.core.DataSource_pb2.DataSource | None = ..., + stream_source: feast.core.DataSource_pb2.DataSource | None = ..., + online: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "stream_source", b"stream_source", "ttl", b"ttl"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "description", b"description", "entities", b"entities", "entity_columns", b"entity_columns", "features", b"features", "name", b"name", "online", b"online", "owner", b"owner", "project", b"project", "stream_source", b"stream_source", "tags", b"tags", "ttl", b"ttl"]) -> None: ... + +global___FeatureViewSpec = FeatureViewSpec + +class FeatureViewMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + MATERIALIZATION_INTERVALS_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature View is created""" + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature View is last updated""" + @property + def materialization_intervals(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MaterializationInterval]: + """List of pairs (start_time, end_time) for which this feature view has been materialized.""" + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + materialization_intervals: collections.abc.Iterable[global___MaterializationInterval] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp", "materialization_intervals", b"materialization_intervals"]) -> None: ... 
+ +global___FeatureViewMeta = FeatureViewMeta + +class MaterializationInterval(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + START_TIME_FIELD_NUMBER: builtins.int + END_TIME_FIELD_NUMBER: builtins.int + @property + def start_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def end_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + start_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + end_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["end_time", b"end_time", "start_time", b"start_time"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["end_time", b"end_time", "start_time", b"start_time"]) -> None: ... + +global___MaterializationInterval = MaterializationInterval diff --git a/sdk/python/feast/protos/feast/core/FeatureView_pb2_grpc.py b/sdk/python/feast/protos/feast/core/FeatureView_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/FeatureView_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Feature_pb2.py b/sdk/python/feast/protos/feast/core/Feature_pb2.py new file mode 100644 index 0000000000..dd7c6008ef --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Feature_pb2.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/Feature.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18\x66\x65\x61st/core/Feature.proto\x12\nfeast.core\x1a\x17\x66\x65\x61st/types/Value.proto\"\xc3\x01\n\rFeatureSpecV2\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\nvalue_type\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\x12\x31\n\x04tags\x18\x03 \x03(\x0b\x32#.feast.core.FeatureSpecV2.TagsEntry\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42Q\n\x10\x66\x65\x61st.proto.coreB\x0c\x46\x65\x61tureProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Feature_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\014FeatureProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_FEATURESPECV2_TAGSENTRY']._options = None + _globals['_FEATURESPECV2_TAGSENTRY']._serialized_options = b'8\001' + _globals['_FEATURESPECV2']._serialized_start=66 + _globals['_FEATURESPECV2']._serialized_end=261 + _globals['_FEATURESPECV2_TAGSENTRY']._serialized_start=218 + 
_globals['_FEATURESPECV2_TAGSENTRY']._serialized_end=261 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Feature_pb2.pyi b/sdk/python/feast/protos/feast/core/Feature_pb2.pyi new file mode 100644 index 0000000000..f4235b0965 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Feature_pb2.pyi @@ -0,0 +1,75 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2020 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class FeatureSpecV2(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + VALUE_TYPE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the feature. Not updatable.""" + value_type: feast.types.Value_pb2.ValueType.Enum.ValueType + """Value type of the feature. Not updatable.""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """Tags for user defined metadata on a feature""" + description: builtins.str + """Description of the feature.""" + def __init__( + self, + *, + name: builtins.str = ..., + value_type: feast.types.Value_pb2.ValueType.Enum.ValueType = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + description: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "name", b"name", "tags", b"tags", "value_type", b"value_type"]) -> None: ... + +global___FeatureSpecV2 = FeatureSpecV2 diff --git a/sdk/python/feast/protos/feast/core/Feature_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Feature_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Feature_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/InfraObject_pb2.py b/sdk/python/feast/protos/feast/core/InfraObject_pb2.py new file mode 100644 index 0000000000..0804aecbf6 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/InfraObject_pb2.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/InfraObject.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.core import DatastoreTable_pb2 as feast_dot_core_dot_DatastoreTable__pb2 +from feast.protos.feast.core import DynamoDBTable_pb2 as feast_dot_core_dot_DynamoDBTable__pb2 +from feast.protos.feast.core import SqliteTable_pb2 as feast_dot_core_dot_SqliteTable__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66\x65\x61st/core/InfraObject.proto\x12\nfeast.core\x1a\x1f\x66\x65\x61st/core/DatastoreTable.proto\x1a\x1e\x66\x65\x61st/core/DynamoDBTable.proto\x1a\x1c\x66\x65\x61st/core/SqliteTable.proto\"7\n\x05Infra\x12.\n\rinfra_objects\x18\x01 \x03(\x0b\x32\x17.feast.core.InfraObject\"\xb6\x02\n\x0bInfraObject\x12\x1f\n\x17infra_object_class_type\x18\x01 \x01(\t\x12\x33\n\x0e\x64ynamodb_table\x18\x02 \x01(\x0b\x32\x19.feast.core.DynamoDBTableH\x00\x12\x35\n\x0f\x64\x61tastore_table\x18\x03 \x01(\x0b\x32\x1a.feast.core.DatastoreTableH\x00\x12/\n\x0csqlite_table\x18\x04 \x01(\x0b\x32\x17.feast.core.SqliteTableH\x00\x12;\n\x0c\x63ustom_infra\x18\x64 \x01(\x0b\x32#.feast.core.InfraObject.CustomInfraH\x00\x1a\x1c\n\x0b\x43ustomInfra\x12\r\n\x05\x66ield\x18\x01 \x01(\x0c\x42\x0e\n\x0cinfra_objectBU\n\x10\x66\x65\x61st.proto.coreB\x10InfraObjectProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.InfraObject_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\020InfraObjectProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_INFRA']._serialized_start=139 + _globals['_INFRA']._serialized_end=194 + _globals['_INFRAOBJECT']._serialized_start=197 + _globals['_INFRAOBJECT']._serialized_end=507 + _globals['_INFRAOBJECT_CUSTOMINFRA']._serialized_start=463 + _globals['_INFRAOBJECT_CUSTOMINFRA']._serialized_end=491 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/InfraObject_pb2.pyi b/sdk/python/feast/protos/feast/core/InfraObject_pb2.pyi new file mode 100644 index 0000000000..38b31b7317 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/InfraObject_pb2.pyi @@ -0,0 +1,101 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +* Copyright 2021 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. 
+* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +""" +import builtins +import collections.abc +import feast.core.DatastoreTable_pb2 +import feast.core.DynamoDBTable_pb2 +import feast.core.SqliteTable_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Infra(google.protobuf.message.Message): + """Represents a set of infrastructure objects managed by Feast""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + INFRA_OBJECTS_FIELD_NUMBER: builtins.int + @property + def infra_objects(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InfraObject]: + """List of infrastructure objects managed by Feast""" + def __init__( + self, + *, + infra_objects: collections.abc.Iterable[global___InfraObject] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["infra_objects", b"infra_objects"]) -> None: ... + +global___Infra = Infra + +class InfraObject(google.protobuf.message.Message): + """Represents a single infrastructure object managed by Feast""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class CustomInfra(google.protobuf.message.Message): + """Allows for custom infra objects to be added""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FIELD_FIELD_NUMBER: builtins.int + field: builtins.bytes + def __init__( + self, + *, + field: builtins.bytes = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field"]) -> None: ... + + INFRA_OBJECT_CLASS_TYPE_FIELD_NUMBER: builtins.int + DYNAMODB_TABLE_FIELD_NUMBER: builtins.int + DATASTORE_TABLE_FIELD_NUMBER: builtins.int + SQLITE_TABLE_FIELD_NUMBER: builtins.int + CUSTOM_INFRA_FIELD_NUMBER: builtins.int + infra_object_class_type: builtins.str + """Represents the Python class for the infrastructure object""" + @property + def dynamodb_table(self) -> feast.core.DynamoDBTable_pb2.DynamoDBTable: ... + @property + def datastore_table(self) -> feast.core.DatastoreTable_pb2.DatastoreTable: ... + @property + def sqlite_table(self) -> feast.core.SqliteTable_pb2.SqliteTable: ... + @property + def custom_infra(self) -> global___InfraObject.CustomInfra: ... + def __init__( + self, + *, + infra_object_class_type: builtins.str = ..., + dynamodb_table: feast.core.DynamoDBTable_pb2.DynamoDBTable | None = ..., + datastore_table: feast.core.DatastoreTable_pb2.DatastoreTable | None = ..., + sqlite_table: feast.core.SqliteTable_pb2.SqliteTable | None = ..., + custom_infra: global___InfraObject.CustomInfra | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["custom_infra", b"custom_infra", "datastore_table", b"datastore_table", "dynamodb_table", b"dynamodb_table", "infra_object", b"infra_object", "sqlite_table", b"sqlite_table"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["custom_infra", b"custom_infra", "datastore_table", b"datastore_table", "dynamodb_table", b"dynamodb_table", "infra_object", b"infra_object", "infra_object_class_type", b"infra_object_class_type", "sqlite_table", b"sqlite_table"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["infra_object", b"infra_object"]) -> typing_extensions.Literal["dynamodb_table", "datastore_table", "sqlite_table", "custom_infra"] | None: ... + +global___InfraObject = InfraObject diff --git a/sdk/python/feast/protos/feast/core/InfraObject_pb2_grpc.py b/sdk/python/feast/protos/feast/core/InfraObject_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/InfraObject_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.py b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.py new file mode 100644 index 0000000000..a27c4fba3b --- /dev/null +++ b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/OnDemandFeatureView.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import FeatureView_pb2 as feast_dot_core_dot_FeatureView__pb2 +from feast.protos.feast.core import FeatureViewProjection_pb2 as feast_dot_core_dot_FeatureViewProjection__pb2 +from feast.protos.feast.core import Feature_pb2 as feast_dot_core_dot_Feature__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 +from feast.protos.feast.core import Transformation_pb2 as feast_dot_core_dot_Transformation__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$feast/core/OnDemandFeatureView.proto\x12\nfeast.core\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1c\x66\x65\x61st/core/FeatureView.proto\x1a&feast/core/FeatureViewProjection.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x1f\x66\x65\x61st/core/Transformation.proto\"{\n\x13OnDemandFeatureView\x12\x31\n\x04spec\x18\x01 \x01(\x0b\x32#.feast.core.OnDemandFeatureViewSpec\x12\x31\n\x04meta\x18\x02 \x01(\x0b\x32#.feast.core.OnDemandFeatureViewMeta\"\xfd\x04\n\x17OnDemandFeatureViewSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12+\n\x08\x66\x65\x61tures\x18\x03 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x41\n\x07sources\x18\x04 \x03(\x0b\x32\x30.feast.core.OnDemandFeatureViewSpec.SourcesEntry\x12\x42\n\x15user_defined_function\x18\x05 \x01(\x0b\x32\x1f.feast.core.UserDefinedFunctionB\x02\x18\x01\x12\x43\n\x16\x66\x65\x61ture_transformation\x18\n \x01(\x0b\x32#.feast.core.FeatureTransformationV2\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12;\n\x04tags\x18\x07 
\x03(\x0b\x32-.feast.core.OnDemandFeatureViewSpec.TagsEntry\x12\r\n\x05owner\x18\x08 \x01(\t\x12\x0c\n\x04mode\x18\x0b \x01(\t\x12\x1d\n\x15write_to_online_store\x18\x0c \x01(\x08\x12\x10\n\x08\x65ntities\x18\r \x03(\t\x12\x31\n\x0e\x65ntity_columns\x18\x0e \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x1aJ\n\x0cSourcesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.feast.core.OnDemandSource:\x02\x38\x01\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x8c\x01\n\x17OnDemandFeatureViewMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xc8\x01\n\x0eOnDemandSource\x12/\n\x0c\x66\x65\x61ture_view\x18\x01 \x01(\x0b\x32\x17.feast.core.FeatureViewH\x00\x12\x44\n\x17\x66\x65\x61ture_view_projection\x18\x03 \x01(\x0b\x32!.feast.core.FeatureViewProjectionH\x00\x12\x35\n\x13request_data_source\x18\x02 \x01(\x0b\x32\x16.feast.core.DataSourceH\x00\x42\x08\n\x06source\"H\n\x13UserDefinedFunction\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x62ody\x18\x02 \x01(\x0c\x12\x11\n\tbody_text\x18\x03 \x01(\t:\x02\x18\x01\x42]\n\x10\x66\x65\x61st.proto.coreB\x18OnDemandFeatureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.OnDemandFeatureView_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\030OnDemandFeatureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_ONDEMANDFEATUREVIEWSPEC_SOURCESENTRY']._options = None + _globals['_ONDEMANDFEATUREVIEWSPEC_SOURCESENTRY']._serialized_options = b'8\001' + _globals['_ONDEMANDFEATUREVIEWSPEC_TAGSENTRY']._options = None + _globals['_ONDEMANDFEATUREVIEWSPEC_TAGSENTRY']._serialized_options = b'8\001' + _globals['_ONDEMANDFEATUREVIEWSPEC'].fields_by_name['user_defined_function']._options = None + _globals['_ONDEMANDFEATUREVIEWSPEC'].fields_by_name['user_defined_function']._serialized_options = b'\030\001' + _globals['_USERDEFINEDFUNCTION']._options = None + _globals['_USERDEFINEDFUNCTION']._serialized_options = b'\030\001' + _globals['_ONDEMANDFEATUREVIEW']._serialized_start=243 + _globals['_ONDEMANDFEATUREVIEW']._serialized_end=366 + _globals['_ONDEMANDFEATUREVIEWSPEC']._serialized_start=369 + _globals['_ONDEMANDFEATUREVIEWSPEC']._serialized_end=1006 + _globals['_ONDEMANDFEATUREVIEWSPEC_SOURCESENTRY']._serialized_start=887 + _globals['_ONDEMANDFEATUREVIEWSPEC_SOURCESENTRY']._serialized_end=961 + _globals['_ONDEMANDFEATUREVIEWSPEC_TAGSENTRY']._serialized_start=963 + _globals['_ONDEMANDFEATUREVIEWSPEC_TAGSENTRY']._serialized_end=1006 + _globals['_ONDEMANDFEATUREVIEWMETA']._serialized_start=1009 + _globals['_ONDEMANDFEATUREVIEWMETA']._serialized_end=1149 + _globals['_ONDEMANDSOURCE']._serialized_start=1152 + _globals['_ONDEMANDSOURCE']._serialized_end=1352 + _globals['_USERDEFINEDFUNCTION']._serialized_start=1354 + _globals['_USERDEFINEDFUNCTION']._serialized_end=1426 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.pyi b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.pyi new file mode 100644 index 0000000000..b2ec15b186 --- /dev/null +++ 
b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2.pyi @@ -0,0 +1,232 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2020 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import feast.core.DataSource_pb2 +import feast.core.FeatureViewProjection_pb2 +import feast.core.FeatureView_pb2 +import feast.core.Feature_pb2 +import feast.core.Transformation_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class OnDemandFeatureView(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___OnDemandFeatureViewSpec: + """User-specified specifications of this feature view.""" + @property + def meta(self) -> global___OnDemandFeatureViewMeta: ... + def __init__( + self, + *, + spec: global___OnDemandFeatureViewSpec | None = ..., + meta: global___OnDemandFeatureViewMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... + +global___OnDemandFeatureView = OnDemandFeatureView + +class OnDemandFeatureViewSpec(google.protobuf.message.Message): + """Next available id: 9""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class SourcesEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> global___OnDemandSource: ... + def __init__( + self, + *, + key: builtins.str = ..., + value: global___OnDemandSource | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + SOURCES_FIELD_NUMBER: builtins.int + USER_DEFINED_FUNCTION_FIELD_NUMBER: builtins.int + FEATURE_TRANSFORMATION_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + OWNER_FIELD_NUMBER: builtins.int + MODE_FIELD_NUMBER: builtins.int + WRITE_TO_ONLINE_STORE_FIELD_NUMBER: builtins.int + ENTITIES_FIELD_NUMBER: builtins.int + ENTITY_COLUMNS_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the feature view. Must be unique. Not updated.""" + project: builtins.str + """Name of Feast project that this feature view belongs to.""" + @property + def features(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """List of features specifications for each feature defined with this feature view.""" + @property + def sources(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___OnDemandSource]: + """Map of sources for this feature view.""" + @property + def user_defined_function(self) -> global___UserDefinedFunction: ... + @property + def feature_transformation(self) -> feast.core.Transformation_pb2.FeatureTransformationV2: + """Oneof with {user_defined_function, on_demand_substrait_transformation}""" + description: builtins.str + """Description of the on demand feature view.""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata.""" + owner: builtins.str + """Owner of the on demand feature view.""" + mode: builtins.str + write_to_online_store: builtins.bool + @property + def entities(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """List of names of entities associated with this feature view.""" + @property + def entity_columns(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """List of specifications for each entity defined as part of this feature view.""" + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + features: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + sources: collections.abc.Mapping[builtins.str, global___OnDemandSource] | None = ..., + user_defined_function: global___UserDefinedFunction | None = ..., + feature_transformation: feast.core.Transformation_pb2.FeatureTransformationV2 | None = ..., + description: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + owner: builtins.str = ..., + mode: builtins.str = ..., + write_to_online_store: builtins.bool = ..., + entities: collections.abc.Iterable[builtins.str] | None = ..., + entity_columns: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["feature_transformation", b"feature_transformation", "user_defined_function", b"user_defined_function"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "entities", b"entities", "entity_columns", b"entity_columns", "feature_transformation", b"feature_transformation", "features", b"features", "mode", b"mode", "name", b"name", "owner", b"owner", "project", b"project", "sources", b"sources", "tags", b"tags", "user_defined_function", b"user_defined_function", "write_to_online_store", b"write_to_online_store"]) -> None: ... + +global___OnDemandFeatureViewSpec = OnDemandFeatureViewSpec + +class OnDemandFeatureViewMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature View is created""" + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time where this Feature View is last updated""" + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> None: ... + +global___OnDemandFeatureViewMeta = OnDemandFeatureViewMeta + +class OnDemandSource(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_VIEW_FIELD_NUMBER: builtins.int + FEATURE_VIEW_PROJECTION_FIELD_NUMBER: builtins.int + REQUEST_DATA_SOURCE_FIELD_NUMBER: builtins.int + @property + def feature_view(self) -> feast.core.FeatureView_pb2.FeatureView: ... + @property + def feature_view_projection(self) -> feast.core.FeatureViewProjection_pb2.FeatureViewProjection: ... + @property + def request_data_source(self) -> feast.core.DataSource_pb2.DataSource: ... + def __init__( + self, + *, + feature_view: feast.core.FeatureView_pb2.FeatureView | None = ..., + feature_view_projection: feast.core.FeatureViewProjection_pb2.FeatureViewProjection | None = ..., + request_data_source: feast.core.DataSource_pb2.DataSource | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["feature_view", b"feature_view", "feature_view_projection", b"feature_view_projection", "request_data_source", b"request_data_source", "source", b"source"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["feature_view", b"feature_view", "feature_view_projection", b"feature_view_projection", "request_data_source", b"request_data_source", "source", b"source"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["source", b"source"]) -> typing_extensions.Literal["feature_view", "feature_view_projection", "request_data_source"] | None: ... 
+ +global___OnDemandSource = OnDemandSource + +class UserDefinedFunction(google.protobuf.message.Message): + """Serialized representation of python function.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + BODY_FIELD_NUMBER: builtins.int + BODY_TEXT_FIELD_NUMBER: builtins.int + name: builtins.str + """The function name""" + body: builtins.bytes + """The python-syntax function body (serialized by dill)""" + body_text: builtins.str + """The string representation of the udf""" + def __init__( + self, + *, + name: builtins.str = ..., + body: builtins.bytes = ..., + body_text: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["body", b"body", "body_text", b"body_text", "name", b"name"]) -> None: ... + +global___UserDefinedFunction = UserDefinedFunction diff --git a/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2_grpc.py b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/OnDemandFeatureView_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Permission_pb2.py b/sdk/python/feast/protos/feast/core/Permission_pb2.py new file mode 100644 index 0000000000..822ad0261b --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Permission_pb2.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/Permission.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.core import Policy_pb2 as feast_dot_core_dot_Policy__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66\x65\x61st/core/Permission.proto\x12\nfeast.core\x1a\x17\x66\x65\x61st/core/Policy.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"`\n\nPermission\x12(\n\x04spec\x18\x01 \x01(\x0b\x32\x1a.feast.core.PermissionSpec\x12(\n\x04meta\x18\x02 \x01(\x0b\x32\x1a.feast.core.PermissionMeta\"\x9f\x06\n\x0ePermissionSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12.\n\x05types\x18\x03 \x03(\x0e\x32\x1f.feast.core.PermissionSpec.Type\x12\x14\n\x0cname_pattern\x18\x04 \x01(\t\x12\x43\n\rrequired_tags\x18\x05 \x03(\x0b\x32,.feast.core.PermissionSpec.RequiredTagsEntry\x12\x39\n\x07\x61\x63tions\x18\x06 \x03(\x0e\x32(.feast.core.PermissionSpec.AuthzedAction\x12\"\n\x06policy\x18\x07 \x01(\x0b\x32\x12.feast.core.Policy\x12\x32\n\x04tags\x18\x08 \x03(\x0b\x32$.feast.core.PermissionSpec.TagsEntry\x1a\x33\n\x11RequiredTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
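Reviewer note: the `OnDemandFeatureView_pb2`/`.pyi` stubs above are generated API surface only. As a minimal sketch of how the regenerated fields (`mode`, `write_to_online_store`, `entities`) are exercised — assuming the `feast` package built from this PR is importable; every name and value below is a hypothetical illustration, not something from this diff:

```python
from feast.protos.feast.core import OnDemandFeatureView_pb2 as odfv_pb2

spec = odfv_pb2.OnDemandFeatureViewSpec(
    name="driver_stats_fresh",      # hypothetical view name
    project="demo",
    mode="python",
    write_to_online_store=False,
    entities=["driver"],            # repeated scalar field
)
spec.tags["team"] = "ml-platform"   # TagsEntry map field

view = odfv_pb2.OnDemandFeatureView(spec=spec)
view.meta.created_timestamp.GetCurrentTime()

# Round-trip through the wire format, as the registry would.
decoded = odfv_pb2.OnDemandFeatureView.FromString(view.SerializeToString())
assert decoded.spec.mode == "python"
```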
\x01(\t:\x02\x38\x01\"\x89\x01\n\rAuthzedAction\x12\n\n\x06\x43REATE\x10\x00\x12\x0c\n\x08\x44\x45SCRIBE\x10\x01\x12\n\n\x06UPDATE\x10\x02\x12\n\n\x06\x44\x45LETE\x10\x03\x12\x0f\n\x0bREAD_ONLINE\x10\x04\x12\x10\n\x0cREAD_OFFLINE\x10\x05\x12\x10\n\x0cWRITE_ONLINE\x10\x06\x12\x11\n\rWRITE_OFFLINE\x10\x07\"\xe1\x01\n\x04Type\x12\x10\n\x0c\x46\x45\x41TURE_VIEW\x10\x00\x12\x1a\n\x16ON_DEMAND_FEATURE_VIEW\x10\x01\x12\x16\n\x12\x42\x41TCH_FEATURE_VIEW\x10\x02\x12\x17\n\x13STREAM_FEATURE_VIEW\x10\x03\x12\n\n\x06\x45NTITY\x10\x04\x12\x13\n\x0f\x46\x45\x41TURE_SERVICE\x10\x05\x12\x0f\n\x0b\x44\x41TA_SOURCE\x10\x06\x12\x18\n\x14VALIDATION_REFERENCE\x10\x07\x12\x11\n\rSAVED_DATASET\x10\x08\x12\x0e\n\nPERMISSION\x10\t\x12\x0b\n\x07PROJECT\x10\n\"\x83\x01\n\x0ePermissionMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampBT\n\x10\x66\x65\x61st.proto.coreB\x0fPermissionProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Permission_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\017PermissionProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_PERMISSIONSPEC_REQUIREDTAGSENTRY']._options = None + _globals['_PERMISSIONSPEC_REQUIREDTAGSENTRY']._serialized_options = b'8\001' + _globals['_PERMISSIONSPEC_TAGSENTRY']._options = None + _globals['_PERMISSIONSPEC_TAGSENTRY']._serialized_options = b'8\001' + _globals['_PERMISSION']._serialized_start=101 + _globals['_PERMISSION']._serialized_end=197 + _globals['_PERMISSIONSPEC']._serialized_start=200 + _globals['_PERMISSIONSPEC']._serialized_end=999 + _globals['_PERMISSIONSPEC_REQUIREDTAGSENTRY']._serialized_start=535 + _globals['_PERMISSIONSPEC_REQUIREDTAGSENTRY']._serialized_end=586 + _globals['_PERMISSIONSPEC_TAGSENTRY']._serialized_start=588 + _globals['_PERMISSIONSPEC_TAGSENTRY']._serialized_end=631 + _globals['_PERMISSIONSPEC_AUTHZEDACTION']._serialized_start=634 + _globals['_PERMISSIONSPEC_AUTHZEDACTION']._serialized_end=771 + _globals['_PERMISSIONSPEC_TYPE']._serialized_start=774 + _globals['_PERMISSIONSPEC_TYPE']._serialized_end=999 + _globals['_PERMISSIONMETA']._serialized_start=1002 + _globals['_PERMISSIONMETA']._serialized_end=1133 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Permission_pb2.pyi b/sdk/python/feast/protos/feast/core/Permission_pb2.pyi new file mode 100644 index 0000000000..1155c13188 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Permission_pb2.pyi @@ -0,0 +1,195 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" +import builtins +import collections.abc +import feast.core.Policy_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Permission(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___PermissionSpec: + """User-specified specifications of this permission.""" + @property + def meta(self) -> global___PermissionMeta: + """System-populated metadata for this permission.""" + def __init__( + self, + *, + spec: global___PermissionSpec | None = ..., + meta: global___PermissionMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... + +global___Permission = Permission + +class PermissionSpec(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _AuthzedAction: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _AuthzedActionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[PermissionSpec._AuthzedAction.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + CREATE: PermissionSpec._AuthzedAction.ValueType # 0 + DESCRIBE: PermissionSpec._AuthzedAction.ValueType # 1 + UPDATE: PermissionSpec._AuthzedAction.ValueType # 2 + DELETE: PermissionSpec._AuthzedAction.ValueType # 3 + READ_ONLINE: PermissionSpec._AuthzedAction.ValueType # 4 + READ_OFFLINE: PermissionSpec._AuthzedAction.ValueType # 5 + WRITE_ONLINE: PermissionSpec._AuthzedAction.ValueType # 6 + WRITE_OFFLINE: PermissionSpec._AuthzedAction.ValueType # 7 + + class AuthzedAction(_AuthzedAction, metaclass=_AuthzedActionEnumTypeWrapper): ... 
+ CREATE: PermissionSpec.AuthzedAction.ValueType # 0 + DESCRIBE: PermissionSpec.AuthzedAction.ValueType # 1 + UPDATE: PermissionSpec.AuthzedAction.ValueType # 2 + DELETE: PermissionSpec.AuthzedAction.ValueType # 3 + READ_ONLINE: PermissionSpec.AuthzedAction.ValueType # 4 + READ_OFFLINE: PermissionSpec.AuthzedAction.ValueType # 5 + WRITE_ONLINE: PermissionSpec.AuthzedAction.ValueType # 6 + WRITE_OFFLINE: PermissionSpec.AuthzedAction.ValueType # 7 + + class _Type: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _TypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[PermissionSpec._Type.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + FEATURE_VIEW: PermissionSpec._Type.ValueType # 0 + ON_DEMAND_FEATURE_VIEW: PermissionSpec._Type.ValueType # 1 + BATCH_FEATURE_VIEW: PermissionSpec._Type.ValueType # 2 + STREAM_FEATURE_VIEW: PermissionSpec._Type.ValueType # 3 + ENTITY: PermissionSpec._Type.ValueType # 4 + FEATURE_SERVICE: PermissionSpec._Type.ValueType # 5 + DATA_SOURCE: PermissionSpec._Type.ValueType # 6 + VALIDATION_REFERENCE: PermissionSpec._Type.ValueType # 7 + SAVED_DATASET: PermissionSpec._Type.ValueType # 8 + PERMISSION: PermissionSpec._Type.ValueType # 9 + PROJECT: PermissionSpec._Type.ValueType # 10 + + class Type(_Type, metaclass=_TypeEnumTypeWrapper): ... + FEATURE_VIEW: PermissionSpec.Type.ValueType # 0 + ON_DEMAND_FEATURE_VIEW: PermissionSpec.Type.ValueType # 1 + BATCH_FEATURE_VIEW: PermissionSpec.Type.ValueType # 2 + STREAM_FEATURE_VIEW: PermissionSpec.Type.ValueType # 3 + ENTITY: PermissionSpec.Type.ValueType # 4 + FEATURE_SERVICE: PermissionSpec.Type.ValueType # 5 + DATA_SOURCE: PermissionSpec.Type.ValueType # 6 + VALIDATION_REFERENCE: PermissionSpec.Type.ValueType # 7 + SAVED_DATASET: PermissionSpec.Type.ValueType # 8 + PERMISSION: PermissionSpec.Type.ValueType # 9 + PROJECT: PermissionSpec.Type.ValueType # 10 + + class RequiredTagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + TYPES_FIELD_NUMBER: builtins.int + NAME_PATTERN_FIELD_NUMBER: builtins.int + REQUIRED_TAGS_FIELD_NUMBER: builtins.int + ACTIONS_FIELD_NUMBER: builtins.int + POLICY_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the permission. Must be unique. Not updated.""" + project: builtins.str + """Name of Feast project.""" + @property + def types(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___PermissionSpec.Type.ValueType]: ... 
+ name_pattern: builtins.str + @property + def required_tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + @property + def actions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___PermissionSpec.AuthzedAction.ValueType]: + """List of actions.""" + @property + def policy(self) -> feast.core.Policy_pb2.Policy: + """the policy.""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + types: collections.abc.Iterable[global___PermissionSpec.Type.ValueType] | None = ..., + name_pattern: builtins.str = ..., + required_tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + actions: collections.abc.Iterable[global___PermissionSpec.AuthzedAction.ValueType] | None = ..., + policy: feast.core.Policy_pb2.Policy | None = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["policy", b"policy"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["actions", b"actions", "name", b"name", "name_pattern", b"name_pattern", "policy", b"policy", "project", b"project", "required_tags", b"required_tags", "tags", b"tags", "types", b"types"]) -> None: ... + +global___PermissionSpec = PermissionSpec + +class PermissionMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> None: ... + +global___PermissionMeta = PermissionMeta diff --git a/sdk/python/feast/protos/feast/core/Permission_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Permission_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Permission_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Policy_pb2.py b/sdk/python/feast/protos/feast/core/Policy_pb2.py new file mode 100644 index 0000000000..2fac866115 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Policy_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
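Reviewer note: `Permission_pb2` is new in this PR; the nested `Type` and `AuthzedAction` enums are plain ints on the wire, with values promoted onto `PermissionSpec` as usual for generated Python. A sketch of composing a `Permission` with a role-based `Policy` (all names below are hypothetical):

```python
from feast.protos.feast.core import Permission_pb2, Policy_pb2

spec = Permission_pb2.PermissionSpec(
    name="read-only-views",          # hypothetical permission name
    project="demo",
    name_pattern=".*",
    types=[
        Permission_pb2.PermissionSpec.FEATURE_VIEW,
        Permission_pb2.PermissionSpec.ON_DEMAND_FEATURE_VIEW,
    ],
    actions=[
        Permission_pb2.PermissionSpec.DESCRIBE,
        Permission_pb2.PermissionSpec.READ_ONLINE,
    ],
    policy=Policy_pb2.Policy(
        name="reader-policy",        # hypothetical policy name
        project="demo",
        role_based_policy=Policy_pb2.RoleBasedPolicy(roles=["reader"]),
    ),
)
perm = Permission_pb2.Permission(spec=spec)
perm.meta.created_timestamp.GetCurrentTime()
assert perm.spec.actions[0] == Permission_pb2.PermissionSpec.DESCRIBE  # == 1
```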
+# source: feast/core/Policy.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x66\x65\x61st/core/Policy.proto\x12\nfeast.core\"p\n\x06Policy\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x38\n\x11role_based_policy\x18\x03 \x01(\x0b\x32\x1b.feast.core.RoleBasedPolicyH\x00\x42\r\n\x0bpolicy_type\" \n\x0fRoleBasedPolicy\x12\r\n\x05roles\x18\x01 \x03(\tBP\n\x10\x66\x65\x61st.proto.coreB\x0bPolicyProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Policy_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\013PolicyProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_POLICY']._serialized_start=39 + _globals['_POLICY']._serialized_end=151 + _globals['_ROLEBASEDPOLICY']._serialized_start=153 + _globals['_ROLEBASEDPOLICY']._serialized_end=185 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Policy_pb2.pyi b/sdk/python/feast/protos/feast/core/Policy_pb2.pyi new file mode 100644 index 0000000000..f19b18fff4 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Policy_pb2.pyi @@ -0,0 +1,58 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Policy(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ROLE_BASED_POLICY_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the policy.""" + project: builtins.str + """Name of Feast project.""" + @property + def role_based_policy(self) -> global___RoleBasedPolicy: ... + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + role_based_policy: global___RoleBasedPolicy | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["policy_type", b"policy_type", "role_based_policy", b"role_based_policy"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "policy_type", b"policy_type", "project", b"project", "role_based_policy", b"role_based_policy"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["policy_type", b"policy_type"]) -> typing_extensions.Literal["role_based_policy"] | None: ... 
+ +global___Policy = Policy + +class RoleBasedPolicy(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ROLES_FIELD_NUMBER: builtins.int + @property + def roles(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """List of roles in this policy.""" + def __init__( + self, + *, + roles: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["roles", b"roles"]) -> None: ... + +global___RoleBasedPolicy = RoleBasedPolicy diff --git a/sdk/python/feast/protos/feast/core/Policy_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Policy_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Policy_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Project_pb2.py b/sdk/python/feast/protos/feast/core/Project_pb2.py new file mode 100644 index 0000000000..cfbf122014 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Project_pb2.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/Project.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18\x66\x65\x61st/core/Project.proto\x12\nfeast.core\x1a\x1fgoogle/protobuf/timestamp.proto\"W\n\x07Project\x12%\n\x04spec\x18\x01 \x01(\x0b\x32\x17.feast.core.ProjectSpec\x12%\n\x04meta\x18\x02 \x01(\x0b\x32\x17.feast.core.ProjectMeta\"\x9d\x01\n\x0bProjectSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12/\n\x04tags\x18\x03 \x03(\x0b\x32!.feast.core.ProjectSpec.TagsEntry\x12\r\n\x05owner\x18\x04 \x01(\t\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x80\x01\n\x0bProjectMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampBQ\n\x10\x66\x65\x61st.proto.coreB\x0cProjectProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Project_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\014ProjectProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_PROJECTSPEC_TAGSENTRY']._options = None + _globals['_PROJECTSPEC_TAGSENTRY']._serialized_options = b'8\001' + _globals['_PROJECT']._serialized_start=73 + _globals['_PROJECT']._serialized_end=160 + _globals['_PROJECTSPEC']._serialized_start=163 + _globals['_PROJECTSPEC']._serialized_end=320 + _globals['_PROJECTSPEC_TAGSENTRY']._serialized_start=277 + 
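Reviewer note: `Policy.policy_type` is a oneof with a single member today (`role_based_policy`); writing through the member is enough to select it. A quick sketch with hypothetical values:

```python
from feast.protos.feast.core import Policy_pb2

policy = Policy_pb2.Policy(name="admin-policy", project="demo")
policy.role_based_policy.roles.extend(["admin", "owner"])

# Mutating a oneof member selects it in the oneof group.
assert policy.WhichOneof("policy_type") == "role_based_policy"
assert policy.HasField("role_based_policy")
```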
_globals['_PROJECTSPEC_TAGSENTRY']._serialized_end=320 + _globals['_PROJECTMETA']._serialized_start=323 + _globals['_PROJECTMETA']._serialized_end=451 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Project_pb2.pyi b/sdk/python/feast/protos/feast/core/Project_pb2.pyi new file mode 100644 index 0000000000..e3cce2ec42 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Project_pb2.pyi @@ -0,0 +1,119 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +* Copyright 2020 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Project(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___ProjectSpec: + """User-specified specifications of this entity.""" + @property + def meta(self) -> global___ProjectMeta: + """System-populated metadata for this entity.""" + def __init__( + self, + *, + spec: global___ProjectSpec | None = ..., + meta: global___ProjectMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... + +global___Project = Project + +class ProjectSpec(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + OWNER_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the Project""" + description: builtins.str + """Description of the Project""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + owner: builtins.str + """Owner of the Project""" + def __init__( + self, + *, + name: builtins.str = ..., + description: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + owner: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "name", b"name", "owner", b"owner", "tags", b"tags"]) -> None: ... + +global___ProjectSpec = ProjectSpec + +class ProjectMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time when the Project is created""" + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time when the Project is last updated with registry changes (Apply stage)""" + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp"]) -> None: ... + +global___ProjectMeta = ProjectMeta diff --git a/sdk/python/feast/protos/feast/core/Project_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Project_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Project_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Registry_pb2.py b/sdk/python/feast/protos/feast/core/Registry_pb2.py new file mode 100644 index 0000000000..671958d80c --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Registry_pb2.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
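Reviewer note: `Project_pb2` follows the usual spec/meta split used by the other registry objects. A minimal construction sketch, with hypothetical metadata values:

```python
from feast.protos.feast.core import Project_pb2

spec = Project_pb2.ProjectSpec(
    name="demo",
    description="Example project",          # hypothetical description
    owner="platform-team@example.com",      # hypothetical owner
)
spec.tags["env"] = "staging"                # TagsEntry map field

project = Project_pb2.Project(spec=spec)
project.meta.created_timestamp.GetCurrentTime()
project.meta.last_updated_timestamp.CopyFrom(project.meta.created_timestamp)
```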
+# source: feast/core/Registry.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.core import Entity_pb2 as feast_dot_core_dot_Entity__pb2 +from feast.protos.feast.core import FeatureService_pb2 as feast_dot_core_dot_FeatureService__pb2 +from feast.protos.feast.core import FeatureTable_pb2 as feast_dot_core_dot_FeatureTable__pb2 +from feast.protos.feast.core import FeatureView_pb2 as feast_dot_core_dot_FeatureView__pb2 +from feast.protos.feast.core import InfraObject_pb2 as feast_dot_core_dot_InfraObject__pb2 +from feast.protos.feast.core import OnDemandFeatureView_pb2 as feast_dot_core_dot_OnDemandFeatureView__pb2 +from feast.protos.feast.core import StreamFeatureView_pb2 as feast_dot_core_dot_StreamFeatureView__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 +from feast.protos.feast.core import SavedDataset_pb2 as feast_dot_core_dot_SavedDataset__pb2 +from feast.protos.feast.core import ValidationProfile_pb2 as feast_dot_core_dot_ValidationProfile__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import Permission_pb2 as feast_dot_core_dot_Permission__pb2 +from feast.protos.feast.core import Project_pb2 as feast_dot_core_dot_Project__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19\x66\x65\x61st/core/Registry.proto\x12\nfeast.core\x1a\x17\x66\x65\x61st/core/Entity.proto\x1a\x1f\x66\x65\x61st/core/FeatureService.proto\x1a\x1d\x66\x65\x61st/core/FeatureTable.proto\x1a\x1c\x66\x65\x61st/core/FeatureView.proto\x1a\x1c\x66\x65\x61st/core/InfraObject.proto\x1a$feast/core/OnDemandFeatureView.proto\x1a\"feast/core/StreamFeatureView.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x1d\x66\x65\x61st/core/SavedDataset.proto\x1a\"feast/core/ValidationProfile.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1b\x66\x65\x61st/core/Permission.proto\x1a\x18\x66\x65\x61st/core/Project.proto\"\xff\x05\n\x08Registry\x12$\n\x08\x65ntities\x18\x01 \x03(\x0b\x32\x12.feast.core.Entity\x12\x30\n\x0e\x66\x65\x61ture_tables\x18\x02 \x03(\x0b\x32\x18.feast.core.FeatureTable\x12.\n\rfeature_views\x18\x06 \x03(\x0b\x32\x17.feast.core.FeatureView\x12,\n\x0c\x64\x61ta_sources\x18\x0c \x03(\x0b\x32\x16.feast.core.DataSource\x12@\n\x17on_demand_feature_views\x18\x08 \x03(\x0b\x32\x1f.feast.core.OnDemandFeatureView\x12;\n\x14stream_feature_views\x18\x0e \x03(\x0b\x32\x1d.feast.core.StreamFeatureView\x12\x34\n\x10\x66\x65\x61ture_services\x18\x07 \x03(\x0b\x32\x1a.feast.core.FeatureService\x12\x30\n\x0esaved_datasets\x18\x0b \x03(\x0b\x32\x18.feast.core.SavedDataset\x12>\n\x15validation_references\x18\r \x03(\x0b\x32\x1f.feast.core.ValidationReference\x12 \n\x05infra\x18\n \x01(\x0b\x32\x11.feast.core.Infra\x12\x39\n\x10project_metadata\x18\x0f \x03(\x0b\x32\x1b.feast.core.ProjectMetadataB\x02\x18\x01\x12\x1f\n\x17registry_schema_version\x18\x03 \x01(\t\x12\x12\n\nversion_id\x18\x04 \x01(\t\x12\x30\n\x0clast_updated\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x0bpermissions\x18\x10 \x03(\x0b\x32\x16.feast.core.Permission\x12%\n\x08projects\x18\x11 
\x03(\x0b\x32\x13.feast.core.Project\"8\n\x0fProjectMetadata\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x14\n\x0cproject_uuid\x18\x02 \x01(\tBR\n\x10\x66\x65\x61st.proto.coreB\rRegistryProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Registry_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\rRegistryProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_REGISTRY'].fields_by_name['project_metadata']._options = None + _globals['_REGISTRY'].fields_by_name['project_metadata']._serialized_options = b'\030\001' + _globals['_REGISTRY']._serialized_start=449 + _globals['_REGISTRY']._serialized_end=1216 + _globals['_PROJECTMETADATA']._serialized_start=1218 + _globals['_PROJECTMETADATA']._serialized_end=1274 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Registry_pb2.pyi b/sdk/python/feast/protos/feast/core/Registry_pb2.pyi new file mode 100644 index 0000000000..fca49c7548 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Registry_pb2.pyi @@ -0,0 +1,140 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +* Copyright 2020 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+""" +import builtins +import collections.abc +import feast.core.DataSource_pb2 +import feast.core.Entity_pb2 +import feast.core.FeatureService_pb2 +import feast.core.FeatureTable_pb2 +import feast.core.FeatureView_pb2 +import feast.core.InfraObject_pb2 +import feast.core.OnDemandFeatureView_pb2 +import feast.core.Permission_pb2 +import feast.core.Project_pb2 +import feast.core.SavedDataset_pb2 +import feast.core.StreamFeatureView_pb2 +import feast.core.ValidationProfile_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Registry(google.protobuf.message.Message): + """Next id: 18""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ENTITIES_FIELD_NUMBER: builtins.int + FEATURE_TABLES_FIELD_NUMBER: builtins.int + FEATURE_VIEWS_FIELD_NUMBER: builtins.int + DATA_SOURCES_FIELD_NUMBER: builtins.int + ON_DEMAND_FEATURE_VIEWS_FIELD_NUMBER: builtins.int + STREAM_FEATURE_VIEWS_FIELD_NUMBER: builtins.int + FEATURE_SERVICES_FIELD_NUMBER: builtins.int + SAVED_DATASETS_FIELD_NUMBER: builtins.int + VALIDATION_REFERENCES_FIELD_NUMBER: builtins.int + INFRA_FIELD_NUMBER: builtins.int + PROJECT_METADATA_FIELD_NUMBER: builtins.int + REGISTRY_SCHEMA_VERSION_FIELD_NUMBER: builtins.int + VERSION_ID_FIELD_NUMBER: builtins.int + LAST_UPDATED_FIELD_NUMBER: builtins.int + PERMISSIONS_FIELD_NUMBER: builtins.int + PROJECTS_FIELD_NUMBER: builtins.int + @property + def entities(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Entity_pb2.Entity]: ... + @property + def feature_tables(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.FeatureTable_pb2.FeatureTable]: ... + @property + def feature_views(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.FeatureView_pb2.FeatureView]: ... + @property + def data_sources(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.DataSource_pb2.DataSource]: ... + @property + def on_demand_feature_views(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView]: ... + @property + def stream_feature_views(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.StreamFeatureView_pb2.StreamFeatureView]: ... + @property + def feature_services(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.FeatureService_pb2.FeatureService]: ... + @property + def saved_datasets(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.SavedDataset_pb2.SavedDataset]: ... + @property + def validation_references(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.ValidationProfile_pb2.ValidationReference]: ... + @property + def infra(self) -> feast.core.InfraObject_pb2.Infra: ... 
+ @property + def project_metadata(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ProjectMetadata]: + """Tracking metadata of Feast by project""" + registry_schema_version: builtins.str + """to support migrations; incremented when schema is changed""" + version_id: builtins.str + """version id, random string generated on each update of the data; now used only for debugging purposes""" + @property + def last_updated(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def permissions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Permission_pb2.Permission]: ... + @property + def projects(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Project_pb2.Project]: ... + def __init__( + self, + *, + entities: collections.abc.Iterable[feast.core.Entity_pb2.Entity] | None = ..., + feature_tables: collections.abc.Iterable[feast.core.FeatureTable_pb2.FeatureTable] | None = ..., + feature_views: collections.abc.Iterable[feast.core.FeatureView_pb2.FeatureView] | None = ..., + data_sources: collections.abc.Iterable[feast.core.DataSource_pb2.DataSource] | None = ..., + on_demand_feature_views: collections.abc.Iterable[feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView] | None = ..., + stream_feature_views: collections.abc.Iterable[feast.core.StreamFeatureView_pb2.StreamFeatureView] | None = ..., + feature_services: collections.abc.Iterable[feast.core.FeatureService_pb2.FeatureService] | None = ..., + saved_datasets: collections.abc.Iterable[feast.core.SavedDataset_pb2.SavedDataset] | None = ..., + validation_references: collections.abc.Iterable[feast.core.ValidationProfile_pb2.ValidationReference] | None = ..., + infra: feast.core.InfraObject_pb2.Infra | None = ..., + project_metadata: collections.abc.Iterable[global___ProjectMetadata] | None = ..., + registry_schema_version: builtins.str = ..., + version_id: builtins.str = ..., + last_updated: google.protobuf.timestamp_pb2.Timestamp | None = ..., + permissions: collections.abc.Iterable[feast.core.Permission_pb2.Permission] | None = ..., + projects: collections.abc.Iterable[feast.core.Project_pb2.Project] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["infra", b"infra", "last_updated", b"last_updated"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["data_sources", b"data_sources", "entities", b"entities", "feature_services", b"feature_services", "feature_tables", b"feature_tables", "feature_views", b"feature_views", "infra", b"infra", "last_updated", b"last_updated", "on_demand_feature_views", b"on_demand_feature_views", "permissions", b"permissions", "project_metadata", b"project_metadata", "projects", b"projects", "registry_schema_version", b"registry_schema_version", "saved_datasets", b"saved_datasets", "stream_feature_views", b"stream_feature_views", "validation_references", b"validation_references", "version_id", b"version_id"]) -> None: ... + +global___Registry = Registry + +class ProjectMetadata(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + PROJECT_UUID_FIELD_NUMBER: builtins.int + project: builtins.str + project_uuid: builtins.str + def __init__( + self, + *, + project: builtins.str = ..., + project_uuid: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["project", b"project", "project_uuid", b"project_uuid"]) -> None: ... + +global___ProjectMetadata = ProjectMetadata diff --git a/sdk/python/feast/protos/feast/core/Registry_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Registry_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Registry_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/SavedDataset_pb2.py b/sdk/python/feast/protos/feast/core/SavedDataset_pb2.py new file mode 100644 index 0000000000..fe1e2d49ea --- /dev/null +++ b/sdk/python/feast/protos/feast/core/SavedDataset_pb2.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/SavedDataset.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x66\x65\x61st/core/SavedDataset.proto\x12\nfeast.core\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\"\xa5\x02\n\x10SavedDatasetSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x10\n\x08\x66\x65\x61tures\x18\x03 \x03(\t\x12\x11\n\tjoin_keys\x18\x04 \x03(\t\x12\x1a\n\x12\x66ull_feature_names\x18\x05 \x01(\x08\x12\x30\n\x07storage\x18\x06 \x01(\x0b\x32\x1f.feast.core.SavedDatasetStorage\x12\x1c\n\x14\x66\x65\x61ture_service_name\x18\x08 \x01(\t\x12\x34\n\x04tags\x18\x07 \x03(\x0b\x32&.feast.core.SavedDatasetSpec.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa9\x04\n\x13SavedDatasetStorage\x12:\n\x0c\x66ile_storage\x18\x04 \x01(\x0b\x32\".feast.core.DataSource.FileOptionsH\x00\x12\x42\n\x10\x62igquery_storage\x18\x05 \x01(\x0b\x32&.feast.core.DataSource.BigQueryOptionsH\x00\x12\x42\n\x10redshift_storage\x18\x06 \x01(\x0b\x32&.feast.core.DataSource.RedshiftOptionsH\x00\x12\x44\n\x11snowflake_storage\x18\x07 \x01(\x0b\x32\'.feast.core.DataSource.SnowflakeOptionsH\x00\x12<\n\rtrino_storage\x18\x08 \x01(\x0b\x32#.feast.core.DataSource.TrinoOptionsH\x00\x12<\n\rspark_storage\x18\t \x01(\x0b\x32#.feast.core.DataSource.SparkOptionsH\x00\x12\x44\n\x0e\x63ustom_storage\x18\n \x01(\x0b\x32*.feast.core.DataSource.CustomSourceOptionsH\x00\x12>\n\x0e\x61thena_storage\x18\x0b \x01(\x0b\x32$.feast.core.DataSource.AthenaOptionsH\x00\x42\x06\n\x04kind\"\xf7\x01\n\x10SavedDatasetMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x37\n\x13min_event_timestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x37\n\x13max_event_timestamp\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"f\n\x0cSavedDataset\x12*\n\x04spec\x18\x01 
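Reviewer note: per the regenerated descriptor above, `Registry` gains `permissions` (field 16) and `projects` (field 17), and `project_metadata` is now marked deprecated. A sketch of the serialize/parse round-trip that registry stores perform, assuming the package from this PR (values hypothetical):

```python
from feast.protos.feast.core import Registry_pb2

registry = Registry_pb2.Registry(registry_schema_version="1")
registry.projects.add().spec.name = "demo"                 # new in this PR
registry.permissions.add().spec.name = "read-only-views"   # new in this PR
registry.last_updated.GetCurrentTime()

blob = registry.SerializeToString()   # what a file/SQL registry persists
loaded = Registry_pb2.Registry()
loaded.ParseFromString(blob)
assert [p.spec.name for p in loaded.projects] == ["demo"]
```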
\x01(\x0b\x32\x1c.feast.core.SavedDatasetSpec\x12*\n\x04meta\x18\x02 \x01(\x0b\x32\x1c.feast.core.SavedDatasetMetaBV\n\x10\x66\x65\x61st.proto.coreB\x11SavedDatasetProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.SavedDataset_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\021SavedDatasetProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_SAVEDDATASETSPEC_TAGSENTRY']._options = None + _globals['_SAVEDDATASETSPEC_TAGSENTRY']._serialized_options = b'8\001' + _globals['_SAVEDDATASETSPEC']._serialized_start=108 + _globals['_SAVEDDATASETSPEC']._serialized_end=401 + _globals['_SAVEDDATASETSPEC_TAGSENTRY']._serialized_start=358 + _globals['_SAVEDDATASETSPEC_TAGSENTRY']._serialized_end=401 + _globals['_SAVEDDATASETSTORAGE']._serialized_start=404 + _globals['_SAVEDDATASETSTORAGE']._serialized_end=957 + _globals['_SAVEDDATASETMETA']._serialized_start=960 + _globals['_SAVEDDATASETMETA']._serialized_end=1207 + _globals['_SAVEDDATASET']._serialized_start=1209 + _globals['_SAVEDDATASET']._serialized_end=1311 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/SavedDataset_pb2.pyi b/sdk/python/feast/protos/feast/core/SavedDataset_pb2.pyi new file mode 100644 index 0000000000..47525b64ed --- /dev/null +++ b/sdk/python/feast/protos/feast/core/SavedDataset_pb2.pyi @@ -0,0 +1,192 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2021 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import feast.core.DataSource_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class SavedDatasetSpec(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + JOIN_KEYS_FIELD_NUMBER: builtins.int + FULL_FEATURE_NAMES_FIELD_NUMBER: builtins.int + STORAGE_FIELD_NUMBER: builtins.int + FEATURE_SERVICE_NAME_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the dataset. Must be unique since it's possible to overwrite dataset by name""" + project: builtins.str + """Name of Feast project that this Dataset belongs to.""" + @property + def features(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """list of feature references with format "<view name>:<feature name>" """ + @property + def join_keys(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """entity columns + request columns from all feature views used during retrieval""" + full_feature_names: builtins.bool + """Whether full feature names are used in stored data""" + @property + def storage(self) -> global___SavedDatasetStorage: ... + feature_service_name: builtins.str + """Optional and only populated if generated from a feature service fetch""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + features: collections.abc.Iterable[builtins.str] | None = ..., + join_keys: collections.abc.Iterable[builtins.str] | None = ..., + full_feature_names: builtins.bool = ..., + storage: global___SavedDatasetStorage | None = ..., + feature_service_name: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["storage", b"storage"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["feature_service_name", b"feature_service_name", "features", b"features", "full_feature_names", b"full_feature_names", "join_keys", b"join_keys", "name", b"name", "project", b"project", "storage", b"storage", "tags", b"tags"]) -> None: ... + +global___SavedDatasetSpec = SavedDatasetSpec + +class SavedDatasetStorage(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FILE_STORAGE_FIELD_NUMBER: builtins.int + BIGQUERY_STORAGE_FIELD_NUMBER: builtins.int + REDSHIFT_STORAGE_FIELD_NUMBER: builtins.int + SNOWFLAKE_STORAGE_FIELD_NUMBER: builtins.int + TRINO_STORAGE_FIELD_NUMBER: builtins.int + SPARK_STORAGE_FIELD_NUMBER: builtins.int + CUSTOM_STORAGE_FIELD_NUMBER: builtins.int + ATHENA_STORAGE_FIELD_NUMBER: builtins.int + @property + def file_storage(self) -> feast.core.DataSource_pb2.DataSource.FileOptions: ... + @property + def bigquery_storage(self) -> feast.core.DataSource_pb2.DataSource.BigQueryOptions: ... + @property + def redshift_storage(self) -> feast.core.DataSource_pb2.DataSource.RedshiftOptions: ... + @property + def snowflake_storage(self) -> feast.core.DataSource_pb2.DataSource.SnowflakeOptions: ... + @property + def trino_storage(self) -> feast.core.DataSource_pb2.DataSource.TrinoOptions: ... + @property + def spark_storage(self) -> feast.core.DataSource_pb2.DataSource.SparkOptions: ... + @property + def custom_storage(self) -> feast.core.DataSource_pb2.DataSource.CustomSourceOptions: ... + @property + def athena_storage(self) -> feast.core.DataSource_pb2.DataSource.AthenaOptions: ...
+ def __init__( + self, + *, + file_storage: feast.core.DataSource_pb2.DataSource.FileOptions | None = ..., + bigquery_storage: feast.core.DataSource_pb2.DataSource.BigQueryOptions | None = ..., + redshift_storage: feast.core.DataSource_pb2.DataSource.RedshiftOptions | None = ..., + snowflake_storage: feast.core.DataSource_pb2.DataSource.SnowflakeOptions | None = ..., + trino_storage: feast.core.DataSource_pb2.DataSource.TrinoOptions | None = ..., + spark_storage: feast.core.DataSource_pb2.DataSource.SparkOptions | None = ..., + custom_storage: feast.core.DataSource_pb2.DataSource.CustomSourceOptions | None = ..., + athena_storage: feast.core.DataSource_pb2.DataSource.AthenaOptions | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["athena_storage", b"athena_storage", "bigquery_storage", b"bigquery_storage", "custom_storage", b"custom_storage", "file_storage", b"file_storage", "kind", b"kind", "redshift_storage", b"redshift_storage", "snowflake_storage", b"snowflake_storage", "spark_storage", b"spark_storage", "trino_storage", b"trino_storage"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["athena_storage", b"athena_storage", "bigquery_storage", b"bigquery_storage", "custom_storage", b"custom_storage", "file_storage", b"file_storage", "kind", b"kind", "redshift_storage", b"redshift_storage", "snowflake_storage", b"snowflake_storage", "spark_storage", b"spark_storage", "trino_storage", b"trino_storage"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["kind", b"kind"]) -> typing_extensions.Literal["file_storage", "bigquery_storage", "redshift_storage", "snowflake_storage", "trino_storage", "spark_storage", "custom_storage", "athena_storage"] | None: ... + +global___SavedDatasetStorage = SavedDatasetStorage + +class SavedDatasetMeta(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CREATED_TIMESTAMP_FIELD_NUMBER: builtins.int + LAST_UPDATED_TIMESTAMP_FIELD_NUMBER: builtins.int + MIN_EVENT_TIMESTAMP_FIELD_NUMBER: builtins.int + MAX_EVENT_TIMESTAMP_FIELD_NUMBER: builtins.int + @property + def created_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time when this saved dataset is created""" + @property + def last_updated_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Time when this saved dataset is last updated""" + @property + def min_event_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Min timestamp in the dataset (needed for retrieval)""" + @property + def max_event_timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Max timestamp in the dataset (needed for retrieval)""" + def __init__( + self, + *, + created_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + last_updated_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + min_event_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + max_event_timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp", "max_event_timestamp", b"max_event_timestamp", "min_event_timestamp", b"min_event_timestamp"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["created_timestamp", b"created_timestamp", "last_updated_timestamp", b"last_updated_timestamp", "max_event_timestamp", b"max_event_timestamp", "min_event_timestamp", b"min_event_timestamp"]) -> None: ... + +global___SavedDatasetMeta = SavedDatasetMeta + +class SavedDataset(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___SavedDatasetSpec: ... + @property + def meta(self) -> global___SavedDatasetMeta: ... + def __init__( + self, + *, + spec: global___SavedDatasetSpec | None = ..., + meta: global___SavedDatasetMeta | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... + +global___SavedDataset = SavedDataset diff --git a/sdk/python/feast/protos/feast/core/SavedDataset_pb2_grpc.py b/sdk/python/feast/protos/feast/core/SavedDataset_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/SavedDataset_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/SqliteTable_pb2.py b/sdk/python/feast/protos/feast/core/SqliteTable_pb2.py new file mode 100644 index 0000000000..8cc14781c7 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/SqliteTable_pb2.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/SqliteTable.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66\x65\x61st/core/SqliteTable.proto\x12\nfeast.core\")\n\x0bSqliteTable\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\tBU\n\x10\x66\x65\x61st.proto.coreB\x10SqliteTableProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.SqliteTable_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\020SqliteTableProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_SQLITETABLE']._serialized_start=44 + _globals['_SQLITETABLE']._serialized_end=85 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/SqliteTable_pb2.pyi b/sdk/python/feast/protos/feast/core/SqliteTable_pb2.pyi new file mode 100644 index 0000000000..10ecebf362 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/SqliteTable_pb2.pyi @@ -0,0 +1,50 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file + +* Copyright 2021 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class SqliteTable(google.protobuf.message.Message): + """Represents a Sqlite table""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PATH_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + path: builtins.str + """Absolute path of the table""" + name: builtins.str + """Name of the table""" + def __init__( + self, + *, + path: builtins.str = ..., + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "path", b"path"]) -> None: ... + +global___SqliteTable = SqliteTable diff --git a/sdk/python/feast/protos/feast/core/SqliteTable_pb2_grpc.py b/sdk/python/feast/protos/feast/core/SqliteTable_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/SqliteTable_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Store_pb2.py b/sdk/python/feast/protos/feast/core/Store_pb2.py new file mode 100644 index 0000000000..7d24e11947 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Store_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/core/Store.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x16\x66\x65\x61st/core/Store.proto\x12\nfeast.core\"\xfd\x06\n\x05Store\x12\x0c\n\x04name\x18\x01 \x01(\t\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.feast.core.Store.StoreType\x12\x35\n\rsubscriptions\x18\x04 \x03(\x0b\x32\x1e.feast.core.Store.Subscription\x12\x35\n\x0credis_config\x18\x0b \x01(\x0b\x32\x1d.feast.core.Store.RedisConfigH\x00\x12\x44\n\x14redis_cluster_config\x18\x0e \x01(\x0b\x32$.feast.core.Store.RedisClusterConfigH\x00\x1a\x88\x01\n\x0bRedisConfig\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\x12\x1a\n\x12initial_backoff_ms\x18\x03 \x01(\x05\x12\x13\n\x0bmax_retries\x18\x04 \x01(\x05\x12\x1f\n\x17\x66lush_frequency_seconds\x18\x05 \x01(\x05\x12\x0b\n\x03ssl\x18\x06 \x01(\x08\x1a\xdb\x02\n\x12RedisClusterConfig\x12\x19\n\x11\x63onnection_string\x18\x01 \x01(\t\x12\x1a\n\x12initial_backoff_ms\x18\x02 \x01(\x05\x12\x13\n\x0bmax_retries\x18\x03 \x01(\x05\x12\x1f\n\x17\x66lush_frequency_seconds\x18\x04 \x01(\x05\x12\x12\n\nkey_prefix\x18\x05 \x01(\t\x12\x17\n\x0f\x65nable_fallback\x18\x06 \x01(\x08\x12\x17\n\x0f\x66\x61llback_prefix\x18\x07 \x01(\t\x12@\n\tread_from\x18\x08 \x01(\x0e\x32-.feast.core.Store.RedisClusterConfig.ReadFrom\"P\n\x08ReadFrom\x12\n\n\x06MASTER\x10\x00\x12\x14\n\x10MASTER_PREFERRED\x10\x01\x12\x0b\n\x07REPLICA\x10\x02\x12\x15\n\x11REPLICA_PREFERRED\x10\x03\x1a\x44\n\x0cSubscription\x12\x0f\n\x07project\x18\x03 \x01(\t\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07\x65xclude\x18\x04 \x01(\x08J\x04\x08\x02\x10\x03\"N\n\tStoreType\x12\x0b\n\x07INVALID\x10\x00\x12\t\n\x05REDIS\x10\x01\x12\x11\n\rREDIS_CLUSTER\x10\x04\"\x04\x08\x02\x10\x02\"\x04\x08\x03\x10\x03\"\x04\x08\x0c\x10\x0c\"\x04\x08\r\x10\rB\x08\n\x06\x63onfigBO\n\x10\x66\x65\x61st.proto.coreB\nStoreProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Store_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\nStoreProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_STORE']._serialized_start=39 + _globals['_STORE']._serialized_end=932 + _globals['_STORE_REDISCONFIG']._serialized_start=286 + _globals['_STORE_REDISCONFIG']._serialized_end=422 + _globals['_STORE_REDISCLUSTERCONFIG']._serialized_start=425 + _globals['_STORE_REDISCLUSTERCONFIG']._serialized_end=772 + _globals['_STORE_REDISCLUSTERCONFIG_READFROM']._serialized_start=692 + _globals['_STORE_REDISCLUSTERCONFIG_READFROM']._serialized_end=772 + _globals['_STORE_SUBSCRIPTION']._serialized_start=774 + _globals['_STORE_SUBSCRIPTION']._serialized_end=842 + _globals['_STORE_STORETYPE']._serialized_start=844 + _globals['_STORE_STORETYPE']._serialized_end=922 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Store_pb2.pyi b/sdk/python/feast/protos/feast/core/Store_pb2.pyi new file mode 100644 index 
0000000000..5ee957d184 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Store_pb2.pyi @@ -0,0 +1,234 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +* Copyright 2019 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Store(google.protobuf.message.Message): + """Store provides a location where Feast reads and writes feature values. + Feature values will be written to the Store in the form of FeatureRow elements. + The way FeatureRow is encoded and decoded when it is written to and read from + the Store depends on the type of the Store. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _StoreType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _StoreTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Store._StoreType.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + INVALID: Store._StoreType.ValueType # 0 + REDIS: Store._StoreType.ValueType # 1 + """Redis stores a FeatureRow element as a key, value pair. + + The Redis data types used (https://redis.io/topics/data-types): + - key: STRING + - value: STRING + + Encodings: + - key: byte array of RedisKey (refer to feast.storage.RedisKeyV2) + - value: Redis hashmap + """ + REDIS_CLUSTER: Store._StoreType.ValueType # 4 + + class StoreType(_StoreType, metaclass=_StoreTypeEnumTypeWrapper): ... + INVALID: Store.StoreType.ValueType # 0 + REDIS: Store.StoreType.ValueType # 1 + """Redis stores a FeatureRow element as a key, value pair. + + The Redis data types used (https://redis.io/topics/data-types): + - key: STRING + - value: STRING + + Encodings: + - key: byte array of RedisKey (refer to feast.storage.RedisKeyV2) + - value: Redis hashmap + """ + REDIS_CLUSTER: Store.StoreType.ValueType # 4 + + class RedisConfig(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + HOST_FIELD_NUMBER: builtins.int + PORT_FIELD_NUMBER: builtins.int + INITIAL_BACKOFF_MS_FIELD_NUMBER: builtins.int + MAX_RETRIES_FIELD_NUMBER: builtins.int + FLUSH_FREQUENCY_SECONDS_FIELD_NUMBER: builtins.int + SSL_FIELD_NUMBER: builtins.int + host: builtins.str + port: builtins.int + initial_backoff_ms: builtins.int + """Optional. The number of milliseconds to wait before retrying failed Redis connection. + By default, Feast uses exponential backoff policy and "initial_backoff_ms" sets the initial wait duration. + """ + max_retries: builtins.int + """Optional. Maximum total number of retries for connecting to Redis. 
Defaults to zero retries.""" + flush_frequency_seconds: builtins.int + """Optional. How often to flush data to Redis""" + ssl: builtins.bool + """Optional. Connect over SSL.""" + def __init__( + self, + *, + host: builtins.str = ..., + port: builtins.int = ..., + initial_backoff_ms: builtins.int = ..., + max_retries: builtins.int = ..., + flush_frequency_seconds: builtins.int = ..., + ssl: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["flush_frequency_seconds", b"flush_frequency_seconds", "host", b"host", "initial_backoff_ms", b"initial_backoff_ms", "max_retries", b"max_retries", "port", b"port", "ssl", b"ssl"]) -> None: ... + + class RedisClusterConfig(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _ReadFrom: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _ReadFromEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Store.RedisClusterConfig._ReadFrom.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + MASTER: Store.RedisClusterConfig._ReadFrom.ValueType # 0 + MASTER_PREFERRED: Store.RedisClusterConfig._ReadFrom.ValueType # 1 + REPLICA: Store.RedisClusterConfig._ReadFrom.ValueType # 2 + REPLICA_PREFERRED: Store.RedisClusterConfig._ReadFrom.ValueType # 3 + + class ReadFrom(_ReadFrom, metaclass=_ReadFromEnumTypeWrapper): + """Optional. Priority of nodes when reading from cluster""" + + MASTER: Store.RedisClusterConfig.ReadFrom.ValueType # 0 + MASTER_PREFERRED: Store.RedisClusterConfig.ReadFrom.ValueType # 1 + REPLICA: Store.RedisClusterConfig.ReadFrom.ValueType # 2 + REPLICA_PREFERRED: Store.RedisClusterConfig.ReadFrom.ValueType # 3 + + CONNECTION_STRING_FIELD_NUMBER: builtins.int + INITIAL_BACKOFF_MS_FIELD_NUMBER: builtins.int + MAX_RETRIES_FIELD_NUMBER: builtins.int + FLUSH_FREQUENCY_SECONDS_FIELD_NUMBER: builtins.int + KEY_PREFIX_FIELD_NUMBER: builtins.int + ENABLE_FALLBACK_FIELD_NUMBER: builtins.int + FALLBACK_PREFIX_FIELD_NUMBER: builtins.int + READ_FROM_FIELD_NUMBER: builtins.int + connection_string: builtins.str + """Comma-separated list of Redis URIs for all the nodes in the Redis Cluster, e.g. host1:6379,host2:6379""" + initial_backoff_ms: builtins.int + max_retries: builtins.int + flush_frequency_seconds: builtins.int + """Optional. How often to flush data to Redis""" + key_prefix: builtins.str + """Optional. Append a prefix to the Redis Key""" + enable_fallback: builtins.bool + """Optional. Enable fallback to another key prefix if the original key is not present. + Useful for migrating key prefix without re-ingestion. Disabled by default. + """ + fallback_prefix: builtins.str + """Optional. This would be the fallback prefix to use if enable_fallback is true.""" + read_from: global___Store.RedisClusterConfig.ReadFrom.ValueType + def __init__( + self, + *, + connection_string: builtins.str = ..., + initial_backoff_ms: builtins.int = ..., + max_retries: builtins.int = ..., + flush_frequency_seconds: builtins.int = ..., + key_prefix: builtins.str = ..., + enable_fallback: builtins.bool = ..., + fallback_prefix: builtins.str = ..., + read_from: global___Store.RedisClusterConfig.ReadFrom.ValueType = ..., + ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["connection_string", b"connection_string", "enable_fallback", b"enable_fallback", "fallback_prefix", b"fallback_prefix", "flush_frequency_seconds", b"flush_frequency_seconds", "initial_backoff_ms", b"initial_backoff_ms", "key_prefix", b"key_prefix", "max_retries", b"max_retries", "read_from", b"read_from"]) -> None: ... + + class Subscription(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + EXCLUDE_FIELD_NUMBER: builtins.int + project: builtins.str + """Name of project that the feature sets belong to. This can be one of + - [project_name] + - * + If an asterisk is provided, filtering on projects will be disabled. All projects will + be matched. It is NOT possible to provide an asterisk with a string in order to do + pattern matching. + """ + name: builtins.str + """Name of the desired feature set. Asterisks can be used as wildcards in the name. + Matching on names is only permitted if a specific project is defined. It is disallowed + if the project name is set to "*" + e.g. + - * can be used to match all feature sets + - my-feature-set* can be used to match all features prefixed by "my-feature-set" + - my-feature-set-6 can be used to select a single feature set + """ + exclude: builtins.bool + """All matches with exclude enabled will be filtered out instead of added""" + def __init__( + self, + *, + project: builtins.str = ..., + name: builtins.str = ..., + exclude: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["exclude", b"exclude", "name", b"name", "project", b"project"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + SUBSCRIPTIONS_FIELD_NUMBER: builtins.int + REDIS_CONFIG_FIELD_NUMBER: builtins.int + REDIS_CLUSTER_CONFIG_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the store.""" + type: global___Store.StoreType.ValueType + """Type of store.""" + @property + def subscriptions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Store.Subscription]: + """Feature sets to subscribe to.""" + @property + def redis_config(self) -> global___Store.RedisConfig: ... + @property + def redis_cluster_config(self) -> global___Store.RedisClusterConfig: ... + def __init__( + self, + *, + name: builtins.str = ..., + type: global___Store.StoreType.ValueType = ..., + subscriptions: collections.abc.Iterable[global___Store.Subscription] | None = ..., + redis_config: global___Store.RedisConfig | None = ..., + redis_cluster_config: global___Store.RedisClusterConfig | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["config", b"config", "redis_cluster_config", b"redis_cluster_config", "redis_config", b"redis_config"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["config", b"config", "name", b"name", "redis_cluster_config", b"redis_cluster_config", "redis_config", b"redis_config", "subscriptions", b"subscriptions", "type", b"type"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["config", b"config"]) -> typing_extensions.Literal["redis_config", "redis_cluster_config"] | None: ...
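Usage note for the stubs above: Store is an ordinary protobuf message, so construction and oneof inspection follow standard generated-protobuf conventions. A minimal sketch, assuming the import path added in this diff; the store name, Redis endpoint, and project are illustrative placeholders, not values from this PR:

from feast.protos.feast.core import Store_pb2

# Single-node Redis store; enum values such as REDIS are exposed both as
# Store.StoreType.REDIS and directly on Store, as the stub above declares.
store = Store_pb2.Store(
    name="online-store",  # placeholder
    type=Store_pb2.Store.REDIS,
    redis_config=Store_pb2.Store.RedisConfig(host="localhost", port=6379),
    subscriptions=[
        # Per the Subscription docstring, name wildcards require a concrete project.
        Store_pb2.Store.Subscription(project="demo", name="*"),
    ],
)

# Setting redis_config populated the "config" oneof.
assert store.WhichOneof("config") == "redis_config"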
+ +global___Store = Store diff --git a/sdk/python/feast/protos/feast/core/Store_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Store_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Store_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.py b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.py new file mode 100644 index 0000000000..ba19088edd --- /dev/null +++ b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/StreamFeatureView.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from feast.protos.feast.core import OnDemandFeatureView_pb2 as feast_dot_core_dot_OnDemandFeatureView__pb2 +from feast.protos.feast.core import FeatureView_pb2 as feast_dot_core_dot_FeatureView__pb2 +from feast.protos.feast.core import Feature_pb2 as feast_dot_core_dot_Feature__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 +from feast.protos.feast.core import Aggregation_pb2 as feast_dot_core_dot_Aggregation__pb2 +from feast.protos.feast.core import Transformation_pb2 as feast_dot_core_dot_Transformation__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"feast/core/StreamFeatureView.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/duration.proto\x1a$feast/core/OnDemandFeatureView.proto\x1a\x1c\x66\x65\x61st/core/FeatureView.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x1c\x66\x65\x61st/core/Aggregation.proto\x1a\x1f\x66\x65\x61st/core/Transformation.proto\"o\n\x11StreamFeatureView\x12/\n\x04spec\x18\x01 \x01(\x0b\x32!.feast.core.StreamFeatureViewSpec\x12)\n\x04meta\x18\x02 \x01(\x0b\x32\x1b.feast.core.FeatureViewMeta\"\xa8\x05\n\x15StreamFeatureViewSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x10\n\x08\x65ntities\x18\x03 \x03(\t\x12+\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x31\n\x0e\x65ntity_columns\x18\x05 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x39\n\x04tags\x18\x07 \x03(\x0b\x32+.feast.core.StreamFeatureViewSpec.TagsEntry\x12\r\n\x05owner\x18\x08 \x01(\t\x12&\n\x03ttl\x18\t \x01(\x0b\x32\x19.google.protobuf.Duration\x12,\n\x0c\x62\x61tch_source\x18\n \x01(\x0b\x32\x16.feast.core.DataSource\x12-\n\rstream_source\x18\x0b \x01(\x0b\x32\x16.feast.core.DataSource\x12\x0e\n\x06online\x18\x0c \x01(\x08\x12\x42\n\x15user_defined_function\x18\r \x01(\x0b\x32\x1f.feast.core.UserDefinedFunctionB\x02\x18\x01\x12\x0c\n\x04mode\x18\x0e \x01(\t\x12-\n\x0c\x61ggregations\x18\x0f \x03(\x0b\x32\x17.feast.core.Aggregation\x12\x17\n\x0ftimestamp_field\x18\x10 \x01(\t\x12\x43\n\x16\x66\x65\x61ture_transformation\x18\x11 
\x01(\x0b\x32#.feast.core.FeatureTransformationV2\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42[\n\x10\x66\x65\x61st.proto.coreB\x16StreamFeatureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.StreamFeatureView_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\026StreamFeatureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_STREAMFEATUREVIEWSPEC_TAGSENTRY']._options = None + _globals['_STREAMFEATUREVIEWSPEC_TAGSENTRY']._serialized_options = b'8\001' + _globals['_STREAMFEATUREVIEWSPEC'].fields_by_name['user_defined_function']._options = None + _globals['_STREAMFEATUREVIEWSPEC'].fields_by_name['user_defined_function']._serialized_options = b'\030\001' + _globals['_STREAMFEATUREVIEW']._serialized_start=268 + _globals['_STREAMFEATUREVIEW']._serialized_end=379 + _globals['_STREAMFEATUREVIEWSPEC']._serialized_start=382 + _globals['_STREAMFEATUREVIEWSPEC']._serialized_end=1062 + _globals['_STREAMFEATUREVIEWSPEC_TAGSENTRY']._serialized_start=1019 + _globals['_STREAMFEATUREVIEWSPEC_TAGSENTRY']._serialized_end=1062 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.pyi b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.pyi new file mode 100644 index 0000000000..70e897a2f2 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.pyi @@ -0,0 +1,170 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2020 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import feast.core.Aggregation_pb2 +import feast.core.DataSource_pb2 +import feast.core.FeatureView_pb2 +import feast.core.Feature_pb2 +import feast.core.OnDemandFeatureView_pb2 +import feast.core.Transformation_pb2 +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class StreamFeatureView(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SPEC_FIELD_NUMBER: builtins.int + META_FIELD_NUMBER: builtins.int + @property + def spec(self) -> global___StreamFeatureViewSpec: + """User-specified specifications of this feature view.""" + @property + def meta(self) -> feast.core.FeatureView_pb2.FeatureViewMeta: ... + def __init__( + self, + *, + spec: global___StreamFeatureViewSpec | None = ..., + meta: feast.core.FeatureView_pb2.FeatureViewMeta | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["meta", b"meta", "spec", b"spec"]) -> None: ... + +global___StreamFeatureView = StreamFeatureView + +class StreamFeatureViewSpec(google.protobuf.message.Message): + """Next available id: 17""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ENTITIES_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + ENTITY_COLUMNS_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + OWNER_FIELD_NUMBER: builtins.int + TTL_FIELD_NUMBER: builtins.int + BATCH_SOURCE_FIELD_NUMBER: builtins.int + STREAM_SOURCE_FIELD_NUMBER: builtins.int + ONLINE_FIELD_NUMBER: builtins.int + USER_DEFINED_FUNCTION_FIELD_NUMBER: builtins.int + MODE_FIELD_NUMBER: builtins.int + AGGREGATIONS_FIELD_NUMBER: builtins.int + TIMESTAMP_FIELD_FIELD_NUMBER: builtins.int + FEATURE_TRANSFORMATION_FIELD_NUMBER: builtins.int + name: builtins.str + """Name of the feature view. Must be unique. Not updated.""" + project: builtins.str + """Name of Feast project that this feature view belongs to.""" + @property + def entities(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """List of names of entities associated with this feature view.""" + @property + def features(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """List of specifications for each feature defined as part of this feature view.""" + @property + def entity_columns(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Feature_pb2.FeatureSpecV2]: + """List of specifications for each entity defined as part of this feature view.""" + description: builtins.str + """Description of the feature view.""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + owner: builtins.str + """Owner of the feature view.""" + @property + def ttl(self) -> google.protobuf.duration_pb2.Duration: + """Features in this feature view can only be retrieved from online serving + younger than ttl. 
Ttl is measured as the duration of time between + the feature's event timestamp and when the feature is retrieved. + Feature values outside ttl will be returned as unset values and indicated to the end user + """ + @property + def batch_source(self) -> feast.core.DataSource_pb2.DataSource: + """Batch/Offline DataSource where this view can retrieve offline feature data.""" + @property + def stream_source(self) -> feast.core.DataSource_pb2.DataSource: + """Streaming DataSource from which this view can consume "online" feature data.""" + online: builtins.bool + """Whether these features should be served online or not""" + @property + def user_defined_function(self) -> feast.core.OnDemandFeatureView_pb2.UserDefinedFunction: + """Serialized function that is encoded in the stream feature view""" + mode: builtins.str + """Mode of execution""" + @property + def aggregations(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Aggregation_pb2.Aggregation]: + """Aggregation definitions""" + timestamp_field: builtins.str + """Timestamp field for aggregation""" + @property + def feature_transformation(self) -> feast.core.Transformation_pb2.FeatureTransformationV2: + """Oneof with {user_defined_function, on_demand_substrait_transformation}""" + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + entities: collections.abc.Iterable[builtins.str] | None = ..., + features: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + entity_columns: collections.abc.Iterable[feast.core.Feature_pb2.FeatureSpecV2] | None = ..., + description: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + owner: builtins.str = ..., + ttl: google.protobuf.duration_pb2.Duration | None = ..., + batch_source: feast.core.DataSource_pb2.DataSource | None = ..., + stream_source: feast.core.DataSource_pb2.DataSource | None = ..., + online: builtins.bool = ..., + user_defined_function: feast.core.OnDemandFeatureView_pb2.UserDefinedFunction | None = ..., + mode: builtins.str = ..., + aggregations: collections.abc.Iterable[feast.core.Aggregation_pb2.Aggregation] | None = ..., + timestamp_field: builtins.str = ..., + feature_transformation: feast.core.Transformation_pb2.FeatureTransformationV2 | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "feature_transformation", b"feature_transformation", "stream_source", b"stream_source", "ttl", b"ttl", "user_defined_function", b"user_defined_function"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["aggregations", b"aggregations", "batch_source", b"batch_source", "description", b"description", "entities", b"entities", "entity_columns", b"entity_columns", "feature_transformation", b"feature_transformation", "features", b"features", "mode", b"mode", "name", b"name", "online", b"online", "owner", b"owner", "project", b"project", "stream_source", b"stream_source", "tags", b"tags", "timestamp_field", b"timestamp_field", "ttl", b"ttl", "user_defined_function", b"user_defined_function"]) -> None: ...
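Usage note for the spec above: ttl is a message-typed field (google.protobuf.Duration), so presence is tracked explicitly via HasField. A minimal sketch under the same assumptions as before (import path from this diff; the view name, project, entity, mode, and one-hour TTL are illustrative placeholders):

from google.protobuf import duration_pb2

from feast.protos.feast.core import StreamFeatureView_pb2

spec = StreamFeatureView_pb2.StreamFeatureViewSpec(
    name="driver_hourly_stats",  # placeholder
    project="demo",              # placeholder
    entities=["driver_id"],
    mode="spark",
    online=True,
    # Values whose event timestamp is older than the TTL at retrieval time
    # are returned as unset, per the ttl docstring above.
    ttl=duration_pb2.Duration(seconds=3600),
)

assert spec.HasField("ttl")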
+ +global___StreamFeatureViewSpec = StreamFeatureViewSpec diff --git a/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2_grpc.py b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/Transformation_pb2.py b/sdk/python/feast/protos/feast/core/Transformation_pb2.py new file mode 100644 index 0000000000..9fd11d3026 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Transformation_pb2.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/Transformation.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x66\x65\x61st/core/Transformation.proto\x12\nfeast.core\"F\n\x15UserDefinedFunctionV2\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x62ody\x18\x02 \x01(\x0c\x12\x11\n\tbody_text\x18\x03 \x01(\t\"\xba\x01\n\x17\x46\x65\x61tureTransformationV2\x12\x42\n\x15user_defined_function\x18\x01 \x01(\x0b\x32!.feast.core.UserDefinedFunctionV2H\x00\x12I\n\x18substrait_transformation\x18\x02 \x01(\x0b\x32%.feast.core.SubstraitTransformationV2H\x00\x42\x10\n\x0etransformation\"J\n\x19SubstraitTransformationV2\x12\x16\n\x0esubstrait_plan\x18\x01 \x01(\x0c\x12\x15\n\ribis_function\x18\x02 \x01(\x0c\x42_\n\x10\x66\x65\x61st.proto.coreB\x1a\x46\x65\x61tureTransformationProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.Transformation_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\032FeatureTransformationProtoZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_USERDEFINEDFUNCTIONV2']._serialized_start=47 + _globals['_USERDEFINEDFUNCTIONV2']._serialized_end=117 + _globals['_FEATURETRANSFORMATIONV2']._serialized_start=120 + _globals['_FEATURETRANSFORMATIONV2']._serialized_end=306 + _globals['_SUBSTRAITTRANSFORMATIONV2']._serialized_start=308 + _globals['_SUBSTRAITTRANSFORMATIONV2']._serialized_end=382 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Transformation_pb2.pyi b/sdk/python/feast/protos/feast/core/Transformation_pb2.pyi new file mode 100644 index 0000000000..1120c447e0 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Transformation_pb2.pyi @@ -0,0 +1,80 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class UserDefinedFunctionV2(google.protobuf.message.Message): + """Serialized representation of python function.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + BODY_FIELD_NUMBER: builtins.int + BODY_TEXT_FIELD_NUMBER: builtins.int + name: builtins.str + """The function name""" + body: builtins.bytes + """The python-syntax function body (serialized by dill)""" + body_text: builtins.str + """The string representation of the udf""" + def __init__( + self, + *, + name: builtins.str = ..., + body: builtins.bytes = ..., + body_text: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["body", b"body", "body_text", b"body_text", "name", b"name"]) -> None: ... + +global___UserDefinedFunctionV2 = UserDefinedFunctionV2 + +class FeatureTransformationV2(google.protobuf.message.Message): + """A feature transformation executed as a user-defined function""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + USER_DEFINED_FUNCTION_FIELD_NUMBER: builtins.int + SUBSTRAIT_TRANSFORMATION_FIELD_NUMBER: builtins.int + @property + def user_defined_function(self) -> global___UserDefinedFunctionV2: ... + @property + def substrait_transformation(self) -> global___SubstraitTransformationV2: ... + def __init__( + self, + *, + user_defined_function: global___UserDefinedFunctionV2 | None = ..., + substrait_transformation: global___SubstraitTransformationV2 | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["substrait_transformation", b"substrait_transformation", "transformation", b"transformation", "user_defined_function", b"user_defined_function"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["substrait_transformation", b"substrait_transformation", "transformation", b"transformation", "user_defined_function", b"user_defined_function"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["transformation", b"transformation"]) -> typing_extensions.Literal["user_defined_function", "substrait_transformation"] | None: ... + +global___FeatureTransformationV2 = FeatureTransformationV2 + +class SubstraitTransformationV2(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SUBSTRAIT_PLAN_FIELD_NUMBER: builtins.int + IBIS_FUNCTION_FIELD_NUMBER: builtins.int + substrait_plan: builtins.bytes + ibis_function: builtins.bytes + def __init__( + self, + *, + substrait_plan: builtins.bytes = ..., + ibis_function: builtins.bytes = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["ibis_function", b"ibis_function", "substrait_plan", b"substrait_plan"]) -> None: ... + +global___SubstraitTransformationV2 = SubstraitTransformationV2 diff --git a/sdk/python/feast/protos/feast/core/Transformation_pb2_grpc.py b/sdk/python/feast/protos/feast/core/Transformation_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/Transformation_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/ValidationProfile_pb2.py b/sdk/python/feast/protos/feast/core/ValidationProfile_pb2.py new file mode 100644 index 0000000000..0fb27ceab1 --- /dev/null +++ b/sdk/python/feast/protos/feast/core/ValidationProfile_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/core/ValidationProfile.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"feast/core/ValidationProfile.proto\x12\nfeast.core\"\x83\x01\n\x14GEValidationProfiler\x12\x46\n\x08profiler\x18\x01 \x01(\x0b\x32\x34.feast.core.GEValidationProfiler.UserDefinedProfiler\x1a#\n\x13UserDefinedProfiler\x12\x0c\n\x04\x62ody\x18\x01 \x01(\x0c\"0\n\x13GEValidationProfile\x12\x19\n\x11\x65xpectation_suite\x18\x01 \x01(\x0c\"\xdd\x02\n\x13ValidationReference\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1e\n\x16reference_dataset_name\x18\x02 \x01(\t\x12\x0f\n\x07project\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x37\n\x04tags\x18\x05 \x03(\x0b\x32).feast.core.ValidationReference.TagsEntry\x12\x37\n\x0bge_profiler\x18\x06 \x01(\x0b\x32 .feast.core.GEValidationProfilerH\x00\x12\x35\n\nge_profile\x18\x07 \x01(\x0b\x32\x1f.feast.core.GEValidationProfileH\x01\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08profilerB\x10\n\x0e\x63\x61\x63hed_profileBV\n\x10\x66\x65\x61st.proto.coreB\x11ValidationProfileZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.core.ValidationProfile_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\021ValidationProfileZ/github.com/feast-dev/feast/go/protos/feast/core' + _globals['_VALIDATIONREFERENCE_TAGSENTRY']._options = None + _globals['_VALIDATIONREFERENCE_TAGSENTRY']._serialized_options = b'8\001' + _globals['_GEVALIDATIONPROFILER']._serialized_start=51 + _globals['_GEVALIDATIONPROFILER']._serialized_end=182 + _globals['_GEVALIDATIONPROFILER_USERDEFINEDPROFILER']._serialized_start=147 + _globals['_GEVALIDATIONPROFILER_USERDEFINEDPROFILER']._serialized_end=182 + _globals['_GEVALIDATIONPROFILE']._serialized_start=184 + _globals['_GEVALIDATIONPROFILE']._serialized_end=232 + _globals['_VALIDATIONREFERENCE']._serialized_start=235 + _globals['_VALIDATIONREFERENCE']._serialized_end=584 + _globals['_VALIDATIONREFERENCE_TAGSENTRY']._serialized_start=511 + _globals['_VALIDATIONREFERENCE_TAGSENTRY']._serialized_end=554 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/ValidationProfile_pb2.pyi b/sdk/python/feast/protos/feast/core/ValidationProfile_pb2.pyi new file mode 100644 index 0000000000..93da1e0f5e --- /dev/null +++ b/sdk/python/feast/protos/feast/core/ValidationProfile_pb2.pyi @@ -0,0 +1,136 @@ +""" +@generated by 
mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2021 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class GEValidationProfiler(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class UserDefinedProfiler(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BODY_FIELD_NUMBER: builtins.int + body: builtins.bytes + """The python-syntax function body (serialized by dill)""" + def __init__( + self, + *, + body: builtins.bytes = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["body", b"body"]) -> None: ... + + PROFILER_FIELD_NUMBER: builtins.int + @property + def profiler(self) -> global___GEValidationProfiler.UserDefinedProfiler: ... + def __init__( + self, + *, + profiler: global___GEValidationProfiler.UserDefinedProfiler | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["profiler", b"profiler"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["profiler", b"profiler"]) -> None: ... + +global___GEValidationProfiler = GEValidationProfiler + +class GEValidationProfile(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + EXPECTATION_SUITE_FIELD_NUMBER: builtins.int + expectation_suite: builtins.bytes + """JSON-serialized ExpectationSuite object""" + def __init__( + self, + *, + expectation_suite: builtins.bytes = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["expectation_suite", b"expectation_suite"]) -> None: ... + +global___GEValidationProfile = GEValidationProfile + +class ValidationReference(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + NAME_FIELD_NUMBER: builtins.int + REFERENCE_DATASET_NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + GE_PROFILER_FIELD_NUMBER: builtins.int + GE_PROFILE_FIELD_NUMBER: builtins.int + name: builtins.str + """Unique name of validation reference within the project""" + reference_dataset_name: builtins.str + """Name of saved dataset used as reference dataset""" + project: builtins.str + """Name of Feast project that this object source belongs to""" + description: builtins.str + """Description of the validation reference""" + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """User defined metadata""" + @property + def ge_profiler(self) -> global___GEValidationProfiler: ... + @property + def ge_profile(self) -> global___GEValidationProfile: ... + def __init__( + self, + *, + name: builtins.str = ..., + reference_dataset_name: builtins.str = ..., + project: builtins.str = ..., + description: builtins.str = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ge_profiler: global___GEValidationProfiler | None = ..., + ge_profile: global___GEValidationProfile | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["cached_profile", b"cached_profile", "ge_profile", b"ge_profile", "ge_profiler", b"ge_profiler", "profiler", b"profiler"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["cached_profile", b"cached_profile", "description", b"description", "ge_profile", b"ge_profile", "ge_profiler", b"ge_profiler", "name", b"name", "profiler", b"profiler", "project", b"project", "reference_dataset_name", b"reference_dataset_name", "tags", b"tags"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing_extensions.Literal["cached_profile", b"cached_profile"]) -> typing_extensions.Literal["ge_profile"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing_extensions.Literal["profiler", b"profiler"]) -> typing_extensions.Literal["ge_profiler"] | None: ... + +global___ValidationReference = ValidationReference diff --git a/sdk/python/feast/protos/feast/core/ValidationProfile_pb2_grpc.py b/sdk/python/feast/protos/feast/core/ValidationProfile_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/core/ValidationProfile_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/core/__init__.py b/sdk/python/feast/protos/feast/core/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/protos/feast/registry/RegistryServer_pb2.py b/sdk/python/feast/protos/feast/registry/RegistryServer_pb2.py new file mode 100644 index 0000000000..e0cae3da4b --- /dev/null +++ b/sdk/python/feast/protos/feast/registry/RegistryServer_pb2.py @@ -0,0 +1,198 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/registry/RegistryServer.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.core import Registry_pb2 as feast_dot_core_dot_Registry__pb2 +from feast.protos.feast.core import Entity_pb2 as feast_dot_core_dot_Entity__pb2 +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 +from feast.protos.feast.core import FeatureView_pb2 as feast_dot_core_dot_FeatureView__pb2 +from feast.protos.feast.core import StreamFeatureView_pb2 as feast_dot_core_dot_StreamFeatureView__pb2 +from feast.protos.feast.core import OnDemandFeatureView_pb2 as feast_dot_core_dot_OnDemandFeatureView__pb2 +from feast.protos.feast.core import FeatureService_pb2 as feast_dot_core_dot_FeatureService__pb2 +from feast.protos.feast.core import SavedDataset_pb2 as feast_dot_core_dot_SavedDataset__pb2 +from feast.protos.feast.core import ValidationProfile_pb2 as feast_dot_core_dot_ValidationProfile__pb2 +from feast.protos.feast.core import InfraObject_pb2 as feast_dot_core_dot_InfraObject__pb2 +from feast.protos.feast.core import Permission_pb2 as feast_dot_core_dot_Permission__pb2 +from feast.protos.feast.core import Project_pb2 as feast_dot_core_dot_Project__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n#feast/registry/RegistryServer.proto\x12\x0e\x66\x65\x61st.registry\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x19\x66\x65\x61st/core/Registry.proto\x1a\x17\x66\x65\x61st/core/Entity.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x1c\x66\x65\x61st/core/FeatureView.proto\x1a\"feast/core/StreamFeatureView.proto\x1a$feast/core/OnDemandFeatureView.proto\x1a\x1f\x66\x65\x61st/core/FeatureService.proto\x1a\x1d\x66\x65\x61st/core/SavedDataset.proto\x1a\"feast/core/ValidationProfile.proto\x1a\x1c\x66\x65\x61st/core/InfraObject.proto\x1a\x1b\x66\x65\x61st/core/Permission.proto\x1a\x18\x66\x65\x61st/core/Project.proto\"!\n\x0eRefreshRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\"W\n\x12UpdateInfraRequest\x12 \n\x05infra\x18\x01 \x01(\x0b\x32\x11.feast.core.Infra\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"7\n\x0fGetInfraRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\"B\n\x1aListProjectMetadataRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\"T\n\x1bListProjectMetadataResponse\x12\x35\n\x10project_metadata\x18\x01 \x03(\x0b\x32\x1b.feast.core.ProjectMetadata\"\xcb\x01\n\x1b\x41pplyMaterializationRequest\x12-\n\x0c\x66\x65\x61ture_view\x18\x01 \x01(\x0b\x32\x17.feast.core.FeatureView\x12\x0f\n\x07project\x18\x02 \x01(\t\x12.\n\nstart_date\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_date\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0e\n\x06\x63ommit\x18\x05 \x01(\x08\"Y\n\x12\x41pplyEntityRequest\x12\"\n\x06\x65ntity\x18\x01 \x01(\x0b\x32\x12.feast.core.Entity\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 
\x01(\x08\"F\n\x10GetEntityRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xa5\x01\n\x13ListEntitiesRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12;\n\x04tags\x18\x03 \x03(\x0b\x32-.feast.registry.ListEntitiesRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"<\n\x14ListEntitiesResponse\x12$\n\x08\x65ntities\x18\x01 \x03(\x0b\x32\x12.feast.core.Entity\"D\n\x13\x44\x65leteEntityRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"f\n\x16\x41pplyDataSourceRequest\x12+\n\x0b\x64\x61ta_source\x18\x01 \x01(\x0b\x32\x16.feast.core.DataSource\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"J\n\x14GetDataSourceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xab\x01\n\x16ListDataSourcesRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12>\n\x04tags\x18\x03 \x03(\x0b\x32\x30.feast.registry.ListDataSourcesRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"G\n\x17ListDataSourcesResponse\x12,\n\x0c\x64\x61ta_sources\x18\x01 \x03(\x0b\x32\x16.feast.core.DataSource\"H\n\x17\x44\x65leteDataSourceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"\x81\x02\n\x17\x41pplyFeatureViewRequest\x12/\n\x0c\x66\x65\x61ture_view\x18\x01 \x01(\x0b\x32\x17.feast.core.FeatureViewH\x00\x12\x41\n\x16on_demand_feature_view\x18\x02 \x01(\x0b\x32\x1f.feast.core.OnDemandFeatureViewH\x00\x12<\n\x13stream_feature_view\x18\x03 \x01(\x0b\x32\x1d.feast.core.StreamFeatureViewH\x00\x12\x0f\n\x07project\x18\x04 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x05 \x01(\x08\x42\x13\n\x11\x62\x61se_feature_view\"K\n\x15GetFeatureViewRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xad\x01\n\x17ListFeatureViewsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12?\n\x04tags\x18\x03 \x03(\x0b\x32\x31.feast.registry.ListFeatureViewsRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"J\n\x18ListFeatureViewsResponse\x12.\n\rfeature_views\x18\x01 \x03(\x0b\x32\x17.feast.core.FeatureView\"I\n\x18\x44\x65leteFeatureViewRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"\xd6\x01\n\x0e\x41nyFeatureView\x12/\n\x0c\x66\x65\x61ture_view\x18\x01 \x01(\x0b\x32\x17.feast.core.FeatureViewH\x00\x12\x41\n\x16on_demand_feature_view\x18\x02 \x01(\x0b\x32\x1f.feast.core.OnDemandFeatureViewH\x00\x12<\n\x13stream_feature_view\x18\x03 \x01(\x0b\x32\x1d.feast.core.StreamFeatureViewH\x00\x42\x12\n\x10\x61ny_feature_view\"N\n\x18GetAnyFeatureViewRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"U\n\x19GetAnyFeatureViewResponse\x12\x38\n\x10\x61ny_feature_view\x18\x01 \x01(\x0b\x32\x1e.feast.registry.AnyFeatureView\"\xb3\x01\n\x1aListAllFeatureViewsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12\x42\n\x04tags\x18\x03 
\x03(\x0b\x32\x34.feast.registry.ListAllFeatureViewsRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"T\n\x1bListAllFeatureViewsResponse\x12\x35\n\rfeature_views\x18\x01 \x03(\x0b\x32\x1e.feast.registry.AnyFeatureView\"Q\n\x1bGetStreamFeatureViewRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xb9\x01\n\x1dListStreamFeatureViewsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12\x45\n\x04tags\x18\x03 \x03(\x0b\x32\x37.feast.registry.ListStreamFeatureViewsRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x1eListStreamFeatureViewsResponse\x12;\n\x14stream_feature_views\x18\x01 \x03(\x0b\x32\x1d.feast.core.StreamFeatureView\"S\n\x1dGetOnDemandFeatureViewRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xbd\x01\n\x1fListOnDemandFeatureViewsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12G\n\x04tags\x18\x03 \x03(\x0b\x32\x39.feast.registry.ListOnDemandFeatureViewsRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"d\n ListOnDemandFeatureViewsResponse\x12@\n\x17on_demand_feature_views\x18\x01 \x03(\x0b\x32\x1f.feast.core.OnDemandFeatureView\"r\n\x1a\x41pplyFeatureServiceRequest\x12\x33\n\x0f\x66\x65\x61ture_service\x18\x01 \x01(\x0b\x32\x1a.feast.core.FeatureService\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"N\n\x18GetFeatureServiceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xb3\x01\n\x1aListFeatureServicesRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12\x42\n\x04tags\x18\x03 \x03(\x0b\x32\x34.feast.registry.ListFeatureServicesRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x1bListFeatureServicesResponse\x12\x34\n\x10\x66\x65\x61ture_services\x18\x01 \x03(\x0b\x32\x1a.feast.core.FeatureService\"L\n\x1b\x44\x65leteFeatureServiceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"l\n\x18\x41pplySavedDatasetRequest\x12/\n\rsaved_dataset\x18\x01 \x01(\x0b\x32\x18.feast.core.SavedDataset\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"L\n\x16GetSavedDatasetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xaf\x01\n\x18ListSavedDatasetsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12@\n\x04tags\x18\x03 \x03(\x0b\x32\x32.feast.registry.ListSavedDatasetsRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"M\n\x19ListSavedDatasetsResponse\x12\x30\n\x0esaved_datasets\x18\x01 \x03(\x0b\x32\x18.feast.core.SavedDataset\"J\n\x19\x44\x65leteSavedDatasetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"\x81\x01\n\x1f\x41pplyValidationReferenceRequest\x12=\n\x14validation_reference\x18\x01 \x01(\x0b\x32\x1f.feast.core.ValidationReference\x12\x0f\n\x07project\x18\x02 
\x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"S\n\x1dGetValidationReferenceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xbd\x01\n\x1fListValidationReferencesRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12G\n\x04tags\x18\x03 \x03(\x0b\x32\x39.feast.registry.ListValidationReferencesRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"b\n ListValidationReferencesResponse\x12>\n\x15validation_references\x18\x01 \x03(\x0b\x32\x1f.feast.core.ValidationReference\"Q\n DeleteValidationReferenceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"e\n\x16\x41pplyPermissionRequest\x12*\n\npermission\x18\x01 \x01(\x0b\x32\x16.feast.core.Permission\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"J\n\x14GetPermissionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x03 \x01(\x08\"\xab\x01\n\x16ListPermissionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\x12>\n\x04tags\x18\x03 \x03(\x0b\x32\x30.feast.registry.ListPermissionsRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"F\n\x17ListPermissionsResponse\x12+\n\x0bpermissions\x18\x01 \x03(\x0b\x32\x16.feast.core.Permission\"H\n\x17\x44\x65letePermissionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x03 \x01(\x08\"K\n\x13\x41pplyProjectRequest\x12$\n\x07project\x18\x01 \x01(\x0b\x32\x13.feast.core.Project\x12\x0e\n\x06\x63ommit\x18\x02 \x01(\x08\"6\n\x11GetProjectRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x61llow_cache\x18\x02 \x01(\x08\"\x94\x01\n\x13ListProjectsRequest\x12\x13\n\x0b\x61llow_cache\x18\x01 \x01(\x08\x12;\n\x04tags\x18\x02 \x03(\x0b\x32-.feast.registry.ListProjectsRequest.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"=\n\x14ListProjectsResponse\x12%\n\x08projects\x18\x01 \x03(\x0b\x32\x13.feast.core.Project\"4\n\x14\x44\x65leteProjectRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x63ommit\x18\x02 \x01(\x08\x32\xcb \n\x0eRegistryServer\x12K\n\x0b\x41pplyEntity\x12\".feast.registry.ApplyEntityRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x43\n\tGetEntity\x12 
.feast.registry.GetEntityRequest\x1a\x12.feast.core.Entity\"\x00\x12[\n\x0cListEntities\x12#.feast.registry.ListEntitiesRequest\x1a$.feast.registry.ListEntitiesResponse\"\x00\x12M\n\x0c\x44\x65leteEntity\x12#.feast.registry.DeleteEntityRequest\x1a\x16.google.protobuf.Empty\"\x00\x12S\n\x0f\x41pplyDataSource\x12&.feast.registry.ApplyDataSourceRequest\x1a\x16.google.protobuf.Empty\"\x00\x12O\n\rGetDataSource\x12$.feast.registry.GetDataSourceRequest\x1a\x16.feast.core.DataSource\"\x00\x12\x64\n\x0fListDataSources\x12&.feast.registry.ListDataSourcesRequest\x1a\'.feast.registry.ListDataSourcesResponse\"\x00\x12U\n\x10\x44\x65leteDataSource\x12\'.feast.registry.DeleteDataSourceRequest\x1a\x16.google.protobuf.Empty\"\x00\x12U\n\x10\x41pplyFeatureView\x12\'.feast.registry.ApplyFeatureViewRequest\x1a\x16.google.protobuf.Empty\"\x00\x12W\n\x11\x44\x65leteFeatureView\x12(.feast.registry.DeleteFeatureViewRequest\x1a\x16.google.protobuf.Empty\"\x00\x12j\n\x11GetAnyFeatureView\x12(.feast.registry.GetAnyFeatureViewRequest\x1a).feast.registry.GetAnyFeatureViewResponse\"\x00\x12p\n\x13ListAllFeatureViews\x12*.feast.registry.ListAllFeatureViewsRequest\x1a+.feast.registry.ListAllFeatureViewsResponse\"\x00\x12R\n\x0eGetFeatureView\x12%.feast.registry.GetFeatureViewRequest\x1a\x17.feast.core.FeatureView\"\x00\x12g\n\x10ListFeatureViews\x12\'.feast.registry.ListFeatureViewsRequest\x1a(.feast.registry.ListFeatureViewsResponse\"\x00\x12\x64\n\x14GetStreamFeatureView\x12+.feast.registry.GetStreamFeatureViewRequest\x1a\x1d.feast.core.StreamFeatureView\"\x00\x12y\n\x16ListStreamFeatureViews\x12-.feast.registry.ListStreamFeatureViewsRequest\x1a..feast.registry.ListStreamFeatureViewsResponse\"\x00\x12j\n\x16GetOnDemandFeatureView\x12-.feast.registry.GetOnDemandFeatureViewRequest\x1a\x1f.feast.core.OnDemandFeatureView\"\x00\x12\x7f\n\x18ListOnDemandFeatureViews\x12/.feast.registry.ListOnDemandFeatureViewsRequest\x1a\x30.feast.registry.ListOnDemandFeatureViewsResponse\"\x00\x12[\n\x13\x41pplyFeatureService\x12*.feast.registry.ApplyFeatureServiceRequest\x1a\x16.google.protobuf.Empty\"\x00\x12[\n\x11GetFeatureService\x12(.feast.registry.GetFeatureServiceRequest\x1a\x1a.feast.core.FeatureService\"\x00\x12p\n\x13ListFeatureServices\x12*.feast.registry.ListFeatureServicesRequest\x1a+.feast.registry.ListFeatureServicesResponse\"\x00\x12]\n\x14\x44\x65leteFeatureService\x12+.feast.registry.DeleteFeatureServiceRequest\x1a\x16.google.protobuf.Empty\"\x00\x12W\n\x11\x41pplySavedDataset\x12(.feast.registry.ApplySavedDatasetRequest\x1a\x16.google.protobuf.Empty\"\x00\x12U\n\x0fGetSavedDataset\x12&.feast.registry.GetSavedDatasetRequest\x1a\x18.feast.core.SavedDataset\"\x00\x12j\n\x11ListSavedDatasets\x12(.feast.registry.ListSavedDatasetsRequest\x1a).feast.registry.ListSavedDatasetsResponse\"\x00\x12Y\n\x12\x44\x65leteSavedDataset\x12).feast.registry.DeleteSavedDatasetRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x65\n\x18\x41pplyValidationReference\x12/.feast.registry.ApplyValidationReferenceRequest\x1a\x16.google.protobuf.Empty\"\x00\x12j\n\x16GetValidationReference\x12-.feast.registry.GetValidationReferenceRequest\x1a\x1f.feast.core.ValidationReference\"\x00\x12\x7f\n\x18ListValidationReferences\x12/.feast.registry.ListValidationReferencesRequest\x1a\x30.feast.registry.ListValidationReferencesResponse\"\x00\x12g\n\x19\x44\x65leteValidationReference\x12\x30.feast.registry.DeleteValidationReferenceRequest\x1a\x16.google.protobuf.Empty\"\x00\x12S\n\x0f\x41pplyPermission\x12&.feast.registry.ApplyPermissionRequest\x1a\x16.google.proto
buf.Empty\"\x00\x12O\n\rGetPermission\x12$.feast.registry.GetPermissionRequest\x1a\x16.feast.core.Permission\"\x00\x12\x64\n\x0fListPermissions\x12&.feast.registry.ListPermissionsRequest\x1a\'.feast.registry.ListPermissionsResponse\"\x00\x12U\n\x10\x44\x65letePermission\x12\'.feast.registry.DeletePermissionRequest\x1a\x16.google.protobuf.Empty\"\x00\x12M\n\x0c\x41pplyProject\x12#.feast.registry.ApplyProjectRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x46\n\nGetProject\x12!.feast.registry.GetProjectRequest\x1a\x13.feast.core.Project\"\x00\x12[\n\x0cListProjects\x12#.feast.registry.ListProjectsRequest\x1a$.feast.registry.ListProjectsResponse\"\x00\x12O\n\rDeleteProject\x12$.feast.registry.DeleteProjectRequest\x1a\x16.google.protobuf.Empty\"\x00\x12]\n\x14\x41pplyMaterialization\x12+.feast.registry.ApplyMaterializationRequest\x1a\x16.google.protobuf.Empty\"\x00\x12p\n\x13ListProjectMetadata\x12*.feast.registry.ListProjectMetadataRequest\x1a+.feast.registry.ListProjectMetadataResponse\"\x00\x12K\n\x0bUpdateInfra\x12\".feast.registry.UpdateInfraRequest\x1a\x16.google.protobuf.Empty\"\x00\x12@\n\x08GetInfra\x12\x1f.feast.registry.GetInfraRequest\x1a\x11.feast.core.Infra\"\x00\x12:\n\x06\x43ommit\x12\x16.google.protobuf.Empty\x1a\x16.google.protobuf.Empty\"\x00\x12\x43\n\x07Refresh\x12\x1e.feast.registry.RefreshRequest\x1a\x16.google.protobuf.Empty\"\x00\x12\x37\n\x05Proto\x12\x16.google.protobuf.Empty\x1a\x14.feast.core.Registry\"\x00\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.registry.RegistryServer_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_LISTENTITIESREQUEST_TAGSENTRY']._options = None + _globals['_LISTENTITIESREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTDATASOURCESREQUEST_TAGSENTRY']._options = None + _globals['_LISTDATASOURCESREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTFEATUREVIEWSREQUEST_TAGSENTRY']._options = None + _globals['_LISTFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTALLFEATUREVIEWSREQUEST_TAGSENTRY']._options = None + _globals['_LISTALLFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTSTREAMFEATUREVIEWSREQUEST_TAGSENTRY']._options = None + _globals['_LISTSTREAMFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTONDEMANDFEATUREVIEWSREQUEST_TAGSENTRY']._options = None + _globals['_LISTONDEMANDFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTFEATURESERVICESREQUEST_TAGSENTRY']._options = None + _globals['_LISTFEATURESERVICESREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTSAVEDDATASETSREQUEST_TAGSENTRY']._options = None + _globals['_LISTSAVEDDATASETSREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTVALIDATIONREFERENCESREQUEST_TAGSENTRY']._options = None + _globals['_LISTVALIDATIONREFERENCESREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTPERMISSIONSREQUEST_TAGSENTRY']._options = None + _globals['_LISTPERMISSIONSREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_LISTPROJECTSREQUEST_TAGSENTRY']._options = None + _globals['_LISTPROJECTSREQUEST_TAGSENTRY']._serialized_options = b'8\001' + _globals['_REFRESHREQUEST']._serialized_start=487 + _globals['_REFRESHREQUEST']._serialized_end=520 + _globals['_UPDATEINFRAREQUEST']._serialized_start=522 + 
_globals['_UPDATEINFRAREQUEST']._serialized_end=609 + _globals['_GETINFRAREQUEST']._serialized_start=611 + _globals['_GETINFRAREQUEST']._serialized_end=666 + _globals['_LISTPROJECTMETADATAREQUEST']._serialized_start=668 + _globals['_LISTPROJECTMETADATAREQUEST']._serialized_end=734 + _globals['_LISTPROJECTMETADATARESPONSE']._serialized_start=736 + _globals['_LISTPROJECTMETADATARESPONSE']._serialized_end=820 + _globals['_APPLYMATERIALIZATIONREQUEST']._serialized_start=823 + _globals['_APPLYMATERIALIZATIONREQUEST']._serialized_end=1026 + _globals['_APPLYENTITYREQUEST']._serialized_start=1028 + _globals['_APPLYENTITYREQUEST']._serialized_end=1117 + _globals['_GETENTITYREQUEST']._serialized_start=1119 + _globals['_GETENTITYREQUEST']._serialized_end=1189 + _globals['_LISTENTITIESREQUEST']._serialized_start=1192 + _globals['_LISTENTITIESREQUEST']._serialized_end=1357 + _globals['_LISTENTITIESREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTENTITIESREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTENTITIESRESPONSE']._serialized_start=1359 + _globals['_LISTENTITIESRESPONSE']._serialized_end=1419 + _globals['_DELETEENTITYREQUEST']._serialized_start=1421 + _globals['_DELETEENTITYREQUEST']._serialized_end=1489 + _globals['_APPLYDATASOURCEREQUEST']._serialized_start=1491 + _globals['_APPLYDATASOURCEREQUEST']._serialized_end=1593 + _globals['_GETDATASOURCEREQUEST']._serialized_start=1595 + _globals['_GETDATASOURCEREQUEST']._serialized_end=1669 + _globals['_LISTDATASOURCESREQUEST']._serialized_start=1672 + _globals['_LISTDATASOURCESREQUEST']._serialized_end=1843 + _globals['_LISTDATASOURCESREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTDATASOURCESREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTDATASOURCESRESPONSE']._serialized_start=1845 + _globals['_LISTDATASOURCESRESPONSE']._serialized_end=1916 + _globals['_DELETEDATASOURCEREQUEST']._serialized_start=1918 + _globals['_DELETEDATASOURCEREQUEST']._serialized_end=1990 + _globals['_APPLYFEATUREVIEWREQUEST']._serialized_start=1993 + _globals['_APPLYFEATUREVIEWREQUEST']._serialized_end=2250 + _globals['_GETFEATUREVIEWREQUEST']._serialized_start=2252 + _globals['_GETFEATUREVIEWREQUEST']._serialized_end=2327 + _globals['_LISTFEATUREVIEWSREQUEST']._serialized_start=2330 + _globals['_LISTFEATUREVIEWSREQUEST']._serialized_end=2503 + _globals['_LISTFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTFEATUREVIEWSRESPONSE']._serialized_start=2505 + _globals['_LISTFEATUREVIEWSRESPONSE']._serialized_end=2579 + _globals['_DELETEFEATUREVIEWREQUEST']._serialized_start=2581 + _globals['_DELETEFEATUREVIEWREQUEST']._serialized_end=2654 + _globals['_ANYFEATUREVIEW']._serialized_start=2657 + _globals['_ANYFEATUREVIEW']._serialized_end=2871 + _globals['_GETANYFEATUREVIEWREQUEST']._serialized_start=2873 + _globals['_GETANYFEATUREVIEWREQUEST']._serialized_end=2951 + _globals['_GETANYFEATUREVIEWRESPONSE']._serialized_start=2953 + _globals['_GETANYFEATUREVIEWRESPONSE']._serialized_end=3038 + _globals['_LISTALLFEATUREVIEWSREQUEST']._serialized_start=3041 + _globals['_LISTALLFEATUREVIEWSREQUEST']._serialized_end=3220 + _globals['_LISTALLFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTALLFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTALLFEATUREVIEWSRESPONSE']._serialized_start=3222 + _globals['_LISTALLFEATUREVIEWSRESPONSE']._serialized_end=3306 + _globals['_GETSTREAMFEATUREVIEWREQUEST']._serialized_start=3308 
+ _globals['_GETSTREAMFEATUREVIEWREQUEST']._serialized_end=3389 + _globals['_LISTSTREAMFEATUREVIEWSREQUEST']._serialized_start=3392 + _globals['_LISTSTREAMFEATUREVIEWSREQUEST']._serialized_end=3577 + _globals['_LISTSTREAMFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTSTREAMFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTSTREAMFEATUREVIEWSRESPONSE']._serialized_start=3579 + _globals['_LISTSTREAMFEATUREVIEWSRESPONSE']._serialized_end=3672 + _globals['_GETONDEMANDFEATUREVIEWREQUEST']._serialized_start=3674 + _globals['_GETONDEMANDFEATUREVIEWREQUEST']._serialized_end=3757 + _globals['_LISTONDEMANDFEATUREVIEWSREQUEST']._serialized_start=3760 + _globals['_LISTONDEMANDFEATUREVIEWSREQUEST']._serialized_end=3949 + _globals['_LISTONDEMANDFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTONDEMANDFEATUREVIEWSREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTONDEMANDFEATUREVIEWSRESPONSE']._serialized_start=3951 + _globals['_LISTONDEMANDFEATUREVIEWSRESPONSE']._serialized_end=4051 + _globals['_APPLYFEATURESERVICEREQUEST']._serialized_start=4053 + _globals['_APPLYFEATURESERVICEREQUEST']._serialized_end=4167 + _globals['_GETFEATURESERVICEREQUEST']._serialized_start=4169 + _globals['_GETFEATURESERVICEREQUEST']._serialized_end=4247 + _globals['_LISTFEATURESERVICESREQUEST']._serialized_start=4250 + _globals['_LISTFEATURESERVICESREQUEST']._serialized_end=4429 + _globals['_LISTFEATURESERVICESREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTFEATURESERVICESREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTFEATURESERVICESRESPONSE']._serialized_start=4431 + _globals['_LISTFEATURESERVICESRESPONSE']._serialized_end=4514 + _globals['_DELETEFEATURESERVICEREQUEST']._serialized_start=4516 + _globals['_DELETEFEATURESERVICEREQUEST']._serialized_end=4592 + _globals['_APPLYSAVEDDATASETREQUEST']._serialized_start=4594 + _globals['_APPLYSAVEDDATASETREQUEST']._serialized_end=4702 + _globals['_GETSAVEDDATASETREQUEST']._serialized_start=4704 + _globals['_GETSAVEDDATASETREQUEST']._serialized_end=4780 + _globals['_LISTSAVEDDATASETSREQUEST']._serialized_start=4783 + _globals['_LISTSAVEDDATASETSREQUEST']._serialized_end=4958 + _globals['_LISTSAVEDDATASETSREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTSAVEDDATASETSREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTSAVEDDATASETSRESPONSE']._serialized_start=4960 + _globals['_LISTSAVEDDATASETSRESPONSE']._serialized_end=5037 + _globals['_DELETESAVEDDATASETREQUEST']._serialized_start=5039 + _globals['_DELETESAVEDDATASETREQUEST']._serialized_end=5113 + _globals['_APPLYVALIDATIONREFERENCEREQUEST']._serialized_start=5116 + _globals['_APPLYVALIDATIONREFERENCEREQUEST']._serialized_end=5245 + _globals['_GETVALIDATIONREFERENCEREQUEST']._serialized_start=5247 + _globals['_GETVALIDATIONREFERENCEREQUEST']._serialized_end=5330 + _globals['_LISTVALIDATIONREFERENCESREQUEST']._serialized_start=5333 + _globals['_LISTVALIDATIONREFERENCESREQUEST']._serialized_end=5522 + _globals['_LISTVALIDATIONREFERENCESREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTVALIDATIONREFERENCESREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTVALIDATIONREFERENCESRESPONSE']._serialized_start=5524 + _globals['_LISTVALIDATIONREFERENCESRESPONSE']._serialized_end=5622 + _globals['_DELETEVALIDATIONREFERENCEREQUEST']._serialized_start=5624 + _globals['_DELETEVALIDATIONREFERENCEREQUEST']._serialized_end=5705 + _globals['_APPLYPERMISSIONREQUEST']._serialized_start=5707 + 
_globals['_APPLYPERMISSIONREQUEST']._serialized_end=5808 + _globals['_GETPERMISSIONREQUEST']._serialized_start=5810 + _globals['_GETPERMISSIONREQUEST']._serialized_end=5884 + _globals['_LISTPERMISSIONSREQUEST']._serialized_start=5887 + _globals['_LISTPERMISSIONSREQUEST']._serialized_end=6058 + _globals['_LISTPERMISSIONSREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTPERMISSIONSREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTPERMISSIONSRESPONSE']._serialized_start=6060 + _globals['_LISTPERMISSIONSRESPONSE']._serialized_end=6130 + _globals['_DELETEPERMISSIONREQUEST']._serialized_start=6132 + _globals['_DELETEPERMISSIONREQUEST']._serialized_end=6204 + _globals['_APPLYPROJECTREQUEST']._serialized_start=6206 + _globals['_APPLYPROJECTREQUEST']._serialized_end=6281 + _globals['_GETPROJECTREQUEST']._serialized_start=6283 + _globals['_GETPROJECTREQUEST']._serialized_end=6337 + _globals['_LISTPROJECTSREQUEST']._serialized_start=6340 + _globals['_LISTPROJECTSREQUEST']._serialized_end=6488 + _globals['_LISTPROJECTSREQUEST_TAGSENTRY']._serialized_start=1314 + _globals['_LISTPROJECTSREQUEST_TAGSENTRY']._serialized_end=1357 + _globals['_LISTPROJECTSRESPONSE']._serialized_start=6490 + _globals['_LISTPROJECTSRESPONSE']._serialized_end=6551 + _globals['_DELETEPROJECTREQUEST']._serialized_start=6553 + _globals['_DELETEPROJECTREQUEST']._serialized_end=6605 + _globals['_REGISTRYSERVER']._serialized_start=6608 + _globals['_REGISTRYSERVER']._serialized_end=10779 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/registry/RegistryServer_pb2.pyi b/sdk/python/feast/protos/feast/registry/RegistryServer_pb2.pyi new file mode 100644 index 0000000000..f4507c02e2 --- /dev/null +++ b/sdk/python/feast/protos/feast/registry/RegistryServer_pb2.pyi @@ -0,0 +1,1318 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import builtins +import collections.abc +import feast.core.DataSource_pb2 +import feast.core.Entity_pb2 +import feast.core.FeatureService_pb2 +import feast.core.FeatureView_pb2 +import feast.core.InfraObject_pb2 +import feast.core.OnDemandFeatureView_pb2 +import feast.core.Permission_pb2 +import feast.core.Project_pb2 +import feast.core.Registry_pb2 +import feast.core.SavedDataset_pb2 +import feast.core.StreamFeatureView_pb2 +import feast.core.ValidationProfile_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class RefreshRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + project: builtins.str + def __init__( + self, + *, + project: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["project", b"project"]) -> None: ... + +global___RefreshRequest = RefreshRequest + +class UpdateInfraRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + INFRA_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def infra(self) -> feast.core.InfraObject_pb2.Infra: ... 
+ project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + infra: feast.core.InfraObject_pb2.Infra | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["infra", b"infra"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "infra", b"infra", "project", b"project"]) -> None: ... + +global___UpdateInfraRequest = UpdateInfraRequest + +class GetInfraRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project"]) -> None: ... + +global___GetInfraRequest = GetInfraRequest + +class ListProjectMetadataRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project"]) -> None: ... + +global___ListProjectMetadataRequest = ListProjectMetadataRequest + +class ListProjectMetadataResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_METADATA_FIELD_NUMBER: builtins.int + @property + def project_metadata(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Registry_pb2.ProjectMetadata]: ... + def __init__( + self, + *, + project_metadata: collections.abc.Iterable[feast.core.Registry_pb2.ProjectMetadata] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["project_metadata", b"project_metadata"]) -> None: ... + +global___ListProjectMetadataResponse = ListProjectMetadataResponse + +class ApplyMaterializationRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_VIEW_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + START_DATE_FIELD_NUMBER: builtins.int + END_DATE_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def feature_view(self) -> feast.core.FeatureView_pb2.FeatureView: ... + project: builtins.str + @property + def start_date(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def end_date(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + commit: builtins.bool + def __init__( + self, + *, + feature_view: feast.core.FeatureView_pb2.FeatureView | None = ..., + project: builtins.str = ..., + start_date: google.protobuf.timestamp_pb2.Timestamp | None = ..., + end_date: google.protobuf.timestamp_pb2.Timestamp | None = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["end_date", b"end_date", "feature_view", b"feature_view", "start_date", b"start_date"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "end_date", b"end_date", "feature_view", b"feature_view", "project", b"project", "start_date", b"start_date"]) -> None: ... + +global___ApplyMaterializationRequest = ApplyMaterializationRequest + +class ApplyEntityRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ENTITY_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def entity(self) -> feast.core.Entity_pb2.Entity: ... + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + entity: feast.core.Entity_pb2.Entity | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["entity", b"entity"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "entity", b"entity", "project", b"project"]) -> None: ... + +global___ApplyEntityRequest = ApplyEntityRequest + +class GetEntityRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetEntityRequest = GetEntityRequest + +class ListEntitiesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListEntitiesRequest = ListEntitiesRequest + +class ListEntitiesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ENTITIES_FIELD_NUMBER: builtins.int + @property + def entities(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Entity_pb2.Entity]: ... + def __init__( + self, + *, + entities: collections.abc.Iterable[feast.core.Entity_pb2.Entity] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entities", b"entities"]) -> None: ... 
+ +global___ListEntitiesResponse = ListEntitiesResponse + +class DeleteEntityRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name", "project", b"project"]) -> None: ... + +global___DeleteEntityRequest = DeleteEntityRequest + +class ApplyDataSourceRequest(google.protobuf.message.Message): + """DataSources""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DATA_SOURCE_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def data_source(self) -> feast.core.DataSource_pb2.DataSource: ... + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + data_source: feast.core.DataSource_pb2.DataSource | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["data_source", b"data_source"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "data_source", b"data_source", "project", b"project"]) -> None: ... + +global___ApplyDataSourceRequest = ApplyDataSourceRequest + +class GetDataSourceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetDataSourceRequest = GetDataSourceRequest + +class ListDataSourcesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... 
+ +global___ListDataSourcesRequest = ListDataSourcesRequest + +class ListDataSourcesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DATA_SOURCES_FIELD_NUMBER: builtins.int + @property + def data_sources(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.DataSource_pb2.DataSource]: ... + def __init__( + self, + *, + data_sources: collections.abc.Iterable[feast.core.DataSource_pb2.DataSource] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data_sources", b"data_sources"]) -> None: ... + +global___ListDataSourcesResponse = ListDataSourcesResponse + +class DeleteDataSourceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name", "project", b"project"]) -> None: ... + +global___DeleteDataSourceRequest = DeleteDataSourceRequest + +class ApplyFeatureViewRequest(google.protobuf.message.Message): + """FeatureViews""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_VIEW_FIELD_NUMBER: builtins.int + ON_DEMAND_FEATURE_VIEW_FIELD_NUMBER: builtins.int + STREAM_FEATURE_VIEW_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def feature_view(self) -> feast.core.FeatureView_pb2.FeatureView: ... + @property + def on_demand_feature_view(self) -> feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView: ... + @property + def stream_feature_view(self) -> feast.core.StreamFeatureView_pb2.StreamFeatureView: ... + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + feature_view: feast.core.FeatureView_pb2.FeatureView | None = ..., + on_demand_feature_view: feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView | None = ..., + stream_feature_view: feast.core.StreamFeatureView_pb2.StreamFeatureView | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["base_feature_view", b"base_feature_view", "feature_view", b"feature_view", "on_demand_feature_view", b"on_demand_feature_view", "stream_feature_view", b"stream_feature_view"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["base_feature_view", b"base_feature_view", "commit", b"commit", "feature_view", b"feature_view", "on_demand_feature_view", b"on_demand_feature_view", "project", b"project", "stream_feature_view", b"stream_feature_view"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["base_feature_view", b"base_feature_view"]) -> typing_extensions.Literal["feature_view", "on_demand_feature_view", "stream_feature_view"] | None: ... 
+ +global___ApplyFeatureViewRequest = ApplyFeatureViewRequest + +class GetFeatureViewRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetFeatureViewRequest = GetFeatureViewRequest + +class ListFeatureViewsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListFeatureViewsRequest = ListFeatureViewsRequest + +class ListFeatureViewsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_VIEWS_FIELD_NUMBER: builtins.int + @property + def feature_views(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.FeatureView_pb2.FeatureView]: ... + def __init__( + self, + *, + feature_views: collections.abc.Iterable[feast.core.FeatureView_pb2.FeatureView] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["feature_views", b"feature_views"]) -> None: ... + +global___ListFeatureViewsResponse = ListFeatureViewsResponse + +class DeleteFeatureViewRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name", "project", b"project"]) -> None: ... + +global___DeleteFeatureViewRequest = DeleteFeatureViewRequest + +class AnyFeatureView(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_VIEW_FIELD_NUMBER: builtins.int + ON_DEMAND_FEATURE_VIEW_FIELD_NUMBER: builtins.int + STREAM_FEATURE_VIEW_FIELD_NUMBER: builtins.int + @property + def feature_view(self) -> feast.core.FeatureView_pb2.FeatureView: ... 
+ @property + def on_demand_feature_view(self) -> feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView: ... + @property + def stream_feature_view(self) -> feast.core.StreamFeatureView_pb2.StreamFeatureView: ... + def __init__( + self, + *, + feature_view: feast.core.FeatureView_pb2.FeatureView | None = ..., + on_demand_feature_view: feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView | None = ..., + stream_feature_view: feast.core.StreamFeatureView_pb2.StreamFeatureView | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["any_feature_view", b"any_feature_view", "feature_view", b"feature_view", "on_demand_feature_view", b"on_demand_feature_view", "stream_feature_view", b"stream_feature_view"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["any_feature_view", b"any_feature_view", "feature_view", b"feature_view", "on_demand_feature_view", b"on_demand_feature_view", "stream_feature_view", b"stream_feature_view"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["any_feature_view", b"any_feature_view"]) -> typing_extensions.Literal["feature_view", "on_demand_feature_view", "stream_feature_view"] | None: ... + +global___AnyFeatureView = AnyFeatureView + +class GetAnyFeatureViewRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetAnyFeatureViewRequest = GetAnyFeatureViewRequest + +class GetAnyFeatureViewResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ANY_FEATURE_VIEW_FIELD_NUMBER: builtins.int + @property + def any_feature_view(self) -> global___AnyFeatureView: ... + def __init__( + self, + *, + any_feature_view: global___AnyFeatureView | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["any_feature_view", b"any_feature_view"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["any_feature_view", b"any_feature_view"]) -> None: ... + +global___GetAnyFeatureViewResponse = GetAnyFeatureViewResponse + +class ListAllFeatureViewsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... 
+ def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListAllFeatureViewsRequest = ListAllFeatureViewsRequest + +class ListAllFeatureViewsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_VIEWS_FIELD_NUMBER: builtins.int + @property + def feature_views(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AnyFeatureView]: ... + def __init__( + self, + *, + feature_views: collections.abc.Iterable[global___AnyFeatureView] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["feature_views", b"feature_views"]) -> None: ... + +global___ListAllFeatureViewsResponse = ListAllFeatureViewsResponse + +class GetStreamFeatureViewRequest(google.protobuf.message.Message): + """StreamFeatureView""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetStreamFeatureViewRequest = GetStreamFeatureViewRequest + +class ListStreamFeatureViewsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListStreamFeatureViewsRequest = ListStreamFeatureViewsRequest + +class ListStreamFeatureViewsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STREAM_FEATURE_VIEWS_FIELD_NUMBER: builtins.int + @property + def stream_feature_views(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.StreamFeatureView_pb2.StreamFeatureView]: ... + def __init__( + self, + *, + stream_feature_views: collections.abc.Iterable[feast.core.StreamFeatureView_pb2.StreamFeatureView] | None = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["stream_feature_views", b"stream_feature_views"]) -> None: ... + +global___ListStreamFeatureViewsResponse = ListStreamFeatureViewsResponse + +class GetOnDemandFeatureViewRequest(google.protobuf.message.Message): + """OnDemandFeatureView""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetOnDemandFeatureViewRequest = GetOnDemandFeatureViewRequest + +class ListOnDemandFeatureViewsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListOnDemandFeatureViewsRequest = ListOnDemandFeatureViewsRequest + +class ListOnDemandFeatureViewsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ON_DEMAND_FEATURE_VIEWS_FIELD_NUMBER: builtins.int + @property + def on_demand_feature_views(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView]: ... + def __init__( + self, + *, + on_demand_feature_views: collections.abc.Iterable[feast.core.OnDemandFeatureView_pb2.OnDemandFeatureView] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["on_demand_feature_views", b"on_demand_feature_views"]) -> None: ... + +global___ListOnDemandFeatureViewsResponse = ListOnDemandFeatureViewsResponse + +class ApplyFeatureServiceRequest(google.protobuf.message.Message): + """FeatureServices""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_SERVICE_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def feature_service(self) -> feast.core.FeatureService_pb2.FeatureService: ... + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + feature_service: feast.core.FeatureService_pb2.FeatureService | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["feature_service", b"feature_service"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "feature_service", b"feature_service", "project", b"project"]) -> None: ... + +global___ApplyFeatureServiceRequest = ApplyFeatureServiceRequest + +class GetFeatureServiceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetFeatureServiceRequest = GetFeatureServiceRequest + +class ListFeatureServicesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListFeatureServicesRequest = ListFeatureServicesRequest + +class ListFeatureServicesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_SERVICES_FIELD_NUMBER: builtins.int + @property + def feature_services(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.FeatureService_pb2.FeatureService]: ... + def __init__( + self, + *, + feature_services: collections.abc.Iterable[feast.core.FeatureService_pb2.FeatureService] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["feature_services", b"feature_services"]) -> None: ... + +global___ListFeatureServicesResponse = ListFeatureServicesResponse + +class DeleteFeatureServiceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name", "project", b"project"]) -> None: ... 
+ +global___DeleteFeatureServiceRequest = DeleteFeatureServiceRequest + +class ApplySavedDatasetRequest(google.protobuf.message.Message): + """SavedDataset""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SAVED_DATASET_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def saved_dataset(self) -> feast.core.SavedDataset_pb2.SavedDataset: ... + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + saved_dataset: feast.core.SavedDataset_pb2.SavedDataset | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["saved_dataset", b"saved_dataset"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "project", b"project", "saved_dataset", b"saved_dataset"]) -> None: ... + +global___ApplySavedDatasetRequest = ApplySavedDatasetRequest + +class GetSavedDatasetRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetSavedDatasetRequest = GetSavedDatasetRequest + +class ListSavedDatasetsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListSavedDatasetsRequest = ListSavedDatasetsRequest + +class ListSavedDatasetsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SAVED_DATASETS_FIELD_NUMBER: builtins.int + @property + def saved_datasets(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.SavedDataset_pb2.SavedDataset]: ... + def __init__( + self, + *, + saved_datasets: collections.abc.Iterable[feast.core.SavedDataset_pb2.SavedDataset] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["saved_datasets", b"saved_datasets"]) -> None: ... 
+ +global___ListSavedDatasetsResponse = ListSavedDatasetsResponse + +class DeleteSavedDatasetRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name", "project", b"project"]) -> None: ... + +global___DeleteSavedDatasetRequest = DeleteSavedDatasetRequest + +class ApplyValidationReferenceRequest(google.protobuf.message.Message): + """ValidationReference""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALIDATION_REFERENCE_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def validation_reference(self) -> feast.core.ValidationProfile_pb2.ValidationReference: ... + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + validation_reference: feast.core.ValidationProfile_pb2.ValidationReference | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["validation_reference", b"validation_reference"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "project", b"project", "validation_reference", b"validation_reference"]) -> None: ... + +global___ApplyValidationReferenceRequest = ApplyValidationReferenceRequest + +class GetValidationReferenceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... + +global___GetValidationReferenceRequest = GetValidationReferenceRequest + +class ListValidationReferencesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListValidationReferencesRequest = ListValidationReferencesRequest + +class ListValidationReferencesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALIDATION_REFERENCES_FIELD_NUMBER: builtins.int + @property + def validation_references(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.ValidationProfile_pb2.ValidationReference]: ... + def __init__( + self, + *, + validation_references: collections.abc.Iterable[feast.core.ValidationProfile_pb2.ValidationReference] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["validation_references", b"validation_references"]) -> None: ... + +global___ListValidationReferencesResponse = ListValidationReferencesResponse + +class DeleteValidationReferenceRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name", "project", b"project"]) -> None: ... + +global___DeleteValidationReferenceRequest = DeleteValidationReferenceRequest + +class ApplyPermissionRequest(google.protobuf.message.Message): + """Permissions""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PERMISSION_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def permission(self) -> feast.core.Permission_pb2.Permission: ... + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + permission: feast.core.Permission_pb2.Permission | None = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["permission", b"permission"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "permission", b"permission", "project", b"project"]) -> None: ... + +global___ApplyPermissionRequest = ApplyPermissionRequest + +class GetPermissionRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name", "project", b"project"]) -> None: ... 
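A hedged sketch of how the submessage-presence API declared in these stubs behaves, assuming feast's Permission proto is importable (the field values are illustrative placeholders):

    from feast.protos.feast.core import Permission_pb2
    from feast.protos.feast.registry import RegistryServer_pb2

    req = RegistryServer_pb2.ApplyPermissionRequest(
        permission=Permission_pb2.Permission(),  # empty placeholder message
        project="demo_project",                  # hypothetical project name
        commit=True,
    )
    assert req.HasField("permission")   # presence is tracked for submessages
    req.ClearField("permission")
    assert not req.HasField("permission")
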
+ +global___GetPermissionRequest = GetPermissionRequest + +class ListPermissionsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROJECT_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + project: builtins.str + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + project: builtins.str = ..., + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "project", b"project", "tags", b"tags"]) -> None: ... + +global___ListPermissionsRequest = ListPermissionsRequest + +class ListPermissionsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PERMISSIONS_FIELD_NUMBER: builtins.int + @property + def permissions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Permission_pb2.Permission]: ... + def __init__( + self, + *, + permissions: collections.abc.Iterable[feast.core.Permission_pb2.Permission] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["permissions", b"permissions"]) -> None: ... + +global___ListPermissionsResponse = ListPermissionsResponse + +class DeletePermissionRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + project: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + project: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name", "project", b"project"]) -> None: ... + +global___DeletePermissionRequest = DeletePermissionRequest + +class ApplyProjectRequest(google.protobuf.message.Message): + """Projects""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + @property + def project(self) -> feast.core.Project_pb2.Project: ... + commit: builtins.bool + def __init__( + self, + *, + project: feast.core.Project_pb2.Project | None = ..., + commit: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["project", b"project"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "project", b"project"]) -> None: ... 
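These messages serialize like any other protobuf message, which is exactly what the gRPC wiring in the next file relies on; a round-trip sketch under the same assumptions:

    from feast.protos.feast.registry import RegistryServer_pb2

    req = RegistryServer_pb2.ApplyProjectRequest(commit=True)
    wire = req.SerializeToString()   # bytes as sent on the wire
    same = RegistryServer_pb2.ApplyProjectRequest.FromString(wire)
    assert same.commit
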
+ +global___ApplyProjectRequest = ApplyProjectRequest + +class GetProjectRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + ALLOW_CACHE_FIELD_NUMBER: builtins.int + name: builtins.str + allow_cache: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + allow_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "name", b"name"]) -> None: ... + +global___GetProjectRequest = GetProjectRequest + +class ListProjectsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + ALLOW_CACHE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + allow_cache: builtins.bool + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + allow_cache: builtins.bool = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_cache", b"allow_cache", "tags", b"tags"]) -> None: ... + +global___ListProjectsRequest = ListProjectsRequest + +class ListProjectsResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECTS_FIELD_NUMBER: builtins.int + @property + def projects(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.core.Project_pb2.Project]: ... + def __init__( + self, + *, + projects: collections.abc.Iterable[feast.core.Project_pb2.Project] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["projects", b"projects"]) -> None: ... + +global___ListProjectsResponse = ListProjectsResponse + +class DeleteProjectRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + COMMIT_FIELD_NUMBER: builtins.int + name: builtins.str + commit: builtins.bool + def __init__( + self, + *, + name: builtins.str = ..., + commit: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["commit", b"commit", "name", b"name"]) -> None: ... + +global___DeleteProjectRequest = DeleteProjectRequest diff --git a/sdk/python/feast/protos/feast/registry/RegistryServer_pb2_grpc.py b/sdk/python/feast/protos/feast/registry/RegistryServer_pb2_grpc.py new file mode 100644 index 0000000000..bab23c4394 --- /dev/null +++ b/sdk/python/feast/protos/feast/registry/RegistryServer_pb2_grpc.py @@ -0,0 +1,1542 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
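Before the generated client/server module below, a minimal end-to-end sketch of how it is typically wired up (assumptions: grpcio is installed, port 6570 and the placeholder project are illustrative only, and just one RPC is overridden; every other method keeps the generated UNIMPLEMENTED default):

    from concurrent import futures

    import grpc
    from feast.protos.feast.core import Project_pb2
    from feast.protos.feast.registry import RegistryServer_pb2, RegistryServer_pb2_grpc

    class DemoRegistry(RegistryServer_pb2_grpc.RegistryServerServicer):
        # Override a single RPC; the base class answers everything else
        # with grpc.StatusCode.UNIMPLEMENTED.
        def ListProjects(self, request, context):
            return RegistryServer_pb2.ListProjectsResponse(
                projects=[Project_pb2.Project()]  # empty placeholder project
            )

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=2))
    RegistryServer_pb2_grpc.add_RegistryServerServicer_to_server(DemoRegistry(), server)
    server.add_insecure_port("[::]:6570")
    server.start()

    channel = grpc.insecure_channel("localhost:6570")
    stub = RegistryServer_pb2_grpc.RegistryServerStub(channel)
    resp = stub.ListProjects(RegistryServer_pb2.ListProjectsRequest(allow_cache=True))
    print(len(resp.projects))  # -> 1
    server.stop(0)
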
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from feast.protos.feast.core import DataSource_pb2 as feast_dot_core_dot_DataSource__pb2 +from feast.protos.feast.core import Entity_pb2 as feast_dot_core_dot_Entity__pb2 +from feast.protos.feast.core import FeatureService_pb2 as feast_dot_core_dot_FeatureService__pb2 +from feast.protos.feast.core import FeatureView_pb2 as feast_dot_core_dot_FeatureView__pb2 +from feast.protos.feast.core import InfraObject_pb2 as feast_dot_core_dot_InfraObject__pb2 +from feast.protos.feast.core import OnDemandFeatureView_pb2 as feast_dot_core_dot_OnDemandFeatureView__pb2 +from feast.protos.feast.core import Permission_pb2 as feast_dot_core_dot_Permission__pb2 +from feast.protos.feast.core import Project_pb2 as feast_dot_core_dot_Project__pb2 +from feast.protos.feast.core import Registry_pb2 as feast_dot_core_dot_Registry__pb2 +from feast.protos.feast.core import SavedDataset_pb2 as feast_dot_core_dot_SavedDataset__pb2 +from feast.protos.feast.core import StreamFeatureView_pb2 as feast_dot_core_dot_StreamFeatureView__pb2 +from feast.protos.feast.core import ValidationProfile_pb2 as feast_dot_core_dot_ValidationProfile__pb2 +from feast.protos.feast.registry import RegistryServer_pb2 as feast_dot_registry_dot_RegistryServer__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class RegistryServerStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.ApplyEntity = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyEntity', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyEntityRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetEntity = channel.unary_unary( + '/feast.registry.RegistryServer/GetEntity', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetEntityRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_Entity__pb2.Entity.FromString, + ) + self.ListEntities = channel.unary_unary( + '/feast.registry.RegistryServer/ListEntities', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListEntitiesRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListEntitiesResponse.FromString, + ) + self.DeleteEntity = channel.unary_unary( + '/feast.registry.RegistryServer/DeleteEntity', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteEntityRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ApplyDataSource = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyDataSource', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyDataSourceRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetDataSource = channel.unary_unary( + '/feast.registry.RegistryServer/GetDataSource', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetDataSourceRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_DataSource__pb2.DataSource.FromString, + ) + self.ListDataSources = channel.unary_unary( + '/feast.registry.RegistryServer/ListDataSources', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListDataSourcesRequest.SerializeToString, + 
response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListDataSourcesResponse.FromString, + ) + self.DeleteDataSource = channel.unary_unary( + '/feast.registry.RegistryServer/DeleteDataSource', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteDataSourceRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ApplyFeatureView = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyFeatureView', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyFeatureViewRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.DeleteFeatureView = channel.unary_unary( + '/feast.registry.RegistryServer/DeleteFeatureView', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteFeatureViewRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetAnyFeatureView = channel.unary_unary( + '/feast.registry.RegistryServer/GetAnyFeatureView', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetAnyFeatureViewRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetAnyFeatureViewResponse.FromString, + ) + self.ListAllFeatureViews = channel.unary_unary( + '/feast.registry.RegistryServer/ListAllFeatureViews', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListAllFeatureViewsRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListAllFeatureViewsResponse.FromString, + ) + self.GetFeatureView = channel.unary_unary( + '/feast.registry.RegistryServer/GetFeatureView', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetFeatureViewRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_FeatureView__pb2.FeatureView.FromString, + ) + self.ListFeatureViews = channel.unary_unary( + '/feast.registry.RegistryServer/ListFeatureViews', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureViewsRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureViewsResponse.FromString, + ) + self.GetStreamFeatureView = channel.unary_unary( + '/feast.registry.RegistryServer/GetStreamFeatureView', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetStreamFeatureViewRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_StreamFeatureView__pb2.StreamFeatureView.FromString, + ) + self.ListStreamFeatureViews = channel.unary_unary( + '/feast.registry.RegistryServer/ListStreamFeatureViews', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListStreamFeatureViewsRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListStreamFeatureViewsResponse.FromString, + ) + self.GetOnDemandFeatureView = channel.unary_unary( + '/feast.registry.RegistryServer/GetOnDemandFeatureView', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetOnDemandFeatureViewRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_OnDemandFeatureView__pb2.OnDemandFeatureView.FromString, + ) + self.ListOnDemandFeatureViews = channel.unary_unary( + '/feast.registry.RegistryServer/ListOnDemandFeatureViews', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListOnDemandFeatureViewsRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListOnDemandFeatureViewsResponse.FromString, + 
) + self.ApplyFeatureService = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyFeatureService', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyFeatureServiceRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetFeatureService = channel.unary_unary( + '/feast.registry.RegistryServer/GetFeatureService', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetFeatureServiceRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_FeatureService__pb2.FeatureService.FromString, + ) + self.ListFeatureServices = channel.unary_unary( + '/feast.registry.RegistryServer/ListFeatureServices', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureServicesRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureServicesResponse.FromString, + ) + self.DeleteFeatureService = channel.unary_unary( + '/feast.registry.RegistryServer/DeleteFeatureService', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteFeatureServiceRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ApplySavedDataset = channel.unary_unary( + '/feast.registry.RegistryServer/ApplySavedDataset', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplySavedDatasetRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetSavedDataset = channel.unary_unary( + '/feast.registry.RegistryServer/GetSavedDataset', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetSavedDatasetRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_SavedDataset__pb2.SavedDataset.FromString, + ) + self.ListSavedDatasets = channel.unary_unary( + '/feast.registry.RegistryServer/ListSavedDatasets', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListSavedDatasetsRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListSavedDatasetsResponse.FromString, + ) + self.DeleteSavedDataset = channel.unary_unary( + '/feast.registry.RegistryServer/DeleteSavedDataset', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteSavedDatasetRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ApplyValidationReference = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyValidationReference', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyValidationReferenceRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetValidationReference = channel.unary_unary( + '/feast.registry.RegistryServer/GetValidationReference', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetValidationReferenceRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_ValidationProfile__pb2.ValidationReference.FromString, + ) + self.ListValidationReferences = channel.unary_unary( + '/feast.registry.RegistryServer/ListValidationReferences', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListValidationReferencesRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListValidationReferencesResponse.FromString, + ) + self.DeleteValidationReference = channel.unary_unary( + '/feast.registry.RegistryServer/DeleteValidationReference', + 
request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteValidationReferenceRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ApplyPermission = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyPermission', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyPermissionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetPermission = channel.unary_unary( + '/feast.registry.RegistryServer/GetPermission', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetPermissionRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_Permission__pb2.Permission.FromString, + ) + self.ListPermissions = channel.unary_unary( + '/feast.registry.RegistryServer/ListPermissions', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListPermissionsRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListPermissionsResponse.FromString, + ) + self.DeletePermission = channel.unary_unary( + '/feast.registry.RegistryServer/DeletePermission', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeletePermissionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ApplyProject = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyProject', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyProjectRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetProject = channel.unary_unary( + '/feast.registry.RegistryServer/GetProject', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetProjectRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_Project__pb2.Project.FromString, + ) + self.ListProjects = channel.unary_unary( + '/feast.registry.RegistryServer/ListProjects', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectsRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectsResponse.FromString, + ) + self.DeleteProject = channel.unary_unary( + '/feast.registry.RegistryServer/DeleteProject', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteProjectRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ApplyMaterialization = channel.unary_unary( + '/feast.registry.RegistryServer/ApplyMaterialization', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyMaterializationRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ListProjectMetadata = channel.unary_unary( + '/feast.registry.RegistryServer/ListProjectMetadata', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectMetadataRequest.SerializeToString, + response_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectMetadataResponse.FromString, + ) + self.UpdateInfra = channel.unary_unary( + '/feast.registry.RegistryServer/UpdateInfra', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.UpdateInfraRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.GetInfra = channel.unary_unary( + '/feast.registry.RegistryServer/GetInfra', + 
request_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetInfraRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_InfraObject__pb2.Infra.FromString, + ) + self.Commit = channel.unary_unary( + '/feast.registry.RegistryServer/Commit', + request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Refresh = channel.unary_unary( + '/feast.registry.RegistryServer/Refresh', + request_serializer=feast_dot_registry_dot_RegistryServer__pb2.RefreshRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Proto = channel.unary_unary( + '/feast.registry.RegistryServer/Proto', + request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + response_deserializer=feast_dot_core_dot_Registry__pb2.Registry.FromString, + ) + + +class RegistryServerServicer(object): + """Missing associated documentation comment in .proto file.""" + + def ApplyEntity(self, request, context): + """Entity RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetEntity(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListEntities(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteEntity(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplyDataSource(self, request, context): + """DataSource RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetDataSource(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListDataSources(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteDataSource(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplyFeatureView(self, request, context): + """FeatureView RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteFeatureView(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise 
NotImplementedError('Method not implemented!') + + def GetAnyFeatureView(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListAllFeatureViews(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetFeatureView(self, request, context): + """plain FeatureView RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListFeatureViews(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetStreamFeatureView(self, request, context): + """StreamFeatureView RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListStreamFeatureViews(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetOnDemandFeatureView(self, request, context): + """OnDemandFeatureView RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListOnDemandFeatureViews(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplyFeatureService(self, request, context): + """FeatureService RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetFeatureService(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListFeatureServices(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteFeatureService(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplySavedDataset(self, request, context): + """SavedDataset RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSavedDataset(self, request, context): + """Missing associated documentation comment in .proto file.""" + 
context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSavedDatasets(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSavedDataset(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplyValidationReference(self, request, context): + """ValidationReference RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetValidationReference(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListValidationReferences(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteValidationReference(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplyPermission(self, request, context): + """Permission RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetPermission(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListPermissions(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeletePermission(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplyProject(self, request, context): + """Project RPCs + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetProject(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListProjects(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not 
implemented!') + + def DeleteProject(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ApplyMaterialization(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListProjectMetadata(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateInfra(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetInfra(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Commit(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Refresh(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Proto(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_RegistryServerServicer_to_server(servicer, server): + rpc_method_handlers = { + 'ApplyEntity': grpc.unary_unary_rpc_method_handler( + servicer.ApplyEntity, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyEntityRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetEntity': grpc.unary_unary_rpc_method_handler( + servicer.GetEntity, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetEntityRequest.FromString, + response_serializer=feast_dot_core_dot_Entity__pb2.Entity.SerializeToString, + ), + 'ListEntities': grpc.unary_unary_rpc_method_handler( + servicer.ListEntities, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListEntitiesRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListEntitiesResponse.SerializeToString, + ), + 'DeleteEntity': grpc.unary_unary_rpc_method_handler( + servicer.DeleteEntity, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteEntityRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ApplyDataSource': grpc.unary_unary_rpc_method_handler( + servicer.ApplyDataSource, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyDataSourceRequest.FromString, + 
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetDataSource': grpc.unary_unary_rpc_method_handler( + servicer.GetDataSource, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetDataSourceRequest.FromString, + response_serializer=feast_dot_core_dot_DataSource__pb2.DataSource.SerializeToString, + ), + 'ListDataSources': grpc.unary_unary_rpc_method_handler( + servicer.ListDataSources, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListDataSourcesRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListDataSourcesResponse.SerializeToString, + ), + 'DeleteDataSource': grpc.unary_unary_rpc_method_handler( + servicer.DeleteDataSource, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteDataSourceRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ApplyFeatureView': grpc.unary_unary_rpc_method_handler( + servicer.ApplyFeatureView, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyFeatureViewRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'DeleteFeatureView': grpc.unary_unary_rpc_method_handler( + servicer.DeleteFeatureView, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteFeatureViewRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetAnyFeatureView': grpc.unary_unary_rpc_method_handler( + servicer.GetAnyFeatureView, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetAnyFeatureViewRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.GetAnyFeatureViewResponse.SerializeToString, + ), + 'ListAllFeatureViews': grpc.unary_unary_rpc_method_handler( + servicer.ListAllFeatureViews, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListAllFeatureViewsRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListAllFeatureViewsResponse.SerializeToString, + ), + 'GetFeatureView': grpc.unary_unary_rpc_method_handler( + servicer.GetFeatureView, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetFeatureViewRequest.FromString, + response_serializer=feast_dot_core_dot_FeatureView__pb2.FeatureView.SerializeToString, + ), + 'ListFeatureViews': grpc.unary_unary_rpc_method_handler( + servicer.ListFeatureViews, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureViewsRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureViewsResponse.SerializeToString, + ), + 'GetStreamFeatureView': grpc.unary_unary_rpc_method_handler( + servicer.GetStreamFeatureView, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetStreamFeatureViewRequest.FromString, + response_serializer=feast_dot_core_dot_StreamFeatureView__pb2.StreamFeatureView.SerializeToString, + ), + 'ListStreamFeatureViews': grpc.unary_unary_rpc_method_handler( + servicer.ListStreamFeatureViews, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListStreamFeatureViewsRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListStreamFeatureViewsResponse.SerializeToString, + ), + 'GetOnDemandFeatureView': grpc.unary_unary_rpc_method_handler( + servicer.GetOnDemandFeatureView, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetOnDemandFeatureViewRequest.FromString, + 
response_serializer=feast_dot_core_dot_OnDemandFeatureView__pb2.OnDemandFeatureView.SerializeToString, + ), + 'ListOnDemandFeatureViews': grpc.unary_unary_rpc_method_handler( + servicer.ListOnDemandFeatureViews, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListOnDemandFeatureViewsRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListOnDemandFeatureViewsResponse.SerializeToString, + ), + 'ApplyFeatureService': grpc.unary_unary_rpc_method_handler( + servicer.ApplyFeatureService, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyFeatureServiceRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetFeatureService': grpc.unary_unary_rpc_method_handler( + servicer.GetFeatureService, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetFeatureServiceRequest.FromString, + response_serializer=feast_dot_core_dot_FeatureService__pb2.FeatureService.SerializeToString, + ), + 'ListFeatureServices': grpc.unary_unary_rpc_method_handler( + servicer.ListFeatureServices, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureServicesRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListFeatureServicesResponse.SerializeToString, + ), + 'DeleteFeatureService': grpc.unary_unary_rpc_method_handler( + servicer.DeleteFeatureService, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteFeatureServiceRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ApplySavedDataset': grpc.unary_unary_rpc_method_handler( + servicer.ApplySavedDataset, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplySavedDatasetRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetSavedDataset': grpc.unary_unary_rpc_method_handler( + servicer.GetSavedDataset, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetSavedDatasetRequest.FromString, + response_serializer=feast_dot_core_dot_SavedDataset__pb2.SavedDataset.SerializeToString, + ), + 'ListSavedDatasets': grpc.unary_unary_rpc_method_handler( + servicer.ListSavedDatasets, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListSavedDatasetsRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListSavedDatasetsResponse.SerializeToString, + ), + 'DeleteSavedDataset': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSavedDataset, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteSavedDatasetRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ApplyValidationReference': grpc.unary_unary_rpc_method_handler( + servicer.ApplyValidationReference, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyValidationReferenceRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetValidationReference': grpc.unary_unary_rpc_method_handler( + servicer.GetValidationReference, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetValidationReferenceRequest.FromString, + response_serializer=feast_dot_core_dot_ValidationProfile__pb2.ValidationReference.SerializeToString, + ), + 'ListValidationReferences': grpc.unary_unary_rpc_method_handler( + servicer.ListValidationReferences, + 
request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListValidationReferencesRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListValidationReferencesResponse.SerializeToString, + ), + 'DeleteValidationReference': grpc.unary_unary_rpc_method_handler( + servicer.DeleteValidationReference, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteValidationReferenceRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ApplyPermission': grpc.unary_unary_rpc_method_handler( + servicer.ApplyPermission, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyPermissionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetPermission': grpc.unary_unary_rpc_method_handler( + servicer.GetPermission, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetPermissionRequest.FromString, + response_serializer=feast_dot_core_dot_Permission__pb2.Permission.SerializeToString, + ), + 'ListPermissions': grpc.unary_unary_rpc_method_handler( + servicer.ListPermissions, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListPermissionsRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListPermissionsResponse.SerializeToString, + ), + 'DeletePermission': grpc.unary_unary_rpc_method_handler( + servicer.DeletePermission, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeletePermissionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ApplyProject': grpc.unary_unary_rpc_method_handler( + servicer.ApplyProject, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyProjectRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetProject': grpc.unary_unary_rpc_method_handler( + servicer.GetProject, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetProjectRequest.FromString, + response_serializer=feast_dot_core_dot_Project__pb2.Project.SerializeToString, + ), + 'ListProjects': grpc.unary_unary_rpc_method_handler( + servicer.ListProjects, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectsRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectsResponse.SerializeToString, + ), + 'DeleteProject': grpc.unary_unary_rpc_method_handler( + servicer.DeleteProject, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.DeleteProjectRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ApplyMaterialization': grpc.unary_unary_rpc_method_handler( + servicer.ApplyMaterialization, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ApplyMaterializationRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListProjectMetadata': grpc.unary_unary_rpc_method_handler( + servicer.ListProjectMetadata, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectMetadataRequest.FromString, + response_serializer=feast_dot_registry_dot_RegistryServer__pb2.ListProjectMetadataResponse.SerializeToString, + ), + 'UpdateInfra': grpc.unary_unary_rpc_method_handler( + servicer.UpdateInfra, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.UpdateInfraRequest.FromString, + 
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'GetInfra': grpc.unary_unary_rpc_method_handler( + servicer.GetInfra, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.GetInfraRequest.FromString, + response_serializer=feast_dot_core_dot_InfraObject__pb2.Infra.SerializeToString, + ), + 'Commit': grpc.unary_unary_rpc_method_handler( + servicer.Commit, + request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Refresh': grpc.unary_unary_rpc_method_handler( + servicer.Refresh, + request_deserializer=feast_dot_registry_dot_RegistryServer__pb2.RefreshRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Proto': grpc.unary_unary_rpc_method_handler( + servicer.Proto, + request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + response_serializer=feast_dot_core_dot_Registry__pb2.Registry.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'feast.registry.RegistryServer', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class RegistryServer(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def ApplyEntity(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyEntity', + feast_dot_registry_dot_RegistryServer__pb2.ApplyEntityRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetEntity(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetEntity', + feast_dot_registry_dot_RegistryServer__pb2.GetEntityRequest.SerializeToString, + feast_dot_core_dot_Entity__pb2.Entity.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListEntities(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListEntities', + feast_dot_registry_dot_RegistryServer__pb2.ListEntitiesRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListEntitiesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteEntity(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/DeleteEntity', + feast_dot_registry_dot_RegistryServer__pb2.DeleteEntityRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + 
options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplyDataSource(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyDataSource', + feast_dot_registry_dot_RegistryServer__pb2.ApplyDataSourceRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetDataSource(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetDataSource', + feast_dot_registry_dot_RegistryServer__pb2.GetDataSourceRequest.SerializeToString, + feast_dot_core_dot_DataSource__pb2.DataSource.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListDataSources(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListDataSources', + feast_dot_registry_dot_RegistryServer__pb2.ListDataSourcesRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListDataSourcesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteDataSource(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/DeleteDataSource', + feast_dot_registry_dot_RegistryServer__pb2.DeleteDataSourceRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplyFeatureView(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyFeatureView', + feast_dot_registry_dot_RegistryServer__pb2.ApplyFeatureViewRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteFeatureView(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/DeleteFeatureView', + feast_dot_registry_dot_RegistryServer__pb2.DeleteFeatureViewRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, 
call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetAnyFeatureView(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetAnyFeatureView', + feast_dot_registry_dot_RegistryServer__pb2.GetAnyFeatureViewRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.GetAnyFeatureViewResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListAllFeatureViews(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListAllFeatureViews', + feast_dot_registry_dot_RegistryServer__pb2.ListAllFeatureViewsRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListAllFeatureViewsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetFeatureView(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetFeatureView', + feast_dot_registry_dot_RegistryServer__pb2.GetFeatureViewRequest.SerializeToString, + feast_dot_core_dot_FeatureView__pb2.FeatureView.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListFeatureViews(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListFeatureViews', + feast_dot_registry_dot_RegistryServer__pb2.ListFeatureViewsRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListFeatureViewsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetStreamFeatureView(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetStreamFeatureView', + feast_dot_registry_dot_RegistryServer__pb2.GetStreamFeatureViewRequest.SerializeToString, + feast_dot_core_dot_StreamFeatureView__pb2.StreamFeatureView.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListStreamFeatureViews(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListStreamFeatureViews', + feast_dot_registry_dot_RegistryServer__pb2.ListStreamFeatureViewsRequest.SerializeToString, + 
feast_dot_registry_dot_RegistryServer__pb2.ListStreamFeatureViewsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetOnDemandFeatureView(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetOnDemandFeatureView', + feast_dot_registry_dot_RegistryServer__pb2.GetOnDemandFeatureViewRequest.SerializeToString, + feast_dot_core_dot_OnDemandFeatureView__pb2.OnDemandFeatureView.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListOnDemandFeatureViews(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListOnDemandFeatureViews', + feast_dot_registry_dot_RegistryServer__pb2.ListOnDemandFeatureViewsRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListOnDemandFeatureViewsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplyFeatureService(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyFeatureService', + feast_dot_registry_dot_RegistryServer__pb2.ApplyFeatureServiceRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetFeatureService(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetFeatureService', + feast_dot_registry_dot_RegistryServer__pb2.GetFeatureServiceRequest.SerializeToString, + feast_dot_core_dot_FeatureService__pb2.FeatureService.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListFeatureServices(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListFeatureServices', + feast_dot_registry_dot_RegistryServer__pb2.ListFeatureServicesRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListFeatureServicesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteFeatureService(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, 
'/feast.registry.RegistryServer/DeleteFeatureService', + feast_dot_registry_dot_RegistryServer__pb2.DeleteFeatureServiceRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplySavedDataset(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplySavedDataset', + feast_dot_registry_dot_RegistryServer__pb2.ApplySavedDatasetRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetSavedDataset(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetSavedDataset', + feast_dot_registry_dot_RegistryServer__pb2.GetSavedDatasetRequest.SerializeToString, + feast_dot_core_dot_SavedDataset__pb2.SavedDataset.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListSavedDatasets(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListSavedDatasets', + feast_dot_registry_dot_RegistryServer__pb2.ListSavedDatasetsRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListSavedDatasetsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteSavedDataset(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/DeleteSavedDataset', + feast_dot_registry_dot_RegistryServer__pb2.DeleteSavedDatasetRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplyValidationReference(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyValidationReference', + feast_dot_registry_dot_RegistryServer__pb2.ApplyValidationReferenceRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetValidationReference(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return 
grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetValidationReference', + feast_dot_registry_dot_RegistryServer__pb2.GetValidationReferenceRequest.SerializeToString, + feast_dot_core_dot_ValidationProfile__pb2.ValidationReference.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListValidationReferences(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListValidationReferences', + feast_dot_registry_dot_RegistryServer__pb2.ListValidationReferencesRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListValidationReferencesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteValidationReference(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/DeleteValidationReference', + feast_dot_registry_dot_RegistryServer__pb2.DeleteValidationReferenceRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplyPermission(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyPermission', + feast_dot_registry_dot_RegistryServer__pb2.ApplyPermissionRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetPermission(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetPermission', + feast_dot_registry_dot_RegistryServer__pb2.GetPermissionRequest.SerializeToString, + feast_dot_core_dot_Permission__pb2.Permission.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListPermissions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListPermissions', + feast_dot_registry_dot_RegistryServer__pb2.ListPermissionsRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListPermissionsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeletePermission(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + 
compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/DeletePermission', + feast_dot_registry_dot_RegistryServer__pb2.DeletePermissionRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplyProject(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyProject', + feast_dot_registry_dot_RegistryServer__pb2.ApplyProjectRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetProject(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetProject', + feast_dot_registry_dot_RegistryServer__pb2.GetProjectRequest.SerializeToString, + feast_dot_core_dot_Project__pb2.Project.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListProjects(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListProjects', + feast_dot_registry_dot_RegistryServer__pb2.ListProjectsRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListProjectsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteProject(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/DeleteProject', + feast_dot_registry_dot_RegistryServer__pb2.DeleteProjectRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ApplyMaterialization(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ApplyMaterialization', + feast_dot_registry_dot_RegistryServer__pb2.ApplyMaterializationRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListProjectMetadata(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + 
return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/ListProjectMetadata', + feast_dot_registry_dot_RegistryServer__pb2.ListProjectMetadataRequest.SerializeToString, + feast_dot_registry_dot_RegistryServer__pb2.ListProjectMetadataResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def UpdateInfra(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/UpdateInfra', + feast_dot_registry_dot_RegistryServer__pb2.UpdateInfraRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetInfra(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/GetInfra', + feast_dot_registry_dot_RegistryServer__pb2.GetInfraRequest.SerializeToString, + feast_dot_core_dot_InfraObject__pb2.Infra.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Commit(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/Commit', + google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Refresh(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/Refresh', + feast_dot_registry_dot_RegistryServer__pb2.RefreshRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Proto(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.registry.RegistryServer/Proto', + google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + feast_dot_core_dot_Registry__pb2.Registry.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/sdk/python/feast/protos/feast/registry/__init__.py b/sdk/python/feast/protos/feast/registry/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/protos/feast/serving/Connector_pb2.py b/sdk/python/feast/protos/feast/serving/Connector_pb2.py new file mode 100644 index 0000000000..b38471dea8 --- /dev/null +++ 
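# ---------------------------------------------------------------------------
# Reviewer note (illustrative, not part of the generated file above): the
# experimental static methods on RegistryServer can be invoked without
# manually building a stub or channel. A minimal sketch, assuming a registry
# server is listening on localhost:6570 (the address and timeout are
# assumptions, not values defined in this diff):
from google.protobuf import empty_pb2
from feast.protos.feast.registry import RegistryServer_pb2_grpc

# Proto takes google.protobuf.Empty and returns the full feast.core.Registry.
registry = RegistryServer_pb2_grpc.RegistryServer.Proto(
    empty_pb2.Empty(), "localhost:6570", insecure=True, timeout=5.0)
print(len(registry.feature_views))  # feature_views per feast.core.Registry
# ---------------------------------------------------------------------------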
b/sdk/python/feast/protos/feast/serving/Connector_pb2.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/serving/Connector.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 +from feast.protos.feast.types import EntityKey_pb2 as feast_dot_types_dot_EntityKey__pb2 +from feast.protos.feast.serving import ServingService_pb2 as feast_dot_serving_dot_ServingService__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x66\x65\x61st/serving/Connector.proto\x12\x0egrpc.connector\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17\x66\x65\x61st/types/Value.proto\x1a\x1b\x66\x65\x61st/types/EntityKey.proto\x1a\"feast/serving/ServingService.proto\"\x9a\x01\n\x10\x43onnectorFeature\x12\x34\n\treference\x18\x01 \x01(\x0b\x32!.feast.serving.FeatureReferenceV2\x12-\n\ttimestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12!\n\x05value\x18\x03 \x01(\x0b\x32\x12.feast.types.Value\"M\n\x14\x43onnectorFeatureList\x12\x35\n\x0b\x66\x65\x61tureList\x18\x01 \x03(\x0b\x32 .grpc.connector.ConnectorFeature\"_\n\x11OnlineReadRequest\x12*\n\nentityKeys\x18\x01 \x03(\x0b\x32\x16.feast.types.EntityKey\x12\x0c\n\x04view\x18\x02 \x01(\t\x12\x10\n\x08\x66\x65\x61tures\x18\x03 \x03(\t\"K\n\x12OnlineReadResponse\x12\x35\n\x07results\x18\x01 \x03(\x0b\x32$.grpc.connector.ConnectorFeatureList2b\n\x0bOnlineStore\x12S\n\nOnlineRead\x12!.grpc.connector.OnlineReadRequest\x1a\".grpc.connector.OnlineReadResponseB4Z2github.com/feast-dev/feast/go/protos/feast/servingb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.serving.Connector_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'Z2github.com/feast-dev/feast/go/protos/feast/serving' + _globals['_CONNECTORFEATURE']._serialized_start=173 + _globals['_CONNECTORFEATURE']._serialized_end=327 + _globals['_CONNECTORFEATURELIST']._serialized_start=329 + _globals['_CONNECTORFEATURELIST']._serialized_end=406 + _globals['_ONLINEREADREQUEST']._serialized_start=408 + _globals['_ONLINEREADREQUEST']._serialized_end=503 + _globals['_ONLINEREADRESPONSE']._serialized_start=505 + _globals['_ONLINEREADRESPONSE']._serialized_end=580 + _globals['_ONLINESTORE']._serialized_start=582 + _globals['_ONLINESTORE']._serialized_end=680 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/serving/Connector_pb2.pyi b/sdk/python/feast/protos/feast/serving/Connector_pb2.pyi new file mode 100644 index 0000000000..f87109e0fa --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/Connector_pb2.pyi @@ -0,0 +1,97 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" +import builtins +import collections.abc +import feast.serving.ServingService_pb2 +import feast.types.EntityKey_pb2 +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class ConnectorFeature(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REFERENCE_FIELD_NUMBER: builtins.int + TIMESTAMP_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + @property + def reference(self) -> feast.serving.ServingService_pb2.FeatureReferenceV2: ... + @property + def timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def value(self) -> feast.types.Value_pb2.Value: ... + def __init__( + self, + *, + reference: feast.serving.ServingService_pb2.FeatureReferenceV2 | None = ..., + timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + value: feast.types.Value_pb2.Value | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["reference", b"reference", "timestamp", b"timestamp", "value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["reference", b"reference", "timestamp", b"timestamp", "value", b"value"]) -> None: ... + +global___ConnectorFeature = ConnectorFeature + +class ConnectorFeatureList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURELIST_FIELD_NUMBER: builtins.int + @property + def featureList(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ConnectorFeature]: ... + def __init__( + self, + *, + featureList: collections.abc.Iterable[global___ConnectorFeature] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["featureList", b"featureList"]) -> None: ... + +global___ConnectorFeatureList = ConnectorFeatureList + +class OnlineReadRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ENTITYKEYS_FIELD_NUMBER: builtins.int + VIEW_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + @property + def entityKeys(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.types.EntityKey_pb2.EntityKey]: ... + view: builtins.str + @property + def features(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + entityKeys: collections.abc.Iterable[feast.types.EntityKey_pb2.EntityKey] | None = ..., + view: builtins.str = ..., + features: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entityKeys", b"entityKeys", "features", b"features", "view", b"view"]) -> None: ... + +global___OnlineReadRequest = OnlineReadRequest + +class OnlineReadResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + RESULTS_FIELD_NUMBER: builtins.int + @property + def results(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ConnectorFeatureList]: ... + def __init__( + self, + *, + results: collections.abc.Iterable[global___ConnectorFeatureList] | None = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["results", b"results"]) -> None: ... + +global___OnlineReadResponse = OnlineReadResponse diff --git a/sdk/python/feast/protos/feast/serving/Connector_pb2_grpc.py b/sdk/python/feast/protos/feast/serving/Connector_pb2_grpc.py new file mode 100644 index 0000000000..dfadf982dd --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/Connector_pb2_grpc.py @@ -0,0 +1,66 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from feast.protos.feast.serving import Connector_pb2 as feast_dot_serving_dot_Connector__pb2 + + +class OnlineStoreStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.OnlineRead = channel.unary_unary( + '/grpc.connector.OnlineStore/OnlineRead', + request_serializer=feast_dot_serving_dot_Connector__pb2.OnlineReadRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_Connector__pb2.OnlineReadResponse.FromString, + ) + + +class OnlineStoreServicer(object): + """Missing associated documentation comment in .proto file.""" + + def OnlineRead(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_OnlineStoreServicer_to_server(servicer, server): + rpc_method_handlers = { + 'OnlineRead': grpc.unary_unary_rpc_method_handler( + servicer.OnlineRead, + request_deserializer=feast_dot_serving_dot_Connector__pb2.OnlineReadRequest.FromString, + response_serializer=feast_dot_serving_dot_Connector__pb2.OnlineReadResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'grpc.connector.OnlineStore', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class OnlineStore(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def OnlineRead(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/grpc.connector.OnlineStore/OnlineRead', + feast_dot_serving_dot_Connector__pb2.OnlineReadRequest.SerializeToString, + feast_dot_serving_dot_Connector__pb2.OnlineReadResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/sdk/python/feast/protos/feast/serving/GrpcServer_pb2.py b/sdk/python/feast/protos/feast/serving/GrpcServer_pb2.py new file mode 100644 index 0000000000..8e40630cff --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/GrpcServer_pb2.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
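# ---------------------------------------------------------------------------
# Reviewer note (illustrative, not part of the generated file): the
# OnlineStore connector service above can be served by subclassing the
# generated servicer. A minimal in-memory sketch; the port and executor size
# are assumptions, and the handler only echoes empty feature lists:
from concurrent import futures

import grpc
from feast.protos.feast.serving import Connector_pb2, Connector_pb2_grpc


class InMemoryOnlineStore(Connector_pb2_grpc.OnlineStoreServicer):
    def OnlineRead(self, request, context):
        # One (empty) ConnectorFeatureList per requested entity key.
        return Connector_pb2.OnlineReadResponse(
            results=[Connector_pb2.ConnectorFeatureList()
                     for _ in request.entityKeys])


server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
Connector_pb2_grpc.add_OnlineStoreServicer_to_server(InMemoryOnlineStore(), server)
server.add_insecure_port("[::]:50051")  # port is an illustrative assumption
server.start()
server.wait_for_termination()
# ---------------------------------------------------------------------------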
+# source: feast/serving/GrpcServer.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.serving import ServingService_pb2 as feast_dot_serving_dot_ServingService__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1e\x66\x65\x61st/serving/GrpcServer.proto\x1a\"feast/serving/ServingService.proto\"\xb3\x01\n\x0bPushRequest\x12,\n\x08\x66\x65\x61tures\x18\x01 \x03(\x0b\x32\x1a.PushRequest.FeaturesEntry\x12\x1b\n\x13stream_feature_view\x18\x02 \x01(\t\x12\x1c\n\x14\x61llow_registry_cache\x18\x03 \x01(\x08\x12\n\n\x02to\x18\x04 \x01(\t\x1a/\n\rFeaturesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x1e\n\x0cPushResponse\x12\x0e\n\x06status\x18\x01 \x01(\x08\"\xc1\x01\n\x19WriteToOnlineStoreRequest\x12:\n\x08\x66\x65\x61tures\x18\x01 \x03(\x0b\x32(.WriteToOnlineStoreRequest.FeaturesEntry\x12\x19\n\x11\x66\x65\x61ture_view_name\x18\x02 \x01(\t\x12\x1c\n\x14\x61llow_registry_cache\x18\x03 \x01(\x08\x1a/\n\rFeaturesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\",\n\x1aWriteToOnlineStoreResponse\x12\x0e\n\x06status\x18\x01 \x01(\x08\x32\xf1\x01\n\x11GrpcFeatureServer\x12%\n\x04Push\x12\x0c.PushRequest\x1a\r.PushResponse\"\x00\x12M\n\x12WriteToOnlineStore\x12\x1a.WriteToOnlineStoreRequest\x1a\x1b.WriteToOnlineStoreResponse\x12\x66\n\x11GetOnlineFeatures\x12\'.feast.serving.GetOnlineFeaturesRequest\x1a(.feast.serving.GetOnlineFeaturesResponseb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.serving.GrpcServer_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_PUSHREQUEST_FEATURESENTRY']._options = None + _globals['_PUSHREQUEST_FEATURESENTRY']._serialized_options = b'8\001' + _globals['_WRITETOONLINESTOREREQUEST_FEATURESENTRY']._options = None + _globals['_WRITETOONLINESTOREREQUEST_FEATURESENTRY']._serialized_options = b'8\001' + _globals['_PUSHREQUEST']._serialized_start=71 + _globals['_PUSHREQUEST']._serialized_end=250 + _globals['_PUSHREQUEST_FEATURESENTRY']._serialized_start=203 + _globals['_PUSHREQUEST_FEATURESENTRY']._serialized_end=250 + _globals['_PUSHRESPONSE']._serialized_start=252 + _globals['_PUSHRESPONSE']._serialized_end=282 + _globals['_WRITETOONLINESTOREREQUEST']._serialized_start=285 + _globals['_WRITETOONLINESTOREREQUEST']._serialized_end=478 + _globals['_WRITETOONLINESTOREREQUEST_FEATURESENTRY']._serialized_start=203 + _globals['_WRITETOONLINESTOREREQUEST_FEATURESENTRY']._serialized_end=250 + _globals['_WRITETOONLINESTORERESPONSE']._serialized_start=480 + _globals['_WRITETOONLINESTORERESPONSE']._serialized_end=524 + _globals['_GRPCFEATURESERVER']._serialized_start=527 + _globals['_GRPCFEATURESERVER']._serialized_end=768 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/serving/GrpcServer_pb2.pyi b/sdk/python/feast/protos/feast/serving/GrpcServer_pb2.pyi new file mode 100644 index 0000000000..54964f46e5 --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/GrpcServer_pb2.pyi @@ -0,0 +1,120 @@ +""" +@generated by 
mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class PushRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class FeaturesEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + FEATURES_FIELD_NUMBER: builtins.int + STREAM_FEATURE_VIEW_FIELD_NUMBER: builtins.int + ALLOW_REGISTRY_CACHE_FIELD_NUMBER: builtins.int + TO_FIELD_NUMBER: builtins.int + @property + def features(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + stream_feature_view: builtins.str + allow_registry_cache: builtins.bool + to: builtins.str + def __init__( + self, + *, + features: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + stream_feature_view: builtins.str = ..., + allow_registry_cache: builtins.bool = ..., + to: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_registry_cache", b"allow_registry_cache", "features", b"features", "stream_feature_view", b"stream_feature_view", "to", b"to"]) -> None: ... + +global___PushRequest = PushRequest + +class PushResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STATUS_FIELD_NUMBER: builtins.int + status: builtins.bool + def __init__( + self, + *, + status: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["status", b"status"]) -> None: ... + +global___PushResponse = PushResponse + +class WriteToOnlineStoreRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class FeaturesEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + FEATURES_FIELD_NUMBER: builtins.int + FEATURE_VIEW_NAME_FIELD_NUMBER: builtins.int + ALLOW_REGISTRY_CACHE_FIELD_NUMBER: builtins.int + @property + def features(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + feature_view_name: builtins.str + allow_registry_cache: builtins.bool + def __init__( + self, + *, + features: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + feature_view_name: builtins.str = ..., + allow_registry_cache: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_registry_cache", b"allow_registry_cache", "feature_view_name", b"feature_view_name", "features", b"features"]) -> None: ... 
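# ---------------------------------------------------------------------------
# Reviewer note (illustrative): the request message typed above carries
# feature values as a plain string->string map. A minimal construction
# sketch; the feature-view and column names are assumptions:
from feast.protos.feast.serving import GrpcServer_pb2

req = GrpcServer_pb2.WriteToOnlineStoreRequest(
    features={"driver_id": "1001", "conv_rate": "0.85"},
    feature_view_name="driver_hourly_stats",
    allow_registry_cache=True)
# ---------------------------------------------------------------------------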
+ +global___WriteToOnlineStoreRequest = WriteToOnlineStoreRequest + +class WriteToOnlineStoreResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STATUS_FIELD_NUMBER: builtins.int + status: builtins.bool + def __init__( + self, + *, + status: builtins.bool = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["status", b"status"]) -> None: ... + +global___WriteToOnlineStoreResponse = WriteToOnlineStoreResponse diff --git a/sdk/python/feast/protos/feast/serving/GrpcServer_pb2_grpc.py b/sdk/python/feast/protos/feast/serving/GrpcServer_pb2_grpc.py new file mode 100644 index 0000000000..b381cc0f41 --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/GrpcServer_pb2_grpc.py @@ -0,0 +1,133 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from feast.protos.feast.serving import GrpcServer_pb2 as feast_dot_serving_dot_GrpcServer__pb2 +from feast.protos.feast.serving import ServingService_pb2 as feast_dot_serving_dot_ServingService__pb2 + + +class GrpcFeatureServerStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.Push = channel.unary_unary( + '/GrpcFeatureServer/Push', + request_serializer=feast_dot_serving_dot_GrpcServer__pb2.PushRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_GrpcServer__pb2.PushResponse.FromString, + ) + self.WriteToOnlineStore = channel.unary_unary( + '/GrpcFeatureServer/WriteToOnlineStore', + request_serializer=feast_dot_serving_dot_GrpcServer__pb2.WriteToOnlineStoreRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_GrpcServer__pb2.WriteToOnlineStoreResponse.FromString, + ) + self.GetOnlineFeatures = channel.unary_unary( + '/GrpcFeatureServer/GetOnlineFeatures', + request_serializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesResponse.FromString, + ) + + +class GrpcFeatureServerServicer(object): + """Missing associated documentation comment in .proto file.""" + + def Push(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def WriteToOnlineStore(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetOnlineFeatures(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_GrpcFeatureServerServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Push': grpc.unary_unary_rpc_method_handler( + servicer.Push, + request_deserializer=feast_dot_serving_dot_GrpcServer__pb2.PushRequest.FromString, + response_serializer=feast_dot_serving_dot_GrpcServer__pb2.PushResponse.SerializeToString, + ), + 'WriteToOnlineStore': grpc.unary_unary_rpc_method_handler( + servicer.WriteToOnlineStore, + 
request_deserializer=feast_dot_serving_dot_GrpcServer__pb2.WriteToOnlineStoreRequest.FromString, + response_serializer=feast_dot_serving_dot_GrpcServer__pb2.WriteToOnlineStoreResponse.SerializeToString, + ), + 'GetOnlineFeatures': grpc.unary_unary_rpc_method_handler( + servicer.GetOnlineFeatures, + request_deserializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesRequest.FromString, + response_serializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'GrpcFeatureServer', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class GrpcFeatureServer(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def Push(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/GrpcFeatureServer/Push', + feast_dot_serving_dot_GrpcServer__pb2.PushRequest.SerializeToString, + feast_dot_serving_dot_GrpcServer__pb2.PushResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def WriteToOnlineStore(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/GrpcFeatureServer/WriteToOnlineStore', + feast_dot_serving_dot_GrpcServer__pb2.WriteToOnlineStoreRequest.SerializeToString, + feast_dot_serving_dot_GrpcServer__pb2.WriteToOnlineStoreResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetOnlineFeatures(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/GrpcFeatureServer/GetOnlineFeatures', + feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesRequest.SerializeToString, + feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/sdk/python/feast/protos/feast/serving/ServingService_pb2.py b/sdk/python/feast/protos/feast/serving/ServingService_pb2.py new file mode 100644 index 0000000000..fa86664057 --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/ServingService_pb2.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
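# ---------------------------------------------------------------------------
# Reviewer note (illustrative, not part of the generated file): calling the
# Push RPC through the GrpcFeatureServerStub defined above. The server
# address, feature names, and the "to" routing value are assumptions:
import grpc
from feast.protos.feast.serving import GrpcServer_pb2, GrpcServer_pb2_grpc

channel = grpc.insecure_channel("localhost:6566")
stub = GrpcServer_pb2_grpc.GrpcFeatureServerStub(channel)
response = stub.Push(GrpcServer_pb2.PushRequest(
    features={"driver_id": "1001", "conv_rate": "0.85"},
    stream_feature_view="driver_hourly_stats",
    allow_registry_cache=True,
    to="online"))  # accepted values for "to" are not shown in this diff
print(response.status)  # bool status per PushResponse above
# ---------------------------------------------------------------------------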
+# source: feast/serving/ServingService.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"feast/serving/ServingService.proto\x12\rfeast.serving\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17\x66\x65\x61st/types/Value.proto\"\x1c\n\x1aGetFeastServingInfoRequest\".\n\x1bGetFeastServingInfoResponse\x12\x0f\n\x07version\x18\x01 \x01(\t\"E\n\x12\x46\x65\x61tureReferenceV2\x12\x19\n\x11\x66\x65\x61ture_view_name\x18\x01 \x01(\t\x12\x14\n\x0c\x66\x65\x61ture_name\x18\x02 \x01(\t\"\xfd\x02\n\x1aGetOnlineFeaturesRequestV2\x12\x33\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32!.feast.serving.FeatureReferenceV2\x12H\n\x0b\x65ntity_rows\x18\x02 \x03(\x0b\x32\x33.feast.serving.GetOnlineFeaturesRequestV2.EntityRow\x12\x0f\n\x07project\x18\x05 \x01(\t\x1a\xce\x01\n\tEntityRow\x12-\n\ttimestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12O\n\x06\x66ields\x18\x02 \x03(\x0b\x32?.feast.serving.GetOnlineFeaturesRequestV2.EntityRow.FieldsEntry\x1a\x41\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.feast.types.Value:\x02\x38\x01\"\x1a\n\x0b\x46\x65\x61tureList\x12\x0b\n\x03val\x18\x01 \x03(\t\"\xc8\x03\n\x18GetOnlineFeaturesRequest\x12\x19\n\x0f\x66\x65\x61ture_service\x18\x01 \x01(\tH\x00\x12.\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32\x1a.feast.serving.FeatureListH\x00\x12G\n\x08\x65ntities\x18\x03 \x03(\x0b\x32\x35.feast.serving.GetOnlineFeaturesRequest.EntitiesEntry\x12\x1a\n\x12\x66ull_feature_names\x18\x04 \x01(\x08\x12T\n\x0frequest_context\x18\x05 \x03(\x0b\x32;.feast.serving.GetOnlineFeaturesRequest.RequestContextEntry\x1aK\n\rEntitiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.feast.types.RepeatedValue:\x02\x38\x01\x1aQ\n\x13RequestContextEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.feast.types.RepeatedValue:\x02\x38\x01\x42\x06\n\x04kind\"\xd2\x02\n\x19GetOnlineFeaturesResponse\x12\x42\n\x08metadata\x18\x01 \x01(\x0b\x32\x30.feast.serving.GetOnlineFeaturesResponseMetadata\x12G\n\x07results\x18\x02 \x03(\x0b\x32\x36.feast.serving.GetOnlineFeaturesResponse.FeatureVector\x12\x0e\n\x06status\x18\x03 \x01(\x08\x1a\x97\x01\n\rFeatureVector\x12\"\n\x06values\x18\x01 \x03(\x0b\x32\x12.feast.types.Value\x12,\n\x08statuses\x18\x02 \x03(\x0e\x32\x1a.feast.serving.FieldStatus\x12\x34\n\x10\x65vent_timestamps\x18\x03 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"V\n!GetOnlineFeaturesResponseMetadata\x12\x31\n\rfeature_names\x18\x01 
\x01(\x0b\x32\x1a.feast.serving.FeatureList*[\n\x0b\x46ieldStatus\x12\x0b\n\x07INVALID\x10\x00\x12\x0b\n\x07PRESENT\x10\x01\x12\x0e\n\nNULL_VALUE\x10\x02\x12\r\n\tNOT_FOUND\x10\x03\x12\x13\n\x0fOUTSIDE_MAX_AGE\x10\x04\x32\xe6\x01\n\x0eServingService\x12l\n\x13GetFeastServingInfo\x12).feast.serving.GetFeastServingInfoRequest\x1a*.feast.serving.GetFeastServingInfoResponse\x12\x66\n\x11GetOnlineFeatures\x12\'.feast.serving.GetOnlineFeaturesRequest\x1a(.feast.serving.GetOnlineFeaturesResponseBZ\n\x13\x66\x65\x61st.proto.servingB\x0fServingAPIProtoZ2github.com/feast-dev/feast/go/protos/feast/servingb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.serving.ServingService_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\023feast.proto.servingB\017ServingAPIProtoZ2github.com/feast-dev/feast/go/protos/feast/serving' + _globals['_GETONLINEFEATURESREQUESTV2_ENTITYROW_FIELDSENTRY']._options = None + _globals['_GETONLINEFEATURESREQUESTV2_ENTITYROW_FIELDSENTRY']._serialized_options = b'8\001' + _globals['_GETONLINEFEATURESREQUEST_ENTITIESENTRY']._options = None + _globals['_GETONLINEFEATURESREQUEST_ENTITIESENTRY']._serialized_options = b'8\001' + _globals['_GETONLINEFEATURESREQUEST_REQUESTCONTEXTENTRY']._options = None + _globals['_GETONLINEFEATURESREQUEST_REQUESTCONTEXTENTRY']._serialized_options = b'8\001' + _globals['_FIELDSTATUS']._serialized_start=1560 + _globals['_FIELDSTATUS']._serialized_end=1651 + _globals['_GETFEASTSERVINGINFOREQUEST']._serialized_start=111 + _globals['_GETFEASTSERVINGINFOREQUEST']._serialized_end=139 + _globals['_GETFEASTSERVINGINFORESPONSE']._serialized_start=141 + _globals['_GETFEASTSERVINGINFORESPONSE']._serialized_end=187 + _globals['_FEATUREREFERENCEV2']._serialized_start=189 + _globals['_FEATUREREFERENCEV2']._serialized_end=258 + _globals['_GETONLINEFEATURESREQUESTV2']._serialized_start=261 + _globals['_GETONLINEFEATURESREQUESTV2']._serialized_end=642 + _globals['_GETONLINEFEATURESREQUESTV2_ENTITYROW']._serialized_start=436 + _globals['_GETONLINEFEATURESREQUESTV2_ENTITYROW']._serialized_end=642 + _globals['_GETONLINEFEATURESREQUESTV2_ENTITYROW_FIELDSENTRY']._serialized_start=577 + _globals['_GETONLINEFEATURESREQUESTV2_ENTITYROW_FIELDSENTRY']._serialized_end=642 + _globals['_FEATURELIST']._serialized_start=644 + _globals['_FEATURELIST']._serialized_end=670 + _globals['_GETONLINEFEATURESREQUEST']._serialized_start=673 + _globals['_GETONLINEFEATURESREQUEST']._serialized_end=1129 + _globals['_GETONLINEFEATURESREQUEST_ENTITIESENTRY']._serialized_start=963 + _globals['_GETONLINEFEATURESREQUEST_ENTITIESENTRY']._serialized_end=1038 + _globals['_GETONLINEFEATURESREQUEST_REQUESTCONTEXTENTRY']._serialized_start=1040 + _globals['_GETONLINEFEATURESREQUEST_REQUESTCONTEXTENTRY']._serialized_end=1121 + _globals['_GETONLINEFEATURESRESPONSE']._serialized_start=1132 + _globals['_GETONLINEFEATURESRESPONSE']._serialized_end=1470 + _globals['_GETONLINEFEATURESRESPONSE_FEATUREVECTOR']._serialized_start=1319 + _globals['_GETONLINEFEATURESRESPONSE_FEATUREVECTOR']._serialized_end=1470 + _globals['_GETONLINEFEATURESRESPONSEMETADATA']._serialized_start=1472 + _globals['_GETONLINEFEATURESRESPONSEMETADATA']._serialized_end=1558 + _globals['_SERVINGSERVICE']._serialized_start=1654 + _globals['_SERVINGSERVICE']._serialized_end=1884 +# @@protoc_insertion_point(module_scope) diff --git 
a/sdk/python/feast/protos/feast/serving/ServingService_pb2.pyi b/sdk/python/feast/protos/feast/serving/ServingService_pb2.pyi new file mode 100644 index 0000000000..3c5e57ae45 --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/ServingService_pb2.pyi @@ -0,0 +1,347 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2018 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _FieldStatus: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _FieldStatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FieldStatus.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + INVALID: _FieldStatus.ValueType # 0 + """Status is unset for this field.""" + PRESENT: _FieldStatus.ValueType # 1 + """Field value is present for this field and age is within max age.""" + NULL_VALUE: _FieldStatus.ValueType # 2 + """Values could be found for entity key and age is within max age, but + this field value is not assigned a value on ingestion into feast. + """ + NOT_FOUND: _FieldStatus.ValueType # 3 + """Entity key did not return any values as they do not exist in Feast. + This could suggest that the feature values have not yet been ingested + into feast or the ingestion failed. + """ + OUTSIDE_MAX_AGE: _FieldStatus.ValueType # 4 + """Values could be found for entity key, but field values are outside the maximum + allowable range. + """ + +class FieldStatus(_FieldStatus, metaclass=_FieldStatusEnumTypeWrapper): ... + +INVALID: FieldStatus.ValueType # 0 +"""Status is unset for this field.""" +PRESENT: FieldStatus.ValueType # 1 +"""Field value is present for this field and age is within max age.""" +NULL_VALUE: FieldStatus.ValueType # 2 +"""Values could be found for entity key and age is within max age, but +this field value is not assigned a value on ingestion into feast. +""" +NOT_FOUND: FieldStatus.ValueType # 3 +"""Entity key did not return any values as they do not exist in Feast. +This could suggest that the feature values have not yet been ingested +into feast or the ingestion failed. +""" +OUTSIDE_MAX_AGE: FieldStatus.ValueType # 4 +"""Values could be found for entity key, but field values are outside the maximum +allowable range. +""" +global___FieldStatus = FieldStatus + +class GetFeastServingInfoRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... 
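# ---------------------------------------------------------------------------
# Reviewer note (illustrative): this empty request pairs with the
# GetFeastServingInfo RPC on the ServingServiceStub defined later in this
# diff; a minimal version check, with the address assumed:
import grpc
from feast.protos.feast.serving import ServingService_pb2, ServingService_pb2_grpc

stub = ServingService_pb2_grpc.ServingServiceStub(
    grpc.insecure_channel("localhost:6566"))
info = stub.GetFeastServingInfo(ServingService_pb2.GetFeastServingInfoRequest())
print(info.version)  # Feast version string per GetFeastServingInfoResponse
# ---------------------------------------------------------------------------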
+ +global___GetFeastServingInfoRequest = GetFeastServingInfoRequest + +class GetFeastServingInfoResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VERSION_FIELD_NUMBER: builtins.int + version: builtins.str + """Feast version of this serving deployment.""" + def __init__( + self, + *, + version: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["version", b"version"]) -> None: ... + +global___GetFeastServingInfoResponse = GetFeastServingInfoResponse + +class FeatureReferenceV2(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_VIEW_NAME_FIELD_NUMBER: builtins.int + FEATURE_NAME_FIELD_NUMBER: builtins.int + feature_view_name: builtins.str + """Name of the Feature View to retrieve the feature from.""" + feature_name: builtins.str + """Name of the Feature to retrieve the feature from.""" + def __init__( + self, + *, + feature_view_name: builtins.str = ..., + feature_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["feature_name", b"feature_name", "feature_view_name", b"feature_view_name"]) -> None: ... + +global___FeatureReferenceV2 = FeatureReferenceV2 + +class GetOnlineFeaturesRequestV2(google.protobuf.message.Message): + """ToDo (oleksii): remove this message (since it's not used) and move EntityRow on package level""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class EntityRow(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class FieldsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> feast.types.Value_pb2.Value: ... + def __init__( + self, + *, + key: builtins.str = ..., + value: feast.types.Value_pb2.Value | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + TIMESTAMP_FIELD_NUMBER: builtins.int + FIELDS_FIELD_NUMBER: builtins.int + @property + def timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: + """Request timestamp of this row. This value will be used, + together with maxAge, to determine feature staleness. + """ + @property + def fields(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, feast.types.Value_pb2.Value]: + """Map containing mapping of entity name to entity value.""" + def __init__( + self, + *, + timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., + fields: collections.abc.Mapping[builtins.str, feast.types.Value_pb2.Value] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["timestamp", b"timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["fields", b"fields", "timestamp", b"timestamp"]) -> None: ... 
+ + FEATURES_FIELD_NUMBER: builtins.int + ENTITY_ROWS_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + @property + def features(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FeatureReferenceV2]: + """List of features that are being retrieved""" + @property + def entity_rows(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GetOnlineFeaturesRequestV2.EntityRow]: + """List of entity rows, containing entity id and timestamp data. + Used during retrieval of feature rows and for joining feature + rows into a final dataset + """ + project: builtins.str + """Optional field to specify project name override. If specified, uses the + given project for retrieval. Overrides the projects specified in + Feature References if both are specified. + """ + def __init__( + self, + *, + features: collections.abc.Iterable[global___FeatureReferenceV2] | None = ..., + entity_rows: collections.abc.Iterable[global___GetOnlineFeaturesRequestV2.EntityRow] | None = ..., + project: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entity_rows", b"entity_rows", "features", b"features", "project", b"project"]) -> None: ... + +global___GetOnlineFeaturesRequestV2 = GetOnlineFeaturesRequestV2 + +class FeatureList(google.protobuf.message.Message): + """In JSON "val" field can be omitted""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___FeatureList = FeatureList + +class GetOnlineFeaturesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class EntitiesEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> feast.types.Value_pb2.RepeatedValue: ... + def __init__( + self, + *, + key: builtins.str = ..., + value: feast.types.Value_pb2.RepeatedValue | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + class RequestContextEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> feast.types.Value_pb2.RepeatedValue: ... + def __init__( + self, + *, + key: builtins.str = ..., + value: feast.types.Value_pb2.RepeatedValue | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + FEATURE_SERVICE_FIELD_NUMBER: builtins.int + FEATURES_FIELD_NUMBER: builtins.int + ENTITIES_FIELD_NUMBER: builtins.int + FULL_FEATURE_NAMES_FIELD_NUMBER: builtins.int + REQUEST_CONTEXT_FIELD_NUMBER: builtins.int + feature_service: builtins.str + @property + def features(self) -> global___FeatureList: ... 
+ @property + def entities(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, feast.types.Value_pb2.RepeatedValue]: + """The entity data is specified in a columnar format + A map of entity name -> list of values + """ + full_feature_names: builtins.bool + @property + def request_context(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, feast.types.Value_pb2.RepeatedValue]: + """Context for OnDemand Feature Transformation + (was moved to dedicated parameter to avoid unnecessary separation logic on serving side) + A map of variable name -> list of values + """ + def __init__( + self, + *, + feature_service: builtins.str = ..., + features: global___FeatureList | None = ..., + entities: collections.abc.Mapping[builtins.str, feast.types.Value_pb2.RepeatedValue] | None = ..., + full_feature_names: builtins.bool = ..., + request_context: collections.abc.Mapping[builtins.str, feast.types.Value_pb2.RepeatedValue] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["feature_service", b"feature_service", "features", b"features", "kind", b"kind"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["entities", b"entities", "feature_service", b"feature_service", "features", b"features", "full_feature_names", b"full_feature_names", "kind", b"kind", "request_context", b"request_context"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["kind", b"kind"]) -> typing_extensions.Literal["feature_service", "features"] | None: ... + +global___GetOnlineFeaturesRequest = GetOnlineFeaturesRequest + +class GetOnlineFeaturesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class FeatureVector(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUES_FIELD_NUMBER: builtins.int + STATUSES_FIELD_NUMBER: builtins.int + EVENT_TIMESTAMPS_FIELD_NUMBER: builtins.int + @property + def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.types.Value_pb2.Value]: ... + @property + def statuses(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___FieldStatus.ValueType]: ... + @property + def event_timestamps(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.timestamp_pb2.Timestamp]: ... + def __init__( + self, + *, + values: collections.abc.Iterable[feast.types.Value_pb2.Value] | None = ..., + statuses: collections.abc.Iterable[global___FieldStatus.ValueType] | None = ..., + event_timestamps: collections.abc.Iterable[google.protobuf.timestamp_pb2.Timestamp] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["event_timestamps", b"event_timestamps", "statuses", b"statuses", "values", b"values"]) -> None: ... + + METADATA_FIELD_NUMBER: builtins.int + RESULTS_FIELD_NUMBER: builtins.int + STATUS_FIELD_NUMBER: builtins.int + @property + def metadata(self) -> global___GetOnlineFeaturesResponseMetadata: ... + @property + def results(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GetOnlineFeaturesResponse.FeatureVector]: + """Length of "results" array should match length of requested features. 
+ We also preserve the same order of features here as in metadata.feature_names + """ + status: builtins.bool + def __init__( + self, + *, + metadata: global___GetOnlineFeaturesResponseMetadata | None = ..., + results: collections.abc.Iterable[global___GetOnlineFeaturesResponse.FeatureVector] | None = ..., + status: builtins.bool = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "results", b"results", "status", b"status"]) -> None: ... + +global___GetOnlineFeaturesResponse = GetOnlineFeaturesResponse + +class GetOnlineFeaturesResponseMetadata(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FEATURE_NAMES_FIELD_NUMBER: builtins.int + @property + def feature_names(self) -> global___FeatureList: ... + def __init__( + self, + *, + feature_names: global___FeatureList | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["feature_names", b"feature_names"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["feature_names", b"feature_names"]) -> None: ... + +global___GetOnlineFeaturesResponseMetadata = GetOnlineFeaturesResponseMetadata diff --git a/sdk/python/feast/protos/feast/serving/ServingService_pb2_grpc.py b/sdk/python/feast/protos/feast/serving/ServingService_pb2_grpc.py new file mode 100644 index 0000000000..d3cd055f66 --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/ServingService_pb2_grpc.py @@ -0,0 +1,101 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from feast.protos.feast.serving import ServingService_pb2 as feast_dot_serving_dot_ServingService__pb2 + + +class ServingServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.GetFeastServingInfo = channel.unary_unary( + '/feast.serving.ServingService/GetFeastServingInfo', + request_serializer=feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoResponse.FromString, + ) + self.GetOnlineFeatures = channel.unary_unary( + '/feast.serving.ServingService/GetOnlineFeatures', + request_serializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesResponse.FromString, + ) + + +class ServingServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetFeastServingInfo(self, request, context): + """Get information about this Feast serving. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetOnlineFeatures(self, request, context): + """Get online features synchronously. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_ServingServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetFeastServingInfo': grpc.unary_unary_rpc_method_handler( + servicer.GetFeastServingInfo, + request_deserializer=feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoRequest.FromString, + response_serializer=feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoResponse.SerializeToString, + ), + 'GetOnlineFeatures': grpc.unary_unary_rpc_method_handler( + servicer.GetOnlineFeatures, + request_deserializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesRequest.FromString, + response_serializer=feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'feast.serving.ServingService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class ServingService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetFeastServingInfo(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.serving.ServingService/GetFeastServingInfo', + feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoRequest.SerializeToString, + feast_dot_serving_dot_ServingService__pb2.GetFeastServingInfoResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetOnlineFeatures(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.serving.ServingService/GetOnlineFeatures', + feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesRequest.SerializeToString, + feast_dot_serving_dot_ServingService__pb2.GetOnlineFeaturesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/sdk/python/feast/protos/feast/serving/TransformationService_pb2.py b/sdk/python/feast/protos/feast/serving/TransformationService_pb2.py new file mode 100644 index 0000000000..bc060e9a77 --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/TransformationService_pb2.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/serving/TransformationService.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)feast/serving/TransformationService.proto\x12\rfeast.serving\"+\n\tValueType\x12\x15\n\x0b\x61rrow_value\x18\x01 \x01(\x0cH\x00\x42\x07\n\x05value\"%\n#GetTransformationServiceInfoRequest\"\x9c\x01\n$GetTransformationServiceInfoResponse\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x36\n\x04type\x18\x02 \x01(\x0e\x32(.feast.serving.TransformationServiceType\x12+\n#transformation_service_type_details\x18\x03 \x01(\t\"\x88\x01\n\x18TransformFeaturesRequest\x12#\n\x1bon_demand_feature_view_name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x36\n\x14transformation_input\x18\x03 \x01(\x0b\x32\x18.feast.serving.ValueType\"T\n\x19TransformFeaturesResponse\x12\x37\n\x15transformation_output\x18\x03 \x01(\x0b\x32\x18.feast.serving.ValueType*\x94\x01\n\x19TransformationServiceType\x12\'\n#TRANSFORMATION_SERVICE_TYPE_INVALID\x10\x00\x12&\n\"TRANSFORMATION_SERVICE_TYPE_PYTHON\x10\x01\x12&\n\"TRANSFORMATION_SERVICE_TYPE_CUSTOM\x10\x64\x32\x89\x02\n\x15TransformationService\x12\x87\x01\n\x1cGetTransformationServiceInfo\x12\x32.feast.serving.GetTransformationServiceInfoRequest\x1a\x33.feast.serving.GetTransformationServiceInfoResponse\x12\x66\n\x11TransformFeatures\x12\'.feast.serving.TransformFeaturesRequest\x1a(.feast.serving.TransformFeaturesResponseBh\n\x13\x66\x65\x61st.proto.servingB\x1dTransformationServiceAPIProtoZ2github.com/feast-dev/feast/go/protos/feast/servingb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.serving.TransformationService_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\023feast.proto.servingB\035TransformationServiceAPIProtoZ2github.com/feast-dev/feast/go/protos/feast/serving' + _globals['_TRANSFORMATIONSERVICETYPE']._serialized_start=529 + _globals['_TRANSFORMATIONSERVICETYPE']._serialized_end=677 + _globals['_VALUETYPE']._serialized_start=60 + _globals['_VALUETYPE']._serialized_end=103 + _globals['_GETTRANSFORMATIONSERVICEINFOREQUEST']._serialized_start=105 + _globals['_GETTRANSFORMATIONSERVICEINFOREQUEST']._serialized_end=142 + _globals['_GETTRANSFORMATIONSERVICEINFORESPONSE']._serialized_start=145 + _globals['_GETTRANSFORMATIONSERVICEINFORESPONSE']._serialized_end=301 + _globals['_TRANSFORMFEATURESREQUEST']._serialized_start=304 + _globals['_TRANSFORMFEATURESREQUEST']._serialized_end=440 + _globals['_TRANSFORMFEATURESRESPONSE']._serialized_start=442 + _globals['_TRANSFORMFEATURESRESPONSE']._serialized_end=526 + _globals['_TRANSFORMATIONSERVICE']._serialized_start=680 + _globals['_TRANSFORMATIONSERVICE']._serialized_end=945 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/serving/TransformationService_pb2.pyi b/sdk/python/feast/protos/feast/serving/TransformationService_pb2.pyi new file mode 100644 index 0000000000..3e0752b7bd --- /dev/null +++ 
b/sdk/python/feast/protos/feast/serving/TransformationService_pb2.pyi @@ -0,0 +1,136 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2021 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _TransformationServiceType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _TransformationServiceTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_TransformationServiceType.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + TRANSFORMATION_SERVICE_TYPE_INVALID: _TransformationServiceType.ValueType # 0 + TRANSFORMATION_SERVICE_TYPE_PYTHON: _TransformationServiceType.ValueType # 1 + TRANSFORMATION_SERVICE_TYPE_CUSTOM: _TransformationServiceType.ValueType # 100 + +class TransformationServiceType(_TransformationServiceType, metaclass=_TransformationServiceTypeEnumTypeWrapper): ... + +TRANSFORMATION_SERVICE_TYPE_INVALID: TransformationServiceType.ValueType # 0 +TRANSFORMATION_SERVICE_TYPE_PYTHON: TransformationServiceType.ValueType # 1 +TRANSFORMATION_SERVICE_TYPE_CUSTOM: TransformationServiceType.ValueType # 100 +global___TransformationServiceType = TransformationServiceType + +class ValueType(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ARROW_VALUE_FIELD_NUMBER: builtins.int + arrow_value: builtins.bytes + """Having a oneOf provides forward compatibility if we need to support compound types + that are not supported by arrow natively. + """ + def __init__( + self, + *, + arrow_value: builtins.bytes = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["arrow_value", b"arrow_value", "value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["arrow_value", b"arrow_value", "value", b"value"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["arrow_value"] | None: ... + +global___ValueType = ValueType + +class GetTransformationServiceInfoRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... 
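Assuming pyarrow is available, a brief sketch of wrapping an Arrow IPC payload in the ValueType oneof defined above (the input batch is hypothetical; the oneof leaves room for future non-Arrow encodings, per the comment in the stub):

import pyarrow as pa

from feast.protos.feast.serving.TransformationService_pb2 import ValueType

# Serialize a RecordBatch to Arrow IPC stream bytes.
batch = pa.RecordBatch.from_pydict({"conv_rate": [0.5, 0.7]})
sink = pa.BufferOutputStream()
with pa.ipc.new_stream(sink, batch.schema) as writer:
    writer.write_batch(batch)

# Wrap the bytes in the oneof; WhichOneof reports which member is set.
payload = ValueType(arrow_value=sink.getvalue().to_pybytes())
assert payload.WhichOneof("value") == "arrow_value"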
+ +global___GetTransformationServiceInfoRequest = GetTransformationServiceInfoRequest + +class GetTransformationServiceInfoResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VERSION_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + TRANSFORMATION_SERVICE_TYPE_DETAILS_FIELD_NUMBER: builtins.int + version: builtins.str + """Feast version of this transformation service deployment.""" + type: global___TransformationServiceType.ValueType + """Type of transformation service deployment. This is either Python, or custom""" + transformation_service_type_details: builtins.str + def __init__( + self, + *, + version: builtins.str = ..., + type: global___TransformationServiceType.ValueType = ..., + transformation_service_type_details: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["transformation_service_type_details", b"transformation_service_type_details", "type", b"type", "version", b"version"]) -> None: ... + +global___GetTransformationServiceInfoResponse = GetTransformationServiceInfoResponse + +class TransformFeaturesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ON_DEMAND_FEATURE_VIEW_NAME_FIELD_NUMBER: builtins.int + PROJECT_FIELD_NUMBER: builtins.int + TRANSFORMATION_INPUT_FIELD_NUMBER: builtins.int + on_demand_feature_view_name: builtins.str + project: builtins.str + @property + def transformation_input(self) -> global___ValueType: ... + def __init__( + self, + *, + on_demand_feature_view_name: builtins.str = ..., + project: builtins.str = ..., + transformation_input: global___ValueType | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["transformation_input", b"transformation_input"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["on_demand_feature_view_name", b"on_demand_feature_view_name", "project", b"project", "transformation_input", b"transformation_input"]) -> None: ... + +global___TransformFeaturesRequest = TransformFeaturesRequest + +class TransformFeaturesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TRANSFORMATION_OUTPUT_FIELD_NUMBER: builtins.int + @property + def transformation_output(self) -> global___ValueType: ... + def __init__( + self, + *, + transformation_output: global___ValueType | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["transformation_output", b"transformation_output"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["transformation_output", b"transformation_output"]) -> None: ... + +global___TransformFeaturesResponse = TransformFeaturesResponse diff --git a/sdk/python/feast/protos/feast/serving/TransformationService_pb2_grpc.py b/sdk/python/feast/protos/feast/serving/TransformationService_pb2_grpc.py new file mode 100644 index 0000000000..30099e39ca --- /dev/null +++ b/sdk/python/feast/protos/feast/serving/TransformationService_pb2_grpc.py @@ -0,0 +1,99 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from feast.protos.feast.serving import TransformationService_pb2 as feast_dot_serving_dot_TransformationService__pb2 + + +class TransformationServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. 
+ + Args: + channel: A grpc.Channel. + """ + self.GetTransformationServiceInfo = channel.unary_unary( + '/feast.serving.TransformationService/GetTransformationServiceInfo', + request_serializer=feast_dot_serving_dot_TransformationService__pb2.GetTransformationServiceInfoRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_TransformationService__pb2.GetTransformationServiceInfoResponse.FromString, + ) + self.TransformFeatures = channel.unary_unary( + '/feast.serving.TransformationService/TransformFeatures', + request_serializer=feast_dot_serving_dot_TransformationService__pb2.TransformFeaturesRequest.SerializeToString, + response_deserializer=feast_dot_serving_dot_TransformationService__pb2.TransformFeaturesResponse.FromString, + ) + + +class TransformationServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetTransformationServiceInfo(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def TransformFeatures(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_TransformationServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetTransformationServiceInfo': grpc.unary_unary_rpc_method_handler( + servicer.GetTransformationServiceInfo, + request_deserializer=feast_dot_serving_dot_TransformationService__pb2.GetTransformationServiceInfoRequest.FromString, + response_serializer=feast_dot_serving_dot_TransformationService__pb2.GetTransformationServiceInfoResponse.SerializeToString, + ), + 'TransformFeatures': grpc.unary_unary_rpc_method_handler( + servicer.TransformFeatures, + request_deserializer=feast_dot_serving_dot_TransformationService__pb2.TransformFeaturesRequest.FromString, + response_serializer=feast_dot_serving_dot_TransformationService__pb2.TransformFeaturesResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'feast.serving.TransformationService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class TransformationService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetTransformationServiceInfo(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.serving.TransformationService/GetTransformationServiceInfo', + feast_dot_serving_dot_TransformationService__pb2.GetTransformationServiceInfoRequest.SerializeToString, + feast_dot_serving_dot_TransformationService__pb2.GetTransformationServiceInfoResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def TransformFeatures(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/feast.serving.TransformationService/TransformFeatures', + feast_dot_serving_dot_TransformationService__pb2.TransformFeaturesRequest.SerializeToString, + feast_dot_serving_dot_TransformationService__pb2.TransformFeaturesResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/sdk/python/feast/protos/feast/serving/__init__.py b/sdk/python/feast/protos/feast/serving/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/protos/feast/storage/Redis_pb2.py b/sdk/python/feast/protos/feast/storage/Redis_pb2.py new file mode 100644 index 0000000000..37d59c9df5 --- /dev/null +++ b/sdk/python/feast/protos/feast/storage/Redis_pb2.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
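A minimal server-side sketch for the transformation service registered above (the servicer behavior, version string, and port are hypothetical; a real deployment would also override TransformFeatures):

from concurrent import futures

import grpc

from feast.protos.feast.serving import (
    TransformationService_pb2,
    TransformationService_pb2_grpc,
)


class InfoOnlyTransformationService(
    TransformationService_pb2_grpc.TransformationServiceServicer
):
    # Hypothetical servicer that only reports its identity.
    def GetTransformationServiceInfo(self, request, context):
        return TransformationService_pb2.GetTransformationServiceInfoResponse(
            version="0.0.0",
            type=TransformationService_pb2.TRANSFORMATION_SERVICE_TYPE_PYTHON,
        )


server = grpc.server(futures.ThreadPoolExecutor(max_workers=2))
TransformationService_pb2_grpc.add_TransformationServiceServicer_to_server(
    InfoOnlyTransformationService(), server
)
server.add_insecure_port("[::]:6569")  # hypothetical port
server.start()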
+# source: feast/storage/Redis.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19\x66\x65\x61st/storage/Redis.proto\x12\rfeast.storage\x1a\x17\x66\x65\x61st/types/Value.proto\"^\n\nRedisKeyV2\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x14\n\x0c\x65ntity_names\x18\x02 \x03(\t\x12)\n\rentity_values\x18\x03 \x03(\x0b\x32\x12.feast.types.ValueBU\n\x13\x66\x65\x61st.proto.storageB\nRedisProtoZ2github.com/feast-dev/feast/go/protos/feast/storageb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.storage.Redis_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\023feast.proto.storageB\nRedisProtoZ2github.com/feast-dev/feast/go/protos/feast/storage' + _globals['_REDISKEYV2']._serialized_start=69 + _globals['_REDISKEYV2']._serialized_end=163 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/storage/Redis_pb2.pyi b/sdk/python/feast/protos/feast/storage/Redis_pb2.pyi new file mode 100644 index 0000000000..74cc2b07f0 --- /dev/null +++ b/sdk/python/feast/protos/feast/storage/Redis_pb2.pyi @@ -0,0 +1,54 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2019 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class RedisKeyV2(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROJECT_FIELD_NUMBER: builtins.int + ENTITY_NAMES_FIELD_NUMBER: builtins.int + ENTITY_VALUES_FIELD_NUMBER: builtins.int + project: builtins.str + @property + def entity_names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + @property + def entity_values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.types.Value_pb2.Value]: ... + def __init__( + self, + *, + project: builtins.str = ..., + entity_names: collections.abc.Iterable[builtins.str] | None = ..., + entity_values: collections.abc.Iterable[feast.types.Value_pb2.Value] | None = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["entity_names", b"entity_names", "entity_values", b"entity_values", "project", b"project"]) -> None: ... + +global___RedisKeyV2 = RedisKeyV2 diff --git a/sdk/python/feast/protos/feast/storage/Redis_pb2_grpc.py b/sdk/python/feast/protos/feast/storage/Redis_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/storage/Redis_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/storage/__init__.py b/sdk/python/feast/protos/feast/storage/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/protos/feast/types/EntityKey_pb2.py b/sdk/python/feast/protos/feast/types/EntityKey_pb2.py new file mode 100644 index 0000000000..a6e1abf730 --- /dev/null +++ b/sdk/python/feast/protos/feast/types/EntityKey_pb2.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/types/EntityKey.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66\x65\x61st/types/EntityKey.proto\x12\x0b\x66\x65\x61st.types\x1a\x17\x66\x65\x61st/types/Value.proto\"I\n\tEntityKey\x12\x11\n\tjoin_keys\x18\x01 \x03(\t\x12)\n\rentity_values\x18\x02 \x03(\x0b\x32\x12.feast.types.ValueBU\n\x11\x66\x65\x61st.proto.typesB\x0e\x45ntityKeyProtoZ0github.com/feast-dev/feast/go/protos/feast/typesb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.types.EntityKey_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\021feast.proto.typesB\016EntityKeyProtoZ0github.com/feast-dev/feast/go/protos/feast/types' + _globals['_ENTITYKEY']._serialized_start=69 + _globals['_ENTITYKEY']._serialized_end=142 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/types/EntityKey_pb2.pyi b/sdk/python/feast/protos/feast/types/EntityKey_pb2.pyi new file mode 100644 index 0000000000..fe65e0c1b3 --- /dev/null +++ b/sdk/python/feast/protos/feast/types/EntityKey_pb2.pyi @@ -0,0 +1,51 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2018 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" +import builtins +import collections.abc +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class EntityKey(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + JOIN_KEYS_FIELD_NUMBER: builtins.int + ENTITY_VALUES_FIELD_NUMBER: builtins.int + @property + def join_keys(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + @property + def entity_values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[feast.types.Value_pb2.Value]: ... + def __init__( + self, + *, + join_keys: collections.abc.Iterable[builtins.str] | None = ..., + entity_values: collections.abc.Iterable[feast.types.Value_pb2.Value] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entity_values", b"entity_values", "join_keys", b"join_keys"]) -> None: ... + +global___EntityKey = EntityKey diff --git a/sdk/python/feast/protos/feast/types/EntityKey_pb2_grpc.py b/sdk/python/feast/protos/feast/types/EntityKey_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/types/EntityKey_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/types/Field_pb2.py b/sdk/python/feast/protos/feast/types/Field_pb2.py new file mode 100644 index 0000000000..973fdc6cde --- /dev/null +++ b/sdk/python/feast/protos/feast/types/Field_pb2.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: feast/types/Field.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from feast.protos.feast.types import Value_pb2 as feast_dot_types_dot_Value__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x66\x65\x61st/types/Field.proto\x12\x0b\x66\x65\x61st.types\x1a\x17\x66\x65\x61st/types/Value.proto\"\xaf\x01\n\x05\x46ield\x12\x0c\n\x04name\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\x12*\n\x04tags\x18\x03 \x03(\x0b\x32\x1c.feast.types.Field.TagsEntry\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42Q\n\x11\x66\x65\x61st.proto.typesB\nFieldProtoZ0github.com/feast-dev/feast/go/protos/feast/typesb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.types.Field_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\021feast.proto.typesB\nFieldProtoZ0github.com/feast-dev/feast/go/protos/feast/types' + _globals['_FIELD_TAGSENTRY']._options = None + _globals['_FIELD_TAGSENTRY']._serialized_options = b'8\001' + _globals['_FIELD']._serialized_start=66 + _globals['_FIELD']._serialized_end=241 + _globals['_FIELD_TAGSENTRY']._serialized_start=198 + _globals['_FIELD_TAGSENTRY']._serialized_end=241 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/types/Field_pb2.pyi b/sdk/python/feast/protos/feast/types/Field_pb2.pyi new file mode 100644 index 0000000000..28a2194237 --- /dev/null +++ b/sdk/python/feast/protos/feast/types/Field_pb2.pyi @@ -0,0 +1,73 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2018 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import feast.types.Value_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class Field(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class TagsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + TAGS_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + name: builtins.str + value: feast.types.Value_pb2.ValueType.Enum.ValueType + @property + def tags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """Tags for user defined metadata on a field""" + description: builtins.str + """Description of the field.""" + def __init__( + self, + *, + name: builtins.str = ..., + value: feast.types.Value_pb2.ValueType.Enum.ValueType = ..., + tags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + description: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "name", b"name", "tags", b"tags", "value", b"value"]) -> None: ... + +global___Field = Field diff --git a/sdk/python/feast/protos/feast/types/Field_pb2_grpc.py b/sdk/python/feast/protos/feast/types/Field_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/types/Field_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/types/Value_pb2.py b/sdk/python/feast/protos/feast/types/Value_pb2.py new file mode 100644 index 0000000000..18ee331180 --- /dev/null +++ b/sdk/python/feast/protos/feast/types/Value_pb2.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: feast/types/Value.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x66\x65\x61st/types/Value.proto\x12\x0b\x66\x65\x61st.types\"\x97\x02\n\tValueType\"\x89\x02\n\x04\x45num\x12\x0b\n\x07INVALID\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\n\n\x06STRING\x10\x02\x12\t\n\x05INT32\x10\x03\x12\t\n\x05INT64\x10\x04\x12\n\n\x06\x44OUBLE\x10\x05\x12\t\n\x05\x46LOAT\x10\x06\x12\x08\n\x04\x42OOL\x10\x07\x12\x12\n\x0eUNIX_TIMESTAMP\x10\x08\x12\x0e\n\nBYTES_LIST\x10\x0b\x12\x0f\n\x0bSTRING_LIST\x10\x0c\x12\x0e\n\nINT32_LIST\x10\r\x12\x0e\n\nINT64_LIST\x10\x0e\x12\x0f\n\x0b\x44OUBLE_LIST\x10\x0f\x12\x0e\n\nFLOAT_LIST\x10\x10\x12\r\n\tBOOL_LIST\x10\x11\x12\x17\n\x13UNIX_TIMESTAMP_LIST\x10\x12\x12\x08\n\x04NULL\x10\x13\"\x82\x05\n\x05Value\x12\x13\n\tbytes_val\x18\x01 \x01(\x0cH\x00\x12\x14\n\nstring_val\x18\x02 \x01(\tH\x00\x12\x13\n\tint32_val\x18\x03 \x01(\x05H\x00\x12\x13\n\tint64_val\x18\x04 \x01(\x03H\x00\x12\x14\n\ndouble_val\x18\x05 \x01(\x01H\x00\x12\x13\n\tfloat_val\x18\x06 \x01(\x02H\x00\x12\x12\n\x08\x62ool_val\x18\x07 \x01(\x08H\x00\x12\x1c\n\x12unix_timestamp_val\x18\x08 \x01(\x03H\x00\x12\x30\n\x0e\x62ytes_list_val\x18\x0b \x01(\x0b\x32\x16.feast.types.BytesListH\x00\x12\x32\n\x0fstring_list_val\x18\x0c \x01(\x0b\x32\x17.feast.types.StringListH\x00\x12\x30\n\x0eint32_list_val\x18\r 
\x01(\x0b\x32\x16.feast.types.Int32ListH\x00\x12\x30\n\x0eint64_list_val\x18\x0e \x01(\x0b\x32\x16.feast.types.Int64ListH\x00\x12\x32\n\x0f\x64ouble_list_val\x18\x0f \x01(\x0b\x32\x17.feast.types.DoubleListH\x00\x12\x30\n\x0e\x66loat_list_val\x18\x10 \x01(\x0b\x32\x16.feast.types.FloatListH\x00\x12.\n\rbool_list_val\x18\x11 \x01(\x0b\x32\x15.feast.types.BoolListH\x00\x12\x39\n\x17unix_timestamp_list_val\x18\x12 \x01(\x0b\x32\x16.feast.types.Int64ListH\x00\x12%\n\x08null_val\x18\x13 \x01(\x0e\x32\x11.feast.types.NullH\x00\x42\x05\n\x03val\"\x18\n\tBytesList\x12\x0b\n\x03val\x18\x01 \x03(\x0c\"\x19\n\nStringList\x12\x0b\n\x03val\x18\x01 \x03(\t\"\x18\n\tInt32List\x12\x0b\n\x03val\x18\x01 \x03(\x05\"\x18\n\tInt64List\x12\x0b\n\x03val\x18\x01 \x03(\x03\"\x19\n\nDoubleList\x12\x0b\n\x03val\x18\x01 \x03(\x01\"\x18\n\tFloatList\x12\x0b\n\x03val\x18\x01 \x03(\x02\"\x17\n\x08\x42oolList\x12\x0b\n\x03val\x18\x01 \x03(\x08\"0\n\rRepeatedValue\x12\x1f\n\x03val\x18\x01 \x03(\x0b\x32\x12.feast.types.Value*\x10\n\x04Null\x12\x08\n\x04NULL\x10\x00\x42Q\n\x11\x66\x65\x61st.proto.typesB\nValueProtoZ0github.com/feast-dev/feast/go/protos/feast/typesb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'feast.types.Value_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + _globals['DESCRIPTOR']._options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\021feast.proto.typesB\nValueProtoZ0github.com/feast-dev/feast/go/protos/feast/types' + _globals['_NULL']._serialized_start=1200 + _globals['_NULL']._serialized_end=1216 + _globals['_VALUETYPE']._serialized_start=41 + _globals['_VALUETYPE']._serialized_end=320 + _globals['_VALUETYPE_ENUM']._serialized_start=55 + _globals['_VALUETYPE_ENUM']._serialized_end=320 + _globals['_VALUE']._serialized_start=323 + _globals['_VALUE']._serialized_end=965 + _globals['_BYTESLIST']._serialized_start=967 + _globals['_BYTESLIST']._serialized_end=991 + _globals['_STRINGLIST']._serialized_start=993 + _globals['_STRINGLIST']._serialized_end=1018 + _globals['_INT32LIST']._serialized_start=1020 + _globals['_INT32LIST']._serialized_end=1044 + _globals['_INT64LIST']._serialized_start=1046 + _globals['_INT64LIST']._serialized_end=1070 + _globals['_DOUBLELIST']._serialized_start=1072 + _globals['_DOUBLELIST']._serialized_end=1097 + _globals['_FLOATLIST']._serialized_start=1099 + _globals['_FLOATLIST']._serialized_end=1123 + _globals['_BOOLLIST']._serialized_start=1125 + _globals['_BOOLLIST']._serialized_end=1148 + _globals['_REPEATEDVALUE']._serialized_start=1150 + _globals['_REPEATEDVALUE']._serialized_end=1198 +# @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/types/Value_pb2.pyi b/sdk/python/feast/protos/feast/types/Value_pb2.pyi new file mode 100644 index 0000000000..15e4870e6a --- /dev/null +++ b/sdk/python/feast/protos/feast/types/Value_pb2.pyi @@ -0,0 +1,296 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file + +Copyright 2018 The Feast Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _Null: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _NullEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Null.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + NULL: _Null.ValueType # 0 + +class Null(_Null, metaclass=_NullEnumTypeWrapper): ... + +NULL: Null.ValueType # 0 +global___Null = Null + +class ValueType(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _Enum: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _EnumEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ValueType._Enum.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + INVALID: ValueType._Enum.ValueType # 0 + BYTES: ValueType._Enum.ValueType # 1 + STRING: ValueType._Enum.ValueType # 2 + INT32: ValueType._Enum.ValueType # 3 + INT64: ValueType._Enum.ValueType # 4 + DOUBLE: ValueType._Enum.ValueType # 5 + FLOAT: ValueType._Enum.ValueType # 6 + BOOL: ValueType._Enum.ValueType # 7 + UNIX_TIMESTAMP: ValueType._Enum.ValueType # 8 + BYTES_LIST: ValueType._Enum.ValueType # 11 + STRING_LIST: ValueType._Enum.ValueType # 12 + INT32_LIST: ValueType._Enum.ValueType # 13 + INT64_LIST: ValueType._Enum.ValueType # 14 + DOUBLE_LIST: ValueType._Enum.ValueType # 15 + FLOAT_LIST: ValueType._Enum.ValueType # 16 + BOOL_LIST: ValueType._Enum.ValueType # 17 + UNIX_TIMESTAMP_LIST: ValueType._Enum.ValueType # 18 + NULL: ValueType._Enum.ValueType # 19 + + class Enum(_Enum, metaclass=_EnumEnumTypeWrapper): ... + INVALID: ValueType.Enum.ValueType # 0 + BYTES: ValueType.Enum.ValueType # 1 + STRING: ValueType.Enum.ValueType # 2 + INT32: ValueType.Enum.ValueType # 3 + INT64: ValueType.Enum.ValueType # 4 + DOUBLE: ValueType.Enum.ValueType # 5 + FLOAT: ValueType.Enum.ValueType # 6 + BOOL: ValueType.Enum.ValueType # 7 + UNIX_TIMESTAMP: ValueType.Enum.ValueType # 8 + BYTES_LIST: ValueType.Enum.ValueType # 11 + STRING_LIST: ValueType.Enum.ValueType # 12 + INT32_LIST: ValueType.Enum.ValueType # 13 + INT64_LIST: ValueType.Enum.ValueType # 14 + DOUBLE_LIST: ValueType.Enum.ValueType # 15 + FLOAT_LIST: ValueType.Enum.ValueType # 16 + BOOL_LIST: ValueType.Enum.ValueType # 17 + UNIX_TIMESTAMP_LIST: ValueType.Enum.ValueType # 18 + NULL: ValueType.Enum.ValueType # 19 + + def __init__( + self, + ) -> None: ... 
+ +global___ValueType = ValueType + +class Value(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BYTES_VAL_FIELD_NUMBER: builtins.int + STRING_VAL_FIELD_NUMBER: builtins.int + INT32_VAL_FIELD_NUMBER: builtins.int + INT64_VAL_FIELD_NUMBER: builtins.int + DOUBLE_VAL_FIELD_NUMBER: builtins.int + FLOAT_VAL_FIELD_NUMBER: builtins.int + BOOL_VAL_FIELD_NUMBER: builtins.int + UNIX_TIMESTAMP_VAL_FIELD_NUMBER: builtins.int + BYTES_LIST_VAL_FIELD_NUMBER: builtins.int + STRING_LIST_VAL_FIELD_NUMBER: builtins.int + INT32_LIST_VAL_FIELD_NUMBER: builtins.int + INT64_LIST_VAL_FIELD_NUMBER: builtins.int + DOUBLE_LIST_VAL_FIELD_NUMBER: builtins.int + FLOAT_LIST_VAL_FIELD_NUMBER: builtins.int + BOOL_LIST_VAL_FIELD_NUMBER: builtins.int + UNIX_TIMESTAMP_LIST_VAL_FIELD_NUMBER: builtins.int + NULL_VAL_FIELD_NUMBER: builtins.int + bytes_val: builtins.bytes + string_val: builtins.str + int32_val: builtins.int + int64_val: builtins.int + double_val: builtins.float + float_val: builtins.float + bool_val: builtins.bool + unix_timestamp_val: builtins.int + @property + def bytes_list_val(self) -> global___BytesList: ... + @property + def string_list_val(self) -> global___StringList: ... + @property + def int32_list_val(self) -> global___Int32List: ... + @property + def int64_list_val(self) -> global___Int64List: ... + @property + def double_list_val(self) -> global___DoubleList: ... + @property + def float_list_val(self) -> global___FloatList: ... + @property + def bool_list_val(self) -> global___BoolList: ... + @property + def unix_timestamp_list_val(self) -> global___Int64List: ... + null_val: global___Null.ValueType + def __init__( + self, + *, + bytes_val: builtins.bytes = ..., + string_val: builtins.str = ..., + int32_val: builtins.int = ..., + int64_val: builtins.int = ..., + double_val: builtins.float = ..., + float_val: builtins.float = ..., + bool_val: builtins.bool = ..., + unix_timestamp_val: builtins.int = ..., + bytes_list_val: global___BytesList | None = ..., + string_list_val: global___StringList | None = ..., + int32_list_val: global___Int32List | None = ..., + int64_list_val: global___Int64List | None = ..., + double_list_val: global___DoubleList | None = ..., + float_list_val: global___FloatList | None = ..., + bool_list_val: global___BoolList | None = ..., + unix_timestamp_list_val: global___Int64List | None = ..., + null_val: global___Null.ValueType = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["bool_list_val", b"bool_list_val", "bool_val", b"bool_val", "bytes_list_val", b"bytes_list_val", "bytes_val", b"bytes_val", "double_list_val", b"double_list_val", "double_val", b"double_val", "float_list_val", b"float_list_val", "float_val", b"float_val", "int32_list_val", b"int32_list_val", "int32_val", b"int32_val", "int64_list_val", b"int64_list_val", "int64_val", b"int64_val", "null_val", b"null_val", "string_list_val", b"string_list_val", "string_val", b"string_val", "unix_timestamp_list_val", b"unix_timestamp_list_val", "unix_timestamp_val", b"unix_timestamp_val", "val", b"val"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["bool_list_val", b"bool_list_val", "bool_val", b"bool_val", "bytes_list_val", b"bytes_list_val", "bytes_val", b"bytes_val", "double_list_val", b"double_list_val", "double_val", b"double_val", "float_list_val", b"float_list_val", "float_val", b"float_val", "int32_list_val", b"int32_list_val", "int32_val", b"int32_val", "int64_list_val", b"int64_list_val", "int64_val", b"int64_val", "null_val", b"null_val", "string_list_val", b"string_list_val", "string_val", b"string_val", "unix_timestamp_list_val", b"unix_timestamp_list_val", "unix_timestamp_val", b"unix_timestamp_val", "val", b"val"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["val", b"val"]) -> typing_extensions.Literal["bytes_val", "string_val", "int32_val", "int64_val", "double_val", "float_val", "bool_val", "unix_timestamp_val", "bytes_list_val", "string_list_val", "int32_list_val", "int64_list_val", "double_list_val", "float_list_val", "bool_list_val", "unix_timestamp_list_val", "null_val"] | None: ... + +global___Value = Value + +class BytesList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.bytes] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___BytesList = BytesList + +class StringList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___StringList = StringList + +class Int32List(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___Int32List = Int32List + +class Int64List(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.int] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___Int64List = Int64List + +class DoubleList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.float] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... 
+ +global___DoubleList = DoubleList + +class FloatList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.float] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___FloatList = FloatList + +class BoolList(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[builtins.bool] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___BoolList = BoolList + +class RepeatedValue(google.protobuf.message.Message): + """This is to avoid an issue of being unable to specify `repeated value` in oneofs or maps + In JSON "val" field can be omitted + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VAL_FIELD_NUMBER: builtins.int + @property + def val(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Value]: ... + def __init__( + self, + *, + val: collections.abc.Iterable[global___Value] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["val", b"val"]) -> None: ... + +global___RepeatedValue = RepeatedValue diff --git a/sdk/python/feast/protos/feast/types/Value_pb2_grpc.py b/sdk/python/feast/protos/feast/types/Value_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/sdk/python/feast/protos/feast/types/Value_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
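Since Value is a large oneof, a short sketch of its semantics may be useful (editor's illustration): setting any member clears the previously set one, and RepeatedValue exists only because `repeated Value` cannot appear directly inside a map or oneof.

from feast.protos.feast.types.Value_pb2 import Int64List, RepeatedValue, Value

v = Value(string_val="hello")
assert v.WhichOneof("val") == "string_val"

# Assigning a different oneof member clears the previous one.
v.int64_list_val.CopyFrom(Int64List(val=[1, 2, 3]))
assert v.WhichOneof("val") == "int64_list_val"

# RepeatedValue simply wraps a list of Value messages.
rv = RepeatedValue(val=[Value(int64_val=1), Value(double_val=2.5)])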
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/sdk/python/feast/protos/feast/types/__init__.py b/sdk/python/feast/protos/feast/types/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/registry_server.py b/sdk/python/feast/registry_server.py index 4a96ba76a8..c2f4a688d3 100644 --- a/sdk/python/feast/registry_server.py +++ b/sdk/python/feast/registry_server.py @@ -1,54 +1,124 @@ from concurrent import futures -from datetime import datetime +from datetime import datetime, timezone +from typing import Optional, Union, cast import grpc from google.protobuf.empty_pb2 import Empty -from pytz import utc +from grpc_health.v1 import health, health_pb2, health_pb2_grpc +from grpc_reflection.v1alpha import reflection -from feast import FeatureStore +from feast import FeatureService, FeatureStore +from feast.base_feature_view import BaseFeatureView from feast.data_source import DataSource from feast.entity import Entity -from feast.feature_service import FeatureService +from feast.errors import FeatureViewNotFoundException +from feast.feast_object import FeastObject from feast.feature_view import FeatureView +from feast.grpc_error_interceptor import ErrorInterceptor from feast.infra.infra_object import Infra from feast.infra.registry.base_registry import BaseRegistry from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.action import AuthzedAction +from feast.permissions.permission import Permission +from feast.permissions.security_manager import ( + assert_permissions, + assert_permissions_to_update, + permitted_resources, +) +from feast.permissions.server.grpc import AuthInterceptor +from feast.permissions.server.utils import ( + AuthManagerType, + ServerType, + init_auth_manager, + init_security_manager, + str_to_auth_manager_type, +) +from feast.project import Project from feast.protos.feast.registry import RegistryServer_pb2, RegistryServer_pb2_grpc from feast.saved_dataset import SavedDataset, ValidationReference from feast.stream_feature_view import StreamFeatureView +def _build_any_feature_view_proto(feature_view: BaseFeatureView): + if isinstance(feature_view, StreamFeatureView): + arg_name = "stream_feature_view" + feature_view_proto = feature_view.to_proto() + elif isinstance(feature_view, FeatureView): + arg_name = "feature_view" + feature_view_proto = feature_view.to_proto() + elif isinstance(feature_view, OnDemandFeatureView): + arg_name = "on_demand_feature_view" + feature_view_proto = feature_view.to_proto() + + return RegistryServer_pb2.AnyFeatureView( + feature_view=feature_view_proto if arg_name == "feature_view" else None, + stream_feature_view=feature_view_proto + if arg_name == "stream_feature_view" + else None, + on_demand_feature_view=feature_view_proto + if arg_name == "on_demand_feature_view" + else None, + ) + + class RegistryServer(RegistryServer_pb2_grpc.RegistryServerServicer): def __init__(self, registry: BaseRegistry) -> None: super().__init__() self.proxied_registry = registry def ApplyEntity(self, request: RegistryServer_pb2.ApplyEntityRequest, context): + entity = cast( + Entity, + assert_permissions_to_update( + resource=Entity.from_proto(request.entity), + getter=self.proxied_registry.get_entity, + project=request.project, + ), + ) self.proxied_registry.apply_entity( - entity=Entity.from_proto(request.entity), + entity=entity, project=request.project, commit=request.commit, ) + return Empty() def GetEntity(self, request: 
RegistryServer_pb2.GetEntityRequest, context): - return self.proxied_registry.get_entity( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( + self.proxied_registry.get_entity( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + actions=[AuthzedAction.DESCRIBE], ).to_proto() def ListEntities(self, request: RegistryServer_pb2.ListEntitiesRequest, context): return RegistryServer_pb2.ListEntitiesResponse( entities=[ entity.to_proto() - for entity in self.proxied_registry.list_entities( - project=request.project, - allow_cache=request.allow_cache, - tags=dict(request.tags), + for entity in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_entities( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) def DeleteEntity(self, request: RegistryServer_pb2.DeleteEntityRequest, context): + assert_permissions( + resource=self.proxied_registry.get_entity( + name=request.name, project=request.project + ), + actions=AuthzedAction.DELETE, + ) + self.proxied_registry.delete_entity( name=request.name, project=request.project, commit=request.commit ) @@ -57,16 +127,30 @@ def DeleteEntity(self, request: RegistryServer_pb2.DeleteEntityRequest, context) def ApplyDataSource( self, request: RegistryServer_pb2.ApplyDataSourceRequest, context ): + data_source = cast( + DataSource, + assert_permissions_to_update( + resource=DataSource.from_proto(request.data_source), + getter=self.proxied_registry.get_data_source, + project=request.project, + ), + ) self.proxied_registry.apply_data_source( - data_source=DataSource.from_proto(request.data_source), + data_source=data_source, project=request.project, commit=request.commit, ) + return Empty() def GetDataSource(self, request: RegistryServer_pb2.GetDataSourceRequest, context): - return self.proxied_registry.get_data_source( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( + resource=self.proxied_registry.get_data_source( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + actions=AuthzedAction.DESCRIBE, ).to_proto() def ListDataSources( @@ -75,10 +159,16 @@ def ListDataSources( return RegistryServer_pb2.ListDataSourcesResponse( data_sources=[ data_source.to_proto() - for data_source in self.proxied_registry.list_data_sources( - project=request.project, - allow_cache=request.allow_cache, - tags=dict(request.tags), + for data_source in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_data_sources( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) @@ -86,6 +176,14 @@ def ListDataSources( def DeleteDataSource( self, request: RegistryServer_pb2.DeleteDataSourceRequest, context ): + assert_permissions( + resource=self.proxied_registry.get_data_source( + name=request.name, + project=request.project, + ), + actions=AuthzedAction.DELETE, + ) + self.proxied_registry.delete_data_source( name=request.name, project=request.project, commit=request.commit ) @@ -94,10 +192,36 @@ def DeleteDataSource( def GetFeatureView( self, request: RegistryServer_pb2.GetFeatureViewRequest, context ): - return self.proxied_registry.get_feature_view( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( + 
self.proxied_registry.get_feature_view( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + actions=[AuthzedAction.DESCRIBE], ).to_proto() + def GetAnyFeatureView( + self, request: RegistryServer_pb2.GetAnyFeatureViewRequest, context + ): + feature_view = assert_permissions( + cast( + FeastObject, + self.proxied_registry.get_any_feature_view( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + ), + actions=[AuthzedAction.DESCRIBE], + ) + + return RegistryServer_pb2.GetAnyFeatureViewResponse( + any_feature_view=_build_any_feature_view_proto( + cast(BaseFeatureView, feature_view) + ) + ) + def ApplyFeatureView( self, request: RegistryServer_pb2.ApplyFeatureViewRequest, context ): @@ -111,9 +235,19 @@ def ApplyFeatureView( elif feature_view_type == "stream_feature_view": feature_view = StreamFeatureView.from_proto(request.stream_feature_view) - self.proxied_registry.apply_feature_view( - feature_view=feature_view, project=request.project, commit=request.commit + assert_permissions_to_update( + resource=feature_view, + # Will replace with the new get_any_feature_view method later + getter=self.proxied_registry.get_feature_view, + project=request.project, + ) + + self.proxied_registry.apply_feature_view( + feature_view=feature_view, + project=request.project, + commit=request.commit, ) + return Empty() def ListFeatureViews( @@ -122,10 +258,36 @@ def ListFeatureViews( return RegistryServer_pb2.ListFeatureViewsResponse( feature_views=[ feature_view.to_proto() - for feature_view in self.proxied_registry.list_feature_views( - project=request.project, - allow_cache=request.allow_cache, - tags=dict(request.tags), + for feature_view in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_feature_views( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, + ) + ] + ) + + def ListAllFeatureViews( + self, request: RegistryServer_pb2.ListAllFeatureViewsRequest, context + ): + return RegistryServer_pb2.ListAllFeatureViewsResponse( + feature_views=[ + _build_any_feature_view_proto(cast(BaseFeatureView, feature_view)) + for feature_view in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_all_feature_views( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) @@ -133,6 +295,21 @@ def ListFeatureViews( def DeleteFeatureView( self, request: RegistryServer_pb2.DeleteFeatureViewRequest, context ): + feature_view: Union[StreamFeatureView, FeatureView] + + try: + feature_view = self.proxied_registry.get_stream_feature_view( + name=request.name, project=request.project, allow_cache=False + ) + except FeatureViewNotFoundException: + feature_view = self.proxied_registry.get_feature_view( + name=request.name, project=request.project, allow_cache=False + ) + + assert_permissions( + resource=feature_view, + actions=[AuthzedAction.DELETE], + ) self.proxied_registry.delete_feature_view( name=request.name, project=request.project, commit=request.commit ) @@ -141,8 +318,13 @@ def DeleteFeatureView( def GetStreamFeatureView( self, request: RegistryServer_pb2.GetStreamFeatureViewRequest, context ): - return self.proxied_registry.get_stream_feature_view( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( +
resource=self.proxied_registry.get_stream_feature_view( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + actions=[AuthzedAction.DESCRIBE], ).to_proto() def ListStreamFeatureViews( @@ -151,10 +333,16 @@ def ListStreamFeatureViews( return RegistryServer_pb2.ListStreamFeatureViewsResponse( stream_feature_views=[ stream_feature_view.to_proto() - for stream_feature_view in self.proxied_registry.list_stream_feature_views( - project=request.project, - allow_cache=request.allow_cache, - tags=dict(request.tags), + for stream_feature_view in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_stream_feature_views( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) @@ -162,8 +350,13 @@ def ListStreamFeatureViews( def GetOnDemandFeatureView( self, request: RegistryServer_pb2.GetOnDemandFeatureViewRequest, context ): - return self.proxied_registry.get_on_demand_feature_view( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( + resource=self.proxied_registry.get_on_demand_feature_view( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + actions=[AuthzedAction.DESCRIBE], ).to_proto() def ListOnDemandFeatureViews( @@ -172,10 +365,16 @@ def ListOnDemandFeatureViews( return RegistryServer_pb2.ListOnDemandFeatureViewsResponse( on_demand_feature_views=[ on_demand_feature_view.to_proto() - for on_demand_feature_view in self.proxied_registry.list_on_demand_feature_views( - project=request.project, - allow_cache=request.allow_cache, - tags=dict(request.tags), + for on_demand_feature_view in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_on_demand_feature_views( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) @@ -183,18 +382,32 @@ def ListOnDemandFeatureViews( def ApplyFeatureService( self, request: RegistryServer_pb2.ApplyFeatureServiceRequest, context ): + feature_service = cast( + FeatureService, + assert_permissions_to_update( + resource=FeatureService.from_proto(request.feature_service), + getter=self.proxied_registry.get_feature_service, + project=request.project, + ), + ) self.proxied_registry.apply_feature_service( - feature_service=FeatureService.from_proto(request.feature_service), + feature_service=feature_service, project=request.project, commit=request.commit, ) + return Empty() def GetFeatureService( self, request: RegistryServer_pb2.GetFeatureServiceRequest, context ): - return self.proxied_registry.get_feature_service( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( + resource=self.proxied_registry.get_feature_service( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + actions=[AuthzedAction.DESCRIBE], ).to_proto() def ListFeatureServices( @@ -203,10 +416,16 @@ def ListFeatureServices( return RegistryServer_pb2.ListFeatureServicesResponse( feature_services=[ feature_service.to_proto() - for feature_service in self.proxied_registry.list_feature_services( - project=request.project, - allow_cache=request.allow_cache, - tags=dict(request.tags), + for feature_service in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_feature_services( + project=request.project, + 
allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) @@ -214,6 +433,13 @@ def ListFeatureServices( def DeleteFeatureService( self, request: RegistryServer_pb2.DeleteFeatureServiceRequest, context ): + assert_permissions( + resource=self.proxied_registry.get_feature_service( + name=request.name, project=request.project + ), + actions=[AuthzedAction.DELETE], + ) + + self.proxied_registry.delete_feature_service( name=request.name, project=request.project, commit=request.commit ) @@ -222,18 +450,32 @@ def ApplySavedDataset( self, request: RegistryServer_pb2.ApplySavedDatasetRequest, context ): + saved_dataset = cast( + SavedDataset, + assert_permissions_to_update( + resource=SavedDataset.from_proto(request.saved_dataset), + getter=self.proxied_registry.get_saved_dataset, + project=request.project, + ), + ) self.proxied_registry.apply_saved_dataset( - saved_dataset=SavedDataset.from_proto(request.saved_dataset), + saved_dataset=saved_dataset, project=request.project, commit=request.commit, ) + return Empty() def GetSavedDataset( self, request: RegistryServer_pb2.GetSavedDatasetRequest, context ): - return self.proxied_registry.get_saved_dataset( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( + self.proxied_registry.get_saved_dataset( + name=request.name, + project=request.project, + allow_cache=request.allow_cache, + ), + actions=[AuthzedAction.DESCRIBE], ).to_proto() def ListSavedDatasets( @@ -242,8 +484,16 @@ def ListSavedDatasets( return RegistryServer_pb2.ListSavedDatasetsResponse( saved_datasets=[ saved_dataset.to_proto() - for saved_dataset in self.proxied_registry.list_saved_datasets( - project=request.project, allow_cache=request.allow_cache + for saved_dataset in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_saved_datasets( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) @@ -251,6 +501,13 @@ def DeleteSavedDataset( self, request: RegistryServer_pb2.DeleteSavedDatasetRequest, context ): + assert_permissions( + resource=self.proxied_registry.get_saved_dataset( + name=request.name, project=request.project + ), + actions=[AuthzedAction.DELETE], + ) + self.proxied_registry.delete_saved_dataset( name=request.name, project=request.project, commit=request.commit ) @@ -259,20 +516,32 @@ def ApplyValidationReference( self, request: RegistryServer_pb2.ApplyValidationReferenceRequest, context ): - self.proxied_registry.apply_validation_reference( - validation_reference=ValidationReference.from_proto( - request.validation_reference + validation_reference = cast( + ValidationReference, + assert_permissions_to_update( + resource=ValidationReference.from_proto(request.validation_reference), + getter=self.proxied_registry.get_validation_reference, + project=request.project, ), + ) + self.proxied_registry.apply_validation_reference( + validation_reference=validation_reference, project=request.project, commit=request.commit, ) + return Empty() def GetValidationReference( self, request: RegistryServer_pb2.GetValidationReferenceRequest, context ): - return self.proxied_registry.get_validation_reference( - name=request.name, project=request.project, allow_cache=request.allow_cache + return assert_permissions( + self.proxied_registry.get_validation_reference( + name=request.name, +
project=request.project, + allow_cache=request.allow_cache, + ), + actions=[AuthzedAction.DESCRIBE], ).to_proto() def ListValidationReferences( @@ -281,8 +550,16 @@ return RegistryServer_pb2.ListValidationReferencesResponse( validation_references=[ validation_reference.to_proto() - for validation_reference in self.proxied_registry.list_validation_references( - project=request.project, allow_cache=request.allow_cache + for validation_reference in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_validation_references( + project=request.project, + allow_cache=request.allow_cache, + tags=dict(request.tags), + ), + ), + actions=AuthzedAction.DESCRIBE, ) ] ) @@ -290,6 +567,12 @@ def DeleteValidationReference( self, request: RegistryServer_pb2.DeleteValidationReferenceRequest, context ): + assert_permissions( + resource=self.proxied_registry.get_validation_reference( + name=request.name, project=request.project + ), + actions=[AuthzedAction.DELETE], + ) self.proxied_registry.delete_validation_reference( name=request.name, project=request.project, commit=request.commit ) @@ -310,14 +593,20 @@ def ListProjectMetadata( def ApplyMaterialization( self, request: RegistryServer_pb2.ApplyMaterializationRequest, context ): + assert_permissions( + resource=FeatureView.from_proto(request.feature_view), + actions=[AuthzedAction.WRITE_ONLINE], + ) + self.proxied_registry.apply_materialization( feature_view=FeatureView.from_proto(request.feature_view), project=request.project, start_date=datetime.fromtimestamp( - request.start_date.seconds + request.start_date.nanos / 1e9, tz=utc + request.start_date.seconds + request.start_date.nanos / 1e9, + tz=timezone.utc, ), end_date=datetime.fromtimestamp( - request.end_date.seconds + request.end_date.nanos / 1e9, tz=utc + request.end_date.seconds + request.end_date.nanos / 1e9, tz=timezone.utc ), commit=request.commit, ) @@ -336,6 +625,123 @@ def GetInfra(self, request: RegistryServer_pb2.GetInfraRequest, context): project=request.project, allow_cache=request.allow_cache ).to_proto() + def ApplyPermission( + self, request: RegistryServer_pb2.ApplyPermissionRequest, context + ): + permission = cast( + Permission, + assert_permissions_to_update( + resource=Permission.from_proto(request.permission), + getter=self.proxied_registry.get_permission, + project=request.project, + ), + ) + self.proxied_registry.apply_permission( + permission=permission, + project=request.project, + commit=request.commit, + ) + return Empty() + + def GetPermission(self, request: RegistryServer_pb2.GetPermissionRequest, context): + permission = self.proxied_registry.get_permission( + name=request.name, project=request.project, allow_cache=request.allow_cache + ) + assert_permissions( + resource=permission, + actions=[AuthzedAction.DESCRIBE], + ) + permission_proto = permission.to_proto() + permission_proto.spec.project = request.project + + return permission_proto + + def ListPermissions( + self, request: RegistryServer_pb2.ListPermissionsRequest, context + ): + return RegistryServer_pb2.ListPermissionsResponse( + permissions=[ + permission.to_proto() + for permission in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_permissions( + project=request.project, allow_cache=request.allow_cache + ), + ), + actions=AuthzedAction.DESCRIBE, + ) + ] + ) + + def DeletePermission( + self, request: RegistryServer_pb2.DeletePermissionRequest, context + ): + assert_permissions( +
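+            # assert_permissions raises unless the caller is authorized for
+            # the requested action on this resource; on success it returns the
+            # resource itself, which is why the Get* handlers above call
+            # .to_proto() directly on its result.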
resource=self.proxied_registry.get_permission( + name=request.name, + project=request.project, + ), + actions=[AuthzedAction.DELETE], + ) + + self.proxied_registry.delete_permission( + name=request.name, project=request.project, commit=request.commit + ) + return Empty() + + def ApplyProject(self, request: RegistryServer_pb2.ApplyProjectRequest, context): + project = cast( + Project, + assert_permissions_to_update( + resource=Project.from_proto(request.project), + getter=self.proxied_registry.get_project, + project=Project.from_proto(request.project).name, + ), + ) + self.proxied_registry.apply_project( + project=project, + commit=request.commit, + ) + return Empty() + + def GetProject(self, request: RegistryServer_pb2.GetProjectRequest, context): + project = self.proxied_registry.get_project( + name=request.name, allow_cache=request.allow_cache + ) + assert_permissions( + resource=project, + actions=[AuthzedAction.DESCRIBE], + ) + return project.to_proto() + + def ListProjects(self, request: RegistryServer_pb2.ListProjectsRequest, context): + return RegistryServer_pb2.ListProjectsResponse( + projects=[ + project.to_proto() + for project in permitted_resources( + resources=cast( + list[FeastObject], + self.proxied_registry.list_projects( + allow_cache=request.allow_cache + ), + ), + actions=AuthzedAction.DESCRIBE, + ) + ] + ) + + def DeleteProject(self, request: RegistryServer_pb2.DeleteProjectRequest, context): + assert_permissions( + resource=self.proxied_registry.get_project( + name=request.name, + ), + actions=[AuthzedAction.DELETE], + ) + + self.proxied_registry.delete_project(name=request.name, commit=request.commit) + return Empty() + def Commit(self, request, context): self.proxied_registry.commit() return Empty() @@ -348,11 +753,55 @@ def Proto(self, request, context): return self.proxied_registry.proto() -def start_server(store: FeatureStore, port: int): - server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) +def start_server(store: FeatureStore, port: int, wait_for_termination: bool = True): + auth_manager_type = str_to_auth_manager_type(store.config.auth_config.type) + init_security_manager(auth_type=auth_manager_type, fs=store) + init_auth_manager( + auth_type=auth_manager_type, + server_type=ServerType.GRPC, + auth_config=store.config.auth_config, + ) + + server = grpc.server( + futures.ThreadPoolExecutor(max_workers=10), + interceptors=_grpc_interceptors(auth_manager_type), + ) RegistryServer_pb2_grpc.add_RegistryServerServicer_to_server( RegistryServer(store.registry), server ) + # Add health check service to server + health_servicer = health.HealthServicer() + health_pb2_grpc.add_HealthServicer_to_server(health_servicer, server) + health_servicer.set("", health_pb2.HealthCheckResponse.SERVING) + + service_names_available_for_reflection = ( + RegistryServer_pb2.DESCRIPTOR.services_by_name["RegistryServer"].full_name, + health_pb2.DESCRIPTOR.services_by_name["Health"].full_name, + reflection.SERVICE_NAME, + ) + reflection.enable_server_reflection(service_names_available_for_reflection, server) + server.add_insecure_port(f"[::]:{port}") server.start() - server.wait_for_termination() + if wait_for_termination: + server.wait_for_termination() + else: + return server + + +def _grpc_interceptors( + auth_type: AuthManagerType, +) -> Optional[list[grpc.ServerInterceptor]]: + """ + A list of the interceptors for the registry server. + + Args: + auth_type: The type of authorization manager, from the feature store configuration. 
+ + Returns: + list[grpc.ServerInterceptor]: The interceptors for the server. If the authorization type is `NONE`, only the error interceptor is included; otherwise the auth interceptor is added ahead of it. + """ + if auth_type == AuthManagerType.NONE: + return [ErrorInterceptor()] + + return [AuthInterceptor(), ErrorInterceptor()] diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index fc2792e323..bf0bde6fcb 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -9,6 +9,7 @@ BaseModel, ConfigDict, Field, + StrictBool, StrictInt, StrictStr, ValidationError, @@ -19,12 +20,14 @@ from feast.errors import ( FeastFeatureServerTypeInvalidError, + FeastInvalidAuthConfigClass, FeastOfflineStoreInvalidName, FeastOnlineStoreInvalidName, FeastRegistryNotSetError, FeastRegistryTypeInvalidError, ) from feast.importer import import_class +from feast.permissions.auth.auth_type import AuthType warnings.simplefilter("once", RuntimeWarning) @@ -44,7 +47,7 @@ "local": "feast.infra.materialization.local_engine.LocalMaterializationEngine", "snowflake.engine": "feast.infra.materialization.snowflake_engine.SnowflakeMaterializationEngine", "lambda": "feast.infra.materialization.aws_lambda.lambda_engine.LambdaMaterializationEngine", - "k8s": "feast.infra.materialization.kubernetes.kubernetes_materialization_engine.KubernetesMaterializationEngine", + "k8s": "feast.infra.materialization.kubernetes.k8s_materialization_engine.KubernetesMaterializationEngine", "spark.engine": "feast.infra.materialization.contrib.spark.spark_materialization_engine.SparkMaterializationEngine", } @@ -59,7 +62,6 @@ "hbase": "feast.infra.online_stores.contrib.hbase_online_store.hbase.HbaseOnlineStore", "cassandra": "feast.infra.online_stores.contrib.cassandra_online_store.cassandra_online_store.CassandraOnlineStore", "mysql": "feast.infra.online_stores.contrib.mysql_online_store.mysql.MySQLOnlineStore", - "rockset": "feast.infra.online_stores.contrib.rockset_online_store.rockset.RocksetOnlineStore", "hazelcast": "feast.infra.online_stores.contrib.hazelcast_online_store.hazelcast_online_store.HazelcastOnlineStore", "ikv": "feast.infra.online_stores.contrib.ikv_online_store.ikv.IKVOnlineStore", "elasticsearch": "feast.infra.online_stores.contrib.elasticsearch.ElasticSearchOnlineStore", @@ -86,6 +88,15 @@ "local": "feast.infra.feature_servers.local_process.config.LocalFeatureServerConfig", } +ALLOWED_AUTH_TYPES = ["no_auth", "kubernetes", "oidc"] + +AUTH_CONFIGS_CLASS_FOR_TYPE = { + "no_auth": "feast.permissions.auth_model.NoAuthConfig", + "kubernetes": "feast.permissions.auth_model.KubernetesAuthConfig", + "oidc": "feast.permissions.auth_model.OidcAuthConfig", + "oidc_client": "feast.permissions.auth_model.OidcClientAuthConfig", +} + class FeastBaseModel(BaseModel): """Feast Pydantic Configuration Class""" @@ -122,11 +133,10 @@ class RegistryConfig(FeastBaseModel): s3_additional_kwargs: Optional[Dict[str, str]] = None """ Dict[str, str]: Extra arguments to pass to boto3 when writing the registry file to S3. """ - sqlalchemy_config_kwargs: Dict[str, Any] = {} - """ Dict[str, Any]: Extra arguments to pass to SQLAlchemy.create_engine. """ - - cache_mode: StrictStr = "sync" - """ str: Cache mode type, Possible options are sync and thread(asynchronous caching using threading library)""" + purge_feast_metadata: StrictBool = False + """ bool: Stops using the feast_metadata table and deletes the data from it. + Once this is set to True, it cannot be cleanly reverted back to False. Reverting back to False will + only reset the current project, not all projects""" @field_validator("path") def validate_path(cls, path: str, values: ValidationInfo) -> str: @@ -167,6 +177,9 @@ class RepoConfig(FeastBaseModel): online_config: Any = Field(None, alias="online_store") """ OnlineStoreConfig: Online store configuration (optional depending on provider) """ + auth: Any = Field(None, alias="auth") + """ auth: Optional authentication configuration, for services that need to authenticate against an IDP (optional depending on provider) """ + offline_config: Any = Field(None, alias="offline_store") """ OfflineStoreConfig: Offline store configuration (optional depending on provider) """ @@ -211,6 +224,13 @@ def __init__(self, **data: Any): self._online_store = None self.online_config = data.get("online_store", "sqlite") + self._auth = None + if "auth" not in data: + self.auth = dict() + self.auth["type"] = AuthType.NONE.value + else: + self.auth = data.get("auth") + self._batch_engine = None if "batch_engine" in data: self.batch_engine_config = data["batch_engine"] @@ -270,6 +290,26 @@ def offline_store(self): self._offline_store = self.offline_config return self._offline_store + @property + def auth_config(self): + if not self._auth: + if isinstance(self.auth, Dict): + is_oidc_client = ( + self.auth.get("type") == AuthType.OIDC.value + and "username" in self.auth + and "password" in self.auth + and "client_secret" in self.auth + ) + self._auth = get_auth_config_from_type( + "oidc_client" if is_oidc_client else self.auth.get("type") + )(**self.auth) + elif isinstance(self.auth, str): + self._auth = get_auth_config_from_type(self.auth)() + elif self.auth: + self._auth = self.auth + + return self._auth + @property def online_store(self): if not self._online_store: @@ -300,6 +340,29 @@ def batch_engine(self): return self._batch_engine + @model_validator(mode="before") + def _validate_auth_config(cls, values: Any) -> Any: + from feast.permissions.auth_model import AuthConfig + + if "auth" in values: + if isinstance(values["auth"], Dict): + if values["auth"].get("type") is None: + raise ValueError( + f"auth configuration is missing authentication type. Possible values={ALLOWED_AUTH_TYPES}" + ) + elif values["auth"]["type"] not in ALLOWED_AUTH_TYPES: + raise ValueError( + f'auth configuration has invalid authentication type={values["auth"]["type"]}. Possible ' + f'values={ALLOWED_AUTH_TYPES}' + ) + elif isinstance(values["auth"], AuthConfig): + if values["auth"].type not in ALLOWED_AUTH_TYPES: + raise ValueError( + f'auth configuration has invalid authentication type={values["auth"].type}. Possible ' + f'values={ALLOWED_AUTH_TYPES}' + ) + return values + +
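+    # A minimal sketch (editor's illustration, not part of this change) of how
+    # the auth section resolves through auth_config above. With no "auth" key
+    # the type defaults to no_auth; an oidc dict carrying username, password
+    # and client_secret resolves to the oidc_client config class instead:
+    #
+    #     from feast.repo_config import RepoConfig
+    #
+    #     config = RepoConfig(
+    #         project="demo",
+    #         provider="local",
+    #         registry="data/registry.db",
+    #         entity_key_serialization_version=2,
+    #     )
+    #     assert config.auth_config.type == "no_auth"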
@model_validator(mode="before") def _validate_online_store_config(cls, values: Any) -> Any: # This method will validate whether the online store configurations are set correctly. This explicit validation @@ -480,6 +543,16 @@ def get_online_config_from_type(online_store_type: str): return import_class(module_name, config_class_name, config_class_name) +def get_auth_config_from_type(auth_config_type: str): + if auth_config_type in AUTH_CONFIGS_CLASS_FOR_TYPE: + auth_config_type = AUTH_CONFIGS_CLASS_FOR_TYPE[auth_config_type] + elif not auth_config_type.endswith("AuthConfig"): + raise FeastInvalidAuthConfigClass(auth_config_type) + module_name, config_class_name = auth_config_type.rsplit(".", 1) + + return import_class(module_name, config_class_name, config_class_name) + + def get_offline_config_from_type(offline_store_type: str): if offline_store_type in OFFLINE_STORE_CLASS_FOR_TYPE: offline_store_type = OFFLINE_STORE_CLASS_FOR_TYPE[offline_store_type] diff --git a/sdk/python/feast/repo_contents.py b/sdk/python/feast/repo_contents.py index 33b99f29b2..d65f6ac7bb 100644 --- a/sdk/python/feast/repo_contents.py +++ b/sdk/python/feast/repo_contents.py @@ -18,6 +18,8 @@ from feast.feature_service import FeatureService from feast.feature_view import FeatureView from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.permission import Permission +from feast.project import Project from feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto from feast.stream_feature_view import StreamFeatureView @@ -27,15 +29,18 @@ class RepoContents(NamedTuple): Represents the objects in a Feast feature repo. """ + projects: List[Project] data_sources: List[DataSource] feature_views: List[FeatureView] on_demand_feature_views: List[OnDemandFeatureView] stream_feature_views: List[StreamFeatureView] entities: List[Entity] feature_services: List[FeatureService] + permissions: List[Permission] def to_registry_proto(self) -> RegistryProto: registry_proto = RegistryProto() + registry_proto.projects.extend([e.to_proto() for e in self.projects]) registry_proto.data_sources.extend([e.to_proto() for e in self.data_sources]) registry_proto.entities.extend([e.to_proto() for e in self.entities]) registry_proto.feature_views.extend( @@ -50,4 +55,6 @@ def to_registry_proto(self) -> RegistryProto: registry_proto.stream_feature_views.extend( [fv.to_proto() for fv in self.stream_feature_views] ) + registry_proto.permissions.extend([p.to_proto() for p in self.permissions]) + return registry_proto diff --git a/sdk/python/feast/repo_operations.py b/sdk/python/feast/repo_operations.py index a3100ca9d7..6629768375 100644 --- a/sdk/python/feast/repo_operations.py +++ b/sdk/python/feast/repo_operations.py @@ -1,6 +1,7 @@ import base64 import importlib import json +import logging import os import random import re @@ -24,13 +25,18 @@ from feast.feature_store import FeatureStore from feast.feature_view import DUMMY_ENTITY, FeatureView from feast.file_utils import replace_str_in_file +from feast.infra.registry.base_registry import BaseRegistry from feast.infra.registry.registry import FEAST_OBJECT_TYPES, FeastObjectType, Registry from feast.names import adjectives, animals from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.permission import Permission +from feast.project import Project from feast.repo_config import RepoConfig from feast.repo_contents import RepoContents from feast.stream_feature_view import StreamFeatureView +logger = logging.getLogger(__name__) + def py_path_to_module(path: Path) -> str: return ( @@ -114,12 +120,14 @@ def parse_repo(repo_root: Path) -> RepoContents: not
result in duplicates, but defining two equal objects will. """ res = RepoContents( + projects=[], data_sources=[], entities=[], feature_views=[], feature_services=[], on_demand_feature_views=[], stream_feature_views=[], + permissions=[], ) for repo_file in get_repo_files(repo_root): @@ -201,6 +209,12 @@ def parse_repo(repo_root: Path) -> RepoContents: (obj is odfv) for odfv in res.on_demand_feature_views ): res.on_demand_feature_views.append(obj) + elif isinstance(obj, Permission) and not any( + (obj is p) for p in res.permissions + ): + res.permissions.append(obj) + elif isinstance(obj, Project) and not any((obj is p) for p in res.projects): + res.projects.append(obj) res.entities.append(DUMMY_ENTITY) return res @@ -208,33 +222,57 @@ def parse_repo(repo_root: Path) -> RepoContents: def plan(repo_config: RepoConfig, repo_path: Path, skip_source_validation: bool): os.chdir(repo_path) - project, registry, repo, store = _prepare_registry_and_repo(repo_config, repo_path) - - if not skip_source_validation: - provider = store._get_provider() - data_sources = [t.batch_source for t in repo.feature_views] - # Make sure the data source used by this feature view is supported by Feast - for data_source in data_sources: - provider.validate_data_source(store.config, data_source) + repo = _get_repo_contents(repo_path, repo_config.project) + for project in repo.projects: + repo_config.project = project.name + store, registry = _get_store_and_registry(repo_config) + # TODO: When we support multiple projects in a single repo, we should filter repo contents by project + if not skip_source_validation: + provider = store._get_provider() + data_sources = [t.batch_source for t in repo.feature_views] + # Make sure the data source used by this feature view is supported by Feast + for data_source in data_sources: + provider.validate_data_source(store.config, data_source) + + registry_diff, infra_diff, _ = store.plan(repo) + click.echo(registry_diff.to_string()) + click.echo(infra_diff.to_string()) - registry_diff, infra_diff, _ = store.plan(repo) - click.echo(registry_diff.to_string()) - click.echo(infra_diff.to_string()) +def _get_repo_contents(repo_path, project_name: Optional[str] = None): + sys.dont_write_bytecode = True + repo = parse_repo(repo_path) -def _prepare_registry_and_repo(repo_config, repo_path): - store = FeatureStore(config=repo_config) - project = store.project - if not is_valid_name(project): + if len(repo.projects) < 1: + if project_name: + print( + f"No project found in the repository. Using project name {project_name} defined in feature_store.yaml" + ) + repo.projects.append(Project(name=project_name)) + else: + print( + "No project found in the repository. Either define a Project object in the repository or set a project name in feature_store.yaml" + ) + sys.exit(1) + elif len(repo.projects) == 1: + if repo.projects[0].name != project_name: + print( + "The Project object's name must match the project name defined in feature_store.yaml" + ) + sys.exit(1) + else: print( - f"{project} is not valid. Project name should only have " - f"alphanumerical values and underscores but not start with an underscore." + "Multiple projects found in the repository; multiple projects are not currently supported" ) sys.exit(1) + + return repo + + +def _get_store_and_registry(repo_config): + store = FeatureStore(config=repo_config) registry = store.registry - sys.dont_write_bytecode = True - repo = parse_repo(repo_path) - return project, registry, repo, store + return store, registry def extract_objects_for_apply_delete(project, registry, repo): @@ -283,8 +321,8 @@ def extract_objects_for_apply_delete(project, registry, repo): def apply_total_with_repo_instance( store: FeatureStore, - project: str, - registry: Registry, + project_name: str, + registry: BaseRegistry, repo: RepoContents, skip_source_validation: bool, ): @@ -301,7 +339,7 @@ def apply_total_with_repo_instance( all_to_delete, views_to_keep, views_to_delete, - ) = extract_objects_for_apply_delete(project, registry, repo) + ) = extract_objects_for_apply_delete(project_name, registry, repo) if store._should_use_plan(): registry_diff, infra_diff, new_infra = store.plan(repo) @@ -351,15 +389,26 @@ def create_feature_store( def apply_total(repo_config: RepoConfig, repo_path: Path, skip_source_validation: bool): os.chdir(repo_path) - project, registry, repo, store = _prepare_registry_and_repo(repo_config, repo_path) - apply_total_with_repo_instance( - store, project, registry, repo, skip_source_validation - ) + repo = _get_repo_contents(repo_path, repo_config.project) + for project in repo.projects: + repo_config.project = project.name + store, registry = _get_store_and_registry(repo_config) + if not is_valid_name(project.name): + print( + f"{project.name} is not valid. Project name should only have " + f"alphanumeric characters and underscores, and must not start with an underscore." + ) + sys.exit(1) + # TODO: When we support multiple projects in a single repo, we should filter repo contents by project. Currently there is no way to associate Feast objects with a project. + print(f"Applying changes for project {project.name}") + apply_total_with_repo_instance( + store, project.name, registry, repo, skip_source_validation + ) def teardown(repo_config: RepoConfig, repo_path: Optional[str]): # Cannot pass in both repo_path and repo_config to FeatureStore.
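+    # Passing the caller's repo_config through (instead of None) makes
+    # teardown honor the resolved configuration rather than re-reading it
+    # from repo_path.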
- feature_store = FeatureStore(repo_path=repo_path, config=None) + feature_store = FeatureStore(repo_path=repo_path, config=repo_config) feature_store.teardown() @@ -367,7 +416,12 @@ def registry_dump(repo_config: RepoConfig, repo_path: Path) -> str: """For debugging only: output contents of the metadata registry""" registry_config = repo_config.registry project = repo_config.project - registry = Registry(project, registry_config=registry_config, repo_path=repo_path) + registry = Registry( + project, + registry_config=registry_config, + repo_path=repo_path, + auth_config=repo_config.auth_config, + ) registry_dict = registry.to_dict(project=project) return json.dumps(registry_dict, indent=2, sort_keys=True) diff --git a/sdk/python/feast/rest_error_handler.py b/sdk/python/feast/rest_error_handler.py new file mode 100644 index 0000000000..fc802866f9 --- /dev/null +++ b/sdk/python/feast/rest_error_handler.py @@ -0,0 +1,57 @@ +import logging +from functools import wraps + +import requests + +from feast import RepoConfig +from feast.errors import FeastError +from feast.permissions.client.http_auth_requests_wrapper import ( + get_http_auth_requests_session, +) + +logger = logging.getLogger(__name__) + + +def rest_error_handling_decorator(func): + @wraps(func) + def wrapper(config: RepoConfig, *args, **kwargs): + assert isinstance(config, RepoConfig) + + # Get a Session object + with get_http_auth_requests_session(config.auth_config) as session: + # Define a wrapper for session methods + def method_wrapper(method_name): + original_method = getattr(session, method_name) + + @wraps(original_method) + def wrapped_method(*args, **kwargs): + logger.debug( + f"Calling {method_name} with args: {args}, kwargs: {kwargs}" + ) + response = original_method(*args, **kwargs) + logger.debug( + f"{method_name} response status code: {response.status_code}" + ) + + try: + response.raise_for_status() + except requests.RequestException: + logger.debug(f"response.json() = {response.json()}") + mapped_error = FeastError.from_error_detail(response.json()) + logger.debug(f"mapped_error = {str(mapped_error)}") + if mapped_error is not None: + raise mapped_error + return response + + return wrapped_method + + # Enhance session methods + session.get = method_wrapper("get") # type: ignore[method-assign] + session.post = method_wrapper("post") # type: ignore[method-assign] + session.put = method_wrapper("put") # type: ignore[method-assign] + session.delete = method_wrapper("delete") # type: ignore[method-assign] + + # Pass the enhanced session object to the decorated function + return func(session, config, *args, **kwargs) + + return wrapper diff --git a/sdk/python/feast/templates/aws/feature_repo/test_workflow.py b/sdk/python/feast/templates/aws/feature_repo/test_workflow.py index 59ac1f0ee7..092399e03c 100644 --- a/sdk/python/feast/templates/aws/feature_repo/test_workflow.py +++ b/sdk/python/feast/templates/aws/feature_repo/test_workflow.py @@ -1,9 +1,8 @@ import random import subprocess -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import pandas as pd -from pytz import utc from feast import FeatureStore from feast.data_source import PushMode @@ -71,9 +70,11 @@ def run_demo(): def fetch_historical_features_entity_sql(store: FeatureStore, for_batch_scoring): end_date = ( - datetime.now().replace(microsecond=0, second=0, minute=0).astimezone(tz=utc) + datetime.now() + .replace(microsecond=0, second=0, minute=0) + .astimezone(tz=timezone.utc) ) - start_date = (end_date - 
timedelta(days=60)).astimezone(tz=utc) + start_date = (end_date - timedelta(days=60)).astimezone(tz=timezone.utc) # For batch scoring, we want the latest timestamps if for_batch_scoring: print( diff --git a/sdk/python/feast/templates/gcp/README.md b/sdk/python/feast/templates/gcp/README.md index 7929dc2bdf..bc9e51769c 100644 --- a/sdk/python/feast/templates/gcp/README.md +++ b/sdk/python/feast/templates/gcp/README.md @@ -11,7 +11,7 @@ You can run the overall workflow with `python test_workflow.py`. ## To move from this into a more production ready workflow: 1. `feature_store.yaml` points to a local file as a registry. You'll want to setup a remote file (e.g. in S3/GCS) or a SQL registry. See [registry docs](https://docs.feast.dev/getting-started/concepts/registry) for more details. -2. This example uses an already setup BigQuery Feast data warehouse as the [offline store](https://docs.feast.dev/getting-started/architecture-and-components/offline-store) +2. This example uses an already setup BigQuery Feast data warehouse as the [offline store](https://docs.feast.dev/getting-started/components/offline-store) to generate training data. You'll need to connect your own BigQuery instance to make this work. 3. Setup CI/CD + dev vs staging vs prod environments to automatically update the registry as you change Feast feature definitions. See [docs](https://docs.feast.dev/how-to-guides/running-feast-in-production#1.-automatically-deploying-changes-to-your-feature-definitions). 4. (optional) Regularly scheduled materialization to power low latency feature retrieval (e.g. via Airflow). See [Batch data ingestion](https://docs.feast.dev/getting-started/concepts/data-ingestion#batch-data-ingestion) diff --git a/sdk/python/feast/templates/local/README.md b/sdk/python/feast/templates/local/README.md index daf3a686fb..1e617cc442 100644 --- a/sdk/python/feast/templates/local/README.md +++ b/sdk/python/feast/templates/local/README.md @@ -18,7 +18,7 @@ You can run the overall workflow with `python test_workflow.py`. - You can see your options if you run `feast init --help`. 2. `feature_store.yaml` points to a local file as a registry. You'll want to setup a remote file (e.g. in S3/GCS) or a SQL registry. See [registry docs](https://docs.feast.dev/getting-started/concepts/registry) for more details. -3. This example uses a file [offline store](https://docs.feast.dev/getting-started/architecture-and-components/offline-store) +3. This example uses a file [offline store](https://docs.feast.dev/getting-started/components/offline-store) to generate training data. It does not scale. We recommend instead using a data warehouse such as BigQuery, Snowflake, Redshift. There is experimental support for Spark as well. 4. Setup CI/CD + dev vs staging vs prod environments to automatically update the registry as you change Feast feature definitions. See [docs](https://docs.feast.dev/how-to-guides/running-feast-in-production#1.-automatically-deploying-changes-to-your-feature-definitions). 
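The pytz-to-stdlib migration in these template workflows follows a single pattern; a minimal sketch (editor's illustration, mirroring the calls above):

    from datetime import datetime, timedelta, timezone

    # Truncate to the hour and attach an explicit UTC offset (replaces pytz.utc).
    end_date = (
        datetime.now()
        .replace(microsecond=0, second=0, minute=0)
        .astimezone(tz=timezone.utc)
    )
    start_date = (end_date - timedelta(days=60)).astimezone(tz=timezone.utc)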
diff --git a/sdk/python/feast/templates/local/bootstrap.py b/sdk/python/feast/templates/local/bootstrap.py index ee2847c19c..e2c1efdbc4 100644 --- a/sdk/python/feast/templates/local/bootstrap.py +++ b/sdk/python/feast/templates/local/bootstrap.py @@ -10,6 +10,7 @@ def bootstrap(): from feast.driver_test_data import create_driver_hourly_stats_df repo_path = pathlib.Path(__file__).parent.absolute() / "feature_repo" + project_name = pathlib.Path(__file__).parent.absolute().name data_path = repo_path / "data" data_path.mkdir(exist_ok=True) @@ -23,6 +24,7 @@ def bootstrap(): driver_df.to_parquet(path=str(driver_stats_path), allow_truncated_timestamps=True) example_py_file = repo_path / "example_repo.py" + replace_str_in_file(example_py_file, "%PROJECT_NAME%", str(project_name)) replace_str_in_file(example_py_file, "%PARQUET_PATH%", str(driver_stats_path)) replace_str_in_file(example_py_file, "%LOGGING_PATH%", str(data_path)) diff --git a/sdk/python/feast/templates/local/feature_repo/example_repo.py b/sdk/python/feast/templates/local/feature_repo/example_repo.py index debe9d45e9..e2fd0a891c 100644 --- a/sdk/python/feast/templates/local/feature_repo/example_repo.py +++ b/sdk/python/feast/templates/local/feature_repo/example_repo.py @@ -10,6 +10,7 @@ FeatureView, Field, FileSource, + Project, PushSource, RequestSource, ) @@ -18,6 +19,9 @@ from feast.on_demand_feature_view import on_demand_feature_view from feast.types import Float32, Float64, Int64 +# Define a project for the feature repo +project = Project(name="%PROJECT_NAME%", description="A project for driver statistics") + # Define an entity for the driver. You can think of an entity as a primary key used to # fetch features. driver = Entity(name="driver", join_keys=["driver_id"]) diff --git a/sdk/python/feast/templates/local/feature_repo/feature_store.yaml b/sdk/python/feast/templates/local/feature_repo/feature_store.yaml index 3e6a360316..11b339583e 100644 --- a/sdk/python/feast/templates/local/feature_repo/feature_store.yaml +++ b/sdk/python/feast/templates/local/feature_repo/feature_store.yaml @@ -7,3 +7,6 @@ online_store: type: sqlite path: data/online_store.db entity_key_serialization_version: 2 +# By default, authentication and authorization use no_auth; other possible values are kubernetes and oidc. Refer to the documentation for more details.
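+# Illustrative alternative (editor's sketch; only keys referenced in
+# repo_config.py's oidc_client detection are shown — consult the docs for the
+# full field list):
+# auth:
+#   type: oidc
+#   username: <service-account>
+#   password: <password>
+#   client_secret: <client-secret>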
+auth: + type: no_auth diff --git a/sdk/python/feast/templates/minimal/feature_repo/feature_store.yaml b/sdk/python/feast/templates/minimal/feature_repo/feature_store.yaml index 9808690005..45a0ce7718 100644 --- a/sdk/python/feast/templates/minimal/feature_repo/feature_store.yaml +++ b/sdk/python/feast/templates/minimal/feature_repo/feature_store.yaml @@ -3,4 +3,4 @@ registry: /path/to/registry.db provider: local online_store: path: /path/to/online_store.db -entity_key_serialization_version: 2 +entity_key_serialization_version: 2 \ No newline at end of file diff --git a/sdk/python/feast/templates/postgres/feature_repo/test_workflow.py b/sdk/python/feast/templates/postgres/feature_repo/test_workflow.py index f657aba15f..843ade164c 100644 --- a/sdk/python/feast/templates/postgres/feature_repo/test_workflow.py +++ b/sdk/python/feast/templates/postgres/feature_repo/test_workflow.py @@ -1,3 +1,4 @@ +import os.path import subprocess from datetime import datetime @@ -8,9 +9,9 @@ def run_demo(): - store = FeatureStore(repo_path=".") + store = FeatureStore(repo_path=os.path.dirname(__file__)) print("\n--- Run feast apply to setup feature store on Postgres ---") - subprocess.run(["feast", "apply"]) + subprocess.run(["feast", "--chdir", os.path.dirname(__file__), "apply"]) print("\n--- Historical features for training ---") fetch_historical_features_entity_df(store, for_batch_scoring=False) @@ -54,7 +55,7 @@ def run_demo(): fetch_online_features(store, source="push") print("\n--- Run feast teardown ---") - subprocess.run(["feast", "teardown"]) + subprocess.run(["feast", "--chdir", os.path.dirname(__file__), "teardown"]) def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: bool): diff --git a/sdk/python/feast/templates/rockset/README.md b/sdk/python/feast/templates/rockset/README.md deleted file mode 100644 index d4f1ef6faf..0000000000 --- a/sdk/python/feast/templates/rockset/README.md +++ /dev/null @@ -1,21 +0,0 @@ -# Feast Quickstart -A quick view of what's in this repository: - -* `data/` contains raw demo parquet data -* `feature_repo/driver_repo.py` contains demo feature definitions -* `feature_repo/feature_store.yaml` contains a demo setup configuring where data sources are -* `test_workflow.py` showcases how to run all key Feast commands, including defining, retrieving, and pushing features. - -You can run the overall workflow with `python test_workflow.py`. - -## To move from this into a more production ready workflow: -> See more details in [Running Feast in production](https://docs.feast.dev/how-to-guides/running-feast-in-production) - -1. `feature_store.yaml` points to a local file as a registry. You'll want to setup a remote file (e.g. in S3/GCS) or a - SQL registry. See [registry docs](https://docs.feast.dev/getting-started/concepts/registry) for more details. -2. Setup CI/CD + dev vs staging vs prod environments to automatically update the registry as you change Feast feature definitions. See [docs](https://docs.feast.dev/how-to-guides/running-feast-in-production#1.-automatically-deploying-changes-to-your-feature-definitions). -3. (optional) Regularly scheduled materialization to power low latency feature retrieval (e.g. via Airflow). See [Batch data ingestion](https://docs.feast.dev/getting-started/concepts/data-ingestion#batch-data-ingestion) - for more details. -4. (optional) Deploy feature server instances with `feast serve` to expose endpoints to retrieve online features. 
- - See [Python feature server](https://docs.feast.dev/reference/feature-servers/python-feature-server) for details. - - Use cases can also directly call the Feast client to fetch features as per [Feature retrieval](https://docs.feast.dev/getting-started/concepts/feature-retrieval) diff --git a/sdk/python/feast/templates/rockset/bootstrap.py b/sdk/python/feast/templates/rockset/bootstrap.py deleted file mode 100644 index a3dc17f18e..0000000000 --- a/sdk/python/feast/templates/rockset/bootstrap.py +++ /dev/null @@ -1,30 +0,0 @@ -import click - -from feast.file_utils import replace_str_in_file - - -def bootstrap(): - # Bootstrap() will automatically be called from the init_repo() during `feast init` - import pathlib - - repo_path = pathlib.Path(__file__).parent.absolute() / "feature_repo" - config_file = repo_path / "feature_store.yaml" - data_path = repo_path / "data" - data_path.mkdir(exist_ok=True) - - rockset_apikey = click.prompt( - "Rockset Api Key (If blank will be read from ROCKSET_APIKEY in ENV):", - default="", - ) - - rockset_host = click.prompt( - "Rockset Host (If blank will be read from ROCKSET_APISERVER in ENV):", - default="", - ) - - replace_str_in_file(config_file, "ROCKSET_APIKEY", rockset_apikey) - replace_str_in_file(config_file, "ROCKSET_APISERVER", rockset_host) - - -if __name__ == "__main__": - bootstrap() diff --git a/sdk/python/feast/templates/rockset/feature_repo/feature_store.yaml b/sdk/python/feast/templates/rockset/feature_repo/feature_store.yaml deleted file mode 100644 index 57cf8e73bb..0000000000 --- a/sdk/python/feast/templates/rockset/feature_repo/feature_store.yaml +++ /dev/null @@ -1,8 +0,0 @@ -project: my_project -registry: registry.db -provider: local -online_store: - type: rockset - api_key: ROCKSET_APIKEY - host: ROCKSET_APISERVER # (api.usw2a1.rockset.com, api.euc1a1.rockset.com, api.use1a1.rockset.com) -entity_key_serialization_version: 2 diff --git a/sdk/python/feast/templates/snowflake/test_workflow.py b/sdk/python/feast/templates/snowflake/test_workflow.py index 3c44342881..f60b014874 100644 --- a/sdk/python/feast/templates/snowflake/test_workflow.py +++ b/sdk/python/feast/templates/snowflake/test_workflow.py @@ -1,10 +1,9 @@ import random import subprocess -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import pandas as pd import yaml -from pytz import utc from feast import FeatureStore from feast.data_source import PushMode @@ -75,9 +74,11 @@ def run_demo(): def fetch_historical_features_entity_sql(store: FeatureStore, for_batch_scoring): end_date = ( - datetime.now().replace(microsecond=0, second=0, minute=0).astimezone(tz=utc) + datetime.now() + .replace(microsecond=0, second=0, minute=0) + .astimezone(tz=timezone.utc) ) - start_date = (end_date - timedelta(days=60)).astimezone(tz=utc) + start_date = (end_date - timedelta(days=60)).astimezone(tz=timezone.utc) project_name = yaml.safe_load(open("feature_repo/feature_store.yaml"))["project"] table_name = f"{project_name}_feast_driver_hourly_stats" diff --git a/sdk/python/feast/transformation/pandas_transformation.py b/sdk/python/feast/transformation/pandas_transformation.py index 41e437fb6b..ac31a4fa20 100644 --- a/sdk/python/feast/transformation/pandas_transformation.py +++ b/sdk/python/feast/transformation/pandas_transformation.py @@ -1,5 +1,4 @@ -from types import FunctionType -from typing import Any +from typing import Any, Callable import dill import pandas as pd @@ -15,7 +14,7 @@ class PandasTransformation: - def __init__(self, udf: 
FunctionType, udf_string: str = ""): + def __init__(self, udf: Callable[[Any], Any], udf_string: str = ""): """ Creates an PandasTransformation object. @@ -30,11 +29,11 @@ def __init__(self, udf: FunctionType, udf_string: str = ""): def transform_arrow( self, pa_table: pyarrow.Table, features: list[Field] ) -> pyarrow.Table: - output_df_pandas = self.udf.__call__(pa_table.to_pandas()) + output_df_pandas = self.udf(pa_table.to_pandas()) return pyarrow.Table.from_pandas(output_df_pandas) def transform(self, input_df: pd.DataFrame) -> pd.DataFrame: - return self.udf.__call__(input_df) + return self.udf(input_df) def infer_features(self, random_input: dict[str, list[Any]]) -> list[Field]: df = pd.DataFrame.from_dict(random_input) diff --git a/sdk/python/feast/ui/yarn.lock b/sdk/python/feast/ui/yarn.lock index cd1913bbb1..d9e9ce03c8 100644 --- a/sdk/python/feast/ui/yarn.lock +++ b/sdk/python/feast/ui/yarn.lock @@ -1743,11 +1743,25 @@ "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping" "^0.3.9" +"@jridgewell/gen-mapping@^0.3.5": + version "0.3.5" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz#dcce6aff74bdf6dad1a95802b69b04a2fcb1fb36" + integrity sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg== + dependencies: + "@jridgewell/set-array" "^1.2.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.24" + "@jridgewell/resolve-uri@3.1.0", "@jridgewell/resolve-uri@^3.0.3": version "3.1.0" resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6" + integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== + "@jridgewell/set-array@^1.0.0": version "1.1.1" resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.1.tgz#36a6acc93987adcf0ba50c66908bd0b70de8afea" @@ -1758,6 +1772,11 @@ resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== +"@jridgewell/set-array@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.2.1.tgz#558fb6472ed16a4c850b889530e6b36438c49280" + integrity sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A== + "@jridgewell/source-map@^0.3.2": version "0.3.2" resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" @@ -1766,11 +1785,24 @@ "@jridgewell/gen-mapping" "^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" +"@jridgewell/source-map@^0.3.3": + version "0.3.6" + resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.6.tgz#9d71ca886e32502eb9362c9a74a46787c36df81a" + integrity sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ== + dependencies: + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.25" + "@jridgewell/sourcemap-codec@1.4.14", "@jridgewell/sourcemap-codec@^1.4.10": version "1.4.14" resolved 
"https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== +"@jridgewell/sourcemap-codec@^1.4.14": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz#3188bcb273a414b0d215fd22a58540b989b9409a" + integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ== + "@jridgewell/trace-mapping@^0.3.17": version "0.3.18" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz#25783b2086daf6ff1dcb53c9249ae480e4dd4cd6" @@ -1779,6 +1811,14 @@ "@jridgewell/resolve-uri" "3.1.0" "@jridgewell/sourcemap-codec" "1.4.14" +"@jridgewell/trace-mapping@^0.3.20", "@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25": + version "0.3.25" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0" + integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + "@jridgewell/trace-mapping@^0.3.9": version "0.3.14" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed" @@ -2400,22 +2440,6 @@ "@types/d3-transition" "*" "@types/d3-zoom" "*" -"@types/eslint-scope@^3.7.3": - version "3.7.3" - resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.3.tgz#125b88504b61e3c8bc6f870882003253005c3224" - integrity sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g== - dependencies: - "@types/eslint" "*" - "@types/estree" "*" - -"@types/eslint@*": - version "8.4.2" - resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.4.2.tgz#48f2ac58ab9c631cb68845c3d956b28f79fad575" - integrity sha512-Z1nseZON+GEnFjJc04sv4NSALGjhFwy6K0HXt7qsn5ArfAKtb63dXNJHf+1YW6IpOIYRBGUbu3GwJdj8DGnCjA== - dependencies: - "@types/estree" "*" - "@types/json-schema" "*" - "@types/eslint@^7.28.2": version "7.29.0" resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-7.29.0.tgz#e56ddc8e542815272720bb0b4ccc2aff9c3e1c78" @@ -2424,7 +2448,7 @@ "@types/estree" "*" "@types/json-schema" "*" -"@types/estree@*", "@types/estree@^0.0.51": +"@types/estree@*": version "0.0.51" resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40" integrity sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ== @@ -2434,6 +2458,11 @@ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== +"@types/estree@^1.0.5": + version "1.0.5" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.5.tgz#a6ce3e556e00fd9895dd872dd172ad0d4bd687f4" + integrity sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw== + "@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": version "4.17.28" resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.28.tgz#c47def9f34ec81dc6328d0b1b5303d1ec98d86b8" @@ -2869,125 +2898,125 @@ 
"@typescript-eslint/types" "5.23.0" eslint-visitor-keys "^3.0.0" -"@webassemblyjs/ast@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" - integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== +"@webassemblyjs/ast@1.12.1", "@webassemblyjs/ast@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.12.1.tgz#bb16a0e8b1914f979f45864c23819cc3e3f0d4bb" + integrity sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg== dependencies: - "@webassemblyjs/helper-numbers" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/helper-numbers" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" -"@webassemblyjs/floating-point-hex-parser@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" - integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== +"@webassemblyjs/floating-point-hex-parser@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz#dacbcb95aff135c8260f77fa3b4c5fea600a6431" + integrity sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw== -"@webassemblyjs/helper-api-error@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" - integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== +"@webassemblyjs/helper-api-error@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768" + integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q== -"@webassemblyjs/helper-buffer@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" - integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== +"@webassemblyjs/helper-buffer@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz#6df20d272ea5439bf20ab3492b7fb70e9bfcb3f6" + integrity sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw== -"@webassemblyjs/helper-numbers@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" - integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== +"@webassemblyjs/helper-numbers@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz#cbce5e7e0c1bd32cf4905ae444ef64cea919f1b5" + integrity sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g== dependencies: - "@webassemblyjs/floating-point-hex-parser" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" + "@webassemblyjs/floating-point-hex-parser" 
"1.11.6" + "@webassemblyjs/helper-api-error" "1.11.6" "@xtuc/long" "4.2.2" -"@webassemblyjs/helper-wasm-bytecode@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" - integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== +"@webassemblyjs/helper-wasm-bytecode@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9" + integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA== -"@webassemblyjs/helper-wasm-section@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" - integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== +"@webassemblyjs/helper-wasm-section@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz#3da623233ae1a60409b509a52ade9bc22a37f7bf" + integrity sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g== dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/wasm-gen" "1.12.1" -"@webassemblyjs/ieee754@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" - integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== +"@webassemblyjs/ieee754@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz#bb665c91d0b14fffceb0e38298c329af043c6e3a" + integrity sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg== dependencies: "@xtuc/ieee754" "^1.2.0" -"@webassemblyjs/leb128@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" - integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== +"@webassemblyjs/leb128@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.6.tgz#70e60e5e82f9ac81118bc25381a0b283893240d7" + integrity sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ== dependencies: "@xtuc/long" "4.2.2" -"@webassemblyjs/utf8@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" - integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== - -"@webassemblyjs/wasm-edit@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" - integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== - dependencies: - "@webassemblyjs/ast" "1.11.1" 
- "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/helper-wasm-section" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-opt" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - "@webassemblyjs/wast-printer" "1.11.1" - -"@webassemblyjs/wasm-gen@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" - integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" "1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wasm-opt@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" - integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - -"@webassemblyjs/wasm-parser@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" - integrity sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" "1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wast-printer@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" - integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== - dependencies: - "@webassemblyjs/ast" "1.11.1" +"@webassemblyjs/utf8@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a" + integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA== + +"@webassemblyjs/wasm-edit@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz#9f9f3ff52a14c980939be0ef9d5df9ebc678ae3b" + integrity sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/helper-wasm-section" "1.12.1" + "@webassemblyjs/wasm-gen" "1.12.1" + "@webassemblyjs/wasm-opt" "1.12.1" + "@webassemblyjs/wasm-parser" "1.12.1" + "@webassemblyjs/wast-printer" "1.12.1" + +"@webassemblyjs/wasm-gen@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz#a6520601da1b5700448273666a71ad0a45d78547" + integrity sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/ieee754" "1.11.6" + "@webassemblyjs/leb128" "1.11.6" + "@webassemblyjs/utf8" 
"1.11.6" + +"@webassemblyjs/wasm-opt@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz#9e6e81475dfcfb62dab574ac2dda38226c232bc5" + integrity sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/wasm-gen" "1.12.1" + "@webassemblyjs/wasm-parser" "1.12.1" + +"@webassemblyjs/wasm-parser@1.12.1", "@webassemblyjs/wasm-parser@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz#c47acb90e6f083391e3fa61d113650eea1e95937" + integrity sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-api-error" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/ieee754" "1.11.6" + "@webassemblyjs/leb128" "1.11.6" + "@webassemblyjs/utf8" "1.11.6" + +"@webassemblyjs/wast-printer@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz#bcecf661d7d1abdaf989d8341a4833e33e2b31ac" + integrity sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA== + dependencies: + "@webassemblyjs/ast" "1.12.1" "@xtuc/long" "4.2.2" "@xtuc/ieee754@^1.2.0": @@ -3021,10 +3050,10 @@ acorn-globals@^6.0.0: acorn "^7.1.1" acorn-walk "^7.1.1" -acorn-import-assertions@^1.7.6: - version "1.8.0" - resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" - integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== +acorn-import-attributes@^1.9.5: + version "1.9.5" + resolved "https://registry.yarnpkg.com/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz#7eb1557b1ba05ef18b5ed0ec67591bfab04688ef" + integrity sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ== acorn-jsx@^5.3.2: version "5.3.2" @@ -3055,6 +3084,11 @@ acorn@^8.2.4, acorn@^8.5.0, acorn@^8.7.1: resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30" integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A== +acorn@^8.8.2: + version "8.12.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.12.1.tgz#71616bdccbe25e27a54439e0046e89ca76df2248" + integrity sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg== + address@^1.0.1, address@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/address/-/address-1.2.0.tgz#d352a62c92fee90f89a693eccd2a8b2139ab02d9" @@ -3511,10 +3545,10 @@ bluebird@^3.5.5: resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== -body-parser@1.20.2: - version "1.20.2" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd" - integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA== +body-parser@1.20.3: + version "1.20.3" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.3.tgz#1953431221c6fb5cd63c4b36d53fab0928e548c6" + integrity 
sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g== dependencies: bytes "3.1.2" content-type "~1.0.5" @@ -3524,7 +3558,7 @@ body-parser@1.20.2: http-errors "2.0.0" iconv-lite "0.4.24" on-finished "2.4.1" - qs "6.11.0" + qs "6.13.0" raw-body "2.5.2" type-is "~1.6.18" unpipe "1.0.0" @@ -3559,7 +3593,7 @@ brace-expansion@^2.0.1: dependencies: balanced-match "^1.0.0" -braces@^3.0.2, braces@~3.0.2: +braces@^3.0.3, braces@~3.0.2: version "3.0.3" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789" integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA== @@ -3585,7 +3619,7 @@ browser-process-hrtime@^1.0.0: resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== -browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4.18.1, browserslist@^4.20.2, browserslist@^4.20.3: +browserslist@^4.0.0, browserslist@^4.16.6, browserslist@^4.18.1, browserslist@^4.20.2, browserslist@^4.20.3: version "4.20.3" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.20.3.tgz#eb7572f49ec430e054f56d52ff0ebe9be915f8bf" integrity sha512-NBhymBQl1zM0Y5dQT/O+xiLP9/rzOIQdKM/eMJBAq7yBgaB6krIYLGejrwVYnSHZdqjscB1SPuAjHwxjvN6Wdg== @@ -3596,6 +3630,16 @@ browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4 node-releases "^2.0.3" picocolors "^1.0.0" +browserslist@^4.21.10: + version "4.23.3" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.23.3.tgz#debb029d3c93ebc97ffbc8d9cbb03403e227c800" + integrity sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA== + dependencies: + caniuse-lite "^1.0.30001646" + electron-to-chromium "^1.5.4" + node-releases "^2.0.18" + update-browserslist-db "^1.1.0" + bser@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" @@ -3631,6 +3675,17 @@ call-bind@^1.0.0, call-bind@^1.0.2: function-bind "^1.1.1" get-intrinsic "^1.0.2" +call-bind@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.7.tgz#06016599c40c56498c18769d2730be242b6fa3b9" + integrity sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + set-function-length "^1.2.1" + callsites@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" @@ -3679,6 +3734,11 @@ caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001332, caniuse-lite@^1.0.30001335: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001339.tgz#f9aece4ea8156071613b27791547ba0b33f176cf" integrity sha512-Es8PiVqCe+uXdms0Gu5xP5PF2bxLR7OBp3wUzUnuO7OHzhOfCyg3hdiGWVPVxhiuniOzng+hTc1u3fEQ0TlkSQ== +caniuse-lite@^1.0.30001646: + version "1.0.30001653" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001653.tgz#b8af452f8f33b1c77f122780a4aecebea0caca56" + integrity sha512-XGWQVB8wFQ2+9NZwZ10GxTYC5hk0Fa+q8cSkr0tgvMhYhMHP/QC+WTgrePMDBWiWc/pV+1ik82Al20XOK25Gcw== + case-sensitive-paths-webpack-plugin@^2.4.0: version "2.4.0" resolved 
"https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" @@ -4583,6 +4643,15 @@ default-gateway@^6.0.3: dependencies: execa "^5.0.0" +define-data-property@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.4.tgz#894dc141bb7d3060ae4366f6a0107e68fbe48c5e" + integrity sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + gopd "^1.0.1" + define-lazy-prop@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" @@ -4823,6 +4892,11 @@ electron-to-chromium@^1.4.118: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.137.tgz#186180a45617283f1c012284458510cd99d6787f" integrity sha512-0Rcpald12O11BUogJagX3HsCN3FE83DSqWjgXoHo5a72KUKMSfI39XBgJpgNNxS9fuGzytaFjE06kZkiVFy2qA== +electron-to-chromium@^1.5.4: + version "1.5.13" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.13.tgz#1abf0410c5344b2b829b7247e031f02810d442e6" + integrity sha512-lbBcvtIJ4J6sS4tb5TLp1b4LyfCdMkwStzXPyAgVgTRAsep4bvrAGaBOP7ZJtQMNJpSQ9SqG4brWOroNaQtm7Q== + emittery@^0.10.2: version "0.10.2" resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" @@ -4858,10 +4932,15 @@ encodeurl@~1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= -enhanced-resolve@^5.10.0: - version "5.12.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.12.0.tgz#300e1c90228f5b570c4d35babf263f6da7155634" - integrity sha512-QHTXI/sZQmko1cbDoNAa3mJ5qhWUUNAq3vR0/YiD379fWQrcfuoX1+HW2S0MTt7XmoPLapdaDKUtelUSPic7hQ== +encodeurl@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-2.0.0.tgz#7b8ea898077d7e409d3ac45474ea38eaf0857a58" + integrity sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg== + +enhanced-resolve@^5.17.1: + version "5.17.1" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz#67bfbbcc2f81d511be77d686a90267ef7f898a15" + integrity sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg== dependencies: graceful-fs "^4.2.4" tapable "^2.2.0" @@ -4914,10 +4993,22 @@ es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19 string.prototype.trimstart "^1.0.5" unbox-primitive "^1.0.2" -es-module-lexer@^0.9.0: - version "0.9.3" - resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" - integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== +es-define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845" + integrity sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ== + dependencies: + get-intrinsic "^1.2.4" + +es-errors@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f" + integrity 
sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== + +es-module-lexer@^1.2.1: + version "1.5.4" + resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.5.4.tgz#a8efec3a3da991e60efa6b633a7cad6ab8d26b78" + integrity sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw== es-shim-unscopables@^1.0.0: version "1.0.0" @@ -4940,6 +5031,11 @@ escalade@^3.1.1: resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== +escalade@^3.1.2: + version "3.2.0" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5" + integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== + escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" @@ -5271,36 +5367,36 @@ expect@^27.5.1: jest-message-util "^27.5.1" express@^4.17.3: - version "4.19.2" - resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465" - integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q== + version "4.21.0" + resolved "https://registry.yarnpkg.com/express/-/express-4.21.0.tgz#d57cb706d49623d4ac27833f1cbc466b668eb915" + integrity sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng== dependencies: accepts "~1.3.8" array-flatten "1.1.1" - body-parser "1.20.2" + body-parser "1.20.3" content-disposition "0.5.4" content-type "~1.0.4" cookie "0.6.0" cookie-signature "1.0.6" debug "2.6.9" depd "2.0.0" - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" etag "~1.8.1" - finalhandler "1.2.0" + finalhandler "1.3.1" fresh "0.5.2" http-errors "2.0.0" - merge-descriptors "1.0.1" + merge-descriptors "1.0.3" methods "~1.1.2" on-finished "2.4.1" parseurl "~1.3.3" - path-to-regexp "0.1.7" + path-to-regexp "0.1.10" proxy-addr "~2.0.7" - qs "6.11.0" + qs "6.13.0" range-parser "~1.2.1" safe-buffer "5.2.1" - send "0.18.0" - serve-static "1.15.0" + send "0.19.0" + serve-static "1.16.2" setprototypeof "1.2.0" statuses "2.0.1" type-is "~1.6.18" @@ -5412,13 +5508,13 @@ filter-obj@^1.1.0: resolved "https://registry.yarnpkg.com/filter-obj/-/filter-obj-1.1.0.tgz#9b311112bc6c6127a16e016c6c5d7f19e0805c5b" integrity sha1-mzERErxsYSehbgFsbF1/GeCAXFs= -finalhandler@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" - integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== +finalhandler@1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.3.1.tgz#0c575f1d1d324ddd1da35ad7ece3df7d19088019" + integrity sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ== dependencies: debug "2.6.9" - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" on-finished "2.4.1" parseurl "~1.3.3" @@ -5586,6 +5682,11 @@ function-bind@^1.1.1: resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== 
+function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + function.prototype.name@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" @@ -5625,6 +5726,17 @@ get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1: has "^1.0.3" has-symbols "^1.0.1" +get-intrinsic@^1.1.3, get-intrinsic@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd" + integrity sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ== + dependencies: + es-errors "^1.3.0" + function-bind "^1.1.2" + has-proto "^1.0.1" + has-symbols "^1.0.3" + hasown "^2.0.0" + get-nonce@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/get-nonce/-/get-nonce-1.0.1.tgz#fdf3f0278073820d2ce9426c18f07481b1e0cdf3" @@ -5731,11 +5843,23 @@ good-listener@^1.2.2: dependencies: delegate "^3.1.2" +gopd@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" + integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== + dependencies: + get-intrinsic "^1.1.3" + graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: version "4.2.10" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== +graceful-fs@^4.2.11: + version "4.2.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + gzip-size@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462" @@ -5775,6 +5899,18 @@ has-property-descriptors@^1.0.0: dependencies: get-intrinsic "^1.1.1" +has-property-descriptors@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz#963ed7d071dc7bf5f084c5bfbe0d1b6222586854" + integrity sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg== + dependencies: + es-define-property "^1.0.0" + +has-proto@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.3.tgz#b31ddfe9b0e6e9914536a6ab286426d0214f77fd" + integrity sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q== + has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" @@ -5794,6 +5930,13 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" +hasown@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" + integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== + dependencies: + function-bind 
"^1.1.2" + hast-to-hyperscript@^9.0.0: version "9.0.1" resolved "https://registry.yarnpkg.com/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz#9b67fd188e4c81e8ad66f803855334173920218d" @@ -7351,10 +7494,10 @@ memfs@^3.4.3: resolved "https://registry.yarnpkg.com/memoize-one/-/memoize-one-5.2.1.tgz#8337aa3c4335581839ec01c3d594090cebe8f00e" integrity sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q== -merge-descriptors@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" - integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= +merge-descriptors@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.3.tgz#d80319a65f3c7935351e5cfdac8f9318504dbed5" + integrity sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ== merge-stream@^2.0.0: version "2.0.0" @@ -7372,11 +7515,11 @@ methods@~1.1.2: integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= micromatch@^4.0.2, micromatch@^4.0.4: - version "4.0.5" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" - integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + version "4.0.8" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.8.tgz#d66fa18f3a47076789320b9b1af32bd86d9fa202" + integrity sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA== dependencies: - braces "^3.0.2" + braces "^3.0.3" picomatch "^2.3.1" microseconds@0.2.0: @@ -7536,6 +7679,11 @@ node-int64@^0.4.0: resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" integrity sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs= +node-releases@^2.0.18: + version "2.0.18" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.18.tgz#f010e8d35e2fe8d6b2944f03f70213ecedc4ca3f" + integrity sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g== + node-releases@^2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.4.tgz#f38252370c43854dc48aa431c766c6c398f40476" @@ -7602,6 +7750,11 @@ object-inspect@^1.12.0, object-inspect@^1.9.0: resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.0.tgz#6e2c120e868fd1fd18cb4f18c31741d0d6e776f0" integrity sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g== +object-inspect@^1.13.1: + version "1.13.2" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.2.tgz#dea0088467fb991e67af4058147a24824a3043ff" + integrity sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g== + object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" @@ -7889,10 +8042,10 @@ path-parse@^1.0.6, path-parse@^1.0.7: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= 
+path-to-regexp@0.1.10: + version "0.1.10" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.10.tgz#67e9108c5c0551b9e5326064387de4763c4d5f8b" + integrity sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w== path-type@^4.0.0: version "4.0.0" @@ -7914,6 +8067,11 @@ picocolors@^1.0.0: resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== +picocolors@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.1.tgz#a8ad579b571952f0e5d25892de5445bcfe25aaa1" + integrity sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew== + picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" @@ -8608,12 +8766,12 @@ q@^1.1.2: resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= -qs@6.11.0: - version "6.11.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" - integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== +qs@6.13.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.13.0.tgz#6ca3bd58439f7e245655798997787b0d88a51906" + integrity sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg== dependencies: - side-channel "^1.0.4" + side-channel "^1.0.6" query-string@^7.1.1: version "7.1.1" @@ -9397,7 +9555,7 @@ schema-utils@^2.6.5: ajv "^6.12.4" ajv-keywords "^3.5.2" -schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: +schema-utils@^3.0.0, schema-utils@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== @@ -9406,6 +9564,15 @@ schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: ajv "^6.12.5" ajv-keywords "^3.5.2" +schema-utils@^3.2.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.3.0.tgz#f50a88877c3c01652a15b622ae9e9795df7a60fe" + integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + schema-utils@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7" @@ -9450,10 +9617,10 @@ semver@^7.3.2, semver@^7.3.5: dependencies: lru-cache "^6.0.0" -send@0.18.0: - version "0.18.0" - resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" - integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== +send@0.19.0: + version "0.19.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.19.0.tgz#bbc5a388c8ea6c048967049dbeac0e4a3f09d7f8" + integrity sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw== dependencies: debug "2.6.9" depd "2.0.0" @@ -9483,6 +9650,13 @@ serialize-javascript@^6.0.0: 
dependencies: randombytes "^2.1.0" +serialize-javascript@^6.0.1: + version "6.0.2" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.2.tgz#defa1e055c83bf6d59ea805d8da862254eb6a6c2" + integrity sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g== + dependencies: + randombytes "^2.1.0" + serialize-query-params@^1.3.5: version "1.3.6" resolved "https://registry.yarnpkg.com/serialize-query-params/-/serialize-query-params-1.3.6.tgz#5dd5225db85ce747fe6fbc4897628504faafec6d" @@ -9501,15 +9675,27 @@ serve-index@^1.9.1: mime-types "~2.1.17" parseurl "~1.3.2" -serve-static@1.15.0: - version "1.15.0" - resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" - integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== +serve-static@1.16.2: + version "1.16.2" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.16.2.tgz#b6a5343da47f6bdd2673848bf45754941e803296" + integrity sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw== dependencies: - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" parseurl "~1.3.3" - send "0.18.0" + send "0.19.0" + +set-function-length@^1.2.1: + version "1.2.2" + resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.2.tgz#aac72314198eaed975cf77b2c3b6b880695e5449" + integrity sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg== + dependencies: + define-data-property "^1.1.4" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + gopd "^1.0.1" + has-property-descriptors "^1.0.2" setprototypeof@1.1.0: version "1.1.0" @@ -9552,6 +9738,16 @@ side-channel@^1.0.4: get-intrinsic "^1.0.2" object-inspect "^1.9.0" +side-channel@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.6.tgz#abd25fb7cd24baf45466406b1096b7831c9215f2" + integrity sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA== + dependencies: + call-bind "^1.0.7" + es-errors "^1.3.0" + get-intrinsic "^1.2.4" + object-inspect "^1.13.1" + signal-exit@^3.0.2, signal-exit@^3.0.3: version "3.0.7" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" @@ -10040,7 +10236,7 @@ terminal-link@^2.0.0: ansi-escapes "^4.2.1" supports-hyperlinks "^2.0.0" -terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: +terser-webpack-plugin@^5.2.5: version "5.3.1" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.1.tgz#0320dcc270ad5372c1e8993fabbd927929773e54" integrity sha512-GvlZdT6wPQKbDNW/GDQzZFg/j4vKU96yl2q6mcUkzKOgW4gwf1Z8cZToUCrz31XHlPWH8MVb1r2tFtdDtTGJ7g== @@ -10051,6 +10247,17 @@ terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: source-map "^0.6.1" terser "^5.7.2" +terser-webpack-plugin@^5.3.10: + version "5.3.10" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz#904f4c9193c6fd2a03f693a2150c62a92f40d199" + integrity sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w== + dependencies: + "@jridgewell/trace-mapping" "^0.3.20" + jest-worker "^27.4.5" + schema-utils "^3.1.1" + serialize-javascript "^6.0.1" + terser "^5.26.0" + terser@^5.0.0, terser@^5.10.0, terser@^5.7.2: version "5.14.2" resolved 
"https://registry.yarnpkg.com/terser/-/terser-5.14.2.tgz#9ac9f22b06994d736174f4091aa368db896f1c10" @@ -10061,6 +10268,16 @@ terser@^5.0.0, terser@^5.10.0, terser@^5.7.2: commander "^2.20.0" source-map-support "~0.5.20" +terser@^5.26.0: + version "5.31.6" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.31.6.tgz#c63858a0f0703988d0266a82fcbf2d7ba76422b1" + integrity sha512-PQ4DAriWzKj+qgehQ7LK5bQqCFNMmlhjR2PFFLuqGCpuCAauxemVBWwWOxo3UIwWQx8+Pr61Df++r76wDmkQBg== + dependencies: + "@jridgewell/source-map" "^0.3.3" + acorn "^8.8.2" + commander "^2.20.0" + source-map-support "~0.5.20" + test-exclude@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" @@ -10417,6 +10634,14 @@ upath@^1.2.0: resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== +update-browserslist-db@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz#7ca61c0d8650766090728046e416a8cde682859e" + integrity sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ== + dependencies: + escalade "^3.1.2" + picocolors "^1.0.1" + uri-js@^4.2.2: version "4.4.1" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" @@ -10560,10 +10785,10 @@ walker@^1.0.7: dependencies: makeerror "1.0.12" -watchpack@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" - integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== +watchpack@^2.4.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.2.tgz#2feeaed67412e7c33184e5a79ca738fbd38564da" + integrity sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw== dependencies: glob-to-regexp "^0.4.1" graceful-fs "^4.1.2" @@ -10675,33 +10900,32 @@ webpack-sources@^3.2.3: integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== webpack@^5.64.4: - version "5.76.1" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.76.1.tgz#7773de017e988bccb0f13c7d75ec245f377d295c" - integrity sha512-4+YIK4Abzv8172/SGqObnUjaIHjLEuUasz9EwQj/9xmPPkYJy2Mh03Q/lJfSD3YLzbxy5FeTq5Uw0323Oh6SJQ== - dependencies: - "@types/eslint-scope" "^3.7.3" - "@types/estree" "^0.0.51" - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/wasm-edit" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" + version "5.94.0" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.94.0.tgz#77a6089c716e7ab90c1c67574a28da518a20970f" + integrity sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg== + dependencies: + "@types/estree" "^1.0.5" + "@webassemblyjs/ast" "^1.12.1" + "@webassemblyjs/wasm-edit" "^1.12.1" + "@webassemblyjs/wasm-parser" "^1.12.1" acorn "^8.7.1" - acorn-import-assertions "^1.7.6" - browserslist "^4.14.5" + acorn-import-attributes "^1.9.5" + browserslist "^4.21.10" chrome-trace-event "^1.0.2" - enhanced-resolve "^5.10.0" - es-module-lexer "^0.9.0" + enhanced-resolve "^5.17.1" + es-module-lexer "^1.2.1" eslint-scope "5.1.1" events "^3.2.0" glob-to-regexp "^0.4.1" - graceful-fs "^4.2.9" + graceful-fs "^4.2.11" 
     json-parse-even-better-errors "^2.3.1"
     loader-runner "^4.2.0"
     mime-types "^2.1.27"
     neo-async "^2.6.2"
-    schema-utils "^3.1.0"
+    schema-utils "^3.2.0"
     tapable "^2.1.1"
-    terser-webpack-plugin "^5.1.3"
-    watchpack "^2.4.0"
+    terser-webpack-plugin "^5.3.10"
+    watchpack "^2.4.1"
     webpack-sources "^3.2.3"
 
 websocket-driver@>=0.5.1, websocket-driver@^0.7.4:
diff --git a/sdk/python/feast/ui_server.py b/sdk/python/feast/ui_server.py
index 35b51a8021..7e8591e2aa 100644
--- a/sdk/python/feast/ui_server.py
+++ b/sdk/python/feast/ui_server.py
@@ -51,7 +51,7 @@ def shutdown_event():
 
     async_refresh()
 
-    ui_dir_ref = importlib_resources.files(__spec__.parent) / "ui/build/"  # type: ignore[name-defined]
+    ui_dir_ref = importlib_resources.files(__spec__.parent) / "ui/build/"  # type: ignore[name-defined, arg-type]
     with importlib_resources.as_file(ui_dir_ref) as ui_dir:
         # Initialize with the projects-list.json file
         with ui_dir.joinpath("projects-list.json").open(mode="w") as f:
diff --git a/sdk/python/feast/utils.py b/sdk/python/feast/utils.py
index 0467393aa2..8a9f1fadae 100644
--- a/sdk/python/feast/utils.py
+++ b/sdk/python/feast/utils.py
@@ -1,6 +1,5 @@
 import copy
 import itertools
-import logging
 import os
 import typing
 import warnings
@@ -25,7 +24,6 @@
 import pyarrow
 from dateutil.tz import tzlocal
 from google.protobuf.timestamp_pb2 import Timestamp
-from pytz import utc
 
 from feast.constants import FEAST_FS_YAML_FILE_PATH_ENV_NAME
 from feast.entity import Entity
@@ -35,11 +33,13 @@
     FeatureViewNotFoundException,
     RequestDataNotFoundInEntityRowsException,
 )
+from feast.infra.key_encoding_utils import deserialize_entity_key
 from feast.protos.feast.serving.ServingService_pb2 import (
     FieldStatus,
     GetOnlineFeaturesResponse,
 )
 from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto
+from feast.protos.feast.types.Value_pb2 import FloatList as FloatListProto
 from feast.protos.feast.types.Value_pb2 import RepeatedValue as RepeatedValueProto
 from feast.protos.feast.types.Value_pb2 import Value as ValueProto
 from feast.type_map import python_values_to_proto_values
@@ -49,9 +49,9 @@
 if typing.TYPE_CHECKING:
     from feast.feature_service import FeatureService
     from feast.feature_view import FeatureView
+    from feast.infra.registry.base_registry import BaseRegistry
     from feast.on_demand_feature_view import OnDemandFeatureView
 
-
 APPLICATION_NAME = "feast-dev/feast"
 USER_AGENT = "{}/{}".format(APPLICATION_NAME, get_version())
@@ -63,7 +63,7 @@ def get_user_agent():
 def make_tzaware(t: datetime) -> datetime:
     """We assume tz-naive datetimes are UTC"""
     if t.tzinfo is None:
-        return t.replace(tzinfo=utc)
+        return t.replace(tzinfo=timezone.utc)
     else:
         return t
@@ -81,7 +81,7 @@ def to_naive_utc(ts: datetime) -> datetime:
     if ts.tzinfo is None:
         return ts
     else:
-        return ts.astimezone(utc).replace(tzinfo=None)
+        return ts.astimezone(timezone.utc).replace(tzinfo=None)
 
 
 def maybe_local_tz(t: datetime) -> datetime:
@@ -344,7 +344,9 @@ def _group_feature_refs(
 
     # on demand view to on demand view proto
     on_demand_view_index = {
-        view.projection.name_to_use(): view for view in all_on_demand_feature_views
+        view.projection.name_to_use(): view
+        for view in all_on_demand_feature_views
+        if view.projection
     }
 
     # view name to feature names
@@ -747,10 +749,6 @@ def _list_feature_views(
 ) -> List["FeatureView"]:
     from feast.feature_view import DUMMY_ENTITY_NAME
 
-    logging.warning(
-        "_list_feature_views will make breaking changes. Please use _list_batch_feature_views instead. "
-        "_list_feature_views will behave like _list_all_feature_views in the future."
-    )
     feature_views = []
     for fv in registry.list_feature_views(project, allow_cache=allow_cache, tags=tags):
         if hide_dummy_entity and fv.entities and fv.entities[0] == DUMMY_ENTITY_NAME:
@@ -761,61 +759,64 @@ def _list_feature_views(
 
 
 def _get_feature_views_to_use(
-    registry,
+    registry: "BaseRegistry",
     project,
     features: Optional[Union[List[str], "FeatureService"]],
     allow_cache=False,
     hide_dummy_entity: bool = True,
 ) -> Tuple[List["FeatureView"], List["OnDemandFeatureView"]]:
     from feast.feature_service import FeatureService
-
-    fvs = {
-        fv.name: fv
-        for fv in [
-            *_list_feature_views(registry, project, allow_cache, hide_dummy_entity),
-            *registry.list_stream_feature_views(
-                project=project, allow_cache=allow_cache
-            ),
-        ]
-    }
-
-    od_fvs = {
-        fv.name: fv
-        for fv in registry.list_on_demand_feature_views(
-            project=project, allow_cache=allow_cache
-        )
-    }
+    from feast.feature_view import DUMMY_ENTITY_NAME
+    from feast.on_demand_feature_view import OnDemandFeatureView
 
     if isinstance(features, FeatureService):
-        fvs_to_use, od_fvs_to_use = [], []
-        for fv_name, projection in [
+        feature_views = [
             (projection.name, projection)
             for projection in features.feature_view_projections
-        ]:
-            if fv_name in fvs:
-                fvs_to_use.append(fvs[fv_name].with_projection(copy.copy(projection)))
-            elif fv_name in od_fvs:
-                odfv = od_fvs[fv_name].with_projection(copy.copy(projection))
-                od_fvs_to_use.append(odfv)
-                # Let's make sure to include an FVs which the ODFV requires Features from.
-                for projection in odfv.source_feature_view_projections.values():
-                    fv = fvs[projection.name].with_projection(copy.copy(projection))
-                    if fv not in fvs_to_use:
-                        fvs_to_use.append(fv)
-            else:
-                raise ValueError(
-                    f"The provided feature service {features.name} contains a reference to a feature view"
-                    f"{fv_name} which doesn't exist. Please make sure that you have created the feature view"
-                    f'{fv_name} and that you have registered it by running "apply".'
-                )
-        views_to_use = (fvs_to_use, od_fvs_to_use)
+        ]
     else:
-        views_to_use = (
-            [*fvs.values()],
-            [*od_fvs.values()],
-        )
+        assert features is not None
+        feature_views = [(feature.split(":")[0], None) for feature in features]  # type: ignore[misc]
+
+    fvs_to_use, od_fvs_to_use = [], []
+    for name, projection in feature_views:
+        fv = registry.get_any_feature_view(name, project, allow_cache)
+
+        if isinstance(fv, OnDemandFeatureView):
+            od_fvs_to_use.append(
+                fv.with_projection(copy.copy(projection)) if projection else fv
+            )
+
+            for source_projection in fv.source_feature_view_projections.values():
+                source_fv = registry.get_any_feature_view(
+                    source_projection.name, project, allow_cache
                 )
+                # TODO better way to handle dummy entities
+                if (
+                    hide_dummy_entity
+                    and source_fv.entities  # type: ignore[attr-defined]
+                    and source_fv.entities[0] == DUMMY_ENTITY_NAME  # type: ignore[attr-defined]
+                ):
+                    source_fv.entities = []  # type: ignore[attr-defined]
+                    source_fv.entity_columns = []  # type: ignore[attr-defined]
+
+                if source_fv not in fvs_to_use:
+                    fvs_to_use.append(
+                        source_fv.with_projection(copy.copy(source_projection))
                     )
+        else:
+            if (
+                hide_dummy_entity
+                and fv.entities  # type: ignore[attr-defined]
+                and fv.entities[0] == DUMMY_ENTITY_NAME  # type: ignore[attr-defined]
+            ):
+                fv.entities = []  # type: ignore[attr-defined]
+                fv.entity_columns = []  # type: ignore[attr-defined]
+            fvs_to_use.append(
+                fv.with_projection(copy.copy(projection)) if projection else fv
             )
 
-    return views_to_use
+    return (fvs_to_use, od_fvs_to_use)
 
 
 def _get_online_request_context(
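The rewritten _get_feature_views_to_use above no longer pre-builds dicts of every registered view; each referenced name is resolved individually through registry.get_any_feature_view, with on-demand views pulling in their source views. A rough usage sketch under the new signature (the project and feature names are hypothetical):

from feast import FeatureStore
from feast.utils import _get_feature_views_to_use

store = FeatureStore(repo_path=".")

# "driver_stats:conv_rate" yields the view name "driver_stats" via split(":")[0];
# each name is then looked up with registry.get_any_feature_view(name, project, allow_cache).
fvs, od_fvs = _get_feature_views_to_use(
    registry=store.registry,
    project=store.project,
    features=["driver_stats:conv_rate", "driver_stats:avg_daily_trips"],
)
# Passing a FeatureService instead resolves each of its projections the same way.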
" - "_list_feature_views will behave like _list_all_feature_views in the future." - ) feature_views = [] for fv in registry.list_feature_views(project, allow_cache=allow_cache, tags=tags): if hide_dummy_entity and fv.entities and fv.entities[0] == DUMMY_ENTITY_NAME: @@ -761,61 +759,64 @@ def _list_feature_views( def _get_feature_views_to_use( - registry, + registry: "BaseRegistry", project, features: Optional[Union[List[str], "FeatureService"]], allow_cache=False, hide_dummy_entity: bool = True, ) -> Tuple[List["FeatureView"], List["OnDemandFeatureView"]]: from feast.feature_service import FeatureService - - fvs = { - fv.name: fv - for fv in [ - *_list_feature_views(registry, project, allow_cache, hide_dummy_entity), - *registry.list_stream_feature_views( - project=project, allow_cache=allow_cache - ), - ] - } - - od_fvs = { - fv.name: fv - for fv in registry.list_on_demand_feature_views( - project=project, allow_cache=allow_cache - ) - } + from feast.feature_view import DUMMY_ENTITY_NAME + from feast.on_demand_feature_view import OnDemandFeatureView if isinstance(features, FeatureService): - fvs_to_use, od_fvs_to_use = [], [] - for fv_name, projection in [ + feature_views = [ (projection.name, projection) for projection in features.feature_view_projections - ]: - if fv_name in fvs: - fvs_to_use.append(fvs[fv_name].with_projection(copy.copy(projection))) - elif fv_name in od_fvs: - odfv = od_fvs[fv_name].with_projection(copy.copy(projection)) - od_fvs_to_use.append(odfv) - # Let's make sure to include an FVs which the ODFV requires Features from. - for projection in odfv.source_feature_view_projections.values(): - fv = fvs[projection.name].with_projection(copy.copy(projection)) - if fv not in fvs_to_use: - fvs_to_use.append(fv) - else: - raise ValueError( - f"The provided feature service {features.name} contains a reference to a feature view" - f"{fv_name} which doesn't exist. Please make sure that you have created the feature view" - f'{fv_name} and that you have registered it by running "apply".' 
diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt
index 785342550a..6268de6ae1 100644
--- a/sdk/python/requirements/py3.10-ci-requirements.txt
+++ b/sdk/python/requirements/py3.10-ci-requirements.txt
@@ -1,10 +1,11 @@
 # This file was autogenerated by uv via the following command:
-# uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.10-ci-requirements.txt
-aiobotocore==2.13.1 - # via feast (setup.py) -aiohttp==3.9.5 +# uv pip compile -p 3.10 --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.10-ci-requirements.txt +aiobotocore==2.15.1 +aiohappyeyeballs==2.4.0 + # via aiohttp +aiohttp==3.10.5 # via aiobotocore -aioitertools==0.11.0 +aioitertools==0.12.0 # via aiobotocore aiosignal==1.3.1 # via aiohttp @@ -14,14 +15,12 @@ altair==4.2.2 # via great-expectations annotated-types==0.7.0 # via pydantic -anyio==4.4.0 +anyio==4.5.0 # via # httpx # jupyter-server # starlette # watchfiles -appnope==0.1.4 - # via ipykernel argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 @@ -31,60 +30,54 @@ arrow==1.3.0 asn1crypto==1.5.1 # via snowflake-connector-python assertpy==1.1 - # via feast (setup.py) asttokens==2.4.1 # via stack-data async-lru==2.0.4 # via jupyterlab +async-property==0.2.2 + # via python-keycloak async-timeout==4.0.3 # via # aiohttp # redis -atpublic==4.1.0 +atpublic==5.0 # via ibis-framework -attrs==23.2.0 +attrs==24.2.0 # via # aiohttp # jsonschema # referencing -azure-core==1.30.2 +azure-core==1.31.0 # via # azure-identity # azure-storage-blob -azure-identity==1.17.1 - # via feast (setup.py) -azure-storage-blob==12.20.0 - # via feast (setup.py) -babel==2.15.0 +azure-identity==1.18.0 +azure-storage-blob==12.23.0 +babel==2.16.0 # via # jupyterlab-server # sphinx beautifulsoup4==4.12.3 # via nbconvert -bidict==0.23.1 - # via ibis-framework +bigtree==0.21.1 bleach==6.1.0 # via nbconvert -boto3==1.34.131 - # via - # feast (setup.py) - # moto -botocore==1.34.131 +boto3==1.35.23 + # via moto +botocore==1.35.23 # via # aiobotocore # boto3 # moto # s3transfer -build==1.2.1 +build==1.2.2 # via - # feast (setup.py) # pip-tools # singlestoredb -cachetools==5.3.3 +cachetools==5.5.0 # via google-auth -cassandra-driver==3.29.1 - # via feast (setup.py) -certifi==2024.7.4 +cassandra-driver==3.29.2 +certifi==2024.8.30 # via # elastic-transport # httpcore @@ -93,7 +86,7 @@ certifi==2024.7.4 # minio # requests # snowflake-connector-python -cffi==1.16.0 +cffi==1.17.1 # via # argon2-cffi-bindings # cryptography @@ -106,31 +99,27 @@ charset-normalizer==3.3.2 # snowflake-connector-python click==8.1.7 # via - # feast (setup.py) # dask # geomet # great-expectations # pip-tools - # typer # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via - # feast (setup.py) - # great-expectations + # via great-expectations comm==0.2.2 # via # ipykernel # ipywidgets -coverage[toml]==7.5.4 +coverage[toml]==7.6.1 # via pytest-cov -cryptography==42.0.8 +cryptography==43.0.1 # via - # feast (setup.py) # azure-identity # azure-storage-blob # great-expectations + # jwcrypto # moto # msal # pyjwt @@ -138,58 +127,50 @@ cryptography==42.0.8 # snowflake-connector-python # types-pyopenssl # types-redis -dask[dataframe]==2024.6.2 - # via - # feast (setup.py) - # dask-expr -dask-expr==1.1.6 +cython==3.0.11 + # via thriftpy2 +dask[dataframe]==2024.9.0 + # via dask-expr +dask-expr==1.1.14 # via dask -db-dtypes==1.2.0 +db-dtypes==1.3.0 # via google-cloud-bigquery -debugpy==1.8.2 +debugpy==1.8.5 # via ipykernel decorator==5.1.1 # via ipython defusedxml==0.7.1 # via nbconvert -deltalake==0.18.1 - # via feast (setup.py) +deltalake==0.20.0 +deprecation==2.1.0 + # via python-keycloak dill==0.3.8 - # via feast (setup.py) distlib==0.3.8 # via virtualenv -dnspython==2.6.1 - # via email-validator docker==7.1.0 # via testcontainers docutils==0.19 # via sphinx -duckdb==0.10.3 +duckdb==1.1.0 # via ibis-framework -elastic-transport==8.13.1 
+elastic-transport==8.15.0 # via elasticsearch -elasticsearch==8.14.0 - # via feast (setup.py) -email-validator==2.2.0 - # via fastapi +elasticsearch==8.15.1 entrypoints==0.4 # via altair -exceptiongroup==1.2.1 +exceptiongroup==1.2.2 # via # anyio # ipython # pytest execnet==2.1.1 # via pytest-xdist -executing==2.0.1 +executing==2.1.0 # via stack-data -fastapi==0.111.0 - # via feast (setup.py) -fastapi-cli==0.0.4 - # via fastapi +fastapi==0.115.0 fastjsonschema==2.20.0 # via nbformat -filelock==3.15.4 +filelock==3.16.1 # via # snowflake-connector-python # virtualenv @@ -199,69 +180,60 @@ frozenlist==1.4.1 # via # aiohttp # aiosignal -fsspec==2023.12.2 - # via - # feast (setup.py) - # dask -geojson==2.5.0 - # via rockset +fsspec==2024.9.0 + # via dask geomet==0.2.1.post1 # via cassandra-driver -google-api-core[grpc]==2.19.1 +google-api-core[grpc]==2.20.0 # via - # feast (setup.py) # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-core # google-cloud-datastore # google-cloud-storage -google-auth==2.30.0 +google-auth==2.35.0 # via # google-api-core + # google-cloud-bigquery # google-cloud-bigquery-storage + # google-cloud-bigtable # google-cloud-core + # google-cloud-datastore # google-cloud-storage # kubernetes -google-cloud-bigquery[pandas]==3.13.0 - # via feast (setup.py) -google-cloud-bigquery-storage==2.25.0 - # via feast (setup.py) -google-cloud-bigtable==2.24.0 - # via feast (setup.py) +google-cloud-bigquery[pandas]==3.25.0 +google-cloud-bigquery-storage==2.26.0 +google-cloud-bigtable==2.26.0 google-cloud-core==2.4.1 # via # google-cloud-bigquery # google-cloud-bigtable # google-cloud-datastore # google-cloud-storage -google-cloud-datastore==2.19.0 - # via feast (setup.py) -google-cloud-storage==2.17.0 - # via feast (setup.py) -google-crc32c==1.5.0 +google-cloud-datastore==2.20.1 +google-cloud-storage==2.18.2 +google-crc32c==1.6.0 # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.7.1 +google-resumable-media==2.7.2 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos[grpc]==1.63.2 +googleapis-common-protos[grpc]==1.65.0 # via - # feast (setup.py) # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.16 - # via feast (setup.py) +great-expectations==0.18.21 +greenlet==3.1.0 + # via sqlalchemy grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable -grpcio==1.64.1 +grpcio==1.66.1 # via - # feast (setup.py) # google-api-core - # google-cloud-bigquery # googleapis-common-protos # grpc-google-iam-v1 # grpcio-health-checking @@ -269,49 +241,36 @@ grpcio==1.64.1 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-health-checking==1.62.2 - # via feast (setup.py) -grpcio-reflection==1.62.2 - # via feast (setup.py) -grpcio-status==1.62.2 +grpcio-health-checking==1.62.3 +grpcio-reflection==1.62.3 +grpcio-status==1.62.3 # via google-api-core -grpcio-testing==1.62.2 - # via feast (setup.py) -grpcio-tools==1.62.2 - # via feast (setup.py) -gunicorn==22.0.0 - # via feast (setup.py) +grpcio-testing==1.62.3 +grpcio-tools==1.62.3 +gunicorn==23.0.0 h11==0.14.0 # via # httpcore # uvicorn happybase==1.2.0 - # via feast (setup.py) -hazelcast-python-client==5.4.0 - # via feast (setup.py) -hiredis==2.3.2 - # via feast (setup.py) +hazelcast-python-client==5.5.0 +hiredis==2.4.0 httpcore==1.0.5 # via httpx httptools==0.6.1 # via uvicorn -httpx==0.27.0 +httpx==0.27.2 # via - # feast (setup.py) - # fastapi # jupyterlab -ibis-framework[duckdb]==9.1.0 - # via - # feast (setup.py) - # 
ibis-substrait -ibis-substrait==4.0.0 - # via feast (setup.py) -identify==2.5.36 + # python-keycloak +ibis-framework[duckdb]==9.5.0 + # via ibis-substrait +ibis-substrait==4.0.1 +identify==2.6.1 # via pre-commit -idna==3.7 +idna==3.10 # via # anyio - # email-validator # httpx # jsonschema # requests @@ -319,18 +278,20 @@ idna==3.7 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==8.0.0 - # via dask +importlib-metadata==8.5.0 + # via + # build + # dask iniconfig==2.0.0 # via pytest -ipykernel==6.29.4 +ipykernel==6.29.5 # via jupyterlab -ipython==8.25.0 +ipython==8.27.0 # via # great-expectations # ipykernel # ipywidgets -ipywidgets==8.1.3 +ipywidgets==8.1.5 # via great-expectations isodate==0.6.1 # via azure-storage-blob @@ -340,9 +301,7 @@ jedi==0.19.1 # via ipython jinja2==3.1.4 # via - # feast (setup.py) # altair - # fastapi # great-expectations # jupyter-server # jupyterlab @@ -362,9 +321,8 @@ jsonpointer==3.0.0 # via # jsonpatch # jsonschema -jsonschema[format-nongpl]==4.22.0 +jsonschema[format-nongpl]==4.23.0 # via - # feast (setup.py) # altair # great-expectations # jupyter-events @@ -372,7 +330,7 @@ jsonschema[format-nongpl]==4.22.0 # nbformat jsonschema-specifications==2023.12.1 # via jsonschema -jupyter-client==8.6.2 +jupyter-client==8.6.3 # via # ipykernel # jupyter-server @@ -390,7 +348,7 @@ jupyter-events==0.10.0 # via jupyter-server jupyter-lsp==2.2.5 # via jupyterlab -jupyter-server==2.14.1 +jupyter-server==2.14.2 # via # jupyter-lsp # jupyterlab @@ -399,21 +357,22 @@ jupyter-server==2.14.1 # notebook-shim jupyter-server-terminals==0.5.3 # via jupyter-server -jupyterlab==4.2.3 +jupyterlab==4.2.5 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert -jupyterlab-server==2.27.2 +jupyterlab-server==2.27.3 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.11 +jupyterlab-widgets==3.0.13 # via ipywidgets +jwcrypto==1.5.6 + # via python-keycloak kubernetes==20.13.0 - # via feast (setup.py) locket==1.0.0 # via partd -makefun==1.15.2 +makefun==1.15.4 # via great-expectations markdown-it-py==3.0.0 # via rich @@ -422,7 +381,7 @@ markupsafe==2.1.5 # jinja2 # nbconvert # werkzeug -marshmallow==3.21.3 +marshmallow==3.22.0 # via great-expectations matplotlib-inline==0.1.7 # via @@ -431,35 +390,28 @@ matplotlib-inline==0.1.7 mdurl==0.1.2 # via markdown-it-py minio==7.1.0 - # via feast (setup.py) mistune==3.0.2 # via # great-expectations # nbconvert -mmh3==4.1.0 - # via feast (setup.py) +mmh3==5.0.0 mock==2.0.0 - # via feast (setup.py) moto==4.2.14 - # via feast (setup.py) -msal==1.29.0 +msal==1.31.0 # via # azure-identity # msal-extensions msal-extensions==1.2.0 # via azure-identity -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl -mypy==1.10.1 - # via - # feast (setup.py) - # sqlalchemy +mypy==1.11.2 + # via sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.3.0 - # via feast (setup.py) nbclient==0.10.0 # via nbconvert nbconvert==7.16.4 @@ -474,7 +426,7 @@ nest-asyncio==1.6.0 # via ipykernel nodeenv==1.9.1 # via pre-commit -notebook==7.2.1 +notebook==7.2.2 # via great-expectations notebook-shim==0.2.4 # via @@ -482,7 +434,6 @@ notebook-shim==0.2.4 # notebook numpy==1.26.4 # via - # feast (setup.py) # altair # dask # db-dtypes @@ -493,8 +444,6 @@ numpy==1.26.4 # scipy oauthlib==3.2.2 # via requests-oauthlib -orjson==3.10.5 - # via fastapi overrides==7.7.0 # via jupyter-server packaging==24.1 @@ -502,9 +451,11 @@ packaging==24.1 # build # dask # db-dtypes + # deprecation # google-cloud-bigquery # great-expectations # gunicorn + # ibis-framework # 
ibis-substrait # ipykernel # jupyter-server @@ -517,7 +468,6 @@ packaging==24.1 # sphinx pandas==2.2.2 # via - # feast (setup.py) # altair # dask # dask-expr @@ -536,14 +486,13 @@ parsy==2.1 # via ibis-framework partd==1.4.2 # via dask -pbr==6.0.0 +pbr==6.1.0 # via mock pexpect==4.9.0 # via ipython -pip==24.1.1 +pip==24.2 # via pip-tools pip-tools==7.4.1 - # via feast (setup.py) platformdirs==3.11.0 # via # jupyter-core @@ -553,28 +502,22 @@ pluggy==1.5.0 # via pytest ply==3.11 # via thriftpy2 -portalocker==2.10.0 +portalocker==2.10.1 # via msal-extensions pre-commit==3.3.1 - # via feast (setup.py) prometheus-client==0.20.0 - # via - # feast (setup.py) - # jupyter-server + # via jupyter-server prompt-toolkit==3.0.47 # via ipython proto-plus==1.24.0 # via # google-api-core - # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-datastore -protobuf==4.25.3 +protobuf==4.25.5 # via - # feast (setup.py) # google-api-core - # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-datastore @@ -589,30 +532,25 @@ protobuf==4.25.3 # proto-plus # substrait psutil==5.9.0 - # via - # feast (setup.py) - # ipykernel -psycopg[binary, pool]==3.1.19 - # via feast (setup.py) -psycopg-binary==3.1.19 + # via ipykernel +psycopg[binary, pool]==3.2.2 +psycopg-binary==3.2.2 # via psycopg -psycopg-pool==3.2.2 +psycopg-pool==3.2.3 # via psycopg ptyprocess==0.7.0 # via # pexpect # terminado -pure-eval==0.2.2 +pure-eval==0.2.3 # via stack-data py==1.11.0 - # via feast (setup.py) py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark -pyarrow==15.0.2 +pyarrow==17.0.0 # via - # feast (setup.py) # dask-expr # db-dtypes # deltalake @@ -620,57 +558,47 @@ pyarrow==15.0.2 # ibis-framework # snowflake-connector-python pyarrow-hotfix==0.6 - # via - # deltalake - # ibis-framework -pyasn1==0.6.0 + # via ibis-framework +pyasn1==0.6.1 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 +pyasn1-modules==0.4.1 # via google-auth pybindgen==0.22.1 - # via feast (setup.py) pycparser==2.22 # via cffi -pydantic==2.7.4 +pydantic==2.9.2 # via - # feast (setup.py) # fastapi # great-expectations -pydantic-core==2.18.4 +pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via - # feast (setup.py) # ipython # nbconvert # rich # sphinx -pyjwt[crypto]==2.8.0 +pyjwt[crypto]==2.9.0 # via # msal # singlestoredb # snowflake-connector-python -pymssql==2.3.0 - # via feast (setup.py) +pymssql==2.3.1 pymysql==1.1.1 - # via feast (setup.py) pyodbc==5.1.0 - # via feast (setup.py) -pyopenssl==24.1.0 +pyopenssl==24.2.1 # via snowflake-connector-python -pyparsing==3.1.2 +pyparsing==3.1.4 # via great-expectations pyproject-hooks==1.1.0 # via # build # pip-tools -pyspark==3.5.1 - # via feast (setup.py) +pyspark==3.5.2 pytest==7.4.4 # via - # feast (setup.py) # pytest-benchmark # pytest-cov # pytest-env @@ -680,21 +608,13 @@ pytest==7.4.4 # pytest-timeout # pytest-xdist pytest-benchmark==3.4.1 - # via feast (setup.py) pytest-cov==5.0.0 - # via feast (setup.py) pytest-env==1.1.3 - # via feast (setup.py) pytest-lazy-fixture==0.6.3 - # via feast (setup.py) pytest-mock==1.10.4 - # via feast (setup.py) pytest-ordering==0.6 - # via feast (setup.py) pytest-timeout==1.4.2 - # via feast (setup.py) pytest-xdist==3.6.1 - # via feast (setup.py) python-dateutil==2.9.0.post0 # via # arrow @@ -706,24 +626,21 @@ python-dateutil==2.9.0.post0 # kubernetes # moto # pandas - # rockset # trino python-dotenv==1.0.1 # via uvicorn python-json-logger==2.0.7 # via jupyter-events 
-python-multipart==0.0.9 - # via fastapi -pytz==2024.1 +python-keycloak==4.2.2 +pytz==2024.2 # via # great-expectations # ibis-framework # pandas # snowflake-connector-python # trino -pyyaml==6.0.1 +pyyaml==6.0.2 # via - # feast (setup.py) # dask # ibis-substrait # jupyter-events @@ -731,25 +648,21 @@ pyyaml==6.0.1 # pre-commit # responses # uvicorn -pyzmq==26.0.3 +pyzmq==26.2.0 # via # ipykernel # jupyter-client # jupyter-server redis==4.6.0 - # via feast (setup.py) referencing==0.35.1 # via # jsonschema # jsonschema-specifications # jupyter-events -regex==2024.5.15 - # via - # feast (setup.py) - # parsimonious +regex==2024.9.11 + # via parsimonious requests==2.32.3 # via - # feast (setup.py) # azure-core # docker # google-api-core @@ -760,7 +673,9 @@ requests==2.32.3 # kubernetes # moto # msal + # python-keycloak # requests-oauthlib + # requests-toolbelt # responses # singlestoredb # snowflake-connector-python @@ -768,6 +683,8 @@ requests==2.32.3 # trino requests-oauthlib==2.0.0 # via kubernetes +requests-toolbelt==1.0.0 + # via python-keycloak responses==0.25.3 # via moto rfc3339-validator==0.1.4 @@ -778,39 +695,33 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.7.1 - # via - # ibis-framework - # typer -rockset==2.1.2 - # via feast (setup.py) -rpds-py==0.18.1 +rich==13.8.1 + # via ibis-framework +rpds-py==0.20.0 # via # jsonschema # referencing rsa==4.9 # via google-auth -ruamel-yaml==0.17.17 +ruamel-yaml==0.17.40 # via great-expectations -ruff==0.4.10 - # via feast (setup.py) +ruamel-yaml-clib==0.2.8 + # via ruamel-yaml +ruff==0.6.6 s3transfer==0.10.2 # via boto3 -scipy==1.14.0 +scipy==1.14.1 # via great-expectations send2trash==1.8.3 # via jupyter-server -setuptools==70.1.1 +setuptools==75.1.0 # via # grpcio-tools # jupyterlab # kubernetes # pip-tools # singlestoredb -shellingham==1.5.4 - # via typer -singlestoredb==1.4.0 - # via feast (setup.py) +singlestoredb==1.6.3 six==1.16.0 # via # asttokens @@ -830,56 +741,48 @@ sniffio==1.3.1 # httpx snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==3.11.0 - # via feast (setup.py) +snowflake-connector-python[pandas]==3.12.2 sortedcontainers==2.4.0 # via snowflake-connector-python -soupsieve==2.5 +soupsieve==2.6 # via beautifulsoup4 sphinx==6.2.1 - # via feast (setup.py) -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.6 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.7 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.10 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx -sqlalchemy[mypy]==2.0.31 - # via feast (setup.py) -sqlglot==25.1.0 +sqlalchemy[mypy]==2.0.35 +sqlglot==25.20.1 # via ibis-framework sqlite-vec==0.1.1 - # via feast (setup.py) -sqlparams==6.0.1 +sqlparams==6.1.0 # via singlestoredb stack-data==0.6.3 # via ipython -starlette==0.37.2 +starlette==0.38.5 # via fastapi -substrait==0.19.0 +substrait==0.23.0 # via ibis-substrait tabulate==0.9.0 - # via feast (setup.py) -tenacity==8.4.2 - # via feast (setup.py) +tenacity==8.5.0 terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals testcontainers==4.4.0 - # via feast (setup.py) -thriftpy2==0.5.1 +thriftpy2==0.5.2 # via happybase tinycss2==1.3.0 # via nbconvert toml==0.10.2 - # via feast (setup.py) tomli==2.0.1 # via # build @@ -890,7 +793,7 @@ tomli==2.0.1 # pytest # pytest-env # singlestoredb 
-tomlkit==0.12.5 +tomlkit==0.13.2 # via snowflake-connector-python toolz==0.12.1 # via @@ -906,10 +809,8 @@ tornado==6.4.1 # jupyterlab # notebook # terminado -tqdm==4.66.4 - # via - # feast (setup.py) - # great-expectations +tqdm==4.66.5 + # via great-expectations traitlets==5.14.3 # via # comm @@ -925,40 +826,24 @@ traitlets==5.14.3 # nbclient # nbconvert # nbformat -trino==0.328.0 - # via feast (setup.py) +trino==0.329.0 typeguard==4.3.0 - # via feast (setup.py) -typer==0.12.3 - # via fastapi-cli types-cffi==1.16.0.20240331 # via types-pyopenssl types-protobuf==3.19.22 - # via - # feast (setup.py) - # mypy-protobuf + # via mypy-protobuf types-pymysql==1.1.0.20240524 - # via feast (setup.py) -types-pyopenssl==24.1.0.20240425 +types-pyopenssl==24.1.0.20240722 # via types-redis -types-python-dateutil==2.9.0.20240316 - # via - # feast (setup.py) - # arrow -types-pytz==2024.1.0.20240417 - # via feast (setup.py) -types-pyyaml==6.0.12.20240311 - # via feast (setup.py) -types-redis==4.6.0.20240425 - # via feast (setup.py) +types-python-dateutil==2.9.0.20240906 + # via arrow +types-pytz==2024.2.0.20240913 +types-pyyaml==6.0.12.20240917 +types-redis==4.6.0.20240903 types-requests==2.30.0.0 - # via feast (setup.py) -types-setuptools==70.1.0.20240627 - # via - # feast (setup.py) - # types-cffi +types-setuptools==75.1.0.20240917 + # via types-cffi types-tabulate==0.9.0.20240106 - # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests typing-extensions==4.12.2 @@ -972,6 +857,8 @@ typing-extensions==4.12.2 # great-expectations # ibis-framework # ipython + # jwcrypto + # multidict # mypy # psycopg # psycopg-pool @@ -981,7 +868,6 @@ typing-extensions==4.12.2 # sqlalchemy # testcontainers # typeguard - # typer # uvicorn tzdata==2024.1 # via pandas @@ -989,13 +875,10 @@ tzlocal==5.2 # via # great-expectations # trino -ujson==5.10.0 - # via fastapi uri-template==1.3.0 # via jsonschema -urllib3==1.26.19 +urllib3==2.2.3 # via - # feast (setup.py) # botocore # docker # elastic-transport @@ -1004,23 +887,17 @@ urllib3==1.26.19 # minio # requests # responses - # rockset # testcontainers -uvicorn[standard]==0.30.1 - # via - # feast (setup.py) - # fastapi -uvloop==0.19.0 +uvicorn[standard]==0.30.6 +uvloop==0.20.0 # via uvicorn virtualenv==20.23.0 - # via - # feast (setup.py) - # pre-commit -watchfiles==0.22.0 + # via pre-commit +watchfiles==0.24.0 # via uvicorn wcwidth==0.2.13 # via prompt-toolkit -webcolors==24.6.0 +webcolors==24.8.0 # via jsonschema webencodings==0.5.1 # via @@ -1030,15 +907,15 @@ websocket-client==1.8.0 # via # jupyter-server # kubernetes -websockets==12.0 +websockets==13.0.1 # via uvicorn -werkzeug==3.0.3 +werkzeug==3.0.4 # via moto -wheel==0.43.0 +wheel==0.44.0 # via # pip-tools # singlestoredb -widgetsnbextension==4.0.11 +widgetsnbextension==4.0.13 # via ipywidgets wrapt==1.16.0 # via @@ -1046,7 +923,7 @@ wrapt==1.16.0 # testcontainers xmltodict==0.13.0 # via moto -yarl==1.9.4 +yarl==1.11.1 # via aiohttp -zipp==3.19.1 +zipp==3.20.2 # via importlib-metadata diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 250e617b85..9e5eb0be72 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -1,198 +1,127 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.10-requirements.txt -annotated-types==0.6.0 +# uv pip compile -p 3.10 --system 
--no-strip-extras setup.py --output-file sdk/python/requirements/py3.10-requirements.txt +annotated-types==0.7.0 # via pydantic -anyio==4.3.0 +anyio==4.5.0 # via - # httpx # starlette # watchfiles -attrs==23.2.0 +attrs==24.2.0 # via # jsonschema # referencing -certifi==2024.7.4 - # via - # httpcore - # httpx - # requests +bigtree==0.21.1 +certifi==2024.8.30 + # via requests charset-normalizer==3.3.2 # via requests click==8.1.7 # via - # feast (setup.py) # dask - # typer # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via feast (setup.py) -dask[dataframe]==2024.5.0 - # via - # feast (setup.py) - # dask-expr -dask-expr==1.1.0 +dask[dataframe]==2024.9.0 + # via dask-expr +dask-expr==1.1.14 # via dask dill==0.3.8 - # via feast (setup.py) -dnspython==2.6.1 - # via email-validator -email-validator==2.1.1 - # via fastapi -exceptiongroup==1.2.1 +exceptiongroup==1.2.2 # via anyio -fastapi==0.111.0 - # via - # feast (setup.py) - # fastapi-cli -fastapi-cli==0.0.2 - # via fastapi -fsspec==2024.3.1 +fastapi==0.115.0 +fsspec==2024.9.0 # via dask -gunicorn==22.0.0 - # via feast (setup.py) +greenlet==3.1.0 + # via sqlalchemy +gunicorn==23.0.0 h11==0.14.0 - # via - # httpcore - # uvicorn -httpcore==1.0.5 - # via httpx + # via uvicorn httptools==0.6.1 # via uvicorn -httpx==0.27.0 - # via fastapi -idna==3.7 +idna==3.10 # via # anyio - # email-validator - # httpx # requests -importlib-metadata==7.1.0 +importlib-metadata==8.5.0 # via dask jinja2==3.1.4 - # via - # feast (setup.py) - # fastapi -jsonschema==4.22.0 - # via feast (setup.py) +jsonschema==4.23.0 jsonschema-specifications==2023.12.1 # via jsonschema locket==1.0.0 # via partd -markdown-it-py==3.0.0 - # via rich markupsafe==2.1.5 # via jinja2 -mdurl==0.1.2 - # via markdown-it-py -mmh3==4.1.0 - # via feast (setup.py) -mypy==1.10.0 +mmh3==5.0.0 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy -mypy-protobuf==3.6.0 - # via feast (setup.py) numpy==1.26.4 # via - # feast (setup.py) # dask # pandas # pyarrow -orjson==3.10.3 - # via fastapi -packaging==24.0 +packaging==24.1 # via # dask # gunicorn pandas==2.2.2 # via - # feast (setup.py) # dask # dask-expr partd==1.4.2 # via dask prometheus-client==0.20.0 - # via feast (setup.py) -protobuf==4.25.3 - # via - # feast (setup.py) - # mypy-protobuf +protobuf==4.25.5 psutil==6.0.0 - # via feast (setup.py) -pyarrow==16.0.0 - # via - # feast (setup.py) - # dask-expr -pydantic==2.7.1 - # via - # feast (setup.py) - # fastapi -pydantic-core==2.18.2 +pyarrow==17.0.0 + # via dask-expr +pydantic==2.9.2 + # via fastapi +pydantic-core==2.23.4 # via pydantic pygments==2.18.0 - # via - # feast (setup.py) - # rich +pyjwt==2.9.0 python-dateutil==2.9.0.post0 # via pandas python-dotenv==1.0.1 # via uvicorn -python-multipart==0.0.9 - # via fastapi -pytz==2024.1 +pytz==2024.2 # via pandas -pyyaml==6.0.1 +pyyaml==6.0.2 # via - # feast (setup.py) # dask # uvicorn referencing==0.35.1 # via # jsonschema # jsonschema-specifications -requests==2.31.0 - # via feast (setup.py) -rich==13.7.1 - # via typer -rpds-py==0.18.1 +requests==2.32.3 +rpds-py==0.20.0 # via # jsonschema # referencing -shellingham==1.5.4 - # via typer six==1.16.0 # via python-dateutil sniffio==1.3.1 - # via - # anyio - # httpx -sqlalchemy[mypy]==2.0.30 - # via feast (setup.py) -starlette==0.37.2 + # via anyio +sqlalchemy[mypy]==2.0.35 +starlette==0.38.5 # via fastapi tabulate==0.9.0 - # via feast (setup.py) -tenacity==8.3.0 - # via feast (setup.py) +tenacity==8.5.0 toml==0.10.2 - # via feast (setup.py) tomli==2.0.1 # via mypy toolz==0.12.1 # 
via # dask # partd -tqdm==4.66.4 - # via feast (setup.py) -typeguard==4.2.1 - # via feast (setup.py) -typer==0.12.3 - # via fastapi-cli -types-protobuf==5.26.0.20240422 - # via mypy-protobuf -typing-extensions==4.11.0 +tqdm==4.66.5 +typeguard==4.3.0 +typing-extensions==4.12.2 # via # anyio # fastapi @@ -201,24 +130,17 @@ typing-extensions==4.11.0 # pydantic-core # sqlalchemy # typeguard - # typer # uvicorn tzdata==2024.1 # via pandas -ujson==5.9.0 - # via fastapi -urllib3==2.2.1 +urllib3==2.2.3 # via requests -uvicorn[standard]==0.29.0 - # via - # feast (setup.py) - # fastapi - # fastapi-cli -uvloop==0.19.0 +uvicorn[standard]==0.30.6 +uvloop==0.20.0 # via uvicorn -watchfiles==0.21.0 +watchfiles==0.24.0 # via uvicorn -websockets==12.0 +websockets==13.0.1 # via uvicorn -zipp==3.19.1 +zipp==3.20.2 # via importlib-metadata diff --git a/sdk/python/requirements/py3.11-ci-requirements.txt b/sdk/python/requirements/py3.11-ci-requirements.txt index f16b486aa5..946d4e0519 100644 --- a/sdk/python/requirements/py3.11-ci-requirements.txt +++ b/sdk/python/requirements/py3.11-ci-requirements.txt @@ -1,10 +1,11 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.11-ci-requirements.txt -aiobotocore==2.13.1 - # via feast (setup.py) -aiohttp==3.9.5 +# uv pip compile -p 3.11 --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.11-ci-requirements.txt +aiobotocore==2.15.1 +aiohappyeyeballs==2.4.0 + # via aiohttp +aiohttp==3.10.5 # via aiobotocore -aioitertools==0.11.0 +aioitertools==0.12.0 # via aiobotocore aiosignal==1.3.1 # via aiohttp @@ -14,14 +15,12 @@ altair==4.2.2 # via great-expectations annotated-types==0.7.0 # via pydantic -anyio==4.4.0 +anyio==4.5.0 # via # httpx # jupyter-server # starlette # watchfiles -appnope==0.1.4 - # via ipykernel argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 @@ -31,56 +30,52 @@ arrow==1.3.0 asn1crypto==1.5.1 # via snowflake-connector-python assertpy==1.1 - # via feast (setup.py) asttokens==2.4.1 # via stack-data async-lru==2.0.4 # via jupyterlab -atpublic==4.1.0 +async-property==0.2.2 + # via python-keycloak +async-timeout==4.0.3 + # via redis +atpublic==5.0 # via ibis-framework -attrs==23.2.0 +attrs==24.2.0 # via # aiohttp # jsonschema # referencing -azure-core==1.30.2 +azure-core==1.31.0 # via # azure-identity # azure-storage-blob -azure-identity==1.17.1 - # via feast (setup.py) -azure-storage-blob==12.20.0 - # via feast (setup.py) -babel==2.15.0 +azure-identity==1.18.0 +azure-storage-blob==12.23.0 +babel==2.16.0 # via # jupyterlab-server # sphinx beautifulsoup4==4.12.3 # via nbconvert -bidict==0.23.1 - # via ibis-framework +bigtree==0.21.1 bleach==6.1.0 # via nbconvert -boto3==1.34.131 - # via - # feast (setup.py) - # moto -botocore==1.34.131 +boto3==1.35.23 + # via moto +botocore==1.35.23 # via # aiobotocore # boto3 # moto # s3transfer -build==1.2.1 +build==1.2.2 # via - # feast (setup.py) # pip-tools # singlestoredb -cachetools==5.3.3 +cachetools==5.5.0 # via google-auth -cassandra-driver==3.29.1 - # via feast (setup.py) -certifi==2024.7.4 +cassandra-driver==3.29.2 +certifi==2024.8.30 # via # elastic-transport # httpcore @@ -89,7 +84,7 @@ certifi==2024.7.4 # minio # requests # snowflake-connector-python -cffi==1.16.0 +cffi==1.17.1 # via # argon2-cffi-bindings # cryptography @@ -102,31 +97,27 @@ charset-normalizer==3.3.2 # snowflake-connector-python click==8.1.7 # via - # feast (setup.py) # 
dask # geomet # great-expectations # pip-tools - # typer # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via - # feast (setup.py) - # great-expectations + # via great-expectations comm==0.2.2 # via # ipykernel # ipywidgets -coverage[toml]==7.5.4 +coverage[toml]==7.6.1 # via pytest-cov -cryptography==42.0.8 +cryptography==43.0.1 # via - # feast (setup.py) # azure-identity # azure-storage-blob # great-expectations + # jwcrypto # moto # msal # pyjwt @@ -134,53 +125,45 @@ cryptography==42.0.8 # snowflake-connector-python # types-pyopenssl # types-redis -dask[dataframe]==2024.6.2 - # via - # feast (setup.py) - # dask-expr -dask-expr==1.1.6 +cython==3.0.11 + # via thriftpy2 +dask[dataframe]==2024.9.0 + # via dask-expr +dask-expr==1.1.14 # via dask -db-dtypes==1.2.0 +db-dtypes==1.3.0 # via google-cloud-bigquery -debugpy==1.8.2 +debugpy==1.8.5 # via ipykernel decorator==5.1.1 # via ipython defusedxml==0.7.1 # via nbconvert -deltalake==0.18.1 - # via feast (setup.py) +deltalake==0.20.0 +deprecation==2.1.0 + # via python-keycloak dill==0.3.8 - # via feast (setup.py) distlib==0.3.8 # via virtualenv -dnspython==2.6.1 - # via email-validator docker==7.1.0 # via testcontainers docutils==0.19 # via sphinx -duckdb==0.10.3 +duckdb==1.1.0 # via ibis-framework -elastic-transport==8.13.1 +elastic-transport==8.15.0 # via elasticsearch -elasticsearch==8.14.0 - # via feast (setup.py) -email-validator==2.2.0 - # via fastapi +elasticsearch==8.15.1 entrypoints==0.4 # via altair execnet==2.1.1 # via pytest-xdist -executing==2.0.1 +executing==2.1.0 # via stack-data -fastapi==0.111.0 - # via feast (setup.py) -fastapi-cli==0.0.4 - # via fastapi +fastapi==0.115.0 fastjsonschema==2.20.0 # via nbformat -filelock==3.15.4 +filelock==3.16.1 # via # snowflake-connector-python # virtualenv @@ -190,69 +173,60 @@ frozenlist==1.4.1 # via # aiohttp # aiosignal -fsspec==2023.12.2 - # via - # feast (setup.py) - # dask -geojson==2.5.0 - # via rockset +fsspec==2024.9.0 + # via dask geomet==0.2.1.post1 # via cassandra-driver -google-api-core[grpc]==2.19.1 +google-api-core[grpc]==2.20.0 # via - # feast (setup.py) # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-core # google-cloud-datastore # google-cloud-storage -google-auth==2.30.0 +google-auth==2.35.0 # via # google-api-core + # google-cloud-bigquery # google-cloud-bigquery-storage + # google-cloud-bigtable # google-cloud-core + # google-cloud-datastore # google-cloud-storage # kubernetes -google-cloud-bigquery[pandas]==3.13.0 - # via feast (setup.py) -google-cloud-bigquery-storage==2.25.0 - # via feast (setup.py) -google-cloud-bigtable==2.24.0 - # via feast (setup.py) +google-cloud-bigquery[pandas]==3.25.0 +google-cloud-bigquery-storage==2.26.0 +google-cloud-bigtable==2.26.0 google-cloud-core==2.4.1 # via # google-cloud-bigquery # google-cloud-bigtable # google-cloud-datastore # google-cloud-storage -google-cloud-datastore==2.19.0 - # via feast (setup.py) -google-cloud-storage==2.17.0 - # via feast (setup.py) -google-crc32c==1.5.0 +google-cloud-datastore==2.20.1 +google-cloud-storage==2.18.2 +google-crc32c==1.6.0 # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.7.1 +google-resumable-media==2.7.2 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos[grpc]==1.63.2 +googleapis-common-protos[grpc]==1.65.0 # via - # feast (setup.py) # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.16 - # via feast (setup.py) +great-expectations==0.18.21 
+greenlet==3.1.0 + # via sqlalchemy grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable -grpcio==1.64.1 +grpcio==1.66.1 # via - # feast (setup.py) # google-api-core - # google-cloud-bigquery # googleapis-common-protos # grpc-google-iam-v1 # grpcio-health-checking @@ -260,49 +234,36 @@ grpcio==1.64.1 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-health-checking==1.62.2 - # via feast (setup.py) -grpcio-reflection==1.62.2 - # via feast (setup.py) -grpcio-status==1.62.2 +grpcio-health-checking==1.62.3 +grpcio-reflection==1.62.3 +grpcio-status==1.62.3 # via google-api-core -grpcio-testing==1.62.2 - # via feast (setup.py) -grpcio-tools==1.62.2 - # via feast (setup.py) -gunicorn==22.0.0 - # via feast (setup.py) +grpcio-testing==1.62.3 +grpcio-tools==1.62.3 +gunicorn==23.0.0 h11==0.14.0 # via # httpcore # uvicorn happybase==1.2.0 - # via feast (setup.py) -hazelcast-python-client==5.4.0 - # via feast (setup.py) -hiredis==2.3.2 - # via feast (setup.py) +hazelcast-python-client==5.5.0 +hiredis==2.4.0 httpcore==1.0.5 # via httpx httptools==0.6.1 # via uvicorn -httpx==0.27.0 +httpx==0.27.2 # via - # feast (setup.py) - # fastapi # jupyterlab -ibis-framework[duckdb]==9.1.0 - # via - # feast (setup.py) - # ibis-substrait -ibis-substrait==4.0.0 - # via feast (setup.py) -identify==2.5.36 + # python-keycloak +ibis-framework[duckdb]==9.5.0 + # via ibis-substrait +ibis-substrait==4.0.1 +identify==2.6.1 # via pre-commit -idna==3.7 +idna==3.10 # via # anyio - # email-validator # httpx # jsonschema # requests @@ -310,18 +271,18 @@ idna==3.7 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==8.0.0 +importlib-metadata==8.5.0 # via dask iniconfig==2.0.0 # via pytest -ipykernel==6.29.4 +ipykernel==6.29.5 # via jupyterlab -ipython==8.25.0 +ipython==8.27.0 # via # great-expectations # ipykernel # ipywidgets -ipywidgets==8.1.3 +ipywidgets==8.1.5 # via great-expectations isodate==0.6.1 # via azure-storage-blob @@ -331,9 +292,7 @@ jedi==0.19.1 # via ipython jinja2==3.1.4 # via - # feast (setup.py) # altair - # fastapi # great-expectations # jupyter-server # jupyterlab @@ -353,9 +312,8 @@ jsonpointer==3.0.0 # via # jsonpatch # jsonschema -jsonschema[format-nongpl]==4.22.0 +jsonschema[format-nongpl]==4.23.0 # via - # feast (setup.py) # altair # great-expectations # jupyter-events @@ -363,7 +321,7 @@ jsonschema[format-nongpl]==4.22.0 # nbformat jsonschema-specifications==2023.12.1 # via jsonschema -jupyter-client==8.6.2 +jupyter-client==8.6.3 # via # ipykernel # jupyter-server @@ -381,7 +339,7 @@ jupyter-events==0.10.0 # via jupyter-server jupyter-lsp==2.2.5 # via jupyterlab -jupyter-server==2.14.1 +jupyter-server==2.14.2 # via # jupyter-lsp # jupyterlab @@ -390,21 +348,22 @@ jupyter-server==2.14.1 # notebook-shim jupyter-server-terminals==0.5.3 # via jupyter-server -jupyterlab==4.2.3 +jupyterlab==4.2.5 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert -jupyterlab-server==2.27.2 +jupyterlab-server==2.27.3 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.11 +jupyterlab-widgets==3.0.13 # via ipywidgets +jwcrypto==1.5.6 + # via python-keycloak kubernetes==20.13.0 - # via feast (setup.py) locket==1.0.0 # via partd -makefun==1.15.2 +makefun==1.15.4 # via great-expectations markdown-it-py==3.0.0 # via rich @@ -413,7 +372,7 @@ markupsafe==2.1.5 # jinja2 # nbconvert # werkzeug -marshmallow==3.21.3 +marshmallow==3.22.0 # via great-expectations matplotlib-inline==0.1.7 # via @@ -422,35 +381,28 @@ matplotlib-inline==0.1.7 mdurl==0.1.2 # via markdown-it-py minio==7.1.0 - # via feast (setup.py) 
mistune==3.0.2 # via # great-expectations # nbconvert -mmh3==4.1.0 - # via feast (setup.py) +mmh3==5.0.0 mock==2.0.0 - # via feast (setup.py) moto==4.2.14 - # via feast (setup.py) -msal==1.29.0 +msal==1.31.0 # via # azure-identity # msal-extensions msal-extensions==1.2.0 # via azure-identity -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl -mypy==1.10.1 - # via - # feast (setup.py) - # sqlalchemy +mypy==1.11.2 + # via sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.3.0 - # via feast (setup.py) nbclient==0.10.0 # via nbconvert nbconvert==7.16.4 @@ -465,7 +417,7 @@ nest-asyncio==1.6.0 # via ipykernel nodeenv==1.9.1 # via pre-commit -notebook==7.2.1 +notebook==7.2.2 # via great-expectations notebook-shim==0.2.4 # via @@ -473,7 +425,6 @@ notebook-shim==0.2.4 # notebook numpy==1.26.4 # via - # feast (setup.py) # altair # dask # db-dtypes @@ -484,8 +435,6 @@ numpy==1.26.4 # scipy oauthlib==3.2.2 # via requests-oauthlib -orjson==3.10.5 - # via fastapi overrides==7.7.0 # via jupyter-server packaging==24.1 @@ -493,9 +442,11 @@ packaging==24.1 # build # dask # db-dtypes + # deprecation # google-cloud-bigquery # great-expectations # gunicorn + # ibis-framework # ibis-substrait # ipykernel # jupyter-server @@ -508,7 +459,6 @@ packaging==24.1 # sphinx pandas==2.2.2 # via - # feast (setup.py) # altair # dask # dask-expr @@ -527,14 +477,13 @@ parsy==2.1 # via ibis-framework partd==1.4.2 # via dask -pbr==6.0.0 +pbr==6.1.0 # via mock pexpect==4.9.0 # via ipython -pip==24.1.1 +pip==24.2 # via pip-tools pip-tools==7.4.1 - # via feast (setup.py) platformdirs==3.11.0 # via # jupyter-core @@ -544,28 +493,22 @@ pluggy==1.5.0 # via pytest ply==3.11 # via thriftpy2 -portalocker==2.10.0 +portalocker==2.10.1 # via msal-extensions pre-commit==3.3.1 - # via feast (setup.py) prometheus-client==0.20.0 - # via - # feast (setup.py) - # jupyter-server + # via jupyter-server prompt-toolkit==3.0.47 # via ipython proto-plus==1.24.0 # via # google-api-core - # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-datastore -protobuf==4.25.3 +protobuf==4.25.5 # via - # feast (setup.py) # google-api-core - # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-datastore @@ -580,30 +523,25 @@ protobuf==4.25.3 # proto-plus # substrait psutil==5.9.0 - # via - # feast (setup.py) - # ipykernel -psycopg[binary, pool]==3.1.19 - # via feast (setup.py) -psycopg-binary==3.1.19 + # via ipykernel +psycopg[binary, pool]==3.2.2 +psycopg-binary==3.2.2 # via psycopg -psycopg-pool==3.2.2 +psycopg-pool==3.2.3 # via psycopg ptyprocess==0.7.0 # via # pexpect # terminado -pure-eval==0.2.2 +pure-eval==0.2.3 # via stack-data py==1.11.0 - # via feast (setup.py) py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark -pyarrow==15.0.2 +pyarrow==17.0.0 # via - # feast (setup.py) # dask-expr # db-dtypes # deltalake @@ -611,57 +549,47 @@ pyarrow==15.0.2 # ibis-framework # snowflake-connector-python pyarrow-hotfix==0.6 - # via - # deltalake - # ibis-framework -pyasn1==0.6.0 + # via ibis-framework +pyasn1==0.6.1 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 +pyasn1-modules==0.4.1 # via google-auth pybindgen==0.22.1 - # via feast (setup.py) pycparser==2.22 # via cffi -pydantic==2.7.4 +pydantic==2.9.2 # via - # feast (setup.py) # fastapi # great-expectations -pydantic-core==2.18.4 +pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via - # feast (setup.py) # ipython # nbconvert # rich # sphinx -pyjwt[crypto]==2.8.0 
+pyjwt[crypto]==2.9.0 # via # msal # singlestoredb # snowflake-connector-python -pymssql==2.3.0 - # via feast (setup.py) +pymssql==2.3.1 pymysql==1.1.1 - # via feast (setup.py) pyodbc==5.1.0 - # via feast (setup.py) -pyopenssl==24.1.0 +pyopenssl==24.2.1 # via snowflake-connector-python -pyparsing==3.1.2 +pyparsing==3.1.4 # via great-expectations pyproject-hooks==1.1.0 # via # build # pip-tools -pyspark==3.5.1 - # via feast (setup.py) +pyspark==3.5.2 pytest==7.4.4 # via - # feast (setup.py) # pytest-benchmark # pytest-cov # pytest-env @@ -671,21 +599,13 @@ pytest==7.4.4 # pytest-timeout # pytest-xdist pytest-benchmark==3.4.1 - # via feast (setup.py) pytest-cov==5.0.0 - # via feast (setup.py) pytest-env==1.1.3 - # via feast (setup.py) pytest-lazy-fixture==0.6.3 - # via feast (setup.py) pytest-mock==1.10.4 - # via feast (setup.py) pytest-ordering==0.6 - # via feast (setup.py) pytest-timeout==1.4.2 - # via feast (setup.py) pytest-xdist==3.6.1 - # via feast (setup.py) python-dateutil==2.9.0.post0 # via # arrow @@ -697,24 +617,21 @@ python-dateutil==2.9.0.post0 # kubernetes # moto # pandas - # rockset # trino python-dotenv==1.0.1 # via uvicorn python-json-logger==2.0.7 # via jupyter-events -python-multipart==0.0.9 - # via fastapi -pytz==2024.1 +python-keycloak==4.2.2 +pytz==2024.2 # via # great-expectations # ibis-framework # pandas # snowflake-connector-python # trino -pyyaml==6.0.1 +pyyaml==6.0.2 # via - # feast (setup.py) # dask # ibis-substrait # jupyter-events @@ -722,25 +639,21 @@ pyyaml==6.0.1 # pre-commit # responses # uvicorn -pyzmq==26.0.3 +pyzmq==26.2.0 # via # ipykernel # jupyter-client # jupyter-server redis==4.6.0 - # via feast (setup.py) referencing==0.35.1 # via # jsonschema # jsonschema-specifications # jupyter-events -regex==2024.5.15 - # via - # feast (setup.py) - # parsimonious +regex==2024.9.11 + # via parsimonious requests==2.32.3 # via - # feast (setup.py) # azure-core # docker # google-api-core @@ -751,7 +664,9 @@ requests==2.32.3 # kubernetes # moto # msal + # python-keycloak # requests-oauthlib + # requests-toolbelt # responses # singlestoredb # snowflake-connector-python @@ -759,6 +674,8 @@ requests==2.32.3 # trino requests-oauthlib==2.0.0 # via kubernetes +requests-toolbelt==1.0.0 + # via python-keycloak responses==0.25.3 # via moto rfc3339-validator==0.1.4 @@ -769,39 +686,33 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.7.1 - # via - # ibis-framework - # typer -rockset==2.1.2 - # via feast (setup.py) -rpds-py==0.18.1 +rich==13.8.1 + # via ibis-framework +rpds-py==0.20.0 # via # jsonschema # referencing rsa==4.9 # via google-auth -ruamel-yaml==0.17.17 +ruamel-yaml==0.17.40 # via great-expectations -ruff==0.4.10 - # via feast (setup.py) +ruamel-yaml-clib==0.2.8 + # via ruamel-yaml +ruff==0.6.6 s3transfer==0.10.2 # via boto3 -scipy==1.14.0 +scipy==1.14.1 # via great-expectations send2trash==1.8.3 # via jupyter-server -setuptools==70.1.1 +setuptools==75.1.0 # via # grpcio-tools # jupyterlab # kubernetes # pip-tools # singlestoredb -shellingham==1.5.4 - # via typer -singlestoredb==1.4.0 - # via feast (setup.py) +singlestoredb==1.6.3 six==1.16.0 # via # asttokens @@ -821,57 +732,49 @@ sniffio==1.3.1 # httpx snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==3.11.0 - # via feast (setup.py) +snowflake-connector-python[pandas]==3.12.2 sortedcontainers==2.4.0 # via snowflake-connector-python -soupsieve==2.5 +soupsieve==2.6 # via beautifulsoup4 sphinx==6.2.1 - # via feast (setup.py) -sphinxcontrib-applehelp==1.0.8 
+sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.6 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.7 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.10 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx -sqlalchemy[mypy]==2.0.31 - # via feast (setup.py) -sqlglot==25.1.0 +sqlalchemy[mypy]==2.0.35 +sqlglot==25.20.1 # via ibis-framework sqlite-vec==0.1.1 - # via feast (setup.py) -sqlparams==6.0.1 +sqlparams==6.1.0 # via singlestoredb stack-data==0.6.3 # via ipython -starlette==0.37.2 +starlette==0.38.5 # via fastapi -substrait==0.19.0 +substrait==0.23.0 # via ibis-substrait tabulate==0.9.0 - # via feast (setup.py) -tenacity==8.4.2 - # via feast (setup.py) +tenacity==8.5.0 terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals testcontainers==4.4.0 - # via feast (setup.py) -thriftpy2==0.5.1 +thriftpy2==0.5.2 # via happybase tinycss2==1.3.0 # via nbconvert toml==0.10.2 - # via feast (setup.py) -tomlkit==0.12.5 +tomlkit==0.13.2 # via snowflake-connector-python toolz==0.12.1 # via @@ -887,10 +790,8 @@ tornado==6.4.1 # jupyterlab # notebook # terminado -tqdm==4.66.4 - # via - # feast (setup.py) - # great-expectations +tqdm==4.66.5 + # via great-expectations traitlets==5.14.3 # via # comm @@ -906,40 +807,24 @@ traitlets==5.14.3 # nbclient # nbconvert # nbformat -trino==0.328.0 - # via feast (setup.py) +trino==0.329.0 typeguard==4.3.0 - # via feast (setup.py) -typer==0.12.3 - # via fastapi-cli types-cffi==1.16.0.20240331 # via types-pyopenssl types-protobuf==3.19.22 - # via - # feast (setup.py) - # mypy-protobuf + # via mypy-protobuf types-pymysql==1.1.0.20240524 - # via feast (setup.py) -types-pyopenssl==24.1.0.20240425 +types-pyopenssl==24.1.0.20240722 # via types-redis -types-python-dateutil==2.9.0.20240316 - # via - # feast (setup.py) - # arrow -types-pytz==2024.1.0.20240417 - # via feast (setup.py) -types-pyyaml==6.0.12.20240311 - # via feast (setup.py) -types-redis==4.6.0.20240425 - # via feast (setup.py) +types-python-dateutil==2.9.0.20240906 + # via arrow +types-pytz==2024.2.0.20240913 +types-pyyaml==6.0.12.20240917 +types-redis==4.6.0.20240903 types-requests==2.30.0.0 - # via feast (setup.py) -types-setuptools==70.1.0.20240627 - # via - # feast (setup.py) - # types-cffi +types-setuptools==75.1.0.20240917 + # via types-cffi types-tabulate==0.9.0.20240106 - # via feast (setup.py) types-urllib3==1.26.25.14 # via types-requests typing-extensions==4.12.2 @@ -951,6 +836,7 @@ typing-extensions==4.12.2 # great-expectations # ibis-framework # ipython + # jwcrypto # mypy # psycopg # psycopg-pool @@ -960,20 +846,16 @@ typing-extensions==4.12.2 # sqlalchemy # testcontainers # typeguard - # typer tzdata==2024.1 # via pandas tzlocal==5.2 # via # great-expectations # trino -ujson==5.10.0 - # via fastapi uri-template==1.3.0 # via jsonschema -urllib3==1.26.19 +urllib3==2.2.3 # via - # feast (setup.py) # botocore # docker # elastic-transport @@ -982,23 +864,17 @@ urllib3==1.26.19 # minio # requests # responses - # rockset # testcontainers -uvicorn[standard]==0.30.1 - # via - # feast (setup.py) - # fastapi -uvloop==0.19.0 +uvicorn[standard]==0.30.6 +uvloop==0.20.0 # via uvicorn virtualenv==20.23.0 - # via - # feast (setup.py) - # pre-commit -watchfiles==0.22.0 + # via pre-commit +watchfiles==0.24.0 # via uvicorn wcwidth==0.2.13 # via prompt-toolkit -webcolors==24.6.0 +webcolors==24.8.0 # via jsonschema 
webencodings==0.5.1 # via @@ -1008,15 +884,15 @@ websocket-client==1.8.0 # via # jupyter-server # kubernetes -websockets==12.0 +websockets==13.0.1 # via uvicorn -werkzeug==3.0.3 +werkzeug==3.0.4 # via moto -wheel==0.43.0 +wheel==0.44.0 # via # pip-tools # singlestoredb -widgetsnbextension==4.0.11 +widgetsnbextension==4.0.13 # via ipywidgets wrapt==1.16.0 # via @@ -1024,7 +900,7 @@ wrapt==1.16.0 # testcontainers xmltodict==0.13.0 # via moto -yarl==1.9.4 +yarl==1.11.1 # via aiohttp -zipp==3.19.1 +zipp==3.20.2 # via importlib-metadata diff --git a/sdk/python/requirements/py3.11-requirements.txt b/sdk/python/requirements/py3.11-requirements.txt index 4f1655de09..1ce25e7d5b 100644 --- a/sdk/python/requirements/py3.11-requirements.txt +++ b/sdk/python/requirements/py3.11-requirements.txt @@ -1,194 +1,123 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.11-requirements.txt -annotated-types==0.6.0 +# uv pip compile -p 3.11 --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.11-requirements.txt +annotated-types==0.7.0 # via pydantic -anyio==4.3.0 +anyio==4.5.0 # via - # httpx # starlette # watchfiles -attrs==23.2.0 +attrs==24.2.0 # via # jsonschema # referencing -certifi==2024.7.4 - # via - # httpcore - # httpx - # requests +bigtree==0.21.1 +certifi==2024.8.30 + # via requests charset-normalizer==3.3.2 # via requests click==8.1.7 # via - # feast (setup.py) # dask - # typer # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via feast (setup.py) -dask[dataframe]==2024.5.0 - # via - # feast (setup.py) - # dask-expr -dask-expr==1.1.0 +dask[dataframe]==2024.9.0 + # via dask-expr +dask-expr==1.1.14 # via dask dill==0.3.8 - # via feast (setup.py) -dnspython==2.6.1 - # via email-validator -email-validator==2.1.1 - # via fastapi -fastapi==0.111.0 - # via - # feast (setup.py) - # fastapi-cli -fastapi-cli==0.0.2 - # via fastapi -fsspec==2024.3.1 +fastapi==0.115.0 +fsspec==2024.9.0 # via dask -gunicorn==22.0.0 - # via feast (setup.py) +greenlet==3.1.0 + # via sqlalchemy +gunicorn==23.0.0 h11==0.14.0 - # via - # httpcore - # uvicorn -httpcore==1.0.5 - # via httpx + # via uvicorn httptools==0.6.1 # via uvicorn -httpx==0.27.0 - # via fastapi -idna==3.7 +idna==3.10 # via # anyio - # email-validator - # httpx # requests -importlib-metadata==7.1.0 +importlib-metadata==8.5.0 # via dask jinja2==3.1.4 - # via - # feast (setup.py) - # fastapi -jsonschema==4.22.0 - # via feast (setup.py) +jsonschema==4.23.0 jsonschema-specifications==2023.12.1 # via jsonschema locket==1.0.0 # via partd -markdown-it-py==3.0.0 - # via rich markupsafe==2.1.5 # via jinja2 -mdurl==0.1.2 - # via markdown-it-py -mmh3==4.1.0 - # via feast (setup.py) -mypy==1.10.0 +mmh3==5.0.0 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy -mypy-protobuf==3.6.0 - # via feast (setup.py) numpy==1.26.4 # via - # feast (setup.py) # dask # pandas # pyarrow -orjson==3.10.3 - # via fastapi -packaging==24.0 +packaging==24.1 # via # dask # gunicorn pandas==2.2.2 # via - # feast (setup.py) # dask # dask-expr partd==1.4.2 # via dask prometheus-client==0.20.0 - # via feast (setup.py) -protobuf==4.25.3 - # via - # feast (setup.py) - # mypy-protobuf +protobuf==4.25.5 psutil==6.0.0 - # via feast (setup.py) -pyarrow==16.0.0 - # via - # feast (setup.py) - # dask-expr -pydantic==2.7.1 - # via - # feast (setup.py) - # fastapi -pydantic-core==2.18.2 +pyarrow==17.0.0 + # via dask-expr +pydantic==2.9.2 + # via fastapi 
+pydantic-core==2.23.4 # via pydantic pygments==2.18.0 - # via - # feast (setup.py) - # rich +pyjwt==2.9.0 python-dateutil==2.9.0.post0 # via pandas python-dotenv==1.0.1 # via uvicorn -python-multipart==0.0.9 - # via fastapi -pytz==2024.1 +pytz==2024.2 # via pandas -pyyaml==6.0.1 +pyyaml==6.0.2 # via - # feast (setup.py) # dask # uvicorn referencing==0.35.1 # via # jsonschema # jsonschema-specifications -requests==2.31.0 - # via feast (setup.py) -rich==13.7.1 - # via typer -rpds-py==0.18.1 +requests==2.32.3 +rpds-py==0.20.0 # via # jsonschema # referencing -shellingham==1.5.4 - # via typer six==1.16.0 # via python-dateutil sniffio==1.3.1 - # via - # anyio - # httpx -sqlalchemy[mypy]==2.0.30 - # via feast (setup.py) -starlette==0.37.2 + # via anyio +sqlalchemy[mypy]==2.0.35 +starlette==0.38.5 # via fastapi tabulate==0.9.0 - # via feast (setup.py) -tenacity==8.3.0 - # via feast (setup.py) +tenacity==8.5.0 toml==0.10.2 - # via feast (setup.py) toolz==0.12.1 # via # dask # partd -tqdm==4.66.4 - # via feast (setup.py) -typeguard==4.2.1 - # via feast (setup.py) -typer==0.12.3 - # via fastapi-cli -types-protobuf==5.26.0.20240422 - # via mypy-protobuf -typing-extensions==4.11.0 +tqdm==4.66.5 +typeguard==4.3.0 +typing-extensions==4.12.2 # via # fastapi # mypy @@ -196,23 +125,16 @@ typing-extensions==4.11.0 # pydantic-core # sqlalchemy # typeguard - # typer tzdata==2024.1 # via pandas -ujson==5.9.0 - # via fastapi -urllib3==2.2.1 +urllib3==2.2.3 # via requests -uvicorn[standard]==0.29.0 - # via - # feast (setup.py) - # fastapi - # fastapi-cli -uvloop==0.19.0 +uvicorn[standard]==0.30.6 +uvloop==0.20.0 # via uvicorn -watchfiles==0.21.0 +watchfiles==0.24.0 # via uvicorn -websockets==12.0 +websockets==13.0.1 # via uvicorn -zipp==3.19.1 +zipp==3.20.2 # via importlib-metadata diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 94bfa82058..5ea2c58819 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -1,10 +1,11 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.9-ci-requirements.txt -aiobotocore==2.13.1 - # via feast (setup.py) -aiohttp==3.9.5 +# uv pip compile -p 3.9 --system --no-strip-extras setup.py --extra ci --output-file sdk/python/requirements/py3.9-ci-requirements.txt +aiobotocore==2.15.1 +aiohappyeyeballs==2.4.0 + # via aiohttp +aiohttp==3.10.5 # via aiobotocore -aioitertools==0.11.0 +aioitertools==0.12.0 # via aiobotocore aiosignal==1.3.1 # via aiohttp @@ -14,14 +15,12 @@ altair==4.2.2 # via great-expectations annotated-types==0.7.0 # via pydantic -anyio==4.4.0 +anyio==4.5.0 # via # httpx # jupyter-server # starlette # watchfiles -appnope==0.1.4 - # via ipykernel argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 @@ -31,31 +30,30 @@ arrow==1.3.0 asn1crypto==1.5.1 # via snowflake-connector-python assertpy==1.1 - # via feast (setup.py) asttokens==2.4.1 # via stack-data async-lru==2.0.4 # via jupyterlab +async-property==0.2.2 + # via python-keycloak async-timeout==4.0.3 # via # aiohttp # redis atpublic==4.1.0 # via ibis-framework -attrs==23.2.0 +attrs==24.2.0 # via # aiohttp # jsonschema # referencing -azure-core==1.30.2 +azure-core==1.31.0 # via # azure-identity # azure-storage-blob -azure-identity==1.17.1 - # via feast (setup.py) -azure-storage-blob==12.20.0 - # via feast (setup.py) -babel==2.15.0 
+azure-identity==1.18.0 +azure-storage-blob==12.23.0 +babel==2.16.0 # via # jupyterlab-server # sphinx @@ -63,28 +61,25 @@ beautifulsoup4==4.12.3 # via nbconvert bidict==0.23.1 # via ibis-framework +bigtree==0.21.1 bleach==6.1.0 # via nbconvert -boto3==1.34.131 - # via - # feast (setup.py) - # moto -botocore==1.34.131 +boto3==1.35.23 + # via moto +botocore==1.35.23 # via # aiobotocore # boto3 # moto # s3transfer -build==1.2.1 +build==1.2.2 # via - # feast (setup.py) # pip-tools # singlestoredb -cachetools==5.3.3 +cachetools==5.5.0 # via google-auth -cassandra-driver==3.29.1 - # via feast (setup.py) -certifi==2024.7.4 +cassandra-driver==3.29.2 +certifi==2024.8.30 # via # elastic-transport # httpcore @@ -93,7 +88,7 @@ certifi==2024.7.4 # minio # requests # snowflake-connector-python -cffi==1.16.0 +cffi==1.17.1 # via # argon2-cffi-bindings # cryptography @@ -106,31 +101,27 @@ charset-normalizer==3.3.2 # snowflake-connector-python click==8.1.7 # via - # feast (setup.py) # dask # geomet # great-expectations # pip-tools - # typer # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via - # feast (setup.py) - # great-expectations + # via great-expectations comm==0.2.2 # via # ipykernel # ipywidgets -coverage[toml]==7.5.4 +coverage[toml]==7.6.1 # via pytest-cov -cryptography==42.0.8 +cryptography==43.0.1 # via - # feast (setup.py) # azure-identity # azure-storage-blob # great-expectations + # jwcrypto # moto # msal # pyjwt @@ -138,58 +129,50 @@ cryptography==42.0.8 # snowflake-connector-python # types-pyopenssl # types-redis -dask[dataframe]==2024.6.2 - # via - # feast (setup.py) - # dask-expr -dask-expr==1.1.6 +cython==3.0.11 + # via thriftpy2 +dask[dataframe]==2024.8.0 + # via dask-expr +dask-expr==1.1.10 # via dask -db-dtypes==1.2.0 +db-dtypes==1.3.0 # via google-cloud-bigquery -debugpy==1.8.2 +debugpy==1.8.5 # via ipykernel decorator==5.1.1 # via ipython defusedxml==0.7.1 # via nbconvert -deltalake==0.18.1 - # via feast (setup.py) +deltalake==0.20.0 +deprecation==2.1.0 + # via python-keycloak dill==0.3.8 - # via feast (setup.py) distlib==0.3.8 # via virtualenv -dnspython==2.6.1 - # via email-validator docker==7.1.0 # via testcontainers docutils==0.19 # via sphinx duckdb==0.10.3 # via ibis-framework -elastic-transport==8.13.1 +elastic-transport==8.15.0 # via elasticsearch -elasticsearch==8.14.0 - # via feast (setup.py) -email-validator==2.2.0 - # via fastapi +elasticsearch==8.15.1 entrypoints==0.4 # via altair -exceptiongroup==1.2.1 +exceptiongroup==1.2.2 # via # anyio # ipython # pytest execnet==2.1.1 # via pytest-xdist -executing==2.0.1 +executing==2.1.0 # via stack-data -fastapi==0.111.0 - # via feast (setup.py) -fastapi-cli==0.0.4 - # via fastapi +fastapi==0.115.0 fastjsonschema==2.20.0 # via nbformat -filelock==3.15.4 +filelock==3.16.1 # via # snowflake-connector-python # virtualenv @@ -199,69 +182,60 @@ frozenlist==1.4.1 # via # aiohttp # aiosignal -fsspec==2023.12.2 - # via - # feast (setup.py) - # dask -geojson==2.5.0 - # via rockset +fsspec==2024.9.0 + # via dask geomet==0.2.1.post1 # via cassandra-driver -google-api-core[grpc]==2.19.1 +google-api-core[grpc]==2.20.0 # via - # feast (setup.py) # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-core # google-cloud-datastore # google-cloud-storage -google-auth==2.30.0 +google-auth==2.35.0 # via # google-api-core + # google-cloud-bigquery # google-cloud-bigquery-storage + # google-cloud-bigtable # google-cloud-core + # google-cloud-datastore # google-cloud-storage # kubernetes 
-google-cloud-bigquery[pandas]==3.13.0 - # via feast (setup.py) -google-cloud-bigquery-storage==2.25.0 - # via feast (setup.py) -google-cloud-bigtable==2.24.0 - # via feast (setup.py) +google-cloud-bigquery[pandas]==3.25.0 +google-cloud-bigquery-storage==2.26.0 +google-cloud-bigtable==2.26.0 google-cloud-core==2.4.1 # via # google-cloud-bigquery # google-cloud-bigtable # google-cloud-datastore # google-cloud-storage -google-cloud-datastore==2.19.0 - # via feast (setup.py) -google-cloud-storage==2.17.0 - # via feast (setup.py) -google-crc32c==1.5.0 +google-cloud-datastore==2.20.1 +google-cloud-storage==2.18.2 +google-crc32c==1.6.0 # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.7.1 +google-resumable-media==2.7.2 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos[grpc]==1.63.2 +googleapis-common-protos[grpc]==1.65.0 # via - # feast (setup.py) # google-api-core # grpc-google-iam-v1 # grpcio-status -great-expectations==0.18.16 - # via feast (setup.py) +great-expectations==0.18.21 +greenlet==3.1.0 + # via sqlalchemy grpc-google-iam-v1==0.13.1 # via google-cloud-bigtable -grpcio==1.64.1 +grpcio==1.66.1 # via - # feast (setup.py) # google-api-core - # google-cloud-bigquery # googleapis-common-protos # grpc-google-iam-v1 # grpcio-health-checking @@ -269,49 +243,36 @@ grpcio==1.64.1 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-health-checking==1.62.2 - # via feast (setup.py) -grpcio-reflection==1.62.2 - # via feast (setup.py) -grpcio-status==1.62.2 +grpcio-health-checking==1.62.3 +grpcio-reflection==1.62.3 +grpcio-status==1.62.3 # via google-api-core -grpcio-testing==1.62.2 - # via feast (setup.py) -grpcio-tools==1.62.2 - # via feast (setup.py) -gunicorn==22.0.0 - # via feast (setup.py) +grpcio-testing==1.62.3 +grpcio-tools==1.62.3 +gunicorn==23.0.0 h11==0.14.0 # via # httpcore # uvicorn happybase==1.2.0 - # via feast (setup.py) -hazelcast-python-client==5.4.0 - # via feast (setup.py) -hiredis==2.3.2 - # via feast (setup.py) +hazelcast-python-client==5.5.0 +hiredis==2.4.0 httpcore==1.0.5 # via httpx httptools==0.6.1 # via uvicorn -httpx==0.27.0 +httpx==0.27.2 # via - # feast (setup.py) - # fastapi # jupyterlab + # python-keycloak ibis-framework[duckdb]==9.0.0 - # via - # feast (setup.py) - # ibis-substrait -ibis-substrait==4.0.0 - # via feast (setup.py) -identify==2.5.36 + # via ibis-substrait +ibis-substrait==4.0.1 +identify==2.6.1 # via pre-commit -idna==3.7 +idna==3.10 # via # anyio - # email-validator # httpx # jsonschema # requests @@ -319,7 +280,7 @@ idna==3.7 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==8.0.0 +importlib-metadata==8.5.0 # via # build # dask @@ -332,14 +293,14 @@ importlib-metadata==8.0.0 # typeguard iniconfig==2.0.0 # via pytest -ipykernel==6.29.4 +ipykernel==6.29.5 # via jupyterlab ipython==8.18.1 # via # great-expectations # ipykernel # ipywidgets -ipywidgets==8.1.3 +ipywidgets==8.1.5 # via great-expectations isodate==0.6.1 # via azure-storage-blob @@ -349,9 +310,7 @@ jedi==0.19.1 # via ipython jinja2==3.1.4 # via - # feast (setup.py) # altair - # fastapi # great-expectations # jupyter-server # jupyterlab @@ -371,9 +330,8 @@ jsonpointer==3.0.0 # via # jsonpatch # jsonschema -jsonschema[format-nongpl]==4.22.0 +jsonschema[format-nongpl]==4.23.0 # via - # feast (setup.py) # altair # great-expectations # jupyter-events @@ -381,7 +339,7 @@ jsonschema[format-nongpl]==4.22.0 # nbformat jsonschema-specifications==2023.12.1 # via jsonschema -jupyter-client==8.6.2 +jupyter-client==8.6.3 # via # 
ipykernel # jupyter-server @@ -399,7 +357,7 @@ jupyter-events==0.10.0 # via jupyter-server jupyter-lsp==2.2.5 # via jupyterlab -jupyter-server==2.14.1 +jupyter-server==2.14.2 # via # jupyter-lsp # jupyterlab @@ -408,21 +366,22 @@ jupyter-server==2.14.1 # notebook-shim jupyter-server-terminals==0.5.3 # via jupyter-server -jupyterlab==4.2.3 +jupyterlab==4.2.5 # via notebook jupyterlab-pygments==0.3.0 # via nbconvert -jupyterlab-server==2.27.2 +jupyterlab-server==2.27.3 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.11 +jupyterlab-widgets==3.0.13 # via ipywidgets +jwcrypto==1.5.6 + # via python-keycloak kubernetes==20.13.0 - # via feast (setup.py) locket==1.0.0 # via partd -makefun==1.15.2 +makefun==1.15.4 # via great-expectations markdown-it-py==3.0.0 # via rich @@ -431,7 +390,7 @@ markupsafe==2.1.5 # jinja2 # nbconvert # werkzeug -marshmallow==3.21.3 +marshmallow==3.22.0 # via great-expectations matplotlib-inline==0.1.7 # via @@ -440,35 +399,28 @@ matplotlib-inline==0.1.7 mdurl==0.1.2 # via markdown-it-py minio==7.1.0 - # via feast (setup.py) mistune==3.0.2 # via # great-expectations # nbconvert -mmh3==4.1.0 - # via feast (setup.py) +mmh3==5.0.0 mock==2.0.0 - # via feast (setup.py) moto==4.2.14 - # via feast (setup.py) -msal==1.29.0 +msal==1.31.0 # via # azure-identity # msal-extensions msal-extensions==1.2.0 # via azure-identity -multidict==6.0.5 +multidict==6.1.0 # via # aiohttp # yarl -mypy==1.10.1 - # via - # feast (setup.py) - # sqlalchemy +mypy==1.11.2 + # via sqlalchemy mypy-extensions==1.0.0 # via mypy mypy-protobuf==3.3.0 - # via feast (setup.py) nbclient==0.10.0 # via nbconvert nbconvert==7.16.4 @@ -483,7 +435,7 @@ nest-asyncio==1.6.0 # via ipykernel nodeenv==1.9.1 # via pre-commit -notebook==7.2.1 +notebook==7.2.2 # via great-expectations notebook-shim==0.2.4 # via @@ -491,7 +443,6 @@ notebook-shim==0.2.4 # notebook numpy==1.26.4 # via - # feast (setup.py) # altair # dask # db-dtypes @@ -502,8 +453,6 @@ numpy==1.26.4 # scipy oauthlib==3.2.2 # via requests-oauthlib -orjson==3.10.5 - # via fastapi overrides==7.7.0 # via jupyter-server packaging==24.1 @@ -511,6 +460,7 @@ packaging==24.1 # build # dask # db-dtypes + # deprecation # google-cloud-bigquery # great-expectations # gunicorn @@ -526,7 +476,6 @@ packaging==24.1 # sphinx pandas==2.2.2 # via - # feast (setup.py) # altair # dask # dask-expr @@ -545,14 +494,13 @@ parsy==2.1 # via ibis-framework partd==1.4.2 # via dask -pbr==6.0.0 +pbr==6.1.0 # via mock pexpect==4.9.0 # via ipython -pip==24.1.1 +pip==24.2 # via pip-tools pip-tools==7.4.1 - # via feast (setup.py) platformdirs==3.11.0 # via # jupyter-core @@ -562,28 +510,22 @@ pluggy==1.5.0 # via pytest ply==3.11 # via thriftpy2 -portalocker==2.10.0 +portalocker==2.10.1 # via msal-extensions pre-commit==3.3.1 - # via feast (setup.py) prometheus-client==0.20.0 - # via - # feast (setup.py) - # jupyter-server + # via jupyter-server prompt-toolkit==3.0.47 # via ipython proto-plus==1.24.0 # via # google-api-core - # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-datastore -protobuf==4.25.3 +protobuf==4.25.5 # via - # feast (setup.py) # google-api-core - # google-cloud-bigquery # google-cloud-bigquery-storage # google-cloud-bigtable # google-cloud-datastore @@ -598,30 +540,25 @@ protobuf==4.25.3 # proto-plus # substrait psutil==5.9.0 - # via - # feast (setup.py) - # ipykernel -psycopg[binary, pool]==3.1.18 - # via feast (setup.py) -psycopg-binary==3.1.18 + # via ipykernel +psycopg[binary, pool]==3.2.2 +psycopg-binary==3.2.2 # via 
psycopg -psycopg-pool==3.2.2 +psycopg-pool==3.2.3 # via psycopg ptyprocess==0.7.0 # via # pexpect # terminado -pure-eval==0.2.2 +pure-eval==0.2.3 # via stack-data py==1.11.0 - # via feast (setup.py) py-cpuinfo==9.0.0 # via pytest-benchmark py4j==0.10.9.7 # via pyspark -pyarrow==15.0.2 +pyarrow==16.1.0 # via - # feast (setup.py) # dask-expr # db-dtypes # deltalake @@ -629,57 +566,47 @@ pyarrow==15.0.2 # ibis-framework # snowflake-connector-python pyarrow-hotfix==0.6 - # via - # deltalake - # ibis-framework -pyasn1==0.6.0 + # via ibis-framework +pyasn1==0.6.1 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 +pyasn1-modules==0.4.1 # via google-auth pybindgen==0.22.1 - # via feast (setup.py) pycparser==2.22 # via cffi -pydantic==2.7.4 +pydantic==2.9.2 # via - # feast (setup.py) # fastapi # great-expectations -pydantic-core==2.18.4 +pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via - # feast (setup.py) # ipython # nbconvert # rich # sphinx -pyjwt[crypto]==2.8.0 +pyjwt[crypto]==2.9.0 # via # msal # singlestoredb # snowflake-connector-python -pymssql==2.3.0 - # via feast (setup.py) +pymssql==2.3.1 pymysql==1.1.1 - # via feast (setup.py) pyodbc==5.1.0 - # via feast (setup.py) -pyopenssl==24.1.0 +pyopenssl==24.2.1 # via snowflake-connector-python -pyparsing==3.1.2 +pyparsing==3.1.4 # via great-expectations pyproject-hooks==1.1.0 # via # build # pip-tools -pyspark==3.5.1 - # via feast (setup.py) +pyspark==3.5.2 pytest==7.4.4 # via - # feast (setup.py) # pytest-benchmark # pytest-cov # pytest-env @@ -689,21 +616,13 @@ pytest==7.4.4 # pytest-timeout # pytest-xdist pytest-benchmark==3.4.1 - # via feast (setup.py) pytest-cov==5.0.0 - # via feast (setup.py) pytest-env==1.1.3 - # via feast (setup.py) pytest-lazy-fixture==0.6.3 - # via feast (setup.py) pytest-mock==1.10.4 - # via feast (setup.py) pytest-ordering==0.6 - # via feast (setup.py) pytest-timeout==1.4.2 - # via feast (setup.py) pytest-xdist==3.6.1 - # via feast (setup.py) python-dateutil==2.9.0.post0 # via # arrow @@ -715,24 +634,21 @@ python-dateutil==2.9.0.post0 # kubernetes # moto # pandas - # rockset # trino python-dotenv==1.0.1 # via uvicorn python-json-logger==2.0.7 # via jupyter-events -python-multipart==0.0.9 - # via fastapi -pytz==2024.1 +python-keycloak==4.2.2 +pytz==2024.2 # via # great-expectations # ibis-framework # pandas # snowflake-connector-python # trino -pyyaml==6.0.1 +pyyaml==6.0.2 # via - # feast (setup.py) # dask # ibis-substrait # jupyter-events @@ -740,25 +656,21 @@ pyyaml==6.0.1 # pre-commit # responses # uvicorn -pyzmq==26.0.3 +pyzmq==26.2.0 # via # ipykernel # jupyter-client # jupyter-server redis==4.6.0 - # via feast (setup.py) referencing==0.35.1 # via # jsonschema # jsonschema-specifications # jupyter-events -regex==2024.5.15 - # via - # feast (setup.py) - # parsimonious +regex==2024.9.11 + # via parsimonious requests==2.32.3 # via - # feast (setup.py) # azure-core # docker # google-api-core @@ -769,7 +681,9 @@ requests==2.32.3 # kubernetes # moto # msal + # python-keycloak # requests-oauthlib + # requests-toolbelt # responses # singlestoredb # snowflake-connector-python @@ -777,6 +691,8 @@ requests==2.32.3 # trino requests-oauthlib==2.0.0 # via kubernetes +requests-toolbelt==1.0.0 + # via python-keycloak responses==0.25.3 # via moto rfc3339-validator==0.1.4 @@ -787,41 +703,33 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.7.1 - # via - # ibis-framework - # typer -rockset==2.1.2 - # via feast (setup.py) -rpds-py==0.18.1 +rich==13.8.1 + # via ibis-framework +rpds-py==0.20.0 # 
via # jsonschema # referencing rsa==4.9 # via google-auth -ruamel-yaml==0.17.17 +ruamel-yaml==0.17.40 # via great-expectations ruamel-yaml-clib==0.2.8 # via ruamel-yaml -ruff==0.4.10 - # via feast (setup.py) +ruff==0.6.6 s3transfer==0.10.2 # via boto3 scipy==1.13.1 # via great-expectations send2trash==1.8.3 # via jupyter-server -setuptools==70.1.1 +setuptools==75.1.0 # via # grpcio-tools # jupyterlab # kubernetes # pip-tools # singlestoredb -shellingham==1.5.4 - # via typer -singlestoredb==1.4.0 - # via feast (setup.py) +singlestoredb==1.6.3 six==1.16.0 # via # asttokens @@ -841,56 +749,48 @@ sniffio==1.3.1 # httpx snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==3.11.0 - # via feast (setup.py) +snowflake-connector-python[pandas]==3.12.2 sortedcontainers==2.4.0 # via snowflake-connector-python -soupsieve==2.5 +soupsieve==2.6 # via beautifulsoup4 sphinx==6.2.1 - # via feast (setup.py) -sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-applehelp==2.0.0 # via sphinx -sphinxcontrib-devhelp==1.0.6 +sphinxcontrib-devhelp==2.0.0 # via sphinx -sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-htmlhelp==2.1.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.7 +sphinxcontrib-qthelp==2.0.0 # via sphinx -sphinxcontrib-serializinghtml==1.1.10 +sphinxcontrib-serializinghtml==2.0.0 # via sphinx -sqlalchemy[mypy]==2.0.31 - # via feast (setup.py) +sqlalchemy[mypy]==2.0.35 sqlglot==23.12.2 # via ibis-framework sqlite-vec==0.1.1 - # via feast (setup.py) -sqlparams==6.0.1 +sqlparams==6.1.0 # via singlestoredb stack-data==0.6.3 # via ipython -starlette==0.37.2 +starlette==0.38.5 # via fastapi -substrait==0.19.0 +substrait==0.23.0 # via ibis-substrait tabulate==0.9.0 - # via feast (setup.py) -tenacity==8.4.2 - # via feast (setup.py) +tenacity==8.5.0 terminado==0.18.1 # via # jupyter-server # jupyter-server-terminals testcontainers==4.4.0 - # via feast (setup.py) -thriftpy2==0.5.1 +thriftpy2==0.5.2 # via happybase tinycss2==1.3.0 # via nbconvert toml==0.10.2 - # via feast (setup.py) tomli==2.0.1 # via # build @@ -901,7 +801,7 @@ tomli==2.0.1 # pytest # pytest-env # singlestoredb -tomlkit==0.12.5 +tomlkit==0.13.2 # via snowflake-connector-python toolz==0.12.1 # via @@ -917,10 +817,8 @@ tornado==6.4.1 # jupyterlab # notebook # terminado -tqdm==4.66.4 - # via - # feast (setup.py) - # great-expectations +tqdm==4.66.5 + # via great-expectations traitlets==5.14.3 # via # comm @@ -936,40 +834,24 @@ traitlets==5.14.3 # nbclient # nbconvert # nbformat -trino==0.328.0 - # via feast (setup.py) +trino==0.329.0 typeguard==4.3.0 - # via feast (setup.py) -typer==0.12.3 - # via fastapi-cli types-cffi==1.16.0.20240331 # via types-pyopenssl types-protobuf==3.19.22 - # via - # feast (setup.py) - # mypy-protobuf + # via mypy-protobuf types-pymysql==1.1.0.20240524 - # via feast (setup.py) -types-pyopenssl==24.1.0.20240425 +types-pyopenssl==24.1.0.20240722 # via types-redis -types-python-dateutil==2.9.0.20240316 - # via - # feast (setup.py) - # arrow -types-pytz==2024.1.0.20240417 - # via feast (setup.py) -types-pyyaml==6.0.12.20240311 - # via feast (setup.py) -types-redis==4.6.0.20240425 - # via feast (setup.py) +types-python-dateutil==2.9.0.20240906 + # via arrow +types-pytz==2024.2.0.20240913 +types-pyyaml==6.0.12.20240917 +types-redis==4.6.0.20240903 types-requests==2.30.0.0 - # via feast (setup.py) -types-setuptools==70.1.0.20240627 - # via - # feast (setup.py) - # types-cffi +types-setuptools==75.1.0.20240917 + # via types-cffi types-tabulate==0.9.0.20240106 - # via feast 
(setup.py) types-urllib3==1.26.25.14 # via types-requests typing-extensions==4.12.2 @@ -984,6 +866,8 @@ typing-extensions==4.12.2 # great-expectations # ibis-framework # ipython + # jwcrypto + # multidict # mypy # psycopg # psycopg-pool @@ -994,7 +878,6 @@ typing-extensions==4.12.2 # starlette # testcontainers # typeguard - # typer # uvicorn tzdata==2024.1 # via pandas @@ -1002,13 +885,10 @@ tzlocal==5.2 # via # great-expectations # trino -ujson==5.10.0 - # via fastapi uri-template==1.3.0 # via jsonschema -urllib3==1.26.19 +urllib3==1.26.20 # via - # feast (setup.py) # botocore # docker # elastic-transport @@ -1017,24 +897,18 @@ urllib3==1.26.19 # minio # requests # responses - # rockset # snowflake-connector-python # testcontainers -uvicorn[standard]==0.30.1 - # via - # feast (setup.py) - # fastapi -uvloop==0.19.0 +uvicorn[standard]==0.30.6 +uvloop==0.20.0 # via uvicorn virtualenv==20.23.0 - # via - # feast (setup.py) - # pre-commit -watchfiles==0.22.0 + # via pre-commit +watchfiles==0.24.0 # via uvicorn wcwidth==0.2.13 # via prompt-toolkit -webcolors==24.6.0 +webcolors==24.8.0 # via jsonschema webencodings==0.5.1 # via @@ -1044,15 +918,15 @@ websocket-client==1.8.0 # via # jupyter-server # kubernetes -websockets==12.0 +websockets==13.0.1 # via uvicorn -werkzeug==3.0.3 +werkzeug==3.0.4 # via moto -wheel==0.43.0 +wheel==0.44.0 # via # pip-tools # singlestoredb -widgetsnbextension==4.0.11 +widgetsnbextension==4.0.13 # via ipywidgets wrapt==1.16.0 # via @@ -1060,7 +934,7 @@ wrapt==1.16.0 # testcontainers xmltodict==0.13.0 # via moto -yarl==1.9.4 +yarl==1.11.1 # via aiohttp -zipp==3.19.1 +zipp==3.20.2 # via importlib-metadata diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index f9fa856a0e..857d7d72bf 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -1,200 +1,129 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.9-requirements.txt -annotated-types==0.6.0 +# uv pip compile -p 3.9 --system --no-strip-extras setup.py --output-file sdk/python/requirements/py3.9-requirements.txt +annotated-types==0.7.0 # via pydantic -anyio==4.3.0 +anyio==4.5.0 # via - # httpx # starlette # watchfiles -attrs==23.2.0 +attrs==24.2.0 # via # jsonschema # referencing -certifi==2024.7.4 - # via - # httpcore - # httpx - # requests +bigtree==0.21.1 +certifi==2024.8.30 + # via requests charset-normalizer==3.3.2 # via requests click==8.1.7 # via - # feast (setup.py) # dask - # typer # uvicorn cloudpickle==3.0.0 # via dask colorama==0.4.6 - # via feast (setup.py) -dask[dataframe]==2024.5.0 - # via - # feast (setup.py) - # dask-expr -dask-expr==1.1.0 +dask[dataframe]==2024.8.0 + # via dask-expr +dask-expr==1.1.10 # via dask dill==0.3.8 - # via feast (setup.py) -dnspython==2.6.1 - # via email-validator -email-validator==2.1.1 - # via fastapi exceptiongroup==1.2.2 # via anyio -fastapi==0.111.0 - # via - # feast (setup.py) - # fastapi-cli -fastapi-cli==0.0.2 - # via fastapi -fsspec==2024.3.1 +fastapi==0.115.0 +fsspec==2024.9.0 # via dask -gunicorn==22.0.0 - # via feast (setup.py) +greenlet==3.1.0 + # via sqlalchemy +gunicorn==23.0.0 h11==0.14.0 - # via - # httpcore - # uvicorn -httpcore==1.0.5 - # via httpx + # via uvicorn httptools==0.6.1 # via uvicorn -httpx==0.27.0 - # via fastapi -idna==3.7 +idna==3.10 # via # anyio - # email-validator - # httpx # requests -importlib-metadata==8.2.0 
+importlib-metadata==8.5.0 # via # dask # typeguard jinja2==3.1.4 - # via - # feast (setup.py) - # fastapi -jsonschema==4.22.0 - # via feast (setup.py) +jsonschema==4.23.0 jsonschema-specifications==2023.12.1 # via jsonschema locket==1.0.0 # via partd -markdown-it-py==3.0.0 - # via rich markupsafe==2.1.5 # via jinja2 -mdurl==0.1.2 - # via markdown-it-py -mmh3==4.1.0 - # via feast (setup.py) -mypy==1.10.0 +mmh3==5.0.0 +mypy==1.11.2 # via sqlalchemy mypy-extensions==1.0.0 # via mypy -mypy-protobuf==3.6.0 - # via feast (setup.py) numpy==1.26.4 # via - # feast (setup.py) # dask # pandas # pyarrow -orjson==3.10.3 - # via fastapi -packaging==24.0 +packaging==24.1 # via # dask # gunicorn pandas==2.2.2 # via - # feast (setup.py) # dask # dask-expr partd==1.4.2 # via dask prometheus-client==0.20.0 - # via feast (setup.py) -protobuf==4.25.3 - # via - # feast (setup.py) - # mypy-protobuf +protobuf==4.25.5 psutil==6.0.0 - # via feast (setup.py) -pyarrow==16.0.0 - # via - # feast (setup.py) - # dask-expr -pydantic==2.7.1 - # via - # feast (setup.py) - # fastapi -pydantic-core==2.18.2 +pyarrow==17.0.0 + # via dask-expr +pydantic==2.9.2 + # via fastapi +pydantic-core==2.23.4 # via pydantic pygments==2.18.0 - # via - # feast (setup.py) - # rich +pyjwt==2.9.0 python-dateutil==2.9.0.post0 # via pandas python-dotenv==1.0.1 # via uvicorn -python-multipart==0.0.9 - # via fastapi -pytz==2024.1 +pytz==2024.2 # via pandas -pyyaml==6.0.1 +pyyaml==6.0.2 # via - # feast (setup.py) # dask # uvicorn referencing==0.35.1 # via # jsonschema # jsonschema-specifications -requests==2.31.0 - # via feast (setup.py) -rich==13.7.1 - # via typer -rpds-py==0.18.1 +requests==2.32.3 +rpds-py==0.20.0 # via # jsonschema # referencing -shellingham==1.5.4 - # via typer six==1.16.0 # via python-dateutil sniffio==1.3.1 - # via - # anyio - # httpx -sqlalchemy[mypy]==2.0.30 - # via feast (setup.py) -starlette==0.37.2 + # via anyio +sqlalchemy[mypy]==2.0.35 +starlette==0.38.5 # via fastapi tabulate==0.9.0 - # via feast (setup.py) -tenacity==8.3.0 - # via feast (setup.py) +tenacity==8.5.0 toml==0.10.2 - # via feast (setup.py) tomli==2.0.1 # via mypy toolz==0.12.1 # via # dask # partd -tqdm==4.66.4 - # via feast (setup.py) -typeguard==4.2.1 - # via feast (setup.py) -typer==0.12.3 - # via fastapi-cli -types-protobuf==5.26.0.20240422 - # via mypy-protobuf -typing-extensions==4.11.0 +tqdm==4.66.5 +typeguard==4.3.0 +typing-extensions==4.12.2 # via # anyio # fastapi @@ -204,24 +133,17 @@ typing-extensions==4.11.0 # sqlalchemy # starlette # typeguard - # typer # uvicorn tzdata==2024.1 # via pandas -ujson==5.9.0 - # via fastapi -urllib3==2.2.1 +urllib3==2.2.3 # via requests -uvicorn[standard]==0.29.0 - # via - # feast (setup.py) - # fastapi - # fastapi-cli -uvloop==0.19.0 +uvicorn[standard]==0.30.6 +uvloop==0.20.0 # via uvicorn -watchfiles==0.21.0 +watchfiles==0.24.0 # via uvicorn -websockets==12.0 +websockets==13.0.1 # via uvicorn -zipp==3.19.2 +zipp==3.20.2 # via importlib-metadata diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index 1fd510d104..08b8757b95 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -15,9 +15,11 @@ import multiprocessing import os import random +import tempfile from datetime import timedelta from multiprocessing import Process from sys import platform +from textwrap import dedent from typing import Any, Dict, List, Tuple, no_type_check from unittest import mock @@ -29,12 +31,12 @@ from feast.feature_store import FeatureStore # noqa: E402 from feast.utils import _utc_now 
from feast.wait import wait_retry_backoff # noqa: E402 -from tests.data.data_creator import ( # noqa: E402 - create_basic_driver_dataset, +from tests.data.data_creator import ( + create_basic_driver_dataset, # noqa: E402 create_document_dataset, ) -from tests.integration.feature_repos.integration_test_repo_config import ( - IntegrationTestRepoConfig, # noqa: E402 +from tests.integration.feature_repos.integration_test_repo_config import ( # noqa: E402 + IntegrationTestRepoConfig, ) from tests.integration.feature_repos.repo_configuration import ( # noqa: E402 AVAILABLE_OFFLINE_STORES, @@ -46,14 +48,15 @@ construct_universal_feature_views, construct_universal_test_data, ) -from tests.integration.feature_repos.universal.data_sources.file import ( - FileDataSourceCreator, # noqa: E402 +from tests.integration.feature_repos.universal.data_sources.file import ( # noqa: E402 + FileDataSourceCreator, ) from tests.integration.feature_repos.universal.entities import ( # noqa: E402 customer, driver, location, ) +from tests.utils.auth_permissions_util import default_store from tests.utils.http_server import check_port_open, free_port # noqa: E402 logger = logging.getLogger(__name__) @@ -194,6 +197,26 @@ def environment(request, worker_id): e.teardown() +@pytest.fixture +def vectordb_environment(request, worker_id): + e = construct_test_environment( + request.param, + worker_id=worker_id, + fixture_request=request, + entity_key_serialization_version=3, + ) + + e.setup() + + if hasattr(e.data_source_creator, "mock_environ"): + with mock.patch.dict(os.environ, e.data_source_creator.mock_environ): + yield e + else: + yield e + + e.teardown() + + _config_cache: Any = {} @@ -276,7 +299,11 @@ def pytest_generate_tests(metafunc: pytest.Metafunc): c = IntegrationTestRepoConfig(**config) if c not in _config_cache: - _config_cache[c] = c + marks = [ + pytest.mark.xdist_group(name=m) + for m in c.offline_store_creator.xdist_groups() + ] + _config_cache[c] = pytest.param(c, marks=marks) configs.append(_config_cache[c]) else: @@ -406,3 +433,79 @@ def fake_document_data(environment: Environment) -> Tuple[pd.DataFrame, DataSour environment.feature_store.project, ) return df, data_source + + +@pytest.fixture +def temp_dir(): + with tempfile.TemporaryDirectory() as temp_dir: + print(f"Created {temp_dir}") + yield temp_dir + + +@pytest.fixture +def server_port(): + return free_port() + + +@pytest.fixture +def feature_store(temp_dir, auth_config, applied_permissions): + print(f"Creating store at {temp_dir}") + return default_store(str(temp_dir), auth_config, applied_permissions) + + +@pytest.fixture(scope="module") +def all_markers_from_module(request): + markers = set() + for item in request.session.items: + for marker in item.iter_markers(): + markers.add(marker.name) + + return markers + + +@pytest.fixture(scope="module") +def is_integration_test(all_markers_from_module): + return "integration" in all_markers_from_module + + +@pytest.fixture( + scope="module", + params=[ + dedent( + """ + auth: + type: no_auth + """ + ), + dedent( + """ + auth: + type: kubernetes + """ + ), + dedent( + """ + auth: + type: oidc + client_id: feast-integration-client + client_secret: feast-integration-client-secret + username: reader_writer + password: password + auth_discovery_url: KEYCLOAK_URL_PLACE_HOLDER/realms/master/.well-known/openid-configuration + """ + ), + ], +) +def auth_config(request, is_integration_test): + auth_configuration = request.param + + if is_integration_test: + if "kubernetes" in auth_configuration: + pytest.skip( 
+ "skipping integration tests for kubernetes platform, unit tests are covering this functionality." + ) + elif "oidc" in auth_configuration: + keycloak_url = request.getfixturevalue("start_keycloak_server") + return auth_configuration.replace("KEYCLOAK_URL_PLACE_HOLDER", keycloak_url) + + return auth_configuration diff --git a/sdk/python/tests/data/data_creator.py b/sdk/python/tests/data/data_creator.py index 15d09c5a40..5d6cffeb9d 100644 --- a/sdk/python/tests/data/data_creator.py +++ b/sdk/python/tests/data/data_creator.py @@ -1,8 +1,8 @@ -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from typing import Dict, List, Optional import pandas as pd -from pytz import timezone, utc +from zoneinfo import ZoneInfo from feast.types import FeastType, Float32, Int32, Int64, String from feast.utils import _utc_now @@ -27,11 +27,11 @@ def create_basic_driver_dataset( ts - timedelta(hours=3), # Use different time zones to test tz-naive -> tz-aware conversion (ts - timedelta(hours=4)) - .replace(tzinfo=utc) - .astimezone(tz=timezone("Europe/Berlin")), + .replace(tzinfo=timezone.utc) + .astimezone(tz=ZoneInfo("Europe/Berlin")), (ts - timedelta(hours=1)) - .replace(tzinfo=utc) - .astimezone(tz=timezone("US/Pacific")), + .replace(tzinfo=timezone.utc) + .astimezone(tz=ZoneInfo("US/Pacific")), ], "created_ts": [ts, ts, ts, ts, ts], } diff --git a/sdk/python/tests/example_repos/example_feature_repo_with_project_1.py b/sdk/python/tests/example_repos/example_feature_repo_with_project_1.py new file mode 100644 index 0000000000..ad04d7ae66 --- /dev/null +++ b/sdk/python/tests/example_repos/example_feature_repo_with_project_1.py @@ -0,0 +1,151 @@ +from datetime import timedelta + +import pandas as pd + +from feast import Entity, FeatureService, FeatureView, Field, FileSource, PushSource +from feast.on_demand_feature_view import on_demand_feature_view +from feast.project import Project +from feast.types import Array, Float32, Int64, String +from tests.integration.feature_repos.universal.feature_views import TAGS + +# Note that file source paths are not validated, so there doesn't actually need to be any data +# at the paths for these file sources. Since these paths are effectively fake, this example +# feature repo should not be used for historical retrieval. +project = Project( + name="test_universal_cli_with_project_4567", + description="test_universal_cli_with_project_4567 description", + tags={"application": "integration"}, + owner="test@test.com", +) + +driver_locations_source = FileSource( + path="data/driver_locations.parquet", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", +) + +customer_profile_source = FileSource( + name="customer_profile_source", + path="data/customer_profiles.parquet", + timestamp_field="event_timestamp", +) + +customer_driver_combined_source = FileSource( + path="data/customer_driver_combined.parquet", + timestamp_field="event_timestamp", +) + +driver_locations_push_source = PushSource( + name="driver_locations_push", + batch_source=driver_locations_source, +) + +rag_documents_source = FileSource( + name="rag_documents_source", + path="data/rag_documents.parquet", + timestamp_field="event_timestamp", +) + +driver = Entity( + name="driver", # The name is derived from this argument, not object name. + join_keys=["driver_id"], + description="driver id", + tags=TAGS, +) + +customer = Entity( + name="customer", # The name is derived from this argument, not object name. 
+ join_keys=["customer_id"], + tags=TAGS, +) + +item = Entity( + name="item_id", # The name is derived from this argument, not object name. + join_keys=["item_id"], +) + +driver_locations = FeatureView( + name="driver_locations", + entities=[driver], + ttl=timedelta(days=1), + schema=[ + Field(name="lat", dtype=Float32), + Field(name="lon", dtype=String), + Field(name="driver_id", dtype=Int64), + ], + online=True, + source=driver_locations_source, + tags={}, +) + +pushed_driver_locations = FeatureView( + name="pushed_driver_locations", + entities=[driver], + ttl=timedelta(days=1), + schema=[ + Field(name="driver_lat", dtype=Float32), + Field(name="driver_long", dtype=String), + Field(name="driver_id", dtype=Int64), + ], + online=True, + source=driver_locations_push_source, + tags={}, +) + +customer_profile = FeatureView( + name="customer_profile", + entities=[customer], + ttl=timedelta(days=1), + schema=[ + Field(name="avg_orders_day", dtype=Float32), + Field(name="name", dtype=String), + Field(name="age", dtype=Int64), + Field(name="customer_id", dtype=String), + ], + online=True, + source=customer_profile_source, + tags={}, +) + +customer_driver_combined = FeatureView( + name="customer_driver_combined", + entities=[customer, driver], + ttl=timedelta(days=1), + schema=[ + Field(name="trips", dtype=Int64), + Field(name="driver_id", dtype=Int64), + Field(name="customer_id", dtype=String), + ], + online=True, + source=customer_driver_combined_source, + tags={}, +) + +document_embeddings = FeatureView( + name="document_embeddings", + entities=[item], + schema=[ + Field(name="Embeddings", dtype=Array(Float32)), + Field(name="item_id", dtype=String), + ], + source=rag_documents_source, + ttl=timedelta(hours=24), +) + + +@on_demand_feature_view( + sources=[customer_profile], + schema=[Field(name="on_demand_age", dtype=Int64)], + mode="pandas", +) +def customer_profile_pandas_odfv(inputs: pd.DataFrame) -> pd.DataFrame: + outputs = pd.DataFrame() + outputs["on_demand_age"] = inputs["age"] + 1 + return outputs + + +all_drivers_feature_service = FeatureService( + name="driver_locations_service", + features=[driver_locations], + tags=TAGS, +) diff --git a/sdk/python/tests/integration/conftest.py b/sdk/python/tests/integration/conftest.py new file mode 100644 index 0000000000..82f80b8992 --- /dev/null +++ b/sdk/python/tests/integration/conftest.py @@ -0,0 +1,49 @@ +import logging + +import pytest +from testcontainers.keycloak import KeycloakContainer +from testcontainers.minio import MinioContainer +from testcontainers.mysql import MySqlContainer +from testcontainers.postgres import PostgresContainer + +from tests.utils.auth_permissions_util import setup_permissions_on_keycloak + +logger = logging.getLogger(__name__) + + +@pytest.fixture(scope="session") +def start_keycloak_server(): + logger.info("Starting keycloak instance") + with KeycloakContainer("quay.io/keycloak/keycloak:24.0.1") as keycloak_container: + setup_permissions_on_keycloak(keycloak_container.get_client()) + yield keycloak_container.get_url() + + +@pytest.fixture(scope="session") +def mysql_server(): + container = MySqlContainer("mysql:latest") + container.start() + + yield container + + container.stop() + + +@pytest.fixture(scope="session") +def postgres_server(): + container = PostgresContainer() + container.start() + + yield container + + container.stop() + + +@pytest.fixture(scope="session") +def minio_server(): + container = MinioContainer() + container.start() + + yield container + + container.stop() diff --git 
a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index 48f5070f1e..73f99fb7c2 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -11,15 +11,26 @@ import pandas as pd import pytest -from feast import FeatureStore, FeatureView, OnDemandFeatureView, driver_test_data +from feast import ( + FeatureStore, + FeatureView, + OnDemandFeatureView, + StreamFeatureView, + driver_test_data, +) from feast.constants import FULL_REPO_CONFIGS_MODULE_ENV_NAME from feast.data_source import DataSource from feast.errors import FeastModuleImportError +from feast.feature_service import FeatureService from feast.infra.feature_servers.base_config import ( BaseFeatureServerConfig, FeatureLoggingConfig, ) from feast.infra.feature_servers.local_process.config import LocalFeatureServerConfig +from feast.permissions.action import AuthzedAction +from feast.permissions.auth_model import OidcClientAuthConfig +from feast.permissions.permission import Permission +from feast.permissions.policy import RoleBasedPolicy from feast.repo_config import RegistryConfig, RepoConfig from feast.utils import _utc_now from tests.integration.feature_repos.integration_test_repo_config import ( @@ -36,6 +47,7 @@ DuckDBDataSourceCreator, DuckDBDeltaDataSourceCreator, FileDataSourceCreator, + RemoteOfflineOidcAuthStoreDataSourceCreator, RemoteOfflineStoreDataSourceCreator, ) from tests.integration.feature_repos.universal.data_sources.redshift import ( @@ -98,12 +110,6 @@ "instance": os.getenv("BIGTABLE_INSTANCE_ID", "feast-integration-tests"), } -ROCKSET_CONFIG = { - "type": "rockset", - "api_key": os.getenv("ROCKSET_APIKEY", ""), - "host": os.getenv("ROCKSET_APISERVER", "api.rs2.usw2.rockset.com"), -} - IKV_CONFIG = { "type": "ikv", "account_id": os.getenv("IKV_ACCOUNT_ID", ""), @@ -124,6 +130,7 @@ ("local", DuckDBDataSourceCreator), ("local", DuckDBDeltaDataSourceCreator), ("local", RemoteOfflineStoreDataSourceCreator), + ("local", RemoteOfflineOidcAuthStoreDataSourceCreator), ] if os.getenv("FEAST_IS_LOCAL_TEST", "False") == "True": @@ -134,7 +141,6 @@ ] ) - AVAILABLE_ONLINE_STORES: Dict[ str, Tuple[Union[str, Dict[Any, Any]], Optional[Type[OnlineStoreCreator]]] ] = {"sqlite": ({"type": "sqlite"}, None)} @@ -154,17 +160,12 @@ AVAILABLE_ONLINE_STORES["datastore"] = ("datastore", None) AVAILABLE_ONLINE_STORES["snowflake"] = (SNOWFLAKE_CONFIG, None) AVAILABLE_ONLINE_STORES["bigtable"] = (BIGTABLE_CONFIG, None) - # Uncomment to test using private Rockset account. Currently not enabled as - # there is no dedicated Rockset instance for CI testing and there is no - # containerized version of Rockset. - # AVAILABLE_ONLINE_STORES["rockset"] = (ROCKSET_CONFIG, None) # Uncomment to test using private IKV account. Currently not enabled as # there is no dedicated IKV instance for CI testing and there is no # containerized version of IKV. 
# AVAILABLE_ONLINE_STORES["ikv"] = (IKV_CONFIG, None) - full_repo_configs_module = os.environ.get(FULL_REPO_CONFIGS_MODULE_ENV_NAME) if full_repo_configs_module is not None: try: @@ -200,7 +201,6 @@ for c in FULL_REPO_CONFIGS } - # Replace online stores with emulated online stores if we're running local integration tests if os.getenv("FEAST_LOCAL_ONLINE_CONTAINER", "False").lower() == "true": replacements: Dict[ @@ -432,6 +432,7 @@ def setup(self): feature_server=self.feature_server, entity_key_serialization_version=self.entity_key_serialization_version, ) + self.feature_store = FeatureStore(config=self.config) def teardown(self): @@ -441,6 +442,71 @@ def teardown(self): self.online_store_creator.teardown() +@dataclass +class OfflineServerPermissionsEnvironment(Environment): + def setup(self): + self.data_source_creator.setup(self.registry) + keycloak_url = self.data_source_creator.get_keycloak_url() + auth_config = OidcClientAuthConfig( + client_id="feast-integration-client", + type="oidc", + auth_discovery_url=f"{keycloak_url}/realms/master/.well-known" + f"/openid-configuration", + client_secret="feast-integration-client-secret", + username="reader_writer", + password="password", + ) + self.config = RepoConfig( + registry=self.registry, + project=self.project, + provider=self.provider, + offline_store=self.data_source_creator.create_offline_store_config(), + online_store=self.online_store_creator.create_online_store() + if self.online_store_creator + else self.online_store, + batch_engine=self.batch_engine, + repo_path=self.repo_dir_name, + feature_server=self.feature_server, + entity_key_serialization_version=self.entity_key_serialization_version, + auth=auth_config, + ) + + self.feature_store = FeatureStore(config=self.config) + permissions_list = [ + Permission( + name="offline_fv_perm", + types=FeatureView, + policy=RoleBasedPolicy(roles=["writer"]), + actions=[AuthzedAction.READ_OFFLINE, AuthzedAction.WRITE_OFFLINE], + ), + Permission( + name="offline_odfv_perm", + types=OnDemandFeatureView, + policy=RoleBasedPolicy(roles=["writer"]), + actions=[AuthzedAction.READ_OFFLINE, AuthzedAction.WRITE_OFFLINE], + ), + Permission( + name="offline_sfv_perm", + types=StreamFeatureView, + policy=RoleBasedPolicy(roles=["writer"]), + actions=[AuthzedAction.READ_OFFLINE, AuthzedAction.WRITE_OFFLINE], + ), + Permission( + name="offline_fs_perm", + types=FeatureService, + policy=RoleBasedPolicy(roles=["writer"]), + actions=[AuthzedAction.READ_OFFLINE, AuthzedAction.WRITE_OFFLINE], + ), + Permission( + name="offline_datasource_perm", + types=DataSource, + policy=RoleBasedPolicy(roles=["writer"]), + actions=[AuthzedAction.READ_OFFLINE, AuthzedAction.WRITE_OFFLINE], + ), + ] + self.feature_store.apply(permissions_list) + + def table_name_from_data_source(ds: DataSource) -> Optional[str]: if hasattr(ds, "table_ref"): return ds.table_ref # type: ignore @@ -491,23 +557,27 @@ def construct_test_environment( cache_ttl_seconds=1, ) - environment = Environment( - name=project, - provider=test_repo_config.provider, - data_source_creator=offline_creator, - python_feature_server=test_repo_config.python_feature_server, - worker_id=worker_id, - online_store_creator=online_creator, - fixture_request=fixture_request, - project=project, - registry=registry, - feature_server=feature_server, - entity_key_serialization_version=entity_key_serialization_version, - repo_dir_name=repo_dir_name, - batch_engine=test_repo_config.batch_engine, - online_store=test_repo_config.online_store, - ) + environment_params = { + "name": 
project, + "provider": test_repo_config.provider, + "data_source_creator": offline_creator, + "python_feature_server": test_repo_config.python_feature_server, + "worker_id": worker_id, + "online_store_creator": online_creator, + "fixture_request": fixture_request, + "project": project, + "registry": registry, + "feature_server": feature_server, + "entity_key_serialization_version": entity_key_serialization_version, + "repo_dir_name": repo_dir_name, + "batch_engine": test_repo_config.batch_engine, + "online_store": test_repo_config.online_store, + } + if not isinstance(offline_creator, RemoteOfflineOidcAuthStoreDataSourceCreator): + environment = Environment(**environment_params) + else: + environment = OfflineServerPermissionsEnvironment(**environment_params) return environment diff --git a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py index f1cab21429..aa46160358 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py @@ -60,3 +60,6 @@ def create_logged_features_destination(self) -> LoggingDestination: @abstractmethod def teardown(self): raise NotImplementedError + + def xdist_groups() -> list[str]: + return [] diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py index 5174e16046..d8b75aca24 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py @@ -5,6 +5,7 @@ import tempfile import uuid from pathlib import Path +from subprocess import Popen from typing import Any, Dict, List, Optional import pandas as pd @@ -32,6 +33,7 @@ from tests.integration.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) +from tests.utils.auth_permissions_util import include_auth_config from tests.utils.http_server import check_port_open, free_port # noqa: E402 logger = logging.getLogger(__name__) @@ -367,8 +369,91 @@ class RemoteOfflineStoreDataSourceCreator(FileDataSourceCreator): def __init__(self, project_name: str, *args, **kwargs): super().__init__(project_name) self.server_port: int = 0 + self.proc: Optional[Popen[bytes]] = None + + def setup(self, registry: RegistryConfig): + parent_offline_config = super().create_offline_store_config() + config = RepoConfig( + project=self.project_name, + provider="local", + offline_store=parent_offline_config, + registry=registry.path, + entity_key_serialization_version=2, + ) + + repo_path = Path(tempfile.mkdtemp()) + with open(repo_path / "feature_store.yaml", "w") as outfile: + yaml.dump(config.model_dump(by_alias=True), outfile) + repo_path = repo_path.resolve() + + self.server_port = free_port() + host = "0.0.0.0" + cmd = [ + "feast", + "-c" + str(repo_path), + "serve_offline", + "--host", + host, + "--port", + str(self.server_port), + ] + self.proc = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL + ) + + _time_out_sec: int = 60 + # Wait for server to start + wait_retry_backoff( + lambda: (None, check_port_open(host, self.server_port)), + timeout_secs=_time_out_sec, + timeout_msg=f"Unable to start the feast remote offline server in {_time_out_sec} seconds at port={self.server_port}", + ) + return "grpc+tcp://{}:{}".format(host, self.server_port) + + def 
create_offline_store_config(self) -> FeastConfigBaseModel:
+        self.remote_offline_store_config = RemoteOfflineStoreConfig(
+            type="remote", host="0.0.0.0", port=self.server_port
+        )
+        return self.remote_offline_store_config
+
+    def teardown(self):
+        super().teardown()
+        if self.proc is not None:
+            self.proc.kill()
+
+            # wait for the server to free the port
+            wait_retry_backoff(
+                lambda: (
+                    None,
+                    not check_port_open("localhost", self.server_port),
+                ),
+                timeout_secs=30,
+            )
+
+
+class RemoteOfflineOidcAuthStoreDataSourceCreator(FileDataSourceCreator):
+    def __init__(self, project_name: str, *args, **kwargs):
+        super().__init__(project_name)
+        if "fixture_request" in kwargs:
+            request = kwargs["fixture_request"]
+            self.keycloak_url = request.getfixturevalue("start_keycloak_server")
+        else:
+            raise RuntimeError(
+                "fixture_request object was not passed; it is required to inject the keycloak fixture dynamically."
+            )
+        auth_config_template = """
+auth:
+  type: oidc
+  client_id: feast-integration-client
+  auth_discovery_url: {keycloak_url}/realms/master/.well-known/openid-configuration
+"""
+        self.auth_config = auth_config_template.format(keycloak_url=self.keycloak_url)
+        self.server_port: int = 0
         self.proc = None
 
+    def xdist_groups() -> list[str]:
+        return ["keycloak"]
+
     def setup(self, registry: RegistryConfig):
         parent_offline_config = super().create_offline_store_config()
         config = RepoConfig(
@@ -384,6 +469,10 @@ def setup(self, registry: RegistryConfig):
         yaml.dump(config.model_dump(by_alias=True), outfile)
         repo_path = str(repo_path.resolve())
 
+        include_auth_config(
+            file_path=f"{repo_path}/feature_store.yaml", auth_config=self.auth_config
+        )
+
         self.server_port = free_port()
         host = "0.0.0.0"
         cmd = [
@@ -414,6 +503,9 @@ def create_offline_store_config(self) -> FeastConfigBaseModel:
         )
         return self.remote_offline_store_config
 
+    def get_keycloak_url(self):
+        return self.keycloak_url
+
     def teardown(self):
         super().teardown()
         if self.proc is not None:
diff --git a/sdk/python/tests/integration/materialization/test_snowflake.py b/sdk/python/tests/integration/materialization/test_snowflake.py
index f12191363b..dc9d684ab5 100644
--- a/sdk/python/tests/integration/materialization/test_snowflake.py
+++ b/sdk/python/tests/integration/materialization/test_snowflake.py
@@ -1,8 +1,7 @@
 import os
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 
 import pytest
-from pytz import utc
 
 from feast import Field
 from feast.entity import Entity
@@ -150,7 +149,7 @@ def test_snowflake_materialization_consistency_internal_with_lists(
     now = _utc_now()
     full_feature_names = True
-    start_date = (now - timedelta(hours=5)).replace(tzinfo=utc)
+    start_date = (now - timedelta(hours=5)).replace(tzinfo=timezone.utc)
     end_date = split_dt
     fs.materialize(
         feature_views=[driver_stats_fv.name],
@@ -165,7 +164,7 @@ def test_snowflake_materialization_consistency_internal_with_lists(
         "string": ["3"] * 2,
         "bytes": [b"3"] * 2,
         "bool": [False] * 2,
-        "datetime": [datetime(1981, 1, 1, tzinfo=utc)] * 2,
+        "datetime": [datetime(1981, 1, 1, tzinfo=timezone.utc)] * 2,
     }
     expected_value = [] if feature_is_empty_list else expected_values[feature_dtype]
@@ -222,9 +221,11 @@ def test_snowflake_materialization_entityless_fv():
         ttl=timedelta(weeks=52),
         source=ds,
     )
+    assert overall_stats_fv.entity_columns == []
 
     try:
         fs.apply([overall_stats_fv, driver])
+        assert overall_stats_fv.entity_columns != []
 
         # materialization is run in two steps and
         # we use timestamp from generated dataframe as a split point
@@ -234,7 +235,7 @@ def
test_snowflake_materialization_entityless_fv(): now = _utc_now() - start_date = (now - timedelta(hours=5)).replace(tzinfo=utc) + start_date = (now - timedelta(hours=5)).replace(tzinfo=timezone.utc) end_date = split_dt fs.materialize( feature_views=[overall_stats_fv.name], diff --git a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py index ecaa5f40db..97ad54251f 100644 --- a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py +++ b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py @@ -21,6 +21,7 @@ table_name_from_data_source, ) from tests.integration.feature_repos.universal.data_sources.file import ( + RemoteOfflineOidcAuthStoreDataSourceCreator, RemoteOfflineStoreDataSourceCreator, ) from tests.integration.feature_repos.universal.data_sources.snowflake import ( @@ -162,7 +163,11 @@ def test_historical_features_main( ) if not isinstance( - environment.data_source_creator, RemoteOfflineStoreDataSourceCreator + environment.data_source_creator, + ( + RemoteOfflineStoreDataSourceCreator, + RemoteOfflineOidcAuthStoreDataSourceCreator, + ), ): assert_feature_service_correctness( store, diff --git a/sdk/python/tests/integration/offline_store/test_validation.py b/sdk/python/tests/integration/offline_store/test_validation.py index 6f0496e8c8..52d83ab8d8 100644 --- a/sdk/python/tests/integration/offline_store/test_validation.py +++ b/sdk/python/tests/integration/offline_store/test_validation.py @@ -305,6 +305,23 @@ def test_e2e_validation_via_cli(environment, universal_data_sources): assert p.returncode == 0, p.stderr.decode() assert "Validation successful" in p.stdout.decode(), p.stderr.decode() + p = runner.run( + ["saved-datasets", "describe", saved_dataset.name], cwd=local_repo.repo_path + ) + assert p.returncode == 0, p.stderr.decode() + + p = runner.run( + ["validation-references", "describe", reference.name], + cwd=local_repo.repo_path, + ) + assert p.returncode == 0, p.stderr.decode() + + p = runner.run( + ["feature-services", "describe", feature_service.name], + cwd=local_repo.repo_path, + ) + assert p.returncode == 0, p.stderr.decode() + # make sure second validation will use cached profile shutil.rmtree(saved_dataset.storage.file_options.uri) diff --git a/sdk/python/tests/integration/online_store/test_python_feature_server.py b/sdk/python/tests/integration/online_store/test_python_feature_server.py index 1010e73178..d08e1104eb 100644 --- a/sdk/python/tests/integration/online_store/test_python_feature_server.py +++ b/sdk/python/tests/integration/online_store/test_python_feature_server.py @@ -4,6 +4,7 @@ import pytest from fastapi.testclient import TestClient +from feast.errors import PushSourceNotFoundException from feast.feast_object import FeastObject from feast.feature_server import get_app from feast.utils import _utc_now @@ -90,21 +91,24 @@ def test_push_source_does_not_exist(python_fs_client): initial_temp = _get_temperatures_from_feature_server( python_fs_client, location_ids=[1] )[0] - response = python_fs_client.post( - "/push", - data=json.dumps( - { - "push_source_name": "push_source_does_not_exist", - "df": { - "location_id": [1], - "temperature": [initial_temp * 100], - "event_timestamp": [str(_utc_now())], - "created": [str(_utc_now())], - }, - } - ), - ) - assert response.status_code == 422 + with pytest.raises( + PushSourceNotFoundException, + match="Unable to find push source 
'push_source_does_not_exist'", + ): + python_fs_client.post( + "/push", + data=json.dumps( + { + "push_source_name": "push_source_does_not_exist", + "df": { + "location_id": [1], + "temperature": [initial_temp * 100], + "event_timestamp": [str(_utc_now())], + "created": [str(_utc_now())], + }, + } + ), + ) def _get_temperatures_from_feature_server(client, location_ids: List[int]): diff --git a/sdk/python/tests/integration/online_store/test_remote_online_store.py b/sdk/python/tests/integration/online_store/test_remote_online_store.py index 21ac00583b..d8c92077db 100644 --- a/sdk/python/tests/integration/online_store/test_remote_online_store.py +++ b/sdk/python/tests/integration/online_store/test_remote_online_store.py @@ -1,28 +1,59 @@ import os -import subprocess import tempfile from textwrap import dedent import pytest +from feast import FeatureView, OnDemandFeatureView, StreamFeatureView from feast.feature_store import FeatureStore -from feast.utils import _utc_now -from feast.wait import wait_retry_backoff +from feast.permissions.action import AuthzedAction +from feast.permissions.permission import Permission +from feast.permissions.policy import RoleBasedPolicy +from tests.utils.auth_permissions_util import ( + PROJECT_NAME, + default_store, + start_feature_server, +) from tests.utils.cli_repo_creator import CliRunner -from tests.utils.http_server import check_port_open, free_port +from tests.utils.http_server import free_port @pytest.mark.integration -def test_remote_online_store_read(): +def test_remote_online_store_read(auth_config): with tempfile.TemporaryDirectory() as remote_server_tmp_dir, tempfile.TemporaryDirectory() as remote_client_tmp_dir: + permissions_list = [ + Permission( + name="online_list_fv_perm", + types=FeatureView, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.READ_ONLINE], + ), + Permission( + name="online_list_odfv_perm", + types=OnDemandFeatureView, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.READ_ONLINE], + ), + Permission( + name="online_list_sfv_perm", + types=StreamFeatureView, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.READ_ONLINE], + ), + ] server_store, server_url, registry_path = ( - _create_server_store_spin_feature_server(temp_dir=remote_server_tmp_dir) + _create_server_store_spin_feature_server( + temp_dir=remote_server_tmp_dir, + auth_config=auth_config, + permissions_list=permissions_list, + ) ) assert None not in (server_store, server_url, registry_path) client_store = _create_remote_client_feature_store( temp_dir=remote_client_tmp_dir, server_registry_path=str(registry_path), feature_server_url=server_url, + auth_config=auth_config, ) assert client_store is not None _assert_non_existing_entity_feature_views_entity( @@ -127,11 +158,13 @@ def _assert_client_server_online_stores_are_matching( assert online_features_from_client == online_features_from_server -def _create_server_store_spin_feature_server(temp_dir): +def _create_server_store_spin_feature_server( + temp_dir, auth_config: str, permissions_list +): + store = default_store(str(temp_dir), auth_config, permissions_list) feast_server_port = free_port() - store = _default_store(str(temp_dir), "REMOTE_ONLINE_SERVER_PROJECT") server_url = next( - _start_feature_server( + start_feature_server( repo_path=str(store.repo_path), server_port=feast_server_port ) ) @@ -139,24 +172,8 @@ def _create_server_store_spin_feature_server(temp_dir): return store, server_url, os.path.join(store.repo_path, "data", "registry.db") -def 
_default_store(temp_dir, project_name) -> FeatureStore: - runner = CliRunner() - result = runner.run(["init", project_name], cwd=temp_dir) - repo_path = os.path.join(temp_dir, project_name, "feature_repo") - assert result.returncode == 0 - - result = runner.run(["--chdir", repo_path, "apply"], cwd=temp_dir) - assert result.returncode == 0 - - fs = FeatureStore(repo_path=repo_path) - fs.materialize_incremental( - end_date=_utc_now(), feature_views=["driver_hourly_stats"] - ) - return fs - - def _create_remote_client_feature_store( - temp_dir, server_registry_path: str, feature_server_url: str + temp_dir, server_registry_path: str, feature_server_url: str, auth_config: str ) -> FeatureStore: project_name = "REMOTE_ONLINE_CLIENT_PROJECT" runner = CliRunner() @@ -167,23 +184,21 @@ def _create_remote_client_feature_store( repo_path=str(repo_path), registry_path=server_registry_path, feature_server_url=feature_server_url, + auth_config=auth_config, ) - result = runner.run(["--chdir", repo_path, "apply"], cwd=temp_dir) - assert result.returncode == 0 - return FeatureStore(repo_path=repo_path) def _overwrite_remote_client_feature_store_yaml( - repo_path: str, registry_path: str, feature_server_url: str + repo_path: str, registry_path: str, feature_server_url: str, auth_config: str ): repo_config = os.path.join(repo_path, "feature_store.yaml") with open(repo_config, "w") as repo_config: repo_config.write( dedent( f""" - project: REMOTE_ONLINE_CLIENT_PROJECT + project: {PROJECT_NAME} registry: {registry_path} provider: local online_store: @@ -192,57 +207,5 @@ def _overwrite_remote_client_feature_store_yaml( entity_key_serialization_version: 2 """ ) - ) - - -def _start_feature_server(repo_path: str, server_port: int, metrics: bool = False): - host = "0.0.0.0" - cmd = [ - "feast", - "-c" + repo_path, - "serve", - "--host", - host, - "--port", - str(server_port), - ] - feast_server_process = subprocess.Popen( - cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL - ) - _time_out_sec: int = 60 - # Wait for server to start - wait_retry_backoff( - lambda: (None, check_port_open(host, server_port)), - timeout_secs=_time_out_sec, - timeout_msg=f"Unable to start the feast server in {_time_out_sec} seconds for remote online store type, port={server_port}", - ) - - if metrics: - cmd.append("--metrics") - - # Check if metrics are enabled and Prometheus server is running - if metrics: - wait_retry_backoff( - lambda: (None, check_port_open("localhost", 8000)), - timeout_secs=_time_out_sec, - timeout_msg="Unable to start the Prometheus server in 60 seconds.", - ) - else: - assert not check_port_open( - "localhost", 8000 - ), "Prometheus server is running when it should be disabled." 
- - yield f"http://localhost:{server_port}" - - if feast_server_process is not None: - feast_server_process.kill() - - # wait server to free the port - wait_retry_backoff( - lambda: ( - None, - not check_port_open("localhost", server_port), - ), - timeout_msg=f"Unable to stop the feast server in {_time_out_sec} seconds for remote online store type, port={server_port}", - timeout_secs=_time_out_sec, + + auth_config ) diff --git a/sdk/python/tests/integration/online_store/test_universal_online.py b/sdk/python/tests/integration/online_store/test_universal_online.py index 2ffe869ef5..1a0803acff 100644 --- a/sdk/python/tests/integration/online_store/test_universal_online.py +++ b/sdk/python/tests/integration/online_store/test_universal_online.py @@ -163,7 +163,6 @@ def test_write_to_online_store_event_check(environment): fs.apply([fv1, e]) assert len(fs.list_all_feature_views(tags=TAGS)) == 1 assert len(fs.list_feature_views(tags=TAGS)) == 1 - assert len(fs.list_batch_feature_views(tags=TAGS)) == 1 # data to ingest into Online Store (recent) data = { @@ -421,7 +420,7 @@ def setup_feature_store_universal_feature_views( feature_views = construct_universal_feature_views(data_sources) fs.apply([driver(), feature_views.driver, feature_views.global_fv]) - assert len(fs.list_batch_feature_views(TAGS)) == 2 + assert len(fs.list_all_feature_views(TAGS)) == 2 data = { "driver_id": [1, 2], @@ -518,7 +517,7 @@ def test_online_list_retrieval(environment, universal_data_sources): environment, universal_data_sources ) - assert len(fs.list_batch_feature_views(tags=TAGS)) == 2 + assert len(fs.list_all_feature_views(tags=TAGS)) == 2 @pytest.mark.integration @@ -847,8 +846,8 @@ def assert_feature_service_entity_mapping_correctness( @pytest.mark.integration @pytest.mark.universal_online_stores(only=["pgvector", "elasticsearch"]) -def test_retrieve_online_documents(environment, fake_document_data): - fs = environment.feature_store +def test_retrieve_online_documents(vectordb_environment, fake_document_data): + fs = vectordb_environment.feature_store df, data_source = fake_document_data item_embeddings_feature_view = create_item_embeddings_feature_view(data_source) fs.apply([item_embeddings_feature_view, item()]) @@ -862,6 +861,9 @@ def test_retrieve_online_documents(environment, fake_document_data): ).to_dict() assert len(documents["embedding_float"]) == 2 + # assert returned the entity_id + assert len(documents["item_id"]) == 2 + documents = fs.retrieve_online_documents( feature="item_embeddings:embedding_float", query=[1.0, 2.0], diff --git a/sdk/python/tests/integration/registration/test_universal_cli.py b/sdk/python/tests/integration/registration/test_universal_cli.py index fc90108d78..735f71407f 100644 --- a/sdk/python/tests/integration/registration/test_universal_cli.py +++ b/sdk/python/tests/integration/registration/test_universal_cli.py @@ -52,7 +52,127 @@ def test_universal_cli(): for key, value in registry_dict.items() } + # project, entity & feature view list commands should succeed + result = runner.run(["projects", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["entities", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["feature-views", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["feature-services", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["data-sources", "list"], 
cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["permissions", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["validation-references", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["stream-feature-views", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["saved-datasets", "list"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + + # entity & feature view describe commands should succeed when objects exist + result = runner.run(["projects", "describe", project], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["projects", "current_project"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run(["entities", "describe", "driver"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run( + ["feature-views", "describe", "driver_locations"], cwd=repo_path + ) + assertpy.assert_that(result.returncode).is_equal_to(0) + result = runner.run( + ["feature-services", "describe", "driver_locations_service"], + cwd=repo_path, + ) + assertpy.assert_that(result.returncode).is_equal_to(0) + assertpy.assert_that(fs.list_feature_views()).is_length(5) + result = runner.run( + ["data-sources", "describe", "customer_profile_source"], + cwd=repo_path, + ) + assertpy.assert_that(result.returncode).is_equal_to(0) + assertpy.assert_that(fs.list_data_sources()).is_length(5) + assertpy.assert_that(fs.list_projects()).is_length(1) + + # entity & feature view describe commands should fail when objects don't exist + result = runner.run(["projects", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["entities", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["feature-views", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["feature-services", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["data-sources", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + result = runner.run(["permissions", "describe", "foo"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(1) + + # Doing another apply should be a no op, and should not cause errors + result = runner.run(["apply"], cwd=repo_path) + assertpy.assert_that(result.returncode).is_equal_to(0) + basic_rw_test( + FeatureStore(repo_path=str(repo_path), config=None), + view_name="driver_locations", + ) + + # Confirm that registry contents have not changed. 
+            registry_dict = fs.registry.to_dict(project=project)
+            assertpy.assert_that(registry_specs).is_equal_to(
+                {
+                    key: [fco["spec"] if "spec" in fco else fco for fco in value]
+                    for key, value in registry_dict.items()
+                }
+            )
+
+            result = runner.run(["teardown"], cwd=repo_path)
+            assertpy.assert_that(result.returncode).is_equal_to(0)
         finally:
             runner.run(["teardown"], cwd=repo_path)
+
+
+@pytest.mark.integration
+def test_universal_cli_with_project():
+    project = "test_universal_cli_with_project_4567"
+    runner = CliRunner()
+
+    with tempfile.TemporaryDirectory() as repo_dir_name:
+        try:
+            repo_path = Path(repo_dir_name)
+            feature_store_yaml = make_feature_store_yaml(
+                project,
+                repo_path,
+                FileDataSourceCreator("project"),
+                "local",
+                {"type": "sqlite"},
+            )
+
+            repo_config = repo_path / "feature_store.yaml"
+
+            repo_config.write_text(dedent(feature_store_yaml))
+
+            repo_example = repo_path / "example.py"
+            repo_example.write_text(
+                get_example_repo("example_feature_repo_with_project_1.py")
+            )
+            result = runner.run(["apply"], cwd=repo_path)
+            assertpy.assert_that(result.returncode).is_equal_to(0)
+
+            # Store registry contents, to be compared later.
+            fs = FeatureStore(repo_path=str(repo_path))
+            registry_dict = fs.registry.to_dict(project=project)
+            # Save only the specs, not the metadata.
+            registry_specs = {
+                key: [fco["spec"] if "spec" in fco else fco for fco in value]
+                for key, value in registry_dict.items()
+            }
+            # entity & feature view list commands should succeed
+            result = runner.run(["projects", "list"], cwd=repo_path)
+            assertpy.assert_that(result.returncode).is_equal_to(0)
             result = runner.run(["entities", "list"], cwd=repo_path)
             assertpy.assert_that(result.returncode).is_equal_to(0)
             result = runner.run(["feature-views", "list"], cwd=repo_path)
@@ -61,8 +181,14 @@ def test_universal_cli():
             assertpy.assert_that(result.returncode).is_equal_to(0)
             result = runner.run(["data-sources", "list"], cwd=repo_path)
             assertpy.assert_that(result.returncode).is_equal_to(0)
+            result = runner.run(["permissions", "list"], cwd=repo_path)
+            assertpy.assert_that(result.returncode).is_equal_to(0)
 
             # entity & feature view describe commands should succeed when objects exist
+            result = runner.run(["projects", "describe", project], cwd=repo_path)
+            assertpy.assert_that(result.returncode).is_equal_to(0)
+            result = runner.run(["projects", "current_project"], cwd=repo_path)
+            assertpy.assert_that(result.returncode).is_equal_to(0)
             result = runner.run(["entities", "describe", "driver"], cwd=repo_path)
             assertpy.assert_that(result.returncode).is_equal_to(0)
             result = runner.run(
@@ -82,7 +208,16 @@ def test_universal_cli():
             assertpy.assert_that(result.returncode).is_equal_to(0)
             assertpy.assert_that(fs.list_data_sources()).is_length(5)
 
+            projects_list = fs.list_projects()
+            assertpy.assert_that(projects_list).is_length(1)
+            assertpy.assert_that(projects_list[0].name).is_equal_to(project)
+            assertpy.assert_that(projects_list[0].description).is_equal_to(
+                "test_universal_cli_with_project_4567 description"
+            )
+
             # entity & feature view describe commands should fail when objects don't exist
+            result = runner.run(["projects", "describe", "foo"], cwd=repo_path)
+            assertpy.assert_that(result.returncode).is_equal_to(1)
             result = runner.run(["entities", "describe", "foo"], cwd=repo_path)
             assertpy.assert_that(result.returncode).is_equal_to(1)
             result = runner.run(["feature-views", "describe", "foo"], cwd=repo_path)
@@
-91,6 +226,18 @@
             assertpy.assert_that(result.returncode).is_equal_to(1)
             result = runner.run(["data-sources", "describe", "foo"], cwd=repo_path)
             assertpy.assert_that(result.returncode).is_equal_to(1)
+            result = runner.run(["permissions", "describe", "foo"], cwd=repo_path)
+            assertpy.assert_that(result.returncode).is_equal_to(1)
+            result = runner.run(
+                ["validation-references", "describe", "foo"], cwd=repo_path
+            )
+            assertpy.assert_that(result.returncode).is_equal_to(1)
+            result = runner.run(
+                ["stream-feature-views", "describe", "foo"], cwd=repo_path
+            )
+            assertpy.assert_that(result.returncode).is_equal_to(1)
+            result = runner.run(["saved-datasets", "describe", "foo"], cwd=repo_path)
+            assertpy.assert_that(result.returncode).is_equal_to(1)
 
             # Doing another apply should be a no op, and should not cause errors
             result = runner.run(["apply"], cwd=repo_path)
@@ -141,6 +288,12 @@ def test_odfv_apply() -> None:
             assertpy.assert_that(result.returncode).is_equal_to(0)
 
             # entity & feature view list commands should succeed
+            result = runner.run(["projects", "describe", project], cwd=repo_path)
+            assertpy.assert_that(result.returncode).is_equal_to(0)
+            result = runner.run(["projects", "current_project"], cwd=repo_path)
+            assertpy.assert_that(result.returncode).is_equal_to(0)
+            result = runner.run(["projects", "list"], cwd=repo_path)
+            assertpy.assert_that(result.returncode).is_equal_to(0)
             result = runner.run(["entities", "list"], cwd=repo_path)
             assertpy.assert_that(result.returncode).is_equal_to(0)
             result = runner.run(["on-demand-feature-views", "list"], cwd=repo_path)
@@ -172,7 +325,14 @@ def test_nullable_online_store(test_nullable_online_store) -> None:
             repo_example = repo_path / "example.py"
             repo_example.write_text(get_example_repo("empty_feature_repo.py"))
+
             result = runner.run(["apply"], cwd=repo_path)
             assertpy.assert_that(result.returncode).is_equal_to(0)
+            result = runner.run(["projects", "describe", project], cwd=repo_path)
+            assertpy.assert_that(result.returncode).is_equal_to(0)
+            result = runner.run(["projects", "current_project"], cwd=repo_path)
+            assertpy.assert_that(result.returncode).is_equal_to(0)
+            result = runner.run(["projects", "list"], cwd=repo_path)
+            assertpy.assert_that(result.returncode).is_equal_to(0)
         finally:
             runner.run(["teardown"], cwd=repo_path)
diff --git a/sdk/python/tests/integration/registration/test_universal_registry.py b/sdk/python/tests/integration/registration/test_universal_registry.py
index b0738c8419..0bed89ca16 100644
--- a/sdk/python/tests/integration/registration/test_universal_registry.py
+++ b/sdk/python/tests/integration/registration/test_universal_registry.py
@@ -13,8 +13,10 @@
 # limitations under the License.
import logging import os +import random +import string import time -from datetime import timedelta +from datetime import timedelta, timezone from tempfile import mkstemp from unittest import mock @@ -22,11 +24,8 @@ import pandas as pd import pytest from pytest_lazyfixture import lazy_fixture -from pytz import utc -from testcontainers.core.container import DockerContainer -from testcontainers.core.waiting_utils import wait_for_logs -from testcontainers.minio import MinioContainer from testcontainers.mysql import MySqlContainer +from testcontainers.postgres import PostgresContainer from feast import FeatureService, FileSource, RequestSource from feast.data_format import AvroFormat, ParquetFormat @@ -37,10 +36,15 @@ from feast.field import Field from feast.infra.infra_object import Infra from feast.infra.online_stores.sqlite import SqliteTable +from feast.infra.registry.base_registry import BaseRegistry from feast.infra.registry.registry import Registry from feast.infra.registry.remote import RemoteRegistry, RemoteRegistryConfig -from feast.infra.registry.sql import SqlRegistry +from feast.infra.registry.sql import SqlRegistry, SqlRegistryConfig from feast.on_demand_feature_view import on_demand_feature_view +from feast.permissions.action import AuthzedAction +from feast.permissions.permission import Permission +from feast.permissions.policy import RoleBasedPolicy +from feast.project import Project from feast.protos.feast.registry import RegistryServer_pb2, RegistryServer_pb2_grpc from feast.registry_server import RegistryServer from feast.repo_config import RegistryConfig @@ -89,17 +93,15 @@ def s3_registry() -> Registry: return Registry("project", registry_config, None) -@pytest.fixture(scope="session") -def minio_registry() -> Registry: - bucket_name = "test-bucket" +@pytest.fixture(scope="function") +def minio_registry(minio_server): + bucket_name = "".join(random.choices(string.ascii_lowercase, k=10)) - container = MinioContainer() - container.start() - client = container.get_client() + client = minio_server.get_client() client.make_bucket(bucket_name) - container_host = container.get_container_host_ip() - exposed_port = container.get_exposed_port(container.port) + container_host = minio_server.get_container_host_ip() + exposed_port = minio_server.get_exposed_port(minio_server.port) registry_config = RegistryConfig( path=f"s3://{bucket_name}/registry.db", cache_ttl_seconds=600 @@ -107,131 +109,168 @@ def minio_registry() -> Registry: mock_environ = { "FEAST_S3_ENDPOINT_URL": f"http://{container_host}:{exposed_port}", - "AWS_ACCESS_KEY_ID": container.access_key, - "AWS_SECRET_ACCESS_KEY": container.secret_key, + "AWS_ACCESS_KEY_ID": minio_server.access_key, + "AWS_SECRET_ACCESS_KEY": minio_server.secret_key, "AWS_SESSION_TOKEN": "", } with mock.patch.dict(os.environ, mock_environ): yield Registry("project", registry_config, None) - container.stop() - -POSTGRES_USER = "test" -POSTGRES_PASSWORD = "test" -POSTGRES_DB = "test" +POSTGRES_READONLY_USER = "read_only_user" +POSTGRES_READONLY_PASSWORD = "readonly_password" logger = logging.getLogger(__name__) -@pytest.fixture(scope="function") -def pg_registry(): - container = ( - DockerContainer("postgres:latest") - .with_exposed_ports(5432) - .with_env("POSTGRES_USER", POSTGRES_USER) - .with_env("POSTGRES_PASSWORD", POSTGRES_PASSWORD) - .with_env("POSTGRES_DB", POSTGRES_DB) - ) +def add_pg_read_only_user( + container_host, container_port, db_name, postgres_user, postgres_password +): + # Connect to PostgreSQL as an admin + import psycopg + + 
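# Note: psycopg (v3) accepts either a libpq-style DSN of space-separated + # key=value pairs, as built below, or the equivalent keyword form, e.g. + # (illustrative values only): + #   psycopg.connect(dbname="test", user="test", password="test", host="localhost", port=5432) + 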
conn_string = f"dbname={db_name} user={postgres_user} password={postgres_password} host={container_host} port={container_port}" + + with psycopg.connect(conn_string) as conn: + user_exists = conn.execute( + f"SELECT 1 FROM pg_catalog.pg_user WHERE usename = '{POSTGRES_READONLY_USER}'" + ).fetchone() + if not user_exists: + conn.execute( + f"CREATE USER {POSTGRES_READONLY_USER} WITH PASSWORD '{POSTGRES_READONLY_PASSWORD}';" + ) - container.start() + conn.execute( + f"REVOKE ALL PRIVILEGES ON DATABASE {db_name} FROM {POSTGRES_READONLY_USER};" + ) + conn.execute( + f"GRANT CONNECT ON DATABASE {db_name} TO {POSTGRES_READONLY_USER};" + ) + conn.execute( + f"GRANT SELECT ON ALL TABLES IN SCHEMA public TO {POSTGRES_READONLY_USER};" + ) + conn.execute( + f"ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO {POSTGRES_READONLY_USER};" + ) - registry_config = _given_registry_config_for_pg_sql(container) - yield SqlRegistry(registry_config, "project", None) +@pytest.fixture(scope="function") +def pg_registry(postgres_server): + db_name = "".join(random.choices(string.ascii_lowercase, k=10)) - container.stop() + _create_pg_database(postgres_server, db_name) + container_port = postgres_server.get_exposed_port(5432) + container_host = postgres_server.get_container_host_ip() -@pytest.fixture(scope="function") -def pg_registry_async(): - container = ( - DockerContainer("postgres:latest") - .with_exposed_ports(5432) - .with_env("POSTGRES_USER", POSTGRES_USER) - .with_env("POSTGRES_PASSWORD", POSTGRES_PASSWORD) - .with_env("POSTGRES_DB", POSTGRES_DB) + add_pg_read_only_user( + container_host, + container_port, + db_name, + postgres_server.username, + postgres_server.password, ) - container.start() - - registry_config = _given_registry_config_for_pg_sql(container, 2, "thread") + registry_config = SqlRegistryConfig( + registry_type="sql", + cache_ttl_seconds=2, + cache_mode="sync", + # The `path` must include `+psycopg` in order for `sqlalchemy.create_engine()` + # to understand that we are using psycopg3. 
+ path=f"postgresql+psycopg://{postgres_server.username}:{postgres_server.password}@{container_host}:{container_port}/{db_name}", + read_path=f"postgresql+psycopg://{POSTGRES_READONLY_USER}:{POSTGRES_READONLY_PASSWORD}@{container_host}:{container_port}/{db_name}", + sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, + thread_pool_executor_worker_count=0, + purge_feast_metadata=False, + ) yield SqlRegistry(registry_config, "project", None) - container.stop() +@pytest.fixture(scope="function") +def pg_registry_async(postgres_server): + db_name = "".join(random.choices(string.ascii_lowercase, k=10)) -def _given_registry_config_for_pg_sql( - container, cache_ttl_seconds=2, cache_mode="sync" -): - log_string_to_wait_for = "database system is ready to accept connections" - waited = wait_for_logs( - container=container, - predicate=log_string_to_wait_for, - timeout=30, - interval=10, - ) - logger.info("Waited for %s seconds until postgres container was up", waited) - container_port = container.get_exposed_port(5432) - container_host = container.get_container_host_ip() - - return RegistryConfig( + _create_pg_database(postgres_server, db_name) + + container_port = postgres_server.get_exposed_port(5432) + container_host = postgres_server.get_container_host_ip() + + registry_config = SqlRegistryConfig( registry_type="sql", - cache_ttl_seconds=cache_ttl_seconds, - cache_mode=cache_mode, + cache_ttl_seconds=2, + cache_mode="thread", # The `path` must include `+psycopg` in order for `sqlalchemy.create_engine()` # to understand that we are using psycopg3. - path=f"postgresql+psycopg://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{container_host}:{container_port}/{POSTGRES_DB}", + path=f"postgresql+psycopg://{postgres_server.username}:{postgres_server.password}@{container_host}:{container_port}/{db_name}", sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, + thread_pool_executor_worker_count=3, + purge_feast_metadata=False, ) + yield SqlRegistry(registry_config, "project", None) -@pytest.fixture(scope="function") -def mysql_registry(): - container = MySqlContainer("mysql:latest") - container.start() - registry_config = _given_registry_config_for_mysql(container) +def _create_mysql_database(container: MySqlContainer, database: str): + container.exec( + f"mysql -uroot -p{container.root_password} -e 'CREATE DATABASE {database}; GRANT ALL PRIVILEGES ON {database}.* TO {container.username};'" + ) - yield SqlRegistry(registry_config, "project", None) - container.stop() +def _create_pg_database(container: PostgresContainer, database: str): + container.exec(f"psql -U {container.username} -c 'CREATE DATABASE {database}'") @pytest.fixture(scope="function") -def mysql_registry_async(): - container = MySqlContainer("mysql:latest") - container.start() +def mysql_registry(mysql_server): + db_name = "".join(random.choices(string.ascii_lowercase, k=10)) + + _create_mysql_database(mysql_server, db_name) + + connection_url = ( + "/".join(mysql_server.get_connection_url().split("/")[:-1]) + f"/{db_name}" + ) - registry_config = _given_registry_config_for_mysql(container, 2, "thread") + registry_config = SqlRegistryConfig( + registry_type="sql", + path=connection_url, + cache_ttl_seconds=2, + cache_mode="sync", + sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, + thread_pool_executor_worker_count=0, + purge_feast_metadata=False, + ) yield SqlRegistry(registry_config, "project", None) - container.stop() +@pytest.fixture(scope="function") +def mysql_registry_async(mysql_server): + db_name = 
"".join(random.choices(string.ascii_lowercase, k=10)) -def _given_registry_config_for_mysql(container, cache_ttl_seconds=2, cache_mode="sync"): - import sqlalchemy + _create_mysql_database(mysql_server, db_name) - engine = sqlalchemy.create_engine( - container.get_connection_url(), pool_pre_ping=True + connection_url = ( + "/".join(mysql_server.get_connection_url().split("/")[:-1]) + f"/{db_name}" ) - engine.connect() - return RegistryConfig( + registry_config = SqlRegistryConfig( registry_type="sql", - path=container.get_connection_url(), - cache_ttl_seconds=cache_ttl_seconds, - cache_mode=cache_mode, + path=connection_url, + cache_ttl_seconds=2, + cache_mode="thread", sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, + thread_pool_executor_worker_count=3, + purge_feast_metadata=False, ) + yield SqlRegistry(registry_config, "project", None) + @pytest.fixture(scope="session") def sqlite_registry(): - registry_config = RegistryConfig( + registry_config = SqlRegistryConfig( registry_type="sql", path="sqlite://", ) @@ -248,7 +287,11 @@ def __init__(self, service, servicer): ) def unary_unary( - self, method: str, request_serializer=None, response_deserializer=None + self, + method: str, + request_serializer=None, + response_deserializer=None, + _registered_method=None, ): method_name = method.split("/")[-1] method_descriptor = self.service.methods_by_name[method_name] @@ -271,7 +314,9 @@ def mock_remote_registry(): proxied_registry = Registry("project", registry_config, None) registry = RemoteRegistry( - registry_config=RemoteRegistryConfig(path=""), project=None, repo_path=None + registry_config=RemoteRegistryConfig(path=""), + project=None, + repo_path=None, ) mock_channel = GrpcMockChannel( RegistryServer_pb2.DESCRIPTOR.services_by_name["RegistryServer"], @@ -316,11 +361,11 @@ def mock_remote_registry(): async_sql_fixtures = [ pytest.param( lazy_fixture("pg_registry_async"), - marks=pytest.mark.xdist_group(name="pg_registry_async"), + marks=pytest.mark.xdist_group(name="pg_registry"), ), pytest.param( lazy_fixture("mysql_registry_async"), - marks=pytest.mark.xdist_group(name="mysql_registry_async"), + marks=pytest.mark.xdist_group(name="mysql_registry"), ), ] @@ -343,9 +388,11 @@ def test_apply_entity_success(test_registry): project_uuid = project_metadata[0].project_uuid assert len(project_metadata[0].project_uuid) == 36 assert_project_uuid(project, project_uuid, test_registry) + assert_project(project, test_registry) entities = test_registry.list_entities(project, tags=entity.tags) assert_project_uuid(project, project_uuid, test_registry) + assert_project(project, test_registry) entity = entities[0] assert ( @@ -382,11 +429,12 @@ def test_apply_entity_success(test_registry): updated_entity.created_timestamp is not None and updated_entity.created_timestamp == entity.created_timestamp ) - test_registry.delete_entity("driver_car_id", project) assert_project_uuid(project, project_uuid, test_registry) + assert_project(project, test_registry) entities = test_registry.list_entities(project) assert_project_uuid(project, project_uuid, test_registry) + assert_project(project, test_registry) assert len(entities) == 0 test_registry.teardown() @@ -398,12 +446,20 @@ def assert_project_uuid(project, project_uuid, test_registry): assert project_metadata[0].project_uuid == project_uuid +def assert_project(project_name, test_registry, allow_cache=False): + project_obj = test_registry.list_projects(allow_cache=allow_cache) + assert len(project_obj) == 1 + assert project_obj[0].name == 
"project" + project_obj = test_registry.get_project(name=project_name, allow_cache=allow_cache) + assert project_obj.name == "project" + + @pytest.mark.integration @pytest.mark.parametrize( "test_registry", all_fixtures, ) -def test_apply_feature_view_success(test_registry): +def test_apply_feature_view_success(test_registry: BaseRegistry): # Create Feature Views batch_source = FileSource( file_format=ParquetFormat(), @@ -452,6 +508,8 @@ def test_apply_feature_view_success(test_registry): ) feature_view = test_registry.get_feature_view("my_feature_view_1", project) + any_feature_view = test_registry.get_any_feature_view("my_feature_view_1", project) + assert ( feature_view.name == "my_feature_view_1" and feature_view.features[0].name == "fs1_my_feature_1" @@ -463,6 +521,7 @@ def test_apply_feature_view_success(test_registry): and feature_view.features[3].name == "fs1_my_feature_4" and feature_view.features[3].dtype == Array(Bytes) and feature_view.entities[0] == "fs1_my_entity_1" + and feature_view == any_feature_view ) assert feature_view.ttl == timedelta(minutes=5) @@ -490,7 +549,7 @@ def test_apply_feature_view_success(test_registry): "test_registry", sql_fixtures, ) -def test_apply_on_demand_feature_view_success(test_registry): +def test_apply_on_demand_feature_view_success(test_registry: BaseRegistry): # Create Feature Views driver_stats = FileSource( name="driver_stats_source", @@ -533,6 +592,7 @@ def location_features_from_push(inputs: pd.DataFrame) -> pd.DataFrame: test_registry.get_user_metadata(project, location_features_from_push) # Register Feature View + test_registry.apply_feature_view(driver_daily_features_view, project) test_registry.apply_feature_view(location_features_from_push, project) assert not test_registry.get_user_metadata(project, location_features_from_push) @@ -551,13 +611,21 @@ def location_features_from_push(inputs: pd.DataFrame) -> pd.DataFrame: and feature_views[0].features[0].dtype == String ) + all_feature_views = test_registry.list_all_feature_views(project) + + assert len(all_feature_views) == 2 + feature_view = test_registry.get_on_demand_feature_view( "location_features_from_push", project ) + any_feature_view = test_registry.get_any_feature_view( + "location_features_from_push", project + ) assert ( feature_view.name == "location_features_from_push" and feature_view.features[0].name == "first_char" and feature_view.features[0].dtype == String + and feature_view == any_feature_view ) test_registry.delete_feature_view("location_features_from_push", project) @@ -721,9 +789,10 @@ def simple_udf(x: int): project = "project" # Register Feature Views - test_registry.apply_feature_view(odfv1, project) - test_registry.apply_feature_view(fv1, project) - test_registry.apply_feature_view(sfv, project) + test_registry.apply_feature_view(odfv1, project, False) + test_registry.apply_feature_view(fv1, project, False) + test_registry.apply_feature_view(sfv, project, False) + test_registry.commit() # Modify odfv by changing a single feature dtype @on_demand_feature_view( @@ -802,8 +871,8 @@ def odfv1(feature_df: pd.DataFrame) -> pd.DataFrame: # Simulate materialization current_date = _utc_now() - end_date = current_date.replace(tzinfo=utc) - start_date = (current_date - timedelta(days=1)).replace(tzinfo=utc) + end_date = current_date.replace(tzinfo=timezone.utc) + start_date = (current_date - timedelta(days=1)).replace(tzinfo=timezone.utc) test_registry.apply_materialization(feature_view, project, start_date, end_date) materialized_feature_view = 
test_registry.get_feature_view( "my_feature_view_1", project @@ -871,8 +940,8 @@ def odfv1(feature_df: pd.DataFrame) -> pd.DataFrame: # Simulate materialization a second time current_date = _utc_now() - end_date_1 = current_date.replace(tzinfo=utc) - start_date_1 = (current_date - timedelta(days=1)).replace(tzinfo=utc) + end_date_1 = current_date.replace(tzinfo=timezone.utc) + start_date_1 = (current_date - timedelta(days=1)).replace(tzinfo=timezone.utc) test_registry.apply_materialization( updated_feature_view, project, start_date_1, end_date_1 ) @@ -1097,7 +1166,7 @@ def test_registry_cache_thread_async(test_registry): "test_registry", all_fixtures, ) -def test_apply_stream_feature_view_success(test_registry): +def test_apply_stream_feature_view_success(test_registry: BaseRegistry): # Create Feature Views def simple_udf(x: int): return x + 3 @@ -1150,12 +1219,17 @@ def simple_udf(x: int): project, tags=sfv.tags ) + all_feature_views = test_registry.list_all_feature_views(project, tags=sfv.tags) + # List Feature Views assert len(stream_feature_views) == 1 + assert len(all_feature_views) == 1 assert stream_feature_views[0] == sfv test_registry.delete_feature_view("test kafka stream feature view", project) - stream_feature_views = test_registry.list_stream_feature_views(project) + stream_feature_views = test_registry.list_stream_feature_views( + project, tags=sfv.tags + ) assert len(stream_feature_views) == 0 test_registry.teardown() @@ -1277,6 +1351,10 @@ def test_commit(): project_uuid = project_metadata.project_uuid assert len(project_uuid) == 36 validate_project_uuid(project_uuid, test_registry) + assert len(test_registry.cached_registry_proto.projects) == 1 + project_obj = test_registry.cached_registry_proto.projects[0] + assert project == Project.from_proto(project_obj).name + assert_project(project, test_registry, True) # Retrieving the entity should still succeed entities = test_registry.list_entities(project, allow_cache=True, tags=entity.tags) @@ -1289,6 +1367,7 @@ def test_commit(): and entity.tags["team"] == "matchmaking" ) validate_project_uuid(project_uuid, test_registry) + assert_project(project, test_registry, True) entity = test_registry.get_entity("driver_car_id", project, allow_cache=True) assert ( @@ -1298,6 +1377,7 @@ def test_commit(): and entity.tags["team"] == "matchmaking" ) validate_project_uuid(project_uuid, test_registry) + assert_project(project, test_registry, True) # Create new registry that points to the same store registry_with_same_store = Registry("project", registry_config, None) @@ -1306,6 +1386,7 @@ def test_commit(): entities = registry_with_same_store.list_entities(project) assert len(entities) == 0 validate_project_uuid(project_uuid, registry_with_same_store) + assert_project(project, test_registry, True) # commit from the original registry test_registry.commit() @@ -1324,6 +1405,7 @@ def test_commit(): and entity.tags["team"] == "matchmaking" ) validate_project_uuid(project_uuid, registry_with_same_store) + assert_project(project, test_registry) entity = test_registry.get_entity("driver_car_id", project) assert ( @@ -1344,3 +1426,405 @@ def validate_project_uuid(project_uuid, test_registry): assert len(test_registry.cached_registry_proto.project_metadata) == 1 project_metadata = test_registry.cached_registry_proto.project_metadata[0] assert project_metadata.project_uuid == project_uuid + + +@pytest.mark.integration +@pytest.mark.parametrize("test_registry", all_fixtures) +def test_apply_permission_success(test_registry): + permission = Permission( + 
name="read_permission", + actions=AuthzedAction.DESCRIBE, + policy=RoleBasedPolicy(roles=["reader"]), + types=FeatureView, + ) + + project = "project" + + # Register Permission + test_registry.apply_permission(permission, project) + project_metadata = test_registry.list_project_metadata(project=project) + assert len(project_metadata) == 1 + project_uuid = project_metadata[0].project_uuid + assert len(project_metadata[0].project_uuid) == 36 + assert_project_uuid(project, project_uuid, test_registry) + assert_project(project, test_registry) + + permissions = test_registry.list_permissions(project) + assert_project_uuid(project, project_uuid, test_registry) + + permission = permissions[0] + assert ( + len(permissions) == 1 + and permission.name == "read_permission" + and len(permission.types) == 1 + and permission.types[0] == FeatureView + and len(permission.actions) == 1 + and permission.actions[0] == AuthzedAction.DESCRIBE + and isinstance(permission.policy, RoleBasedPolicy) + and len(permission.policy.roles) == 1 + and permission.policy.roles[0] == "reader" + and permission.name_pattern is None + and permission.tags is None + and permission.required_tags is None + ) + + # After the first apply, the created_timestamp should be the same as the last_update_timestamp. + assert permission.created_timestamp == permission.last_updated_timestamp + + permission = test_registry.get_permission("read_permission", project) + assert ( + permission.name == "read_permission" + and len(permission.types) == 1 + and permission.types[0] == FeatureView + and len(permission.actions) == 1 + and permission.actions[0] == AuthzedAction.DESCRIBE + and isinstance(permission.policy, RoleBasedPolicy) + and len(permission.policy.roles) == 1 + and permission.policy.roles[0] == "reader" + and permission.name_pattern is None + and permission.tags is None + and permission.required_tags is None + ) + + # Update permission + updated_permission = Permission( + name="read_permission", + actions=[AuthzedAction.DESCRIBE, AuthzedAction.WRITE_ONLINE], + policy=RoleBasedPolicy(roles=["reader", "writer"]), + types=FeatureView, + ) + test_registry.apply_permission(updated_permission, project) + + permissions = test_registry.list_permissions(project) + assert_project_uuid(project, project_uuid, test_registry) + assert len(permissions) == 1 + + updated_permission = test_registry.get_permission("read_permission", project) + assert ( + updated_permission.name == "read_permission" + and len(updated_permission.types) == 1 + and updated_permission.types[0] == FeatureView + and len(updated_permission.actions) == 2 + and AuthzedAction.DESCRIBE in updated_permission.actions + and AuthzedAction.WRITE_ONLINE in updated_permission.actions + and isinstance(updated_permission.policy, RoleBasedPolicy) + and len(updated_permission.policy.roles) == 2 + and "reader" in updated_permission.policy.roles + and "writer" in updated_permission.policy.roles + and updated_permission.name_pattern is None + and updated_permission.tags is None + and updated_permission.required_tags is None + ) + + # The created_timestamp for the entity should be set to the created_timestamp value stored from the previous apply + assert ( + updated_permission.created_timestamp is not None + and updated_permission.created_timestamp == permission.created_timestamp + ) + + updated_permission = Permission( + name="read_permission", + actions=[AuthzedAction.DESCRIBE, AuthzedAction.WRITE_ONLINE], + policy=RoleBasedPolicy(roles=["reader", "writer"]), + types=FeatureView, + 
name_pattern="aaa", + tags={"team": "matchmaking"}, + required_tags={"tag1": "tag1-value"}, + ) + test_registry.apply_permission(updated_permission, project) + + permissions = test_registry.list_permissions(project) + assert_project_uuid(project, project_uuid, test_registry) + assert len(permissions) == 1 + + updated_permission = test_registry.get_permission("read_permission", project) + assert ( + updated_permission.name == "read_permission" + and len(updated_permission.types) == 1 + and updated_permission.types[0] == FeatureView + and len(updated_permission.actions) == 2 + and AuthzedAction.DESCRIBE in updated_permission.actions + and AuthzedAction.WRITE_ONLINE in updated_permission.actions + and isinstance(updated_permission.policy, RoleBasedPolicy) + and len(updated_permission.policy.roles) == 2 + and "reader" in updated_permission.policy.roles + and "writer" in updated_permission.policy.roles + and updated_permission.name_pattern == "aaa" + and "team" in updated_permission.tags + and updated_permission.tags["team"] == "matchmaking" + and updated_permission.required_tags["tag1"] == "tag1-value" + ) + + test_registry.delete_permission("read_permission", project) + assert_project_uuid(project, project_uuid, test_registry) + permissions = test_registry.list_permissions(project) + assert_project_uuid(project, project_uuid, test_registry) + assert len(permissions) == 0 + assert_project(project, test_registry) + + test_registry.teardown() + + +@pytest.mark.integration +@pytest.mark.parametrize("test_registry", all_fixtures) +def test_apply_project_success(test_registry): + project = Project( + name="project", + description="Project description", + tags={"team": "project team"}, + owner="owner@mail.com", + ) + + # Register Project + test_registry.apply_project(project) + assert_project(project.name, test_registry, False) + + projects_list = test_registry.list_projects(tags=project.tags) + + assert_project(projects_list[0].name, test_registry) + + project_get = test_registry.get_project("project") + assert ( + project_get.name == project.name + and project_get.description == project.description + and project_get.tags == project.tags + and project_get.owner == project.owner + ) + + # Update project + updated_project = Project( + name=project.name, + description="New Project Description", + tags={"team": "matchmaking", "app": "feast"}, + ) + test_registry.apply_project(updated_project) + + updated_project_get = test_registry.get_project(project.name) + + # The created_timestamp for the entity should be set to the created_timestamp value stored from the previous apply + assert ( + updated_project_get.created_timestamp is not None + and updated_project_get.created_timestamp == project_get.created_timestamp + ) + + assert ( + updated_project_get.created_timestamp + < updated_project_get.last_updated_timestamp + ) + + entity = Entity( + name="driver_car_id", + description="Car driver id", + tags={"team": "matchmaking"}, + ) + + test_registry.apply_entity(entity, project.name) + entities = test_registry.list_entities(project.name) + assert len(entities) == 1 + + test_registry.delete_project(project.name, commit=False) + + test_registry.commit() + + entities = test_registry.list_entities(project.name, False) + assert len(entities) == 0 + projects_list = test_registry.list_projects() + assert len(projects_list) == 0 + + test_registry.refresh(project.name) + + test_registry.teardown() + + +@pytest.fixture +def local_registry_purge_feast_metadata() -> Registry: + fd, registry_path = mkstemp() + 
registry_config = RegistryConfig( + path=registry_path, cache_ttl_seconds=600, purge_feast_metadata=True + ) + return Registry("project", registry_config, None) + + +@pytest.fixture(scope="function") +def pg_registry_purge_feast_metadata(postgres_server): + db_name = "".join(random.choices(string.ascii_lowercase, k=10)) + + _create_pg_database(postgres_server, db_name) + + container_port = postgres_server.get_exposed_port(5432) + container_host = postgres_server.get_container_host_ip() + + registry_config = SqlRegistryConfig( + registry_type="sql", + cache_ttl_seconds=2, + cache_mode="thread", + # The `path` must include `+psycopg` in order for `sqlalchemy.create_engine()` + # to understand that we are using psycopg3. + path=f"postgresql+psycopg://{postgres_server.username}:{postgres_server.password}@{container_host}:{container_port}/{db_name}", + sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, + thread_pool_executor_worker_count=3, + purge_feast_metadata=True, + ) + + yield SqlRegistry(registry_config, "project", None) + + +@pytest.fixture(scope="function") +def mysql_registry_purge_feast_metadata(mysql_server): + db_name = "".join(random.choices(string.ascii_lowercase, k=10)) + + _create_mysql_database(mysql_server, db_name) + + connection_url = ( + "/".join(mysql_server.get_connection_url().split("/")[:-1]) + f"/{db_name}" + ) + + registry_config = SqlRegistryConfig( + registry_type="sql", + path=connection_url, + cache_ttl_seconds=2, + cache_mode="thread", + sqlalchemy_config_kwargs={"echo": False, "pool_pre_ping": True}, + thread_pool_executor_worker_count=3, + purge_feast_metadata=True, + ) + + yield SqlRegistry(registry_config, "project", None) + + +purge_feast_metadata_fixtures = [ + lazy_fixture("local_registry_purge_feast_metadata"), + pytest.param( + lazy_fixture("pg_registry_purge_feast_metadata"), + marks=pytest.mark.xdist_group(name="pg_registry"), + ), + pytest.param( + lazy_fixture("mysql_registry_purge_feast_metadata"), + marks=pytest.mark.xdist_group(name="mysql_registry"), + ), +] + + +@pytest.mark.integration +@pytest.mark.parametrize("test_registry", purge_feast_metadata_fixtures) +def test_apply_entity_success_with_purge_feast_metadata(test_registry): + entity = Entity( + name="driver_car_id", + description="Car driver id", + tags={"team": "matchmaking"}, + ) + + project = "project" + + # Register Entity + test_registry.apply_entity(entity, project) + project_metadata = test_registry.list_project_metadata(project=project) + assert len(project_metadata) == 0 + assert_project(project, test_registry) + + entities = test_registry.list_entities(project, tags=entity.tags) + assert_project(project, test_registry) + + entity = entities[0] + assert ( + len(entities) == 1 + and entity.name == "driver_car_id" + and entity.description == "Car driver id" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" + ) + + entity = test_registry.get_entity("driver_car_id", project) + assert ( + entity.name == "driver_car_id" + and entity.description == "Car driver id" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" + ) + + # After the first apply, the created_timestamp should be the same as the last_update_timestamp. 
+ assert entity.created_timestamp == entity.last_updated_timestamp + + # Update entity + updated_entity = Entity( + name="driver_car_id", + description="Car driver Id", + tags={"team": "matchmaking"}, + ) + test_registry.apply_entity(updated_entity, project) + + updated_entity = test_registry.get_entity("driver_car_id", project) + + # The created_timestamp for the entity should be set to the created_timestamp value stored from the previous apply + assert ( + updated_entity.created_timestamp is not None + and updated_entity.created_timestamp == entity.created_timestamp + ) + test_registry.delete_entity("driver_car_id", project) + assert_project(project, test_registry) + entities = test_registry.list_entities(project) + assert_project(project, test_registry) + assert len(entities) == 0 + + test_registry.teardown() + + +@pytest.mark.integration +@pytest.mark.parametrize( + "test_registry", + sql_fixtures + async_sql_fixtures, +) +def test_apply_entity_to_sql_registry_and_reinitialize_sql_registry(test_registry): + entity = Entity( + name="driver_car_id", + description="Car driver id", + tags={"team": "matchmaking"}, + ) + + project = "project" + + # Register Entity + test_registry.apply_entity(entity, project) + assert_project(project, test_registry) + + entities = test_registry.list_entities(project, tags=entity.tags) + assert_project(project, test_registry) + + entity = entities[0] + assert ( + len(entities) == 1 + and entity.name == "driver_car_id" + and entity.description == "Car driver id" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" + ) + + entity = test_registry.get_entity("driver_car_id", project) + assert ( + entity.name == "driver_car_id" + and entity.description == "Car driver id" + and "team" in entity.tags + and entity.tags["team"] == "matchmaking" + ) + + # After the first apply, the created_timestamp should be the same as the last_update_timestamp. 
+ assert entity.created_timestamp == entity.last_updated_timestamp + updated_test_registry = SqlRegistry(test_registry.registry_config, "project", None) + + # Update entity + updated_entity = Entity( + name="driver_car_id", + description="Car driver Id", + tags={"team": "matchmaking"}, + ) + updated_test_registry.apply_entity(updated_entity, project) + + updated_entity = updated_test_registry.get_entity("driver_car_id", project) + updated_test_registry.delete_entity("driver_car_id", project) + assert_project(project, updated_test_registry) + entities = updated_test_registry.list_entities(project) + assert_project(project, updated_test_registry) + assert len(entities) == 0 + + updated_test_registry.teardown() + test_registry.teardown() diff --git a/sdk/python/tests/unit/diff/test_registry_diff.py b/sdk/python/tests/unit/diff/test_registry_diff.py index c209f1e0e0..2834c57800 100644 --- a/sdk/python/tests/unit/diff/test_registry_diff.py +++ b/sdk/python/tests/unit/diff/test_registry_diff.py @@ -6,8 +6,12 @@ tag_objects_for_keep_delete_update_add, ) from feast.entity import Entity +from feast.feast_object import ALL_RESOURCE_TYPES from feast.feature_view import FeatureView from feast.on_demand_feature_view import on_demand_feature_view +from feast.permissions.action import AuthzedAction +from feast.permissions.permission import Permission +from feast.permissions.policy import RoleBasedPolicy from feast.types import String from tests.utils.data_source_test_creator import prep_file_source @@ -170,3 +174,22 @@ def test_diff_registry_objects_batch_to_push_source(simple_dataset_1): feast_object_diffs.feast_object_property_diffs[0].property_name == "stream_source" ) + + +def test_diff_registry_objects_permissions(): + pre_changed = Permission( + name="reader", + types=ALL_RESOURCE_TYPES, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], + ) + post_changed = Permission( + name="reader", + types=ALL_RESOURCE_TYPES, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.CREATE], + ) + + feast_object_diffs = diff_registry_objects(pre_changed, post_changed, "permission") + assert len(feast_object_diffs.feast_object_property_diffs) == 1 + assert feast_object_diffs.feast_object_property_diffs[0].property_name == "actions" diff --git a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py index 98d82ce357..9dcf7e4caf 100644 --- a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py +++ b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py @@ -4,6 +4,13 @@ from typing import Optional from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig +from feast.permissions.auth.auth_type import AuthType +from feast.permissions.auth_model import ( + KubernetesAuthConfig, + NoAuthConfig, + OidcAuthConfig, + OidcClientAuthConfig, +) from feast.repo_config import FeastConfigError, load_repo_config @@ -195,3 +202,136 @@ def test_no_provider(): ), expect_error=None, ) + + +def test_auth_config(): + _test_config( + dedent( + """ + project: foo + auth: + client_id: test_client_id + client_secret: test_client_secret + username: test_user_name + password: test_password + auth_discovery_url: http://localhost:8080/realms/master/.well-known/openid-configuration + registry: "registry.db" + provider: local + online_store: + path: foo + entity_key_serialization_version: 2 + """ + ), + expect_error="missing authentication type", + ) + + _test_config( + dedent( + """ + project: foo + auth: + type: 
not_valid_auth_type + client_id: test_client_id + client_secret: test_client_secret + username: test_user_name + password: test_password + auth_discovery_url: http://localhost:8080/realms/master/.well-known/openid-configuration + registry: "registry.db" + provider: local + online_store: + path: foo + entity_key_serialization_version: 2 + """ + ), + expect_error="invalid authentication type=not_valid_auth_type", + ) + + oidc_server_repo_config = _test_config( + dedent( + """ + project: foo + auth: + type: oidc + client_id: test_client_id + auth_discovery_url: http://localhost:8080/realms/master/.well-known/openid-configuration + registry: "registry.db" + provider: local + online_store: + path: foo + entity_key_serialization_version: 2 + """ + ), + expect_error=None, + ) + assert oidc_server_repo_config.auth["type"] == AuthType.OIDC.value + assert isinstance(oidc_server_repo_config.auth_config, OidcAuthConfig) + assert oidc_server_repo_config.auth_config.client_id == "test_client_id" + assert ( + oidc_server_repo_config.auth_config.auth_discovery_url + == "http://localhost:8080/realms/master/.well-known/openid-configuration" + ) + + oidc_client_repo_config = _test_config( + dedent( + """ + project: foo + auth: + type: oidc + client_id: test_client_id + client_secret: test_client_secret + username: test_user_name + password: test_password + auth_discovery_url: http://localhost:8080/realms/master/.well-known/openid-configuration + registry: "registry.db" + provider: local + online_store: + path: foo + entity_key_serialization_version: 2 + """ + ), + expect_error=None, + ) + assert oidc_client_repo_config.auth["type"] == AuthType.OIDC.value + assert isinstance(oidc_client_repo_config.auth_config, OidcClientAuthConfig) + assert oidc_client_repo_config.auth_config.client_id == "test_client_id" + assert oidc_client_repo_config.auth_config.client_secret == "test_client_secret" + assert oidc_client_repo_config.auth_config.username == "test_user_name" + assert oidc_client_repo_config.auth_config.password == "test_password" + assert ( + oidc_client_repo_config.auth_config.auth_discovery_url + == "http://localhost:8080/realms/master/.well-known/openid-configuration" + ) + + no_auth_repo_config = _test_config( + dedent( + """ + project: foo + registry: "registry.db" + provider: local + online_store: + path: foo + entity_key_serialization_version: 2 + """ + ), + expect_error=None, + ) + assert no_auth_repo_config.auth.get("type") == AuthType.NONE.value + assert isinstance(no_auth_repo_config.auth_config, NoAuthConfig) + + k8_repo_config = _test_config( + dedent( + """ + auth: + type: kubernetes + project: foo + registry: "registry.db" + provider: local + online_store: + path: foo + entity_key_serialization_version: 2 + """ + ), + expect_error=None, + ) + assert k8_repo_config.auth.get("type") == AuthType.KUBERNETES.value + assert isinstance(k8_repo_config.auth_config, KubernetesAuthConfig) diff --git a/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py b/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py index 0e834e314b..cc48295b20 100644 --- a/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py +++ b/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py @@ -9,11 +9,15 @@ from feast.data_format import AvroFormat, ParquetFormat from feast.data_source import KafkaSource from feast.entity import Entity +from feast.feast_object import ALL_RESOURCE_TYPES from feast.feature_store import FeatureStore -from feast.feature_view import FeatureView 
+from feast.feature_view import DUMMY_ENTITY_ID, FeatureView from feast.field import Field from feast.infra.offline_stores.file_source import FileSource from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig +from feast.permissions.action import AuthzedAction +from feast.permissions.permission import Permission +from feast.permissions.policy import RoleBasedPolicy from feast.repo_config import RepoConfig from feast.stream_feature_view import stream_feature_view from feast.types import Array, Bytes, Float32, Int64, String @@ -205,8 +209,9 @@ def test_apply_feature_view_with_inline_batch_source( test_feature_store.apply([entity, driver_fv]) fvs = test_feature_store.list_batch_feature_views() + dfv = fvs[0] assert len(fvs) == 1 - assert fvs[0] == driver_fv + assert dfv == driver_fv ds = test_feature_store.list_data_sources() assert len(ds) == 1 @@ -338,6 +343,81 @@ def test_apply_entities_and_feature_views(test_feature_store): test_feature_store.teardown() +@pytest.mark.parametrize( + "test_feature_store", + [lazy_fixture("feature_store_with_local_registry")], +) +def test_apply_dummy_entity_and_feature_view_columns(test_feature_store): + assert isinstance(test_feature_store, FeatureStore) + # Create Feature Views + batch_source = FileSource( + file_format=ParquetFormat(), + path="file://feast/*", + timestamp_field="ts_col", + created_timestamp_column="timestamp", + ) + + e1 = Entity(name="fs1_my_entity_1", description="something") + + fv = FeatureView( + name="my_feature_view_no_entity", + schema=[ + Field(name="fs1_my_feature_1", dtype=Int64), + Field(name="fs1_my_feature_2", dtype=String), + Field(name="fs1_my_feature_3", dtype=Array(String)), + Field(name="fs1_my_feature_4", dtype=Array(Bytes)), + Field(name="fs1_my_entity_2", dtype=Int64), + ], + entities=[], + tags={"team": "matchmaking"}, + source=batch_source, + ttl=timedelta(minutes=5), + ) + + # Check that the entity_columns are empty before applying + assert fv.entity_columns == [] + + # Register Feature View + test_feature_store.apply([fv, e1]) + fv_actual = test_feature_store.get_feature_view("my_feature_view_no_entity") + + # Note that after the apply() the feature_view serializes the Dummy Entity ID + assert fv.entity_columns[0].name == DUMMY_ENTITY_ID + assert fv_actual.entity_columns[0].name == DUMMY_ENTITY_ID + + test_feature_store.teardown() + + +@pytest.mark.parametrize( + "test_feature_store", + [lazy_fixture("feature_store_with_local_registry")], +) +def test_apply_permissions(test_feature_store): + assert isinstance(test_feature_store, FeatureStore) + + permission = Permission( + name="reader", + types=ALL_RESOURCE_TYPES, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], + ) + + # Register Permission + test_feature_store.apply([permission]) + + permissions = test_feature_store.list_permissions() + assert len(permissions) == 1 + assert permissions[0] == permission + + # delete Permission + test_feature_store.apply(objects=[], objects_to_delete=[permission], partial=False) + + permissions = test_feature_store.list_permissions() + assert len(permissions) == 0 + + test_feature_store.teardown() + + +@pytest.mark.parametrize( + "test_feature_store", + [lazy_fixture("feature_store_with_local_registry")], diff --git a/sdk/python/tests/unit/permissions/__init__.py b/sdk/python/tests/unit/permissions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/tests/unit/permissions/auth/client/test_authentication_client_manager_factory.py 
b/sdk/python/tests/unit/permissions/auth/client/test_authentication_client_manager_factory.py new file mode 100644 index 0000000000..5a6a8d70fa --- /dev/null +++ b/sdk/python/tests/unit/permissions/auth/client/test_authentication_client_manager_factory.py @@ -0,0 +1,55 @@ +import os +from unittest import mock + +import assertpy +import jwt +import pytest +import yaml + +from feast.permissions.auth.auth_type import AuthType +from feast.permissions.auth_model import ( + AuthConfig, +) +from feast.permissions.client.auth_client_manager import ( + AuthenticationClientManagerFactory, +) +from feast.permissions.client.intra_comm_authentication_client_manager import ( + IntraCommAuthClientManager, +) + + +@mock.patch.dict(os.environ, {"INTRA_COMMUNICATION_BASE64": "server_intra_com_val"}) +def test_authentication_client_manager_factory(auth_config): + raw_config = yaml.safe_load(auth_config) + auth_config = AuthConfig(type=raw_config["auth"]["type"]) + + authentication_client_manager_factory = AuthenticationClientManagerFactory( + auth_config + ) + + authentication_client_manager = ( + authentication_client_manager_factory.get_auth_client_manager() + ) + + if auth_config.type not in [AuthType.KUBERNETES.value, AuthType.OIDC.value]: + with pytest.raises( + RuntimeError, + match=f"No Auth client manager implemented for the auth type:{auth_config.type}", + ): + authentication_client_manager.get_token() + else: + token = authentication_client_manager.get_token() + + decoded_token = jwt.decode(token, options={"verify_signature": False}) + assertpy.assert_that(authentication_client_manager).is_type_of( + IntraCommAuthClientManager + ) + + if AuthType.KUBERNETES.value == auth_config.type: + assertpy.assert_that(decoded_token["sub"]).is_equal_to( + ":::server_intra_com_val" + ) + elif AuthType.OIDC.value in auth_config.type: + assertpy.assert_that(decoded_token["preferred_username"]).is_equal_to( + "server_intra_com_val" + ) diff --git a/sdk/python/tests/unit/permissions/auth/conftest.py b/sdk/python/tests/unit/permissions/auth/conftest.py new file mode 100644 index 0000000000..5a29f8ec78 --- /dev/null +++ b/sdk/python/tests/unit/permissions/auth/conftest.py @@ -0,0 +1,99 @@ +import pytest +from kubernetes import client + +from feast.permissions.auth_model import OidcAuthConfig +from tests.unit.permissions.auth.server.test_utils import ( + invalid_list_entities_perm, + read_entities_perm, + read_fv_perm, + read_odfv_perm, + read_permissions_perm, + read_projects_perm, + read_sfv_perm, +) +from tests.unit.permissions.auth.test_token_parser import _CLIENT_ID + + +@pytest.fixture +def sa_name(): + return "my-name" + + +@pytest.fixture +def namespace(): + return "my-ns" + + +@pytest.fixture +def rolebindings(sa_name, namespace) -> dict: + roles = ["reader", "writer"] + items = [] + for r in roles: + items.append( + client.V1RoleBinding( + metadata=client.V1ObjectMeta(name=r, namespace=namespace), + subjects=[ + client.V1Subject( + kind="ServiceAccount", + name=sa_name, + api_group="rbac.authorization.k8s.io", + ) + ], + role_ref=client.V1RoleRef( + kind="Role", name=r, api_group="rbac.authorization.k8s.io" + ), + ) + ) + return {"items": client.V1RoleBindingList(items=items), "roles": roles} + + +@pytest.fixture +def clusterrolebindings(sa_name, namespace) -> dict: + roles = ["updater"] + items = [] + for r in roles: + items.append( + client.V1ClusterRoleBinding( + metadata=client.V1ObjectMeta(name=r, namespace=namespace), + subjects=[ + client.V1Subject( + kind="ServiceAccount", + name=sa_name, + 
namespace=namespace, + api_group="rbac.authorization.k8s.io", + ) + ], + role_ref=client.V1RoleRef( + kind="Role", name=r, api_group="rbac.authorization.k8s.io" + ), + ) + ) + return {"items": client.V1RoleBindingList(items=items), "roles": roles} + + +@pytest.fixture +def oidc_config() -> OidcAuthConfig: + return OidcAuthConfig( + auth_discovery_url="https://localhost:8080/realms/master/.well-known/openid-configuration", + client_id=_CLIENT_ID, + type="oidc", + ) + + +@pytest.fixture( + scope="module", + params=[ + [], + [invalid_list_entities_perm], + [ + read_entities_perm, + read_permissions_perm, + read_fv_perm, + read_odfv_perm, + read_sfv_perm, + read_projects_perm, + ], + ], +) +def applied_permissions(request): + return request.param diff --git a/sdk/python/tests/unit/permissions/auth/server/mock_utils.py b/sdk/python/tests/unit/permissions/auth/server/mock_utils.py new file mode 100644 index 0000000000..12f7785b05 --- /dev/null +++ b/sdk/python/tests/unit/permissions/auth/server/mock_utils.py @@ -0,0 +1,81 @@ +from unittest.mock import MagicMock, Mock + +from requests import Response + + +def mock_oidc(request, monkeypatch, client_id): + async def mock_oauth2(self, request): + return "OK" + + monkeypatch.setattr( + "feast.permissions.auth.oidc_token_parser.OAuth2AuthorizationCodeBearer.__call__", + mock_oauth2, + ) + signing_key = MagicMock() + signing_key.key = "a-key" + monkeypatch.setattr( + "feast.permissions.auth.oidc_token_parser.PyJWKClient.get_signing_key_from_jwt", + lambda self, access_token: signing_key, + ) + user_data = { + "preferred_username": "my-name", + "resource_access": {client_id: {"roles": ["reader", "writer"]}}, + } + monkeypatch.setattr( + "feast.permissions.auth.oidc_token_parser.jwt.decode", + lambda self, *args, **kwargs: user_data, + ) + discovery_response = Mock(spec=Response) + discovery_response.status_code = 200 + discovery_response.json.return_value = { + "token_endpoint": "http://localhost:8080/realms/master/protocol/openid-connect/token" + } + monkeypatch.setattr( + "feast.permissions.client.oidc_authentication_client_manager.requests.get", + lambda url: discovery_response, + ) + token_response = Mock(spec=Response) + token_response.status_code = 200 + token_response.json.return_value = {"access_token": "my-token"} + monkeypatch.setattr( + "feast.permissions.client.oidc_authentication_client_manager.requests.post", + lambda url, data, headers: token_response, + ) + + monkeypatch.setattr( + "feast.permissions.oidc_service.OIDCDiscoveryService._fetch_discovery_data", + lambda self, *args, **kwargs: { + "authorization_endpoint": "https://localhost:8080/realms/master/protocol/openid-connect/auth", + "token_endpoint": "https://localhost:8080/realms/master/protocol/openid-connect/token", + "jwks_uri": "https://localhost:8080/realms/master/protocol/openid-connect/certs", + }, + ) + + +def mock_kubernetes(request, monkeypatch): + sa_name = request.getfixturevalue("sa_name") + namespace = request.getfixturevalue("namespace") + subject = f"system:serviceaccount:{namespace}:{sa_name}" + rolebindings = request.getfixturevalue("rolebindings") + clusterrolebindings = request.getfixturevalue("clusterrolebindings") + + monkeypatch.setattr( + "feast.permissions.auth.kubernetes_token_parser.config.load_incluster_config", + lambda: None, + ) + monkeypatch.setattr( + "feast.permissions.auth.kubernetes_token_parser.jwt.decode", + lambda *args, **kwargs: {"sub": subject}, + ) + monkeypatch.setattr( + 
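# The remaining patches stub the Kubernetes RBAC read APIs and the client + # token fetch, so role resolution runs without a live cluster. + 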
"feast.permissions.auth.kubernetes_token_parser.client.RbacAuthorizationV1Api.list_namespaced_role_binding", + lambda *args, **kwargs: rolebindings["items"], + ) + monkeypatch.setattr( + "feast.permissions.auth.kubernetes_token_parser.client.RbacAuthorizationV1Api.list_cluster_role_binding", + lambda *args, **kwargs: clusterrolebindings["items"], + ) + monkeypatch.setattr( + "feast.permissions.client.kubernetes_auth_client_manager.KubernetesAuthClientManager.get_token", + lambda self: "my-token", + ) diff --git a/sdk/python/tests/unit/permissions/auth/server/test_auth_registry_server.py b/sdk/python/tests/unit/permissions/auth/server/test_auth_registry_server.py new file mode 100644 index 0000000000..c72b1aa1e2 --- /dev/null +++ b/sdk/python/tests/unit/permissions/auth/server/test_auth_registry_server.py @@ -0,0 +1,279 @@ +from datetime import datetime + +import assertpy +import pandas as pd +import pytest +import yaml + +from feast import FeatureStore +from feast.errors import ( + EntityNotFoundException, + FeastPermissionError, + FeatureViewNotFoundException, +) +from feast.permissions.permission import Permission +from feast.registry_server import start_server +from feast.wait import wait_retry_backoff # noqa: E402 +from tests.unit.permissions.auth.server import mock_utils +from tests.unit.permissions.auth.server.test_utils import ( + invalid_list_entities_perm, + read_entities_perm, + read_fv_perm, + read_odfv_perm, + read_permissions_perm, + read_projects_perm, + read_sfv_perm, +) +from tests.utils.auth_permissions_util import get_remote_registry_store +from tests.utils.http_server import check_port_open # noqa: E402 + + +@pytest.fixture +def start_registry_server( + request, + auth_config, + server_port, + feature_store, + monkeypatch, +): + if "kubernetes" in auth_config: + mock_utils.mock_kubernetes(request=request, monkeypatch=monkeypatch) + elif "oidc" in auth_config: + auth_config_yaml = yaml.safe_load(auth_config) + mock_utils.mock_oidc( + request=request, + monkeypatch=monkeypatch, + client_id=auth_config_yaml["auth"]["client_id"], + ) + + assertpy.assert_that(server_port).is_not_equal_to(0) + + print(f"Starting Registry at {server_port}") + server = start_server( + feature_store, + server_port, + wait_for_termination=False, + ) + print("Waiting server availability") + wait_retry_backoff( + lambda: (None, check_port_open("localhost", server_port)), + timeout_secs=10, + ) + print("Server started") + + yield server + + print("Stopping server") + server.stop(grace=None) # Teardown server + + +def test_registry_apis( + auth_config, + temp_dir, + server_port, + start_registry_server, + feature_store, + applied_permissions, +): + print(f"Running for\n:{auth_config}") + remote_feature_store = get_remote_registry_store(server_port, feature_store) + permissions = _test_list_permissions(remote_feature_store, applied_permissions) + _test_get_entity(remote_feature_store, applied_permissions) + _test_list_entities(remote_feature_store, applied_permissions) + _test_get_fv(remote_feature_store, applied_permissions) + _test_list_fvs(remote_feature_store, applied_permissions) + + if _permissions_exist_in_permission_list( + [ + read_entities_perm, + read_permissions_perm, + read_fv_perm, + read_odfv_perm, + read_sfv_perm, + ], + permissions, + ): + _test_get_historical_features(remote_feature_store) + + +def _test_get_historical_features(client_fs: FeatureStore): + entity_df = pd.DataFrame.from_dict( + { + # entity's join key -> entity values + "driver_id": [1001, 1002, 1003], + # 
"event_timestamp" (reserved key) -> timestamps + "event_timestamp": [ + datetime(2021, 4, 12, 10, 59, 42), + datetime(2021, 4, 12, 8, 12, 10), + datetime(2021, 4, 12, 16, 40, 26), + ], + # (optional) label name -> label values. Feast does not process these + "label_driver_reported_satisfaction": [1, 5, 3], + # values we're using for an on-demand transformation + "val_to_add": [1, 2, 3], + "val_to_add_2": [10, 20, 30], + } + ) + + training_df = client_fs.get_historical_features( + entity_df=entity_df, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ], + ).to_df() + assertpy.assert_that(training_df).is_not_none() + + +def _test_get_entity(client_fs: FeatureStore, permissions: list[Permission]): + if not _is_auth_enabled(client_fs) or _is_permission_enabled( + client_fs, permissions, read_entities_perm + ): + entity = client_fs.get_entity("driver") + assertpy.assert_that(entity).is_not_none() + assertpy.assert_that(entity.name).is_equal_to("driver") + else: + with pytest.raises(FeastPermissionError): + client_fs.get_entity("driver") + with pytest.raises(EntityNotFoundException): + client_fs.get_entity("invalid-name") + + +def _test_list_entities(client_fs: FeatureStore, permissions: list[Permission]): + entities = client_fs.list_entities() + + if not _is_auth_enabled(client_fs) or _is_permission_enabled( + client_fs, permissions, read_entities_perm + ): + assertpy.assert_that(entities).is_not_none() + assertpy.assert_that(len(entities)).is_equal_to(1) + assertpy.assert_that(entities[0].name).is_equal_to("driver") + else: + assertpy.assert_that(entities).is_not_none() + assertpy.assert_that(len(entities)).is_equal_to(0) + + +def _no_permission_retrieved(permissions: list[Permission]) -> bool: + return len(permissions) == 0 + + +def _test_list_permissions( + client_fs: FeatureStore, applied_permissions: list[Permission] +) -> list[Permission]: + if _is_auth_enabled(client_fs) and _permissions_exist_in_permission_list( + [invalid_list_entities_perm], applied_permissions + ): + with pytest.raises(Exception): + client_fs.list_permissions() + return [] + else: + permissions = client_fs.list_permissions() + + if not _is_auth_enabled(client_fs): + assertpy.assert_that(permissions).is_not_none() + assertpy.assert_that(len(permissions)).is_equal_to(len(applied_permissions)) + elif _is_auth_enabled(client_fs) and _permissions_exist_in_permission_list( + [ + read_entities_perm, + read_permissions_perm, + read_fv_perm, + read_odfv_perm, + read_sfv_perm, + read_projects_perm, + ], + permissions, + ): + assertpy.assert_that(permissions).is_not_none() + assertpy.assert_that(len(permissions)).is_equal_to( + len( + [ + read_entities_perm, + read_permissions_perm, + read_fv_perm, + read_odfv_perm, + read_sfv_perm, + read_projects_perm, + ] + ) + ) + elif _is_auth_enabled(client_fs) and _is_listing_permissions_allowed(permissions): + assertpy.assert_that(permissions).is_not_none() + assertpy.assert_that(len(permissions)).is_equal_to(1) + + return permissions + + +def _is_listing_permissions_allowed(permissions: list[Permission]) -> bool: + return read_permissions_perm in permissions + + +def _is_auth_enabled(client_fs: FeatureStore) -> bool: + return client_fs.config.auth_config.type != "no_auth" + + +def _test_get_fv(client_fs: FeatureStore, permissions: list[Permission]): + if not _is_auth_enabled(client_fs) or _is_permission_enabled( + 
client_fs, permissions, read_fv_perm + ): + fv = client_fs.get_feature_view("driver_hourly_stats") + assertpy.assert_that(fv).is_not_none() + assertpy.assert_that(fv.name).is_equal_to("driver_hourly_stats") + else: + with pytest.raises(FeastPermissionError): + client_fs.get_feature_view("driver_hourly_stats") + with pytest.raises(FeatureViewNotFoundException): + client_fs.get_feature_view("invalid-name") + + +def _test_list_fvs(client_fs: FeatureStore, permissions: list[Permission]): + if _is_auth_enabled(client_fs) and _permissions_exist_in_permission_list( + [invalid_list_entities_perm], permissions + ): + with pytest.raises(Exception): + client_fs.list_feature_views() + return [] + else: + fvs = client_fs.list_feature_views() + + if not _is_auth_enabled(client_fs) or _is_permission_enabled( + client_fs, permissions, read_fv_perm + ): + assertpy.assert_that(fvs).is_not_none() + assertpy.assert_that(len(fvs)).is_equal_to(2) + + names = _to_names(fvs) + assertpy.assert_that(names).contains("driver_hourly_stats") + assertpy.assert_that(names).contains("driver_hourly_stats_fresh") + else: + assertpy.assert_that(fvs).is_not_none() + assertpy.assert_that(len(fvs)).is_equal_to(0) + + +def _permissions_exist_in_permission_list( + permission_to_test: list[Permission], permission_list: list[Permission] +) -> bool: + return all(e in permission_list for e in permission_to_test) + + +def _is_permission_enabled( + client_fs: FeatureStore, + permissions: list[Permission], + permission: Permission, +): + return _is_auth_enabled(client_fs) and ( + _no_permission_retrieved(permissions) + or ( + _permissions_exist_in_permission_list( + [read_permissions_perm, permission], permissions + ) + ) + ) + + +def _to_names(items): + return [i.name for i in items] diff --git a/sdk/python/tests/unit/permissions/auth/server/test_utils.py b/sdk/python/tests/unit/permissions/auth/server/test_utils.py new file mode 100644 index 0000000000..32b4fd8f98 --- /dev/null +++ b/sdk/python/tests/unit/permissions/auth/server/test_utils.py @@ -0,0 +1,69 @@ +import assertpy +import pytest + +from feast import Entity, FeatureView, OnDemandFeatureView, StreamFeatureView +from feast.permissions.action import AuthzedAction +from feast.permissions.permission import Permission +from feast.permissions.policy import RoleBasedPolicy +from feast.permissions.server.utils import AuthManagerType, str_to_auth_manager_type +from feast.project import Project + +read_permissions_perm = Permission( + name="read_permissions_perm", + types=Permission, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], +) + +read_projects_perm = Permission( + name="read_projects_perm", + types=Project, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], +) + +read_entities_perm = Permission( + name="read_entities_perm", + types=Entity, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], +) + +read_fv_perm = Permission( + name="read_fv_perm", + types=FeatureView, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], +) + +read_odfv_perm = Permission( + name="read_odfv_perm", + types=OnDemandFeatureView, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], +) + +read_sfv_perm = Permission( + name="read_sfv_perm", + types=StreamFeatureView, + policy=RoleBasedPolicy(roles=["reader"]), + actions=[AuthzedAction.DESCRIBE], +) + +invalid_list_entities_perm = Permission( + 
name="invalid_list_entity_perm", + types=Entity, + policy=RoleBasedPolicy(roles=["dancer"]), + actions=[AuthzedAction.DESCRIBE], +) + + +@pytest.mark.parametrize( + "label, value", + [(t.value, t) for t in AuthManagerType] + + [(t.value.upper(), t) for t in AuthManagerType] + + [(t.value.lower(), t) for t in AuthManagerType] + + [("none", AuthManagerType.NONE)], +) +def test_str_to_auth_type(label, value): + assertpy.assert_that(str_to_auth_manager_type(label)).is_equal_to(value) diff --git a/sdk/python/tests/unit/permissions/auth/test_token_extractor.py b/sdk/python/tests/unit/permissions/auth/test_token_extractor.py new file mode 100644 index 0000000000..a6fcd89e5b --- /dev/null +++ b/sdk/python/tests/unit/permissions/auth/test_token_extractor.py @@ -0,0 +1,140 @@ +from unittest.mock import Mock + +import assertpy +import pytest +from fastapi.requests import Request +from starlette.authentication import ( + AuthenticationError, +) + +from feast.permissions.server.arrow_flight_token_extractor import ( + ArrowFlightTokenExtractor, +) +from feast.permissions.server.grpc_token_extractor import GrpcTokenExtractor +from feast.permissions.server.rest_token_extractor import RestTokenExtractor + + +@pytest.mark.parametrize( + "error_type, dict, header", + [ + (ValueError, {}, None), + (ValueError, {"other": 123}, None), + (AuthenticationError, {}, ""), + (AuthenticationError, {}, "abcd"), + (AuthenticationError, {}, "other-scheme abcd"), + ], +) +def test_rest_token_extractor_failures(error_type, dict, header): + token_extractor = RestTokenExtractor() + + request = None + if header is not None: + request = Mock(spec=Request) + if header != "": + request.headers = {"authorization": header} + else: + request.headers = {} + with pytest.raises(error_type): + if request is None: + token_extractor.extract_access_token(**dict) + else: + token_extractor.extract_access_token(request=request) + + +@pytest.mark.parametrize( + "error_type, dict, header", + [ + (ValueError, {}, None), + (ValueError, {"other": 123}, None), + (AuthenticationError, {}, ""), + (AuthenticationError, {}, "abcd"), + (AuthenticationError, {}, "other-scheme abcd"), + ], +) +def test_grpc_token_extractor_failures(error_type, dict, header): + token_extractor = GrpcTokenExtractor() + + metadata = None + if header is not None: + metadata = {} + if metadata != "": + metadata["authorization"] = header + with pytest.raises(error_type): + if metadata is None: + token_extractor.extract_access_token(**dict) + else: + token_extractor.extract_access_token(metadata=metadata) + + +def test_rest_token_extractor(): + token_extractor = RestTokenExtractor() + request: Request = Mock(spec=Request) + token = "abcd" + + request.headers = {"authorization": f"Bearer {token}"} + assertpy.assert_that( + token_extractor.extract_access_token(request=request) + ).is_equal_to(token) + + request.headers = {"authorization": f"bearer {token}"} + assertpy.assert_that( + token_extractor.extract_access_token(request=request) + ).is_equal_to(token) + + +def test_grpc_token_extractor(): + token_extractor = GrpcTokenExtractor() + metadata = {} + token = "abcd" + + metadata["authorization"] = f"Bearer {token}" + assertpy.assert_that( + token_extractor.extract_access_token(metadata=metadata) + ).is_equal_to(token) + + metadata["authorization"] = f"bearer {token}" + assertpy.assert_that( + token_extractor.extract_access_token(metadata=metadata) + ).is_equal_to(token) + + +@pytest.mark.parametrize( + "error_type, dict, header", + [ + (ValueError, {}, None), + (ValueError, 
{"other": 123}, None), + (AuthenticationError, {}, ""), + (AuthenticationError, {}, "abcd"), + (AuthenticationError, {}, ["abcd"]), + (AuthenticationError, {}, ["other-scheme abcd"]), + ], +) +def test_arrow_flight_token_extractor_failures(error_type, dict, header): + token_extractor = ArrowFlightTokenExtractor() + + headers = None + if header is not None: + if header != "": + headers = {"authorization": header} + else: + headers = {} + with pytest.raises(error_type): + if headers is None: + token_extractor.extract_access_token(**dict) + else: + token_extractor.extract_access_token(headers=headers) + + +def test_arrow_flight_token_extractor(): + token_extractor = ArrowFlightTokenExtractor() + token = "abcd" + + headers = {"authorization": [f"Bearer {token}"]} + assertpy.assert_that( + token_extractor.extract_access_token(headers=headers) + ).is_equal_to(token) + + headers = {"authorization": [f"bearer {token}"]} + assertpy.assert_that( + token_extractor.extract_access_token(headers=headers) + ).is_equal_to(token) diff --git a/sdk/python/tests/unit/permissions/auth/test_token_parser.py b/sdk/python/tests/unit/permissions/auth/test_token_parser.py new file mode 100644 index 0000000000..bac2103b4f --- /dev/null +++ b/sdk/python/tests/unit/permissions/auth/test_token_parser.py @@ -0,0 +1,272 @@ +import asyncio +import os +from unittest import mock +from unittest.mock import MagicMock, patch + +import assertpy +import pytest +from starlette.authentication import ( + AuthenticationError, +) + +from feast.permissions.auth.kubernetes_token_parser import KubernetesTokenParser +from feast.permissions.auth.oidc_token_parser import OidcTokenParser +from feast.permissions.user import User + +_CLIENT_ID = "test" + + +@patch( + "feast.permissions.auth.oidc_token_parser.OAuth2AuthorizationCodeBearer.__call__" +) +@patch("feast.permissions.auth.oidc_token_parser.PyJWKClient.get_signing_key_from_jwt") +@patch("feast.permissions.auth.oidc_token_parser.jwt.decode") +@patch("feast.permissions.oidc_service.OIDCDiscoveryService._fetch_discovery_data") +def test_oidc_token_validation_success( + mock_discovery_data, mock_jwt, mock_signing_key, mock_oauth2, oidc_config +): + signing_key = MagicMock() + signing_key.key = "a-key" + mock_signing_key.return_value = signing_key + + mock_discovery_data.return_value = { + "authorization_endpoint": "https://localhost:8080/realms/master/protocol/openid-connect/auth", + "token_endpoint": "https://localhost:8080/realms/master/protocol/openid-connect/token", + "jwks_uri": "https://localhost:8080/realms/master/protocol/openid-connect/certs", + } + + user_data = { + "preferred_username": "my-name", + "resource_access": {_CLIENT_ID: {"roles": ["reader", "writer"]}}, + } + mock_jwt.return_value = user_data + + access_token = "aaa-bbb-ccc" + token_parser = OidcTokenParser(auth_config=oidc_config) + user = asyncio.run( + token_parser.user_details_from_access_token(access_token=access_token) + ) + + assertpy.assert_that(user).is_type_of(User) + if isinstance(user, User): + assertpy.assert_that(user.username).is_equal_to("my-name") + assertpy.assert_that(user.roles.sort()).is_equal_to(["reader", "writer"].sort()) + assertpy.assert_that(user.has_matching_role(["reader"])).is_true() + assertpy.assert_that(user.has_matching_role(["writer"])).is_true() + assertpy.assert_that(user.has_matching_role(["updater"])).is_false() + + +@patch( + "feast.permissions.auth.oidc_token_parser.OAuth2AuthorizationCodeBearer.__call__" +) +def test_oidc_token_validation_failure(mock_oauth2, oidc_config): + 
mock_oauth2.side_effect = AuthenticationError("wrong token") + + access_token = "aaa-bbb-ccc" + token_parser = OidcTokenParser(auth_config=oidc_config) + with pytest.raises(AuthenticationError): + asyncio.run( + token_parser.user_details_from_access_token(access_token=access_token) + ) + + +@mock.patch.dict(os.environ, {"INTRA_COMMUNICATION_BASE64": "test1234"}) +@pytest.mark.parametrize( + "intra_communication_val, is_intra_server", + [ + ("test1234", True), + ("my-name", False), + ], +) +def test_oidc_inter_server_comm( + intra_communication_val, is_intra_server, oidc_config, monkeypatch +): + async def mock_oath2(self, request): + return "OK" + + monkeypatch.setattr( + "feast.permissions.auth.oidc_token_parser.OAuth2AuthorizationCodeBearer.__call__", + mock_oath2, + ) + signing_key = MagicMock() + signing_key.key = "a-key" + monkeypatch.setattr( + "feast.permissions.auth.oidc_token_parser.PyJWKClient.get_signing_key_from_jwt", + lambda self, access_token: signing_key, + ) + + user_data = { + "preferred_username": f"{intra_communication_val}", + } + + if not is_intra_server: + user_data["resource_access"] = {_CLIENT_ID: {"roles": ["reader", "writer"]}} + + monkeypatch.setattr( + "feast.permissions.oidc_service.OIDCDiscoveryService._fetch_discovery_data", + lambda self, *args, **kwargs: { + "authorization_endpoint": "https://localhost:8080/realms/master/protocol/openid-connect/auth", + "token_endpoint": "https://localhost:8080/realms/master/protocol/openid-connect/token", + "jwks_uri": "https://localhost:8080/realms/master/protocol/openid-connect/certs", + }, + ) + + monkeypatch.setattr( + "feast.permissions.auth.oidc_token_parser.jwt.decode", + lambda self, *args, **kwargs: user_data, + ) + + access_token = "aaa-bbb-ccc" + token_parser = OidcTokenParser(auth_config=oidc_config) + user = asyncio.run( + token_parser.user_details_from_access_token(access_token=access_token) + ) + + if is_intra_server: + assertpy.assert_that(user).is_not_none() + assertpy.assert_that(user.username).is_equal_to(intra_communication_val) + assertpy.assert_that(user.roles).is_equal_to([]) + else: + assertpy.assert_that(user).is_not_none() + assertpy.assert_that(user).is_type_of(User) + if isinstance(user, User): + assertpy.assert_that(user.username).is_equal_to("my-name") + assertpy.assert_that(user.roles.sort()).is_equal_to( + ["reader", "writer"].sort() + ) + assertpy.assert_that(user.has_matching_role(["reader"])).is_true() + assertpy.assert_that(user.has_matching_role(["writer"])).is_true() + assertpy.assert_that(user.has_matching_role(["updater"])).is_false() + + +# TODO RBAC: Move role bindings to a reusable fixture +@patch("feast.permissions.auth.kubernetes_token_parser.config.load_incluster_config") +@patch("feast.permissions.auth.kubernetes_token_parser.jwt.decode") +@patch( + "feast.permissions.auth.kubernetes_token_parser.client.RbacAuthorizationV1Api.list_namespaced_role_binding" +) +@patch( + "feast.permissions.auth.kubernetes_token_parser.client.RbacAuthorizationV1Api.list_cluster_role_binding" +) +def test_k8s_token_validation_success( + mock_crb, + mock_rb, + mock_jwt, + mock_config, + rolebindings, + clusterrolebindings, +): + sa_name = "my-name" + namespace = "my-ns" + subject = f"system:serviceaccount:{namespace}:{sa_name}" + mock_jwt.return_value = {"sub": subject} + + mock_rb.return_value = rolebindings["items"] + mock_crb.return_value = clusterrolebindings["items"] + + roles = rolebindings["roles"] + croles = clusterrolebindings["roles"] + + access_token = "aaa-bbb-ccc" + token_parser = 
KubernetesTokenParser() + user = asyncio.run( + token_parser.user_details_from_access_token(access_token=access_token) + ) + + assertpy.assert_that(user).is_type_of(User) + if isinstance(user, User): + assertpy.assert_that(user.username).is_equal_to(f"{namespace}:{sa_name}") + assertpy.assert_that(user.roles.sort()).is_equal_to((roles + croles).sort()) + for r in roles: + assertpy.assert_that(user.has_matching_role([r])).is_true() + for cr in croles: + assertpy.assert_that(user.has_matching_role([cr])).is_true() + assertpy.assert_that(user.has_matching_role(["foo"])).is_false() + + +@patch("feast.permissions.auth.kubernetes_token_parser.config.load_incluster_config") +@patch("feast.permissions.auth.kubernetes_token_parser.jwt.decode") +def test_k8s_token_validation_failure(mock_jwt, mock_config): + subject = "wrong-subject" + mock_jwt.return_value = {"sub": subject} + + access_token = "aaa-bbb-ccc" + token_parser = KubernetesTokenParser() + with pytest.raises(AuthenticationError): + asyncio.run( + token_parser.user_details_from_access_token(access_token=access_token) + ) + + +@mock.patch.dict(os.environ, {"INTRA_COMMUNICATION_BASE64": "test1234"}) +@pytest.mark.parametrize( + "intra_communication_val, is_intra_server", + [ + ("test1234", True), + ("my-name", False), + ], +) +def test_k8s_inter_server_comm( + intra_communication_val, + is_intra_server, + oidc_config, + request, + rolebindings, + clusterrolebindings, + monkeypatch, +): + if is_intra_server: + subject = f":::{intra_communication_val}" + else: + sa_name = request.getfixturevalue("sa_name") + namespace = request.getfixturevalue("namespace") + subject = f"system:serviceaccount:{namespace}:{sa_name}" + rolebindings = request.getfixturevalue("rolebindings") + clusterrolebindings = request.getfixturevalue("clusterrolebindings") + + monkeypatch.setattr( + "feast.permissions.auth.kubernetes_token_parser.client.RbacAuthorizationV1Api.list_namespaced_role_binding", + lambda *args, **kwargs: rolebindings["items"], + ) + monkeypatch.setattr( + "feast.permissions.auth.kubernetes_token_parser.client.RbacAuthorizationV1Api.list_cluster_role_binding", + lambda *args, **kwargs: clusterrolebindings["items"], + ) + monkeypatch.setattr( + "feast.permissions.client.kubernetes_auth_client_manager.KubernetesAuthClientManager.get_token", + lambda self: "my-token", + ) + + monkeypatch.setattr( + "feast.permissions.auth.kubernetes_token_parser.config.load_incluster_config", + lambda: None, + ) + + monkeypatch.setattr( + "feast.permissions.auth.kubernetes_token_parser.jwt.decode", + lambda *args, **kwargs: {"sub": subject}, + ) + + roles = rolebindings["roles"] + croles = clusterrolebindings["roles"] + + access_token = "aaa-bbb-ccc" + token_parser = KubernetesTokenParser() + user = asyncio.run( + token_parser.user_details_from_access_token(access_token=access_token) + ) + + if is_intra_server: + assertpy.assert_that(user).is_not_none() + assertpy.assert_that(user.username).is_equal_to(intra_communication_val) + assertpy.assert_that(user.roles).is_equal_to([]) + else: + assertpy.assert_that(user).is_type_of(User) + if isinstance(user, User): + assertpy.assert_that(user.username).is_equal_to(f"{namespace}:{sa_name}") + assertpy.assert_that(user.roles.sort()).is_equal_to((roles + croles).sort()) + for r in roles: + assertpy.assert_that(user.has_matching_role([r])).is_true() + for cr in croles: + assertpy.assert_that(user.has_matching_role([cr])).is_true() + assertpy.assert_that(user.has_matching_role(["foo"])).is_false() diff --git 
diff --git a/sdk/python/tests/unit/permissions/conftest.py b/sdk/python/tests/unit/permissions/conftest.py
new file mode 100644
index 0000000000..6adbc6ec54
--- /dev/null
+++ b/sdk/python/tests/unit/permissions/conftest.py
@@ -0,0 +1,108 @@
+from unittest.mock import Mock
+
+import pytest
+
+from feast import FeatureView
+from feast.entity import Entity
+from feast.infra.registry.base_registry import BaseRegistry
+from feast.permissions.decorator import require_permissions
+from feast.permissions.permission import AuthzedAction, Permission
+from feast.permissions.policy import RoleBasedPolicy
+from feast.permissions.security_manager import (
+    SecurityManager,
+    set_security_manager,
+)
+from feast.permissions.user import User
+
+
+class SecuredFeatureView(FeatureView):
+    def __init__(self, name, tags):
+        super().__init__(
+            name=name,
+            source=Mock(),
+            tags=tags,
+        )
+
+    @require_permissions(actions=[AuthzedAction.DESCRIBE])
+    def read_protected(self) -> bool:
+        return True
+
+    @require_permissions(actions=[AuthzedAction.UPDATE])
+    def write_protected(self) -> bool:
+        return True
+
+    def unprotected(self) -> bool:
+        return True
+
+
+@pytest.fixture
+def feature_views() -> list[FeatureView]:
+    return [
+        SecuredFeatureView("secured", {}),
+        SecuredFeatureView("special-secured", {}),
+    ]
+
+
+@pytest.fixture
+def users() -> dict[str, User]:
+    users = []
+    users.append(User("r", ["reader"]))
+    users.append(User("w", ["writer"]))
+    users.append(User("rw", ["reader", "writer"]))
+    users.append(User("special", ["reader", "writer", "special-reader"]))
+    users.append(User("updater", ["updater"]))
+    users.append(User("creator", ["creator"]))
+    users.append(User("admin", ["updater", "creator"]))
+    return dict([(u.username, u) for u in users])
+
+
+@pytest.fixture
+def security_manager() -> SecurityManager:
+    permissions = []
+    permissions.append(
+        Permission(
+            name="reader",
+            types=FeatureView,
+            policy=RoleBasedPolicy(roles=["reader"]),
+            actions=[AuthzedAction.DESCRIBE],
+        )
+    )
+    permissions.append(
+        Permission(
+            name="writer",
+            types=FeatureView,
+            policy=RoleBasedPolicy(roles=["writer"]),
+            actions=[AuthzedAction.UPDATE],
+        )
+    )
+    permissions.append(
+        Permission(
+            name="special",
+            types=FeatureView,
+            name_pattern="special.*",
+            policy=RoleBasedPolicy(roles=["special-reader"]),
+            actions=[AuthzedAction.DESCRIBE, AuthzedAction.UPDATE],
+        )
+    )
+    permissions.append(
+        Permission(
+            name="entity_updater",
+            types=Entity,
+            policy=RoleBasedPolicy(roles=["updater"]),
+            actions=[AuthzedAction.DESCRIBE, AuthzedAction.UPDATE],
+        )
+    )
+    permissions.append(
+        Permission(
+            name="entity_creator",
+            types=Entity,
+            policy=RoleBasedPolicy(roles=["creator"]),
+            actions=[AuthzedAction.CREATE],
+        )
+    )
+
+    registry = Mock(spec=BaseRegistry)
+    registry.list_permissions = Mock(return_value=permissions)
+    sm = SecurityManager(project="any", registry=registry)
+    set_security_manager(sm)
+    return sm
diff --git a/sdk/python/tests/unit/permissions/test_decision.py b/sdk/python/tests/unit/permissions/test_decision.py
new file mode 100644
index 0000000000..23bafedeab
--- /dev/null
+++ b/sdk/python/tests/unit/permissions/test_decision.py
@@ -0,0 +1,34 @@
+import assertpy
+import pytest
+
+from feast.permissions.decision import DecisionEvaluator
+
+# Each vote is a tuple of `current_vote` and the expected output of `is_decided`
+
+
+@pytest.mark.parametrize(
+    "evaluator, votes, decision, no_of_explanations",
+    [
+        (DecisionEvaluator(3), [(True, True)], True, 0),
+        (DecisionEvaluator(3), [(True, True)], True, 0),
+        (
DecisionEvaluator(3), + [(False, False), (False, False), (False, True)], + False, + 3, + ), + ], +) +def test_decision_evaluator(evaluator, votes, decision, no_of_explanations): + for v in votes: + vote = v[0] + decided = v[1] + evaluator.add_grant(vote, "" if vote else "a message") + if decided: + assertpy.assert_that(evaluator.is_decided()).is_true() + else: + assertpy.assert_that(evaluator.is_decided()).is_false() + + grant, explanations = evaluator.grant() + assertpy.assert_that(grant).is_equal_to(decision) + assertpy.assert_that(explanations).is_length(no_of_explanations) diff --git a/sdk/python/tests/unit/permissions/test_decorator.py b/sdk/python/tests/unit/permissions/test_decorator.py new file mode 100644 index 0000000000..f434301a2c --- /dev/null +++ b/sdk/python/tests/unit/permissions/test_decorator.py @@ -0,0 +1,34 @@ +import assertpy +import pytest + +from feast.errors import FeastPermissionError + + +@pytest.mark.parametrize( + "username, can_read, can_write", + [ + (None, True, True), + ("r", True, False), + ("w", False, True), + ("rw", True, True), + ], +) +def test_access_SecuredFeatureView( + security_manager, feature_views, users, username, can_read, can_write +): + sm = security_manager + fv = feature_views[0] + user = users.get(username) + + sm.set_current_user(user) + if can_read: + fv.read_protected() + else: + with pytest.raises(FeastPermissionError): + fv.read_protected() + if can_write: + fv.write_protected() + else: + with pytest.raises(FeastPermissionError): + fv.write_protected() + assertpy.assert_that(fv.unprotected()).is_true() diff --git a/sdk/python/tests/unit/permissions/test_oidc_auth_client.py b/sdk/python/tests/unit/permissions/test_oidc_auth_client.py new file mode 100644 index 0000000000..68aec70fc7 --- /dev/null +++ b/sdk/python/tests/unit/permissions/test_oidc_auth_client.py @@ -0,0 +1,63 @@ +from unittest.mock import patch + +from requests import Session + +from feast.permissions.auth_model import ( + KubernetesAuthConfig, + NoAuthConfig, + OidcClientAuthConfig, +) +from feast.permissions.client.http_auth_requests_wrapper import ( + AuthenticatedRequestsSession, + get_http_auth_requests_session, +) +from feast.permissions.client.kubernetes_auth_client_manager import ( + KubernetesAuthClientManager, +) +from feast.permissions.client.oidc_authentication_client_manager import ( + OidcAuthClientManager, +) + +MOCKED_TOKEN_VALUE: str = "dummy_token" + + +def _get_dummy_oidc_auth_type() -> OidcClientAuthConfig: + oidc_config = OidcClientAuthConfig( + auth_discovery_url="http://localhost:8080/realms/master/.well-known/openid-configuration", + type="oidc", + username="admin_test", + password="password_test", + client_id="dummy_client_id", + client_secret="client_secret", + ) + return oidc_config + + +@patch.object(KubernetesAuthClientManager, "get_token", return_value=MOCKED_TOKEN_VALUE) +@patch.object(OidcAuthClientManager, "get_token", return_value=MOCKED_TOKEN_VALUE) +def test_http_auth_requests_session(mock_kubernetes_token, mock_oidc_token): + no_auth_config = NoAuthConfig() + assert isinstance(get_http_auth_requests_session(no_auth_config), Session) + + oidc_auth_config = _get_dummy_oidc_auth_type() + oidc_auth_requests_session = get_http_auth_requests_session(oidc_auth_config) + _assert_auth_requests_session(oidc_auth_requests_session, MOCKED_TOKEN_VALUE) + + kubernetes_auth_config = KubernetesAuthConfig(type="kubernetes") + kubernetes_auth_requests_session = get_http_auth_requests_session( + kubernetes_auth_config + ) + 
_assert_auth_requests_session(kubernetes_auth_requests_session, MOCKED_TOKEN_VALUE) + + +def _assert_auth_requests_session( + auth_req_session: AuthenticatedRequestsSession, expected_token: str +): + assert isinstance(auth_req_session, AuthenticatedRequestsSession) + assert "Authorization" in auth_req_session.headers, ( + "Authorization header is missing in object of class: " + "AuthenticatedRequestsSession " + ) + assert ( + auth_req_session.headers["Authorization"] == f"Bearer {expected_token}" + ), "Authorization token is incorrect" diff --git a/sdk/python/tests/unit/permissions/test_permission.py b/sdk/python/tests/unit/permissions/test_permission.py new file mode 100644 index 0000000000..606d750d81 --- /dev/null +++ b/sdk/python/tests/unit/permissions/test_permission.py @@ -0,0 +1,205 @@ +from unittest.mock import Mock + +import assertpy +import pytest + +from feast.batch_feature_view import BatchFeatureView +from feast.data_source import DataSource +from feast.entity import Entity +from feast.feast_object import ALL_RESOURCE_TYPES +from feast.feature_service import FeatureService +from feast.feature_view import FeatureView +from feast.on_demand_feature_view import OnDemandFeatureView +from feast.permissions.action import ALL_ACTIONS, AuthzedAction +from feast.permissions.permission import ( + Permission, +) +from feast.permissions.policy import AllowAll, Policy +from feast.saved_dataset import ValidationReference +from feast.stream_feature_view import StreamFeatureView + + +def test_defaults(): + p = Permission(name="test") + assertpy.assert_that(type(p.types)).is_equal_to(list) + assertpy.assert_that(p.types).is_equal_to(ALL_RESOURCE_TYPES) + assertpy.assert_that(p.name_pattern).is_none() + assertpy.assert_that(p.tags).is_none() + assertpy.assert_that(type(p.actions)).is_equal_to(list) + assertpy.assert_that(p.actions).is_equal_to(ALL_ACTIONS) + assertpy.assert_that(type(p.actions)).is_equal_to(list) + assertpy.assert_that(isinstance(p.policy, Policy)).is_true() + assertpy.assert_that(p.policy).is_equal_to(AllowAll) + + +@pytest.mark.parametrize( + "dict, result", + [ + ({"types": None}, True), + ({"types": []}, True), + ({"types": ALL_RESOURCE_TYPES}, True), + ({"types": [FeatureView, FeatureService]}, True), + ({"actions": None}, False), + ({"actions": []}, False), + ({"actions": ALL_ACTIONS}, True), + ({"actions": ALL_ACTIONS}, True), + ({"actions": [AuthzedAction.CREATE, AuthzedAction.DELETE]}, True), + ({"policy": None}, False), + ({"policy": []}, False), + ({"policy": Mock(spec=Policy)}, True), + ], +) +def test_validity(dict, result): + if not result: + with pytest.raises(ValueError): + Permission(name="test", **dict) + else: + Permission(name="test", **dict) + + +def test_normalized_args(): + p = Permission(name="test") + assertpy.assert_that(type(p.types)).is_equal_to(list) + assertpy.assert_that(p.types).is_equal_to(ALL_RESOURCE_TYPES) + + p = Permission(name="test", actions=AuthzedAction.CREATE) + assertpy.assert_that(type(p.actions)).is_equal_to(list) + assertpy.assert_that(p.actions).is_equal_to([AuthzedAction.CREATE]) + + +@pytest.mark.parametrize( + "resource, types, result", + [ + (None, ALL_RESOURCE_TYPES, False), + ("invalid string", ALL_RESOURCE_TYPES, False), + ("ALL", ALL_RESOURCE_TYPES, False), + ("ALL", ALL_RESOURCE_TYPES, False), + ( + Mock(spec=FeatureView), + [t for t in ALL_RESOURCE_TYPES if t not in [FeatureView]], + False, + ), + ( + Mock(spec=OnDemandFeatureView), + [t for t in ALL_RESOURCE_TYPES if t not in [OnDemandFeatureView]], + False, + ), # 
OnDemandFeatureView is a BaseFeatureView + ( + Mock(spec=BatchFeatureView), + FeatureView, + True, + ), # BatchFeatureView is a FeatureView + ( + Mock(spec=BatchFeatureView), + [t for t in ALL_RESOURCE_TYPES if t not in [FeatureView, BatchFeatureView]], + False, + ), + ( + Mock(spec=StreamFeatureView), + FeatureView, + True, + ), # StreamFeatureView is a FeatureView + ( + Mock(spec=StreamFeatureView), + [ + t + for t in ALL_RESOURCE_TYPES + if t not in [FeatureView, StreamFeatureView] + ], + False, + ), + ( + Mock(spec=Entity), + [t for t in ALL_RESOURCE_TYPES if t not in [Entity]], + False, + ), + ( + Mock(spec=FeatureService), + [t for t in ALL_RESOURCE_TYPES if t not in [FeatureService]], + False, + ), + ( + Mock(spec=DataSource), + [t for t in ALL_RESOURCE_TYPES if t not in [DataSource]], + False, + ), + ( + Mock(spec=ValidationReference), + [t for t in ALL_RESOURCE_TYPES if t not in [ValidationReference]], + False, + ), + ( + Mock(spec=Permission), + [t for t in ALL_RESOURCE_TYPES if t not in [Permission]], + False, + ), + ] + + [(Mock(spec=t), ALL_RESOURCE_TYPES, True) for t in ALL_RESOURCE_TYPES] + + [(Mock(spec=t), [t], True) for t in ALL_RESOURCE_TYPES], +) +def test_match_resource_with_subclasses(resource, types, result): + p = Permission(name="test", types=types) + assertpy.assert_that(p.match_resource(resource)).is_equal_to(result) + + +@pytest.mark.parametrize( + "pattern, name, match", + [ + ("test.*", "test", True), + ("test.*", "test1", True), + ("test.*", "wrongtest", False), + (".*test.*", "wrongtest", True), + ], +) +def test_resource_match_with_name_filter(pattern, name, match): + p = Permission(name="test", name_pattern=pattern) + for t in ALL_RESOURCE_TYPES: + resource = Mock(spec=t) + resource.name = name + assertpy.assert_that(p.match_resource(resource)).is_equal_to(match) + + +@pytest.mark.parametrize( + ("required_tags, tags, result"), + [ + ({"owner": "dev"}, {}, False), + ({"owner": "dev"}, {"owner": "master"}, False), + ({"owner": "dev"}, {"owner": "dev", "other": 1}, True), + ({"owner": "dev", "dep": 1}, {"owner": "dev", "other": 1}, False), + ({"owner": "dev", "dep": 1}, {"owner": "dev", "other": 1, "dep": 1}, True), + ], +) +def test_resource_match_with_tags(required_tags, tags, result): + # Missing tags + p = Permission(name="test", required_tags=required_tags) + for t in ALL_RESOURCE_TYPES: + resource = Mock(spec=t) + resource.name = "test" + resource.required_tags = tags + assertpy.assert_that(p.match_resource(resource)).is_equal_to(result) + + +@pytest.mark.parametrize( + ("permitted_actions, requested_actions, result"), + [(ALL_ACTIONS, [a], True) for a in AuthzedAction.__members__.values()] + + [ + ( + [AuthzedAction.CREATE, AuthzedAction.DELETE], + [AuthzedAction.CREATE, AuthzedAction.DELETE], + True, + ), + ([AuthzedAction.CREATE, AuthzedAction.DELETE], [AuthzedAction.CREATE], True), + ([AuthzedAction.CREATE, AuthzedAction.DELETE], [AuthzedAction.DELETE], True), + ([AuthzedAction.CREATE, AuthzedAction.DELETE], [AuthzedAction.UPDATE], False), + ( + [AuthzedAction.CREATE, AuthzedAction.DELETE], + [AuthzedAction.CREATE, AuthzedAction.DELETE, AuthzedAction.UPDATE], + False, + ), + ], +) +def test_match_actions(permitted_actions, requested_actions, result): + p = Permission(name="test", actions=permitted_actions) + assertpy.assert_that( + p.match_actions(requested_actions=requested_actions) + ).is_equal_to(result) diff --git a/sdk/python/tests/unit/permissions/test_policy.py b/sdk/python/tests/unit/permissions/test_policy.py new file mode 100644 index 
0000000000..4e78282d4f --- /dev/null +++ b/sdk/python/tests/unit/permissions/test_policy.py @@ -0,0 +1,44 @@ +import assertpy +import pytest + +from feast.permissions.policy import AllowAll, RoleBasedPolicy +from feast.permissions.user import User + + +@pytest.mark.parametrize( + "username", + [("r"), ("w"), ("rw"), ("missing")], +) +def test_allow_all(users, username): + user = users.get(username, User(username, [])) + assertpy.assert_that(AllowAll.validate_user(user)).is_true() + + +@pytest.mark.parametrize( + "required_roles, username, result", + [ + (["reader"], "r", True), + (["writer"], "r", False), + (["reader", "writer"], "r", True), + (["writer", "updater"], "r", False), + (["reader"], "w", False), + (["writer"], "w", True), + (["reader", "writer"], "w", True), + (["reader", "updater"], "w", False), + (["reader"], "rw", True), + (["writer"], "rw", True), + (["reader", "writer"], "rw", True), + (["updater"], "rw", False), + ], +) +def test_role_based_policy(users, required_roles, username, result): + user = users.get(username) + policy = RoleBasedPolicy(roles=required_roles) + + validate_result, explain = policy.validate_user(user) + assertpy.assert_that(validate_result).is_equal_to(result) + + if result is True: + assertpy.assert_that(explain).is_equal_to("") + else: + assertpy.assert_that(len(explain)).is_greater_than(0) diff --git a/sdk/python/tests/unit/permissions/test_security_manager.py b/sdk/python/tests/unit/permissions/test_security_manager.py new file mode 100644 index 0000000000..11b8dfb88e --- /dev/null +++ b/sdk/python/tests/unit/permissions/test_security_manager.py @@ -0,0 +1,328 @@ +import assertpy +import pytest + +from feast.entity import Entity +from feast.errors import FeastObjectNotFoundException, FeastPermissionError +from feast.permissions.action import READ, AuthzedAction +from feast.permissions.security_manager import ( + assert_permissions, + assert_permissions_to_update, + permitted_resources, +) +from feast.permissions.user import User + + +@pytest.mark.parametrize( + "username, requested_actions, allowed, allowed_single, raise_error_in_assert, raise_error_in_permit, intra_communication_flag", + [ + (None, [], True, [True, True], [False, False], False, False), + (None, [], True, [True, True], [False, False], False, True), + ( + "r", + [AuthzedAction.DESCRIBE], + True, + [True, True], + [False, False], + False, + False, + ), + ( + "r", + [AuthzedAction.DESCRIBE], + True, + [True, True], + [False, False], + False, + True, + ), + ("server_intra_com_val", [], True, [True, True], [False, False], False, True), + ( + "r", + [AuthzedAction.UPDATE], + False, + [False, False], + [True, True], + False, + False, + ), + ("r", [AuthzedAction.UPDATE], True, [True, True], [False, False], False, True), + ( + "w", + [AuthzedAction.DESCRIBE], + False, + [False, False], + [True, True], + False, + False, + ), + ( + "w", + [AuthzedAction.DESCRIBE], + True, + [True, True], + [True, True], + False, + True, + ), + ( + "w", + [AuthzedAction.UPDATE], + False, + [True, True], + [False, False], + False, + False, + ), + ("w", [AuthzedAction.UPDATE], False, [True, True], [False, False], False, True), + ( + "rw", + [AuthzedAction.DESCRIBE], + False, + [True, True], + [False, False], + False, + False, + ), + ( + "rw", + [AuthzedAction.DESCRIBE], + False, + [True, True], + [False, False], + False, + True, + ), + ( + "rw", + [AuthzedAction.UPDATE], + False, + [True, True], + [False, False], + False, + False, + ), + ( + "rw", + [AuthzedAction.UPDATE], + False, + [True, True], + [False, 
False], + False, + True, + ), + ( + "rw", + [AuthzedAction.DESCRIBE, AuthzedAction.UPDATE], + False, + [False, False], + [True, True], + True, + False, + ), + ( + "rw", + [AuthzedAction.DESCRIBE, AuthzedAction.UPDATE], + True, + [True, True], + [False, False], + False, + True, + ), + ( + "special", + [AuthzedAction.DESCRIBE, AuthzedAction.UPDATE], + False, + [False, True], + [True, False], + True, + False, + ), + ( + "admin", + [AuthzedAction.DESCRIBE, AuthzedAction.UPDATE], + True, + [True, True], + [False, False], + False, + True, + ), + ( + "special", + READ + [AuthzedAction.UPDATE], + False, + [False, False], + [True, True], + True, + False, + ), + ( + "admin", + READ + [AuthzedAction.UPDATE], + True, + [True, True], + [False, False], + False, + True, + ), + ], +) +def test_access_SecuredFeatureView( + security_manager, + feature_views, + users, + username, + requested_actions, + allowed, + allowed_single, + raise_error_in_assert, + raise_error_in_permit, + intra_communication_flag, + monkeypatch, +): + sm = security_manager + user = users.get(username) + sm.set_current_user(user) + + if intra_communication_flag: + monkeypatch.setenv("INTRA_COMMUNICATION_BASE64", "server_intra_com_val") + sm.set_current_user(User("server_intra_com_val", [])) + else: + monkeypatch.delenv("INTRA_COMMUNICATION_BASE64", False) + + resources = feature_views + + result = [] + if raise_error_in_permit: + with pytest.raises(FeastPermissionError): + result = permitted_resources(resources=resources, actions=requested_actions) + else: + result = permitted_resources(resources=resources, actions=requested_actions) + + if allowed: + assertpy.assert_that(result).is_equal_to(resources) + elif not raise_error_in_permit: + filtered = [r for i, r in enumerate(resources) if allowed_single[i]] + assertpy.assert_that(result).is_equal_to(filtered) + + for i, r in enumerate(resources): + if allowed_single[i]: + result = assert_permissions(resource=r, actions=requested_actions) + assertpy.assert_that(result).is_equal_to(r) + elif raise_error_in_assert[i]: + with pytest.raises(FeastPermissionError): + assert_permissions(resource=r, actions=requested_actions) + else: + result = assert_permissions(resource=r, actions=requested_actions) + assertpy.assert_that(result).is_none() + + +@pytest.mark.parametrize( + "username, allowed, intra_communication_flag", + [ + (None, True, False), + (None, True, True), + ("r", False, False), + ("r", True, True), + ("w", False, False), + ("w", True, True), + ("rw", False, False), + ("rw", True, True), + ("special", False, False), + ("special", True, True), + ("updater", False, False), + ("updater", True, True), + ("creator", True, False), + ("creator", True, True), + ("admin", True, False), + ("admin", True, True), + ], +) +def test_create_entity( + security_manager, + users, + username, + allowed, + intra_communication_flag, + monkeypatch, +): + sm = security_manager + user = users.get(username) + sm.set_current_user(user) + + if intra_communication_flag: + monkeypatch.setenv("INTRA_COMMUNICATION_BASE64", "server_intra_com_val") + sm.set_current_user(User("server_intra_com_val", [])) + else: + monkeypatch.delenv("INTRA_COMMUNICATION_BASE64", False) + + entity = Entity( + name="", + ) + + def getter(name: str, project: str, allow_cache: bool): + raise FeastObjectNotFoundException() + + if allowed: + result = assert_permissions_to_update( + resource=entity, getter=getter, project="" + ) + assertpy.assert_that(result).is_equal_to(entity) + else: + with pytest.raises(FeastPermissionError): + 
assert_permissions_to_update(resource=entity, getter=getter, project="") + + +@pytest.mark.parametrize( + "username, allowed, intra_communication_flag", + [ + (None, True, False), + (None, True, True), + ("r", False, False), + ("r", True, True), + ("w", False, False), + ("w", True, True), + ("rw", False, False), + ("rw", True, True), + ("special", False, False), + ("special", True, True), + ("updater", True, False), + ("updater", True, True), + ("creator", False, False), + ("creator", True, True), + ("admin", True, False), + ("admin", True, True), + ], +) +def test_update_entity( + security_manager, + users, + username, + allowed, + intra_communication_flag, + monkeypatch, +): + sm = security_manager + user = users.get(username) + sm.set_current_user(user) + + if intra_communication_flag: + monkeypatch.setenv("INTRA_COMMUNICATION_BASE64", "server_intra_com_val") + sm.set_current_user(User("server_intra_com_val", [])) + else: + monkeypatch.delenv("INTRA_COMMUNICATION_BASE64", False) + + entity = Entity( + name="", + ) + + def getter(name: str, project: str, allow_cache: bool): + return entity + + if allowed: + result = assert_permissions_to_update( + resource=entity, getter=getter, project="" + ) + assertpy.assert_that(result).is_equal_to(entity) + else: + with pytest.raises(FeastPermissionError): + assert_permissions_to_update(resource=entity, getter=getter, project="") diff --git a/sdk/python/tests/unit/permissions/test_user.py b/sdk/python/tests/unit/permissions/test_user.py new file mode 100644 index 0000000000..cce318cba7 --- /dev/null +++ b/sdk/python/tests/unit/permissions/test_user.py @@ -0,0 +1,34 @@ +import assertpy +import pytest + +from feast.permissions.user import User + + +@pytest.fixture(scope="module") +def users(): + users = [] + users.append(User("a", ["a1", "a2"])) + users.append(User("b", ["b1", "b2"])) + return dict([(u.username, u) for u in users]) + + +@pytest.mark.parametrize( + "username, roles, result", + [ + ("c", [], False), + ("a", ["b1"], False), + ("a", ["a1", "b1"], True), + ("a", ["a1"], True), + ("a", ["a1", "a2"], True), + ("a", ["a1", "a2", "a3"], True), + ("b", ["a1", "a3"], False), + ("b", ["a1", "b1"], True), + ("b", ["b1", "b2"], True), + ("b", ["b1", "b2", "b3"], True), + ], +) +def test_user_has_matching_role(users, username, roles, result): + user = users.get(username, User(username, [])) + assertpy.assert_that(user.has_matching_role(requested_roles=roles)).is_equal_to( + result + ) diff --git a/sdk/python/tests/unit/test_arrow_error_decorator.py b/sdk/python/tests/unit/test_arrow_error_decorator.py new file mode 100644 index 0000000000..fc350d34c0 --- /dev/null +++ b/sdk/python/tests/unit/test_arrow_error_decorator.py @@ -0,0 +1,33 @@ +import pyarrow.flight as fl +import pytest + +from feast.arrow_error_handler import arrow_client_error_handling_decorator +from feast.errors import PermissionNotFoundException + +permissionError = PermissionNotFoundException("dummy_name", "dummy_project") + + +@arrow_client_error_handling_decorator +def decorated_method(error): + raise error + + +@pytest.mark.parametrize( + "error, expected_raised_error", + [ + (fl.FlightError("Flight error: "), fl.FlightError("Flight error: ")), + ( + fl.FlightError(f"Flight error: {permissionError.to_error_detail()}"), + permissionError, + ), + (fl.FlightError("Test Error"), fl.FlightError("Test Error")), + (RuntimeError("Flight error: "), RuntimeError("Flight error: ")), + (permissionError, permissionError), + ], +) +def test_rest_error_handling_with_feast_exception(error, 
expected_raised_error):
+    with pytest.raises(
+        type(expected_raised_error),
+        match=str(expected_raised_error),
+    ):
+        decorated_method(error)
diff --git a/sdk/python/tests/unit/test_errors.py b/sdk/python/tests/unit/test_errors.py
new file mode 100644
index 0000000000..b3f33690da
--- /dev/null
+++ b/sdk/python/tests/unit/test_errors.py
@@ -0,0 +1,26 @@
+import re
+
+import assertpy
+
+import feast.errors as errors
+
+
+def test_error_error_detail():
+    e = errors.FeatureViewNotFoundException("abc")
+
+    d = e.to_error_detail()
+
+    assertpy.assert_that(d).is_not_none()
+    assertpy.assert_that(d).contains('"module": "feast.errors"')
+    assertpy.assert_that(d).contains('"class": "FeatureViewNotFoundException"')
+    assertpy.assert_that(re.search(r"abc", d)).is_true()
+
+    converted_e = errors.FeastError.from_error_detail(d)
+    assertpy.assert_that(converted_e).is_not_none()
+    assertpy.assert_that(str(converted_e)).is_equal_to(str(e))
+    assertpy.assert_that(repr(converted_e)).is_equal_to(repr(e))
+
+
+def test_invalid_error_error_detail():
+    e = errors.FeastError.from_error_detail("invalid")
+    assertpy.assert_that(e).is_none()
diff --git a/sdk/python/tests/unit/test_feature_views.py b/sdk/python/tests/unit/test_feature_views.py
index 981968df0d..ce789c706c 100644
--- a/sdk/python/tests/unit/test_feature_views.py
+++ b/sdk/python/tests/unit/test_feature_views.py
@@ -111,7 +111,27 @@ def test_hash():
     assert len(s4) == 3
 
 
-# TODO(felixwang9817): Add tests for proto conversion.
+def test_proto_conversion():
+    file_source = FileSource(name="my-file-source", path="test.parquet")
+    feature_view_1 = FeatureView(
+        name="my-feature-view",
+        entities=[],
+        schema=[
+            Field(name="feature1", dtype=Float32),
+            Field(name="feature2", dtype=Float32),
+        ],
+        source=file_source,
+    )
+
+    feature_view_proto = feature_view_1.to_proto()
+    assert (
+        feature_view_proto.spec.name == "my-feature-view"
+        and feature_view_proto.spec.batch_source.file_options.uri == "test.parquet"
+        and feature_view_proto.spec.batch_source.name == "my-file-source"
+        and feature_view_proto.spec.batch_source.type == 1  # SourceType.BATCH_FILE
+    )
+
+
 # TODO(felixwang9817): Add tests for field mapping logic.
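
(Reviewer note: test_errors.py above pins down the error round-trip that the new remote servers rely on — a server-side FeastError is serialized with to_error_detail(), carried in a FlightError message or an HTTP error body, and rebuilt on the client with FeastError.from_error_detail(). A minimal standalone sketch of that pattern follows, using only the public feast.errors API exercised by these tests; the embed_error/extract_error helpers are illustrative stand-ins, not code from this PR.)

import feast.errors as errors


def embed_error(e: errors.FeastError) -> str:
    # Hypothetical server-side helper: serialize module, class and message
    # into a transport-friendly string.
    return f"Flight error: {e.to_error_detail()}"


def extract_error(message: str) -> Exception:
    # Hypothetical client-side helper: try to rehydrate the original FeastError;
    # from_error_detail() returns None for unparseable input, so fall back to a
    # generic error carrying the raw message.
    restored = errors.FeastError.from_error_detail(
        message.removeprefix("Flight error: ")
    )
    return restored if restored is not None else RuntimeError(message)


original = errors.FeatureViewNotFoundException("driver_hourly_stats")
assert str(extract_error(embed_error(original))) == str(original)
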
diff --git a/sdk/python/tests/unit/test_offline_server.py b/sdk/python/tests/unit/test_offline_server.py index 5991e7450d..7c38d9bfca 100644 --- a/sdk/python/tests/unit/test_offline_server.py +++ b/sdk/python/tests/unit/test_offline_server.py @@ -8,13 +8,14 @@ import pyarrow.flight as flight import pytest -from feast import FeatureStore +from feast import FeatureStore, FeatureView, FileSource +from feast.errors import FeatureViewNotFoundException from feast.feature_logging import FeatureServiceLoggingSource from feast.infra.offline_stores.remote import ( RemoteOfflineStore, RemoteOfflineStoreConfig, ) -from feast.offline_server import OfflineServer +from feast.offline_server import OfflineServer, _init_auth_manager from feast.repo_config import RepoConfig from tests.utils.cli_repo_creator import CliRunner @@ -26,6 +27,7 @@ def empty_offline_server(environment): store = environment.feature_store location = "grpc+tcp://localhost:0" + _init_auth_manager(store=store) return OfflineServer(store=store, location=location) @@ -102,6 +104,8 @@ def test_remote_offline_store_apis(): with tempfile.TemporaryDirectory() as temp_dir: store = default_store(str(temp_dir)) location = "grpc+tcp://localhost:0" + + _init_auth_manager(store=store) server = OfflineServer(store=store, location=location) assertpy.assert_that(server).is_not_none @@ -117,6 +121,35 @@ def test_remote_offline_store_apis(): _test_pull_all_from_table_or_query(str(temp_dir), fs) +def test_remote_offline_store_exception_handling(): + with tempfile.TemporaryDirectory() as temp_dir: + store = default_store(str(temp_dir)) + location = "grpc+tcp://localhost:0" + + _init_auth_manager(store=store) + server = OfflineServer(store=store, location=location) + + assertpy.assert_that(server).is_not_none + assertpy.assert_that(server.port).is_not_equal_to(0) + + fs = remote_feature_store(server) + data_file = os.path.join( + temp_dir, fs.project, "feature_repo/data/driver_stats.parquet" + ) + data_df = pd.read_parquet(data_file) + + with pytest.raises( + FeatureViewNotFoundException, + match="Feature view test does not exist in project test_remote_offline", + ): + RemoteOfflineStore.offline_write_batch( + fs.config, + FeatureView(name="test", source=FileSource(path="test")), + pa.Table.from_pandas(data_df), + progress=None, + ) + + def _test_get_historical_features_returns_data(fs: FeatureStore): entity_df = pd.DataFrame.from_dict( { diff --git a/sdk/python/tests/unit/test_on_demand_feature_view.py b/sdk/python/tests/unit/test_on_demand_feature_view.py index d9cc5dee50..6073891aba 100644 --- a/sdk/python/tests/unit/test_on_demand_feature_view.py +++ b/sdk/python/tests/unit/test_on_demand_feature_view.py @@ -251,11 +251,9 @@ def test_from_proto_backwards_compatible_udf(): proto.spec.feature_transformation.user_defined_function.body_text ) - # And now we're going to null the feature_transformation proto object before reserializing the entire proto - # proto.spec.user_defined_function.body_text = on_demand_feature_view.transformation.udf_string - proto.spec.feature_transformation.user_defined_function.name = "" - proto.spec.feature_transformation.user_defined_function.body = b"" - proto.spec.feature_transformation.user_defined_function.body_text = "" + # For objects that are already registered, feature_transformation and mode is not set + proto.spec.feature_transformation.Clear() + proto.spec.ClearField("mode") # And now we expect the to get the same object back under feature_transformation reserialized_proto = OnDemandFeatureView.from_proto(proto) diff 
--git a/sdk/python/tests/unit/test_on_demand_python_transformation.py b/sdk/python/tests/unit/test_on_demand_python_transformation.py index c5bd68d6a8..ff7ad494ca 100644 --- a/sdk/python/tests/unit/test_on_demand_python_transformation.py +++ b/sdk/python/tests/unit/test_on_demand_python_transformation.py @@ -307,6 +307,8 @@ def setUp(self): online=True, source=driver_stats_source, ) + assert driver_stats_fv.entities == [driver.name] + assert driver_stats_fv.entity_columns == [] request_source = RequestSource( name="request_source", @@ -373,6 +375,11 @@ def python_view(inputs: dict[str, Any]) -> dict[str, Any]: feature_view_name="driver_hourly_stats", df=driver_df ) + fv_applied = self.store.get_feature_view("driver_hourly_stats") + assert fv_applied.entities == [driver.name] + # Note here that after apply() is called, the entity_columns are populated with the join_key + assert fv_applied.entity_columns[0].name == driver.join_key + def test_python_transformation_returning_all_data_types(self): entity_rows = [ { diff --git a/sdk/python/tests/unit/test_project.py b/sdk/python/tests/unit/test_project.py new file mode 100644 index 0000000000..f15aef2972 --- /dev/null +++ b/sdk/python/tests/unit/test_project.py @@ -0,0 +1,122 @@ +import unittest +from datetime import datetime, timezone + +from feast.project import Project +from feast.protos.feast.core.Project_pb2 import Project as ProjectProto +from feast.protos.feast.core.Project_pb2 import ProjectMeta as ProjectMetaProto +from feast.protos.feast.core.Project_pb2 import ProjectSpec as ProjectSpecProto + + +class TestProject(unittest.TestCase): + def setUp(self): + self.project_name = "test_project" + self.description = "Test project description" + self.tags = {"env": "test"} + self.owner = "test_owner" + self.created_timestamp = datetime.now(tz=timezone.utc) + self.last_updated_timestamp = datetime.now(tz=timezone.utc) + + def test_initialization(self): + project = Project( + name=self.project_name, + description=self.description, + tags=self.tags, + owner=self.owner, + created_timestamp=self.created_timestamp, + last_updated_timestamp=self.last_updated_timestamp, + ) + self.assertEqual(project.name, self.project_name) + self.assertEqual(project.description, self.description) + self.assertEqual(project.tags, self.tags) + self.assertEqual(project.owner, self.owner) + self.assertEqual(project.created_timestamp, self.created_timestamp) + self.assertEqual(project.last_updated_timestamp, self.last_updated_timestamp) + + def test_equality(self): + project1 = Project(name=self.project_name) + project2 = Project(name=self.project_name) + project3 = Project(name="different_project") + self.assertTrue( + project1.name == project2.name + and project1.description == project2.description + and project1.tags == project2.tags + and project1.owner == project2.owner + ) + self.assertFalse( + project1.name == project3.name + and project1.description == project3.description + and project1.tags == project3.tags + and project1.owner == project3.owner + ) + + def test_is_valid(self): + project = Project(name=self.project_name) + project.is_valid() + with self.assertRaises(ValueError): + invalid_project = Project(name="") + invalid_project.is_valid() + + def test_from_proto(self): + meta = ProjectMetaProto() + meta.created_timestamp.FromDatetime(self.created_timestamp) + meta.last_updated_timestamp.FromDatetime(self.last_updated_timestamp) + project_proto = ProjectProto( + spec=ProjectSpecProto( + name=self.project_name, + description=self.description, + 
tags=self.tags, + owner=self.owner, + ), + meta=meta, + ) + project = Project.from_proto(project_proto) + self.assertEqual(project.name, self.project_name) + self.assertEqual(project.description, self.description) + self.assertEqual(project.tags, self.tags) + self.assertEqual(project.owner, self.owner) + self.assertEqual(project.created_timestamp, self.created_timestamp) + self.assertEqual(project.last_updated_timestamp, self.last_updated_timestamp) + + def test_to_proto(self): + project = Project( + name=self.project_name, + description=self.description, + tags=self.tags, + owner=self.owner, + created_timestamp=self.created_timestamp, + last_updated_timestamp=self.last_updated_timestamp, + ) + project_proto = project.to_proto() + self.assertEqual(project_proto.spec.name, self.project_name) + self.assertEqual(project_proto.spec.description, self.description) + self.assertEqual(project_proto.spec.tags, self.tags) + self.assertEqual(project_proto.spec.owner, self.owner) + self.assertEqual( + project_proto.meta.created_timestamp.ToDatetime().replace( + tzinfo=timezone.utc + ), + self.created_timestamp, + ) + self.assertEqual( + project_proto.meta.last_updated_timestamp.ToDatetime().replace( + tzinfo=timezone.utc + ), + self.last_updated_timestamp, + ) + + def test_to_proto_and_back(self): + project = Project( + name=self.project_name, + description=self.description, + tags=self.tags, + owner=self.owner, + created_timestamp=self.created_timestamp, + last_updated_timestamp=self.last_updated_timestamp, + ) + project_proto = project.to_proto() + project_from_proto = Project.from_proto(project_proto) + self.assertEqual(project, project_from_proto) + + +if __name__ == "__main__": + unittest.main() diff --git a/sdk/python/tests/unit/test_rest_error_decorator.py b/sdk/python/tests/unit/test_rest_error_decorator.py new file mode 100644 index 0000000000..147ae767bd --- /dev/null +++ b/sdk/python/tests/unit/test_rest_error_decorator.py @@ -0,0 +1,78 @@ +from unittest.mock import Mock, patch + +import assertpy +import pytest +import requests + +from feast import RepoConfig +from feast.errors import PermissionNotFoundException +from feast.infra.online_stores.remote import ( + RemoteOnlineStoreConfig, + get_remote_online_features, +) + + +@pytest.fixture +def feast_exception() -> PermissionNotFoundException: + return PermissionNotFoundException("dummy_name", "dummy_project") + + +@pytest.fixture +def none_feast_exception() -> RuntimeError: + return RuntimeError("dummy_name", "dummy_project") + + +@patch("feast.infra.online_stores.remote.requests.sessions.Session.post") +def test_rest_error_handling_with_feast_exception( + mock_post, environment, feast_exception +): + # Create a mock response object + mock_response = Mock() + mock_response.status_code = feast_exception.http_status_code() + mock_response.json.return_value = feast_exception.to_error_detail() + mock_response.raise_for_status.side_effect = requests.exceptions.HTTPError() + + # Configure the mock to return the mock response + mock_post.return_value = mock_response + + store = environment.feature_store + online_config = RemoteOnlineStoreConfig(type="remote", path="dummy") + + with pytest.raises( + PermissionNotFoundException, + match="Permission dummy_name does not exist in project dummy_project", + ): + get_remote_online_features( + config=RepoConfig( + project="test", online_store=online_config, registry=store.registry + ), + req_body="{test:test}", + ) + + +@patch("feast.infra.online_stores.remote.requests.sessions.Session.post") +def 
test_rest_error_handling_with_none_feast_exception( + mock_post, environment, none_feast_exception +): + # Create a mock response object + mock_response = Mock() + mock_response.status_code = 500 + mock_response.json.return_value = str(none_feast_exception) + mock_response.raise_for_status.side_effect = requests.exceptions.HTTPError() + + # Configure the mock to return the mock response + mock_post.return_value = mock_response + + store = environment.feature_store + online_config = RemoteOnlineStoreConfig(type="remote", path="dummy") + + response = get_remote_online_features( + config=RepoConfig( + project="test", online_store=online_config, registry=store.registry + ), + req_body="{test:test}", + ) + + assertpy.assert_that(response).is_not_none() + assertpy.assert_that(response.status_code).is_equal_to(500) + assertpy.assert_that(response.json()).is_equal_to("('dummy_name', 'dummy_project')") diff --git a/sdk/python/tests/utils/auth_permissions_util.py b/sdk/python/tests/utils/auth_permissions_util.py new file mode 100644 index 0000000000..3b5e589812 --- /dev/null +++ b/sdk/python/tests/utils/auth_permissions_util.py @@ -0,0 +1,245 @@ +import os +import subprocess + +import yaml +from keycloak import KeycloakAdmin + +from feast import ( + FeatureStore, + RepoConfig, +) +from feast.infra.registry.remote import RemoteRegistryConfig +from feast.permissions.permission import Permission +from feast.wait import wait_retry_backoff +from tests.utils.cli_repo_creator import CliRunner +from tests.utils.http_server import check_port_open + +PROJECT_NAME = "feast_test_project" + + +def include_auth_config(file_path, auth_config: str): + with open(file_path, "r") as file: + existing_content = yaml.safe_load(file) + new_section = yaml.safe_load(auth_config) + if isinstance(existing_content, dict) and isinstance(new_section, dict): + existing_content.update(new_section) + else: + raise ValueError("Both existing content and new section must be dictionaries.") + with open(file_path, "w") as file: + yaml.safe_dump(existing_content, file, default_flow_style=False) + print(f"Updated auth section at {file_path}") + + +def default_store( + temp_dir, + auth_config: str, + permissions: list[Permission], +): + runner = CliRunner() + result = runner.run(["init", PROJECT_NAME], cwd=temp_dir) + repo_path = os.path.join(temp_dir, PROJECT_NAME, "feature_repo") + assert result.returncode == 0 + + include_auth_config( + file_path=f"{repo_path}/feature_store.yaml", auth_config=auth_config + ) + + result = runner.run(["--chdir", repo_path, "apply"], cwd=temp_dir) + assert result.returncode == 0 + + fs = FeatureStore(repo_path=repo_path) + + fs.apply(permissions) + + return fs + + +def start_feature_server(repo_path: str, server_port: int, metrics: bool = False): + host = "0.0.0.0" + cmd = [ + "feast", + "-c" + repo_path, + "serve", + "--host", + host, + "--port", + str(server_port), + ] + feast_server_process = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + _time_out_sec: int = 60 + # Wait for server to start + wait_retry_backoff( + lambda: (None, check_port_open(host, server_port)), + timeout_secs=_time_out_sec, + timeout_msg=f"Unable to start the feast server in {_time_out_sec} seconds for remote online store type, port={server_port}", + ) + + if metrics: + cmd.append("--metrics") + + # Check if metrics are enabled and Prometheus server is running + if metrics: + wait_retry_backoff( + lambda: (None, check_port_open("localhost", 8000)), + timeout_secs=_time_out_sec, + timeout_msg="Unable to 
start the Prometheus server in 60 seconds.", + ) + else: + assert not check_port_open( + "localhost", 8000 + ), "Prometheus server is running when it should be disabled." + + yield f"http://localhost:{server_port}" + + if feast_server_process is not None: + feast_server_process.kill() + + # wait server to free the port + wait_retry_backoff( + lambda: ( + None, + not check_port_open("localhost", server_port), + ), + timeout_msg=f"Unable to stop the feast server in {_time_out_sec} seconds for remote online store type, port={server_port}", + timeout_secs=_time_out_sec, + ) + + +def get_remote_registry_store(server_port, feature_store): + registry_config = RemoteRegistryConfig( + registry_type="remote", path=f"localhost:{server_port}" + ) + + store = FeatureStore( + config=RepoConfig( + project=PROJECT_NAME, + auth=feature_store.config.auth, + registry=registry_config, + provider="local", + entity_key_serialization_version=2, + ) + ) + return store + + +def setup_permissions_on_keycloak(keycloak_admin: KeycloakAdmin): + new_client_id = "feast-integration-client" + new_client_secret = "feast-integration-client-secret" + # Create a new client + client_representation = { + "clientId": new_client_id, + "secret": new_client_secret, + "enabled": True, + "directAccessGrantsEnabled": True, + "publicClient": False, + "redirectUris": ["*"], + "serviceAccountsEnabled": True, + "standardFlowEnabled": True, + } + keycloak_admin.create_client(client_representation) + + # Get the client ID + client_id = keycloak_admin.get_client_id(new_client_id) + + # Role representation + reader_role_rep = { + "name": "reader", + "description": "feast reader client role", + "composite": False, + "clientRole": True, + "containerId": client_id, + } + keycloak_admin.create_client_role(client_id, reader_role_rep, True) + reader_role_id = keycloak_admin.get_client_role( + client_id=client_id, role_name="reader" + ) + + # Role representation + writer_role_rep = { + "name": "writer", + "description": "feast writer client role", + "composite": False, + "clientRole": True, + "containerId": client_id, + } + keycloak_admin.create_client_role(client_id, writer_role_rep, True) + writer_role_id = keycloak_admin.get_client_role( + client_id=client_id, role_name="writer" + ) + + # Mapper representation + mapper_representation = { + "name": "client-roles-mapper", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-client-role-mapper", + "consentRequired": False, + "config": { + "multivalued": "true", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "roles", + "jsonType.label": "String", + "client.id": client_id, + }, + } + + # Add predefined client roles mapper to the client + keycloak_admin.add_mapper_to_client(client_id, mapper_representation) + + reader_writer_user = { + "username": "reader_writer", + "enabled": True, + "firstName": "reader_writer fn", + "lastName": "reader_writer ln", + "email": "reader_writer@email.com", + "emailVerified": True, + "credentials": [{"value": "password", "type": "password", "temporary": False}], + } + reader_writer_user_id = keycloak_admin.create_user(reader_writer_user) + keycloak_admin.assign_client_role( + user_id=reader_writer_user_id, + client_id=client_id, + roles=[reader_role_id, writer_role_id], + ) + + reader_user = { + "username": "reader", + "enabled": True, + "firstName": "reader fn", + "lastName": "reader ln", + "email": "reader@email.com", + "emailVerified": True, + "credentials": [{"value": "password", "type": 
"password", "temporary": False}], + } + reader_user_id = keycloak_admin.create_user(reader_user) + keycloak_admin.assign_client_role( + user_id=reader_user_id, client_id=client_id, roles=[reader_role_id] + ) + + writer_user = { + "username": "writer", + "enabled": True, + "firstName": "writer fn", + "lastName": "writer ln", + "email": "writer@email.com", + "emailVerified": True, + "credentials": [{"value": "password", "type": "password", "temporary": False}], + } + writer_user_id = keycloak_admin.create_user(writer_user) + keycloak_admin.assign_client_role( + user_id=writer_user_id, client_id=client_id, roles=[writer_role_id] + ) + + no_roles_user = { + "username": "no_roles_user", + "enabled": True, + "firstName": "no_roles_user fn", + "lastName": "no_roles_user ln", + "email": "no_roles_user@email.com", + "emailVerified": True, + "credentials": [{"value": "password", "type": "password", "temporary": False}], + } + keycloak_admin.create_user(no_roles_user) diff --git a/sdk/python/tests/utils/e2e_test_validation.py b/sdk/python/tests/utils/e2e_test_validation.py index 1a8bedc796..a08e8fef42 100644 --- a/sdk/python/tests/utils/e2e_test_validation.py +++ b/sdk/python/tests/utils/e2e_test_validation.py @@ -1,13 +1,12 @@ import math import os import time -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from pathlib import Path from typing import Dict, List, Optional, Union import pandas as pd import yaml -from pytz import utc from feast import FeatureStore, FeatureView, RepoConfig from feast.utils import _utc_now @@ -39,7 +38,7 @@ def validate_offline_online_store_consistency( # Run materialize() # use both tz-naive & tz-aware timestamps to test that they're both correctly handled - start_date = (now - timedelta(hours=5)).replace(tzinfo=utc) + start_date = (now - timedelta(hours=5)).replace(tzinfo=timezone.utc) end_date = split_dt fs.materialize(feature_views=[fv.name], start_date=start_date, end_date=end_date) @@ -87,7 +86,8 @@ def validate_offline_online_store_consistency( and updated_fv.materialization_intervals[0][0] == start_date and updated_fv.materialization_intervals[0][1] == end_date and updated_fv.materialization_intervals[1][0] == end_date - and updated_fv.materialization_intervals[1][1] == now.replace(tzinfo=utc) + and updated_fv.materialization_intervals[1][1] + == now.replace(tzinfo=timezone.utc) ) # check result of materialize_incremental() diff --git a/sdk/python/tests/utils/feature_records.py b/sdk/python/tests/utils/feature_records.py index bd3567c9ee..e81666eaa5 100644 --- a/sdk/python/tests/utils/feature_records.py +++ b/sdk/python/tests/utils/feature_records.py @@ -5,7 +5,6 @@ import pandas as pd import pytest from pandas.testing import assert_frame_equal as pd_assert_frame_equal -from pytz import utc from feast import FeatureService, FeatureStore, utils from feast.errors import FeatureNameCollisionError @@ -16,7 +15,7 @@ def convert_timestamp_records_to_utc( records: List[Dict[str, Any]], column: str ) -> List[Dict[str, Any]]: for record in records: - record[column] = utils.make_tzaware(record[column]).astimezone(utc) + record[column] = utils.make_tzaware(record[column]).astimezone(timezone.utc) return records diff --git a/sdk/python/tests/utils/test_log_creator.py b/sdk/python/tests/utils/test_log_creator.py index 987c8d77ef..3e432e11bf 100644 --- a/sdk/python/tests/utils/test_log_creator.py +++ b/sdk/python/tests/utils/test_log_creator.py @@ -1,7 +1,7 @@ import contextlib -import datetime import tempfile import uuid +from 
datetime import timedelta from pathlib import Path from typing import Iterator, List, Union @@ -80,7 +80,7 @@ def prepare_logs( logs_df[REQUEST_ID_FIELD] = [str(uuid.uuid4()) for _ in range(num_rows)] logs_df[LOG_TIMESTAMP_FIELD] = pd.Series( np.random.randint(0, 7 * 24 * 3600, num_rows) - ).map(lambda secs: pd.Timestamp.utcnow() - datetime.timedelta(seconds=secs)) + ).map(lambda secs: pd.Timestamp.utcnow() - timedelta(seconds=secs)) logs_df[LOG_DATE_FIELD] = logs_df[LOG_TIMESTAMP_FIELD].dt.date for projection in feature_service.feature_view_projections: diff --git a/setup.py b/setup.py index 6fb5bfee61..c62fb8c50f 100644 --- a/setup.py +++ b/setup.py @@ -11,21 +11,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import glob import os import pathlib import re import shutil -import subprocess -import sys -from pathlib import Path - -from setuptools import find_packages, setup, Command -from setuptools.command.build_ext import build_ext as _build_ext -from setuptools.command.build_py import build_py -from setuptools.command.develop import develop -from setuptools.command.install import install +from setuptools import find_packages, setup NAME = "feast" DESCRIPTION = "Python SDK for Feast" @@ -37,13 +28,12 @@ "click>=7.0.0,<9.0.0", "colorama>=0.3.9,<1", "dill~=0.3.0", - "mypy-protobuf>=3.1", + "protobuf<5", "Jinja2>=2,<4", "jsonschema", "mmh3", "numpy>=1.22,<2", "pandas>=1.4.3,<3", - "protobuf>=4.24.0,<5.0.0", "pyarrow>=4", "pydantic>=2.0.0", "pygments>=2.12.0,<3", @@ -61,6 +51,8 @@ "dask[dataframe]>=2024.2.1", "prometheus_client", "psutil", + "bigtree>=0.19.2", + "pyjwt", ] GCP_REQUIRED = [ @@ -71,7 +63,7 @@ "google-cloud-datastore>=2.16.0,<3", "google-cloud-storage>=1.34.0,<3", "google-cloud-bigtable>=2.11.0,<3", - "fsspec<=2024.1.0", + "fsspec<=2024.9.0", ] REDIS_REQUIRED = [ @@ -79,7 +71,7 @@ "hiredis>=2.0.0,<3", ] -AWS_REQUIRED = ["boto3>=1.17.0,<2", "fsspec<=2024.1.0", "aiobotocore>2,<3"] +AWS_REQUIRED = ["boto3>=1.17.0,<2", "fsspec<=2024.9.0", "aiobotocore>2,<3"] KUBERNETES_REQUIRED = ["kubernetes<=20.13.0"] @@ -100,7 +92,7 @@ "psycopg[binary,pool]>=3.0.0,<4", ] -OPENTELEMETRY = ["prometheus_client","psutil"] +OPENTELEMETRY = ["prometheus_client", "psutil"] MYSQL_REQUIRED = ["pymysql", "types-PyMySQL"] @@ -122,10 +114,6 @@ "pymssql", ] -ROCKSET_REQUIRED = [ - "rockset>=1.0.3", -] - IKV_REQUIRED = [ "ikvpy>=0.0.36", ] @@ -141,7 +129,6 @@ GRPCIO_REQUIRED = [ "grpcio>=1.56.2,<2", - "grpcio-tools>=1.56.2,<2", "grpcio-reflection>=1.56.2,<2", "grpcio-health-checking>=1.56.2,<2", ] @@ -162,6 +149,8 @@ "virtualenv==20.23.0", "cryptography>=35.0,<43", "ruff>=0.3.3", + "mypy-protobuf>=3.1", + "grpcio-tools>=1.56.2,<2", "grpcio-testing>=1.56.2,<2", # FastAPI does not correctly pull starlette dependency on httpx see thread(https://github.com/tiangolo/fastapi/issues/5656). "httpx>=0.23.3", @@ -183,6 +172,7 @@ "pytest-env", "Sphinx>4.0.0,<7", "testcontainers==4.4.0", + "python-keycloak==4.2.2", "pre-commit<3.3.2", "assertpy==1.1", "pip-tools", @@ -210,7 +200,6 @@ + HBASE_REQUIRED + CASSANDRA_REQUIRED + AZURE_REQUIRED - + ROCKSET_REQUIRED + HAZELCAST_REQUIRED + IBIS_REQUIRED + GRPCIO_REQUIRED @@ -246,107 +235,8 @@ else: use_scm_version = None -PROTO_SUBDIRS = ["core", "registry", "serving", "types", "storage"] PYTHON_CODE_PREFIX = "sdk/python" - -class BuildPythonProtosCommand(Command): - description = "Builds the proto files into Python files." 
- user_options = [ - ("inplace", "i", "Write generated proto files to source directory."), - ] - - def initialize_options(self): - self.python_protoc = [ - sys.executable, - "-m", - "grpc_tools.protoc", - ] # find_executable("protoc") - self.proto_folder = os.path.join(repo_root, "protos") - self.sub_folders = PROTO_SUBDIRS - self.build_lib = None - self.inplace = 0 - - def finalize_options(self): - self.set_undefined_options("build", ("build_lib", "build_lib")) - - @property - def python_folder(self): - if self.inplace: - return os.path.join( - os.path.dirname(__file__) or os.getcwd(), "sdk/python/feast/protos" - ) - - return os.path.join(self.build_lib, "feast/protos") - - def _generate_python_protos(self, path: str): - proto_files = glob.glob(os.path.join(self.proto_folder, path)) - Path(self.python_folder).mkdir(parents=True, exist_ok=True) - subprocess.check_call( - self.python_protoc - + [ - "-I", - self.proto_folder, - "--python_out", - self.python_folder, - "--grpc_python_out", - self.python_folder, - "--mypy_out", - self.python_folder, - ] - + proto_files - ) - - def run(self): - for sub_folder in self.sub_folders: - self._generate_python_protos(f"feast/{sub_folder}/*.proto") - # We need the __init__ files for each of the generated subdirs - # so that they are regular packages, and don't need the `--namespace-packages` flags - # when being typechecked using mypy. - with open(f"{self.python_folder}/feast/{sub_folder}/__init__.py", "w"): - pass - - with open(f"{self.python_folder}/__init__.py", "w"): - pass - with open(f"{self.python_folder}/feast/__init__.py", "w"): - pass - - for path in Path(self.python_folder).rglob("*.py"): - for folder in self.sub_folders: - # Read in the file - with open(path, "r") as file: - filedata = file.read() - - # Replace the target string - filedata = filedata.replace( - f"from feast.{folder}", f"from feast.protos.feast.{folder}" - ) - - # Write the file out again - with open(path, "w") as file: - file.write(filedata) - - -class BuildCommand(build_py): - """Custom build command.""" - - def run(self): - self.run_command("build_python_protos") - - self.run_command("build_ext") - build_py.run(self) - - -class DevelopCommand(develop): - """Custom develop command.""" - - def run(self): - self.reinitialize_command("build_python_protos", inplace=1) - self.run_command("build_python_protos") - - develop.run(self) - - setup( name=NAME, author=AUTHOR, @@ -360,8 +250,6 @@ def run(self): ), package_dir={"": PYTHON_CODE_PREFIX}, install_requires=REQUIRED, - # https://stackoverflow.com/questions/28509965/setuptools-development-requirements - # Install dev requirements with: pip install -e .[dev] extras_require={ "dev": DEV_REQUIRED, "ci": CI_REQUIRED, @@ -382,7 +270,6 @@ def run(self): "cassandra": CASSANDRA_REQUIRED, "hazelcast": HAZELCAST_REQUIRED, "grpcio": GRPCIO_REQUIRED, - "rockset": ROCKSET_REQUIRED, "ibis": IBIS_REQUIRED, "duckdb": DUCKDB_REQUIRED, "ikv": IKV_REQUIRED, @@ -405,15 +292,7 @@ def run(self): entry_points={"console_scripts": ["feast=feast.cli:cli"]}, use_scm_version=use_scm_version, setup_requires=[ - "setuptools_scm", - "grpcio>=1.56.2,<2", - "grpcio-tools>=1.56.2,<2", - "mypy-protobuf>=3.1", - "pybindgen==0.22.0", - ], - cmdclass={ - "build_python_protos": BuildPythonProtosCommand, - "build_py": BuildCommand, - "develop": DevelopCommand, - }, + "pybindgen==0.22.0", #TODO do we need this? + "setuptools_scm>=6.2", #TODO do we need this? 
+ ] ) diff --git a/ui/README.md b/ui/README.md index 12aacd329e..852bddc296 100644 --- a/ui/README.md +++ b/ui/README.md @@ -46,7 +46,7 @@ ReactDOM.render( ); ``` -When you start the React app, it will look for `projects-list.json` to find a list of your projects. The JSON should looks something like this. +When you start the React app, it will look for `projects-list.json` to find a list of your projects. The JSON should look something like this. ```json { diff --git a/ui/package.json b/ui/package.json index cd80859aa1..bc2a71378a 100644 --- a/ui/package.json +++ b/ui/package.json @@ -6,14 +6,12 @@ "dist" ], "main": "./dist/feast-ui.cjs", + "types": "./dist/FeastUI.d.ts", "module": "./dist/feast-ui.module.js", "peerDependencies": { "@elastic/datemath": "^5.0.3", "@elastic/eui": "^55.0.1", "@emotion/react": "^11.7.1", - "@types/d3": "^7.1.0", - "@types/react": "^17.0.20", - "@types/react-dom": "^17.0.9", "d3": "^7.3.0", "inter-ui": "^3.19.3", "moment": "^2.29.1", @@ -22,9 +20,8 @@ "react": "^17.0.2", "react-dom": "^17.0.2", "react-query": "^3.34.12", - "react-router-dom": "6", + "react-router-dom": "<6.4.0", "react-scripts": "^5.0.0", - "typescript": "^4.4.2", "use-query-params": "^1.2.3", "zod": "^3.11.6" }, @@ -32,11 +29,6 @@ "@elastic/datemath": "^5.0.3", "@elastic/eui": "^55.0.1", "@emotion/react": "^11.7.1", - "@types/d3": "^7.1.0", - "@types/jest": "^27.0.1", - "@types/node": "^16.7.13", - "@types/react": "^17.0.20", - "@types/react-dom": "^17.0.9", "d3": "^7.3.0", "inter-ui": "^3.19.3", "moment": "^2.29.1", @@ -45,16 +37,17 @@ "query-string": "^7.1.1", "react-code-blocks": "^0.0.9-0", "react-query": "^3.34.12", - "react-router-dom": "6", + "react-router-dom": "<6.4.0", "react-scripts": "^5.0.0", + "tslib": "^2.3.1", "use-query-params": "^1.2.3", "zod": "^3.11.6" }, "scripts": { "start": "npm run generate-protos && react-scripts start", "build": "npm run generate-protos && react-scripts build", - "build:lib": "npm run generate-protos && rimraf ./dist && tsc && rollup -c", - "build:lib-dev": "npm run generate-protos && rimraf ./dist && tsc && rollup -c && yalc publish -f", + "build:lib": "npm run generate-protos && rimraf ./dist && tsc --project ./tsconfig.build-lib.json && rollup -c", + "build:lib-dev": "npm run build:lib && yalc publish -f", "test": "npm run generate-protos && react-scripts test", "eject": "react-scripts eject", "generate-protos": "pbjs --no-encode -o src/protos.js -w commonjs -t static-module `find ../protos/feast/ -iname *.proto` && pbts -n protos -o src/protos.d.ts src/protos.js" @@ -94,6 +87,11 @@ "@testing-library/jest-dom": "^5.14.1", "@testing-library/react": "^12.0.0", "@testing-library/user-event": "^13.2.1", + "@types/d3": "^7.1.0", + "@types/jest": "^27.0.1", + "@types/node": "^16.7.13", + "@types/react": "^17.0.20", + "@types/react-dom": "^17.0.9", "msw": "^0.36.8", "protobufjs-cli": "^1.0.2", "react": "^17.0.2", @@ -105,7 +103,6 @@ "rollup-plugin-svg": "^2.0.0", "rollup-plugin-svgo": "^1.1.0", "rollup-plugin-terser": "^7.0.2", - "tslib": "^2.3.1", "typescript": "^4.4.2" }, "description": "Web UI for the [Feast Feature Store](https://feast.dev/)", diff --git a/ui/tsconfig.build-lib.json b/ui/tsconfig.build-lib.json new file mode 100644 index 0000000000..c29bd063f0 --- /dev/null +++ b/ui/tsconfig.build-lib.json @@ -0,0 +1,11 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "declaration": true, + "declarationMap": true, + "emitDeclarationOnly": true, + "noEmit": false, + "outDir": "./dist", + "rootDir": "./src" + } +} diff --git 
a/ui/yarn.lock b/ui/yarn.lock index 26c833fa11..1f36143b67 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -1627,16 +1627,35 @@ "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping" "^0.3.9" +"@jridgewell/gen-mapping@^0.3.5": + version "0.3.5" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz#dcce6aff74bdf6dad1a95802b69b04a2fcb1fb36" + integrity sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg== + dependencies: + "@jridgewell/set-array" "^1.2.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.24" + "@jridgewell/resolve-uri@3.1.0", "@jridgewell/resolve-uri@^3.0.3": version "3.1.0" resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6" + integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== + "@jridgewell/set-array@^1.0.1": version "1.1.2" resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== +"@jridgewell/set-array@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.2.1.tgz#558fb6472ed16a4c850b889530e6b36438c49280" + integrity sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A== + "@jridgewell/source-map@^0.3.2": version "0.3.2" resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" @@ -1645,11 +1664,24 @@ "@jridgewell/gen-mapping" "^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" +"@jridgewell/source-map@^0.3.3": + version "0.3.6" + resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.6.tgz#9d71ca886e32502eb9362c9a74a46787c36df81a" + integrity sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ== + dependencies: + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.25" + "@jridgewell/sourcemap-codec@1.4.14", "@jridgewell/sourcemap-codec@^1.4.10": version "1.4.14" resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== +"@jridgewell/sourcemap-codec@^1.4.14": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz#3188bcb273a414b0d215fd22a58540b989b9409a" + integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ== + "@jridgewell/trace-mapping@^0.3.0": version "0.3.4" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.4.tgz#f6a0832dffd5b8a6aaa633b7d9f8e8e94c83a0c3" @@ -1666,6 +1698,14 @@ "@jridgewell/resolve-uri" "3.1.0" "@jridgewell/sourcemap-codec" "1.4.14" +"@jridgewell/trace-mapping@^0.3.20", "@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25": + version "0.3.25" + resolved 
"https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0" + integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + "@jridgewell/trace-mapping@^0.3.9": version "0.3.14" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed" @@ -2362,22 +2402,6 @@ "@types/d3-transition" "*" "@types/d3-zoom" "*" -"@types/eslint-scope@^3.7.3": - version "3.7.4" - resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" - integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== - dependencies: - "@types/eslint" "*" - "@types/estree" "*" - -"@types/eslint@*": - version "8.4.1" - resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.4.1.tgz#c48251553e8759db9e656de3efc846954ac32304" - integrity sha512-GE44+DNEyxxh2Kc6ro/VkIj+9ma0pO0bwv9+uHSyBrikYOHr8zYcdPvnBOp1aw8s+CjRvuSx7CyWqRrNFQ59mA== - dependencies: - "@types/estree" "*" - "@types/json-schema" "*" - "@types/eslint@^7.28.2": version "7.29.0" resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-7.29.0.tgz#e56ddc8e542815272720bb0b4ccc2aff9c3e1c78" @@ -2396,10 +2420,10 @@ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== -"@types/estree@^0.0.51": - version "0.0.51" - resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40" - integrity sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ== +"@types/estree@^1.0.5": + version "1.0.5" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.5.tgz#a6ce3e556e00fd9895dd872dd172ad0d4bd687f4" + integrity sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw== "@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": version "4.17.28" @@ -2873,125 +2897,125 @@ "@typescript-eslint/types" "5.10.1" eslint-visitor-keys "^3.0.0" -"@webassemblyjs/ast@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" - integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== +"@webassemblyjs/ast@1.12.1", "@webassemblyjs/ast@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.12.1.tgz#bb16a0e8b1914f979f45864c23819cc3e3f0d4bb" + integrity sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg== dependencies: - "@webassemblyjs/helper-numbers" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/helper-numbers" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" -"@webassemblyjs/floating-point-hex-parser@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" - integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== 
+"@webassemblyjs/floating-point-hex-parser@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz#dacbcb95aff135c8260f77fa3b4c5fea600a6431" + integrity sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw== -"@webassemblyjs/helper-api-error@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" - integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== +"@webassemblyjs/helper-api-error@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768" + integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q== -"@webassemblyjs/helper-buffer@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" - integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== +"@webassemblyjs/helper-buffer@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz#6df20d272ea5439bf20ab3492b7fb70e9bfcb3f6" + integrity sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw== -"@webassemblyjs/helper-numbers@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" - integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== +"@webassemblyjs/helper-numbers@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz#cbce5e7e0c1bd32cf4905ae444ef64cea919f1b5" + integrity sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g== dependencies: - "@webassemblyjs/floating-point-hex-parser" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" + "@webassemblyjs/floating-point-hex-parser" "1.11.6" + "@webassemblyjs/helper-api-error" "1.11.6" "@xtuc/long" "4.2.2" -"@webassemblyjs/helper-wasm-bytecode@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" - integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== +"@webassemblyjs/helper-wasm-bytecode@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9" + integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA== -"@webassemblyjs/helper-wasm-section@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" - integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== +"@webassemblyjs/helper-wasm-section@1.12.1": + version "1.12.1" + resolved 
"https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz#3da623233ae1a60409b509a52ade9bc22a37f7bf" + integrity sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g== dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/wasm-gen" "1.12.1" -"@webassemblyjs/ieee754@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" - integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== +"@webassemblyjs/ieee754@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz#bb665c91d0b14fffceb0e38298c329af043c6e3a" + integrity sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg== dependencies: "@xtuc/ieee754" "^1.2.0" -"@webassemblyjs/leb128@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" - integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== +"@webassemblyjs/leb128@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.6.tgz#70e60e5e82f9ac81118bc25381a0b283893240d7" + integrity sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ== dependencies: "@xtuc/long" "4.2.2" -"@webassemblyjs/utf8@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" - integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== - -"@webassemblyjs/wasm-edit@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" - integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/helper-wasm-section" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-opt" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - "@webassemblyjs/wast-printer" "1.11.1" - -"@webassemblyjs/wasm-gen@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" - integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" "1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wasm-opt@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" - integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== - dependencies: - "@webassemblyjs/ast" "1.11.1" 
- "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - -"@webassemblyjs/wasm-parser@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" - integrity sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" "1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wast-printer@1.11.1": - version "1.11.1" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" - integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== - dependencies: - "@webassemblyjs/ast" "1.11.1" +"@webassemblyjs/utf8@1.11.6": + version "1.11.6" + resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a" + integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA== + +"@webassemblyjs/wasm-edit@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz#9f9f3ff52a14c980939be0ef9d5df9ebc678ae3b" + integrity sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/helper-wasm-section" "1.12.1" + "@webassemblyjs/wasm-gen" "1.12.1" + "@webassemblyjs/wasm-opt" "1.12.1" + "@webassemblyjs/wasm-parser" "1.12.1" + "@webassemblyjs/wast-printer" "1.12.1" + +"@webassemblyjs/wasm-gen@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz#a6520601da1b5700448273666a71ad0a45d78547" + integrity sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/ieee754" "1.11.6" + "@webassemblyjs/leb128" "1.11.6" + "@webassemblyjs/utf8" "1.11.6" + +"@webassemblyjs/wasm-opt@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz#9e6e81475dfcfb62dab574ac2dda38226c232bc5" + integrity sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/wasm-gen" "1.12.1" + "@webassemblyjs/wasm-parser" "1.12.1" + +"@webassemblyjs/wasm-parser@1.12.1", "@webassemblyjs/wasm-parser@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz#c47acb90e6f083391e3fa61d113650eea1e95937" + integrity sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ== + dependencies: + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-api-error" "1.11.6" + "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/ieee754" "1.11.6" + "@webassemblyjs/leb128" "1.11.6" + "@webassemblyjs/utf8" "1.11.6" + +"@webassemblyjs/wast-printer@1.12.1": + version "1.12.1" + resolved 
"https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz#bcecf661d7d1abdaf989d8341a4833e33e2b31ac" + integrity sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA== + dependencies: + "@webassemblyjs/ast" "1.12.1" "@xtuc/long" "4.2.2" "@xmldom/xmldom@^0.7.2": @@ -3030,10 +3054,10 @@ acorn-globals@^6.0.0: acorn "^7.1.1" acorn-walk "^7.1.1" -acorn-import-assertions@^1.7.6: - version "1.8.0" - resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" - integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== +acorn-import-attributes@^1.9.5: + version "1.9.5" + resolved "https://registry.yarnpkg.com/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz#7eb1557b1ba05ef18b5ed0ec67591bfab04688ef" + integrity sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ== acorn-jsx@^5.3.1, acorn-jsx@^5.3.2: version "5.3.2" @@ -3074,6 +3098,11 @@ acorn@^8.8.0: resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8" integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== +acorn@^8.8.2: + version "8.12.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.12.1.tgz#71616bdccbe25e27a54439e0046e89ca76df2248" + integrity sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg== + address@^1.0.1, address@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/address/-/address-1.1.2.tgz#bf1116c9c758c51b7a933d296b72c221ed9428b6" @@ -3629,7 +3658,7 @@ browser-process-hrtime@^1.0.0: resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== -browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4.17.5, browserslist@^4.18.1, browserslist@^4.19.1: +browserslist@^4.0.0, browserslist@^4.16.6, browserslist@^4.17.5, browserslist@^4.18.1, browserslist@^4.19.1: version "4.19.1" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.19.1.tgz#4ac0435b35ab655896c31d53018b6dd5e9e4c9a3" integrity sha512-u2tbbG5PdKRTUoctO3NBD8FQ5HdPh1ZXPHzp1rwaa5jTc+RV9/+RlWiAIKmjRPQF+xbGM9Kklj5bZQFa2s/38A== @@ -3640,6 +3669,16 @@ browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4 node-releases "^2.0.1" picocolors "^1.0.0" +browserslist@^4.21.10: + version "4.23.3" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.23.3.tgz#debb029d3c93ebc97ffbc8d9cbb03403e227c800" + integrity sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA== + dependencies: + caniuse-lite "^1.0.30001646" + electron-to-chromium "^1.5.4" + node-releases "^2.0.18" + update-browserslist-db "^1.1.0" + bser@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" @@ -3736,6 +3775,11 @@ caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001286, caniuse-lite@^1.0.30001297, can resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001416.tgz" integrity sha512-06wzzdAkCPZO+Qm4e/eNghZBDfVNDsCgw33T27OwBH9unE9S478OYw//Q2L7Npf/zBzs7rjZOszIFQkwQKAEqA== +caniuse-lite@^1.0.30001646: + version "1.0.30001657" + resolved 
"https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001657.tgz#29fd504bffca719d1c6b63a1f6f840be1973a660" + integrity sha512-DPbJAlP8/BAXy3IgiWmZKItubb3TYGP0WscQQlVGIfT4s/YlFYVuJgyOsQNP7rJRChx/qdMeLJQJP0Sgg2yjNA== + case-sensitive-paths-webpack-plugin@^2.4.0: version "2.4.0" resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" @@ -4997,6 +5041,11 @@ electron-to-chromium@^1.4.17: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.57.tgz#2b2766df76ac8dbc0a1d41249bc5684a31849892" integrity sha512-FNC+P5K1n6pF+M0zIK+gFCoXcJhhzDViL3DRIGy2Fv5PohuSES1JHR7T+GlwxSxlzx4yYbsuzCZvHxcBSRCIOw== +electron-to-chromium@^1.5.4: + version "1.5.15" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.15.tgz#3c969a29b03682db7a3032283ec8be6e75effe50" + integrity sha512-Z4rIDoImwEJW+YYKnPul4DzqsWVqYetYVN3XqDmRpgV0mjz0hYTaeeh+8/9CL1bk3AHYmF4freW/NTiVoXA2gA== + emittery@^0.8.1: version "0.8.1" resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" @@ -5027,10 +5076,10 @@ encodeurl@~1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== -enhanced-resolve@^5.10.0: - version "5.12.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.12.0.tgz#300e1c90228f5b570c4d35babf263f6da7155634" - integrity sha512-QHTXI/sZQmko1cbDoNAa3mJ5qhWUUNAq3vR0/YiD379fWQrcfuoX1+HW2S0MTt7XmoPLapdaDKUtelUSPic7hQ== +enhanced-resolve@^5.17.1: + version "5.17.1" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz#67bfbbcc2f81d511be77d686a90267ef7f898a15" + integrity sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg== dependencies: graceful-fs "^4.2.4" tapable "^2.2.0" @@ -5085,10 +5134,10 @@ es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.19.1: string.prototype.trimstart "^1.0.4" unbox-primitive "^1.0.1" -es-module-lexer@^0.9.0: - version "0.9.3" - resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" - integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== +es-module-lexer@^1.2.1: + version "1.5.4" + resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.5.4.tgz#a8efec3a3da991e60efa6b633a7cad6ab8d26b78" + integrity sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw== es-to-primitive@^1.2.1: version "1.2.1" @@ -5104,6 +5153,11 @@ escalade@^3.1.1: resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== +escalade@^3.1.2: + version "3.2.0" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5" + integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== + escape-html@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" @@ -5970,7 +6024,7 @@ good-listener@^1.2.2: dependencies: delegate 
"^3.1.2" -graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: +graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6: version "4.2.9" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.9.tgz#041b05df45755e587a24942279b9d113146e1c96" integrity sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ== @@ -5980,6 +6034,11 @@ graceful-fs@^4.1.9: resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== +graceful-fs@^4.2.11: + version "4.2.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + graphql@^15.5.1: version "15.8.0" resolved "https://registry.yarnpkg.com/graphql/-/graphql-15.8.0.tgz#33410e96b012fa3bdb1091cc99a94769db212b38" @@ -7228,6 +7287,15 @@ jest-worker@^27.0.2, jest-worker@^27.3.1, jest-worker@^27.4.1, jest-worker@^27.4 merge-stream "^2.0.0" supports-color "^8.0.0" +jest-worker@^27.4.5: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" + integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + jest@^27.4.3: version "27.4.7" resolved "https://registry.yarnpkg.com/jest/-/jest-27.4.7.tgz#87f74b9026a1592f2da05b4d258e57505f28eca4" @@ -7983,6 +8051,11 @@ node-releases@^2.0.1: resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.1.tgz#3d1d395f204f1f2f29a54358b9fb678765ad2fc5" integrity sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA== +node-releases@^2.0.18: + version "2.0.18" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.18.tgz#f010e8d35e2fe8d6b2944f03f70213ecedc4ca3f" + integrity sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g== + normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" @@ -8406,6 +8479,11 @@ picocolors@^1.0.0: resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== +picocolors@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.0.tgz#5358b76a78cde483ba5cef6a9dc9671440b27d59" + integrity sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw== + picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.0: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" @@ -9345,18 +9423,18 @@ react-remove-scroll@^2.5.2: use-callback-ref "^1.3.0" use-sidecar "^1.1.2" -react-router-dom@6: - version "6.2.1" - resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-6.2.1.tgz#32ec81829152fbb8a7b045bf593a22eadf019bec" - integrity 
sha512-I6Zax+/TH/cZMDpj3/4Fl2eaNdcvoxxHoH1tYOREsQ22OKDYofGebrNm6CTPUcvLvZm63NL/vzCYdjf9CUhqmA== +react-router-dom@<6.4.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-6.3.0.tgz#a0216da813454e521905b5fa55e0e5176123f43d" + integrity sha512-uaJj7LKytRxZNQV8+RbzJWnJ8K2nPsOOEuX7aQstlMZKQT0164C+X2w6bnkqU3sjtLvpd5ojrezAyfZ1+0sStw== dependencies: history "^5.2.0" - react-router "6.2.1" + react-router "6.3.0" -react-router@6.2.1: - version "6.2.1" - resolved "https://registry.yarnpkg.com/react-router/-/react-router-6.2.1.tgz#be2a97a6006ce1d9123c28934e604faef51448a3" - integrity sha512-2fG0udBtxou9lXtK97eJeET2ki5//UWfQSl1rlJ7quwe6jrktK9FCCc8dQb5QY6jAv3jua8bBQRhhDOM/kVRsg== +react-router@6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/react-router/-/react-router-6.3.0.tgz#3970cc64b4cb4eae0c1ea5203a80334fdd175557" + integrity sha512-7Wh1DzVQ+tlFjkeo+ujvjSqSJmkt1+8JO+T5xklPlgrh70y7ogx75ODRW0ThWhY7S+6yEDks8TYrtQe/aoboBQ== dependencies: history "^5.2.0" @@ -9945,7 +10023,7 @@ schema-utils@^2.6.5: ajv "^6.12.4" ajv-keywords "^3.5.2" -schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: +schema-utils@^3.0.0, schema-utils@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== @@ -9954,6 +10032,15 @@ schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: ajv "^6.12.5" ajv-keywords "^3.5.2" +schema-utils@^3.2.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.3.0.tgz#f50a88877c3c01652a15b622ae9e9795df7a60fe" + integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + schema-utils@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7" @@ -10031,6 +10118,13 @@ serialize-javascript@^6.0.0: dependencies: randombytes "^2.1.0" +serialize-javascript@^6.0.1: + version "6.0.2" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.2.tgz#defa1e055c83bf6d59ea805d8da862254eb6a6c2" + integrity sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g== + dependencies: + randombytes "^2.1.0" + serialize-query-params@^1.3.5: version "1.3.6" resolved "https://registry.yarnpkg.com/serialize-query-params/-/serialize-query-params-1.3.6.tgz#5dd5225db85ce747fe6fbc4897628504faafec6d" @@ -10626,7 +10720,7 @@ terminal-link@^2.0.0: ansi-escapes "^4.2.1" supports-hyperlinks "^2.0.0" -terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: +terser-webpack-plugin@^5.2.5: version "5.3.0" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.0.tgz#21641326486ecf91d8054161c816e464435bae9f" integrity sha512-LPIisi3Ol4chwAaPP8toUJ3L4qCM1G0wao7L3qNv57Drezxj6+VEyySpPw4B1HSO2Eg/hDY/MNF5XihCAoqnsQ== @@ -10637,6 +10731,17 @@ terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: source-map "^0.6.1" terser "^5.7.2" +terser-webpack-plugin@^5.3.10: + version "5.3.10" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz#904f4c9193c6fd2a03f693a2150c62a92f40d199" + integrity 
sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w== + dependencies: + "@jridgewell/trace-mapping" "^0.3.20" + jest-worker "^27.4.5" + schema-utils "^3.1.1" + serialize-javascript "^6.0.1" + terser "^5.26.0" + terser@^5.0.0, terser@^5.10.0, terser@^5.7.2: version "5.14.2" resolved "https://registry.yarnpkg.com/terser/-/terser-5.14.2.tgz#9ac9f22b06994d736174f4091aa368db896f1c10" @@ -10647,6 +10752,16 @@ terser@^5.0.0, terser@^5.10.0, terser@^5.7.2: commander "^2.20.0" source-map-support "~0.5.20" +terser@^5.26.0: + version "5.31.6" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.31.6.tgz#c63858a0f0703988d0266a82fcbf2d7ba76422b1" + integrity sha512-PQ4DAriWzKj+qgehQ7LK5bQqCFNMmlhjR2PFFLuqGCpuCAauxemVBWwWOxo3UIwWQx8+Pr61Df++r76wDmkQBg== + dependencies: + "@jridgewell/source-map" "^0.3.3" + acorn "^8.8.2" + commander "^2.20.0" + source-map-support "~0.5.20" + test-exclude@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" @@ -11062,6 +11177,14 @@ upath@^1.2.0: resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== +update-browserslist-db@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz#7ca61c0d8650766090728046e416a8cde682859e" + integrity sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ== + dependencies: + escalade "^3.1.2" + picocolors "^1.0.1" + uri-js@^4.2.2: version "4.4.1" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" @@ -11205,10 +11328,10 @@ walker@^1.0.7: dependencies: makeerror "1.0.12" -watchpack@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" - integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== +watchpack@^2.4.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.2.tgz#2feeaed67412e7c33184e5a79ca738fbd38564da" + integrity sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw== dependencies: glob-to-regexp "^0.4.1" graceful-fs "^4.1.2" @@ -11328,33 +11451,32 @@ webpack-sources@^3.2.3: integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== webpack@^5.64.4: - version "5.76.1" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.76.1.tgz#7773de017e988bccb0f13c7d75ec245f377d295c" - integrity sha512-4+YIK4Abzv8172/SGqObnUjaIHjLEuUasz9EwQj/9xmPPkYJy2Mh03Q/lJfSD3YLzbxy5FeTq5Uw0323Oh6SJQ== - dependencies: - "@types/eslint-scope" "^3.7.3" - "@types/estree" "^0.0.51" - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/wasm-edit" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" + version "5.94.0" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.94.0.tgz#77a6089c716e7ab90c1c67574a28da518a20970f" + integrity sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg== + dependencies: + "@types/estree" "^1.0.5" + "@webassemblyjs/ast" "^1.12.1" + "@webassemblyjs/wasm-edit" "^1.12.1" + "@webassemblyjs/wasm-parser" "^1.12.1" acorn "^8.7.1" - acorn-import-assertions "^1.7.6" - browserslist "^4.14.5" + 
acorn-import-attributes "^1.9.5" + browserslist "^4.21.10" chrome-trace-event "^1.0.2" - enhanced-resolve "^5.10.0" - es-module-lexer "^0.9.0" + enhanced-resolve "^5.17.1" + es-module-lexer "^1.2.1" eslint-scope "5.1.1" events "^3.2.0" glob-to-regexp "^0.4.1" - graceful-fs "^4.2.9" + graceful-fs "^4.2.11" json-parse-even-better-errors "^2.3.1" loader-runner "^4.2.0" mime-types "^2.1.27" neo-async "^2.6.2" - schema-utils "^3.1.0" + schema-utils "^3.2.0" tapable "^2.1.1" - terser-webpack-plugin "^5.1.3" - watchpack "^2.4.0" + terser-webpack-plugin "^5.3.10" + watchpack "^2.4.1" webpack-sources "^3.2.3" websocket-driver@>=0.5.1, websocket-driver@^0.7.4: