forked from feathr-ai/feathr
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* squash commit and avoid conflict * Revert legacy purview client issue * Fix typo * Remove auth from assink * Update aerospike guidance document * Change port param to int
- Loading branch information
1 parent
8497219
commit e6c69da
Showing
8 changed files
with
327 additions
and
5 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
113 changes: 113 additions & 0 deletions
113
feathr_project/test/test_user_workspace/feathr_config_purview.yaml
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# DO NOT MOVE OR DELETE THIS FILE

# This file contains the configurations that are used by Feathr
# All the configurations can be overwritten by environment variables with concatenation of `__` for different layers of this config file.
# For example, `feathr_runtime_location` for databricks can be overwritten by setting this environment variable:
# SPARK_CONFIG__DATABRICKS__FEATHR_RUNTIME_LOCATION
# Another example would be overwriting Redis host with this config: `ONLINE_STORE__REDIS__HOST`
# For example if you want to override this setting in a shell environment:
# export ONLINE_STORE__REDIS__HOST=feathrazure.redis.cache.windows.net

# version of API settings
api_version: 1
project_config:
  project_name: 'project_feathr_integration_test'
  # Information that is required to be set via environment variables.
  required_environment_variables:
    # the environment variables are required to run Feathr
    # Redis password for your online store
    - 'REDIS_PASSWORD'
    # client IDs and client Secret for the service principal. Read the getting started docs on how to get those information.
    - 'AZURE_CLIENT_ID'
    - 'AZURE_TENANT_ID'
    - 'AZURE_CLIENT_SECRET'
  optional_environment_variables:
    # the environment variables are optional, however you will need them if you want to use some of the services:
    - ADLS_ACCOUNT
    - ADLS_KEY
    - WASB_ACCOUNT
    - WASB_KEY
    - S3_ACCESS_KEY
    - S3_SECRET_KEY
    - JDBC_TABLE
    - JDBC_USER
    - JDBC_PASSWORD
    - KAFKA_SASL_JAAS_CONFIG

offline_store:
  # paths starts with abfss:// or abfs://
  # ADLS_ACCOUNT and ADLS_KEY should be set in environment variable if this is set to true
  adls:
    adls_enabled: true

  # paths starts with wasb:// or wasbs://
  # WASB_ACCOUNT and WASB_KEY should be set in environment variable
  wasb:
    wasb_enabled: true

  # paths starts with s3a://
  # S3_ACCESS_KEY and S3_SECRET_KEY should be set in environment variable
  s3:
    s3_enabled: true
    # S3 endpoint. If you use S3 endpoint, then you need to provide access key and secret key in the environment variable as well.
    s3_endpoint: 's3.amazonaws.com'

  # jdbc endpoint
  jdbc:
    jdbc_enabled: true
    jdbc_database: 'feathrtestdb'
    jdbc_table: 'feathrtesttable'

  # snowflake endpoint
  snowflake:
    snowflake_enabled: true
    url: "dqllago-ol19457.snowflakecomputing.com"
    user: "feathrintegration"
    role: "ACCOUNTADMIN"

spark_config:
  # choice for spark runtime. Currently support: azure_synapse, databricks
  # The `databricks` configs will be ignored if `azure_synapse` is set and vice versa.
  spark_cluster: 'databricks'
  # configure number of parts for the spark output for feature generation job
  spark_result_output_parts: '1'

  azure_synapse:
    dev_url: 'https://feathrazuretest3synapse.dev.azuresynapse.net'
    pool_name: 'spark3'
    # workspace dir for storing all the required configuration files and the jar resources
    workspace_dir: 'abfss://feathrazuretest3fs@feathrazuretest3storage.dfs.core.windows.net/feathr_test_workspace'
    executor_size: 'Small'
    executor_num: 1
    # Feathr Job configuration. Support local paths, path start with http(s)://, and paths start with abfs(s)://
    # this is the default location so end users don't have to compile the runtime again.
    # feathr_runtime_location: wasbs://public@azurefeathrstorage.blob.core.windows.net/feathr-assembly-LATEST.jar
    feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.7.2.jar"
  databricks:
    # workspace instance
    workspace_instance_url: 'https://adb-2474129336842816.16.azuredatabricks.net/'
    # NOTE(review): hard-coded access token committed to the repo — rotate this
    # credential and load it from an environment variable or secret store instead.
    workspace_token_value: 'dapid8ddd83000dc2863763b7d47f0e8f3db'
    # config string including run time information, spark version, machine size, etc.
    # the config follows the format in the databricks documentation: https://docs.microsoft.com/en-us/azure/databricks/dev-tools/api/2.0/jobs
    config_template: {"run_name":"FEATHR_FILL_IN","new_cluster":{"spark_version":"9.1.x-scala2.12","num_workers":1,"spark_conf":{"FEATHR_FILL_IN":"FEATHR_FILL_IN"},"instance_pool_id":"0403-214809-inlet434-pool-l9dj3kwz"},"libraries":[{"jar":"FEATHR_FILL_IN"}],"spark_jar_task":{"main_class_name":"FEATHR_FILL_IN","parameters":["FEATHR_FILL_IN"]}}
    # Feathr Job location. Support local paths, path start with http(s)://, and paths start with dbfs:/
    work_dir: 'dbfs:/feathr_getting_started'
    # this is the default location so end users don't have to compile the runtime again.
    feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.7.2.jar"

online_store:
  redis:
    # Redis configs to access Redis cluster
    host: 'feathrazuretest3redis.redis.cache.windows.net'
    port: 6380
    ssl_enabled: true

feature_registry:
  # The API endpoint of the registry service
  api_endpoint: "https://feathr-sql-registry.azurewebsites.net/api/v1"

monitoring:
  database:
    sql:
      url: 'jdbc:postgresql://featuremonitoring.postgres.database.azure.com:5432/postgres'
      user: "demo"
Oops, something went wrong.