diff --git a/sdk/python/feast/feature_server.py b/sdk/python/feast/feature_server.py
index 3abca1d6e8..7c638dd248 100644
--- a/sdk/python/feast/feature_server.py
+++ b/sdk/python/feast/feature_server.py
@@ -1,9 +1,11 @@
 import json
 import traceback
 import warnings
+from typing import List, Optional
 
 import gunicorn.app.base
 import pandas as pd
+from dateutil import parser
 from fastapi import FastAPI, HTTPException, Request, Response, status
 from fastapi.logger import logger
 from fastapi.params import Depends
@@ -11,7 +13,7 @@
 from pydantic import BaseModel
 
 import feast
-from feast import proto_json
+from feast import proto_json, utils
 from feast.data_source import PushMode
 from feast.errors import PushSourceNotFoundException
 from feast.protos.feast.serving.ServingService_pb2 import GetOnlineFeaturesRequest
@@ -31,6 +33,17 @@ class PushFeaturesRequest(BaseModel):
     to: str = "online"
 
 
+class MaterializeRequest(BaseModel):
+    start_ts: str
+    end_ts: str
+    feature_views: Optional[List[str]] = None
+
+
+class MaterializeIncrementalRequest(BaseModel):
+    end_ts: str
+    feature_views: Optional[List[str]] = None
+
+
 def get_app(store: "feast.FeatureStore"):
     proto_json.patch()
 
@@ -134,6 +147,34 @@ def write_to_online_store(body=Depends(get_body)):
     def health():
         return Response(status_code=status.HTTP_200_OK)
 
+    @app.post("/materialize")
+    def materialize(body=Depends(get_body)):
+        try:
+            request = MaterializeRequest(**json.loads(body))
+            store.materialize(
+                utils.make_tzaware(parser.parse(request.start_ts)),
+                utils.make_tzaware(parser.parse(request.end_ts)),
+                request.feature_views,
+            )
+        except Exception as e:
+            # Print the original exception on the server side
+            logger.exception(traceback.format_exc())
+            # Raise HTTPException to return the error message to the client
+            raise HTTPException(status_code=500, detail=str(e))
+
+    @app.post("/materialize-incremental")
+    def materialize_incremental(body=Depends(get_body)):
+        try:
+            request = MaterializeIncrementalRequest(**json.loads(body))
+            store.materialize_incremental(
+                utils.make_tzaware(parser.parse(request.end_ts)), request.feature_views
+            )
+        except Exception as e:
+            # Print the original exception on the server side
+            logger.exception(traceback.format_exc())
+            # Raise HTTPException to return the error message to the client
+            raise HTTPException(status_code=500, detail=str(e))
+
     return app
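The two new endpoints parse the JSON request body themselves, make the parsed timestamps timezone-aware, and surface any failure as an HTTP 500 carrying the exception message. A minimal client sketch against a locally running feature server (the port assumes `feast serve`'s default of 6566; timestamps and the feature view name are illustrative):

```python
# Hypothetical client call for the new /materialize endpoint; assumes a
# feature server running locally on the default port 6566.
import requests

resp = requests.post(
    "http://localhost:6566/materialize",
    json={
        "start_ts": "2023-01-01T00:00:00",
        "end_ts": "2023-01-02T00:00:00",
        # Optional: omit feature_views to materialize every feature view.
        "feature_views": ["driver_hourly_stats"],
    },
)
resp.raise_for_status()  # a failed materialization surfaces as HTTP 500
```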
diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py
index 5913b60f62..86c587c7fd 100644
--- a/sdk/python/feast/infra/offline_stores/bigquery.py
+++ b/sdk/python/feast/infra/offline_stores/bigquery.py
@@ -19,7 +19,7 @@
 import pandas as pd
 import pyarrow
 import pyarrow.parquet
-from pydantic import StrictStr, validator
+from pydantic import ConstrainedStr, StrictStr, validator
 from pydantic.typing import Literal
 from tenacity import Retrying, retry_if_exception_type, stop_after_delay, wait_fixed
 
@@ -72,6 +72,13 @@ def get_http_client_info():
     return http_client_info.ClientInfo(user_agent=get_user_agent())
 
 
+class BigQueryTableCreateDisposition(ConstrainedStr):
+    """Custom constraint for table_create_disposition. To understand more, see:
+    https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.create_disposition"""
+
+    values = {"CREATE_NEVER", "CREATE_IF_NEEDED"}
+
+
 class BigQueryOfflineStoreConfig(FeastConfigBaseModel):
     """Offline store config for GCP BigQuery"""
 
@@ -95,6 +102,9 @@ class BigQueryOfflineStoreConfig(FeastConfigBaseModel):
     gcs_staging_location: Optional[str] = None
     """ (optional) GCS location used for offloading BigQuery results as parquet files."""
 
+    table_create_disposition: Optional[BigQueryTableCreateDisposition] = None
+    """ (optional) Specifies whether the job is allowed to create new tables. The default value is CREATE_IF_NEEDED."""
+
     @validator("billing_project_id")
     def project_id_exists(cls, v, values, **kwargs):
         if v and not values["project_id"]:
@@ -324,6 +334,7 @@ def write_logged_features(
         job_config = bigquery.LoadJobConfig(
             source_format=bigquery.SourceFormat.PARQUET,
             schema=arrow_schema_to_bq_schema(source.get_schema(registry)),
+            create_disposition=config.offline_store.table_create_disposition,
             time_partitioning=bigquery.TimePartitioning(
                 type_=bigquery.TimePartitioningType.DAY,
                 field=source.get_log_timestamp_column(),
@@ -384,6 +395,7 @@ def offline_write_batch(
         job_config = bigquery.LoadJobConfig(
             source_format=bigquery.SourceFormat.PARQUET,
             schema=arrow_schema_to_bq_schema(pa_schema),
+            create_disposition=config.offline_store.table_create_disposition,
             write_disposition="WRITE_APPEND",  # Default but included for clarity
         )
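The new `table_create_disposition` option is threaded into both BigQuery load jobs above. A sketch of what the flag controls at the BigQuery client level (client and destination setup omitted; accepted values per the Google docs linked in the docstring):

```python
# Sketch: the effect of create_disposition on a BigQuery load job.
from google.cloud import bigquery

job_config = bigquery.LoadJobConfig(
    source_format=bigquery.SourceFormat.PARQUET,
    # CREATE_IF_NEEDED (BigQuery's default) creates the destination table on
    # demand; CREATE_NEVER fails the load if the table does not already exist.
    create_disposition="CREATE_NEVER",
)
```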
diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py
index 38568ce79b..5ab11cf3f2 100644
--- a/sdk/python/feast/infra/offline_stores/snowflake.py
+++ b/sdk/python/feast/infra/offline_stores/snowflake.py
@@ -1,4 +1,5 @@
 import contextlib
+import json
 import os
 import uuid
 import warnings
@@ -51,6 +52,7 @@
 )
 from feast.repo_config import FeastConfigBaseModel, RepoConfig
 from feast.saved_dataset import SavedDatasetStorage
+from feast.types import Array, Float32, Float64, Int32, Int64, String, UnixTimestamp
 from feast.usage import log_exceptions_and_usage
 
 try:
@@ -320,6 +322,7 @@ def query_generator() -> Iterator[str]:
             on_demand_feature_views=OnDemandFeatureView.get_requested_odfvs(
                 feature_refs, project, registry
             ),
+            feature_views=feature_views,
             metadata=RetrievalMetadata(
                 features=feature_refs,
                 keys=list(entity_schema.keys() - {entity_df_event_timestamp_col}),
@@ -398,9 +401,12 @@ def __init__(
         config: RepoConfig,
         full_feature_names: bool,
         on_demand_feature_views: Optional[List[OnDemandFeatureView]] = None,
+        feature_views: Optional[List[FeatureView]] = None,
         metadata: Optional[RetrievalMetadata] = None,
     ):
+        if feature_views is None:
+            feature_views = []
         if not isinstance(query, str):
             self._query_generator = query
         else:
@@ -416,6 +422,7 @@ def query_generator() -> Iterator[str]:
         self.config = config
         self._full_feature_names = full_feature_names
         self._on_demand_feature_views = on_demand_feature_views or []
+        self._feature_views = feature_views
         self._metadata = metadata
         self.export_path: Optional[str]
         if self.config.offline_store.blob_export_location:
@@ -436,6 +443,18 @@ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame:
             self.snowflake_conn, self.to_sql()
         ).fetch_pandas_all()
 
+        for feature_view in self._feature_views:
+            for feature in feature_view.features:
+                if feature.dtype in [
+                    Array(String),
+                    Array(Int32),
+                    Array(Int64),
+                    Array(UnixTimestamp),
+                    Array(Float64),
+                    Array(Float32),
+                ]:
+                    df[feature.name] = [json.loads(x) for x in df[feature.name]]
+
         return df
 
     def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table:
diff --git a/sdk/python/feast/infra/offline_stores/snowflake_source.py b/sdk/python/feast/infra/offline_stores/snowflake_source.py
index 95bd46f1ec..0cbf82dd1c 100644
--- a/sdk/python/feast/infra/offline_stores/snowflake_source.py
+++ b/sdk/python/feast/infra/offline_stores/snowflake_source.py
@@ -279,12 +279,12 @@ def get_table_column_names_and_types(
             else:
                 row["snowflake_type"] = "NUMBERwSCALE"
 
-        elif row["type_code"] in [5, 9, 10, 12]:
+        elif row["type_code"] in [5, 9, 12]:
             error = snowflake_unsupported_map[row["type_code"]]
             raise NotImplementedError(
                 f"The following Snowflake Data Type is not supported: {error}"
             )
-        elif row["type_code"] in [1, 2, 3, 4, 6, 7, 8, 11, 13]:
+        elif row["type_code"] in [1, 2, 3, 4, 6, 7, 8, 10, 11, 13]:
             row["snowflake_type"] = snowflake_type_code_map[row["type_code"]]
         else:
             raise NotImplementedError(
@@ -305,6 +305,7 @@ def get_table_column_names_and_types(
     6: "TIMESTAMP_LTZ",
     7: "TIMESTAMP_TZ",
     8: "TIMESTAMP_NTZ",
+    10: "ARRAY",
    11: "BINARY",
    13: "BOOLEAN",
 }
@@ -312,7 +313,6 @@
 
 snowflake_unsupported_map = {
     5: "VARIANT -- Try converting to VARCHAR",
     9: "OBJECT -- Try converting to VARCHAR",
-    10: "ARRAY -- Try converting to VARCHAR",
     12: "TIME -- Try converting to VARCHAR",
 }
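Snowflake's connector hands ARRAY columns back to pandas as JSON-encoded strings, which is why `_to_df_internal` above decodes the supported list-typed feature columns with `json.loads`. A toy illustration of that post-processing step (column name and data are made up):

```python
# Toy example of the json.loads post-processing applied to ARRAY columns.
import json

import pandas as pd

df = pd.DataFrame({"embedding": ["[1.0, 2.0, 3.0]", "[4.0, 5.0, 6.0]"]})
df["embedding"] = [json.loads(x) for x in df["embedding"]]
assert df["embedding"][0] == [1.0, 2.0, 3.0]
```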
diff --git a/sdk/python/feast/infra/registry/proto_registry_utils.py b/sdk/python/feast/infra/registry/proto_registry_utils.py
index c7eeea0f82..e93f513b69 100644
--- a/sdk/python/feast/infra/registry/proto_registry_utils.py
+++ b/sdk/python/feast/infra/registry/proto_registry_utils.py
@@ -1,4 +1,5 @@
 import uuid
+from functools import wraps
 from typing import List, Optional
 
 from feast import usage
@@ -23,6 +24,26 @@
 from feast.stream_feature_view import StreamFeatureView
 
 
+def registry_proto_cache(func):
+    cache_key = None
+    cache_value = None
+
+    @wraps(func)
+    def wrapper(registry_proto: RegistryProto, project: str):
+        nonlocal cache_key, cache_value
+
+        key = tuple([id(registry_proto), registry_proto.version_id, project])
+
+        if key == cache_key:
+            return cache_value
+        else:
+            cache_value = func(registry_proto, project)
+            cache_key = key
+            return cache_value
+
+    return wrapper
+
+
 def init_project_metadata(cached_registry_proto: RegistryProto, project: str):
     new_project_uuid = f"{uuid.uuid4()}"
     usage.set_current_project_uuid(new_project_uuid)
@@ -137,8 +158,9 @@ def get_validation_reference(
     raise ValidationReferenceNotFound(name, project=project)
 
 
+@registry_proto_cache
 def list_feature_services(
-    registry_proto: RegistryProto, project: str, allow_cache: bool = False
+    registry_proto: RegistryProto, project: str
 ) -> List[FeatureService]:
     feature_services = []
     for feature_service_proto in registry_proto.feature_services:
@@ -147,6 +169,7 @@ def list_feature_services(
     return feature_services
 
 
+@registry_proto_cache
 def list_feature_views(
     registry_proto: RegistryProto, project: str
 ) -> List[FeatureView]:
@@ -157,6 +180,7 @@ def list_feature_views(
     return feature_views
 
 
+@registry_proto_cache
 def list_request_feature_views(
     registry_proto: RegistryProto, project: str
 ) -> List[RequestFeatureView]:
@@ -169,6 +193,7 @@ def list_request_feature_views(
     return feature_views
 
 
+@registry_proto_cache
 def list_stream_feature_views(
     registry_proto: RegistryProto, project: str
 ) -> List[StreamFeatureView]:
@@ -181,6 +206,7 @@ def list_stream_feature_views(
     return stream_feature_views
 
 
+@registry_proto_cache
 def list_on_demand_feature_views(
     registry_proto: RegistryProto, project: str
 ) -> List[OnDemandFeatureView]:
@@ -193,6 +219,7 @@ def list_on_demand_feature_views(
     return on_demand_feature_views
 
 
+@registry_proto_cache
 def list_entities(registry_proto: RegistryProto, project: str) -> List[Entity]:
     entities = []
     for entity_proto in registry_proto.entities:
@@ -201,6 +228,7 @@ def list_entities(registry_proto: RegistryProto, project: str) -> List[Entity]:
     return entities
 
 
+@registry_proto_cache
 def list_data_sources(registry_proto: RegistryProto, project: str) -> List[DataSource]:
     data_sources = []
     for data_source_proto in registry_proto.data_sources:
@@ -209,6 +237,7 @@ def list_data_sources(registry_proto: RegistryProto, project: str) -> List[DataS
     return data_sources
 
 
+@registry_proto_cache
 def list_saved_datasets(
     registry_proto: RegistryProto, project: str
 ) -> List[SavedDataset]:
@@ -219,6 +248,7 @@ def list_saved_datasets(
     return saved_datasets
 
 
+@registry_proto_cache
 def list_validation_references(
     registry_proto: RegistryProto, project: str
 ) -> List[ValidationReference]:
@@ -231,6 +261,7 @@ def list_validation_references(
     return validation_references
 
 
+@registry_proto_cache
 def list_project_metadata(
     registry_proto: RegistryProto, project: str
 ) -> List[ProjectMetadata]:
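`registry_proto_cache` is a single-slot memoizer: it remembers only the most recent `(proto identity, version_id, project)` key, so repeated `list_*` calls against an unchanged cached registry skip the proto-to-object conversion, while any registry refresh invalidates the slot. A self-contained sketch of the same pattern (all names here are hypothetical stand-ins, not Feast APIs):

```python
from functools import wraps


def single_slot_cache(func):
    # Same shape as registry_proto_cache: keep only the last key/value pair.
    cache_key = None
    cache_value = None

    @wraps(func)
    def wrapper(proto, project):
        nonlocal cache_key, cache_value
        key = (id(proto), proto.version_id, project)
        if key != cache_key:
            cache_value = func(proto, project)
            cache_key = key
        return cache_value

    return wrapper


class FakeRegistryProto:
    version_id = "v1"


calls = 0


@single_slot_cache
def list_things(proto, project):
    global calls
    calls += 1
    return [project]


proto = FakeRegistryProto()
list_things(proto, "demo")
list_things(proto, "demo")  # second call is served from the cache
assert calls == 1
proto.version_id = "v2"  # a registry refresh changes the key
list_things(proto, "demo")
assert calls == 2
```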
diff --git a/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_python_udfs_creation.sql b/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_python_udfs_creation.sql
index a197a3ee4c..a444c0b7c5 100644
--- a/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_python_udfs_creation.sql
+++ b/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_python_udfs_creation.sql
@@ -14,6 +14,62 @@ CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_varchar_to_string_pro
   HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_varchar_to_string_proto'
   IMPORTS = ('@STAGE_HOLDER/feast.zip');
 
+CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_bytes_to_list_bytes_proto(df ARRAY)
+  RETURNS BINARY
+  LANGUAGE PYTHON
+  RUNTIME_VERSION = '3.8'
+  PACKAGES = ('protobuf', 'pandas')
+  HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_bytes_to_list_bytes_proto'
+  IMPORTS = ('@STAGE_HOLDER/feast.zip');
+
+CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_varchar_to_list_string_proto(df ARRAY)
+  RETURNS BINARY
+  LANGUAGE PYTHON
+  RUNTIME_VERSION = '3.8'
+  PACKAGES = ('protobuf', 'pandas')
+  HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_varchar_to_list_string_proto'
+  IMPORTS = ('@STAGE_HOLDER/feast.zip');
+
+CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_number_to_list_int32_proto(df ARRAY)
+  RETURNS BINARY
+  LANGUAGE PYTHON
+  RUNTIME_VERSION = '3.8'
+  PACKAGES = ('protobuf', 'pandas')
+  HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_number_to_list_int32_proto'
+  IMPORTS = ('@STAGE_HOLDER/feast.zip');
+
+CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_number_to_list_int64_proto(df ARRAY)
+  RETURNS BINARY
+  LANGUAGE PYTHON
+  RUNTIME_VERSION = '3.8'
+  PACKAGES = ('protobuf', 'pandas')
+  HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_number_to_list_int64_proto'
+  IMPORTS = ('@STAGE_HOLDER/feast.zip');
+
+CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_float_to_list_double_proto(df ARRAY)
+  RETURNS BINARY
+  LANGUAGE PYTHON
+  RUNTIME_VERSION = '3.8'
+  PACKAGES = ('protobuf', 'pandas')
+  HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_float_to_list_double_proto'
+  IMPORTS = ('@STAGE_HOLDER/feast.zip');
+
+CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_boolean_to_list_bool_proto(df ARRAY)
+  RETURNS BINARY
+  LANGUAGE PYTHON
+  RUNTIME_VERSION = '3.8'
+  PACKAGES = ('protobuf', 'pandas')
+  HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_boolean_to_list_bool_proto'
+  IMPORTS = ('@STAGE_HOLDER/feast.zip');
+
+CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_array_timestamp_to_list_unix_timestamp_proto(df ARRAY)
+  RETURNS BINARY
+  LANGUAGE PYTHON
+  RUNTIME_VERSION = '3.8'
+  PACKAGES = ('protobuf', 'pandas')
+  HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_timestamp_to_list_unix_timestamp_proto'
+  IMPORTS = ('@STAGE_HOLDER/feast.zip');
+
 CREATE FUNCTION IF NOT EXISTS feast_PROJECT_NAME_snowflake_number_to_int32_proto(df NUMBER)
   RETURNS BINARY
   LANGUAGE PYTHON
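The `PROJECT_NAME` and `STAGE_HOLDER` tokens in the DDL above are placeholders that get substituted before the statements run against Snowflake. A hypothetical sketch of that specialization step (the real substitution path in Feast may differ):

```python
# Hypothetical: specialize the templated DDL for one project and stage.
with open("snowflake_python_udfs_creation.sql") as f:
    ddl = f.read()

ddl = ddl.replace("PROJECT_NAME", "demo").replace("STAGE_HOLDER", "feast_stage")
# Each resulting statement can then be executed over a Snowflake connection.
```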
diff --git a/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py b/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py
index 02311ca55d..6eb827788f 100644
--- a/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py
+++ b/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py
@@ -59,6 +59,175 @@ def feast_snowflake_varchar_to_string_proto(df):
     return df
 
 
+"""
+CREATE OR REPLACE FUNCTION feast_snowflake_array_bytes_to_list_bytes_proto(df ARRAY)
+  RETURNS BINARY
+  LANGUAGE PYTHON
+  RUNTIME_VERSION = '3.8'
+  PACKAGES = ('protobuf', 'pandas')
+  HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_bytes_to_list_bytes_proto'
+  IMPORTS = ('@feast_stage/feast.zip');
+"""
+# ValueType.BYTES_LIST = 11
+@vectorized(input=pandas.DataFrame)
+def feast_snowflake_array_bytes_to_list_bytes_proto(df):
+    sys._xoptions["snowflake_partner_attribution"].append("feast")
+
+    df = list(
+        map(
+            ValueProto.SerializeToString,
+            python_values_to_proto_values(df[0].to_numpy(), ValueType.BYTES_LIST),
+        )
+    )
+    return df
+
+
+"""
+CREATE OR REPLACE FUNCTION feast_snowflake_array_varchar_to_list_string_proto(df ARRAY)
+  RETURNS BINARY
+  LANGUAGE PYTHON
+  RUNTIME_VERSION = '3.8'
+  PACKAGES = ('protobuf', 'pandas')
+  HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_varchar_to_list_string_proto'
+  IMPORTS = ('@feast_stage/feast.zip');
+"""
+
+
+@vectorized(input=pandas.DataFrame)
+def feast_snowflake_array_varchar_to_list_string_proto(df):
+    sys._xoptions["snowflake_partner_attribution"].append("feast")
+
+    df = list(
+        map(
+            ValueProto.SerializeToString,
+            python_values_to_proto_values(df[0].to_numpy(), ValueType.STRING_LIST),
+        )
+    )
+    return df
+
+
+"""
+CREATE OR REPLACE FUNCTION feast_snowflake_array_number_to_list_int32_proto(df ARRAY)
+  RETURNS BINARY
+  LANGUAGE PYTHON
+  RUNTIME_VERSION = '3.8'
+  PACKAGES = ('protobuf', 'pandas')
+  HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_number_to_list_int32_proto'
+  IMPORTS = ('@feast_stage/feast.zip');
+"""
+
+
+@vectorized(input=pandas.DataFrame)
+def feast_snowflake_array_number_to_list_int32_proto(df):
+    sys._xoptions["snowflake_partner_attribution"].append("feast")
+
+    df = list(
+        map(
+            ValueProto.SerializeToString,
+            python_values_to_proto_values(df[0].to_numpy(), ValueType.INT32_LIST),
+        )
+    )
+    return df
+
+
+"""
+CREATE OR REPLACE FUNCTION feast_snowflake_array_number_to_list_int64_proto(df ARRAY)
+  RETURNS BINARY
+  LANGUAGE PYTHON
+  RUNTIME_VERSION = '3.8'
+  PACKAGES = ('protobuf', 'pandas')
+  HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_number_to_list_int64_proto'
+  IMPORTS = ('@feast_stage/feast.zip');
+"""
+
+
+@vectorized(input=pandas.DataFrame)
+def feast_snowflake_array_number_to_list_int64_proto(df):
+    sys._xoptions["snowflake_partner_attribution"].append("feast")
+
+    df = list(
+        map(
+            ValueProto.SerializeToString,
+            python_values_to_proto_values(df[0].to_numpy(), ValueType.INT64_LIST),
+        )
+    )
+    return df
+
+
+"""
+CREATE OR REPLACE FUNCTION feast_snowflake_array_float_to_list_double_proto(df ARRAY)
+  RETURNS BINARY
+  LANGUAGE PYTHON
+  RUNTIME_VERSION = '3.8'
+  PACKAGES = ('protobuf', 'pandas')
+  HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_float_to_list_double_proto'
+  IMPORTS = ('@feast_stage/feast.zip');
+"""
+
+
+@vectorized(input=pandas.DataFrame)
+def feast_snowflake_array_float_to_list_double_proto(df):
+    sys._xoptions["snowflake_partner_attribution"].append("feast")
+
+    df = list(
+        map(
+            ValueProto.SerializeToString,
+            python_values_to_proto_values(df[0].to_numpy(), ValueType.DOUBLE_LIST),
+        )
+    )
+    return df
+
+
+"""
+CREATE OR REPLACE FUNCTION feast_snowflake_array_boolean_to_list_bool_proto(df ARRAY)
+  RETURNS BINARY
+  LANGUAGE PYTHON
+  RUNTIME_VERSION = '3.8'
+  PACKAGES = ('protobuf', 'pandas')
+  HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_boolean_to_list_bool_proto'
+  IMPORTS = ('@feast_stage/feast.zip');
+"""
+
+
+@vectorized(input=pandas.DataFrame)
+def feast_snowflake_array_boolean_to_list_bool_proto(df):
+    sys._xoptions["snowflake_partner_attribution"].append("feast")
+
+    df = list(
+        map(
+            ValueProto.SerializeToString,
+            python_values_to_proto_values(df[0].to_numpy(), ValueType.BOOL_LIST),
+        )
+    )
+    return df
+
+
+"""
+CREATE OR REPLACE FUNCTION feast_snowflake_array_timestamp_to_list_unix_timestamp_proto(df ARRAY)
+  RETURNS BINARY
+  LANGUAGE PYTHON
+  RUNTIME_VERSION = '3.8'
+  PACKAGES = ('protobuf', 'pandas')
+  HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_timestamp_to_list_unix_timestamp_proto'
+  IMPORTS = ('@feast_stage/feast.zip');
+"""
+
+
+@vectorized(input=pandas.DataFrame)
+def feast_snowflake_array_timestamp_to_list_unix_timestamp_proto(df):
+    sys._xoptions["snowflake_partner_attribution"].append("feast")
+
+    df = list(
+        map(
+            ValueProto.SerializeToString,
+            python_values_to_proto_values(
+                df[0].to_numpy(), ValueType.UNIX_TIMESTAMP_LIST
+            ),
+        )
+    )
+    return df
+
+
 """
 CREATE OR REPLACE FUNCTION feast_snowflake_number_to_int32_proto(df NUMBER)
   RETURNS BINARY
   LANGUAGE PYTHON
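Every UDF above is the same three-step pipeline: take the column out of the vectorized DataFrame, convert its Python values into Feast `Value` protos of the target list type, and serialize each proto to bytes for the BINARY return column. The conversion can be exercised locally with Feast's own helpers (assumes a Feast development install; the sample values are made up):

```python
# What one vectorized UDF computes, run outside Snowflake for illustration.
from feast.protos.feast.types.Value_pb2 import Value as ValueProto
from feast.type_map import python_values_to_proto_values
from feast.value_type import ValueType

rows = [[1, 2, 3], [4, 5, 6]]
protos = python_values_to_proto_values(rows, ValueType.INT64_LIST)
serialized = list(map(ValueProto.SerializeToString, protos))
print(len(serialized))  # one binary proto payload per input row
```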
"BYTES_LIST": f"feast_{project_name}_snowflake_array_bytes_to_list_bytes_proto", + "STRING_LIST": f"feast_{project_name}_snowflake_array_varchar_to_list_string_proto", + "INT32_LIST": f"feast_{project_name}_snowflake_array_number_to_list_int32_proto", + "INT64_LIST": f"feast_{project_name}_snowflake_array_number_to_list_int64_proto", + "DOUBLE_LIST": f"feast_{project_name}_snowflake_array_float_to_list_double_proto", + "FLOAT_LIST": f"feast_{project_name}_snowflake_array_float_to_list_double_proto", + "BOOL_LIST": f"feast_{project_name}_snowflake_array_boolean_to_list_bool_proto", + "UNIX_TIMESTAMP_LIST": f"feast_{project_name}_snowflake_array_timestamp_to_list_unix_timestamp_proto", } return name_map[value_name].upper()