From b181bde67b52d3f4e4af4b370181a6f890ae5ebc Mon Sep 17 00:00:00 2001 From: Lev Pickovsky Date: Sun, 24 Apr 2022 14:13:14 +0300 Subject: [PATCH] fix: Return the min before the max entity-DF event timestamp when calculating the timestamp range in the Spark offline store. Signed-off-by: Lev Pickovsky --- .../infra/offline_stores/contrib/spark_offline_store/spark.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py index 770bd8adc2..f42b4bdb87 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py @@ -324,8 +324,8 @@ def _get_entity_df_event_timestamp_range( df = spark_session.sql(entity_df).select(entity_df_event_timestamp_col) # TODO(kzhang132): need utc conversion here. entity_df_event_timestamp_range = ( - df.agg({entity_df_event_timestamp_col: "max"}).collect()[0][0], df.agg({entity_df_event_timestamp_col: "min"}).collect()[0][0], + df.agg({entity_df_event_timestamp_col: "max"}).collect()[0][0], ) else: raise InvalidEntityType(type(entity_df))