
Commit f1a521e

Fix test_spark_credentials_s3_exception_on_metadata_file_deletion (#1759)
1 parent 0bb062e commit f1a521e

File tree

1 file changed (+8, -5)

regtests/t_pyspark/src/test_spark_sql_s3_with_privileges.py

Lines changed: 8 additions & 5 deletions
@@ -1203,23 +1203,26 @@ def test_spark_credentials_s3_exception_on_metadata_file_deletion(root_client, s
     assert metadata_contents['ContentLength'] > 0

     # Delete metadata files
+    objects_to_delete = [{'Key': obj['Key']} for obj in objects['Contents']]
     s3.delete_objects(Bucket=test_bucket,
-                      Delete={'Objects': objects})
+                      Delete={'Objects': objects_to_delete})

     try:
         response = snowman_catalog_client.load_table(snowflake_catalog.name, unquote('db1%1Fschema'),
                                                      "iceberg_table",
                                                      "vended-credentials")
     except Exception as e:
-        assert '404' in str(e)
+        # A 400 (BadRequest) error is thrown when the metadata file is missing
+        assert '400' in str(e)


     with IcebergSparkSession(credentials=f'{snowman.principal.client_id}:{snowman.credentials.client_secret.get_secret_value()}',
                              catalog_name=snowflake_catalog.name,
                              polaris_url=polaris_catalog_url) as spark:
-        spark.sql(f'USE {snowflake_catalog.name}')
-        spark.sql('USE db1.schema')
-        spark.sql('DROP TABLE iceberg_table PURGE')
+        # Spark's DROP TABLE triggers a load_table underneath, which fails due to the missing metadata file.
+        # Directly call the drop_table API to drop the table entity instead.
+        snowman_catalog_client.drop_table(snowflake_catalog.name, unquote('db1%1Fschema'),
+                                          "iceberg_table")
         spark.sql(f'USE {snowflake_catalog.name}')
         spark.sql('DROP NAMESPACE db1.schema')
         spark.sql('DROP NAMESPACE db1')
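Why the original call failed: boto3's delete_objects expects Delete={'Objects': [...]}, where each entry is a bare object identifier of the form {'Key': ...} (optionally with a 'VersionId'). The test had been passing the raw list_objects response, which is neither a list nor restricted to the 'Key' field, so botocore's parameter validation rejects it. A minimal standalone sketch of the corrected pattern, with 'demo-bucket' as a placeholder for the test's test_bucket fixture:

import boto3

s3 = boto3.client('s3')
# List the bucket contents; 'Contents' is absent when the bucket is empty.
objects = s3.list_objects(Bucket='demo-bucket')  # placeholder bucket name
# Reduce each listed entry (which also carries ETag, Size, etc.) to the
# bare {'Key': ...} identifier that delete_objects accepts.
objects_to_delete = [{'Key': obj['Key']} for obj in objects.get('Contents', [])]
if objects_to_delete:
    s3.delete_objects(Bucket='demo-bucket',
                      Delete={'Objects': objects_to_delete})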

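A side note on the assertion style, offered as a hedged sketch rather than part of the commit: with a bare try/except, the test still passes if load_table unexpectedly succeeds, because no exception ever reaches the assert. pytest, which these regression tests are written for, can require the exception instead; pytest.raises(..., match=...) performs a regex search against the exception message. This reuses the client and fixtures from the test above:

import pytest

with pytest.raises(Exception, match='400'):
    snowman_catalog_client.load_table(snowflake_catalog.name, unquote('db1%1Fschema'),
                                      "iceberg_table",
                                      "vended-credentials")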