HonahX commented on code in PR #2936:
URL: https://github.com/apache/polaris/pull/2936#discussion_r2474641896
##########
regtests/t_pyspark/src/test_spark_sql_s3_with_privileges.py:
##########
@@ -495,15 +495,28 @@ def test_spark_credentials_can_delete_after_purge(root_client, snowflake_catalog
aws_secret_access_key=response.config['s3.secret-access-key'],
aws_session_token=response.config['s3.session-token'])
+ # Extract the table location from the metadata_location in the response
+ # metadata_location format: s3://bucket/path/to/table/metadata/v1.metadata.json
+ # We need to extract the base table path (everything before /metadata/)
+ metadata_location = response.metadata_location
+ assert metadata_location.startswith('s3://')
+ # Remove s3:// prefix and bucket name to get the path
+ path_without_scheme = metadata_location[5:] # Remove 's3://'
+ path_parts = path_without_scheme.split('/', 1) # Split bucket and path
+ bucket_from_metadata = path_parts[0]
+ full_path = path_parts[1] if len(path_parts) > 1 else ''
+ # Extract table base path (everything before /metadata/)
+ table_base_path = full_path.rsplit('/metadata/', 1)[0] if '/metadata/' in full_path else ''
+
objects = s3.list_objects(Bucket=test_bucket, Delimiter='/',
- Prefix=f'{aws_bucket_base_location_prefix}/snowflake_catalog/db1/schema/{table_name}/data/')
+ Prefix=f'{table_base_path}/data/')
Review Comment:
How did the checks before the drop with purge pass, since we were checking the wrong location? :scream: Seems we still somehow found some data files there.
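
For what it's worth, the prefix extraction above could lean on `urllib.parse.urlparse` instead of manual slicing. A minimal sketch of the same logic (the helper name `table_base_path_from_metadata` is my own, not part of this PR):

```python
from urllib.parse import urlparse

def table_base_path_from_metadata(metadata_location: str) -> str:
    # e.g. s3://bucket/path/to/table/metadata/v1.metadata.json -> path/to/table
    parsed = urlparse(metadata_location)
    assert parsed.scheme == 's3'
    key = parsed.path.lstrip('/')  # object key without the leading '/'
    return key.rsplit('/metadata/', 1)[0] if '/metadata/' in key else ''
```

And to dig into the question, a quick way to see what each prefix actually matches (a sketch; `old_prefix` / `new_prefix` stand in for the removed and added `Prefix` values in the hunk above):

```python
for label, prefix in (('old', old_prefix), ('new', new_prefix)):
    resp = s3.list_objects(Bucket=test_bucket, Delimiter='/', Prefix=prefix)
    keys = [obj['Key'] for obj in resp.get('Contents', [])]
    print(f'{label} prefix {prefix!r} matched {len(keys)} object(s)')
```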
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]