This is an automated email from the ASF dual-hosted git repository.
yufei pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/polaris.git
The following commit(s) were added to refs/heads/main by this push:
new 0638d5388 Fix Spark SQL regtests with up-to-date config (#1454)
0638d5388 is described below
commit 0638d538873c41eebe019fb441e1a76e8ecf3d61
Author: Michael Collado <[email protected]>
AuthorDate: Thu Apr 24 21:30:00 2025 -0700
Fix Spark SQL regtests with up-to-date config (#1454)
---
regtests/t_spark_sql/ref/spark_sql_s3.sh.ref | 2 +-
regtests/t_spark_sql/src/spark_sql_s3.sh | 8 ++++----
2 files changed, 5 insertions(+), 5 deletions(-)
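In short, this change removes the hardcoded team bucket and IAM role from the S3 regtest: the catalog's default-base-location and roleArn are now taken from the AWS_TEST_BASE and AWS_ROLE_ARN environment variables, and the config response is normalized before it is compared against the ref file. A minimal sketch of the environment such a run would need (the values below are placeholders, not from this commit, and the regtest harness may invoke the script differently):

  export AWS_TEST_ENABLED=true                                 # gate checked at the top of spark_sql_s3.sh
  export AWS_TEST_BASE=s3://my-test-bucket                     # base S3 location for the catalog
  export AWS_ROLE_ARN=arn:aws:iam::123456789012:role/my-role   # role Polaris assumes for S3 access
  ./regtests/t_spark_sql/src/spark_sql_s3.sh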
diff --git a/regtests/t_spark_sql/ref/spark_sql_s3.sh.ref b/regtests/t_spark_sql/ref/spark_sql_s3.sh.ref
index a25c74543..029590b5c 100755
--- a/regtests/t_spark_sql/ref/spark_sql_s3.sh.ref
+++ b/regtests/t_spark_sql/ref/spark_sql_s3.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"s3://datalake-storage-team/polaris_test/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_s3_catalog"},"endpoints":["GET
/v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST
/v1/{prefix}/namespaces","POST
/v1/{prefix}/namespaces/{namespace}/properties","DELETE
/v1/{prefix}/namespaces/{namespace}","GET
/v1/{prefix}/namespaces/{namespace}/tables","GET
/v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/names
[...]
+{"defaults":{"default-base-location":"s3://test_bucket/polaris_test/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_s3_catalog"},"endpoints":["GET
/v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","HEAD
/v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST
/v1/{prefix}/namespaces/{namespace}/properties","DELETE
/v1/{prefix}/namespaces/{namespace}","GET
/v1/{prefix}/namespaces/{namespace}/tables","GET
/v1/{prefix}/namespaces/{namespace}/tables/ [...]
Catalog created
spark-sql (default)> use polaris;
spark-sql ()> show namespaces;
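The updated ref file expects s3://test_bucket rather than a real bucket because the script now pipes the config response through sed, masking whatever location AWS_TEST_BASE points at so the expected output stays stable across environments. Roughly (a sketch of the normalization step, not the verbatim script text):

  curl -s -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" \
    "http://${POLARIS_HOST:-localhost}:8181/api/catalog/v1/config?warehouse=spark_sql_s3_catalog" \
    | sed "s|${AWS_TEST_BASE}|s3://test_bucket|g"

The new HEAD /v1/{prefix}/namespaces/{namespace} entry in the endpoints list reflects the namespace-exists endpoint the server now advertises.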
diff --git a/regtests/t_spark_sql/src/spark_sql_s3.sh b/regtests/t_spark_sql/src/spark_sql_s3.sh
index 949bd9631..c9098eefd 100755
--- a/regtests/t_spark_sql/src/spark_sql_s3.sh
+++ b/regtests/t_spark_sql/src/spark_sql_s3.sh
@@ -25,18 +25,18 @@ if [ -z "$AWS_TEST_ENABLED" ] || [ "$AWS_TEST_ENABLED" != "true" ]; then
fi
SPARK_BEARER_TOKEN="${REGTEST_ROOT_BEARER_TOKEN}"
-
+set -x
curl -i -X POST -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs \
- -d "{\"name\": \"spark_sql_s3_catalog\", \"id\": 100, \"type\": \"INTERNAL\", \"readOnly\": false, \"properties\": {\"default-base-location\": \"s3://datalake-storage-team/polaris_test/spark_sql_s3_catalog\"}, \"storageConfigInfo\": {\"storageType\": \"S3\", \"allowedLocations\": [\"${AWS_TEST_BASE}/polaris_test/\"], \"roleArn\": \"arn:aws:iam::631484165566:role/datalake-storage-integration-role\"}}" > /dev/stderr
-
+ -d "{\"name\": \"spark_sql_s3_catalog\", \"id\": 100, \"type\": \"INTERNAL\", \"readOnly\": false, \"properties\": {\"default-base-location\": \"${AWS_TEST_BASE}/polaris_test/spark_sql_s3_catalog\"}, \"storageConfigInfo\": {\"storageType\": \"S3\", \"allowedLocations\": [\"${AWS_TEST_BASE}/polaris_test/\"], \"roleArn\": \"${AWS_ROLE_ARN}\"}}" > /dev/stderr
+set +x
# Add TABLE_WRITE_DATA to the catalog's catalog_admin role since by default it can only manage access and metadata
curl -i -X PUT -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs/spark_sql_s3_catalog/catalog-roles/catalog_admin/grants \
-d '{"type": "catalog", "privilege": "TABLE_WRITE_DATA"}' > /dev/stderr
curl -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept:
application/json' -H 'Content-Type: application/json' \
-
"http://${POLARIS_HOST:-localhost}:8181/api/catalog/v1/config?warehouse=spark_sql_s3_catalog"
+
"http://${POLARIS_HOST:-localhost}:8181/api/catalog/v1/config?warehouse=spark_sql_s3_catalog"
| sed "s|${AWS_TEST_BASE}|s3://test_bucket|g"
echo
echo "Catalog created"
cat << EOF | ${SPARK_HOME}/bin/spark-sql -S --conf spark.sql.catalog.polaris.token="${SPARK_BEARER_TOKEN}" --conf spark.sql.catalog.polaris.warehouse=spark_sql_s3_catalog