This is an automated email from the ASF dual-hosted git repository.
sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-release.git
The following commit(s) were added to refs/heads/main by this push:
new 16eec17 Simplify the script for downloading releases with curl
16eec17 is described below
commit 16eec177c89c0098f1ef17d1779281fd2f374841
Author: Sean B. Palmer <[email protected]>
AuthorDate: Mon May 5 14:32:33 2025 +0100
Simplify the script for downloading releases with curl
---
atr/routes/download.py | 6 +--
atr/static/sh/download-urls.sh | 84 ++++--------------------------------------
2 files changed, 11 insertions(+), 79 deletions(-)
diff --git a/atr/routes/download.py b/atr/routes/download.py
index de7237b..d41941d 100644
--- a/atr/routes/download.py
+++ b/atr/routes/download.py
@@ -97,7 +97,7 @@ async def urls_selected(project_name: str, version_name: str) -> response.Response:
ValueError("Release not found")
)
url_list_str = await _generate_file_url_list(release)
- return quart.Response(url_list_str + "\n", mimetype="text/plain")
+ return quart.Response(url_list_str, mimetype="text/plain")
except ValueError as e:
return quart.Response(f"Error: {e}", status=404, mimetype="text/plain")
except Exception as e:
@@ -184,8 +184,8 @@ async def _generate_file_url_list(release: models.Release) -> str:
file_path=str(rel_path),
_external=True,
)
- urls.append(abs_url)
- return "\n".join(sorted(urls))
+ urls.append(abs_url + " " + str(rel_path))
+ return "\n".join(sorted(urls)) + "\n"
async def _list(
diff --git a/atr/static/sh/download-urls.sh b/atr/static/sh/download-urls.sh
index 4aba10d..740f97d 100644
--- a/atr/static/sh/download-urls.sh
+++ b/atr/static/sh/download-urls.sh
@@ -1,83 +1,15 @@
#!/bin/sh
-set -eu
+set -efu
_url_of_urls="[URL_OF_URLS]"
-_urls_prefix="[URLS_PREFIX]"
-_hex_to_dec() {
- case $1 in
- 0) printf 0;;
- 1) printf 1;;
- 2) printf 2;;
- 3) printf 3;;
- 4) printf 4;;
- 5) printf 5;;
- 6) printf 6;;
- 7) printf 7;;
- 8) printf 8;;
- 9) printf 9;;
- a|A) printf 10;;
- b|B) printf 11;;
- c|C) printf 12;;
- d|D) printf 13;;
- e|E) printf 14;;
- f|F) printf 15;;
- esac
-}
-
-_hex2_to_oct() {
- _a="${1%"${1#?}"}"
- _b="${1#?}"
- _a_dec=$(_hex_to_dec "$_a")
- _b_dec=$(_hex_to_dec "$_b")
- _total_dec=$((_a_dec * 16 + _b_dec))
- printf "%o" "$_total_dec"
-}
-
-_url_decode() {
- _u=$1
- while [ "$_u" ]
- do
- case $_u in
- %??*)
- _hh=${_u#%}
- _hh=${_hh%"${_hh#??}"}
- case $_hh in
- [0-9A-Fa-f][0-9A-Fa-f])
- # shellcheck disable=SC2059
- printf "\\$(_hex2_to_oct "$_hh")"
- _u=${_u#%??}
- continue
- esac
- ;;
- esac
- printf %c "${_u%"${_u#?}"}"
- _u=${_u#?}
- done
-}
-
-_curl() {
- if [ -n "${CURL_EXTRA-}" ]
- then
- set -f
- # shellcheck disable=SC2086
- command curl $CURL_EXTRA "$@"
- _code=$?
- set +f
- return "$_code"
- else
- command curl "$@"
- fi
-}
-
-_curl -fsS "$_url_of_urls" | while IFS= read -r _url
+# shellcheck disable=SC2086
+curl ${CURL_EXTRA:-} -fsS "$_url_of_urls" | while IFS= read -r _url_and_path
do
- _rel_url_path=${_url#"$_urls_prefix"}
- [ "$_rel_url_path" = "$_url" ] && continue
-
- _rel_path=$(_url_decode "$_rel_url_path")
- [ -z "$_rel_path" ] && continue
+ _url=${_url_and_path%% *}
+ _path=${_url_and_path#* }
- printf "Downloading %s to %s\n" "$_url" "$_rel_path"
- _curl --create-dirs -fsS "$_url" -o "$_rel_path"
+ printf "Downloading %s\n" "$_path" || :
+ # shellcheck disable=SC2086
+ curl ${CURL_EXTRA:-} --create-dirs -fsS "$_url" -o "$_path"
done
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]