Add fallback logic in installer for fetching files. (#17045)

Still try `curl` first if it exists, but if that fails, fall back to trying wget instead.
Austin S. Hemmelgarn 2024-04-16 07:32:14 -04:00 committed by GitHub
parent 9d9fbfbdbb
commit 42595b3f26
3 changed files with 189 additions and 42 deletions
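
Every hunk below applies the same curl-then-wget fallback shape. As a reference while reading the diffs, here is a minimal, self-contained sketch of that pattern in POSIX sh; the helper name fetch_url and its exact flags are illustrative only and are not part of this commit.

# Minimal sketch of the fallback pattern used by the hunks below.
# fetch_url is a hypothetical helper, not a function from the installer scripts.
fetch_url() {
  url="${1}"
  checked=0    # becomes 1 once at least one downloader was available to try
  succeeded=0  # becomes 1 as soon as a downloader fetches the URL

  if command -v curl > /dev/null 2>&1; then
    checked=1
    if curl -fsSL --connect-timeout 10 --retry 3 "${url}"; then
      succeeded=1
    fi
  fi

  # Fall back to wget only if curl was missing or its attempt failed.
  if [ "${succeeded}" -eq 0 ] && command -v wget > /dev/null 2>&1; then
    checked=1
    if wget -T 15 -O - "${url}"; then
      succeeded=1
    fi
  fi

  if [ "${succeeded}" -eq 1 ]; then
    return 0   # fetched successfully
  elif [ "${checked}" -eq 1 ]; then
    return 1   # a downloader existed, but every attempt failed
  else
    echo "neither curl nor wget is available" >&2
    return 2   # nothing was available to try
  fi
}

Splitting checked from succeeded is what lets callers tell a failed download apart from a missing downloader (the fatal / return 255 branches in the real scripts), which the old single if/elif/else chain could not do once wget became a fallback rather than a mutually exclusive alternative.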

File 1 of 3:

@@ -103,13 +103,33 @@ check_for_curl() {
 get() {
   url="${1}"
 
+  checked=0
+  succeeded=0
+
   check_for_curl
 
   if [ -n "${curl}" ]; then
-    "${curl}" -q -o - -sSL --connect-timeout 10 --retry 3 "${url}"
-  elif command -v wget > /dev/null 2>&1; then
-    wget -T 15 -O - "${url}"
+    checked=1
+
+    if "${curl}" -q -o - -sSL --connect-timeout 10 --retry 3 "${url}"; then
+      succeeded=1
+    fi
+  fi
+
+  if [ "${succeeded}" -eq 0 ]; then
+    if command -v wget > /dev/null 2>&1; then
+      checked=1
+
+      if wget -T 15 -O - "${url}"; then
+        succeeded=1
+      fi
+    fi
+  fi
+
+  if [ "${succeeded}" -eq 1 ]; then
+    return 0
+  elif [ "${checked}" -eq 1 ]; then
+    return 1
   else
     fatal "I need curl or wget to proceed, but neither is available on this system." "L0002"
   fi
@@ -124,9 +144,29 @@ download_file() {
   check_for_curl
 
   if [ -n "${curl}" ]; then
-    run "${curl}" -q -sSL --connect-timeout 10 --retry 3 --output "${dest}" "${url}"
-  elif command -v wget > /dev/null 2>&1; then
-    run wget -T 15 -O "${dest}" "${url}"
+    checked=1
+
+    if run "${curl}" -q -sSL --connect-timeout 10 --retry 3 --output "${dest}" "${url}"; then
+      succeeded=1
+    else
+      rm -f "${dest}"
+    fi
+  fi
+
+  if [ "${succeeded}" -eq 0 ]; then
+    if command -v wget > /dev/null 2>&1; then
+      checked=1
+
+      if run wget -T 15 -O "${dest}" "${url}"; then
+        succeeded=1
+      fi
+    fi
+  fi
+
+  if [ "${succeeded}" -eq 1 ]; then
+    return 0
+  elif [ "${checked}" -eq 1 ]; then
+    return 1
   else
     echo >&2
     echo >&2 "Downloading ${name} from '${url}' failed because of missing mandatory packages."

File 2 of 3:

@@ -311,23 +311,31 @@ telemetry_event() {
 EOF
 )"
 
+  succeeded=0
+
   if [ -n "${CURL}" ]; then
-    "${CURL}" --silent -o /dev/null -X POST --max-time 2 --header "Content-Type: application/json" -d "${REQ_BODY}" "${TELEMETRY_URL}" > /dev/null
-  elif command -v wget > /dev/null 2>&1; then
-    if wget --help 2>&1 | grep BusyBox > /dev/null 2>&1; then
-      # BusyBox-compatible version of wget, there is no --no-check-certificate option
-      wget -q -O - \
-        -T 1 \
-        --header 'Content-Type: application/json' \
-        --post-data "${REQ_BODY}" \
-        "${TELEMETRY_URL}" > /dev/null
-    else
-      wget -q -O - --no-check-certificate \
-        --method POST \
-        --timeout=1 \
-        --header 'Content-Type: application/json' \
-        --body-data "${REQ_BODY}" \
-        "${TELEMETRY_URL}" > /dev/null
+    if "${CURL}" --silent -o /dev/null -X POST --max-time 2 --header "Content-Type: application/json" -d "${REQ_BODY}" "${TELEMETRY_URL}" > /dev/null; then
+      succeeded=1
     fi
   fi
+
+  if [ "${succeeded}" -eq 0 ]; then
+    if command -v wget > /dev/null 2>&1; then
+      if wget --help 2>&1 | grep BusyBox > /dev/null 2>&1; then
+        # BusyBox-compatible version of wget, there is no --no-check-certificate option
+        wget -q -O - \
+          -T 1 \
+          --header 'Content-Type: application/json' \
+          --post-data "${REQ_BODY}" \
+          "${TELEMETRY_URL}" > /dev/null
+      else
+        wget -q -O - --no-check-certificate \
+          --method POST \
+          --timeout=1 \
+          --header 'Content-Type: application/json' \
+          --body-data "${REQ_BODY}" \
+          "${TELEMETRY_URL}" > /dev/null
+      fi
+    fi
+  fi
 }
@@ -605,15 +613,38 @@ set_tmpdir() {
 check_for_remote_file() {
   url="${1}"
 
+  succeeded=0
+  checked=0
+
   if echo "${url}" | grep -Eq "^file:///"; then
     [ -e "${url#file://}" ] || return 1
     return 0
   elif [ -n "${NETDATA_ASSUME_REMOTE_FILES_ARE_PRESENT}" ]; then
     return 0
-  elif [ -n "${CURL}" ]; then
-    "${CURL}" --output /dev/null --silent --head --fail "${url}" || return 1
-  elif command -v wget > /dev/null 2>&1; then
-    wget -S --spider "${url}" 2>&1 | grep -q 'HTTP/1.1 200 OK' || return 1
+  fi
+
+  if [ -n "${CURL}" ]; then
+    checked=1
+
+    if "${CURL}" --output /dev/null --silent --head --fail "${url}"; then
+      succeeded=1
+    fi
+  fi
+
+  if [ "${succeeded}" -eq 0 ]; then
+    if command -v wget > /dev/null 2>&1; then
+      checked=1
+
+      if wget -S --spider "${url}" 2>&1 | grep -q 'HTTP/1.1 200 OK'; then
+        succeeded=1
+      fi
+    fi
+  fi
+
+  if [ "${succeeded}" -eq 1 ]; then
+    return 0
+  elif [ "${checked}" -eq 1 ]; then
+    return 1
   else
     fatal "${ERROR_F0003}" F0003
   fi
@@ -622,13 +653,39 @@ check_for_remote_file() {
 download() {
   url="${1}"
   dest="${2}"
 
+  succeeded=0
+  checked=0
+
   if echo "${url}" | grep -Eq "^file:///"; then
     run cp "${url#file://}" "${dest}" || return 1
-  elif [ -n "${CURL}" ]; then
-    run "${CURL}" --fail -q -sSL --connect-timeout 10 --retry 3 --output "${dest}" "${url}" || return 1
-  elif command -v wget > /dev/null 2>&1; then
-    run wget -T 15 -O "${dest}" "${url}" || return 1
+    return 0
+  fi
+
+  if [ -n "${CURL}" ]; then
+    checked=1
+
+    if run "${CURL}" --fail -q -sSL --connect-timeout 10 --retry 3 --output "${dest}" "${url}"; then
+      succeeded=1
+    else
+      rm -f "${dest}"
+    fi
+  fi
+
+  if [ "${succeeded}" -eq 0 ]; then
+    if command -v wget > /dev/null 2>&1; then
+      checked=1
+
+      if run wget -T 15 -O "${dest}" "${url}"; then
+        succeeded=1
+      fi
+    fi
+  fi
+
+  if [ "${succeeded}" -eq 1 ]; then
+    return 0
+  elif [ "${checked}" -eq 1 ]; then
+    return 1
   else
     fatal "${ERROR_F0003}" F0003
   fi
@@ -652,11 +709,31 @@ get_actual_version() {
 get_redirect() {
   url="${1}"
 
+  succeeded=0
+  checked=0
+
   if [ -n "${CURL}" ]; then
-    run sh -c "${CURL} ${url} -s -L -I -o /dev/null -w '%{url_effective}' | grep -Eo '[^/]+$'" || return 1
-  elif command -v wget > /dev/null 2>&1; then
-    run sh -c "wget -S -O /dev/null ${url} 2>&1 | grep -m 1 Location | grep -Eo '[^/]+$'" || return 1
+    checked=1
+
+    if run sh -c "${CURL} ${url} -s -L -I -o /dev/null -w '%{url_effective}' | grep -Eo '[^/]+$'"; then
+      succeeded=1
+    fi
+  fi
+
+  if [ "${succeeded}" -eq 0 ]; then
+    if command -v wget > /dev/null 2>&1; then
+      checked=1
+
+      if run sh -c "wget -S -O /dev/null ${url} 2>&1 | grep -m 1 Location | grep -Eo '[^/]+$'"; then
+        succeeded=1
+      fi
+    fi
+  fi
+
+  if [ "${succeeded}" -eq 1 ]; then
+    return 0
+  elif [ "${checked}" -eq 1 ]; then
+    return 1
   else
     fatal "${ERROR_F0003}" F0003
   fi

File 3 of 3:

@@ -396,15 +396,37 @@ check_for_curl() {
 _safe_download() {
   url="${1}"
   dest="${2}"
 
+  succeeded=0
+  checked=0
+
   check_for_curl
 
   if [ -n "${curl}" ]; then
-    "${curl}" -fsSL --connect-timeout 10 --retry 3 "${url}" > "${dest}"
-    return $?
-  elif command -v wget > /dev/null 2>&1; then
-    wget -T 15 -O - "${url}" > "${dest}"
-    return $?
+    checked=1
+
+    if "${curl}" -fsSL --connect-timeout 10 --retry 3 "${url}" > "${dest}"; then
+      succeeded=1
+    else
+      rm -f "${dest}"
+    fi
+  fi
+
+  if [ "${succeeded}" -eq 0 ]; then
+    if command -v wget > /dev/null 2>&1; then
+      checked=1
+
+      if wget -T 15 -O - "${url}" > "${dest}"; then
+        succeeded=1
+      else
+        rm -f "${dest}"
+      fi
+    fi
+  fi
+
+  if [ "${succeeded}" -eq 1 ]; then
+    return 0
+  elif [ "${checked}" -eq 1 ]; then
+    return 1
   else
     return 255
   fi
@@ -432,13 +454,21 @@ get_netdata_latest_tag() {
   check_for_curl
 
   if [ -n "${curl}" ]; then
-    tag=$("${curl}" "${url}" -s -L -I -o /dev/null -w '%{url_effective}' | grep -Eom 1 '[^/]*/?$')
-  elif command -v wget >/dev/null 2>&1; then
-    tag=$(wget -S -O /dev/null "${url}" 2>&1 | grep -m 1 Location | grep -Eo '[^/]*/?$')
-  else
+    tag=$("${curl}" "${url}" -s -L -I -o /dev/null -w '%{url_effective}')
+  fi
+
+  if [ -z "${tag}" ]; then
+    if command -v wget >/dev/null 2>&1; then
+      tag=$(wget -S -O /dev/null "${url}" 2>&1 | grep Location)
+    fi
+  fi
+
+  if [ -z "${tag}" ]; then
     fatal "I need curl or wget to proceed, but neither of them are available on this system." U0006
   fi
+
+  tag="$(echo "${tag}" | grep -Eom 1 '[^/]*/?$')"
 
   # Fallback case for simpler local testing.
   if echo "${tag}" | grep -Eq 'latest/?$'; then
     if _safe_download "${url}/latest-version.txt" ./ndupdate-version.txt; then