Update wget

This commit is contained in:
Edi Septriyanto
2021-12-18 16:12:05 +07:00
parent 7c88365784
commit 49832ebf68
11 changed files with 41 additions and 39 deletions

View File

@@ -29,33 +29,33 @@ echo ""
# Network speed test benchmark.
echo "### Network Speedtest Benchmark ###"
cachefly=$( wget -O /dev/null http://cachefly.cachefly.net/100mb.test 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
cachefly=$( wget -q -O /dev/null http://cachefly.cachefly.net/100mb.test 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
echo "Download speed from CacheFly: $cachefly "
leaseweb=$( wget -O /dev/null http://mirror.leaseweb.com/speedtest/100mb.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
leaseweb=$( wget -q -O /dev/null http://mirror.leaseweb.com/speedtest/100mb.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
echo "Download speed from Leaseweb, Haarlem, NL: $leaseweb "
linodeatl=$( wget -O /dev/null http://speedtest.atlanta.linode.com/100MB-atlanta.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
linodeatl=$( wget -q -O /dev/null http://speedtest.atlanta.linode.com/100MB-atlanta.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
echo "Download speed from Linode, Atlanta, GA: $linodeatl "
linodedltx=$( wget -O /dev/null http://speedtest.dallas.linode.com/100MB-dallas.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
linodedltx=$( wget -q -O /dev/null http://speedtest.dallas.linode.com/100MB-dallas.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
echo "Download speed from Linode, Dallas, TX: $linodedltx "
linodefmt=$( wget -O /dev/null http://speedtest.london.linode.com/100MB-london.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
linodefmt=$( wget -q -O /dev/null http://speedtest.fremont.linode.com/100MB-fremont.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
echo "Download speed from Linode, Fremont, US: $linodefmt "
linodenj=$( wget -O /dev/null http://speedtest.newark.linode.com/100MB-newark.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
linodenj=$( wget -q -O /dev/null http://speedtest.newark.linode.com/100MB-newark.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
echo "Download speed from Linode, Newark, NJ: $linodenj "
linodeuk=$( wget -O /dev/null http://speedtest.london.linode.com/100MB-london.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
linodeuk=$( wget -q -O /dev/null http://speedtest.london.linode.com/100MB-london.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
echo "Download speed from Linode, London, UK: $linodeuk "
linodejp=$( wget -O /dev/null http://speedtest.tokyo.linode.com/100MB-tokyo.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
linodejp=$( wget -q -O /dev/null http://speedtest.tokyo.linode.com/100MB-tokyo.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
echo "Download speed from Linode, Tokyo, JP: $linodejp "
linodesgp=$( wget -O /dev/null http://speedtest.singapore.linode.com/100MB-singapore.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
linodesgp=$( wget -q -O /dev/null http://speedtest.singapore.linode.com/100MB-singapore.bin 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
echo "Download speed from Linode, Singapore, SGP: $linodesgp "
slsea=$( wget -O /dev/null http://speedtest.sea01.softlayer.com/downloads/test100.zip 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
slsea=$( wget -q -O /dev/null http://speedtest.sea01.softlayer.com/downloads/test100.zip 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
echo "Download speed from Softlayer, Seattle, WA: $slsea "
slsjc=$( wget -O /dev/null http://speedtest.sjc01.softlayer.com/downloads/test100.zip 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
slsjc=$( wget -q -O /dev/null http://speedtest.sjc01.softlayer.com/downloads/test100.zip 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
echo "Download speed from Softlayer, San Jose, CA: $slsjc "
sldal=$( wget -O /dev/null http://speedtest.dal05.softlayer.com/downloads/test100.zip 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
sldal=$( wget -q -O /dev/null http://speedtest.dal05.softlayer.com/downloads/test100.zip 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
echo "Download speed from Softlayer, Dallas, TX: $sldal "
slwdc=$( wget -O /dev/null http://speedtest.wdc01.softlayer.com/downloads/test100.zip 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
slwdc=$( wget -q -O /dev/null http://speedtest.wdc01.softlayer.com/downloads/test100.zip 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
echo "Download speed from Softlayer, Washington, DC: $slwdc "
slsng=$( wget -O /dev/null http://speedtest.sng01.softlayer.com/downloads/test100.zip 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
slsng=$( wget -q -O /dev/null http://speedtest.sng01.softlayer.com/downloads/test100.zip 2>&1 | awk '/\/dev\/null/ {speed=$3 $4} END {gsub(/\(|\)/,"",speed); print speed}' )
echo "Download speed from Softlayer, Singapore: $slsng "

View File

@@ -536,7 +536,7 @@ add support for dynamic modules in a way compatible with ngx_pagespeed until 1.9
status "Detected debian-based distro."
install_dependencies "apt install ${INSTALL_FLAGS}" debian_is_installed \
build-essential zlib1g-dev libpcre3 libpcre3-dev unzip wget uuid-dev
build-essential zlib1g-dev libpcre3 libpcre3-dev unzip wget uuid-dev
if gcc_too_old; then
if [ ! -e /usr/lib/gcc-mozilla/bin/gcc ]; then
@@ -554,7 +554,7 @@ add support for dynamic modules in a way compatible with ngx_pagespeed until 1.9
status "Detected redhat-based distro."
install_dependencies "yum install ${INSTALL_FLAGS}" redhat_is_installed \
gcc-c++ pcre-devel zlib-devel make unzip wget libuuid-devel
gcc-c++ pcre-devel zlib-devel make unzip wget libuuid-devel
if gcc_too_old; then
if [ ! -e /opt/rh/devtoolset-2/root/usr/bin/gcc ]; then

View File

@@ -68,7 +68,7 @@ function init_fail2ban_install() {
fail2ban_download_link="https://github.com/fail2ban/fail2ban/archive/${FAIL2BAN_VERSION}.tar.gz"
if curl -sLI "${fail2ban_download_link}" | grep -q "HTTP/[.12]* [2].."; then
run wget "${fail2ban_download_link}" -O fail2ban.tar.gz -q --show-progress && \
run wget -q "${fail2ban_download_link}" -O fail2ban.tar.gz && \
run tar -zxf fail2ban.tar.gz && \
run cd fail2ban-*/ && \
run python setup.py install && \

View File

@@ -70,7 +70,7 @@ function init_imagemagick_install() {
fi
run cd "${BUILD_DIR}" && \
run wget "${IMAGEMAGICK_ZIP_URL}" -q --show-progress && \
run wget -q "${IMAGEMAGICK_ZIP_URL}" && \
run tar -zxf "${IMAGEMAGICK_FILENAME}" && \
run cd ImageMagick-*/ && \
run ./configure && \

View File

@@ -90,7 +90,7 @@ function init_memcached_install() {
fi
if curl -sLI "${MEMCACHED_DOWNLOAD_URL}" | grep -q "HTTP/[.12]* [2].."; then
run wget "${MEMCACHED_DOWNLOAD_URL}" -O memcached.tar.gz -q --show-progress && \
run wget -q "${MEMCACHED_DOWNLOAD_URL}" -O memcached.tar.gz && \
run tar -zxf memcached.tar.gz && \
run cd memcached-* && \

View File

@@ -138,7 +138,9 @@ function install_php() {
run pecl channel-update pear.php.net
if [[ "${#PHP_PECL_EXTS[@]}" -gt 0 ]]; then
run pecl -d "php_suffix=${PHPv}" install "${PHP_PECL_EXTS[@]}"
run pecl -d "php_suffix=${PHPv}" install \
-D 'enable-sockets="no" enable-openssl="yes" enable-http2="yes" enable-mysqlnd="yes" enable-swoole-json="yes" enable-swoole-curl="yes" enable-cares="yes" with-postgres="yes"' \
"${PHP_PECL_EXTS[@]}"
fi
# Install additional PHP extensions.
@@ -629,11 +631,11 @@ function install_ioncube_loader() {
ARCH=${ARCH:-$(uname -p)}
if [[ "${ARCH}" == "x86_64" ]]; then
run wget -q --show-progress "https://downloads2.ioncube.com/loader_downloads/ioncube_loaders_lin_x86-64.tar.gz"
run wget -q "https://downloads2.ioncube.com/loader_downloads/ioncube_loaders_lin_x86-64.tar.gz"
run tar -xzf ioncube_loaders_lin_x86-64.tar.gz
run rm -f ioncube_loaders_lin_x86-64.tar.gz
else
run wget -q --show-progress "https://downloads2.ioncube.com/loader_downloads/ioncube_loaders_lin_x86.tar.gz"
run wget -q "https://downloads2.ioncube.com/loader_downloads/ioncube_loaders_lin_x86.tar.gz"
run tar -xzf ioncube_loaders_lin_x86.tar.gz
run rm -f ioncube_loaders_lin_x86.tar.gz
fi
@@ -701,11 +703,11 @@ function install_sourceguardian_loader() {
ARCH=${ARCH:-$(uname -p)}
if [[ "${ARCH}" == "x86_64" ]]; then
run wget -q --show-progress "https://www.sourceguardian.com/loaders/download/loaders.linux-x86_64.tar.gz"
run wget -q "https://www.sourceguardian.com/loaders/download/loaders.linux-x86_64.tar.gz"
run tar -xzf loaders.linux-x86_64.tar.gz
run rm -f loaders.linux-x86_64.tar.gz
else
run wget -q --show-progress "https://www.sourceguardian.com/loaders/download/loaders.linux-x86.tar.gz"
run wget -q "https://www.sourceguardian.com/loaders/download/loaders.linux-x86.tar.gz"
run tar -xzf loaders.linux-x86.tar.gz
run rm -f loaders.linux-x86.tar.gz
fi

View File

@@ -99,7 +99,7 @@ function init_redis_install {
fi
if curl -sLI "${REDIS_DOWNLOAD_URL}" | grep -q "HTTP/[.12]* [2].."; then
run wget -q --show-progress -O "redis.tar.gz" "${REDIS_DOWNLOAD_URL}" && \
run wget -q -O "redis.tar.gz" "${REDIS_DOWNLOAD_URL}" && \
run tar -zxf "redis.tar.gz" && \
run cd redis-* && \
run make && \

View File

@@ -76,10 +76,10 @@ function init_tools_install() {
[ ! -d /usr/share/nginx/html/lcp/dbadmin ] && run mkdir -p /usr/share/nginx/html/lcp/dbadmin
# Overwrite existing files.
run wget https://github.com/vrana/adminer/releases/download/v4.8.1/adminer-4.8.1.php \
-O /usr/share/nginx/html/lcp/dbadmin/index.php -q --show-progress
run wget https://github.com/vrana/adminer/releases/download/v4.8.1/editor-4.8.1.php \
-O /usr/share/nginx/html/lcp/dbadmin/editor.php -q --show-progress
run wget -q https://github.com/vrana/adminer/releases/download/v4.8.1/adminer-4.8.1.php \
-O /usr/share/nginx/html/lcp/dbadmin/index.php
run wget -q https://github.com/vrana/adminer/releases/download/v4.8.1/editor-4.8.1.php \
-O /usr/share/nginx/html/lcp/dbadmin/editor.php
# Install File Manager.
# Experimental: Tinyfilemanager https://github.com/joglomedia/tinyfilemanager
@@ -92,8 +92,8 @@ function init_tools_install() {
CURRENT_DIR=$(pwd)
run cd /usr/share/nginx/html/lcp/filemanager && \
#run git pull -q
run wget https://raw.githubusercontent.com/joglomedia/tinyfilemanager/lemperfm_1.3.0/index.php \
-O /usr/share/nginx/html/lcp/filemanager/index.php -q --show-progress && \
run wget -q https://raw.githubusercontent.com/joglomedia/tinyfilemanager/lemperfm_1.3.0/index.php \
-O /usr/share/nginx/html/lcp/filemanager/index.php && \
run cd "${CURRENT_DIR}" || return 1
fi
@@ -104,8 +104,8 @@ function init_tools_install() {
fi
# Install Zend OpCache Web Admin.
run wget https://raw.github.com/rlerdorf/opcache-status/master/opcache.php \
-O /usr/share/nginx/html/lcp/opcache.php -q --show-progress
run wget -q https://raw.github.com/rlerdorf/opcache-status/master/opcache.php \
-O /usr/share/nginx/html/lcp/opcache.php
# Install phpMemcachedAdmin Web Admin.
if [ ! -d /usr/share/nginx/html/lcp/memcadmin/ ]; then

View File

@@ -133,7 +133,7 @@ function init_vsftpd_install() {
fi
run cd "${BUILD_DIR}" && \
run wget "${VSFTPD_ZIP_URL}" -q --show-progress && \
run wget -q "${VSFTPD_ZIP_URL}" && \
run tar -zxf "${VSFTPD_FILENAME}" && \
run cd vsftpd-*/ && \
run make && \

View File

@@ -392,7 +392,7 @@ function install_apf() {
echo "Installing APF+BFD firewall..."
if curl -sLI "https://github.com/rfxn/advanced-policy-firewall/archive/${APF_VERSION}.tar.gz" \
| grep -q "HTTP/[.12]* [2].."; then
run wget -q --show-progress "https://github.com/rfxn/advanced-policy-firewall/archive/${APF_VERSION}.tar.gz" && \
run wget -q "https://github.com/rfxn/advanced-policy-firewall/archive/${APF_VERSION}.tar.gz" && \
run tar -xf "${APF_VERSION}.tar.gz" && \
run cd advanced-policy-firewall-*/ && \
run bash install.sh && \