#!/bin/bash
# Download Freetz packages from a randomly chosen mirror server. There is
# an empty input field for a user-defined server in 'menuconfig'. Additionally,
# another download source may be specified on the command line as $3. $2 is the
# name of the file to be downloaded.
#
# This script is designed to be called by makefile includes (*.mk), but can
# also be called directly. It expects to find a file with the necessary variables
# at '../.config' relative to this script. The variables are:
# - FREETZ_DL_SITE_USER (URL, e.g. http://my.server.org)

# supported checksum algorithms, indexed by the length of the hex digest
CHECKSUM_ALGOS[32]="md5"
CHECKSUM_ALGOS[40]="sha1"
CHECKSUM_ALGOS[64]="sha256"
CHECKSUM_ALGOS[128]="sha512"

DOT_CONFIG=$FREETZ_BASE_DIR/.config

FREETZ_DL_SITE_1="http://freetz.magenbrot.net"
FREETZ_DL_SITE_2="http://freetz.3dfxatwork.de"
FREETZ_DL_SITE_3="http://freetz.wirsind.info"

helpmsg() {
cat << 'EOF'
freetz_download - download Freetz packages

Usage: freetz_download (-?|--help) | [--no-append-servers] ( (<download-dir>|check) <filename> [<main-url>[,<mirror-url>,...]] [<checksum>] )

 check         - only check availability, do not download
 download-dir  - target directory for download file
 filename      - download file name
 main-url      - url without filename for primary download
 checksum      - file checksum (supported algorithms: md5, sha1, sha256, sha512)
 -?, --help    - print this help text

Examples:
 freetz_download dl package.tar.bz2
 freetz_download dl package.tar.bz2 http://myserver.com/freetz
 freetz_download dl package.tar.bz2 http://myserver.com/freetz,ftp://myserver-mirror.com/pub/freetz

Debugging: just set DEBUG=1, like this:
 DEBUG=1 freetz_download dl package.tar.bz2
EOF
}

do_download() {
    # $1 = target directory, $2 = download URL without file, $3 = (remote) file name
    local fullURL="${2%/}/$3"
    echo
    [ "$DEBUG" == "1" ] && echo \
        "wget -nd -t3 --timeout=20 --no-check-certificate --passive-ftp -N -P \"${1%/}/\" \"${fullURL}\""
    wget -nd -t3 --timeout=20 --no-check-certificate --passive-ftp -N -P "${1%/}/" "${fullURL}"
    wget_result=$?
    if [ "$wget_result" != "0" ]; then
        echo "Download failed - \"${fullURL}\" -> error code $wget_result" >&2
        rm -f "$1/$3"
        return $wget_result
    fi
    echo "Download succeeded - \"${fullURL}\" -> saved to folder \"$1\""
}

do_check() {
    local mode wget_output wget_result
    for mode in --spider --output-document=/dev/null; do
        [ "$DEBUG" == "1" ] && \
            echo "LC_ALL=C wget -t3 --timeout=20 --no-check-certificate --passive-ftp -S $mode \"$1\" 2>&1"
        wget_output=$(LC_ALL=C wget -t3 --timeout=20 --no-check-certificate --passive-ftp -S $mode "$1" 2>&1)
        wget_result=$?
        if [ "$wget_result" != "0" ]; then
            # workaround wget spider-mode bug (false negative): known to happen with all sites
            # with forbidden directory listing. wget in spider-mode first checks the directory
            # containing the downloaded file. After getting "403 Forbidden" it stops and doesn't
            # make any further checks.
            if echo "$wget_output" | grep -qEi "403 Forbidden"; then
                continue
            fi
            # real negative
            return $wget_result
        fi
        # workaround wget bug: while checking the availability of a file on an ftp site,
        # it prints "no such file" but doesn't set the return code appropriately
        if echo "$wget_output" | grep -qEi "(no such file|file does not exist)"; then
            return 1
        fi
        # workaround yet another hassle: web servers redirecting to some "file does not exist" page,
        # e.g. 404.html. heuristic - we never download html pages, so the content type of the
        # downloaded file should be something other than 'text/html'.
        # note: disabling redirection with "--max-redirect=0" does not work, as it is legitimate
        # behavior for the mirror sites to redirect to some geographically nearby location.
        if echo "$wget_output" | grep -i "Content-Type" | tail -1 | grep -qi "text/html"; then
            return 1
        fi
        break
    done
    return 0
}
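# Illustrative example (server and file names hypothetical): do_check is driven by
# the "check" mode of this script, which probes each mirror without downloading:
#   freetz_download check package.tar.bz2 http://myserver.com/freetz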
# $1 - url
# $2 - subdir
# $3 - version
do_checkout_cvs() {
    local module_name
    module_name="${2/-$3/}"
    (cvs -d:"$1" export -D "$3" "$module_name" || cvs -d:"$1" export -r "$3" "$module_name") && mv "$module_name" "$2"
}

do_checkout_svn() {
    local extra_flags
    svn help export | grep -q trust-server-cert && extra_flags="--trust-server-cert"
    svn export --non-interactive $extra_flags -r$3 "$1" "$2"
}

do_checkout_git() {
    GIT_SSL_NO_VERIFY=true git clone "$1" "$2" \
    && pushd "$2" >/dev/null \
    && git checkout "$3" \
    && LAST_COMMIT_TIMESTAMP=$(git log -1 --pretty=format:%cD) \
    && if [ -f ".gitmodules" -a -z "$NO_SUBMODULES" ]; then git submodule init && git submodule update; fi \
    && rm -rf .git \
    && popd >/dev/null
}

do_checkout_git_no_submodules() {
    NO_SUBMODULES=1 do_checkout_git "$@"
}

# Note: the remote repository must support the "git archive" command
# $1 is expected to have the following format:
#   repository_url,path1,path2,...,pathN
# where
#   repository_url - a repository url supporting the archive command, e.g. git://repo.or.cz/git.git
#   pathX          - a path to any file or directory within the repository
# Only path1,...,pathN will be transmitted, saving both bandwidth and processing power.
do_checkout_git_archive() {
    local repository paths
    repository=$(echo "$1" | cut -d , -f 1)
    paths=$(echo "$1" | cut -d , -f 2- | tr , " ")
    git archive --format=tar --remote="$repository" --prefix="$2/" "$3" $paths | tar -x
}

do_checkout_bzr() {
    bzr export -r"$3" "$2" "$1"
}

do_checkout_hg() {
    hg clone -r "$3" "$1" "$2" && (cd "$2" && rm -rf .hg .hgignore .hgtags)
}

do_checkout_darcs() {
    darcs get -t "$3" "$1" "$2" && (cd "$2" && rm -rf _darcs)
}

# Regular use: help parameter -> exit without error
if [ "$1" == "-?" -o "$1" == "--help" ]; then
    helpmsg
    exit 0
fi

opt_append_servers=y
while [ $# -gt 1 ]; do
    case "$1" in
        --no-append-servers)
            opt_append_servers=n
            shift
            ;;
        *)
            break
            ;;
    esac
done

# Wrong number of parameters -> exit with error
if [ $# -lt 2 -o $# -gt 4 ]; then
    helpmsg >&2
    exit 1
fi

DL_DIR="$1"
DL_FILE="$2"
URLs="$3"
CHECKSUM="$(echo ${4,,})" # convert to lower-case and trim whitespace (the checksum length is used to determine the checksum algorithm)

# Import Freetz config variables
[ -e "$DOT_CONFIG" ] && . "$DOT_CONFIG"
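# Illustrative example (file and server names hypothetical; the digest shown is the
# sha256 of an empty file): a 64-character checksum selects sha256 via CHECKSUM_ALGOS[64]:
#   freetz_download dl package.tar.bz2 http://myserver.com/freetz \
#       e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855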
if [ "$DEBUG" == "1" ]; then
    echo "freetz_download parameters:"
    echo "  DL_DIR=$DL_DIR"
    echo "  DOT_CONFIG=$DOT_CONFIG"
    echo "  DL_FILE=$DL_FILE"
    echo "  URLs=$URLs"
    echo "  CHECKSUM=$CHECKSUM"
fi

# fill array 'sites' with download server URLs, first the primary site and package-specific mirrors (if specified)
num_sites=0
if [ -n "$URLs" ]; then
    # check if URLs contains a magic sequence corresponding to one of the supported VCSs
    supportedVCSs="cvs|svn|git|git_no_submodules|git_archive|bzr|hg|darcs"
    VCS="$(echo "${URLs}" | sed -nr -e "s,^(${supportedVCSs})(@|://).*,\1,p")"
    if [ -n "${VCS}" ]; then
        URLs="$(echo "${URLs}" | sed -r -e "s,^(${supportedVCSs})@(.*),\2,")"
        num_sites=0
        CHECKSUM=""
    else
        URLs="${URLs//@1&1/@1u1}" # replace our 1&1 alias with one containing no special shell characters
        # replace special shell characters with their percent-encoded counterparts
        # (in order to be able to apply shell brace-expansion)
        URLs="${URLs//\ /%20}"
        URLs="${URLs//\!/%21}"
        URLs="${URLs//\$/%24}"
        URLs="${URLs//\&/%26}"
        for url in $(eval echo "{${URLs},}"); do # wrap URLs with curly braces to apply brace-expansion
            if [ "${url:0:3}" == "@SF" ]; then
                # give sourceforge a few more tries, because it redirects to different mirrors
                subpath=${url/@SF\//}
                for (( n=0; n < 5; n++ )); do
                    sites[(( num_sites++ ))]=http://downloads.sourceforge.net/$subpath
                done
            elif [ "${url:0:4}" == "@AVM" ]; then
                subpath=${url/@AVM\//}
                sites[(( num_sites++ ))]=http://download.avm.de/fritzbox/$subpath
                sites[(( num_sites++ ))]=http://download.avm.de/archive/fritz.box/$subpath
                sites[(( num_sites++ ))]=http://download.avm.de/$subpath
                sites[(( num_sites++ ))]=ftp://ftp.avm.de/fritzbox/$subpath
                sites[(( num_sites++ ))]=ftp://ftp.avm.de/archive/fritz.box/$subpath
                sites[(( num_sites++ ))]=https://service.avm.de/downloads/$subpath
                sites[(( num_sites++ ))]=http://avm.de/$subpath
                sites[(( num_sites++ ))]=http://www.avm.de/de/Service/Service-Portale/$subpath
            elif [ "${url:0:4}" == "@GNU" ]; then
                subpath=${url/@GNU\//}
                sites[(( num_sites++ ))]=http://ftpmirror.gnu.org/$subpath # automatically chooses a nearby and up-to-date mirror
                sites[(( num_sites++ ))]=http://ftp.gnu.org/gnu/$subpath
            elif [ "${url:0:7}" == "@KERNEL" ]; then
                subpath=${url/@KERNEL\//}
                sites[(( num_sites++ ))]=https://kernel.org/pub/$subpath
                sites[(( num_sites++ ))]=ftp://ftp.kernel.org/pub/$subpath
            elif [ "${url:0:8}" == "@TELEKOM" ]; then
                subpath=${url/@TELEKOM\//}
                sites[(( num_sites++ ))]=https://www.telekom.de/hilfe/downloads/$subpath
                sites[(( num_sites++ ))]=http://www.telekom.de/dlp/eki/downloads/$subpath
                sites[(( num_sites++ ))]=http://www.t-home.de/dlp/eki/downloads/$subpath
                sites[(( num_sites++ ))]=http://hilfe.telekom.de/dlp/eki/downloads/$subpath
            elif [ "${url:0:4}" == "@1u1" ]; then
                subpath=${url/@1u1\//}
                sites[(( num_sites++ ))]=http://acsfwdl.1und1.de/avm/$subpath
            elif [ "${url:0:4}" == "@EWE" ]; then
                subpath=${url/@EWE\//}
                sites[(( num_sites++ ))]=http://download.ewe.de/avm/$subpath
            elif [ "${url:0:6}" == "@M-NET" ]; then
                subpath=${url/@M-NET\//}
                sites[(( num_sites++ ))]=http://www.m-net.de/fileadmin/downloads/Links/$subpath
            elif [ "${url:0:7}" == "@APACHE" ]; then
                subpath=${url/@APACHE\//}
                sites[(( num_sites++ ))]=http://www.eu.apache.org/dist/$subpath
                sites[(( num_sites++ ))]=http://www.apache.org/dist/$subpath
                sites[(( num_sites++ ))]=http://apache.openmirror.de/$subpath
                sites[(( num_sites++ ))]=http://archive.apache.org/dist/$subpath
            elif [ "${url:0:6}" == "@SAMBA" ]; then
                subpath=${url/@SAMBA\//}
                sites[(( num_sites++ ))]=https://www.samba.org/ftp/$subpath
                sites[(( num_sites++ ))]=https://ftp.samba.org/pub/$subpath
            else
                sites[(( num_sites++ ))]=$url
            fi
        done
    fi
fi
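# Illustrative note (URLs hypothetical): a list like "http://a.example/x,http://b.example/y"
# is wrapped as "{http://a.example/x,http://b.example/y,}" and brace-expanded into the
# individual URLs; the trailing comma forces expansion even when only a single URL is given
# (the resulting empty word is simply dropped by the loop's word splitting).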
# Get Freetz mirror sites in array
FREETZ_SITES=( $(
    for (( i=1; i <= 3 ; i++ )); do
        eval "echo \$FREETZ_DL_SITE_$i"
    done
) )

# Add user-defined mirror (as 4th entry, after the three built-in mirrors)
if [ -n "$FREETZ_DL_SITE_USER" ]; then
    FREETZ_SITES[3]=$FREETZ_DL_SITE_USER
fi

# Fill array 'sites' with download server URLs, appending the Freetz mirrors in random order
if [ "$opt_append_servers" = y ]; then
    for (( n=${#FREETZ_SITES[@]}; n > 0; n-- )); do
        # selection without replacement: pick a random remaining mirror,
        # then move the last remaining mirror into the freed slot
        p=$(( RANDOM % n ))
        sites[(( num_sites++ ))]=${FREETZ_SITES[$p]}
        FREETZ_SITES[$p]=${FREETZ_SITES[$((n-1))]}
    done
fi

# Loop over servers until a download succeeds or all requests have failed.
IFS=$'\n'
for i in ${sites[@]}; do
    if [ "${DL_DIR:0:5}" == "check" ]; then
        do_check "$i/$DL_FILE"
        if [ "$?" != "0" ]; then
            continue
        fi
    else
        do_download "$DL_DIR" "$i" "$DL_FILE"
        if [ "$?" != "0" ]; then
            continue
        fi
        if [ -n "$CHECKSUM" ]; then
            CHECKSUM_ALGO="${CHECKSUM_ALGOS[${#CHECKSUM}]}"
            if [ -n "${CHECKSUM_ALGO}" ]; then
                CHECKSUM_PROG="$(dirname "$0")/${CHECKSUM_ALGO}sum"
                [ ! -x "$CHECKSUM_PROG" ] && CHECKSUM_PROG="$(which ${CHECKSUM_ALGO}sum)"
            fi
            if [ -x "$CHECKSUM_PROG" ]; then
                FILE_CHECKSUM=$($CHECKSUM_PROG "$DL_DIR/$DL_FILE" | sed 's/ .*//')
                if [ "$CHECKSUM" != "$FILE_CHECKSUM" ]; then
                    echo "${CHECKSUM_ALGO^^} mismatch for $DL_DIR/$DL_FILE: expected $CHECKSUM, got $FILE_CHECKSUM" 1>&2
                    rm -f "$DL_DIR/$DL_FILE"
                    continue
                else
                    echo "${CHECKSUM_ALGO^^} verified for $DL_DIR/$DL_FILE: $FILE_CHECKSUM"
                fi
            fi
        fi
    fi
    exit 0
done
unset IFS

if [ -n "$VCS" ]; then
    echo
    case "$DL_FILE" in
        *.tar.gz)  COMPRESSION_FLAG=z ;;
        *.tar.bz2) COMPRESSION_FLAG=j ;;
        *.tar.xz)  COMPRESSION_FLAG=J ;;
        *) { echo "Failed to detect compression method from $DL_FILE"; exit 1; } ;;
    esac
    SUBDIR=$(echo "$DL_FILE" | sed -r -e 's,(.*)[.]tar[.](gz|bz2|xz)$,\1,')
    VERSION=$(echo "$SUBDIR" | sed -r -e 's,[^-]*-(.+),\1,')
    [ -n "$VERSION" ] || { echo "Failed to detect version from $DL_FILE"; exit 1; }
    TMP_DIR=$(mktemp -d -t freetzXXX) || { echo "Error creating temporary dir."; exit 1; }
    echo "Checking out files from the $VCS repository..." \
    && ( \
        cd "$TMP_DIR" \
        && rm -rf "$SUBDIR" \
        && [ ! -d "$SUBDIR" ] \
        && do_checkout_"$VCS" "$URLs" "$SUBDIR" "$VERSION" \
        && echo "Packing checkout..." \
        && { \
            find "$SUBDIR" -not -type d | LC_ALL=C sort > "$SUBDIR.list.sorted"; \
            tar \
                --format=gnu \
                --numeric-owner --owner=0 --group=0 \
                ${LAST_COMMIT_TIMESTAMP:+--mtime="${LAST_COMMIT_TIMESTAMP}"} \
                -T "$SUBDIR.list.sorted" \
                -c${COMPRESSION_FLAG}f "$DL_FILE"; \
        } \
    ) \
    && mv "$TMP_DIR/$DL_FILE" "$DL_DIR/" \
    && rm -rf "$TMP_DIR" \
    && exit 0

    rm -f "$DL_DIR/$DL_FILE"
    rm -rf "$TMP_DIR"
fi
exit 1
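# Illustrative sketch (repository URL hypothetical): a VCS source is selected via the
# "<vcs>@<url>" magic prefix; the subdirectory and version are derived from the file
# name, so the following checks out tag 1.2.3 and packs it as package-1.2.3.tar.gz:
#   freetz_download dl package-1.2.3.tar.gz "git@https://git.example.org/package.git"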