@@ -958,7 +958,49 @@ check_backup_conditions() {
     done
 }
 
-# Define file download function
+# Define download function
 download_file () {
-    wget $1 -q --show-progress --progress=bar:force
-}
+    local url=$1
+    local destination=$2
+    local force=$3
+
+    # Default destination is the current working directory
+    local dstopt=""
+
+    if [ ! -z "$(echo "$url" | grep -E "\.(gz|gzip|bz2|zip|xz)$")" ]; then
+        # When an archive file is downloaded it is first saved locally
+        dstopt="--directory-prefix=$ARCHIVE_DIR"
+        local is_archive="true"
+        local filename="${url##*/}"
+        if [ -z "$filename" ]; then
+            >&2 echo "[!] No filename was found in url, exiting ($url)"
+            exit 1
+        fi
+        if [ ! -z "$force" ] && [ -f "$ARCHIVE_DIR/$filename" ]; then
+            rm -f "$ARCHIVE_DIR/$filename"
+        fi
+    elif [ ! -z "$destination" ]; then
+        # Plain files are written to the specified location
+        dstopt="-O $destination"
+    fi
+    # Check for a corrupted archive and remove it so it gets redownloaded
+    if [ -f "$ARCHIVE_DIR/$filename" ] && [ "$is_archive" = "true" ]; then
+        tar -tzf "$ARCHIVE_DIR/$filename" > /dev/null 2>&1
+        if [ $? -ne 0 ]; then
+            >&2 echo "[!] Archive $ARCHIVE_DIR/$filename is corrupted, redownloading"
+            rm -f "$ARCHIVE_DIR/$filename"
+        fi
+    fi
+
+    if [ ! -f "$ARCHIVE_DIR/$filename" ]; then
+        wget "$url" -q $dstopt --show-progress --progress=bar:force --limit-rate=3m
+    fi
+
+    if [ ! -z "$destination" ] && [ "$is_archive" = "true" ]; then
+        if [ "$destination" = "-" ]; then
+            cat "$ARCHIVE_DIR/$filename"
+        elif [ -d "$(dirname "$destination")" ]; then
+            cp "$ARCHIVE_DIR/$filename" "$destination"
+        fi
+    fi
+}
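
For context, a minimal sketch of how the new function might be invoked elsewhere in the script, assuming ARCHIVE_DIR is defined beforehand; the URLs and paths below are placeholders, not part of this commit:

    # Hypothetical cache directory (must exist before calling download_file)
    ARCHIVE_DIR="/var/cache/downloads"

    # Archive URL: cached under $ARCHIVE_DIR, verified with tar, then copied to the destination
    download_file "https://example.com/data.tar.gz" "/tmp/data.tar.gz"

    # Plain file: written directly to the destination via wget -O
    download_file "https://example.com/config.txt" "/tmp/config.txt"

    # Non-empty third argument forces removal of a cached archive before downloading again
    download_file "https://example.com/data.tar.gz" "/tmp/data.tar.gz" "force"

Passing "-" as the destination for an archive streams the cached file to stdout instead of copying it.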