A curl-Based Recursive Crawler Script for Downloading Software

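The Bash script below uses curl to walk an Apache mirror's directory listings recursively: it extracts the links on each index page, queues sub-directories for further crawling, records every file URL whose name ends in zip or gz, and finally downloads those files into a mirrored directory tree under /tmp.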

#!/usr/bin/env bash
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
export PATH
clear
Url="http://mirrors.cnnic.cn/apache/"
DownListFile="/tmp/downlist.txt"
DownListTmpFile="/tmp/tmplist.txt"
DownFileType="zip$|gz$"
DownList=""
UrlBack="$Url"
[ ! -f $DownListFile ] && touch $DownListFile || echo > $DownListFile
[ ! -f $DownListTmpFile ] && touch $DownListTmpFile || echo > $DownListTmpFile
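# CURL_URLS: fetch the page in $UrlBack and pull the href targets out of
# the directory listing, dropping blank lines, sort links ("?..."),
# absolute links ("http://...") and anchors ("#...")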
CURL_URLS(){
	Urls=`curl -s "$UrlBack" |awk -F "a href=\"" '{printf "%s\n",$2}'|awk -F "\"" '{printf "%s\n",$1}'|grep -vE "^$|^\?|^http:\/\/|^#"`
}
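# URL_LIST: turn each extracted href into an absolute URL and append it
# to the work queue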
URL_LIST(){
	CURL_URLS
	for i in $Urls ;do
		echo "$UrlBack$i" >> $DownListTmpFile
	done
}
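# RECURSIVE_SEARCH_URL: work through the queue; entries without a trailing
# slash are files to download, entries with one are sub-directories to crawl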
RECURSIVE_SEARCH_URL(){
	UrlBackTmps=`cat $DownListTmpFile`
	# Queue empty: the crawl is complete, so download and quit
	if [[ "$UrlBackTmps" == "" ]] ;then
		echo "no more pages to search"
		DOWNLOAD_FILE
		exit 0
	fi
	for j in $UrlBackTmps ;do
		if [[ "${j##*\/}" != "" ]] ;then
			# No trailing slash: a file, record it for download
			echo "$j" >> $DownListFile
		else
			# Trailing slash: a sub-directory, extract its links too
			UrlBack="$j"
			URL_LIST
		fi
		# Remove the processed entry from the queue, then recurse
		UrlTmps=`grep -vE "$j$" $DownListTmpFile`
		echo "$UrlTmps" > $DownListTmpFile
		RECURSIVE_SEARCH_URL
	done
}
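# DOWNLOAD_FILE: fetch every recorded URL that matches $DownFileType,
# recreating the remote directory structure under /tmp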
DOWNLOAD_FILE(){
	DownList=`grep -E "$DownFileType" $DownListFile`
	for k in $DownList ;do
		# Map the URL path onto /tmp by stripping the "http://" scheme
		FilePath=/tmp/${k#*\/\/}
		[ ! -d `dirname $FilePath` ] && mkdir -p `dirname $FilePath`
		# Skip files that were already downloaded on an earlier run
		[ ! -f $FilePath ] && cd `dirname $FilePath` && curl -O "$k"
	done
}
# Kick off: seed the queue from the root listing, then crawl; downloads
# start automatically once the queue drains (see RECURSIVE_SEARCH_URL)
URL_LIST
RECURSIVE_SEARCH_URL
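
A minimal usage sketch, assuming the script is saved as curl-crawler.sh (the filename is illustrative):

chmod +x curl-crawler.sh
./curl-crawler.sh

The downloads land under /tmp/mirrors.cnnic.cn/apache/..., mirroring the remote paths. Point Url at a different directory listing, or widen the DownFileType regex (for example "zip$|gz$|bz2$") to pick up other archive types.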