I have recently been working on file downloading and looked into how multithreaded file download works. This post only covers the principle; for the concrete implementation see the code below, which is commented.
If you want to support resumable (breakpoint) downloads, all you need to do is record how many bytes each thread has downloaded so far. Persist that progress to a file or a database, and have each thread read its saved progress before it starts downloading (a minimal sketch of this idea follows).
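A minimal sketch of that idea, assuming the progress is kept in a java.util.Properties file next to the downloaded file. The ProgressStore class, its file name suffix and key format are illustrative choices of mine, not part of the original code:

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Properties;

// Minimal progress store for resumable downloads: each thread records how many
// bytes it has already written, keyed by its thread id.
public class ProgressStore {

    private final File progressFile;
    private final Properties props = new Properties();

    public ProgressStore(File downloadFile) throws IOException {
        progressFile = new File(downloadFile.getParent(), downloadFile.getName() + ".progress");
        if (progressFile.exists()) {
            try (InputStream in = new FileInputStream(progressFile)) {
                props.load(in); // restore progress from a previous run
            }
        }
    }

    /** How many bytes thread `id` has already downloaded (0 if it never ran). */
    public synchronized long get(int id) {
        return Long.parseLong(props.getProperty("thread." + id, "0"));
    }

    /** Record that thread `id` has downloaded `bytes` bytes so far. */
    public synchronized void set(int id, long bytes) throws IOException {
        props.setProperty("thread." + id, String.valueOf(bytes));
        try (OutputStream out = new FileOutputStream(progressFile)) {
            props.store(out, "download progress");
        }
    }
}

Each download thread would then start its Range request at start + store.get(id) and call store.set(id, bytesWritten) periodically as it writes.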
Main principle
To speed up the download, a fixed number of N threads is used per file, and each thread downloads one part of it. For example, if the file is 90 MB and 3 threads are used, the first thread downloads the byte range 0 to 30*1024*1024-1, the second downloads 30*1024*1024 to 60*1024*1024-1, and the third downloads 60*1024*1024 to 90*1024*1024-1; once all 3 threads finish, the parts together form the complete file. This relies on the HTTP Content-Length and Range headers: Content-Length gives the total length of the file, and Range asks the server for a sub-section of it, e.g. Range: bytes=0-10000 requests bytes 0 through 10000 of the file. A server that honors Range responds with status 206 (Partial Content).
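As a standalone illustration of that range arithmetic (the class name RangeSplitDemo and the hard-coded sizes are just for this example, not part of the downloader below):

public class RangeSplitDemo {
    public static void main(String[] args) {
        long totalLen = 90L * 1024 * 1024;                              // total file size: 90 MB
        int threadAmount = 3;                                           // number of download threads
        long threadLen = (totalLen + threadAmount - 1) / threadAmount;  // bytes per thread, rounded up

        for (int i = 0; i < threadAmount; i++) {
            long start = i * threadLen;                                  // first byte this thread fetches
            long end = Math.min(start + threadLen, totalLen) - 1;        // last byte, clamped to the end of the file
            System.out.println("Thread " + i + ": Range: bytes=" + start + "-" + end);
        }
    }
}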
The complete code is as follows:
Downloader.java
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.util.zip.GZIPInputStream;

import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.params.CookiePolicy;
import org.apache.http.params.CoreConnectionPNames;

import com.ricky.java.common.download.file.http.HttpClientManager;
import com.ricky.java.common.download.file.util.Constants;

public class Downloader {

    private String url;                                          // remote URL
    private File file;                                           // local target file
    private static final int THREAD_AMOUNT = 3;                  // number of download threads
    private static final String DIR_PATH = "D:/download/file";   // download directory
    private long totalLen;                                       // total file length in bytes
    private long threadLen;                                      // bytes each thread downloads
    private HttpClient mHttpClient = HttpClientManager.getHttpClient();

    public Downloader(String address) throws IOException {       // the download URL is passed in through the constructor
        url = address;
        file = new File(DIR_PATH, address.substring(address.lastIndexOf("?") + 1) + ".xml");
    }

    public void download() throws IOException {
        totalLen = getContentLength(url);                           // total file length
        threadLen = (totalLen + THREAD_AMOUNT - 1) / THREAD_AMOUNT; // bytes per thread, rounded up
        System.out.println("totalLen=" + totalLen + "***threadLen=" + threadLen);

        RandomAccessFile raf = new RandomAccessFile(file, "rws");   // create a local file of the same size as the remote one
        raf.setLength(totalLen);                                    // pre-allocate the file
        raf.close();

        for (int i = 0; i < THREAD_AMOUNT; i++)                     // start 3 threads, each downloads one part of the file
            new DownloadThread(i).start();
    }

    public long getContentLength(String address) {
        HttpGet httpget = null;
        try {
            httpget = new HttpGet(address);
            httpget.setHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.79 Safari/537.1");
            httpget.setHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8");
            httpget.getParams().setParameter(CoreConnectionPNames.SO_TIMEOUT, Constants.SO_TIMEOUT);
            httpget.getParams().setParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, Constants.CONNECTION_TIMEOUT);
            httpget.getParams().setParameter("http.protocol.cookie-policy", CookiePolicy.BROWSER_COMPATIBILITY);
            HttpResponse response = mHttpClient.execute(httpget);
            int status = response.getStatusLine().getStatusCode();
            if (status == HttpStatus.SC_OK) {
                return response.getEntity().getContentLength();     // Content-Length of the full file
            }
        } catch (ClientProtocolException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (httpget != null) {
                httpget.abort();
            }
        }
        return 0;
    }

    private class DownloadThread extends Thread {
        private int id;

        public DownloadThread(int id) {
            this.id = id;
        }

        public void run() {
            long start = id * threadLen;                              // first byte this thread downloads
            long end = Math.min(start + threadLen, totalLen) - 1;     // last byte, clamped to the end of the file
            System.out.println("Thread " + id + ": " + start + "-" + end);

            HttpGet httpget = null;
            try {
                httpget = new HttpGet(url);
                httpget.setHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.79 Safari/537.1");
                httpget.setHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8");
                httpget.setHeader("Range", "bytes=" + start + "-" + end); // request only this thread's byte range
                httpget.getParams().setParameter(CoreConnectionPNames.SO_TIMEOUT, Constants.SO_TIMEOUT);
                httpget.getParams().setParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, Constants.CONNECTION_TIMEOUT);
                httpget.getParams().setParameter("http.protocol.cookie-policy", CookiePolicy.BROWSER_COMPATIBILITY);
                HttpResponse response = mHttpClient.execute(httpget);
                int status = response.getStatusLine().getStatusCode();
                if (status == HttpStatus.SC_PARTIAL_CONTENT) {            // a server that honors Range replies with 206
                    InputStream in = response.getEntity().getContent();
                    Header contentEncoding = response.getFirstHeader("Content-Encoding");
                    if (contentEncoding != null && contentEncoding.getValue().equalsIgnoreCase("gzip")) {
                        System.out.println("gzip InputStream in response");
                        in = new GZIPInputStream(in);
                    }
                    RandomAccessFile raf = new RandomAccessFile(file, "rws");
                    raf.seek(start);                                      // write at this thread's offset in the shared file
                    byte[] buffer = new byte[1024];
                    int len;
                    while ((len = in.read(buffer)) != -1)
                        raf.write(buffer, 0, len);
                    raf.close();
                    System.out.println("Thread " + id + " finished");
                } else {
                    System.out.println("Thread " + id + " request failed, status=" + status);
                }
            } catch (ClientProtocolException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                if (httpget != null) {
                    httpget.abort();
                }
            }
        }
    }

    public static void main(String[] args) throws IOException {
        // new Downloader("http://dldir1.qq.com/qqfile/qq/QQ6.2/12179/QQ6.2.exe").download();
        new Downloader("http://api.t.dianping.com/n/api.xml?cityId=1").download();
    }
}
HttpClientManager.java
import org.apache.http.client.HttpClient;
import org.apache.http.conn.scheme.PlainSocketFactory;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.CoreConnectionPNames;
import org.apache.http.params.HttpParams;

import com.ricky.java.common.download.file.util.Constants;

public class HttpClientManager {

    private static HttpParams httpParams;
    private static PoolingClientConnectionManager cm;

    /** maximum total number of connections */
    public final static int MAX_TOTAL_CONNECTIONS = 200;

    /** maximum number of connections per route */
    public final static int MAX_ROUTE_CONNECTIONS = 300;

    static {
        SchemeRegistry schemeRegistry = new SchemeRegistry();
        schemeRegistry.register(new Scheme("http", 80, PlainSocketFactory.getSocketFactory()));
        schemeRegistry.register(new Scheme("https", 443, SSLSocketFactory.getSocketFactory()));

        cm = new PoolingClientConnectionManager(schemeRegistry);
        cm.setMaxTotal(MAX_TOTAL_CONNECTIONS);
        cm.setDefaultMaxPerRoute(MAX_ROUTE_CONNECTIONS);

        httpParams = new BasicHttpParams(); // assign the static field so getHttpClient() actually picks up these timeouts
        httpParams.setParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, Constants.CONNECTION_TIMEOUT);
        httpParams.setParameter(CoreConnectionPNames.SO_TIMEOUT, Constants.SO_TIMEOUT);
    }

    public static HttpClient getHttpClient() {
        return new DefaultHttpClient(cm, httpParams);
    }
}
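Both classes import a Constants class that is not included in the original post. A minimal sketch with assumed timeout values (the numbers are placeholders, not the author's settings):

package com.ricky.java.common.download.file.util;

public class Constants {

    // Assumed timeout values in milliseconds; the original Constants class is not shown,
    // so adjust these to your environment.
    public static final int CONNECTION_TIMEOUT = 10 * 1000; // time allowed to establish a connection
    public static final int SO_TIMEOUT = 30 * 1000;         // socket read timeout between data packets
}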