// Issue a plain GET and read the size the server reports for the resource.
// NOTE: response.ContentLength is copied verbatim from the Content-Length
// header. An intermediate proxy/cache or a server answering with chunked
// transfer encoding can make this value wrong (or -1) — which is exactly
// the fluctuating-size symptom described below. Don't trust it as the
// authoritative file size; count the bytes you actually read instead.
HttpWebRequest request = (HttpWebRequest)WebRequest.Create(RemoteUrl);
request.Method = "GET";
//request.Headers.Add("user-agent", "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.2;)");
// Do NOT set request.ContentLength here: it describes the *request* body,
// and a GET has none. (The original set it to 0, which is at best a no-op.)
request.Timeout = 20000;
// using ensures the response (and its connection) is released even on error;
// the original never closed it, leaking the connection from the pool.
using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
{
    long reportedLength = response.ContentLength; // -1 when chunked / header absent
}
为何 response.ContentLength 获取到的大小有时候会出问题？不是大了就是小了。
比如 http://XXXXXXX/1.rar 实际大小为 300K，有时 response.ContentLength 获取到的大小换算过来却是 1M 多；再次刷新页面后获取的大小又正常了。网络很稳定，这是怎么回事？谁知道原因？
// Repeated fragment from the post above: configures an already-created
// `request` and reads the reported Content-Length. Same caveats apply —
// the value comes straight from the response header and may be wrong
// behind a proxy/cache, or -1 for chunked responses.
request.Method = "GET";
//request.Headers.Add("user-agent", "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.2;)");
// Removed `request.ContentLength = 0`: ContentLength is the *request* body
// length and is meaningless on a GET.
request.Timeout = 20000;
// Dispose the response so the pooled connection is released (the original
// leaked it), and turn the dangling expression into a real statement.
using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
{
    long reportedLength = response.ContentLength; // -1 when the server chunks
}
为何 response.ContentLength 获取到的大小有时候会出问题？不是大了就是小了。
比如 http://XXXXXXX/1.rar 实际大小为 300K，有时 response.ContentLength 获取到的大小换算过来却是 1M 多；再次刷新页面后获取的大小又正常了。网络很稳定，这是怎么回事？谁知道原因？
如果你在 URL 后面加一个随机数参数，例如 http://www.sss.ccc/aa.rar?temp=随机数，
由于每次 URL 都不同，代理或缓存服务器不会返回过期的缓存响应，这可能改善这个状况；但这只是绕开缓存的权宜之计，仍不是好的方案（更可靠的做法是以实际读取到的字节数为准，而不是依赖 Content-Length 头）。
// Download a file over HTTP and save it under the web root (ASP.NET context:
// Server.MapPath resolves the virtual path). Adapted from
// http://dotnet.aspx.cc/file/HttpWebRequest-Download-Http-Url.aspx
// (the original had this URL fused onto the last code line, breaking the syntax).
String url = "http://www.dtan.so/Image/logo.gif";
String fileName = url.Substring(url.LastIndexOf("/") + 1);   // text after the last '/'
String refer = url.Substring(0, url.LastIndexOf("/") + 1);   // everything up to and including it

// Direct cast instead of `as` + no null check: WebRequest.Create for an
// http:// URL always yields an HttpWebRequest, and a cast failure should
// fail loudly rather than as a later NullReferenceException.
System.Net.HttpWebRequest req = (System.Net.HttpWebRequest)System.Net.WebRequest.Create(url);
req.AllowAutoRedirect = true;
req.Referer = refer;
req.UserAgent = "Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-CN; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13";

// using blocks guarantee the response, network stream and file handle are
// closed even when an exception interrupts the download — the original
// leaked all three on any error, and never closed the response stream at all.
using (System.Net.HttpWebResponse res = (System.Net.HttpWebResponse)req.GetResponse())
using (System.IO.Stream stream = res.GetResponseStream())
using (System.IO.FileStream fs = System.IO.File.Create(Server.MapPath(fileName)))
{
    byte[] buffer = new byte[32 * 1024];
    int bytesProcessed = 0;   // actual bytes received — more trustworthy than Content-Length
    int bytesRead;
    // while-loop (not do/while) so the final zero-byte read is never written.
    while ((bytesRead = stream.Read(buffer, 0, buffer.Length)) > 0)
    {
        fs.Write(buffer, 0, bytesRead);
        bytesProcessed += bytesRead;
    }
    // FileStream flushes on Dispose; no explicit Flush/Close needed.
}