爬蟲有的時候會遇到被禁ip的情況,這個時候你可以找一下代理網站,抓取一下ip,來進行動態的輪詢就沒問題了,也可以用別人做好的第三方ip代理平臺,比如說crawlera。crawlera是一個利用代理IP地址池來做分布式下載的第三方平臺。【具體介紹請看這篇博客:http://blog.csdn.net/djd1234567/article/details/51741557】
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
import java.net.Proxy;
import java.net.URL;
import java.net.URLConnection;

/*
 * Downloads a page's HTML through an HTTP proxy, printing the result.
 *
 * Original author: Qian Yang, School of Management, Hefei University of Technology
 * 1563178220@QQ.com — blog: http://blog.csdn.net/qy20115549/
 *
 * NOTE(review): the original snippet declared "package daili;" and imported the
 * non-existent class "java.net.PRoxy"; both prevented the pasted file from
 * compiling/running standalone and were fixed here.
 */
public class GetHtml {

    public static void main(String[] args) throws UnsupportedEncodingException {
        // Proxy ip, proxy port, and the url to crawl.
        gethtml("183.136.217.74", 8080,
                "http://club.autohome.com.cn/bbs/forum-c-2533-1.html?orderby=dateline&qaType=-1");
    }

    /**
     * Fetches {@code url} through the HTTP proxy at {@code ip}:{@code port} and
     * prints the page body to stdout.
     *
     * @param ip   proxy server address
     * @param port proxy server port
     * @param url  page to download
     * @return the page body decoded as gb2312, or "" when the proxy connection fails
     * @throws UnsupportedEncodingException if the gb2312 charset is unsupported
     */
    public static String gethtml(String ip, int port, String url) throws UnsupportedEncodingException {
        URL target = null;
        try {
            target = new URL(url);
        } catch (MalformedURLException e1) {
            e1.printStackTrace();
        }
        // Route the request through the given HTTP proxy.
        InetSocketAddress addr = new InetSocketAddress(ip, port);
        Proxy proxy = new Proxy(Proxy.Type.HTTP, addr); // http proxy
        InputStream in = null;
        try {
            URLConnection conn = target.openConnection(proxy);
            conn.setConnectTimeout(3000);
            conn.setReadTimeout(3000); // also bound the read — a dead proxy could otherwise hang forever
            in = conn.getInputStream();
        } catch (Exception e) {
            // Banned/dead proxy IP — report which one failed
            // (original printed "ip  is not aviable" with no address).
            System.out.println("ip " + ip + " is not available");
        }
        String s = convertStreamToString(in); // null-safe: returns "" on failure
        System.out.println(s);
        return s;
    }

    /**
     * Reads the whole stream as gb2312 text (the target site's encoding),
     * appending a newline after each line. Returns "" for a null stream and
     * always closes the stream.
     *
     * @param is stream to drain; may be null
     * @return the decoded text, one '\n' per line read
     * @throws UnsupportedEncodingException if the gb2312 charset is unsupported
     */
    public static String convertStreamToString(InputStream is) throws UnsupportedEncodingException {
        if (is == null) {
            return "";
        }
        BufferedReader reader = new BufferedReader(new InputStreamReader(is, "gb2312"));
        StringBuilder sb = new StringBuilder();
        String line = null;
        try {
            while ((line = reader.readLine()) != null) {
                // Original appended the literal two characters "/n" — fixed to a newline.
                sb.append(line).append('\n');
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                // Closing the reader also closes the underlying stream
                // (original closed only the stream and leaked the reader).
                reader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return sb.toString();
    }
}
// From the original article: as shown in the figure below, this fetches and
// prints the HTML content of the given url.
新聞熱點
疑難解答