简单的网络爬虫程序(Web Crawlers)
程序比较简单，但是能体现基本原理。
package com.wxisme.webcrawlers; import java.io.*; import java.net.*; /** * Web Crawlers * @author wxisme * */ public class WebCrawlers { public static void main(String[] args) { URL url = null; try { url = new URL("http://www.baidu.com"); } catch (MalformedURLException e) { System.out.println("域名不合法!"); e.printStackTrace(); } InputStream is = null; try { is = url.openStream(); } catch (IOException e) { e.printStackTrace(); } FileOutputStream fos = null; try { fos = new FileOutputStream("E:\\baidu.txt"); } catch (FileNotFoundException e) { System.out.println("文件创建失败!"); e.printStackTrace(); } //使用转换流设置字符集 BufferedReader br = null; try { br = new BufferedReader(new InputStreamReader( is,"utf-8")); } catch (UnsupportedEncodingException e) { System.out.println("字符集设置失败!"); e.printStackTrace(); } BufferedWriter bw = new BufferedWriter(new OutputStreamWriter( fos)); String msg = null; try { while((msg = br.readLine()) != null) { bw.write(msg); bw.newLine(); } } catch (IOException e) { System.out.println("文件操作失败!"); e.printStackTrace(); } finally { try { bw.flush(); } catch (IOException e) { e.printStackTrace(); } closeAll(is, fos, br, bw); } } public static void closeAll(Closeable ... io) { for(Closeable temp : io) { if(temp != null) { try { temp.close(); } catch (IOException e) { System.out.println("文件关闭失败!"); e.printStackTrace(); } } } } }
郑重声明:本站内容如果来自互联网及其他传播媒体,其版权均属原媒体及文章作者所有。转载目的在于传递更多信息及用于网络分享,并不代表本站赞同其观点和对其真实性负责,也不构成任何其他建议。