以前看电影《社交网络》的时候,Mark 黑进了 7 栋公寓的服务器,批量获取学生的照片,当时觉得十分 cool!所以我也尝试做了一个网页爬虫,爬取一个网页上的图片!
来看代码:
package com.MySpider;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * A minimal web image crawler: fetches one HTML page, extracts every
 * {@code .jpg} URL it finds, and downloads each image to
 * {@code e:\spiderImage\<n>.jpg}.
 */
public class GetImage {

    // Queue of image URLs discovered on the page, consumed by init().
    private List<String> imageUrl = new ArrayList<String>();
    // Counter used to name downloaded files 0.jpg, 1.jpg, ...
    private int count = 0;

    /**
     * Matches an http URL ending in ".jpg". Compiled once instead of per line.
     * The original pattern "http://.{1,}.jpg" was greedy and left the dot
     * before "jpg" unescaped, so a line containing two image URLs produced a
     * single bogus match spanning both. The character class [^\s"'<>] stops
     * the match at whitespace, quotes, and tag delimiters.
     */
    private static final Pattern IMAGE_URL_PATTERN =
            Pattern.compile("http://[^\\s\"'<>]+\\.jpg");

    public static void main(String[] args) {
        String netUrl = "http://.com/"; // placeholder — set to the page you want to crawl
        new GetImage().init(netUrl);
    }

    /** Crawls the page, then downloads every queued image URL in order. */
    public void init(String netUrl) {
        getPage(netUrl);
        while (!imageUrl.isEmpty()) {
            getImage(imageUrl.remove(0));
        }
    }

    /**
     * Extracts all .jpg image URLs from one line of page text.
     *
     * @param line one line of the fetched HTML
     * @return the URLs found, in order of appearance; empty list if none
     */
    public List<String> extractImageUrls(String line) {
        List<String> found = new ArrayList<String>();
        Matcher matcher = IMAGE_URL_PATTERN.matcher(line);
        while (matcher.find()) {
            found.add(matcher.group());
        }
        return found;
    }

    /** Finds the image URLs in {@code line} and queues them for download. */
    public void getImageUrl(String line) {
        imageUrl.addAll(extractImageUrls(line));
    }

    /**
     * Fetches the page at {@code netUrl} line by line, collecting image URLs.
     * Network failures are reported to stdout and leave the queue as-is.
     */
    public void getPage(String netUrl) {
        try {
            URL myurl = new URL(netUrl);
            URLConnection myconn = myurl.openConnection();
            // try-with-resources closes the reader even on failure; explicit
            // UTF-8 avoids depending on the platform default charset.
            try (BufferedReader mybr = new BufferedReader(
                    new InputStreamReader(myconn.getInputStream(), StandardCharsets.UTF_8))) {
                String line;
                while ((line = mybr.readLine()) != null) {
                    getImageUrl(line); // scan this line for .jpg URLs
                }
            }
        } catch (MalformedURLException e) {
            System.out.println("url异常");
        } catch (IOException e) {
            System.out.println("url连接异常");
        }
    }

    /**
     * Downloads one image to {@code e:\spiderImage\<count>.jpg}.
     * The original finally block closed {@code myos} twice (the
     * {@code myin != null} guard also called {@code myos.close()}) and never
     * closed {@code myin}, leaking the input stream; try-with-resources
     * closes each stream exactly once, in reverse order of opening.
     *
     * @param imageUrl absolute URL of the image to fetch
     */
    public void getImage(String imageUrl) {
        try {
            URL myurl = new URL(imageUrl);
            URLConnection myconn = myurl.openConnection();
            try (InputStream myin = myconn.getInputStream();
                 BufferedOutputStream myos = new BufferedOutputStream(
                         new FileOutputStream("e:\\spiderImage\\" + count + ".jpg"))) {
                byte[] buff = new byte[1024];
                int num;
                while ((num = myin.read(buff)) != -1) {
                    myos.write(buff, 0, num);
                }
                // flush once at the end, not once per 1 KB chunk
                myos.flush();
            }
            count++; // advance the file-name counter only after a successful download
        } catch (MalformedURLException e) {
            System.out.println("url异常");
        } catch (IOException e) {
            System.out.println("url连接异常");
        }
    }
}
运行就ok了!
当然,有些网站的图片地址虽然爬到了,但是下载不下来!程序还有待改善!
下面是我爬到的成果!