要用 Rust 编写爬虫,最好先学习一下 tokio 库;此外还需要两个工具库:hyper(发送 HTTP 请求、获取响应数据)
和 html5ever(解析 HTML)。下面先给出一个例子,等有时间再补一篇实践文章。
extern crate hyper_tls;
extern crate html5ever;
extern crate hyper;
extern crate futures;
use hyper_tls::HttpsConnector;
use hyper::Client;
use hyper::rt::{self,Stream,Future};
use html5ever::rcdom::{RcDom,Handle};
use html5ever::tendril::StrTendril;
use html5ever::parse_document;
use html5ever::tendril::TendrilSink;
use html5ever::rcdom::NodeData;
use std::collections::HashSet;
use std::default::Default;
use std::sync::Arc;
use std::borrow::Borrow;
use std::clone::Clone;
use futures::future;
/// Collected link targets scraped from one HTML document:
/// `ahref` holds the `href` values of `<a>` tags and `imghref`
/// the `src`/`href` values of image links. `HashSet` deduplicates
/// repeated URLs automatically.
///
/// `Debug`, `Default`, `PartialEq`/`Eq` are derived in addition to
/// the original `Clone`: both fields are `HashSet<String>`, so all
/// of these derives are free and backward-compatible.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
struct FocusHref {
    ahref: HashSet<String>,
    imghref: HashSet<String>,
}
impl FocusHref {
fn new(url:String) -> impl Future<Item = FocusHref,Error = ()> {
let url:hyper::Uri = url.parse().unwrap();
let https = HttpsConnector::new(4).unwrap();
let client = Client::builder().build::<_,hy