爱都会爱上对啊十多哀思奥i海大事
暗示wesf asd放arg爱尔g虽然而发ae fad
撒旦法
啊ad放
爱上的想法
adf 暗示fasd个asd光伏ad放ase放
asef awef wea放ae发送到
gherts好srt gsergrthdert6 hawe个rtjert heryjt7yu十二
gsert好
package main
import "sync"
import "strconv"
import "log"
import "dockerServer/common"
import "github.com/PuerkitoBio/goquery"
// getLink crawls pageNum pages starting at pageLink, collects image URLs
// from <img src> attributes (skipping any URL containing "www", per
// common.StrInStrings), then downloads each one as "<i>.jpg" into saveCate.
// It signals wg when finished, so it is safe to run as a goroutine.
func getLink(pageLink string, pageNum int, saveCate string, wg *sync.WaitGroup) {
	defer wg.Done()
	var finalList []string
	for i := 0; i < pageNum; i++ {
		// Page 1 is the bare link; later pages append "/<n>".
		tempLink := pageLink
		if i != 0 {
			tempLink = pageLink + "/" + strconv.Itoa(i+1)
		}
		resp, err := common.GetHTMLResponse(tempLink)
		if err != nil {
			log.Println("获取页面报错 :", err)
			continue
		}
		doc, err := goquery.NewDocumentFromReader(resp.Body)
		// Close immediately instead of defer: a defer inside the loop would
		// keep every response body open until the function returns (leak).
		resp.Body.Close()
		if err != nil {
			log.Println("HTML错误", err)
			continue
		}
		// Collect candidate image links from the parsed document.
		doc.Find("img").Each(func(_ int, s *goquery.Selection) {
			if src, ok := s.Attr("src"); ok && !common.StrInStrings("www", src) {
				finalList = append(finalList, src)
			}
		})
	}
	// Save each collected image under an index-based filename.
	for i, link := range finalList {
		common.SavePic(link, saveCate+"/"+strconv.Itoa(i)+".jpg")
	}
}
// main starts one crawler goroutine per seed URL and waits for all of
// them to complete before logging that the crawl is done.
func main() {
	savePath := common.CrawlerFileAssistant("无限图", "CiCi", "旗袍")

	// Seed URLs mapped to the number of pages to crawl for each.
	seeds := map[string]int{
		"https://www.wxytw.com/28681.html": 14,
	}

	var wg sync.WaitGroup
	for link, pages := range seeds {
		wg.Add(1)
		go getLink(link, pages, savePath, &wg)
	}
	wg.Wait()

	log.Println("爬虫结束")
}
撒娇地hi阿U所困惑对啊是否UI阿谁
ASDF
AS F
SD F
ASD
F SDA
F AS
F AWE
F AWR
YGHSE T
HDRT
B
SDTRB
SRTH
drg
serg
ser
g sdfr
g se
g se
gsert
g se
g
erthert
hjsdeg