[Daily] The Go Programming Language -- A Concurrent Web Crawler

Two versions of the concurrent web crawler from chapter 8 of the book: crawler.go limits concurrency with a counting semaphore (a buffered channel of 20 tokens), while crawler2.go limits the goroutines themselves with a fixed pool of 20 workers. A sketch of the shared links package follows the first listing.

crawler.go 

package main

import (
        "fmt"
        "links" // the link-extraction package; in the book this is gopl.io/ch5/links
        "log"
        "os"
)

func main() {
        worklist := make(chan []string) // lists of URLs, may have duplicates

        // Start with the command-line arguments.
        go func() { worklist <- os.Args[1:] }()

        // Crawl the web concurrently.
        seen := make(map[string]bool)
        for list := range worklist {
                for _, link := range list {
                        if !seen[link] {
                                seen[link] = true
                                go func(link string) {
                                        worklist <- crawl(link)
                                }(link)
                        }
                }
        }
}

// tokens is a counting semaphore used to
// enforce a limit of 20 concurrent requests.
var tokens = make(chan struct{}, 20)

// crawl prints a URL, then extracts and returns all links found on that page.
func crawl(url string) []string {
        fmt.Println(url)
        tokens <- struct{}{} // acquire a token
        list, err := links.Extract(url)
        <-tokens // release the token
        if err != nil {
                log.Print(err)
        }
        return list
}
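
Both programs depend on a links package whose only exported function is Extract; in the book this is gopl.io/ch5/links. For reference, here is a minimal sketch of such a package. It assumes the golang.org/x/net/html parser is available; the error messages and traversal helper are illustrative, not the book's exact code.

links.go

// Package links provides a link-extraction function.
package links

import (
        "fmt"
        "net/http"

        "golang.org/x/net/html"
)

// Extract performs an HTTP GET on url, parses the response as HTML,
// and returns the links found in the document, resolved to absolute form.
func Extract(url string) ([]string, error) {
        resp, err := http.Get(url)
        if err != nil {
                return nil, err
        }
        defer resp.Body.Close()
        if resp.StatusCode != http.StatusOK {
                return nil, fmt.Errorf("getting %s: %s", url, resp.Status)
        }
        doc, err := html.Parse(resp.Body)
        if err != nil {
                return nil, fmt.Errorf("parsing %s as HTML: %v", url, err)
        }
        var links []string
        var visit func(n *html.Node)
        visit = func(n *html.Node) {
                if n.Type == html.ElementNode && n.Data == "a" {
                        for _, a := range n.Attr {
                                if a.Key != "href" {
                                        continue
                                }
                                // Resolve relative links against the request URL.
                                link, err := resp.Request.URL.Parse(a.Val)
                                if err != nil {
                                        continue // ignore malformed URLs
                                }
                                links = append(links, link.String())
                        }
                }
                for c := n.FirstChild; c != nil; c = c.NextSibling {
                        visit(c)
                }
        }
        visit(doc)
        return links, nil
}

With the package in place, either crawler is started with one or more seed URLs on the command line, e.g. ./crawler http://gopl.io.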

crawler2.go 

package main

import (
        "fmt"
        "links" // the link-extraction package; in the book this is gopl.io/ch5/links
        "log"
        "os"
        //"strings" // needed only if the HasPrefix filter below is re-enabled
)

func main() {
        worklist := make(chan []string)  // lists of URLs, may have duplicates
        unseenLinks := make(chan string) // de-duplicated URLs

        // Start with the command-line arguments.
        go func() { worklist <- os.Args[1:] }()

        // Create 20 crawler goroutines to fetch each unseen link.
        for i := 0; i < 20; i++ {
                go func() {
                        for link := range unseenLinks {
                                //if strings.HasPrefix(link, "http://www.lypeng.com") {
                                foundLinks := crawl(link)
                                // Send found links to the worklist from a dedicated
                                // goroutine to avoid deadlocking the crawler.
                                go func() { worklist <- foundLinks }()
                                //}
                        }
                }()
        }

        // The main goroutine de-duplicates worklist items
        // and sends the unseen ones to the crawlers.
        seen := make(map[string]bool)
        for list := range worklist {
                for _, link := range list {
                        if !seen[link] {
                                seen[link] = true
                                unseenLinks <- link
                        }
                }
        }
}

// crawl prints a URL, then extracts and returns all links found on that page.
func crawl(url string) []string {
        fmt.Println(url)
        list, err := links.Extract(url)
        if err != nil {
                log.Print(err)
        }
        return list
}
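
The difference between the two versions: crawler.go spawns a goroutine per link and uses the tokens channel as a counting semaphore, so at most 20 calls to links.Extract run at once; crawler2.go instead bounds the goroutines themselves with a fixed pool of 20 workers reading from unseenLinks. As written, neither program ever terminates, even once every reachable link has been crawled. The book's fix is to keep a count of pending sends to worklist and exit the main loop when it drops to zero; a sketch of that change applied to crawler.go's main:

func main() {
        worklist := make(chan []string)
        var n int // number of pending sends to worklist

        // Start with the command-line arguments.
        n++
        go func() { worklist <- os.Args[1:] }()

        // Crawl the web concurrently; exit once no sends remain pending.
        seen := make(map[string]bool)
        for ; n > 0; n-- {
                list := <-worklist
                for _, link := range list {
                        if !seen[link] {
                                seen[link] = true
                                n++
                                go func(link string) {
                                        worklist <- crawl(link)
                                }(link)
                        }
                }
        }
}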

  

  
