
Building a Web Crawler in Go


I built a command-line tool that takes a keyword and fetches the URLs on the first page of organic search results for that keyword.
For details, see the repository below.

https://github.com/ryonakao/WebCrawlerForSerps
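
For reference, a typical invocation (assuming the two files below sit in one directory and goquery has been fetched with go get) looks like this:

$ go get github.com/PuerkitoBio/goquery
$ go run main.go message.go -w "golang crawler"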

Reference

http://blog.kechako.com/entry/2016/03/09/225345
This article was very helpful.

Full source

main.go
package main

import (
    "flag"
    "log"
    "net/http"
    "net/url"
    "strings"

    "github.com/PuerkitoBio/goquery"
)

func Crawl(u string, depth int, m *message) {
    // Tell execute this worker is done, whatever happens.
    defer func() { m.quit <- 0 }()

    // Collect the URLs linked from the page.
    urls, err := Fetch(u)

    // Send the result back.
    m.res <- &respons{
        url: u,
        err: err,
    }

    if err == nil {
        for _, next := range urls {
            // Queue a new request for each discovered URL.
            m.req <- &request{
                url:   next,
                depth: depth - 1,
            }
        }
    }
}

func Fetch(u string) (urls []string, err error) {
    baseURL, err := url.Parse(u)
    if err != nil {
        return
    }

    resp, err := http.Get(baseURL.String())
    if err != nil {
        return
    }
    defer resp.Body.Close()

    // To dump the fetched HTML as a string, uncomment this
    // (requires the "bytes", "fmt", and "io/ioutil" imports):
    //body, err := ioutil.ReadAll(resp.Body)
    //buf := bytes.NewBuffer(body)
    //html := buf.String()
    //fmt.Println(html)
    // ---------------

    doc, err := goquery.NewDocumentFromReader(resp.Body)
    if err != nil {
        return
    }

    // ".r" is the class Google used for organic result titles at the
    // time of writing; resolve each href against the base URL.
    urls = make([]string, 0)
    doc.Find(".r").Each(func(_ int, srg *goquery.Selection) {
        srg.Find("a").Each(func(_ int, s *goquery.Selection) {
            href, exists := s.Attr("href")
            if exists {
                reqURL, err := baseURL.Parse(href)
                if err == nil {
                    urls = append(urls, reqURL.String())
                }
            }
        })
    })

    return
}

func main() {
    var word = flag.String("w", " ", "search keyword")
    flag.Parse()
    log.Println("search keyword:", *word)
    *word = strings.Replace(*word, " ", "+", -1)
    firstURL := "https://www.google.co.jp/search?rlz=1C5CHFA_enJP693JP693&q=" + *word
    log.Println("search URL:", firstURL)
    m := newMessage()
    go m.execute()
    m.req <- &request{
        url:   firstURL,
        depth: 2,
    }

    // Block here; execute calls os.Exit(0) once crawling finishes.
    if err := http.ListenAndServe(":8080", nil); err != nil {
        log.Fatal("ListenAndServe:", err)
    }
}

message.go
package main

import (
    "fmt"
    "log"
    "os"
)

// message carries the channels used to coordinate crawler goroutines.
type message struct {
    res  chan *respons
    req  chan *request
    quit chan int
}

type respons struct {
    url string
    err error
}

type request struct {
    url   string
    depth int
}

func newMessage() *message {
    return &message{
        res:  make(chan *respons),
        req:  make(chan *request),
        quit: make(chan int),
    }
}

func (m *message) execute() {
    // Number of running workers.
    wc := 0
    urlMap := make(map[string]bool, 100)
    done := false
    for !done {
        select {
        case res := <-m.res:
            if res.err == nil {
                fmt.Printf("%s\n", res.url)
            } else {
                fmt.Fprintf(os.Stderr, "Error %s\n%v\n", res.url, res.err)
            }
        case req := <-m.req:
            if req.depth == 0 {
                break
            }

            if urlMap[req.url] {
                // Already fetched.
                break
            }
            urlMap[req.url] = true

            wc++
            go Crawl(req.url, req.depth, m)
        case <-m.quit:
            wc--
            if wc == 0 {
                done = true
            }
        }
    }
    log.Println("scraping finished")
    os.Exit(0)
}
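
One note on shutdown: main blocks on http.ListenAndServe only to keep the process alive until execute calls os.Exit(0). A minimal alternative sketch, assuming os.Exit(0) is removed from execute so that it returns normally, is to wait on a channel instead:

// Variant of main without the HTTP server. Assumes execute simply
// returns once the last worker has quit (i.e. os.Exit(0) removed).
func main() {
    var word = flag.String("w", " ", "search keyword")
    flag.Parse()
    firstURL := "https://www.google.co.jp/search?q=" + strings.Replace(*word, " ", "+", -1)

    m := newMessage()
    done := make(chan struct{})
    go func() {
        m.execute()  // returns when all workers have finished
        close(done)  // signal main that crawling is complete
    }()
    m.req <- &request{
        url:   firstURL,
        depth: 2,
    }
    <-done // block here instead of on http.ListenAndServe
}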
