// Copyright 2018 Benjamin Estes. All rights reserved. Use of this
// source code is governed by an MIT-style license that can be found
// in the LICENSE file.

// This package provides the command crawl, which performs
// efficient and concurrent crawling. For details:
//
//	https://github.com/benjaminestes/crawl
package main

import (
	"bufio"
	"bytes"
	"encoding/json"
	"flag"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"net/http"
	"os"
	"time"

	"github.com/benjaminestes/crawl/crawler"
	"github.com/benjaminestes/crawl/schema"
	"github.com/benjaminestes/crawl/sitemap"
	"github.com/benjaminestes/crawl/version"
)

var (
	spiderCommand = flag.NewFlagSet("spider", flag.ExitOnError)
	listCommand   = flag.NewFlagSet("list", flag.ExitOnError)
	listType      = listCommand.String("format",
		"text", "format of input for list mode: {text|xml}")
	sitemapCommand = flag.NewFlagSet("sitemap", flag.ExitOnError)
	versionCommand = flag.NewFlagSet("version", flag.ExitOnError)
)
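
// Each subcommand owns a FlagSet that the matching do* function
// parses against os.Args[2:]. Only the list subcommand defines a
// flag (-format); versionCommand is declared for symmetry but is
// never parsed in this file.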

func main() {
	if len(os.Args) < 2 {
		doHelp()
		os.Exit(0)
	}
	switch os.Args[1] {
	case "help":
		doHelp()
		os.Exit(0)
	case "schema":
		doSchema()
	case "spider":
		doSpider()
	case "list":
		doList()
	case "sitemap":
		doSitemap()
	case "version":
		doVersion()
		os.Exit(0)
	default:
		fmt.Fprintf(os.Stderr, "unexpected command: %s\n", os.Args[1])
		fmt.Fprintf(os.Stderr, `run "crawl help" for usage`+"\n")
		os.Exit(1)
	}
}

// doSchema prints a BigQuery-compatible JSON schema describing crawl
// results to stdout.
func doSchema() {
	os.Stdout.Write(schema.BigQueryJSON())
	fmt.Println()
}

// doVersion prints the version of the crawl tool.
func doVersion() {
	fmt.Println(version.Version)
}

// doSpider starts a crawl from the URLs specified in the JSON
// configuration file named by the first argument.
func doSpider() {
	spiderCommand.Parse(os.Args[2:])
	if spiderCommand.NArg() < 1 {
		log.Fatal("expected location of config file")
	}
	config, err := os.Open(spiderCommand.Arg(0))
	if err != nil {
		log.Fatal(err)
	}
	c, err := crawler.FromJSON(config)
	if err != nil {
		log.Fatal(err)
	}
	doCrawl(c)
}
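
// A minimal sketch of the config file doSpider expects. The only
// fields confirmed by this file are From (the seed URLs) and MaxDepth,
// which doList assigns directly on the Crawler below; the JSON key
// names are an assumption, since the struct tags used by
// crawler.FromJSON are not shown here:
//
//	{
//	    "From": ["http://www.example.com/"],
//	    "MaxDepth": 3
//	}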

// doSitemap fetches the sitemap or sitemap index at the URL given as
// the first argument and prints every URL it contains, one per line.
func doSitemap() {
	sitemapCommand.Parse(os.Args[2:])
	if sitemapCommand.NArg() < 1 {
		log.Fatal("expected sitemap URL")
	}
	queue, err := fetchAll(sitemapCommand.Arg(0))
	if err != nil {
		log.Fatalf("error fetching sitemap: %v", err)
	}
	for _, u := range queue {
		fmt.Println(u)
	}
}

// doList crawls a list of URLs read from stdin, using the JSON
// configuration file named by the first argument. The -format flag
// selects between plain-text (one URL per line) and sitemap XML input.
func doList() {
	listCommand.Parse(os.Args[2:])
	if listCommand.NArg() < 1 {
		log.Fatal("expected location of config file")
	}
	config, err := os.Open(listCommand.Arg(0))
	if err != nil {
		log.Fatal(err)
	}
	var queue []string
	switch *listType {
	case "text":
		queue = listFromReader(os.Stdin)
	case "xml":
		queue, err = sitemap.Parse(os.Stdin)
		if err != nil {
			log.Fatalf("couldn't parse sitemap from stdin: %v", err)
		}
	default:
		log.Fatalf("unknown format %q: want text or xml", *listType)
	}
	c, err := crawler.FromJSON(config)
	if err != nil {
		log.Fatal(err)
	}
	c.From = queue
	c.MaxDepth = 0
	doCrawl(c)
}

// doCrawl runs the crawler, writing one JSON object per result line
// to stdout and logging progress roughly every five seconds.
func doCrawl(c *crawler.Crawler) {
	count, lastCount := 0, 0
	lastUpdate := time.Now()
	if err := c.Start(); err != nil {
		log.Fatalf("couldn't start crawler: %v", err)
	}
	log.Printf("crawl started")
	for n := c.Next(); n != nil; n = c.Next() {
		j, _ := json.Marshal(n)
		fmt.Printf("%s\n", j)
		count++
		if time.Since(lastUpdate) > 5*time.Second {
			lastUpdate = time.Now()
			rate := (count - lastCount) / 5
			lastCount = count
			log.Printf("crawled %d (~%d/sec)", count, rate)
		}
	}
	log.Printf("crawl complete, %d URLs total", count)
}
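
// doCrawl's output is newline-delimited JSON, one object per crawled
// URL, which pairs with the BigQuery schema from the schema command.
// A plausible load sequence (the bq invocation is an assumed workflow,
// not something this file prescribes):
//
//	crawl schema >schema.json
//	crawl spider config.json >out.txt
//	bq load --source_format=NEWLINE_DELIMITED_JSON \
//	        dataset.table out.txt schema.json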

// listFromReader returns one string per line read from in.
func listFromReader(in io.Reader) []string {
	var queue []string
	scanner := bufio.NewScanner(in)
	for scanner.Scan() {
		queue = append(queue, scanner.Text())
	}
	if err := scanner.Err(); err != nil {
		log.Fatalf("error reading URL list: %v", err)
	}
	return queue
}

// fetchAll recursively produces a list of all URLs represented by the
// sitemap or sitemap index at url. If url points to a sitemap index,
// all of the sitemaps within that index will be recursively
// requested. Requests are not concurrent.
func fetchAll(url string) ([]string, error) {
	log.Printf("retrieving sitemap %s", url)
	resp, err := http.Get(url)
	if err != nil {
		return nil, fmt.Errorf("retrieving sitemap %s: %v", url, err)
	}
	defer resp.Body.Close()
	// It's possible we will need to try to parse the response
	// body twice, so read to []byte.
	data, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("reading content of sitemap %s: %v", url, err)
	}
	urls, err := sitemap.Parse(bytes.NewReader(data))
	if err != nil {
		return nil, err
	}
	if len(urls) > 0 {
		return urls, nil
	}
	// No URLs parsed as a plain sitemap: treat the document as a
	// sitemap index and fetch each child sitemap in turn.
	sitemaps, err := sitemap.ParseIndex(bytes.NewReader(data))
	if err != nil {
		return nil, err
	}
	for _, s := range sitemaps {
		newurls, err := fetchAll(s)
		if err != nil {
			return nil, err
		}
		urls = append(urls, newurls...)
	}
	return urls, nil
}
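
// For reference, the sitemaps.org index format that the ParseIndex
// call above is expected to consume (the exact behavior of the
// sitemap package is assumed, not shown in this file):
//
//	<?xml version="1.0" encoding="UTF-8"?>
//	<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
//	  <sitemap>
//	    <loc>http://www.example.com/sitemap1.xml</loc>
//	  </sitemap>
//	</sitemapindex>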

// doHelp prints usage information for all commands to stdout.
func doHelp() {
	fmt.Println("USAGE: crawl <command> [-flags] [args]")
	fmt.Println()
	fmt.Println("The following commands are valid:")
	fmt.Println("\thelp, list, schema, sitemap, spider, version")
	fmt.Println()
	fmt.Println("help\t\tPrint this message.")
	fmt.Println()
	fmt.Println("list\t\tCrawl a list of URLs provided on stdin.")
	fmt.Println()
	fmt.Println("\t\tThe -format={text|xml} flag determines the expected type.")
	fmt.Println()
	fmt.Println("\t\tExamples:")
	fmt.Println("\t\tcrawl list config.json <url_list.txt >out.txt")
	fmt.Println("\t\tcrawl list -format=xml config.json <sitemap.xml >out.txt")
	fmt.Println()
	fmt.Println("schema\t\tPrint a BigQuery-compatible JSON schema to stdout.")
	fmt.Println()
	fmt.Println("\t\tExample:")
	fmt.Println("\t\tcrawl schema >schema.json")
	fmt.Println()
	fmt.Println("sitemap\t\tRecursively request a sitemap or sitemap index from")
	fmt.Println("\t\tthe URL provided as an argument.")
	fmt.Println()
	fmt.Println("\t\tExample:")
	fmt.Println("\t\tcrawl sitemap http://www.example.com/sitemap.xml >out.txt")
	fmt.Println()
	fmt.Println("spider\t\tCrawl from the URLs specified in the configuration file.")
	fmt.Println()
	fmt.Println("\t\tExample:")
	fmt.Println("\t\tcrawl spider config.json >out.txt")
	fmt.Println()
	fmt.Println("version\t\tPrint the version.")
}