path: root/cmd/crawl/crawl_test.go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"os"
	"path/filepath"
	"testing"

	"git.jordan.im/crawl"
	"git.jordan.im/crawl/warc"
)

// linkTo serves a minimal HTML page whose only content is a single anchor
// pointing at uri, so the crawler has exactly one outlink to follow.
func linkTo(w http.ResponseWriter, uri string) {
	w.Header().Set("Content-Type", "text/html")
	fmt.Fprintf(w, "<html><body><a href=%q>link!</a></body></html>", uri)
}

func TestCrawl(t *testing.T) {
	tmpdir := t.TempDir()

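	// The test server is a tiny three-page site: "/" links to "/redir",
	// "/redir" 302-redirects to "/b", and "/b" links back to "/".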
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch r.URL.Path {
		case "/":
			linkTo(w, "/redir")
		case "/b":
			linkTo(w, "/")
		case "/redir":
			http.Redirect(w, r, "/b", http.StatusFound)
		default:
			http.NotFound(w, r)
		}
	}))
	defer srv.Close()

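	// Keep the crawl on the seed site: http URLs only, at most ten hops deep.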
	seeds := crawl.MustParseURLs([]string{srv.URL + "/"})
	scope := crawl.AND(
		crawl.NewSchemeScope([]string{"http"}),
		crawl.NewDepthScope(10),
		crawl.NewSeedScope(seeds),
	)

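	// Captured request/response pairs are written as gzip-compressed WARC records.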
	outf, err := os.Create(filepath.Join(tmpdir, "warc.gz"))
	if err != nil {
		t.Fatal(err)
	}
	w := warc.NewWriter(outf)
	defer w.Close()
	saver, err := newWarcSaveHandler(w)
	if err != nil {
		t.Fatal(err)
	}

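	// Wire up the crawler: each fetch passes through retry handling, redirect
	// following and error filtering before reaching the WARC save handler.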
	crawler, err := crawl.NewCrawler(
		filepath.Join(tmpdir, "db"),
		seeds,
		scope,
		crawl.FetcherFunc(fetch),
		crawl.HandleRetries(crawl.FollowRedirects(crawl.FilterErrors(saver))),
	)
	if err != nil {
		t.Fatal(err)
	}

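	// Run the crawl to completion with a single worker.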
	crawler.Run(1)
	crawler.Close()

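	// Three URLs are fetched ("/", "/redir" and "/b"), so the save handler
	// should report exactly three written records.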
	if n := saver.(*warcSaveHandler).numWritten; n != 3 {
		t.Fatalf("warc handler wrote %d records, expected 3", n)
	}
}