aboutsummaryrefslogtreecommitdiff
path: root/scope.go
blob: 6a630180590265b7b8aee8eef468676e7c9f905a (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
package crawl

import (
	"fmt"
	"net/url"
	"regexp"
	"strings"
)

// Scope defines the crawling scope: implementations decide whether a
// given URL should be fetched by the crawler.
type Scope interface {
	// Check a URL to see if it's in scope for crawling. The int
	// argument is the link depth of the URL with respect to the
	// crawl seeds.
	Check(*url.URL, int) bool
}

// maxDepthScope limits the crawl to links within a maximum depth
// from the seeds.
type maxDepthScope struct {
	maxDepth int
}

// Check reports whether the given depth is still below the configured
// maximum; the URL itself is not inspected.
func (s *maxDepthScope) Check(_ *url.URL, depth int) bool {
	if depth >= s.maxDepth {
		return false
	}
	return true
}

// NewDepthScope returns a Scope that will limit crawls to a
// maximum link depth with respect to the crawl seeds.
func NewDepthScope(maxDepth int) Scope {
	return &maxDepthScope{maxDepth: maxDepth}
}

type schemeScope struct {
	allowedSchemes map[string]struct{}
}

func (s *schemeScope) Check(uri *url.URL, depth int) bool {
	_, ok := s.allowedSchemes[uri.Scheme]
	return ok
}

// NewSchemeScope limits the crawl to the specified URL schemes.
func NewSchemeScope(schemes []string) Scope {
	allowed := make(map[string]struct{}, len(schemes))
	for _, scheme := range schemes {
		allowed[scheme] = struct{}{}
	}
	return &schemeScope{allowedSchemes: allowed}
}

// A URLPrefixMap makes it easy to check for URL prefixes (even for
// very large lists). The URL scheme is ignored, along with an
// eventual "www." prefix.
type URLPrefixMap map[string]struct{}

func normalizeURLPrefix(uri *url.URL) string {
	return strings.TrimPrefix(uri.Host, "www.") + strings.TrimSuffix(uri.Path, "/")
}

// Add an URL to the prefix map.
func (m URLPrefixMap) Add(uri *url.URL) {
	m[normalizeURLPrefix(uri)] = struct{}{}
}

// Contains returns true if the given URL matches the prefix map.
func (m URLPrefixMap) Contains(uri *url.URL) bool {
	s := strings.TrimPrefix(uri.Host, "www.")
	if _, ok := m[s]; ok {
		return true
	}
	for _, p := range strings.Split(uri.Path, "/") {
		if p == "" {
			continue
		}
		s = fmt.Sprintf("%s/%s", s, p)
		if _, ok := m[s]; ok {
			return true
		}
	}
	return false
}

// urlPrefixScope limits the crawl to URLs matching a set of allowed
// prefixes.
type urlPrefixScope struct {
	prefixes URLPrefixMap
}

// Check reports whether the URL falls under one of the allowed
// prefixes; the link depth is ignored.
func (s *urlPrefixScope) Check(uri *url.URL, _ int) bool {
	return s.prefixes.Contains(uri)
}

// NewURLPrefixScope returns a Scope that limits the crawl to a set of
// allowed URL prefixes.
func NewURLPrefixScope(prefixes URLPrefixMap) Scope {
	return &urlPrefixScope{prefixes: prefixes}
}

// NewSeedScope returns a Scope that will only allow crawling the seed
// prefixes.
func NewSeedScope(seeds []*url.URL) Scope {
	prefixes := make(URLPrefixMap)
	for _, seed := range seeds {
		prefixes.Add(seed)
	}
	return NewURLPrefixScope(prefixes)
}

type regexpIgnoreScope struct {
	ignores []*regexp.Regexp
}

func (s *regexpIgnoreScope) Check(uri *url.URL, depth int) bool {
	uriStr := uri.String()
	for _, i := range s.ignores {
		if i.MatchString(uriStr) {
			return false
		}
	}
	return true
}

// NewRegexpIgnoreScope returns a Scope that filters out URLs
// according to a list of regular expressions. A nil list selects the
// built-in defaultIgnorePatterns. Note that patterns are compiled
// with regexp.MustCompile, which panics on an invalid expression.
func NewRegexpIgnoreScope(ignores []string) Scope {
	patterns := ignores
	if patterns == nil {
		patterns = defaultIgnorePatterns
	}
	compiled := make([]*regexp.Regexp, len(patterns))
	for i, pattern := range patterns {
		compiled[i] = regexp.MustCompile(pattern)
	}
	return &regexpIgnoreScope{ignores: compiled}
}