about summary refs log tree commit diff
path: root/cmd
diff options
context:
space:
mode:
author	ale <ale@incal.net>	2014-12-20 10:49:36 +0000
committer	ale <ale@incal.net>	2014-12-20 10:49:36 +0000
commit	f0c14e5e36d640d0f6801691c69dffb69459fe10 (patch)
tree	b9bd2ae00aa3ca65175e37959875cdd52488b007 /cmd
parent	d4c561c23d016cf6a7507840153e835994915cb8 (diff)
download	crawl-f0c14e5e36d640d0f6801691c69dffb69459fe10.tar.gz
	crawl-f0c14e5e36d640d0f6801691c69dffb69459fe10.zip
move link extraction to a common location
Diffstat (limited to 'cmd')
-rw-r--r--	cmd/crawl/crawl.go	48
-rw-r--r--	cmd/links/links.go	33
2 files changed, 7 insertions(+), 74 deletions(-)
diff --git a/cmd/crawl/crawl.go b/cmd/crawl/crawl.go
index 0979d43..1abeca6 100644
--- a/cmd/crawl/crawl.go
+++ b/cmd/crawl/crawl.go
@@ -10,15 +10,13 @@ import (
"io/ioutil"
"log"
"net/http"
- "net/url"
"os"
- "regexp"
"strconv"
"strings"
"git.autistici.org/ale/crawl"
+ "git.autistici.org/ale/crawl/analysis"
"git.autistici.org/ale/crawl/warc"
- "github.com/PuerkitoBio/goquery"
)
var (
@@ -27,53 +25,15 @@ var (
depth = flag.Int("depth", 10, "maximum link depth")
validSchemes = flag.String("schemes", "http,https", "comma-separated list of allowed protocols")
outputFile = flag.String("output", "crawl.warc.gz", "output WARC file")
-
- urlcssRx = regexp.MustCompile(`background.*:.*url\(["']?([^'"\)]+)["']?\)`)
)
-var linkMatches = []struct {
- tag string
- attr string
-}{
- {"a", "href"},
- {"link", "href"},
- {"img", "src"},
- {"script", "src"},
-}
-
func extractLinks(c *crawl.Crawler, u string, depth int, resp *http.Response, err error) error {
- var outlinks []string
-
- ctype := resp.Header.Get("Content-Type")
- if strings.HasPrefix(ctype, "text/html") {
- doc, err := goquery.NewDocumentFromResponse(resp)
- if err != nil {
- return err
- }
-
- for _, lm := range linkMatches {
- doc.Find(fmt.Sprintf("%s[%s]", lm.tag, lm.attr)).Each(func(i int, s *goquery.Selection) {
- val, _ := s.Attr(lm.attr)
- outlinks = append(outlinks, val)
- })
- }
- } else if strings.HasPrefix(ctype, "text/css") {
- if data, err := ioutil.ReadAll(resp.Body); err == nil {
- for _, val := range urlcssRx.FindAllStringSubmatch(string(data), -1) {
- outlinks = append(outlinks, val[1])
- }
- }
+ links, err := analysis.GetLinks(resp)
+ if err != nil {
+ return err
}
- // Uniquify and parse outbound links.
- links := make(map[string]*url.URL)
- for _, val := range outlinks {
- if linkurl, err := resp.Request.URL.Parse(val); err == nil {
- links[linkurl.String()] = linkurl
- }
- }
for _, link := range links {
- //log.Printf("%s -> %s", u, link.String())
c.Enqueue(link, depth+1)
}
diff --git a/cmd/links/links.go b/cmd/links/links.go
index 9ae2394..95388ce 100644
--- a/cmd/links/links.go
+++ b/cmd/links/links.go
@@ -6,14 +6,12 @@ package main
import (
"flag"
- "fmt"
"log"
"net/http"
- "net/url"
"strings"
"git.autistici.org/ale/crawl"
- "github.com/PuerkitoBio/goquery"
+ "git.autistici.org/ale/crawl/analysis"
)
var (
@@ -23,41 +21,16 @@ var (
validSchemes = flag.String("schemes", "http,https", "comma-separated list of allowed protocols")
)
-var linkMatches = []struct {
- tag string
- attr string
-}{
- {"a", "href"},
- {"link", "href"},
- {"img", "src"},
- {"script", "src"},
-}
-
func extractLinks(c *crawl.Crawler, u string, depth int, resp *http.Response, err error) error {
- if !strings.HasPrefix(resp.Header.Get("Content-Type"), "text/html") {
- return nil
- }
-
- doc, err := goquery.NewDocumentFromResponse(resp)
+ links, err := analysis.GetLinks(resp)
if err != nil {
return err
}
- links := make(map[string]*url.URL)
-
- for _, lm := range linkMatches {
- doc.Find(fmt.Sprintf("%s[%s]", lm.tag, lm.attr)).Each(func(i int, s *goquery.Selection) {
- val, _ := s.Attr(lm.attr)
- if linkurl, err := resp.Request.URL.Parse(val); err == nil {
- links[linkurl.String()] = linkurl
- }
- })
- }
-
for _, link := range links {
- //log.Printf("%s -> %s", u, link.String())
c.Enqueue(link, depth+1)
}
+
return nil
}