author    Jordan <me@jordan.im>  2022-03-24 12:02:08 -0700
committer Jordan <me@jordan.im>  2022-03-24 12:02:08 -0700
commit    20d95915298391ea05054238c5b13337499b7af4 (patch)
tree      024c080f319fc7a430343eda96ff620ed7d1febb /cmd
parent    6355aa4310ff0c32b056580e812ca6f0e2a5ee2f (diff)
misc: update handler signatures, tests, housekeeping
Diffstat (limited to 'cmd')
-rw-r--r--  cmd/crawl/crawl.go       45
-rw-r--r--  cmd/crawl/crawl_test.go  17
-rw-r--r--  cmd/links/links.go        5

3 files changed, 30 insertions(+), 37 deletions(-)
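The headline change below threads the fetched response body through the handler
chain as a temporary file rather than an in-memory buffer. The crawl.Handler
interface itself lives outside this diff; inferred from the call sites below,
its updated shape is presumably along these lines (a sketch, not the
authoritative definition):

    type Handler interface {
            // rBody is an *os.File spooling the response data on disk;
            // handlers read and seek it instead of draining resp.Body
            // into memory. (Signature inferred from this commit's call
            // sites, not taken from the crawl package source.)
            Handle(p Publisher, u string, tag, depth int, resp *http.Response, rBody *os.File, err error) error
    }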
diff --git a/cmd/crawl/crawl.go b/cmd/crawl/crawl.go
index 8f28bc4..7082d14 100644
--- a/cmd/crawl/crawl.go
+++ b/cmd/crawl/crawl.go
@@ -9,7 +9,6 @@ import (
"flag"
"fmt"
"io"
- "io/ioutil"
"log"
"net"
"net/http"
@@ -181,47 +180,29 @@ func (h *warcSaveHandler) writeWARCRecord(typ, uri string, header []byte, body *
}
-func (h *warcSaveHandler) Handle(p crawl.Publisher, u string, tag, depth int, resp *http.Response, _ error) error {
- // Read the response body (so we can save it to the WARC
- // output) and replace it with a buffer.
-
- /*
- data, derr := ioutil.ReadAll(resp.Body)
- if derr != nil {
- // Errors at this stage are usually transport-level errors,
- // and as such, retriable.
- return crawl.ErrRetryRequest
- }
- resp.Body = ioutil.NopCloser(bytes.NewReader(data))
- */
-
- // Keep temporary file to store request/response data
- r, _ := ioutil.TempFile("temp", "crawl")
- defer r.Close()
-
- w, _ := os.OpenFile(r.Name(), os.O_RDWR, 0777)
- defer w.Close()
- defer os.Remove(r.Name())
+func (h *warcSaveHandler) Handle(p crawl.Publisher, u string, tag, depth int, resp *http.Response, rBody *os.File, _ error) error {
+ wBody, _ := os.OpenFile(rBody.Name(), os.O_RDWR, 0777)
+ defer wBody.Close()
// Dump the request to the WARC output.
- if werr := resp.Request.Write(w); werr != nil {
+ if werr := resp.Request.Write(wBody); werr != nil {
return werr
}
- if werr := h.writeWARCRecord("request", resp.Request.URL.String(), nil, r); werr != nil {
+ if werr := h.writeWARCRecord("request", resp.Request.URL.String(), nil, rBody); werr != nil {
return werr
}
// Seek to start; we've written since last read
- if _, err := r.Seek(0, io.SeekStart); err != nil {
+ if _, err := rBody.Seek(0, io.SeekStart); err != nil {
return err
}
- w.Close()
- w, _ = os.OpenFile(r.Name(), os.O_RDWR, 0777)
- defer w.Close()
+ wBody.Close()
+ wBody, _ = os.OpenFile(rBody.Name(), os.O_RDWR, 0777)
+ defer wBody.Close()
// Write response body to tmp file
- if _, err := io.Copy(w, resp.Body); err != nil {
+ if _, err := io.Copy(wBody, resp.Body); err != nil {
return err
}
@@ -231,18 +212,18 @@ func (h *warcSaveHandler) Handle(p crawl.Publisher, u string, tag, depth int, re
[][]byte{[]byte(statusLine), hdr2str(resp.Header), []byte("")},
[]byte{'\r', '\n'},
)
- if werr := h.writeWARCRecord("response", resp.Request.URL.String(), respHeader, r); werr != nil {
+ if werr := h.writeWARCRecord("response", resp.Request.URL.String(), respHeader, rBody); werr != nil {
return werr
}
// Seek to start; we've written since last read
- if _, err := r.Seek(0, io.SeekStart); err != nil {
+ if _, err := rBody.Seek(0, io.SeekStart); err != nil {
return err
}
h.numWritten++
- return extractLinks(p, u, depth, resp, r, nil)
+ return extractLinks(p, u, depth, resp, rBody, nil)
}
func newWarcSaveHandler(w *warc.Writer) (crawl.Handler, error) {
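After this change, Handle expects its caller to pass an already-created, empty
spool file under temp/ (the test below creates that directory), and it no
longer removes the file itself, so cleanup presumably moved to the caller. A
minimal sketch of that caller-side contract, with spoolBody as a purely
illustrative name; note also that the wBody reopen above uses plain os.O_RDWR,
which does not truncate, so a response body shorter than the request dump
would leave stale trailing bytes unless writeWARCRecord bounds its read
(adding os.O_TRUNC to the reopen would sidestep that):

    // Hypothetical caller-side provisioning; not part of this diff.
    func spoolBody() (*os.File, error) {
            // Matches the ioutil.TempFile("temp", "crawl") call removed
            // from Handle above.
            return ioutil.TempFile("temp", "crawl")
    }

    // ...and at the (assumed) call site:
    rBody, err := spoolBody()
    if err != nil {
            return err
    }
    defer func() {
            rBody.Close()
            os.Remove(rBody.Name())
    }()
    return h.Handle(p, u, tag, depth, resp, rBody, nil)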
diff --git a/cmd/crawl/crawl_test.go b/cmd/crawl/crawl_test.go
index 57acffc..1646e40 100644
--- a/cmd/crawl/crawl_test.go
+++ b/cmd/crawl/crawl_test.go
@@ -6,7 +6,6 @@ import (
"net/http"
"net/http/httptest"
"os"
- "path/filepath"
"testing"
"git.jordan.im/crawl"
@@ -25,6 +24,18 @@ func TestCrawl(t *testing.T) {
}
defer os.RemoveAll(tmpdir)
+ if err := os.Chdir(tmpdir); err != nil {
+ t.Fatal(err)
+ }
+
+ // Create directory to (temporarily) store response bodies
+ if _, err := os.Stat("temp"); os.IsNotExist(err) {
+ err := os.Mkdir("temp", 0700)
+ if err != nil {
+ t.Fatal(err)
+ }
+ }
+
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
switch r.URL.Path {
case "/":
@@ -46,7 +57,7 @@ func TestCrawl(t *testing.T) {
crawl.NewSeedScope(seeds),
)
- outf, err := os.Create(filepath.Join(tmpdir, "warc.gz"))
+ outf, err := os.Create("warc.gz")
if err != nil {
t.Fatal(err)
}
@@ -58,7 +69,7 @@ func TestCrawl(t *testing.T) {
}
crawler, err := crawl.NewCrawler(
- filepath.Join(tmpdir, "db"),
+ "db",
seeds,
scope,
crawl.FetcherFunc(fetch),
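With the os.Chdir(tmpdir) added above, every relative path the test uses now
resolves under the scratch directory, which the earlier defer
os.RemoveAll(tmpdir) deletes wholesale. The resulting layout, as inferred
from this diff:

    tmpdir/
      temp/      spool files for response bodies (ioutil.TempFile("temp", ...))
      warc.gz    WARC output
      db/        crawler state database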
diff --git a/cmd/links/links.go b/cmd/links/links.go
index 95f48d9..847f80f 100644
--- a/cmd/links/links.go
+++ b/cmd/links/links.go
@@ -8,6 +8,7 @@ import (
"flag"
"log"
"net/http"
+ "os"
"strings"
"git.jordan.im/crawl"
@@ -20,8 +21,8 @@ var (
validSchemes = flag.String("schemes", "http,https", "comma-separated list of allowed protocols")
)
-func extractLinks(p crawl.Publisher, u string, tag, depth int, resp *http.Response, _ error) error {
- links, err := analysis.GetLinks(resp)
+func extractLinks(p crawl.Publisher, u string, tag, depth int, resp *http.Response, body *os.File, _ error) error {
+ links, err := analysis.GetLinks(resp, body)
if err != nil {
// Not a fatal error, just a bad web page.
return nil
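extractLinks is presumably registered as a crawl.HandlerFunc elsewhere in this
file (outside the hunk shown), so this signature change only compiles if
HandlerFunc was updated in lockstep with the interface. A compile-time
assertion in the style of this sketch would make that coupling explicit,
assuming the interface shape inferred earlier:

    // Sketch: fails to compile if extractLinks drifts from the
    // (assumed) crawl.Handler contract.
    var _ crawl.Handler = crawl.HandlerFunc(extractLinks)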