// Tests for the noxy package.

package noxy
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"math"
"net/http"
"net/http/httptest"
"net/url"
"os"
"sync"
"testing"
"testing/iotest"
"time"
nostr "github.com/nbd-wtf/go-nostr"
"golang.org/x/net/websocket"
)
// TestDetachedSlurpDataCacheMiss exercises detachedSlurpData twice against
// the same URL: the first run fills the cache, the second must be served from
// it. The caller's context is canceled up front to prove the slurp runs on a
// detached context rather than the caller's.
func TestDetachedSlurpDataCacheMiss(t *testing.T) {
	const contents = "text file"
	const ctype = "text/plain;charset=utf-8"
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", ctype)
		w.Write([]byte(contents))
	}))
	defer ts.Close()
	testURL := ts.URL + "/"
	cache := DirCache{Root: t.TempDir()}
	noxer := Noxer{Cache: cache, MaxFileSize: 1024}
	for i := 1; i <= 2; i++ {
		t.Run(fmt.Sprintf("slurp %d", i), func(t *testing.T) {
			bgCtx := context.Background()
			canceledCtx, cancel := context.WithCancel(bgCtx)
			cancel() // slurp must run on a separate context
			ds, err := noxer.detachedSlurpData(canceledCtx, testURL)
			if err != nil {
				t.Fatalf("noxer.detachedSlurpData: %v", err)
			}
			checkDataStream(t, ds, ctype, []byte(contents))
			checkCachedDataFile(t, cache, testURL, []byte(contents))
			// The data must also be retrievable directly from the cache.
			cacheKey := MakeCacheKey(testURL, CacheKeyData)
			cachedDS, err := cache.GetStream(bgCtx, cacheKey)
			if err != nil {
				t.Fatalf("cache.GetStream: %v", err)
			}
			checkDataStream(t, cachedDS, ctype, []byte(contents))
			// Slurper bookkeeping must be cleaned up once the stream is done.
			// NOTE: the original message referenced "x.slurpers"; the variable
			// under test is noxer.slurpers.
			noxer.slurpersMu.Lock()
			defer noxer.slurpersMu.Unlock()
			if len(noxer.slurpers) > 0 {
				t.Error("noxer.slurpers is not empty")
			}
		})
	}
}
// TestDetachedSlurpDataClosedReader verifies that closing the reader returned
// by detachedSlurpData does not abort the detached slurp: the background
// goroutine must still download the complete body and store it in the cache.
func TestDetachedSlurpDataClosedReader(t *testing.T) {
	const ctype = "text/plain;charset=utf-8"
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", ctype)
		w.Write([]byte("foo"))
		// Pause mid-response so the client-side reader gets closed while the
		// slurp is still in flight.
		time.Sleep(time.Second)
		w.Write([]byte("bar"))
	}))
	defer ts.Close()
	var testURL = ts.URL + "/"
	cache := DirCache{Root: t.TempDir()}
	noxer := Noxer{Cache: cache, MaxFileSize: 1024}
	ctx := context.Background()
	ds1, err := noxer.detachedSlurpData(ctx, testURL)
	if err != nil {
		t.Fatalf("noxer.detachedSlurpData 1: %v", err)
	}
	// Close the returned stream immediately; the detached slurp must keep going.
	ds1.r.(io.Closer).Close()
	cacheKey := MakeCacheKey(testURL, CacheKeyData)
	// Grab the slurper's completion channel and wait for the background
	// download to finish (bounded so a hung slurp fails the test).
	noxer.slurpersMu.Lock()
	ch := noxer.slurpers[cacheKey.Path()]
	noxer.slurpersMu.Unlock()
	select {
	case <-time.After(3 * time.Second):
		t.Fatal("slurp took too long")
	case <-ch:
	}
	// Despite the early close, the full body must now be in the cache.
	ds2, err := cache.GetStream(ctx, cacheKey)
	if err != nil {
		t.Fatalf("cache.GetStream: %v", err)
	}
	checkDataStream(t, ds2, ctype, []byte("foobar"))
}
// TestSlurpLinkMeta verifies that slurpLinkMeta parses OpenGraph metadata
// from an HTML page and caches it: the test server serves the page exactly
// once and responds 404 afterwards, so the second slurp must hit the cache.
func TestSlurpLinkMeta(t *testing.T) {
	var count int
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if count > 0 {
			// Any repeat request means the cache was bypassed; fail it.
			w.WriteHeader(http.StatusNotFound)
			return
		}
		count++
		w.Header().Set("Content-Type", "text/html")
		fmt.Fprintln(w, `<html><head>
<meta property="og:type" content="article" />
<meta property="og:title" content="test title" />
<meta property="og:description" content="test descr" />
<meta property="og:image" content="http://unused:0/image.png" />
</head></html>`)
	}))
	defer ts.Close()
	testURL := ts.URL + "/"
	cache := DirCache{Root: t.TempDir()}
	noxer := Noxer{Cache: cache, MaxFileSize: 1024}
	meta1, err := noxer.slurpLinkMeta(context.Background(), testURL)
	if err != nil {
		t.Fatalf("slurpLinkMeta 1: %v", err)
	}
	wantMeta := &LinkMeta{
		Type:        "article",
		Title:       "test title",
		Description: "test descr",
		ImageURLs:   []string{"http://unused:0/image.png"},
	}
	compareLinkMeta(t, meta1, wantMeta)
	// expected to be cached by now
	meta2, err := noxer.slurpLinkMeta(context.Background(), testURL)
	if err != nil {
		t.Fatalf("slurpLinkMeta 2: %v", err)
	}
	compareLinkMeta(t, meta2, wantMeta)
}
// TestSlurpLinkMetaHTTPErr verifies that an HTTP 404 from the origin server
// is reported by slurpLinkMeta as ErrNotFound.
func TestSlurpLinkMetaHTTPErr(t *testing.T) {
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusNotFound)
	}))
	defer srv.Close()
	noxer := Noxer{Cache: NullCache, MaxFileSize: 1024}
	_, err := noxer.slurpLinkMeta(context.Background(), srv.URL+"/")
	if !errors.Is(err, ErrNotFound) {
		t.Errorf("slurpLinkMeta err=%v; want ErrNotFound", err)
	}
}
// TestVerifyEventLinkNoMeta checks that verifyEventLink, in verifyNoMeta
// mode, accepts only URLs literally present in the event content and that no
// relay subscriptions are left open afterwards.
func TestVerifyEventLinkNoMeta(t *testing.T) {
	priv := genNostrKey()
	event := &nostr.Event{
		CreatedAt: time.Now(),
		Kind:      nostr.KindTextNote,
		Content:   "text; http://unused:0/foo and http://unused:0/bar",
		PubKey:    nostrPubKey(priv),
	}
	if err := event.Sign(priv); err != nil {
		t.Fatal(err)
	}
	trelay := ServeSingleEvent(t, event)
	defer trelay.Close()
	t.Logf("fake relay URL: %s", trelay.URL)
	noxer := Noxer{
		Cache:            DirCache{Root: t.TempDir()},
		MaxFileSize:      1024,
		KnownRelays:      []string{"127.0.0.1"},
		IdleRelayTimeout: time.Minute,
	}
	cases := []struct {
		url    string
		wantOK bool
	}{
		{"http://unused:0/foo", true},
		{"http://unused:0/bar", true},
		{"http://unused:0/", false},
		{"http://example.org", false},
	}
	for _, c := range cases {
		t.Run(c.url, func(t *testing.T) {
			err := noxer.verifyEventLink(context.Background(), event.ID, trelay.URL, c.url, verifyNoMeta)
			if c.wantOK && err != nil {
				t.Errorf("verifyEventLink: %v", err)
			}
			if !c.wantOK && err == nil {
				t.Error("verifyEventLink returned nil error")
			}
		})
	}
	if subs := trelay.OpenSubs(); len(subs) > 0 {
		t.Errorf("trelay.OpenSubs is not empty: %q", subs)
	}
}
// TestFetchMetaAndStreamData runs the full happy path: fetch link metadata
// for a page referenced by a signed nostr event, verify the metadata was
// cached, then stream the image the page points at and verify it was cached
// too.
func TestFetchMetaAndStreamData(t *testing.T) {
	// The handler below references website.URL, so the variable must be
	// declared before the server is constructed.
	var website *httptest.Server
	website = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch r.URL.Path {
		default:
			// Any unexpected path is both an HTTP error and a test failure.
			w.WriteHeader(http.StatusBadRequest)
			t.Errorf("%s %s", r.Method, r.URL)
		case "/":
			w.Header().Set("Content-Type", "text/html")
			fmt.Fprintf(w, `<html><head>
<meta property="og:image" content="%s/image.png" />
</head></html>`, website.URL)
		case "/image.png":
			w.Header().Set("Content-Type", "image/png")
			w.Write([]byte{1, 2, 3})
		}
	}))
	defer website.Close()
	websiteRootURL := website.URL + "/"
	websiteImageURL := website.URL + "/image.png"
	// Create and sign an event whose content links to the test page.
	priv := genNostrKey()
	event := &nostr.Event{
		CreatedAt: time.Now(),
		Kind:      nostr.KindTextNote,
		Content:   fmt.Sprintf("link to an html page with image: %s", websiteRootURL),
		PubKey:    nostrPubKey(priv),
	}
	if err := event.Sign(priv); err != nil {
		t.Fatal(err)
	}
	trelay := ServeSingleEvent(t, event)
	defer trelay.Close()
	cache := DirCache{Root: t.TempDir()}
	noxer := Noxer{
		Cache:            cache,
		MaxFileSize:      1024,
		KnownRelays:      []string{"127.0.0.1"},
		IdleRelayTimeout: time.Minute,
	}
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	meta, err := noxer.FetchLinkMeta(ctx, event.ID, trelay.URL, websiteRootURL)
	if err != nil {
		t.Fatalf("FetchLinkMeta(%s): %v", websiteRootURL, err)
	}
	// The fetched metadata must also have been written to the cache.
	var cachedMeta LinkMeta
	if err := cache.GetJSON(ctx, MakeCacheKey(websiteRootURL, CacheKeyURLPreview), &cachedMeta); err != nil {
		t.Fatalf("cache.getjson: %v", err)
	}
	compareLinkMeta(t, meta, &cachedMeta)
	// Stream the image referenced by the page; contents and cache must match.
	ds, err := noxer.StreamLinkData(ctx, event.ID, trelay.URL, websiteImageURL)
	if err != nil {
		t.Fatalf("StreamLinkData(%s): %v", websiteImageURL, err)
	}
	checkDataStream(t, ds, "image/png", []byte{1, 2, 3})
	checkCachedDataFile(t, cache, websiteImageURL, []byte{1, 2, 3})
}
// checkDataStream asserts that ds advertises the content type wantType and
// that its reader yields exactly want. iotest.TestReader also exercises
// misbehavior like short reads and reads past EOF.
func checkDataStream(t *testing.T, ds *DataStream, wantType string, want []byte) {
	t.Helper()
	if err := iotest.TestReader(ds, want); err != nil {
		t.Errorf("data stream reader: %v", err)
	}
	if ds.ContentType != wantType {
		t.Errorf("ds.ContentType = %q; want %q", ds.ContentType, wantType)
	}
}
// checkCachedDataFile reads the cache file that origURL's data key maps to
// and asserts it holds exactly want.
func checkCachedDataFile(t *testing.T, cache DirCache, origURL string, want []byte) {
	t.Helper()
	key := MakeCacheKey(origURL, CacheKeyData)
	got, err := os.ReadFile(cache.makeFilepath(key, false))
	if err != nil {
		t.Errorf("cache file read: %v", err)
	}
	if !bytes.Equal(got, want) {
		t.Errorf("cached bytes = %q; want %q", got, want)
	}
}
// compareLinkMeta reports a test error for every field of actual that differs
// from expected. ImageURLs are compared element-wise.
func compareLinkMeta(t *testing.T, actual, expected *LinkMeta) {
	t.Helper()
	if actual.Type != expected.Type {
		t.Errorf("actual.Type = %q; want %q", actual.Type, expected.Type)
	}
	if actual.Title != expected.Title {
		t.Errorf("actual.Title = %q; want %q", actual.Title, expected.Title)
	}
	if actual.Description != expected.Description {
		t.Errorf("actual.Description = %q; want %q", actual.Description, expected.Description)
	}
	// Compare the whole slice instead of assuming exactly one element: the
	// original indexed expected.ImageURLs[0] unconditionally and would panic
	// if a caller ever passed an empty expected slice.
	urlsEqual := len(actual.ImageURLs) == len(expected.ImageURLs)
	if urlsEqual {
		for i := range expected.ImageURLs {
			if actual.ImageURLs[i] != expected.ImageURLs[i] {
				urlsEqual = false
				break
			}
		}
	}
	if !urlsEqual {
		t.Errorf("actual.ImageURLs = %q; want %q", actual.ImageURLs, expected.ImageURLs)
	}
}
// genNostrKey returns a freshly generated nostr private key, panicking if
// generation yields an empty string.
func genNostrKey() string {
	key := nostr.GeneratePrivateKey()
	if key == "" {
		panic("nostr.GeneratePrivateKey returned empty string")
	}
	return key
}
// nostrPubKey derives the public key for the private key priv, panicking on
// failure (test helper; keys are always generated locally).
func nostrPubKey(priv string) string {
	pub, err := nostr.GetPublicKey(priv)
	if err == nil {
		return pub
	}
	panic(err.Error())
}
// FakeNostrRelay is a minimal in-process nostr relay backed by an
// httptest.Server speaking websocket. It knows about exactly one event.
type FakeNostrRelay struct {
	// Event is the single event returned in response to every REQ.
	Event *nostr.Event
	// URL is the ws:// address of the relay, derived from HTTPServer.URL.
	URL string
	// HTTPServer is the underlying test server; shut down via Close.
	HTTPServer *httptest.Server
	// Mu guards Subs.
	Mu sync.Mutex
	Subs map[string]bool // id => true if still active; false for unsub'ed IDs
}
// Close shuts down the relay's underlying HTTP test server.
func (nr *FakeNostrRelay) Close() {
	nr.HTTPServer.Close()
}
// OpenSubs returns the IDs of all subscriptions that are still active, i.e.
// those that have not been terminated with a CLOSE message.
func (nr *FakeNostrRelay) OpenSubs() []string {
	nr.Mu.Lock()
	defer nr.Mu.Unlock()
	var open []string
	for id, active := range nr.Subs {
		if !active {
			continue
		}
		open = append(open, id)
	}
	return open
}
// nostrHandler returns a websocket handler implementing just enough of the
// nostr relay protocol for tests: it accepts REQ and CLOSE messages and
// answers every REQ with nr.Event. Any other message type fails the test.
func nostrHandler(t *testing.T, nr *FakeNostrRelay) func(*websocket.Conn) {
	return func(conn *websocket.Conn) {
		for {
			// Incoming messages are JSON arrays: [type, subID, filters].
			var req [3]any
			if err := websocket.JSON.Receive(conn, &req); err != nil {
				conn.Close()
				return
			}
			switch req[0].(string) {
			default:
				t.Errorf("ws handler req[0]=%q; want REQ or CLOSE", req[0])
				conn.Close()
				return
			case "CLOSE":
				// Mark the subscription inactive and end this handler;
				// the deferred unlock runs right before the return.
				nr.Mu.Lock()
				defer nr.Mu.Unlock()
				nr.Subs[req[1].(string)] = false
				return
			case "REQ":
				subid := req[1].(string)
				nr.Mu.Lock()
				nr.Subs[subid] = true
				nr.Mu.Unlock()
				filters := req[2].(map[string]any)
				t.Logf("ws handler sub=%q, filters=%s", subid, filters)
				// Clients are expected to request exactly the one known event ID.
				if ids := filters["ids"].([]any); len(ids) != 1 || ids[0].(string) != nr.Event.ID {
					t.Errorf("ws handler REQ filter ids=%q; want [%q]", ids, []string{nr.Event.ID})
				}
				// JSON numbers decode as float64; compare with a tolerance.
				if limit := filters["limit"].(float64); math.Abs(limit-1) > 0.00001 {
					t.Errorf("ws handler REQ limit=%f; want 1", limit)
				}
				b, err := json.Marshal(nr.Event)
				if err != nil {
					t.Errorf("json.Marshal: %v", err)
					conn.Close()
					return
				}
				resp := fmt.Sprintf(`["EVENT", %q, %s]`, subid, b)
				t.Logf("ws handler resp: %s", resp)
				if err := websocket.Message.Send(conn, resp); err != nil {
					t.Errorf("ws handler REQ write: %v", err)
				}
			}
		}
	}
}
// ServeSingleEvent starts a fake in-process nostr relay over websocket that
// answers any REQ with the given event. The caller must Close the returned
// relay when done.
func ServeSingleEvent(t *testing.T, event *nostr.Event) *FakeNostrRelay {
	nr := &FakeNostrRelay{
		Event: event,
		Subs:  make(map[string]bool),
	}
	nr.HTTPServer = httptest.NewServer(&websocket.Server{
		Handshake: func(conf *websocket.Config, r *http.Request) error {
			t.Logf("new handshake from %s", r.RemoteAddr)
			return nil
		},
		Handler: nostrHandler(t, nr),
	})
	u, err := url.Parse(nr.HTTPServer.URL)
	if err != nil {
		panic(err)
	}
	nr.URL = fmt.Sprintf("ws://%s/", u.Host)
	return nr
}