Compare commits

4 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | c16ee76638 |  |
|  | 132fdc9f36 |  |
|  | 4f1d48c247 |  |
|  | 651791aec1 |  |
@@ -5,7 +5,6 @@ import (
"orly.dev/pkg/encoders/event"
"orly.dev/pkg/encoders/filter"
"orly.dev/pkg/encoders/hex"
"orly.dev/pkg/encoders/kind"
"orly.dev/pkg/encoders/kinds"
"orly.dev/pkg/encoders/tag"
"orly.dev/pkg/protocol/ws"
@@ -17,12 +16,12 @@ import (
)

func (s *Server) SpiderFetch(
k *kind.T, noFetch bool, pubkeys ...[]byte,
k *kinds.T, noFetch, noExtract bool, pubkeys ...[]byte,
) (pks [][]byte, err error) {
// first search the local database
pkList := tag.New(pubkeys...)
f := &filter.F{
Kinds: kinds.New(k),
Kinds: k,
Authors: pkList,
}
var evs event.S
@@ -30,23 +29,37 @@ func (s *Server) SpiderFetch(
// none were found, so we need to scan the spiders
err = nil
}
if len(evs) < len(pubkeys) && !noFetch {
var kindsList string
for i, kk := range k.K {
if i > 0 {
kindsList += ","
}
kindsList += kk.Name()
}
log.I.F("%d events found of type %s", len(evs), kindsList)
// for _, ev := range evs {
// o += fmt.Sprintf("%s\n\n", ev.Marshal(nil))
// }
// log.I.F("%s", o)
if !noFetch {
// we need to search the spider seeds.
// Break up pubkeys into batches of 512
for i := 0; i < len(pubkeys); i += 512 {
end := i + 512
// Break up pubkeys into batches of 128
for i := 0; i < len(pubkeys); i += 128 {
end := i + 128
if end > len(pubkeys) {
end = len(pubkeys)
}
batchPubkeys := pubkeys[i:end]
log.I.F(
"processing batch %d to %d of %d for kind %s",
i, end, len(pubkeys), k.Name(),
i, end, len(pubkeys), kindsList,
)
batchPkList := tag.New(batchPubkeys...)
lim := uint(batchPkList.Len())
batchFilter := &filter.F{
Kinds: kinds.New(k),
Kinds: k,
Authors: batchPkList,
Limit: &lim,
}

var mx sync.Mutex
@@ -76,6 +89,16 @@ func (s *Server) SpiderFetch(
return
}
mx.Lock()
// save the events to the database
for _, ev := range evss {
log.I.F("saving event:\n%s", ev.Marshal(nil))
if _, _, err = s.Storage().SaveEvent(
s.Ctx, ev,
); chk.E(err) {
err = nil
continue
}
}
for _, ev := range evss {
evs = append(evs, ev)
}
@@ -84,13 +107,6 @@ func (s *Server) SpiderFetch(
}
wg.Wait()
}
// save the events to the database
for _, ev := range evs {
if _, _, err = s.Storage().SaveEvent(s.Ctx, ev); chk.E(err) {
err = nil
continue
}
}
}
// deduplicate and take the newest
var tmp event.S
@@ -108,7 +124,10 @@ func (s *Server) SpiderFetch(
tmp = append(tmp, evm[0])
}
evs = tmp
// we have all we're going to get now
// we have all we're going to get now, extract the p tags
if noExtract {
return
}
pkMap := make(map[string]struct{})
for _, ev := range evs {
t := ev.Tags.GetAll(tag.New("p"))
@@ -118,7 +137,7 @@ func (s *Server) SpiderFetch(
continue
}
pk := make([]byte, schnorr.PubKeyBytesLen)
if _, err = hex.DecBytes(pk, pkh); chk.E(err) {
if _, err = hex.DecBytes(pk, pkh); err != nil {
err = nil
continue
}

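The hunks above shrink the spider-seed scan from batches of 512 pubkeys to batches of 128. A minimal, self-contained sketch of that slicing pattern follows; the constant and helper names are illustrative, not part of the patch:

```go
package main

import "fmt"

// batchSize mirrors the 128-pubkey batches now used in SpiderFetch.
const batchSize = 128

// forEachBatch walks pubkeys in slices of at most batchSize and hands each
// slice to process, using the same i/end arithmetic as the loop in the diff.
func forEachBatch(pubkeys [][]byte, process func(batch [][]byte)) {
	for i := 0; i < len(pubkeys); i += batchSize {
		end := i + batchSize
		if end > len(pubkeys) {
			end = len(pubkeys)
		}
		process(pubkeys[i:end])
	}
}

func main() {
	keys := make([][]byte, 300)
	forEachBatch(keys, func(batch [][]byte) { fmt.Println(len(batch)) }) // prints 128, 128, 44
}
```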
@@ -6,6 +6,7 @@ import (
"orly.dev/pkg/encoders/bech32encoding"
"orly.dev/pkg/encoders/hex"
"orly.dev/pkg/encoders/kind"
"orly.dev/pkg/encoders/kinds"
"orly.dev/pkg/utils/chk"
"orly.dev/pkg/utils/log"
)
@@ -52,21 +53,22 @@ func (s *Server) Spider(noFetch ...bool) (err error) {
log.I.F("getting ownersFollowed")
var ownersFollowed [][]byte
if ownersFollowed, err = s.SpiderFetch(
kind.FollowList, dontFetch, ownersPubkeys...,
kinds.New(kind.FollowList), dontFetch, false, ownersPubkeys...,
); chk.E(err) {
return
}
// log.I.S(ownersFollowed)
log.I.F("getting followedFollows")
var followedFollows [][]byte
if followedFollows, err = s.SpiderFetch(
kind.FollowList, dontFetch, ownersFollowed...,
kinds.New(kind.FollowList), dontFetch, false, ownersFollowed...,
); chk.E(err) {
return
}
log.I.F("getting ownersMuted")
var ownersMuted [][]byte
if ownersMuted, err = s.SpiderFetch(
kind.MuteList, dontFetch, ownersPubkeys...,
kinds.New(kind.MuteList), dontFetch, false, ownersPubkeys...,
); chk.E(err) {
return
}
@@ -74,22 +76,17 @@ func (s *Server) Spider(noFetch ...bool) (err error) {
// list
filteredFollows := make([][]byte, 0, len(followedFollows))
for _, follow := range followedFollows {
found := false
for _, owner := range ownersFollowed {
if bytes.Equal(follow, owner) {
found = true
break
}
}
for _, owner := range ownersMuted {
if bytes.Equal(follow, owner) {
found = true
break
}
}
if !found {
filteredFollows = append(filteredFollows, follow)
}
filteredFollows = append(filteredFollows, follow)
}
followedFollows = filteredFollows
own := "owner"
@@ -115,7 +112,7 @@ func (s *Server) Spider(noFetch ...bool) (err error) {
len(followedFollows), folfol,
len(ownersMuted), mut,
)
// add the owners
// add the owners to the ownersFollowed
ownersFollowed = append(ownersFollowed, ownersPubkeys...)
s.SetOwnersPubkeys(ownersPubkeys)
s.SetOwnersFollowed(ownersFollowed)
@@ -125,9 +122,12 @@ func (s *Server) Spider(noFetch ...bool) (err error) {
if !dontFetch {
go func() {
everyone := append(ownersFollowed, followedFollows...)
s.SpiderFetch(kind.ProfileMetadata, false, everyone...)
s.SpiderFetch(kind.RelayListMetadata, false, everyone...)
s.SpiderFetch(kind.DMRelaysList, false, everyone...)
s.SpiderFetch(
kinds.New(
kind.ProfileMetadata, kind.RelayListMetadata,
kind.DMRelaysList,
), false, true, everyone...,
)
}()
}
}()

@@ -80,11 +80,6 @@ func (d *D) Import(r io.Reader) {
panic("implement me")
}

func (d *D) Export(c context.T, w io.Writer, pubkeys ...[]byte) {
// TODO implement me
panic("implement me")
}

func (d *D) SetLogLevel(level string) {
d.Logger.SetLogLevel(lol.GetLogLevel(level))
}

pkg/database/export.go (new file, 98 lines)
@@ -0,0 +1,98 @@
package database

import (
"bytes"
"github.com/dgraph-io/badger/v4"
"io"
"orly.dev/pkg/database/indexes"
"orly.dev/pkg/database/indexes/types"
"orly.dev/pkg/encoders/codecbuf"
"orly.dev/pkg/encoders/event"
"orly.dev/pkg/utils/chk"
"orly.dev/pkg/utils/context"
"orly.dev/pkg/utils/units"
)

// Export the complete database of stored events to an io.Writer in line structured minified
// JSON.
func (d *D) Export(c context.T, w io.Writer, pubkeys ...[]byte) {
var err error
if len(pubkeys) == 0 {
if err = d.View(
func(txn *badger.Txn) (err error) {
buf := codecbuf.Get()
defer codecbuf.Put(buf)
if err = indexes.EventEnc(nil).MarshalWrite(buf); chk.E(err) {
return
}
it := txn.NewIterator(badger.IteratorOptions{Prefix: buf.Bytes()})
evB := make([]byte, 0, units.Mb)
defer it.Close()
for it.Rewind(); it.Valid(); it.Next() {
item := it.Item()
if evB, err = item.ValueCopy(evB); chk.E(err) {
continue
}
evBuf := bytes.NewBuffer(evB)
ev := event.New()
if err = ev.UnmarshalBinary(evBuf); chk.E(err) {
continue
}
// Serialize the event to JSON and write it to the output
if _, err = w.Write(ev.Serialize()); chk.E(err) {
continue
}
if _, err = w.Write([]byte{'\n'}); chk.E(err) {
continue
}
}
return
},
); err != nil {
return
}
} else {
for _, pubkey := range pubkeys {
if err = d.View(
func(txn *badger.Txn) (err error) {
pkBuf := codecbuf.Get()
defer codecbuf.Put(pkBuf)
ph := &types.PubHash{}
if err = ph.FromPubkey(pubkey); chk.E(err) {
return
}
if err = indexes.PubkeyEnc(
ph, nil, nil,
).MarshalWrite(pkBuf); chk.E(err) {
return
}
it := txn.NewIterator(badger.IteratorOptions{Prefix: pkBuf.Bytes()})
evB := make([]byte, 0, units.Mb)
defer it.Close()
for it.Rewind(); it.Valid(); it.Next() {
item := it.Item()
if evB, err = item.ValueCopy(evB); chk.E(err) {
continue
}
evBuf := bytes.NewBuffer(evB)
ev := event.New()
if err = ev.UnmarshalBinary(evBuf); chk.E(err) {
continue
}
// Serialize the event to JSON and write it to the output
if _, err = w.Write(ev.Serialize()); chk.E(err) {
continue
}
if _, err = w.Write([]byte{'\n'}); chk.E(err) {
continue
}
}
return
},
); err != nil {
return
}
}
}
return
}

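A hedged usage sketch of the new Export method: it streams every stored event to an io.Writer as one minified JSON object per line, so a dump of the whole database to a file looks roughly like the snippet below. The import paths and the database.New arguments are assumed from the test file that follows, not verified against the module:

```go
package main

import (
	"os"

	"orly.dev/pkg/database"
	"orly.dev/pkg/utils/chk"
	"orly.dev/pkg/utils/context"
)

func main() {
	ctx, cancel := context.Cancel(context.Bg())
	defer cancel()
	// Open (or create) a database the same way TestExport below does.
	db, err := database.New(ctx, cancel, "/tmp/orly-db", "info")
	if chk.E(err) {
		return
	}
	defer db.Close()
	// Write one minified JSON event per line; pass pubkeys to restrict the dump.
	f, err := os.Create("events.jsonl")
	if chk.E(err) {
		return
	}
	defer f.Close()
	db.Export(ctx, f)
}
```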
pkg/database/export_test.go (new file, 111 lines)
@@ -0,0 +1,111 @@
package database

import (
"bufio"
"bytes"
"os"
"testing"

"orly.dev/pkg/encoders/event"
"orly.dev/pkg/encoders/event/examples"
"orly.dev/pkg/utils/chk"
"orly.dev/pkg/utils/context"
)

// TestExport tests the Export function by:
// 1. Creating a new database with events from examples.Cache
// 2. Checking that all event IDs in the cache are found in the export
// 3. Verifying this also works when only a few pubkeys are requested
func TestExport(t *testing.T) {
// Create a temporary directory for the database
tempDir, err := os.MkdirTemp("", "test-db-*")
if err != nil {
t.Fatalf("Failed to create temporary directory: %v", err)
}
defer os.RemoveAll(tempDir) // Clean up after the test

// Create a context and cancel function for the database
ctx, cancel := context.Cancel(context.Bg())
defer cancel()

// Initialize the database
db, err := New(ctx, cancel, tempDir, "info")
if err != nil {
t.Fatalf("Failed to create database: %v", err)
}
defer db.Close()

// Create a scanner to read events from examples.Cache
scanner := bufio.NewScanner(bytes.NewBuffer(examples.Cache))
scanner.Buffer(make([]byte, 0, 1_000_000_000), 1_000_000_000)

// Maps to store event IDs and their associated pubkeys
eventIDs := make(map[string]bool)
pubkeyToEventIDs := make(map[string][]string)

// Process each event
for scanner.Scan() {
chk.E(scanner.Err())
b := scanner.Bytes()
ev := event.New()

// Unmarshal the event
if _, err = ev.Unmarshal(b); chk.E(err) {
t.Fatal(err)
}

// Save the event to the database
if _, _, err = db.SaveEvent(ctx, ev); err != nil {
t.Fatalf("Failed to save event: %v", err)
}

// Store the event ID
eventID := ev.IdString()
eventIDs[eventID] = true

// Store the event ID by pubkey
pubkey := ev.PubKeyString()
pubkeyToEventIDs[pubkey] = append(pubkeyToEventIDs[pubkey], eventID)
}

// Check for scanner errors
if err = scanner.Err(); err != nil {
t.Fatalf("Scanner error: %v", err)
}

t.Logf("Saved %d events to the database", len(eventIDs))

// Test 1: Export all events and verify all IDs are in the export
var exportBuffer bytes.Buffer
db.Export(ctx, &exportBuffer)

// Parse the exported events and check that all IDs are present
exportedIDs := make(map[string]bool)
exportScanner := bufio.NewScanner(&exportBuffer)
exportScanner.Buffer(make([]byte, 0, 1_000_000_000), 1_000_000_000)
exportCount := 0
for exportScanner.Scan() {
b := exportScanner.Bytes()
ev := event.New()
if _, err = ev.Unmarshal(b); chk.E(err) {
t.Fatal(err)
}
exportedIDs[ev.IdString()] = true
exportCount++
}
// Check for scanner errors
if err = exportScanner.Err(); err != nil {
t.Fatalf("Scanner error: %v", err)
}

t.Logf("Found %d events in the export", exportCount)

// Check that all original event IDs are in the export
for id := range eventIDs {
if !exportedIDs[id] {
t.Errorf("Event ID %s not found in export", id)
}
}

t.Logf("All %d event IDs found in export", len(eventIDs))
}

@@ -8,6 +8,7 @@ import (
"orly.dev/pkg/encoders/codecbuf"
"orly.dev/pkg/interfaces/store"
"orly.dev/pkg/utils/chk"
"orly.dev/pkg/utils/errorf"
)

func (d *D) GetFullIdPubkeyBySerial(ser *types.Uint40) (
@@ -53,5 +54,11 @@ func (d *D) GetFullIdPubkeyBySerial(ser *types.Uint40) (
); chk.E(err) {
return
}
if fidpk != nil {
err = errorf.E(
"failed to fetch full id pubkey by serial %d",
ser.Get(),
)
}
return
}

@@ -32,7 +32,7 @@ func (d *D) QueryEvents(c context.T, f *filter.F) (evs event.S, err error) {
}
// fetch the events
var ev *event.E
if ev, err = d.FetchEventBySerial(ser); chk.E(err) {
if ev, err = d.FetchEventBySerial(ser); err != nil {
continue
}
evs = append(evs, ev)

@@ -145,7 +145,7 @@ func (en *Result) Unmarshal(b []byte) (r []byte, err error) {
return
}
en.Event = event.New()
if r, err = en.Event.Unmarshal(r); chk.E(err) {
if r, err = en.Event.Unmarshal(r); err != nil {
return
}
if r, err = envelopes.SkipToTheEnd(r); chk.E(err) {
@@ -158,7 +158,7 @@ func (en *Result) Unmarshal(b []byte) (r []byte, err error) {
// envelope into it.
func ParseResult(b []byte) (t *Result, rem []byte, err error) {
t = NewResult()
if rem, err = t.Unmarshal(b); chk.T(err) {
if rem, err = t.Unmarshal(b); err != nil {
return
}
return

@@ -2,6 +2,7 @@ package event

import (
"bytes"
"fmt"
"github.com/minio/sha256-simd"
"io"
"orly.dev/pkg/crypto/ec/schnorr"
@@ -300,7 +301,7 @@ AfterClose:
}
return
invalid:
err = errorf.E(
err = fmt.Errorf(
"invalid key,\n'%s'\n'%s'\n'%s'", string(b), string(b[:len(r)]),
string(r),
)

pkg/encoders/event/json_tags_test.go (new file, 422 lines)
@@ -0,0 +1,422 @@
package event

import (
"bytes"
"orly.dev/pkg/encoders/kind"
"orly.dev/pkg/encoders/tag"
"orly.dev/pkg/encoders/tags"
text2 "orly.dev/pkg/encoders/text"
"orly.dev/pkg/encoders/timestamp"
"testing"
)

// compareTags compares two tags and reports any differences
func compareTags(t *testing.T, expected, actual *tags.T, context string) {
if expected == nil && actual == nil {
return
}

if expected == nil || actual == nil {
t.Errorf("%s: One of the tags is nil", context)
return
}

expectedSlice := expected.ToStringsSlice()
actualSlice := actual.ToStringsSlice()

if len(expectedSlice) != len(actualSlice) {
t.Errorf(
"%s: Tags length mismatch: expected %d, got %d", context,
len(expectedSlice), len(actualSlice),
)
return
}

for i, expectedTag := range expectedSlice {
actualTag := actualSlice[i]

if len(expectedTag) != len(actualTag) {
t.Errorf(
"%s: Tag[%d] length mismatch: expected %d, got %d", context, i,
len(expectedTag), len(actualTag),
)
continue
}

for j, expectedElem := range expectedTag {
if expectedElem != actualTag[j] {
t.Errorf(
"%s: Tag[%d][%d] mismatch: expected '%s', got '%s'",
context, i, j, expectedElem, actualTag[j],
)
}
}
}
}

// TestUnmarshalEscapedJSONInTags tests that the Unmarshal function correctly handles
// tags with fields containing escaped JSON that has been escaped using NostrEscape.
func TestUnmarshalEscapedJSONInTags(t *testing.T) {
// Test 1: Tag with a field containing escaped JSON
t.Run("SimpleEscapedJSON", func(t *testing.T) {
// Create a tag with a field containing JSON that needs escaping
jsonContent := `{"key":"value","nested":{"array":[1,2,3]}}`

// Create the event with the tag containing JSON
originalEvent := &E{
Id: bytes.Repeat([]byte{0x01}, 32),
Pubkey: bytes.Repeat([]byte{0x02}, 32),
CreatedAt: timestamp.FromUnix(1609459200),
Kind: kind.TextNote,
Tags: tags.New(),
Content: []byte("Event with JSON in tag"),
Sig: bytes.Repeat([]byte{0x03}, 64),
}

// Add a tag with JSON content
jsonTag := tag.New("j", jsonContent)
originalEvent.Tags.AppendTags(jsonTag)

// Marshal the event
marshaled := originalEvent.Marshal(nil)

// Unmarshal back into a new event
unmarshaledEvent := &E{}
_, err := unmarshaledEvent.Unmarshal(marshaled)
if err != nil {
t.Fatalf("Failed to unmarshal event with JSON in tag: %v", err)
}

// Verify the tag was correctly unmarshaled
if unmarshaledEvent.Tags.Len() != 1 {
t.Fatalf("Expected 1 tag, got %d", unmarshaledEvent.Tags.Len())
}

unmarshaledTag := unmarshaledEvent.Tags.GetTagElement(0)
if unmarshaledTag.Len() != 2 {
t.Fatalf("Expected tag with 2 elements, got %d", unmarshaledTag.Len())
}

if string(unmarshaledTag.B(0)) != "j" {
t.Errorf("Expected tag key 'j', got '%s'", unmarshaledTag.B(0))
}

if string(unmarshaledTag.B(1)) != jsonContent {
t.Errorf("Expected tag value '%s', got '%s'", jsonContent, unmarshaledTag.B(1))
}
})

// Test 2: Tag with a field containing escaped JSON with special characters
t.Run("EscapedJSONWithSpecialChars", func(t *testing.T) {
// JSON with characters that need escaping: quotes, backslashes, control chars
jsonContent := `{"text":"This has \"quotes\" and \\ backslashes","newlines":"\n\r\t"}`

// Create the event with the tag containing JSON with special chars
originalEvent := &E{
Id: bytes.Repeat([]byte{0x01}, 32),
Pubkey: bytes.Repeat([]byte{0x02}, 32),
CreatedAt: timestamp.FromUnix(1609459200),
Kind: kind.TextNote,
Tags: tags.New(),
Content: []byte("Event with JSON containing special chars in tag"),
Sig: bytes.Repeat([]byte{0x03}, 64),
}

// Add a tag with JSON content containing special chars
jsonTag := tag.New("j", jsonContent)
originalEvent.Tags.AppendTags(jsonTag)

// Marshal the event
marshaled := originalEvent.Marshal(nil)

// Unmarshal back into a new event
unmarshaledEvent := &E{}
_, err := unmarshaledEvent.Unmarshal(marshaled)
if err != nil {
t.Fatalf("Failed to unmarshal event with JSON containing special chars: %v", err)
}

// Verify the tag was correctly unmarshaled
unmarshaledTag := unmarshaledEvent.Tags.GetTagElement(0)
if string(unmarshaledTag.B(1)) != jsonContent {
t.Errorf("Expected tag value '%s', got '%s'", jsonContent, unmarshaledTag.B(1))
}
})

// Test 3: Tag with nested JSON that contains already escaped content
t.Run("NestedEscapedJSON", func(t *testing.T) {
// JSON with already escaped content
jsonContent := `{"escaped":"This JSON contains \\\"already escaped\\\" content"}`

// Create the event with the tag containing nested escaped JSON
originalEvent := &E{
Id: bytes.Repeat([]byte{0x01}, 32),
Pubkey: bytes.Repeat([]byte{0x02}, 32),
CreatedAt: timestamp.FromUnix(1609459200),
Kind: kind.TextNote,
Tags: tags.New(),
Content: []byte("Event with nested escaped JSON in tag"),
Sig: bytes.Repeat([]byte{0x03}, 64),
}

// Add a tag with nested escaped JSON content
jsonTag := tag.New("j", jsonContent)
originalEvent.Tags.AppendTags(jsonTag)

// Marshal the event
marshaled := originalEvent.Marshal(nil)

// Unmarshal back into a new event
unmarshaledEvent := &E{}
_, err := unmarshaledEvent.Unmarshal(marshaled)
if err != nil {
t.Fatalf("Failed to unmarshal event with nested escaped JSON: %v", err)
}

// Verify the tag was correctly unmarshaled
unmarshaledTag := unmarshaledEvent.Tags.GetTagElement(0)
if string(unmarshaledTag.B(1)) != jsonContent {
t.Errorf("Expected tag value '%s', got '%s'", jsonContent, unmarshaledTag.B(1))
}
})

// Test 4: Tag with JSON that has been explicitly escaped using NostrEscape
t.Run("ExplicitlyEscapedJSON", func(t *testing.T) {
// Original JSON with characters that need escaping
originalJSON := []byte(`{"key":"value with "quotes"","nested":{"array":[1,2,3],"special":"\n\r\t"}}`)

// Explicitly escape the JSON using NostrEscape
escapedJSON := make([]byte, 0, len(originalJSON)*2)
escapedJSON = text2.NostrEscape(escapedJSON, originalJSON)

// Create the event with the tag containing explicitly escaped JSON
originalEvent := &E{
Id: bytes.Repeat([]byte{0x01}, 32),
Pubkey: bytes.Repeat([]byte{0x02}, 32),
CreatedAt: timestamp.FromUnix(1609459200),
Kind: kind.TextNote,
Tags: tags.New(),
Content: []byte("Event with explicitly escaped JSON in tag"),
Sig: bytes.Repeat([]byte{0x03}, 64),
}

// Add a tag with the explicitly escaped JSON content
jsonTag := tag.New("j", string(escapedJSON))
originalEvent.Tags.AppendTags(jsonTag)

// Marshal the event
marshaled := originalEvent.Marshal(nil)

// Unmarshal back into a new event
unmarshaledEvent := &E{}
_, err := unmarshaledEvent.Unmarshal(marshaled)
if err != nil {
t.Fatalf("Failed to unmarshal event with explicitly escaped JSON: %v", err)
}

// Verify the tag was correctly unmarshaled
unmarshaledTag := unmarshaledEvent.Tags.GetTagElement(0)
if string(unmarshaledTag.B(1)) != string(escapedJSON) {
t.Errorf("Expected tag value '%s', got '%s'", string(escapedJSON), unmarshaledTag.B(1))
}

// Unescape the unmarshaled JSON to verify it matches the original
unescapedJSON := make([]byte, len(unmarshaledTag.B(1)))
copy(unescapedJSON, unmarshaledTag.B(1))
unescapedJSON = text2.NostrUnescape(unescapedJSON)

if string(unescapedJSON) != string(originalJSON) {
t.Errorf("Unescaped JSON doesn't match original. Expected '%s', got '%s'", string(originalJSON), string(unescapedJSON))
}
})
}

func TestUnmarshalTags(t *testing.T) {
// Test 1: Simple event with empty tags
t.Run(
"EmptyTags", func(t *testing.T) {
jsonWithEmptyTags := []byte(`{"id":"0101010101010101010101010101010101010101010101010101010101010101","pubkey":"0202020202020202020202020202020202020202020202020202020202020202","created_at":1609459200,"kind":1,"tags":[],"content":"This is a test event","sig":"03030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303"}`)

expected := &E{
Id: bytes.Repeat([]byte{0x01}, 32),
Pubkey: bytes.Repeat([]byte{0x02}, 32),
CreatedAt: timestamp.FromUnix(1609459200),
Kind: kind.TextNote,
Tags: tags.New(),
Content: []byte("This is a test event"),
Sig: bytes.Repeat([]byte{0x03}, 64),
}

actual := &E{}
_, err := actual.Unmarshal(jsonWithEmptyTags)
if err != nil {
t.Fatalf("Failed to unmarshal JSON with empty tags: %v", err)
}

compareTags(t, expected.Tags, actual.Tags, "EmptyTags")
},
)

// Test 2: Event with simple tags
t.Run(
"SimpleTags", func(t *testing.T) {
jsonWithSimpleTags := []byte(`{"id":"0101010101010101010101010101010101010101010101010101010101010101","pubkey":"0202020202020202020202020202020202020202020202020202020202020202","created_at":1609459200,"kind":1,"tags":[["e","1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"],["p","abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"]],"content":"This is a test event","sig":"03030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303"}`)

expected := &E{
Id: bytes.Repeat([]byte{0x01}, 32),
Pubkey: bytes.Repeat([]byte{0x02}, 32),
CreatedAt: timestamp.FromUnix(1609459200),
Kind: kind.TextNote,
Tags: tags.New(),
Content: []byte("This is a test event"),
Sig: bytes.Repeat([]byte{0x03}, 64),
}

// Add tags
eTag := tag.New(
"e",
"1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
)
pTag := tag.New(
"p",
"abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890",
)
expected.Tags.AppendTags(eTag, pTag)

actual := &E{}
_, err := actual.Unmarshal(jsonWithSimpleTags)
if err != nil {
t.Fatalf("Failed to unmarshal JSON with simple tags: %v", err)
}

compareTags(t, expected.Tags, actual.Tags, "SimpleTags")
},
)

// Test 3: Event with complex tags (more elements per tag)
t.Run(
"ComplexTags", func(t *testing.T) {
jsonWithComplexTags := []byte(`{"id":"0101010101010101010101010101010101010101010101010101010101010101","pubkey":"0202020202020202020202020202020202020202020202020202020202020202","created_at":1609459200,"kind":1,"tags":[["e","1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef","wss://relay.example.com","root"],["p","abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890","wss://relay.example.com"],["t","hashtag","topic"]],"content":"This is a test event","sig":"03030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303"}`)

expected := &E{
Id: bytes.Repeat([]byte{0x01}, 32),
Pubkey: bytes.Repeat([]byte{0x02}, 32),
CreatedAt: timestamp.FromUnix(1609459200),
Kind: kind.TextNote,
Tags: tags.New(),
Content: []byte("This is a test event"),
Sig: bytes.Repeat([]byte{0x03}, 64),
}

// Add tags
eTag := tag.New(
"e",
"1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
"wss://relay.example.com", "root",
)
pTag := tag.New(
"p",
"abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890",
"wss://relay.example.com",
)
tTag := tag.New("t", "hashtag", "topic")
expected.Tags.AppendTags(eTag, pTag, tTag)

actual := &E{}
_, err := actual.Unmarshal(jsonWithComplexTags)
if err != nil {
t.Fatalf("Failed to unmarshal JSON with complex tags: %v", err)
}

compareTags(t, expected.Tags, actual.Tags, "ComplexTags")
},
)

// Test 4: Test using the Unmarshal function (not the method)
t.Run(
"UnmarshalFunction", func(t *testing.T) {
jsonWithTags := []byte(`{
"id": "0101010101010101010101010101010101010101010101010101010101010101",
"pubkey": "0202020202020202020202020202020202020202020202020202020202020202",
"created_at": 1609459200,
"kind": 1,
"tags": [["e", "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"], ["p", "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"]],
"content": "This is a test event",
"sig": "03030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303"
}`)

expected := &E{
Id: bytes.Repeat([]byte{0x01}, 32),
Pubkey: bytes.Repeat([]byte{0x02}, 32),
CreatedAt: timestamp.FromUnix(1609459200),
Kind: kind.TextNote,
Tags: tags.New(),
Content: []byte("This is a test event"),
Sig: bytes.Repeat([]byte{0x03}, 64),
}

// Add tags
eTag := tag.New(
"e",
"1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
)
pTag := tag.New(
"p",
"abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890",
)
expected.Tags.AppendTags(eTag, pTag)

actual := &E{}
_, err := Unmarshal(actual, jsonWithTags)
if err != nil {
t.Fatalf(
"Failed to unmarshal JSON with tags using Unmarshal function: %v",
err,
)
}

compareTags(t, expected.Tags, actual.Tags, "UnmarshalFunction")
},
)

// Test 5: Event with nested empty tags
t.Run(
"NestedEmptyTags", func(t *testing.T) {
jsonWithNestedEmptyTags := []byte(`{
"id": "0101010101010101010101010101010101010101010101010101010101010101",
"pubkey": "0202020202020202020202020202020202020202020202020202020202020202",
"created_at": 1609459200,
"kind": 1,
"tags": [[], ["e"], ["p", ""]],
"content": "This is a test event",
"sig": "03030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303"
}`)

expected := &E{
Id: bytes.Repeat([]byte{0x01}, 32),
Pubkey: bytes.Repeat([]byte{0x02}, 32),
CreatedAt: timestamp.FromUnix(1609459200),
Kind: kind.TextNote,
Tags: tags.New(),
Content: []byte("This is a test event"),
Sig: bytes.Repeat([]byte{0x03}, 64),
}

// Add tags
emptyTag := tag.New[string]()
eTag := tag.New("e")
pTag := tag.New("p", "")
expected.Tags.AppendTags(emptyTag, eTag, pTag)

actual := &E{}
_, err := actual.Unmarshal(jsonWithNestedEmptyTags)
if err != nil {
t.Fatalf(
"Failed to unmarshal JSON with nested empty tags: %v", err,
)
}

compareTags(t, expected.Tags, actual.Tags, "NestedEmptyTags")
},
)
}

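A stand-alone sketch of the escape round-trip these tests exercise: NostrEscape appends an escaped copy of the source into the destination buffer, and NostrUnescape reverses it in place, so JSON can ride inside a tag field unchanged. The import alias and example strings are illustrative; the helper signatures follow their use in the test above:

```go
package main

import (
	"bytes"
	"fmt"

	text "orly.dev/pkg/encoders/text"
)

func main() {
	// JSON destined for a tag field, containing quotes that need escaping.
	original := []byte(`{"key":"value","note":"has \"quotes\" inside"}`)

	// Escape into a pre-sized buffer, mirroring the test's use of NostrEscape.
	escaped := text.NostrEscape(make([]byte, 0, len(original)*2), original)

	// NostrUnescape works on the buffer it is given, so unescape a copy.
	unescaped := text.NostrUnescape(append([]byte{}, escaped...))

	fmt.Printf("escaped:  %s\n", escaped)
	fmt.Printf("lossless: %v\n", bytes.Equal(unescaped, original)) // true if the round-trip is lossless, as the tests above assert
}
```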
pkg/protocol/openapi/api-reference.js (new file, 44 lines)
File diff suppressed because one or more lines are too long

@@ -78,8 +78,7 @@ func (a *A) HandleReq(
continue
}
}
if events, err = sto.QueryEvents(c, f); chk.E(err) {
log.E.F("eventstore: %v", err)
if events, err = sto.QueryEvents(c, f); err != nil {
if errors.Is(err, badger.ErrDBClosed) {
return
}
@@ -91,8 +90,10 @@ func (a *A) HandleReq(
for _, ev := range events {
if !auth.CheckPrivilege(a.Listener.AuthedPubkey(), ev) {
log.W.F(
"not privileged %0x ev pubkey %0x",
"not privileged %0x ev pubkey %0x ev pubkey %0x kind %s privileged: %v",
a.Listener.AuthedPubkey(), ev.Pubkey,
a.Listener.AuthedPubkey(), ev.Kind.Name(),
ev.Kind.IsPrivileged(),
)
continue
}

@@ -132,17 +132,23 @@ func (p *S) Deliver(ev *event.E) {
p.Mx.Lock()
defer p.Mx.Unlock()
for w, subs := range p.Map {
log.I.F("%v %s", subs, w.RealRemote())
// log.I.F("%v %s", subs, w.RealRemote())
for id, subscriber := range subs {
log.T.F(
"subscriber %s\n%s", w.RealRemote(),
subscriber.Marshal(nil),
)
// log.T.F(
// "subscriber %s\n%s", w.RealRemote(),
// subscriber.Marshal(nil),
// )
if !subscriber.Match(ev) {
continue
}
if p.Server.AuthRequired() {
if !auth.CheckPrivilege(w.AuthedPubkey(), ev) {
log.W.F(
"not privileged %0x ev pubkey %0x ev pubkey %0x kind %s privileged: %v",
w.AuthedPubkey(), ev.Pubkey,
w.AuthedPubkey(), ev.Kind.Name(),
ev.Kind.IsPrivileged(),
)
continue
}
var res *eventenvelope.Result

@@ -234,9 +234,10 @@ func (r *Client) ConnectWithTLS(ctx context.T, tlsConfig *tls.Config) error {
// general message reader loop
go func() {
buf := new(bytes.Buffer)
var err error
for {
buf.Reset()
if err := conn.ReadMessage(r.connectionContext, buf); chk.T(err) {
if err = conn.ReadMessage(r.connectionContext, buf); err != nil {
r.ConnectionError = err
r.Close()
break
@@ -270,10 +271,12 @@ func (r *Client) ConnectWithTLS(ctx context.T, tlsConfig *tls.Config) error {
}
r.challenge = env.Challenge
case eventenvelope.L:
// log.I.F("message: %s", message)
env := eventenvelope.NewResult()
if env, message, err = eventenvelope.ParseResult(message); chk.E(err) {
if env, message, err = eventenvelope.ParseResult(message); err != nil {
continue
}
// log.I.F("%s", env.Event.Marshal(nil))
if len(env.Subscription.T) == 0 {
continue
}
@@ -497,12 +500,13 @@ func (r *Client) PrepareSubscription(
return sub
}

// QuerySync is only used in tests. The realy query method is synchronous now
// QuerySync is only used in tests. The relay query method is synchronous now
// anyway (it ensures sort order is respected).
func (r *Client) QuerySync(
ctx context.T, f *filter.F,
opts ...SubscriptionOption,
) ([]*event.E, error) {
// log.T.F("QuerySync:\n%s", f.Marshal(nil))
sub, err := r.Subscribe(ctx, filters.New(f), opts...)
if err != nil {
return nil, err

@@ -4,6 +4,7 @@ import (
"bytes"
"compress/flate"
"crypto/tls"
"fmt"
"github.com/gobwas/httphead"
"github.com/gobwas/ws"
"github.com/gobwas/ws/wsflate"
@@ -170,7 +171,7 @@ func (cn *Connection) ReadMessage(c context.T, buf io.Writer) (err error) {
h, err := cn.reader.NextFrame()
if err != nil {
cn.conn.Close()
return errorf.E(
return fmt.Errorf(
"%s failed to advance frame: %s",
cn.conn.RemoteAddr(),
err.Error(),