Add reqenvelope package with marshal/unmarshal support and tests, implement GenFilters utility, and improve filter-related methods for robustness
pkg/encoders/envelopes/reqenvelope/reqenvelope.go (new file, 111 lines)
@@ -0,0 +1,111 @@
// Package reqenvelope implements the REQ message sent from a client to a
// relay, carrying a subscription identifier and an array of filters to search
// for events.
package reqenvelope

import (
	"io"

	"lol.mleku.dev/chk"
	"next.orly.dev/pkg/encoders/envelopes"
	"next.orly.dev/pkg/encoders/filter"
	"next.orly.dev/pkg/encoders/text"
	"next.orly.dev/pkg/interfaces/codec"
)

// L is the label associated with this type of codec.Envelope.
const L = "REQ"

// T is a filter/subscription request envelope that can contain multiple
// filters. These prompt the relay to search its event store and return all
// matching events; if the limit is unset or large enough, the relay continues
// to return newly received events after it sends an eoseenvelope.T.
type T struct {
	Subscription []byte
	Filters      filter.S
}

var _ codec.Envelope = (*T)(nil)

// New creates a new reqenvelope.T with a standard subscription.Id and empty
// filters.T.
func New() *T { return new(T) }

// NewFrom creates a new reqenvelope.T with a provided subscription.Id and
// filters.T.
func NewFrom(id []byte, ff filter.S) *T {
	return &T{
		Subscription: id,
		Filters:      ff,
	}
}

// NewWithId creates a new reqenvelope.T from a subscription id given as either
// a string or a byte slice, together with a set of filters.
func NewWithId[V string | []byte](id V, ff filter.S) (sub *T) {
	return &T{
		Subscription: []byte(id),
		Filters:      ff,
	}
}

// Label returns the label of a reqenvelope.T.
func (en *T) Label() string { return L }

// Write the REQ T to a provided io.Writer.
func (en *T) Write(w io.Writer) (err error) {
	_, err = w.Write(en.Marshal(nil))
	return
}

// Marshal a reqenvelope.T envelope into minified JSON, appending to a provided
// destination slice. Note that this ensures correct string escaping on the
// subscription.Id field.
func (en *T) Marshal(dst []byte) (b []byte) {
	var err error
	_ = err
	b = dst
	b = envelopes.Marshal(
		b, L,
		func(bst []byte) (o []byte) {
			o = bst
			o = append(o, '"')
			o = append(o, en.Subscription...)
			o = append(o, '"')
			for _, f := range en.Filters {
				o = append(o, ',')
				o = f.Marshal(o)
			}
			return
		},
	)
	return
}

// Unmarshal into a reqenvelope.T from minified JSON, returning the remainder
// after the end of the envelope. Note that this ensures the subscription.Id
// string is correctly unescaped by NIP-01 escaping rules.
func (en *T) Unmarshal(b []byte) (r []byte, err error) {
	r = b
	if en.Subscription, r, err = text.UnmarshalQuoted(r); chk.E(err) {
		return
	}

	if r, err = text.Comma(r); chk.E(err) {
		return
	}
	if r, err = en.Filters.Unmarshal(r); chk.E(err) {
		return
	}
	if r, err = envelopes.SkipToTheEnd(r); chk.E(err) {
		return
	}
	return
}

// Parse reads a REQ envelope from minified JSON into a newly allocated
// reqenvelope.T.
func (en *T) Parse(b []byte) (t *T, rem []byte, err error) {
	t = New()
	if rem, err = t.Unmarshal(b); chk.E(err) {
		return
	}
	return
}
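As a rough illustration of how the new envelope might be used: the sketch below builds a REQ with a single empty filter and prints its wire form. The main wrapper, the fmt output, and the "example-sub" subscription id are illustrative assumptions, not part of this commit; only NewWithId, filter.New, filter.S and Marshal come from the code above.

package main

import (
	"fmt"

	"next.orly.dev/pkg/encoders/filter"
	"next.orly.dev/pkg/encoders/envelopes/reqenvelope"
)

func main() {
	// "example-sub" is an illustrative subscription id.
	req := reqenvelope.NewWithId("example-sub", filter.S{filter.New()})
	// Marshal appends to the provided slice; passing nil allocates a new one.
	b := req.Marshal(nil)
	// Prints something like: ["REQ","example-sub",{...}] (NIP-01 REQ shape).
	fmt.Printf("%s\n", b)
}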
pkg/encoders/envelopes/reqenvelope/reqenvelope_test.go (new file, 69 lines)
@@ -0,0 +1,69 @@
package reqenvelope

import (
	"testing"

	"lol.mleku.dev/chk"
	"next.orly.dev/pkg/encoders/envelopes"
	"next.orly.dev/pkg/encoders/filter"
	"next.orly.dev/pkg/utils"
)

func TestMarshalUnmarshal(t *testing.T) {
	var err error
	rb, rb1, rb2 := make([]byte, 0, 65535), make([]byte, 0, 65535), make(
		[]byte, 0, 65535,
	)
	for i := range 1000 {
		var f filter.S
		if f, err = filter.GenFilters(); chk.E(err) {
			t.Fatal(err)
		}
		s := utils.NewSubscription(i)
		req := NewFrom(s, f)
		rb = req.Marshal(rb)
		rb1 = rb1[:len(rb)]
		copy(rb1, rb)
		var rem []byte
		var l string
		if l, rb, err = envelopes.Identify(rb); chk.E(err) {
			t.Fatal(err)
		}
		if l != L {
			t.Fatalf("invalid sentinel %s, expect %s", l, L)
		}
		req2 := New()
		if rem, err = req2.Unmarshal(rb); chk.E(err) {
			t.Fatal(err)
		}
		if len(rem) > 0 {
			t.Fatalf(
				"unmarshal failed, remainder\n%d %s",
				len(rem), rem,
			)
		}
		rb2 = req2.Marshal(rb2)
		if !utils.FastEqual(rb1, rb2) {
			if len(rb1) != len(rb2) {
				t.Fatalf(
					"unmarshal failed, different lengths\n%d %s\n%d %s\n",
					len(rb1), rb1, len(rb2), rb2,
				)
			}
			for i := range rb1 {
				if rb1[i] != rb2[i] {
					t.Fatalf(
						"unmarshal failed, difference at position %d\n%d %s\n%s\n%d %s\n%s\n",
						i, len(rb1), rb1[:i], rb1[i:], len(rb2), rb2[:i],
						rb2[i:],
					)
				}
			}
			t.Fatalf(
				"unmarshal failed\n%d %s\n%d %s\n",
				len(rb1), rb1, len(rb2), rb2,
			)
		}
		rb, rb1, rb2 = rb[:0], rb1[:0], rb2[:0]
	}
}
@@ -282,21 +282,6 @@ func (f *F) Unmarshal(b []byte) (r []byte, err error) {
		}
		k := make([]byte, len(key))
		copy(k, key)
		// switch key[1] {
		// case 'e', 'p':
		// 	// the tags must all be 64 character hexadecimal
		// 	var ff [][]byte
		// 	if ff, r, err = text2.UnmarshalHexArray(
		// 		r,
		// 		sha256.Size,
		// 	); chk.E(err) {
		// 		return
		// 	}
		// 	ff = append([][]byte{k}, ff...)
		// 	f.Tags = f.Tags.AppendTags(tag.FromBytesSlice(ff...))
		// 	// f.Tags.F = append(f.Tags.F, tag.New(ff...))
		// default:
		// other types of tags can be anything
		var ff [][]byte
		if ff, r, err = text.UnmarshalStringArray(r); chk.E(err) {
			return
@@ -1,21 +1,10 @@
package filter

import (
	"math"
	"testing"

	"lukechampine.com/frand"
	"next.orly.dev/pkg/crypto/ec/schnorr"
	"next.orly.dev/pkg/crypto/ec/secp256k1"
	"next.orly.dev/pkg/crypto/sha256"
	"next.orly.dev/pkg/encoders/hex"
	"next.orly.dev/pkg/encoders/kind"
	"next.orly.dev/pkg/encoders/tag"
	"next.orly.dev/pkg/encoders/timestamp"
	"next.orly.dev/pkg/utils"
	"next.orly.dev/pkg/utils/values"

	"lol.mleku.dev/chk"
	"next.orly.dev/pkg/utils"
)

func TestT_MarshalUnmarshal(t *testing.T) {
@@ -44,59 +33,3 @@ func TestT_MarshalUnmarshal(t *testing.T) {
		dst, dst1, dst2 = dst[:0], dst1[:0], dst2[:0]
	}
}

// GenFilter is a testing tool to create random arbitrary filters for tests.
func GenFilter() (f *F, err error) {
	f = New()
	n := frand.Intn(16)
	for _ = range n {
		id := make([]byte, sha256.Size)
		frand.Read(id)
		f.Ids.T = append(f.Ids.T, id)
		// f.Ids.Field = append(f.Ids.Field, id)
	}
	n = frand.Intn(16)
	for _ = range n {
		f.Kinds.K = append(f.Kinds.K, kind.New(frand.Intn(math.MaxUint16)))
	}
	n = frand.Intn(16)
	for _ = range n {
		var sk *secp256k1.SecretKey
		if sk, err = secp256k1.GenerateSecretKey(); chk.E(err) {
			return
		}
		pk := sk.PubKey()
		f.Authors.T = append(f.Authors.T, schnorr.SerializePubKey(pk))
		// f.Authors.Field = append(f.Authors.Field, schnorr.SerializePubKey(pk))
	}
	a := frand.Intn(16)
	if a < n {
		n = a
	}
	for i := range n {
		p := make([]byte, 0, schnorr.PubKeyBytesLen*2)
		p = hex.EncAppend(p, f.Authors.T[i])
	}
	for b := 'a'; b <= 'z'; b++ {
		l := frand.Intn(6)
		var idb [][]byte
		for range l {
			bb := make([]byte, frand.Intn(31)+1)
			frand.Read(bb)
			id := make([]byte, 0, len(bb)*2)
			id = hex.EncAppend(id, bb)
			idb = append(idb, id)
		}
		idb = append([][]byte{{'#', byte(b)}}, idb...)
		*f.Tags = append(*f.Tags, tag.NewFromByteSlice(idb...))
		// f.Tags.F = append(f.Tags.F, tag.FromBytesSlice(idb...))
	}
	tn := int(timestamp.Now().I64())
	f.Since = &timestamp.T{int64(tn - frand.Intn(10000))}
	f.Until = timestamp.Now()
	if frand.Intn(10) > 5 {
		f.Limit = values.ToUintPointer(uint(frand.Intn(1000)))
	}
	f.Search = []byte("token search text")
	return
}
pkg/encoders/filter/filters.go (new file, 73 lines)
@@ -0,0 +1,73 @@
package filter

import (
	"lol.mleku.dev/errorf"
)

// S is a slice of filters, as carried in a REQ envelope.
type S []*F

// Marshal encodes a slice of filters as a JSON array of objects.
// It appends the result to dst and returns the resulting slice.
func (s S) Marshal(dst []byte) (b []byte) {
	b = dst
	b = append(b, '[')
	first := false
	for _, f := range s {
		if f == nil {
			continue
		}
		// write a separating comma before every filter after the first
		if first {
			b = append(b, ',')
		} else {
			first = true
		}
		b = f.Marshal(b)
	}
	b = append(b, ']')
	return
}

// Unmarshal decodes one or more filters from JSON.
func (s *S) Unmarshal(b []byte) (r []byte, err error) {
	r = b
	if len(r) == 0 {
		return
	}
	r = r[1:]
	// Handle empty array "[]"
	if len(r) > 0 && r[0] == ']' {
		r = r[1:]
		return
	}
	for {
		if len(r) == 0 {
			return
		}
		f := new(F)
		var rem []byte
		if rem, err = f.Unmarshal(r); err != nil {
			return
		}
		*s = append(*s, f)
		r = rem
		if len(r) == 0 {
			return
		}
		if r[0] == ',' {
			// Next element in the array
			r = r[1:]
			continue
		}
		if r[0] == ']' {
			// End of the enclosed array; consume and return
			r = r[1:]
			return
		}
		// Unexpected token
		err = errorf.E(
			"filters.Unmarshal: expected ',' or ']' after filter, got: %q",
			r[0],
		)
		return
	}
}
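A small sketch of the nil-skipping behaviour in S.Marshal: nil entries are dropped rather than emitted as "null", so an empty or all-nil slice renders as an empty array. The main wrapper and fmt output are illustrative; the import path is taken from this commit's layout.

package main

import (
	"fmt"

	"next.orly.dev/pkg/encoders/filter"
)

func main() {
	// An empty filter.S marshals to an empty JSON array.
	var s filter.S
	fmt.Printf("%s\n", s.Marshal(nil)) // []
	// A slice containing only nil entries also marshals to [].
	s = filter.S{nil}
	fmt.Printf("%s\n", s.Marshal(nil)) // []
}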
pkg/encoders/filter/gen.go (new file, 83 lines)
@@ -0,0 +1,83 @@
package filter

import (
	"math"

	"lol.mleku.dev/chk"
	"lukechampine.com/frand"
	"next.orly.dev/pkg/crypto/ec/schnorr"
	"next.orly.dev/pkg/crypto/ec/secp256k1"
	"next.orly.dev/pkg/crypto/sha256"
	"next.orly.dev/pkg/encoders/hex"
	"next.orly.dev/pkg/encoders/kind"
	"next.orly.dev/pkg/encoders/tag"
	"next.orly.dev/pkg/encoders/timestamp"
	"next.orly.dev/pkg/utils/values"
)

// GenFilter is a testing tool to create random arbitrary filters for tests.
func GenFilter() (f *F, err error) {
	f = New()
	n := frand.Intn(16)
	for _ = range n {
		id := make([]byte, sha256.Size)
		frand.Read(id)
		f.Ids.T = append(f.Ids.T, id)
		// f.Ids.Field = append(f.Ids.Field, id)
	}
	n = frand.Intn(16)
	for _ = range n {
		f.Kinds.K = append(f.Kinds.K, kind.New(frand.Intn(math.MaxUint16)))
	}
	n = frand.Intn(16)
	for _ = range n {
		var sk *secp256k1.SecretKey
		if sk, err = secp256k1.GenerateSecretKey(); chk.E(err) {
			return
		}
		pk := sk.PubKey()
		f.Authors.T = append(f.Authors.T, schnorr.SerializePubKey(pk))
		// f.Authors.Field = append(f.Authors.Field, schnorr.SerializePubKey(pk))
	}
	a := frand.Intn(16)
	if a < n {
		n = a
	}
	for i := range n {
		p := make([]byte, 0, schnorr.PubKeyBytesLen*2)
		p = hex.EncAppend(p, f.Authors.T[i])
	}
	for b := 'a'; b <= 'z'; b++ {
		l := frand.Intn(6)
		var idb [][]byte
		for range l {
			bb := make([]byte, frand.Intn(31)+1)
			frand.Read(bb)
			id := make([]byte, 0, len(bb)*2)
			id = hex.EncAppend(id, bb)
			idb = append(idb, id)
		}
		idb = append([][]byte{{'#', byte(b)}}, idb...)
		*f.Tags = append(*f.Tags, tag.NewFromByteSlice(idb...))
		// f.Tags.F = append(f.Tags.F, tag.FromBytesSlice(idb...))
	}
	tn := int(timestamp.Now().I64())
	f.Since = &timestamp.T{int64(tn - frand.Intn(10000))}
	f.Until = timestamp.Now()
	if frand.Intn(10) > 5 {
		f.Limit = values.ToUintPointer(uint(frand.Intn(1000)))
	}
	f.Search = []byte("token search text")
	return
}

// GenFilters is a testing tool that creates a small random set of filters.
func GenFilters() (s S, err error) {
	n := frand.Intn(5) + 1
	for _ = range n {
		var f *F
		if f, err = GenFilter(); chk.E(err) {
			return
		}
		s = append(s, f)
	}
	return
}
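A minimal sketch of using GenFilters outside the tests in this commit, for eyeballing what the generated fuzz inputs look like. The main wrapper, fmt output, and panic handling are illustrative assumptions; only GenFilters and S.Marshal come from the code above.

package main

import (
	"fmt"

	"next.orly.dev/pkg/encoders/filter"
)

func main() {
	// Generate between one and five random filters and print their JSON form.
	s, err := filter.GenFilters()
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s\n", s.Marshal(nil))
}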
@@ -54,7 +54,12 @@ func (t *T) Free() {
	t.T = nil
}

func (t *T) Len() int { return len(t.T) }
func (t *T) Len() int {
	if t == nil {
		return 0
	}
	return len(t.T)
}

func (t *T) Less(i, j int) bool {
	return bytes.Compare(t.T[i], t.T[j]) < 0
@@ -24,6 +24,9 @@ func NewSWithCap(c int) (s *S) {
}

func (s *S) Len() int {
	if s == nil {
		return 0
	}
	return len(*s)
}
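Why the nil guards in the two Len hunks above improve robustness: calling a method on a nil pointer receiver is legal in Go, but dereferencing the receiver's fields panics. The stand-in type below mirrors the pattern; it is not the repo's actual tag or filter type.

package main

import "fmt"

// Minimal stand-in mirroring the pattern in the hunks above.
type T struct{ T [][]byte }

func (t *T) Len() int {
	if t == nil {
		return 0
	}
	return len(t.T)
}

func main() {
	var t *T
	// Prints 0; without the guard, len(t.T) would panic on the nil receiver.
	fmt.Println(t.Len())
}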