partly completed filter search

since/until/kinds/authors combinations done
This commit is contained in:
2025-06-12 11:32:44 +01:00
parent 966f58f4c7
commit e12fb03b03
13 changed files with 473 additions and 384 deletions

View File

@@ -1,11 +1,139 @@
package database
import (
"x.realy.lol/event"
"math"
"x.realy.lol/chk"
"x.realy.lol/database/indexes/types/varint"
"x.realy.lol/filter"
"x.realy.lol/hex"
"x.realy.lol/timestamp"
)
func (d *D) Filter(f filter.F) (evs []event.E, err error) {
// Bitfield is a compact set of flags recording which fields of a filter.F
// are populated, so query dispatch can be done with simple mask tests.
type Bitfield byte

// One bit per filter field; combinations are tested with bitwise masks.
const (
	hasIds Bitfield = 1 << iota
	hasKinds
	hasAuthors
	hasTags
	hasSince
	hasUntil
	hasLimit
	hasSearch
)
// ToBitfield summarises which fields of the filter are set as a Bitfield so
// that Filter can select a query strategy with mask tests.
func ToBitfield(f *filter.F) (b Bitfield) {
	if len(f.Ids) != 0 {
		b |= hasIds
	}
	if len(f.Kinds) != 0 {
		b |= hasKinds
	}
	if len(f.Authors) != 0 {
		b |= hasAuthors
	}
	// bugfix: this previously re-tested len(f.Kinds), so hasTags was set
	// whenever kinds were present and never for tag-only filters.
	if len(f.Tags) != 0 {
		b |= hasTags
	}
	if f.Since != nil {
		b |= hasSince
	}
	if f.Until != nil {
		b |= hasUntil
	}
	if f.Limit != nil {
		b |= hasLimit
	}
	if f.Search != "" {
		b |= hasSearch
	}
	return
}
func (d *D) Filter(f filter.F) (evSerials []*varint.V, err error) {
bf := ToBitfield(&f)
// first, if there is Ids these override everything else
if bf&hasIds != 0 {
for _, v := range f.Ids {
var id []byte
if id, err = hex.Dec(v); chk.E(err) {
// just going to ignore it i guess
continue
}
var ev *varint.V
if ev, err = d.FindEventSerialById(id); chk.E(err) {
// just going to ignore it i guess
continue
}
evSerials = append(evSerials, ev)
}
return
}
// next, check for filters that only have since and/or until
if bf&(hasSince+hasUntil) != 0 {
var since, until timestamp.Timestamp
if bf&hasSince != 0 {
since = *f.Since
}
if bf&hasUntil != 0 {
until = *f.Until
} else {
until = math.MaxInt64
}
if evSerials, err = d.GetEventSerialsByCreatedAtRange(since, until); chk.E(err) {
return
}
return
}
// next, kinds/since/until
if bf&(hasSince+hasUntil+hasKinds) == bf && bf&hasKinds != 0 {
var since, until timestamp.Timestamp
if bf&hasSince != 0 {
since = *f.Since
}
if bf&hasUntil != 0 {
until = *f.Until
} else {
until = math.MaxInt64
}
if evSerials, err = d.GetEventSerialsByKindsCreatedAtRange(f.Kinds, since, until); chk.E(err) {
return
}
return
}
// next authors/since/until
if bf&(hasSince+hasUntil+hasAuthors) == bf && bf&hasAuthors != 0 {
var since, until timestamp.Timestamp
if bf&hasSince != 0 {
since = *f.Since
}
if bf&hasUntil != 0 {
until = *f.Until
} else {
until = math.MaxInt64
}
if evSerials, err = d.GetEventSerialsByAuthorsCreatedAtRange(f.Authors, since, until); chk.E(err) {
return
}
return
}
// next authors/kinds/since/until
if bf&(hasSince+hasUntil+hasKinds+hasAuthors) == bf && bf&(hasAuthors+hasKinds) != 0 {
var since, until timestamp.Timestamp
if bf&hasSince != 0 {
since = *f.Since
}
if bf&hasUntil != 0 {
until = *f.Until
} else {
until = math.MaxInt64
}
if evSerials, err = d.GetEventSerialsByKindsAuthorsCreatedAtRange(f.Kinds, f.Authors, since, until); chk.E(err) {
return
}
return
}
return
}

View File

@@ -2,16 +2,23 @@ package database
import (
"bytes"
"math"
"github.com/dgraph-io/badger/v4"
"x.realy.lol/chk"
"x.realy.lol/database/indexes"
"x.realy.lol/database/indexes/prefixes"
"x.realy.lol/database/indexes/types/idhash"
"x.realy.lol/database/indexes/types/prefix"
"x.realy.lol/database/indexes/types/varint"
"x.realy.lol/errorf"
"x.realy.lol/event"
"x.realy.lol/timestamp"
)
func (d *D) FindEventById(evId []byte) (ev *event.E, err error) {
id, ser := indexes.IdVars()
func (d *D) FindEventSerialById(evId []byte) (ser *varint.V, err error) {
id := idhash.New()
if err = id.FromId(evId); chk.E(err) {
return
}
@@ -27,6 +34,7 @@ func (d *D) FindEventById(evId []byte) (ev *event.E, err error) {
item := it.Item()
k := item.KeyCopy(nil)
buf := bytes.NewBuffer(k)
ser = varint.New()
if err = indexes.IdDec(id, ser).UnmarshalRead(buf); chk.E(err) {
return
}
@@ -35,23 +43,31 @@ func (d *D) FindEventById(evId []byte) (ev *event.E, err error) {
}); err != nil {
return
}
if ser == nil {
err = errorf.E("event %0x not found", evId)
return
}
return
}
func (d *D) GetEventFromSerial(ser *varint.V) (ev *event.E, err error) {
if err = d.View(func(txn *badger.Txn) (err error) {
evk := new(bytes.Buffer)
if err = indexes.EventEnc(ser).MarshalWrite(evk); chk.E(err) {
enc := indexes.EventDec(ser)
kb := new(bytes.Buffer)
if err = enc.MarshalWrite(kb); chk.E(err) {
return
}
it := txn.NewIterator(badger.IteratorOptions{Prefix: evk.Bytes()})
defer it.Close()
for it.Seek(evk.Bytes()); it.Valid(); {
item := it.Item()
var val []byte
if val, err = item.ValueCopy(nil); chk.E(err) {
return
}
ev = event.New()
if err = ev.UnmarshalRead(bytes.NewBuffer(val)); chk.E(err) {
return
}
var item *badger.Item
if item, err = txn.Get(kb.Bytes()); chk.E(err) {
return
}
var val []byte
if val, err = item.ValueCopy(nil); chk.E(err) {
return
}
ev = event.New()
vr := bytes.NewBuffer(val)
if err = ev.UnmarshalRead(vr); chk.E(err) {
return
}
return
@@ -60,3 +76,219 @@ func (d *D) FindEventById(evId []byte) (ev *event.E, err error) {
}
return
}
// GetEventFullIndexFromSerial looks up the full-index entry for the given
// event serial and returns the event id bytes (t) decoded from its key.
//
// NOTE(review): if several full-index keys share the serial prefix, the id
// from the last one iterated wins; presumably a serial maps to exactly one
// entry — confirm. A serial with no entry returns id == nil and no error.
func (d *D) GetEventFullIndexFromSerial(ser *varint.V) (id []byte, err error) {
	if err = d.View(func(txn *badger.Txn) (err error) {
		// build the key prefix: the FullIndex table prefix followed by the serial
		enc := indexes.New(prefix.New(prefixes.FullIndex), ser)
		prf := new(bytes.Buffer)
		if err = enc.MarshalWrite(prf); chk.E(err) {
			return
		}
		it := txn.NewIterator(badger.IteratorOptions{Prefix: prf.Bytes()})
		defer it.Close()
		for it.Seek(prf.Bytes()); it.Valid(); it.Next() {
			item := it.Item()
			key := item.KeyCopy(nil)
			kbuf := bytes.NewBuffer(key)
			// decode the key to extract the id field (t); p/ki/ca are unused here
			_, t, p, ki, ca := indexes.FullIndexVars()
			dec := indexes.FullIndexDec(ser, t, p, ki, ca)
			if err = dec.UnmarshalRead(kbuf); chk.E(err) {
				return
			}
			id = t.Bytes()
		}
		return
	}); chk.E(err) {
		return
	}
	return
}
// GetEventById resolves an event id to its serial and then loads the event
// stored under that serial.
func (d *D) GetEventById(evId []byte) (ev *event.E, err error) {
	var serial *varint.V
	if serial, err = d.FindEventSerialById(evId); chk.E(err) {
		return
	}
	return d.GetEventFromSerial(serial)
}
// GetEventSerialsByCreatedAtRange returns the serials of events with the given since/until
// range in reverse chronological order (starting at until, going back to since).
func (d *D) GetEventSerialsByCreatedAtRange(since, until timestamp.Timestamp) (sers []*varint.V, err error) {
	// get the start (end) max possible index prefix: until timestamp plus the
	// maximum serial, i.e. the highest possible key at that timestamp
	startCreatedAt, startSer := indexes.CreatedAtVars()
	startCreatedAt.FromInt64(until.ToInt64())
	startSer.FromUint64(math.MaxUint64)
	prf := new(bytes.Buffer)
	if err = indexes.CreatedAtEnc(startCreatedAt, startSer).MarshalWrite(prf); chk.E(err) {
		return
	}
	// NOTE(review): prf embeds the max serial, so as an iterator Prefix it only
	// matches keys carrying that exact serial encoding; presumably the intent
	// is a seek start position for the reverse scan — confirm against badger's
	// Reverse+Prefix iterator semantics.
	if err = d.View(func(txn *badger.Txn) (err error) {
		it := txn.NewIterator(badger.IteratorOptions{Reverse: true, Prefix: prf.Bytes()})
		defer it.Close()
		key := make([]byte, 10) // reusable destination buffer for key copies
		for it.Rewind(); it.Valid(); it.Next() {
			item := it.Item()
			key = item.KeyCopy(key)
			ca, ser := indexes.CreatedAtVars()
			buf := bytes.NewBuffer(key)
			if err = indexes.CreatedAtDec(ca, ser).UnmarshalRead(buf); chk.E(err) {
				// skip it then
				continue
			}
			// iterating newest-first: once older than since, nothing further matches
			if ca.ToTimestamp() < since {
				break
			}
			sers = append(sers, ser)
		}
		return
	}); chk.E(err) {
		return
	}
	return
}
// GetEventSerialsByKindsCreatedAtRange returns the serials of events of any of
// the given kinds whose created_at falls in the since/until range, scanning
// each kind's index newest-first.
func (d *D) GetEventSerialsByKindsCreatedAtRange(kinds []int, since, until timestamp.Timestamp) (sers []*varint.V, err error) {
	// get the start (end) max possible index prefix, one for each kind in the list
	var searchIdxs [][]byte
	for _, k := range kinds {
		kind, startCreatedAt, startSer := indexes.KindCreatedAtVars()
		kind.Set(k)
		startCreatedAt.FromInt64(until.ToInt64())
		startSer.FromUint64(math.MaxUint64)
		prf := new(bytes.Buffer)
		if err = indexes.KindCreatedAtEnc(kind, startCreatedAt, startSer).MarshalWrite(prf); chk.E(err) {
			return
		}
		searchIdxs = append(searchIdxs, prf.Bytes())
	}
	// one reverse scan per kind; results are concatenated in kind order, so the
	// combined slice is only per-kind (not globally) reverse-chronological
	for _, idx := range searchIdxs {
		if err = d.View(func(txn *badger.Txn) (err error) {
			it := txn.NewIterator(badger.IteratorOptions{Reverse: true, Prefix: idx})
			defer it.Close()
			key := make([]byte, 10) // reusable destination buffer for key copies
			for it.Rewind(); it.Valid(); it.Next() {
				item := it.Item()
				key = item.KeyCopy(key)
				kind, ca, ser := indexes.KindCreatedAtVars()
				buf := bytes.NewBuffer(key)
				if err = indexes.KindCreatedAtDec(kind, ca, ser).UnmarshalRead(buf); chk.E(err) {
					// skip it then
					continue
				}
				// newest-first: stop this kind's scan once older than since
				if ca.ToTimestamp() < since {
					break
				}
				sers = append(sers, ser)
			}
			return
		}); chk.E(err) {
			return
		}
	}
	return
}
// GetEventSerialsByAuthorsCreatedAtRange returns the serials of events
// authored by any of the given pubkeys whose created_at falls in the
// since/until range, scanning each author's index newest-first. Pubkeys that
// fail to decode are skipped; if every pubkey fails, an error is returned.
func (d *D) GetEventSerialsByAuthorsCreatedAtRange(pubkeys []string, since, until timestamp.Timestamp) (sers []*varint.V, err error) {
	// get the start (end) max possible index prefix, one for each pubkey in the list
	var searchIdxs [][]byte
	var pkDecodeErrs int
	for _, p := range pubkeys {
		pubkey, startCreatedAt, startSer := indexes.PubkeyCreatedAtVars()
		if err = pubkey.FromPubkeyHex(p); chk.E(err) {
			// gracefully ignore wrong keys
			pkDecodeErrs++
			continue
		}
		startCreatedAt.FromInt64(until.ToInt64())
		startSer.FromUint64(math.MaxUint64)
		prf := new(bytes.Buffer)
		if err = indexes.PubkeyCreatedAtEnc(pubkey, startCreatedAt, startSer).MarshalWrite(prf); chk.E(err) {
			return
		}
		searchIdxs = append(searchIdxs, prf.Bytes())
	}
	// bugfix: this check previously sat inside the loop after the continue, so
	// it could never run when the final pubkey was the one that failed.
	if len(pubkeys) > 0 && pkDecodeErrs == len(pubkeys) {
		err = errorf.E("all pubkeys in authors field of filter failed to decode")
		return
	}
	err = nil // clear any decode error left over from a skipped pubkey
	for _, idx := range searchIdxs {
		if err = d.View(func(txn *badger.Txn) (err error) {
			it := txn.NewIterator(badger.IteratorOptions{Reverse: true, Prefix: idx})
			defer it.Close()
			key := make([]byte, 10)
			for it.Rewind(); it.Valid(); it.Next() {
				item := it.Item()
				key = item.KeyCopy(key)
				// bugfix: decode with the pubkey/created-at index layout that this
				// function encodes with, not the kind/created-at layout.
				pk, ca, ser := indexes.PubkeyCreatedAtVars()
				buf := bytes.NewBuffer(key)
				if err = indexes.PubkeyCreatedAtDec(pk, ca, ser).UnmarshalRead(buf); chk.E(err) {
					// skip it then
					continue
				}
				if ca.ToTimestamp() < since {
					break
				}
				sers = append(sers, ser)
			}
			return
		}); chk.E(err) {
			return
		}
	}
	return
}
// GetEventSerialsByKindsAuthorsCreatedAtRange returns the serials of events
// matching any (kind, author) pair whose created_at falls in the since/until
// range, scanning each pair's index newest-first. Pubkeys that fail to decode
// are skipped; if every pubkey fails, an error is returned.
func (d *D) GetEventSerialsByKindsAuthorsCreatedAtRange(kinds []int, pubkeys []string, since, until timestamp.Timestamp) (sers []*varint.V, err error) {
	// get the start (end) max possible index prefix, one per (kind, pubkey) pair
	var searchIdxs [][]byte
	var pkDecodeErrs int
	for _, k := range kinds {
		for _, p := range pubkeys {
			kind, pubkey, startCreatedAt, startSer := indexes.KindPubkeyCreatedAtVars()
			if err = pubkey.FromPubkeyHex(p); chk.E(err) {
				// gracefully ignore wrong keys
				pkDecodeErrs++
				continue
			}
			startCreatedAt.FromInt64(until.ToInt64())
			startSer.FromUint64(math.MaxUint64)
			kind.Set(k)
			prf := new(bytes.Buffer)
			if err = indexes.KindPubkeyCreatedAtEnc(kind, pubkey, startCreatedAt, startSer).MarshalWrite(prf); chk.E(err) {
				return
			}
			searchIdxs = append(searchIdxs, prf.Bytes())
		}
	}
	// bugfix: the all-failed check previously ran inside the loop (where the
	// continue skipped it) and compared a per-pair counter to len(pubkeys),
	// which is wrong once there is more than one kind.
	if len(kinds) > 0 && len(pubkeys) > 0 && pkDecodeErrs == len(kinds)*len(pubkeys) {
		err = errorf.E("all pubkeys in authors field of filter failed to decode")
		return
	}
	err = nil // clear any decode error left over from a skipped pubkey
	for _, idx := range searchIdxs {
		if err = d.View(func(txn *badger.Txn) (err error) {
			it := txn.NewIterator(badger.IteratorOptions{Reverse: true, Prefix: idx})
			defer it.Close()
			key := make([]byte, 10)
			for it.Rewind(); it.Valid(); it.Next() {
				item := it.Item()
				key = item.KeyCopy(key)
				// bugfix: decode with the kind/pubkey/created-at index layout that
				// this function encodes with, not the kind/created-at layout.
				ki, pk, ca, ser := indexes.KindPubkeyCreatedAtVars()
				buf := bytes.NewBuffer(key)
				if err = indexes.KindPubkeyCreatedAtDec(ki, pk, ca, ser).UnmarshalRead(buf); chk.E(err) {
					// skip it then
					continue
				}
				if ca.ToTimestamp() < since {
					break
				}
				sers = append(sers, ser)
			}
			return
		}); chk.E(err) {
			return
		}
	}
	return
}

View File

@@ -28,7 +28,7 @@ func (d *D) GetFulltextKeys(ev *event.E, ser *varint.V) (keys [][]byte, err erro
ft := fulltext.New()
ft.FromWord([]byte(i))
pos := varint.New()
pos.FromInteger(uint64(w[i]))
pos.FromUint64(uint64(w[i]))
buf := new(bytes.Buffer)
if err = indexes.FullTextWordEnc(ft, pos, ser).MarshalWrite(buf); chk.E(err) {
return

View File

@@ -29,7 +29,7 @@ func (d *D) GetEventIndexes(ev *event.E) (indices [][]byte, ser *varint.V, err e
if s, err = d.Serial(); chk.E(err) {
return
}
ser.FromInteger(s)
ser.FromUint64(s)
// create the event id key
id := idhash.New()
var idb []byte

View File

@@ -20,7 +20,7 @@ func TestEvent(t *testing.T) {
var err error
for range 100 {
ser := EventVars()
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
evIdx := EventEnc(ser)
evIdx.MarshalWrite(buf)
@@ -59,7 +59,7 @@ func TestId(t *testing.T) {
if err = id.FromId(frand.Bytes(sha256.Size)); chk.E(err) {
t.Fatal(err)
}
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
evIdx := IdEnc(id, ser)
evIdx.MarshalWrite(buf)
@@ -91,7 +91,7 @@ func TestFullIndex(t *testing.T) {
}
ki.Set(frand.Intn(math.MaxUint16))
ca.FromInt64(time.Now().Unix())
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
fi := FullIndexEnc(ser, id, p, ki, ca)
if err = fi.MarshalWrite(buf); chk.E(err) {
@@ -129,7 +129,7 @@ func TestPubkey(t *testing.T) {
if err = p.FromPubkey(frand.Bytes(schnorr.PubKeyBytesLen)); chk.E(err) {
t.Fatal(err)
}
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
fi := PubkeyEnc(p, ser)
fi.MarshalWrite(buf)
@@ -157,7 +157,7 @@ func TestPubkeyCreatedAt(t *testing.T) {
t.Fatal(err)
}
ca.FromInt64(time.Now().Unix())
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
fi := PubkeyCreatedAtEnc(p, ca, ser)
fi.MarshalWrite(buf)
@@ -182,7 +182,7 @@ func TestCreatedAt(t *testing.T) {
for range 100 {
ca, ser := CreatedAtVars()
ca.FromInt64(time.Now().Unix())
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
fi := CreatedAtEnc(ca, ser)
fi.MarshalWrite(buf)
@@ -207,7 +207,7 @@ func TestFirstSeen(t *testing.T) {
for range 100 {
ser, ts := FirstSeenVars()
ts.FromInt64(time.Now().Unix())
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
fs := FirstSeenEnc(ser, ts)
fs.MarshalWrite(buf)
@@ -232,7 +232,7 @@ func TestKind(t *testing.T) {
for range 100 {
ki, ser := KindVars()
ki.Set(frand.Intn(math.MaxUint16))
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
kIdx := KindEnc(ki, ser)
kIdx.MarshalWrite(buf)
@@ -263,7 +263,7 @@ func TestTagA(t *testing.T) {
t.Fatal(err)
}
ki.Set(frand.Intn(math.MaxUint16))
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
fi := TagAEnc(ki, p, id, ser)
fi.MarshalWrite(buf)
@@ -296,7 +296,7 @@ func TestTagEvent(t *testing.T) {
if err = id.FromId(frand.Bytes(sha256.Size)); chk.E(err) {
t.Fatal(err)
}
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
evIdx := TagEventEnc(id, ser)
evIdx.MarshalWrite(buf)
@@ -323,7 +323,7 @@ func TestTagPubkey(t *testing.T) {
if err = p.FromPubkey(frand.Bytes(schnorr.PubKeyBytesLen)); chk.E(err) {
t.Fatal(err)
}
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
fi := TagPubkeyEnc(p, ser)
fi.MarshalWrite(buf)
@@ -349,7 +349,7 @@ func TestTagHashtag(t *testing.T) {
if err = id.FromIdent(frand.Bytes(frand.Intn(16) + 8)); chk.E(err) {
t.Fatal(err)
}
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
fi := TagHashtagEnc(id, ser)
fi.MarshalWrite(buf)
@@ -376,7 +376,7 @@ func TestTagIdentifier(t *testing.T) {
if err = id.FromIdent(frand.Bytes(frand.Intn(16) + 8)); chk.E(err) {
t.Fatal(err)
}
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
fi := TagIdentifierEnc(id, ser)
fi.MarshalWrite(buf)
@@ -405,7 +405,7 @@ func TestTagLetter(t *testing.T) {
}
lb := frand.Bytes(1)
l.Set(lb[0])
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
fi := TagLetterEnc(l, id, ser)
fi.MarshalWrite(buf)
@@ -435,7 +435,7 @@ func TestTagProtected(t *testing.T) {
if err = p.FromPubkey(frand.Bytes(schnorr.PubKeyBytesLen)); chk.E(err) {
t.Fatal(err)
}
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
fi := TagProtectedEnc(p, ser)
fi.MarshalWrite(buf)
@@ -465,7 +465,7 @@ func TestTagNonstandard(t *testing.T) {
if err = v.FromIdent(frand.Bytes(frand.Intn(16) + 8)); chk.E(err) {
t.Fatal(err)
}
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
fi := TagNonstandardEnc(k, v, ser)
fi.MarshalWrite(buf)
@@ -493,8 +493,8 @@ func TestFulltextWord(t *testing.T) {
for range 100 {
fw, pos, ser := FullTextWordVars()
fw.FromWord(frand.Bytes(frand.Intn(10) + 5))
pos.FromInteger(uint64(frand.Intn(math.MaxUint32)))
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
pos.FromUint64(uint64(frand.Intn(math.MaxUint32)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
fi := FullTextWordEnc(fw, pos, ser)
if err = fi.MarshalWrite(buf); chk.E(err) {
@@ -523,7 +523,7 @@ func TestLastAccessed(t *testing.T) {
var err error
for range 100 {
ser := LastAccessedVars()
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
fi := LastAccessedEnc(ser)
fi.MarshalWrite(buf)
@@ -544,7 +544,7 @@ func TestAccessCounter(t *testing.T) {
var err error
for range 100 {
ser := AccessCounterVars()
ser.FromInteger(uint64(frand.Intn(math.MaxInt64)))
ser.FromUint64(uint64(frand.Intn(math.MaxInt64)))
buf := new(bytes.Buffer)
fi := AccessCounterEnc(ser)
fi.MarshalWrite(buf)

View File

@@ -3,9 +3,11 @@ package pubhash
import (
"io"
"x.realy.lol/chk"
"x.realy.lol/ec/schnorr"
"x.realy.lol/errorf"
"x.realy.lol/helpers"
"x.realy.lol/hex"
)
const Len = 8
@@ -23,6 +25,19 @@ func (ph *T) FromPubkey(pk []byte) (err error) {
return
}
// FromPubkeyHex decodes a hex-encoded pubkey of exactly
// schnorr.PubKeyBytesLen bytes, hashes it, and stores the first Len bytes
// of the hash as the value.
func (ph *T) FromPubkeyHex(pk string) (err error) {
	const wantLen = schnorr.PubKeyBytesLen * 2
	if len(pk) != wantLen {
		return errorf.E("invalid Pubkey length, got %d require %d", len(pk), wantLen)
	}
	var raw []byte
	if raw, err = hex.Dec(pk); chk.E(err) {
		return
	}
	ph.val = helpers.Hash(raw)[:Len]
	return
}
func (ph *T) Bytes() (b []byte) { return ph.val }
func (ph *T) MarshalWrite(w io.Writer) (err error) {

View File

@@ -12,7 +12,7 @@ type V struct{ val uint64 }
func New() (s *V) { return &V{} }
func (vi *V) FromInteger(ser uint64) {
func (vi *V) FromUint64(ser uint64) {
vi.val = ser
return
}

View File

@@ -60,7 +60,7 @@ func TestD_StoreEvent(t *testing.T) {
log.I.F("completed unmarshalling %d events", count)
for _, v := range evIds {
var ev *event.E
if ev, err = d.FindEventById(v); chk.E(err) {
if ev, err = d.GetEventById(v); chk.E(err) {
t.Fatal(err)
}
_ = ev

View File

@@ -14,7 +14,7 @@ import (
func (d *D) StoreEvent(ev *event.E) (err error) {
var ev2 *event.E
if ev2, err = d.FindEventById(ev.GetIdBytes()); err != nil {
if ev2, err = d.GetEventById(ev.GetIdBytes()); err != nil {
// so we didn't find it?
}
if ev2 != nil {

View File

@@ -13,17 +13,14 @@ import (
type S []F
type F struct {
IDs []string
Ids []string
Kinds []int
Authors []string
Tags TagMap
Since *timestamp.Timestamp
Until *timestamp.Timestamp
Limit int
Limit *int
Search string
// LimitZero is or must be set when there is a "limit":0 in the filter, and not when "limit" is just omitted
LimitZero bool `json:"-"`
}
type TagMap map[string][]string
@@ -77,7 +74,7 @@ func (ef F) MatchesIgnoringTimestampConstraints(event *event.E) bool {
return false
}
if ef.IDs != nil && !slices.Contains(ef.IDs, event.Id) {
if ef.Ids != nil && !slices.Contains(ef.Ids, event.Id) {
return false
}
@@ -103,7 +100,7 @@ func FilterEqual(a F, b F) bool {
return false
}
if !helpers.Similar(a.IDs, b.IDs) {
if !helpers.Similar(a.Ids, b.Ids) {
return false
}
@@ -137,21 +134,16 @@ func FilterEqual(a F, b F) bool {
return false
}
if a.LimitZero != b.LimitZero {
return false
}
return true
}
func (ef F) Clone() F {
clone := F{
IDs: slices.Clone(ef.IDs),
Authors: slices.Clone(ef.Authors),
Kinds: slices.Clone(ef.Kinds),
Limit: ef.Limit,
Search: ef.Search,
LimitZero: ef.LimitZero,
Ids: slices.Clone(ef.Ids),
Authors: slices.Clone(ef.Authors),
Kinds: slices.Clone(ef.Kinds),
Limit: ef.Limit,
Search: ef.Search,
}
if ef.Tags != nil {
@@ -181,8 +173,8 @@ func (ef F) Clone() F {
//
// The given .Limit present in the filter is ignored.
func GetTheoreticalLimit(filter F) int {
if len(filter.IDs) > 0 {
return len(filter.IDs)
if len(filter.Ids) > 0 {
return len(filter.Ids)
}
if len(filter.Kinds) == 0 {
@@ -217,3 +209,5 @@ func GetTheoreticalLimit(filter F) int {
return -1
}
func IntToPointer(i int) (ptr *int) { return &i }

View File

@@ -1,311 +0,0 @@
package filter
import (
"github.com/mailru/easyjson"
"github.com/mailru/easyjson/jlexer"
"github.com/mailru/easyjson/jwriter"
"x.realy.lol/timestamp"
)
// suppress unused package warning
var (
_ *jlexer.Lexer
_ *jwriter.Writer
_ easyjson.Marshaler
)
func easyjson4d398eaaDecodeGithubComNbdWtfGoNostr(in *jlexer.Lexer, out *F) {
isTopLevel := in.IsStart()
if in.IsNull() {
if isTopLevel {
in.Consumed()
}
in.Skip()
return
}
out.Tags = make(TagMap)
in.Delim('{')
for !in.IsDelim('}') {
key := in.UnsafeFieldName(false)
in.WantColon()
if in.IsNull() {
in.Skip()
in.WantComma()
continue
}
switch key {
case "ids":
if in.IsNull() {
in.Skip()
out.IDs = nil
} else {
in.Delim('[')
if out.IDs == nil {
if !in.IsDelim(']') {
out.IDs = make([]string, 0, 20)
} else {
out.IDs = []string{}
}
} else {
out.IDs = (out.IDs)[:0]
}
for !in.IsDelim(']') {
var v1 string
v1 = string(in.String())
out.IDs = append(out.IDs, v1)
in.WantComma()
}
in.Delim(']')
}
case "kinds":
if in.IsNull() {
in.Skip()
out.Kinds = nil
} else {
in.Delim('[')
if out.Kinds == nil {
if !in.IsDelim(']') {
out.Kinds = make([]int, 0, 8)
} else {
out.Kinds = []int{}
}
} else {
out.Kinds = (out.Kinds)[:0]
}
for !in.IsDelim(']') {
var v2 int
v2 = int(in.Int())
out.Kinds = append(out.Kinds, v2)
in.WantComma()
}
in.Delim(']')
}
case "authors":
if in.IsNull() {
in.Skip()
out.Authors = nil
} else {
in.Delim('[')
if out.Authors == nil {
if !in.IsDelim(']') {
out.Authors = make([]string, 0, 40)
} else {
out.Authors = []string{}
}
} else {
out.Authors = (out.Authors)[:0]
}
for !in.IsDelim(']') {
var v3 string
v3 = string(in.String())
out.Authors = append(out.Authors, v3)
in.WantComma()
}
in.Delim(']')
}
case "since":
if in.IsNull() {
in.Skip()
out.Since = nil
} else {
if out.Since == nil {
out.Since = new(timestamp.Timestamp)
}
*out.Since = timestamp.Timestamp(in.Int64())
}
case "until":
if in.IsNull() {
in.Skip()
out.Until = nil
} else {
if out.Until == nil {
out.Until = new(timestamp.Timestamp)
}
*out.Until = timestamp.Timestamp(in.Int64())
}
case "limit":
out.Limit = int(in.Int())
if out.Limit == 0 {
out.LimitZero = true
}
case "search":
out.Search = string(in.String())
default:
if len(key) > 1 && key[0] == '#' {
tagValues := make([]string, 0, 40)
if !in.IsNull() {
in.Delim('[')
if out.Authors == nil {
if !in.IsDelim(']') {
tagValues = make([]string, 0, 4)
} else {
tagValues = []string{}
}
} else {
tagValues = (tagValues)[:0]
}
for !in.IsDelim(']') {
var v3 string
v3 = string(in.String())
tagValues = append(tagValues, v3)
in.WantComma()
}
in.Delim(']')
}
out.Tags[key[1:]] = tagValues
} else {
in.SkipRecursive()
}
}
in.WantComma()
}
in.Delim('}')
if isTopLevel {
in.Consumed()
}
}
func easyjson4d398eaaEncodeGithubComNbdWtfGoNostr(out *jwriter.Writer, in F) {
out.RawByte('{')
first := true
_ = first
if len(in.IDs) != 0 {
const prefix string = ",\"ids\":"
first = false
out.RawString(prefix[1:])
{
out.RawByte('[')
for v4, v5 := range in.IDs {
if v4 > 0 {
out.RawByte(',')
}
out.String(string(v5))
}
out.RawByte(']')
}
}
if len(in.Kinds) != 0 {
const prefix string = ",\"kinds\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
{
out.RawByte('[')
for v6, v7 := range in.Kinds {
if v6 > 0 {
out.RawByte(',')
}
out.Int(int(v7))
}
out.RawByte(']')
}
}
if len(in.Authors) != 0 {
const prefix string = ",\"authors\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
{
out.RawByte('[')
for v8, v9 := range in.Authors {
if v8 > 0 {
out.RawByte(',')
}
out.String(string(v9))
}
out.RawByte(']')
}
}
if in.Since != nil {
const prefix string = ",\"since\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Int64(int64(*in.Since))
}
if in.Until != nil {
const prefix string = ",\"until\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Int64(int64(*in.Until))
}
if in.Limit != 0 || in.LimitZero {
const prefix string = ",\"limit\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.Int(int(in.Limit))
}
if in.Search != "" {
const prefix string = ",\"search\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.String(string(in.Search))
}
for tag, values := range in.Tags {
const prefix string = ",\"authors\":"
if first {
first = false
out.RawString("\"#" + tag + "\":")
} else {
out.RawString(",\"#" + tag + "\":")
}
{
out.RawByte('[')
for i, v := range values {
if i > 0 {
out.RawByte(',')
}
out.String(string(v))
}
out.RawByte(']')
}
}
out.RawByte('}')
}
// MarshalJSON supports json.Marshaler interface
func (v F) MarshalJSON() ([]byte, error) {
w := jwriter.Writer{NoEscapeHTML: true}
easyjson4d398eaaEncodeGithubComNbdWtfGoNostr(&w, v)
return w.Buffer.BuildBytes(), w.Error
}
// MarshalEasyJSON supports easyjson.Marshaler interface
func (v F) MarshalEasyJSON(w *jwriter.Writer) {
w.NoEscapeHTML = true
easyjson4d398eaaEncodeGithubComNbdWtfGoNostr(w, v)
}
// UnmarshalJSON supports json.Unmarshaler interface
func (v *F) UnmarshalJSON(data []byte) error {
r := jlexer.Lexer{Data: data}
easyjson4d398eaaDecodeGithubComNbdWtfGoNostr(&r, v)
return r.Error()
}
// UnmarshalEasyJSON supports easyjson.Unmarshaler interface
func (v *F) UnmarshalEasyJSON(l *jlexer.Lexer) {
easyjson4d398eaaDecodeGithubComNbdWtfGoNostr(l, v)
}

View File

@@ -8,8 +8,10 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"x.realy.lol/chk"
"x.realy.lol/event"
"x.realy.lol/kind"
"x.realy.lol/log"
"x.realy.lol/timestamp"
)
@@ -54,8 +56,7 @@ func TestFilterUnmarshalWithLimitZero(t *testing.T) {
f.Since.Time().UTC().Format("2006-01-02") != "2022-02-07" ||
f.Until != nil ||
f.Tags == nil || len(f.Tags) != 2 || !slices.Contains(f.Tags["something"], "bab") ||
f.Search != "test" ||
f.LimitZero == false {
f.Search != "test" {
return false
}
return true
@@ -65,10 +66,9 @@ func TestFilterUnmarshalWithLimitZero(t *testing.T) {
func TestFilterMarshalWithLimitZero(t *testing.T) {
until := timestamp.Timestamp(12345678)
filterj, err := json.Marshal(F{
Kinds: []int{kind.TextNote, kind.RecommendServer, kind.EncryptedDirectMessage},
Tags: TagMap{"fruit": {"banana", "mango"}},
Until: &until,
LimitZero: true,
Kinds: []int{kind.TextNote, kind.RecommendServer, kind.EncryptedDirectMessage},
Tags: TagMap{"fruit": {"banana", "mango"}},
Until: &until,
})
assert.NoError(t, err)
@@ -103,13 +103,13 @@ func TestFilterEquality(t *testing.T) {
Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion},
Tags: TagMap{"letter": {"a", "b"}, "fruit": {"banana"}},
Since: &tm,
IDs: []string{"aaaa", "bbbb"},
Ids: []string{"aaaa", "bbbb"},
},
F{
Kinds: []int{kind.Deletion, kind.EncryptedDirectMessage},
Tags: TagMap{"letter": {"a", "b"}, "fruit": {"banana"}},
Since: &tm,
IDs: []string{"aaaa", "bbbb"},
Ids: []string{"aaaa", "bbbb"},
},
), "kind+2tags+since+ids filters should be equal")
@@ -125,13 +125,13 @@ func TestFilterClone(t *testing.T) {
Kinds: []int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
Tags: TagMap{"letter": {"a", "b"}, "fruit": {"banana"}},
Since: &ts,
IDs: []string{"9894b4b5cb5166d23ee8899a4151cf0c66aec00bde101982a13b8e8ceb972df9"},
Ids: []string{"9894b4b5cb5166d23ee8899a4151cf0c66aec00bde101982a13b8e8ceb972df9"},
}
clone := flt.Clone()
assert.True(t, FilterEqual(flt, clone), "clone is not equal:\n %v !=\n %v", flt, clone)
clone1 := flt.Clone()
clone1.IDs = append(clone1.IDs, "88f0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d")
clone1.Ids = append(clone1.Ids, "88f0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d")
assert.False(t, FilterEqual(flt, clone1), "modifying the clone ids should cause it to not be equal anymore")
clone2 := flt.Clone()
@@ -148,7 +148,7 @@ func TestFilterClone(t *testing.T) {
}
func TestTheoreticalLimit(t *testing.T) {
require.Equal(t, 6, GetTheoreticalLimit(F{IDs: []string{"a", "b", "c", "d", "e", "f"}}))
require.Equal(t, 6, GetTheoreticalLimit(F{Ids: []string{"a", "b", "c", "d", "e", "f"}}))
require.Equal(t, 9, GetTheoreticalLimit(F{Authors: []string{"a", "b", "c"}, Kinds: []int{3, 0, 10002}}))
require.Equal(t, 4, GetTheoreticalLimit(F{Authors: []string{"a", "b", "c", "d"}, Kinds: []int{10050}}))
require.Equal(t, -1, GetTheoreticalLimit(F{Authors: []string{"a", "b", "c", "d"}}))
@@ -156,3 +156,32 @@ func TestTheoreticalLimit(t *testing.T) {
require.Equal(t, 24, GetTheoreticalLimit(F{Authors: []string{"a", "b", "c", "d", "e", "f"}, Kinds: []int{30023, 30024}, Tags: TagMap{"d": []string{"aaa", "bbb"}}}))
require.Equal(t, -1, GetTheoreticalLimit(F{Authors: []string{"a", "b", "c", "d", "e", "f"}, Kinds: []int{30023, 30024}}))
}
// TestFilter round-trips a fully populated filter through JSON marshal and
// unmarshal, logging both forms; it has no assertions and serves as a smoke
// test for the filter codec.
func TestFilter(t *testing.T) {
	ts := timestamp.Now() - 60*60 // one hour ago
	now := timestamp.Now()
	flt := &F{
		Authors: []string{"1d80e5588de010d137a67c42b03717595f5f510e73e42cfc48f31bae91844d59"},
		Kinds:   []int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
		// NOTE(review): these tag keys carry a leading '#' — confirm whether the
		// codec expects bare keys ("t", "e", "p") with '#' added on the wire.
		Tags: TagMap{
			"#t": {"a", "b"},
			"#e": {"9894b4b5cb5166d23ee8899a4151cf0c66aec00bde101982a13b8e8ceb972df9"},
			"#p": {"1d80e5588de010d137a67c42b03717595f5f510e73e42cfc48f31bae91844d59"},
		},
		Until: &now,
		Since: &ts,
		Ids:   []string{"9894b4b5cb5166d23ee8899a4151cf0c66aec00bde101982a13b8e8ceb972df9"},
		// Limit: IntToPointer(10),
	}
	var err error
	var b []byte
	if b, err = json.Marshal(flt); chk.E(err) {
		t.Fatal(err)
	}
	log.I.F("%s", b)
	var f2 F
	if err = json.Unmarshal(b, &f2); chk.E(err) {
		t.Fatal(err)
	}
	log.I.S(f2)
}

View File

@@ -15,3 +15,5 @@ func New[T constraints.Integer | constraints.Float](t T) Timestamp {
}
// Time converts the timestamp (unix seconds) to a time.Time.
func (t Timestamp) Time() time.Time { return time.Unix(int64(t), 0) }

// ToInt64 returns the timestamp as a raw int64 unix-seconds value.
func (t Timestamp) ToInt64() int64 { return int64(t) }