Rename filter types and lookup methods for brevity (Filter → F, Filters → S, FindEvent → FindEventById) and small cleanups

This commit is contained in:
2025-06-10 22:26:56 +01:00
parent 789c7913e7
commit 966f58f4c7
8 changed files with 54 additions and 50 deletions

11
database/filter.go Normal file
View File

@@ -0,0 +1,11 @@
package database
import (
"x.realy.lol/event"
"x.realy.lol/filter"
)
// Filter is intended to query the database for events matching the filter f.
//
// NOTE(review): unimplemented stub — it always returns a nil event slice and
// a nil error regardless of f. TODO: implement the index lookup; until then,
// callers cannot distinguish "no matches" from "not implemented".
func (d *D) Filter(f filter.F) (evs []event.E, err error) {
	return
}

View File

@@ -10,7 +10,7 @@ import (
"x.realy.lol/event"
)
func (d *D) FindEvent(evId []byte) (ev *event.E, err error) {
func (d *D) FindEventById(evId []byte) (ev *event.E, err error) {
id, ser := indexes.IdVars()
if err = id.FromId(evId); chk.E(err) {
return

View File

@@ -90,7 +90,6 @@ func TestGetEventIndexes(t *testing.T) {
if err = ev.MarshalWrite(binE); chk.E(err) {
log.I.F("bogus tags probably: %s", b)
encErrs++
// events that marshal with errors have e and p tag values that aren't hex and should not be accepted
continue
}
ev2 := event.New()
@@ -121,17 +120,12 @@ func TestGetEventIndexes(t *testing.T) {
if indices, _, err = d.GetEventIndexes(ev); chk.E(err) {
t.Fatal(err)
}
// log.I.F("%s", b)
// log.I.S(indices)
datasize += len(b)
for _, v := range indices {
size += len(v)
}
_ = indices
count++
// if count == 10000 {
// break
// }
}
log.I.F("unmarshaled, verified and indexed %d events in %s, %d Mb of indexes from %d Mb of events, %d Mb as binary, failed verify %d, failed encode %d", count, time.Now().Sub(start), size/units.Mb, datasize/units.Mb, binsize/units.Mb, errs, encErrs)
d.Close()

View File

@@ -50,7 +50,7 @@ func TestD_StoreEvent(t *testing.T) {
count++
if count%1000 == 0 {
log.I.F("unmarshaled %d events", count)
// break
break
}
if err = d.StoreEvent(ev); chk.E(err) {
continue
@@ -60,11 +60,10 @@ func TestD_StoreEvent(t *testing.T) {
log.I.F("completed unmarshalling %d events", count)
for _, v := range evIds {
var ev *event.E
if ev, err = d.FindEvent(v); chk.E(err) {
if ev, err = d.FindEventById(v); chk.E(err) {
t.Fatal(err)
}
_ = ev
// log.I.S(ev)
}
log.I.F("stored and retrieved %d events", len(evIds))
return

View File

@@ -14,7 +14,7 @@ import (
func (d *D) StoreEvent(ev *event.E) (err error) {
var ev2 *event.E
if ev2, err = d.FindEvent(ev.GetIdBytes()); err != nil {
if ev2, err = d.FindEventById(ev.GetIdBytes()); err != nil {
// so we didn't find it?
}
if ev2 != nil {

View File

@@ -10,9 +10,9 @@ import (
"x.realy.lol/timestamp"
)
type Filters []Filter
type S []F
type Filter struct {
type F struct {
IDs []string
Kinds []int
Authors []string
@@ -28,12 +28,12 @@ type Filter struct {
type TagMap map[string][]string
func (eff Filters) String() string {
func (eff S) String() string {
j, _ := json.Marshal(eff)
return string(j)
}
func (eff Filters) Match(event *event.E) bool {
func (eff S) Match(event *event.E) bool {
for _, filter := range eff {
if filter.Matches(event) {
return true
@@ -42,7 +42,7 @@ func (eff Filters) Match(event *event.E) bool {
return false
}
func (eff Filters) MatchIgnoringTimestampConstraints(event *event.E) bool {
func (eff S) MatchIgnoringTimestampConstraints(event *event.E) bool {
for _, filter := range eff {
if filter.MatchesIgnoringTimestampConstraints(event) {
return true
@@ -51,12 +51,12 @@ func (eff Filters) MatchIgnoringTimestampConstraints(event *event.E) bool {
return false
}
func (ef Filter) String() string {
func (ef F) String() string {
j, _ := json.Marshal(ef)
return string(j)
}
func (ef Filter) Matches(event *event.E) bool {
func (ef F) Matches(event *event.E) bool {
if !ef.MatchesIgnoringTimestampConstraints(event) {
return false
}
@@ -72,7 +72,7 @@ func (ef Filter) Matches(event *event.E) bool {
return true
}
func (ef Filter) MatchesIgnoringTimestampConstraints(event *event.E) bool {
func (ef F) MatchesIgnoringTimestampConstraints(event *event.E) bool {
if event == nil {
return false
}
@@ -98,7 +98,7 @@ func (ef Filter) MatchesIgnoringTimestampConstraints(event *event.E) bool {
return true
}
func FilterEqual(a Filter, b Filter) bool {
func FilterEqual(a F, b F) bool {
if !helpers.Similar(a.Kinds, b.Kinds) {
return false
}
@@ -144,8 +144,8 @@ func FilterEqual(a Filter, b Filter) bool {
return true
}
func (ef Filter) Clone() Filter {
clone := Filter{
func (ef F) Clone() F {
clone := F{
IDs: slices.Clone(ef.IDs),
Authors: slices.Clone(ef.Authors),
Kinds: slices.Clone(ef.Kinds),
@@ -180,7 +180,7 @@ func (ef Filter) Clone() Filter {
// It returns -1 if there are no theoretical limits.
//
// The given .Limit present in the filter is ignored.
func GetTheoreticalLimit(filter Filter) int {
func GetTheoreticalLimit(filter F) int {
if len(filter.IDs) > 0 {
return len(filter.IDs)
}

View File

@@ -15,7 +15,7 @@ var (
_ easyjson.Marshaler
)
func easyjson4d398eaaDecodeGithubComNbdWtfGoNostr(in *jlexer.Lexer, out *Filter) {
func easyjson4d398eaaDecodeGithubComNbdWtfGoNostr(in *jlexer.Lexer, out *F) {
isTopLevel := in.IsStart()
if in.IsNull() {
if isTopLevel {
@@ -166,7 +166,7 @@ func easyjson4d398eaaDecodeGithubComNbdWtfGoNostr(in *jlexer.Lexer, out *Filter)
}
}
func easyjson4d398eaaEncodeGithubComNbdWtfGoNostr(out *jwriter.Writer, in Filter) {
func easyjson4d398eaaEncodeGithubComNbdWtfGoNostr(out *jwriter.Writer, in F) {
out.RawByte('{')
first := true
_ = first
@@ -286,26 +286,26 @@ func easyjson4d398eaaEncodeGithubComNbdWtfGoNostr(out *jwriter.Writer, in Filter
}
// MarshalJSON supports json.Marshaler interface
func (v Filter) MarshalJSON() ([]byte, error) {
func (v F) MarshalJSON() ([]byte, error) {
w := jwriter.Writer{NoEscapeHTML: true}
easyjson4d398eaaEncodeGithubComNbdWtfGoNostr(&w, v)
return w.Buffer.BuildBytes(), w.Error
}
// MarshalEasyJSON supports easyjson.Marshaler interface
func (v Filter) MarshalEasyJSON(w *jwriter.Writer) {
func (v F) MarshalEasyJSON(w *jwriter.Writer) {
w.NoEscapeHTML = true
easyjson4d398eaaEncodeGithubComNbdWtfGoNostr(w, v)
}
// UnmarshalJSON supports json.Unmarshaler interface
func (v *Filter) UnmarshalJSON(data []byte) error {
func (v *F) UnmarshalJSON(data []byte) error {
r := jlexer.Lexer{Data: data}
easyjson4d398eaaDecodeGithubComNbdWtfGoNostr(&r, v)
return r.Error()
}
// UnmarshalEasyJSON supports easyjson.Unmarshaler interface
func (v *Filter) UnmarshalEasyJSON(l *jlexer.Lexer) {
func (v *F) UnmarshalEasyJSON(l *jlexer.Lexer) {
easyjson4d398eaaDecodeGithubComNbdWtfGoNostr(l, v)
}

View File

@@ -15,7 +15,7 @@ import (
func TestFilterUnmarshal(t *testing.T) {
raw := `{"ids": ["abc"],"#e":["zzz"],"#something":["nothing","bab"],"since":1644254609,"search":"test"}`
var f Filter
var f F
err := json.Unmarshal([]byte(raw), &f)
assert.NoError(t, err)
@@ -32,7 +32,7 @@ func TestFilterUnmarshal(t *testing.T) {
func TestFilterMarshal(t *testing.T) {
until := timestamp.Timestamp(12345678)
filterj, err := json.Marshal(Filter{
filterj, err := json.Marshal(F{
Kinds: []int{kind.TextNote, kind.RecommendServer, kind.EncryptedDirectMessage},
Tags: TagMap{"fruit": {"banana", "mango"}},
Until: &until,
@@ -45,7 +45,7 @@ func TestFilterMarshal(t *testing.T) {
func TestFilterUnmarshalWithLimitZero(t *testing.T) {
raw := `{"ids": ["abc"],"#e":["zzz"],"limit":0,"#something":["nothing","bab"],"since":1644254609,"search":"test"}`
var f Filter
var f F
err := json.Unmarshal([]byte(raw), &f)
assert.NoError(t, err)
@@ -64,7 +64,7 @@ func TestFilterUnmarshalWithLimitZero(t *testing.T) {
func TestFilterMarshalWithLimitZero(t *testing.T) {
until := timestamp.Timestamp(12345678)
filterj, err := json.Marshal(Filter{
filterj, err := json.Marshal(F{
Kinds: []int{kind.TextNote, kind.RecommendServer, kind.EncryptedDirectMessage},
Tags: TagMap{"fruit": {"banana", "mango"}},
Until: &until,
@@ -77,7 +77,7 @@ func TestFilterMarshalWithLimitZero(t *testing.T) {
}
func TestFilterMatchingLive(t *testing.T) {
var filter Filter
var filter F
var event event.E
json.Unmarshal([]byte(`{"kinds":[1],"authors":["a8171781fd9e90ede3ea44ddca5d3abf828fe8eedeb0f3abb0dd3e563562e1fc","1d80e5588de010d137a67c42b03717595f5f510e73e42cfc48f31bae91844d59","ed4ca520e9929dfe9efdadf4011b53d30afd0678a09aa026927e60e7a45d9244"],"since":1677033299}`), &filter)
@@ -88,24 +88,24 @@ func TestFilterMatchingLive(t *testing.T) {
func TestFilterEquality(t *testing.T) {
assert.True(t, FilterEqual(
Filter{Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion}},
Filter{Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion}},
F{Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion}},
F{Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion}},
), "kinds filters should be equal")
assert.True(t, FilterEqual(
Filter{Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion}, Tags: TagMap{"letter": {"a", "b"}}},
Filter{Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion}, Tags: TagMap{"letter": {"b", "a"}}},
F{Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion}, Tags: TagMap{"letter": {"a", "b"}}},
F{Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion}, Tags: TagMap{"letter": {"b", "a"}}},
), "kind+tags filters should be equal")
tm := timestamp.Now()
assert.True(t, FilterEqual(
Filter{
F{
Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion},
Tags: TagMap{"letter": {"a", "b"}, "fruit": {"banana"}},
Since: &tm,
IDs: []string{"aaaa", "bbbb"},
},
Filter{
F{
Kinds: []int{kind.Deletion, kind.EncryptedDirectMessage},
Tags: TagMap{"letter": {"a", "b"}, "fruit": {"banana"}},
Since: &tm,
@@ -114,14 +114,14 @@ func TestFilterEquality(t *testing.T) {
), "kind+2tags+since+ids filters should be equal")
assert.False(t, FilterEqual(
Filter{Kinds: []int{kind.TextNote, kind.EncryptedDirectMessage, kind.Deletion}},
Filter{Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion, kind.Repost}},
F{Kinds: []int{kind.TextNote, kind.EncryptedDirectMessage, kind.Deletion}},
F{Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion, kind.Repost}},
), "kinds filters shouldn't be equal")
}
func TestFilterClone(t *testing.T) {
ts := timestamp.Now() - 60*60
flt := Filter{
flt := F{
Kinds: []int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
Tags: TagMap{"letter": {"a", "b"}, "fruit": {"banana"}},
Since: &ts,
@@ -148,11 +148,11 @@ func TestFilterClone(t *testing.T) {
}
func TestTheoreticalLimit(t *testing.T) {
require.Equal(t, 6, GetTheoreticalLimit(Filter{IDs: []string{"a", "b", "c", "d", "e", "f"}}))
require.Equal(t, 9, GetTheoreticalLimit(Filter{Authors: []string{"a", "b", "c"}, Kinds: []int{3, 0, 10002}}))
require.Equal(t, 4, GetTheoreticalLimit(Filter{Authors: []string{"a", "b", "c", "d"}, Kinds: []int{10050}}))
require.Equal(t, -1, GetTheoreticalLimit(Filter{Authors: []string{"a", "b", "c", "d"}}))
require.Equal(t, -1, GetTheoreticalLimit(Filter{Kinds: []int{3, 0, 10002}}))
require.Equal(t, 24, GetTheoreticalLimit(Filter{Authors: []string{"a", "b", "c", "d", "e", "f"}, Kinds: []int{30023, 30024}, Tags: TagMap{"d": []string{"aaa", "bbb"}}}))
require.Equal(t, -1, GetTheoreticalLimit(Filter{Authors: []string{"a", "b", "c", "d", "e", "f"}, Kinds: []int{30023, 30024}}))
require.Equal(t, 6, GetTheoreticalLimit(F{IDs: []string{"a", "b", "c", "d", "e", "f"}}))
require.Equal(t, 9, GetTheoreticalLimit(F{Authors: []string{"a", "b", "c"}, Kinds: []int{3, 0, 10002}}))
require.Equal(t, 4, GetTheoreticalLimit(F{Authors: []string{"a", "b", "c", "d"}, Kinds: []int{10050}}))
require.Equal(t, -1, GetTheoreticalLimit(F{Authors: []string{"a", "b", "c", "d"}}))
require.Equal(t, -1, GetTheoreticalLimit(F{Kinds: []int{3, 0, 10002}}))
require.Equal(t, 24, GetTheoreticalLimit(F{Authors: []string{"a", "b", "c", "d", "e", "f"}, Kinds: []int{30023, 30024}, Tags: TagMap{"d": []string{"aaa", "bbb"}}}))
require.Equal(t, -1, GetTheoreticalLimit(F{Authors: []string{"a", "b", "c", "d", "e", "f"}, Kinds: []int{30023, 30024}}))
}