rip off go-nostr for filters and all necessaries
This commit is contained in:
21
LICENSE.md
Normal file
21
LICENSE.md
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2022 nbd
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
@@ -14,17 +14,19 @@ import (
|
||||
"x.realy.lol/log"
|
||||
"x.realy.lol/p256k"
|
||||
"x.realy.lol/signer"
|
||||
"x.realy.lol/tags"
|
||||
"x.realy.lol/text"
|
||||
"x.realy.lol/timestamp"
|
||||
)
|
||||
|
||||
type E struct {
|
||||
Id string `json:"id"`
|
||||
Pubkey string `json:"pubkey"`
|
||||
CreatedAt int64 `json:"created_at"`
|
||||
Kind uint16 `json:"kind`
|
||||
Tags [][]string `json:"tags"`
|
||||
Content string `json:"content"`
|
||||
Sig string `json:"sig"`
|
||||
Id string `json:"id"`
|
||||
Pubkey string `json:"pubkey"`
|
||||
CreatedAt timestamp.Timestamp `json:"created_at"`
|
||||
Kind int `json:"kind`
|
||||
Tags tags.Tags `json:"tags"`
|
||||
Content string `json:"content"`
|
||||
Sig string `json:"sig"`
|
||||
}
|
||||
|
||||
func New() (ev *E) { return &E{} }
|
||||
@@ -189,13 +191,13 @@ func (ev *E) FromCanonical(b []byte) (err error) {
|
||||
err = errorf.E("failed to get created_at value, got type %v expected float64", reflect.TypeOf(un[2]))
|
||||
return
|
||||
}
|
||||
ev.CreatedAt = int64(createdAt)
|
||||
ev.CreatedAt = timestamp.New(createdAt)
|
||||
var kind float64
|
||||
if kind, ok = un[3].(float64); !ok {
|
||||
err = errorf.E("failed to get kind value, got type %v expected float64", reflect.TypeOf(un[3]))
|
||||
return
|
||||
}
|
||||
ev.Kind = uint16(kind)
|
||||
ev.Kind = int(kind)
|
||||
var tags []any
|
||||
if tags, ok = un[4].([]any); !ok {
|
||||
err = errorf.E("failed to get tags value, got type %v expected []interface", reflect.TypeOf(un[4]))
|
||||
@@ -211,8 +213,8 @@ func (ev *E) FromCanonical(b []byte) (err error) {
|
||||
return
|
||||
}
|
||||
|
||||
func FromSliceInterface(in []any) (tags [][]string, err error) {
|
||||
tags = make([][]string, 0)
|
||||
func FromSliceInterface(in []any) (t tags.Tags, err error) {
|
||||
t = make(tags.Tags, 0)
|
||||
for _, v := range in {
|
||||
var ok bool
|
||||
var vv []any
|
||||
@@ -229,7 +231,7 @@ func FromSliceInterface(in []any) (tags [][]string, err error) {
|
||||
}
|
||||
tag = append(tag, x)
|
||||
}
|
||||
tags = append(tags, tag)
|
||||
t = append(t, tag)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
219
filter/filter.go
Normal file
219
filter/filter.go
Normal file
@@ -0,0 +1,219 @@
|
||||
package filter
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"slices"
|
||||
|
||||
"x.realy.lol/event"
|
||||
"x.realy.lol/helpers"
|
||||
"x.realy.lol/kind"
|
||||
"x.realy.lol/timestamp"
|
||||
)
|
||||
|
||||
// Filters is a set of Filter values; an event matches the set when it matches
// at least one member (see Filters.Match).
type Filters []Filter

// Filter expresses a query for events. JSON (de)serialization is implemented
// by the hand-written easyjson code in filter_easyjson.go, not by struct tags.
type Filter struct {
	IDs     []string             // event ids to match exactly
	Kinds   []int                // acceptable event kinds
	Authors []string             // acceptable author pubkeys
	Tags    TagMap               // tag filters, keyed by tag name without the "#" prefix
	Since   *timestamp.Timestamp // events created before this are rejected by Matches
	Until   *timestamp.Timestamp // events created after this are rejected by Matches
	Limit   int                  // requested maximum result count; enforced by the caller, not here
	Search  string               // free-text search query

	// LimitZero is or must be set when there is a "limit":0 in the filter, and not when "limit" is just omitted
	LimitZero bool `json:"-"`
}

// TagMap maps a tag name (e.g. "e", "p") to the list of acceptable values.
type TagMap map[string][]string
|
||||
|
||||
func (eff Filters) String() string {
|
||||
j, _ := json.Marshal(eff)
|
||||
return string(j)
|
||||
}
|
||||
|
||||
func (eff Filters) Match(event *event.E) bool {
|
||||
for _, filter := range eff {
|
||||
if filter.Matches(event) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (eff Filters) MatchIgnoringTimestampConstraints(event *event.E) bool {
|
||||
for _, filter := range eff {
|
||||
if filter.MatchesIgnoringTimestampConstraints(event) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (ef Filter) String() string {
|
||||
j, _ := json.Marshal(ef)
|
||||
return string(j)
|
||||
}
|
||||
|
||||
func (ef Filter) Matches(event *event.E) bool {
|
||||
if !ef.MatchesIgnoringTimestampConstraints(event) {
|
||||
return false
|
||||
}
|
||||
|
||||
if ef.Since != nil && event.CreatedAt < *ef.Since {
|
||||
return false
|
||||
}
|
||||
|
||||
if ef.Until != nil && event.CreatedAt > *ef.Until {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (ef Filter) MatchesIgnoringTimestampConstraints(event *event.E) bool {
|
||||
if event == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if ef.IDs != nil && !slices.Contains(ef.IDs, event.Id) {
|
||||
return false
|
||||
}
|
||||
|
||||
if ef.Kinds != nil && !slices.Contains(ef.Kinds, event.Kind) {
|
||||
return false
|
||||
}
|
||||
|
||||
if ef.Authors != nil && !slices.Contains(ef.Authors, event.Pubkey) {
|
||||
return false
|
||||
}
|
||||
|
||||
for f, v := range ef.Tags {
|
||||
if v != nil && !event.Tags.ContainsAny(f, v) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func FilterEqual(a Filter, b Filter) bool {
|
||||
if !helpers.Similar(a.Kinds, b.Kinds) {
|
||||
return false
|
||||
}
|
||||
|
||||
if !helpers.Similar(a.IDs, b.IDs) {
|
||||
return false
|
||||
}
|
||||
|
||||
if !helpers.Similar(a.Authors, b.Authors) {
|
||||
return false
|
||||
}
|
||||
|
||||
if len(a.Tags) != len(b.Tags) {
|
||||
return false
|
||||
}
|
||||
|
||||
for f, av := range a.Tags {
|
||||
if bv, ok := b.Tags[f]; !ok {
|
||||
return false
|
||||
} else {
|
||||
if !helpers.Similar(av, bv) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !helpers.ArePointerValuesEqual(a.Since, b.Since) {
|
||||
return false
|
||||
}
|
||||
|
||||
if !helpers.ArePointerValuesEqual(a.Until, b.Until) {
|
||||
return false
|
||||
}
|
||||
|
||||
if a.Search != b.Search {
|
||||
return false
|
||||
}
|
||||
|
||||
if a.LimitZero != b.LimitZero {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (ef Filter) Clone() Filter {
|
||||
clone := Filter{
|
||||
IDs: slices.Clone(ef.IDs),
|
||||
Authors: slices.Clone(ef.Authors),
|
||||
Kinds: slices.Clone(ef.Kinds),
|
||||
Limit: ef.Limit,
|
||||
Search: ef.Search,
|
||||
LimitZero: ef.LimitZero,
|
||||
}
|
||||
|
||||
if ef.Tags != nil {
|
||||
clone.Tags = make(TagMap, len(ef.Tags))
|
||||
for k, v := range ef.Tags {
|
||||
clone.Tags[k] = slices.Clone(v)
|
||||
}
|
||||
}
|
||||
|
||||
if ef.Since != nil {
|
||||
since := *ef.Since
|
||||
clone.Since = &since
|
||||
}
|
||||
|
||||
if ef.Until != nil {
|
||||
until := *ef.Until
|
||||
clone.Until = &until
|
||||
}
|
||||
|
||||
return clone
|
||||
}
|
||||
|
||||
// GetTheoreticalLimit gets the maximum number of events that a normal filter would ever return, for example, if
|
||||
// there is a number of "ids" in the filter, the theoretical limit will be that number of ids.
|
||||
//
|
||||
// It returns -1 if there are no theoretical limits.
|
||||
//
|
||||
// The given .Limit present in the filter is ignored.
|
||||
func GetTheoreticalLimit(filter Filter) int {
|
||||
if len(filter.IDs) > 0 {
|
||||
return len(filter.IDs)
|
||||
}
|
||||
|
||||
if len(filter.Kinds) == 0 {
|
||||
return -1
|
||||
}
|
||||
|
||||
if len(filter.Authors) > 0 {
|
||||
allAreReplaceable := true
|
||||
for _, k := range filter.Kinds {
|
||||
if !kind.IsReplaceableKind(k) {
|
||||
allAreReplaceable = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if allAreReplaceable {
|
||||
return len(filter.Authors) * len(filter.Kinds)
|
||||
}
|
||||
|
||||
if len(filter.Tags["d"]) > 0 {
|
||||
allAreAddressable := true
|
||||
for _, k := range filter.Kinds {
|
||||
if !kind.IsAddressableKind(k) {
|
||||
allAreAddressable = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if allAreAddressable {
|
||||
return len(filter.Authors) * len(filter.Kinds) * len(filter.Tags["d"])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return -1
|
||||
}
|
||||
311
filter/filter_easyjson.go
Normal file
311
filter/filter_easyjson.go
Normal file
@@ -0,0 +1,311 @@
|
||||
package filter
|
||||
|
||||
import (
|
||||
"github.com/mailru/easyjson"
|
||||
"github.com/mailru/easyjson/jlexer"
|
||||
"github.com/mailru/easyjson/jwriter"
|
||||
|
||||
"x.realy.lol/timestamp"
|
||||
)
|
||||
|
||||
// suppress unused package warning
|
||||
var (
|
||||
_ *jlexer.Lexer
|
||||
_ *jwriter.Writer
|
||||
_ easyjson.Marshaler
|
||||
)
|
||||
|
||||
func easyjson4d398eaaDecodeGithubComNbdWtfGoNostr(in *jlexer.Lexer, out *Filter) {
|
||||
isTopLevel := in.IsStart()
|
||||
if in.IsNull() {
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
in.Skip()
|
||||
return
|
||||
}
|
||||
out.Tags = make(TagMap)
|
||||
in.Delim('{')
|
||||
for !in.IsDelim('}') {
|
||||
key := in.UnsafeFieldName(false)
|
||||
in.WantColon()
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
in.WantComma()
|
||||
continue
|
||||
}
|
||||
switch key {
|
||||
case "ids":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.IDs = nil
|
||||
} else {
|
||||
in.Delim('[')
|
||||
if out.IDs == nil {
|
||||
if !in.IsDelim(']') {
|
||||
out.IDs = make([]string, 0, 20)
|
||||
} else {
|
||||
out.IDs = []string{}
|
||||
}
|
||||
} else {
|
||||
out.IDs = (out.IDs)[:0]
|
||||
}
|
||||
for !in.IsDelim(']') {
|
||||
var v1 string
|
||||
v1 = string(in.String())
|
||||
out.IDs = append(out.IDs, v1)
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim(']')
|
||||
}
|
||||
case "kinds":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.Kinds = nil
|
||||
} else {
|
||||
in.Delim('[')
|
||||
if out.Kinds == nil {
|
||||
if !in.IsDelim(']') {
|
||||
out.Kinds = make([]int, 0, 8)
|
||||
} else {
|
||||
out.Kinds = []int{}
|
||||
}
|
||||
} else {
|
||||
out.Kinds = (out.Kinds)[:0]
|
||||
}
|
||||
for !in.IsDelim(']') {
|
||||
var v2 int
|
||||
v2 = int(in.Int())
|
||||
out.Kinds = append(out.Kinds, v2)
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim(']')
|
||||
}
|
||||
case "authors":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.Authors = nil
|
||||
} else {
|
||||
in.Delim('[')
|
||||
if out.Authors == nil {
|
||||
if !in.IsDelim(']') {
|
||||
out.Authors = make([]string, 0, 40)
|
||||
} else {
|
||||
out.Authors = []string{}
|
||||
}
|
||||
} else {
|
||||
out.Authors = (out.Authors)[:0]
|
||||
}
|
||||
for !in.IsDelim(']') {
|
||||
var v3 string
|
||||
v3 = string(in.String())
|
||||
out.Authors = append(out.Authors, v3)
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim(']')
|
||||
}
|
||||
case "since":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.Since = nil
|
||||
} else {
|
||||
if out.Since == nil {
|
||||
out.Since = new(timestamp.Timestamp)
|
||||
}
|
||||
*out.Since = timestamp.Timestamp(in.Int64())
|
||||
}
|
||||
case "until":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.Until = nil
|
||||
} else {
|
||||
if out.Until == nil {
|
||||
out.Until = new(timestamp.Timestamp)
|
||||
}
|
||||
*out.Until = timestamp.Timestamp(in.Int64())
|
||||
}
|
||||
case "limit":
|
||||
out.Limit = int(in.Int())
|
||||
if out.Limit == 0 {
|
||||
out.LimitZero = true
|
||||
}
|
||||
case "search":
|
||||
out.Search = string(in.String())
|
||||
default:
|
||||
if len(key) > 1 && key[0] == '#' {
|
||||
tagValues := make([]string, 0, 40)
|
||||
if !in.IsNull() {
|
||||
in.Delim('[')
|
||||
if out.Authors == nil {
|
||||
if !in.IsDelim(']') {
|
||||
tagValues = make([]string, 0, 4)
|
||||
} else {
|
||||
tagValues = []string{}
|
||||
}
|
||||
} else {
|
||||
tagValues = (tagValues)[:0]
|
||||
}
|
||||
for !in.IsDelim(']') {
|
||||
var v3 string
|
||||
v3 = string(in.String())
|
||||
tagValues = append(tagValues, v3)
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim(']')
|
||||
}
|
||||
out.Tags[key[1:]] = tagValues
|
||||
} else {
|
||||
in.SkipRecursive()
|
||||
}
|
||||
}
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim('}')
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
}
|
||||
|
||||
func easyjson4d398eaaEncodeGithubComNbdWtfGoNostr(out *jwriter.Writer, in Filter) {
|
||||
out.RawByte('{')
|
||||
first := true
|
||||
_ = first
|
||||
if len(in.IDs) != 0 {
|
||||
const prefix string = ",\"ids\":"
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
{
|
||||
out.RawByte('[')
|
||||
for v4, v5 := range in.IDs {
|
||||
if v4 > 0 {
|
||||
out.RawByte(',')
|
||||
}
|
||||
out.String(string(v5))
|
||||
}
|
||||
out.RawByte(']')
|
||||
}
|
||||
}
|
||||
if len(in.Kinds) != 0 {
|
||||
const prefix string = ",\"kinds\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
{
|
||||
out.RawByte('[')
|
||||
for v6, v7 := range in.Kinds {
|
||||
if v6 > 0 {
|
||||
out.RawByte(',')
|
||||
}
|
||||
out.Int(int(v7))
|
||||
}
|
||||
out.RawByte(']')
|
||||
}
|
||||
}
|
||||
if len(in.Authors) != 0 {
|
||||
const prefix string = ",\"authors\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
{
|
||||
out.RawByte('[')
|
||||
for v8, v9 := range in.Authors {
|
||||
if v8 > 0 {
|
||||
out.RawByte(',')
|
||||
}
|
||||
out.String(string(v9))
|
||||
}
|
||||
out.RawByte(']')
|
||||
}
|
||||
}
|
||||
if in.Since != nil {
|
||||
const prefix string = ",\"since\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int64(int64(*in.Since))
|
||||
}
|
||||
if in.Until != nil {
|
||||
const prefix string = ",\"until\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int64(int64(*in.Until))
|
||||
}
|
||||
if in.Limit != 0 || in.LimitZero {
|
||||
const prefix string = ",\"limit\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.Limit))
|
||||
}
|
||||
if in.Search != "" {
|
||||
const prefix string = ",\"search\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.Search))
|
||||
}
|
||||
for tag, values := range in.Tags {
|
||||
const prefix string = ",\"authors\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString("\"#" + tag + "\":")
|
||||
} else {
|
||||
out.RawString(",\"#" + tag + "\":")
|
||||
}
|
||||
{
|
||||
out.RawByte('[')
|
||||
for i, v := range values {
|
||||
if i > 0 {
|
||||
out.RawByte(',')
|
||||
}
|
||||
out.String(string(v))
|
||||
}
|
||||
out.RawByte(']')
|
||||
}
|
||||
}
|
||||
out.RawByte('}')
|
||||
}
|
||||
|
||||
// MarshalJSON supports json.Marshaler interface
|
||||
func (v Filter) MarshalJSON() ([]byte, error) {
|
||||
w := jwriter.Writer{NoEscapeHTML: true}
|
||||
easyjson4d398eaaEncodeGithubComNbdWtfGoNostr(&w, v)
|
||||
return w.Buffer.BuildBytes(), w.Error
|
||||
}
|
||||
|
||||
// MarshalEasyJSON supports easyjson.Marshaler interface
func (v Filter) MarshalEasyJSON(w *jwriter.Writer) {
	// HTML escaping is disabled so tag values and search strings round-trip verbatim.
	w.NoEscapeHTML = true
	easyjson4d398eaaEncodeGithubComNbdWtfGoNostr(w, v)
}
|
||||
|
||||
// UnmarshalJSON supports json.Unmarshaler interface
|
||||
func (v *Filter) UnmarshalJSON(data []byte) error {
|
||||
r := jlexer.Lexer{Data: data}
|
||||
easyjson4d398eaaDecodeGithubComNbdWtfGoNostr(&r, v)
|
||||
return r.Error()
|
||||
}
|
||||
|
||||
// UnmarshalEasyJSON supports easyjson.Unmarshaler interface
func (v *Filter) UnmarshalEasyJSON(l *jlexer.Lexer) {
	easyjson4d398eaaDecodeGithubComNbdWtfGoNostr(l, v)
}
|
||||
158
filter/filter_test.go
Normal file
158
filter/filter_test.go
Normal file
@@ -0,0 +1,158 @@
|
||||
package filter
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"slices"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"x.realy.lol/event"
|
||||
"x.realy.lol/kind"
|
||||
"x.realy.lol/timestamp"
|
||||
)
|
||||
|
||||
// TestFilterUnmarshal checks that a raw filter JSON object decodes correctly:
// "#e"/"#something" become Tags entries, "since" becomes a timestamp, and
// "search" is carried through.
func TestFilterUnmarshal(t *testing.T) {
	raw := `{"ids": ["abc"],"#e":["zzz"],"#something":["nothing","bab"],"since":1644254609,"search":"test"}`
	var f Filter
	err := json.Unmarshal([]byte(raw), &f)
	assert.NoError(t, err)

	assert.Condition(t, func() (success bool) {
		// 1644254609 is 2022-02-07 UTC; Until must stay nil because it was absent.
		if f.Since == nil || f.Since.Time().UTC().Format("2006-01-02") != "2022-02-07" ||
			f.Until != nil ||
			f.Tags == nil || len(f.Tags) != 2 || !slices.Contains(f.Tags["something"], "bab") ||
			f.Search != "test" {
			return false
		}
		return true
	}, "failed to parse filter correctly")
}
|
||||
|
||||
// TestFilterMarshal checks the serialized form: kinds in slice order, "until"
// as a bare integer, and tag filters emitted with a "#" prefix.
func TestFilterMarshal(t *testing.T) {
	until := timestamp.Timestamp(12345678)
	filterj, err := json.Marshal(Filter{
		Kinds: []int{kind.TextNote, kind.RecommendServer, kind.EncryptedDirectMessage},
		Tags:  TagMap{"fruit": {"banana", "mango"}},
		Until: &until,
	})
	assert.NoError(t, err)

	expected := `{"kinds":[1,2,4],"until":12345678,"#fruit":["banana","mango"]}`
	assert.Equal(t, expected, string(filterj))
}
|
||||
|
||||
// TestFilterUnmarshalWithLimitZero checks that an explicit "limit":0 in the
// JSON sets the LimitZero marker (distinguishing it from an omitted limit)
// while all other fields decode as usual.
func TestFilterUnmarshalWithLimitZero(t *testing.T) {
	raw := `{"ids": ["abc"],"#e":["zzz"],"limit":0,"#something":["nothing","bab"],"since":1644254609,"search":"test"}`
	var f Filter
	err := json.Unmarshal([]byte(raw), &f)
	assert.NoError(t, err)

	assert.Condition(t, func() (success bool) {
		if f.Since == nil ||
			f.Since.Time().UTC().Format("2006-01-02") != "2022-02-07" ||
			f.Until != nil ||
			f.Tags == nil || len(f.Tags) != 2 || !slices.Contains(f.Tags["something"], "bab") ||
			f.Search != "test" ||
			f.LimitZero == false {
			return false
		}
		return true
	}, "failed to parse filter correctly")
}
|
||||
|
||||
// TestFilterMarshalWithLimitZero checks that setting LimitZero forces an
// explicit "limit":0 into the output even though Limit is the zero value.
func TestFilterMarshalWithLimitZero(t *testing.T) {
	until := timestamp.Timestamp(12345678)
	filterj, err := json.Marshal(Filter{
		Kinds:     []int{kind.TextNote, kind.RecommendServer, kind.EncryptedDirectMessage},
		Tags:      TagMap{"fruit": {"banana", "mango"}},
		Until:     &until,
		LimitZero: true,
	})
	assert.NoError(t, err)

	expected := `{"kinds":[1,2,4],"until":12345678,"limit":0,"#fruit":["banana","mango"]}`
	assert.Equal(t, expected, string(filterj))
}
|
||||
|
||||
func TestFilterMatchingLive(t *testing.T) {
|
||||
var filter Filter
|
||||
var event event.E
|
||||
|
||||
json.Unmarshal([]byte(`{"kinds":[1],"authors":["a8171781fd9e90ede3ea44ddca5d3abf828fe8eedeb0f3abb0dd3e563562e1fc","1d80e5588de010d137a67c42b03717595f5f510e73e42cfc48f31bae91844d59","ed4ca520e9929dfe9efdadf4011b53d30afd0678a09aa026927e60e7a45d9244"],"since":1677033299}`), &filter)
|
||||
json.Unmarshal([]byte(`{"id":"5a127c9c931f392f6afc7fdb74e8be01c34035314735a6b97d2cf360d13cfb94","pubkey":"1d80e5588de010d137a67c42b03717595f5f510e73e42cfc48f31bae91844d59","created_at":1677033299,"kind":1,"tags":[["t","japan"]],"content":"If you like my art,I'd appreciate a coin or two!!\nZap is welcome!! Thanks.\n\n\n#japan #bitcoin #art #bananaart\nhttps://void.cat/d/CgM1bzDgHUCtiNNwfX9ajY.webp","sig":"828497508487ca1e374f6b4f2bba7487bc09fccd5cc0d1baa82846a944f8c5766918abf5878a580f1e6615de91f5b57a32e34c42ee2747c983aaf47dbf2a0255"}`), &event)
|
||||
|
||||
assert.True(t, filter.Matches(&event), "live filter should match")
|
||||
}
|
||||
|
||||
// TestFilterEquality checks that FilterEqual treats slice fields (kinds, ids,
// tag values) as unordered sets and compares Since pointers by value.
func TestFilterEquality(t *testing.T) {
	assert.True(t, FilterEqual(
		Filter{Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion}},
		Filter{Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion}},
	), "kinds filters should be equal")

	// Tag value order must not matter.
	assert.True(t, FilterEqual(
		Filter{Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion}, Tags: TagMap{"letter": {"a", "b"}}},
		Filter{Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion}, Tags: TagMap{"letter": {"b", "a"}}},
	), "kind+tags filters should be equal")

	tm := timestamp.Now()
	// Kind order must not matter either.
	assert.True(t, FilterEqual(
		Filter{
			Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion},
			Tags:  TagMap{"letter": {"a", "b"}, "fruit": {"banana"}},
			Since: &tm,
			IDs:   []string{"aaaa", "bbbb"},
		},
		Filter{
			Kinds: []int{kind.Deletion, kind.EncryptedDirectMessage},
			Tags:  TagMap{"letter": {"a", "b"}, "fruit": {"banana"}},
			Since: &tm,
			IDs:   []string{"aaaa", "bbbb"},
		},
	), "kind+2tags+since+ids filters should be equal")

	// Same length but different membership must not compare equal.
	assert.False(t, FilterEqual(
		Filter{Kinds: []int{kind.TextNote, kind.EncryptedDirectMessage, kind.Deletion}},
		Filter{Kinds: []int{kind.EncryptedDirectMessage, kind.Deletion, kind.Repost}},
	), "kinds filters shouldn't be equal")
}
|
||||
|
||||
// TestFilterClone checks that Clone produces an equal but fully independent
// copy: mutating the clone's IDs, tag value slices, tag map, or Since must
// break equality without touching the original.
func TestFilterClone(t *testing.T) {
	ts := timestamp.Now() - 60*60
	flt := Filter{
		Kinds: []int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
		Tags:  TagMap{"letter": {"a", "b"}, "fruit": {"banana"}},
		Since: &ts,
		IDs:   []string{"9894b4b5cb5166d23ee8899a4151cf0c66aec00bde101982a13b8e8ceb972df9"},
	}
	clone := flt.Clone()
	assert.True(t, FilterEqual(flt, clone), "clone is not equal:\n %v !=\n %v", flt, clone)

	clone1 := flt.Clone()
	clone1.IDs = append(clone1.IDs, "88f0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d")
	assert.False(t, FilterEqual(flt, clone1), "modifying the clone ids should cause it to not be equal anymore")

	clone2 := flt.Clone()
	clone2.Tags["letter"] = append(clone2.Tags["letter"], "c")
	assert.False(t, FilterEqual(flt, clone2), "modifying the clone tag items should cause it to not be equal anymore")

	clone3 := flt.Clone()
	clone3.Tags["g"] = []string{"drt"}
	assert.False(t, FilterEqual(flt, clone3), "modifying the clone tag map should cause it to not be equal anymore")

	clone4 := flt.Clone()
	*clone4.Since++
	assert.False(t, FilterEqual(flt, clone4), "modifying the clone since should cause it to not be equal anymore")
}
|
||||
|
||||
// TestTheoreticalLimit checks GetTheoreticalLimit: ids bound the result
// directly; authors x replaceable kinds multiply; addressable kinds further
// multiply by the number of "d" tag values; everything else is unbounded (-1).
func TestTheoreticalLimit(t *testing.T) {
	require.Equal(t, 6, GetTheoreticalLimit(Filter{IDs: []string{"a", "b", "c", "d", "e", "f"}}))
	require.Equal(t, 9, GetTheoreticalLimit(Filter{Authors: []string{"a", "b", "c"}, Kinds: []int{3, 0, 10002}}))
	require.Equal(t, 4, GetTheoreticalLimit(Filter{Authors: []string{"a", "b", "c", "d"}, Kinds: []int{10050}}))
	require.Equal(t, -1, GetTheoreticalLimit(Filter{Authors: []string{"a", "b", "c", "d"}}))
	require.Equal(t, -1, GetTheoreticalLimit(Filter{Kinds: []int{3, 0, 10002}}))
	require.Equal(t, 24, GetTheoreticalLimit(Filter{Authors: []string{"a", "b", "c", "d", "e", "f"}, Kinds: []int{30023, 30024}, Tags: TagMap{"d": []string{"aaa", "bbb"}}}))
	require.Equal(t, -1, GetTheoreticalLimit(Filter{Authors: []string{"a", "b", "c", "d", "e", "f"}, Kinds: []int{30023, 30024}}))
}
|
||||
3
go.mod
3
go.mod
@@ -7,11 +7,13 @@ require (
|
||||
github.com/davecgh/go-spew v1.1.1
|
||||
github.com/fatih/color v1.18.0
|
||||
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0
|
||||
github.com/mailru/easyjson v0.7.7
|
||||
github.com/minio/sha256-simd v1.0.1
|
||||
github.com/pkg/profile v1.7.0
|
||||
github.com/stretchr/testify v1.10.0
|
||||
github.com/templexxx/xhex v0.0.0-20200614015412-aed53437177b
|
||||
go-simpler.org/env v0.12.0
|
||||
golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa
|
||||
golang.org/x/lint v0.0.0-20241112194109-818c5a804067
|
||||
honnef.co/go/tools v0.6.1
|
||||
lukechampine.com/frand v1.5.1
|
||||
@@ -22,6 +24,7 @@ require (
|
||||
github.com/alexflint/go-scalar v1.2.0 // indirect
|
||||
github.com/felixge/fgprof v0.9.5 // indirect
|
||||
github.com/google/pprof v0.0.0-20250501235452-c0086092b71a // indirect
|
||||
github.com/josharian/intern v1.0.0 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
|
||||
github.com/mattn/go-colorable v0.1.14 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
|
||||
4
go.sum
4
go.sum
@@ -32,12 +32,14 @@ github.com/google/pprof v0.0.0-20250501235452-c0086092b71a h1:rDA3FfmxwXR+BVKKdz
|
||||
github.com/google/pprof v0.0.0-20250501235452-c0086092b71a/go.mod h1:5hDyRhoBCxViHszMt12TnOpEI4VVi+U8Gm9iphldiMA=
|
||||
github.com/ianlancetaylor/demangle v0.0.0-20210905161508-09a460cdf81d/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w=
|
||||
github.com/ianlancetaylor/demangle v0.0.0-20230524184225-eabc099b10ab/go.mod h1:gx7rwoVhcfuVKG5uya9Hs3Sxj7EIvldVofAWIUtGouw=
|
||||
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0 h1:iQTw/8FWTuc7uiaSepXwyf3o52HaUYcV+Tu66S3F5GA=
|
||||
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8=
|
||||
github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE=
|
||||
github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
|
||||
github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs=
|
||||
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
|
||||
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
||||
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
|
||||
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
|
||||
@@ -66,6 +68,8 @@ go-simpler.org/env v0.12.0 h1:kt/lBts0J1kjWJAnB740goNdvwNxt5emhYngL0Fzufs=
|
||||
go-simpler.org/env v0.12.0/go.mod h1:cc/5Md9JCUM7LVLtN0HYjPTDcI3Q8TDaPlNTAlDU+WI=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa h1:FRnLl4eNAQl8hwxVVC17teOw8kdjVDVAiFMtgUdTSRQ=
|
||||
golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa/go.mod h1:zk2irFbV9DP96SEBUUAy67IdHUaZuSnrz1n472HUCLE=
|
||||
golang.org/x/exp/typeparams v0.0.0-20231108232855-2478ac86f678 h1:1P7xPZEwZMoBoz0Yze5Nx2/4pxj6nw9ZqHWXqP0iRgQ=
|
||||
golang.org/x/exp/typeparams v0.0.0-20231108232855-2478ac86f678/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
|
||||
golang.org/x/lint v0.0.0-20241112194109-818c5a804067 h1:adDmSQyFTCiv19j015EGKJBoaa7ElV0Q1Wovb/4G7NA=
|
||||
|
||||
117
helpers/helpers.go
Normal file
117
helpers/helpers.go
Normal file
@@ -0,0 +1,117 @@
|
||||
package helpers
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
"golang.org/x/exp/constraints"
|
||||
)
|
||||
|
||||
// MAX_LOCKS is the size of the named-mutex pool: lock names hash into this
// many slots, so distinct names may share a mutex (false contention is
// accepted in exchange for a fixed-size pool).
const MAX_LOCKS = 50

var (
	// namedMutexPool is the fixed pool of mutexes used by NamedLock.
	namedMutexPool = make([]sync.Mutex, MAX_LOCKS)
)

// memhash links to the Go runtime's internal memory hash function; NOTE:
// linkname-ing runtime internals is unsupported API and may break across Go
// releases.
//
//go:noescape
//go:linkname memhash runtime.memhash
func memhash(p unsafe.Pointer, h, s uintptr) uintptr

// NamedLock locks the pool mutex selected by hashing name and returns the
// matching unlock function; typical use is `defer NamedLock(name)()`. Because
// names hash into MAX_LOCKS slots, unrelated names can contend on one mutex.
func NamedLock(name string) (unlock func()) {
	sptr := unsafe.StringData(name)
	idx := uint64(memhash(unsafe.Pointer(sptr), 0, uintptr(len(name)))) % MAX_LOCKS
	namedMutexPool[idx].Lock()
	return namedMutexPool[idx].Unlock
}
|
||||
|
||||
func Similar[E constraints.Ordered](as, bs []E) bool {
|
||||
if len(as) != len(bs) {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, a := range as {
|
||||
for _, b := range bs {
|
||||
if b == a {
|
||||
goto next
|
||||
}
|
||||
}
|
||||
// didn't find a B that corresponded to the current A
|
||||
return false
|
||||
|
||||
next:
|
||||
continue
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// EscapeString for JSON encoding according to RFC8259.
// Also encloses result in quotation marks "".
func EscapeString(dst []byte, s string) []byte {
	dst = append(dst, '"')
	for i := 0; i < len(s); i++ {
		switch c := s[i]; {
		case c == '"':
			// quotation mark
			dst = append(dst, '\\', '"')
		case c == '\\':
			// reverse solidus
			dst = append(dst, '\\', '\\')
		case c >= 0x20:
			// printable range is emitted verbatim; everything below is a control char
			dst = append(dst, c)
		case c == '\b':
			dst = append(dst, '\\', 'b')
		case c == '\t':
			dst = append(dst, '\\', 't')
		case c == '\n':
			dst = append(dst, '\\', 'n')
		case c == '\f':
			dst = append(dst, '\\', 'f')
		case c == '\r':
			dst = append(dst, '\\', 'r')
		case c < 0x09:
			// 0x00-0x07 -> \u0000..\u0007 ('0'+c is the decimal digit)
			dst = append(dst, '\\', 'u', '0', '0', '0', '0'+c)
		case c < 0x10:
			// remaining 0x0b, 0x0e, 0x0f -> \u000b, \u000e, \u000f (0x57+c is the hex letter)
			dst = append(dst, '\\', 'u', '0', '0', '0', 0x57+c)
		case c < 0x1a:
			// 0x10-0x19 -> \u0010..\u0019 (0x20+c is the decimal digit)
			dst = append(dst, '\\', 'u', '0', '0', '1', 0x20+c)
		default:
			// 0x1a-0x1f -> \u001a..\u001f (0x47+c is the hex letter)
			dst = append(dst, '\\', 'u', '0', '0', '1', 0x47+c)
		}
	}
	return append(dst, '"')
}
|
||||
|
||||
// ArePointerValuesEqual reports whether a and b are both nil, or both non-nil
// and pointing at equal values; exactly one nil pointer compares unequal.
func ArePointerValuesEqual[V comparable](a *V, b *V) bool {
	switch {
	case a == nil && b == nil:
		return true
	case a == nil || b == nil:
		return false
	default:
		return *a == *b
	}
}
|
||||
|
||||
// SubIdToSerial extracts the numeric serial prefix from a subscription id of
// the form "<serial>:<rest>". It returns -1 when no ":" separator is present.
// A malformed prefix yields 0: the parse error is deliberately ignored,
// preserving the original best-effort behavior. (The original also tested
// n > len(subId), which strings.Index can never produce; that dead condition
// has been dropped.)
func SubIdToSerial(subId string) int64 {
	prefix, _, found := strings.Cut(subId, ":")
	if !found {
		return -1
	}
	serialId, _ := strconv.ParseInt(prefix, 10, 64)
	return serialId
}
|
||||
|
||||
// IsLowerHex reports whether thing consists solely of lowercase hexadecimal
// digits (0-9, a-f). The empty string vacuously passes.
func IsLowerHex(thing string) bool {
	for _, r := range thing {
		isDigit := r >= '0' && r <= '9'
		isLowerAF := r >= 'a' && r <= 'f'
		if !isDigit && !isLowerAF {
			return false
		}
	}
	return true
}
|
||||
149
kind/kinds.go
Normal file
149
kind/kinds.go
Normal file
@@ -0,0 +1,149 @@
|
||||
package kind
|
||||
|
||||
// Nostr event kind numbers. The grouping comments follow the ranges used by
// the Is*Kind helpers in this package: regular (< 10000, except 0 and 3),
// replaceable (0, 3 and 10000-19999), ephemeral (20000-29999) and
// addressable (30000-39999). Individual kind meanings come from the various
// NIPs — TODO confirm each against the NIP index when touching this list.
const (
	// regular event kinds (< 10000)
	ProfileMetadata int = 0
	TextNote int = 1
	RecommendServer int = 2
	FollowList int = 3
	EncryptedDirectMessage int = 4
	Deletion int = 5
	Repost int = 6
	Reaction int = 7
	BadgeAward int = 8
	SimpleGroupChatMessage int = 9
	SimpleGroupThreadedReply int = 10
	SimpleGroupThread int = 11
	SimpleGroupReply int = 12
	Seal int = 13
	DirectMessage int = 14
	GenericRepost int = 16
	ReactionToWebsite int = 17
	ChannelCreation int = 40
	ChannelMetadata int = 41
	ChannelMessage int = 42
	ChannelHideMessage int = 43
	ChannelMuteUser int = 44
	Chess int = 64
	MergeRequests int = 818
	Bid int = 1021
	BidConfirmation int = 1022
	OpenTimestamps int = 1040
	GiftWrap int = 1059
	FileMetadata int = 1063
	LiveChatMessage int = 1311
	Patch int = 1617
	Issue int = 1621
	Reply int = 1622
	StatusOpen int = 1630
	StatusApplied int = 1631
	StatusClosed int = 1632
	StatusDraft int = 1633
	ProblemTracker int = 1971
	Reporting int = 1984
	Label int = 1985
	RelayReviews int = 1986
	AIEmbeddings int = 1987
	Torrent int = 2003
	TorrentComment int = 2004
	CoinjoinPool int = 2022
	CommunityPostApproval int = 4550
	JobFeedback int = 7000
	SimpleGroupPutUser int = 9000
	SimpleGroupRemoveUser int = 9001
	SimpleGroupEditMetadata int = 9002
	SimpleGroupDeleteEvent int = 9005
	SimpleGroupCreateGroup int = 9007
	SimpleGroupDeleteGroup int = 9008
	SimpleGroupCreateInvite int = 9009
	SimpleGroupJoinRequest int = 9021
	SimpleGroupLeaveRequest int = 9022
	ZapGoal int = 9041
	TidalLogin int = 9467
	ZapRequest int = 9734
	Zap int = 9735
	Highlights int = 9802
	// replaceable event kinds (10000-19999): lists and relay metadata
	MuteList int = 10000
	PinList int = 10001
	RelayListMetadata int = 10002
	BookmarkList int = 10003
	CommunityList int = 10004
	PublicChatList int = 10005
	BlockedRelayList int = 10006
	SearchRelayList int = 10007
	SimpleGroupList int = 10009
	InterestList int = 10015
	EmojiList int = 10030
	DMRelayList int = 10050
	UserServerList int = 10063
	FileStorageServerList int = 10096
	GoodWikiAuthorList int = 10101
	GoodWikiRelayList int = 10102
	NWCWalletInfo int = 13194
	// ephemeral event kinds (20000-29999): not expected to be stored
	LightningPubRPC int = 21000
	ClientAuthentication int = 22242
	NWCWalletRequest int = 23194
	NWCWalletResponse int = 23195
	NostrConnect int = 24133
	Blobs int = 24242
	HTTPAuth int = 27235
	// addressable event kinds (30000-39999): replaceable per "d" tag
	CategorizedPeopleList int = 30000
	CategorizedBookmarksList int = 30001
	RelaySets int = 30002
	BookmarkSets int = 30003
	CuratedSets int = 30004
	CuratedVideoSets int = 30005
	MuteSets int = 30007
	ProfileBadges int = 30008
	BadgeDefinition int = 30009
	InterestSets int = 30015
	StallDefinition int = 30017
	ProductDefinition int = 30018
	MarketplaceUI int = 30019
	ProductSoldAsAuction int = 30020
	Article int = 30023
	DraftArticle int = 30024
	EmojiSets int = 30030
	ModularArticleHeader int = 30040
	ModularArticleContent int = 30041
	ReleaseArtifactSets int = 30063
	ApplicationSpecificData int = 30078
	LiveEvent int = 30311
	UserStatuses int = 30315
	ClassifiedListing int = 30402
	DraftClassifiedListing int = 30403
	RepositoryAnnouncement int = 30617
	RepositoryState int = 30618
	// NOTE(review): the 39000-39003 group below appears out of numeric
	// order relative to the following 308xx/31xxx entries; values are
	// unaffected, but consider re-sorting for readability.
	SimpleGroupMetadata int = 39000
	SimpleGroupAdmins int = 39001
	SimpleGroupMembers int = 39002
	SimpleGroupRoles int = 39003
	WikiArticle int = 30818
	Redirects int = 30819
	Feed int = 31890
	DateCalendarEvent int = 31922
	TimeCalendarEvent int = 31923
	Calendar int = 31924
	CalendarEventRSVP int = 31925
	HandlerRecommendation int = 31989
	HandlerInformation int = 31990
	VideoEvent int = 34235
	ShortVideoEvent int = 34236
	VideoViewEvent int = 34237
	CommunityDefinition int = 34550
)
|
||||
|
||||
// IsRegularKind reports whether kind denotes a regular (stored, non-replaceable)
// event: anything below 10000 except kinds 0 and 3, which are replaceable.
func IsRegularKind(kind int) bool {
	switch kind {
	case 0, 3:
		return false
	}
	return kind < 10000
}
|
||||
|
||||
// IsReplaceableKind reports whether kind denotes a replaceable event:
// kinds 0 and 3, plus the whole 10000-19999 range.
func IsReplaceableKind(kind int) bool {
	if kind == 0 || kind == 3 {
		return true
	}
	return kind >= 10000 && kind < 20000
}
|
||||
|
||||
// IsEphemeralKind reports whether kind falls in the ephemeral range
// (20000-29999); such events are not expected to be stored by relays.
func IsEphemeralKind(kind int) bool {
	return kind >= 20000 && kind <= 29999
}
|
||||
|
||||
// IsAddressableKind reports whether kind falls in the addressable
// (parameterized-replaceable) range, 30000-39999.
func IsAddressableKind(kind int) bool {
	return kind >= 30000 && kind <= 39999
}
|
||||
19
kind/kinds_test.go
Normal file
19
kind/kinds_test.go
Normal file
@@ -0,0 +1,19 @@
|
||||
package kind
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func KindKindTest(t *testing.T) {
|
||||
require.True(t, IsRegularKind(1))
|
||||
require.True(t, IsRegularKind(9))
|
||||
require.True(t, IsRegularKind(1111))
|
||||
require.True(t, IsReplaceableKind(0))
|
||||
require.True(t, IsReplaceableKind(3))
|
||||
require.True(t, IsReplaceableKind(10002))
|
||||
require.True(t, IsReplaceableKind(10050))
|
||||
require.True(t, IsAddressableKind(30023))
|
||||
require.True(t, IsAddressableKind(39000))
|
||||
}
|
||||
47
normalize/normalize.go
Normal file
47
normalize/normalize.go
Normal file
@@ -0,0 +1,47 @@
|
||||
package normalize
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Url normalizes the url and replaces http://, https:// schemes with ws://, wss://
// and normalizes the path.
func Url(u string) string {
	if u == "" {
		return ""
	}

	u = strings.ToLower(strings.TrimSpace(u))

	// bare host names get a websocket scheme prepended; local addresses
	// default to plaintext ws://, everything else to wss://.
	switch host := strings.Split(u, ":")[0]; {
	case host == "localhost" || host == "127.0.0.1":
		u = "ws://" + u
	case !strings.HasPrefix(u, "http") && !strings.HasPrefix(u, "ws"):
		u = "wss://" + u
	}

	parsed, err := url.Parse(u)
	if err != nil {
		return ""
	}

	// map http(s) onto the equivalent websocket scheme.
	switch parsed.Scheme {
	case "http":
		parsed.Scheme = "ws"
	case "https":
		parsed.Scheme = "wss"
	}

	// drop trailing slashes so equivalent urls compare equal.
	parsed.Path = strings.TrimRight(parsed.Path, "/")

	return parsed.String()
}
|
||||
|
||||
// OkMessage takes a string message that is to be sent in an `OK` or `CLOSED` command
// and prefixes it with "<prefix>: " if it doesn't already have an acceptable prefix.
func OkMessage(reason string, prefix string) string {
	idx := strings.Index(reason, ": ")
	// an acceptable machine-readable prefix is a single space-free word
	// before the first ": ".
	hasMachinePrefix := idx != -1 && !strings.Contains(reason[:idx], " ")
	if hasMachinePrefix {
		return reason
	}
	return prefix + ": " + reason
}
|
||||
37
normalize/normalize_test.go
Normal file
37
normalize/normalize_test.go
Normal file
@@ -0,0 +1,37 @@
|
||||
package normalize
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
// urlTest pairs an input url with the output expected from Url.
type urlTest struct {
	url, expected string
}
|
||||
|
||||
// urlTests is the table driven by TestUrl. Rows whose input is itself a
// Url(...) call additionally check that Url is idempotent (normalizing an
// already-normalized url changes nothing).
var urlTests = []urlTest{
	{"", ""},
	{"wss://x.com/y", "wss://x.com/y"},
	{"wss://x.com/y/", "wss://x.com/y"},
	{"http://x.com/y", "ws://x.com/y"},
	{Url("http://x.com/y"), "ws://x.com/y"},
	{Url("wss://x.com"), "wss://x.com"},
	{Url("wss://x.com/"), "wss://x.com"},
	{Url(Url(Url("wss://x.com/"))), "wss://x.com"},
	{"wss://x.com", "wss://x.com"},
	{"wss://x.com/", "wss://x.com"},
	{"x.com////", "wss://x.com"},
	{"x.com/?x=23", "wss://x.com?x=23"},
	{"localhost:4036", "ws://localhost:4036"},
	{"localhost:4036/relay", "ws://localhost:4036/relay"},
	// "localhost" must match the whole host, not just a prefix of it.
	{"localhostmagnanimus.com", "wss://localhostmagnanimus.com"},
	{Url("localhost:4036/relay"), "ws://localhost:4036/relay"},
}
|
||||
|
||||
func TestUrl(t *testing.T) {
|
||||
for _, test := range urlTests {
|
||||
output := Url(test.url)
|
||||
assert.Equal(t, test.expected, output)
|
||||
}
|
||||
}
|
||||
38
tags/tag_test.go
Normal file
38
tags/tag_test.go
Normal file
@@ -0,0 +1,38 @@
|
||||
package tags
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
// TestTagHelpers exercises the prefix-matching, iteration, appending and
// filtering helpers of Tags against a small fixed fixture.
func TestTagHelpers(t *testing.T) {
	tags := Tags{
		Tag{"x"},
		Tag{"p", "abcdef", "wss://x.com"},
		Tag{"p", "123456", "wss://y.com"},
		Tag{"e", "eeeeee"},
		Tag{"e", "ffffff"},
	}

	assert.NotNil(t, tags.GetFirst([]string{"x"}), "failed to get existing prefix")
	// {"x", ""} requires a second element to prefix-match against, which Tag{"x"} lacks.
	assert.Nil(t, tags.GetFirst([]string{"x", ""}), "got with wrong prefix")
	assert.NotNil(t, tags.GetFirst([]string{"p", "abcdef", "wss://"}), "failed to get with existing prefix")
	assert.NotNil(t, tags.GetFirst([]string{"p", "abcdef", ""}), "failed to get with existing prefix (blank last string)")
	assert.Equal(t, "ffffff", (*(tags.GetLast([]string{"e"})))[1], "failed to get last")
	assert.Equal(t, 2, len(tags.GetAll([]string{"e", ""})), "failed to get all")
	// the All iterator must visit the same tags, in order, as GetAll collects.
	c := make(Tags, 0, 2)
	for _, tag := range tags.All([]string{"e", ""}) {
		c = append(c, tag)
	}
	assert.Equal(t, tags.GetAll([]string{"e", ""}), c)
	// AppendUnique returns a (possibly new) slice; the fixture's length stays 5.
	assert.Equal(t, 5, len(tags.AppendUnique(Tag{"e", "ffffff"})), "append unique changed the array size when existed")
	assert.Equal(t, 6, len(tags.AppendUnique(Tag{"e", "bbbbbb"})), "append unique failed to append when didn't exist")
	assert.Equal(t, "ffffff", tags.AppendUnique(Tag{"e", "eeeeee"})[4][1], "append unique changed the order")
	assert.Equal(t, "eeeeee", tags.AppendUnique(Tag{"e", "eeeeee"})[3][1], "append unique changed the order")

	// FilterOutInPlace may reorder the surviving tags, so compare as sets
	// (ElementsMatch) rather than with Equal.
	filtered := tags.FilterOut([]string{"e"})
	tags.FilterOutInPlace([]string{"e"})
	assert.ElementsMatch(t, filtered, tags)
	assert.Len(t, filtered, 3)
}
|
||||
220
tags/tags.go
Normal file
220
tags/tags.go
Normal file
@@ -0,0 +1,220 @@
|
||||
package tags
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"iter"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"x.realy.lol/helpers"
|
||||
"x.realy.lol/normalize"
|
||||
)
|
||||
|
||||
// Tag is a single nostr event tag: a name followed by zero or more value
// strings, e.g. ["p", "<pubkey>", "wss://relay.com"].
type Tag []string

// StartsWith checks if a tag contains a prefix.
// for example,
//
//	["p", "abcdef...", "wss://relay.com"]
//
// would match against
//
//	["p", "abcdef..."]
//
// or even
//
//	["p", "abcdef...", "wss://"]
//
// Fix over the original: an empty prefix used to index prefix[-1] and
// panic; it now trivially matches.
func (tag Tag) StartsWith(prefix []string) bool {
	prefixLen := len(prefix)

	// an empty prefix matches anything (and would otherwise index out of
	// range below).
	if prefixLen == 0 {
		return true
	}
	if prefixLen > len(tag) {
		return false
	}
	// check initial elements for equality
	for i := 0; i < prefixLen-1; i++ {
		if prefix[i] != tag[i] {
			return false
		}
	}
	// check last element just for a prefix
	return strings.HasPrefix(tag[prefixLen-1], prefix[prefixLen-1])
}

// Key returns the tag's name (its first element), or "" for an empty tag.
func (tag Tag) Key() string {
	if len(tag) > 0 {
		return tag[0]
	}
	return ""
}

// Value returns the tag's primary value (its second element), or "" when absent.
func (tag Tag) Value() string {
	if len(tag) > 1 {
		return tag[1]
	}
	return ""
}
|
||||
|
||||
func (tag Tag) Relay() string {
|
||||
if len(tag) > 2 && (tag[0] == "e" || tag[0] == "p") {
|
||||
return normalize.Url(tag[2])
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
type Tags []Tag
|
||||
|
||||
// GetD gets the first "d" tag (for parameterized replaceable events) value or ""
|
||||
func (tags Tags) GetD() string {
|
||||
for _, v := range tags {
|
||||
if v.StartsWith([]string{"d", ""}) {
|
||||
return v[1]
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// GetFirst gets the first tag in tags that matches the prefix, see [Tag.StartsWith]
|
||||
func (tags Tags) GetFirst(tagPrefix []string) *Tag {
|
||||
for _, v := range tags {
|
||||
if v.StartsWith(tagPrefix) {
|
||||
return &v
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetLast gets the last tag in tags that matches the prefix, see [Tag.StartsWith]
|
||||
func (tags Tags) GetLast(tagPrefix []string) *Tag {
|
||||
for i := len(tags) - 1; i >= 0; i-- {
|
||||
v := tags[i]
|
||||
if v.StartsWith(tagPrefix) {
|
||||
return &v
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetAll gets all the tags that match the prefix, see [Tag.StartsWith]
|
||||
func (tags Tags) GetAll(tagPrefix []string) Tags {
|
||||
result := make(Tags, 0, len(tags))
|
||||
for _, v := range tags {
|
||||
if v.StartsWith(tagPrefix) {
|
||||
result = append(result, v)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// All returns an iterator for all the tags that match the prefix, see [Tag.StartsWith]
|
||||
func (tags Tags) All(tagPrefix []string) iter.Seq2[int, Tag] {
|
||||
return func(yield func(int, Tag) bool) {
|
||||
for i, v := range tags {
|
||||
if v.StartsWith(tagPrefix) {
|
||||
if !yield(i, v) {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// FilterOut returns a new slice with only the elements that match the prefix, see [Tag.StartsWith]
|
||||
func (tags Tags) FilterOut(tagPrefix []string) Tags {
|
||||
filtered := make(Tags, 0, len(tags))
|
||||
for _, v := range tags {
|
||||
if !v.StartsWith(tagPrefix) {
|
||||
filtered = append(filtered, v)
|
||||
}
|
||||
}
|
||||
return filtered
|
||||
}
|
||||
|
||||
// FilterOutInPlace removes all tags that match the prefix, but potentially reorders the tags in unpredictable ways, see [Tag.StartsWith]
//
// Removal is O(n) swap-remove: a matching tag is overwritten with the
// current last tag and the slice is shortened, so surviving tags may end
// up out of their original order. Callers needing stable order should use
// FilterOut instead.
func (tags *Tags) FilterOutInPlace(tagPrefix []string) {
	for i := 0; i < len(*tags); i++ {
		tag := (*tags)[i]
		if tag.StartsWith(tagPrefix) {
			// remove this by swapping the last tag into this place
			last := len(*tags) - 1
			(*tags)[i] = (*tags)[last]
			*tags = (*tags)[0:last]
			i-- // this is so we can match this just swapped item in the next iteration
		}
	}
}
|
||||
|
||||
// AppendUnique appends a tag if it doesn't exist yet, otherwise does nothing.
|
||||
// the uniqueness comparison is done based only on the first 2 elements of the tag.
|
||||
func (tags Tags) AppendUnique(tag Tag) Tags {
|
||||
n := len(tag)
|
||||
if n > 2 {
|
||||
n = 2
|
||||
}
|
||||
|
||||
if tags.GetFirst(tag[:n]) == nil {
|
||||
return append(tags, tag)
|
||||
}
|
||||
return tags
|
||||
}
|
||||
|
||||
func (t *Tags) Scan(src any) error {
|
||||
var jtags []byte
|
||||
|
||||
switch v := src.(type) {
|
||||
case []byte:
|
||||
jtags = v
|
||||
case string:
|
||||
jtags = []byte(v)
|
||||
default:
|
||||
return errors.New("couldn't scan tags, it's not a json string")
|
||||
}
|
||||
|
||||
json.Unmarshal(jtags, &t)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (tags Tags) ContainsAny(tagName string, values []string) bool {
|
||||
for _, tag := range tags {
|
||||
if len(tag) < 2 {
|
||||
continue
|
||||
}
|
||||
|
||||
if tag[0] != tagName {
|
||||
continue
|
||||
}
|
||||
|
||||
if slices.Contains(values, tag[1]) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// Marshal Tag. Used for Serialization so string escaping should be as in RFC8259.
|
||||
func (tag Tag) marshalTo(dst []byte) []byte {
|
||||
dst = append(dst, '[')
|
||||
for i, s := range tag {
|
||||
if i > 0 {
|
||||
dst = append(dst, ',')
|
||||
}
|
||||
dst = helpers.EscapeString(dst, s)
|
||||
}
|
||||
dst = append(dst, ']')
|
||||
return dst
|
||||
}
|
||||
|
||||
// MarshalTo appends the JSON encoded byte of Tags as [][]string to dst.
|
||||
// String escaping is as described in RFC8259.
|
||||
func (tags Tags) marshalTo(dst []byte) []byte {
|
||||
dst = append(dst, '[')
|
||||
for i, tag := range tags {
|
||||
if i > 0 {
|
||||
dst = append(dst, ',')
|
||||
}
|
||||
dst = tag.marshalTo(dst)
|
||||
}
|
||||
dst = append(dst, ']')
|
||||
return dst
|
||||
}
|
||||
17
timestamp/timestamp.go
Normal file
17
timestamp/timestamp.go
Normal file
@@ -0,0 +1,17 @@
|
||||
package timestamp
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"golang.org/x/exp/constraints"
|
||||
)
|
||||
|
||||
// Timestamp is a unix timestamp (seconds since epoch) as carried in nostr
// events' created_at field.
type Timestamp int64

// Now returns the current wall-clock time as a Timestamp.
func Now() Timestamp { return Timestamp(time.Now().Unix()) }

// New converts any numeric value — assumed to already be unix seconds — to
// a Timestamp. Float inputs are truncated toward zero by the conversion.
func New[T constraints.Integer | constraints.Float](t T) Timestamp {
	return Timestamp(t)
}

// Time converts the Timestamp back to a time.Time (local zone, per time.Unix).
func (t Timestamp) Time() time.Time { return time.Unix(int64(t), 0) }
|
||||
Reference in New Issue
Block a user