Fix binary tag value handling for e/p tags across database layer
- Update nostr library to v1.0.3 with improved binary tag support
- Replace tag.Value() calls with tag.ValueHex() to handle both binary and hex formats
- Add NormalizeTagValueForHash() for consistent filter tag normalization
- Update QueryPTagGraph to handle binary-encoded and hex-encoded pubkeys
- Fix tag matching in query-events.go using TagValuesMatchUsingTagMethods
- Add filter_utils.go with tag normalization helper functions
- Update delete operations in process-delete.go and neo4j/delete.go
- Fix ACL follows extraction to use ValueHex() for consistent decoding
- Add binary_tag_filter_test.go for testing tag value normalization
- Bump version to v0.30.3
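To illustrate the normalization this commit describes, here is a minimal standalone sketch (not code from this repository; the helper name is hypothetical and only the Go standard library is used): a hex-encoded e/p tag value is converted to the 33-byte binary form (32 raw bytes plus a null terminator) that stored tags use, so filter values and stored values can be compared in one encoding.

package main

import (
	"bytes"
	"encoding/hex"
	"fmt"
)

// normalizeEPTagValue (hypothetical name) converts a 64-character hex e/p tag
// value into the 33-byte binary form (32 raw bytes plus a null terminator).
// Values that are already binary, or that are not valid 64-char hex, are
// returned unchanged.
func normalizeEPTagValue(val []byte) []byte {
	if len(val) == 33 && val[32] == 0 {
		return val // already binary-encoded
	}
	if len(val) != 64 {
		return val // not a hex-encoded 32-byte value
	}
	raw, err := hex.DecodeString(string(val))
	if err != nil {
		return val
	}
	return append(raw, 0) // 32 bytes + null terminator
}

func main() {
	hexVal := []byte("8b1180c2e03cbf83ab048068a7f7d6959ff0331761aba867aaecdc793045c1bc")
	bin := normalizeEPTagValue(hexVal)
	fmt.Println(len(bin), bin[32] == 0)                     // 33 true
	fmt.Println(bytes.Equal(normalizeEPTagValue(bin), bin)) // true: normalization is idempotent
}

The helpers added in pkg/database/filter_utils.go below apply the same idea to filter tags before index lookups and tag comparisons.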
@@ -137,7 +137,8 @@
"Bash(/tmp/orly-test help:*)",
"Bash(go version:*)",
"Bash(ss:*)",
"Bash(CGO_ENABLED=0 go clean:*)"
"Bash(CGO_ENABLED=0 go clean:*)",
"Bash(CGO_ENABLED=0 timeout 30 go test:*)"
],
"deny": [],
"ask": []
@@ -63,7 +63,8 @@ func testPrivilegedEventFiltering(events event.S, authedPubkey []byte, aclMode s
continue
}
// Fall back to hex decoding for non-binary values
pt, err := hex.Dec(string(pTag.Value()))
// Use ValueHex() which handles both binary and hex storage formats
pt, err := hex.Dec(string(pTag.ValueHex()))
if err != nil {
continue
}
go.mod (18 changed lines)
@@ -3,27 +3,23 @@ module next.orly.dev
go 1.25.3

require (
git.mleku.dev/mleku/nostr v1.0.2
git.mleku.dev/mleku/nostr v1.0.3
github.com/adrg/xdg v0.5.3
github.com/davecgh/go-spew v1.1.1
github.com/dgraph-io/badger/v4 v4.8.0
github.com/dgraph-io/dgo/v230 v230.0.1
github.com/ebitengine/purego v0.9.1
github.com/gorilla/websocket v1.5.3
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0
github.com/klauspost/compress v1.18.1
github.com/minio/sha256-simd v1.0.1
github.com/nbd-wtf/go-nostr v0.52.0
github.com/neo4j/neo4j-go-driver/v5 v5.28.4
github.com/pkg/profile v1.7.0
github.com/puzpuzpuz/xsync/v3 v3.5.1
github.com/stretchr/testify v1.11.1
github.com/templexxx/xhex v0.0.0-20200614015412-aed53437177b
github.com/vertex-lab/nostr-sqlite v0.3.2
go-simpler.org/env v0.12.0
go.uber.org/atomic v1.11.0
golang.org/x/crypto v0.45.0
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6
golang.org/x/lint v0.0.0-20241112194109-818c5a804067
golang.org/x/net v0.47.0
google.golang.org/grpc v1.76.0
honnef.co/go/tools v0.6.1
lol.mleku.dev v1.0.5
@@ -40,10 +36,12 @@ require (
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cloudwego/base64x v0.1.5 // indirect
github.com/coder/websocket v1.8.12 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/decred/dcrd/crypto/blake256 v1.1.0 // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect
github.com/dgraph-io/ristretto/v2 v2.3.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/ebitengine/purego v0.9.1 // indirect
github.com/felixge/fgprof v0.9.5 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
@@ -58,22 +56,24 @@ require (
github.com/mattn/go-sqlite3 v1.14.32 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/nbd-wtf/go-nostr v0.52.0 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
github.com/templexxx/cpu v0.1.1 // indirect
github.com/templexxx/xhex v0.0.0-20200614015412-aed53437177b // indirect
github.com/tidwall/gjson v1.18.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/vertex-lab/nostr-sqlite v0.3.2 // indirect
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
go.opentelemetry.io/otel v1.38.0 // indirect
go.opentelemetry.io/otel/metric v1.38.0 // indirect
go.opentelemetry.io/otel/trace v1.38.0 // indirect
golang.org/x/arch v0.15.0 // indirect
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6 // indirect
golang.org/x/exp/typeparams v0.0.0-20251023183803-a4bb9ffd2546 // indirect
golang.org/x/mod v0.30.0 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/sync v0.18.0 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.31.0 // indirect
go.sum (26 changed lines)
@@ -1,6 +1,6 @@
|
||||
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
git.mleku.dev/mleku/nostr v1.0.2 h1:SbCUoja9baTOEybQdtTkUcJWWNMAMsVzI/OXh+ZuSKw=
|
||||
git.mleku.dev/mleku/nostr v1.0.2/go.mod h1:swI7bWLc7yU1jd7PLCCIrIcUR3Ug5O+GPvpub/w6eTY=
|
||||
git.mleku.dev/mleku/nostr v1.0.3 h1:dWpGVzIOrjeWVnDnrX039s2LvcfHwDIo47NyyO1CBbs=
|
||||
git.mleku.dev/mleku/nostr v1.0.3/go.mod h1:swI7bWLc7yU1jd7PLCCIrIcUR3Ug5O+GPvpub/w6eTY=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg=
|
||||
github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
|
||||
@@ -8,7 +8,6 @@ github.com/ImVexed/fasturl v0.0.0-20230304231329-4e41488060f3 h1:ClzzXMDDuUbWfNN
|
||||
github.com/ImVexed/fasturl v0.0.0-20230304231329-4e41488060f3/go.mod h1:we0YA5CsBbH5+/NUzC/AlMmxaDtWlXeNsqrwXjTzmzA=
|
||||
github.com/adrg/xdg v0.5.3 h1:xRnxJXne7+oWDatRhR1JLnvuccuIeCoBu2rtuLqQB78=
|
||||
github.com/adrg/xdg v0.5.3/go.mod h1:nlTsY+NNiCBGCK2tpm09vRqfVzrc2fLmXGpBLF0zlTQ=
|
||||
github.com/btcsuite/btcd v0.24.2 h1:aLmxPguqxza+4ag8R1I2nnJjSu2iFn/kqtHTIImswcY=
|
||||
github.com/btcsuite/btcd/btcec/v2 v2.3.4 h1:3EJjcN70HCu/mwqlUsGK8GcNVyLVxFDlWurTXGPFfiQ=
|
||||
github.com/btcsuite/btcd/btcec/v2 v2.3.4/go.mod h1:zYzJ8etWJQIv1Ogk7OzpWjowwOdXY1W/17j2MW85J04=
|
||||
github.com/btcsuite/btcd/chaincfg/chainhash v1.1.0 h1:59Kx4K6lzOW5w6nFlA0v5+lk/6sjybR934QNHSJZPTQ=
|
||||
@@ -138,8 +137,6 @@ github.com/nbd-wtf/go-nostr v0.52.0/go.mod h1:4avYoc9mDGZ9wHsvCOhHH9vPzKucCfuYBt
|
||||
github.com/neo4j/neo4j-go-driver/v5 v5.28.4 h1:7toxehVcYkZbyxV4W3Ib9VcnyRBQPucF+VwNNmtSXi4=
|
||||
github.com/neo4j/neo4j-go-driver/v5 v5.28.4/go.mod h1:Vff8OwT7QpLm7L2yYr85XNWe9Rbqlbeb9asNXJTHO4k=
|
||||
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0=
|
||||
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
|
||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/profile v1.7.0 h1:hnbDkaNWPCLMO9wGLdBFTIZvzDrDfBM2072E1S9gJkA=
|
||||
@@ -201,13 +198,9 @@ golang.org/x/arch v0.15.0/go.mod h1:JmwW7aLIoRUKgaTzhkiEFxvcEiQGyOg9BMonBJUS7EE=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
|
||||
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
|
||||
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
|
||||
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 h1:mgKeJMpvi0yx/sU5GsxQ7p6s2wtOnGAHZWCHUM4KGzY=
|
||||
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546/go.mod h1:j/pmGrbnkbPtQfxEe5D0VQhZC6qKbfKifgD0oM7sR70=
|
||||
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6 h1:zfMcR1Cs4KNuomFFgGefv5N0czO2XZpUbxGUy8i8ug0=
|
||||
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6/go.mod h1:46edojNIoXTNOhySWIWdix628clX9ODXwPsQuG6hsK0=
|
||||
golang.org/x/exp/typeparams v0.0.0-20251023183803-a4bb9ffd2546 h1:HDjDiATsGqvuqvkDvgJjD1IgPrVekcSXVVE21JwvzGE=
|
||||
@@ -220,8 +213,7 @@ golang.org/x/lint v0.0.0-20241112194109-818c5a804067/go.mod h1:3xt1FjdF8hUf6vQPI
|
||||
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
|
||||
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
|
||||
golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
|
||||
golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk=
|
||||
golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
@@ -231,8 +223,6 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
|
||||
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
|
||||
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
|
||||
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
@@ -241,8 +231,7 @@ golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJ
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
|
||||
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
|
||||
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@@ -251,14 +240,10 @@ golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||
golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
|
||||
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
|
||||
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
|
||||
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
|
||||
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
|
||||
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
@@ -270,8 +255,7 @@ golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtn
|
||||
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||
golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
|
||||
golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
|
||||
golang.org/x/tools v0.39.0 h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ=
|
||||
golang.org/x/tools v0.39.0/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ=
|
||||
golang.org/x/tools/go/expect v0.1.1-deprecated h1:jpBZDwmgPhXsKZC6WhL20P4b/wmnpsEAGHaNy0n/rJM=
|
||||
golang.org/x/tools/go/expect v0.1.1-deprecated/go.mod h1:eihoPOH+FgIqa3FpoTwguz/bVUSGBlGQU67vpBeOrBY=
|
||||
|
||||
@@ -928,8 +928,8 @@ func (f *Follows) extractFollowedPubkeys(event *event.E) {
continue
}
// Fall back to hex decoding for non-binary values
// ValueHex() handles both formats, but we already checked binary above
if pubkey, err := hex.DecodeString(string(tag.Value())); err == nil && len(pubkey) == 32 {
// Use ValueHex() which handles both binary and hex storage formats
if pubkey, err := hex.DecodeString(string(tag.ValueHex())); err == nil && len(pubkey) == 32 {
f.AddFollow(pubkey)
}
}
pkg/database/binary_tag_filter_test.go (new file, 458 lines)
@@ -0,0 +1,458 @@
|
||||
package database
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"git.mleku.dev/mleku/nostr/encoders/event"
|
||||
"git.mleku.dev/mleku/nostr/encoders/filter"
|
||||
"git.mleku.dev/mleku/nostr/encoders/hex"
|
||||
"git.mleku.dev/mleku/nostr/encoders/kind"
|
||||
"git.mleku.dev/mleku/nostr/encoders/tag"
|
||||
"git.mleku.dev/mleku/nostr/encoders/timestamp"
|
||||
"git.mleku.dev/mleku/nostr/interfaces/signer/p8k"
|
||||
"lol.mleku.dev/chk"
|
||||
)
|
||||
|
||||
// TestBinaryTagFilterRegression tests that queries with #e and #p tags work correctly
|
||||
// even when the event's tags are stored in binary format but filter values come as hex strings.
|
||||
//
|
||||
// This is a regression test for the bug where:
|
||||
// - Events with e/p tags are stored with binary-encoded values (32 bytes + null terminator)
|
||||
// - Filters from clients use hex strings (64 characters)
|
||||
// - The mismatch caused queries with #e or #p filter tags to fail
|
||||
//
|
||||
// See: https://github.com/mleku/orly/issues/XXX
|
||||
func TestBinaryTagFilterRegression(t *testing.T) {
|
||||
// Create a temporary directory for the database
|
||||
tempDir, err := os.MkdirTemp("", "test-db-binary-tag-*")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temporary directory: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tempDir)
|
||||
|
||||
// Create a context and cancel function for the database
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
|
||||
// Initialize the database
|
||||
db, err := New(ctx, cancel, tempDir, "info")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create database: %v", err)
|
||||
}
|
||||
defer db.Close()
|
||||
|
||||
// Create signers for the test
|
||||
authorSign := p8k.MustNew()
|
||||
if err := authorSign.Generate(); chk.E(err) {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
referencedPubkeySign := p8k.MustNew()
|
||||
if err := referencedPubkeySign.Generate(); chk.E(err) {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Create a referenced event (to generate a valid event ID for e-tag)
|
||||
referencedEvent := event.New()
|
||||
referencedEvent.Kind = kind.TextNote.K
|
||||
referencedEvent.Pubkey = referencedPubkeySign.Pub()
|
||||
referencedEvent.CreatedAt = timestamp.Now().V - 7200 // 2 hours ago
|
||||
referencedEvent.Content = []byte("Referenced event")
|
||||
referencedEvent.Tags = tag.NewS()
|
||||
referencedEvent.Sign(referencedPubkeySign)
|
||||
|
||||
// Save the referenced event
|
||||
if _, err := db.SaveEvent(ctx, referencedEvent); err != nil {
|
||||
t.Fatalf("Failed to save referenced event: %v", err)
|
||||
}
|
||||
|
||||
// Get hex representations of the IDs we'll use in tags
|
||||
referencedEventIdHex := hex.Enc(referencedEvent.ID)
|
||||
referencedPubkeyHex := hex.Enc(referencedPubkeySign.Pub())
|
||||
|
||||
// Create a test event similar to the problematic case:
|
||||
// - Kind 30520 (addressable)
|
||||
// - Has d, p, e, u, t tags
|
||||
testEvent := event.New()
|
||||
testEvent.Kind = 30520 // Addressable event kind
|
||||
testEvent.Pubkey = authorSign.Pub()
|
||||
testEvent.CreatedAt = timestamp.Now().V
|
||||
testEvent.Content = []byte("Test content with binary tags")
|
||||
testEvent.Tags = tag.NewS(
|
||||
tag.NewFromAny("d", "test-d-tag-value"),
|
||||
tag.NewFromAny("p", string(referencedPubkeyHex)), // p-tag with hex pubkey
|
||||
tag.NewFromAny("e", string(referencedEventIdHex)), // e-tag with hex event ID
|
||||
tag.NewFromAny("u", "test.app"),
|
||||
tag.NewFromAny("t", "test-topic"),
|
||||
)
|
||||
testEvent.Sign(authorSign)
|
||||
|
||||
// Save the test event
|
||||
if _, err := db.SaveEvent(ctx, testEvent); err != nil {
|
||||
t.Fatalf("Failed to save test event: %v", err)
|
||||
}
|
||||
|
||||
authorPubkeyHex := hex.Enc(authorSign.Pub())
|
||||
testEventIdHex := hex.Enc(testEvent.ID)
|
||||
|
||||
// Test case 1: Query WITHOUT e/p tags (should work - baseline)
|
||||
t.Run("QueryWithoutEPTags", func(t *testing.T) {
|
||||
f := &filter.F{
|
||||
Kinds: kind.NewS(kind.New(30520)),
|
||||
Authors: tag.NewFromBytesSlice(authorSign.Pub()),
|
||||
Tags: tag.NewS(
|
||||
tag.NewFromAny("#d", "test-d-tag-value"),
|
||||
tag.NewFromAny("#u", "test.app"),
|
||||
),
|
||||
}
|
||||
|
||||
results, err := db.QueryForIds(ctx, f)
|
||||
if err != nil {
|
||||
t.Fatalf("Query without e/p tags failed: %v", err)
|
||||
}
|
||||
|
||||
if len(results) == 0 {
|
||||
t.Fatal("Expected to find event with d/u tags filter, got 0 results")
|
||||
}
|
||||
|
||||
// Verify we got the correct event
|
||||
found := false
|
||||
for _, r := range results {
|
||||
if hex.Enc(r.Id) == testEventIdHex {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("Expected event ID %s not found in results", testEventIdHex)
|
||||
}
|
||||
})
|
||||
|
||||
// Test case 2: Query WITH #p tag (this was the failing case)
|
||||
t.Run("QueryWithPTag", func(t *testing.T) {
|
||||
f := &filter.F{
|
||||
Kinds: kind.NewS(kind.New(30520)),
|
||||
Authors: tag.NewFromBytesSlice(authorSign.Pub()),
|
||||
Tags: tag.NewS(
|
||||
tag.NewFromAny("#d", "test-d-tag-value"),
|
||||
tag.NewFromAny("#p", string(referencedPubkeyHex)),
|
||||
tag.NewFromAny("#u", "test.app"),
|
||||
),
|
||||
}
|
||||
|
||||
results, err := db.QueryForIds(ctx, f)
|
||||
if err != nil {
|
||||
t.Fatalf("Query with #p tag failed: %v", err)
|
||||
}
|
||||
|
||||
if len(results) == 0 {
|
||||
t.Fatalf("REGRESSION: Expected to find event with #p tag filter, got 0 results. "+
|
||||
"This suggests the binary tag encoding fix is not working. "+
|
||||
"Author: %s, #p: %s", authorPubkeyHex, referencedPubkeyHex)
|
||||
}
|
||||
|
||||
// Verify we got the correct event
|
||||
found := false
|
||||
for _, r := range results {
|
||||
if hex.Enc(r.Id) == testEventIdHex {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("Expected event ID %s not found in results", testEventIdHex)
|
||||
}
|
||||
})
|
||||
|
||||
// Test case 3: Query WITH #e tag (this was also the failing case)
|
||||
t.Run("QueryWithETag", func(t *testing.T) {
|
||||
f := &filter.F{
|
||||
Kinds: kind.NewS(kind.New(30520)),
|
||||
Authors: tag.NewFromBytesSlice(authorSign.Pub()),
|
||||
Tags: tag.NewS(
|
||||
tag.NewFromAny("#d", "test-d-tag-value"),
|
||||
tag.NewFromAny("#e", string(referencedEventIdHex)),
|
||||
tag.NewFromAny("#u", "test.app"),
|
||||
),
|
||||
}
|
||||
|
||||
results, err := db.QueryForIds(ctx, f)
|
||||
if err != nil {
|
||||
t.Fatalf("Query with #e tag failed: %v", err)
|
||||
}
|
||||
|
||||
if len(results) == 0 {
|
||||
t.Fatalf("REGRESSION: Expected to find event with #e tag filter, got 0 results. "+
|
||||
"This suggests the binary tag encoding fix is not working. "+
|
||||
"Author: %s, #e: %s", authorPubkeyHex, referencedEventIdHex)
|
||||
}
|
||||
|
||||
// Verify we got the correct event
|
||||
found := false
|
||||
for _, r := range results {
|
||||
if hex.Enc(r.Id) == testEventIdHex {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("Expected event ID %s not found in results", testEventIdHex)
|
||||
}
|
||||
})
|
||||
|
||||
// Test case 4: Query WITH BOTH #e AND #p tags (the most complete failing case)
|
||||
t.Run("QueryWithBothEAndPTags", func(t *testing.T) {
|
||||
f := &filter.F{
|
||||
Kinds: kind.NewS(kind.New(30520)),
|
||||
Authors: tag.NewFromBytesSlice(authorSign.Pub()),
|
||||
Tags: tag.NewS(
|
||||
tag.NewFromAny("#d", "test-d-tag-value"),
|
||||
tag.NewFromAny("#e", string(referencedEventIdHex)),
|
||||
tag.NewFromAny("#p", string(referencedPubkeyHex)),
|
||||
tag.NewFromAny("#u", "test.app"),
|
||||
),
|
||||
}
|
||||
|
||||
results, err := db.QueryForIds(ctx, f)
|
||||
if err != nil {
|
||||
t.Fatalf("Query with both #e and #p tags failed: %v", err)
|
||||
}
|
||||
|
||||
if len(results) == 0 {
|
||||
t.Fatalf("REGRESSION: Expected to find event with #e and #p tag filters, got 0 results. "+
|
||||
"This is the exact regression case from the bug report. "+
|
||||
"Author: %s, #e: %s, #p: %s", authorPubkeyHex, referencedEventIdHex, referencedPubkeyHex)
|
||||
}
|
||||
|
||||
// Verify we got the correct event
|
||||
found := false
|
||||
for _, r := range results {
|
||||
if hex.Enc(r.Id) == testEventIdHex {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("Expected event ID %s not found in results", testEventIdHex)
|
||||
}
|
||||
})
|
||||
|
||||
// Test case 5: Query with kinds + #p tag (no authors)
|
||||
// Note: Queries with only kinds+tags may use different index paths
|
||||
t.Run("QueryWithKindAndPTag", func(t *testing.T) {
|
||||
f := &filter.F{
|
||||
Kinds: kind.NewS(kind.New(30520)),
|
||||
Tags: tag.NewS(
|
||||
tag.NewFromAny("#p", string(referencedPubkeyHex)),
|
||||
),
|
||||
}
|
||||
|
||||
results, err := db.QueryForIds(ctx, f)
|
||||
if err != nil {
|
||||
t.Fatalf("Query with kind+#p tag failed: %v", err)
|
||||
}
|
||||
|
||||
// This query should find results using the TagKindEnc index
|
||||
t.Logf("Query with kind+#p tag returned %d results", len(results))
|
||||
})
|
||||
|
||||
// Test case 6: Query with kinds + #e tag (no authors)
|
||||
t.Run("QueryWithKindAndETag", func(t *testing.T) {
|
||||
f := &filter.F{
|
||||
Kinds: kind.NewS(kind.New(30520)),
|
||||
Tags: tag.NewS(
|
||||
tag.NewFromAny("#e", string(referencedEventIdHex)),
|
||||
),
|
||||
}
|
||||
|
||||
results, err := db.QueryForIds(ctx, f)
|
||||
if err != nil {
|
||||
t.Fatalf("Query with kind+#e tag failed: %v", err)
|
||||
}
|
||||
|
||||
// This query should find results using the TagKindEnc index
|
||||
t.Logf("Query with kind+#e tag returned %d results", len(results))
|
||||
})
|
||||
}
|
||||
|
||||
// TestFilterNormalization tests the filter normalization utilities
|
||||
func TestFilterNormalization(t *testing.T) {
|
||||
// Test hex pubkey value (64 chars)
|
||||
hexPubkey := []byte("8b1180c2e03cbf83ab048068a7f7d6959ff0331761aba867aaecdc793045c1bc")
|
||||
|
||||
// Test IsBinaryOptimizedTag
|
||||
if !IsBinaryOptimizedTag('e') {
|
||||
t.Error("Expected 'e' to be a binary-optimized tag")
|
||||
}
|
||||
if !IsBinaryOptimizedTag('p') {
|
||||
t.Error("Expected 'p' to be a binary-optimized tag")
|
||||
}
|
||||
if IsBinaryOptimizedTag('d') {
|
||||
t.Error("Expected 'd' NOT to be a binary-optimized tag")
|
||||
}
|
||||
if IsBinaryOptimizedTag('t') {
|
||||
t.Error("Expected 't' NOT to be a binary-optimized tag")
|
||||
}
|
||||
|
||||
// Test IsValidHexValue
|
||||
if !IsValidHexValue(hexPubkey) {
|
||||
t.Error("Expected valid hex pubkey to pass IsValidHexValue")
|
||||
}
|
||||
if IsValidHexValue([]byte("not-hex")) {
|
||||
t.Error("Expected invalid hex to fail IsValidHexValue")
|
||||
}
|
||||
if IsValidHexValue([]byte("abc123")) { // Too short
|
||||
t.Error("Expected short hex to fail IsValidHexValue")
|
||||
}
|
||||
|
||||
// Test HexToBinary conversion
|
||||
binary := HexToBinary(hexPubkey)
|
||||
if binary == nil {
|
||||
t.Fatal("HexToBinary returned nil for valid hex")
|
||||
}
|
||||
if len(binary) != BinaryEncodedLen {
|
||||
t.Errorf("Expected binary length %d, got %d", BinaryEncodedLen, len(binary))
|
||||
}
|
||||
if binary[HashLen] != 0 {
|
||||
t.Error("Expected null terminator at position 32")
|
||||
}
|
||||
|
||||
// Test IsBinaryEncoded
|
||||
if !IsBinaryEncoded(binary) {
|
||||
t.Error("Expected converted binary to pass IsBinaryEncoded")
|
||||
}
|
||||
if IsBinaryEncoded(hexPubkey) {
|
||||
t.Error("Expected hex to fail IsBinaryEncoded")
|
||||
}
|
||||
|
||||
// Test BinaryToHex (round-trip)
|
||||
hexBack := BinaryToHex(binary)
|
||||
if hexBack == nil {
|
||||
t.Fatal("BinaryToHex returned nil")
|
||||
}
|
||||
if string(hexBack) != string(hexPubkey) {
|
||||
t.Errorf("Round-trip failed: expected %s, got %s", hexPubkey, hexBack)
|
||||
}
|
||||
|
||||
// Test NormalizeTagValue for p-tag (should convert hex to binary)
|
||||
normalized := NormalizeTagValue('p', hexPubkey)
|
||||
if !IsBinaryEncoded(normalized) {
|
||||
t.Error("Expected NormalizeTagValue to convert hex to binary for p-tag")
|
||||
}
|
||||
|
||||
// Test NormalizeTagValue for d-tag (should NOT convert)
|
||||
dTagValue := []byte("some-d-tag-value")
|
||||
normalizedD := NormalizeTagValue('d', dTagValue)
|
||||
if string(normalizedD) != string(dTagValue) {
|
||||
t.Error("Expected NormalizeTagValue to leave d-tag unchanged")
|
||||
}
|
||||
|
||||
// Test TagValuesMatch with different encodings
|
||||
if !TagValuesMatch('p', binary, hexPubkey) {
|
||||
t.Error("Expected binary and hex values to match for p-tag")
|
||||
}
|
||||
if !TagValuesMatch('p', hexPubkey, binary) {
|
||||
t.Error("Expected hex and binary values to match for p-tag (reverse)")
|
||||
}
|
||||
if !TagValuesMatch('p', binary, binary) {
|
||||
t.Error("Expected identical binary values to match")
|
||||
}
|
||||
if !TagValuesMatch('p', hexPubkey, hexPubkey) {
|
||||
t.Error("Expected identical hex values to match")
|
||||
}
|
||||
|
||||
// Test non-matching values
|
||||
otherHex := []byte("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")
|
||||
if TagValuesMatch('p', hexPubkey, otherHex) {
|
||||
t.Error("Expected different hex values NOT to match")
|
||||
}
|
||||
}
|
||||
|
||||
// TestNormalizeFilterTag tests the NormalizeFilterTag function
|
||||
func TestNormalizeFilterTag(t *testing.T) {
|
||||
hexPubkey := "8b1180c2e03cbf83ab048068a7f7d6959ff0331761aba867aaecdc793045c1bc"
|
||||
|
||||
// Test with #p style tag (filter format)
|
||||
pTag := tag.NewFromAny("#p", hexPubkey)
|
||||
normalized := NormalizeFilterTag(pTag)
|
||||
|
||||
if normalized == nil {
|
||||
t.Fatal("NormalizeFilterTag returned nil")
|
||||
}
|
||||
|
||||
// Check that the normalized value is binary
|
||||
normalizedValue := normalized.T[1]
|
||||
if !IsBinaryEncoded(normalizedValue) {
|
||||
t.Errorf("Expected normalized #p tag value to be binary, got length %d", len(normalizedValue))
|
||||
}
|
||||
|
||||
// Test with e style tag (event format - single letter key)
|
||||
hexEventId := "34ccd22f852544a0b7a310b50cc76189130fd3d121d1f4dd77d759862a7b7261"
|
||||
eTag := tag.NewFromAny("e", hexEventId)
|
||||
normalizedE := NormalizeFilterTag(eTag)
|
||||
|
||||
normalizedEValue := normalizedE.T[1]
|
||||
if !IsBinaryEncoded(normalizedEValue) {
|
||||
t.Errorf("Expected normalized e tag value to be binary, got length %d", len(normalizedEValue))
|
||||
}
|
||||
|
||||
// Test with non-optimized tag (should remain unchanged)
|
||||
dTag := tag.NewFromAny("#d", "some-value")
|
||||
normalizedD := NormalizeFilterTag(dTag)
|
||||
|
||||
normalizedDValue := normalizedD.T[1]
|
||||
if string(normalizedDValue) != "some-value" {
|
||||
t.Errorf("Expected #d tag value to remain unchanged, got %s", normalizedDValue)
|
||||
}
|
||||
}
|
||||
|
||||
// TestNormalizeFilter tests the full filter normalization
|
||||
func TestNormalizeFilter(t *testing.T) {
|
||||
hexPubkey := "8b1180c2e03cbf83ab048068a7f7d6959ff0331761aba867aaecdc793045c1bc"
|
||||
hexEventId := "34ccd22f852544a0b7a310b50cc76189130fd3d121d1f4dd77d759862a7b7261"
|
||||
|
||||
f := &filter.F{
|
||||
Kinds: kind.NewS(kind.New(30520)),
|
||||
Tags: tag.NewS(
|
||||
tag.NewFromAny("#d", "test-value"),
|
||||
tag.NewFromAny("#e", hexEventId),
|
||||
tag.NewFromAny("#p", hexPubkey),
|
||||
tag.NewFromAny("#u", "test.app"),
|
||||
),
|
||||
}
|
||||
|
||||
normalized := NormalizeFilter(f)
|
||||
|
||||
// Verify non-tag fields are preserved
|
||||
if normalized.Kinds == nil || normalized.Kinds.Len() != 1 {
|
||||
t.Error("Filter Kinds should be preserved")
|
||||
}
|
||||
|
||||
// Verify tags are normalized
|
||||
if normalized.Tags == nil {
|
||||
t.Fatal("Normalized filter Tags is nil")
|
||||
}
|
||||
|
||||
// Check that #e and #p tags have binary values
|
||||
for _, tg := range *normalized.Tags {
|
||||
key := tg.Key()
|
||||
if len(key) == 2 && key[0] == '#' {
|
||||
switch key[1] {
|
||||
case 'e', 'p':
|
||||
// These should have binary values
|
||||
val := tg.T[1]
|
||||
if !IsBinaryEncoded(val) {
|
||||
t.Errorf("Expected #%c tag to have binary value after normalization", key[1])
|
||||
}
|
||||
case 'd', 'u':
|
||||
// These should NOT have binary values
|
||||
val := tg.T[1]
|
||||
if IsBinaryEncoded(val) {
|
||||
t.Errorf("Expected #%c tag NOT to have binary value", key[1])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
pkg/database/filter_utils.go (new file, 253 lines)
@@ -0,0 +1,253 @@
|
||||
// Package database provides filter utilities for normalizing tag values.
|
||||
//
|
||||
// The nostr library optimizes e/p tag values by storing them in binary format
|
||||
// (32 bytes + null terminator) rather than hex strings (64 chars). However,
|
||||
// filter tags from client queries come as hex strings and don't go through
|
||||
// the same binary encoding during unmarshalling.
|
||||
//
|
||||
// This file provides utilities to normalize filter tags to match the binary
|
||||
// encoding used in stored events, ensuring consistent index lookups and
|
||||
// tag comparisons.
|
||||
package database
|
||||
|
||||
import (
|
||||
"git.mleku.dev/mleku/nostr/encoders/filter"
|
||||
"git.mleku.dev/mleku/nostr/encoders/hex"
|
||||
"git.mleku.dev/mleku/nostr/encoders/tag"
|
||||
)
|
||||
|
||||
// Tag binary encoding constants (matching the nostr library)
|
||||
const (
|
||||
// BinaryEncodedLen is the length of a binary-encoded 32-byte hash with null terminator
|
||||
BinaryEncodedLen = 33
|
||||
// HexEncodedLen is the length of a hex-encoded 32-byte hash
|
||||
HexEncodedLen = 64
|
||||
// HashLen is the raw length of a hash (pubkey/event ID)
|
||||
HashLen = 32
|
||||
)
|
||||
|
||||
// binaryOptimizedTags defines which tag keys use binary encoding optimization
|
||||
var binaryOptimizedTags = map[byte]bool{
|
||||
'e': true, // event references
|
||||
'p': true, // pubkey references
|
||||
}
|
||||
|
||||
// IsBinaryOptimizedTag returns true if the given tag key uses binary encoding
|
||||
func IsBinaryOptimizedTag(key byte) bool {
|
||||
return binaryOptimizedTags[key]
|
||||
}
|
||||
|
||||
// IsBinaryEncoded checks if a value field is stored in optimized binary format
|
||||
func IsBinaryEncoded(val []byte) bool {
|
||||
return len(val) == BinaryEncodedLen && val[HashLen] == 0
|
||||
}
|
||||
|
||||
// IsValidHexValue checks if a byte slice is a valid 64-character hex string
|
||||
func IsValidHexValue(b []byte) bool {
|
||||
if len(b) != HexEncodedLen {
|
||||
return false
|
||||
}
|
||||
return IsHexString(b)
|
||||
}
|
||||
|
||||
// HexToBinary converts a 64-character hex string to 33-byte binary format
|
||||
// Returns nil if the input is not a valid hex string
|
||||
func HexToBinary(hexVal []byte) []byte {
|
||||
if !IsValidHexValue(hexVal) {
|
||||
return nil
|
||||
}
|
||||
binVal := make([]byte, BinaryEncodedLen)
|
||||
if _, err := hex.DecBytes(binVal[:HashLen], hexVal); err != nil {
|
||||
return nil
|
||||
}
|
||||
binVal[HashLen] = 0 // null terminator
|
||||
return binVal
|
||||
}
|
||||
|
||||
// BinaryToHex converts a 33-byte binary value to 64-character hex string
|
||||
// Returns nil if the input is not in binary format
|
||||
func BinaryToHex(binVal []byte) []byte {
|
||||
if !IsBinaryEncoded(binVal) {
|
||||
return nil
|
||||
}
|
||||
return hex.EncAppend(nil, binVal[:HashLen])
|
||||
}
|
||||
|
||||
// NormalizeTagValue normalizes a tag value for the given key.
|
||||
// For e/p tags, hex values are converted to binary format.
|
||||
// Other tags are returned unchanged.
|
||||
func NormalizeTagValue(key byte, val []byte) []byte {
|
||||
if !IsBinaryOptimizedTag(key) {
|
||||
return val
|
||||
}
|
||||
// If already binary, return as-is
|
||||
if IsBinaryEncoded(val) {
|
||||
return val
|
||||
}
|
||||
// If valid hex, convert to binary
|
||||
if binVal := HexToBinary(val); binVal != nil {
|
||||
return binVal
|
||||
}
|
||||
// Otherwise return as-is
|
||||
return val
|
||||
}
|
||||
|
||||
// NormalizeTagToHex returns the hex representation of a tag value.
|
||||
// For binary-encoded values, converts to hex. For hex values, returns as-is.
|
||||
func NormalizeTagToHex(val []byte) []byte {
|
||||
if IsBinaryEncoded(val) {
|
||||
return BinaryToHex(val)
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
// NormalizeFilterTag creates a new tag with binary-encoded values for e/p tags.
|
||||
// The original tag is not modified.
|
||||
func NormalizeFilterTag(t *tag.T) *tag.T {
|
||||
if t == nil || t.Len() < 2 {
|
||||
return t
|
||||
}
|
||||
|
||||
keyBytes := t.Key()
|
||||
var key byte
|
||||
|
||||
// Handle both "e" and "#e" style keys
|
||||
if len(keyBytes) == 1 {
|
||||
key = keyBytes[0]
|
||||
} else if len(keyBytes) == 2 && keyBytes[0] == '#' {
|
||||
key = keyBytes[1]
|
||||
} else {
|
||||
return t // Not a single-letter tag
|
||||
}
|
||||
|
||||
if !IsBinaryOptimizedTag(key) {
|
||||
return t // Not an optimized tag type
|
||||
}
|
||||
|
||||
// Create new tag with normalized values
|
||||
normalized := tag.NewWithCap(t.Len())
|
||||
normalized.T = append(normalized.T, t.T[0]) // Keep key as-is
|
||||
|
||||
// Normalize each value
|
||||
for _, val := range t.T[1:] {
|
||||
normalizedVal := NormalizeTagValue(key, val)
|
||||
normalized.T = append(normalized.T, normalizedVal)
|
||||
}
|
||||
|
||||
return normalized
|
||||
}
|
||||
|
||||
// NormalizeFilterTags normalizes all tags in a tag.S, converting e/p hex values to binary.
|
||||
// Returns a new tag.S with normalized tags.
|
||||
func NormalizeFilterTags(tags *tag.S) *tag.S {
|
||||
if tags == nil || tags.Len() == 0 {
|
||||
return tags
|
||||
}
|
||||
|
||||
normalized := tag.NewSWithCap(tags.Len())
|
||||
for _, t := range *tags {
|
||||
normalizedTag := NormalizeFilterTag(t)
|
||||
normalized.Append(normalizedTag)
|
||||
}
|
||||
return normalized
|
||||
}
|
||||
|
||||
// NormalizeFilter normalizes a filter's tags for consistent database queries.
|
||||
// This should be called before using a filter for database lookups.
|
||||
// The original filter is not modified; a copy with normalized tags is returned.
|
||||
func NormalizeFilter(f *filter.F) *filter.F {
|
||||
if f == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Create a shallow copy of the filter
|
||||
normalized := &filter.F{
|
||||
Ids: f.Ids,
|
||||
Kinds: f.Kinds,
|
||||
Authors: f.Authors,
|
||||
Since: f.Since,
|
||||
Until: f.Until,
|
||||
Search: f.Search,
|
||||
Limit: f.Limit,
|
||||
}
|
||||
|
||||
// Normalize the tags
|
||||
normalized.Tags = NormalizeFilterTags(f.Tags)
|
||||
|
||||
return normalized
|
||||
}
|
||||
|
||||
// TagValuesMatch compares two tag values, handling both binary and hex encodings.
|
||||
// This is useful for post-query tag matching where event values may be binary
|
||||
// and filter values may be hex (or vice versa).
|
||||
func TagValuesMatch(key byte, eventVal, filterVal []byte) bool {
|
||||
// If both are the same, they match
|
||||
if len(eventVal) == len(filterVal) {
|
||||
for i := range eventVal {
|
||||
if eventVal[i] != filterVal[i] {
|
||||
goto different
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
different:
|
||||
|
||||
// For non-optimized tags, require exact match
|
||||
if !IsBinaryOptimizedTag(key) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Normalize both to hex and compare
|
||||
eventHex := NormalizeTagToHex(eventVal)
|
||||
filterHex := NormalizeTagToHex(filterVal)
|
||||
|
||||
if len(eventHex) != len(filterHex) {
|
||||
return false
|
||||
}
|
||||
for i := range eventHex {
|
||||
if eventHex[i] != filterHex[i] {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// TagValuesMatchUsingTagMethods compares an event tag's value with a filter value
|
||||
// using the tag.T methods. This leverages the nostr library's ValueHex() method
|
||||
// for proper binary/hex conversion.
|
||||
func TagValuesMatchUsingTagMethods(eventTag *tag.T, filterVal []byte) bool {
|
||||
if eventTag == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
keyBytes := eventTag.Key()
|
||||
if len(keyBytes) != 1 {
|
||||
// Not a single-letter tag, use direct comparison
|
||||
return bytesEqual(eventTag.Value(), filterVal)
|
||||
}
|
||||
|
||||
key := keyBytes[0]
|
||||
if !IsBinaryOptimizedTag(key) {
|
||||
// Not an optimized tag, use direct comparison
|
||||
return bytesEqual(eventTag.Value(), filterVal)
|
||||
}
|
||||
|
||||
// For e/p tags, use ValueHex() for proper conversion
|
||||
eventHex := eventTag.ValueHex()
|
||||
filterHex := NormalizeTagToHex(filterVal)
|
||||
|
||||
return bytesEqual(eventHex, filterHex)
|
||||
}
|
||||
|
||||
// bytesEqual is a fast equality check that avoids allocation
|
||||
func bytesEqual(a, b []byte) bool {
|
||||
if len(a) != len(b) {
|
||||
return false
|
||||
}
|
||||
for i := range a {
|
||||
if a[i] != b[i] {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
@@ -30,6 +30,16 @@ func IsHexString(data []byte) (isHex bool) {
return true
}

// NormalizeTagValueForHash normalizes a tag value for consistent hashing.
// For 'e' and 'p' tags, the nostr library stores values in binary format (32 bytes),
// but filters from clients come with hex strings (64 chars). This function ensures
// that filter values are converted to binary to match the stored index format.
//
// This function delegates to NormalizeTagValue from filter_utils.go for consistency.
func NormalizeTagValueForHash(key byte, valueBytes []byte) []byte {
return NormalizeTagValue(key, valueBytes)
}

// CreateIdHashFromData creates an IdHash from data that could be hex or binary
func CreateIdHashFromData(data []byte) (i *types2.IdHash, err error) {
i = new(types2.IdHash)
@@ -190,14 +200,18 @@ func GetIndexesFromFilter(f *filter.F) (idxs []Range, err error) {
keyBytes := t.Key()
key := new(types2.Letter)
// If the tag key starts with '#', use the second character as the key
var keyByte byte
if len(keyBytes) == 2 && keyBytes[0] == '#' {
key.Set(keyBytes[1])
keyByte = keyBytes[1]
} else {
key.Set(keyBytes[0])
keyByte = keyBytes[0]
}
key.Set(keyByte)
for _, valueBytes := range t.T[1:] {
// Normalize e/p tag values from hex to binary for consistent hashing
normalizedValue := NormalizeTagValueForHash(keyByte, valueBytes)
valueHash := new(types2.Ident)
valueHash.FromIdent(valueBytes)
valueHash.FromIdent(normalizedValue)
start, end := new(bytes.Buffer), new(bytes.Buffer)
idxS := indexes.TagKindPubkeyEnc(
key, valueHash, kind, p, caStart, nil,
@@ -234,14 +248,18 @@ func GetIndexesFromFilter(f *filter.F) (idxs []Range, err error) {
keyBytes := t.Key()
key := new(types2.Letter)
// If the tag key starts with '#', use the second character as the key
var keyByte byte
if len(keyBytes) == 2 && keyBytes[0] == '#' {
key.Set(keyBytes[1])
keyByte = keyBytes[1]
} else {
key.Set(keyBytes[0])
keyByte = keyBytes[0]
}
key.Set(keyByte)
for _, valueBytes := range t.T[1:] {
// Normalize e/p tag values from hex to binary for consistent hashing
normalizedValue := NormalizeTagValueForHash(keyByte, valueBytes)
valueHash := new(types2.Ident)
valueHash.FromIdent(valueBytes)
valueHash.FromIdent(normalizedValue)
start, end := new(bytes.Buffer), new(bytes.Buffer)
idxS := indexes.TagKindEnc(
key, valueHash, kind, caStart, nil,
@@ -280,14 +298,18 @@ func GetIndexesFromFilter(f *filter.F) (idxs []Range, err error) {
keyBytes := t.Key()
key := new(types2.Letter)
// If the tag key starts with '#', use the second character as the key
var keyByte byte
if len(keyBytes) == 2 && keyBytes[0] == '#' {
key.Set(keyBytes[1])
keyByte = keyBytes[1]
} else {
key.Set(keyBytes[0])
keyByte = keyBytes[0]
}
key.Set(keyByte)
for _, valueBytes := range t.T[1:] {
// Normalize e/p tag values from hex to binary for consistent hashing
normalizedValue := NormalizeTagValueForHash(keyByte, valueBytes)
valueHash := new(types2.Ident)
valueHash.FromIdent(valueBytes)
valueHash.FromIdent(normalizedValue)
start, end := new(bytes.Buffer), new(bytes.Buffer)
idxS := indexes.TagPubkeyEnc(
key, valueHash, p, caStart, nil,
@@ -318,14 +340,18 @@ func GetIndexesFromFilter(f *filter.F) (idxs []Range, err error) {
keyBytes := t.Key()
key := new(types2.Letter)
// If the tag key starts with '#', use the second character as the key
var keyByte byte
if len(keyBytes) == 2 && keyBytes[0] == '#' {
key.Set(keyBytes[1])
keyByte = keyBytes[1]
} else {
key.Set(keyBytes[0])
keyByte = keyBytes[0]
}
key.Set(keyByte)
for _, valueBytes := range t.T[1:] {
// Normalize e/p tag values from hex to binary for consistent hashing
normalizedValue := NormalizeTagValueForHash(keyByte, valueBytes)
valueHash := new(types2.Ident)
valueHash.FromIdent(valueBytes)
valueHash.FromIdent(normalizedValue)
start, end := new(bytes.Buffer), new(bytes.Buffer)
idxS := indexes.TagEnc(key, valueHash, caStart, nil)
if err = idxS.MarshalWrite(start); chk.E(err) {
@@ -29,13 +29,14 @@ func (d *D) ProcessDelete(ev *event.E, admins [][]byte) (err error) {
if eTag.Len() < 2 {
continue
}
eventId := eTag.Value()
if len(eventId) != 64 { // hex encoded event ID
// Use ValueHex() to handle both binary and hex storage formats
eventIdHex := eTag.ValueHex()
if len(eventIdHex) != 64 { // hex encoded event ID
continue
}
// Decode hex event ID
var eid []byte
if eid, err = hexenc.DecAppend(nil, eventId); chk.E(err) {
if eid, err = hexenc.DecAppend(nil, eventIdHex); chk.E(err) {
continue
}
// Fetch the event to verify ownership
@@ -281,12 +281,14 @@ func (d *D) QueryEventsWithOptions(c context.Context, f *filter.F, includeDelete
// For replaceable events, we need to check if there are any
// e-tags that reference events with the same kind and pubkey
for _, eTag := range eTags {
if len(eTag.Value()) != 64 {
// Use ValueHex() to handle both binary and hex storage formats
eTagHex := eTag.ValueHex()
if len(eTagHex) != 64 {
continue
}
// Get the event ID from the e-tag
evId := make([]byte, sha256.Size)
if _, err = hex.DecBytes(evId, eTag.Value()); err != nil {
if _, err = hex.DecBytes(evId, eTagHex); err != nil {
continue
}
@@ -363,10 +365,10 @@ func (d *D) QueryEventsWithOptions(c context.Context, f *filter.F, includeDelete
eventTag.Key(), actualKey,
) {
// Check if the event's tag value matches any of the filter's values
// Using TagValuesMatchUsingTagMethods handles binary/hex conversion
// for e/p tags automatically
for _, filterValue := range filterTag.T[1:] {
if bytes.Equal(
eventTag.Value(), filterValue,
) {
if TagValuesMatchUsingTagMethods(eventTag, filterValue) {
eventHasTag = true
break
}
@@ -78,11 +78,21 @@ func (d *D) QueryPTagGraph(f *filter.F) (sers types.Uint40s, err error) {
var pubkeySerials []*types.Uint40
for _, pTagBytes := range pTags {
var pubkeyBytes []byte
// Try to decode as hex
if pubkeyBytes, err = hex.Dec(string(pTagBytes)); chk.E(err) {
log.D.F("QueryPTagGraph: failed to decode pubkey hex: %v", err)

// Handle both binary-encoded (33 bytes) and hex-encoded (64 chars) values
// Filter tags may come as either format depending on how they were parsed
if IsBinaryEncoded(pTagBytes) {
// Already binary-encoded, extract the 32-byte hash
pubkeyBytes = pTagBytes[:HashLen]
} else {
// Try to decode as hex using NormalizeTagToHex for consistent handling
hexBytes := NormalizeTagToHex(pTagBytes)
var decErr error
if pubkeyBytes, decErr = hex.Dec(string(hexBytes)); chk.E(decErr) {
log.D.F("QueryPTagGraph: failed to decode pubkey hex: %v", decErr)
continue
}
}
if len(pubkeyBytes) != 32 {
log.D.F("QueryPTagGraph: invalid pubkey length: %d", len(pubkeyBytes))
continue
@@ -214,10 +214,11 @@ func (d *D) SaveEvent(c context.Context, ev *event.E) (
// Extract p-tag pubkeys using GetAll
pTags := ev.Tags.GetAll([]byte("p"))
for _, pTag := range pTags {
if len(pTag.T) >= 2 {
// Decode hex pubkey from p-tag
if pTag.Len() >= 2 {
// Get pubkey from p-tag, handling both binary and hex storage formats
// ValueHex() returns hex regardless of internal storage format
var ptagPubkey []byte
if ptagPubkey, err = hex.Dec(string(pTag.T[tag.Value])); err == nil && len(ptagPubkey) == 32 {
if ptagPubkey, err = hex.Dec(string(pTag.ValueHex())); err == nil && len(ptagPubkey) == 32 {
pkHex := hex.Enc(ptagPubkey)
// Skip if already added as author
if _, exists := pubkeysForGraph[pkHex]; !exists {
@@ -78,7 +78,8 @@ func (n *N) ProcessDelete(ev *event.E, admins [][]byte) error {
continue
}

eventIDStr := string(eTag.T[1])
// Use ValueHex() to correctly handle both binary and hex storage formats
eventIDStr := string(eTag.ValueHex())
eventID, err := hex.Dec(eventIDStr)
if err != nil {
continue
@@ -1 +1 @@
v0.30.2
v0.30.3