From ea4a54c5e7ae254b90156c6b423b8bfce8795cb4 Mon Sep 17 00:00:00 2001 From: mleku Date: Sun, 28 Dec 2025 11:30:11 +0200 Subject: [PATCH] Add Cashu blind signature access tokens (NIP-XX draft) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Implements privacy-preserving bearer tokens for relay access control using Cashu-style blind signatures. Tokens prove whitelist membership without linking issuance to usage. Features: - BDHKE crypto primitives (HashToCurve, Blind, Sign, Unblind, Verify) - Keyset management with weekly rotation - Token format with kind permissions and scope isolation - Generic issuer/verifier with pluggable authorization - HTTP endpoints: POST /cashu/mint, GET /cashu/keysets, GET /cashu/info - ACL adapter bridging ORLY's access control to Cashu AuthzChecker - Stateless revocation via ACL re-check on each token use - Two-token rotation for seamless renewal (max 2 weeks after blacklist) Configuration: - ORLY_CASHU_ENABLED: Enable Cashu tokens - ORLY_CASHU_TOKEN_TTL: Token validity (default: 1 week) - ORLY_CASHU_SCOPES: Allowed scopes (relay, nip46, blossom, api) - ORLY_CASHU_REAUTHORIZE: Re-check ACL on each verification Files: - pkg/cashu/bdhke/: Core blind signature cryptography - pkg/cashu/keyset/: Keyset management and rotation - pkg/cashu/token/: Token format with kind permissions - pkg/cashu/issuer/: Token issuance with authorization - pkg/cashu/verifier/: Token verification with middleware - pkg/interfaces/cashu/: AuthzChecker, KeysetStore interfaces - pkg/bunker/acl_adapter.go: ORLY ACL integration - app/handle-cashu.go: HTTP endpoints - docs/NIP-XX-CASHU-ACCESS-TOKENS.md: Full specification 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- app/config/config.go | 59 +++++ app/handle-cashu.go | 144 ++++++++++ app/server.go | 14 + docs/NIP-XX-CASHU-ACCESS-TOKENS.md | 390 +++++++++++++++++++++++++++ pkg/bunker/acl_adapter.go | 100 +++++++ pkg/cashu/bdhke/bdhke.go | 293 ++++++++++++++++++++ pkg/cashu/bdhke/bdhke_test.go | 348 ++++++++++++++++++++++++ pkg/cashu/issuer/issuer.go | 288 ++++++++++++++++++++ pkg/cashu/issuer/issuer_test.go | 296 +++++++++++++++++++++ pkg/cashu/keyset/keyset.go | 338 ++++++++++++++++++++++++ pkg/cashu/keyset/keyset_test.go | 278 +++++++++++++++++++ pkg/cashu/keyset/store.go | 74 ++++++ pkg/cashu/token/token.go | 345 ++++++++++++++++++++++++ pkg/cashu/token/token_test.go | 336 +++++++++++++++++++++++ pkg/cashu/verifier/middleware.go | 138 ++++++++++ pkg/cashu/verifier/verifier.go | 186 +++++++++++++ pkg/cashu/verifier/verifier_test.go | 396 ++++++++++++++++++++++++++++ pkg/interfaces/cashu/cashu.go | 106 ++++++++ 18 files changed, 4129 insertions(+) create mode 100644 app/handle-cashu.go create mode 100644 docs/NIP-XX-CASHU-ACCESS-TOKENS.md create mode 100644 pkg/bunker/acl_adapter.go create mode 100644 pkg/cashu/bdhke/bdhke.go create mode 100644 pkg/cashu/bdhke/bdhke_test.go create mode 100644 pkg/cashu/issuer/issuer.go create mode 100644 pkg/cashu/issuer/issuer_test.go create mode 100644 pkg/cashu/keyset/keyset.go create mode 100644 pkg/cashu/keyset/keyset_test.go create mode 100644 pkg/cashu/keyset/store.go create mode 100644 pkg/cashu/token/token.go create mode 100644 pkg/cashu/token/token_test.go create mode 100644 pkg/cashu/verifier/middleware.go create mode 100644 pkg/cashu/verifier/verifier.go create mode 100644 pkg/cashu/verifier/verifier_test.go create mode 100644 pkg/interfaces/cashu/cashu.go diff --git a/app/config/config.go 
b/app/config/config.go index 8b03766..fdf7050 100644 --- a/app/config/config.go +++ b/app/config/config.go @@ -143,6 +143,14 @@ type C struct { BunkerEnabled bool `env:"ORLY_BUNKER_ENABLED" default:"false" usage:"enable NIP-46 bunker signing service (requires WireGuard)"` BunkerPort int `env:"ORLY_BUNKER_PORT" default:"3335" usage:"internal port for bunker WebSocket (only accessible via WireGuard)"` + // Cashu access token configuration (NIP-XX) + CashuEnabled bool `env:"ORLY_CASHU_ENABLED" default:"false" usage:"enable Cashu blind signature tokens for access control"` + CashuTokenTTL string `env:"ORLY_CASHU_TOKEN_TTL" default:"168h" usage:"token validity duration (default: 1 week)"` + CashuKeysetTTL string `env:"ORLY_CASHU_KEYSET_TTL" default:"168h" usage:"keyset active signing period (default: 1 week)"` + CashuVerifyTTL string `env:"ORLY_CASHU_VERIFY_TTL" default:"504h" usage:"keyset verification period (default: 3 weeks)"` + CashuScopes string `env:"ORLY_CASHU_SCOPES" default:"relay,nip46" usage:"comma-separated list of allowed token scopes"` + CashuReauthorize bool `env:"ORLY_CASHU_REAUTHORIZE" default:"true" usage:"re-check ACL on each token verification for stateless revocation"` + // Cluster replication configuration ClusterPropagatePrivilegedEvents bool `env:"ORLY_CLUSTER_PROPAGATE_PRIVILEGED_EVENTS" default:"true" usage:"propagate privileged events (DMs, gift wraps, etc.) to relay peers for replication"` @@ -523,3 +531,54 @@ func (cfg *C) GetWireGuardConfigValues() ( cfg.BunkerEnabled, cfg.BunkerPort } + +// GetCashuConfigValues returns the Cashu access token configuration values. +// This avoids circular imports with pkg/cashu while allowing main.go to construct +// the Cashu issuer/verifier configuration. +func (cfg *C) GetCashuConfigValues() ( + enabled bool, + tokenTTL time.Duration, + keysetTTL time.Duration, + verifyTTL time.Duration, + scopes []string, + reauthorize bool, +) { + // Parse token TTL + tokenTTL = 168 * time.Hour // Default: 1 week + if cfg.CashuTokenTTL != "" { + if d, err := time.ParseDuration(cfg.CashuTokenTTL); err == nil { + tokenTTL = d + } + } + + // Parse keyset TTL + keysetTTL = 168 * time.Hour // Default: 1 week + if cfg.CashuKeysetTTL != "" { + if d, err := time.ParseDuration(cfg.CashuKeysetTTL); err == nil { + keysetTTL = d + } + } + + // Parse verify TTL + verifyTTL = 504 * time.Hour // Default: 3 weeks + if cfg.CashuVerifyTTL != "" { + if d, err := time.ParseDuration(cfg.CashuVerifyTTL); err == nil { + verifyTTL = d + } + } + + // Parse scopes + if cfg.CashuScopes != "" { + scopes = strings.Split(cfg.CashuScopes, ",") + for i := range scopes { + scopes[i] = strings.TrimSpace(scopes[i]) + } + } + + return cfg.CashuEnabled, + tokenTTL, + keysetTTL, + verifyTTL, + scopes, + cfg.CashuReauthorize +} diff --git a/app/handle-cashu.go b/app/handle-cashu.go new file mode 100644 index 0000000..7b7fe5d --- /dev/null +++ b/app/handle-cashu.go @@ -0,0 +1,144 @@ +package app + +import ( + "encoding/hex" + "encoding/json" + "net/http" + "time" + + "lol.mleku.dev/chk" + "lol.mleku.dev/log" + + "git.mleku.dev/mleku/nostr/httpauth" + "next.orly.dev/pkg/cashu/issuer" + "next.orly.dev/pkg/cashu/keyset" + "next.orly.dev/pkg/cashu/token" +) + +// CashuMintRequest is the request body for token issuance. 
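+// An illustrative request body (placeholder values, not test vectors); field
+// names follow the JSON tags below and the NIP-XX draft:
+//
+//	{
+//	  "blinded_message": "<33-byte compressed point, hex>",
+//	  "scope": "relay",
+//	  "kinds": [0, 1, 7],
+//	  "kind_ranges": [[30000, 39999]]
+//	}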
+type CashuMintRequest struct { + BlindedMessage string `json:"blinded_message"` // Hex-encoded blinded point B_ + Scope string `json:"scope"` // Token scope (e.g., "relay", "nip46") + Kinds []int `json:"kinds,omitempty"` // Permitted event kinds + KindRanges [][]int `json:"kind_ranges,omitempty"` // Permitted kind ranges +} + +// CashuMintResponse is the response body for token issuance. +type CashuMintResponse struct { + BlindedSignature string `json:"blinded_signature"` // Hex-encoded blinded signature C_ + KeysetID string `json:"keyset_id"` // Keyset ID used + Expiry int64 `json:"expiry"` // Token expiration timestamp + MintPubkey string `json:"mint_pubkey"` // Hex-encoded mint public key +} + +// handleCashuMint handles POST /cashu/mint - issues a new token. +func (s *Server) handleCashuMint(w http.ResponseWriter, r *http.Request) { + // Check if Cashu is enabled + if s.CashuIssuer == nil { + http.Error(w, "Cashu tokens not enabled", http.StatusNotImplemented) + return + } + + // Require NIP-98 authentication + valid, pubkey, err := httpauth.CheckAuth(r) + if chk.E(err) || !valid { + http.Error(w, "NIP-98 authentication required", http.StatusUnauthorized) + return + } + + // Parse request body + var req CashuMintRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Decode blinded message from hex + blindedMsg, err := hex.DecodeString(req.BlindedMessage) + if err != nil { + http.Error(w, "Invalid blinded_message: must be hex", http.StatusBadRequest) + return + } + + // Default scope + if req.Scope == "" { + req.Scope = token.ScopeRelay + } + + // Issue token + issueReq := &issuer.IssueRequest{ + BlindedMessage: blindedMsg, + Pubkey: pubkey, + Scope: req.Scope, + Kinds: req.Kinds, + KindRanges: req.KindRanges, + } + + resp, err := s.CashuIssuer.Issue(r.Context(), issueReq, r.RemoteAddr) + if err != nil { + log.W.F("Cashu mint failed for %x: %v", pubkey[:8], err) + http.Error(w, err.Error(), http.StatusForbidden) + return + } + + log.D.F("Cashu token issued for %x, scope=%s, keyset=%s", pubkey[:8], req.Scope, resp.KeysetID) + + // Return response + mintResp := CashuMintResponse{ + BlindedSignature: hex.EncodeToString(resp.BlindedSignature), + KeysetID: resp.KeysetID, + Expiry: resp.Expiry, + MintPubkey: hex.EncodeToString(resp.MintPubkey), + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(mintResp) +} + +// handleCashuKeysets handles GET /cashu/keysets - returns available keysets. +func (s *Server) handleCashuKeysets(w http.ResponseWriter, r *http.Request) { + if s.CashuIssuer == nil { + http.Error(w, "Cashu tokens not enabled", http.StatusNotImplemented) + return + } + + infos := s.CashuIssuer.GetKeysetInfo() + + type KeysetsResponse struct { + Keysets []keyset.KeysetInfo `json:"keysets"` + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(KeysetsResponse{Keysets: infos}) +} + +// handleCashuInfo handles GET /cashu/info - returns mint information. +func (s *Server) handleCashuInfo(w http.ResponseWriter, r *http.Request) { + if s.CashuIssuer == nil { + http.Error(w, "Cashu tokens not enabled", http.StatusNotImplemented) + return + } + + info := s.CashuIssuer.GetMintInfo(s.Config.AppName) + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(info) +} + +// CashuTokenTTL returns the configured token TTL. 
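+// It returns 0 when Cashu tokens are disabled.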
+func (s *Server) CashuTokenTTL() time.Duration { + enabled, tokenTTL, _, _, _, _ := s.Config.GetCashuConfigValues() + if !enabled { + return 0 + } + return tokenTTL +} + +// CashuKeysetTTL returns the configured keyset TTL. +func (s *Server) CashuKeysetTTL() time.Duration { + enabled, _, keysetTTL, _, _, _ := s.Config.GetCashuConfigValues() + if !enabled { + return 0 + } + return keysetTTL +} diff --git a/app/server.go b/app/server.go index 757dbb8..4df73d3 100644 --- a/app/server.go +++ b/app/server.go @@ -34,6 +34,8 @@ import ( "next.orly.dev/pkg/protocol/nip43" "next.orly.dev/pkg/protocol/publish" "next.orly.dev/pkg/bunker" + "next.orly.dev/pkg/cashu/issuer" + "next.orly.dev/pkg/cashu/verifier" "next.orly.dev/pkg/ratelimit" "next.orly.dev/pkg/spider" dsync "next.orly.dev/pkg/sync" @@ -85,6 +87,10 @@ type Server struct { wireguardServer *wireguard.Server bunkerServer *bunker.Server subnetPool *wireguard.SubnetPool + + // Cashu access token system (NIP-XX) + CashuIssuer *issuer.Issuer + CashuVerifier *verifier.Verifier } // isIPBlacklisted checks if an IP address is blacklisted using the managed ACL system @@ -350,6 +356,14 @@ func (s *Server) UserInterface() { s.mux.HandleFunc("/api/wireguard/status", s.handleWireGuardStatus) s.mux.HandleFunc("/api/wireguard/audit", s.handleWireGuardAudit) s.mux.HandleFunc("/api/bunker/url", s.handleBunkerURL) + + // Cashu access token endpoints (NIP-XX) + s.mux.HandleFunc("/cashu/mint", s.handleCashuMint) + s.mux.HandleFunc("/cashu/keysets", s.handleCashuKeysets) + s.mux.HandleFunc("/cashu/info", s.handleCashuInfo) + if s.CashuIssuer != nil { + log.Printf("Cashu access token API enabled at /cashu") + } } // handleFavicon serves orly-favicon.png as favicon.ico diff --git a/docs/NIP-XX-CASHU-ACCESS-TOKENS.md b/docs/NIP-XX-CASHU-ACCESS-TOKENS.md new file mode 100644 index 0000000..15aa733 --- /dev/null +++ b/docs/NIP-XX-CASHU-ACCESS-TOKENS.md @@ -0,0 +1,390 @@ +# NIP-XX: Cashu Access Tokens for Relay Authorization + +`draft` `optional` + +This NIP defines a protocol for relays to issue privacy-preserving access tokens using Cashu blind signatures. Tokens prove relay membership without linking issuance to usage, enabling spam protection while preserving user privacy. + +## Motivation + +Relays need to control access to prevent spam and abuse. Current approaches (NIP-42, NIP-98) require per-request authentication that links all user activity. Cashu blind signatures allow relays to issue bearer tokens that prove authorization without revealing which specific user is connecting. + +This is particularly useful for: +- NIP-46 remote signing (bunker) access control +- Premium relay tiers +- Rate limit bypass for trusted users +- Any service requiring proof of relay membership + +## Overview + +1. Relay operates as a Cashu mint for its authorized users +2. Users authenticate via NIP-98 to obtain blinded signatures +3. Tokens specify permitted event kinds and expiry +4. Two-token rotation allows seamless renewal before expiry +5. 
Tokens are bearer credentials passed in HTTP/WebSocket headers + +## Token Format + +### Token Structure + +```json +{ + "k": "", + "s": "", + "c": "", + "p": "", + "e": , + "kinds": [0, 1, 3, 10002], + "kind_ranges": [[20000, 29999]], + "scope": "relay" +} +``` + +| Field | Type | Description | +|-------|------|-------------| +| `k` | string | Keyset ID (hex) identifying the signing key | +| `s` | string | 32-byte random secret (hex) | +| `c` | string | 33-byte compressed signature point (hex) | +| `p` | string | 32-byte user pubkey (hex) | +| `e` | number | Unix timestamp when token expires | +| `kinds` | number[] | Explicit list of permitted event kinds | +| `kind_ranges` | number[][] | Ranges of permitted kinds as [min, max] pairs | +| `scope` | string | Token scope: "relay", "nip46", "blossom", or custom | + +### Kind Permissions + +Tokens specify which event kinds the bearer may publish: + +- `kinds`: Explicit list of individual kinds (e.g., `[0, 1, 3]`) +- `kind_ranges`: Inclusive ranges (e.g., `[[20000, 29999]]` for ephemeral events) + +A token permits a kind if it appears in `kinds` OR falls within any `kind_ranges` entry. + +Special values: +- Empty `kinds` and `kind_ranges`: No write access (read-only token) +- `kinds: [-1]`: All kinds permitted (wildcard) + +### Scopes + +| Scope | Description | +|-------|-------------| +| `relay` | Standard relay WebSocket access (REQ, EVENT, COUNT) | +| `nip46` | NIP-46 remote signing / bunker access | +| `blossom` | Blossom media server access | +| `api` | HTTP API access | + +Custom scopes may be defined by applications. + +### Serialization + +Tokens are serialized as: +``` +cashuA +``` + +The `cashuA` prefix indicates version 1 of this specification. + +## Keyset Management + +### Keyset Structure + +Relays maintain signing keysets that rotate periodically: + +```json +{ + "id": "", + "pubkey": "", + "active": true, + "created_at": 1735300000, + "expires_at": 1736510000 +} +``` + +### Keyset ID Calculation + +``` +keyset_id = SHA256(compressed_pubkey)[0:7] as hex (14 characters) +``` + +### Rotation Policy + +- **Active period**: 1 week (tokens can be issued) +- **Verification period**: 3 weeks (tokens can still be validated) +- **Total validity**: Active keyset + 2 previous keysets accepted + +This ensures tokens issued at the end of an active period remain valid for their full lifetime. + +## Two-Token Rotation + +Users may hold up to two tokens: + +| Token | State | Purpose | +|-------|-------|---------| +| Active | In use | Current authentication credential | +| Pending | Awaiting | Pre-fetched for seamless rotation | + +### Rotation Flow + +1. User obtains initial token (becomes Active) +2. When Active token reaches 50% lifetime, user requests new token (becomes Pending) +3. When Active token expires, Pending becomes Active +4. User requests new Pending token +5. Repeat + +This ensures continuous access without authentication gaps. + +### Blacklist Behavior + +If a user is removed from the relay's whitelist: +- Active token continues working until expiry (max 1 week) +- Pending token continues working until expiry (max 1 week) +- **Maximum access after blacklist: 2 weeks** + +New token requests will fail immediately upon blacklist. 
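+
+The rotation can be driven by a small client-side state machine. The sketch
+below is non-normative: the `Token` struct and the `requestNewToken` helper are
+hypothetical stand-ins, and only the rotation rule itself (pre-fetch a pending
+token at 50% of the active token's lifetime, promote it on expiry, stop
+renewing once minting fails) comes from this section.
+
+```go
+package wallet
+
+import "time"
+
+// Token is a minimal illustrative view of a held credential.
+type Token struct {
+	Serialized string // "cashuA<base64>" bearer string
+	IssuedAt   int64  // unix time the token was obtained
+	Expiry     int64  // "e" field: unix expiry timestamp
+}
+
+// Wallet holds at most two tokens per relay, per the two-token rotation scheme.
+type Wallet struct {
+	Active  *Token
+	Pending *Token
+}
+
+// Tick advances the rotation state; requestNewToken is a hypothetical helper
+// wrapping POST /cashu/mint plus client-side unblinding.
+func (w *Wallet) Tick(requestNewToken func() (*Token, error)) error {
+	now := time.Now().Unix()
+	// Promote the pending token once the active one has expired.
+	if w.Active == nil || now >= w.Active.Expiry {
+		w.Active, w.Pending = w.Pending, nil
+	}
+	if w.Active == nil {
+		t, err := requestNewToken()
+		if err != nil {
+			return err // e.g. the user has been blacklisted: minting fails immediately
+		}
+		w.Active = t
+	}
+	// Pre-fetch a replacement at 50% of the active token's lifetime.
+	half := w.Active.IssuedAt + (w.Active.Expiry-w.Active.IssuedAt)/2
+	if w.Pending == nil && now >= half {
+		if t, err := requestNewToken(); err == nil {
+			w.Pending = t
+		}
+	}
+	return nil
+}
+```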
+ +## HTTP Endpoints + +### Token Issuance + +```http +POST /cashu/mint +Authorization: Nostr +Content-Type: application/json + +{ + "blinded_message": "", + "scope": "relay", + "kinds": [0, 1, 3, 7], + "kind_ranges": [[30000, 39999]] +} +``` + +**Response:** +```json +{ + "blinded_signature": "", + "keyset_id": "", + "expiry": 1736294400, + "pubkey": "" +} +``` + +The user must: +1. Generate random secret `x` and blinding factor `r` +2. Compute `Y = hash_to_curve(x)` +3. Compute `B_ = Y + r*G` +4. Send `B_` as `blinded_message` +5. Receive `C_` as `blinded_signature` +6. Compute `C = C_ - r*K` (K is mint pubkey) +7. Token is `(x, C)` with metadata + +### Keyset Discovery + +```http +GET /cashu/keysets + +Response: +{ + "keysets": [ + { + "id": "0a1b2c3d4e5f67", + "pubkey": "02...", + "active": true, + "expires_at": 1736510000 + } + ] +} +``` + +### Token Info (Optional) + +```http +GET /cashu/info + +Response: +{ + "name": "Relay Name", + "version": "NIP-XX/1", + "token_ttl": 604800, + "max_kinds": 100, + "supported_scopes": ["relay", "nip46", "blossom"] +} +``` + +## Authentication Headers + +### WebSocket Upgrade + +```http +GET / HTTP/1.1 +Upgrade: websocket +X-Cashu-Token: cashuA +``` + +### HTTP Requests + +```http +GET /api/resource HTTP/1.1 +Authorization: Cashu cashuA +``` + +Or as dedicated header: +```http +X-Cashu-Token: cashuA +``` + +### NIP-46 Integration + +For NIP-46 bunker connections, the token is passed in the WebSocket upgrade: + +```http +GET /nip46 HTTP/1.1 +Upgrade: websocket +X-Cashu-Token: cashuA +``` + +The bunker verifies: +1. Token signature is valid +2. Token has not expired +3. Token scope is "nip46" +4. User pubkey in token matches NIP-46 connect pubkey + +## Cryptographic Details + +### Blind Diffie-Hellman Key Exchange (BDHKE) + +Uses secp256k1 curve with Cashu's hash-to-curve: + +``` +hash_to_curve(message): + msg_hash = SHA256("Secp256k1_HashToCurve_Cashu_" || message) + for counter in 0..65536: + hash = SHA256(msg_hash || counter_le32) + point = try_parse("02" || hash) + if point is valid: + return point + fail +``` + +**Blinding:** +``` +Y = hash_to_curve(secret) +r = random_scalar() +B_ = Y + r*G +``` + +**Signing (mint):** +``` +C_ = k * B_ +``` + +**Unblinding (user):** +``` +C = C_ - r*K +``` + +**Verification (mint):** +``` +valid = (C == k * hash_to_curve(secret)) +``` + +## Verification Flow + +When relay receives a token: + +1. Parse token from header +2. Find keyset by ID (must be active or recently expired) +3. Verify: `C == k * hash_to_curve(secret)` +4. Check: `expiry > now` +5. Check: scope matches service +6. Check: requested kind in `kinds` or `kind_ranges` +7. **Optional**: Re-check user pubkey against current ACL + +Step 7 provides "stateless revocation" - tokens become invalid immediately when user is removed from ACL, not just when they expire. + +## Security Considerations + +### Token as Bearer Credential + +Tokens are bearer credentials. Compromise allows impersonation until expiry. 
Mitigations: +- Short TTL (1 week recommended) +- TLS for all transport +- Secure client storage + +### Privacy Properties + +- **Unlinkability**: Relay cannot link token issuance to token use +- **No tracking**: Different secrets prevent correlation across tokens +- **Pubkey binding**: Token is bound to user's Nostr pubkey + +### Keyset Compromise + +If keyset private key is compromised: +- Rotate immediately (new keyset) +- Old keyset enters verification-only mode +- Tokens issued by compromised keyset expire naturally (max 3 weeks) + +### Replay Prevention + +- Tokens have expiry timestamps +- Optional: Relay tracks used secrets (adds state, breaks unlinkability) +- Scope prevents cross-service replay + +## Example Flow + +``` +1. Alice wants NIP-46 bunker access to relay.example.com + +2. Alice authenticates via NIP-98: + POST /cashu/mint + Authorization: Nostr + {"blinded_message": "02abc...", "scope": "nip46"} + +3. Relay checks: + - NIP-98 signature valid + - Alice's pubkey in whitelist with write access + +4. Relay responds: + {"blinded_signature": "03def...", "keyset_id": "a1b2c3...", "expiry": 1736294400} + +5. Alice unblinds signature, constructs token + +6. Alice connects to bunker: + GET /nip46 HTTP/1.1 + Upgrade: websocket + X-Cashu-Token: cashuA + +7. Bunker verifies token, establishes NIP-46 session + +8. One week later, Alice's token approaches expiry + - Alice requests new token (step 2-5) + - New token becomes Pending + - When Active expires, Pending becomes Active +``` + +## Relay Implementation Notes + +### Recommended Defaults + +| Parameter | Value | Rationale | +|-----------|-------|-----------| +| Token TTL | 7 days | Balance between convenience and revocation speed | +| Keyset rotation | Weekly | Limits key exposure | +| Verification keysets | 3 | Covers full token lifetime + grace period | +| Re-check ACL | On every use | Enables immediate revocation | + +### Error Codes + +| Code | Meaning | +|------|---------| +| 401 | Missing or malformed token | +| 403 | Valid token but insufficient permissions (wrong scope/kinds) | +| 410 | Token expired | +| 421 | Unknown keyset ID | + +## References + +- [Cashu Protocol](https://github.com/cashubtc/nuts) +- [NIP-42: Authentication of clients to relays](https://github.com/nostr-protocol/nips/blob/master/42.md) +- [NIP-46: Nostr Remote Signing](https://github.com/nostr-protocol/nips/blob/master/46.md) +- [NIP-98: HTTP Auth](https://github.com/nostr-protocol/nips/blob/master/98.md) +- [Blind Signatures for Untraceable Payments](http://www.hit.bme.hu/~buttyan/courses/BMEVIHIM219/2009/Chaum.BlindSigForPayworx.1662.pdf) diff --git a/pkg/bunker/acl_adapter.go b/pkg/bunker/acl_adapter.go new file mode 100644 index 0000000..9c92eec --- /dev/null +++ b/pkg/bunker/acl_adapter.go @@ -0,0 +1,100 @@ +// Package bunker implements NIP-46 remote signing with Cashu token authentication. +package bunker + +import ( + "context" + + "next.orly.dev/pkg/acl" + acliface "next.orly.dev/pkg/interfaces/acl" + cashuiface "next.orly.dev/pkg/interfaces/cashu" + "next.orly.dev/pkg/cashu/token" +) + +// ACLAuthzChecker adapts ORLY's ACL system to cashu.AuthzChecker. +// This allows the Cashu token system to use the existing ACL for authorization. +type ACLAuthzChecker struct { + // ScopeRequirements maps scopes to required access levels. + // If not set, defaults are used. + ScopeRequirements map[string]string +} + +// NewACLAuthzChecker creates a new ACL-based authorization checker. 
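+// By default the relay, nip46 and blossom scopes require write access and the
+// api scope requires admin; use SetScopeRequirement to override a mapping.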
+func NewACLAuthzChecker() *ACLAuthzChecker { + return &ACLAuthzChecker{ + ScopeRequirements: map[string]string{ + token.ScopeRelay: acliface.Write, // Relay access requires write + token.ScopeNIP46: acliface.Write, // Bunker access requires write + token.ScopeBlossom: acliface.Write, // Blossom access requires write + token.ScopeAPI: acliface.Admin, // API access requires admin + }, + } +} + +// CheckAuthorization checks if a pubkey is authorized for a scope. +func (a *ACLAuthzChecker) CheckAuthorization(ctx context.Context, pubkey []byte, scope string, remoteAddr string) error { + // Get access level from ACL registry + level := acl.Registry.GetAccessLevel(pubkey, remoteAddr) + + // Check against required level for scope + requiredLevel, ok := a.ScopeRequirements[scope] + if !ok { + // Default to write access for unknown scopes + requiredLevel = acliface.Write + } + + if !hasAccessLevel(level, requiredLevel) { + return cashuiface.NewAuthzError( + cashuiface.ErrCodeInsufficientAccess, + "insufficient access level for scope "+scope, + ) + } + + // Check for banned/blocked status + if level == "banned" { + return cashuiface.ErrBanned + } + if level == "blocked" { + return cashuiface.ErrBlocked + } + + return nil +} + +// ReauthorizationEnabled returns true - we always re-check ACL on each verification. +func (a *ACLAuthzChecker) ReauthorizationEnabled() bool { + return true +} + +// hasAccessLevel checks if the actual level meets or exceeds the required level. +func hasAccessLevel(actual, required string) bool { + levels := map[string]int{ + acliface.None: 0, + "banned": 0, + "blocked": 0, + acliface.Read: 1, + acliface.Write: 2, + acliface.Admin: 3, + acliface.Owner: 4, + } + + actualLevel, aok := levels[actual] + requiredLevel, rok := levels[required] + + if !aok || !rok { + return false + } + + return actualLevel >= requiredLevel +} + +// SetScopeRequirement sets the required access level for a scope. +func (a *ACLAuthzChecker) SetScopeRequirement(scope, level string) { + if a.ScopeRequirements == nil { + a.ScopeRequirements = make(map[string]string) + } + a.ScopeRequirements[scope] = level +} + +// Ensure ACLAuthzChecker implements both interfaces. +var _ cashuiface.AuthzChecker = (*ACLAuthzChecker)(nil) +var _ cashuiface.ReauthorizationChecker = (*ACLAuthzChecker)(nil) diff --git a/pkg/cashu/bdhke/bdhke.go b/pkg/cashu/bdhke/bdhke.go new file mode 100644 index 0000000..1eeb171 --- /dev/null +++ b/pkg/cashu/bdhke/bdhke.go @@ -0,0 +1,293 @@ +// Package bdhke implements Blind Diffie-Hellman Key Exchange for Cashu-style tokens. +// This is the core cryptographic primitive used in ecash blind signatures. +// +// The protocol allows a mint (issuer) to sign a message without knowing what +// it's signing, providing unlinkability between token issuance and redemption. +// +// Protocol overview: +// 1. User creates secret x, computes Y = HashToCurve(x) +// 2. User blinds: B_ = Y + r*G (r is random blinding factor) +// 3. Mint signs: C_ = k*B_ (k is mint's private key) +// 4. User unblinds: C = C_ - r*K (K is mint's public key) +// 5. Token is (x, C) - mint can verify: C == k*HashToCurve(x) +// +// Reference: https://github.com/cashubtc/nuts/blob/main/00.md +package bdhke + +import ( + "crypto/rand" + "crypto/sha256" + "encoding/binary" + "errors" + "fmt" + + "github.com/decred/dcrd/dcrec/secp256k1/v4" +) + +// DomainSeparator is prepended to messages before hashing to prevent +// cross-protocol attacks. 
+const DomainSeparator = "Secp256k1_HashToCurve_Cashu_" + +// Errors +var ( + ErrHashToCurveFailed = errors.New("bdhke: hash to curve failed after max iterations") + ErrInvalidPoint = errors.New("bdhke: invalid curve point") + ErrInvalidPrivateKey = errors.New("bdhke: invalid private key") + ErrSignatureMismatch = errors.New("bdhke: signature verification failed") +) + +// HashToCurve deterministically maps a message to a point on secp256k1. +// Uses the try-and-increment method as specified in Cashu NUT-00. +// +// Algorithm: +// 1. Compute msg_hash = SHA256(domain_separator || message) +// 2. For counter in 0..65536: +// a. Compute hash = SHA256(msg_hash || counter) +// b. Try to parse 02 || hash as compressed point +// c. If valid point, return it +// 3. Fail if no valid point found (extremely unlikely) +func HashToCurve(message []byte) (*secp256k1.PublicKey, error) { + // Hash the message with domain separator + msgHash := sha256.Sum256(append([]byte(DomainSeparator), message...)) + + // Try up to 65536 iterations (in practice, ~50% chance on first try) + counterBytes := make([]byte, 4) + for counter := uint32(0); counter < 65536; counter++ { + binary.LittleEndian.PutUint32(counterBytes, counter) + + // Hash again with counter + toHash := append(msgHash[:], counterBytes...) + hash := sha256.Sum256(toHash) + + // Try to parse as compressed point with 02 prefix (even y) + compressed := make([]byte, 33) + compressed[0] = 0x02 + copy(compressed[1:], hash[:]) + + pk, err := secp256k1.ParsePubKey(compressed) + if err == nil { + return pk, nil + } + } + + return nil, ErrHashToCurveFailed +} + +// BlindResult contains the blinding operation result. +type BlindResult struct { + B *secp256k1.PublicKey // Blinded message B_ = Y + r*G + R *secp256k1.PrivateKey // Blinding factor (keep secret until unblinding) + Y *secp256k1.PublicKey // Original point Y = HashToCurve(secret) +} + +// Blind creates a blinded message from a secret. +// The blinding factor r is generated randomly and must be kept secret +// until the signature is received and needs to be unblinded. +// +// B_ = Y + r*G where: +// - Y = HashToCurve(secret) +// - r = random scalar +// - G = generator point +func Blind(secret []byte) (*BlindResult, error) { + // Compute Y = HashToCurve(secret) + Y, err := HashToCurve(secret) + if err != nil { + return nil, fmt.Errorf("blind: %w", err) + } + + // Generate random blinding factor r + rBytes := make([]byte, 32) + if _, err := rand.Read(rBytes); err != nil { + return nil, fmt.Errorf("blind: failed to generate random: %w", err) + } + r := secp256k1.PrivKeyFromBytes(rBytes) + + // Compute r*G (blinding factor times generator) + rG := new(secp256k1.JacobianPoint) + secp256k1.ScalarBaseMultNonConst(&r.Key, rG) + + // Convert Y to Jacobian + yJ := new(secp256k1.JacobianPoint) + Y.AsJacobian(yJ) + + // Compute B_ = Y + r*G + bJ := new(secp256k1.JacobianPoint) + secp256k1.AddNonConst(yJ, rG, bJ) + bJ.ToAffine() + + // Convert back to PublicKey + B := secp256k1.NewPublicKey(&bJ.X, &bJ.Y) + + return &BlindResult{ + B: B, + R: r, + Y: Y, + }, nil +} + +// BlindWithFactor creates a blinded message using a provided blinding factor. +// This is useful for testing or when the blinding factor needs to be deterministic. 
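+//
+// Illustrative use (any fixed 32-byte factor yields a deterministic B_):
+//
+//	r := bytes.Repeat([]byte{0x01}, 32)
+//	res, _ := BlindWithFactor(secret, r)
+//	// res.B is identical across calls with the same secret and r.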
+func BlindWithFactor(secret []byte, rBytes []byte) (*BlindResult, error) { + if len(rBytes) != 32 { + return nil, errors.New("blind: blinding factor must be 32 bytes") + } + + // Compute Y = HashToCurve(secret) + Y, err := HashToCurve(secret) + if err != nil { + return nil, fmt.Errorf("blind: %w", err) + } + + r := secp256k1.PrivKeyFromBytes(rBytes) + + // Compute r*G + rG := new(secp256k1.JacobianPoint) + secp256k1.ScalarBaseMultNonConst(&r.Key, rG) + + // Convert Y to Jacobian + yJ := new(secp256k1.JacobianPoint) + Y.AsJacobian(yJ) + + // Compute B_ = Y + r*G + bJ := new(secp256k1.JacobianPoint) + secp256k1.AddNonConst(yJ, rG, bJ) + bJ.ToAffine() + + B := secp256k1.NewPublicKey(&bJ.X, &bJ.Y) + + return &BlindResult{ + B: B, + R: r, + Y: Y, + }, nil +} + +// Sign creates a blinded signature on a blinded message. +// This is performed by the mint using its private key k. +// +// C_ = k * B_ where: +// - k = mint's private key scalar +// - B_ = blinded message from user +func Sign(B *secp256k1.PublicKey, k *secp256k1.PrivateKey) (*secp256k1.PublicKey, error) { + if B == nil || k == nil { + return nil, ErrInvalidPoint + } + + // Convert B to Jacobian + bJ := new(secp256k1.JacobianPoint) + B.AsJacobian(bJ) + + // Compute C_ = k * B_ + cJ := new(secp256k1.JacobianPoint) + secp256k1.ScalarMultNonConst(&k.Key, bJ, cJ) + cJ.ToAffine() + + C := secp256k1.NewPublicKey(&cJ.X, &cJ.Y) + return C, nil +} + +// Unblind removes the blinding factor from the signature. +// This is performed by the user after receiving the blinded signature. +// +// C = C_ - r*K where: +// - C_ = blinded signature from mint +// - r = original blinding factor +// - K = mint's public key +func Unblind(C_ *secp256k1.PublicKey, r *secp256k1.PrivateKey, K *secp256k1.PublicKey) (*secp256k1.PublicKey, error) { + if C_ == nil || r == nil || K == nil { + return nil, ErrInvalidPoint + } + + // Compute r*K + kJ := new(secp256k1.JacobianPoint) + K.AsJacobian(kJ) + + rK := new(secp256k1.JacobianPoint) + secp256k1.ScalarMultNonConst(&r.Key, kJ, rK) + + // Negate r*K to get -r*K + rK.Y.Negate(1) + rK.Y.Normalize() + + // Convert C_ to Jacobian + c_J := new(secp256k1.JacobianPoint) + C_.AsJacobian(c_J) + + // Compute C = C_ + (-r*K) = C_ - r*K + cJ := new(secp256k1.JacobianPoint) + secp256k1.AddNonConst(c_J, rK, cJ) + cJ.ToAffine() + + C := secp256k1.NewPublicKey(&cJ.X, &cJ.Y) + return C, nil +} + +// Verify checks that a token's signature is valid. +// The mint uses this to verify tokens during redemption. +// +// Checks: C == k * HashToCurve(secret) where: +// - C = unblinded signature from token +// - k = mint's private key +// - secret = token's secret value +func Verify(secret []byte, C *secp256k1.PublicKey, k *secp256k1.PrivateKey) (bool, error) { + if C == nil || k == nil { + return false, ErrInvalidPoint + } + + // Compute Y = HashToCurve(secret) + Y, err := HashToCurve(secret) + if err != nil { + return false, err + } + + // Compute expected = k * Y + yJ := new(secp256k1.JacobianPoint) + Y.AsJacobian(yJ) + + expectedJ := new(secp256k1.JacobianPoint) + secp256k1.ScalarMultNonConst(&k.Key, yJ, expectedJ) + expectedJ.ToAffine() + + expected := secp256k1.NewPublicKey(&expectedJ.X, &expectedJ.Y) + + // Compare C with expected + return C.IsEqual(expected), nil +} + +// VerifyWithPublicKey verifies a token without knowing the private key. +// This requires a DLEQ proof (not yet implemented). +// For now, returns error indicating this is not supported. 
+func VerifyWithPublicKey(secret []byte, C *secp256k1.PublicKey, K *secp256k1.PublicKey) (bool, error) { + return false, errors.New("bdhke: DLEQ proof verification not implemented") +} + +// GenerateKeypair generates a new mint keypair. +func GenerateKeypair() (*secp256k1.PrivateKey, *secp256k1.PublicKey, error) { + keyBytes := make([]byte, 32) + if _, err := rand.Read(keyBytes); err != nil { + return nil, nil, fmt.Errorf("generate keypair: %w", err) + } + + privKey := secp256k1.PrivKeyFromBytes(keyBytes) + pubKey := privKey.PubKey() + + return privKey, pubKey, nil +} + +// SecretFromBytes creates a secret suitable for token issuance. +// The secret should be 32 bytes of random data. +func SecretFromBytes(data []byte) []byte { + // Just return a copy - secrets are arbitrary byte strings + secret := make([]byte, len(data)) + copy(secret, data) + return secret +} + +// GenerateSecret creates a new random 32-byte secret. +func GenerateSecret() ([]byte, error) { + secret := make([]byte, 32) + if _, err := rand.Read(secret); err != nil { + return nil, fmt.Errorf("generate secret: %w", err) + } + return secret, nil +} diff --git a/pkg/cashu/bdhke/bdhke_test.go b/pkg/cashu/bdhke/bdhke_test.go new file mode 100644 index 0000000..e7890fa --- /dev/null +++ b/pkg/cashu/bdhke/bdhke_test.go @@ -0,0 +1,348 @@ +package bdhke + +import ( + "bytes" + "encoding/hex" + "testing" + + "github.com/decred/dcrd/dcrec/secp256k1/v4" +) + +// Test vectors from Cashu NUT-00 specification +// https://github.com/cashubtc/nuts/blob/main/00.md + +func TestHashToCurve(t *testing.T) { + tests := []struct { + name string + message string + expected string // Expected compressed public key in hex + }{ + { + name: "test vector 1", + message: "0000000000000000000000000000000000000000000000000000000000000000", + expected: "024cce997d3b518f739663b757deaec95bcd9473c30a14ac2fd04023a739d1a725", + }, + { + name: "test vector 2", + message: "0000000000000000000000000000000000000000000000000000000000000001", + expected: "022e7158e11c9506f1aa4248bf531298daa7febd6194f003edcd9b93ade6253acf", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + msgBytes, err := hex.DecodeString(tt.message) + if err != nil { + t.Fatalf("failed to decode message: %v", err) + } + + point, err := HashToCurve(msgBytes) + if err != nil { + t.Fatalf("HashToCurve failed: %v", err) + } + + got := hex.EncodeToString(point.SerializeCompressed()) + if got != tt.expected { + t.Errorf("HashToCurve(%s) = %s, want %s", tt.message, got, tt.expected) + } + }) + } +} + +func TestBlindSignUnblindVerify(t *testing.T) { + // Generate mint keypair + k, K, err := GenerateKeypair() + if err != nil { + t.Fatalf("failed to generate keypair: %v", err) + } + + // Generate a secret + secret, err := GenerateSecret() + if err != nil { + t.Fatalf("failed to generate secret: %v", err) + } + + // User blinds the secret + blindResult, err := Blind(secret) + if err != nil { + t.Fatalf("Blind failed: %v", err) + } + + // Mint signs the blinded message + C_, err := Sign(blindResult.B, k) + if err != nil { + t.Fatalf("Sign failed: %v", err) + } + + // User unblinds the signature + C, err := Unblind(C_, blindResult.R, K) + if err != nil { + t.Fatalf("Unblind failed: %v", err) + } + + // Verify the token + valid, err := Verify(secret, C, k) + if err != nil { + t.Fatalf("Verify failed: %v", err) + } + + if !valid { + t.Error("Verify returned false, expected true") + } +} + +func TestVerifyWrongSecret(t *testing.T) { + k, K, _ := GenerateKeypair() + secret1, _ := 
GenerateSecret() + secret2, _ := GenerateSecret() + + // Create token with secret1 + blindResult, _ := Blind(secret1) + C_, _ := Sign(blindResult.B, k) + C, _ := Unblind(C_, blindResult.R, K) + + // Try to verify with secret2 + valid, err := Verify(secret2, C, k) + if err != nil { + t.Fatalf("Verify failed: %v", err) + } + + if valid { + t.Error("Verify returned true for wrong secret") + } +} + +func TestVerifyWrongKey(t *testing.T) { + k1, K1, _ := GenerateKeypair() + k2, _, _ := GenerateKeypair() + + secret, _ := GenerateSecret() + + // Create token with k1 + blindResult, _ := Blind(secret) + C_, _ := Sign(blindResult.B, k1) + C, _ := Unblind(C_, blindResult.R, K1) + + // Try to verify with k2 + valid, err := Verify(secret, C, k2) + if err != nil { + t.Fatalf("Verify failed: %v", err) + } + + if valid { + t.Error("Verify returned true for wrong key") + } +} + +func TestBlindWithFactor(t *testing.T) { + k, K, _ := GenerateKeypair() + secret := []byte("test secret message") + + // Use deterministic blinding factor + rBytes := make([]byte, 32) + for i := range rBytes { + rBytes[i] = byte(i) + } + + blindResult, err := BlindWithFactor(secret, rBytes) + if err != nil { + t.Fatalf("BlindWithFactor failed: %v", err) + } + + // Complete the protocol + C_, _ := Sign(blindResult.B, k) + C, _ := Unblind(C_, blindResult.R, K) + + valid, _ := Verify(secret, C, k) + if !valid { + t.Error("BlindWithFactor: verification failed") + } + + // Do it again with same factor - should get same B + blindResult2, _ := BlindWithFactor(secret, rBytes) + if !bytes.Equal(blindResult.B.SerializeCompressed(), blindResult2.B.SerializeCompressed()) { + t.Error("BlindWithFactor not deterministic") + } +} + +func TestHashToCurveDeterministic(t *testing.T) { + message := []byte("deterministic test") + + p1, err := HashToCurve(message) + if err != nil { + t.Fatalf("HashToCurve failed: %v", err) + } + + p2, err := HashToCurve(message) + if err != nil { + t.Fatalf("HashToCurve failed: %v", err) + } + + if !p1.IsEqual(p2) { + t.Error("HashToCurve not deterministic") + } +} + +func TestSignNilInputs(t *testing.T) { + k, _, _ := GenerateKeypair() + + _, err := Sign(nil, k) + if err == nil { + t.Error("Sign(nil, k) should error") + } + + B, _ := HashToCurve([]byte("test")) + _, err = Sign(B, nil) + if err == nil { + t.Error("Sign(B, nil) should error") + } +} + +func TestUnblindNilInputs(t *testing.T) { + k, K, _ := GenerateKeypair() + secret, _ := GenerateSecret() + blindResult, _ := Blind(secret) + C_, _ := Sign(blindResult.B, k) + + _, err := Unblind(nil, blindResult.R, K) + if err == nil { + t.Error("Unblind(nil, r, K) should error") + } + + _, err = Unblind(C_, nil, K) + if err == nil { + t.Error("Unblind(C_, nil, K) should error") + } + + _, err = Unblind(C_, blindResult.R, nil) + if err == nil { + t.Error("Unblind(C_, r, nil) should error") + } +} + +func TestVerifyNilInputs(t *testing.T) { + k, K, _ := GenerateKeypair() + secret, _ := GenerateSecret() + blindResult, _ := Blind(secret) + C_, _ := Sign(blindResult.B, k) + C, _ := Unblind(C_, blindResult.R, K) + + _, err := Verify(secret, nil, k) + if err == nil { + t.Error("Verify(secret, nil, k) should error") + } + + _, err = Verify(secret, C, nil) + if err == nil { + t.Error("Verify(secret, C, nil) should error") + } +} + +// Benchmark functions +func BenchmarkHashToCurve(b *testing.B) { + secret, _ := GenerateSecret() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + HashToCurve(secret) + } +} + +func BenchmarkBlind(b *testing.B) { + secret, _ := GenerateSecret() + 
b.ResetTimer() + + for i := 0; i < b.N; i++ { + Blind(secret) + } +} + +func BenchmarkSign(b *testing.B) { + k, _, _ := GenerateKeypair() + secret, _ := GenerateSecret() + blindResult, _ := Blind(secret) + b.ResetTimer() + + for i := 0; i < b.N; i++ { + Sign(blindResult.B, k) + } +} + +func BenchmarkUnblind(b *testing.B) { + k, K, _ := GenerateKeypair() + secret, _ := GenerateSecret() + blindResult, _ := Blind(secret) + C_, _ := Sign(blindResult.B, k) + b.ResetTimer() + + for i := 0; i < b.N; i++ { + Unblind(C_, blindResult.R, K) + } +} + +func BenchmarkVerify(b *testing.B) { + k, K, _ := GenerateKeypair() + secret, _ := GenerateSecret() + blindResult, _ := Blind(secret) + C_, _ := Sign(blindResult.B, k) + C, _ := Unblind(C_, blindResult.R, K) + b.ResetTimer() + + for i := 0; i < b.N; i++ { + Verify(secret, C, k) + } +} + +func BenchmarkFullProtocol(b *testing.B) { + k, K, _ := GenerateKeypair() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + secret, _ := GenerateSecret() + blindResult, _ := Blind(secret) + C_, _ := Sign(blindResult.B, k) + C, _ := Unblind(C_, blindResult.R, K) + Verify(secret, C, k) + } +} + +// Test that serialization/deserialization works correctly +func TestPointSerialization(t *testing.T) { + k, K, _ := GenerateKeypair() + secret, _ := GenerateSecret() + blindResult, _ := Blind(secret) + C_, _ := Sign(blindResult.B, k) + C, _ := Unblind(C_, blindResult.R, K) + + // Serialize and deserialize C + serialized := C.SerializeCompressed() + deserialized, err := secp256k1.ParsePubKey(serialized) + if err != nil { + t.Fatalf("failed to parse serialized point: %v", err) + } + + // Verify with deserialized point + valid, err := Verify(secret, deserialized, k) + if err != nil { + t.Fatalf("Verify failed: %v", err) + } + if !valid { + t.Error("Verify failed after point serialization round-trip") + } + + // Same for K + kSerialized := K.SerializeCompressed() + kDeserialized, err := secp256k1.ParsePubKey(kSerialized) + if err != nil { + t.Fatalf("failed to parse serialized K: %v", err) + } + + // Unblind with deserialized K + C2, err := Unblind(C_, blindResult.R, kDeserialized) + if err != nil { + t.Fatalf("Unblind with deserialized K failed: %v", err) + } + if !C.IsEqual(C2) { + t.Error("Unblind result differs after K round-trip") + } +} diff --git a/pkg/cashu/issuer/issuer.go b/pkg/cashu/issuer/issuer.go new file mode 100644 index 0000000..7b18eec --- /dev/null +++ b/pkg/cashu/issuer/issuer.go @@ -0,0 +1,288 @@ +// Package issuer implements Cashu token issuance with authorization checks. +package issuer + +import ( + "context" + "errors" + "fmt" + "time" + + "github.com/decred/dcrd/dcrec/secp256k1/v4" + + "next.orly.dev/pkg/cashu/bdhke" + "next.orly.dev/pkg/cashu/keyset" + "next.orly.dev/pkg/cashu/token" + cashuiface "next.orly.dev/pkg/interfaces/cashu" +) + +// Errors. +var ( + ErrNoActiveKeyset = errors.New("issuer: no active keyset available") + ErrInvalidBlindedMsg = errors.New("issuer: invalid blinded message") + ErrInvalidPubkey = errors.New("issuer: invalid pubkey") + ErrInvalidScope = errors.New("issuer: invalid scope") +) + +// Config holds issuer configuration. +type Config struct { + // DefaultTTL is the default token lifetime. + DefaultTTL time.Duration + + // MaxTTL is the maximum allowed token lifetime. + MaxTTL time.Duration + + // AllowedScopes is the list of scopes this issuer can issue tokens for. + // Empty means all scopes are allowed. + AllowedScopes []string + + // MaxKinds is the maximum number of explicit kinds in a token. + // 0 means unlimited. 
+ MaxKinds int + + // MaxKindRanges is the maximum number of kind ranges in a token. + // 0 means unlimited. + MaxKindRanges int +} + +// DefaultConfig returns sensible default configuration. +func DefaultConfig() Config { + return Config{ + DefaultTTL: 7 * 24 * time.Hour, // 1 week + MaxTTL: 7 * 24 * time.Hour, // 1 week + MaxKinds: 100, + MaxKindRanges: 10, + } +} + +// Issuer handles token issuance with authorization checks. +type Issuer struct { + keysets *keyset.Manager + authz cashuiface.AuthzChecker + config Config +} + +// New creates a new issuer. +func New(keysets *keyset.Manager, authz cashuiface.AuthzChecker, config Config) *Issuer { + return &Issuer{ + keysets: keysets, + authz: authz, + config: config, + } +} + +// IssueRequest contains the request parameters for token issuance. +type IssueRequest struct { + // BlindedMessage is the blinded point B_ (33 bytes compressed). + BlindedMessage []byte + + // Pubkey is the user's Nostr pubkey (32 bytes). + Pubkey []byte + + // Scope is the requested token scope. + Scope string + + // Kinds is the list of permitted event kinds. + Kinds []int + + // KindRanges is the list of permitted kind ranges. + KindRanges [][]int + + // TTL is the requested token lifetime (optional, uses default if zero). + TTL time.Duration +} + +// IssueResponse contains the response from token issuance. +type IssueResponse struct { + // BlindedSignature is the blinded signature C_ (33 bytes compressed). + BlindedSignature []byte + + // KeysetID is the ID of the keyset used for signing. + KeysetID string + + // Expiry is the token expiration timestamp. + Expiry int64 + + // MintPubkey is the public key of the keyset (for unblinding). + MintPubkey []byte +} + +// Issue creates a blinded signature after authorization check. +func (i *Issuer) Issue(ctx context.Context, req *IssueRequest, remoteAddr string) (*IssueResponse, error) { + // Validate request + if err := i.validateRequest(req); err != nil { + return nil, err + } + + // Check authorization + if err := i.authz.CheckAuthorization(ctx, req.Pubkey, req.Scope, remoteAddr); err != nil { + return nil, fmt.Errorf("issuer: authorization failed: %w", err) + } + + // Get active keyset + ks := i.keysets.GetSigningKeyset() + if ks == nil || !ks.IsActiveForSigning() { + return nil, ErrNoActiveKeyset + } + + // Parse blinded message + B_, err := secp256k1.ParsePubKey(req.BlindedMessage) + if err != nil { + return nil, fmt.Errorf("%w: %v", ErrInvalidBlindedMsg, err) + } + + // Sign the blinded message + C_, err := bdhke.Sign(B_, ks.PrivateKey) + if err != nil { + return nil, fmt.Errorf("issuer: signing failed: %w", err) + } + + // Calculate expiry + ttl := req.TTL + if ttl <= 0 { + ttl = i.config.DefaultTTL + } + if ttl > i.config.MaxTTL { + ttl = i.config.MaxTTL + } + expiry := time.Now().Add(ttl).Unix() + + return &IssueResponse{ + BlindedSignature: C_.SerializeCompressed(), + KeysetID: ks.ID, + Expiry: expiry, + MintPubkey: ks.SerializePublicKey(), + }, nil +} + +// validateRequest validates the issue request. 
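+// It checks the blinded message length (33 bytes), the pubkey length (32 bytes),
+// the scope against the configured allow-list, and the kind / kind-range limits.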
+func (i *Issuer) validateRequest(req *IssueRequest) error { + // Validate blinded message + if len(req.BlindedMessage) != 33 { + return fmt.Errorf("%w: expected 33 bytes, got %d", ErrInvalidBlindedMsg, len(req.BlindedMessage)) + } + + // Validate pubkey + if len(req.Pubkey) != 32 { + return fmt.Errorf("%w: expected 32 bytes, got %d", ErrInvalidPubkey, len(req.Pubkey)) + } + + // Validate scope + if req.Scope == "" { + return ErrInvalidScope + } + if len(i.config.AllowedScopes) > 0 { + allowed := false + for _, s := range i.config.AllowedScopes { + if s == req.Scope { + allowed = true + break + } + } + if !allowed { + return fmt.Errorf("%w: %s not in allowed scopes", ErrInvalidScope, req.Scope) + } + } + + // Validate kinds count + if i.config.MaxKinds > 0 && len(req.Kinds) > i.config.MaxKinds { + return fmt.Errorf("issuer: too many kinds: %d > %d", len(req.Kinds), i.config.MaxKinds) + } + + // Validate kind ranges count + if i.config.MaxKindRanges > 0 && len(req.KindRanges) > i.config.MaxKindRanges { + return fmt.Errorf("issuer: too many kind ranges: %d > %d", len(req.KindRanges), i.config.MaxKindRanges) + } + + // Validate kind ranges format + for idx, r := range req.KindRanges { + if len(r) != 2 { + return fmt.Errorf("issuer: kind range %d must have 2 elements", idx) + } + if r[0] > r[1] { + return fmt.Errorf("issuer: kind range %d min > max: %d > %d", idx, r[0], r[1]) + } + } + + return nil +} + +// GetKeysetInfo returns public information about available keysets. +func (i *Issuer) GetKeysetInfo() []keyset.KeysetInfo { + return i.keysets.ListKeysetInfo() +} + +// GetActiveKeysetID returns the ID of the currently active keyset. +func (i *Issuer) GetActiveKeysetID() string { + ks := i.keysets.GetSigningKeyset() + if ks == nil { + return "" + } + return ks.ID +} + +// MintInfo contains public information about the mint. +type MintInfo struct { + Name string `json:"name,omitempty"` + Version string `json:"version"` + TokenTTL int64 `json:"token_ttl"` + MaxKinds int `json:"max_kinds,omitempty"` + MaxKindRanges int `json:"max_kind_ranges,omitempty"` + SupportedScopes []string `json:"supported_scopes,omitempty"` +} + +// GetMintInfo returns public information about the issuer. +func (i *Issuer) GetMintInfo(name string) MintInfo { + return MintInfo{ + Name: name, + Version: "NIP-XX/1", + TokenTTL: int64(i.config.DefaultTTL.Seconds()), + MaxKinds: i.config.MaxKinds, + MaxKindRanges: i.config.MaxKindRanges, + SupportedScopes: i.config.AllowedScopes, + } +} + +// BuildToken is a helper that creates a complete token from the issue response +// and the user's secret and blinding factor. +// This is typically done client-side, but provided for testing and CLI tools. 
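+//
+// Sketch of the client-side sequence (names as in this package):
+//
+//	secret, _ := bdhke.GenerateSecret()
+//	blind, _ := bdhke.Blind(secret)
+//	// POST blind.B.SerializeCompressed() to /cashu/mint and receive resp
+//	tok, err := BuildToken(resp, secret, blind.R, userPubkey, token.ScopeRelay, kinds, nil)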
+func BuildToken( + resp *IssueResponse, + secret []byte, + blindingFactor *secp256k1.PrivateKey, + pubkey []byte, + scope string, + kinds []int, + kindRanges [][]int, +) (*token.Token, error) { + // Parse mint pubkey + mintPubkey, err := secp256k1.ParsePubKey(resp.MintPubkey) + if err != nil { + return nil, fmt.Errorf("invalid mint pubkey: %w", err) + } + + // Parse blinded signature + C_, err := secp256k1.ParsePubKey(resp.BlindedSignature) + if err != nil { + return nil, fmt.Errorf("invalid blinded signature: %w", err) + } + + // Unblind the signature + C, err := bdhke.Unblind(C_, blindingFactor, mintPubkey) + if err != nil { + return nil, fmt.Errorf("unblind failed: %w", err) + } + + // Create token + tok := token.New( + resp.KeysetID, + secret, + C.SerializeCompressed(), + pubkey, + time.Unix(resp.Expiry, 0), + scope, + ) + tok.SetKinds(kinds...) + tok.KindRanges = kindRanges + + return tok, nil +} diff --git a/pkg/cashu/issuer/issuer_test.go b/pkg/cashu/issuer/issuer_test.go new file mode 100644 index 0000000..f9dc06e --- /dev/null +++ b/pkg/cashu/issuer/issuer_test.go @@ -0,0 +1,296 @@ +package issuer + +import ( + "context" + "testing" + "time" + + "github.com/decred/dcrd/dcrec/secp256k1/v4" + + "next.orly.dev/pkg/cashu/bdhke" + "next.orly.dev/pkg/cashu/keyset" + "next.orly.dev/pkg/cashu/token" + cashuiface "next.orly.dev/pkg/interfaces/cashu" +) + +func setupIssuer(authz cashuiface.AuthzChecker) (*Issuer, *keyset.Manager) { + store := keyset.NewMemoryStore() + manager := keyset.NewManager(store, keyset.DefaultActiveWindow, keyset.DefaultVerifyWindow) + manager.Init() + + config := DefaultConfig() + issuer := New(manager, authz, config) + + return issuer, manager +} + +func TestIssueSuccess(t *testing.T) { + issuer, _ := setupIssuer(cashuiface.AllowAllChecker{}) + + // Generate user keypair + secret, err := bdhke.GenerateSecret() + if err != nil { + t.Fatalf("GenerateSecret failed: %v", err) + } + + // Generate blinded message + blindResult, err := bdhke.Blind(secret) + if err != nil { + t.Fatalf("Blind failed: %v", err) + } + + // User pubkey + pubkey := make([]byte, 32) + for i := range pubkey { + pubkey[i] = byte(i) + } + + req := &IssueRequest{ + BlindedMessage: blindResult.B.SerializeCompressed(), + Pubkey: pubkey, + Scope: token.ScopeRelay, + Kinds: []int{0, 1, 3}, + KindRanges: [][]int{{30000, 39999}}, + } + + resp, err := issuer.Issue(context.Background(), req, "127.0.0.1") + if err != nil { + t.Fatalf("Issue failed: %v", err) + } + + // Check response + if len(resp.BlindedSignature) != 33 { + t.Errorf("BlindedSignature length = %d, want 33", len(resp.BlindedSignature)) + } + if resp.KeysetID == "" { + t.Error("KeysetID is empty") + } + if resp.Expiry <= time.Now().Unix() { + t.Error("Expiry should be in the future") + } + if len(resp.MintPubkey) != 33 { + t.Errorf("MintPubkey length = %d, want 33", len(resp.MintPubkey)) + } +} + +func TestIssueAuthorizationDenied(t *testing.T) { + issuer, _ := setupIssuer(cashuiface.DenyAllChecker{}) + + secret, _ := bdhke.GenerateSecret() + blindResult, _ := bdhke.Blind(secret) + pubkey := make([]byte, 32) + + req := &IssueRequest{ + BlindedMessage: blindResult.B.SerializeCompressed(), + Pubkey: pubkey, + Scope: token.ScopeRelay, + } + + _, err := issuer.Issue(context.Background(), req, "127.0.0.1") + if err == nil { + t.Error("Issue should fail when authorization is denied") + } +} + +func TestIssueInvalidBlindedMessage(t *testing.T) { + issuer, _ := setupIssuer(cashuiface.AllowAllChecker{}) + + pubkey := make([]byte, 32) + + req := 
&IssueRequest{ + BlindedMessage: []byte{1, 2, 3}, // Invalid + Pubkey: pubkey, + Scope: token.ScopeRelay, + } + + _, err := issuer.Issue(context.Background(), req, "127.0.0.1") + if err == nil { + t.Error("Issue should fail with invalid blinded message") + } +} + +func TestIssueInvalidPubkey(t *testing.T) { + issuer, _ := setupIssuer(cashuiface.AllowAllChecker{}) + + secret, _ := bdhke.GenerateSecret() + blindResult, _ := bdhke.Blind(secret) + + req := &IssueRequest{ + BlindedMessage: blindResult.B.SerializeCompressed(), + Pubkey: []byte{1, 2, 3}, // Invalid length + Scope: token.ScopeRelay, + } + + _, err := issuer.Issue(context.Background(), req, "127.0.0.1") + if err == nil { + t.Error("Issue should fail with invalid pubkey") + } +} + +func TestIssueInvalidScope(t *testing.T) { + store := keyset.NewMemoryStore() + manager := keyset.NewManager(store, keyset.DefaultActiveWindow, keyset.DefaultVerifyWindow) + manager.Init() + + config := DefaultConfig() + config.AllowedScopes = []string{token.ScopeRelay} // Only relay scope allowed + + issuer := New(manager, cashuiface.AllowAllChecker{}, config) + + secret, _ := bdhke.GenerateSecret() + blindResult, _ := bdhke.Blind(secret) + pubkey := make([]byte, 32) + + req := &IssueRequest{ + BlindedMessage: blindResult.B.SerializeCompressed(), + Pubkey: pubkey, + Scope: token.ScopeNIP46, // Not allowed + } + + _, err := issuer.Issue(context.Background(), req, "127.0.0.1") + if err == nil { + t.Error("Issue should fail with disallowed scope") + } +} + +func TestIssueTTL(t *testing.T) { + issuer, _ := setupIssuer(cashuiface.AllowAllChecker{}) + + secret, _ := bdhke.GenerateSecret() + blindResult, _ := bdhke.Blind(secret) + pubkey := make([]byte, 32) + + // Request with custom TTL + req := &IssueRequest{ + BlindedMessage: blindResult.B.SerializeCompressed(), + Pubkey: pubkey, + Scope: token.ScopeRelay, + TTL: time.Hour, + } + + resp, err := issuer.Issue(context.Background(), req, "127.0.0.1") + if err != nil { + t.Fatalf("Issue failed: %v", err) + } + + // Expiry should be ~1 hour from now + expectedExpiry := time.Now().Add(time.Hour).Unix() + if resp.Expiry < expectedExpiry-60 || resp.Expiry > expectedExpiry+60 { + t.Errorf("Expiry %d not within expected range of %d", resp.Expiry, expectedExpiry) + } +} + +func TestBuildToken(t *testing.T) { + issuer, manager := setupIssuer(cashuiface.AllowAllChecker{}) + + // Generate secret and blind it + secret, _ := bdhke.GenerateSecret() + blindResult, _ := bdhke.Blind(secret) + pubkey := make([]byte, 32) + for i := range pubkey { + pubkey[i] = byte(i) + } + + // Issue token + req := &IssueRequest{ + BlindedMessage: blindResult.B.SerializeCompressed(), + Pubkey: pubkey, + Scope: token.ScopeRelay, + Kinds: []int{1, 2, 3}, + } + + resp, err := issuer.Issue(context.Background(), req, "127.0.0.1") + if err != nil { + t.Fatalf("Issue failed: %v", err) + } + + // Build complete token + tok, err := BuildToken(resp, secret, blindResult.R, pubkey, token.ScopeRelay, []int{1, 2, 3}, nil) + if err != nil { + t.Fatalf("BuildToken failed: %v", err) + } + + // Verify token structure + if tok.KeysetID != resp.KeysetID { + t.Errorf("KeysetID mismatch: %s != %s", tok.KeysetID, resp.KeysetID) + } + if tok.Scope != token.ScopeRelay { + t.Errorf("Scope = %s, want %s", tok.Scope, token.ScopeRelay) + } + + // Verify signature (using the keyset) + ks := manager.FindByID(tok.KeysetID) + if ks == nil { + t.Fatal("Keyset not found") + } + + valid, err := bdhke.Verify(tok.Secret, mustParsePoint(tok.Signature), ks.PrivateKey) + if err != nil { 
+ t.Fatalf("Verify failed: %v", err) + } + if !valid { + t.Error("Token signature is not valid") + } +} + +func TestGetKeysetInfo(t *testing.T) { + issuer, _ := setupIssuer(cashuiface.AllowAllChecker{}) + + infos := issuer.GetKeysetInfo() + if len(infos) == 0 { + t.Error("GetKeysetInfo returned empty") + } + + for _, info := range infos { + if info.ID == "" { + t.Error("KeysetInfo has empty ID") + } + if info.PublicKey == "" { + t.Error("KeysetInfo has empty PublicKey") + } + } +} + +func TestGetActiveKeysetID(t *testing.T) { + issuer, _ := setupIssuer(cashuiface.AllowAllChecker{}) + + id := issuer.GetActiveKeysetID() + if id == "" { + t.Error("GetActiveKeysetID returned empty") + } + if len(id) != 14 { + t.Errorf("KeysetID length = %d, want 14", len(id)) + } +} + +func TestGetMintInfo(t *testing.T) { + store := keyset.NewMemoryStore() + manager := keyset.NewManager(store, keyset.DefaultActiveWindow, keyset.DefaultVerifyWindow) + manager.Init() + + config := DefaultConfig() + config.AllowedScopes = []string{token.ScopeRelay, token.ScopeNIP46} + + issuer := New(manager, cashuiface.AllowAllChecker{}, config) + + info := issuer.GetMintInfo("Test Relay") + + if info.Name != "Test Relay" { + t.Errorf("Name = %s, want Test Relay", info.Name) + } + if info.Version != "NIP-XX/1" { + t.Errorf("Version = %s, want NIP-XX/1", info.Version) + } + if len(info.SupportedScopes) != 2 { + t.Errorf("SupportedScopes length = %d, want 2", len(info.SupportedScopes)) + } +} + +// Helper to parse point for testing +func mustParsePoint(data []byte) *secp256k1.PublicKey { + pk, err := secp256k1.ParsePubKey(data) + if err != nil { + panic(err) + } + return pk +} diff --git a/pkg/cashu/keyset/keyset.go b/pkg/cashu/keyset/keyset.go new file mode 100644 index 0000000..e11e8cd --- /dev/null +++ b/pkg/cashu/keyset/keyset.go @@ -0,0 +1,338 @@ +// Package keyset manages Cashu mint keysets for blind signature tokens. +// Keysets rotate periodically to limit key exposure and provide forward secrecy. +package keyset + +import ( + "crypto/rand" + "crypto/sha256" + "encoding/hex" + "fmt" + "sync" + "time" + + "github.com/decred/dcrd/dcrec/secp256k1/v4" +) + +// DefaultActiveWindow is how long a keyset is valid for issuing new tokens. +const DefaultActiveWindow = 7 * 24 * time.Hour // 1 week + +// DefaultVerifyWindow is how long a keyset remains valid for verification. +const DefaultVerifyWindow = 21 * 24 * time.Hour // 3 weeks + +// Keyset represents a signing keyset with lifecycle management. +type Keyset struct { + ID string // 14-char hex ID (7 bytes) + PrivateKey *secp256k1.PrivateKey // Signing key + PublicKey *secp256k1.PublicKey // Verification key + CreatedAt time.Time // When keyset was created + ActiveAt time.Time // When keyset becomes active for signing + ExpiresAt time.Time // When keyset can no longer sign (but can still verify) + VerifyEnd time.Time // When keyset can no longer verify + Active bool // Whether keyset is currently active for signing +} + +// New creates a new keyset with generated keys. +func New() (*Keyset, error) { + return NewWithTTL(DefaultActiveWindow, DefaultVerifyWindow) +} + +// NewWithTTL creates a new keyset with custom lifetimes. 
+func NewWithTTL(activeTTL, verifyTTL time.Duration) (*Keyset, error) { + // Generate random private key + keyBytes := make([]byte, 32) + if _, err := rand.Read(keyBytes); err != nil { + return nil, fmt.Errorf("keyset: failed to generate key: %w", err) + } + + privKey := secp256k1.PrivKeyFromBytes(keyBytes) + pubKey := privKey.PubKey() + + now := time.Now() + k := &Keyset{ + PrivateKey: privKey, + PublicKey: pubKey, + CreatedAt: now, + ActiveAt: now, + ExpiresAt: now.Add(activeTTL), + VerifyEnd: now.Add(verifyTTL), + Active: true, + } + + // Calculate ID from public key + k.ID = k.calculateID() + + return k, nil +} + +// NewFromPrivateKey creates a keyset from an existing private key. +func NewFromPrivateKey(privKeyBytes []byte, createdAt time.Time, activeTTL, verifyTTL time.Duration) (*Keyset, error) { + if len(privKeyBytes) != 32 { + return nil, fmt.Errorf("keyset: private key must be 32 bytes") + } + + privKey := secp256k1.PrivKeyFromBytes(privKeyBytes) + pubKey := privKey.PubKey() + + k := &Keyset{ + PrivateKey: privKey, + PublicKey: pubKey, + CreatedAt: createdAt, + ActiveAt: createdAt, + ExpiresAt: createdAt.Add(activeTTL), + VerifyEnd: createdAt.Add(verifyTTL), + Active: true, + } + + k.ID = k.calculateID() + + return k, nil +} + +// calculateID computes the keyset ID from the public key. +// ID = hex(SHA256(compressed_pubkey)[0:7]) +func (k *Keyset) calculateID() string { + compressed := k.PublicKey.SerializeCompressed() + hash := sha256.Sum256(compressed) + return hex.EncodeToString(hash[:7]) +} + +// IsActiveForSigning returns true if keyset can be used to sign new tokens. +func (k *Keyset) IsActiveForSigning() bool { + now := time.Now() + return k.Active && now.After(k.ActiveAt) && now.Before(k.ExpiresAt) +} + +// IsValidForVerification returns true if keyset can be used to verify tokens. +func (k *Keyset) IsValidForVerification() bool { + now := time.Now() + return now.After(k.ActiveAt) && now.Before(k.VerifyEnd) +} + +// Deactivate marks the keyset as no longer active for signing. +func (k *Keyset) Deactivate() { + k.Active = false +} + +// SerializePrivateKey returns the private key as bytes for storage. +func (k *Keyset) SerializePrivateKey() []byte { + return k.PrivateKey.Serialize() +} + +// SerializePublicKey returns the compressed public key. +func (k *Keyset) SerializePublicKey() []byte { + return k.PublicKey.SerializeCompressed() +} + +// KeysetInfo is a public view of a keyset (without private key). +type KeysetInfo struct { + ID string `json:"id"` + PublicKey string `json:"pubkey"` + Active bool `json:"active"` + CreatedAt int64 `json:"created_at"` + ExpiresAt int64 `json:"expires_at"` + VerifyEnd int64 `json:"verify_end"` +} + +// Info returns public information about the keyset. +func (k *Keyset) Info() KeysetInfo { + return KeysetInfo{ + ID: k.ID, + PublicKey: hex.EncodeToString(k.SerializePublicKey()), + Active: k.IsActiveForSigning(), + CreatedAt: k.CreatedAt.Unix(), + ExpiresAt: k.ExpiresAt.Unix(), + VerifyEnd: k.VerifyEnd.Unix(), + } +} + +// Manager handles keyset lifecycle including rotation. +type Manager struct { + store Store + activeTTL time.Duration + verifyTTL time.Duration + + mu sync.RWMutex + current *Keyset // Current active keyset for signing + verification []*Keyset // All keysets valid for verification (including current) +} + +// NewManager creates a keyset manager. 
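+//
+// Illustrative wiring using the in-memory store (a persistent Store
+// implementation would be used in production):
+//
+//	m := NewManager(NewMemoryStore(), DefaultActiveWindow, DefaultVerifyWindow)
+//	if err := m.Init(); err != nil {
+//		// handle load/creation failure
+//	}
+//	signing := m.GetSigningKeyset()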
+func NewManager(store Store, activeTTL, verifyTTL time.Duration) *Manager { + return &Manager{ + store: store, + activeTTL: activeTTL, + verifyTTL: verifyTTL, + verification: make([]*Keyset, 0), + } +} + +// Init initializes the manager by loading existing keysets or creating a new one. +func (m *Manager) Init() error { + m.mu.Lock() + defer m.mu.Unlock() + + // Load all valid keysets from store + keysets, err := m.store.ListVerificationKeysets() + if err != nil { + return fmt.Errorf("manager: failed to load keysets: %w", err) + } + + // Find current active keyset + var active *Keyset + for _, k := range keysets { + if k.IsActiveForSigning() { + if active == nil || k.CreatedAt.After(active.CreatedAt) { + active = k + } + } + if k.IsValidForVerification() { + m.verification = append(m.verification, k) + } + } + + // If no active keyset, create one + if active == nil { + newKeyset, err := NewWithTTL(m.activeTTL, m.verifyTTL) + if err != nil { + return fmt.Errorf("manager: failed to create initial keyset: %w", err) + } + if err := m.store.SaveKeyset(newKeyset); err != nil { + return fmt.Errorf("manager: failed to save initial keyset: %w", err) + } + active = newKeyset + m.verification = append(m.verification, newKeyset) + } + + m.current = active + return nil +} + +// GetSigningKeyset returns the current active keyset for signing. +func (m *Manager) GetSigningKeyset() *Keyset { + m.mu.RLock() + defer m.mu.RUnlock() + return m.current +} + +// GetVerificationKeysets returns all keysets valid for verification. +func (m *Manager) GetVerificationKeysets() []*Keyset { + m.mu.RLock() + defer m.mu.RUnlock() + + result := make([]*Keyset, 0, len(m.verification)) + for _, k := range m.verification { + if k.IsValidForVerification() { + result = append(result, k) + } + } + return result +} + +// FindByID returns the keyset with the given ID, if it's valid for verification. +func (m *Manager) FindByID(id string) *Keyset { + m.mu.RLock() + defer m.mu.RUnlock() + + for _, k := range m.verification { + if k.ID == id && k.IsValidForVerification() { + return k + } + } + return nil +} + +// RotateIfNeeded checks if rotation is needed and performs it. +// Returns true if a new keyset was created. +func (m *Manager) RotateIfNeeded() (bool, error) { + m.mu.Lock() + defer m.mu.Unlock() + + // Check if current keyset is still active + if m.current != nil && m.current.IsActiveForSigning() { + return false, nil + } + + // Create new keyset + newKeyset, err := NewWithTTL(m.activeTTL, m.verifyTTL) + if err != nil { + return false, fmt.Errorf("manager: failed to create new keyset: %w", err) + } + + // Deactivate old keyset + if m.current != nil { + m.current.Deactivate() + } + + // Save new keyset + if err := m.store.SaveKeyset(newKeyset); err != nil { + return false, fmt.Errorf("manager: failed to save new keyset: %w", err) + } + + // Update manager state + m.current = newKeyset + m.verification = append(m.verification, newKeyset) + + // Prune expired verification keysets + m.pruneExpired() + + return true, nil +} + +// pruneExpired removes keysets that are no longer valid for verification. +// Must be called with lock held. +func (m *Manager) pruneExpired() { + valid := make([]*Keyset, 0, len(m.verification)) + for _, k := range m.verification { + if k.IsValidForVerification() { + valid = append(valid, k) + } + } + m.verification = valid +} + +// ListKeysetInfo returns public info for all verification keysets. 
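+//
+// Illustrative usage:
+//
+//	for _, info := range m.ListKeysetInfo() {
+//		fmt.Printf("keyset %s active=%v\n", info.ID, info.Active)
+//	}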
+func (m *Manager) ListKeysetInfo() []KeysetInfo { + m.mu.RLock() + defer m.mu.RUnlock() + + result := make([]KeysetInfo, 0, len(m.verification)) + for _, k := range m.verification { + if k.IsValidForVerification() { + result = append(result, k.Info()) + } + } + return result +} + +// StartRotationTicker starts a goroutine that rotates keysets periodically. +// Returns a channel that receives true on each rotation. +func (m *Manager) StartRotationTicker(interval time.Duration) (rotated <-chan bool, stop func()) { + ticker := time.NewTicker(interval) + ch := make(chan bool, 1) + done := make(chan struct{}) + + go func() { + for { + select { + case <-ticker.C: + rotated, err := m.RotateIfNeeded() + if err != nil { + // Log error but continue + continue + } + if rotated { + select { + case ch <- true: + default: + } + } + case <-done: + ticker.Stop() + close(ch) + return + } + } + }() + + return ch, func() { close(done) } +} diff --git a/pkg/cashu/keyset/keyset_test.go b/pkg/cashu/keyset/keyset_test.go new file mode 100644 index 0000000..b0e5b56 --- /dev/null +++ b/pkg/cashu/keyset/keyset_test.go @@ -0,0 +1,278 @@ +package keyset + +import ( + "testing" + "time" +) + +func TestNewKeyset(t *testing.T) { + k, err := New() + if err != nil { + t.Fatalf("New() failed: %v", err) + } + + // Check ID is 14 characters (7 bytes hex) + if len(k.ID) != 14 { + t.Errorf("ID length = %d, want 14", len(k.ID)) + } + + // Check keys are set + if k.PrivateKey == nil { + t.Error("PrivateKey is nil") + } + if k.PublicKey == nil { + t.Error("PublicKey is nil") + } + + // Check times are set + if k.CreatedAt.IsZero() { + t.Error("CreatedAt is zero") + } + if !k.IsActiveForSigning() { + t.Error("New keyset should be active for signing") + } + if !k.IsValidForVerification() { + t.Error("New keyset should be valid for verification") + } +} + +func TestKeysetIDDeterministic(t *testing.T) { + // Same private key should produce same ID + privKeyBytes := make([]byte, 32) + for i := range privKeyBytes { + privKeyBytes[i] = byte(i) + } + + k1, err := NewFromPrivateKey(privKeyBytes, time.Now(), DefaultActiveWindow, DefaultVerifyWindow) + if err != nil { + t.Fatalf("NewFromPrivateKey failed: %v", err) + } + + k2, err := NewFromPrivateKey(privKeyBytes, time.Now(), DefaultActiveWindow, DefaultVerifyWindow) + if err != nil { + t.Fatalf("NewFromPrivateKey failed: %v", err) + } + + if k1.ID != k2.ID { + t.Errorf("IDs should match: %s != %s", k1.ID, k2.ID) + } +} + +func TestKeysetExpiration(t *testing.T) { + // Create keyset with very short TTL + k, err := NewWithTTL(100*time.Millisecond, 200*time.Millisecond) + if err != nil { + t.Fatalf("NewWithTTL failed: %v", err) + } + + // Should be active initially + if !k.IsActiveForSigning() { + t.Error("New keyset should be active for signing") + } + + // Wait for signing to expire + time.Sleep(150 * time.Millisecond) + + if k.IsActiveForSigning() { + t.Error("Keyset should not be active for signing after expiry") + } + if !k.IsValidForVerification() { + t.Error("Keyset should still be valid for verification") + } + + // Wait for verification to expire + time.Sleep(100 * time.Millisecond) + + if k.IsValidForVerification() { + t.Error("Keyset should not be valid for verification after verify expiry") + } +} + +func TestKeysetDeactivate(t *testing.T) { + k, _ := New() + + if !k.Active { + t.Error("New keyset should be active") + } + + k.Deactivate() + + if k.Active { + t.Error("Keyset should not be active after Deactivate()") + } + if k.IsActiveForSigning() { + t.Error("Deactivated keyset 
should not be active for signing") + } +} + +func TestKeysetInfo(t *testing.T) { + k, _ := New() + info := k.Info() + + if info.ID != k.ID { + t.Errorf("Info ID = %s, want %s", info.ID, k.ID) + } + if len(info.PublicKey) != 66 { // 33 bytes * 2 hex chars + t.Errorf("Info PublicKey length = %d, want 66", len(info.PublicKey)) + } + if !info.Active { + t.Error("Info Active should be true for new keyset") + } +} + +func TestManager(t *testing.T) { + store := NewMemoryStore() + manager := NewManager(store, DefaultActiveWindow, DefaultVerifyWindow) + + if err := manager.Init(); err != nil { + t.Fatalf("Init failed: %v", err) + } + + // Should have a signing keyset + signing := manager.GetSigningKeyset() + if signing == nil { + t.Fatal("GetSigningKeyset returned nil") + } + + // Should have at least one verification keyset + verification := manager.GetVerificationKeysets() + if len(verification) == 0 { + t.Error("GetVerificationKeysets returned empty") + } + + // Should find keyset by ID + found := manager.FindByID(signing.ID) + if found == nil { + t.Error("FindByID returned nil for signing keyset") + } + if found.ID != signing.ID { + t.Errorf("FindByID returned wrong keyset: %s != %s", found.ID, signing.ID) + } +} + +func TestManagerRotation(t *testing.T) { + store := NewMemoryStore() + manager := NewManager(store, 50*time.Millisecond, 200*time.Millisecond) + + if err := manager.Init(); err != nil { + t.Fatalf("Init failed: %v", err) + } + + initialID := manager.GetSigningKeyset().ID + + // Rotation should not happen yet + rotated, err := manager.RotateIfNeeded() + if err != nil { + t.Fatalf("RotateIfNeeded failed: %v", err) + } + if rotated { + t.Error("Should not rotate when keyset is still active") + } + + // Wait for signing to expire + time.Sleep(60 * time.Millisecond) + + // Now rotation should happen + rotated, err = manager.RotateIfNeeded() + if err != nil { + t.Fatalf("RotateIfNeeded failed: %v", err) + } + if !rotated { + t.Error("Should rotate when keyset is expired") + } + + newID := manager.GetSigningKeyset().ID + if newID == initialID { + t.Error("New keyset should have different ID") + } + + // Old keyset should still be valid for verification + old := manager.FindByID(initialID) + if old == nil { + t.Error("Old keyset should still be found for verification") + } +} + +func TestManagerPersistence(t *testing.T) { + store := NewMemoryStore() + + // First manager creates keyset + m1 := NewManager(store, DefaultActiveWindow, DefaultVerifyWindow) + if err := m1.Init(); err != nil { + t.Fatalf("Init failed: %v", err) + } + id := m1.GetSigningKeyset().ID + + // Second manager should load existing keyset + m2 := NewManager(store, DefaultActiveWindow, DefaultVerifyWindow) + if err := m2.Init(); err != nil { + t.Fatalf("Init failed: %v", err) + } + + if m2.GetSigningKeyset().ID != id { + t.Error("Second manager should use same keyset as first") + } +} + +func TestManagerListKeysetInfo(t *testing.T) { + store := NewMemoryStore() + manager := NewManager(store, DefaultActiveWindow, DefaultVerifyWindow) + manager.Init() + + infos := manager.ListKeysetInfo() + if len(infos) == 0 { + t.Error("ListKeysetInfo returned empty") + } + + for _, info := range infos { + if info.ID == "" { + t.Error("KeysetInfo has empty ID") + } + if info.PublicKey == "" { + t.Error("KeysetInfo has empty PublicKey") + } + } +} + +func TestMemoryStore(t *testing.T) { + store := NewMemoryStore() + + k, _ := New() + + // Save + if err := store.SaveKeyset(k); err != nil { + t.Fatalf("SaveKeyset failed: %v", err) + } + + // 
Load + loaded, err := store.LoadKeyset(k.ID) + if err != nil { + t.Fatalf("LoadKeyset failed: %v", err) + } + if loaded == nil { + t.Fatal("LoadKeyset returned nil") + } + if loaded.ID != k.ID { + t.Errorf("Loaded ID = %s, want %s", loaded.ID, k.ID) + } + + // List active + active, err := store.ListActiveKeysets() + if err != nil { + t.Fatalf("ListActiveKeysets failed: %v", err) + } + if len(active) != 1 { + t.Errorf("ListActiveKeysets returned %d, want 1", len(active)) + } + + // Delete + if err := store.DeleteKeyset(k.ID); err != nil { + t.Fatalf("DeleteKeyset failed: %v", err) + } + + // Should be gone + loaded, _ = store.LoadKeyset(k.ID) + if loaded != nil { + t.Error("Keyset should be deleted") + } +} diff --git a/pkg/cashu/keyset/store.go b/pkg/cashu/keyset/store.go new file mode 100644 index 0000000..0c85ff5 --- /dev/null +++ b/pkg/cashu/keyset/store.go @@ -0,0 +1,74 @@ +package keyset + +// Store is the interface for persisting keysets. +// Implement this interface for your database backend. +type Store interface { + // SaveKeyset persists a keyset. + SaveKeyset(k *Keyset) error + + // LoadKeyset loads a keyset by ID. + LoadKeyset(id string) (*Keyset, error) + + // ListActiveKeysets returns all keysets that can be used for signing. + ListActiveKeysets() ([]*Keyset, error) + + // ListVerificationKeysets returns all keysets that can be used for verification. + ListVerificationKeysets() ([]*Keyset, error) + + // DeleteKeyset removes a keyset from storage. + DeleteKeyset(id string) error +} + +// MemoryStore is an in-memory implementation of Store for testing. +type MemoryStore struct { + keysets map[string]*Keyset +} + +// NewMemoryStore creates a new in-memory store. +func NewMemoryStore() *MemoryStore { + return &MemoryStore{ + keysets: make(map[string]*Keyset), + } +} + +// SaveKeyset saves a keyset to memory. +func (s *MemoryStore) SaveKeyset(k *Keyset) error { + s.keysets[k.ID] = k + return nil +} + +// LoadKeyset loads a keyset by ID. +func (s *MemoryStore) LoadKeyset(id string) (*Keyset, error) { + if k, ok := s.keysets[id]; ok { + return k, nil + } + return nil, nil +} + +// ListActiveKeysets returns all active keysets. +func (s *MemoryStore) ListActiveKeysets() ([]*Keyset, error) { + result := make([]*Keyset, 0) + for _, k := range s.keysets { + if k.IsActiveForSigning() { + result = append(result, k) + } + } + return result, nil +} + +// ListVerificationKeysets returns all keysets valid for verification. +func (s *MemoryStore) ListVerificationKeysets() ([]*Keyset, error) { + result := make([]*Keyset, 0) + for _, k := range s.keysets { + if k.IsValidForVerification() { + result = append(result, k) + } + } + return result, nil +} + +// DeleteKeyset removes a keyset. +func (s *MemoryStore) DeleteKeyset(id string) error { + delete(s.keysets, id) + return nil +} diff --git a/pkg/cashu/token/token.go b/pkg/cashu/token/token.go new file mode 100644 index 0000000..ca08fe0 --- /dev/null +++ b/pkg/cashu/token/token.go @@ -0,0 +1,345 @@ +// Package token implements the Cashu access token format as defined in NIP-XX. +// Tokens are privacy-preserving bearer credentials with kind permissions. +package token + +import ( + "encoding/base64" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "strings" + "time" +) + +// Prefix for serialized tokens. +const Prefix = "cashuA" + +// Predefined scopes. 
+const ( + ScopeRelay = "relay" // Standard relay WebSocket access + ScopeNIP46 = "nip46" // NIP-46 remote signing / bunker + ScopeBlossom = "blossom" // Blossom media server + ScopeAPI = "api" // HTTP API access +) + +// WildcardKind indicates all kinds are permitted. +const WildcardKind = -1 + +// Errors. +var ( + ErrInvalidPrefix = errors.New("token: invalid prefix, expected cashuA") + ErrInvalidEncoding = errors.New("token: invalid base64url encoding") + ErrInvalidJSON = errors.New("token: invalid JSON structure") + ErrTokenExpired = errors.New("token: expired") + ErrKindNotPermitted = errors.New("token: kind not permitted") + ErrScopeMismatch = errors.New("token: scope mismatch") +) + +// Token represents a Cashu access token with kind permissions. +type Token struct { + // Cryptographic fields + KeysetID string `json:"k"` // Keyset ID (hex) + Secret []byte `json:"s"` // Random secret (32 bytes) + Signature []byte `json:"c"` // Blind signature (33 bytes compressed) + Pubkey []byte `json:"p"` // User's Nostr pubkey (32 bytes) + + // Metadata + Expiry int64 `json:"e"` // Unix timestamp when token expires + Scope string `json:"sc"` // Token scope (relay, nip46, etc.) + + // Kind permissions + Kinds []int `json:"kinds,omitempty"` // Explicit list of permitted kinds + KindRanges [][]int `json:"kind_ranges,omitempty"` // Ranges as [min, max] pairs +} + +// tokenJSON is the JSON-serializable form with hex-encoded bytes. +type tokenJSON struct { + KeysetID string `json:"k"` + Secret string `json:"s"` + Signature string `json:"c"` + Pubkey string `json:"p"` + Expiry int64 `json:"e"` + Scope string `json:"sc"` + Kinds []int `json:"kinds,omitempty"` + KindRanges [][]int `json:"kind_ranges,omitempty"` +} + +// New creates a new token with the given parameters. +func New(keysetID string, secret, signature, pubkey []byte, expiry time.Time, scope string) *Token { + return &Token{ + KeysetID: keysetID, + Secret: secret, + Signature: signature, + Pubkey: pubkey, + Expiry: expiry.Unix(), + Scope: scope, + } +} + +// SetKinds sets explicit permitted kinds. +// Use WildcardKind (-1) to allow all kinds. +func (t *Token) SetKinds(kinds ...int) { + t.Kinds = kinds +} + +// SetKindRanges sets permitted kind ranges. +// Each range is [min, max] inclusive. +func (t *Token) SetKindRanges(ranges ...[]int) { + t.KindRanges = ranges +} + +// AddKindRange adds a single kind range. +func (t *Token) AddKindRange(min, max int) { + t.KindRanges = append(t.KindRanges, []int{min, max}) +} + +// IsExpired returns true if the token has expired. +func (t *Token) IsExpired() bool { + return time.Now().Unix() > t.Expiry +} + +// ExpiresAt returns the expiry time. +func (t *Token) ExpiresAt() time.Time { + return time.Unix(t.Expiry, 0) +} + +// TimeRemaining returns the duration until expiry. +func (t *Token) TimeRemaining() time.Duration { + return time.Until(t.ExpiresAt()) +} + +// IsKindPermitted checks if a given event kind is permitted by this token. 
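+//
+// Illustrative behaviour, assuming tok is a *Token (kind values are examples):
+//
+//	tok.SetKinds(1)
+//	tok.AddKindRange(30000, 39999)
+//	tok.IsKindPermitted(1)     // true: explicit kind
+//	tok.IsKindPermitted(35000) // true: inside a permitted range
+//	tok.IsKindPermitted(4)     // false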
+func (t *Token) IsKindPermitted(kind int) bool { + // Check for wildcard + for _, k := range t.Kinds { + if k == WildcardKind { + return true + } + } + + // Check explicit kinds + for _, k := range t.Kinds { + if k == kind { + return true + } + } + + // Check kind ranges + for _, r := range t.KindRanges { + if len(r) >= 2 && kind >= r[0] && kind <= r[1] { + return true + } + } + + // If no kinds or ranges specified, check scope defaults + if len(t.Kinds) == 0 && len(t.KindRanges) == 0 { + return t.defaultKindPermitted(kind) + } + + return false +} + +// defaultKindPermitted returns default permissions based on scope. +func (t *Token) defaultKindPermitted(kind int) bool { + switch t.Scope { + case ScopeRelay: + // Default relay scope allows common kinds + return true + case ScopeNIP46: + // NIP-46 scope allows NIP-46 kinds (24133) + return kind == 24133 + case ScopeBlossom: + // Blossom scope allows auth kinds + return kind == 24242 + default: + return false + } +} + +// HasWritePermission returns true if any kind is permitted (not read-only). +func (t *Token) HasWritePermission() bool { + return len(t.Kinds) > 0 || len(t.KindRanges) > 0 +} + +// IsReadOnly returns true if no kinds are permitted. +func (t *Token) IsReadOnly() bool { + return !t.HasWritePermission() +} + +// MatchesScope checks if the token scope matches the required scope. +func (t *Token) MatchesScope(requiredScope string) bool { + return t.Scope == requiredScope +} + +// PubkeyHex returns the pubkey as a hex string. +func (t *Token) PubkeyHex() string { + return hex.EncodeToString(t.Pubkey) +} + +// Encode serializes the token to the wire format: cashuA +func (t *Token) Encode() (string, error) { + // Convert to JSON-friendly format + tj := tokenJSON{ + KeysetID: t.KeysetID, + Secret: hex.EncodeToString(t.Secret), + Signature: hex.EncodeToString(t.Signature), + Pubkey: hex.EncodeToString(t.Pubkey), + Expiry: t.Expiry, + Scope: t.Scope, + Kinds: t.Kinds, + KindRanges: t.KindRanges, + } + + jsonBytes, err := json.Marshal(tj) + if err != nil { + return "", fmt.Errorf("token: failed to encode: %w", err) + } + + encoded := base64.RawURLEncoding.EncodeToString(jsonBytes) + return Prefix + encoded, nil +} + +// Parse decodes a token from the wire format. +func Parse(s string) (*Token, error) { + // Check prefix + if !strings.HasPrefix(s, Prefix) { + return nil, ErrInvalidPrefix + } + + // Decode base64url + encoded := strings.TrimPrefix(s, Prefix) + jsonBytes, err := base64.RawURLEncoding.DecodeString(encoded) + if err != nil { + return nil, fmt.Errorf("%w: %v", ErrInvalidEncoding, err) + } + + // Parse JSON + var tj tokenJSON + if err := json.Unmarshal(jsonBytes, &tj); err != nil { + return nil, fmt.Errorf("%w: %v", ErrInvalidJSON, err) + } + + // Decode hex fields + secret, err := hex.DecodeString(tj.Secret) + if err != nil { + return nil, fmt.Errorf("token: invalid secret hex: %w", err) + } + + signature, err := hex.DecodeString(tj.Signature) + if err != nil { + return nil, fmt.Errorf("token: invalid signature hex: %w", err) + } + + pubkey, err := hex.DecodeString(tj.Pubkey) + if err != nil { + return nil, fmt.Errorf("token: invalid pubkey hex: %w", err) + } + + return &Token{ + KeysetID: tj.KeysetID, + Secret: secret, + Signature: signature, + Pubkey: pubkey, + Expiry: tj.Expiry, + Scope: tj.Scope, + Kinds: tj.Kinds, + KindRanges: tj.KindRanges, + }, nil +} + +// ParseFromHeader extracts and parses a token from HTTP headers. +// Supports: +// - X-Cashu-Token: cashuA... +// - Authorization: Cashu cashuA... 
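+//
+// Illustrative usage, assuming r is an *http.Request:
+//
+//	tok, err := ParseFromHeader(r.Header.Get("Authorization"))
+//	if err != nil {
+//		// missing or malformed token
+//	}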
+func ParseFromHeader(header string) (*Token, error) { + // Try X-Cashu-Token format (raw token) + if strings.HasPrefix(header, Prefix) { + return Parse(header) + } + + // Try Authorization format + if strings.HasPrefix(header, "Cashu ") { + tokenStr := strings.TrimPrefix(header, "Cashu ") + return Parse(strings.TrimSpace(tokenStr)) + } + + return nil, ErrInvalidPrefix +} + +// Validate performs basic validation on the token. +// Does NOT verify the cryptographic signature - use Verifier for that. +func (t *Token) Validate() error { + if t.IsExpired() { + return ErrTokenExpired + } + + if len(t.KeysetID) != 14 { + return fmt.Errorf("token: invalid keyset ID length: %d", len(t.KeysetID)) + } + + if len(t.Secret) != 32 { + return fmt.Errorf("token: invalid secret length: %d", len(t.Secret)) + } + + if len(t.Signature) != 33 { + return fmt.Errorf("token: invalid signature length: %d", len(t.Signature)) + } + + if len(t.Pubkey) != 32 { + return fmt.Errorf("token: invalid pubkey length: %d", len(t.Pubkey)) + } + + if t.Scope == "" { + return errors.New("token: missing scope") + } + + // Validate kind ranges + for i, r := range t.KindRanges { + if len(r) != 2 { + return fmt.Errorf("token: kind range %d must have 2 elements", i) + } + if r[0] > r[1] { + return fmt.Errorf("token: kind range %d min > max: %d > %d", i, r[0], r[1]) + } + } + + return nil +} + +// Clone creates a copy of the token. +func (t *Token) Clone() *Token { + clone := &Token{ + KeysetID: t.KeysetID, + Secret: make([]byte, len(t.Secret)), + Signature: make([]byte, len(t.Signature)), + Pubkey: make([]byte, len(t.Pubkey)), + Expiry: t.Expiry, + Scope: t.Scope, + } + + copy(clone.Secret, t.Secret) + copy(clone.Signature, t.Signature) + copy(clone.Pubkey, t.Pubkey) + + if len(t.Kinds) > 0 { + clone.Kinds = make([]int, len(t.Kinds)) + copy(clone.Kinds, t.Kinds) + } + + if len(t.KindRanges) > 0 { + clone.KindRanges = make([][]int, len(t.KindRanges)) + for i, r := range t.KindRanges { + clone.KindRanges[i] = make([]int, len(r)) + copy(clone.KindRanges[i], r) + } + } + + return clone +} + +// String returns the encoded token string. 
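+// Encoding errors are silently discarded, so prefer Encode when the error
+// needs to be handled.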
+func (t *Token) String() string { + s, _ := t.Encode() + return s +} diff --git a/pkg/cashu/token/token_test.go b/pkg/cashu/token/token_test.go new file mode 100644 index 0000000..545c886 --- /dev/null +++ b/pkg/cashu/token/token_test.go @@ -0,0 +1,336 @@ +package token + +import ( + "encoding/hex" + "testing" + "time" +) + +func makeTestToken() *Token { + secret := make([]byte, 32) + signature := make([]byte, 33) + pubkey := make([]byte, 32) + + for i := range secret { + secret[i] = byte(i) + } + for i := range signature { + signature[i] = byte(i + 32) + } + for i := range pubkey { + pubkey[i] = byte(i + 64) + } + + signature[0] = 0x02 // Valid compressed point prefix + + return New( + "0a1b2c3d4e5f67", + secret, + signature, + pubkey, + time.Now().Add(time.Hour), + ScopeRelay, + ) +} + +func TestTokenEncodeDecode(t *testing.T) { + tok := makeTestToken() + tok.SetKinds(0, 1, 3, 7) + tok.AddKindRange(30000, 39999) + + encoded, err := tok.Encode() + if err != nil { + t.Fatalf("Encode failed: %v", err) + } + + // Should have correct prefix + if encoded[:6] != Prefix { + t.Errorf("Encoded token should start with %s, got %s", Prefix, encoded[:6]) + } + + // Decode + decoded, err := Parse(encoded) + if err != nil { + t.Fatalf("Parse failed: %v", err) + } + + // Compare fields + if decoded.KeysetID != tok.KeysetID { + t.Errorf("KeysetID mismatch: %s != %s", decoded.KeysetID, tok.KeysetID) + } + if hex.EncodeToString(decoded.Secret) != hex.EncodeToString(tok.Secret) { + t.Error("Secret mismatch") + } + if hex.EncodeToString(decoded.Signature) != hex.EncodeToString(tok.Signature) { + t.Error("Signature mismatch") + } + if hex.EncodeToString(decoded.Pubkey) != hex.EncodeToString(tok.Pubkey) { + t.Error("Pubkey mismatch") + } + if decoded.Expiry != tok.Expiry { + t.Errorf("Expiry mismatch: %d != %d", decoded.Expiry, tok.Expiry) + } + if decoded.Scope != tok.Scope { + t.Errorf("Scope mismatch: %s != %s", decoded.Scope, tok.Scope) + } + + // Check kinds + if len(decoded.Kinds) != len(tok.Kinds) { + t.Errorf("Kinds length mismatch: %d != %d", len(decoded.Kinds), len(tok.Kinds)) + } + for i, k := range decoded.Kinds { + if k != tok.Kinds[i] { + t.Errorf("Kinds[%d] mismatch: %d != %d", i, k, tok.Kinds[i]) + } + } + + // Check kind ranges + if len(decoded.KindRanges) != len(tok.KindRanges) { + t.Errorf("KindRanges length mismatch: %d != %d", len(decoded.KindRanges), len(tok.KindRanges)) + } +} + +func TestTokenKindPermissions(t *testing.T) { + tok := makeTestToken() + tok.SetKinds(0, 1, 3) + tok.AddKindRange(30000, 39999) + + tests := []struct { + kind int + expected bool + }{ + {0, true}, // Explicit kind + {1, true}, // Explicit kind + {3, true}, // Explicit kind + {2, false}, // Not in list + {7, false}, // Not in list + {30000, true}, // Start of range + {35000, true}, // Middle of range + {39999, true}, // End of range + {29999, false}, // Just before range + {40000, false}, // Just after range + } + + for _, tt := range tests { + result := tok.IsKindPermitted(tt.kind) + if result != tt.expected { + t.Errorf("IsKindPermitted(%d) = %v, want %v", tt.kind, result, tt.expected) + } + } +} + +func TestTokenWildcardKind(t *testing.T) { + tok := makeTestToken() + tok.SetKinds(WildcardKind) + + // All kinds should be permitted + for _, kind := range []int{0, 1, 100, 1000, 30000, 65535} { + if !tok.IsKindPermitted(kind) { + t.Errorf("Wildcard should permit kind %d", kind) + } + } +} + +func TestTokenReadOnly(t *testing.T) { + tok := makeTestToken() + + // No kinds set - should be read-only by kinds check + 
if tok.HasWritePermission() { + t.Error("Token with no kinds should not have write permission") + } + + tok.SetKinds(1) + if !tok.HasWritePermission() { + t.Error("Token with kinds should have write permission") + } +} + +func TestTokenExpiry(t *testing.T) { + // Token that expires in 1 hour + tok := makeTestToken() + if tok.IsExpired() { + t.Error("Token should not be expired yet") + } + + // Token that expired 1 hour ago + tok.Expiry = time.Now().Add(-time.Hour).Unix() + if !tok.IsExpired() { + t.Error("Token should be expired") + } +} + +func TestTokenTimeRemaining(t *testing.T) { + tok := makeTestToken() + remaining := tok.TimeRemaining() + + // Should be close to 1 hour + if remaining < 59*time.Minute || remaining > 61*time.Minute { + t.Errorf("TimeRemaining = %v, expected ~1 hour", remaining) + } +} + +func TestTokenValidate(t *testing.T) { + // Valid token + tok := makeTestToken() + if err := tok.Validate(); err != nil { + t.Errorf("Validate failed for valid token: %v", err) + } + + // Expired token + expired := makeTestToken() + expired.Expiry = time.Now().Add(-time.Hour).Unix() + if err := expired.Validate(); err != ErrTokenExpired { + t.Errorf("Validate should return ErrTokenExpired, got %v", err) + } + + // Invalid keyset ID + badKeyset := makeTestToken() + badKeyset.KeysetID = "short" + if err := badKeyset.Validate(); err == nil { + t.Error("Validate should fail for short keyset ID") + } + + // Invalid secret length + badSecret := makeTestToken() + badSecret.Secret = []byte{1, 2, 3} + if err := badSecret.Validate(); err == nil { + t.Error("Validate should fail for wrong secret length") + } + + // Invalid kind range + badRange := makeTestToken() + badRange.KindRanges = [][]int{{100, 50}} // min > max + if err := badRange.Validate(); err == nil { + t.Error("Validate should fail for invalid kind range") + } +} + +func TestParseFromHeader(t *testing.T) { + tok := makeTestToken() + encoded, _ := tok.Encode() + + // Test X-Cashu-Token format + parsed, err := ParseFromHeader(encoded) + if err != nil { + t.Fatalf("ParseFromHeader failed for raw token: %v", err) + } + if parsed.KeysetID != tok.KeysetID { + t.Error("Parsed token has wrong KeysetID") + } + + // Test Authorization format + parsed, err = ParseFromHeader("Cashu " + encoded) + if err != nil { + t.Fatalf("ParseFromHeader failed for Authorization format: %v", err) + } + if parsed.KeysetID != tok.KeysetID { + t.Error("Parsed token has wrong KeysetID") + } + + // Test invalid format + _, err = ParseFromHeader("Bearer xyz") + if err != ErrInvalidPrefix { + t.Errorf("Expected ErrInvalidPrefix, got %v", err) + } +} + +func TestTokenClone(t *testing.T) { + tok := makeTestToken() + tok.SetKinds(1, 2, 3) + tok.AddKindRange(100, 200) + + clone := tok.Clone() + + // Modify original + tok.Secret[0] = 0xFF + tok.Kinds[0] = 999 + tok.KindRanges[0][0] = 999 + + // Clone should be unchanged + if clone.Secret[0] == 0xFF { + t.Error("Clone secret was modified when original changed") + } + if clone.Kinds[0] == 999 { + t.Error("Clone kinds was modified when original changed") + } + if clone.KindRanges[0][0] == 999 { + t.Error("Clone kind ranges was modified when original changed") + } +} + +func TestTokenMatchesScope(t *testing.T) { + tok := makeTestToken() + tok.Scope = ScopeNIP46 + + if !tok.MatchesScope(ScopeNIP46) { + t.Error("Should match ScopeNIP46") + } + if tok.MatchesScope(ScopeRelay) { + t.Error("Should not match ScopeRelay") + } +} + +func TestTokenPubkeyHex(t *testing.T) { + tok := makeTestToken() + hexPubkey := tok.PubkeyHex() + + // 
Should be 64 characters (32 bytes * 2) + if len(hexPubkey) != 64 { + t.Errorf("PubkeyHex length = %d, want 64", len(hexPubkey)) + } + + // Should decode back to original + decoded, err := hex.DecodeString(hexPubkey) + if err != nil { + t.Fatalf("PubkeyHex is not valid hex: %v", err) + } + for i, b := range decoded { + if b != tok.Pubkey[i] { + t.Errorf("PubkeyHex[%d] mismatch", i) + } + } +} + +func TestTokenString(t *testing.T) { + tok := makeTestToken() + s := tok.String() + + if s[:6] != Prefix { + t.Errorf("String() should start with prefix, got %s", s[:6]) + } +} + +func BenchmarkTokenEncode(b *testing.B) { + tok := makeTestToken() + tok.SetKinds(0, 1, 3, 7) + tok.AddKindRange(30000, 39999) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tok.Encode() + } +} + +func BenchmarkTokenParse(b *testing.B) { + tok := makeTestToken() + tok.SetKinds(0, 1, 3, 7) + tok.AddKindRange(30000, 39999) + encoded, _ := tok.Encode() + + b.ResetTimer() + for i := 0; i < b.N; i++ { + Parse(encoded) + } +} + +func BenchmarkTokenIsKindPermitted(b *testing.B) { + tok := makeTestToken() + tok.SetKinds(0, 1, 3, 7, 10, 20, 30, 40, 50) + tok.AddKindRange(30000, 39999) + tok.AddKindRange(20000, 29999) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tok.IsKindPermitted(35000) + } +} diff --git a/pkg/cashu/verifier/middleware.go b/pkg/cashu/verifier/middleware.go new file mode 100644 index 0000000..eea6efa --- /dev/null +++ b/pkg/cashu/verifier/middleware.go @@ -0,0 +1,138 @@ +package verifier + +import ( + "context" + "net/http" + + "next.orly.dev/pkg/cashu/token" +) + +// ContextKey is the type for context keys. +type ContextKey string + +const ( + // TokenContextKey is the context key for the verified token. + TokenContextKey ContextKey = "cashu_token" + + // PubkeyContextKey is the context key for the user's pubkey. + PubkeyContextKey ContextKey = "cashu_pubkey" +) + +// TokenFromContext extracts the verified token from the request context. +func TokenFromContext(ctx context.Context) *token.Token { + if tok, ok := ctx.Value(TokenContextKey).(*token.Token); ok { + return tok + } + return nil +} + +// PubkeyFromContext extracts the user's pubkey from the request context. +func PubkeyFromContext(ctx context.Context) []byte { + if pubkey, ok := ctx.Value(PubkeyContextKey).([]byte); ok { + return pubkey + } + return nil +} + +// Middleware creates an HTTP middleware that verifies Cashu tokens. +func Middleware(v *Verifier, requiredScope string) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + tok, err := v.VerifyRequest(r.Context(), r, requiredScope) + if err != nil { + writeError(w, err) + return + } + + // Add token and pubkey to context + ctx := context.WithValue(r.Context(), TokenContextKey, tok) + ctx = context.WithValue(ctx, PubkeyContextKey, tok.Pubkey) + + next.ServeHTTP(w, r.WithContext(ctx)) + }) + } +} + +// MiddlewareForKind creates middleware that also checks kind permission. 
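+//
+// Illustrative usage, assuming mux is an *http.ServeMux, v a *Verifier and
+// handler an http.Handler (route and kind value are examples):
+//
+//	mux.Handle("/publish", MiddlewareForKind(v, token.ScopeRelay, 1)(handler))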
+func MiddlewareForKind(v *Verifier, requiredScope string, kind int) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + tok, err := v.VerifyRequestForKind(r.Context(), r, requiredScope, kind) + if err != nil { + writeError(w, err) + return + } + + ctx := context.WithValue(r.Context(), TokenContextKey, tok) + ctx = context.WithValue(ctx, PubkeyContextKey, tok.Pubkey) + + next.ServeHTTP(w, r.WithContext(ctx)) + }) + } +} + +// OptionalMiddleware creates middleware that verifies tokens if present, +// but allows requests without tokens to proceed. +func OptionalMiddleware(v *Verifier, requiredScope string) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + tok, err := v.VerifyRequest(r.Context(), r, requiredScope) + if err == nil { + // Token present and valid - add to context + ctx := context.WithValue(r.Context(), TokenContextKey, tok) + ctx = context.WithValue(ctx, PubkeyContextKey, tok.Pubkey) + r = r.WithContext(ctx) + } else if err != ErrMissingToken { + // Token present but invalid - reject + writeError(w, err) + return + } + // No token or valid token - proceed + + next.ServeHTTP(w, r) + }) + } +} + +// writeError writes an appropriate HTTP error response. +func writeError(w http.ResponseWriter, err error) { + switch err { + case ErrMissingToken: + http.Error(w, "Missing token", http.StatusUnauthorized) + case ErrTokenExpired: + http.Error(w, "Token expired", http.StatusGone) + case ErrUnknownKeyset: + http.Error(w, "Unknown keyset", http.StatusMisdirectedRequest) + case ErrInvalidSignature: + http.Error(w, "Invalid signature", http.StatusUnauthorized) + case ErrScopeMismatch: + http.Error(w, "Scope mismatch", http.StatusForbidden) + case ErrKindNotPermitted: + http.Error(w, "Kind not permitted", http.StatusForbidden) + case ErrAccessRevoked: + http.Error(w, "Access revoked", http.StatusForbidden) + default: + http.Error(w, err.Error(), http.StatusUnauthorized) + } +} + +// RequireToken is a helper that extracts and verifies a token inline. +// Returns the token or writes an error response and returns nil. +func RequireToken(v *Verifier, w http.ResponseWriter, r *http.Request, requiredScope string) *token.Token { + tok, err := v.VerifyRequest(r.Context(), r, requiredScope) + if err != nil { + writeError(w, err) + return nil + } + return tok +} + +// RequireKind is a helper that also checks kind permission inline. +func RequireKind(v *Verifier, w http.ResponseWriter, r *http.Request, requiredScope string, kind int) *token.Token { + tok, err := v.VerifyRequestForKind(r.Context(), r, requiredScope, kind) + if err != nil { + writeError(w, err) + return nil + } + return tok +} diff --git a/pkg/cashu/verifier/verifier.go b/pkg/cashu/verifier/verifier.go new file mode 100644 index 0000000..442ea11 --- /dev/null +++ b/pkg/cashu/verifier/verifier.go @@ -0,0 +1,186 @@ +// Package verifier implements Cashu token verification with optional re-authorization. +package verifier + +import ( + "context" + "errors" + "fmt" + "net/http" + + "github.com/decred/dcrd/dcrec/secp256k1/v4" + + "next.orly.dev/pkg/cashu/bdhke" + "next.orly.dev/pkg/cashu/keyset" + "next.orly.dev/pkg/cashu/token" + cashuiface "next.orly.dev/pkg/interfaces/cashu" +) + +// Errors. 
+var ( + ErrTokenExpired = errors.New("verifier: token expired") + ErrUnknownKeyset = errors.New("verifier: unknown keyset") + ErrInvalidSignature = errors.New("verifier: invalid signature") + ErrScopeMismatch = errors.New("verifier: scope mismatch") + ErrKindNotPermitted = errors.New("verifier: kind not permitted") + ErrAccessRevoked = errors.New("verifier: access revoked") + ErrMissingToken = errors.New("verifier: missing token") +) + +// Config holds verifier configuration. +type Config struct { + // Reauthorize enables re-checking authorization on each verification. + // This provides "stateless revocation" at the cost of an extra check. + Reauthorize bool +} + +// DefaultConfig returns sensible default configuration. +func DefaultConfig() Config { + return Config{ + Reauthorize: true, // Enable stateless revocation by default + } +} + +// Verifier validates Cashu tokens and checks permissions. +type Verifier struct { + keysets *keyset.Manager + authz cashuiface.AuthzChecker + claimValidator cashuiface.ClaimValidator + config Config +} + +// New creates a new verifier. +func New(keysets *keyset.Manager, authz cashuiface.AuthzChecker, config Config) *Verifier { + return &Verifier{ + keysets: keysets, + authz: authz, + config: config, + } +} + +// SetClaimValidator sets an optional claim validator. +func (v *Verifier) SetClaimValidator(cv cashuiface.ClaimValidator) { + v.claimValidator = cv +} + +// Verify validates a token's cryptographic signature and checks expiry. +func (v *Verifier) Verify(ctx context.Context, tok *token.Token, remoteAddr string) error { + // Basic validation + if err := tok.Validate(); err != nil { + return err + } + + // Check expiry + if tok.IsExpired() { + return ErrTokenExpired + } + + // Find keyset + ks := v.keysets.FindByID(tok.KeysetID) + if ks == nil { + return fmt.Errorf("%w: %s", ErrUnknownKeyset, tok.KeysetID) + } + + // Verify signature + valid, err := v.verifySignature(tok, ks) + if err != nil { + return fmt.Errorf("verifier: signature check failed: %w", err) + } + if !valid { + return ErrInvalidSignature + } + + // Re-check authorization if enabled + if v.config.Reauthorize && v.authz != nil { + if err := v.authz.CheckAuthorization(ctx, tok.Pubkey, tok.Scope, remoteAddr); err != nil { + return fmt.Errorf("%w: %v", ErrAccessRevoked, err) + } + } + + return nil +} + +// VerifyForScope verifies a token and checks that it has the required scope. +func (v *Verifier) VerifyForScope(ctx context.Context, tok *token.Token, requiredScope string, remoteAddr string) error { + if err := v.Verify(ctx, tok, remoteAddr); err != nil { + return err + } + + if !tok.MatchesScope(requiredScope) { + return fmt.Errorf("%w: expected %s, got %s", ErrScopeMismatch, requiredScope, tok.Scope) + } + + return nil +} + +// VerifyForKind verifies a token and checks that the specified kind is permitted. +func (v *Verifier) VerifyForKind(ctx context.Context, tok *token.Token, kind int, remoteAddr string) error { + if err := v.Verify(ctx, tok, remoteAddr); err != nil { + return err + } + + if !tok.IsKindPermitted(kind) { + return fmt.Errorf("%w: kind %d", ErrKindNotPermitted, kind) + } + + return nil +} + +// verifySignature checks the BDHKE signature. 
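+// The relation checked is C == k*HashToCurve(secret), where C is the unblinded
+// signature carried in the token and k is the keyset private key.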
+func (v *Verifier) verifySignature(tok *token.Token, ks *keyset.Keyset) (bool, error) { + // Parse signature as curve point + C, err := secp256k1.ParsePubKey(tok.Signature) + if err != nil { + return false, fmt.Errorf("invalid signature format: %w", err) + } + + // Verify: C == k * HashToCurve(secret) + return bdhke.Verify(tok.Secret, C, ks.PrivateKey) +} + +// ExtractFromRequest extracts and parses a token from an HTTP request. +// Checks headers in order: X-Cashu-Token, Authorization (Cashu scheme). +func (v *Verifier) ExtractFromRequest(r *http.Request) (*token.Token, error) { + // Try X-Cashu-Token header first + if header := r.Header.Get("X-Cashu-Token"); header != "" { + return token.ParseFromHeader(header) + } + + // Try Authorization header + if header := r.Header.Get("Authorization"); header != "" { + return token.ParseFromHeader(header) + } + + return nil, ErrMissingToken +} + +// VerifyRequest extracts, parses, and verifies a token from an HTTP request. +func (v *Verifier) VerifyRequest(ctx context.Context, r *http.Request, requiredScope string) (*token.Token, error) { + tok, err := v.ExtractFromRequest(r) + if err != nil { + return nil, err + } + + if err := v.VerifyForScope(ctx, tok, requiredScope, r.RemoteAddr); err != nil { + return nil, err + } + + return tok, nil +} + +// VerifyRequestForKind extracts, parses, and verifies a token for a specific kind. +func (v *Verifier) VerifyRequestForKind(ctx context.Context, r *http.Request, requiredScope string, kind int) (*token.Token, error) { + tok, err := v.ExtractFromRequest(r) + if err != nil { + return nil, err + } + + if err := v.VerifyForScope(ctx, tok, requiredScope, r.RemoteAddr); err != nil { + return nil, err + } + + if !tok.IsKindPermitted(kind) { + return nil, fmt.Errorf("%w: kind %d", ErrKindNotPermitted, kind) + } + + return tok, nil +} diff --git a/pkg/cashu/verifier/verifier_test.go b/pkg/cashu/verifier/verifier_test.go new file mode 100644 index 0000000..1c1af8a --- /dev/null +++ b/pkg/cashu/verifier/verifier_test.go @@ -0,0 +1,396 @@ +package verifier + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/decred/dcrd/dcrec/secp256k1/v4" + + "next.orly.dev/pkg/cashu/bdhke" + "next.orly.dev/pkg/cashu/issuer" + "next.orly.dev/pkg/cashu/keyset" + "next.orly.dev/pkg/cashu/token" + cashuiface "next.orly.dev/pkg/interfaces/cashu" +) + +func setupVerifier() (*Verifier, *issuer.Issuer, *keyset.Manager) { + store := keyset.NewMemoryStore() + manager := keyset.NewManager(store, keyset.DefaultActiveWindow, keyset.DefaultVerifyWindow) + manager.Init() + + issuerConfig := issuer.DefaultConfig() + iss := issuer.New(manager, cashuiface.AllowAllChecker{}, issuerConfig) + + verifierConfig := DefaultConfig() + ver := New(manager, cashuiface.AllowAllChecker{}, verifierConfig) + + return ver, iss, manager +} + +func issueTestToken(iss *issuer.Issuer, scope string, kinds []int) (*token.Token, error) { + secret, err := bdhke.GenerateSecret() + if err != nil { + return nil, err + } + + blindResult, err := bdhke.Blind(secret) + if err != nil { + return nil, err + } + + pubkey := make([]byte, 32) + for i := range pubkey { + pubkey[i] = byte(i) + } + + req := &issuer.IssueRequest{ + BlindedMessage: blindResult.B.SerializeCompressed(), + Pubkey: pubkey, + Scope: scope, + Kinds: kinds, + } + + resp, err := iss.Issue(context.Background(), req, "127.0.0.1") + if err != nil { + return nil, err + } + + return issuer.BuildToken(resp, secret, blindResult.R, pubkey, scope, kinds, nil) +} + +func 
TestVerifySuccess(t *testing.T) { + ver, iss, _ := setupVerifier() + + tok, err := issueTestToken(iss, token.ScopeRelay, []int{1, 2, 3}) + if err != nil { + t.Fatalf("issueTestToken failed: %v", err) + } + + err = ver.Verify(context.Background(), tok, "127.0.0.1") + if err != nil { + t.Errorf("Verify failed: %v", err) + } +} + +func TestVerifyExpired(t *testing.T) { + ver, iss, _ := setupVerifier() + + tok, err := issueTestToken(iss, token.ScopeRelay, []int{1}) + if err != nil { + t.Fatalf("issueTestToken failed: %v", err) + } + + // Expire the token + tok.Expiry = time.Now().Add(-time.Hour).Unix() + + err = ver.Verify(context.Background(), tok, "127.0.0.1") + if err == nil { + t.Error("Verify should fail for expired token") + } +} + +func TestVerifyInvalidSignature(t *testing.T) { + ver, iss, _ := setupVerifier() + + tok, err := issueTestToken(iss, token.ScopeRelay, []int{1}) + if err != nil { + t.Fatalf("issueTestToken failed: %v", err) + } + + // Corrupt the signature + tok.Signature[10] ^= 0xFF + + err = ver.Verify(context.Background(), tok, "127.0.0.1") + if err == nil { + t.Error("Verify should fail for invalid signature") + } +} + +func TestVerifyUnknownKeyset(t *testing.T) { + ver, iss, _ := setupVerifier() + + tok, err := issueTestToken(iss, token.ScopeRelay, []int{1}) + if err != nil { + t.Fatalf("issueTestToken failed: %v", err) + } + + // Change keyset ID + tok.KeysetID = "00000000000000" + + err = ver.Verify(context.Background(), tok, "127.0.0.1") + if err == nil { + t.Error("Verify should fail for unknown keyset") + } +} + +func TestVerifyForScope(t *testing.T) { + ver, iss, _ := setupVerifier() + + tok, err := issueTestToken(iss, token.ScopeNIP46, []int{24133}) + if err != nil { + t.Fatalf("issueTestToken failed: %v", err) + } + + // Should pass for correct scope + err = ver.VerifyForScope(context.Background(), tok, token.ScopeNIP46, "127.0.0.1") + if err != nil { + t.Errorf("VerifyForScope failed for correct scope: %v", err) + } + + // Should fail for wrong scope + err = ver.VerifyForScope(context.Background(), tok, token.ScopeRelay, "127.0.0.1") + if err == nil { + t.Error("VerifyForScope should fail for wrong scope") + } +} + +func TestVerifyForKind(t *testing.T) { + ver, iss, _ := setupVerifier() + + tok, err := issueTestToken(iss, token.ScopeRelay, []int{1, 2, 3}) + if err != nil { + t.Fatalf("issueTestToken failed: %v", err) + } + + // Should pass for permitted kind + err = ver.VerifyForKind(context.Background(), tok, 1, "127.0.0.1") + if err != nil { + t.Errorf("VerifyForKind failed for permitted kind: %v", err) + } + + // Should fail for non-permitted kind + err = ver.VerifyForKind(context.Background(), tok, 100, "127.0.0.1") + if err == nil { + t.Error("VerifyForKind should fail for non-permitted kind") + } +} + +func TestVerifyReauthorization(t *testing.T) { + store := keyset.NewMemoryStore() + manager := keyset.NewManager(store, keyset.DefaultActiveWindow, keyset.DefaultVerifyWindow) + manager.Init() + + iss := issuer.New(manager, cashuiface.AllowAllChecker{}, issuer.DefaultConfig()) + + // Create verifier that denies authorization + config := DefaultConfig() + config.Reauthorize = true + ver := New(manager, cashuiface.DenyAllChecker{}, config) + + tok, err := issueTestToken(iss, token.ScopeRelay, []int{1}) + if err != nil { + t.Fatalf("issueTestToken failed: %v", err) + } + + // Should fail due to reauthorization check + err = ver.Verify(context.Background(), tok, "127.0.0.1") + if err == nil { + t.Error("Verify should fail when reauthorization fails") + } +} + 
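+// TestMiddlewareForKind is an illustrative sketch exercising the kind-checking
+// middleware variant; the kind values used here are examples.
+func TestMiddlewareForKind(t *testing.T) {
+	ver, iss, _ := setupVerifier()
+
+	tok, err := issueTestToken(iss, token.ScopeRelay, []int{1})
+	if err != nil {
+		t.Fatalf("issueTestToken failed: %v", err)
+	}
+	encoded, _ := tok.Encode()
+
+	handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		w.WriteHeader(http.StatusOK)
+	})
+
+	// Permitted kind (1) should pass through to the handler.
+	wrapped := MiddlewareForKind(ver, token.ScopeRelay, 1)(handler)
+	req := httptest.NewRequest("GET", "/", nil)
+	req.Header.Set("X-Cashu-Token", encoded)
+	rec := httptest.NewRecorder()
+	wrapped.ServeHTTP(rec, req)
+	if rec.Code != http.StatusOK {
+		t.Errorf("permitted kind: Status = %d, want 200", rec.Code)
+	}
+
+	// A kind outside the token's permissions should be rejected.
+	wrapped = MiddlewareForKind(ver, token.ScopeRelay, 7)(handler)
+	req = httptest.NewRequest("GET", "/", nil)
+	req.Header.Set("X-Cashu-Token", encoded)
+	rec = httptest.NewRecorder()
+	wrapped.ServeHTTP(rec, req)
+	if rec.Code == http.StatusOK {
+		t.Error("non-permitted kind: request should not succeed")
+	}
+}
+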
+func TestExtractFromRequest(t *testing.T) { + ver, iss, _ := setupVerifier() + + tok, err := issueTestToken(iss, token.ScopeRelay, []int{1}) + if err != nil { + t.Fatalf("issueTestToken failed: %v", err) + } + + encoded, _ := tok.Encode() + + tests := []struct { + name string + header string + value string + }{ + {"X-Cashu-Token", "X-Cashu-Token", encoded}, + {"Authorization Cashu", "Authorization", "Cashu " + encoded}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + req := httptest.NewRequest("GET", "/", nil) + req.Header.Set(tt.header, tt.value) + + extracted, err := ver.ExtractFromRequest(req) + if err != nil { + t.Fatalf("ExtractFromRequest failed: %v", err) + } + + if extracted.KeysetID != tok.KeysetID { + t.Errorf("KeysetID mismatch: %s != %s", extracted.KeysetID, tok.KeysetID) + } + }) + } +} + +func TestExtractFromRequestMissing(t *testing.T) { + ver, _, _ := setupVerifier() + + req := httptest.NewRequest("GET", "/", nil) + + _, err := ver.ExtractFromRequest(req) + if err != ErrMissingToken { + t.Errorf("Expected ErrMissingToken, got %v", err) + } +} + +func TestVerifyRequest(t *testing.T) { + ver, iss, _ := setupVerifier() + + tok, err := issueTestToken(iss, token.ScopeRelay, []int{1}) + if err != nil { + t.Fatalf("issueTestToken failed: %v", err) + } + + encoded, _ := tok.Encode() + + req := httptest.NewRequest("GET", "/", nil) + req.Header.Set("X-Cashu-Token", encoded) + + verified, err := ver.VerifyRequest(context.Background(), req, token.ScopeRelay) + if err != nil { + t.Fatalf("VerifyRequest failed: %v", err) + } + + if verified.KeysetID != tok.KeysetID { + t.Error("VerifyRequest returned wrong token") + } +} + +func TestMiddleware(t *testing.T) { + ver, iss, _ := setupVerifier() + + tok, err := issueTestToken(iss, token.ScopeRelay, []int{1}) + if err != nil { + t.Fatalf("issueTestToken failed: %v", err) + } + + encoded, _ := tok.Encode() + + // Handler that checks context + handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctxTok := TokenFromContext(r.Context()) + if ctxTok == nil { + t.Error("Token not in context") + } + pubkey := PubkeyFromContext(r.Context()) + if pubkey == nil { + t.Error("Pubkey not in context") + } + w.WriteHeader(http.StatusOK) + }) + + wrapped := Middleware(ver, token.ScopeRelay)(handler) + + req := httptest.NewRequest("GET", "/", nil) + req.Header.Set("X-Cashu-Token", encoded) + + rec := httptest.NewRecorder() + wrapped.ServeHTTP(rec, req) + + if rec.Code != http.StatusOK { + t.Errorf("Status = %d, want 200", rec.Code) + } +} + +func TestMiddlewareUnauthorized(t *testing.T) { + ver, _, _ := setupVerifier() + + handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + }) + + wrapped := Middleware(ver, token.ScopeRelay)(handler) + + // Request without token + req := httptest.NewRequest("GET", "/", nil) + rec := httptest.NewRecorder() + wrapped.ServeHTTP(rec, req) + + if rec.Code != http.StatusUnauthorized { + t.Errorf("Status = %d, want 401", rec.Code) + } +} + +func TestOptionalMiddleware(t *testing.T) { + ver, iss, _ := setupVerifier() + + tok, err := issueTestToken(iss, token.ScopeRelay, []int{1}) + if err != nil { + t.Fatalf("issueTestToken failed: %v", err) + } + + encoded, _ := tok.Encode() + + handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + }) + + wrapped := OptionalMiddleware(ver, token.ScopeRelay)(handler) + + // With token + req1 := httptest.NewRequest("GET", "/", nil) + 
req1.Header.Set("X-Cashu-Token", encoded) + rec1 := httptest.NewRecorder() + wrapped.ServeHTTP(rec1, req1) + + if rec1.Code != http.StatusOK { + t.Errorf("With token: Status = %d, want 200", rec1.Code) + } + + // Without token + req2 := httptest.NewRequest("GET", "/", nil) + rec2 := httptest.NewRecorder() + wrapped.ServeHTTP(rec2, req2) + + if rec2.Code != http.StatusOK { + t.Errorf("Without token: Status = %d, want 200", rec2.Code) + } +} + +func TestRequireToken(t *testing.T) { + ver, iss, _ := setupVerifier() + + tok, err := issueTestToken(iss, token.ScopeRelay, []int{1}) + if err != nil { + t.Fatalf("issueTestToken failed: %v", err) + } + + encoded, _ := tok.Encode() + + // With valid token + req := httptest.NewRequest("GET", "/", nil) + req.Header.Set("X-Cashu-Token", encoded) + rec := httptest.NewRecorder() + + result := RequireToken(ver, rec, req, token.ScopeRelay) + if result == nil { + t.Error("RequireToken should return token") + } + + // Without token + req2 := httptest.NewRequest("GET", "/", nil) + rec2 := httptest.NewRecorder() + + result2 := RequireToken(ver, rec2, req2, token.ScopeRelay) + if result2 != nil { + t.Error("RequireToken should return nil for missing token") + } + if rec2.Code != http.StatusUnauthorized { + t.Errorf("Status = %d, want 401", rec2.Code) + } +} + +// Helper to parse point +func mustParsePoint(data []byte) *secp256k1.PublicKey { + pk, err := secp256k1.ParsePubKey(data) + if err != nil { + panic(err) + } + return pk +} diff --git a/pkg/interfaces/cashu/cashu.go b/pkg/interfaces/cashu/cashu.go new file mode 100644 index 0000000..a350448 --- /dev/null +++ b/pkg/interfaces/cashu/cashu.go @@ -0,0 +1,106 @@ +// Package cashu defines interfaces for the Cashu access token system. +// Implement these interfaces to integrate with your authorization backend. +package cashu + +import ( + "context" +) + +// AuthzChecker determines if a pubkey is authorized for a given scope. +// Implement this interface to integrate with your access control system. +type AuthzChecker interface { + // CheckAuthorization returns nil if the pubkey is authorized for the scope, + // or an error describing why authorization failed. + // + // Parameters: + // - ctx: Context for cancellation and timeouts + // - pubkey: User's Nostr pubkey (32 bytes) + // - scope: Token scope (e.g., "relay", "nip46", "api") + // - remoteAddr: Client's remote address (for IP-based checks) + // + // The implementation should check if the user has sufficient permissions + // for the requested scope. This is called during token issuance. + CheckAuthorization(ctx context.Context, pubkey []byte, scope string, remoteAddr string) error +} + +// ReauthorizationChecker is an optional extension of AuthzChecker that +// supports re-checking authorization during token verification. +// This enables "stateless revocation" - tokens become invalid immediately +// when the user is removed from the access list. +type ReauthorizationChecker interface { + AuthzChecker + + // ReauthorizationEnabled returns true if authorization should be + // re-checked on every token verification. + ReauthorizationEnabled() bool +} + +// ClaimValidator validates custom claims in tokens. +// Implement this for application-specific claim validation. +type ClaimValidator interface { + // ValidateClaims validates custom claims embedded in a token. + // Returns nil if claims are valid, error otherwise. + ValidateClaims(claims map[string]any) error +} + +// KindPermissionChecker validates event kind permissions. 
+// This is typically implemented by the token itself, but can be +// extended for additional validation logic. +type KindPermissionChecker interface { + // IsKindPermitted returns true if the given event kind is allowed. + IsKindPermitted(kind int) bool + + // HasWritePermission returns true if any kinds are permitted. + HasWritePermission() bool +} + +// Common error types that implementations may return. +type AuthzError struct { + Code string + Message string +} + +func (e *AuthzError) Error() string { + return e.Message +} + +// Predefined authorization error codes. +const ( + ErrCodeNotAuthorized = "not_authorized" + ErrCodeBanned = "banned" + ErrCodeBlocked = "blocked" + ErrCodeInvalidScope = "invalid_scope" + ErrCodeRateLimited = "rate_limited" + ErrCodeInsufficientAccess = "insufficient_access" +) + +// NewAuthzError creates a new authorization error. +func NewAuthzError(code, message string) *AuthzError { + return &AuthzError{Code: code, Message: message} +} + +// Common authorization errors. +var ( + ErrNotAuthorized = NewAuthzError(ErrCodeNotAuthorized, "not authorized for this scope") + ErrBanned = NewAuthzError(ErrCodeBanned, "user is banned") + ErrBlocked = NewAuthzError(ErrCodeBlocked, "IP address is blocked") + ErrInvalidScope = NewAuthzError(ErrCodeInvalidScope, "invalid scope requested") +) + +// AllowAllChecker is a simple implementation that allows all requests. +// Useful for testing or open relays. +type AllowAllChecker struct{} + +// CheckAuthorization always returns nil (allowed). +func (AllowAllChecker) CheckAuthorization(ctx context.Context, pubkey []byte, scope string, remoteAddr string) error { + return nil +} + +// DenyAllChecker is a simple implementation that denies all requests. +// Useful for testing or temporarily disabling token issuance. +type DenyAllChecker struct{} + +// CheckAuthorization always returns ErrNotAuthorized. +func (DenyAllChecker) CheckAuthorization(ctx context.Context, pubkey []byte, scope string, remoteAddr string) error { + return ErrNotAuthorized +}
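+
+// StaticWhitelistChecker is a minimal illustrative AuthzChecker backed by an
+// in-memory set of pubkeys, keyed by the raw 32-byte pubkey as a string. It is
+// a sketch intended for tests and documentation; real deployments would adapt
+// their own ACL backend instead.
+type StaticWhitelistChecker struct {
+	Allowed map[string]bool
+}
+
+// CheckAuthorization allows any scope for whitelisted pubkeys and returns
+// ErrNotAuthorized otherwise.
+func (c StaticWhitelistChecker) CheckAuthorization(ctx context.Context, pubkey []byte, scope string, remoteAddr string) error {
+	if c.Allowed[string(pubkey)] {
+		return nil
+	}
+	return ErrNotAuthorized
+}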