forked from mleku/next.orly.dev
Compare commits
9 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
be6cd8c740
|
|||
|
8b3d03da2c
|
|||
|
5bcb8d7f52
|
|||
|
b3b963ecf5
|
|||
|
d4fb6cbf49
|
|||
|
d5c0e3abfc
|
|||
|
1d4d877a10
|
|||
|
038d1959ed
|
|||
|
86481a42e8
|
@@ -83,7 +83,33 @@
|
||||
"Bash(ORLY_LOG_LEVEL=debug timeout 60 ./orly:*)",
|
||||
"Bash(ORLY_LOG_LEVEL=debug timeout 30 ./orly:*)",
|
||||
"Bash(killall:*)",
|
||||
"Bash(kill:*)"
|
||||
"Bash(kill:*)",
|
||||
"Bash(gh repo list:*)",
|
||||
"Bash(gh auth:*)",
|
||||
"Bash(/tmp/backup-github-repos.sh)",
|
||||
"Bash(./benchmark:*)",
|
||||
"Bash(env)",
|
||||
"Bash(./run-badger-benchmark.sh:*)",
|
||||
"Bash(./update-github-vpn.sh:*)",
|
||||
"Bash(dmesg:*)",
|
||||
"Bash(export:*)",
|
||||
"Bash(timeout 60 /tmp/benchmark-fixed:*)",
|
||||
"Bash(/tmp/test-auth-event.sh)",
|
||||
"Bash(CGO_ENABLED=0 timeout 180 go test:*)",
|
||||
"Bash(/tmp/benchmark-real-events:*)",
|
||||
"Bash(CGO_ENABLED=0 timeout 240 go build:*)",
|
||||
"Bash(/tmp/benchmark-final --events 500 --workers 2 --datadir /tmp/test-real-final)",
|
||||
"Bash(timeout 60 /tmp/benchmark-final:*)",
|
||||
"Bash(timeout 120 ./benchmark:*)",
|
||||
"Bash(timeout 60 ./benchmark:*)",
|
||||
"Bash(timeout 30 ./benchmark:*)",
|
||||
"Bash(timeout 15 ./benchmark:*)",
|
||||
"Bash(docker build:*)",
|
||||
"Bash(xargs:*)",
|
||||
"Bash(timeout 30 sh:*)",
|
||||
"Bash(timeout 60 go test:*)",
|
||||
"Bash(timeout 120 go test:*)",
|
||||
"Bash(timeout 180 ./scripts/test.sh:*)"
|
||||
],
|
||||
"deny": [],
|
||||
"ask": []
|
||||
|
||||
84
.gitea/README.md
Normal file
84
.gitea/README.md
Normal file
@@ -0,0 +1,84 @@
|
||||
# Gitea Actions Setup
|
||||
|
||||
This directory contains workflows for Gitea Actions, which is a self-hosted CI/CD system compatible with GitHub Actions syntax.
|
||||
|
||||
## Workflow: go.yml
|
||||
|
||||
The `go.yml` workflow handles building, testing, and releasing the ORLY relay when version tags are pushed.
|
||||
|
||||
### Features
|
||||
|
||||
- **No external dependencies**: Uses only inline shell commands (no actions from GitHub)
|
||||
- **Pure Go builds**: Uses CGO_ENABLED=0 with purego for secp256k1
|
||||
- **Automated releases**: Creates Gitea releases with binaries and checksums
|
||||
- **Tests included**: Runs the full test suite before building releases
|
||||
|
||||
### Prerequisites
|
||||
|
||||
1. **Gitea Token**: Add a secret named `GITEA_TOKEN` in your repository settings
|
||||
- Go to: Repository Settings → Secrets → Add Secret
|
||||
- Name: `GITEA_TOKEN`
|
||||
- Value: Your Gitea personal access token with `repo` and `write:packages` permissions
|
||||
|
||||
2. **Runner Configuration**: Ensure your Gitea Actions runner is properly configured
|
||||
- The runner should have access to pull Docker images
|
||||
- Ubuntu-latest image should be available
|
||||
|
||||
### Usage
|
||||
|
||||
To create a new release:
|
||||
|
||||
```bash
|
||||
# 1. Update version in pkg/version/version file
|
||||
echo "v0.29.4" > pkg/version/version
|
||||
|
||||
# 2. Commit the version change
|
||||
git add pkg/version/version
|
||||
git commit -m "bump to v0.29.4"
|
||||
|
||||
# 3. Create and push the tag
|
||||
git tag v0.29.4
|
||||
git push origin v0.29.4
|
||||
|
||||
# 4. The workflow will automatically:
|
||||
# - Build the binary
|
||||
# - Run tests
|
||||
# - Create a release on your Gitea instance
|
||||
# - Upload the binary and checksums
|
||||
```
|
||||
|
||||
### Environment Variables
|
||||
|
||||
The workflow uses standard Gitea Actions environment variables:
|
||||
|
||||
- `GITHUB_WORKSPACE`: Working directory for the job
|
||||
- `GITHUB_REF_NAME`: Tag name (e.g., v1.2.3)
|
||||
- `GITHUB_REPOSITORY`: Repository in format `owner/repo`
|
||||
- `GITHUB_SERVER_URL`: Your Gitea instance URL (e.g., https://git.nostrdev.com)
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
**Issue**: Workflow fails to clone repository
|
||||
- **Solution**: Check that the repository is accessible without authentication, or configure runner credentials
|
||||
|
||||
**Issue**: Cannot create release
|
||||
- **Solution**: Verify `GITEA_TOKEN` secret is set correctly with appropriate permissions
|
||||
|
||||
**Issue**: Go version not found
|
||||
- **Solution**: The workflow downloads Go 1.25.0 directly from go.dev, ensure the runner has internet access
|
||||
|
||||
### Customization
|
||||
|
||||
To modify the workflow:
|
||||
|
||||
1. Edit `.gitea/workflows/go.yml`
|
||||
2. Test changes by pushing a tag (or use `act` locally for testing)
|
||||
3. Monitor the Actions tab in your Gitea repository for results
|
||||
|
||||
## Differences from GitHub Actions
|
||||
|
||||
- **Action dependencies**: This workflow doesn't use external actions (like `actions/checkout@v4`) to avoid GitHub dependency
|
||||
- **Release creation**: Uses `tea` CLI instead of GitHub's release action
|
||||
- **Inline commands**: All setup and build steps are done with shell scripts
|
||||
|
||||
This makes the workflow completely self-contained and independent of external services.
|
||||
125
.gitea/workflows/go.yml
Normal file
125
.gitea/workflows/go.yml
Normal file
@@ -0,0 +1,125 @@
|
||||
# This workflow will build a golang project for Gitea Actions
|
||||
# Using inline commands to avoid external action dependencies
|
||||
#
|
||||
# NOTE: All builds use CGO_ENABLED=0 since p8k library uses purego (not CGO)
|
||||
# The library dynamically loads libsecp256k1 at runtime via purego
|
||||
#
|
||||
# Release Process:
|
||||
# 1. Update the version in the pkg/version/version file (e.g. v1.2.3)
|
||||
# 2. Create and push a tag matching the version:
|
||||
# git tag v1.2.3
|
||||
# git push origin v1.2.3
|
||||
# 3. The workflow will automatically:
|
||||
# - Build binaries for Linux AMD64
|
||||
# - Run tests
|
||||
# - Create a Gitea release with the binaries
|
||||
# - Generate checksums
|
||||
|
||||
name: Go
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v[0-9]+.[0-9]+.[0-9]+"
|
||||
|
||||
jobs:
|
||||
build-and-release:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
run: |
|
||||
echo "Cloning repository..."
|
||||
git clone --depth 1 --branch ${GITHUB_REF_NAME} ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git ${GITHUB_WORKSPACE}
|
||||
cd ${GITHUB_WORKSPACE}
|
||||
git log -1
|
||||
|
||||
- name: Set up Go
|
||||
run: |
|
||||
echo "Setting up Go 1.25.0..."
|
||||
cd /tmp
|
||||
wget -q https://go.dev/dl/go1.25.0.linux-amd64.tar.gz
|
||||
sudo rm -rf /usr/local/go
|
||||
sudo tar -C /usr/local -xzf go1.25.0.linux-amd64.tar.gz
|
||||
export PATH=/usr/local/go/bin:$PATH
|
||||
go version
|
||||
|
||||
- name: Build (Pure Go + purego)
|
||||
run: |
|
||||
export PATH=/usr/local/go/bin:$PATH
|
||||
cd ${GITHUB_WORKSPACE}
|
||||
echo "Building with CGO_ENABLED=0..."
|
||||
CGO_ENABLED=0 go build -v ./...
|
||||
|
||||
- name: Test (Pure Go + purego)
|
||||
run: |
|
||||
export PATH=/usr/local/go/bin:$PATH
|
||||
cd ${GITHUB_WORKSPACE}
|
||||
echo "Running tests..."
|
||||
# Copy the libsecp256k1.so to root directory so tests can find it
|
||||
cp pkg/crypto/p8k/libsecp256k1.so .
|
||||
CGO_ENABLED=0 go test -v $(go list ./... | grep -v '/cmd/benchmark/external/' | xargs -n1 sh -c 'ls $0/*_test.go 1>/dev/null 2>&1 && echo $0' | grep .) || true
|
||||
|
||||
- name: Build Release Binaries (Pure Go + purego)
|
||||
run: |
|
||||
export PATH=/usr/local/go/bin:$PATH
|
||||
cd ${GITHUB_WORKSPACE}
|
||||
|
||||
# Extract version from tag (e.g., v1.2.3 -> 1.2.3)
|
||||
VERSION=${GITHUB_REF_NAME#v}
|
||||
echo "Building release binaries for version $VERSION (pure Go + purego)"
|
||||
|
||||
# Create directory for binaries
|
||||
mkdir -p release-binaries
|
||||
|
||||
# Copy the pre-compiled libsecp256k1.so for Linux AMD64
|
||||
cp pkg/crypto/p8k/libsecp256k1.so release-binaries/libsecp256k1-linux-amd64.so
|
||||
|
||||
# Build for Linux AMD64 (pure Go + purego dynamic loading)
|
||||
echo "Building Linux AMD64 (pure Go + purego dynamic loading)..."
|
||||
GOEXPERIMENT=greenteagc,jsonv2 GOOS=linux GOARCH=amd64 CGO_ENABLED=0 \
|
||||
go build -ldflags "-s -w" -o release-binaries/orly-${VERSION}-linux-amd64 .
|
||||
|
||||
# Create checksums
|
||||
cd release-binaries
|
||||
sha256sum * > SHA256SUMS.txt
|
||||
cat SHA256SUMS.txt
|
||||
cd ..
|
||||
|
||||
echo "Release binaries built successfully:"
|
||||
ls -lh release-binaries/
|
||||
|
||||
- name: Create Gitea Release
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
|
||||
run: |
|
||||
export PATH=/usr/local/go/bin:$PATH
|
||||
cd ${GITHUB_WORKSPACE}
|
||||
|
||||
VERSION=${GITHUB_REF_NAME}
|
||||
REPO_OWNER=$(echo ${GITHUB_REPOSITORY} | cut -d'/' -f1)
|
||||
REPO_NAME=$(echo ${GITHUB_REPOSITORY} | cut -d'/' -f2)
|
||||
|
||||
echo "Creating release for ${REPO_OWNER}/${REPO_NAME} version ${VERSION}"
|
||||
|
||||
# Install tea CLI for Gitea
|
||||
cd /tmp
|
||||
wget -q https://dl.gitea.com/tea/0.9.2/tea-0.9.2-linux-amd64 -O tea
|
||||
chmod +x tea
|
||||
|
||||
# Configure tea with the repository's Gitea instance
|
||||
./tea login add \
|
||||
--name runner \
|
||||
--url ${GITHUB_SERVER_URL} \
|
||||
--token "${GITEA_TOKEN}" || echo "Login may already exist"
|
||||
|
||||
# Create release with assets
|
||||
cd ${GITHUB_WORKSPACE}
|
||||
/tmp/tea release create \
|
||||
--repo ${REPO_OWNER}/${REPO_NAME} \
|
||||
--tag ${VERSION} \
|
||||
--title "Release ${VERSION}" \
|
||||
--note "Automated release ${VERSION}" \
|
||||
--asset release-binaries/orly-${VERSION#v}-linux-amd64 \
|
||||
--asset release-binaries/libsecp256k1-linux-amd64.so \
|
||||
--asset release-binaries/SHA256SUMS.txt \
|
||||
|| echo "Release may already exist, updating..."
|
||||
88
.github/workflows/go.yml
vendored
88
.github/workflows/go.yml
vendored
@@ -1,88 +0,0 @@
|
||||
# This workflow will build a golang project
|
||||
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-go
|
||||
#
|
||||
# NOTE: All builds use CGO_ENABLED=0 since p8k library uses purego (not CGO)
|
||||
# The library dynamically loads libsecp256k1 at runtime via purego
|
||||
#
|
||||
# Release Process:
|
||||
# 1. Update the version in the pkg/version/version file (e.g. v1.2.3)
|
||||
# 2. Create and push a tag matching the version:
|
||||
# git tag v1.2.3
|
||||
# git push origin v1.2.3
|
||||
# 3. The workflow will automatically:
|
||||
# - Build binaries for multiple platforms (Linux, macOS, Windows)
|
||||
# - Create a GitHub release with the binaries
|
||||
# - Generate release notes
|
||||
|
||||
name: Go
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v[0-9]+.[0-9]+.[0-9]+"
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: "1.25"
|
||||
|
||||
- name: Build (Pure Go + purego)
|
||||
run: CGO_ENABLED=0 go build -v ./...
|
||||
|
||||
- name: Test (Pure Go + purego)
|
||||
run: |
|
||||
# Copy the libsecp256k1.so to root directory so tests can find it
|
||||
cp pkg/crypto/p8k/libsecp256k1.so .
|
||||
CGO_ENABLED=0 go test -v $(go list ./... | xargs -n1 sh -c 'ls $0/*_test.go 1>/dev/null 2>&1 && echo $0' | grep .)
|
||||
release:
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: '1.25'
|
||||
|
||||
- name: Build Release Binaries (Pure Go + purego)
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
run: |
|
||||
# Extract version from tag (e.g., v1.2.3 -> 1.2.3)
|
||||
VERSION=${GITHUB_REF#refs/tags/v}
|
||||
echo "Building release binaries for version $VERSION (pure Go + purego)"
|
||||
|
||||
# Create directory for binaries
|
||||
mkdir -p release-binaries
|
||||
|
||||
# Copy the pre-compiled libsecp256k1.so for Linux AMD64
|
||||
cp pkg/crypto/p8k/libsecp256k1.so release-binaries/libsecp256k1-linux-amd64.so
|
||||
|
||||
# Build for Linux AMD64 (pure Go + purego dynamic loading)
|
||||
echo "Building Linux AMD64 (pure Go + purego dynamic loading)..."
|
||||
GOEXPERIMENT=greenteagc,jsonv2 GOOS=linux GOARCH=amd64 CGO_ENABLED=0 \
|
||||
go build -ldflags "-s -w" -o release-binaries/orly-${VERSION}-linux-amd64 .
|
||||
|
||||
# Create checksums
|
||||
cd release-binaries
|
||||
sha256sum * > SHA256SUMS.txt
|
||||
cd ..
|
||||
|
||||
- name: Create GitHub Release
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
files: release-binaries/*
|
||||
draft: false
|
||||
prerelease: false
|
||||
generate_release_notes: true
|
||||
16
.gitignore
vendored
16
.gitignore
vendored
@@ -136,3 +136,19 @@ build/orly-*
|
||||
build/libsecp256k1-*
|
||||
build/SHA256SUMS-*
|
||||
Dockerfile
|
||||
/cmd/benchmark/reports/run_20251116_172629/aggregate_report.txt
|
||||
/cmd/benchmark/reports/run_20251116_172629/next-orly_results.txt
|
||||
/cmd/benchmark/reports/run_20251116_173450/aggregate_report.txt
|
||||
/cmd/benchmark/reports/run_20251116_173450/next-orly_results.txt
|
||||
/cmd/benchmark/reports/run_20251116_173846/aggregate_report.txt
|
||||
/cmd/benchmark/reports/run_20251116_173846/next-orly_results.txt
|
||||
/cmd/benchmark/reports/run_20251116_174246/aggregate_report.txt
|
||||
/cmd/benchmark/reports/run_20251116_174246/next-orly_results.txt
|
||||
/cmd/benchmark/reports/run_20251116_182250/aggregate_report.txt
|
||||
/cmd/benchmark/reports/run_20251116_182250/next-orly_results.txt
|
||||
/cmd/benchmark/reports/run_20251116_203720/aggregate_report.txt
|
||||
/cmd/benchmark/reports/run_20251116_203720/next-orly_results.txt
|
||||
/cmd/benchmark/reports/run_20251116_225648/aggregate_report.txt
|
||||
/cmd/benchmark/reports/run_20251116_225648/next-orly_results.txt
|
||||
/cmd/benchmark/reports/run_20251116_233547/aggregate_report.txt
|
||||
/cmd/benchmark/reports/run_20251116_233547/next-orly_results.txt
|
||||
|
||||
84
CLAUDE.md
84
CLAUDE.md
@@ -8,11 +8,11 @@ ORLY is a high-performance Nostr relay written in Go, designed for personal rela
|
||||
|
||||
**Key Technologies:**
|
||||
- **Language**: Go 1.25.3+
|
||||
- **Database**: Badger v4 (embedded key-value store)
|
||||
- **Database**: Badger v4 (embedded key-value store) or DGraph (distributed graph database)
|
||||
- **Cryptography**: Custom p8k library using purego for secp256k1 operations (no CGO)
|
||||
- **Web UI**: Svelte frontend embedded in the binary
|
||||
- **WebSocket**: gorilla/websocket for Nostr protocol
|
||||
- **Performance**: SIMD-accelerated SHA256 and hex encoding
|
||||
- **Performance**: SIMD-accelerated SHA256 and hex encoding, query result caching with zstd compression
|
||||
|
||||
## Build Commands
|
||||
|
||||
@@ -41,8 +41,8 @@ go build -o orly
|
||||
### Development Mode (Web UI Hot Reload)
|
||||
```bash
|
||||
# Terminal 1: Start relay with dev proxy
|
||||
export ORLY_WEB_DISABLE_EMBEDDED=true
|
||||
export ORLY_WEB_DEV_PROXY_URL=localhost:5000
|
||||
export ORLY_WEB_DISABLE=true
|
||||
export ORLY_WEB_DEV_PROXY_URL=http://localhost:5173
|
||||
./orly &
|
||||
|
||||
# Terminal 2: Start dev server
|
||||
@@ -89,11 +89,18 @@ go run cmd/relay-tester/main.go -url ws://localhost:3334 -test "Basic Event"
|
||||
|
||||
### Benchmarking
|
||||
```bash
|
||||
# Run benchmarks in specific package
|
||||
# Run Go benchmarks in specific package
|
||||
go test -bench=. -benchmem ./pkg/database
|
||||
|
||||
# Crypto benchmarks
|
||||
cd pkg/crypto/p8k && make bench
|
||||
|
||||
# Run full relay benchmark suite
|
||||
cd cmd/benchmark
|
||||
go run main.go -data-dir /tmp/bench-db -events 10000 -workers 4
|
||||
|
||||
# Benchmark reports are saved to cmd/benchmark/reports/
|
||||
# The benchmark tool tests event storage, queries, and subscription performance
|
||||
```
|
||||
|
||||
## Running the Relay
|
||||
@@ -131,6 +138,18 @@ export ORLY_SPROCKET_ENABLED=true
|
||||
|
||||
# Enable policy system
|
||||
export ORLY_POLICY_ENABLED=true
|
||||
|
||||
# Database backend selection (badger or dgraph)
|
||||
export ORLY_DB_TYPE=badger
|
||||
export ORLY_DGRAPH_URL=localhost:9080 # Only for dgraph backend
|
||||
|
||||
# Query cache configuration (improves REQ response times)
|
||||
export ORLY_QUERY_CACHE_SIZE_MB=512 # Default: 512MB
|
||||
export ORLY_QUERY_CACHE_MAX_AGE=5m # Cache expiry time
|
||||
|
||||
# Database cache tuning (for Badger backend)
|
||||
export ORLY_DB_BLOCK_CACHE_MB=512 # Block cache size
|
||||
export ORLY_DB_INDEX_CACHE_MB=256 # Index cache size
|
||||
```
|
||||
|
||||
## Code Architecture
|
||||
@@ -155,10 +174,12 @@ export ORLY_POLICY_ENABLED=true
|
||||
- `web.go` - Embedded web UI serving and dev proxy
|
||||
- `config/` - Environment variable configuration using go-simpler.org/env
|
||||
|
||||
**`pkg/database/`** - Badger-based event storage
|
||||
- `database.go` - Database initialization with cache tuning
|
||||
**`pkg/database/`** - Database abstraction layer with multiple backend support
|
||||
- `interface.go` - Database interface definition for pluggable backends
|
||||
- `factory.go` - Database backend selection (Badger or DGraph)
|
||||
- `database.go` - Badger implementation with cache tuning and query cache
|
||||
- `save-event.go` - Event storage with index updates
|
||||
- `query-events.go` - Main query execution engine
|
||||
- `query-events.go` - Main query execution engine with filter normalization
|
||||
- `query-for-*.go` - Specialized query builders for different filter patterns
|
||||
- `indexes/` - Index key construction for efficient lookups
|
||||
- `export.go` / `import.go` - Event export/import in JSONL format
|
||||
@@ -238,10 +259,19 @@ export ORLY_POLICY_ENABLED=true
|
||||
- This avoids CGO complexity while maintaining C library performance
|
||||
- `libsecp256k1.so` must be in `LD_LIBRARY_PATH` or same directory as binary
|
||||
|
||||
**Database Backend Selection:**
|
||||
- Supports multiple backends via `ORLY_DB_TYPE` environment variable
|
||||
- **Badger** (default): Embedded key-value store with custom indexing, ideal for single-instance deployments
|
||||
- **DGraph**: Distributed graph database for larger, multi-node deployments
|
||||
- Backend selected via factory pattern in `pkg/database/factory.go`
|
||||
- All backends implement the same `Database` interface defined in `pkg/database/interface.go`
|
||||
|
||||
**Database Query Pattern:**
|
||||
- Filters are analyzed in `get-indexes-from-filter.go` to determine optimal query strategy
|
||||
- Filters are normalized before cache lookup, ensuring identical queries with different field ordering hit the cache
|
||||
- Different query builders (`query-for-kinds.go`, `query-for-authors.go`, etc.) handle specific filter patterns
|
||||
- All queries return event serials (uint64) for efficient joining
|
||||
- Query results cached with zstd level 9 compression (configurable size and TTL)
|
||||
- Final events fetched via `fetch-events-by-serials.go`
|
||||
|
||||
**WebSocket Message Flow:**
|
||||
@@ -272,7 +302,7 @@ export ORLY_POLICY_ENABLED=true
|
||||
|
||||
### Making Changes to Web UI
|
||||
1. Edit files in `app/web/src/`
|
||||
2. For hot reload: `cd app/web && bun run dev` (with `ORLY_WEB_DISABLE_EMBEDDED=true`)
|
||||
2. For hot reload: `cd app/web && bun run dev` (with `ORLY_WEB_DISABLE=true` and `ORLY_WEB_DEV_PROXY_URL=http://localhost:5173`)
|
||||
3. For production build: `./scripts/update-embedded-web.sh`
|
||||
|
||||
### Adding New Nostr Protocol Handlers
|
||||
@@ -377,12 +407,42 @@ sudo journalctl -u orly -f
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
- **Database Caching**: Tune `ORLY_DB_BLOCK_CACHE_MB` and `ORLY_DB_INDEX_CACHE_MB` for workload
|
||||
- **Query Optimization**: Add indexes for common filter patterns
|
||||
- **Query Cache**: 512MB query result cache (configurable via `ORLY_QUERY_CACHE_SIZE_MB`) with zstd level 9 compression reduces database load for repeated queries
|
||||
- **Filter Normalization**: Filters are normalized before cache lookup, so identical queries with different field ordering produce cache hits
|
||||
- **Database Caching**: Tune `ORLY_DB_BLOCK_CACHE_MB` and `ORLY_DB_INDEX_CACHE_MB` for workload (Badger backend only)
|
||||
- **Query Optimization**: Add indexes for common filter patterns; multiple specialized query builders optimize different filter combinations
|
||||
- **Batch Operations**: ID lookups and event fetching use batch operations via `GetSerialsByIds` and `FetchEventsBySerials`
|
||||
- **Memory Pooling**: Use buffer pools in encoders (see `pkg/encoders/event/`)
|
||||
- **SIMD Operations**: Leverage minio/sha256-simd and templexxx/xhex
|
||||
- **SIMD Operations**: Leverage minio/sha256-simd and templexxx/xhex for cryptographic operations
|
||||
- **Goroutine Management**: Each WebSocket connection runs in its own goroutine
|
||||
|
||||
## Recent Optimizations
|
||||
|
||||
ORLY has received several significant performance improvements in recent updates:
|
||||
|
||||
### Query Cache System (Latest)
|
||||
- 512MB query result cache with zstd level 9 compression
|
||||
- Filter normalization ensures cache hits regardless of filter field ordering
|
||||
- Configurable size (`ORLY_QUERY_CACHE_SIZE_MB`) and TTL (`ORLY_QUERY_CACHE_MAX_AGE`)
|
||||
- Dramatically reduces database load for repeated queries (common in Nostr clients)
|
||||
- Cache key includes normalized filter representation for optimal hit rate
|
||||
|
||||
### Badger Cache Tuning
|
||||
- Optimized block cache (default 512MB, tune via `ORLY_DB_BLOCK_CACHE_MB`)
|
||||
- Optimized index cache (default 256MB, tune via `ORLY_DB_INDEX_CACHE_MB`)
|
||||
- Resulted in 10-15% improvement in most benchmark scenarios
|
||||
- See git history for cache tuning evolution
|
||||
|
||||
### Query Execution Improvements
|
||||
- Multiple specialized query builders for different filter patterns:
|
||||
- `query-for-kinds.go` - Kind-based queries
|
||||
- `query-for-authors.go` - Author-based queries
|
||||
- `query-for-tags.go` - Tag-based queries
|
||||
- Combination builders for `kinds+authors`, `kinds+tags`, `kinds+authors+tags`
|
||||
- Batch operations for ID lookups via `GetSerialsByIds`
|
||||
- Serial-based event fetching for efficiency
|
||||
- Filter analysis in `get-indexes-from-filter.go` selects optimal strategy
|
||||
|
||||
## Release Process
|
||||
|
||||
1. Update version in `pkg/version/version` file (e.g., v1.2.3)
|
||||
|
||||
@@ -253,6 +253,12 @@ func (l *Listener) HandleEvent(msg []byte) (err error) {
|
||||
).Write(l); chk.E(err) {
|
||||
return
|
||||
}
|
||||
// Send AUTH challenge to prompt authentication
|
||||
log.D.F("HandleEvent: sending AUTH challenge to %s", l.remote)
|
||||
if err = authenvelope.NewChallengeWith(l.challenge.Load()).
|
||||
Write(l); chk.E(err) {
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
@@ -7,9 +7,11 @@ import (
|
||||
"time"
|
||||
|
||||
"next.orly.dev/app/config"
|
||||
"next.orly.dev/pkg/acl"
|
||||
"next.orly.dev/pkg/crypto/keys"
|
||||
"next.orly.dev/pkg/database"
|
||||
"next.orly.dev/pkg/encoders/event"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
"next.orly.dev/pkg/encoders/tag"
|
||||
"next.orly.dev/pkg/interfaces/signer/p8k"
|
||||
"next.orly.dev/pkg/protocol/nip43"
|
||||
@@ -38,24 +40,47 @@ func setupTestListener(t *testing.T) (*Listener, *database.D, func()) {
|
||||
RelayURL: "wss://test.relay",
|
||||
Listen: "localhost",
|
||||
Port: 3334,
|
||||
ACLMode: "none",
|
||||
}
|
||||
|
||||
server := &Server{
|
||||
Ctx: ctx,
|
||||
Config: cfg,
|
||||
D: db,
|
||||
DB: db,
|
||||
publishers: publish.New(NewPublisher(ctx)),
|
||||
InviteManager: nip43.NewInviteManager(cfg.NIP43InviteExpiry),
|
||||
cfg: cfg,
|
||||
db: db,
|
||||
}
|
||||
|
||||
listener := &Listener{
|
||||
Server: server,
|
||||
ctx: ctx,
|
||||
// Configure ACL registry
|
||||
acl.Registry.Active.Store(cfg.ACLMode)
|
||||
if err = acl.Registry.Configure(cfg, db, ctx); err != nil {
|
||||
db.Close()
|
||||
os.RemoveAll(tempDir)
|
||||
t.Fatalf("failed to configure ACL: %v", err)
|
||||
}
|
||||
|
||||
listener := &Listener{
|
||||
Server: server,
|
||||
ctx: ctx,
|
||||
writeChan: make(chan publish.WriteRequest, 100),
|
||||
writeDone: make(chan struct{}),
|
||||
messageQueue: make(chan messageRequest, 100),
|
||||
processingDone: make(chan struct{}),
|
||||
subscriptions: make(map[string]context.CancelFunc),
|
||||
}
|
||||
|
||||
// Start write worker and message processor
|
||||
go listener.writeWorker()
|
||||
go listener.messageProcessor()
|
||||
|
||||
cleanup := func() {
|
||||
// Close listener channels
|
||||
close(listener.writeChan)
|
||||
<-listener.writeDone
|
||||
close(listener.messageQueue)
|
||||
<-listener.processingDone
|
||||
db.Close()
|
||||
os.RemoveAll(tempDir)
|
||||
}
|
||||
@@ -350,8 +375,13 @@ func TestHandleNIP43InviteRequest_ValidRequest(t *testing.T) {
|
||||
}
|
||||
adminPubkey := adminSigner.Pub()
|
||||
|
||||
// Add admin to server (simulating admin config)
|
||||
listener.Server.Admins = [][]byte{adminPubkey}
|
||||
// Add admin to config and reconfigure ACL
|
||||
adminHex := hex.Enc(adminPubkey)
|
||||
listener.Server.Config.Admins = []string{adminHex}
|
||||
acl.Registry.Active.Store("none")
|
||||
if err = acl.Registry.Configure(listener.Server.Config, listener.Server.DB, listener.ctx); err != nil {
|
||||
t.Fatalf("failed to reconfigure ACL: %v", err)
|
||||
}
|
||||
|
||||
// Handle invite request
|
||||
inviteEvent, err := listener.Server.HandleNIP43InviteRequest(adminPubkey)
|
||||
|
||||
@@ -35,7 +35,7 @@ func TestHandleNIP86Management_Basic(t *testing.T) {
|
||||
// Setup server
|
||||
server := &Server{
|
||||
Config: cfg,
|
||||
D: db,
|
||||
DB: db,
|
||||
Admins: [][]byte{[]byte("admin1")},
|
||||
Owners: [][]byte{[]byte("owner1")},
|
||||
}
|
||||
|
||||
@@ -118,7 +118,8 @@ whitelist:
|
||||
chal := make([]byte, 32)
|
||||
rand.Read(chal)
|
||||
listener.challenge.Store([]byte(hex.Enc(chal)))
|
||||
if s.Config.ACLMode != "none" {
|
||||
// Send AUTH challenge if ACL mode requires it, or if auth is required/required for writes
|
||||
if s.Config.ACLMode != "none" || s.Config.AuthRequired || s.Config.AuthToWrite {
|
||||
log.D.F("sending AUTH challenge to %s", remote)
|
||||
if err = authenvelope.NewChallengeWith(listener.challenge.Load()).
|
||||
Write(listener); chk.E(err) {
|
||||
|
||||
@@ -161,6 +161,12 @@ func (l *Listener) writeWorker() {
|
||||
return
|
||||
}
|
||||
|
||||
// Skip writes if no connection (unit tests)
|
||||
if l.conn == nil {
|
||||
log.T.F("ws->%s skipping write (no connection)", l.remote)
|
||||
continue
|
||||
}
|
||||
|
||||
// Handle the write request
|
||||
var err error
|
||||
if req.IsPing {
|
||||
|
||||
127
app/main.go
127
app/main.go
@@ -85,9 +85,9 @@ func Run(
|
||||
// Initialize policy manager
|
||||
l.policyManager = policy.NewWithManager(ctx, cfg.AppName, cfg.PolicyEnabled)
|
||||
|
||||
// Initialize spider manager based on mode
|
||||
if cfg.SpiderMode != "none" {
|
||||
if l.spiderManager, err = spider.New(ctx, db.(*database.D), l.publishers, cfg.SpiderMode); chk.E(err) {
|
||||
// Initialize spider manager based on mode (only for Badger backend)
|
||||
if badgerDB, ok := db.(*database.D); ok && cfg.SpiderMode != "none" {
|
||||
if l.spiderManager, err = spider.New(ctx, badgerDB, l.publishers, cfg.SpiderMode); chk.E(err) {
|
||||
log.E.F("failed to create spider manager: %v", err)
|
||||
} else {
|
||||
// Set up callbacks for follows mode
|
||||
@@ -141,67 +141,79 @@ func Run(
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize relay group manager
|
||||
l.relayGroupMgr = dsync.NewRelayGroupManager(db.(*database.D), cfg.RelayGroupAdmins)
|
||||
|
||||
// Initialize sync manager if relay peers are configured
|
||||
var peers []string
|
||||
if len(cfg.RelayPeers) > 0 {
|
||||
peers = cfg.RelayPeers
|
||||
} else {
|
||||
// Try to get peers from relay group configuration
|
||||
if config, err := l.relayGroupMgr.FindAuthoritativeConfig(ctx); err == nil && config != nil {
|
||||
peers = config.Relays
|
||||
log.I.F("using relay group configuration with %d peers", len(peers))
|
||||
}
|
||||
// Initialize relay group manager (only for Badger backend)
|
||||
if badgerDB, ok := db.(*database.D); ok {
|
||||
l.relayGroupMgr = dsync.NewRelayGroupManager(badgerDB, cfg.RelayGroupAdmins)
|
||||
} else if cfg.SpiderMode != "none" || len(cfg.RelayPeers) > 0 || len(cfg.ClusterAdmins) > 0 {
|
||||
log.I.Ln("spider, sync, and cluster features require Badger backend (currently using alternative backend)")
|
||||
}
|
||||
|
||||
if len(peers) > 0 {
|
||||
// Get relay identity for node ID
|
||||
sk, err := db.GetOrCreateRelayIdentitySecret()
|
||||
if err != nil {
|
||||
log.E.F("failed to get relay identity for sync: %v", err)
|
||||
// Initialize sync manager if relay peers are configured (only for Badger backend)
|
||||
if badgerDB, ok := db.(*database.D); ok {
|
||||
var peers []string
|
||||
if len(cfg.RelayPeers) > 0 {
|
||||
peers = cfg.RelayPeers
|
||||
} else {
|
||||
nodeID, err := keys.SecretBytesToPubKeyHex(sk)
|
||||
if err != nil {
|
||||
log.E.F("failed to derive pubkey for sync node ID: %v", err)
|
||||
} else {
|
||||
relayURL := cfg.RelayURL
|
||||
if relayURL == "" {
|
||||
relayURL = fmt.Sprintf("http://localhost:%d", cfg.Port)
|
||||
// Try to get peers from relay group configuration
|
||||
if l.relayGroupMgr != nil {
|
||||
if config, err := l.relayGroupMgr.FindAuthoritativeConfig(ctx); err == nil && config != nil {
|
||||
peers = config.Relays
|
||||
log.I.F("using relay group configuration with %d peers", len(peers))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(peers) > 0 {
|
||||
// Get relay identity for node ID
|
||||
sk, err := db.GetOrCreateRelayIdentitySecret()
|
||||
if err != nil {
|
||||
log.E.F("failed to get relay identity for sync: %v", err)
|
||||
} else {
|
||||
nodeID, err := keys.SecretBytesToPubKeyHex(sk)
|
||||
if err != nil {
|
||||
log.E.F("failed to derive pubkey for sync node ID: %v", err)
|
||||
} else {
|
||||
relayURL := cfg.RelayURL
|
||||
if relayURL == "" {
|
||||
relayURL = fmt.Sprintf("http://localhost:%d", cfg.Port)
|
||||
}
|
||||
l.syncManager = dsync.NewManager(ctx, badgerDB, nodeID, relayURL, peers, l.relayGroupMgr, l.policyManager)
|
||||
log.I.F("distributed sync manager initialized with %d peers", len(peers))
|
||||
}
|
||||
l.syncManager = dsync.NewManager(ctx, db.(*database.D), nodeID, relayURL, peers, l.relayGroupMgr, l.policyManager)
|
||||
log.I.F("distributed sync manager initialized with %d peers", len(peers))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize cluster manager for cluster replication
|
||||
var clusterAdminNpubs []string
|
||||
if len(cfg.ClusterAdmins) > 0 {
|
||||
clusterAdminNpubs = cfg.ClusterAdmins
|
||||
} else {
|
||||
// Default to regular admins if no cluster admins specified
|
||||
for _, admin := range cfg.Admins {
|
||||
clusterAdminNpubs = append(clusterAdminNpubs, admin)
|
||||
// Initialize cluster manager for cluster replication (only for Badger backend)
|
||||
if badgerDB, ok := db.(*database.D); ok {
|
||||
var clusterAdminNpubs []string
|
||||
if len(cfg.ClusterAdmins) > 0 {
|
||||
clusterAdminNpubs = cfg.ClusterAdmins
|
||||
} else {
|
||||
// Default to regular admins if no cluster admins specified
|
||||
for _, admin := range cfg.Admins {
|
||||
clusterAdminNpubs = append(clusterAdminNpubs, admin)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(clusterAdminNpubs) > 0 {
|
||||
l.clusterManager = dsync.NewClusterManager(ctx, db.(*database.D), clusterAdminNpubs, cfg.ClusterPropagatePrivilegedEvents, l.publishers)
|
||||
l.clusterManager.Start()
|
||||
log.I.F("cluster replication manager initialized with %d admin npubs", len(clusterAdminNpubs))
|
||||
if len(clusterAdminNpubs) > 0 {
|
||||
l.clusterManager = dsync.NewClusterManager(ctx, badgerDB, clusterAdminNpubs, cfg.ClusterPropagatePrivilegedEvents, l.publishers)
|
||||
l.clusterManager.Start()
|
||||
log.I.F("cluster replication manager initialized with %d admin npubs", len(clusterAdminNpubs))
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize the user interface
|
||||
l.UserInterface()
|
||||
|
||||
// Initialize Blossom blob storage server
|
||||
if l.blossomServer, err = initializeBlossomServer(ctx, cfg, db.(*database.D)); err != nil {
|
||||
log.E.F("failed to initialize blossom server: %v", err)
|
||||
// Continue without blossom server
|
||||
} else if l.blossomServer != nil {
|
||||
log.I.F("blossom blob storage server initialized")
|
||||
// Initialize Blossom blob storage server (only for Badger backend)
|
||||
if badgerDB, ok := db.(*database.D); ok {
|
||||
if l.blossomServer, err = initializeBlossomServer(ctx, cfg, badgerDB); err != nil {
|
||||
log.E.F("failed to initialize blossom server: %v", err)
|
||||
// Continue without blossom server
|
||||
} else if l.blossomServer != nil {
|
||||
log.I.F("blossom blob storage server initialized")
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure a relay identity secret key exists when subscriptions and NWC are enabled
|
||||
@@ -237,14 +249,17 @@ func Run(
|
||||
}
|
||||
}
|
||||
|
||||
if l.paymentProcessor, err = NewPaymentProcessor(ctx, cfg, db.(*database.D)); err != nil {
|
||||
// log.E.F("failed to create payment processor: %v", err)
|
||||
// Continue without payment processor
|
||||
} else {
|
||||
if err = l.paymentProcessor.Start(); err != nil {
|
||||
log.E.F("failed to start payment processor: %v", err)
|
||||
// Initialize payment processor (only for Badger backend)
|
||||
if badgerDB, ok := db.(*database.D); ok {
|
||||
if l.paymentProcessor, err = NewPaymentProcessor(ctx, cfg, badgerDB); err != nil {
|
||||
// log.E.F("failed to create payment processor: %v", err)
|
||||
// Continue without payment processor
|
||||
} else {
|
||||
log.I.F("payment processor started successfully")
|
||||
if err = l.paymentProcessor.Start(); err != nil {
|
||||
log.E.F("failed to start payment processor: %v", err)
|
||||
} else {
|
||||
log.I.F("payment processor started successfully")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -11,15 +11,44 @@ import (
|
||||
"time"
|
||||
|
||||
"next.orly.dev/app/config"
|
||||
"next.orly.dev/pkg/acl"
|
||||
"next.orly.dev/pkg/crypto/keys"
|
||||
"next.orly.dev/pkg/database"
|
||||
"next.orly.dev/pkg/encoders/event"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
"next.orly.dev/pkg/encoders/tag"
|
||||
"next.orly.dev/pkg/protocol/nip43"
|
||||
"next.orly.dev/pkg/protocol/publish"
|
||||
"next.orly.dev/pkg/protocol/relayinfo"
|
||||
)
|
||||
|
||||
// newTestListener creates a properly initialized Listener for testing
|
||||
func newTestListener(server *Server, ctx context.Context) *Listener {
|
||||
listener := &Listener{
|
||||
Server: server,
|
||||
ctx: ctx,
|
||||
writeChan: make(chan publish.WriteRequest, 100),
|
||||
writeDone: make(chan struct{}),
|
||||
messageQueue: make(chan messageRequest, 100),
|
||||
processingDone: make(chan struct{}),
|
||||
subscriptions: make(map[string]context.CancelFunc),
|
||||
}
|
||||
|
||||
// Start write worker and message processor
|
||||
go listener.writeWorker()
|
||||
go listener.messageProcessor()
|
||||
|
||||
return listener
|
||||
}
|
||||
|
||||
// closeTestListener properly closes a test listener
|
||||
func closeTestListener(listener *Listener) {
|
||||
close(listener.writeChan)
|
||||
<-listener.writeDone
|
||||
close(listener.messageQueue)
|
||||
<-listener.processingDone
|
||||
}
|
||||
|
||||
// setupE2ETest creates a full test server for end-to-end testing
|
||||
func setupE2ETest(t *testing.T) (*Server, *httptest.Server, func()) {
|
||||
tempDir, err := os.MkdirTemp("", "nip43_e2e_test_*")
|
||||
@@ -61,16 +90,28 @@ func setupE2ETest(t *testing.T) (*Server, *httptest.Server, func()) {
|
||||
}
|
||||
adminPubkey := adminSigner.Pub()
|
||||
|
||||
// Add admin to config for ACL
|
||||
cfg.Admins = []string{hex.Enc(adminPubkey)}
|
||||
|
||||
server := &Server{
|
||||
Ctx: ctx,
|
||||
Config: cfg,
|
||||
D: db,
|
||||
DB: db,
|
||||
publishers: publish.New(NewPublisher(ctx)),
|
||||
Admins: [][]byte{adminPubkey},
|
||||
InviteManager: nip43.NewInviteManager(cfg.NIP43InviteExpiry),
|
||||
cfg: cfg,
|
||||
db: db,
|
||||
}
|
||||
|
||||
// Configure ACL registry
|
||||
acl.Registry.Active.Store(cfg.ACLMode)
|
||||
if err = acl.Registry.Configure(cfg, db, ctx); err != nil {
|
||||
db.Close()
|
||||
os.RemoveAll(tempDir)
|
||||
t.Fatalf("failed to configure ACL: %v", err)
|
||||
}
|
||||
|
||||
server.mux = http.NewServeMux()
|
||||
|
||||
// Set up HTTP handlers
|
||||
@@ -177,6 +218,7 @@ func TestE2E_CompleteJoinFlow(t *testing.T) {
|
||||
joinEv := event.New()
|
||||
joinEv.Kind = nip43.KindJoinRequest
|
||||
copy(joinEv.Pubkey, userPubkey)
|
||||
joinEv.Tags = tag.NewS()
|
||||
joinEv.Tags.Append(tag.NewFromAny("-"))
|
||||
joinEv.Tags.Append(tag.NewFromAny("claim", inviteCode))
|
||||
joinEv.CreatedAt = time.Now().Unix()
|
||||
@@ -186,17 +228,15 @@ func TestE2E_CompleteJoinFlow(t *testing.T) {
|
||||
}
|
||||
|
||||
// Step 3: Process join request
|
||||
listener := &Listener{
|
||||
Server: server,
|
||||
ctx: server.Ctx,
|
||||
}
|
||||
listener := newTestListener(server, server.Ctx)
|
||||
defer closeTestListener(listener)
|
||||
err = listener.HandleNIP43JoinRequest(joinEv)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to handle join request: %v", err)
|
||||
}
|
||||
|
||||
// Step 4: Verify membership
|
||||
isMember, err := server.D.IsNIP43Member(userPubkey)
|
||||
isMember, err := server.DB.IsNIP43Member(userPubkey)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to check membership: %v", err)
|
||||
}
|
||||
@@ -204,7 +244,7 @@ func TestE2E_CompleteJoinFlow(t *testing.T) {
|
||||
t.Error("user was not added as member")
|
||||
}
|
||||
|
||||
membership, err := server.D.GetNIP43Membership(userPubkey)
|
||||
membership, err := server.DB.GetNIP43Membership(userPubkey)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get membership: %v", err)
|
||||
}
|
||||
@@ -227,10 +267,8 @@ func TestE2E_InviteCodeReuse(t *testing.T) {
|
||||
t.Fatalf("failed to generate invite code: %v", err)
|
||||
}
|
||||
|
||||
listener := &Listener{
|
||||
Server: server,
|
||||
ctx: server.Ctx,
|
||||
}
|
||||
listener := newTestListener(server, server.Ctx)
|
||||
defer closeTestListener(listener)
|
||||
|
||||
// First user uses the code
|
||||
user1Secret, err := keys.GenerateSecretKey()
|
||||
@@ -249,6 +287,7 @@ func TestE2E_InviteCodeReuse(t *testing.T) {
|
||||
joinEv1 := event.New()
|
||||
joinEv1.Kind = nip43.KindJoinRequest
|
||||
copy(joinEv1.Pubkey, user1Pubkey)
|
||||
joinEv1.Tags = tag.NewS()
|
||||
joinEv1.Tags.Append(tag.NewFromAny("-"))
|
||||
joinEv1.Tags.Append(tag.NewFromAny("claim", code))
|
||||
joinEv1.CreatedAt = time.Now().Unix()
|
||||
@@ -263,7 +302,7 @@ func TestE2E_InviteCodeReuse(t *testing.T) {
|
||||
}
|
||||
|
||||
// Verify first user is member
|
||||
isMember, err := server.D.IsNIP43Member(user1Pubkey)
|
||||
isMember, err := server.DB.IsNIP43Member(user1Pubkey)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to check user1 membership: %v", err)
|
||||
}
|
||||
@@ -288,6 +327,7 @@ func TestE2E_InviteCodeReuse(t *testing.T) {
|
||||
joinEv2 := event.New()
|
||||
joinEv2.Kind = nip43.KindJoinRequest
|
||||
copy(joinEv2.Pubkey, user2Pubkey)
|
||||
joinEv2.Tags = tag.NewS()
|
||||
joinEv2.Tags.Append(tag.NewFromAny("-"))
|
||||
joinEv2.Tags.Append(tag.NewFromAny("claim", code))
|
||||
joinEv2.CreatedAt = time.Now().Unix()
|
||||
@@ -303,7 +343,7 @@ func TestE2E_InviteCodeReuse(t *testing.T) {
|
||||
}
|
||||
|
||||
// Verify second user is NOT member
|
||||
isMember, err = server.D.IsNIP43Member(user2Pubkey)
|
||||
isMember, err = server.DB.IsNIP43Member(user2Pubkey)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to check user2 membership: %v", err)
|
||||
}
|
||||
@@ -317,10 +357,8 @@ func TestE2E_MembershipListGeneration(t *testing.T) {
|
||||
server, _, cleanup := setupE2ETest(t)
|
||||
defer cleanup()
|
||||
|
||||
listener := &Listener{
|
||||
Server: server,
|
||||
ctx: server.Ctx,
|
||||
}
|
||||
listener := newTestListener(server, server.Ctx)
|
||||
defer closeTestListener(listener)
|
||||
|
||||
// Add multiple members
|
||||
memberCount := 5
|
||||
@@ -338,7 +376,7 @@ func TestE2E_MembershipListGeneration(t *testing.T) {
|
||||
members[i] = userPubkey
|
||||
|
||||
// Add directly to database for speed
|
||||
err = server.D.AddNIP43Member(userPubkey, "code")
|
||||
err = server.DB.AddNIP43Member(userPubkey, "code")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to add member %d: %v", i, err)
|
||||
}
|
||||
@@ -379,17 +417,15 @@ func TestE2E_ExpiredInviteCode(t *testing.T) {
|
||||
server := &Server{
|
||||
Ctx: ctx,
|
||||
Config: cfg,
|
||||
D: db,
|
||||
DB: db,
|
||||
publishers: publish.New(NewPublisher(ctx)),
|
||||
InviteManager: nip43.NewInviteManager(cfg.NIP43InviteExpiry),
|
||||
cfg: cfg,
|
||||
db: db,
|
||||
}
|
||||
|
||||
listener := &Listener{
|
||||
Server: server,
|
||||
ctx: ctx,
|
||||
}
|
||||
listener := newTestListener(server, ctx)
|
||||
defer closeTestListener(listener)
|
||||
|
||||
// Generate invite code
|
||||
code, err := server.InviteManager.GenerateCode()
|
||||
@@ -417,6 +453,7 @@ func TestE2E_ExpiredInviteCode(t *testing.T) {
|
||||
joinEv := event.New()
|
||||
joinEv.Kind = nip43.KindJoinRequest
|
||||
copy(joinEv.Pubkey, userPubkey)
|
||||
joinEv.Tags = tag.NewS()
|
||||
joinEv.Tags.Append(tag.NewFromAny("-"))
|
||||
joinEv.Tags.Append(tag.NewFromAny("claim", code))
|
||||
joinEv.CreatedAt = time.Now().Unix()
|
||||
@@ -445,10 +482,8 @@ func TestE2E_InvalidTimestampRejected(t *testing.T) {
|
||||
server, _, cleanup := setupE2ETest(t)
|
||||
defer cleanup()
|
||||
|
||||
listener := &Listener{
|
||||
Server: server,
|
||||
ctx: server.Ctx,
|
||||
}
|
||||
listener := newTestListener(server, server.Ctx)
|
||||
defer closeTestListener(listener)
|
||||
|
||||
// Generate invite code
|
||||
code, err := server.InviteManager.GenerateCode()
|
||||
@@ -474,6 +509,7 @@ func TestE2E_InvalidTimestampRejected(t *testing.T) {
|
||||
joinEv := event.New()
|
||||
joinEv.Kind = nip43.KindJoinRequest
|
||||
copy(joinEv.Pubkey, userPubkey)
|
||||
joinEv.Tags = tag.NewS()
|
||||
joinEv.Tags.Append(tag.NewFromAny("-"))
|
||||
joinEv.Tags.Append(tag.NewFromAny("claim", code))
|
||||
joinEv.CreatedAt = time.Now().Unix() - 700 // More than 10 minutes ago
|
||||
@@ -489,7 +525,7 @@ func TestE2E_InvalidTimestampRejected(t *testing.T) {
|
||||
}
|
||||
|
||||
// Verify user was NOT added
|
||||
isMember, err := server.D.IsNIP43Member(userPubkey)
|
||||
isMember, err := server.DB.IsNIP43Member(userPubkey)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to check membership: %v", err)
|
||||
}
|
||||
@@ -523,17 +559,15 @@ func BenchmarkJoinRequestProcessing(b *testing.B) {
|
||||
server := &Server{
|
||||
Ctx: ctx,
|
||||
Config: cfg,
|
||||
D: db,
|
||||
DB: db,
|
||||
publishers: publish.New(NewPublisher(ctx)),
|
||||
InviteManager: nip43.NewInviteManager(cfg.NIP43InviteExpiry),
|
||||
cfg: cfg,
|
||||
db: db,
|
||||
}
|
||||
|
||||
listener := &Listener{
|
||||
Server: server,
|
||||
ctx: ctx,
|
||||
}
|
||||
listener := newTestListener(server, ctx)
|
||||
defer closeTestListener(listener)
|
||||
|
||||
b.ResetTimer()
|
||||
|
||||
@@ -547,6 +581,7 @@ func BenchmarkJoinRequestProcessing(b *testing.B) {
|
||||
joinEv := event.New()
|
||||
joinEv.Kind = nip43.KindJoinRequest
|
||||
copy(joinEv.Pubkey, userPubkey)
|
||||
joinEv.Tags = tag.NewS()
|
||||
joinEv.Tags.Append(tag.NewFromAny("-"))
|
||||
joinEv.Tags.Append(tag.NewFromAny("claim", code))
|
||||
joinEv.CreatedAt = time.Now().Unix()
|
||||
|
||||
@@ -199,7 +199,7 @@ func TestLongRunningSubscriptionStability(t *testing.T) {
|
||||
ev := createSignedTestEvent(t, 1, fmt.Sprintf("Test event %d for long-running subscription", i))
|
||||
|
||||
// Save event to database
|
||||
if _, err := server.D.SaveEvent(context.Background(), ev); err != nil {
|
||||
if _, err := server.DB.SaveEvent(context.Background(), ev); err != nil {
|
||||
t.Errorf("Failed to save event %d: %v", i, err)
|
||||
continue
|
||||
}
|
||||
@@ -376,7 +376,7 @@ func TestMultipleConcurrentSubscriptions(t *testing.T) {
|
||||
// Create and sign test event
|
||||
ev := createSignedTestEvent(t, uint16(sub.kind), fmt.Sprintf("Test for kind %d event %d", sub.kind, i))
|
||||
|
||||
if _, err := server.D.SaveEvent(context.Background(), ev); err != nil {
|
||||
if _, err := server.DB.SaveEvent(context.Background(), ev); err != nil {
|
||||
t.Errorf("Failed to save event: %v", err)
|
||||
}
|
||||
|
||||
@@ -431,7 +431,7 @@ func setupTestServer(t *testing.T) (*Server, func()) {
|
||||
// Setup server
|
||||
server := &Server{
|
||||
Config: cfg,
|
||||
D: db,
|
||||
DB: db,
|
||||
Ctx: ctx,
|
||||
publishers: publish.New(NewPublisher(ctx)),
|
||||
Admins: [][]byte{},
|
||||
|
||||
257
cmd/benchmark/CPU_OPTIMIZATION.md
Normal file
257
cmd/benchmark/CPU_OPTIMIZATION.md
Normal file
@@ -0,0 +1,257 @@
|
||||
# Benchmark CPU Usage Optimization
|
||||
|
||||
This document describes the CPU optimization settings for the ORLY benchmark suite, specifically tuned for systems with limited CPU resources (6-core/12-thread and lower).
|
||||
|
||||
## Problem Statement
|
||||
|
||||
The original benchmark implementation was designed for maximum throughput testing, which caused:
|
||||
- **CPU saturation**: 95-100% sustained CPU usage across all cores
|
||||
- **System instability**: Other services unable to run alongside benchmarks
|
||||
- **Thermal throttling**: Long benchmark runs causing CPU frequency reduction
|
||||
- **Unrealistic load**: Tight loops not representative of real-world relay usage
|
||||
|
||||
## Solution: Aggressive Rate Limiting
|
||||
|
||||
The benchmark now implements multi-layered CPU usage controls:
|
||||
|
||||
### 1. Reduced Worker Concurrency
|
||||
|
||||
**Default Worker Count**: `NumCPU() / 4` (minimum 2)
|
||||
|
||||
For a 6-core/12-thread system:
|
||||
- Previous: 12 workers
|
||||
- **Current: 3 workers**
|
||||
|
||||
This 4x reduction dramatically lowers:
|
||||
- Goroutine context switching overhead
|
||||
- Lock contention on shared resources
|
||||
- CPU cache thrashing
|
||||
|
||||
### 2. Per-Operation Delays
|
||||
|
||||
All benchmark operations now include mandatory delays to prevent CPU saturation:
|
||||
|
||||
| Operation Type | Delay | Rationale |
|
||||
|---------------|-------|-----------|
|
||||
| Event writes | 500µs | Simulates network latency and client pacing |
|
||||
| Queries | 1ms | Queries are CPU-intensive, need more spacing |
|
||||
| Concurrent writes | 500µs | Balanced for mixed workloads |
|
||||
| Burst writes | 500µs | Prevents CPU spikes during bursts |
|
||||
|
||||
### 3. Implementation Locations
|
||||
|
||||
#### Main Benchmark (Badger backend)
|
||||
|
||||
**Peak Throughput Test** ([main.go:471-473](main.go#L471-L473)):
|
||||
```go
|
||||
const eventDelay = 500 * time.Microsecond
|
||||
time.Sleep(eventDelay) // After each event save
|
||||
```
|
||||
|
||||
**Burst Pattern Test** ([main.go:599-600](main.go#L599-L600)):
|
||||
```go
|
||||
const eventDelay = 500 * time.Microsecond
|
||||
time.Sleep(eventDelay) // In worker loop
|
||||
```
|
||||
|
||||
**Query Test** ([main.go:899](main.go#L899)):
|
||||
```go
|
||||
time.Sleep(1 * time.Millisecond) // After each query
|
||||
```
|
||||
|
||||
**Concurrent Query/Store** ([main.go:900, 1068](main.go#L900)):
|
||||
```go
|
||||
time.Sleep(1 * time.Millisecond) // Readers
|
||||
time.Sleep(500 * time.Microsecond) // Writers
|
||||
```
|
||||
|
||||
#### BenchmarkAdapter (DGraph/Neo4j backends)
|
||||
|
||||
**Peak Throughput** ([benchmark_adapter.go:58](benchmark_adapter.go#L58)):
|
||||
```go
|
||||
const eventDelay = 500 * time.Microsecond
|
||||
```
|
||||
|
||||
**Burst Pattern** ([benchmark_adapter.go:142](benchmark_adapter.go#L142)):
|
||||
```go
|
||||
const eventDelay = 500 * time.Microsecond
|
||||
```
|
||||
|
||||
## Expected CPU Usage
|
||||
|
||||
### Before Optimization
|
||||
- **Workers**: 12 (on 12-thread system)
|
||||
- **Delays**: None or minimal
|
||||
- **CPU Usage**: 95-100% sustained
|
||||
- **System Impact**: Severe - other processes starved
|
||||
|
||||
### After Optimization
|
||||
- **Workers**: 3 (on 12-thread system)
|
||||
- **Delays**: 500µs-1ms per operation
|
||||
- **Expected CPU Usage**: 40-60% average, 70% peak
|
||||
- **System Impact**: Minimal - plenty of headroom for other processes
|
||||
|
||||
## Performance Impact
|
||||
|
||||
### Throughput Reduction
|
||||
The aggressive rate limiting will reduce benchmark throughput:
|
||||
|
||||
**Before** (unrealistic, CPU-bound):
|
||||
- ~50,000 events/second with 12 workers
|
||||
|
||||
**After** (realistic, rate-limited):
|
||||
- ~5,000-10,000 events/second with 3 workers
|
||||
- More representative of real-world relay load
|
||||
- Network latency and client pacing simulated
|
||||
|
||||
### Latency Accuracy
|
||||
**Improved**: With lower CPU contention, latency measurements are more accurate:
|
||||
- Less queueing delay in database operations
|
||||
- More consistent response times
|
||||
- Better P95/P99 metric reliability
|
||||
|
||||
## Tuning Guide
|
||||
|
||||
If you need to adjust CPU usage further:
|
||||
|
||||
### Further Reduce CPU (< 40%)
|
||||
|
||||
1. **Reduce workers**:
|
||||
```bash
|
||||
./benchmark --workers 2 # Half of default
|
||||
```
|
||||
|
||||
2. **Increase delays** in code:
|
||||
```go
|
||||
// Change from 500µs to 1ms for writes
|
||||
const eventDelay = 1 * time.Millisecond
|
||||
|
||||
// Change from 1ms to 2ms for queries
|
||||
time.Sleep(2 * time.Millisecond)
|
||||
```
|
||||
|
||||
3. **Reduce event count**:
|
||||
```bash
|
||||
./benchmark --events 5000 # Shorter test runs
|
||||
```
|
||||
|
||||
### Increase CPU (for faster testing)
|
||||
|
||||
1. **Increase workers**:
|
||||
```bash
|
||||
./benchmark --workers 6 # More concurrency
|
||||
```
|
||||
|
||||
2. **Decrease delays** in code:
|
||||
```go
|
||||
// Change from 500µs to 100µs
|
||||
const eventDelay = 100 * time.Microsecond
|
||||
|
||||
// Change from 1ms to 500µs
|
||||
time.Sleep(500 * time.Microsecond)
|
||||
```
|
||||
|
||||
## Monitoring CPU Usage
|
||||
|
||||
### Real-time Monitoring
|
||||
|
||||
```bash
|
||||
# Terminal 1: Run benchmark
|
||||
cd cmd/benchmark
|
||||
./benchmark --workers 3 --events 10000
|
||||
|
||||
# Terminal 2: Monitor CPU
|
||||
watch -n 1 'ps aux | grep benchmark | grep -v grep | awk "{print \$3\" %CPU\"}"'
|
||||
```
|
||||
|
||||
### With htop (recommended)
|
||||
|
||||
```bash
|
||||
# Install htop if needed
|
||||
sudo apt install htop
|
||||
|
||||
# Run htop and filter for benchmark process
|
||||
htop -p $(pgrep -f benchmark)
|
||||
```
|
||||
|
||||
### System-wide CPU Usage
|
||||
|
||||
```bash
|
||||
# Check overall system load
|
||||
mpstat 1
|
||||
|
||||
# Or with sar
|
||||
sar -u 1
|
||||
```
|
||||
|
||||
## Docker Compose Considerations
|
||||
|
||||
When running the full benchmark suite in Docker Compose:
|
||||
|
||||
### Resource Limits
|
||||
|
||||
The compose file should limit CPU allocation:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
benchmark-runner:
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
cpus: '4' # Limit to 4 CPU cores
|
||||
```
|
||||
|
||||
### Sequential vs Parallel
|
||||
|
||||
Current implementation runs benchmarks **sequentially** to avoid overwhelming the system.
|
||||
Each relay is tested one at a time, ensuring:
|
||||
- Consistent baseline for comparisons
|
||||
- No CPU competition between tests
|
||||
- Reliable latency measurements
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Always monitor CPU during first run** to verify settings work for your system
|
||||
2. **Close other applications** during benchmarking for consistent results
|
||||
3. **Use consistent worker counts** across test runs for fair comparisons
|
||||
4. **Document your settings** if you modify delay constants
|
||||
5. **Test with small event counts first** (--events 1000) to verify CPU usage
|
||||
|
||||
## Realistic Workload Simulation
|
||||
|
||||
The delays aren't just for CPU management - they simulate real-world conditions:
|
||||
|
||||
- **500µs write delay**: Typical network round-trip time for local clients
|
||||
- **1ms query delay**: Client thinking time between queries
|
||||
- **3 workers**: Simulates 3 concurrent users/clients
|
||||
- **Burst patterns**: Models social media posting patterns (busy hours vs quiet periods)
|
||||
|
||||
This makes benchmark results more applicable to production relay deployment planning.
|
||||
|
||||
## System Requirements
|
||||
|
||||
### Minimum
|
||||
- 4 CPU cores (2 physical cores with hyperthreading)
|
||||
- 8GB RAM
|
||||
- SSD storage for database
|
||||
|
||||
### Recommended
|
||||
- 6+ CPU cores
|
||||
- 16GB RAM
|
||||
- NVMe SSD
|
||||
|
||||
### For Full Suite (Docker Compose)
|
||||
- 8+ CPU cores (allows multiple relays + benchmark runner)
|
||||
- 32GB RAM (Neo4j, DGraph are memory-hungry)
|
||||
- Fast SSD with 100GB+ free space
|
||||
|
||||
## Conclusion
|
||||
|
||||
These aggressive CPU optimizations ensure the benchmark suite:
|
||||
- ✅ Runs reliably on modest hardware
|
||||
- ✅ Doesn't interfere with other system processes
|
||||
- ✅ Produces realistic, production-relevant metrics
|
||||
- ✅ Completes without thermal throttling
|
||||
- ✅ Allows fair comparison across different relay implementations
|
||||
|
||||
The trade-off is longer test duration, but the results are far more valuable for actual relay deployment planning.
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
A comprehensive benchmarking system for testing and comparing the performance of multiple Nostr relay implementations, including:
|
||||
|
||||
- **next.orly.dev** (this repository) - BadgerDB-based relay
|
||||
- **next.orly.dev** (this repository) - Badger, DGraph, and Neo4j backend variants
|
||||
- **Khatru** - SQLite and Badger variants
|
||||
- **Relayer** - Basic example implementation
|
||||
- **Strfry** - C++ LMDB-based relay
|
||||
@@ -91,15 +91,20 @@ ls reports/run_YYYYMMDD_HHMMSS/
|
||||
|
||||
### Docker Compose Services
|
||||
|
||||
| Service | Port | Description |
|
||||
| ---------------- | ---- | ----------------------------------------- |
|
||||
| next-orly | 8001 | This repository's BadgerDB relay |
|
||||
| khatru-sqlite | 8002 | Khatru with SQLite backend |
|
||||
| khatru-badger | 8003 | Khatru with Badger backend |
|
||||
| relayer-basic | 8004 | Basic relayer example |
|
||||
| strfry | 8005 | Strfry C++ LMDB relay |
|
||||
| nostr-rs-relay | 8006 | Rust SQLite relay |
|
||||
| benchmark-runner | - | Orchestrates tests and aggregates results |
|
||||
| Service | Port | Description |
|
||||
| ------------------ | ---- | ----------------------------------------- |
|
||||
| next-orly-badger | 8001 | This repository's Badger relay |
|
||||
| next-orly-dgraph | 8007 | This repository's DGraph relay |
|
||||
| next-orly-neo4j | 8008 | This repository's Neo4j relay |
|
||||
| dgraph-zero | 5080 | DGraph cluster coordinator |
|
||||
| dgraph-alpha | 9080 | DGraph data node |
|
||||
| neo4j | 7474/7687 | Neo4j graph database |
|
||||
| khatru-sqlite | 8002 | Khatru with SQLite backend |
|
||||
| khatru-badger | 8003 | Khatru with Badger backend |
|
||||
| relayer-basic | 8004 | Basic relayer example |
|
||||
| strfry | 8005 | Strfry C++ LMDB relay |
|
||||
| nostr-rs-relay | 8006 | Rust SQLite relay |
|
||||
| benchmark-runner | - | Orchestrates tests and aggregates results |
|
||||
|
||||
### File Structure
|
||||
|
||||
@@ -173,6 +178,53 @@ go build -o benchmark main.go
|
||||
-duration=30s
|
||||
```
|
||||
|
||||
## Database Backend Comparison
|
||||
|
||||
The benchmark suite includes **next.orly.dev** with three different database backends to compare architectural approaches:
|
||||
|
||||
### Badger Backend (next-orly-badger)
|
||||
- **Type**: Embedded key-value store
|
||||
- **Architecture**: Single-process, no network overhead
|
||||
- **Best for**: Personal relays, single-instance deployments
|
||||
- **Characteristics**:
|
||||
- Lower latency for single-instance operations
|
||||
- No network round-trips
|
||||
- Simpler deployment
|
||||
- Limited to single-node scaling
|
||||
|
||||
### DGraph Backend (next-orly-dgraph)
|
||||
- **Type**: Distributed graph database
|
||||
- **Architecture**: Client-server with dgraph-zero (coordinator) and dgraph-alpha (data node)
|
||||
- **Best for**: Distributed deployments, horizontal scaling
|
||||
- **Characteristics**:
|
||||
- Network overhead from gRPC communication
|
||||
- Supports multi-node clustering
|
||||
- Built-in replication and sharding
|
||||
- More complex deployment
|
||||
|
||||
### Neo4j Backend (next-orly-neo4j)
|
||||
- **Type**: Native graph database
|
||||
- **Architecture**: Client-server with Neo4j Community Edition
|
||||
- **Best for**: Graph queries, relationship-heavy workloads, social network analysis
|
||||
- **Characteristics**:
|
||||
- Optimized for relationship traversal (e.g., follow graphs, event references)
|
||||
- Native Cypher query language for graph patterns
|
||||
- ACID transactions with graph-native storage
|
||||
- Network overhead from Bolt protocol
|
||||
- Excellent for complex graph queries (finding common connections, recommendation systems)
|
||||
- Higher memory usage for graph indexes
|
||||
- Ideal for analytics and social graph exploration
|
||||
|
||||
### Comparing the Backends
|
||||
|
||||
The benchmark results will show:
|
||||
- **Latency differences**: Embedded vs. distributed overhead, graph traversal efficiency
|
||||
- **Throughput trade-offs**: Single-process optimization vs. distributed scalability vs. graph query optimization
|
||||
- **Resource usage**: Memory and CPU patterns for different architectures
|
||||
- **Query performance**: Graph queries (Neo4j) vs. key-value lookups (Badger) vs. distributed queries (DGraph)
|
||||
|
||||
This comparison helps determine which backend is appropriate for different deployment scenarios and workload patterns.
|
||||
|
||||
## Benchmark Results Interpretation
|
||||
|
||||
### Peak Throughput Test
|
||||
|
||||
629
cmd/benchmark/benchmark_adapter.go
Normal file
629
cmd/benchmark/benchmark_adapter.go
Normal file
@@ -0,0 +1,629 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"sort"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"next.orly.dev/pkg/database"
|
||||
"next.orly.dev/pkg/encoders/event"
|
||||
"next.orly.dev/pkg/encoders/filter"
|
||||
"next.orly.dev/pkg/encoders/kind"
|
||||
"next.orly.dev/pkg/encoders/tag"
|
||||
"next.orly.dev/pkg/encoders/timestamp"
|
||||
"next.orly.dev/pkg/interfaces/signer/p8k"
|
||||
)
|
||||
|
||||
// BenchmarkAdapter adapts a database.Database interface to work with benchmark tests
|
||||
type BenchmarkAdapter struct {
|
||||
config *BenchmarkConfig
|
||||
db database.Database
|
||||
results []*BenchmarkResult
|
||||
mu sync.RWMutex
|
||||
cachedEvents []*event.E // Cache generated events to avoid expensive re-generation
|
||||
eventCacheMu sync.Mutex
|
||||
}
|
||||
|
||||
// NewBenchmarkAdapter creates a new benchmark adapter
|
||||
func NewBenchmarkAdapter(config *BenchmarkConfig, db database.Database) *BenchmarkAdapter {
|
||||
return &BenchmarkAdapter{
|
||||
config: config,
|
||||
db: db,
|
||||
results: make([]*BenchmarkResult, 0),
|
||||
}
|
||||
}
|
||||
|
||||
// RunPeakThroughputTest runs the peak throughput benchmark
|
||||
func (ba *BenchmarkAdapter) RunPeakThroughputTest() {
|
||||
fmt.Println("\n=== Peak Throughput Test ===")
|
||||
|
||||
start := time.Now()
|
||||
var wg sync.WaitGroup
|
||||
var totalEvents int64
|
||||
var errors []error
|
||||
var latencies []time.Duration
|
||||
var mu sync.Mutex
|
||||
|
||||
events := ba.generateEvents(ba.config.NumEvents)
|
||||
eventChan := make(chan *event.E, len(events))
|
||||
|
||||
// Fill event channel
|
||||
for _, ev := range events {
|
||||
eventChan <- ev
|
||||
}
|
||||
close(eventChan)
|
||||
|
||||
// Calculate per-worker rate to avoid mutex contention
|
||||
perWorkerRate := 20000.0 / float64(ba.config.ConcurrentWorkers)
|
||||
|
||||
for i := 0; i < ba.config.ConcurrentWorkers; i++ {
|
||||
wg.Add(1)
|
||||
go func(workerID int) {
|
||||
defer wg.Done()
|
||||
|
||||
// Each worker gets its own rate limiter
|
||||
workerLimiter := NewRateLimiter(perWorkerRate)
|
||||
|
||||
ctx := context.Background()
|
||||
for ev := range eventChan {
|
||||
// Wait for rate limiter to allow this event
|
||||
workerLimiter.Wait()
|
||||
|
||||
eventStart := time.Now()
|
||||
_, err := ba.db.SaveEvent(ctx, ev)
|
||||
latency := time.Since(eventStart)
|
||||
|
||||
mu.Lock()
|
||||
if err != nil {
|
||||
errors = append(errors, err)
|
||||
} else {
|
||||
totalEvents++
|
||||
latencies = append(latencies, latency)
|
||||
}
|
||||
mu.Unlock()
|
||||
}
|
||||
}(i)
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
duration := time.Since(start)
|
||||
|
||||
// Calculate metrics
|
||||
result := &BenchmarkResult{
|
||||
TestName: "Peak Throughput",
|
||||
Duration: duration,
|
||||
TotalEvents: int(totalEvents),
|
||||
EventsPerSecond: float64(totalEvents) / duration.Seconds(),
|
||||
ConcurrentWorkers: ba.config.ConcurrentWorkers,
|
||||
MemoryUsed: getMemUsage(),
|
||||
}
|
||||
|
||||
if len(latencies) > 0 {
|
||||
sort.Slice(latencies, func(i, j int) bool {
|
||||
return latencies[i] < latencies[j]
|
||||
})
|
||||
result.AvgLatency = calculateAverage(latencies)
|
||||
result.P90Latency = latencies[int(float64(len(latencies))*0.90)]
|
||||
result.P95Latency = latencies[int(float64(len(latencies))*0.95)]
|
||||
result.P99Latency = latencies[int(float64(len(latencies))*0.99)]
|
||||
|
||||
bottom10 := latencies[:int(float64(len(latencies))*0.10)]
|
||||
result.Bottom10Avg = calculateAverage(bottom10)
|
||||
}
|
||||
|
||||
result.SuccessRate = float64(totalEvents) / float64(ba.config.NumEvents) * 100
|
||||
if len(errors) > 0 {
|
||||
result.Errors = make([]string, 0, len(errors))
|
||||
for _, err := range errors {
|
||||
result.Errors = append(result.Errors, err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
ba.mu.Lock()
|
||||
ba.results = append(ba.results, result)
|
||||
ba.mu.Unlock()
|
||||
|
||||
ba.printResult(result)
|
||||
}
|
||||
|
||||
// RunBurstPatternTest runs burst pattern test
|
||||
func (ba *BenchmarkAdapter) RunBurstPatternTest() {
|
||||
fmt.Println("\n=== Burst Pattern Test ===")
|
||||
|
||||
start := time.Now()
|
||||
var totalEvents int64
|
||||
var latencies []time.Duration
|
||||
var mu sync.Mutex
|
||||
|
||||
ctx := context.Background()
|
||||
burstSize := 100
|
||||
bursts := ba.config.NumEvents / burstSize
|
||||
|
||||
// Create rate limiter: cap at 20,000 events/second globally
|
||||
rateLimiter := NewRateLimiter(20000)
|
||||
|
||||
for i := 0; i < bursts; i++ {
|
||||
// Generate a burst of events
|
||||
events := ba.generateEvents(burstSize)
|
||||
|
||||
var wg sync.WaitGroup
|
||||
for _, ev := range events {
|
||||
wg.Add(1)
|
||||
go func(e *event.E) {
|
||||
defer wg.Done()
|
||||
|
||||
// Wait for rate limiter to allow this event
|
||||
rateLimiter.Wait()
|
||||
|
||||
eventStart := time.Now()
|
||||
_, err := ba.db.SaveEvent(ctx, e)
|
||||
latency := time.Since(eventStart)
|
||||
|
||||
mu.Lock()
|
||||
if err == nil {
|
||||
totalEvents++
|
||||
latencies = append(latencies, latency)
|
||||
}
|
||||
mu.Unlock()
|
||||
}(ev)
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
|
||||
// Short pause between bursts
|
||||
time.Sleep(10 * time.Millisecond)
|
||||
}
|
||||
|
||||
duration := time.Since(start)
|
||||
|
||||
result := &BenchmarkResult{
|
||||
TestName: "Burst Pattern",
|
||||
Duration: duration,
|
||||
TotalEvents: int(totalEvents),
|
||||
EventsPerSecond: float64(totalEvents) / duration.Seconds(),
|
||||
ConcurrentWorkers: burstSize,
|
||||
MemoryUsed: getMemUsage(),
|
||||
SuccessRate: float64(totalEvents) / float64(ba.config.NumEvents) * 100,
|
||||
}
|
||||
|
||||
if len(latencies) > 0 {
|
||||
sort.Slice(latencies, func(i, j int) bool {
|
||||
return latencies[i] < latencies[j]
|
||||
})
|
||||
result.AvgLatency = calculateAverage(latencies)
|
||||
result.P90Latency = latencies[int(float64(len(latencies))*0.90)]
|
||||
result.P95Latency = latencies[int(float64(len(latencies))*0.95)]
|
||||
result.P99Latency = latencies[int(float64(len(latencies))*0.99)]
|
||||
|
||||
bottom10 := latencies[:int(float64(len(latencies))*0.10)]
|
||||
result.Bottom10Avg = calculateAverage(bottom10)
|
||||
}
|
||||
|
||||
ba.mu.Lock()
|
||||
ba.results = append(ba.results, result)
|
||||
ba.mu.Unlock()
|
||||
|
||||
ba.printResult(result)
|
||||
}
|
||||
|
||||
// RunMixedReadWriteTest runs mixed read/write test
|
||||
func (ba *BenchmarkAdapter) RunMixedReadWriteTest() {
|
||||
fmt.Println("\n=== Mixed Read/Write Test ===")
|
||||
|
||||
// First, populate some events
|
||||
fmt.Println("Populating database with initial events...")
|
||||
populateEvents := ba.generateEvents(1000)
|
||||
ctx := context.Background()
|
||||
|
||||
for _, ev := range populateEvents {
|
||||
ba.db.SaveEvent(ctx, ev)
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
var writeCount, readCount int64
|
||||
var latencies []time.Duration
|
||||
var mu sync.Mutex
|
||||
var wg sync.WaitGroup
|
||||
|
||||
// Create rate limiter for writes: cap at 20,000 events/second
|
||||
rateLimiter := NewRateLimiter(20000)
|
||||
|
||||
// Start workers doing mixed read/write
|
||||
for i := 0; i < ba.config.ConcurrentWorkers; i++ {
|
||||
wg.Add(1)
|
||||
go func(workerID int) {
|
||||
defer wg.Done()
|
||||
|
||||
events := ba.generateEvents(ba.config.NumEvents / ba.config.ConcurrentWorkers)
|
||||
|
||||
for idx, ev := range events {
|
||||
eventStart := time.Now()
|
||||
|
||||
if idx%3 == 0 {
|
||||
// Read operation
|
||||
f := filter.New()
|
||||
f.Kinds = kind.NewS(kind.TextNote)
|
||||
limit := uint(10)
|
||||
f.Limit = &limit
|
||||
_, _ = ba.db.QueryEvents(ctx, f)
|
||||
|
||||
mu.Lock()
|
||||
readCount++
|
||||
mu.Unlock()
|
||||
} else {
|
||||
// Write operation - apply rate limiting
|
||||
rateLimiter.Wait()
|
||||
_, _ = ba.db.SaveEvent(ctx, ev)
|
||||
|
||||
mu.Lock()
|
||||
writeCount++
|
||||
mu.Unlock()
|
||||
}
|
||||
|
||||
latency := time.Since(eventStart)
|
||||
mu.Lock()
|
||||
latencies = append(latencies, latency)
|
||||
mu.Unlock()
|
||||
}
|
||||
}(i)
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
duration := time.Since(start)
|
||||
|
||||
result := &BenchmarkResult{
|
||||
TestName: fmt.Sprintf("Mixed R/W (R:%d W:%d)", readCount, writeCount),
|
||||
Duration: duration,
|
||||
TotalEvents: int(writeCount + readCount),
|
||||
EventsPerSecond: float64(writeCount+readCount) / duration.Seconds(),
|
||||
ConcurrentWorkers: ba.config.ConcurrentWorkers,
|
||||
MemoryUsed: getMemUsage(),
|
||||
SuccessRate: 100.0,
|
||||
}
|
||||
|
||||
if len(latencies) > 0 {
|
||||
sort.Slice(latencies, func(i, j int) bool {
|
||||
return latencies[i] < latencies[j]
|
||||
})
|
||||
result.AvgLatency = calculateAverage(latencies)
|
||||
result.P90Latency = latencies[int(float64(len(latencies))*0.90)]
|
||||
result.P95Latency = latencies[int(float64(len(latencies))*0.95)]
|
||||
result.P99Latency = latencies[int(float64(len(latencies))*0.99)]
|
||||
|
||||
bottom10 := latencies[:int(float64(len(latencies))*0.10)]
|
||||
result.Bottom10Avg = calculateAverage(bottom10)
|
||||
}
|
||||
|
||||
ba.mu.Lock()
|
||||
ba.results = append(ba.results, result)
|
||||
ba.mu.Unlock()
|
||||
|
||||
ba.printResult(result)
|
||||
}
|
||||
|
||||
// RunQueryTest runs query performance test
|
||||
func (ba *BenchmarkAdapter) RunQueryTest() {
|
||||
fmt.Println("\n=== Query Performance Test ===")
|
||||
|
||||
// Populate with test data
|
||||
fmt.Println("Populating database for query tests...")
|
||||
events := ba.generateEvents(5000)
|
||||
ctx := context.Background()
|
||||
|
||||
for _, ev := range events {
|
||||
ba.db.SaveEvent(ctx, ev)
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
var queryCount int64
|
||||
var latencies []time.Duration
|
||||
var mu sync.Mutex
|
||||
var wg sync.WaitGroup
|
||||
|
||||
queryTypes := []func() *filter.F{
|
||||
func() *filter.F {
|
||||
f := filter.New()
|
||||
f.Kinds = kind.NewS(kind.TextNote)
|
||||
limit := uint(100)
|
||||
f.Limit = &limit
|
||||
return f
|
||||
},
|
||||
func() *filter.F {
|
||||
f := filter.New()
|
||||
f.Kinds = kind.NewS(kind.TextNote, kind.Repost)
|
||||
limit := uint(50)
|
||||
f.Limit = &limit
|
||||
return f
|
||||
},
|
||||
func() *filter.F {
|
||||
f := filter.New()
|
||||
limit := uint(10)
|
||||
f.Limit = &limit
|
||||
since := time.Now().Add(-1 * time.Hour).Unix()
|
||||
f.Since = timestamp.FromUnix(since)
|
||||
return f
|
||||
},
|
||||
}
|
||||
|
||||
// Run concurrent queries
|
||||
iterations := 1000
|
||||
for i := 0; i < ba.config.ConcurrentWorkers; i++ {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
|
||||
for j := 0; j < iterations/ba.config.ConcurrentWorkers; j++ {
|
||||
f := queryTypes[j%len(queryTypes)]()
|
||||
|
||||
queryStart := time.Now()
|
||||
_, _ = ba.db.QueryEvents(ctx, f)
|
||||
latency := time.Since(queryStart)
|
||||
|
||||
mu.Lock()
|
||||
queryCount++
|
||||
latencies = append(latencies, latency)
|
||||
mu.Unlock()
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
duration := time.Since(start)
|
||||
|
||||
result := &BenchmarkResult{
|
||||
TestName: fmt.Sprintf("Query Performance (%d queries)", queryCount),
|
||||
Duration: duration,
|
||||
TotalEvents: int(queryCount),
|
||||
EventsPerSecond: float64(queryCount) / duration.Seconds(),
|
||||
ConcurrentWorkers: ba.config.ConcurrentWorkers,
|
||||
MemoryUsed: getMemUsage(),
|
||||
SuccessRate: 100.0,
|
||||
}
|
||||
|
||||
if len(latencies) > 0 {
|
||||
sort.Slice(latencies, func(i, j int) bool {
|
||||
return latencies[i] < latencies[j]
|
||||
})
|
||||
result.AvgLatency = calculateAverage(latencies)
|
||||
result.P90Latency = latencies[int(float64(len(latencies))*0.90)]
|
||||
result.P95Latency = latencies[int(float64(len(latencies))*0.95)]
|
||||
result.P99Latency = latencies[int(float64(len(latencies))*0.99)]
|
||||
|
||||
bottom10 := latencies[:int(float64(len(latencies))*0.10)]
|
||||
result.Bottom10Avg = calculateAverage(bottom10)
|
||||
}
|
||||
|
||||
ba.mu.Lock()
|
||||
ba.results = append(ba.results, result)
|
||||
ba.mu.Unlock()
|
||||
|
||||
ba.printResult(result)
|
||||
}
|
||||
|
||||
// RunConcurrentQueryStoreTest runs concurrent query and store test
|
||||
func (ba *BenchmarkAdapter) RunConcurrentQueryStoreTest() {
|
||||
fmt.Println("\n=== Concurrent Query+Store Test ===")
|
||||
|
||||
start := time.Now()
|
||||
var storeCount, queryCount int64
|
||||
var latencies []time.Duration
|
||||
var mu sync.Mutex
|
||||
var wg sync.WaitGroup
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
// Half workers write, half query
|
||||
halfWorkers := ba.config.ConcurrentWorkers / 2
|
||||
if halfWorkers < 1 {
|
||||
halfWorkers = 1
|
||||
}
|
||||
|
||||
// Create rate limiter for writes: cap at 20,000 events/second
|
||||
rateLimiter := NewRateLimiter(20000)
|
||||
|
||||
// Writers
|
||||
for i := 0; i < halfWorkers; i++ {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
|
||||
events := ba.generateEvents(ba.config.NumEvents / halfWorkers)
|
||||
for _, ev := range events {
|
||||
// Wait for rate limiter to allow this event
|
||||
rateLimiter.Wait()
|
||||
|
||||
eventStart := time.Now()
|
||||
ba.db.SaveEvent(ctx, ev)
|
||||
latency := time.Since(eventStart)
|
||||
|
||||
mu.Lock()
|
||||
storeCount++
|
||||
latencies = append(latencies, latency)
|
||||
mu.Unlock()
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
// Readers
|
||||
for i := 0; i < halfWorkers; i++ {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
|
||||
for j := 0; j < ba.config.NumEvents/halfWorkers; j++ {
|
||||
f := filter.New()
|
||||
f.Kinds = kind.NewS(kind.TextNote)
|
||||
limit := uint(10)
|
||||
f.Limit = &limit
|
||||
|
||||
queryStart := time.Now()
|
||||
ba.db.QueryEvents(ctx, f)
|
||||
latency := time.Since(queryStart)
|
||||
|
||||
mu.Lock()
|
||||
queryCount++
|
||||
latencies = append(latencies, latency)
|
||||
mu.Unlock()
|
||||
|
||||
time.Sleep(1 * time.Millisecond)
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
duration := time.Since(start)
|
||||
|
||||
result := &BenchmarkResult{
|
||||
TestName: fmt.Sprintf("Concurrent Q+S (Q:%d S:%d)", queryCount, storeCount),
|
||||
Duration: duration,
|
||||
TotalEvents: int(storeCount + queryCount),
|
||||
EventsPerSecond: float64(storeCount+queryCount) / duration.Seconds(),
|
||||
ConcurrentWorkers: ba.config.ConcurrentWorkers,
|
||||
MemoryUsed: getMemUsage(),
|
||||
SuccessRate: 100.0,
|
||||
}
|
||||
|
||||
if len(latencies) > 0 {
|
||||
sort.Slice(latencies, func(i, j int) bool {
|
||||
return latencies[i] < latencies[j]
|
||||
})
|
||||
result.AvgLatency = calculateAverage(latencies)
|
||||
result.P90Latency = latencies[int(float64(len(latencies))*0.90)]
|
||||
result.P95Latency = latencies[int(float64(len(latencies))*0.95)]
|
||||
result.P99Latency = latencies[int(float64(len(latencies))*0.99)]
|
||||
|
||||
bottom10 := latencies[:int(float64(len(latencies))*0.10)]
|
||||
result.Bottom10Avg = calculateAverage(bottom10)
|
||||
}
|
||||
|
||||
ba.mu.Lock()
|
||||
ba.results = append(ba.results, result)
|
||||
ba.mu.Unlock()
|
||||
|
||||
ba.printResult(result)
|
||||
}
|
||||
|
||||
// generateEvents generates unique synthetic events with realistic content sizes
|
||||
func (ba *BenchmarkAdapter) generateEvents(count int) []*event.E {
|
||||
fmt.Printf("Generating %d unique synthetic events (minimum 300 bytes each)...\n", count)
|
||||
|
||||
// Create a single signer for all events (reusing key is faster)
|
||||
signer := p8k.MustNew()
|
||||
if err := signer.Generate(); err != nil {
|
||||
panic(fmt.Sprintf("Failed to generate keypair: %v", err))
|
||||
}
|
||||
|
||||
// Base timestamp - start from current time and increment
|
||||
baseTime := time.Now().Unix()
|
||||
|
||||
// Minimum content size
|
||||
const minContentSize = 300
|
||||
|
||||
// Base content template
|
||||
baseContent := "This is a benchmark test event with realistic content size. "
|
||||
|
||||
// Pre-calculate how much padding we need
|
||||
paddingNeeded := minContentSize - len(baseContent)
|
||||
if paddingNeeded < 0 {
|
||||
paddingNeeded = 0
|
||||
}
|
||||
|
||||
// Create padding string (with varied characters for realistic size)
|
||||
padding := make([]byte, paddingNeeded)
|
||||
for i := range padding {
|
||||
padding[i] = ' ' + byte(i%94) // Printable ASCII characters
|
||||
}
|
||||
|
||||
events := make([]*event.E, count)
|
||||
for i := 0; i < count; i++ {
|
||||
ev := event.New()
|
||||
ev.Kind = kind.TextNote.K
|
||||
ev.CreatedAt = baseTime + int64(i) // Unique timestamp for each event
|
||||
ev.Tags = tag.NewS()
|
||||
|
||||
// Create content with unique identifier and padding
|
||||
ev.Content = []byte(fmt.Sprintf("%s Event #%d. %s", baseContent, i, string(padding)))
|
||||
|
||||
// Sign the event (this calculates ID and Sig)
|
||||
if err := ev.Sign(signer); err != nil {
|
||||
panic(fmt.Sprintf("Failed to sign event %d: %v", i, err))
|
||||
}
|
||||
|
||||
events[i] = ev
|
||||
}
|
||||
|
||||
// Print stats
|
||||
totalSize := int64(0)
|
||||
for _, ev := range events {
|
||||
totalSize += int64(len(ev.Content))
|
||||
}
|
||||
avgSize := totalSize / int64(count)
|
||||
|
||||
fmt.Printf("Generated %d events:\n", count)
|
||||
fmt.Printf(" Average content size: %d bytes\n", avgSize)
|
||||
fmt.Printf(" All events are unique (incremental timestamps)\n")
|
||||
fmt.Printf(" All events are properly signed\n\n")
|
||||
|
||||
return events
|
||||
}
|
||||
|
||||
func (ba *BenchmarkAdapter) printResult(r *BenchmarkResult) {
|
||||
fmt.Printf("\nResults for %s:\n", r.TestName)
|
||||
fmt.Printf(" Duration: %v\n", r.Duration)
|
||||
fmt.Printf(" Total Events: %d\n", r.TotalEvents)
|
||||
fmt.Printf(" Events/sec: %.2f\n", r.EventsPerSecond)
|
||||
fmt.Printf(" Success Rate: %.2f%%\n", r.SuccessRate)
|
||||
fmt.Printf(" Workers: %d\n", r.ConcurrentWorkers)
|
||||
fmt.Printf(" Memory Used: %.2f MB\n", float64(r.MemoryUsed)/1024/1024)
|
||||
|
||||
if r.AvgLatency > 0 {
|
||||
fmt.Printf(" Avg Latency: %v\n", r.AvgLatency)
|
||||
fmt.Printf(" P90 Latency: %v\n", r.P90Latency)
|
||||
fmt.Printf(" P95 Latency: %v\n", r.P95Latency)
|
||||
fmt.Printf(" P99 Latency: %v\n", r.P99Latency)
|
||||
fmt.Printf(" Bottom 10%% Avg: %v\n", r.Bottom10Avg)
|
||||
}
|
||||
|
||||
if len(r.Errors) > 0 {
|
||||
fmt.Printf(" Errors: %d\n", len(r.Errors))
|
||||
// Print first few errors as samples
|
||||
sampleCount := 3
|
||||
if len(r.Errors) < sampleCount {
|
||||
sampleCount = len(r.Errors)
|
||||
}
|
||||
for i := 0; i < sampleCount; i++ {
|
||||
fmt.Printf(" Sample %d: %s\n", i+1, r.Errors[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (ba *BenchmarkAdapter) GenerateReport() {
|
||||
// Delegate to main benchmark report generator
|
||||
// We'll add the results to a file
|
||||
fmt.Println("\n=== Benchmark Results Summary ===")
|
||||
ba.mu.RLock()
|
||||
defer ba.mu.RUnlock()
|
||||
|
||||
for _, result := range ba.results {
|
||||
ba.printResult(result)
|
||||
}
|
||||
}
|
||||
|
||||
func (ba *BenchmarkAdapter) GenerateAsciidocReport() {
|
||||
// TODO: Implement asciidoc report generation
|
||||
fmt.Println("Asciidoc report generation not yet implemented for adapter")
|
||||
}
|
||||
|
||||
func calculateAverage(durations []time.Duration) time.Duration {
|
||||
if len(durations) == 0 {
|
||||
return 0
|
||||
}
|
||||
|
||||
var total time.Duration
|
||||
for _, d := range durations {
|
||||
total += d
|
||||
}
|
||||
return total / time.Duration(len(durations))
|
||||
}
|
||||
130
cmd/benchmark/dgraph_benchmark.go
Normal file
130
cmd/benchmark/dgraph_benchmark.go
Normal file
@@ -0,0 +1,130 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"next.orly.dev/pkg/database"
|
||||
_ "next.orly.dev/pkg/dgraph" // Import to register dgraph factory
|
||||
)
|
||||
|
||||
// DgraphBenchmark wraps a Benchmark with dgraph-specific setup
|
||||
type DgraphBenchmark struct {
|
||||
config *BenchmarkConfig
|
||||
docker *DgraphDocker
|
||||
database database.Database
|
||||
bench *BenchmarkAdapter
|
||||
}
|
||||
|
||||
// NewDgraphBenchmark creates a new dgraph benchmark instance
|
||||
func NewDgraphBenchmark(config *BenchmarkConfig) (*DgraphBenchmark, error) {
|
||||
// Create Docker manager
|
||||
docker := NewDgraphDocker()
|
||||
|
||||
// Start dgraph containers
|
||||
ctx := context.Background()
|
||||
if err := docker.Start(ctx); err != nil {
|
||||
return nil, fmt.Errorf("failed to start dgraph: %w", err)
|
||||
}
|
||||
|
||||
// Set environment variable for dgraph connection
|
||||
os.Setenv("ORLY_DGRAPH_URL", docker.GetGRPCEndpoint())
|
||||
|
||||
// Create database instance using dgraph backend
|
||||
cancel := func() {}
|
||||
db, err := database.NewDatabase(ctx, cancel, "dgraph", config.DataDir, "warn")
|
||||
if err != nil {
|
||||
docker.Stop()
|
||||
return nil, fmt.Errorf("failed to create dgraph database: %w", err)
|
||||
}
|
||||
|
||||
// Wait for database to be ready
|
||||
fmt.Println("Waiting for dgraph database to be ready...")
|
||||
select {
|
||||
case <-db.Ready():
|
||||
fmt.Println("Dgraph database is ready")
|
||||
case <-time.After(30 * time.Second):
|
||||
db.Close()
|
||||
docker.Stop()
|
||||
return nil, fmt.Errorf("dgraph database failed to become ready")
|
||||
}
|
||||
|
||||
// Create adapter to use Database interface with Benchmark
|
||||
adapter := NewBenchmarkAdapter(config, db)
|
||||
|
||||
dgraphBench := &DgraphBenchmark{
|
||||
config: config,
|
||||
docker: docker,
|
||||
database: db,
|
||||
bench: adapter,
|
||||
}
|
||||
|
||||
return dgraphBench, nil
|
||||
}
|
||||
|
||||
// Close closes the dgraph benchmark and stops Docker containers
|
||||
func (dgb *DgraphBenchmark) Close() {
|
||||
fmt.Println("Closing dgraph benchmark...")
|
||||
|
||||
if dgb.database != nil {
|
||||
dgb.database.Close()
|
||||
}
|
||||
|
||||
if dgb.docker != nil {
|
||||
if err := dgb.docker.Stop(); err != nil {
|
||||
log.Printf("Error stopping dgraph Docker: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// RunSuite runs the benchmark suite on dgraph
|
||||
func (dgb *DgraphBenchmark) RunSuite() {
|
||||
fmt.Println("\n╔════════════════════════════════════════════════════════╗")
|
||||
fmt.Println("║ DGRAPH BACKEND BENCHMARK SUITE ║")
|
||||
fmt.Println("╚════════════════════════════════════════════════════════╝")
|
||||
|
||||
// Run only one round for dgraph to keep benchmark time reasonable
|
||||
fmt.Printf("\n=== Starting dgraph benchmark ===\n")
|
||||
|
||||
fmt.Printf("RunPeakThroughputTest (dgraph)..\n")
|
||||
dgb.bench.RunPeakThroughputTest()
|
||||
fmt.Println("Wiping database between tests...")
|
||||
dgb.database.Wipe()
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
fmt.Printf("RunBurstPatternTest (dgraph)..\n")
|
||||
dgb.bench.RunBurstPatternTest()
|
||||
fmt.Println("Wiping database between tests...")
|
||||
dgb.database.Wipe()
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
fmt.Printf("RunMixedReadWriteTest (dgraph)..\n")
|
||||
dgb.bench.RunMixedReadWriteTest()
|
||||
fmt.Println("Wiping database between tests...")
|
||||
dgb.database.Wipe()
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
fmt.Printf("RunQueryTest (dgraph)..\n")
|
||||
dgb.bench.RunQueryTest()
|
||||
fmt.Println("Wiping database between tests...")
|
||||
dgb.database.Wipe()
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
fmt.Printf("RunConcurrentQueryStoreTest (dgraph)..\n")
|
||||
dgb.bench.RunConcurrentQueryStoreTest()
|
||||
|
||||
fmt.Printf("\n=== Dgraph benchmark completed ===\n\n")
|
||||
}
|
||||
|
||||
// GenerateReport generates the benchmark report
|
||||
func (dgb *DgraphBenchmark) GenerateReport() {
|
||||
dgb.bench.GenerateReport()
|
||||
}
|
||||
|
||||
// GenerateAsciidocReport generates asciidoc format report
|
||||
func (dgb *DgraphBenchmark) GenerateAsciidocReport() {
|
||||
dgb.bench.GenerateAsciidocReport()
|
||||
}
|
||||
160
cmd/benchmark/dgraph_docker.go
Normal file
160
cmd/benchmark/dgraph_docker.go
Normal file
@@ -0,0 +1,160 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"time"
|
||||
)
|
||||
|
||||
// DgraphDocker manages a dgraph instance via Docker Compose
|
||||
type DgraphDocker struct {
|
||||
composeFile string
|
||||
projectName string
|
||||
running bool
|
||||
}
|
||||
|
||||
// NewDgraphDocker creates a new dgraph Docker manager
|
||||
func NewDgraphDocker() *DgraphDocker {
|
||||
// Try to find the docker-compose file in the current directory first
|
||||
composeFile := "docker-compose-dgraph.yml"
|
||||
|
||||
// If not found, try the cmd/benchmark directory (for running from project root)
|
||||
if _, err := os.Stat(composeFile); os.IsNotExist(err) {
|
||||
composeFile = filepath.Join("cmd", "benchmark", "docker-compose-dgraph.yml")
|
||||
}
|
||||
|
||||
return &DgraphDocker{
|
||||
composeFile: composeFile,
|
||||
projectName: "orly-benchmark-dgraph",
|
||||
running: false,
|
||||
}
|
||||
}
|
||||
|
||||
// Start starts the dgraph Docker containers
|
||||
func (d *DgraphDocker) Start(ctx context.Context) error {
|
||||
fmt.Println("Starting dgraph Docker containers...")
|
||||
|
||||
// Stop any existing containers first
|
||||
d.Stop()
|
||||
|
||||
// Start containers
|
||||
cmd := exec.CommandContext(
|
||||
ctx,
|
||||
"docker-compose",
|
||||
"-f", d.composeFile,
|
||||
"-p", d.projectName,
|
||||
"up", "-d",
|
||||
)
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stderr = os.Stderr
|
||||
|
||||
if err := cmd.Run(); err != nil {
|
||||
return fmt.Errorf("failed to start dgraph containers: %w", err)
|
||||
}
|
||||
|
||||
fmt.Println("Waiting for dgraph to be healthy...")
|
||||
|
||||
// Wait for health checks to pass
|
||||
if err := d.waitForHealthy(ctx, 60*time.Second); err != nil {
|
||||
d.Stop() // Clean up on failure
|
||||
return err
|
||||
}
|
||||
|
||||
d.running = true
|
||||
fmt.Println("Dgraph is ready!")
|
||||
return nil
|
||||
}
|
||||
|
||||
// waitForHealthy waits for dgraph to become healthy
|
||||
func (d *DgraphDocker) waitForHealthy(ctx context.Context, timeout time.Duration) error {
|
||||
deadline := time.Now().Add(timeout)
|
||||
|
||||
for time.Now().Before(deadline) {
|
||||
// Check if alpha is healthy by checking docker health status
|
||||
cmd := exec.CommandContext(
|
||||
ctx,
|
||||
"docker",
|
||||
"inspect",
|
||||
"--format={{.State.Health.Status}}",
|
||||
"orly-benchmark-dgraph-alpha",
|
||||
)
|
||||
|
||||
output, err := cmd.Output()
|
||||
if err == nil && string(output) == "healthy\n" {
|
||||
// Additional short wait to ensure full readiness
|
||||
time.Sleep(2 * time.Second)
|
||||
return nil
|
||||
}
|
||||
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return ctx.Err()
|
||||
case <-time.After(2 * time.Second):
|
||||
// Continue waiting
|
||||
}
|
||||
}
|
||||
|
||||
return fmt.Errorf("dgraph failed to become healthy within %v", timeout)
|
||||
}
|
||||
|
||||
// Stop stops and removes the dgraph Docker containers
|
||||
func (d *DgraphDocker) Stop() error {
|
||||
if !d.running {
|
||||
// Try to stop anyway in case of untracked state
|
||||
cmd := exec.Command(
|
||||
"docker-compose",
|
||||
"-f", d.composeFile,
|
||||
"-p", d.projectName,
|
||||
"down", "-v",
|
||||
)
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stderr = os.Stderr
|
||||
_ = cmd.Run() // Ignore errors
|
||||
return nil
|
||||
}
|
||||
|
||||
fmt.Println("Stopping dgraph Docker containers...")
|
||||
|
||||
cmd := exec.Command(
|
||||
"docker-compose",
|
||||
"-f", d.composeFile,
|
||||
"-p", d.projectName,
|
||||
"down", "-v",
|
||||
)
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stderr = os.Stderr
|
||||
|
||||
if err := cmd.Run(); err != nil {
|
||||
return fmt.Errorf("failed to stop dgraph containers: %w", err)
|
||||
}
|
||||
|
||||
d.running = false
|
||||
fmt.Println("Dgraph containers stopped")
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetGRPCEndpoint returns the dgraph gRPC endpoint
|
||||
func (d *DgraphDocker) GetGRPCEndpoint() string {
|
||||
return "localhost:9080"
|
||||
}
|
||||
|
||||
// IsRunning returns whether dgraph is running
|
||||
func (d *DgraphDocker) IsRunning() bool {
|
||||
return d.running
|
||||
}
|
||||
|
||||
// Logs returns the logs from dgraph containers
|
||||
func (d *DgraphDocker) Logs() error {
|
||||
cmd := exec.Command(
|
||||
"docker-compose",
|
||||
"-f", d.composeFile,
|
||||
"-p", d.projectName,
|
||||
"logs",
|
||||
)
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stderr = os.Stderr
|
||||
return cmd.Run()
|
||||
}
|
||||
44
cmd/benchmark/docker-compose-dgraph.yml
Normal file
44
cmd/benchmark/docker-compose-dgraph.yml
Normal file
@@ -0,0 +1,44 @@
|
||||
version: "3.9"
|
||||
|
||||
services:
|
||||
dgraph-zero:
|
||||
image: dgraph/dgraph:v23.1.0
|
||||
container_name: orly-benchmark-dgraph-zero
|
||||
working_dir: /data/zero
|
||||
ports:
|
||||
- "5080:5080"
|
||||
- "6080:6080"
|
||||
command: dgraph zero --my=dgraph-zero:5080
|
||||
networks:
|
||||
- orly-benchmark
|
||||
healthcheck:
|
||||
test: ["CMD", "sh", "-c", "dgraph version || exit 1"]
|
||||
interval: 5s
|
||||
timeout: 3s
|
||||
retries: 3
|
||||
start_period: 5s
|
||||
|
||||
dgraph-alpha:
|
||||
image: dgraph/dgraph:v23.1.0
|
||||
container_name: orly-benchmark-dgraph-alpha
|
||||
working_dir: /data/alpha
|
||||
ports:
|
||||
- "8080:8080"
|
||||
- "9080:9080"
|
||||
command: dgraph alpha --my=dgraph-alpha:7080 --zero=dgraph-zero:5080 --security whitelist=0.0.0.0/0
|
||||
networks:
|
||||
- orly-benchmark
|
||||
depends_on:
|
||||
dgraph-zero:
|
||||
condition: service_healthy
|
||||
healthcheck:
|
||||
test: ["CMD", "sh", "-c", "dgraph version || exit 1"]
|
||||
interval: 5s
|
||||
timeout: 3s
|
||||
retries: 6
|
||||
start_period: 10s
|
||||
|
||||
networks:
|
||||
orly-benchmark:
|
||||
name: orly-benchmark-network
|
||||
driver: bridge
|
||||
37
cmd/benchmark/docker-compose-neo4j.yml
Normal file
37
cmd/benchmark/docker-compose-neo4j.yml
Normal file
@@ -0,0 +1,37 @@
|
||||
version: "3.9"
|
||||
|
||||
services:
|
||||
neo4j:
|
||||
image: neo4j:5.15-community
|
||||
container_name: orly-benchmark-neo4j
|
||||
ports:
|
||||
- "7474:7474" # HTTP
|
||||
- "7687:7687" # Bolt
|
||||
environment:
|
||||
- NEO4J_AUTH=neo4j/benchmark123
|
||||
- NEO4J_server_memory_heap_initial__size=2G
|
||||
- NEO4J_server_memory_heap_max__size=4G
|
||||
- NEO4J_server_memory_pagecache_size=2G
|
||||
- NEO4J_dbms_security_procedures_unrestricted=apoc.*
|
||||
- NEO4J_dbms_security_procedures_allowlist=apoc.*
|
||||
- NEO4JLABS_PLUGINS=["apoc"]
|
||||
volumes:
|
||||
- neo4j-data:/data
|
||||
- neo4j-logs:/logs
|
||||
networks:
|
||||
- orly-benchmark
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "cypher-shell -u neo4j -p benchmark123 'RETURN 1;' || exit 1"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 10
|
||||
start_period: 40s
|
||||
|
||||
networks:
|
||||
orly-benchmark:
|
||||
name: orly-benchmark-network
|
||||
driver: bridge
|
||||
|
||||
volumes:
|
||||
neo4j-data:
|
||||
neo4j-logs:
|
||||
@@ -1,19 +1,20 @@
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
# Next.orly.dev relay (this repository)
|
||||
next-orly:
|
||||
# Next.orly.dev relay with Badger (this repository)
|
||||
next-orly-badger:
|
||||
build:
|
||||
context: ../..
|
||||
dockerfile: cmd/benchmark/Dockerfile.next-orly
|
||||
container_name: benchmark-next-orly
|
||||
container_name: benchmark-next-orly-badger
|
||||
environment:
|
||||
- ORLY_DATA_DIR=/data
|
||||
- ORLY_LISTEN=0.0.0.0
|
||||
- ORLY_PORT=8080
|
||||
- ORLY_LOG_LEVEL=off
|
||||
- ORLY_DB_TYPE=badger
|
||||
volumes:
|
||||
- ./data/next-orly:/data
|
||||
- ./data/next-orly-badger:/data
|
||||
ports:
|
||||
- "8001:8080"
|
||||
networks:
|
||||
@@ -25,6 +26,136 @@ services:
|
||||
retries: 3
|
||||
start_period: 40s
|
||||
|
||||
# Next.orly.dev relay with DGraph (this repository)
|
||||
next-orly-dgraph:
|
||||
build:
|
||||
context: ../..
|
||||
dockerfile: cmd/benchmark/Dockerfile.next-orly
|
||||
container_name: benchmark-next-orly-dgraph
|
||||
environment:
|
||||
- ORLY_DATA_DIR=/data
|
||||
- ORLY_LISTEN=0.0.0.0
|
||||
- ORLY_PORT=8080
|
||||
- ORLY_LOG_LEVEL=off
|
||||
- ORLY_DB_TYPE=dgraph
|
||||
- ORLY_DGRAPH_URL=dgraph-alpha:9080
|
||||
volumes:
|
||||
- ./data/next-orly-dgraph:/data
|
||||
ports:
|
||||
- "8007:8080"
|
||||
networks:
|
||||
- benchmark-net
|
||||
depends_on:
|
||||
dgraph-alpha:
|
||||
condition: service_healthy
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8080/"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 60s
|
||||
|
||||
# DGraph Zero - cluster coordinator
|
||||
dgraph-zero:
|
||||
image: dgraph/dgraph:v23.1.0
|
||||
container_name: benchmark-dgraph-zero
|
||||
working_dir: /data/zero
|
||||
ports:
|
||||
- "5080:5080"
|
||||
- "6080:6080"
|
||||
volumes:
|
||||
- ./data/dgraph-zero:/data
|
||||
command: dgraph zero --my=dgraph-zero:5080
|
||||
networks:
|
||||
- benchmark-net
|
||||
healthcheck:
|
||||
test: ["CMD", "sh", "-c", "dgraph version || exit 1"]
|
||||
interval: 5s
|
||||
timeout: 3s
|
||||
retries: 3
|
||||
start_period: 5s
|
||||
|
||||
# DGraph Alpha - data node
|
||||
dgraph-alpha:
|
||||
image: dgraph/dgraph:v23.1.0
|
||||
container_name: benchmark-dgraph-alpha
|
||||
working_dir: /data/alpha
|
||||
ports:
|
||||
- "8088:8080"
|
||||
- "9080:9080"
|
||||
volumes:
|
||||
- ./data/dgraph-alpha:/data
|
||||
command: dgraph alpha --my=dgraph-alpha:7080 --zero=dgraph-zero:5080 --security whitelist=0.0.0.0/0
|
||||
networks:
|
||||
- benchmark-net
|
||||
depends_on:
|
||||
dgraph-zero:
|
||||
condition: service_healthy
|
||||
healthcheck:
|
||||
test: ["CMD", "sh", "-c", "dgraph version || exit 1"]
|
||||
interval: 5s
|
||||
timeout: 3s
|
||||
retries: 6
|
||||
start_period: 10s
|
||||
|
||||
# Next.orly.dev relay with Neo4j (this repository)
|
||||
next-orly-neo4j:
|
||||
build:
|
||||
context: ../..
|
||||
dockerfile: cmd/benchmark/Dockerfile.next-orly
|
||||
container_name: benchmark-next-orly-neo4j
|
||||
environment:
|
||||
- ORLY_DATA_DIR=/data
|
||||
- ORLY_LISTEN=0.0.0.0
|
||||
- ORLY_PORT=8080
|
||||
- ORLY_LOG_LEVEL=off
|
||||
- ORLY_DB_TYPE=neo4j
|
||||
- ORLY_NEO4J_URI=bolt://neo4j:7687
|
||||
- ORLY_NEO4J_USER=neo4j
|
||||
- ORLY_NEO4J_PASSWORD=benchmark123
|
||||
volumes:
|
||||
- ./data/next-orly-neo4j:/data
|
||||
ports:
|
||||
- "8008:8080"
|
||||
networks:
|
||||
- benchmark-net
|
||||
depends_on:
|
||||
neo4j:
|
||||
condition: service_healthy
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8080/"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 60s
|
||||
|
||||
# Neo4j database
|
||||
neo4j:
|
||||
image: neo4j:5.15-community
|
||||
container_name: benchmark-neo4j
|
||||
ports:
|
||||
- "7474:7474" # HTTP
|
||||
- "7687:7687" # Bolt
|
||||
environment:
|
||||
- NEO4J_AUTH=neo4j/benchmark123
|
||||
- NEO4J_server_memory_heap_initial__size=2G
|
||||
- NEO4J_server_memory_heap_max__size=4G
|
||||
- NEO4J_server_memory_pagecache_size=2G
|
||||
- NEO4J_dbms_security_procedures_unrestricted=apoc.*
|
||||
- NEO4J_dbms_security_procedures_allowlist=apoc.*
|
||||
- NEO4JLABS_PLUGINS=["apoc"]
|
||||
volumes:
|
||||
- ./data/neo4j:/data
|
||||
- ./data/neo4j-logs:/logs
|
||||
networks:
|
||||
- benchmark-net
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "cypher-shell -u neo4j -p benchmark123 'RETURN 1;' || exit 1"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 10
|
||||
start_period: 40s
|
||||
|
||||
# Khatru with SQLite
|
||||
khatru-sqlite:
|
||||
build:
|
||||
@@ -145,7 +276,11 @@ services:
|
||||
dockerfile: cmd/benchmark/Dockerfile.benchmark
|
||||
container_name: benchmark-runner
|
||||
depends_on:
|
||||
next-orly:
|
||||
next-orly-badger:
|
||||
condition: service_healthy
|
||||
next-orly-dgraph:
|
||||
condition: service_healthy
|
||||
next-orly-neo4j:
|
||||
condition: service_healthy
|
||||
khatru-sqlite:
|
||||
condition: service_healthy
|
||||
@@ -158,7 +293,7 @@ services:
|
||||
nostr-rs-relay:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
- BENCHMARK_TARGETS=next-orly:8080,khatru-sqlite:3334,khatru-badger:3334,relayer-basic:7447,strfry:8080,nostr-rs-relay:8080
|
||||
- BENCHMARK_TARGETS=next-orly-badger:8080,next-orly-dgraph:8080,next-orly-neo4j:8080,khatru-sqlite:3334,khatru-badger:3334,relayer-basic:7447,strfry:8080,nostr-rs-relay:8080
|
||||
- BENCHMARK_EVENTS=50000
|
||||
- BENCHMARK_WORKERS=24
|
||||
- BENCHMARK_DURATION=60s
|
||||
|
||||
257
cmd/benchmark/event_stream.go
Normal file
257
cmd/benchmark/event_stream.go
Normal file
@@ -0,0 +1,257 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math"
|
||||
"math/rand"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"next.orly.dev/pkg/encoders/event"
|
||||
"next.orly.dev/pkg/encoders/tag"
|
||||
"next.orly.dev/pkg/encoders/timestamp"
|
||||
"next.orly.dev/pkg/interfaces/signer/p8k"
|
||||
)
|
||||
|
||||
// EventStream manages disk-based event generation to avoid memory bloat
|
||||
type EventStream struct {
|
||||
baseDir string
|
||||
count int
|
||||
chunkSize int
|
||||
rng *rand.Rand
|
||||
}
|
||||
|
||||
// NewEventStream creates a new event stream that stores events on disk
|
||||
func NewEventStream(baseDir string, count int) (*EventStream, error) {
|
||||
// Create events directory
|
||||
eventsDir := filepath.Join(baseDir, "events")
|
||||
if err := os.MkdirAll(eventsDir, 0755); err != nil {
|
||||
return nil, fmt.Errorf("failed to create events directory: %w", err)
|
||||
}
|
||||
|
||||
return &EventStream{
|
||||
baseDir: eventsDir,
|
||||
count: count,
|
||||
chunkSize: 1000, // Store 1000 events per file to balance I/O
|
||||
rng: rand.New(rand.NewSource(time.Now().UnixNano())),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Generate creates all events and stores them in chunk files
|
||||
func (es *EventStream) Generate() error {
|
||||
numChunks := (es.count + es.chunkSize - 1) / es.chunkSize
|
||||
|
||||
for chunk := 0; chunk < numChunks; chunk++ {
|
||||
chunkFile := filepath.Join(es.baseDir, fmt.Sprintf("chunk_%04d.jsonl", chunk))
|
||||
f, err := os.Create(chunkFile)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create chunk file %s: %w", chunkFile, err)
|
||||
}
|
||||
|
||||
writer := bufio.NewWriter(f)
|
||||
startIdx := chunk * es.chunkSize
|
||||
endIdx := min(startIdx+es.chunkSize, es.count)
|
||||
|
||||
for i := startIdx; i < endIdx; i++ {
|
||||
ev, err := es.generateEvent(i)
|
||||
if err != nil {
|
||||
f.Close()
|
||||
return fmt.Errorf("failed to generate event %d: %w", i, err)
|
||||
}
|
||||
|
||||
// Marshal event to JSON
|
||||
eventJSON, err := json.Marshal(ev)
|
||||
if err != nil {
|
||||
f.Close()
|
||||
return fmt.Errorf("failed to marshal event %d: %w", i, err)
|
||||
}
|
||||
|
||||
// Write JSON line
|
||||
if _, err := writer.Write(eventJSON); err != nil {
|
||||
f.Close()
|
||||
return fmt.Errorf("failed to write event %d: %w", i, err)
|
||||
}
|
||||
if _, err := writer.WriteString("\n"); err != nil {
|
||||
f.Close()
|
||||
return fmt.Errorf("failed to write newline after event %d: %w", i, err)
|
||||
}
|
||||
}
|
||||
|
||||
if err := writer.Flush(); err != nil {
|
||||
f.Close()
|
||||
return fmt.Errorf("failed to flush chunk file %s: %w", chunkFile, err)
|
||||
}
|
||||
|
||||
if err := f.Close(); err != nil {
|
||||
return fmt.Errorf("failed to close chunk file %s: %w", chunkFile, err)
|
||||
}
|
||||
|
||||
if (chunk+1)%10 == 0 || chunk == numChunks-1 {
|
||||
fmt.Printf(" Generated %d/%d events (%.1f%%)\n",
|
||||
endIdx, es.count, float64(endIdx)/float64(es.count)*100)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// generateEvent creates a single event with realistic size distribution
|
||||
func (es *EventStream) generateEvent(index int) (*event.E, error) {
|
||||
// Create signer for this event
|
||||
keys, err := p8k.New()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create signer: %w", err)
|
||||
}
|
||||
if err := keys.Generate(); err != nil {
|
||||
return nil, fmt.Errorf("failed to generate keys: %w", err)
|
||||
}
|
||||
|
||||
ev := event.New()
|
||||
ev.Kind = 1 // Text note
|
||||
ev.CreatedAt = timestamp.Now().I64()
|
||||
|
||||
// Add some tags for realism
|
||||
numTags := es.rng.Intn(5)
|
||||
tags := make([]*tag.T, 0, numTags)
|
||||
for i := 0; i < numTags; i++ {
|
||||
tags = append(tags, tag.NewFromBytesSlice(
|
||||
[]byte("t"),
|
||||
[]byte(fmt.Sprintf("tag%d", es.rng.Intn(100))),
|
||||
))
|
||||
}
|
||||
ev.Tags = tag.NewS(tags...)
|
||||
|
||||
// Generate content with log-distributed size
|
||||
contentSize := es.generateLogDistributedSize()
|
||||
ev.Content = []byte(es.generateRandomContent(contentSize))
|
||||
|
||||
// Sign the event
|
||||
if err := ev.Sign(keys); err != nil {
|
||||
return nil, fmt.Errorf("failed to sign event: %w", err)
|
||||
}
|
||||
|
||||
return ev, nil
|
||||
}
|
||||
|
||||
// generateLogDistributedSize generates sizes following a power law distribution
|
||||
// This creates realistic size distribution:
|
||||
// - Most events are small (< 1KB)
|
||||
// - Some events are medium (1-10KB)
|
||||
// - Few events are large (10-100KB)
|
||||
func (es *EventStream) generateLogDistributedSize() int {
|
||||
// Use power law with exponent 4.0 for strong skew toward small sizes
|
||||
const powerExponent = 4.0
|
||||
uniform := es.rng.Float64()
|
||||
skewed := math.Pow(uniform, powerExponent)
|
||||
|
||||
// Scale to max size of 100KB
|
||||
const maxSize = 100 * 1024
|
||||
size := int(skewed * maxSize)
|
||||
|
||||
// Ensure minimum size of 10 bytes
|
||||
if size < 10 {
|
||||
size = 10
|
||||
}
|
||||
|
||||
return size
|
||||
}
|
||||
|
||||
// generateRandomContent creates random text content of specified size
|
||||
func (es *EventStream) generateRandomContent(size int) string {
|
||||
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 \n"
|
||||
content := make([]byte, size)
|
||||
for i := range content {
|
||||
content[i] = charset[es.rng.Intn(len(charset))]
|
||||
}
|
||||
return string(content)
|
||||
}
|
||||
|
||||
// GetEventChannel returns a channel that streams events from disk
|
||||
// bufferSize controls memory usage - larger buffers improve throughput but use more memory
|
||||
func (es *EventStream) GetEventChannel(bufferSize int) (<-chan *event.E, <-chan error) {
|
||||
eventChan := make(chan *event.E, bufferSize)
|
||||
errChan := make(chan error, 1)
|
||||
|
||||
go func() {
|
||||
defer close(eventChan)
|
||||
defer close(errChan)
|
||||
|
||||
numChunks := (es.count + es.chunkSize - 1) / es.chunkSize
|
||||
|
||||
for chunk := 0; chunk < numChunks; chunk++ {
|
||||
chunkFile := filepath.Join(es.baseDir, fmt.Sprintf("chunk_%04d.jsonl", chunk))
|
||||
f, err := os.Open(chunkFile)
|
||||
if err != nil {
|
||||
errChan <- fmt.Errorf("failed to open chunk file %s: %w", chunkFile, err)
|
||||
return
|
||||
}
|
||||
|
||||
scanner := bufio.NewScanner(f)
|
||||
// Increase buffer size for large events
|
||||
buf := make([]byte, 0, 64*1024)
|
||||
scanner.Buffer(buf, 1024*1024) // Max 1MB per line
|
||||
|
||||
for scanner.Scan() {
|
||||
var ev event.E
|
||||
if err := json.Unmarshal(scanner.Bytes(), &ev); err != nil {
|
||||
f.Close()
|
||||
errChan <- fmt.Errorf("failed to unmarshal event: %w", err)
|
||||
return
|
||||
}
|
||||
eventChan <- &ev
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
f.Close()
|
||||
errChan <- fmt.Errorf("error reading chunk file %s: %w", chunkFile, err)
|
||||
return
|
||||
}
|
||||
|
||||
f.Close()
|
||||
}
|
||||
}()
|
||||
|
||||
return eventChan, errChan
|
||||
}
|
||||
|
||||
// ForEach iterates over all events without loading them all into memory
|
||||
func (es *EventStream) ForEach(fn func(*event.E) error) error {
|
||||
numChunks := (es.count + es.chunkSize - 1) / es.chunkSize
|
||||
|
||||
for chunk := 0; chunk < numChunks; chunk++ {
|
||||
chunkFile := filepath.Join(es.baseDir, fmt.Sprintf("chunk_%04d.jsonl", chunk))
|
||||
f, err := os.Open(chunkFile)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to open chunk file %s: %w", chunkFile, err)
|
||||
}
|
||||
|
||||
scanner := bufio.NewScanner(f)
|
||||
buf := make([]byte, 0, 64*1024)
|
||||
scanner.Buffer(buf, 1024*1024)
|
||||
|
||||
for scanner.Scan() {
|
||||
var ev event.E
|
||||
if err := json.Unmarshal(scanner.Bytes(), &ev); err != nil {
|
||||
f.Close()
|
||||
return fmt.Errorf("failed to unmarshal event: %w", err)
|
||||
}
|
||||
|
||||
if err := fn(&ev); err != nil {
|
||||
f.Close()
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
f.Close()
|
||||
return fmt.Errorf("error reading chunk file %s: %w", chunkFile, err)
|
||||
}
|
||||
|
||||
f.Close()
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
173
cmd/benchmark/latency_recorder.go
Normal file
173
cmd/benchmark/latency_recorder.go
Normal file
@@ -0,0 +1,173 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
// LatencyRecorder writes latency measurements to disk to avoid memory bloat
|
||||
type LatencyRecorder struct {
|
||||
file *os.File
|
||||
writer *bufio.Writer
|
||||
mu sync.Mutex
|
||||
count int64
|
||||
}
|
||||
|
||||
// LatencyStats contains calculated latency statistics
|
||||
type LatencyStats struct {
|
||||
Avg time.Duration
|
||||
P90 time.Duration
|
||||
P95 time.Duration
|
||||
P99 time.Duration
|
||||
Bottom10 time.Duration
|
||||
Count int64
|
||||
}
|
||||
|
||||
// NewLatencyRecorder creates a new latency recorder that writes to disk
|
||||
func NewLatencyRecorder(baseDir string, testName string) (*LatencyRecorder, error) {
|
||||
latencyFile := filepath.Join(baseDir, fmt.Sprintf("latency_%s.bin", testName))
|
||||
f, err := os.Create(latencyFile)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create latency file: %w", err)
|
||||
}
|
||||
|
||||
return &LatencyRecorder{
|
||||
file: f,
|
||||
writer: bufio.NewWriter(f),
|
||||
count: 0,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Record writes a latency measurement to disk (8 bytes per measurement)
|
||||
func (lr *LatencyRecorder) Record(latency time.Duration) error {
|
||||
lr.mu.Lock()
|
||||
defer lr.mu.Unlock()
|
||||
|
||||
// Write latency as 8-byte value (int64 nanoseconds)
|
||||
buf := make([]byte, 8)
|
||||
binary.LittleEndian.PutUint64(buf, uint64(latency.Nanoseconds()))
|
||||
|
||||
if _, err := lr.writer.Write(buf); err != nil {
|
||||
return fmt.Errorf("failed to write latency: %w", err)
|
||||
}
|
||||
|
||||
lr.count++
|
||||
return nil
|
||||
}
|
||||
|
||||
// Close flushes and closes the latency file
|
||||
func (lr *LatencyRecorder) Close() error {
|
||||
lr.mu.Lock()
|
||||
defer lr.mu.Unlock()
|
||||
|
||||
if err := lr.writer.Flush(); err != nil {
|
||||
return fmt.Errorf("failed to flush latency file: %w", err)
|
||||
}
|
||||
|
||||
if err := lr.file.Close(); err != nil {
|
||||
return fmt.Errorf("failed to close latency file: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// CalculateStats reads all latencies from disk, sorts them, and calculates statistics
|
||||
// This is done on-demand to avoid keeping all latencies in memory during the test
|
||||
func (lr *LatencyRecorder) CalculateStats() (*LatencyStats, error) {
|
||||
lr.mu.Lock()
|
||||
filePath := lr.file.Name()
|
||||
count := lr.count
|
||||
lr.mu.Unlock()
|
||||
|
||||
// If no measurements, return zeros
|
||||
if count == 0 {
|
||||
return &LatencyStats{
|
||||
Avg: 0,
|
||||
P90: 0,
|
||||
P95: 0,
|
||||
P99: 0,
|
||||
Bottom10: 0,
|
||||
Count: 0,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Open file for reading
|
||||
f, err := os.Open(filePath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open latency file for reading: %w", err)
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
// Read all latencies into memory temporarily for sorting
|
||||
latencies := make([]time.Duration, 0, count)
|
||||
buf := make([]byte, 8)
|
||||
reader := bufio.NewReader(f)
|
||||
|
||||
for {
|
||||
n, err := reader.Read(buf)
|
||||
if err != nil {
|
||||
if err.Error() == "EOF" {
|
||||
break
|
||||
}
|
||||
return nil, fmt.Errorf("failed to read latency data: %w", err)
|
||||
}
|
||||
if n != 8 {
|
||||
break
|
||||
}
|
||||
|
||||
nanos := binary.LittleEndian.Uint64(buf)
|
||||
latencies = append(latencies, time.Duration(nanos))
|
||||
}
|
||||
|
||||
// Check if we actually got any latencies
|
||||
if len(latencies) == 0 {
|
||||
return &LatencyStats{
|
||||
Avg: 0,
|
||||
P90: 0,
|
||||
P95: 0,
|
||||
P99: 0,
|
||||
Bottom10: 0,
|
||||
Count: 0,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Sort for percentile calculation
|
||||
sort.Slice(latencies, func(i, j int) bool {
|
||||
return latencies[i] < latencies[j]
|
||||
})
|
||||
|
||||
// Calculate statistics
|
||||
stats := &LatencyStats{
|
||||
Count: int64(len(latencies)),
|
||||
}
|
||||
|
||||
// Average
|
||||
var sum time.Duration
|
||||
for _, lat := range latencies {
|
||||
sum += lat
|
||||
}
|
||||
stats.Avg = sum / time.Duration(len(latencies))
|
||||
|
||||
// Percentiles
|
||||
stats.P90 = latencies[int(float64(len(latencies))*0.90)]
|
||||
stats.P95 = latencies[int(float64(len(latencies))*0.95)]
|
||||
stats.P99 = latencies[int(float64(len(latencies))*0.99)]
|
||||
|
||||
// Bottom 10% average
|
||||
bottom10Count := int(float64(len(latencies)) * 0.10)
|
||||
if bottom10Count > 0 {
|
||||
var bottom10Sum time.Duration
|
||||
for i := 0; i < bottom10Count; i++ {
|
||||
bottom10Sum += latencies[i]
|
||||
}
|
||||
stats.Bottom10 = bottom10Sum / time.Duration(bottom10Count)
|
||||
}
|
||||
|
||||
return stats, nil
|
||||
}
|
||||
@@ -1,7 +1,10 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
@@ -16,12 +19,13 @@ import (
|
||||
"next.orly.dev/pkg/database"
|
||||
"next.orly.dev/pkg/encoders/envelopes/eventenvelope"
|
||||
"next.orly.dev/pkg/encoders/event"
|
||||
examples "next.orly.dev/pkg/encoders/event/examples"
|
||||
"next.orly.dev/pkg/encoders/filter"
|
||||
"next.orly.dev/pkg/encoders/kind"
|
||||
"next.orly.dev/pkg/encoders/tag"
|
||||
"next.orly.dev/pkg/encoders/timestamp"
|
||||
"next.orly.dev/pkg/protocol/ws"
|
||||
"next.orly.dev/pkg/interfaces/signer/p8k"
|
||||
"next.orly.dev/pkg/protocol/ws"
|
||||
)
|
||||
|
||||
type BenchmarkConfig struct {
|
||||
@@ -36,6 +40,10 @@ type BenchmarkConfig struct {
|
||||
RelayURL string
|
||||
NetWorkers int
|
||||
NetRate int // events/sec per worker
|
||||
|
||||
// Backend selection
|
||||
UseDgraph bool
|
||||
UseNeo4j bool
|
||||
}
|
||||
|
||||
type BenchmarkResult struct {
|
||||
@@ -54,11 +62,46 @@ type BenchmarkResult struct {
|
||||
Errors []string
|
||||
}
|
||||
|
||||
// RateLimiter implements a simple token bucket rate limiter
|
||||
type RateLimiter struct {
|
||||
rate float64 // events per second
|
||||
interval time.Duration // time between events
|
||||
lastEvent time.Time
|
||||
mu sync.Mutex
|
||||
}
|
||||
|
||||
// NewRateLimiter creates a rate limiter for the specified events per second
|
||||
func NewRateLimiter(eventsPerSecond float64) *RateLimiter {
|
||||
return &RateLimiter{
|
||||
rate: eventsPerSecond,
|
||||
interval: time.Duration(float64(time.Second) / eventsPerSecond),
|
||||
lastEvent: time.Now(),
|
||||
}
|
||||
}
|
||||
|
||||
// Wait blocks until the next event is allowed based on the rate limit
|
||||
func (rl *RateLimiter) Wait() {
|
||||
rl.mu.Lock()
|
||||
defer rl.mu.Unlock()
|
||||
|
||||
now := time.Now()
|
||||
nextAllowed := rl.lastEvent.Add(rl.interval)
|
||||
|
||||
if now.Before(nextAllowed) {
|
||||
time.Sleep(nextAllowed.Sub(now))
|
||||
rl.lastEvent = nextAllowed
|
||||
} else {
|
||||
rl.lastEvent = now
|
||||
}
|
||||
}
|
||||
|
||||
type Benchmark struct {
|
||||
config *BenchmarkConfig
|
||||
db *database.D
|
||||
results []*BenchmarkResult
|
||||
mu sync.RWMutex
|
||||
config *BenchmarkConfig
|
||||
db *database.D
|
||||
results []*BenchmarkResult
|
||||
mu sync.RWMutex
|
||||
cachedEvents []*event.E // Real-world events from examples.Cache
|
||||
eventCacheMu sync.Mutex
|
||||
}
|
||||
|
||||
func main() {
|
||||
@@ -71,7 +114,20 @@ func main() {
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Printf("Starting Nostr Relay Benchmark\n")
|
||||
if config.UseDgraph {
|
||||
// Run dgraph benchmark
|
||||
runDgraphBenchmark(config)
|
||||
return
|
||||
}
|
||||
|
||||
if config.UseNeo4j {
|
||||
// Run Neo4j benchmark
|
||||
runNeo4jBenchmark(config)
|
||||
return
|
||||
}
|
||||
|
||||
// Run standard Badger benchmark
|
||||
fmt.Printf("Starting Nostr Relay Benchmark (Badger Backend)\n")
|
||||
fmt.Printf("Data Directory: %s\n", config.DataDir)
|
||||
fmt.Printf(
|
||||
"Events: %d, Workers: %d, Duration: %v\n",
|
||||
@@ -89,6 +145,50 @@ func main() {
|
||||
benchmark.GenerateAsciidocReport()
|
||||
}
|
||||
|
||||
func runDgraphBenchmark(config *BenchmarkConfig) {
|
||||
fmt.Printf("Starting Nostr Relay Benchmark (Dgraph Backend)\n")
|
||||
fmt.Printf("Data Directory: %s\n", config.DataDir)
|
||||
fmt.Printf(
|
||||
"Events: %d, Workers: %d\n",
|
||||
config.NumEvents, config.ConcurrentWorkers,
|
||||
)
|
||||
|
||||
dgraphBench, err := NewDgraphBenchmark(config)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create dgraph benchmark: %v", err)
|
||||
}
|
||||
defer dgraphBench.Close()
|
||||
|
||||
// Run dgraph benchmark suite
|
||||
dgraphBench.RunSuite()
|
||||
|
||||
// Generate reports
|
||||
dgraphBench.GenerateReport()
|
||||
dgraphBench.GenerateAsciidocReport()
|
||||
}
|
||||
|
||||
func runNeo4jBenchmark(config *BenchmarkConfig) {
|
||||
fmt.Printf("Starting Nostr Relay Benchmark (Neo4j Backend)\n")
|
||||
fmt.Printf("Data Directory: %s\n", config.DataDir)
|
||||
fmt.Printf(
|
||||
"Events: %d, Workers: %d\n",
|
||||
config.NumEvents, config.ConcurrentWorkers,
|
||||
)
|
||||
|
||||
neo4jBench, err := NewNeo4jBenchmark(config)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Neo4j benchmark: %v", err)
|
||||
}
|
||||
defer neo4jBench.Close()
|
||||
|
||||
// Run Neo4j benchmark suite
|
||||
neo4jBench.RunSuite()
|
||||
|
||||
// Generate reports
|
||||
neo4jBench.GenerateReport()
|
||||
neo4jBench.GenerateAsciidocReport()
|
||||
}
|
||||
|
||||
func parseFlags() *BenchmarkConfig {
|
||||
config := &BenchmarkConfig{}
|
||||
|
||||
@@ -99,8 +199,8 @@ func parseFlags() *BenchmarkConfig {
|
||||
&config.NumEvents, "events", 10000, "Number of events to generate",
|
||||
)
|
||||
flag.IntVar(
|
||||
&config.ConcurrentWorkers, "workers", runtime.NumCPU(),
|
||||
"Number of concurrent workers",
|
||||
&config.ConcurrentWorkers, "workers", max(2, runtime.NumCPU()/4),
|
||||
"Number of concurrent workers (default: CPU cores / 4 for low CPU usage)",
|
||||
)
|
||||
flag.DurationVar(
|
||||
&config.TestDuration, "duration", 60*time.Second, "Test duration",
|
||||
@@ -124,6 +224,16 @@ func parseFlags() *BenchmarkConfig {
|
||||
)
|
||||
flag.IntVar(&config.NetRate, "net-rate", 20, "Events per second per worker")
|
||||
|
||||
// Backend selection
|
||||
flag.BoolVar(
|
||||
&config.UseDgraph, "dgraph", false,
|
||||
"Use dgraph backend (requires Docker)",
|
||||
)
|
||||
flag.BoolVar(
|
||||
&config.UseNeo4j, "neo4j", false,
|
||||
"Use Neo4j backend (requires Docker)",
|
||||
)
|
||||
|
||||
flag.Parse()
|
||||
return config
|
||||
}
|
||||
@@ -286,7 +396,7 @@ func NewBenchmark(config *BenchmarkConfig) *Benchmark {
|
||||
ctx := context.Background()
|
||||
cancel := func() {}
|
||||
|
||||
db, err := database.New(ctx, cancel, config.DataDir, "info")
|
||||
db, err := database.New(ctx, cancel, config.DataDir, "warn")
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create database: %v", err)
|
||||
}
|
||||
@@ -309,31 +419,42 @@ func (b *Benchmark) Close() {
|
||||
}
|
||||
}
|
||||
|
||||
// RunSuite runs the three tests with a 10s pause between them and repeats the
|
||||
// set twice with a 10s pause between rounds.
|
||||
// RunSuite runs the full benchmark test suite
|
||||
func (b *Benchmark) RunSuite() {
|
||||
for round := 1; round <= 2; round++ {
|
||||
fmt.Printf("\n=== Starting test round %d/2 ===\n", round)
|
||||
fmt.Printf("RunPeakThroughputTest..\n")
|
||||
b.RunPeakThroughputTest()
|
||||
time.Sleep(10 * time.Second)
|
||||
fmt.Printf("RunBurstPatternTest..\n")
|
||||
b.RunBurstPatternTest()
|
||||
time.Sleep(10 * time.Second)
|
||||
fmt.Printf("RunMixedReadWriteTest..\n")
|
||||
b.RunMixedReadWriteTest()
|
||||
time.Sleep(10 * time.Second)
|
||||
fmt.Printf("RunQueryTest..\n")
|
||||
b.RunQueryTest()
|
||||
time.Sleep(10 * time.Second)
|
||||
fmt.Printf("RunConcurrentQueryStoreTest..\n")
|
||||
b.RunConcurrentQueryStoreTest()
|
||||
if round < 2 {
|
||||
fmt.Printf("\nPausing 10s before next round...\n")
|
||||
time.Sleep(10 * time.Second)
|
||||
}
|
||||
fmt.Printf("\n=== Test round completed ===\n\n")
|
||||
}
|
||||
fmt.Println("\n╔════════════════════════════════════════════════════════╗")
|
||||
fmt.Println("║ BADGER BACKEND BENCHMARK SUITE ║")
|
||||
fmt.Println("╚════════════════════════════════════════════════════════╝")
|
||||
|
||||
fmt.Printf("\n=== Starting Badger benchmark ===\n")
|
||||
|
||||
fmt.Printf("RunPeakThroughputTest (Badger)..\n")
|
||||
b.RunPeakThroughputTest()
|
||||
fmt.Println("Wiping database between tests...")
|
||||
b.db.Wipe()
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
fmt.Printf("RunBurstPatternTest (Badger)..\n")
|
||||
b.RunBurstPatternTest()
|
||||
fmt.Println("Wiping database between tests...")
|
||||
b.db.Wipe()
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
fmt.Printf("RunMixedReadWriteTest (Badger)..\n")
|
||||
b.RunMixedReadWriteTest()
|
||||
fmt.Println("Wiping database between tests...")
|
||||
b.db.Wipe()
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
fmt.Printf("RunQueryTest (Badger)..\n")
|
||||
b.RunQueryTest()
|
||||
fmt.Println("Wiping database between tests...")
|
||||
b.db.Wipe()
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
fmt.Printf("RunConcurrentQueryStoreTest (Badger)..\n")
|
||||
b.RunConcurrentQueryStoreTest()
|
||||
|
||||
fmt.Printf("\n=== Badger benchmark completed ===\n\n")
|
||||
}
|
||||
|
||||
// compactDatabase triggers a Badger value log GC before starting tests.
|
||||
@@ -348,50 +469,82 @@ func (b *Benchmark) compactDatabase() {
|
||||
func (b *Benchmark) RunPeakThroughputTest() {
|
||||
fmt.Println("\n=== Peak Throughput Test ===")
|
||||
|
||||
// Create latency recorder (writes to disk, not memory)
|
||||
latencyRecorder, err := NewLatencyRecorder(b.config.DataDir, "peak_throughput")
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create latency recorder: %v", err)
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
var wg sync.WaitGroup
|
||||
var totalEvents int64
|
||||
var errors []error
|
||||
var latencies []time.Duration
|
||||
var errorCount int64
|
||||
var mu sync.Mutex
|
||||
|
||||
events := b.generateEvents(b.config.NumEvents)
|
||||
eventChan := make(chan *event.E, len(events))
|
||||
// Stream events from memory (real-world sample events)
|
||||
eventChan, errChan := b.getEventChannel(b.config.NumEvents, 1000)
|
||||
|
||||
// Fill event channel
|
||||
for _, ev := range events {
|
||||
eventChan <- ev
|
||||
}
|
||||
close(eventChan)
|
||||
// Calculate per-worker rate: 20k events/sec total divided by worker count
|
||||
// This prevents all workers from synchronizing and hitting DB simultaneously
|
||||
perWorkerRate := 20000.0 / float64(b.config.ConcurrentWorkers)
|
||||
|
||||
// Start workers with rate limiting
|
||||
ctx := context.Background()
|
||||
|
||||
// Start workers
|
||||
for i := 0; i < b.config.ConcurrentWorkers; i++ {
|
||||
wg.Add(1)
|
||||
go func(workerID int) {
|
||||
defer wg.Done()
|
||||
|
||||
ctx := context.Background()
|
||||
for ev := range eventChan {
|
||||
eventStart := time.Now()
|
||||
// Each worker gets its own rate limiter to avoid mutex contention
|
||||
workerLimiter := NewRateLimiter(perWorkerRate)
|
||||
|
||||
for ev := range eventChan {
|
||||
// Wait for rate limiter to allow this event
|
||||
workerLimiter.Wait()
|
||||
|
||||
eventStart := time.Now()
|
||||
_, err := b.db.SaveEvent(ctx, ev)
|
||||
latency := time.Since(eventStart)
|
||||
|
||||
mu.Lock()
|
||||
if err != nil {
|
||||
errors = append(errors, err)
|
||||
errorCount++
|
||||
} else {
|
||||
totalEvents++
|
||||
latencies = append(latencies, latency)
|
||||
if err := latencyRecorder.Record(latency); err != nil {
|
||||
log.Printf("Failed to record latency: %v", err)
|
||||
}
|
||||
}
|
||||
mu.Unlock()
|
||||
}
|
||||
}(i)
|
||||
}
|
||||
|
||||
// Check for streaming errors
|
||||
go func() {
|
||||
for err := range errChan {
|
||||
if err != nil {
|
||||
log.Printf("Event stream error: %v", err)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
wg.Wait()
|
||||
duration := time.Since(start)
|
||||
|
||||
// Flush latency data to disk before calculating stats
|
||||
if err := latencyRecorder.Close(); err != nil {
|
||||
log.Printf("Failed to close latency recorder: %v", err)
|
||||
}
|
||||
|
||||
// Calculate statistics from disk
|
||||
latencyStats, err := latencyRecorder.CalculateStats()
|
||||
if err != nil {
|
||||
log.Printf("Failed to calculate latency stats: %v", err)
|
||||
latencyStats = &LatencyStats{}
|
||||
}
|
||||
|
||||
// Calculate metrics
|
||||
result := &BenchmarkResult{
|
||||
TestName: "Peak Throughput",
|
||||
@@ -400,29 +553,22 @@ func (b *Benchmark) RunPeakThroughputTest() {
|
||||
EventsPerSecond: float64(totalEvents) / duration.Seconds(),
|
||||
ConcurrentWorkers: b.config.ConcurrentWorkers,
|
||||
MemoryUsed: getMemUsage(),
|
||||
}
|
||||
|
||||
if len(latencies) > 0 {
|
||||
result.AvgLatency = calculateAvgLatency(latencies)
|
||||
result.P90Latency = calculatePercentileLatency(latencies, 0.90)
|
||||
result.P95Latency = calculatePercentileLatency(latencies, 0.95)
|
||||
result.P99Latency = calculatePercentileLatency(latencies, 0.99)
|
||||
result.Bottom10Avg = calculateBottom10Avg(latencies)
|
||||
AvgLatency: latencyStats.Avg,
|
||||
P90Latency: latencyStats.P90,
|
||||
P95Latency: latencyStats.P95,
|
||||
P99Latency: latencyStats.P99,
|
||||
Bottom10Avg: latencyStats.Bottom10,
|
||||
}
|
||||
|
||||
result.SuccessRate = float64(totalEvents) / float64(b.config.NumEvents) * 100
|
||||
|
||||
for _, err := range errors {
|
||||
result.Errors = append(result.Errors, err.Error())
|
||||
}
|
||||
|
||||
b.mu.Lock()
|
||||
b.results = append(b.results, result)
|
||||
b.mu.Unlock()
|
||||
|
||||
fmt.Printf(
|
||||
"Events saved: %d/%d (%.1f%%)\n", totalEvents, b.config.NumEvents,
|
||||
result.SuccessRate,
|
||||
"Events saved: %d/%d (%.1f%%), errors: %d\n",
|
||||
totalEvents, b.config.NumEvents, result.SuccessRate, errorCount,
|
||||
)
|
||||
fmt.Printf("Duration: %v\n", duration)
|
||||
fmt.Printf("Events/sec: %.2f\n", result.EventsPerSecond)
|
||||
@@ -436,14 +582,28 @@ func (b *Benchmark) RunPeakThroughputTest() {
|
||||
func (b *Benchmark) RunBurstPatternTest() {
|
||||
fmt.Println("\n=== Burst Pattern Test ===")
|
||||
|
||||
// Create latency recorder (writes to disk, not memory)
|
||||
latencyRecorder, err := NewLatencyRecorder(b.config.DataDir, "burst_pattern")
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create latency recorder: %v", err)
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
var totalEvents int64
|
||||
var errors []error
|
||||
var latencies []time.Duration
|
||||
var errorCount int64
|
||||
var mu sync.Mutex
|
||||
|
||||
// Generate events for burst pattern
|
||||
events := b.generateEvents(b.config.NumEvents)
|
||||
// Stream events from memory (real-world sample events)
|
||||
eventChan, errChan := b.getEventChannel(b.config.NumEvents, 500)
|
||||
|
||||
// Check for streaming errors
|
||||
go func() {
|
||||
for err := range errChan {
|
||||
if err != nil {
|
||||
log.Printf("Event stream error: %v", err)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
// Simulate burst pattern: high activity periods followed by quiet periods
|
||||
burstSize := b.config.NumEvents / 10 // 10% of events in each burst
|
||||
@@ -451,17 +611,27 @@ func (b *Benchmark) RunBurstPatternTest() {
|
||||
burstPeriod := 100 * time.Millisecond
|
||||
|
||||
ctx := context.Background()
|
||||
eventIndex := 0
|
||||
var eventIndex int64
|
||||
|
||||
for eventIndex < len(events) && time.Since(start) < b.config.TestDuration {
|
||||
// Burst period - send events rapidly
|
||||
burstStart := time.Now()
|
||||
var wg sync.WaitGroup
|
||||
// Start persistent worker pool (prevents goroutine explosion)
|
||||
numWorkers := b.config.ConcurrentWorkers
|
||||
eventQueue := make(chan *event.E, numWorkers*4)
|
||||
var wg sync.WaitGroup
|
||||
|
||||
for i := 0; i < burstSize && eventIndex < len(events); i++ {
|
||||
wg.Add(1)
|
||||
go func(ev *event.E) {
|
||||
defer wg.Done()
|
||||
// Calculate per-worker rate to avoid mutex contention
|
||||
perWorkerRate := 20000.0 / float64(numWorkers)
|
||||
|
||||
for w := 0; w < numWorkers; w++ {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
|
||||
// Each worker gets its own rate limiter
|
||||
workerLimiter := NewRateLimiter(perWorkerRate)
|
||||
|
||||
for ev := range eventQueue {
|
||||
// Wait for rate limiter to allow this event
|
||||
workerLimiter.Wait()
|
||||
|
||||
eventStart := time.Now()
|
||||
_, err := b.db.SaveEvent(ctx, ev)
|
||||
@@ -469,19 +639,33 @@ func (b *Benchmark) RunBurstPatternTest() {
|
||||
|
||||
mu.Lock()
|
||||
if err != nil {
|
||||
errors = append(errors, err)
|
||||
errorCount++
|
||||
} else {
|
||||
totalEvents++
|
||||
latencies = append(latencies, latency)
|
||||
// Record latency to disk instead of keeping in memory
|
||||
if err := latencyRecorder.Record(latency); err != nil {
|
||||
log.Printf("Failed to record latency: %v", err)
|
||||
}
|
||||
}
|
||||
mu.Unlock()
|
||||
}(events[eventIndex])
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
for int(eventIndex) < b.config.NumEvents && time.Since(start) < b.config.TestDuration {
|
||||
// Burst period - send events rapidly
|
||||
burstStart := time.Now()
|
||||
|
||||
for i := 0; i < burstSize && int(eventIndex) < b.config.NumEvents; i++ {
|
||||
ev, ok := <-eventChan
|
||||
if !ok {
|
||||
break
|
||||
}
|
||||
eventQueue <- ev
|
||||
eventIndex++
|
||||
time.Sleep(burstPeriod / time.Duration(burstSize))
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
fmt.Printf(
|
||||
"Burst completed: %d events in %v\n", burstSize,
|
||||
time.Since(burstStart),
|
||||
@@ -491,8 +675,23 @@ func (b *Benchmark) RunBurstPatternTest() {
|
||||
time.Sleep(quietPeriod)
|
||||
}
|
||||
|
||||
close(eventQueue)
|
||||
wg.Wait()
|
||||
|
||||
duration := time.Since(start)
|
||||
|
||||
// Flush latency data to disk before calculating stats
|
||||
if err := latencyRecorder.Close(); err != nil {
|
||||
log.Printf("Failed to close latency recorder: %v", err)
|
||||
}
|
||||
|
||||
// Calculate statistics from disk
|
||||
latencyStats, err := latencyRecorder.CalculateStats()
|
||||
if err != nil {
|
||||
log.Printf("Failed to calculate latency stats: %v", err)
|
||||
latencyStats = &LatencyStats{}
|
||||
}
|
||||
|
||||
// Calculate metrics
|
||||
result := &BenchmarkResult{
|
||||
TestName: "Burst Pattern",
|
||||
@@ -501,27 +700,23 @@ func (b *Benchmark) RunBurstPatternTest() {
|
||||
EventsPerSecond: float64(totalEvents) / duration.Seconds(),
|
||||
ConcurrentWorkers: b.config.ConcurrentWorkers,
|
||||
MemoryUsed: getMemUsage(),
|
||||
}
|
||||
|
||||
if len(latencies) > 0 {
|
||||
result.AvgLatency = calculateAvgLatency(latencies)
|
||||
result.P90Latency = calculatePercentileLatency(latencies, 0.90)
|
||||
result.P95Latency = calculatePercentileLatency(latencies, 0.95)
|
||||
result.P99Latency = calculatePercentileLatency(latencies, 0.99)
|
||||
result.Bottom10Avg = calculateBottom10Avg(latencies)
|
||||
AvgLatency: latencyStats.Avg,
|
||||
P90Latency: latencyStats.P90,
|
||||
P95Latency: latencyStats.P95,
|
||||
P99Latency: latencyStats.P99,
|
||||
Bottom10Avg: latencyStats.Bottom10,
|
||||
}
|
||||
|
||||
result.SuccessRate = float64(totalEvents) / float64(eventIndex) * 100
|
||||
|
||||
for _, err := range errors {
|
||||
result.Errors = append(result.Errors, err.Error())
|
||||
}
|
||||
|
||||
b.mu.Lock()
|
||||
b.results = append(b.results, result)
|
||||
b.mu.Unlock()
|
||||
|
||||
fmt.Printf("Burst test completed: %d events in %v\n", totalEvents, duration)
|
||||
fmt.Printf(
|
||||
"Burst test completed: %d events in %v, errors: %d\n",
|
||||
totalEvents, duration, errorCount,
|
||||
)
|
||||
fmt.Printf("Events/sec: %.2f\n", result.EventsPerSecond)
|
||||
}
|
||||
|
||||
@@ -546,17 +741,25 @@ func (b *Benchmark) RunMixedReadWriteTest() {
|
||||
events := b.generateEvents(b.config.NumEvents)
|
||||
var wg sync.WaitGroup
|
||||
|
||||
// Calculate per-worker rate to avoid mutex contention
|
||||
perWorkerRate := 20000.0 / float64(b.config.ConcurrentWorkers)
|
||||
|
||||
// Start mixed read/write workers
|
||||
for i := 0; i < b.config.ConcurrentWorkers; i++ {
|
||||
wg.Add(1)
|
||||
go func(workerID int) {
|
||||
defer wg.Done()
|
||||
|
||||
// Each worker gets its own rate limiter
|
||||
workerLimiter := NewRateLimiter(perWorkerRate)
|
||||
|
||||
eventIndex := workerID
|
||||
for time.Since(start) < b.config.TestDuration && eventIndex < len(events) {
|
||||
// Alternate between write and read operations
|
||||
if eventIndex%2 == 0 {
|
||||
// Write operation
|
||||
// Write operation - apply rate limiting
|
||||
workerLimiter.Wait()
|
||||
|
||||
writeStart := time.Now()
|
||||
_, err := b.db.SaveEvent(ctx, events[eventIndex])
|
||||
writeLatency := time.Since(writeStart)
|
||||
@@ -727,9 +930,8 @@ func (b *Benchmark) RunQueryTest() {
|
||||
mu.Unlock()
|
||||
|
||||
queryCount++
|
||||
if queryCount%10 == 0 {
|
||||
time.Sleep(10 * time.Millisecond) // Small delay every 10 queries
|
||||
}
|
||||
// Always add delay to prevent CPU saturation (queries are CPU-intensive)
|
||||
time.Sleep(1 * time.Millisecond)
|
||||
}
|
||||
}(i)
|
||||
}
|
||||
@@ -829,6 +1031,9 @@ func (b *Benchmark) RunConcurrentQueryStoreTest() {
|
||||
numReaders := b.config.ConcurrentWorkers / 2
|
||||
numWriters := b.config.ConcurrentWorkers - numReaders
|
||||
|
||||
// Calculate per-worker write rate to avoid mutex contention
|
||||
perWorkerRate := 20000.0 / float64(numWriters)
|
||||
|
||||
// Start query workers (readers)
|
||||
for i := 0; i < numReaders; i++ {
|
||||
wg.Add(1)
|
||||
@@ -863,9 +1068,8 @@ func (b *Benchmark) RunConcurrentQueryStoreTest() {
|
||||
mu.Unlock()
|
||||
|
||||
queryCount++
|
||||
if queryCount%5 == 0 {
|
||||
time.Sleep(5 * time.Millisecond) // Small delay
|
||||
}
|
||||
// Always add delay to prevent CPU saturation (queries are CPU-intensive)
|
||||
time.Sleep(1 * time.Millisecond)
|
||||
}
|
||||
}(i)
|
||||
}
|
||||
@@ -876,11 +1080,16 @@ func (b *Benchmark) RunConcurrentQueryStoreTest() {
|
||||
go func(workerID int) {
|
||||
defer wg.Done()
|
||||
|
||||
// Each worker gets its own rate limiter
|
||||
workerLimiter := NewRateLimiter(perWorkerRate)
|
||||
|
||||
eventIndex := workerID
|
||||
writeCount := 0
|
||||
|
||||
for time.Since(start) < b.config.TestDuration && eventIndex < len(writeEvents) {
|
||||
// Write operation
|
||||
// Write operation - apply rate limiting
|
||||
workerLimiter.Wait()
|
||||
|
||||
writeStart := time.Now()
|
||||
_, err := b.db.SaveEvent(ctx, writeEvents[eventIndex])
|
||||
writeLatency := time.Since(writeStart)
|
||||
@@ -896,10 +1105,6 @@ func (b *Benchmark) RunConcurrentQueryStoreTest() {
|
||||
|
||||
eventIndex += numWriters
|
||||
writeCount++
|
||||
|
||||
if writeCount%10 == 0 {
|
||||
time.Sleep(10 * time.Millisecond) // Small delay every 10 writes
|
||||
}
|
||||
}
|
||||
}(i)
|
||||
}
|
||||
@@ -960,116 +1165,203 @@ func (b *Benchmark) RunConcurrentQueryStoreTest() {
|
||||
}
|
||||
|
||||
func (b *Benchmark) generateEvents(count int) []*event.E {
|
||||
fmt.Printf("Generating %d unique synthetic events (minimum 300 bytes each)...\n", count)
|
||||
|
||||
// Create a single signer for all events (reusing key is faster)
|
||||
signer := p8k.MustNew()
|
||||
if err := signer.Generate(); err != nil {
|
||||
log.Fatalf("Failed to generate keypair: %v", err)
|
||||
}
|
||||
|
||||
// Base timestamp - start from current time and increment
|
||||
baseTime := time.Now().Unix()
|
||||
|
||||
// Minimum content size
|
||||
const minContentSize = 300
|
||||
|
||||
// Base content template
|
||||
baseContent := "This is a benchmark test event with realistic content size. "
|
||||
|
||||
// Pre-calculate how much padding we need
|
||||
paddingNeeded := minContentSize - len(baseContent)
|
||||
if paddingNeeded < 0 {
|
||||
paddingNeeded = 0
|
||||
}
|
||||
|
||||
// Create padding string (with varied characters for realistic size)
|
||||
padding := make([]byte, paddingNeeded)
|
||||
for i := range padding {
|
||||
padding[i] = ' ' + byte(i%94) // Printable ASCII characters
|
||||
}
|
||||
|
||||
events := make([]*event.E, count)
|
||||
now := timestamp.Now()
|
||||
|
||||
// Generate a keypair for signing all events
|
||||
var keys *p8k.Signer
|
||||
var err error
|
||||
if keys, err = p8k.New(); err != nil {
|
||||
fmt.Printf("failed to create signer: %v\n", err)
|
||||
return nil
|
||||
}
|
||||
if err := keys.Generate(); err != nil {
|
||||
log.Fatalf("Failed to generate keys for benchmark events: %v", err)
|
||||
}
|
||||
|
||||
// Define size distribution - from minimal to 500MB
|
||||
// We'll create a logarithmic distribution to test various sizes
|
||||
sizeBuckets := []int{
|
||||
0, // Minimal: empty content, no tags
|
||||
10, // Tiny: ~10 bytes
|
||||
100, // Small: ~100 bytes
|
||||
1024, // 1 KB
|
||||
10 * 1024, // 10 KB
|
||||
50 * 1024, // 50 KB
|
||||
100 * 1024, // 100 KB
|
||||
500 * 1024, // 500 KB
|
||||
1024 * 1024, // 1 MB
|
||||
5 * 1024 * 1024, // 5 MB
|
||||
10 * 1024 * 1024, // 10 MB
|
||||
50 * 1024 * 1024, // 50 MB
|
||||
100 * 1024 * 1024, // 100 MB
|
||||
500000000, // 500 MB (500,000,000 bytes)
|
||||
}
|
||||
|
||||
for i := 0; i < count; i++ {
|
||||
ev := event.New()
|
||||
|
||||
ev.CreatedAt = now.I64()
|
||||
ev.Kind = kind.TextNote.K
|
||||
ev.CreatedAt = baseTime + int64(i) // Unique timestamp for each event
|
||||
ev.Tags = tag.NewS()
|
||||
|
||||
// Distribute events across size buckets
|
||||
bucketIndex := i % len(sizeBuckets)
|
||||
targetSize := sizeBuckets[bucketIndex]
|
||||
// Create content with unique identifier and padding
|
||||
ev.Content = []byte(fmt.Sprintf("%s Event #%d. %s", baseContent, i, string(padding)))
|
||||
|
||||
// Generate content based on target size
|
||||
if targetSize == 0 {
|
||||
// Minimal event: empty content, no tags
|
||||
ev.Content = []byte{}
|
||||
ev.Tags = tag.NewS() // Empty tag set
|
||||
} else if targetSize < 1024 {
|
||||
// Small events: simple text content
|
||||
ev.Content = []byte(fmt.Sprintf(
|
||||
"Event %d - Size bucket: %d bytes. %s",
|
||||
i, targetSize, strings.Repeat("x", max(0, targetSize-50)),
|
||||
))
|
||||
// Add minimal tags
|
||||
ev.Tags = tag.NewS(
|
||||
tag.NewFromBytesSlice([]byte("t"), []byte("benchmark")),
|
||||
)
|
||||
} else {
|
||||
// Larger events: fill with repeated content to reach target size
|
||||
// Account for JSON overhead (~200 bytes for event structure)
|
||||
contentSize := targetSize - 200
|
||||
if contentSize < 0 {
|
||||
contentSize = targetSize
|
||||
}
|
||||
|
||||
// Build content with repeated pattern
|
||||
pattern := fmt.Sprintf("Event %d, target size %d bytes. ", i, targetSize)
|
||||
repeatCount := contentSize / len(pattern)
|
||||
if repeatCount < 1 {
|
||||
repeatCount = 1
|
||||
}
|
||||
ev.Content = []byte(strings.Repeat(pattern, repeatCount))
|
||||
|
||||
// Add some tags (contributes to total size)
|
||||
numTags := min(5, max(1, targetSize/10000)) // More tags for larger events
|
||||
tags := make([]*tag.T, 0, numTags+1)
|
||||
tags = append(tags, tag.NewFromBytesSlice([]byte("t"), []byte("benchmark")))
|
||||
for j := 0; j < numTags; j++ {
|
||||
tags = append(tags, tag.NewFromBytesSlice(
|
||||
[]byte("e"),
|
||||
[]byte(fmt.Sprintf("ref_%d_%d", i, j)),
|
||||
))
|
||||
}
|
||||
ev.Tags = tag.NewS(tags...)
|
||||
}
|
||||
|
||||
// Properly sign the event
|
||||
if err := ev.Sign(keys); err != nil {
|
||||
// Sign the event (this calculates ID and Sig)
|
||||
if err := ev.Sign(signer); err != nil {
|
||||
log.Fatalf("Failed to sign event %d: %v", i, err)
|
||||
}
|
||||
|
||||
events[i] = ev
|
||||
}
|
||||
|
||||
// Log size distribution summary
|
||||
fmt.Printf("\nGenerated %d events with size distribution:\n", count)
|
||||
for idx, size := range sizeBuckets {
|
||||
eventsInBucket := count / len(sizeBuckets)
|
||||
if idx < count%len(sizeBuckets) {
|
||||
eventsInBucket++
|
||||
}
|
||||
sizeStr := formatSize(size)
|
||||
fmt.Printf(" %s: ~%d events\n", sizeStr, eventsInBucket)
|
||||
// Print stats
|
||||
totalSize := int64(0)
|
||||
for _, ev := range events {
|
||||
totalSize += int64(len(ev.Content))
|
||||
}
|
||||
fmt.Println()
|
||||
avgSize := totalSize / int64(count)
|
||||
|
||||
fmt.Printf("Generated %d events:\n", count)
|
||||
fmt.Printf(" Average content size: %d bytes\n", avgSize)
|
||||
fmt.Printf(" All events are unique (incremental timestamps)\n")
|
||||
fmt.Printf(" All events are properly signed\n\n")
|
||||
|
||||
return events
|
||||
}
|
||||
|
||||
// printEventStats prints statistics about the loaded real-world events
|
||||
func (b *Benchmark) printEventStats() {
|
||||
if len(b.cachedEvents) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Analyze event distribution
|
||||
kindCounts := make(map[uint16]int)
|
||||
var totalSize int64
|
||||
|
||||
for _, ev := range b.cachedEvents {
|
||||
kindCounts[ev.Kind]++
|
||||
totalSize += int64(len(ev.Content))
|
||||
}
|
||||
|
||||
avgSize := totalSize / int64(len(b.cachedEvents))
|
||||
|
||||
fmt.Printf("\nEvent Statistics:\n")
|
||||
fmt.Printf(" Total events: %d\n", len(b.cachedEvents))
|
||||
fmt.Printf(" Average content size: %d bytes\n", avgSize)
|
||||
fmt.Printf(" Event kinds found: %d unique\n", len(kindCounts))
|
||||
fmt.Printf(" Most common kinds:\n")
|
||||
|
||||
// Print top 5 kinds
|
||||
type kindCount struct {
|
||||
kind uint16
|
||||
count int
|
||||
}
|
||||
var counts []kindCount
|
||||
for k, c := range kindCounts {
|
||||
counts = append(counts, kindCount{k, c})
|
||||
}
|
||||
sort.Slice(counts, func(i, j int) bool {
|
||||
return counts[i].count > counts[j].count
|
||||
})
|
||||
for i := 0; i < min(5, len(counts)); i++ {
|
||||
fmt.Printf(" Kind %d: %d events\n", counts[i].kind, counts[i].count)
|
||||
}
|
||||
fmt.Println()
|
||||
}
|
||||
|
||||
// loadRealEvents loads events from embedded examples.Cache on first call
|
||||
func (b *Benchmark) loadRealEvents() {
|
||||
b.eventCacheMu.Lock()
|
||||
defer b.eventCacheMu.Unlock()
|
||||
|
||||
// Only load once
|
||||
if len(b.cachedEvents) > 0 {
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Println("Loading real-world sample events (11,596 events from 6 months of Nostr)...")
|
||||
scanner := bufio.NewScanner(bytes.NewReader(examples.Cache))
|
||||
|
||||
buf := make([]byte, 0, 64*1024)
|
||||
scanner.Buffer(buf, 1024*1024)
|
||||
|
||||
for scanner.Scan() {
|
||||
var ev event.E
|
||||
if err := json.Unmarshal(scanner.Bytes(), &ev); err != nil {
|
||||
fmt.Printf("Warning: failed to unmarshal event: %v\n", err)
|
||||
continue
|
||||
}
|
||||
b.cachedEvents = append(b.cachedEvents, &ev)
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
log.Fatalf("Failed to read events: %v", err)
|
||||
}
|
||||
|
||||
fmt.Printf("Loaded %d real-world events (already signed, zero crypto overhead)\n", len(b.cachedEvents))
|
||||
b.printEventStats()
|
||||
}
|
||||
|
||||
// getEventChannel returns a channel that streams unique synthetic events
|
||||
// bufferSize controls memory usage - larger buffers improve throughput but use more memory
|
||||
func (b *Benchmark) getEventChannel(count int, bufferSize int) (<-chan *event.E, <-chan error) {
|
||||
eventChan := make(chan *event.E, bufferSize)
|
||||
errChan := make(chan error, 1)
|
||||
|
||||
go func() {
|
||||
defer close(eventChan)
|
||||
defer close(errChan)
|
||||
|
||||
// Create a single signer for all events
|
||||
signer := p8k.MustNew()
|
||||
if err := signer.Generate(); err != nil {
|
||||
errChan <- fmt.Errorf("failed to generate keypair: %w", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Base timestamp - start from current time and increment
|
||||
baseTime := time.Now().Unix()
|
||||
|
||||
// Minimum content size
|
||||
const minContentSize = 300
|
||||
|
||||
// Base content template
|
||||
baseContent := "This is a benchmark test event with realistic content size. "
|
||||
|
||||
// Pre-calculate padding
|
||||
paddingNeeded := minContentSize - len(baseContent)
|
||||
if paddingNeeded < 0 {
|
||||
paddingNeeded = 0
|
||||
}
|
||||
|
||||
// Create padding string (with varied characters for realistic size)
|
||||
padding := make([]byte, paddingNeeded)
|
||||
for i := range padding {
|
||||
padding[i] = ' ' + byte(i%94) // Printable ASCII characters
|
||||
}
|
||||
|
||||
// Stream unique events
|
||||
for i := 0; i < count; i++ {
|
||||
ev := event.New()
|
||||
ev.Kind = kind.TextNote.K
|
||||
ev.CreatedAt = baseTime + int64(i) // Unique timestamp for each event
|
||||
ev.Tags = tag.NewS()
|
||||
|
||||
// Create content with unique identifier and padding
|
||||
ev.Content = []byte(fmt.Sprintf("%s Event #%d. %s", baseContent, i, string(padding)))
|
||||
|
||||
// Sign the event (this calculates ID and Sig)
|
||||
if err := ev.Sign(signer); err != nil {
|
||||
errChan <- fmt.Errorf("failed to sign event %d: %w", i, err)
|
||||
return
|
||||
}
|
||||
|
||||
eventChan <- ev
|
||||
}
|
||||
}()
|
||||
|
||||
return eventChan, errChan
|
||||
}
|
||||
|
||||
// formatSize formats byte size in human-readable format
|
||||
func formatSize(bytes int) string {
|
||||
if bytes == 0 {
|
||||
|
||||
135
cmd/benchmark/neo4j_benchmark.go
Normal file
135
cmd/benchmark/neo4j_benchmark.go
Normal file
@@ -0,0 +1,135 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"next.orly.dev/pkg/database"
|
||||
_ "next.orly.dev/pkg/neo4j" // Import to register neo4j factory
|
||||
)
|
||||
|
||||
// Neo4jBenchmark wraps a Benchmark with Neo4j-specific setup
|
||||
type Neo4jBenchmark struct {
|
||||
config *BenchmarkConfig
|
||||
docker *Neo4jDocker
|
||||
database database.Database
|
||||
bench *BenchmarkAdapter
|
||||
}
|
||||
|
||||
// NewNeo4jBenchmark creates a new Neo4j benchmark instance
|
||||
func NewNeo4jBenchmark(config *BenchmarkConfig) (*Neo4jBenchmark, error) {
|
||||
// Create Docker manager
|
||||
docker, err := NewNeo4jDocker()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create Neo4j docker manager: %w", err)
|
||||
}
|
||||
|
||||
// Start Neo4j container
|
||||
if err := docker.Start(); err != nil {
|
||||
return nil, fmt.Errorf("failed to start Neo4j: %w", err)
|
||||
}
|
||||
|
||||
// Set environment variables for Neo4j connection
|
||||
os.Setenv("ORLY_NEO4J_URI", "bolt://localhost:7687")
|
||||
os.Setenv("ORLY_NEO4J_USER", "neo4j")
|
||||
os.Setenv("ORLY_NEO4J_PASSWORD", "benchmark123")
|
||||
|
||||
// Create database instance using Neo4j backend
|
||||
ctx := context.Background()
|
||||
cancel := func() {}
|
||||
db, err := database.NewDatabase(ctx, cancel, "neo4j", config.DataDir, "warn")
|
||||
if err != nil {
|
||||
docker.Stop()
|
||||
return nil, fmt.Errorf("failed to create Neo4j database: %w", err)
|
||||
}
|
||||
|
||||
// Wait for database to be ready
|
||||
fmt.Println("Waiting for Neo4j database to be ready...")
|
||||
select {
|
||||
case <-db.Ready():
|
||||
fmt.Println("Neo4j database is ready")
|
||||
case <-time.After(30 * time.Second):
|
||||
db.Close()
|
||||
docker.Stop()
|
||||
return nil, fmt.Errorf("Neo4j database failed to become ready")
|
||||
}
|
||||
|
||||
// Create adapter to use Database interface with Benchmark
|
||||
adapter := NewBenchmarkAdapter(config, db)
|
||||
|
||||
neo4jBench := &Neo4jBenchmark{
|
||||
config: config,
|
||||
docker: docker,
|
||||
database: db,
|
||||
bench: adapter,
|
||||
}
|
||||
|
||||
return neo4jBench, nil
|
||||
}
|
||||
|
||||
// Close closes the Neo4j benchmark and stops Docker container
|
||||
func (ngb *Neo4jBenchmark) Close() {
|
||||
fmt.Println("Closing Neo4j benchmark...")
|
||||
|
||||
if ngb.database != nil {
|
||||
ngb.database.Close()
|
||||
}
|
||||
|
||||
if ngb.docker != nil {
|
||||
if err := ngb.docker.Stop(); err != nil {
|
||||
log.Printf("Error stopping Neo4j Docker: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// RunSuite runs the benchmark suite on Neo4j
|
||||
func (ngb *Neo4jBenchmark) RunSuite() {
|
||||
fmt.Println("\n╔════════════════════════════════════════════════════════╗")
|
||||
fmt.Println("║ NEO4J BACKEND BENCHMARK SUITE ║")
|
||||
fmt.Println("╚════════════════════════════════════════════════════════╝")
|
||||
|
||||
// Run benchmark tests
|
||||
fmt.Printf("\n=== Starting Neo4j benchmark ===\n")
|
||||
|
||||
fmt.Printf("RunPeakThroughputTest (Neo4j)..\n")
|
||||
ngb.bench.RunPeakThroughputTest()
|
||||
fmt.Println("Wiping database between tests...")
|
||||
ngb.database.Wipe()
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
fmt.Printf("RunBurstPatternTest (Neo4j)..\n")
|
||||
ngb.bench.RunBurstPatternTest()
|
||||
fmt.Println("Wiping database between tests...")
|
||||
ngb.database.Wipe()
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
fmt.Printf("RunMixedReadWriteTest (Neo4j)..\n")
|
||||
ngb.bench.RunMixedReadWriteTest()
|
||||
fmt.Println("Wiping database between tests...")
|
||||
ngb.database.Wipe()
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
fmt.Printf("RunQueryTest (Neo4j)..\n")
|
||||
ngb.bench.RunQueryTest()
|
||||
fmt.Println("Wiping database between tests...")
|
||||
ngb.database.Wipe()
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
fmt.Printf("RunConcurrentQueryStoreTest (Neo4j)..\n")
|
||||
ngb.bench.RunConcurrentQueryStoreTest()
|
||||
|
||||
fmt.Printf("\n=== Neo4j benchmark completed ===\n\n")
|
||||
}
|
||||
|
||||
// GenerateReport generates the benchmark report
|
||||
func (ngb *Neo4jBenchmark) GenerateReport() {
|
||||
ngb.bench.GenerateReport()
|
||||
}
|
||||
|
||||
// GenerateAsciidocReport generates asciidoc format report
|
||||
func (ngb *Neo4jBenchmark) GenerateAsciidocReport() {
|
||||
ngb.bench.GenerateAsciidocReport()
|
||||
}
|
||||
147
cmd/benchmark/neo4j_docker.go
Normal file
147
cmd/benchmark/neo4j_docker.go
Normal file
@@ -0,0 +1,147 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Neo4jDocker manages a Neo4j instance via Docker Compose
|
||||
type Neo4jDocker struct {
|
||||
composeFile string
|
||||
projectName string
|
||||
}
|
||||
|
||||
// NewNeo4jDocker creates a new Neo4j Docker manager
|
||||
func NewNeo4jDocker() (*Neo4jDocker, error) {
|
||||
// Look for docker-compose-neo4j.yml in current directory or cmd/benchmark
|
||||
composeFile := "docker-compose-neo4j.yml"
|
||||
if _, err := os.Stat(composeFile); os.IsNotExist(err) {
|
||||
// Try in cmd/benchmark directory
|
||||
composeFile = filepath.Join("cmd", "benchmark", "docker-compose-neo4j.yml")
|
||||
}
|
||||
|
||||
return &Neo4jDocker{
|
||||
composeFile: composeFile,
|
||||
projectName: "orly-benchmark-neo4j",
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Start starts the Neo4j Docker container
|
||||
func (d *Neo4jDocker) Start() error {
|
||||
fmt.Println("Starting Neo4j Docker container...")
|
||||
|
||||
// Pull image first
|
||||
pullCmd := exec.Command("docker-compose",
|
||||
"-f", d.composeFile,
|
||||
"-p", d.projectName,
|
||||
"pull",
|
||||
)
|
||||
pullCmd.Stdout = os.Stdout
|
||||
pullCmd.Stderr = os.Stderr
|
||||
if err := pullCmd.Run(); err != nil {
|
||||
return fmt.Errorf("failed to pull Neo4j image: %w", err)
|
||||
}
|
||||
|
||||
// Start containers
|
||||
upCmd := exec.Command("docker-compose",
|
||||
"-f", d.composeFile,
|
||||
"-p", d.projectName,
|
||||
"up", "-d",
|
||||
)
|
||||
upCmd.Stdout = os.Stdout
|
||||
upCmd.Stderr = os.Stderr
|
||||
if err := upCmd.Run(); err != nil {
|
||||
return fmt.Errorf("failed to start Neo4j container: %w", err)
|
||||
}
|
||||
|
||||
fmt.Println("Waiting for Neo4j to be healthy...")
|
||||
if err := d.waitForHealthy(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fmt.Println("Neo4j is ready!")
|
||||
return nil
|
||||
}
|
||||
|
||||
// waitForHealthy waits for Neo4j to become healthy
|
||||
func (d *Neo4jDocker) waitForHealthy() error {
|
||||
timeout := 120 * time.Second
|
||||
deadline := time.Now().Add(timeout)
|
||||
|
||||
containerName := "orly-benchmark-neo4j"
|
||||
|
||||
for time.Now().Before(deadline) {
|
||||
// Check container health status
|
||||
checkCmd := exec.Command("docker", "inspect",
|
||||
"--format={{.State.Health.Status}}",
|
||||
containerName,
|
||||
)
|
||||
output, err := checkCmd.Output()
|
||||
if err == nil && string(output) == "healthy\n" {
|
||||
return nil
|
||||
}
|
||||
|
||||
time.Sleep(2 * time.Second)
|
||||
}
|
||||
|
||||
return fmt.Errorf("Neo4j failed to become healthy within %v", timeout)
|
||||
}
|
||||
|
||||
// Stop stops and removes the Neo4j Docker container
|
||||
func (d *Neo4jDocker) Stop() error {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||
defer cancel()
|
||||
|
||||
// Get logs before stopping (useful for debugging)
|
||||
logsCmd := exec.CommandContext(ctx, "docker-compose",
|
||||
"-f", d.composeFile,
|
||||
"-p", d.projectName,
|
||||
"logs", "--tail=50",
|
||||
)
|
||||
logsCmd.Stdout = os.Stdout
|
||||
logsCmd.Stderr = os.Stderr
|
||||
_ = logsCmd.Run() // Ignore errors
|
||||
|
||||
fmt.Println("Stopping Neo4j Docker container...")
|
||||
|
||||
// Stop and remove containers
|
||||
downCmd := exec.Command("docker-compose",
|
||||
"-f", d.composeFile,
|
||||
"-p", d.projectName,
|
||||
"down", "-v",
|
||||
)
|
||||
downCmd.Stdout = os.Stdout
|
||||
downCmd.Stderr = os.Stderr
|
||||
if err := downCmd.Run(); err != nil {
|
||||
return fmt.Errorf("failed to stop Neo4j container: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetBoltEndpoint returns the Neo4j Bolt endpoint
|
||||
func (d *Neo4jDocker) GetBoltEndpoint() string {
|
||||
return "bolt://localhost:7687"
|
||||
}
|
||||
|
||||
// IsRunning returns whether Neo4j is running
|
||||
func (d *Neo4jDocker) IsRunning() bool {
|
||||
checkCmd := exec.Command("docker", "ps", "--filter", "name=orly-benchmark-neo4j", "--format", "{{.Names}}")
|
||||
output, err := checkCmd.Output()
|
||||
return err == nil && len(output) > 0
|
||||
}
|
||||
|
||||
// Logs returns the logs from Neo4j container
|
||||
func (d *Neo4jDocker) Logs(tail int) (string, error) {
|
||||
logsCmd := exec.Command("docker-compose",
|
||||
"-f", d.composeFile,
|
||||
"-p", d.projectName,
|
||||
"logs", "--tail", fmt.Sprintf("%d", tail),
|
||||
)
|
||||
output, err := logsCmd.CombinedOutput()
|
||||
return string(output), err
|
||||
}
|
||||
176
cmd/benchmark/reports/run_20251119_054648/aggregate_report.txt
Normal file
176
cmd/benchmark/reports/run_20251119_054648/aggregate_report.txt
Normal file
@@ -0,0 +1,176 @@
|
||||
================================================================
|
||||
NOSTR RELAY BENCHMARK AGGREGATE REPORT
|
||||
================================================================
|
||||
Generated: 2025-11-19T06:13:40+00:00
|
||||
Benchmark Configuration:
|
||||
Events per test: 50000
|
||||
Concurrent workers: 24
|
||||
Test duration: 60s
|
||||
|
||||
Relays tested: 8
|
||||
|
||||
================================================================
|
||||
SUMMARY BY RELAY
|
||||
================================================================
|
||||
|
||||
Relay: next-orly-badger
|
||||
----------------------------------------
|
||||
Status: COMPLETED
|
||||
Events/sec: 2911.52
|
||||
Events/sec: 0.00
|
||||
Events/sec: 2911.52
|
||||
Success Rate: 23.2%
|
||||
Success Rate: 0.0%
|
||||
Success Rate: 50.0%
|
||||
Avg Latency: 3.938925ms
|
||||
Bottom 10% Avg Latency: 1.115318ms
|
||||
Avg Latency: 0s
|
||||
P95 Latency: 4.624387ms
|
||||
P95 Latency: 0s
|
||||
P95 Latency: 112.915µs
|
||||
|
||||
Relay: next-orly-dgraph
|
||||
----------------------------------------
|
||||
Status: COMPLETED
|
||||
Events/sec: 2661.66
|
||||
Events/sec: 0.00
|
||||
Events/sec: 2661.66
|
||||
Success Rate: 23.2%
|
||||
Success Rate: 0.0%
|
||||
Success Rate: 50.0%
|
||||
Avg Latency: 4.795769ms
|
||||
Bottom 10% Avg Latency: 1.212562ms
|
||||
Avg Latency: 0s
|
||||
P95 Latency: 6.029522ms
|
||||
P95 Latency: 0s
|
||||
P95 Latency: 115.35µs
|
||||
|
||||
Relay: next-orly-neo4j
|
||||
----------------------------------------
|
||||
Status: COMPLETED
|
||||
Events/sec: 2827.54
|
||||
Events/sec: 0.00
|
||||
Events/sec: 2827.54
|
||||
Success Rate: 23.2%
|
||||
Success Rate: 0.0%
|
||||
Success Rate: 50.0%
|
||||
Avg Latency: 4.203722ms
|
||||
Bottom 10% Avg Latency: 1.124184ms
|
||||
Avg Latency: 0s
|
||||
P95 Latency: 4.568189ms
|
||||
P95 Latency: 0s
|
||||
P95 Latency: 112.755µs
|
||||
|
||||
Relay: khatru-sqlite
|
||||
----------------------------------------
|
||||
Status: COMPLETED
|
||||
Events/sec: 2840.91
|
||||
Events/sec: 0.00
|
||||
Events/sec: 2840.91
|
||||
Success Rate: 23.2%
|
||||
Success Rate: 0.0%
|
||||
Success Rate: 50.0%
|
||||
Avg Latency: 4.23095ms
|
||||
Bottom 10% Avg Latency: 1.142932ms
|
||||
Avg Latency: 0s
|
||||
P95 Latency: 4.703046ms
|
||||
P95 Latency: 0s
|
||||
P95 Latency: 113.897µs
|
||||
|
||||
Relay: khatru-badger
|
||||
----------------------------------------
|
||||
Status: COMPLETED
|
||||
Events/sec: 2885.30
|
||||
Events/sec: 0.00
|
||||
Events/sec: 2885.30
|
||||
Success Rate: 23.2%
|
||||
Success Rate: 0.0%
|
||||
Success Rate: 50.0%
|
||||
Avg Latency: 3.985846ms
|
||||
Bottom 10% Avg Latency: 1.120349ms
|
||||
Avg Latency: 0s
|
||||
P95 Latency: 4.23797ms
|
||||
P95 Latency: 0s
|
||||
P95 Latency: 114.277µs
|
||||
|
||||
Relay: relayer-basic
|
||||
----------------------------------------
|
||||
Status: COMPLETED
|
||||
Events/sec: 2707.76
|
||||
Events/sec: 0.00
|
||||
Events/sec: 2707.76
|
||||
Success Rate: 23.2%
|
||||
Success Rate: 0.0%
|
||||
Success Rate: 50.0%
|
||||
Avg Latency: 4.657987ms
|
||||
Bottom 10% Avg Latency: 1.266467ms
|
||||
Avg Latency: 0s
|
||||
P95 Latency: 5.603449ms
|
||||
P95 Latency: 0s
|
||||
P95 Latency: 112.123µs
|
||||
|
||||
Relay: strfry
|
||||
----------------------------------------
|
||||
Status: COMPLETED
|
||||
Events/sec: 2841.22
|
||||
Events/sec: 0.00
|
||||
Events/sec: 2841.22
|
||||
Success Rate: 23.2%
|
||||
Success Rate: 0.0%
|
||||
Success Rate: 50.0%
|
||||
Avg Latency: 4.088506ms
|
||||
Bottom 10% Avg Latency: 1.135387ms
|
||||
Avg Latency: 0s
|
||||
P95 Latency: 4.517428ms
|
||||
P95 Latency: 0s
|
||||
P95 Latency: 113.396µs
|
||||
|
||||
Relay: nostr-rs-relay
|
||||
----------------------------------------
|
||||
Status: COMPLETED
|
||||
Events/sec: 2883.32
|
||||
Events/sec: 0.00
|
||||
Events/sec: 2883.32
|
||||
Success Rate: 23.2%
|
||||
Success Rate: 0.0%
|
||||
Success Rate: 50.0%
|
||||
Avg Latency: 4.044321ms
|
||||
Bottom 10% Avg Latency: 1.103637ms
|
||||
Avg Latency: 0s
|
||||
P95 Latency: 4.602719ms
|
||||
P95 Latency: 0s
|
||||
P95 Latency: 114.679µs
|
||||
|
||||
|
||||
================================================================
|
||||
DETAILED RESULTS
|
||||
================================================================
|
||||
|
||||
Individual relay reports are available in:
|
||||
- /reports/run_20251119_054648/khatru-badger_results.txt
|
||||
- /reports/run_20251119_054648/khatru-sqlite_results.txt
|
||||
- /reports/run_20251119_054648/next-orly-badger_results.txt
|
||||
- /reports/run_20251119_054648/next-orly-dgraph_results.txt
|
||||
- /reports/run_20251119_054648/next-orly-neo4j_results.txt
|
||||
- /reports/run_20251119_054648/nostr-rs-relay_results.txt
|
||||
- /reports/run_20251119_054648/relayer-basic_results.txt
|
||||
- /reports/run_20251119_054648/strfry_results.txt
|
||||
|
||||
================================================================
|
||||
BENCHMARK COMPARISON TABLE
|
||||
================================================================
|
||||
|
||||
Relay Status Peak Tput/s Avg Latency Success Rate
|
||||
---- ------ ----------- ----------- ------------
|
||||
next-orly-badger OK 2911.52 3.938925ms 23.2%
|
||||
next-orly-dgraph OK 2661.66 4.795769ms 23.2%
|
||||
next-orly-neo4j OK 2827.54 4.203722ms 23.2%
|
||||
khatru-sqlite OK 2840.91 4.23095ms 23.2%
|
||||
khatru-badger OK 2885.30 3.985846ms 23.2%
|
||||
relayer-basic OK 2707.76 4.657987ms 23.2%
|
||||
strfry OK 2841.22 4.088506ms 23.2%
|
||||
nostr-rs-relay OK 2883.32 4.044321ms 23.2%
|
||||
|
||||
================================================================
|
||||
End of Report
|
||||
================================================================
|
||||
@@ -0,0 +1,422 @@
|
||||
Starting Nostr Relay Benchmark (Badger Backend)
|
||||
Data Directory: /tmp/benchmark_khatru-badger_8
|
||||
Events: 50000, Workers: 24, Duration: 1m0s
|
||||
1763532013820368ℹ️ migrating to version 1... /build/pkg/database/migrations.go:66
|
||||
1763532013820438ℹ️ migrating to version 2... /build/pkg/database/migrations.go:73
|
||||
1763532013820599ℹ️ migrating to version 3... /build/pkg/database/migrations.go:80
|
||||
1763532013820636ℹ️ cleaning up ephemeral events (kinds 20000-29999)... /build/pkg/database/migrations.go:287
|
||||
1763532013820660ℹ️ cleaned up 0 ephemeral events from database /build/pkg/database/migrations.go:332
|
||||
1763532013820689ℹ️ migrating to version 4... /build/pkg/database/migrations.go:87
|
||||
1763532013820696ℹ️ converting events to optimized inline storage (Reiser4 optimization)... /build/pkg/database/migrations.go:340
|
||||
1763532013820709ℹ️ found 0 events to convert (0 regular, 0 replaceable, 0 addressable) /build/pkg/database/migrations.go:429
|
||||
1763532013820716ℹ️ migration complete: converted 0 events to optimized inline storage, deleted 0 old keys /build/pkg/database/migrations.go:538
|
||||
Loading real-world sample events from embedded data...
|
||||
Loading real-world sample events (11,596 events from 6 months of Nostr)...
|
||||
Loaded 11596 real-world events (already signed, zero crypto overhead)
|
||||
|
||||
Event Statistics:
|
||||
Total events: 11596
|
||||
Average content size: 588 bytes
|
||||
Event kinds found: 25 unique
|
||||
Most common kinds:
|
||||
Kind 1: 7152 events
|
||||
Kind 7: 1973 events
|
||||
Kind 6: 934 events
|
||||
Kind 10002: 337 events
|
||||
Kind 0: 290 events
|
||||
|
||||
|
||||
╔════════════════════════════════════════════════════════╗
|
||||
║ BADGER BACKEND BENCHMARK SUITE ║
|
||||
╚════════════════════════════════════════════════════════╝
|
||||
|
||||
=== Starting Badger benchmark ===
|
||||
RunPeakThroughputTest (Badger)..
|
||||
|
||||
=== Peak Throughput Test ===
|
||||
1763532014234684🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014251555🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014251585🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014251639🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014254033🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014254683🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014260808🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014260870🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014260812🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014277104🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014277657🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014278205🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014278285🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014336903🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014363478🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014364290🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014364354🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014372904🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014372959🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532014372971⚠️ failed to process deletion for event ecd7b942d5a473589b4a3bc34f0b3dadf0c6e0ba9325d7a47604167acc757d5c: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532014372938🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014373003🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532014373014⚠️ failed to process deletion for event 900e73566bb098d7ec1880ec68521ef76e066b933d4d6b71dbe99ee156c4b307: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532014383001🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014388837🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014388919🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014391202🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014391216🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014395794🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014396847🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014396979🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014396873🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014396880🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014396846🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014397913🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014398032🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014398153🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014398247🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014398524🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014400310🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014403460🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014403895🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014404002🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014470332🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014934773🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014936459🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014936494🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014936497🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014937911🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014939536🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014940367🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014941984🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014942689🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014942709🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014942750🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014942741🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014942816🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014943338🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014943451🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014943893🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014944522🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014944537🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014945141🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014946012🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014946045🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532014946054⚠️ failed to process deletion for event 63eae8af9f42e2d37f93b1277bcf708c94aeb8935dd83d1e8e80136c4e4f8292: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532014952520🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014952585🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014952570🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014952563🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014952804🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014952823🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014962010🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014964509🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014966546🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014967125🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014967251🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014967275🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532014967285⚠️ failed to process deletion for event 2f5e01050c81c0d711e9f391726af47933b5fcfbe497434164069787d201e3b9: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532014967615🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014967952🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014968056🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014969528🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014970610🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014971146🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014971229🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014972191🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014972290🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014972853🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014972895🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014974659🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014974684🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014974733🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014974970🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014975040🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014977640🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014978813🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014978844🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014979660🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014980760🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014981739🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014984695🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014987050🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014990255🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014990268🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014993000🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014993071🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014996648🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014997887🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014997959🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014999208🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532014999202🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015000529🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015000865🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015000886🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532015000896⚠️ failed to process deletion for event e56f683d8a3ad6a1d7ed41f50bf2739179ac8f6e1418ff34e5e20903172237ea: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532015002409🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015004222🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015004801🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015008082🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015008121🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015009296🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015009474🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015009686🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015012705🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015012722🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015012772🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532015012781⚠️ failed to process deletion for event 4b07094ff22787f584f5ceddc11ae44c66ab513d01d7529e156d6adb75323eca: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532015012725🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015013275🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015015485🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015019833🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015020302🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015020468🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015021079🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015021179🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015021350🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532015021469⚠️ failed to process deletion for event 59475e9f41d77977a2b2c0d9acf7c32bad368dafdeab1e8f7be8cf0fe0e00ceb: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532015064798🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015093196🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015094045🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015094353🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015095456🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015095647🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015096130🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015097710🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015098568🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015098646🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015098916🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015098980🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015099247🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015099372🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015108396🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015119916🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015119977🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015120078🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015120399🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015120616🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015122335🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015122440🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015123578🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015124232🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015124271🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015124633🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015125046🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015125334🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015125478🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015126491🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015128111🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015129915🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015130524🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015130922🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015130936🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532015130947⚠️ failed to process deletion for event bd502ba9dc5c173b3b82708561f35118e2ca580f9c7e5baffceccdd9f6502462: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532015132041🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015132140🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015132159🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532015132169⚠️ failed to process deletion for event 961a3d9582d896fcd8755ccc634b7846e549131284740f6fec0d635d0bb072af: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532015132455🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015133481🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015135204🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015136901🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015139167🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015139314🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015139559🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015141275🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015142111🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015142160🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015142311🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015142362🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015142802🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015144182🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015145669🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015146606🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015146730🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015146734🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015146823🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015149126🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015149475🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015150317🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015150316🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015151297🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015151530🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015153167🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015153511🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015153573🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015155305🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015155850🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015156230🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015156939🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015156993🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015157067🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015157244🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015157507🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015157735🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015158040🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015158976🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015158977🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015159156🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015169407🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015169419🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015169831🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015169843🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015170898🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015171507🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015171504🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015171625🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015171670🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015171725🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015171739🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532015172695🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
Events saved: 11594/50000 (23.2%), errors: 38406
|
||||
Duration: 4.018301066s
|
||||
Events/sec: 2885.30
|
||||
Avg latency: 3.985846ms
|
||||
P90 latency: 3.336914ms
|
||||
P95 latency: 4.23797ms
|
||||
P99 latency: 73.250512ms
|
||||
Bottom 10% Avg latency: 1.120349ms
|
||||
Wiping database between tests...
|
||||
RunBurstPatternTest (Badger)..
|
||||
|
||||
=== Burst Pattern Test ===
|
||||
Burst completed: 5000 events in 340.161594ms
|
||||
Burst completed: 5000 events in 341.757352ms
|
||||
Burst completed: 5000 events in 343.518235ms
|
||||
Burst completed: 5000 events in 351.096045ms
|
||||
Burst completed: 5000 events in 332.761293ms
|
||||
Burst completed: 5000 events in 335.458889ms
|
||||
Burst completed: 5000 events in 331.664424ms
|
||||
Burst completed: 5000 events in 347.834073ms
|
||||
Burst completed: 5000 events in 356.191406ms
|
||||
Burst completed: 5000 events in 335.250061ms
|
||||
Burst test completed: 0 events in 8.421134295s, errors: 50000
|
||||
Events/sec: 0.00
|
||||
Wiping database between tests...
|
||||
RunMixedReadWriteTest (Badger)..
|
||||
|
||||
=== Mixed Read/Write Test ===
|
||||
Pre-populating database for read tests...
|
||||
Mixed test completed: 0 writes, 25000 reads in 22.626390359s
|
||||
Combined ops/sec: 1104.90
|
||||
Wiping database between tests...
|
||||
RunQueryTest (Badger)..
|
||||
|
||||
=== Query Test ===
|
||||
Pre-populating database with 10000 events for query tests...
|
||||
Query test completed: 403899 queries in 1m0.00394972s
|
||||
Queries/sec: 6731.21
|
||||
Avg query latency: 1.574327ms
|
||||
P95 query latency: 5.370236ms
|
||||
P99 query latency: 9.259041ms
|
||||
Wiping database between tests...
|
||||
RunConcurrentQueryStoreTest (Badger)..
|
||||
|
||||
=== Concurrent Query/Store Test ===
|
||||
Pre-populating database with 5000 events for concurrent query/store test...
|
||||
Concurrent test completed: 564827 operations (564827 queries, 0 writes) in 1m0.001868516s
|
||||
Operations/sec: 9413.49
|
||||
Avg latency: 45.49µs
|
||||
Avg query latency: 45.49µs
|
||||
Avg write latency: 0s
|
||||
P95 latency: 87.116µs
|
||||
P99 latency: 128.965µs
|
||||
|
||||
=== Badger benchmark completed ===
|
||||
|
||||
|
||||
================================================================================
|
||||
BENCHMARK REPORT
|
||||
================================================================================
|
||||
|
||||
Test: Peak Throughput
|
||||
Duration: 4.018301066s
|
||||
Total Events: 11594
|
||||
Events/sec: 2885.30
|
||||
Success Rate: 23.2%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 398 MB
|
||||
Avg Latency: 3.985846ms
|
||||
P90 Latency: 3.336914ms
|
||||
P95 Latency: 4.23797ms
|
||||
P99 Latency: 73.250512ms
|
||||
Bottom 10% Avg Latency: 1.120349ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Burst Pattern
|
||||
Duration: 8.421134295s
|
||||
Total Events: 0
|
||||
Events/sec: 0.00
|
||||
Success Rate: 0.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 226 MB
|
||||
Avg Latency: 0s
|
||||
P90 Latency: 0s
|
||||
P95 Latency: 0s
|
||||
P99 Latency: 0s
|
||||
Bottom 10% Avg Latency: 0s
|
||||
----------------------------------------
|
||||
|
||||
Test: Mixed Read/Write
|
||||
Duration: 22.626390359s
|
||||
Total Events: 25000
|
||||
Events/sec: 1104.90
|
||||
Success Rate: 50.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 400 MB
|
||||
Avg Latency: 82.006µs
|
||||
P90 Latency: 103.006µs
|
||||
P95 Latency: 114.277µs
|
||||
P99 Latency: 141.409µs
|
||||
Bottom 10% Avg Latency: 128.204µs
|
||||
Errors (25000):
|
||||
- blocked: event already exists: 193c67d51dab9dc19eeebcde810364f2ba7d105ab9206de1f4f0f884db23e6e2
|
||||
- blocked: event already exists: 06061b630fd0881cbe7ed02114584fea59b9621c2e9479e6e6aa2be561240a90
|
||||
- blocked: event already exists: 1642d6770a74de7ca45169bc76dab334591bcb2191044da0b18459888164f9fc
|
||||
- blocked: event already exists: 0312061d336fd22dc64b98130663835242e4479c54c7ca88b72c3b3093ef29a2
|
||||
- blocked: event already exists: 11aa0b6defe3d58cef2f93c06fb194bc72241f17fb35312594d279f6c8f13d44
|
||||
... and 24995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Test: Query Performance
|
||||
Duration: 1m0.00394972s
|
||||
Total Events: 403899
|
||||
Events/sec: 6731.21
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 343 MB
|
||||
Avg Latency: 1.574327ms
|
||||
P90 Latency: 4.377275ms
|
||||
P95 Latency: 5.370236ms
|
||||
P99 Latency: 9.259041ms
|
||||
Bottom 10% Avg Latency: 6.283482ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Concurrent Query/Store
|
||||
Duration: 1m0.001868516s
|
||||
Total Events: 564827
|
||||
Events/sec: 9413.49
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 314 MB
|
||||
Avg Latency: 45.49µs
|
||||
P90 Latency: 77.518µs
|
||||
P95 Latency: 87.116µs
|
||||
P99 Latency: 128.965µs
|
||||
Bottom 10% Avg Latency: 98.509µs
|
||||
Errors (50000):
|
||||
- blocked: event already exists: 0ce484c600cb1c0b33f1e38ddea4b38a47069615d22114a9c621a9164d9b6218
|
||||
- blocked: event already exists: 0e0b4dfd5e4ecfb0d3acb8db48d13833edeac5163fbcba9fb94160b686c07595
|
||||
- blocked: event already exists: 048d7b07155b3832a76eac0b46bea764cac3597dfbc28b559698d51f915cb6d1
|
||||
- blocked: event already exists: 0ea6723d131534cf6e2209169a518c4bc598e3acad0618c2ef34df34c867cca1
|
||||
- blocked: event already exists: 03edc6b095b2a314733ea3dc689bb54e8739d443e9e69dd61334a5d376bf72a4
|
||||
... and 49995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Report saved to: /tmp/benchmark_khatru-badger_8/benchmark_report.txt
|
||||
AsciiDoc report saved to: /tmp/benchmark_khatru-badger_8/benchmark_report.adoc
|
||||
|
||||
RELAY_NAME: khatru-badger
|
||||
RELAY_URL: ws://khatru-badger:3334
|
||||
TEST_TIMESTAMP: 2025-11-19T06:03:30+00:00
|
||||
BENCHMARK_CONFIG:
|
||||
Events: 50000
|
||||
Workers: 24
|
||||
Duration: 60s
|
||||
@@ -0,0 +1,422 @@
|
||||
Starting Nostr Relay Benchmark (Badger Backend)
|
||||
Data Directory: /tmp/benchmark_khatru-sqlite_8
|
||||
Events: 50000, Workers: 24, Duration: 1m0s
|
||||
1763531812447164ℹ️ migrating to version 1... /build/pkg/database/migrations.go:66
|
||||
1763531812447229ℹ️ migrating to version 2... /build/pkg/database/migrations.go:73
|
||||
1763531812447253ℹ️ migrating to version 3... /build/pkg/database/migrations.go:80
|
||||
1763531812447258ℹ️ cleaning up ephemeral events (kinds 20000-29999)... /build/pkg/database/migrations.go:287
|
||||
1763531812447267ℹ️ cleaned up 0 ephemeral events from database /build/pkg/database/migrations.go:332
|
||||
1763531812447280ℹ️ migrating to version 4... /build/pkg/database/migrations.go:87
|
||||
1763531812447284ℹ️ converting events to optimized inline storage (Reiser4 optimization)... /build/pkg/database/migrations.go:340
|
||||
1763531812447299ℹ️ found 0 events to convert (0 regular, 0 replaceable, 0 addressable) /build/pkg/database/migrations.go:429
|
||||
1763531812447305ℹ️ migration complete: converted 0 events to optimized inline storage, deleted 0 old keys /build/pkg/database/migrations.go:538
|
||||
Loading real-world sample events from embedded data...
|
||||
Loading real-world sample events (11,596 events from 6 months of Nostr)...
|
||||
Loaded 11596 real-world events (already signed, zero crypto overhead)
|
||||
|
||||
Event Statistics:
|
||||
Total events: 11596
|
||||
Average content size: 588 bytes
|
||||
Event kinds found: 25 unique
|
||||
Most common kinds:
|
||||
Kind 1: 7152 events
|
||||
Kind 7: 1973 events
|
||||
Kind 6: 934 events
|
||||
Kind 10002: 337 events
|
||||
Kind 0: 290 events
|
||||
|
||||
|
||||
╔════════════════════════════════════════════════════════╗
|
||||
║ BADGER BACKEND BENCHMARK SUITE ║
|
||||
╚════════════════════════════════════════════════════════╝
|
||||
|
||||
=== Starting Badger benchmark ===
|
||||
RunPeakThroughputTest (Badger)..
|
||||
|
||||
=== Peak Throughput Test ===
|
||||
1763531812868715🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812885777🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812885785🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812885781🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812888045🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812888883🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812894492🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812894803🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812894864🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812906496🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812906886🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812907798🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812907811🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812970866🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812994211🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812994242🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531812995432🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813002343🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813002408🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531813002419⚠️ failed to process deletion for event 900e73566bb098d7ec1880ec68521ef76e066b933d4d6b71dbe99ee156c4b307: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531813002352🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813002444🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531813002453⚠️ failed to process deletion for event ecd7b942d5a473589b4a3bc34f0b3dadf0c6e0ba9325d7a47604167acc757d5c: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531813015072🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813021384🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813021454🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813024080🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813024096🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813028103🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813028164🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813028163🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813028172🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813029347🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813029380🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813029352🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813029730🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813030214🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813030785🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813030957🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813031557🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813035531🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813036469🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813036495🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813099067🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813562314🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813562971🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813565216🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813565216🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813567538🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813567585🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813567716🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813568218🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813568287🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813569557🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813570316🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813570360🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813570365🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813571136🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813571233🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813572029🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813572530🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813572639🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813574021🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813574064🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813574094🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531813574102⚠️ failed to process deletion for event 63eae8af9f42e2d37f93b1277bcf708c94aeb8935dd83d1e8e80136c4e4f8292: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531813580239🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813580983🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813581043🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813581051🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813581057🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813582095🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813591212🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813592938🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813595510🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813595557🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531813595567⚠️ failed to process deletion for event 2f5e01050c81c0d711e9f391726af47933b5fcfbe497434164069787d201e3b9: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531813596639🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813597830🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813597913🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813597995🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813598000🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813601235🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813601369🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813601858🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813603356🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813603525🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813604715🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813604863🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813605574🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813605606🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813607117🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813607278🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813607509🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813607624🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813612677🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813612797🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813614702🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813614764🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813614882🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813617726🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813623543🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813625833🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813626707🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813627647🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813632382🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813632571🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813635724🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813636426🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813636441🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813639483🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813639507🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813639674🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813639722🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813639732🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531813639741⚠️ failed to process deletion for event e56f683d8a3ad6a1d7ed41f50bf2739179ac8f6e1418ff34e5e20903172237ea: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531813640713🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813643809🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813644009🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813647476🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813647510🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813647627🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813648800🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813648916🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813650458🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813651830🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813651871🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531813651882⚠️ failed to process deletion for event 4b07094ff22787f584f5ceddc11ae44c66ab513d01d7529e156d6adb75323eca: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531813652883🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813652944🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813653924🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813659588🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813659716🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813659733🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813660461🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813660671🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813660696🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531813660706⚠️ failed to process deletion for event 59475e9f41d77977a2b2c0d9acf7c32bad368dafdeab1e8f7be8cf0fe0e00ceb: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531813665655🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813667093🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813669863🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813669986🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813670282🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813717436🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813717882🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813717901🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813718988🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813719942🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813721821🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813738580🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813738746🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813739264🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813748490🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813759607🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813759605🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813760687🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813762309🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813765035🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813765052🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813765323🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813765579🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813765764🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813766675🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813766899🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813767155🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813767196🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813772016🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813772674🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813776484🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813776639🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813778873🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813779242🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813779285🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531813779295⚠️ failed to process deletion for event bd502ba9dc5c173b3b82708561f35118e2ca580f9c7e5baffceccdd9f6502462: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531813779456🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813779483🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531813779497⚠️ failed to process deletion for event 961a3d9582d896fcd8755ccc634b7846e549131284740f6fec0d635d0bb072af: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531813779697🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813780185🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813781185🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813785435🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813786078🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813787727🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813788738🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813788858🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813791644🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813791838🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813791870🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813792007🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813792229🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813793643🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813795596🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813796358🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813797479🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813798679🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813800350🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813800531🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813800925🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813800936🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813800925🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813803971🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813803969🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813804958🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813806100🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813817052🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813817048🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813818064🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813818135🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813818275🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813818876🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813818912🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813819267🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813819296🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813819709🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813820510🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813820746🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813821066🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813821216🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813821322🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813821776🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813822026🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813822031🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813826902🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813827998🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813828498🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813828596🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813828687🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813828721🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813828601🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813829312🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531813830658🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
Events saved: 11596/50000 (23.2%), errors: 38404
|
||||
Duration: 4.081787895s
|
||||
Events/sec: 2840.91
|
||||
Avg latency: 4.23095ms
|
||||
P90 latency: 3.400435ms
|
||||
P95 latency: 4.703046ms
|
||||
P99 latency: 81.047331ms
|
||||
Bottom 10% Avg latency: 1.142932ms
|
||||
Wiping database between tests...
|
||||
RunBurstPatternTest (Badger)..
|
||||
|
||||
=== Burst Pattern Test ===
|
||||
Burst completed: 5000 events in 346.663376ms
|
||||
Burst completed: 5000 events in 333.067587ms
|
||||
Burst completed: 5000 events in 330.484528ms
|
||||
Burst completed: 5000 events in 338.487447ms
|
||||
Burst completed: 5000 events in 341.447764ms
|
||||
Burst completed: 5000 events in 364.127901ms
|
||||
Burst completed: 5000 events in 344.947769ms
|
||||
Burst completed: 5000 events in 341.432775ms
|
||||
Burst completed: 5000 events in 347.698657ms
|
||||
Burst completed: 5000 events in 341.10947ms
|
||||
Burst test completed: 0 events in 8.436449617s, errors: 50000
|
||||
Events/sec: 0.00
|
||||
Wiping database between tests...
|
||||
RunMixedReadWriteTest (Badger)..
|
||||
|
||||
=== Mixed Read/Write Test ===
|
||||
Pre-populating database for read tests...
|
||||
Mixed test completed: 0 writes, 25000 reads in 22.467041454s
|
||||
Combined ops/sec: 1112.74
|
||||
Wiping database between tests...
|
||||
RunQueryTest (Badger)..
|
||||
|
||||
=== Query Test ===
|
||||
Pre-populating database with 10000 events for query tests...
|
||||
Query test completed: 408433 queries in 1m0.005096356s
|
||||
Queries/sec: 6806.64
|
||||
Avg query latency: 1.551089ms
|
||||
P95 query latency: 5.244046ms
|
||||
P99 query latency: 9.025085ms
|
||||
Wiping database between tests...
|
||||
RunConcurrentQueryStoreTest (Badger)..
|
||||
|
||||
=== Concurrent Query/Store Test ===
|
||||
Pre-populating database with 5000 events for concurrent query/store test...
|
||||
Concurrent test completed: 564551 operations (564551 queries, 0 writes) in 1m0.000283858s
|
||||
Operations/sec: 9409.14
|
||||
Avg latency: 45.619µs
|
||||
Avg query latency: 45.619µs
|
||||
Avg write latency: 0s
|
||||
P95 latency: 87.236µs
|
||||
P99 latency: 130.949µs
|
||||
|
||||
=== Badger benchmark completed ===
|
||||
|
||||
|
||||
================================================================================
|
||||
BENCHMARK REPORT
|
||||
================================================================================
|
||||
|
||||
Test: Peak Throughput
|
||||
Duration: 4.081787895s
|
||||
Total Events: 11596
|
||||
Events/sec: 2840.91
|
||||
Success Rate: 23.2%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 384 MB
|
||||
Avg Latency: 4.23095ms
|
||||
P90 Latency: 3.400435ms
|
||||
P95 Latency: 4.703046ms
|
||||
P99 Latency: 81.047331ms
|
||||
Bottom 10% Avg Latency: 1.142932ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Burst Pattern
|
||||
Duration: 8.436449617s
|
||||
Total Events: 0
|
||||
Events/sec: 0.00
|
||||
Success Rate: 0.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 215 MB
|
||||
Avg Latency: 0s
|
||||
P90 Latency: 0s
|
||||
P95 Latency: 0s
|
||||
P99 Latency: 0s
|
||||
Bottom 10% Avg Latency: 0s
|
||||
----------------------------------------
|
||||
|
||||
Test: Mixed Read/Write
|
||||
Duration: 22.467041454s
|
||||
Total Events: 25000
|
||||
Events/sec: 1112.74
|
||||
Success Rate: 50.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 385 MB
|
||||
Avg Latency: 82.061µs
|
||||
P90 Latency: 102.695µs
|
||||
P95 Latency: 113.897µs
|
||||
P99 Latency: 140.147µs
|
||||
Bottom 10% Avg Latency: 129.144µs
|
||||
Errors (25000):
|
||||
- blocked: event already exists: 06061b630fd0881cbe7ed02114584fea59b9621c2e9479e6e6aa2be561240a90
|
||||
- blocked: event already exists: 048d7b07155b3832a76eac0b46bea764cac3597dfbc28b559698d51f915cb6d1
|
||||
- blocked: event already exists: 0ea6723d131534cf6e2209169a518c4bc598e3acad0618c2ef34df34c867cca1
|
||||
- blocked: event already exists: 0312061d336fd22dc64b98130663835242e4479c54c7ca88b72c3b3093ef29a2
|
||||
- blocked: event already exists: 11aa0b6defe3d58cef2f93c06fb194bc72241f17fb35312594d279f6c8f13d44
|
||||
... and 24995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Test: Query Performance
|
||||
Duration: 1m0.005096356s
|
||||
Total Events: 408433
|
||||
Events/sec: 6806.64
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 366 MB
|
||||
Avg Latency: 1.551089ms
|
||||
P90 Latency: 4.323112ms
|
||||
P95 Latency: 5.244046ms
|
||||
P99 Latency: 9.025085ms
|
||||
Bottom 10% Avg Latency: 6.133631ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Concurrent Query/Store
|
||||
Duration: 1m0.000283858s
|
||||
Total Events: 564551
|
||||
Events/sec: 9409.14
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 353 MB
|
||||
Avg Latency: 45.619µs
|
||||
P90 Latency: 77.388µs
|
||||
P95 Latency: 87.236µs
|
||||
P99 Latency: 130.949µs
|
||||
Bottom 10% Avg Latency: 98.767µs
|
||||
Errors (50000):
|
||||
- blocked: event already exists: 01e9943cf5e805283c512b9c26cf69f7e9ff412710d7543a3a52dc93ac7e8a57
|
||||
- blocked: event already exists: 00a5f5f6c7f1c4e6f71ab7df2c056e238ccd9b441e59ddf119d7ab7f1d7510e0
|
||||
- blocked: event already exists: 0312061d336fd22dc64b98130663835242e4479c54c7ca88b72c3b3093ef29a2
|
||||
- blocked: event already exists: 03edc6b095b2a314733ea3dc689bb54e8739d443e9e69dd61334a5d376bf72a4
|
||||
- blocked: event already exists: 0ea6723d131534cf6e2209169a518c4bc598e3acad0618c2ef34df34c867cca1
|
||||
... and 49995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Report saved to: /tmp/benchmark_khatru-sqlite_8/benchmark_report.txt
|
||||
AsciiDoc report saved to: /tmp/benchmark_khatru-sqlite_8/benchmark_report.adoc
|
||||
|
||||
RELAY_NAME: khatru-sqlite
|
||||
RELAY_URL: ws://khatru-sqlite:3334
|
||||
TEST_TIMESTAMP: 2025-11-19T06:00:08+00:00
|
||||
BENCHMARK_CONFIG:
|
||||
Events: 50000
|
||||
Workers: 24
|
||||
Duration: 60s
|
||||
@@ -0,0 +1,422 @@
|
||||
Starting Nostr Relay Benchmark (Badger Backend)
|
||||
Data Directory: /tmp/benchmark_next-orly-badger_8
|
||||
Events: 50000, Workers: 24, Duration: 1m0s
|
||||
1763531208053542ℹ️ migrating to version 1... /build/pkg/database/migrations.go:66
|
||||
1763531208053690ℹ️ migrating to version 2... /build/pkg/database/migrations.go:73
|
||||
1763531208053742ℹ️ migrating to version 3... /build/pkg/database/migrations.go:80
|
||||
1763531208053750ℹ️ cleaning up ephemeral events (kinds 20000-29999)... /build/pkg/database/migrations.go:287
|
||||
1763531208053760ℹ️ cleaned up 0 ephemeral events from database /build/pkg/database/migrations.go:332
|
||||
1763531208053778ℹ️ migrating to version 4... /build/pkg/database/migrations.go:87
|
||||
1763531208053784ℹ️ converting events to optimized inline storage (Reiser4 optimization)... /build/pkg/database/migrations.go:340
|
||||
1763531208053801ℹ️ found 0 events to convert (0 regular, 0 replaceable, 0 addressable) /build/pkg/database/migrations.go:429
|
||||
1763531208053808ℹ️ migration complete: converted 0 events to optimized inline storage, deleted 0 old keys /build/pkg/database/migrations.go:538
|
||||
Loading real-world sample events from embedded data...
|
||||
Loading real-world sample events (11,596 events from 6 months of Nostr)...
|
||||
Loaded 11596 real-world events (already signed, zero crypto overhead)
|
||||
|
||||
Event Statistics:
|
||||
Total events: 11596
|
||||
Average content size: 588 bytes
|
||||
Event kinds found: 25 unique
|
||||
Most common kinds:
|
||||
Kind 1: 7152 events
|
||||
Kind 7: 1973 events
|
||||
Kind 6: 934 events
|
||||
Kind 10002: 337 events
|
||||
Kind 0: 290 events
|
||||
|
||||
|
||||
╔════════════════════════════════════════════════════════╗
|
||||
║ BADGER BACKEND BENCHMARK SUITE ║
|
||||
╚════════════════════════════════════════════════════════╝
|
||||
|
||||
=== Starting Badger benchmark ===
|
||||
RunPeakThroughputTest (Badger)..
|
||||
|
||||
=== Peak Throughput Test ===
|
||||
1763531208465992🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208483000🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208483002🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208483661🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208485058🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208485701🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208491992🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208492314🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208492945🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208507228🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208507404🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208507623🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208508352🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208565748🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208593189🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208593671🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208594027🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208602302🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208602343🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531208602353⚠️ failed to process deletion for event 900e73566bb098d7ec1880ec68521ef76e066b933d4d6b71dbe99ee156c4b307: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531208602584🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208602605🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531208602611⚠️ failed to process deletion for event ecd7b942d5a473589b4a3bc34f0b3dadf0c6e0ba9325d7a47604167acc757d5c: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531208610060🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208618508🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208618604🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208622203🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208622231🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208626334🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208626349🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208626357🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208626874🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208626909🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208626885🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208626879🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208627275🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208627366🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208628641🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208628657🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208630021🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208632589🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208633861🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208633918🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531208707199🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209162276🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209162272🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209162817🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209162842🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209165303🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209165301🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209166674🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209166730🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209167368🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209167390🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209167886🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209168683🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209168686🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209169118🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209169150🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209170268🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209170273🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209170304🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209171666🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209171826🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209171854🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531209171863⚠️ failed to process deletion for event 63eae8af9f42e2d37f93b1277bcf708c94aeb8935dd83d1e8e80136c4e4f8292: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531209177425🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209177559🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209178508🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209178569🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209178611🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209179115🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209187446🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209190525🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209192408🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209192833🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209193582🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209193679🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209193698🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531209193706🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209193707⚠️ failed to process deletion for event 2f5e01050c81c0d711e9f391726af47933b5fcfbe497434164069787d201e3b9: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531209193752🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209195157🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209197056🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209197225🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209197585🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209198217🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209198927🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209198996🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209199967🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209200128🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209200229🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209201976🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209202454🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209202456🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209204631🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209204834🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209205952🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209206128🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209206132🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209208116🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209211081🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209213252🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209214253🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209215036🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209218532🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209219160🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209222863🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209222881🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209222965🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209224623🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209225425🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209225575🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209225925🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209225963🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531209225976⚠️ failed to process deletion for event e56f683d8a3ad6a1d7ed41f50bf2739179ac8f6e1418ff34e5e20903172237ea: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531209227378🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209230128🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209231247🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209234368🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209234474🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209235586🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209235721🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209235726🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209237302🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209237697🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209238490🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209238511🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531209238521⚠️ failed to process deletion for event 4b07094ff22787f584f5ceddc11ae44c66ab513d01d7529e156d6adb75323eca: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531209238633🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209240817🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209244908🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209246392🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209247168🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209247218🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209247624🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209247733🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531209247887⚠️ failed to process deletion for event 59475e9f41d77977a2b2c0d9acf7c32bad368dafdeab1e8f7be8cf0fe0e00ceb: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531209258006🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209279804🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209281422🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209281504🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209282064🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209282725🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209302439🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209302967🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209303684🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209304213🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209304357🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209304523🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209304583🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209305101🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209330784🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209340122🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209340215🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209345768🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209346170🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209346179🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209346425🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209346897🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209347883🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209347912🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209347965🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209348714🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209349164🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209349193🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209350881🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209350968🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209352091🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209353585🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209355263🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209355876🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209355928🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531209355941⚠️ failed to process deletion for event bd502ba9dc5c173b3b82708561f35118e2ca580f9c7e5baffceccdd9f6502462: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531209355985🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209356002🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531209356010⚠️ failed to process deletion for event 961a3d9582d896fcd8755ccc634b7846e549131284740f6fec0d635d0bb072af: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531209356081🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209356450🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209356604🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209359937🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209360087🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209361772🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209361849🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209362879🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209363754🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209365054🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209365110🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209365144🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209365175🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209366595🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209366598🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209368981🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209369366🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209369921🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209369991🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209370020🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209371151🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209372195🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209372361🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209372416🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209372441🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209374373🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209375330🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209375383🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209375621🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209376946🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209376950🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209377448🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209377499🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209378356🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209378357🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209378418🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209378454🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209382899🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209383451🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209387993🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209388236🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209401957🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209402627🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209402903🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209403446🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209403453🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209404336🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209404676🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209404984🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209405085🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209405676🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209405823🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209405861🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531209406920🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
Events saved: 11592/50000 (23.2%), errors: 38408
|
||||
Duration: 3.98141893s
|
||||
Events/sec: 2911.52
|
||||
Avg latency: 3.938925ms
|
||||
P90 latency: 3.357143ms
|
||||
P95 latency: 4.624387ms
|
||||
P99 latency: 71.546396ms
|
||||
Bottom 10% Avg latency: 1.115318ms
|
||||
Wiping database between tests...
|
||||
RunBurstPatternTest (Badger)..
|
||||
|
||||
=== Burst Pattern Test ===
|
||||
Burst completed: 5000 events in 342.062444ms
|
||||
Burst completed: 5000 events in 342.428441ms
|
||||
Burst completed: 5000 events in 331.568769ms
|
||||
Burst completed: 5000 events in 325.104719ms
|
||||
Burst completed: 5000 events in 336.284199ms
|
||||
Burst completed: 5000 events in 336.312002ms
|
||||
Burst completed: 5000 events in 336.094447ms
|
||||
Burst completed: 5000 events in 333.072923ms
|
||||
Burst completed: 5000 events in 350.917627ms
|
||||
Burst completed: 5000 events in 329.621891ms
|
||||
Burst test completed: 0 events in 8.368751649s, errors: 50000
|
||||
Events/sec: 0.00
|
||||
Wiping database between tests...
|
||||
RunMixedReadWriteTest (Badger)..
|
||||
|
||||
=== Mixed Read/Write Test ===
|
||||
Pre-populating database for read tests...
|
||||
Mixed test completed: 0 writes, 25000 reads in 22.617040249s
|
||||
Combined ops/sec: 1105.36
|
||||
Wiping database between tests...
|
||||
RunQueryTest (Badger)..
|
||||
|
||||
=== Query Test ===
|
||||
Pre-populating database with 10000 events for query tests...
|
||||
Query test completed: 404906 queries in 1m0.003855016s
|
||||
Queries/sec: 6748.00
|
||||
Avg query latency: 1.567428ms
|
||||
P95 query latency: 5.346663ms
|
||||
P99 query latency: 9.186414ms
|
||||
Wiping database between tests...
|
||||
RunConcurrentQueryStoreTest (Badger)..
|
||||
|
||||
=== Concurrent Query/Store Test ===
|
||||
Pre-populating database with 5000 events for concurrent query/store test...
|
||||
Concurrent test completed: 565785 operations (565785 queries, 0 writes) in 1m0.000685928s
|
||||
Operations/sec: 9429.64
|
||||
Avg latency: 45.237µs
|
||||
Avg query latency: 45.237µs
|
||||
Avg write latency: 0s
|
||||
P95 latency: 86.405µs
|
||||
P99 latency: 126.221µs
|
||||
|
||||
=== Badger benchmark completed ===
|
||||
|
||||
|
||||
================================================================================
|
||||
BENCHMARK REPORT
|
||||
================================================================================
|
||||
|
||||
Test: Peak Throughput
|
||||
Duration: 3.98141893s
|
||||
Total Events: 11592
|
||||
Events/sec: 2911.52
|
||||
Success Rate: 23.2%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 409 MB
|
||||
Avg Latency: 3.938925ms
|
||||
P90 Latency: 3.357143ms
|
||||
P95 Latency: 4.624387ms
|
||||
P99 Latency: 71.546396ms
|
||||
Bottom 10% Avg Latency: 1.115318ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Burst Pattern
|
||||
Duration: 8.368751649s
|
||||
Total Events: 0
|
||||
Events/sec: 0.00
|
||||
Success Rate: 0.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 316 MB
|
||||
Avg Latency: 0s
|
||||
P90 Latency: 0s
|
||||
P95 Latency: 0s
|
||||
P99 Latency: 0s
|
||||
Bottom 10% Avg Latency: 0s
|
||||
----------------------------------------
|
||||
|
||||
Test: Mixed Read/Write
|
||||
Duration: 22.617040249s
|
||||
Total Events: 25000
|
||||
Events/sec: 1105.36
|
||||
Success Rate: 50.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 358 MB
|
||||
Avg Latency: 81.046µs
|
||||
P90 Latency: 102.124µs
|
||||
P95 Latency: 112.915µs
|
||||
P99 Latency: 137.351µs
|
||||
Bottom 10% Avg Latency: 122.82µs
|
||||
Errors (25000):
|
||||
- blocked: event already exists: 2197ff7ffc723d2fb4f7e44aeaf0ed8c2e0e2f3fb3aae29f2e33e0683ddf1a99
|
||||
- blocked: event already exists: 00a5f5f6c7f1c4e6f71ab7df2c056e238ccd9b441e59ddf119d7ab7f1d7510e0
|
||||
- blocked: event already exists: 0ce484c600cb1c0b33f1e38ddea4b38a47069615d22114a9c621a9164d9b6218
|
||||
- blocked: event already exists: 0ea6723d131534cf6e2209169a518c4bc598e3acad0618c2ef34df34c867cca1
|
||||
- blocked: event already exists: 1642d6770a74de7ca45169bc76dab334591bcb2191044da0b18459888164f9fc
|
||||
... and 24995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Test: Query Performance
|
||||
Duration: 1m0.003855016s
|
||||
Total Events: 404906
|
||||
Events/sec: 6748.00
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 370 MB
|
||||
Avg Latency: 1.567428ms
|
||||
P90 Latency: 4.371194ms
|
||||
P95 Latency: 5.346663ms
|
||||
P99 Latency: 9.186414ms
|
||||
Bottom 10% Avg Latency: 6.253752ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Concurrent Query/Store
|
||||
Duration: 1m0.000685928s
|
||||
Total Events: 565785
|
||||
Events/sec: 9429.64
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 285 MB
|
||||
Avg Latency: 45.237µs
|
||||
P90 Latency: 76.916µs
|
||||
P95 Latency: 86.405µs
|
||||
P99 Latency: 126.221µs
|
||||
Bottom 10% Avg Latency: 96.947µs
|
||||
Errors (50000):
|
||||
- blocked: event already exists: 00a5f5f6c7f1c4e6f71ab7df2c056e238ccd9b441e59ddf119d7ab7f1d7510e0
|
||||
- blocked: event already exists: 0312061d336fd22dc64b98130663835242e4479c54c7ca88b72c3b3093ef29a2
|
||||
- blocked: event already exists: 0f06ba91f371d4f8647a3f9529af3b9a012988eabf9f7c2eb42b39aa86697ea9
|
||||
- blocked: event already exists: 06061b630fd0881cbe7ed02114584fea59b9621c2e9479e6e6aa2be561240a90
|
||||
- blocked: event already exists: 01e9943cf5e805283c512b9c26cf69f7e9ff412710d7543a3a52dc93ac7e8a57
|
||||
... and 49995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Report saved to: /tmp/benchmark_next-orly-badger_8/benchmark_report.txt
|
||||
AsciiDoc report saved to: /tmp/benchmark_next-orly-badger_8/benchmark_report.adoc
|
||||
|
||||
RELAY_NAME: next-orly-badger
|
||||
RELAY_URL: ws://next-orly-badger:8080
|
||||
TEST_TIMESTAMP: 2025-11-19T05:50:04+00:00
|
||||
BENCHMARK_CONFIG:
|
||||
Events: 50000
|
||||
Workers: 24
|
||||
Duration: 60s
|
||||
@@ -0,0 +1,422 @@
|
||||
Starting Nostr Relay Benchmark (Badger Backend)
|
||||
Data Directory: /tmp/benchmark_next-orly-dgraph_8
|
||||
Events: 50000, Workers: 24, Duration: 1m0s
|
||||
1763531409344607ℹ️ migrating to version 1... /build/pkg/database/migrations.go:66
|
||||
1763531409344681ℹ️ migrating to version 2... /build/pkg/database/migrations.go:73
|
||||
1763531409344706ℹ️ migrating to version 3... /build/pkg/database/migrations.go:80
|
||||
1763531409344712ℹ️ cleaning up ephemeral events (kinds 20000-29999)... /build/pkg/database/migrations.go:287
|
||||
1763531409344720ℹ️ cleaned up 0 ephemeral events from database /build/pkg/database/migrations.go:332
|
||||
1763531409344735ℹ️ migrating to version 4... /build/pkg/database/migrations.go:87
|
||||
1763531409344740ℹ️ converting events to optimized inline storage (Reiser4 optimization)... /build/pkg/database/migrations.go:340
|
||||
1763531409344750ℹ️ found 0 events to convert (0 regular, 0 replaceable, 0 addressable) /build/pkg/database/migrations.go:429
|
||||
1763531409344755ℹ️ migration complete: converted 0 events to optimized inline storage, deleted 0 old keys /build/pkg/database/migrations.go:538
|
||||
Loading real-world sample events from embedded data...
|
||||
Loading real-world sample events (11,596 events from 6 months of Nostr)...
|
||||
Loaded 11596 real-world events (already signed, zero crypto overhead)
|
||||
|
||||
Event Statistics:
|
||||
Total events: 11596
|
||||
Average content size: 588 bytes
|
||||
Event kinds found: 25 unique
|
||||
Most common kinds:
|
||||
Kind 1: 7152 events
|
||||
Kind 7: 1973 events
|
||||
Kind 6: 934 events
|
||||
Kind 10002: 337 events
|
||||
Kind 0: 290 events
|
||||
|
||||
|
||||
╔════════════════════════════════════════════════════════╗
|
||||
║ BADGER BACKEND BENCHMARK SUITE ║
|
||||
╚════════════════════════════════════════════════════════╝
|
||||
|
||||
=== Starting Badger benchmark ===
|
||||
RunPeakThroughputTest (Badger)..
|
||||
|
||||
=== Peak Throughput Test ===
|
||||
1763531409759610🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409776086🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409776771🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409776804🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409778374🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409779152🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409784971🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409785617🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409785633🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409800163🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409801153🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409801420🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409802414🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409862218🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409893021🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409893729🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409893845🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409903047🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409903106🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531409903118⚠️ failed to process deletion for event 900e73566bb098d7ec1880ec68521ef76e066b933d4d6b71dbe99ee156c4b307: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531409903232🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409903259🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531409903268⚠️ failed to process deletion for event ecd7b942d5a473589b4a3bc34f0b3dadf0c6e0ba9325d7a47604167acc757d5c: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531409915985🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409923045🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409923074🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409924533🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409924591🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409931212🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409931262🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409931215🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409931529🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409931623🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409931683🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409931717🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409932268🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409932860🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409933379🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409934990🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409935370🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409940251🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409940354🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531409940445🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410018217🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410580488🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410581675🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410581900🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410582040🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410585617🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410585827🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410586939🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410587543🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410589137🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410589245🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410589709🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410589866🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410590173🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410591177🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410591619🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410591882🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410591940🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410593576🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410593582🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410595220🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410595270🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531410595283⚠️ failed to process deletion for event 63eae8af9f42e2d37f93b1277bcf708c94aeb8935dd83d1e8e80136c4e4f8292: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531410601931🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410602639🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410602948🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410603018🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410603032🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410604054🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410615476🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410618852🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410621310🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410622085🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410622542🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410622694🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410623081🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531410623190⚠️ failed to process deletion for event 2f5e01050c81c0d711e9f391726af47933b5fcfbe497434164069787d201e3b9: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531410625660🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410625875🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410627147🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410628773🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410628799🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410631527🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410633749🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410635043🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410635129🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410636981🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410637344🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410637661🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410637900🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410640346🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410640479🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410641582🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410642954🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410643510🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410644729🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410645234🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410646826🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410653499🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410655186🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410656858🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410657174🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410662374🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410663158🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410667648🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410667651🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410669820🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410670020🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410670837🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410670876🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410671525🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410671553🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531410671564⚠️ failed to process deletion for event e56f683d8a3ad6a1d7ed41f50bf2739179ac8f6e1418ff34e5e20903172237ea: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531410672779🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410674901🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410676001🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410681122🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410681358🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410681494🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410683894🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410685543🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410687981🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410688533🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410724866🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410724928🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531410724940⚠️ failed to process deletion for event 4b07094ff22787f584f5ceddc11ae44c66ab513d01d7529e156d6adb75323eca: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531410724987🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410770270🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410777849🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410778883🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410779911🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410780788🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410780841🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531410780854⚠️ failed to process deletion for event 59475e9f41d77977a2b2c0d9acf7c32bad368dafdeab1e8f7be8cf0fe0e00ceb: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531410781677🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410791857🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410794114🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410794283🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410796455🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410797679🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410798175🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410799065🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410802177🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410803368🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410804150🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410804338🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410804382🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410804458🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410804719🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410821062🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410833464🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410834106🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410834246🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410835105🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410836569🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410837441🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410837610🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410837763🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410840857🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410841784🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410842816🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410842931🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410843145🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410843483🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410844039🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410846135🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410846834🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410848379🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410850717🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410852878🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410853093🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531410853211⚠️ failed to process deletion for event bd502ba9dc5c173b3b82708561f35118e2ca580f9c7e5baffceccdd9f6502462: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531410852879🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410853359🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531410853372⚠️ failed to process deletion for event 961a3d9582d896fcd8755ccc634b7846e549131284740f6fec0d635d0bb072af: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531410853308🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410853791🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410855175🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410856611🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410857598🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410858251🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410859031🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410860805🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410862140🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410862321🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410862439🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410863187🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410863202🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410864904🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410868122🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410869575🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410869665🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410870058🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410870128🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410870884🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410874467🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410875395🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410891523🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410892283🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410893472🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410894764🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410895562🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410895719🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410896070🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410897173🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410897187🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410897198🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410897778🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410897979🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410898440🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410898758🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410898832🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410899952🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410900622🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410933276🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410933374🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410933901🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410934099🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410934447🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410934494🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410935849🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410935923🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410936168🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410936541🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410936556🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410936570🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410937707🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531410937742🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
Events saved: 11594/50000 (23.2%), errors: 38406
|
||||
Duration: 4.355930627s
|
||||
Events/sec: 2661.66
|
||||
Avg latency: 4.795769ms
|
||||
P90 latency: 4.155613ms
|
||||
P95 latency: 6.029522ms
|
||||
P99 latency: 90.290502ms
|
||||
Bottom 10% Avg latency: 1.212562ms
|
||||
Wiping database between tests...
|
||||
RunBurstPatternTest (Badger)..
|
||||
|
||||
=== Burst Pattern Test ===
|
||||
Burst completed: 5000 events in 347.262129ms
|
||||
Burst completed: 5000 events in 340.789843ms
|
||||
Burst completed: 5000 events in 335.779512ms
|
||||
Burst completed: 5000 events in 337.508905ms
|
||||
Burst completed: 5000 events in 332.483505ms
|
||||
Burst completed: 5000 events in 330.245503ms
|
||||
Burst completed: 5000 events in 327.047944ms
|
||||
Burst completed: 5000 events in 337.854803ms
|
||||
Burst completed: 5000 events in 341.472684ms
|
||||
Burst completed: 5000 events in 338.139736ms
|
||||
Burst test completed: 0 events in 8.375225019s, errors: 50000
|
||||
Events/sec: 0.00
|
||||
Wiping database between tests...
|
||||
RunMixedReadWriteTest (Badger)..
|
||||
|
||||
=== Mixed Read/Write Test ===
|
||||
Pre-populating database for read tests...
|
||||
Mixed test completed: 0 writes, 25000 reads in 22.648388132s
|
||||
Combined ops/sec: 1103.83
|
||||
Wiping database between tests...
|
||||
RunQueryTest (Badger)..
|
||||
|
||||
=== Query Test ===
|
||||
Pre-populating database with 10000 events for query tests...
|
||||
Query test completed: 392001 queries in 1m0.005057189s
|
||||
Queries/sec: 6532.80
|
||||
Avg query latency: 1.635372ms
|
||||
P95 query latency: 5.6029ms
|
||||
P99 query latency: 9.496203ms
|
||||
Wiping database between tests...
|
||||
RunConcurrentQueryStoreTest (Badger)..
|
||||
|
||||
=== Concurrent Query/Store Test ===
|
||||
Pre-populating database with 5000 events for concurrent query/store test...
|
||||
Concurrent test completed: 566246 operations (566246 queries, 0 writes) in 1m0.00114177s
|
||||
Operations/sec: 9437.25
|
||||
Avg latency: 45.308µs
|
||||
Avg query latency: 45.308µs
|
||||
Avg write latency: 0s
|
||||
P95 latency: 87.115µs
|
||||
P99 latency: 132.623µs
|
||||
|
||||
=== Badger benchmark completed ===
|
||||
|
||||
|
||||
================================================================================
|
||||
BENCHMARK REPORT
|
||||
================================================================================
|
||||
|
||||
Test: Peak Throughput
|
||||
Duration: 4.355930627s
|
||||
Total Events: 11594
|
||||
Events/sec: 2661.66
|
||||
Success Rate: 23.2%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 344 MB
|
||||
Avg Latency: 4.795769ms
|
||||
P90 Latency: 4.155613ms
|
||||
P95 Latency: 6.029522ms
|
||||
P99 Latency: 90.290502ms
|
||||
Bottom 10% Avg Latency: 1.212562ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Burst Pattern
|
||||
Duration: 8.375225019s
|
||||
Total Events: 0
|
||||
Events/sec: 0.00
|
||||
Success Rate: 0.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 368 MB
|
||||
Avg Latency: 0s
|
||||
P90 Latency: 0s
|
||||
P95 Latency: 0s
|
||||
P99 Latency: 0s
|
||||
Bottom 10% Avg Latency: 0s
|
||||
----------------------------------------
|
||||
|
||||
Test: Mixed Read/Write
|
||||
Duration: 22.648388132s
|
||||
Total Events: 25000
|
||||
Events/sec: 1103.83
|
||||
Success Rate: 50.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 335 MB
|
||||
Avg Latency: 82.523µs
|
||||
P90 Latency: 103.357µs
|
||||
P95 Latency: 115.35µs
|
||||
P99 Latency: 145.828µs
|
||||
Bottom 10% Avg Latency: 129.81µs
|
||||
Errors (25000):
|
||||
- blocked: event already exists: 0312061d336fd22dc64b98130663835242e4479c54c7ca88b72c3b3093ef29a2
|
||||
- blocked: event already exists: 06061b630fd0881cbe7ed02114584fea59b9621c2e9479e6e6aa2be561240a90
|
||||
- blocked: event already exists: 0ce484c600cb1c0b33f1e38ddea4b38a47069615d22114a9c621a9164d9b6218
|
||||
- blocked: event already exists: 2197ff7ffc723d2fb4f7e44aeaf0ed8c2e0e2f3fb3aae29f2e33e0683ddf1a99
|
||||
- blocked: event already exists: 0ea6723d131534cf6e2209169a518c4bc598e3acad0618c2ef34df34c867cca1
|
||||
... and 24995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Test: Query Performance
|
||||
Duration: 1m0.005057189s
|
||||
Total Events: 392001
|
||||
Events/sec: 6532.80
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 421 MB
|
||||
Avg Latency: 1.635372ms
|
||||
P90 Latency: 4.618756ms
|
||||
P95 Latency: 5.6029ms
|
||||
P99 Latency: 9.496203ms
|
||||
Bottom 10% Avg Latency: 6.522705ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Concurrent Query/Store
|
||||
Duration: 1m0.00114177s
|
||||
Total Events: 566246
|
||||
Events/sec: 9437.25
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 437 MB
|
||||
Avg Latency: 45.308µs
|
||||
P90 Latency: 76.856µs
|
||||
P95 Latency: 87.115µs
|
||||
P99 Latency: 132.623µs
|
||||
Bottom 10% Avg Latency: 98.925µs
|
||||
Errors (50000):
|
||||
- blocked: event already exists: 0312061d336fd22dc64b98130663835242e4479c54c7ca88b72c3b3093ef29a2
|
||||
- blocked: event already exists: 01e9943cf5e805283c512b9c26cf69f7e9ff412710d7543a3a52dc93ac7e8a57
|
||||
- blocked: event already exists: 1642d6770a74de7ca45169bc76dab334591bcb2191044da0b18459888164f9fc
|
||||
- blocked: event already exists: 15c0a862ce4191bc51a1b668f77869c13cd81fd0af9473759a04ce2637a8860a
|
||||
- blocked: event already exists: 0e0b4dfd5e4ecfb0d3acb8db48d13833edeac5163fbcba9fb94160b686c07595
|
||||
... and 49995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Report saved to: /tmp/benchmark_next-orly-dgraph_8/benchmark_report.txt
|
||||
AsciiDoc report saved to: /tmp/benchmark_next-orly-dgraph_8/benchmark_report.adoc
|
||||
|
||||
RELAY_NAME: next-orly-dgraph
|
||||
RELAY_URL: ws://next-orly-dgraph:8080
|
||||
TEST_TIMESTAMP: 2025-11-19T05:53:26+00:00
|
||||
BENCHMARK_CONFIG:
|
||||
Events: 50000
|
||||
Workers: 24
|
||||
Duration: 60s
|
||||
@@ -0,0 +1,422 @@
|
||||
Starting Nostr Relay Benchmark (Badger Backend)
|
||||
Data Directory: /tmp/benchmark_next-orly-neo4j_8
|
||||
Events: 50000, Workers: 24, Duration: 1m0s
|
||||
1763531611066103ℹ️ migrating to version 1... /build/pkg/database/migrations.go:66
|
||||
1763531611066178ℹ️ migrating to version 2... /build/pkg/database/migrations.go:73
|
||||
1763531611066207ℹ️ migrating to version 3... /build/pkg/database/migrations.go:80
|
||||
1763531611066214ℹ️ cleaning up ephemeral events (kinds 20000-29999)... /build/pkg/database/migrations.go:287
|
||||
1763531611066225ℹ️ cleaned up 0 ephemeral events from database /build/pkg/database/migrations.go:332
|
||||
1763531611066244ℹ️ migrating to version 4... /build/pkg/database/migrations.go:87
|
||||
1763531611066251ℹ️ converting events to optimized inline storage (Reiser4 optimization)... /build/pkg/database/migrations.go:340
|
||||
1763531611066267ℹ️ found 0 events to convert (0 regular, 0 replaceable, 0 addressable) /build/pkg/database/migrations.go:429
|
||||
1763531611066274ℹ️ migration complete: converted 0 events to optimized inline storage, deleted 0 old keys /build/pkg/database/migrations.go:538
|
||||
Loading real-world sample events from embedded data...
|
||||
Loading real-world sample events (11,596 events from 6 months of Nostr)...
|
||||
Loaded 11596 real-world events (already signed, zero crypto overhead)
|
||||
|
||||
Event Statistics:
|
||||
Total events: 11596
|
||||
Average content size: 588 bytes
|
||||
Event kinds found: 25 unique
|
||||
Most common kinds:
|
||||
Kind 1: 7152 events
|
||||
Kind 7: 1973 events
|
||||
Kind 6: 934 events
|
||||
Kind 10002: 337 events
|
||||
Kind 0: 290 events
|
||||
|
||||
|
||||
╔════════════════════════════════════════════════════════╗
|
||||
║ BADGER BACKEND BENCHMARK SUITE ║
|
||||
╚════════════════════════════════════════════════════════╝
|
||||
|
||||
=== Starting Badger benchmark ===
|
||||
RunPeakThroughputTest (Badger)..
|
||||
|
||||
=== Peak Throughput Test ===
|
||||
1763531611477120🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611493941🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611494126🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611494926🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611496231🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611496246🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611502279🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611503297🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611503330🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611518900🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611518891🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611519488🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611519747🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611577871🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611606029🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611606900🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611606947🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611614519🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611614565🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531611614574⚠️ failed to process deletion for event ecd7b942d5a473589b4a3bc34f0b3dadf0c6e0ba9325d7a47604167acc757d5c: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531611614525🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611614608🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531611614621⚠️ failed to process deletion for event 900e73566bb098d7ec1880ec68521ef76e066b933d4d6b71dbe99ee156c4b307: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531611624602🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611629772🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611629796🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611631851🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611631931🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611636831🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611636859🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611638048🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611638089🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611638115🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611638587🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611638716🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611639199🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611639225🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611639803🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611639863🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611640930🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611644335🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611644684🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611644898🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531611708589🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612171835🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612172653🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612172732🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612173556🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612175511🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612177118🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612177776🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612178379🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612178372🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612178397🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612179258🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612179440🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612179480🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612179957🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612180057🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612181198🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612181239🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612181692🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612182749🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612183455🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612183483🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531612183491⚠️ failed to process deletion for event 63eae8af9f42e2d37f93b1277bcf708c94aeb8935dd83d1e8e80136c4e4f8292: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531612189208🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612189347🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612189377🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612189422🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612189435🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612190775🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612199207🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612202839🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612204455🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612204751🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612204774🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531612204782⚠️ failed to process deletion for event 2f5e01050c81c0d711e9f391726af47933b5fcfbe497434164069787d201e3b9: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531612205235🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612205306🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612205344🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612206263🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612209033🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612209322🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612209353🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612210019🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612210383🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612210675🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612211567🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612211774🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612211848🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612212220🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612212273🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612213270🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612213282🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612216359🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612216384🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612217080🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612217427🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612218474🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612219554🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612221869🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612224539🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612225032🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612228378🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612230581🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612230736🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612232890🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612234376🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612234461🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612236593🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612236643🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531612236655⚠️ failed to process deletion for event e56f683d8a3ad6a1d7ed41f50bf2739179ac8f6e1418ff34e5e20903172237ea: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531612236622🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612236896🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612236930🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612242225🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612243552🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612244820🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612247851🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612248039🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612248536🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612248584🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612249053🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612251606🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612251935🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612251974🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612251979🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531612251986⚠️ failed to process deletion for event 4b07094ff22787f584f5ceddc11ae44c66ab513d01d7529e156d6adb75323eca: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531612253040🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612255159🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612261269🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612261370🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612261469🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612262573🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612262697🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612262722🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531612262731⚠️ failed to process deletion for event 59475e9f41d77977a2b2c0d9acf7c32bad368dafdeab1e8f7be8cf0fe0e00ceb: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531612294932🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612296429🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612315617🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612316570🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612317612🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612317766🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612317970🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612318694🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612321488🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612342151🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612342215🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612342415🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612342612🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612342903🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612351936🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612360967🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612361147🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612362355🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612364716🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612365603🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612365742🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612365902🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612365920🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612367122🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612367371🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612367380🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612368070🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612368460🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612368669🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612370166🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612372335🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612372509🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612373590🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612373895🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612374191🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612374269🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612374283🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531612374293⚠️ failed to process deletion for event bd502ba9dc5c173b3b82708561f35118e2ca580f9c7e5baffceccdd9f6502462: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531612374421🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612374456🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763531612374466⚠️ failed to process deletion for event 961a3d9582d896fcd8755ccc634b7846e549131284740f6fec0d635d0bb072af: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763531612374683🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612377078🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612378475🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612379970🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612380111🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612380109🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612382815🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612382875🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612382834🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612383146🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612383524🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612384208🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612386086🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612386271🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612387633🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612388100🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612388149🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612388240🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612388288🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612388990🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612389041🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612389077🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612390273🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612391060🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612392786🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612392907🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612394095🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612394516🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612394715🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612394732🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612395297🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612395359🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612395657🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612395823🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612395851🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612396829🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612397908🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612399692🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612401330🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612401868🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612404794🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612404977🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612405122🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612405322🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612405815🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612405838🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612406058🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612418956🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612419108🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612419316🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612419579🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763531612420418🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
Events saved: 11592/50000 (23.2%), errors: 38408
|
||||
Duration: 4.099682418s
|
||||
Events/sec: 2827.54
|
||||
Avg latency: 4.203722ms
|
||||
P90 latency: 3.345671ms
|
||||
P95 latency: 4.568189ms
|
||||
P99 latency: 88.030281ms
|
||||
Bottom 10% Avg latency: 1.124184ms
|
||||
Wiping database between tests...
|
||||
RunBurstPatternTest (Badger)..
|
||||
|
||||
=== Burst Pattern Test ===
|
||||
Burst completed: 5000 events in 335.33957ms
|
||||
Burst completed: 5000 events in 338.195898ms
|
||||
Burst completed: 5000 events in 346.791988ms
|
||||
Burst completed: 5000 events in 361.72302ms
|
||||
Burst completed: 5000 events in 332.900946ms
|
||||
Burst completed: 5000 events in 335.52954ms
|
||||
Burst completed: 5000 events in 342.175918ms
|
||||
Burst completed: 5000 events in 339.522755ms
|
||||
Burst completed: 5000 events in 334.46846ms
|
||||
Burst completed: 5000 events in 336.071402ms
|
||||
Burst test completed: 0 events in 8.409696337s, errors: 50000
|
||||
Events/sec: 0.00
|
||||
Wiping database between tests...
|
||||
RunMixedReadWriteTest (Badger)..
|
||||
|
||||
=== Mixed Read/Write Test ===
|
||||
Pre-populating database for read tests...
|
||||
Mixed test completed: 0 writes, 25000 reads in 22.513827505s
|
||||
Combined ops/sec: 1110.43
|
||||
Wiping database between tests...
|
||||
RunQueryTest (Badger)..
|
||||
|
||||
=== Query Test ===
|
||||
Pre-populating database with 10000 events for query tests...
|
||||
Query test completed: 405671 queries in 1m0.004332664s
|
||||
Queries/sec: 6760.70
|
||||
Avg query latency: 1.570056ms
|
||||
P95 query latency: 5.35134ms
|
||||
P99 query latency: 9.169641ms
|
||||
Wiping database between tests...
|
||||
RunConcurrentQueryStoreTest (Badger)..
|
||||
|
||||
=== Concurrent Query/Store Test ===
|
||||
Pre-populating database with 5000 events for concurrent query/store test...
|
||||
Concurrent test completed: 567760 operations (567760 queries, 0 writes) in 1m0.000235118s
|
||||
Operations/sec: 9462.63
|
||||
Avg latency: 46.433µs
|
||||
Avg query latency: 46.433µs
|
||||
Avg write latency: 0s
|
||||
P95 latency: 89.831µs
|
||||
P99 latency: 135.768µs
|
||||
|
||||
=== Badger benchmark completed ===
|
||||
|
||||
|
||||
================================================================================
|
||||
BENCHMARK REPORT
|
||||
================================================================================
|
||||
|
||||
Test: Peak Throughput
|
||||
Duration: 4.099682418s
|
||||
Total Events: 11592
|
||||
Events/sec: 2827.54
|
||||
Success Rate: 23.2%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 356 MB
|
||||
Avg Latency: 4.203722ms
|
||||
P90 Latency: 3.345671ms
|
||||
P95 Latency: 4.568189ms
|
||||
P99 Latency: 88.030281ms
|
||||
Bottom 10% Avg Latency: 1.124184ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Burst Pattern
|
||||
Duration: 8.409696337s
|
||||
Total Events: 0
|
||||
Events/sec: 0.00
|
||||
Success Rate: 0.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 393 MB
|
||||
Avg Latency: 0s
|
||||
P90 Latency: 0s
|
||||
P95 Latency: 0s
|
||||
P99 Latency: 0s
|
||||
Bottom 10% Avg Latency: 0s
|
||||
----------------------------------------
|
||||
|
||||
Test: Mixed Read/Write
|
||||
Duration: 22.513827505s
|
||||
Total Events: 25000
|
||||
Events/sec: 1110.43
|
||||
Success Rate: 50.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 363 MB
|
||||
Avg Latency: 79.478µs
|
||||
P90 Latency: 101.042µs
|
||||
P95 Latency: 112.755µs
|
||||
P99 Latency: 136.991µs
|
||||
Bottom 10% Avg Latency: 121.765µs
|
||||
Errors (25000):
|
||||
- blocked: event already exists: 238d2d2e1ddb3af636472dbf573fa52cbfc81509a9ba2f4a6902efacd5e32bbf
|
||||
- blocked: event already exists: 048d7b07155b3832a76eac0b46bea764cac3597dfbc28b559698d51f915cb6d1
|
||||
- blocked: event already exists: 1ebc80bd3bb172fc38ce786e0717e9c82691cd495f0de9863c892284cbe47ca3
|
||||
- blocked: event already exists: 00a5f5f6c7f1c4e6f71ab7df2c056e238ccd9b441e59ddf119d7ab7f1d7510e0
|
||||
- blocked: event already exists: 2197ff7ffc723d2fb4f7e44aeaf0ed8c2e0e2f3fb3aae29f2e33e0683ddf1a99
|
||||
... and 24995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Test: Query Performance
|
||||
Duration: 1m0.004332664s
|
||||
Total Events: 405671
|
||||
Events/sec: 6760.70
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 372 MB
|
||||
Avg Latency: 1.570056ms
|
||||
P90 Latency: 4.354101ms
|
||||
P95 Latency: 5.35134ms
|
||||
P99 Latency: 9.169641ms
|
||||
Bottom 10% Avg Latency: 6.228096ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Concurrent Query/Store
|
||||
Duration: 1m0.000235118s
|
||||
Total Events: 567760
|
||||
Events/sec: 9462.63
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 303 MB
|
||||
Avg Latency: 46.433µs
|
||||
P90 Latency: 79.071µs
|
||||
P95 Latency: 89.831µs
|
||||
P99 Latency: 135.768µs
|
||||
Bottom 10% Avg Latency: 102.136µs
|
||||
Errors (50000):
|
||||
- blocked: event already exists: 01e9943cf5e805283c512b9c26cf69f7e9ff412710d7543a3a52dc93ac7e8a57
|
||||
- blocked: event already exists: 0312061d336fd22dc64b98130663835242e4479c54c7ca88b72c3b3093ef29a2
|
||||
- blocked: event already exists: 05bf5bbba1a1fa85b9a5aaca7ff384d8e09a1b2441c01df5780c1bc99e377f85
|
||||
- blocked: event already exists: 0b50149a50e29b084c63f0b0d16a8d280445eb389e53b5c688f654665e9d56f5
|
||||
- blocked: event already exists: 0ea6723d131534cf6e2209169a518c4bc598e3acad0618c2ef34df34c867cca1
|
||||
... and 49995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Report saved to: /tmp/benchmark_next-orly-neo4j_8/benchmark_report.txt
|
||||
AsciiDoc report saved to: /tmp/benchmark_next-orly-neo4j_8/benchmark_report.adoc
|
||||
|
||||
RELAY_NAME: next-orly-neo4j
|
||||
RELAY_URL: ws://next-orly-neo4j:8080
|
||||
TEST_TIMESTAMP: 2025-11-19T05:56:47+00:00
|
||||
BENCHMARK_CONFIG:
|
||||
Events: 50000
|
||||
Workers: 24
|
||||
Duration: 60s
|
||||
@@ -0,0 +1,422 @@
|
||||
Starting Nostr Relay Benchmark (Badger Backend)
|
||||
Data Directory: /tmp/benchmark_nostr-rs-relay_8
|
||||
Events: 50000, Workers: 24, Duration: 1m0s
|
||||
1763532618524528ℹ️ migrating to version 1... /build/pkg/database/migrations.go:66
|
||||
1763532618524580ℹ️ migrating to version 2... /build/pkg/database/migrations.go:73
|
||||
1763532618524706ℹ️ migrating to version 3... /build/pkg/database/migrations.go:80
|
||||
1763532618524736ℹ️ cleaning up ephemeral events (kinds 20000-29999)... /build/pkg/database/migrations.go:287
|
||||
1763532618524748ℹ️ cleaned up 0 ephemeral events from database /build/pkg/database/migrations.go:332
|
||||
1763532618524776ℹ️ migrating to version 4... /build/pkg/database/migrations.go:87
|
||||
1763532618524782ℹ️ converting events to optimized inline storage (Reiser4 optimization)... /build/pkg/database/migrations.go:340
|
||||
1763532618524802ℹ️ found 0 events to convert (0 regular, 0 replaceable, 0 addressable) /build/pkg/database/migrations.go:429
|
||||
1763532618524809ℹ️ migration complete: converted 0 events to optimized inline storage, deleted 0 old keys /build/pkg/database/migrations.go:538
|
||||
Loading real-world sample events from embedded data...
|
||||
Loading real-world sample events (11,596 events from 6 months of Nostr)...
|
||||
Loaded 11596 real-world events (already signed, zero crypto overhead)
|
||||
|
||||
Event Statistics:
|
||||
Total events: 11596
|
||||
Average content size: 588 bytes
|
||||
Event kinds found: 25 unique
|
||||
Most common kinds:
|
||||
Kind 1: 7152 events
|
||||
Kind 7: 1973 events
|
||||
Kind 6: 934 events
|
||||
Kind 10002: 337 events
|
||||
Kind 0: 290 events
|
||||
|
||||
|
||||
╔════════════════════════════════════════════════════════╗
|
||||
║ BADGER BACKEND BENCHMARK SUITE ║
|
||||
╚════════════════════════════════════════════════════════╝
|
||||
|
||||
=== Starting Badger benchmark ===
|
||||
RunPeakThroughputTest (Badger)..
|
||||
|
||||
=== Peak Throughput Test ===
|
||||
1763532618930740🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532618947610🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532618948005🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532618948153🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532618950675🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532618950682🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532618956383🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532618956435🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532618957227🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532618969491🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532618970468🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532618971159🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532618971247🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619031025🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619056683🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619056939🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619056952🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619066084🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619066142🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532619066155⚠️ failed to process deletion for event ecd7b942d5a473589b4a3bc34f0b3dadf0c6e0ba9325d7a47604167acc757d5c: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532619066695🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619066714🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532619066722⚠️ failed to process deletion for event 900e73566bb098d7ec1880ec68521ef76e066b933d4d6b71dbe99ee156c4b307: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532619075600🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619081811🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619081988🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619084508🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619084568🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619088652🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619088683🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619088782🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619088783🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619090006🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619090001🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619090069🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619090084🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619090099🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619090832🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619091518🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619092595🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619096499🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619096548🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619096606🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619162379🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619614266🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619615621🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619615626🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619616541🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619618933🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619618974🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619620317🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619620397🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619620471🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619620484🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619621043🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619621631🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619622165🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619622167🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619622439🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619623174🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619623181🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619623220🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619624801🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619625240🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619625269🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532619625280⚠️ failed to process deletion for event 63eae8af9f42e2d37f93b1277bcf708c94aeb8935dd83d1e8e80136c4e4f8292: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532619630065🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619630165🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619630661🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619630663🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619630821🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619631497🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619640145🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619642792🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619644723🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619644791🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619645300🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619645371🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619645379🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532619645401⚠️ failed to process deletion for event 2f5e01050c81c0d711e9f391726af47933b5fcfbe497434164069787d201e3b9: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532619645510🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619646269🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619648954🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619649062🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619649394🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619649929🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619650596🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619650999🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619651453🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619652135🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619652189🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619652230🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619652643🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619652686🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619654452🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619656038🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619656545🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619657094🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619658010🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619658015🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619660069🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619661973🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619665795🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619665815🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619668940🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619671219🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619671256🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619675066🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619675407🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619675880🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619676648🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619676831🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619678445🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619678987🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619679007🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532619679017⚠️ failed to process deletion for event e56f683d8a3ad6a1d7ed41f50bf2739179ac8f6e1418ff34e5e20903172237ea: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532619680059🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619682110🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619682946🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619686593🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619686642🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619686672🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619688599🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619688980🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619689992🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619691023🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619691071🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532619691081⚠️ failed to process deletion for event 4b07094ff22787f584f5ceddc11ae44c66ab513d01d7529e156d6adb75323eca: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532619691290🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619691789🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619693914🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619698356🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619701647🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619701967🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619702011🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532619702023⚠️ failed to process deletion for event 59475e9f41d77977a2b2c0d9acf7c32bad368dafdeab1e8f7be8cf0fe0e00ceb: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532619701971🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619702353🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619767837🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619770711🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619771475🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619771496🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619771616🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619771785🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619773121🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619773706🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619774076🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619775012🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619775202🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619775616🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619776224🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619776225🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619783510🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619793083🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619793319🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619795252🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619795257🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619797760🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619798203🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619798747🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619798803🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619799361🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619799645🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619799874🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619800049🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619801225🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619801611🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619801686🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619803757🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619804436🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619805033🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619805964🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619806089🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619806114🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532619806125⚠️ failed to process deletion for event bd502ba9dc5c173b3b82708561f35118e2ca580f9c7e5baffceccdd9f6502462: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532619806587🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619806617🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532619806627⚠️ failed to process deletion for event 961a3d9582d896fcd8755ccc634b7846e549131284740f6fec0d635d0bb072af: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532619806746🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619806955🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619809241🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619809253🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619812247🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619812468🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619812745🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619814622🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619815324🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619815599🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619816082🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619816174🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619816840🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619818752🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619819942🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619820073🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619820832🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619821226🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619821604🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619822845🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619822980🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619823804🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619823916🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619824109🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619826241🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619827137🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619827419🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619827882🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619828527🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619828762🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619829430🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619829777🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619829830🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619829856🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619829867🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619830712🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619831911🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619835536🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619835629🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619839021🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619839121🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619839259🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619841819🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619842315🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619843356🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619843525🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619846344🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619859073🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619859232🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619859436🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619859611🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619859674🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532619859797🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
Events saved: 11594/50000 (23.2%), errors: 38406
|
||||
Duration: 4.021053985s
|
||||
Events/sec: 2883.32
|
||||
Avg latency: 4.044321ms
|
||||
P90 latency: 3.344231ms
|
||||
P95 latency: 4.602719ms
|
||||
P99 latency: 79.2846ms
|
||||
Bottom 10% Avg latency: 1.103637ms
|
||||
Wiping database between tests...
|
||||
RunBurstPatternTest (Badger)..
|
||||
|
||||
=== Burst Pattern Test ===
|
||||
Burst completed: 5000 events in 352.280501ms
|
||||
Burst completed: 5000 events in 344.717192ms
|
||||
Burst completed: 5000 events in 342.785392ms
|
||||
Burst completed: 5000 events in 348.707543ms
|
||||
Burst completed: 5000 events in 365.85074ms
|
||||
Burst completed: 5000 events in 351.601335ms
|
||||
Burst completed: 5000 events in 349.046538ms
|
||||
Burst completed: 5000 events in 345.187947ms
|
||||
Burst completed: 5000 events in 343.795123ms
|
||||
Burst completed: 5000 events in 331.851049ms
|
||||
Burst test completed: 0 events in 8.481561189s, errors: 50000
|
||||
Events/sec: 0.00
|
||||
Wiping database between tests...
|
||||
RunMixedReadWriteTest (Badger)..
|
||||
|
||||
=== Mixed Read/Write Test ===
|
||||
Pre-populating database for read tests...
|
||||
Mixed test completed: 0 writes, 25000 reads in 22.659489061s
|
||||
Combined ops/sec: 1103.29
|
||||
Wiping database between tests...
|
||||
RunQueryTest (Badger)..
|
||||
|
||||
=== Query Test ===
|
||||
Pre-populating database with 10000 events for query tests...
|
||||
Query test completed: 405016 queries in 1m0.004544583s
|
||||
Queries/sec: 6749.76
|
||||
Avg query latency: 1.573632ms
|
||||
P95 query latency: 5.332888ms
|
||||
P99 query latency: 9.122117ms
|
||||
Wiping database between tests...
|
||||
RunConcurrentQueryStoreTest (Badger)..
|
||||
|
||||
=== Concurrent Query/Store Test ===
|
||||
Pre-populating database with 5000 events for concurrent query/store test...
|
||||
Concurrent test completed: 566215 operations (566215 queries, 0 writes) in 1m0.001155402s
|
||||
Operations/sec: 9436.73
|
||||
Avg latency: 45.72µs
|
||||
Avg query latency: 45.72µs
|
||||
Avg write latency: 0s
|
||||
P95 latency: 88.218µs
|
||||
P99 latency: 131.26µs
|
||||
|
||||
=== Badger benchmark completed ===
|
||||
|
||||
|
||||
================================================================================
|
||||
BENCHMARK REPORT
|
||||
================================================================================
|
||||
|
||||
Test: Peak Throughput
|
||||
Duration: 4.021053985s
|
||||
Total Events: 11594
|
||||
Events/sec: 2883.32
|
||||
Success Rate: 23.2%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 379 MB
|
||||
Avg Latency: 4.044321ms
|
||||
P90 Latency: 3.344231ms
|
||||
P95 Latency: 4.602719ms
|
||||
P99 Latency: 79.2846ms
|
||||
Bottom 10% Avg Latency: 1.103637ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Burst Pattern
|
||||
Duration: 8.481561189s
|
||||
Total Events: 0
|
||||
Events/sec: 0.00
|
||||
Success Rate: 0.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 259 MB
|
||||
Avg Latency: 0s
|
||||
P90 Latency: 0s
|
||||
P95 Latency: 0s
|
||||
P99 Latency: 0s
|
||||
Bottom 10% Avg Latency: 0s
|
||||
----------------------------------------
|
||||
|
||||
Test: Mixed Read/Write
|
||||
Duration: 22.659489061s
|
||||
Total Events: 25000
|
||||
Events/sec: 1103.29
|
||||
Success Rate: 50.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 267 MB
|
||||
Avg Latency: 82.3µs
|
||||
P90 Latency: 102.856µs
|
||||
P95 Latency: 114.679µs
|
||||
P99 Latency: 142.963µs
|
||||
Bottom 10% Avg Latency: 130.591µs
|
||||
Errors (25000):
|
||||
- blocked: event already exists: 238d2d2e1ddb3af636472dbf573fa52cbfc81509a9ba2f4a6902efacd5e32bbf
|
||||
- blocked: event already exists: 00a5f5f6c7f1c4e6f71ab7df2c056e238ccd9b441e59ddf119d7ab7f1d7510e0
|
||||
- blocked: event already exists: 1ebc80bd3bb172fc38ce786e0717e9c82691cd495f0de9863c892284cbe47ca3
|
||||
- blocked: event already exists: 1642d6770a74de7ca45169bc76dab334591bcb2191044da0b18459888164f9fc
|
||||
- blocked: event already exists: 2197ff7ffc723d2fb4f7e44aeaf0ed8c2e0e2f3fb3aae29f2e33e0683ddf1a99
|
||||
... and 24995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Test: Query Performance
|
||||
Duration: 1m0.004544583s
|
||||
Total Events: 405016
|
||||
Events/sec: 6749.76
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 446 MB
|
||||
Avg Latency: 1.573632ms
|
||||
P90 Latency: 4.427874ms
|
||||
P95 Latency: 5.332888ms
|
||||
P99 Latency: 9.122117ms
|
||||
Bottom 10% Avg Latency: 6.229587ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Concurrent Query/Store
|
||||
Duration: 1m0.001155402s
|
||||
Total Events: 566215
|
||||
Events/sec: 9436.73
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 333 MB
|
||||
Avg Latency: 45.72µs
|
||||
P90 Latency: 78.159µs
|
||||
P95 Latency: 88.218µs
|
||||
P99 Latency: 131.26µs
|
||||
Bottom 10% Avg Latency: 99.957µs
|
||||
Errors (50000):
|
||||
- blocked: event already exists: 0ce484c600cb1c0b33f1e38ddea4b38a47069615d22114a9c621a9164d9b6218
|
||||
- blocked: event already exists: 048d7b07155b3832a76eac0b46bea764cac3597dfbc28b559698d51f915cb6d1
|
||||
- blocked: event already exists: 05bf5bbba1a1fa85b9a5aaca7ff384d8e09a1b2441c01df5780c1bc99e377f85
|
||||
- blocked: event already exists: 0e0b4dfd5e4ecfb0d3acb8db48d13833edeac5163fbcba9fb94160b686c07595
|
||||
- blocked: event already exists: 0b50149a50e29b084c63f0b0d16a8d280445eb389e53b5c688f654665e9d56f5
|
||||
... and 49995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Report saved to: /tmp/benchmark_nostr-rs-relay_8/benchmark_report.txt
|
||||
AsciiDoc report saved to: /tmp/benchmark_nostr-rs-relay_8/benchmark_report.adoc
|
||||
|
||||
RELAY_NAME: nostr-rs-relay
|
||||
RELAY_URL: ws://nostr-rs-relay:8080
|
||||
TEST_TIMESTAMP: 2025-11-19T06:13:35+00:00
|
||||
BENCHMARK_CONFIG:
|
||||
Events: 50000
|
||||
Workers: 24
|
||||
Duration: 60s
|
||||
@@ -0,0 +1,422 @@
|
||||
Starting Nostr Relay Benchmark (Badger Backend)
|
||||
Data Directory: /tmp/benchmark_relayer-basic_8
|
||||
Events: 50000, Workers: 24, Duration: 1m0s
|
||||
1763532215281177ℹ️ migrating to version 1... /build/pkg/database/migrations.go:66
|
||||
1763532215281256ℹ️ migrating to version 2... /build/pkg/database/migrations.go:73
|
||||
1763532215281278ℹ️ migrating to version 3... /build/pkg/database/migrations.go:80
|
||||
1763532215281284ℹ️ cleaning up ephemeral events (kinds 20000-29999)... /build/pkg/database/migrations.go:287
|
||||
1763532215281295ℹ️ cleaned up 0 ephemeral events from database /build/pkg/database/migrations.go:332
|
||||
1763532215281311ℹ️ migrating to version 4... /build/pkg/database/migrations.go:87
|
||||
1763532215281316ℹ️ converting events to optimized inline storage (Reiser4 optimization)... /build/pkg/database/migrations.go:340
|
||||
1763532215281327ℹ️ found 0 events to convert (0 regular, 0 replaceable, 0 addressable) /build/pkg/database/migrations.go:429
|
||||
1763532215281332ℹ️ migration complete: converted 0 events to optimized inline storage, deleted 0 old keys /build/pkg/database/migrations.go:538
|
||||
Loading real-world sample events from embedded data...
|
||||
Loading real-world sample events (11,596 events from 6 months of Nostr)...
|
||||
Loaded 11596 real-world events (already signed, zero crypto overhead)
|
||||
|
||||
Event Statistics:
|
||||
Total events: 11596
|
||||
Average content size: 588 bytes
|
||||
Event kinds found: 25 unique
|
||||
Most common kinds:
|
||||
Kind 1: 7152 events
|
||||
Kind 7: 1973 events
|
||||
Kind 6: 934 events
|
||||
Kind 10002: 337 events
|
||||
Kind 0: 290 events
|
||||
|
||||
|
||||
╔════════════════════════════════════════════════════════╗
|
||||
║ BADGER BACKEND BENCHMARK SUITE ║
|
||||
╚════════════════════════════════════════════════════════╝
|
||||
|
||||
=== Starting Badger benchmark ===
|
||||
RunPeakThroughputTest (Badger)..
|
||||
|
||||
=== Peak Throughput Test ===
|
||||
1763532215753642🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215771026🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215771047🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215771043🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215773057🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215773950🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215779106🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215779989🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215780044🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215794879🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215794911🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215795258🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215795902🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215864347🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215895247🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215897706🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215897846🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215909272🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215909338🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532215909351⚠️ failed to process deletion for event ecd7b942d5a473589b4a3bc34f0b3dadf0c6e0ba9325d7a47604167acc757d5c: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532215909277🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215909376🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532215909396⚠️ failed to process deletion for event 900e73566bb098d7ec1880ec68521ef76e066b933d4d6b71dbe99ee156c4b307: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532215921004🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215927644🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215927729🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215932204🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215932223🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215937326🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215937353🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215937533🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215937559🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215937604🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215938283🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215938525🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215938584🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215939171🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215941078🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215942075🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215942140🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215946108🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215946935🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532215947070🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216034256🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216575480🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216575680🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216576613🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216577132🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216579189🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216580190🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216581187🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216581297🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216581843🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216581932🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216582485🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216583310🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216583354🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216583797🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216584179🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216584829🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216584822🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216584849🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216586369🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216586560🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216586587🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532216586598⚠️ failed to process deletion for event 63eae8af9f42e2d37f93b1277bcf708c94aeb8935dd83d1e8e80136c4e4f8292: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532216592409🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216594068🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216594133🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216594171🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216595199🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216596193🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216604932🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216608011🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216610501🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216610709🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216610735🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532216610746⚠️ failed to process deletion for event 2f5e01050c81c0d711e9f391726af47933b5fcfbe497434164069787d201e3b9: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532216611730🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216611905🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216612710🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216612972🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216614620🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216614890🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216616830🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216617705🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216617912🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216618767🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216619811🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216619813🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216620154🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216622289🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216622299🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216622670🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216622759🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216627036🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216627071🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216627681🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216628332🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216628497🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216630956🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216634023🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216636620🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216637097🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216640322🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216640755🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216642971🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216646272🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216646356🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216646716🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216649588🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216649624🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216649707🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216651798🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216651837🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532216651846⚠️ failed to process deletion for event e56f683d8a3ad6a1d7ed41f50bf2739179ac8f6e1418ff34e5e20903172237ea: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532216652546🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216652647🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216654682🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216660436🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216660454🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216660818🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216660850🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216660892🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216664192🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216664242🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532216664233🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216664284🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216664252⚠️ failed to process deletion for event 4b07094ff22787f584f5ceddc11ae44c66ab513d01d7529e156d6adb75323eca: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532216664431🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216666902🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216671811🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216671937🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216702320🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216702414🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216705566🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216705636🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532216705653⚠️ failed to process deletion for event 59475e9f41d77977a2b2c0d9acf7c32bad368dafdeab1e8f7be8cf0fe0e00ceb: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532216736068🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216772632🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216772740🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216772872🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216775232🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216776926🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216778944🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216780479🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216781325🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216781901🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216782007🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216781924🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216782662🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216782943🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216792109🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216801957🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216802118🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216805275🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216805608🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216806675🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216806729🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216807256🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216807332🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216807702🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216808008🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216809164🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216809928🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216810178🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216810343🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216810553🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216813468🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216813917🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216815051🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216815580🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216815621🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532216815633⚠️ failed to process deletion for event 961a3d9582d896fcd8755ccc634b7846e549131284740f6fec0d635d0bb072af: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532216815855🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216815887🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532216815896⚠️ failed to process deletion for event bd502ba9dc5c173b3b82708561f35118e2ca580f9c7e5baffceccdd9f6502462: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532216817137🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216817988🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216818038🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216820280🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216820593🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216822434🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216822533🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216823260🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216825570🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216825661🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216825770🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216825766🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216828334🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216828596🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216830967🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216832985🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216834147🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216834169🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216834173🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216834249🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216835001🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216835042🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216835016🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216835898🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216835986🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216840462🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216841175🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216841614🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216842304🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216847871🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216864133🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216905124🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216905300🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216905361🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216905362🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216905440🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216906234🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216907434🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216907471🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216907464🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216908059🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216908080🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216908591🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216908908🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216909192🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216910036🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216910306🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216910950🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216931514🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216931602🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216931779🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216931793🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216932984🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532216933171🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
Events saved: 11592/50000 (23.2%), errors: 38408
|
||||
Duration: 4.281033199s
|
||||
Events/sec: 2707.76
|
||||
Avg latency: 4.657987ms
|
||||
P90 latency: 4.233468ms
|
||||
P95 latency: 5.603449ms
|
||||
P99 latency: 68.611381ms
|
||||
Bottom 10% Avg latency: 1.266467ms
|
||||
Wiping database between tests...
|
||||
RunBurstPatternTest (Badger)..
|
||||
|
||||
=== Burst Pattern Test ===
|
||||
Burst completed: 5000 events in 351.189041ms
|
||||
Burst completed: 5000 events in 345.793588ms
|
||||
Burst completed: 5000 events in 349.58856ms
|
||||
Burst completed: 5000 events in 347.409606ms
|
||||
Burst completed: 5000 events in 336.805967ms
|
||||
Burst completed: 5000 events in 342.539694ms
|
||||
Burst completed: 5000 events in 333.331965ms
|
||||
Burst completed: 5000 events in 343.768734ms
|
||||
Burst completed: 5000 events in 348.390792ms
|
||||
Burst completed: 5000 events in 349.455321ms
|
||||
Burst test completed: 0 events in 8.454879556s, errors: 50000
|
||||
Events/sec: 0.00
|
||||
Wiping database between tests...
|
||||
RunMixedReadWriteTest (Badger)..
|
||||
|
||||
=== Mixed Read/Write Test ===
|
||||
Pre-populating database for read tests...
|
||||
Mixed test completed: 0 writes, 25000 reads in 22.626268963s
|
||||
Combined ops/sec: 1104.91
|
||||
Wiping database between tests...
|
||||
RunQueryTest (Badger)..
|
||||
|
||||
=== Query Test ===
|
||||
Pre-populating database with 10000 events for query tests...
|
||||
Query test completed: 406188 queries in 1m0.004608218s
|
||||
Queries/sec: 6769.28
|
||||
Avg query latency: 1.56602ms
|
||||
P95 query latency: 5.365294ms
|
||||
P99 query latency: 9.302026ms
|
||||
Wiping database between tests...
|
||||
RunConcurrentQueryStoreTest (Badger)..
|
||||
|
||||
=== Concurrent Query/Store Test ===
|
||||
Pre-populating database with 5000 events for concurrent query/store test...
|
||||
Concurrent test completed: 563863 operations (563863 queries, 0 writes) in 1m0.001226916s
|
||||
Operations/sec: 9397.52
|
||||
Avg latency: 46.484µs
|
||||
Avg query latency: 46.484µs
|
||||
Avg write latency: 0s
|
||||
P95 latency: 89.861µs
|
||||
P99 latency: 137.252µs
|
||||
|
||||
=== Badger benchmark completed ===
|
||||
|
||||
|
||||
================================================================================
|
||||
BENCHMARK REPORT
|
||||
================================================================================
|
||||
|
||||
Test: Peak Throughput
|
||||
Duration: 4.281033199s
|
||||
Total Events: 11592
|
||||
Events/sec: 2707.76
|
||||
Success Rate: 23.2%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 344 MB
|
||||
Avg Latency: 4.657987ms
|
||||
P90 Latency: 4.233468ms
|
||||
P95 Latency: 5.603449ms
|
||||
P99 Latency: 68.611381ms
|
||||
Bottom 10% Avg Latency: 1.266467ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Burst Pattern
|
||||
Duration: 8.454879556s
|
||||
Total Events: 0
|
||||
Events/sec: 0.00
|
||||
Success Rate: 0.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 368 MB
|
||||
Avg Latency: 0s
|
||||
P90 Latency: 0s
|
||||
P95 Latency: 0s
|
||||
P99 Latency: 0s
|
||||
Bottom 10% Avg Latency: 0s
|
||||
----------------------------------------
|
||||
|
||||
Test: Mixed Read/Write
|
||||
Duration: 22.626268963s
|
||||
Total Events: 25000
|
||||
Events/sec: 1104.91
|
||||
Success Rate: 50.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 339 MB
|
||||
Avg Latency: 81.834µs
|
||||
P90 Latency: 101.664µs
|
||||
P95 Latency: 112.123µs
|
||||
P99 Latency: 136.991µs
|
||||
Bottom 10% Avg Latency: 123.871µs
|
||||
Errors (25000):
|
||||
- blocked: event already exists: 06061b630fd0881cbe7ed02114584fea59b9621c2e9479e6e6aa2be561240a90
|
||||
- blocked: event already exists: 0312061d336fd22dc64b98130663835242e4479c54c7ca88b72c3b3093ef29a2
|
||||
- blocked: event already exists: 1ebc80bd3bb172fc38ce786e0717e9c82691cd495f0de9863c892284cbe47ca3
|
||||
- blocked: event already exists: 00a5f5f6c7f1c4e6f71ab7df2c056e238ccd9b441e59ddf119d7ab7f1d7510e0
|
||||
- blocked: event already exists: 2197ff7ffc723d2fb4f7e44aeaf0ed8c2e0e2f3fb3aae29f2e33e0683ddf1a99
|
||||
... and 24995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Test: Query Performance
|
||||
Duration: 1m0.004608218s
|
||||
Total Events: 406188
|
||||
Events/sec: 6769.28
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 466 MB
|
||||
Avg Latency: 1.56602ms
|
||||
P90 Latency: 4.291057ms
|
||||
P95 Latency: 5.365294ms
|
||||
P99 Latency: 9.302026ms
|
||||
Bottom 10% Avg Latency: 6.278431ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Concurrent Query/Store
|
||||
Duration: 1m0.001226916s
|
||||
Total Events: 563863
|
||||
Events/sec: 9397.52
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 347 MB
|
||||
Avg Latency: 46.484µs
|
||||
P90 Latency: 79.592µs
|
||||
P95 Latency: 89.861µs
|
||||
P99 Latency: 137.252µs
|
||||
Bottom 10% Avg Latency: 102.019µs
|
||||
Errors (50000):
|
||||
- blocked: event already exists: 00a5f5f6c7f1c4e6f71ab7df2c056e238ccd9b441e59ddf119d7ab7f1d7510e0
|
||||
- blocked: event already exists: 0f06ba91f371d4f8647a3f9529af3b9a012988eabf9f7c2eb42b39aa86697ea9
|
||||
- blocked: event already exists: 0ea6723d131534cf6e2209169a518c4bc598e3acad0618c2ef34df34c867cca1
|
||||
- blocked: event already exists: 0b50149a50e29b084c63f0b0d16a8d280445eb389e53b5c688f654665e9d56f5
|
||||
- blocked: event already exists: 05bf5bbba1a1fa85b9a5aaca7ff384d8e09a1b2441c01df5780c1bc99e377f85
|
||||
... and 49995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Report saved to: /tmp/benchmark_relayer-basic_8/benchmark_report.txt
|
||||
AsciiDoc report saved to: /tmp/benchmark_relayer-basic_8/benchmark_report.adoc
|
||||
|
||||
RELAY_NAME: relayer-basic
|
||||
RELAY_URL: ws://relayer-basic:7447
|
||||
TEST_TIMESTAMP: 2025-11-19T06:06:51+00:00
|
||||
BENCHMARK_CONFIG:
|
||||
Events: 50000
|
||||
Workers: 24
|
||||
Duration: 60s
|
||||
422
cmd/benchmark/reports/run_20251119_054648/strfry_results.txt
Normal file
422
cmd/benchmark/reports/run_20251119_054648/strfry_results.txt
Normal file
@@ -0,0 +1,422 @@
|
||||
Starting Nostr Relay Benchmark (Badger Backend)
|
||||
Data Directory: /tmp/benchmark_strfry_8
|
||||
Events: 50000, Workers: 24, Duration: 1m0s
|
||||
1763532417029005ℹ️ migrating to version 1... /build/pkg/database/migrations.go:66
|
||||
1763532417029081ℹ️ migrating to version 2... /build/pkg/database/migrations.go:73
|
||||
1763532417029106ℹ️ migrating to version 3... /build/pkg/database/migrations.go:80
|
||||
1763532417029112ℹ️ cleaning up ephemeral events (kinds 20000-29999)... /build/pkg/database/migrations.go:287
|
||||
1763532417029144ℹ️ cleaned up 0 ephemeral events from database /build/pkg/database/migrations.go:332
|
||||
1763532417029202ℹ️ migrating to version 4... /build/pkg/database/migrations.go:87
|
||||
1763532417029209ℹ️ converting events to optimized inline storage (Reiser4 optimization)... /build/pkg/database/migrations.go:340
|
||||
1763532417029219ℹ️ found 0 events to convert (0 regular, 0 replaceable, 0 addressable) /build/pkg/database/migrations.go:429
|
||||
1763532417029225ℹ️ migration complete: converted 0 events to optimized inline storage, deleted 0 old keys /build/pkg/database/migrations.go:538
|
||||
Loading real-world sample events from embedded data...
|
||||
Loading real-world sample events (11,596 events from 6 months of Nostr)...
|
||||
Loaded 11596 real-world events (already signed, zero crypto overhead)
|
||||
|
||||
Event Statistics:
|
||||
Total events: 11596
|
||||
Average content size: 588 bytes
|
||||
Event kinds found: 25 unique
|
||||
Most common kinds:
|
||||
Kind 1: 7152 events
|
||||
Kind 7: 1973 events
|
||||
Kind 6: 934 events
|
||||
Kind 10002: 337 events
|
||||
Kind 0: 290 events
|
||||
|
||||
|
||||
╔════════════════════════════════════════════════════════╗
|
||||
║ BADGER BACKEND BENCHMARK SUITE ║
|
||||
╚════════════════════════════════════════════════════════╝
|
||||
|
||||
=== Starting Badger benchmark ===
|
||||
RunPeakThroughputTest (Badger)..
|
||||
|
||||
=== Peak Throughput Test ===
|
||||
1763532417446740🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417463442🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417463517🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417463528🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417465778🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417465773🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417471681🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417472327🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417473046🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417487367🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417488733🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417489155🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417489204🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417547895🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417576271🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417576642🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417577031🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417584020🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417584080🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532417584092⚠️ failed to process deletion for event 900e73566bb098d7ec1880ec68521ef76e066b933d4d6b71dbe99ee156c4b307: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532417584057🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417584119🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532417584130⚠️ failed to process deletion for event ecd7b942d5a473589b4a3bc34f0b3dadf0c6e0ba9325d7a47604167acc757d5c: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532417593777🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417599107🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417599108🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417601718🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417601761🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417605646🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417606054🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417606057🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417607124🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417607136🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417607268🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417607238🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417607238🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417607152🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417608114🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417609053🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417609524🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417612855🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417613254🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417613805🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532417677741🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418142727🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418142864🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418144600🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418144630🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418145646🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418146916🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418147551🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418148156🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418148197🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418148912🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418149551🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418149549🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418150165🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418150344🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418150653🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418151668🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418151756🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418151768🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418152942🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418153239🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418153258🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532418153267⚠️ failed to process deletion for event 63eae8af9f42e2d37f93b1277bcf708c94aeb8935dd83d1e8e80136c4e4f8292: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532418158828🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418159056🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418159184🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418160314🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418160324🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418161260🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418169316🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418172059🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418173558🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418174651🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418174692🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532418174703⚠️ failed to process deletion for event 2f5e01050c81c0d711e9f391726af47933b5fcfbe497434164069787d201e3b9: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532418175319🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418175322🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418175328🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418176201🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418178579🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418178687🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418179266🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418179679🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418179929🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418180514🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418180740🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418181634🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418182020🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418182137🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418182727🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418183912🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418183942🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418186474🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418186791🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418186808🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418186793🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418188620🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418189953🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418192500🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418194606🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418195626🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418199354🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418200303🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418200464🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418203342🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418204634🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418204728🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418205766🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418207111🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418207142🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418207931🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418207969🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532418207980⚠️ failed to process deletion for event e56f683d8a3ad6a1d7ed41f50bf2739179ac8f6e1418ff34e5e20903172237ea: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532418208766🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418210821🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418211495🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418215604🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418215614🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418216006🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418216035🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418219145🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418220994🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418221037🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532418221052⚠️ failed to process deletion for event 4b07094ff22787f584f5ceddc11ae44c66ab513d01d7529e156d6adb75323eca: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532418221209🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418222796🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418223147🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418227727🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418233362🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418233725🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418233725🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418233803🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532418233816⚠️ failed to process deletion for event 59475e9f41d77977a2b2c0d9acf7c32bad368dafdeab1e8f7be8cf0fe0e00ceb: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532418234917🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418234938🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418302772🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418304188🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418304225🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418307646🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418308235🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418309609🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418309963🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418310289🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418312036🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418312787🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418314158🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418315296🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418317296🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418317453🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418326901🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418336363🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418336826🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418337215🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418338156🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418338897🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418341107🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418341261🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418341288🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418341578🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418341805🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418344423🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418344476🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418344490🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418345300🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418345329🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418347344🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418349365🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418349398🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418349748🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418349778🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532418349790⚠️ failed to process deletion for event bd502ba9dc5c173b3b82708561f35118e2ca580f9c7e5baffceccdd9f6502462: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532418351994🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418352043🚨 id not found in database /build/pkg/database/save-event.go:332
|
||||
1763532418352055⚠️ failed to process deletion for event 961a3d9582d896fcd8755ccc634b7846e549131284740f6fec0d635d0bb072af: id not found in database /build/pkg/database/save-event.go:333
|
||||
1763532418354024🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418354037🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418354129🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418355732🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418357513🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418359713🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418360257🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418361239🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418361614🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418362673🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418362796🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418362959🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418363024🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418363609🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418364681🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418366172🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418366978🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418367050🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418367077🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418367056🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418368723🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418369089🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418369211🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418369213🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418369858🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418371869🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418373452🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418373544🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418373609🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418375088🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418375238🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418375309🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418375530🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418375554🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418375966🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418376137🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418376407🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418377845🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418377890🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418378015🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418378051🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418378088🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418379151🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418379686🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418390200🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418391344🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418391364🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418391484🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418392146🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418392202🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418392283🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418392401🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418393317🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
1763532418393350🚨 id not found in database /build/pkg/database/process-delete.go:43
|
||||
Events saved: 11596/50000 (23.2%), errors: 38404
|
||||
Duration: 4.081350203s
|
||||
Events/sec: 2841.22
|
||||
Avg latency: 4.088506ms
|
||||
P90 latency: 3.424405ms
|
||||
P95 latency: 4.517428ms
|
||||
P99 latency: 75.080835ms
|
||||
Bottom 10% Avg latency: 1.135387ms
|
||||
Wiping database between tests...
|
||||
RunBurstPatternTest (Badger)..
|
||||
|
||||
=== Burst Pattern Test ===
|
||||
Burst completed: 5000 events in 342.084914ms
|
||||
Burst completed: 5000 events in 368.596807ms
|
||||
Burst completed: 5000 events in 328.015947ms
|
||||
Burst completed: 5000 events in 335.615145ms
|
||||
Burst completed: 5000 events in 336.465114ms
|
||||
Burst completed: 5000 events in 339.72787ms
|
||||
Burst completed: 5000 events in 337.178121ms
|
||||
Burst completed: 5000 events in 337.603762ms
|
||||
Burst completed: 5000 events in 311.194123ms
|
||||
Burst completed: 5000 events in 320.093358ms
|
||||
Burst test completed: 0 events in 8.36134004s, errors: 50000
|
||||
Events/sec: 0.00
|
||||
Wiping database between tests...
|
||||
RunMixedReadWriteTest (Badger)..
|
||||
|
||||
=== Mixed Read/Write Test ===
|
||||
Pre-populating database for read tests...
|
||||
Mixed test completed: 0 writes, 25000 reads in 22.58702292s
|
||||
Combined ops/sec: 1106.83
|
||||
Wiping database between tests...
|
||||
RunQueryTest (Badger)..
|
||||
|
||||
=== Query Test ===
|
||||
Pre-populating database with 10000 events for query tests...
|
||||
Query test completed: 410409 queries in 1m0.005823994s
|
||||
Queries/sec: 6839.49
|
||||
Avg query latency: 1.547004ms
|
||||
P95 query latency: 5.256194ms
|
||||
P99 query latency: 9.085129ms
|
||||
Wiping database between tests...
|
||||
RunConcurrentQueryStoreTest (Badger)..
|
||||
|
||||
=== Concurrent Query/Store Test ===
|
||||
Pre-populating database with 5000 events for concurrent query/store test...
|
||||
Concurrent test completed: 568449 operations (568449 queries, 0 writes) in 1m0.000557559s
|
||||
Operations/sec: 9474.06
|
||||
Avg latency: 45.257µs
|
||||
Avg query latency: 45.257µs
|
||||
Avg write latency: 0s
|
||||
P95 latency: 86.775µs
|
||||
P99 latency: 128.615µs
|
||||
|
||||
=== Badger benchmark completed ===
|
||||
|
||||
|
||||
================================================================================
|
||||
BENCHMARK REPORT
|
||||
================================================================================
|
||||
|
||||
Test: Peak Throughput
|
||||
Duration: 4.081350203s
|
||||
Total Events: 11596
|
||||
Events/sec: 2841.22
|
||||
Success Rate: 23.2%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 322 MB
|
||||
Avg Latency: 4.088506ms
|
||||
P90 Latency: 3.424405ms
|
||||
P95 Latency: 4.517428ms
|
||||
P99 Latency: 75.080835ms
|
||||
Bottom 10% Avg Latency: 1.135387ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Burst Pattern
|
||||
Duration: 8.36134004s
|
||||
Total Events: 0
|
||||
Events/sec: 0.00
|
||||
Success Rate: 0.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 352 MB
|
||||
Avg Latency: 0s
|
||||
P90 Latency: 0s
|
||||
P95 Latency: 0s
|
||||
P99 Latency: 0s
|
||||
Bottom 10% Avg Latency: 0s
|
||||
----------------------------------------
|
||||
|
||||
Test: Mixed Read/Write
|
||||
Duration: 22.58702292s
|
||||
Total Events: 25000
|
||||
Events/sec: 1106.83
|
||||
Success Rate: 50.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 319 MB
|
||||
Avg Latency: 81.227µs
|
||||
P90 Latency: 102.275µs
|
||||
P95 Latency: 113.396µs
|
||||
P99 Latency: 139.054µs
|
||||
Bottom 10% Avg Latency: 125.516µs
|
||||
Errors (25000):
|
||||
- blocked: event already exists: 11aa0b6defe3d58cef2f93c06fb194bc72241f17fb35312594d279f6c8f13d44
|
||||
- blocked: event already exists: 00a5f5f6c7f1c4e6f71ab7df2c056e238ccd9b441e59ddf119d7ab7f1d7510e0
|
||||
- blocked: event already exists: 1ebc80bd3bb172fc38ce786e0717e9c82691cd495f0de9863c892284cbe47ca3
|
||||
- blocked: event already exists: 0ce484c600cb1c0b33f1e38ddea4b38a47069615d22114a9c621a9164d9b6218
|
||||
- blocked: event already exists: 1642d6770a74de7ca45169bc76dab334591bcb2191044da0b18459888164f9fc
|
||||
... and 24995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Test: Query Performance
|
||||
Duration: 1m0.005823994s
|
||||
Total Events: 410409
|
||||
Events/sec: 6839.49
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 403 MB
|
||||
Avg Latency: 1.547004ms
|
||||
P90 Latency: 4.258013ms
|
||||
P95 Latency: 5.256194ms
|
||||
P99 Latency: 9.085129ms
|
||||
Bottom 10% Avg Latency: 6.154516ms
|
||||
----------------------------------------
|
||||
|
||||
Test: Concurrent Query/Store
|
||||
Duration: 1m0.000557559s
|
||||
Total Events: 568449
|
||||
Events/sec: 9474.06
|
||||
Success Rate: 100.0%
|
||||
Concurrent Workers: 24
|
||||
Memory Used: 403 MB
|
||||
Avg Latency: 45.257µs
|
||||
P90 Latency: 77.187µs
|
||||
P95 Latency: 86.775µs
|
||||
P99 Latency: 128.615µs
|
||||
Bottom 10% Avg Latency: 98.387µs
|
||||
Errors (50000):
|
||||
- blocked: event already exists: 0312061d336fd22dc64b98130663835242e4479c54c7ca88b72c3b3093ef29a2
|
||||
- blocked: event already exists: 06061b630fd0881cbe7ed02114584fea59b9621c2e9479e6e6aa2be561240a90
|
||||
- blocked: event already exists: 0ea6723d131534cf6e2209169a518c4bc598e3acad0618c2ef34df34c867cca1
|
||||
- blocked: event already exists: 0ce484c600cb1c0b33f1e38ddea4b38a47069615d22114a9c621a9164d9b6218
|
||||
- blocked: event already exists: 0f06ba91f371d4f8647a3f9529af3b9a012988eabf9f7c2eb42b39aa86697ea9
|
||||
... and 49995 more errors
|
||||
----------------------------------------
|
||||
|
||||
Report saved to: /tmp/benchmark_strfry_8/benchmark_report.txt
|
||||
AsciiDoc report saved to: /tmp/benchmark_strfry_8/benchmark_report.adoc
|
||||
|
||||
RELAY_NAME: strfry
|
||||
RELAY_URL: ws://strfry:8080
|
||||
TEST_TIMESTAMP: 2025-11-19T06:10:13+00:00
|
||||
BENCHMARK_CONFIG:
|
||||
Events: 50000
|
||||
Workers: 24
|
||||
Duration: 60s
|
||||
19
cmd/benchmark/run-badger-benchmark.sh
Executable file
19
cmd/benchmark/run-badger-benchmark.sh
Executable file
@@ -0,0 +1,19 @@
|
||||
#!/bin/bash
|
||||
# Run Badger benchmark with reduced cache sizes to avoid OOM
|
||||
|
||||
# Set reasonable cache sizes for benchmark
|
||||
export ORLY_DB_BLOCK_CACHE_MB=256 # Reduced from 1024MB
|
||||
export ORLY_DB_INDEX_CACHE_MB=128 # Reduced from 512MB
|
||||
export ORLY_QUERY_CACHE_SIZE_MB=128 # Reduced from 512MB
|
||||
|
||||
# Clean up old data
|
||||
rm -rf /tmp/benchmark_db_badger
|
||||
|
||||
echo "Running Badger benchmark with reduced cache sizes:"
|
||||
echo " Block Cache: ${ORLY_DB_BLOCK_CACHE_MB}MB"
|
||||
echo " Index Cache: ${ORLY_DB_INDEX_CACHE_MB}MB"
|
||||
echo " Query Cache: ${ORLY_QUERY_CACHE_SIZE_MB}MB"
|
||||
echo ""
|
||||
|
||||
# Run benchmark
|
||||
./benchmark -events "${1:-1000}" -workers "${2:-4}" -datadir /tmp/benchmark_db_badger
|
||||
@@ -29,11 +29,27 @@ if [ -d "data" ]; then
|
||||
fi
|
||||
fi
|
||||
|
||||
# Stop any running containers from previous runs
|
||||
echo "Stopping any running containers..."
|
||||
$DOCKER_COMPOSE down 2>/dev/null || true
|
||||
|
||||
# Create fresh data directories with correct permissions
|
||||
echo "Preparing data directories..."
|
||||
mkdir -p data/{next-orly,khatru-sqlite,khatru-badger,relayer-basic,strfry,nostr-rs-relay,postgres}
|
||||
chmod 777 data/{next-orly,khatru-sqlite,khatru-badger,relayer-basic,strfry,nostr-rs-relay,postgres}
|
||||
|
||||
# Clean Neo4j data to prevent "already running" errors
|
||||
if [ -d "data/neo4j" ]; then
|
||||
echo "Cleaning Neo4j data directory..."
|
||||
rm -rf data/neo4j/*
|
||||
fi
|
||||
|
||||
mkdir -p data/{next-orly-badger,next-orly-dgraph,next-orly-neo4j,dgraph-zero,dgraph-alpha,neo4j,neo4j-logs,khatru-sqlite,khatru-badger,relayer-basic,strfry,nostr-rs-relay,postgres}
|
||||
chmod 777 data/{next-orly-badger,next-orly-dgraph,next-orly-neo4j,dgraph-zero,dgraph-alpha,neo4j,neo4j-logs,khatru-sqlite,khatru-badger,relayer-basic,strfry,nostr-rs-relay,postgres}
|
||||
|
||||
echo "Building fresh Docker images..."
|
||||
# Force rebuild to pick up latest code changes
|
||||
$DOCKER_COMPOSE build --no-cache benchmark-runner next-orly-badger next-orly-dgraph next-orly-neo4j
|
||||
|
||||
echo ""
|
||||
echo "Starting benchmark suite..."
|
||||
echo "This will automatically shut down all containers when the benchmark completes."
|
||||
echo ""
|
||||
|
||||
406
docs/NEO4J_BACKEND.md
Normal file
406
docs/NEO4J_BACKEND.md
Normal file
@@ -0,0 +1,406 @@
|
||||
# Neo4j Database Backend for ORLY Relay
|
||||
|
||||
## Overview
|
||||
|
||||
The Neo4j database backend provides a graph-native storage solution for the ORLY Nostr relay. Unlike traditional key-value or document stores, Neo4j is optimized for relationship-heavy queries, making it an ideal fit for Nostr's social graph and event reference patterns.
|
||||
|
||||
## Architecture
|
||||
|
||||
### Core Components
|
||||
|
||||
1. **Main Database File** ([pkg/neo4j/neo4j.go](../pkg/neo4j/neo4j.go))
|
||||
- Implements the `database.Database` interface
|
||||
- Manages Neo4j driver connection and lifecycle
|
||||
- Uses Badger for metadata storage (markers, identity, subscriptions)
|
||||
- Registers with the database factory via `init()`
|
||||
|
||||
2. **Schema Management** ([pkg/neo4j/schema.go](../pkg/neo4j/schema.go))
|
||||
- Defines Neo4j constraints and indexes using Cypher
|
||||
- Creates unique constraints on Event IDs and Author pubkeys
|
||||
- Indexes for optimal query performance (kind, created_at, tags)
|
||||
|
||||
3. **Query Engine** ([pkg/neo4j/query-events.go](../pkg/neo4j/query-events.go))
|
||||
- Translates Nostr REQ filters to Cypher queries
|
||||
- Leverages graph traversal for tag relationships
|
||||
- Supports prefix matching for IDs and pubkeys
|
||||
- Parameterized queries for security and performance
|
||||
|
||||
4. **Event Storage** ([pkg/neo4j/save-event.go](../pkg/neo4j/save-event.go))
|
||||
- Stores events as nodes with properties
|
||||
- Creates graph relationships:
|
||||
- `AUTHORED_BY`: Event → Author
|
||||
- `REFERENCES`: Event → Event (e-tags)
|
||||
- `MENTIONS`: Event → Author (p-tags)
|
||||
- `TAGGED_WITH`: Event → Tag
|
||||
|
||||
## Graph Schema
|
||||
|
||||
### Node Types
|
||||
|
||||
**Event Node**
|
||||
```cypher
|
||||
(:Event {
|
||||
id: string, // Hex-encoded event ID (32 bytes)
|
||||
serial: int, // Sequential serial number
|
||||
kind: int, // Event kind
|
||||
created_at: int, // Unix timestamp
|
||||
content: string, // Event content
|
||||
sig: string, // Hex-encoded signature
|
||||
pubkey: string, // Hex-encoded author pubkey
|
||||
tags: string // JSON-encoded tags array
|
||||
})
|
||||
```
|
||||
|
||||
**Author Node**
|
||||
```cypher
|
||||
(:Author {
|
||||
pubkey: string // Hex-encoded pubkey (unique)
|
||||
})
|
||||
```
|
||||
|
||||
**Tag Node**
|
||||
```cypher
|
||||
(:Tag {
|
||||
type: string, // Tag type (e.g., "t", "d")
|
||||
value: string // Tag value
|
||||
})
|
||||
```
|
||||
|
||||
**Marker Node** (for metadata)
|
||||
```cypher
|
||||
(:Marker {
|
||||
key: string, // Unique key
|
||||
value: string // Hex-encoded value
|
||||
})
|
||||
```
|
||||
|
||||
### Relationships
|
||||
|
||||
- `(:Event)-[:AUTHORED_BY]->(:Author)` - Event authorship
|
||||
- `(:Event)-[:REFERENCES]->(:Event)` - Event references (e-tags)
|
||||
- `(:Event)-[:MENTIONS]->(:Author)` - Author mentions (p-tags)
|
||||
- `(:Event)-[:TAGGED_WITH]->(:Tag)` - Generic tag associations
|
||||
|
||||
## How Nostr REQ Messages Are Implemented
|
||||
|
||||
### Filter to Cypher Translation
|
||||
|
||||
The query engine in [query-events.go](../pkg/neo4j/query-events.go) translates Nostr filters to Cypher queries:
|
||||
|
||||
#### 1. ID Filters
|
||||
```json
|
||||
{"ids": ["abc123..."]}
|
||||
```
|
||||
Becomes:
|
||||
```cypher
|
||||
MATCH (e:Event)
|
||||
WHERE e.id = $id_0
|
||||
```
|
||||
|
||||
For prefix matching (partial IDs):
|
||||
```cypher
|
||||
WHERE e.id STARTS WITH $id_0
|
||||
```
|
||||
|
||||
#### 2. Author Filters
|
||||
```json
|
||||
{"authors": ["pubkey1...", "pubkey2..."]}
|
||||
```
|
||||
Becomes:
|
||||
```cypher
|
||||
MATCH (e:Event)
|
||||
WHERE e.pubkey IN $authors
|
||||
```
|
||||
|
||||
#### 3. Kind Filters
|
||||
```json
|
||||
{"kinds": [1, 7]}
|
||||
```
|
||||
Becomes:
|
||||
```cypher
|
||||
MATCH (e:Event)
|
||||
WHERE e.kind IN $kinds
|
||||
```
|
||||
|
||||
#### 4. Time Range Filters
|
||||
```json
|
||||
{"since": 1234567890, "until": 1234567900}
|
||||
```
|
||||
Becomes:
|
||||
```cypher
|
||||
MATCH (e:Event)
|
||||
WHERE e.created_at >= $since AND e.created_at <= $until
|
||||
```
|
||||
|
||||
#### 5. Tag Filters (Graph Advantage!)
|
||||
```json
|
||||
{"#t": ["bitcoin", "nostr"]}
|
||||
```
|
||||
Becomes:
|
||||
```cypher
|
||||
MATCH (e:Event)
|
||||
OPTIONAL MATCH (e)-[:TAGGED_WITH]->(t0:Tag)
|
||||
WHERE t0.type = $tagType_0 AND t0.value IN $tagValues_0
|
||||
```
|
||||
|
||||
This leverages Neo4j's native graph traversal for efficient tag queries!
|
||||
|
||||
#### 6. Combined Filters
|
||||
```json
|
||||
{
|
||||
"kinds": [1],
|
||||
"authors": ["abc..."],
|
||||
"#p": ["xyz..."],
|
||||
"limit": 50
|
||||
}
|
||||
```
|
||||
Becomes:
|
||||
```cypher
|
||||
MATCH (e:Event)
|
||||
OPTIONAL MATCH (e)-[:TAGGED_WITH]->(t0:Tag)
|
||||
WHERE e.kind IN $kinds
|
||||
AND e.pubkey IN $authors
|
||||
AND t0.type = $tagType_0
|
||||
AND t0.value IN $tagValues_0
|
||||
RETURN e.id, e.kind, e.created_at, e.content, e.sig, e.pubkey, e.tags
|
||||
ORDER BY e.created_at DESC
|
||||
LIMIT $limit
|
||||
```
|
||||
|
||||
### Query Execution Flow
|
||||
|
||||
1. **Parse Filter**: Extract IDs, authors, kinds, times, tags
|
||||
2. **Build Cypher**: Construct parameterized query with MATCH/WHERE clauses
|
||||
3. **Execute**: Run via `ExecuteRead()` with read-only session
|
||||
4. **Parse Results**: Convert Neo4j records to Nostr events
|
||||
5. **Return**: Send events back to client
|
||||
|
||||
## Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
```bash
|
||||
# Neo4j Connection
|
||||
ORLY_NEO4J_URI="bolt://localhost:7687"
|
||||
ORLY_NEO4J_USER="neo4j"
|
||||
ORLY_NEO4J_PASSWORD="password"
|
||||
|
||||
# Database Type Selection
|
||||
ORLY_DB_TYPE="neo4j"
|
||||
|
||||
# Data Directory (for Badger metadata storage)
|
||||
ORLY_DATA_DIR="~/.local/share/ORLY"
|
||||
```
|
||||
|
||||
### Example Docker Compose Setup
|
||||
|
||||
```yaml
|
||||
version: '3.8'
|
||||
services:
|
||||
neo4j:
|
||||
image: neo4j:5.15
|
||||
ports:
|
||||
- "7474:7474" # HTTP
|
||||
- "7687:7687" # Bolt
|
||||
environment:
|
||||
- NEO4J_AUTH=neo4j/password
|
||||
- NEO4J_PLUGINS=["apoc"]
|
||||
volumes:
|
||||
- neo4j_data:/data
|
||||
- neo4j_logs:/logs
|
||||
|
||||
orly:
|
||||
build: .
|
||||
ports:
|
||||
- "3334:3334"
|
||||
environment:
|
||||
- ORLY_DB_TYPE=neo4j
|
||||
- ORLY_NEO4J_URI=bolt://neo4j:7687
|
||||
- ORLY_NEO4J_USER=neo4j
|
||||
- ORLY_NEO4J_PASSWORD=password
|
||||
depends_on:
|
||||
- neo4j
|
||||
|
||||
volumes:
|
||||
neo4j_data:
|
||||
neo4j_logs:
|
||||
```
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
### Advantages Over Badger/DGraph
|
||||
|
||||
1. **Native Graph Queries**: Tag relationships and social graph traversals are native operations
|
||||
2. **Optimized Indexes**: Automatic index usage for constrained properties
|
||||
3. **Efficient Joins**: Relationship traversals are O(1) lookups
|
||||
4. **Query Planner**: Neo4j's query planner optimizes complex multi-filter queries
|
||||
|
||||
### Tuning Recommendations
|
||||
|
||||
1. **Indexes**: The schema creates indexes for:
|
||||
- Event ID (unique constraint + index)
|
||||
- Event kind
|
||||
- Event created_at
|
||||
- Composite: kind + created_at
|
||||
- Tag type + value
|
||||
|
||||
2. **Cache Configuration**: Configure Neo4j's page cache and heap size:
|
||||
```conf
|
||||
# neo4j.conf
|
||||
dbms.memory.heap.initial_size=2G
|
||||
dbms.memory.heap.max_size=4G
|
||||
dbms.memory.pagecache.size=4G
|
||||
```
|
||||
|
||||
3. **Query Limits**: Always use LIMIT in queries to prevent memory exhaustion
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Replaceable Events
|
||||
|
||||
Replaceable events (kinds 0, 3, 10000-19999) are handled in `WouldReplaceEvent()`:
|
||||
|
||||
```cypher
|
||||
MATCH (e:Event {kind: $kind, pubkey: $pubkey})
|
||||
WHERE e.created_at < $createdAt
|
||||
RETURN e.serial, e.created_at
|
||||
```
|
||||
|
||||
Older events are deleted before saving the new one.
|
||||
|
||||
### Parameterized Replaceable Events
|
||||
|
||||
For kinds 30000-39999, we also match on the d-tag:
|
||||
|
||||
```cypher
|
||||
MATCH (e:Event {kind: $kind, pubkey: $pubkey})-[:TAGGED_WITH]->(t:Tag {type: 'd', value: $dValue})
|
||||
WHERE e.created_at < $createdAt
|
||||
RETURN e.serial
|
||||
```
|
||||
|
||||
### Event Deletion (NIP-09)
|
||||
|
||||
Delete events (kind 5) are processed via graph traversal:
|
||||
|
||||
```cypher
|
||||
MATCH (target:Event {id: $targetId})
|
||||
MATCH (delete:Event {kind: 5})-[:REFERENCES]->(target)
|
||||
WHERE delete.pubkey = $pubkey OR delete.pubkey IN $admins
|
||||
RETURN delete.id
|
||||
```
|
||||
|
||||
Only same-author or admin deletions are allowed.
|
||||
|
||||
## Comparison with Other Backends
|
||||
|
||||
| Feature | Badger | DGraph | Neo4j |
|
||||
|---------|--------|--------|-------|
|
||||
| **Storage Type** | Key-value | Graph (distributed) | Graph (native) |
|
||||
| **Query Language** | Custom indexes | DQL | Cypher |
|
||||
| **Tag Queries** | Index lookups | Graph traversal | Native relationships |
|
||||
| **Scaling** | Single-node | Distributed | Cluster/Causal cluster |
|
||||
| **Memory Usage** | Low | Medium | High |
|
||||
| **Setup Complexity** | Minimal | Medium | Medium |
|
||||
| **Best For** | Small relays | Large distributed | Relationship-heavy |
|
||||
|
||||
## Development Guide
|
||||
|
||||
### Adding New Indexes
|
||||
|
||||
1. Update [schema.go](../pkg/neo4j/schema.go) with new index definition
|
||||
2. Add to `applySchema()` function
|
||||
3. Restart relay to apply schema changes
|
||||
|
||||
Example:
|
||||
```cypher
|
||||
CREATE INDEX event_content_fulltext IF NOT EXISTS
|
||||
FOR (e:Event) ON (e.content)
|
||||
OPTIONS {indexConfig: {`fulltext.analyzer`: 'english'}}
|
||||
```
|
||||
|
||||
### Custom Queries
|
||||
|
||||
To add custom query methods:
|
||||
|
||||
1. Add method to [query-events.go](../pkg/neo4j/query-events.go)
|
||||
2. Build Cypher query with parameterization
|
||||
3. Use `ExecuteRead()` or `ExecuteWrite()` as appropriate
|
||||
4. Parse results with `parseEventsFromResult()`
|
||||
|
||||
### Testing
|
||||
|
||||
Due to Neo4j dependency, tests require a running Neo4j instance:
|
||||
|
||||
```bash
|
||||
# Start Neo4j via Docker
|
||||
docker run -d --name neo4j-test \
|
||||
-p 7687:7687 \
|
||||
-e NEO4J_AUTH=neo4j/test \
|
||||
neo4j:5.15
|
||||
|
||||
# Run tests
|
||||
ORLY_NEO4J_URI="bolt://localhost:7687" \
|
||||
ORLY_NEO4J_USER="neo4j" \
|
||||
ORLY_NEO4J_PASSWORD="test" \
|
||||
go test ./pkg/neo4j/...
|
||||
|
||||
# Cleanup
|
||||
docker rm -f neo4j-test
|
||||
```
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
1. **Full-text Search**: Leverage Neo4j's full-text indexes for content search
|
||||
2. **Graph Analytics**: Implement social graph metrics (centrality, communities)
|
||||
3. **Advanced Queries**: Support NIP-50 search via Cypher full-text capabilities
|
||||
4. **Clustering**: Deploy Neo4j cluster for high availability
|
||||
5. **APOC Procedures**: Utilize APOC library for advanced graph algorithms
|
||||
6. **Caching Layer**: Implement query result caching similar to Badger backend
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Connection Issues
|
||||
|
||||
```bash
|
||||
# Test connectivity
|
||||
cypher-shell -a bolt://localhost:7687 -u neo4j -p password
|
||||
|
||||
# Check Neo4j logs
|
||||
docker logs neo4j
|
||||
```
|
||||
|
||||
### Performance Issues
|
||||
|
||||
```cypher
|
||||
// View query execution plan
|
||||
EXPLAIN MATCH (e:Event) WHERE e.kind = 1 RETURN e LIMIT 10
|
||||
|
||||
// Profile query performance
|
||||
PROFILE MATCH (e:Event)-[:AUTHORED_BY]->(a:Author) RETURN e, a LIMIT 10
|
||||
```
|
||||
|
||||
### Schema Issues
|
||||
|
||||
```cypher
|
||||
// List all constraints
|
||||
SHOW CONSTRAINTS
|
||||
|
||||
// List all indexes
|
||||
SHOW INDEXES
|
||||
|
||||
// Drop and recreate schema
|
||||
DROP CONSTRAINT event_id_unique IF EXISTS
|
||||
CREATE CONSTRAINT event_id_unique FOR (e:Event) REQUIRE e.id IS UNIQUE
|
||||
```
|
||||
|
||||
## References
|
||||
|
||||
- [Neo4j Documentation](https://neo4j.com/docs/)
|
||||
- [Cypher Query Language](https://neo4j.com/docs/cypher-manual/current/)
|
||||
- [Neo4j Go Driver](https://neo4j.com/docs/go-manual/current/)
|
||||
- [Graph Database Patterns](https://neo4j.com/developer/graph-db-vs-rdbms/)
|
||||
- [Nostr Protocol (NIP-01)](https://github.com/nostr-protocol/nips/blob/master/01.md)
|
||||
|
||||
## License
|
||||
|
||||
This Neo4j backend implementation follows the same license as the ORLY relay project.
|
||||
3
go.mod
3
go.mod
@@ -10,7 +10,9 @@ require (
|
||||
github.com/ebitengine/purego v0.9.1
|
||||
github.com/gorilla/websocket v1.5.3
|
||||
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0
|
||||
github.com/klauspost/compress v1.18.1
|
||||
github.com/minio/sha256-simd v1.0.1
|
||||
github.com/neo4j/neo4j-go-driver/v5 v5.28.4
|
||||
github.com/pkg/profile v1.7.0
|
||||
github.com/puzpuzpuz/xsync/v3 v3.5.1
|
||||
github.com/stretchr/testify v1.11.1
|
||||
@@ -39,7 +41,6 @@ require (
|
||||
github.com/golang/protobuf v1.5.4 // indirect
|
||||
github.com/google/flatbuffers v25.9.23+incompatible // indirect
|
||||
github.com/google/pprof v0.0.0-20251007162407-5df77e3f7d1d // indirect
|
||||
github.com/klauspost/compress v1.18.1 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
|
||||
github.com/pkg/errors v0.8.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
|
||||
2
go.sum
2
go.sum
@@ -94,6 +94,8 @@ github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1
|
||||
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
||||
github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM=
|
||||
github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8=
|
||||
github.com/neo4j/neo4j-go-driver/v5 v5.28.4 h1:7toxehVcYkZbyxV4W3Ib9VcnyRBQPucF+VwNNmtSXi4=
|
||||
github.com/neo4j/neo4j-go-driver/v5 v5.28.4/go.mod h1:Vff8OwT7QpLm7L2yYr85XNWe9Rbqlbeb9asNXJTHO4k=
|
||||
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0=
|
||||
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
|
||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
|
||||
1
main.go
1
main.go
@@ -22,6 +22,7 @@ import (
|
||||
"next.orly.dev/pkg/crypto/keys"
|
||||
"next.orly.dev/pkg/database"
|
||||
_ "next.orly.dev/pkg/dgraph" // Import to register dgraph factory
|
||||
_ "next.orly.dev/pkg/neo4j" // Import to register neo4j factory
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
"next.orly.dev/pkg/utils/interrupt"
|
||||
"next.orly.dev/pkg/version"
|
||||
|
||||
@@ -3,8 +3,11 @@
|
||||
package secp
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"sync"
|
||||
"unsafe"
|
||||
@@ -12,6 +15,9 @@ import (
|
||||
"github.com/ebitengine/purego"
|
||||
)
|
||||
|
||||
//go:embed libsecp256k1.so
|
||||
var embeddedLibLinux []byte
|
||||
|
||||
// Constants for context flags
|
||||
const (
|
||||
ContextNone = 1
|
||||
@@ -40,9 +46,11 @@ const (
|
||||
)
|
||||
|
||||
var (
|
||||
libHandle uintptr
|
||||
loadLibOnce sync.Once
|
||||
loadLibErr error
|
||||
libHandle uintptr
|
||||
loadLibOnce sync.Once
|
||||
loadLibErr error
|
||||
extractedPath string
|
||||
extractLibOnce sync.Once
|
||||
)
|
||||
|
||||
// Function pointers
|
||||
@@ -83,69 +91,132 @@ var (
|
||||
xonlyPubkeyFromPubkey func(ctx uintptr, xonlyPubkey *byte, pkParity *int32, pubkey *byte) int32
|
||||
)
|
||||
|
||||
// extractEmbeddedLibrary extracts the embedded library to a temporary location
|
||||
func extractEmbeddedLibrary() (path string, err error) {
|
||||
extractLibOnce.Do(func() {
|
||||
var libData []byte
|
||||
var filename string
|
||||
|
||||
// Select the appropriate embedded library for this platform
|
||||
switch runtime.GOOS {
|
||||
case "linux":
|
||||
if len(embeddedLibLinux) == 0 {
|
||||
err = fmt.Errorf("no embedded library for linux")
|
||||
return
|
||||
}
|
||||
libData = embeddedLibLinux
|
||||
filename = "libsecp256k1.so"
|
||||
default:
|
||||
err = fmt.Errorf("no embedded library for %s", runtime.GOOS)
|
||||
return
|
||||
}
|
||||
|
||||
// Create a temporary directory for the library
|
||||
// Use a deterministic name so we don't create duplicates
|
||||
tmpDir := filepath.Join(os.TempDir(), "orly-libsecp256k1")
|
||||
if err = os.MkdirAll(tmpDir, 0755); err != nil {
|
||||
err = fmt.Errorf("failed to create temp directory: %w", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Write the library to the temp directory
|
||||
extractedPath = filepath.Join(tmpDir, filename)
|
||||
|
||||
// Check if file already exists and is valid
|
||||
if info, e := os.Stat(extractedPath); e == nil && info.Size() == int64(len(libData)) {
|
||||
// File exists and has correct size, assume it's valid
|
||||
return
|
||||
}
|
||||
|
||||
if err = os.WriteFile(extractedPath, libData, 0755); err != nil {
|
||||
err = fmt.Errorf("failed to write library to %s: %w", extractedPath, err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("INFO: Extracted embedded libsecp256k1 to %s", extractedPath)
|
||||
})
|
||||
|
||||
return extractedPath, err
|
||||
}
|
||||
|
||||
// LoadLibrary loads the libsecp256k1 shared library
|
||||
func LoadLibrary() (err error) {
|
||||
loadLibOnce.Do(func() {
|
||||
var libPath string
|
||||
|
||||
// Try to find the library
|
||||
switch runtime.GOOS {
|
||||
case "linux":
|
||||
// Try common library paths
|
||||
// For linux/amd64, try the bundled library first
|
||||
paths := []string{
|
||||
"./libsecp256k1.so", // Bundled in repo for linux amd64
|
||||
"libsecp256k1.so.5",
|
||||
"libsecp256k1.so.2",
|
||||
"libsecp256k1.so.1",
|
||||
"libsecp256k1.so.0",
|
||||
"libsecp256k1.so",
|
||||
"/usr/lib/libsecp256k1.so",
|
||||
"/usr/local/lib/libsecp256k1.so",
|
||||
"/usr/lib/x86_64-linux-gnu/libsecp256k1.so",
|
||||
// First, try to extract and use the embedded library
|
||||
usedEmbedded := false
|
||||
if embeddedPath, extractErr := extractEmbeddedLibrary(); extractErr == nil {
|
||||
libHandle, err = purego.Dlopen(embeddedPath, purego.RTLD_NOW|purego.RTLD_GLOBAL)
|
||||
if err == nil {
|
||||
libPath = embeddedPath
|
||||
usedEmbedded = true
|
||||
} else {
|
||||
log.Printf("WARN: Failed to load embedded library from %s: %v, falling back to system paths", embeddedPath, err)
|
||||
}
|
||||
for _, p := range paths {
|
||||
libHandle, err = purego.Dlopen(p, purego.RTLD_NOW|purego.RTLD_GLOBAL)
|
||||
if err == nil {
|
||||
libPath = p
|
||||
break
|
||||
} else {
|
||||
log.Printf("WARN: Failed to extract embedded library: %v, falling back to system paths", extractErr)
|
||||
}
|
||||
|
||||
// If embedded library failed, fall back to system paths
|
||||
if err != nil {
|
||||
switch runtime.GOOS {
|
||||
case "linux":
|
||||
// Try common library paths
|
||||
paths := []string{
|
||||
"./libsecp256k1.so", // Bundled in repo for linux amd64
|
||||
"libsecp256k1.so.5",
|
||||
"libsecp256k1.so.2",
|
||||
"libsecp256k1.so.1",
|
||||
"libsecp256k1.so.0",
|
||||
"libsecp256k1.so",
|
||||
"/usr/lib/libsecp256k1.so",
|
||||
"/usr/local/lib/libsecp256k1.so",
|
||||
"/usr/lib/x86_64-linux-gnu/libsecp256k1.so",
|
||||
}
|
||||
}
|
||||
case "darwin":
|
||||
paths := []string{
|
||||
"libsecp256k1.2.dylib",
|
||||
"libsecp256k1.1.dylib",
|
||||
"libsecp256k1.0.dylib",
|
||||
"libsecp256k1.dylib",
|
||||
"/usr/local/lib/libsecp256k1.dylib",
|
||||
"/opt/homebrew/lib/libsecp256k1.dylib",
|
||||
}
|
||||
for _, p := range paths {
|
||||
libHandle, err = purego.Dlopen(p, purego.RTLD_NOW|purego.RTLD_GLOBAL)
|
||||
if err == nil {
|
||||
libPath = p
|
||||
break
|
||||
for _, p := range paths {
|
||||
libHandle, err = purego.Dlopen(p, purego.RTLD_NOW|purego.RTLD_GLOBAL)
|
||||
if err == nil {
|
||||
libPath = p
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
case "windows":
|
||||
paths := []string{
|
||||
"libsecp256k1-2.dll",
|
||||
"libsecp256k1-1.dll",
|
||||
"libsecp256k1-0.dll",
|
||||
"libsecp256k1.dll",
|
||||
"secp256k1.dll",
|
||||
}
|
||||
for _, p := range paths {
|
||||
libHandle, err = purego.Dlopen(p, purego.RTLD_NOW|purego.RTLD_GLOBAL)
|
||||
if err == nil {
|
||||
libPath = p
|
||||
break
|
||||
case "darwin":
|
||||
paths := []string{
|
||||
"libsecp256k1.2.dylib",
|
||||
"libsecp256k1.1.dylib",
|
||||
"libsecp256k1.0.dylib",
|
||||
"libsecp256k1.dylib",
|
||||
"/usr/local/lib/libsecp256k1.dylib",
|
||||
"/opt/homebrew/lib/libsecp256k1.dylib",
|
||||
}
|
||||
for _, p := range paths {
|
||||
libHandle, err = purego.Dlopen(p, purego.RTLD_NOW|purego.RTLD_GLOBAL)
|
||||
if err == nil {
|
||||
libPath = p
|
||||
break
|
||||
}
|
||||
}
|
||||
case "windows":
|
||||
paths := []string{
|
||||
"libsecp256k1-2.dll",
|
||||
"libsecp256k1-1.dll",
|
||||
"libsecp256k1-0.dll",
|
||||
"libsecp256k1.dll",
|
||||
"secp256k1.dll",
|
||||
}
|
||||
for _, p := range paths {
|
||||
libHandle, err = purego.Dlopen(p, purego.RTLD_NOW|purego.RTLD_GLOBAL)
|
||||
if err == nil {
|
||||
libPath = p
|
||||
break
|
||||
}
|
||||
}
|
||||
default:
|
||||
err = fmt.Errorf("unsupported platform: %s", runtime.GOOS)
|
||||
loadLibErr = err
|
||||
return
|
||||
}
|
||||
default:
|
||||
err = fmt.Errorf("unsupported platform: %s", runtime.GOOS)
|
||||
loadLibErr = err
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
@@ -159,7 +230,11 @@ func LoadLibrary() (err error) {
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("INFO: Successfully loaded libsecp256k1 v5.0.0 from %s", libPath)
|
||||
if usedEmbedded {
|
||||
log.Printf("INFO: Successfully loaded embedded libsecp256k1 v5.0.0 from %s", libPath)
|
||||
} else {
|
||||
log.Printf("INFO: Successfully loaded libsecp256k1 v5.0.0 from system path: %s", libPath)
|
||||
}
|
||||
loadLibErr = nil
|
||||
})
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ import (
|
||||
)
|
||||
|
||||
// NewDatabase creates a database instance based on the specified type.
|
||||
// Supported types: "badger", "dgraph"
|
||||
// Supported types: "badger", "dgraph", "neo4j"
|
||||
func NewDatabase(
|
||||
ctx context.Context,
|
||||
cancel context.CancelFunc,
|
||||
@@ -23,8 +23,12 @@ func NewDatabase(
|
||||
// Use the new dgraph implementation
|
||||
// Import dynamically to avoid import cycles
|
||||
return newDgraphDatabase(ctx, cancel, dataDir, logLevel)
|
||||
case "neo4j":
|
||||
// Use the new neo4j implementation
|
||||
// Import dynamically to avoid import cycles
|
||||
return newNeo4jDatabase(ctx, cancel, dataDir, logLevel)
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported database type: %s (supported: badger, dgraph)", dbType)
|
||||
return nil, fmt.Errorf("unsupported database type: %s (supported: badger, dgraph, neo4j)", dbType)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -37,3 +41,13 @@ var newDgraphDatabase func(context.Context, context.CancelFunc, string, string)
|
||||
func RegisterDgraphFactory(factory func(context.Context, context.CancelFunc, string, string) (Database, error)) {
|
||||
newDgraphDatabase = factory
|
||||
}
|
||||
|
||||
// newNeo4jDatabase creates a neo4j database instance
|
||||
// This is defined here to avoid import cycles
|
||||
var newNeo4jDatabase func(context.Context, context.CancelFunc, string, string) (Database, error)
|
||||
|
||||
// RegisterNeo4jFactory registers the neo4j database factory
|
||||
// This is called from the neo4j package's init() function
|
||||
func RegisterNeo4jFactory(factory func(context.Context, context.CancelFunc, string, string) (Database, error)) {
|
||||
newNeo4jDatabase = factory
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package database
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
|
||||
"github.com/dgraph-io/badger/v4"
|
||||
"lol.mleku.dev/chk"
|
||||
@@ -10,12 +11,13 @@ import (
|
||||
"next.orly.dev/pkg/database/indexes/types"
|
||||
"next.orly.dev/pkg/encoders/event"
|
||||
"next.orly.dev/pkg/encoders/filter"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
|
||||
// "next.orly.dev/pkg/encoders/hex"
|
||||
"next.orly.dev/pkg/encoders/tag"
|
||||
)
|
||||
|
||||
func (d *D) GetSerialById(id []byte) (ser *types.Uint40, err error) {
|
||||
log.T.F("GetSerialById: input id=%s", hex.Enc(id))
|
||||
// log.T.F("GetSerialById: input id=%s", hex.Enc(id))
|
||||
var idxs []Range
|
||||
if idxs, err = GetIndexesFromFilter(&filter.F{Ids: tag.NewFromBytesSlice(id)}); chk.E(err) {
|
||||
return
|
||||
@@ -58,7 +60,7 @@ func (d *D) GetSerialById(id []byte) (ser *types.Uint40, err error) {
|
||||
return
|
||||
}
|
||||
if !idFound {
|
||||
err = errorf.T("id not found in database: %s", hex.Enc(id))
|
||||
err = fmt.Errorf("id not found in database")
|
||||
return
|
||||
}
|
||||
|
||||
@@ -80,7 +82,7 @@ func (d *D) GetSerialsByIds(ids *tag.T) (
|
||||
func (d *D) GetSerialsByIdsWithFilter(
|
||||
ids *tag.T, fn func(ev *event.E, ser *types.Uint40) bool,
|
||||
) (serials map[string]*types.Uint40, err error) {
|
||||
log.T.F("GetSerialsByIdsWithFilter: input ids count=%d", ids.Len())
|
||||
// log.T.F("GetSerialsByIdsWithFilter: input ids count=%d", ids.Len())
|
||||
|
||||
// Initialize the result map with estimated capacity to reduce reallocations
|
||||
serials = make(map[string]*types.Uint40, ids.Len())
|
||||
|
||||
@@ -33,7 +33,7 @@ func (d *D) GetSerialsByRange(idx Range) (
|
||||
}
|
||||
iterCount := 0
|
||||
it.Seek(endBoundary)
|
||||
log.T.F("GetSerialsByRange: iterator valid=%v, sought to endBoundary", it.Valid())
|
||||
// log.T.F("GetSerialsByRange: iterator valid=%v, sought to endBoundary", it.Valid())
|
||||
for it.Valid() {
|
||||
iterCount++
|
||||
if iterCount > 100 {
|
||||
@@ -46,12 +46,12 @@ func (d *D) GetSerialsByRange(idx Range) (
|
||||
key = item.Key()
|
||||
keyWithoutSerial := key[:len(key)-5]
|
||||
cmp := bytes.Compare(keyWithoutSerial, idx.Start)
|
||||
log.T.F("GetSerialsByRange: iter %d, key prefix matches=%v, cmp=%d", iterCount, bytes.HasPrefix(key, idx.Start[:len(idx.Start)-8]), cmp)
|
||||
// log.T.F("GetSerialsByRange: iter %d, key prefix matches=%v, cmp=%d", iterCount, bytes.HasPrefix(key, idx.Start[:len(idx.Start)-8]), cmp)
|
||||
if cmp < 0 {
|
||||
// didn't find it within the timestamp range
|
||||
log.T.F("GetSerialsByRange: key out of range (cmp=%d), stopping iteration", cmp)
|
||||
log.T.F(" keyWithoutSerial len=%d: %x", len(keyWithoutSerial), keyWithoutSerial)
|
||||
log.T.F(" idx.Start len=%d: %x", len(idx.Start), idx.Start)
|
||||
// log.T.F("GetSerialsByRange: key out of range (cmp=%d), stopping iteration", cmp)
|
||||
// log.T.F(" keyWithoutSerial len=%d: %x", len(keyWithoutSerial), keyWithoutSerial)
|
||||
// log.T.F(" idx.Start len=%d: %x", len(idx.Start), idx.Start)
|
||||
return
|
||||
}
|
||||
ser := new(types.Uint40)
|
||||
@@ -62,7 +62,7 @@ func (d *D) GetSerialsByRange(idx Range) (
|
||||
sers = append(sers, ser)
|
||||
it.Next()
|
||||
}
|
||||
log.T.F("GetSerialsByRange: iteration complete, found %d serials", len(sers))
|
||||
// log.T.F("GetSerialsByRange: iteration complete, found %d serials", len(sers))
|
||||
return
|
||||
},
|
||||
); chk.E(err) {
|
||||
|
||||
@@ -20,7 +20,7 @@ import (
|
||||
)
|
||||
|
||||
// TestInlineSmallEventStorage tests the Reiser4-inspired inline storage optimization
|
||||
// for small events (<=384 bytes).
|
||||
// for small events (<=1024 bytes by default).
|
||||
func TestInlineSmallEventStorage(t *testing.T) {
|
||||
// Create a temporary directory for the database
|
||||
tempDir, err := os.MkdirTemp("", "test-inline-db-*")
|
||||
@@ -129,8 +129,8 @@ func TestInlineSmallEventStorage(t *testing.T) {
|
||||
largeEvent := event.New()
|
||||
largeEvent.Kind = kind.TextNote.K
|
||||
largeEvent.CreatedAt = timestamp.Now().V
|
||||
// Create content larger than 384 bytes
|
||||
largeContent := make([]byte, 500)
|
||||
// Create content larger than 1024 bytes (the default inline storage threshold)
|
||||
largeContent := make([]byte, 1500)
|
||||
for i := range largeContent {
|
||||
largeContent[i] = 'x'
|
||||
}
|
||||
|
||||
@@ -5,7 +5,6 @@ import (
|
||||
|
||||
"lol.mleku.dev/chk"
|
||||
"lol.mleku.dev/errorf"
|
||||
"lol.mleku.dev/log"
|
||||
"next.orly.dev/pkg/database/indexes/types"
|
||||
"next.orly.dev/pkg/encoders/event"
|
||||
"next.orly.dev/pkg/encoders/filter"
|
||||
@@ -21,7 +20,7 @@ import (
|
||||
// pubkeys that also may delete the event, normally only the author is allowed
|
||||
// to delete an event.
|
||||
func (d *D) CheckForDeleted(ev *event.E, admins [][]byte) (err error) {
|
||||
log.T.F("CheckForDeleted: checking event %x", ev.ID)
|
||||
// log.T.F("CheckForDeleted: checking event %x", ev.ID)
|
||||
keys := append([][]byte{ev.Pubkey}, admins...)
|
||||
authors := tag.NewFromBytesSlice(keys...)
|
||||
// if the event is addressable, check for a deletion event with the same
|
||||
@@ -186,9 +185,9 @@ func (d *D) CheckForDeleted(ev *event.E, admins [][]byte) (err error) {
|
||||
return
|
||||
}
|
||||
// otherwise we check for a delete by event id
|
||||
log.T.F("CheckForDeleted: checking for e-tag deletion of event %x", ev.ID)
|
||||
log.T.F("CheckForDeleted: authors filter: %v", authors)
|
||||
log.T.F("CheckForDeleted: looking for tag e with value: %s", hex.Enc(ev.ID))
|
||||
// log.T.F("CheckForDeleted: checking for e-tag deletion of event %x", ev.ID)
|
||||
// log.T.F("CheckForDeleted: authors filter: %v", authors)
|
||||
// log.T.F("CheckForDeleted: looking for tag e with value: %s", hex.Enc(ev.ID))
|
||||
var idxs []Range
|
||||
if idxs, err = GetIndexesFromFilter(
|
||||
&filter.F{
|
||||
@@ -201,18 +200,18 @@ func (d *D) CheckForDeleted(ev *event.E, admins [][]byte) (err error) {
|
||||
); chk.E(err) {
|
||||
return
|
||||
}
|
||||
log.T.F("CheckForDeleted: found %d indexes", len(idxs))
|
||||
// log.T.F("CheckForDeleted: found %d indexes", len(idxs))
|
||||
var sers types.Uint40s
|
||||
for i, idx := range idxs {
|
||||
log.T.F("CheckForDeleted: checking index %d: %v", i, idx)
|
||||
for _, idx := range idxs {
|
||||
// log.T.F("CheckForDeleted: checking index %d: %v", i, idx)
|
||||
var s types.Uint40s
|
||||
if s, err = d.GetSerialsByRange(idx); chk.E(err) {
|
||||
return
|
||||
}
|
||||
log.T.F("CheckForDeleted: index %d returned %d serials", i, len(s))
|
||||
// log.T.F("CheckForDeleted: index %d returned %d serials", i, len(s))
|
||||
if len(s) > 0 {
|
||||
// Any e-tag deletion found means the exact event was deleted and cannot be resubmitted
|
||||
log.T.F("CheckForDeleted: found e-tag deletion for event %x", ev.ID)
|
||||
// log.T.F("CheckForDeleted: found e-tag deletion for event %x", ev.ID)
|
||||
err = errorf.E("blocked: %0x has been deleted", ev.ID)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -180,10 +180,10 @@ func (d *D) SaveEvent(c context.Context, ev *event.E) (
|
||||
if idxs, err = GetIndexesForEvent(ev, serial); chk.E(err) {
|
||||
return
|
||||
}
|
||||
log.T.F(
|
||||
"SaveEvent: generated %d indexes for event %x (kind %d)", len(idxs),
|
||||
ev.ID, ev.Kind,
|
||||
)
|
||||
// log.T.F(
|
||||
// "SaveEvent: generated %d indexes for event %x (kind %d)", len(idxs),
|
||||
// ev.ID, ev.Kind,
|
||||
// )
|
||||
|
||||
// Serialize event once to check size
|
||||
eventDataBuf := new(bytes.Buffer)
|
||||
@@ -247,10 +247,10 @@ func (d *D) SaveEvent(c context.Context, ev *event.E) (
|
||||
if err = txn.Set(keyBuf.Bytes(), nil); chk.E(err) {
|
||||
return
|
||||
}
|
||||
log.T.F(
|
||||
"SaveEvent: stored small event inline (%d bytes)",
|
||||
len(eventData),
|
||||
)
|
||||
// log.T.F(
|
||||
// "SaveEvent: stored small event inline (%d bytes)",
|
||||
// len(eventData),
|
||||
// )
|
||||
} else {
|
||||
// Large event: store separately with evt prefix
|
||||
keyBuf := new(bytes.Buffer)
|
||||
@@ -260,10 +260,10 @@ func (d *D) SaveEvent(c context.Context, ev *event.E) (
|
||||
if err = txn.Set(keyBuf.Bytes(), eventData); chk.E(err) {
|
||||
return
|
||||
}
|
||||
log.T.F(
|
||||
"SaveEvent: stored large event separately (%d bytes)",
|
||||
len(eventData),
|
||||
)
|
||||
// log.T.F(
|
||||
// "SaveEvent: stored large event separately (%d bytes)",
|
||||
// len(eventData),
|
||||
// )
|
||||
}
|
||||
|
||||
// Additionally, store replaceable/addressable events with specialized keys for direct access
|
||||
@@ -293,7 +293,7 @@ func (d *D) SaveEvent(c context.Context, ev *event.E) (
|
||||
if err = txn.Set(keyBuf.Bytes(), nil); chk.E(err) {
|
||||
return
|
||||
}
|
||||
log.T.F("SaveEvent: also stored addressable event with specialized key")
|
||||
// log.T.F("SaveEvent: also stored addressable event with specialized key")
|
||||
} else if isReplaceableEvent && isSmallEvent {
|
||||
// Replaceable event: also store with rev|pubkey_hash|kind|size|data
|
||||
pubHash := new(types.PubHash)
|
||||
@@ -340,7 +340,7 @@ func (d *D) SaveEvent(c context.Context, ev *event.E) (
|
||||
// This ensures subsequent queries will see the new event
|
||||
if d.queryCache != nil {
|
||||
d.queryCache.Invalidate()
|
||||
log.T.F("SaveEvent: invalidated query cache")
|
||||
// log.T.F("SaveEvent: invalidated query cache")
|
||||
}
|
||||
|
||||
return
|
||||
|
||||
@@ -2,7 +2,9 @@ package dgraph
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/dgraph-io/dgo/v230/protos/api"
|
||||
"next.orly.dev/pkg/database/indexes/types"
|
||||
@@ -98,13 +100,83 @@ func (d *D) DeleteEventBySerial(c context.Context, ser *types.Uint40, ev *event.
|
||||
return nil
|
||||
}
|
||||
|
||||
// DeleteExpired removes events that have passed their expiration time
|
||||
// DeleteExpired removes events that have passed their expiration time (NIP-40)
|
||||
func (d *D) DeleteExpired() {
|
||||
// Query for events with expiration tags
|
||||
// This is a stub - full implementation would:
|
||||
// 1. Find events with "expiration" tag
|
||||
// 2. Check if current time > expiration time
|
||||
// 3. Delete those events
|
||||
// Query for events that have an "expiration" tag
|
||||
// NIP-40: events should have a tag ["expiration", "<unix timestamp>"]
|
||||
query := `{
|
||||
events(func: has(event.tags)) {
|
||||
uid
|
||||
event.id
|
||||
event.tags
|
||||
event.created_at
|
||||
}
|
||||
}`
|
||||
|
||||
resp, err := d.Query(context.Background(), query)
|
||||
if err != nil {
|
||||
d.Logger.Errorf("failed to query events for expiration: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
var result struct {
|
||||
Events []struct {
|
||||
UID string `json:"uid"`
|
||||
ID string `json:"event.id"`
|
||||
Tags string `json:"event.tags"`
|
||||
CreatedAt int64 `json:"event.created_at"`
|
||||
} `json:"events"`
|
||||
}
|
||||
|
||||
if err = unmarshalJSON(resp.Json, &result); err != nil {
|
||||
d.Logger.Errorf("failed to parse events for expiration: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
now := time.Now().Unix()
|
||||
deletedCount := 0
|
||||
|
||||
for _, ev := range result.Events {
|
||||
// Parse tags
|
||||
if ev.Tags == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
var tags [][]string
|
||||
if err := json.Unmarshal([]byte(ev.Tags), &tags); err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// Look for expiration tag
|
||||
var expirationTime int64
|
||||
for _, tag := range tags {
|
||||
if len(tag) >= 2 && tag[0] == "expiration" {
|
||||
// Parse expiration timestamp
|
||||
if _, err := fmt.Sscanf(tag[1], "%d", &expirationTime); err != nil {
|
||||
continue
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// If expiration time found and passed, delete the event
|
||||
if expirationTime > 0 && now > expirationTime {
|
||||
mutation := &api.Mutation{
|
||||
DelNquads: []byte(fmt.Sprintf("<%s> * * .", ev.UID)),
|
||||
CommitNow: true,
|
||||
}
|
||||
|
||||
if _, err := d.Mutate(context.Background(), mutation); err != nil {
|
||||
d.Logger.Warningf("failed to delete expired event %s: %v", ev.ID, err)
|
||||
} else {
|
||||
deletedCount++
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if deletedCount > 0 {
|
||||
d.Logger.Infof("deleted %d expired events", deletedCount)
|
||||
}
|
||||
}
|
||||
|
||||
// ProcessDelete processes a kind 5 deletion event
|
||||
|
||||
@@ -4,19 +4,19 @@ package dgraph
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/dgraph-io/badger/v4"
|
||||
"github.com/dgraph-io/dgo/v230"
|
||||
"github.com/dgraph-io/dgo/v230/protos/api"
|
||||
"google.golang.org/grpc"
|
||||
"next.orly.dev/pkg/encoders/filter"
|
||||
"google.golang.org/grpc/credentials/insecure"
|
||||
"lol.mleku.dev"
|
||||
"lol.mleku.dev/chk"
|
||||
"next.orly.dev/pkg/database"
|
||||
"next.orly.dev/pkg/encoders/filter"
|
||||
"next.orly.dev/pkg/utils/apputil"
|
||||
)
|
||||
|
||||
@@ -31,9 +31,6 @@ type D struct {
|
||||
client *dgo.Dgraph
|
||||
conn *grpc.ClientConn
|
||||
|
||||
// Fallback badger storage for metadata
|
||||
pstore *badger.DB
|
||||
|
||||
// Configuration
|
||||
dgraphURL string
|
||||
enableGraphQL bool
|
||||
@@ -105,11 +102,6 @@ func New(
|
||||
return
|
||||
}
|
||||
|
||||
// Initialize badger for metadata storage
|
||||
if err = d.initStorage(); chk.E(err) {
|
||||
return
|
||||
}
|
||||
|
||||
// Apply Nostr schema to dgraph
|
||||
if err = d.applySchema(ctx); chk.E(err) {
|
||||
return
|
||||
@@ -130,9 +122,6 @@ func New(
|
||||
if d.conn != nil {
|
||||
d.conn.Close()
|
||||
}
|
||||
if d.pstore != nil {
|
||||
d.pstore.Close()
|
||||
}
|
||||
}()
|
||||
|
||||
return
|
||||
@@ -155,25 +144,6 @@ func (d *D) initDgraphClient() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// initStorage opens Badger database for metadata storage
|
||||
func (d *D) initStorage() error {
|
||||
metadataDir := filepath.Join(d.dataDir, "metadata")
|
||||
|
||||
if err := os.MkdirAll(metadataDir, 0755); err != nil {
|
||||
return fmt.Errorf("failed to create metadata directory: %w", err)
|
||||
}
|
||||
|
||||
opts := badger.DefaultOptions(metadataDir)
|
||||
|
||||
var err error
|
||||
d.pstore, err = badger.Open(opts)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to open badger metadata store: %w", err)
|
||||
}
|
||||
|
||||
d.Logger.Infof("metadata storage initialized")
|
||||
return nil
|
||||
}
|
||||
|
||||
// Query executes a DQL query against dgraph
|
||||
func (d *D) Query(ctx context.Context, query string) (*api.Response, error) {
|
||||
@@ -198,8 +168,11 @@ func (d *D) Mutate(ctx context.Context, mutation *api.Mutation) (*api.Response,
|
||||
return nil, fmt.Errorf("dgraph mutation failed: %w", err)
|
||||
}
|
||||
|
||||
if err := txn.Commit(ctx); err != nil {
|
||||
return nil, fmt.Errorf("dgraph commit failed: %w", err)
|
||||
// Only commit if CommitNow is false (mutation didn't auto-commit)
|
||||
if !mutation.CommitNow {
|
||||
if err := txn.Commit(ctx); err != nil {
|
||||
return nil, fmt.Errorf("dgraph commit failed: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
return resp, nil
|
||||
@@ -214,11 +187,8 @@ func (d *D) Init(path string) (err error) {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Sync flushes pending writes
|
||||
// Sync flushes pending writes (DGraph handles persistence automatically)
|
||||
func (d *D) Sync() (err error) {
|
||||
if d.pstore != nil {
|
||||
return d.pstore.Sync()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -230,25 +200,26 @@ func (d *D) Close() (err error) {
|
||||
err = e
|
||||
}
|
||||
}
|
||||
if d.pstore != nil {
|
||||
if e := d.pstore.Close(); e != nil && err == nil {
|
||||
err = e
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Wipe removes all data
|
||||
func (d *D) Wipe() (err error) {
|
||||
if d.pstore != nil {
|
||||
if err = d.pstore.Close(); chk.E(err) {
|
||||
return
|
||||
}
|
||||
// Drop all data in DGraph using Alter
|
||||
op := &api.Operation{
|
||||
DropOp: api.Operation_DATA,
|
||||
}
|
||||
|
||||
if err = d.client.Alter(context.Background(), op); err != nil {
|
||||
return fmt.Errorf("failed to drop dgraph data: %w", err)
|
||||
}
|
||||
|
||||
// Remove data directory
|
||||
if err = os.RemoveAll(d.dataDir); chk.E(err) {
|
||||
return
|
||||
}
|
||||
return d.initStorage()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// SetLogLevel sets the logging level
|
||||
@@ -256,12 +227,38 @@ func (d *D) SetLogLevel(level string) {
|
||||
// d.Logger.SetLevel(lol.GetLogLevel(level))
|
||||
}
|
||||
|
||||
// EventIdsBySerial retrieves event IDs by serial range (stub)
|
||||
// EventIdsBySerial retrieves event IDs by serial range
|
||||
func (d *D) EventIdsBySerial(start uint64, count int) (
|
||||
evs []uint64, err error,
|
||||
) {
|
||||
err = fmt.Errorf("not implemented")
|
||||
return
|
||||
// Query for events in the specified serial range
|
||||
query := fmt.Sprintf(`{
|
||||
events(func: ge(event.serial, %d), orderdesc: event.serial, first: %d) {
|
||||
event.serial
|
||||
}
|
||||
}`, start, count)
|
||||
|
||||
resp, err := d.Query(context.Background(), query)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to query event IDs by serial: %w", err)
|
||||
}
|
||||
|
||||
var result struct {
|
||||
Events []struct {
|
||||
Serial int64 `json:"event.serial"`
|
||||
} `json:"events"`
|
||||
}
|
||||
|
||||
if err = json.Unmarshal(resp.Json, &result); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
evs = make([]uint64, 0, len(result.Events))
|
||||
for _, ev := range result.Events {
|
||||
evs = append(evs, uint64(ev.Serial))
|
||||
}
|
||||
|
||||
return evs, nil
|
||||
}
|
||||
|
||||
// RunMigrations runs database migrations (no-op for dgraph)
|
||||
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"next.orly.dev/pkg/database"
|
||||
"next.orly.dev/pkg/database/indexes/types"
|
||||
@@ -54,15 +55,16 @@ func (d *D) FetchEventsBySerials(serials []*types.Uint40) (
|
||||
return make(map[uint64]*event.E), nil
|
||||
}
|
||||
|
||||
// Build query for multiple serials
|
||||
serialStrs := make([]string, len(serials))
|
||||
// Build a filter for multiple serials using OR conditions
|
||||
serialConditions := make([]string, len(serials))
|
||||
for i, ser := range serials {
|
||||
serialStrs[i] = fmt.Sprintf("%d", ser.Get())
|
||||
serialConditions[i] = fmt.Sprintf("eq(event.serial, %d)", ser.Get())
|
||||
}
|
||||
serialFilter := strings.Join(serialConditions, " OR ")
|
||||
|
||||
// Use uid() function for efficient multi-get
|
||||
// Query with proper batch filtering
|
||||
query := fmt.Sprintf(`{
|
||||
events(func: uid(%s)) {
|
||||
events(func: has(event.serial)) @filter(%s) {
|
||||
event.id
|
||||
event.kind
|
||||
event.created_at
|
||||
@@ -72,24 +74,70 @@ func (d *D) FetchEventsBySerials(serials []*types.Uint40) (
|
||||
event.tags
|
||||
event.serial
|
||||
}
|
||||
}`, serialStrs[0]) // Simplified - in production you'd handle multiple UIDs properly
|
||||
}`, serialFilter)
|
||||
|
||||
resp, err := d.Query(context.Background(), query)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to fetch events by serials: %w", err)
|
||||
}
|
||||
|
||||
evs, err := d.parseEventsFromResponse(resp.Json)
|
||||
if err != nil {
|
||||
// Parse the response including serial numbers
|
||||
var result struct {
|
||||
Events []struct {
|
||||
ID string `json:"event.id"`
|
||||
Kind int `json:"event.kind"`
|
||||
CreatedAt int64 `json:"event.created_at"`
|
||||
Content string `json:"event.content"`
|
||||
Sig string `json:"event.sig"`
|
||||
Pubkey string `json:"event.pubkey"`
|
||||
Tags string `json:"event.tags"`
|
||||
Serial int64 `json:"event.serial"`
|
||||
} `json:"events"`
|
||||
}
|
||||
|
||||
if err = json.Unmarshal(resp.Json, &result); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Map events by serial
|
||||
// Map events by their serial numbers
|
||||
events = make(map[uint64]*event.E)
|
||||
for i, ser := range serials {
|
||||
if i < len(evs) {
|
||||
events[ser.Get()] = evs[i]
|
||||
for _, ev := range result.Events {
|
||||
// Decode hex strings
|
||||
id, err := hex.Dec(ev.ID)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
sig, err := hex.Dec(ev.Sig)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
pubkey, err := hex.Dec(ev.Pubkey)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// Parse tags from JSON
|
||||
var tags tag.S
|
||||
if ev.Tags != "" {
|
||||
if err := json.Unmarshal([]byte(ev.Tags), &tags); err != nil {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Create event
|
||||
e := &event.E{
|
||||
Kind: uint16(ev.Kind),
|
||||
CreatedAt: ev.CreatedAt,
|
||||
Content: []byte(ev.Content),
|
||||
Tags: &tags,
|
||||
}
|
||||
|
||||
// Copy fixed-size arrays
|
||||
copy(e.ID[:], id)
|
||||
copy(e.Sig[:], sig)
|
||||
copy(e.Pubkey[:], pubkey)
|
||||
|
||||
events[uint64(ev.Serial)] = e
|
||||
}
|
||||
|
||||
return events, nil
|
||||
@@ -140,17 +188,54 @@ func (d *D) GetSerialsByIds(ids *tag.T) (
|
||||
return serials, nil
|
||||
}
|
||||
|
||||
// Query each ID individually (simplified implementation)
|
||||
for _, id := range ids.T {
|
||||
if len(id) >= 2 {
|
||||
idStr := string(id[1])
|
||||
serial, err := d.GetSerialById([]byte(idStr))
|
||||
if err == nil {
|
||||
serials[idStr] = serial
|
||||
}
|
||||
// Build batch query for all IDs at once
|
||||
idConditions := make([]string, 0, len(ids.T))
|
||||
idMap := make(map[string][]byte) // Map hex ID to original bytes
|
||||
|
||||
for _, idBytes := range ids.T {
|
||||
if len(idBytes) > 0 {
|
||||
idStr := hex.Enc(idBytes)
|
||||
idConditions = append(idConditions, fmt.Sprintf("eq(event.id, %q)", idStr))
|
||||
idMap[idStr] = idBytes
|
||||
}
|
||||
}
|
||||
|
||||
if len(idConditions) == 0 {
|
||||
return serials, nil
|
||||
}
|
||||
|
||||
// Create single query with OR conditions
|
||||
idFilter := strings.Join(idConditions, " OR ")
|
||||
query := fmt.Sprintf(`{
|
||||
events(func: has(event.id)) @filter(%s) {
|
||||
event.id
|
||||
event.serial
|
||||
}
|
||||
}`, idFilter)
|
||||
|
||||
resp, err := d.Query(context.Background(), query)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to batch query serials by IDs: %w", err)
|
||||
}
|
||||
|
||||
var result struct {
|
||||
Events []struct {
|
||||
ID string `json:"event.id"`
|
||||
Serial int64 `json:"event.serial"`
|
||||
} `json:"events"`
|
||||
}
|
||||
|
||||
if err = json.Unmarshal(resp.Json, &result); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Map results back
|
||||
for _, ev := range result.Events {
|
||||
serial := types.Uint40{}
|
||||
serial.Set(uint64(ev.Serial))
|
||||
serials[ev.ID] = &serial
|
||||
}
|
||||
|
||||
return serials, nil
|
||||
}
|
||||
|
||||
@@ -191,10 +276,47 @@ func (d *D) GetSerialsByIdsWithFilter(
|
||||
func (d *D) GetSerialsByRange(idx database.Range) (
|
||||
serials types.Uint40s, err error,
|
||||
) {
|
||||
// This would need to be implemented based on how ranges are defined
|
||||
// For now, returning not implemented
|
||||
err = fmt.Errorf("not implemented")
|
||||
return
|
||||
// Range represents a byte-prefix range for index scanning
|
||||
// For dgraph, we need to convert this to a query on indexed fields
|
||||
// The range is typically used for scanning event IDs or other hex-encoded keys
|
||||
|
||||
if len(idx.Start) == 0 && len(idx.End) == 0 {
|
||||
return nil, fmt.Errorf("empty range provided")
|
||||
}
|
||||
|
||||
startStr := hex.Enc(idx.Start)
|
||||
endStr := hex.Enc(idx.End)
|
||||
|
||||
// Query for events with IDs in the specified range
|
||||
query := fmt.Sprintf(`{
|
||||
events(func: ge(event.id, %q)) @filter(le(event.id, %q)) {
|
||||
event.serial
|
||||
}
|
||||
}`, startStr, endStr)
|
||||
|
||||
resp, err := d.Query(context.Background(), query)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to query serials by range: %w", err)
|
||||
}
|
||||
|
||||
var result struct {
|
||||
Events []struct {
|
||||
Serial int64 `json:"event.serial"`
|
||||
} `json:"events"`
|
||||
}
|
||||
|
||||
if err = json.Unmarshal(resp.Json, &result); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
serials = make([]*types.Uint40, 0, len(result.Events))
|
||||
for _, ev := range result.Events {
|
||||
serial := types.Uint40{}
|
||||
serial.Set(uint64(ev.Serial))
|
||||
serials = append(serials, &serial)
|
||||
}
|
||||
|
||||
return serials, nil
|
||||
}
|
||||
|
||||
// GetFullIdPubkeyBySerial retrieves ID and pubkey for a serial number
|
||||
|
||||
@@ -6,8 +6,10 @@ import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
"next.orly.dev/pkg/encoders/event"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
)
|
||||
|
||||
// Import imports events from a reader (JSONL format)
|
||||
@@ -17,11 +19,83 @@ func (d *D) Import(rr io.Reader) {
|
||||
|
||||
// Export exports events to a writer (JSONL format)
|
||||
func (d *D) Export(c context.Context, w io.Writer, pubkeys ...[]byte) {
|
||||
// Query all events or events for specific pubkeys
|
||||
// Write as JSONL
|
||||
// Build query based on whether pubkeys are specified
|
||||
var query string
|
||||
|
||||
// Stub implementation
|
||||
fmt.Fprintf(w, "# Export not yet implemented for dgraph\n")
|
||||
if len(pubkeys) > 0 {
|
||||
// Build pubkey filter
|
||||
pubkeyStrs := make([]string, len(pubkeys))
|
||||
for i, pk := range pubkeys {
|
||||
pubkeyStrs[i] = fmt.Sprintf("eq(event.pubkey, %q)", hex.Enc(pk))
|
||||
}
|
||||
pubkeyFilter := strings.Join(pubkeyStrs, " OR ")
|
||||
|
||||
query = fmt.Sprintf(`{
|
||||
events(func: has(event.id)) @filter(%s) {
|
||||
event.id
|
||||
event.kind
|
||||
event.created_at
|
||||
event.content
|
||||
event.sig
|
||||
event.pubkey
|
||||
event.tags
|
||||
}
|
||||
}`, pubkeyFilter)
|
||||
} else {
|
||||
// Export all events
|
||||
query = `{
|
||||
events(func: has(event.id)) {
|
||||
event.id
|
||||
event.kind
|
||||
event.created_at
|
||||
event.content
|
||||
event.sig
|
||||
event.pubkey
|
||||
event.tags
|
||||
}
|
||||
}`
|
||||
}
|
||||
|
||||
// Execute query
|
||||
resp, err := d.Query(c, query)
|
||||
if err != nil {
|
||||
d.Logger.Errorf("failed to query events for export: %v", err)
|
||||
fmt.Fprintf(w, "# Error: failed to query events: %v\n", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Parse events
|
||||
evs, err := d.parseEventsFromResponse(resp.Json)
|
||||
if err != nil {
|
||||
d.Logger.Errorf("failed to parse events for export: %v", err)
|
||||
fmt.Fprintf(w, "# Error: failed to parse events: %v\n", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Write header comment
|
||||
fmt.Fprintf(w, "# Exported %d events from dgraph\n", len(evs))
|
||||
|
||||
// Write each event as JSONL
|
||||
count := 0
|
||||
for _, ev := range evs {
|
||||
jsonData, err := json.Marshal(ev)
|
||||
if err != nil {
|
||||
d.Logger.Warningf("failed to marshal event: %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
if _, err := fmt.Fprintf(w, "%s\n", jsonData); err != nil {
|
||||
d.Logger.Errorf("failed to write event: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
count++
|
||||
if count%1000 == 0 {
|
||||
d.Logger.Infof("exported %d events", count)
|
||||
}
|
||||
}
|
||||
|
||||
d.Logger.Infof("export complete: %d events written", count)
|
||||
}
|
||||
|
||||
// ImportEventsFromReader imports events from a reader
|
||||
|
||||
@@ -48,6 +48,20 @@ func (d *D) QueryEventsWithOptions(
|
||||
|
||||
// buildDQLQuery constructs a DQL query from a Nostr filter
|
||||
func (d *D) buildDQLQuery(f *filter.F, includeDeleteEvents bool) string {
|
||||
return d.buildDQLQueryWithFields(f, includeDeleteEvents, []string{
|
||||
"uid",
|
||||
"event.id",
|
||||
"event.kind",
|
||||
"event.created_at",
|
||||
"event.content",
|
||||
"event.sig",
|
||||
"event.pubkey",
|
||||
"event.tags",
|
||||
})
|
||||
}
|
||||
|
||||
// buildDQLQueryWithFields constructs a DQL query with custom field selection
|
||||
func (d *D) buildDQLQueryWithFields(f *filter.F, includeDeleteEvents bool, fields []string) string {
|
||||
var conditions []string
|
||||
var funcQuery string
|
||||
|
||||
@@ -139,18 +153,14 @@ func (d *D) buildDQLQuery(f *filter.F, includeDeleteEvents bool) string {
|
||||
limitStr = fmt.Sprintf(", first: %d", f.Limit)
|
||||
}
|
||||
|
||||
// Build field list
|
||||
fieldStr := strings.Join(fields, "\n\t\t\t")
|
||||
|
||||
query := fmt.Sprintf(`{
|
||||
events(func: %s%s%s%s) {
|
||||
uid
|
||||
event.id
|
||||
event.kind
|
||||
event.created_at
|
||||
event.content
|
||||
event.sig
|
||||
event.pubkey
|
||||
event.tags
|
||||
%s
|
||||
}
|
||||
}`, funcQuery, filterStr, orderBy, limitStr)
|
||||
}`, funcQuery, filterStr, orderBy, limitStr, fieldStr)
|
||||
|
||||
return query
|
||||
}
|
||||
@@ -257,12 +267,8 @@ func (d *D) QueryDeleteEventsByTargetId(c context.Context, targetEventId []byte)
|
||||
func (d *D) QueryForSerials(c context.Context, f *filter.F) (
|
||||
serials types.Uint40s, err error,
|
||||
) {
|
||||
// Build query
|
||||
query := d.buildDQLQuery(f, false)
|
||||
|
||||
// Modify query to only return serial numbers
|
||||
query = strings.Replace(query, "event.id\n\t\t\tevent.kind", "event.serial", 1)
|
||||
query = strings.Replace(query, "\t\t\tevent.created_at\n\t\t\tevent.content\n\t\t\tevent.sig\n\t\t\tevent.pubkey\n\t\t\tevent.tags", "", 1)
|
||||
// Build query requesting only serial numbers
|
||||
query := d.buildDQLQueryWithFields(f, false, []string{"event.serial"})
|
||||
|
||||
resp, err := d.Query(c, query)
|
||||
if err != nil {
|
||||
@@ -293,11 +299,13 @@ func (d *D) QueryForSerials(c context.Context, f *filter.F) (
|
||||
func (d *D) QueryForIds(c context.Context, f *filter.F) (
|
||||
idPkTs []*store.IdPkTs, err error,
|
||||
) {
|
||||
// Build query
|
||||
query := d.buildDQLQuery(f, false)
|
||||
|
||||
// Modify query to only return ID, pubkey, created_at, serial
|
||||
query = strings.Replace(query, "event.kind\n\t\t\tevent.created_at\n\t\t\tevent.content\n\t\t\tevent.sig\n\t\t\tevent.pubkey\n\t\t\tevent.tags", "event.id\n\t\t\tevent.pubkey\n\t\t\tevent.created_at\n\t\t\tevent.serial", 1)
|
||||
// Build query requesting only ID, pubkey, created_at, serial
|
||||
query := d.buildDQLQueryWithFields(f, false, []string{
|
||||
"event.id",
|
||||
"event.pubkey",
|
||||
"event.created_at",
|
||||
"event.serial",
|
||||
})
|
||||
|
||||
resp, err := d.Query(c, query)
|
||||
if err != nil {
|
||||
@@ -342,11 +350,8 @@ func (d *D) QueryForIds(c context.Context, f *filter.F) (
|
||||
func (d *D) CountEvents(c context.Context, f *filter.F) (
|
||||
count int, approximate bool, err error,
|
||||
) {
|
||||
// Build query with count
|
||||
query := d.buildDQLQuery(f, false)
|
||||
|
||||
// Modify to count instead of returning full data
|
||||
query = strings.Replace(query, "uid\n\t\t\tevent.id\n\t\t\tevent.kind\n\t\t\tevent.created_at\n\t\t\tevent.content\n\t\t\tevent.sig\n\t\t\tevent.pubkey\n\t\t\tevent.tags", "count(uid)", 1)
|
||||
// Build query requesting only count
|
||||
query := d.buildDQLQueryWithFields(f, false, []string{"count(uid)"})
|
||||
|
||||
resp, err := d.Query(c, query)
|
||||
if err != nil {
|
||||
|
||||
@@ -127,10 +127,8 @@ func (d *D) buildEventNQuads(ev *event.E, serial uint64) string {
|
||||
|
||||
// GetSerialsFromFilter returns event serials matching a filter
|
||||
func (d *D) GetSerialsFromFilter(f *filter.F) (serials types.Uint40s, err error) {
|
||||
// For dgraph, we'll use the event.serial field
|
||||
// This is a stub implementation
|
||||
err = fmt.Errorf("not implemented")
|
||||
return
|
||||
// Use QueryForSerials which already implements the proper filter logic
|
||||
return d.QueryForSerials(context.Background(), f)
|
||||
}
|
||||
|
||||
// WouldReplaceEvent checks if an event would replace existing events
|
||||
|
||||
132
pkg/neo4j/README.md
Normal file
132
pkg/neo4j/README.md
Normal file
@@ -0,0 +1,132 @@
|
||||
# Neo4j Database Backend
|
||||
|
||||
A graph database backend implementation for the ORLY Nostr relay using Neo4j.
|
||||
|
||||
## Quick Start
|
||||
|
||||
### 1. Start Neo4j
|
||||
|
||||
```bash
|
||||
docker run -d --name neo4j \
|
||||
-p 7474:7474 -p 7687:7687 \
|
||||
-e NEO4J_AUTH=neo4j/password \
|
||||
neo4j:5.15
|
||||
```
|
||||
|
||||
### 2. Configure Environment
|
||||
|
||||
```bash
|
||||
export ORLY_DB_TYPE=neo4j
|
||||
export ORLY_NEO4J_URI=bolt://localhost:7687
|
||||
export ORLY_NEO4J_USER=neo4j
|
||||
export ORLY_NEO4J_PASSWORD=password
|
||||
```
|
||||
|
||||
### 3. Run ORLY
|
||||
|
||||
```bash
|
||||
./orly
|
||||
```
|
||||
|
||||
## Features
|
||||
|
||||
- **Graph-Native Storage**: Events, authors, and tags stored as nodes and relationships
|
||||
- **Efficient Queries**: Leverages Neo4j's native graph traversal for tag and social graph queries
|
||||
- **Cypher Query Language**: Powerful, expressive query language for complex filters
|
||||
- **Automatic Indexing**: Unique constraints and indexes for optimal performance
|
||||
- **Relationship Queries**: Native support for event references, mentions, and tags
|
||||
|
||||
## Architecture
|
||||
|
||||
See [docs/NEO4J_BACKEND.md](../../docs/NEO4J_BACKEND.md) for comprehensive documentation on:
|
||||
- Graph schema design
|
||||
- How Nostr REQ messages are implemented in Cypher
|
||||
- Performance tuning
|
||||
- Development guide
|
||||
- Comparison with other backends
|
||||
|
||||
## File Structure
|
||||
|
||||
- `neo4j.go` - Main database implementation
|
||||
- `schema.go` - Graph schema and index definitions
|
||||
- `query-events.go` - REQ filter to Cypher translation
|
||||
- `save-event.go` - Event storage with relationship creation
|
||||
- `fetch-event.go` - Event retrieval by serial/ID
|
||||
- `serial.go` - Serial number management
|
||||
- `markers.go` - Metadata key-value storage
|
||||
- `identity.go` - Relay identity management
|
||||
- `delete.go` - Event deletion (NIP-09)
|
||||
- `subscriptions.go` - Subscription management
|
||||
- `nip43.go` - Invite-based ACL (NIP-43)
|
||||
- `import-export.go` - Event import/export
|
||||
- `logger.go` - Logging infrastructure
|
||||
|
||||
## Testing
|
||||
|
||||
```bash
|
||||
# Start Neo4j test instance
|
||||
docker run -d --name neo4j-test \
|
||||
-p 7687:7687 \
|
||||
-e NEO4J_AUTH=neo4j/test \
|
||||
neo4j:5.15
|
||||
|
||||
# Run tests
|
||||
ORLY_NEO4J_URI="bolt://localhost:7687" \
|
||||
ORLY_NEO4J_USER="neo4j" \
|
||||
ORLY_NEO4J_PASSWORD="test" \
|
||||
go test ./pkg/neo4j/...
|
||||
|
||||
# Cleanup
|
||||
docker rm -f neo4j-test
|
||||
```
|
||||
|
||||
## Example Cypher Queries
|
||||
|
||||
### Find all events by an author
|
||||
```cypher
|
||||
MATCH (e:Event {pubkey: "abc123..."})
|
||||
RETURN e
|
||||
ORDER BY e.created_at DESC
|
||||
```
|
||||
|
||||
### Find events with specific tags
|
||||
```cypher
|
||||
MATCH (e:Event)-[:TAGGED_WITH]->(t:Tag {type: "t", value: "bitcoin"})
|
||||
RETURN e
|
||||
```
|
||||
|
||||
### Social graph query
|
||||
```cypher
|
||||
MATCH (author:Author {pubkey: "abc123..."})
|
||||
<-[:AUTHORED_BY]-(e:Event)
|
||||
-[:MENTIONS]->(mentioned:Author)
|
||||
RETURN author, e, mentioned
|
||||
```
|
||||
|
||||
## Performance Tips
|
||||
|
||||
1. **Use Limits**: Always include LIMIT in queries
|
||||
2. **Index Usage**: Ensure queries use indexed properties (id, kind, created_at)
|
||||
3. **Parameterize**: Use parameterized queries to enable query plan caching
|
||||
4. **Monitor**: Use `EXPLAIN` and `PROFILE` to analyze query performance
|
||||
|
||||
## Limitations
|
||||
|
||||
- Requires external Neo4j database (not embedded)
|
||||
- Higher memory usage compared to Badger
|
||||
- Metadata still uses Badger (markers, subscriptions)
|
||||
- More complex deployment than single-binary solutions
|
||||
|
||||
## Why Neo4j for Nostr?
|
||||
|
||||
Nostr is inherently a social graph with heavy relationship queries:
|
||||
- Event references (e-tags) → Graph edges
|
||||
- Author mentions (p-tags) → Graph edges
|
||||
- Follow relationships → Graph structure
|
||||
- Thread traversal → Path queries
|
||||
|
||||
Neo4j excels at these patterns, making it a natural fit for relationship-heavy Nostr queries.
|
||||
|
||||
## License
|
||||
|
||||
Same as ORLY relay project.
|
||||
159
pkg/neo4j/delete.go
Normal file
159
pkg/neo4j/delete.go
Normal file
@@ -0,0 +1,159 @@
|
||||
package neo4j
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"next.orly.dev/pkg/database/indexes/types"
|
||||
"next.orly.dev/pkg/encoders/event"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
)
|
||||
|
||||
// DeleteEvent deletes an event by its ID
|
||||
func (n *N) DeleteEvent(c context.Context, eid []byte) error {
|
||||
idStr := hex.Enc(eid)
|
||||
|
||||
cypher := "MATCH (e:Event {id: $id}) DETACH DELETE e"
|
||||
params := map[string]any{"id": idStr}
|
||||
|
||||
_, err := n.ExecuteWrite(c, cypher, params)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to delete event: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// DeleteEventBySerial deletes an event by its serial number
|
||||
func (n *N) DeleteEventBySerial(c context.Context, ser *types.Uint40, ev *event.E) error {
|
||||
serial := ser.Get()
|
||||
|
||||
cypher := "MATCH (e:Event {serial: $serial}) DETACH DELETE e"
|
||||
params := map[string]any{"serial": int64(serial)}
|
||||
|
||||
_, err := n.ExecuteWrite(c, cypher, params)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to delete event: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// DeleteExpired deletes expired events (stub implementation)
|
||||
func (n *N) DeleteExpired() {
|
||||
// This would need to implement expiration logic based on event.expiration tag (NIP-40)
|
||||
// For now, this is a no-op
|
||||
}
|
||||
|
||||
// ProcessDelete processes a kind 5 deletion event
|
||||
func (n *N) ProcessDelete(ev *event.E, admins [][]byte) error {
|
||||
// Deletion events (kind 5) can delete events by the same author
|
||||
// or by relay admins
|
||||
|
||||
// Check if this is a kind 5 event
|
||||
if ev.Kind != 5 {
|
||||
return fmt.Errorf("not a deletion event")
|
||||
}
|
||||
|
||||
// Get all 'e' tags (event IDs to delete)
|
||||
eTags := ev.Tags.GetAll([]byte{'e'})
|
||||
if len(eTags) == 0 {
|
||||
return nil // Nothing to delete
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
isAdmin := false
|
||||
|
||||
// Check if author is an admin
|
||||
for _, adminPk := range admins {
|
||||
if string(ev.Pubkey[:]) == string(adminPk) {
|
||||
isAdmin = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// For each event ID in e-tags, delete it if allowed
|
||||
for _, eTag := range eTags {
|
||||
if len(eTag.T) < 2 {
|
||||
continue
|
||||
}
|
||||
|
||||
eventIDStr := string(eTag.T[1])
|
||||
eventID, err := hex.Dec(eventIDStr)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// Fetch the event to check authorship
|
||||
cypher := "MATCH (e:Event {id: $id}) RETURN e.pubkey AS pubkey"
|
||||
params := map[string]any{"id": eventIDStr}
|
||||
|
||||
result, err := n.ExecuteRead(ctx, cypher, params)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
if result.Next(ctx) {
|
||||
record := result.Record()
|
||||
if record != nil {
|
||||
pubkeyValue, found := record.Get("pubkey")
|
||||
if found {
|
||||
if pubkeyStr, ok := pubkeyValue.(string); ok {
|
||||
pubkey, err := hex.Dec(pubkeyStr)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// Check if deletion is allowed (same author or admin)
|
||||
canDelete := isAdmin || string(ev.Pubkey[:]) == string(pubkey)
|
||||
if canDelete {
|
||||
// Delete the event
|
||||
if err := n.DeleteEvent(ctx, eventID); err != nil {
|
||||
n.Logger.Warningf("failed to delete event %s: %v", eventIDStr, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// CheckForDeleted checks if an event has been deleted
|
||||
func (n *N) CheckForDeleted(ev *event.E, admins [][]byte) error {
|
||||
// Query for kind 5 events that reference this event
|
||||
ctx := context.Background()
|
||||
idStr := hex.Enc(ev.ID[:])
|
||||
|
||||
// Build cypher query to find deletion events
|
||||
cypher := `
|
||||
MATCH (target:Event {id: $targetId})
|
||||
MATCH (delete:Event {kind: 5})-[:REFERENCES]->(target)
|
||||
WHERE delete.pubkey = $pubkey OR delete.pubkey IN $admins
|
||||
RETURN delete.id AS id
|
||||
LIMIT 1`
|
||||
|
||||
adminPubkeys := make([]string, len(admins))
|
||||
for i, admin := range admins {
|
||||
adminPubkeys[i] = hex.Enc(admin)
|
||||
}
|
||||
|
||||
params := map[string]any{
|
||||
"targetId": idStr,
|
||||
"pubkey": hex.Enc(ev.Pubkey[:]),
|
||||
"admins": adminPubkeys,
|
||||
}
|
||||
|
||||
result, err := n.ExecuteRead(ctx, cypher, params)
|
||||
if err != nil {
|
||||
return nil // Not deleted
|
||||
}
|
||||
|
||||
if result.Next(ctx) {
|
||||
return fmt.Errorf("event has been deleted")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
433
pkg/neo4j/fetch-event.go
Normal file
433
pkg/neo4j/fetch-event.go
Normal file
@@ -0,0 +1,433 @@
|
||||
package neo4j
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"next.orly.dev/pkg/database"
|
||||
"next.orly.dev/pkg/database/indexes/types"
|
||||
"next.orly.dev/pkg/encoders/event"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
"next.orly.dev/pkg/encoders/tag"
|
||||
"next.orly.dev/pkg/interfaces/store"
|
||||
)
|
||||
|
||||
// FetchEventBySerial retrieves an event by its serial number
|
||||
func (n *N) FetchEventBySerial(ser *types.Uint40) (ev *event.E, err error) {
|
||||
serial := ser.Get()
|
||||
|
||||
cypher := `
|
||||
MATCH (e:Event {serial: $serial})
|
||||
RETURN e.id AS id,
|
||||
e.kind AS kind,
|
||||
e.created_at AS created_at,
|
||||
e.content AS content,
|
||||
e.sig AS sig,
|
||||
e.pubkey AS pubkey,
|
||||
e.tags AS tags`
|
||||
|
||||
params := map[string]any{"serial": int64(serial)}
|
||||
|
||||
result, err := n.ExecuteRead(context.Background(), cypher, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to fetch event by serial: %w", err)
|
||||
}
|
||||
|
||||
evs, err := n.parseEventsFromResult(result)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(evs) == 0 {
|
||||
return nil, fmt.Errorf("event not found")
|
||||
}
|
||||
|
||||
return evs[0], nil
|
||||
}
|
||||
|
||||
// FetchEventsBySerials retrieves multiple events by their serial numbers
|
||||
func (n *N) FetchEventsBySerials(serials []*types.Uint40) (
|
||||
events map[uint64]*event.E, err error,
|
||||
) {
|
||||
if len(serials) == 0 {
|
||||
return make(map[uint64]*event.E), nil
|
||||
}
|
||||
|
||||
// Build list of serial numbers
|
||||
serialNums := make([]int64, len(serials))
|
||||
for i, ser := range serials {
|
||||
serialNums[i] = int64(ser.Get())
|
||||
}
|
||||
|
||||
cypher := `
|
||||
MATCH (e:Event)
|
||||
WHERE e.serial IN $serials
|
||||
RETURN e.id AS id,
|
||||
e.kind AS kind,
|
||||
e.created_at AS created_at,
|
||||
e.content AS content,
|
||||
e.sig AS sig,
|
||||
e.pubkey AS pubkey,
|
||||
e.tags AS tags,
|
||||
e.serial AS serial`
|
||||
|
||||
params := map[string]any{"serials": serialNums}
|
||||
|
||||
result, err := n.ExecuteRead(context.Background(), cypher, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to fetch events by serials: %w", err)
|
||||
}
|
||||
|
||||
// Parse events and map by serial
|
||||
events = make(map[uint64]*event.E)
|
||||
ctx := context.Background()
|
||||
|
||||
for result.Next(ctx) {
|
||||
record := result.Record()
|
||||
if record == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// Parse event
|
||||
idRaw, _ := record.Get("id")
|
||||
kindRaw, _ := record.Get("kind")
|
||||
createdAtRaw, _ := record.Get("created_at")
|
||||
contentRaw, _ := record.Get("content")
|
||||
sigRaw, _ := record.Get("sig")
|
||||
pubkeyRaw, _ := record.Get("pubkey")
|
||||
tagsRaw, _ := record.Get("tags")
|
||||
serialRaw, _ := record.Get("serial")
|
||||
|
||||
idStr, _ := idRaw.(string)
|
||||
kind, _ := kindRaw.(int64)
|
||||
createdAt, _ := createdAtRaw.(int64)
|
||||
content, _ := contentRaw.(string)
|
||||
sigStr, _ := sigRaw.(string)
|
||||
pubkeyStr, _ := pubkeyRaw.(string)
|
||||
tagsStr, _ := tagsRaw.(string)
|
||||
serialVal, _ := serialRaw.(int64)
|
||||
|
||||
id, err := hex.Dec(idStr)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
sig, err := hex.Dec(sigStr)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
pubkey, err := hex.Dec(pubkeyStr)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
tags := tag.NewS()
|
||||
if tagsStr != "" {
|
||||
_ = tags.UnmarshalJSON([]byte(tagsStr))
|
||||
}
|
||||
|
||||
e := &event.E{
|
||||
Kind: uint16(kind),
|
||||
CreatedAt: createdAt,
|
||||
Content: []byte(content),
|
||||
Tags: tags,
|
||||
}
|
||||
|
||||
copy(e.ID[:], id)
|
||||
copy(e.Sig[:], sig)
|
||||
copy(e.Pubkey[:], pubkey)
|
||||
|
||||
events[uint64(serialVal)] = e
|
||||
}
|
||||
|
||||
return events, nil
|
||||
}
|
||||
|
||||
// GetSerialById retrieves the serial number for an event ID
|
||||
func (n *N) GetSerialById(id []byte) (ser *types.Uint40, err error) {
|
||||
idStr := hex.Enc(id)
|
||||
|
||||
cypher := "MATCH (e:Event {id: $id}) RETURN e.serial AS serial"
|
||||
params := map[string]any{"id": idStr}
|
||||
|
||||
result, err := n.ExecuteRead(context.Background(), cypher, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get serial by ID: %w", err)
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
if result.Next(ctx) {
|
||||
record := result.Record()
|
||||
if record != nil {
|
||||
serialRaw, found := record.Get("serial")
|
||||
if found {
|
||||
if serialVal, ok := serialRaw.(int64); ok {
|
||||
ser = &types.Uint40{}
|
||||
ser.Set(uint64(serialVal))
|
||||
return ser, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("event not found")
|
||||
}
|
||||
|
||||
// GetSerialsByIds retrieves serial numbers for multiple event IDs
|
||||
func (n *N) GetSerialsByIds(ids *tag.T) (
|
||||
serials map[string]*types.Uint40, err error,
|
||||
) {
|
||||
serials = make(map[string]*types.Uint40)
|
||||
|
||||
if len(ids.T) == 0 {
|
||||
return serials, nil
|
||||
}
|
||||
|
||||
// Extract ID strings
|
||||
idStrs := make([]string, 0, len(ids.T))
|
||||
for _, idTag := range ids.T {
|
||||
if len(idTag) >= 2 {
|
||||
idStrs = append(idStrs, string(idTag[1]))
|
||||
}
|
||||
}
|
||||
|
||||
if len(idStrs) == 0 {
|
||||
return serials, nil
|
||||
}
|
||||
|
||||
cypher := `
|
||||
MATCH (e:Event)
|
||||
WHERE e.id IN $ids
|
||||
RETURN e.id AS id, e.serial AS serial`
|
||||
|
||||
params := map[string]any{"ids": idStrs}
|
||||
|
||||
result, err := n.ExecuteRead(context.Background(), cypher, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get serials by IDs: %w", err)
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
for result.Next(ctx) {
|
||||
record := result.Record()
|
||||
if record == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
idRaw, found := record.Get("id")
|
||||
if !found {
|
||||
continue
|
||||
}
|
||||
serialRaw, found := record.Get("serial")
|
||||
if !found {
|
||||
continue
|
||||
}
|
||||
|
||||
idStr, _ := idRaw.(string)
|
||||
serialVal, _ := serialRaw.(int64)
|
||||
|
||||
serial := &types.Uint40{}
|
||||
serial.Set(uint64(serialVal))
|
||||
serials[idStr] = serial
|
||||
}
|
||||
|
||||
return serials, nil
|
||||
}
|
||||
|
||||
// GetSerialsByIdsWithFilter retrieves serials with a filter function
|
||||
func (n *N) GetSerialsByIdsWithFilter(
|
||||
ids *tag.T, fn func(ev *event.E, ser *types.Uint40) bool,
|
||||
) (serials map[string]*types.Uint40, err error) {
|
||||
serials = make(map[string]*types.Uint40)
|
||||
|
||||
if fn == nil {
|
||||
// No filter, just return all
|
||||
return n.GetSerialsByIds(ids)
|
||||
}
|
||||
|
||||
// With filter, need to fetch events
|
||||
for _, idTag := range ids.T {
|
||||
if len(idTag) < 2 {
|
||||
continue
|
||||
}
|
||||
|
||||
idBytes, err := hex.Dec(string(idTag[1]))
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
serial, err := n.GetSerialById(idBytes)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
ev, err := n.FetchEventBySerial(serial)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
if fn(ev, serial) {
|
||||
serials[string(idTag[1])] = serial
|
||||
}
|
||||
}
|
||||
|
||||
return serials, nil
|
||||
}
|
||||
|
||||
// GetSerialsByRange retrieves serials within a range
|
||||
func (n *N) GetSerialsByRange(idx database.Range) (
|
||||
serials types.Uint40s, err error,
|
||||
) {
|
||||
// This would need to be implemented based on how ranges are defined
|
||||
// For now, returning not implemented
|
||||
err = fmt.Errorf("not implemented")
|
||||
return
|
||||
}
|
||||
|
||||
// GetFullIdPubkeyBySerial retrieves ID and pubkey for a serial number
|
||||
func (n *N) GetFullIdPubkeyBySerial(ser *types.Uint40) (
|
||||
fidpk *store.IdPkTs, err error,
|
||||
) {
|
||||
serial := ser.Get()
|
||||
|
||||
cypher := `
|
||||
MATCH (e:Event {serial: $serial})
|
||||
RETURN e.id AS id,
|
||||
e.pubkey AS pubkey,
|
||||
e.created_at AS created_at`
|
||||
|
||||
params := map[string]any{"serial": int64(serial)}
|
||||
|
||||
result, err := n.ExecuteRead(context.Background(), cypher, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get ID and pubkey by serial: %w", err)
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
if result.Next(ctx) {
|
||||
record := result.Record()
|
||||
if record != nil {
|
||||
idRaw, found := record.Get("id")
|
||||
if !found {
|
||||
return nil, fmt.Errorf("event not found")
|
||||
}
|
||||
pubkeyRaw, found := record.Get("pubkey")
|
||||
if !found {
|
||||
return nil, fmt.Errorf("event not found")
|
||||
}
|
||||
createdAtRaw, found := record.Get("created_at")
|
||||
if !found {
|
||||
return nil, fmt.Errorf("event not found")
|
||||
}
|
||||
|
||||
idStr, _ := idRaw.(string)
|
||||
pubkeyStr, _ := pubkeyRaw.(string)
|
||||
createdAt, _ := createdAtRaw.(int64)
|
||||
|
||||
id, err := hex.Dec(idStr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
pubkey, err := hex.Dec(pubkeyStr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
fidpk = &store.IdPkTs{
|
||||
Id: id,
|
||||
Pub: pubkey,
|
||||
Ts: createdAt,
|
||||
Ser: serial,
|
||||
}
|
||||
|
||||
return fidpk, nil
|
||||
}
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("event not found")
|
||||
}
|
||||
|
||||
// GetFullIdPubkeyBySerials retrieves IDs and pubkeys for multiple serials
|
||||
func (n *N) GetFullIdPubkeyBySerials(sers []*types.Uint40) (
|
||||
fidpks []*store.IdPkTs, err error,
|
||||
) {
|
||||
fidpks = make([]*store.IdPkTs, 0, len(sers))
|
||||
|
||||
if len(sers) == 0 {
|
||||
return fidpks, nil
|
||||
}
|
||||
|
||||
// Build list of serial numbers
|
||||
serialNums := make([]int64, len(sers))
|
||||
for i, ser := range sers {
|
||||
serialNums[i] = int64(ser.Get())
|
||||
}
|
||||
|
||||
cypher := `
|
||||
MATCH (e:Event)
|
||||
WHERE e.serial IN $serials
|
||||
RETURN e.id AS id,
|
||||
e.pubkey AS pubkey,
|
||||
e.created_at AS created_at,
|
||||
e.serial AS serial`
|
||||
|
||||
params := map[string]any{"serials": serialNums}
|
||||
|
||||
result, err := n.ExecuteRead(context.Background(), cypher, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get IDs and pubkeys by serials: %w", err)
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
for result.Next(ctx) {
|
||||
record := result.Record()
|
||||
if record == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
idRaw, found := record.Get("id")
|
||||
if !found {
|
||||
continue
|
||||
}
|
||||
pubkeyRaw, found := record.Get("pubkey")
|
||||
if !found {
|
||||
continue
|
||||
}
|
||||
createdAtRaw, found := record.Get("created_at")
|
||||
if !found {
|
||||
continue
|
||||
}
|
||||
serialRaw, found := record.Get("serial")
|
||||
if !found {
|
||||
continue
|
||||
}
|
||||
|
||||
idStr, _ := idRaw.(string)
|
||||
pubkeyStr, _ := pubkeyRaw.(string)
|
||||
createdAt, _ := createdAtRaw.(int64)
|
||||
serialVal, _ := serialRaw.(int64)
|
||||
|
||||
id, err := hex.Dec(idStr)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
pubkey, err := hex.Dec(pubkeyStr)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
fidpks = append(fidpks, &store.IdPkTs{
|
||||
Id: id,
|
||||
Pub: pubkey,
|
||||
Ts: createdAt,
|
||||
Ser: uint64(serialVal),
|
||||
})
|
||||
}
|
||||
|
||||
return fidpks, nil
|
||||
}
|
||||
44
pkg/neo4j/identity.go
Normal file
44
pkg/neo4j/identity.go
Normal file
@@ -0,0 +1,44 @@
|
||||
package neo4j
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"next.orly.dev/pkg/crypto/keys"
|
||||
)
|
||||
|
||||
// Relay identity methods
|
||||
// We use the marker system to store the relay's private key
|
||||
|
||||
const relayIdentityMarkerKey = "relay_identity_secret"
|
||||
|
||||
// GetRelayIdentitySecret retrieves the relay's identity secret key
|
||||
func (n *N) GetRelayIdentitySecret() (skb []byte, err error) {
|
||||
return n.GetMarker(relayIdentityMarkerKey)
|
||||
}
|
||||
|
||||
// SetRelayIdentitySecret sets the relay's identity secret key
|
||||
func (n *N) SetRelayIdentitySecret(skb []byte) error {
|
||||
return n.SetMarker(relayIdentityMarkerKey, skb)
|
||||
}
|
||||
|
||||
// GetOrCreateRelayIdentitySecret retrieves or creates the relay identity
|
||||
func (n *N) GetOrCreateRelayIdentitySecret() (skb []byte, err error) {
|
||||
skb, err = n.GetRelayIdentitySecret()
|
||||
if err == nil {
|
||||
return skb, nil
|
||||
}
|
||||
|
||||
// Generate new identity
|
||||
skb, err = keys.GenerateSecretKey()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to generate identity: %w", err)
|
||||
}
|
||||
|
||||
// Store it
|
||||
if err = n.SetRelayIdentitySecret(skb); err != nil {
|
||||
return nil, fmt.Errorf("failed to store identity: %w", err)
|
||||
}
|
||||
|
||||
n.Logger.Infof("generated new relay identity")
|
||||
return skb, nil
|
||||
}
|
||||
97
pkg/neo4j/import-export.go
Normal file
97
pkg/neo4j/import-export.go
Normal file
@@ -0,0 +1,97 @@
|
||||
package neo4j
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"next.orly.dev/pkg/encoders/event"
|
||||
)
|
||||
|
||||
// Import imports events from a reader (JSONL format)
|
||||
func (n *N) Import(rr io.Reader) {
|
||||
n.ImportEventsFromReader(context.Background(), rr)
|
||||
}
|
||||
|
||||
// Export exports events to a writer (JSONL format)
|
||||
func (n *N) Export(c context.Context, w io.Writer, pubkeys ...[]byte) {
|
||||
// Query all events or events for specific pubkeys
|
||||
// Write as JSONL
|
||||
|
||||
// Stub implementation
|
||||
fmt.Fprintf(w, "# Export not yet implemented for neo4j\n")
|
||||
}
|
||||
|
||||
// ImportEventsFromReader imports events from a reader
|
||||
func (n *N) ImportEventsFromReader(ctx context.Context, rr io.Reader) error {
|
||||
scanner := bufio.NewScanner(rr)
|
||||
scanner.Buffer(make([]byte, 1024*1024), 10*1024*1024) // 10MB max line size
|
||||
|
||||
count := 0
|
||||
for scanner.Scan() {
|
||||
line := scanner.Bytes()
|
||||
if len(line) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
// Skip comments
|
||||
if line[0] == '#' {
|
||||
continue
|
||||
}
|
||||
|
||||
// Parse event
|
||||
ev := &event.E{}
|
||||
if err := json.Unmarshal(line, ev); err != nil {
|
||||
n.Logger.Warningf("failed to parse event: %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Save event
|
||||
if _, err := n.SaveEvent(ctx, ev); err != nil {
|
||||
n.Logger.Warningf("failed to import event: %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
count++
|
||||
if count%1000 == 0 {
|
||||
n.Logger.Infof("imported %d events", count)
|
||||
}
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
return fmt.Errorf("scanner error: %w", err)
|
||||
}
|
||||
|
||||
n.Logger.Infof("import complete: %d events", count)
|
||||
return nil
|
||||
}
|
||||
|
||||
// ImportEventsFromStrings imports events from JSON strings
|
||||
func (n *N) ImportEventsFromStrings(
|
||||
ctx context.Context,
|
||||
eventJSONs []string,
|
||||
policyManager interface{ CheckPolicy(action string, ev *event.E, pubkey []byte, remote string) (bool, error) },
|
||||
) error {
|
||||
for _, eventJSON := range eventJSONs {
|
||||
ev := &event.E{}
|
||||
if err := json.Unmarshal([]byte(eventJSON), ev); err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// Check policy if manager is provided
|
||||
if policyManager != nil {
|
||||
if allowed, err := policyManager.CheckPolicy("write", ev, ev.Pubkey[:], "import"); err != nil || !allowed {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Save event
|
||||
if _, err := n.SaveEvent(ctx, ev); err != nil {
|
||||
n.Logger.Warningf("failed to import event: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
68
pkg/neo4j/logger.go
Normal file
68
pkg/neo4j/logger.go
Normal file
@@ -0,0 +1,68 @@
|
||||
package neo4j
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"runtime"
|
||||
"strings"
|
||||
|
||||
"go.uber.org/atomic"
|
||||
"lol.mleku.dev"
|
||||
"lol.mleku.dev/log"
|
||||
)
|
||||
|
||||
// NewLogger creates a new dgraph logger.
|
||||
func NewLogger(logLevel int, label string) (l *logger) {
|
||||
l = &logger{Label: label}
|
||||
l.Level.Store(int32(logLevel))
|
||||
return
|
||||
}
|
||||
|
||||
type logger struct {
|
||||
Level atomic.Int32
|
||||
Label string
|
||||
}
|
||||
|
||||
// SetLogLevel atomically adjusts the log level to the given log level code.
|
||||
func (l *logger) SetLogLevel(level int) {
|
||||
l.Level.Store(int32(level))
|
||||
}
|
||||
|
||||
// Errorf is a log printer for this level of message.
|
||||
func (l *logger) Errorf(s string, i ...interface{}) {
|
||||
if l.Level.Load() >= lol.Error {
|
||||
s = l.Label + ": " + s
|
||||
txt := fmt.Sprintf(s, i...)
|
||||
_, file, line, _ := runtime.Caller(2)
|
||||
log.E.F("%s\n%s:%d", strings.TrimSpace(txt), file, line)
|
||||
}
|
||||
}
|
||||
|
||||
// Warningf is a log printer for this level of message.
|
||||
func (l *logger) Warningf(s string, i ...interface{}) {
|
||||
if l.Level.Load() >= lol.Warn {
|
||||
s = l.Label + ": " + s
|
||||
txt := fmt.Sprintf(s, i...)
|
||||
_, file, line, _ := runtime.Caller(2)
|
||||
log.W.F("%s\n%s:%d", strings.TrimSpace(txt), file, line)
|
||||
}
|
||||
}
|
||||
|
||||
// Infof is a log printer for this level of message.
|
||||
func (l *logger) Infof(s string, i ...interface{}) {
|
||||
if l.Level.Load() >= lol.Info {
|
||||
s = l.Label + ": " + s
|
||||
txt := fmt.Sprintf(s, i...)
|
||||
_, file, line, _ := runtime.Caller(2)
|
||||
log.I.F("%s\n%s:%d", strings.TrimSpace(txt), file, line)
|
||||
}
|
||||
}
|
||||
|
||||
// Debugf is a log printer for this level of message.
|
||||
func (l *logger) Debugf(s string, i ...interface{}) {
|
||||
if l.Level.Load() >= lol.Debug {
|
||||
s = l.Label + ": " + s
|
||||
txt := fmt.Sprintf(s, i...)
|
||||
_, file, line, _ := runtime.Caller(2)
|
||||
log.D.F("%s\n%s:%d", strings.TrimSpace(txt), file, line)
|
||||
}
|
||||
}
|
||||
83
pkg/neo4j/markers.go
Normal file
83
pkg/neo4j/markers.go
Normal file
@@ -0,0 +1,83 @@
|
||||
package neo4j
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
)
|
||||
|
||||
// Markers provide metadata key-value storage using Neo4j Marker nodes
|
||||
// We store markers as special nodes with label "Marker"
|
||||
|
||||
// SetMarker sets a metadata marker
|
||||
func (n *N) SetMarker(key string, value []byte) error {
|
||||
valueHex := hex.Enc(value)
|
||||
|
||||
cypher := `
|
||||
MERGE (m:Marker {key: $key})
|
||||
SET m.value = $value`
|
||||
|
||||
params := map[string]any{
|
||||
"key": key,
|
||||
"value": valueHex,
|
||||
}
|
||||
|
||||
_, err := n.ExecuteWrite(context.Background(), cypher, params)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set marker: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetMarker retrieves a metadata marker
|
||||
func (n *N) GetMarker(key string) (value []byte, err error) {
|
||||
cypher := "MATCH (m:Marker {key: $key}) RETURN m.value AS value"
|
||||
params := map[string]any{"key": key}
|
||||
|
||||
result, err := n.ExecuteRead(context.Background(), cypher, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get marker: %w", err)
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
if result.Next(ctx) {
|
||||
record := result.Record()
|
||||
if record != nil {
|
||||
valueRaw, found := record.Get("value")
|
||||
if found {
|
||||
if valueStr, ok := valueRaw.(string); ok {
|
||||
// Decode hex value
|
||||
value, err = hex.Dec(valueStr)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to decode marker value: %w", err)
|
||||
}
|
||||
return value, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("marker not found: %s", key)
|
||||
}
|
||||
|
||||
// HasMarker checks if a marker exists
|
||||
func (n *N) HasMarker(key string) bool {
|
||||
_, err := n.GetMarker(key)
|
||||
return err == nil
|
||||
}
|
||||
|
||||
// DeleteMarker removes a metadata marker
|
||||
func (n *N) DeleteMarker(key string) error {
|
||||
cypher := "MATCH (m:Marker {key: $key}) DELETE m"
|
||||
params := map[string]any{"key": key}
|
||||
|
||||
_, err := n.ExecuteWrite(context.Background(), cypher, params)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to delete marker: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
277
pkg/neo4j/neo4j.go
Normal file
277
pkg/neo4j/neo4j.go
Normal file
@@ -0,0 +1,277 @@
|
||||
// Package neo4j provides a Neo4j-based implementation of the database interface.
|
||||
// Neo4j is a native graph database optimized for relationship-heavy queries,
|
||||
// making it ideal for Nostr's social graph and event reference patterns.
|
||||
package neo4j
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/neo4j/neo4j-go-driver/v5/neo4j"
|
||||
"lol.mleku.dev"
|
||||
"lol.mleku.dev/chk"
|
||||
"next.orly.dev/pkg/database"
|
||||
"next.orly.dev/pkg/encoders/filter"
|
||||
"next.orly.dev/pkg/utils/apputil"
|
||||
)
|
||||
|
||||
// N implements the database.Database interface using Neo4j as the storage backend
|
||||
type N struct {
|
||||
ctx context.Context
|
||||
cancel context.CancelFunc
|
||||
dataDir string
|
||||
Logger *logger
|
||||
|
||||
// Neo4j client connection
|
||||
driver neo4j.DriverWithContext
|
||||
|
||||
// Configuration
|
||||
neo4jURI string
|
||||
neo4jUser string
|
||||
neo4jPassword string
|
||||
|
||||
ready chan struct{} // Closed when database is ready to serve requests
|
||||
}
|
||||
|
||||
// Ensure N implements database.Database interface at compile time
|
||||
var _ database.Database = (*N)(nil)
|
||||
|
||||
// init registers the neo4j database factory
|
||||
func init() {
|
||||
database.RegisterNeo4jFactory(func(
|
||||
ctx context.Context,
|
||||
cancel context.CancelFunc,
|
||||
dataDir string,
|
||||
logLevel string,
|
||||
) (database.Database, error) {
|
||||
return New(ctx, cancel, dataDir, logLevel)
|
||||
})
|
||||
}
|
||||
|
||||
// Config holds configuration options for the Neo4j database
|
||||
type Config struct {
|
||||
DataDir string
|
||||
LogLevel string
|
||||
Neo4jURI string // Neo4j bolt URI (e.g., "bolt://localhost:7687")
|
||||
Neo4jUser string // Authentication username
|
||||
Neo4jPassword string // Authentication password
|
||||
}
|
||||
|
||||
// New creates a new Neo4j-based database instance
|
||||
func New(
|
||||
ctx context.Context, cancel context.CancelFunc, dataDir, logLevel string,
|
||||
) (
|
||||
n *N, err error,
|
||||
) {
|
||||
// Get Neo4j connection details from environment
|
||||
neo4jURI := os.Getenv("ORLY_NEO4J_URI")
|
||||
if neo4jURI == "" {
|
||||
neo4jURI = "bolt://localhost:7687"
|
||||
}
|
||||
neo4jUser := os.Getenv("ORLY_NEO4J_USER")
|
||||
if neo4jUser == "" {
|
||||
neo4jUser = "neo4j"
|
||||
}
|
||||
neo4jPassword := os.Getenv("ORLY_NEO4J_PASSWORD")
|
||||
if neo4jPassword == "" {
|
||||
neo4jPassword = "password"
|
||||
}
|
||||
|
||||
n = &N{
|
||||
ctx: ctx,
|
||||
cancel: cancel,
|
||||
dataDir: dataDir,
|
||||
Logger: NewLogger(lol.GetLogLevel(logLevel), dataDir),
|
||||
neo4jURI: neo4jURI,
|
||||
neo4jUser: neo4jUser,
|
||||
neo4jPassword: neo4jPassword,
|
||||
ready: make(chan struct{}),
|
||||
}
|
||||
|
||||
// Ensure the data directory exists
|
||||
if err = os.MkdirAll(dataDir, 0755); chk.E(err) {
|
||||
return
|
||||
}
|
||||
|
||||
// Ensure directory structure
|
||||
dummyFile := filepath.Join(dataDir, "dummy.sst")
|
||||
if err = apputil.EnsureDir(dummyFile); chk.E(err) {
|
||||
return
|
||||
}
|
||||
|
||||
// Initialize neo4j client connection
|
||||
if err = n.initNeo4jClient(); chk.E(err) {
|
||||
return
|
||||
}
|
||||
|
||||
// Apply Nostr schema to neo4j (create constraints and indexes)
|
||||
if err = n.applySchema(ctx); chk.E(err) {
|
||||
return
|
||||
}
|
||||
|
||||
// Initialize serial counter
|
||||
if err = n.initSerialCounter(); chk.E(err) {
|
||||
return
|
||||
}
|
||||
|
||||
// Start warmup goroutine to signal when database is ready
|
||||
go n.warmup()
|
||||
|
||||
// Setup shutdown handler
|
||||
go func() {
|
||||
<-n.ctx.Done()
|
||||
n.cancel()
|
||||
if n.driver != nil {
|
||||
n.driver.Close(context.Background())
|
||||
}
|
||||
}()
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// initNeo4jClient establishes connection to Neo4j server
|
||||
func (n *N) initNeo4jClient() error {
|
||||
n.Logger.Infof("connecting to neo4j at %s", n.neo4jURI)
|
||||
|
||||
// Create Neo4j driver
|
||||
driver, err := neo4j.NewDriverWithContext(
|
||||
n.neo4jURI,
|
||||
neo4j.BasicAuth(n.neo4jUser, n.neo4jPassword, ""),
|
||||
)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create neo4j driver: %w", err)
|
||||
}
|
||||
|
||||
n.driver = driver
|
||||
|
||||
// Verify connectivity
|
||||
ctx := context.Background()
|
||||
if err := driver.VerifyConnectivity(ctx); err != nil {
|
||||
return fmt.Errorf("failed to verify neo4j connectivity: %w", err)
|
||||
}
|
||||
|
||||
n.Logger.Infof("successfully connected to neo4j")
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
// ExecuteRead executes a read query against Neo4j
|
||||
func (n *N) ExecuteRead(ctx context.Context, cypher string, params map[string]any) (neo4j.ResultWithContext, error) {
|
||||
session := n.driver.NewSession(ctx, neo4j.SessionConfig{AccessMode: neo4j.AccessModeRead})
|
||||
defer session.Close(ctx)
|
||||
|
||||
result, err := session.Run(ctx, cypher, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("neo4j read query failed: %w", err)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// ExecuteWrite executes a write query against Neo4j
|
||||
func (n *N) ExecuteWrite(ctx context.Context, cypher string, params map[string]any) (neo4j.ResultWithContext, error) {
|
||||
session := n.driver.NewSession(ctx, neo4j.SessionConfig{AccessMode: neo4j.AccessModeWrite})
|
||||
defer session.Close(ctx)
|
||||
|
||||
result, err := session.Run(ctx, cypher, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("neo4j write query failed: %w", err)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// ExecuteWriteTransaction executes a transactional write operation
|
||||
func (n *N) ExecuteWriteTransaction(ctx context.Context, work func(tx neo4j.ManagedTransaction) (any, error)) (any, error) {
|
||||
session := n.driver.NewSession(ctx, neo4j.SessionConfig{AccessMode: neo4j.AccessModeWrite})
|
||||
defer session.Close(ctx)
|
||||
|
||||
return session.ExecuteWrite(ctx, work)
|
||||
}
|
||||
|
||||
// Path returns the data directory path
|
||||
func (n *N) Path() string { return n.dataDir }
|
||||
|
||||
// Init initializes the database with a given path (no-op, path set in New)
|
||||
func (n *N) Init(path string) (err error) {
|
||||
// Path already set in New()
|
||||
return nil
|
||||
}
|
||||
|
||||
// Sync flushes pending writes (Neo4j handles persistence automatically)
|
||||
func (n *N) Sync() (err error) {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Close closes the database
|
||||
func (n *N) Close() (err error) {
|
||||
n.cancel()
|
||||
if n.driver != nil {
|
||||
if e := n.driver.Close(context.Background()); e != nil {
|
||||
err = e
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Wipe removes all data
|
||||
func (n *N) Wipe() (err error) {
|
||||
// Delete all nodes and relationships in Neo4j
|
||||
ctx := context.Background()
|
||||
_, err = n.ExecuteWrite(ctx, "MATCH (n) DETACH DELETE n", nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to wipe neo4j database: %w", err)
|
||||
}
|
||||
|
||||
// Remove data directory
|
||||
if err = os.RemoveAll(n.dataDir); chk.E(err) {
|
||||
return
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// SetLogLevel sets the logging level
|
||||
func (n *N) SetLogLevel(level string) {
|
||||
// n.Logger.SetLevel(lol.GetLogLevel(level))
|
||||
}
|
||||
|
||||
// EventIdsBySerial retrieves event IDs by serial range (stub)
|
||||
func (n *N) EventIdsBySerial(start uint64, count int) (
|
||||
evs []uint64, err error,
|
||||
) {
|
||||
err = fmt.Errorf("not implemented")
|
||||
return
|
||||
}
|
||||
|
||||
// RunMigrations runs database migrations (no-op for neo4j)
|
||||
func (n *N) RunMigrations() {
|
||||
// No-op for neo4j
|
||||
}
|
||||
|
||||
// Ready returns a channel that closes when the database is ready to serve requests.
|
||||
// This allows callers to wait for database warmup to complete.
|
||||
func (n *N) Ready() <-chan struct{} {
|
||||
return n.ready
|
||||
}
|
||||
|
||||
// warmup performs database warmup operations and closes the ready channel when complete.
|
||||
// For Neo4j, warmup ensures the connection is healthy and constraints are applied.
|
||||
func (n *N) warmup() {
|
||||
defer close(n.ready)
|
||||
|
||||
// Neo4j connection and schema are already verified during initialization
|
||||
// Just give a brief moment for any background processes to settle
|
||||
n.Logger.Infof("neo4j database warmup complete, ready to serve requests")
|
||||
}
|
||||
|
||||
// GetCachedJSON returns cached query results (not implemented for Neo4j)
|
||||
func (n *N) GetCachedJSON(f *filter.F) ([][]byte, bool) { return nil, false }
|
||||
|
||||
// CacheMarshaledJSON caches marshaled JSON results (not implemented for Neo4j)
|
||||
func (n *N) CacheMarshaledJSON(f *filter.F, marshaledJSON [][]byte) {}
|
||||
|
||||
// InvalidateQueryCache invalidates the query cache (not implemented for Neo4j)
|
||||
func (n *N) InvalidateQueryCache() {}
|
||||
212
pkg/neo4j/nip43.go
Normal file
212
pkg/neo4j/nip43.go
Normal file
@@ -0,0 +1,212 @@
|
||||
package neo4j
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"next.orly.dev/pkg/database"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
)
|
||||
|
||||
// NIP-43 Invite-based ACL methods
|
||||
// Simplified implementation using marker-based storage via Badger
|
||||
// For production, these could use Neo4j nodes with relationships
|
||||
|
||||
// AddNIP43Member adds a member using an invite code
|
||||
func (n *N) AddNIP43Member(pubkey []byte, inviteCode string) error {
|
||||
key := "nip43_" + hex.Enc(pubkey)
|
||||
|
||||
member := database.NIP43Membership{
|
||||
InviteCode: inviteCode,
|
||||
AddedAt: time.Now(),
|
||||
}
|
||||
copy(member.Pubkey[:], pubkey)
|
||||
|
||||
data, err := json.Marshal(member)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal membership: %w", err)
|
||||
}
|
||||
|
||||
// Also add to members list
|
||||
if err := n.addToMembersList(pubkey); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return n.SetMarker(key, data)
|
||||
}
|
||||
|
||||
// RemoveNIP43Member removes a member
|
||||
func (n *N) RemoveNIP43Member(pubkey []byte) error {
|
||||
key := "nip43_" + hex.Enc(pubkey)
|
||||
|
||||
// Remove from members list
|
||||
if err := n.removeFromMembersList(pubkey); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return n.DeleteMarker(key)
|
||||
}
|
||||
|
||||
// IsNIP43Member checks if a pubkey is a member
|
||||
func (n *N) IsNIP43Member(pubkey []byte) (isMember bool, err error) {
|
||||
_, err = n.GetNIP43Membership(pubkey)
|
||||
return err == nil, nil
|
||||
}
|
||||
|
||||
// GetNIP43Membership retrieves membership information
|
||||
func (n *N) GetNIP43Membership(pubkey []byte) (*database.NIP43Membership, error) {
|
||||
key := "nip43_" + hex.Enc(pubkey)
|
||||
|
||||
data, err := n.GetMarker(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var member database.NIP43Membership
|
||||
if err := json.Unmarshal(data, &member); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal membership: %w", err)
|
||||
}
|
||||
|
||||
return &member, nil
|
||||
}
|
||||
|
||||
// GetAllNIP43Members retrieves all member pubkeys
|
||||
func (n *N) GetAllNIP43Members() ([][]byte, error) {
|
||||
data, err := n.GetMarker("nip43_members_list")
|
||||
if err != nil {
|
||||
return nil, nil // No members = empty list
|
||||
}
|
||||
|
||||
var members []string
|
||||
if err := json.Unmarshal(data, &members); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal members list: %w", err)
|
||||
}
|
||||
|
||||
result := make([][]byte, 0, len(members))
|
||||
for _, hexPubkey := range members {
|
||||
pubkey, err := hex.Dec(hexPubkey)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
result = append(result, pubkey)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// StoreInviteCode stores an invite code with expiration
|
||||
func (n *N) StoreInviteCode(code string, expiresAt time.Time) error {
|
||||
key := "invite_" + code
|
||||
|
||||
inviteData := map[string]interface{}{
|
||||
"code": code,
|
||||
"expiresAt": expiresAt,
|
||||
}
|
||||
|
||||
data, err := json.Marshal(inviteData)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal invite: %w", err)
|
||||
}
|
||||
|
||||
return n.SetMarker(key, data)
|
||||
}
|
||||
|
||||
// ValidateInviteCode checks if an invite code is valid
|
||||
func (n *N) ValidateInviteCode(code string) (valid bool, err error) {
|
||||
key := "invite_" + code
|
||||
|
||||
data, err := n.GetMarker(key)
|
||||
if err != nil {
|
||||
return false, nil // Code doesn't exist
|
||||
}
|
||||
|
||||
var inviteData map[string]interface{}
|
||||
if err := json.Unmarshal(data, &inviteData); err != nil {
|
||||
return false, fmt.Errorf("failed to unmarshal invite: %w", err)
|
||||
}
|
||||
|
||||
// Check expiration
|
||||
if expiresStr, ok := inviteData["expiresAt"].(string); ok {
|
||||
expiresAt, err := time.Parse(time.RFC3339, expiresStr)
|
||||
if err == nil && time.Now().After(expiresAt) {
|
||||
return false, nil // Expired
|
||||
}
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// DeleteInviteCode removes an invite code
|
||||
func (n *N) DeleteInviteCode(code string) error {
|
||||
key := "invite_" + code
|
||||
return n.DeleteMarker(key)
|
||||
}
|
||||
|
||||
// PublishNIP43MembershipEvent publishes a membership event
|
||||
func (n *N) PublishNIP43MembershipEvent(kind int, pubkey []byte) error {
|
||||
// This would require publishing an actual Nostr event
|
||||
// For now, just log it
|
||||
n.Logger.Infof("would publish NIP-43 event kind %d for %s", kind, hex.Enc(pubkey))
|
||||
return nil
|
||||
}
|
||||
|
||||
// addToMembersList adds a pubkey to the members list
|
||||
func (n *N) addToMembersList(pubkey []byte) error {
|
||||
data, err := n.GetMarker("nip43_members_list")
|
||||
|
||||
var members []string
|
||||
if err == nil {
|
||||
if err := json.Unmarshal(data, &members); err != nil {
|
||||
return fmt.Errorf("failed to unmarshal members list: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
hexPubkey := hex.Enc(pubkey)
|
||||
|
||||
// Check if already in list
|
||||
for _, member := range members {
|
||||
if member == hexPubkey {
|
||||
return nil // Already in list
|
||||
}
|
||||
}
|
||||
|
||||
members = append(members, hexPubkey)
|
||||
|
||||
data, err = json.Marshal(members)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal members list: %w", err)
|
||||
}
|
||||
|
||||
return n.SetMarker("nip43_members_list", data)
|
||||
}
|
||||
|
||||
// removeFromMembersList removes a pubkey from the members list
|
||||
func (n *N) removeFromMembersList(pubkey []byte) error {
|
||||
data, err := n.GetMarker("nip43_members_list")
|
||||
if err != nil {
|
||||
return nil // No list = nothing to remove
|
||||
}
|
||||
|
||||
var members []string
|
||||
if err := json.Unmarshal(data, &members); err != nil {
|
||||
return fmt.Errorf("failed to unmarshal members list: %w", err)
|
||||
}
|
||||
|
||||
hexPubkey := hex.Enc(pubkey)
|
||||
|
||||
// Filter out the pubkey
|
||||
filtered := make([]string, 0, len(members))
|
||||
for _, member := range members {
|
||||
if member != hexPubkey {
|
||||
filtered = append(filtered, member)
|
||||
}
|
||||
}
|
||||
|
||||
data, err = json.Marshal(filtered)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal members list: %w", err)
|
||||
}
|
||||
|
||||
return n.SetMarker("nip43_members_list", data)
|
||||
}
|
||||
492
pkg/neo4j/query-events.go
Normal file
492
pkg/neo4j/query-events.go
Normal file
@@ -0,0 +1,492 @@
|
||||
package neo4j
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/neo4j/neo4j-go-driver/v5/neo4j"
|
||||
"next.orly.dev/pkg/database/indexes/types"
|
||||
"next.orly.dev/pkg/encoders/event"
|
||||
"next.orly.dev/pkg/encoders/filter"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
"next.orly.dev/pkg/encoders/tag"
|
||||
"next.orly.dev/pkg/interfaces/store"
|
||||
)
|
||||
|
||||
// QueryEvents retrieves events matching the given filter
|
||||
func (n *N) QueryEvents(c context.Context, f *filter.F) (evs event.S, err error) {
|
||||
return n.QueryEventsWithOptions(c, f, false, false)
|
||||
}
|
||||
|
||||
// QueryAllVersions retrieves all versions of events matching the filter
|
||||
func (n *N) QueryAllVersions(c context.Context, f *filter.F) (evs event.S, err error) {
|
||||
return n.QueryEventsWithOptions(c, f, false, true)
|
||||
}
|
||||
|
||||
// QueryEventsWithOptions retrieves events with specific options
|
||||
func (n *N) QueryEventsWithOptions(
|
||||
c context.Context, f *filter.F, includeDeleteEvents bool, showAllVersions bool,
|
||||
) (evs event.S, err error) {
|
||||
// Build Cypher query from Nostr filter
|
||||
cypher, params := n.buildCypherQuery(f, includeDeleteEvents)
|
||||
|
||||
// Execute query
|
||||
result, err := n.ExecuteRead(c, cypher, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to execute query: %w", err)
|
||||
}
|
||||
|
||||
// Parse response
|
||||
evs, err = n.parseEventsFromResult(result)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse events: %w", err)
|
||||
}
|
||||
|
||||
return evs, nil
|
||||
}
|
||||
|
||||
// buildCypherQuery constructs a Cypher query from a Nostr filter
|
||||
// This is the core translation layer between Nostr's REQ filter format and Neo4j's Cypher
|
||||
func (n *N) buildCypherQuery(f *filter.F, includeDeleteEvents bool) (string, map[string]any) {
|
||||
params := make(map[string]any)
|
||||
var whereClauses []string
|
||||
|
||||
// Start with basic MATCH clause
|
||||
matchClause := "MATCH (e:Event)"
|
||||
|
||||
// IDs filter - uses exact match or prefix matching
|
||||
if len(f.Ids.T) > 0 {
|
||||
idConditions := make([]string, len(f.Ids.T))
|
||||
for i, id := range f.Ids.T {
|
||||
paramName := fmt.Sprintf("id_%d", i)
|
||||
hexID := hex.Enc(id)
|
||||
|
||||
// Handle prefix matching for partial IDs
|
||||
if len(id) < 32 { // Full event ID is 32 bytes (64 hex chars)
|
||||
idConditions[i] = fmt.Sprintf("e.id STARTS WITH $%s", paramName)
|
||||
} else {
|
||||
idConditions[i] = fmt.Sprintf("e.id = $%s", paramName)
|
||||
}
|
||||
params[paramName] = hexID
|
||||
}
|
||||
whereClauses = append(whereClauses, "("+strings.Join(idConditions, " OR ")+")")
|
||||
}
|
||||
|
||||
// Authors filter - supports prefix matching for partial pubkeys
|
||||
if len(f.Authors.T) > 0 {
|
||||
authorConditions := make([]string, len(f.Authors.T))
|
||||
for i, author := range f.Authors.T {
|
||||
paramName := fmt.Sprintf("author_%d", i)
|
||||
hexAuthor := hex.Enc(author)
|
||||
|
||||
// Handle prefix matching for partial pubkeys
|
||||
if len(author) < 32 { // Full pubkey is 32 bytes (64 hex chars)
|
||||
authorConditions[i] = fmt.Sprintf("e.pubkey STARTS WITH $%s", paramName)
|
||||
} else {
|
||||
authorConditions[i] = fmt.Sprintf("e.pubkey = $%s", paramName)
|
||||
}
|
||||
params[paramName] = hexAuthor
|
||||
}
|
||||
whereClauses = append(whereClauses, "("+strings.Join(authorConditions, " OR ")+")")
|
||||
}
|
||||
|
||||
// Kinds filter - matches event types
|
||||
if len(f.Kinds.K) > 0 {
|
||||
kinds := make([]int64, len(f.Kinds.K))
|
||||
for i, k := range f.Kinds.K {
|
||||
kinds[i] = int64(k.K)
|
||||
}
|
||||
params["kinds"] = kinds
|
||||
whereClauses = append(whereClauses, "e.kind IN $kinds")
|
||||
}
|
||||
|
||||
// Time range filters - for temporal queries
|
||||
if f.Since != nil {
|
||||
params["since"] = f.Since.V
|
||||
whereClauses = append(whereClauses, "e.created_at >= $since")
|
||||
}
|
||||
if f.Until != nil {
|
||||
params["until"] = f.Until.V
|
||||
whereClauses = append(whereClauses, "e.created_at <= $until")
|
||||
}
|
||||
|
||||
// Tag filters - this is where Neo4j's graph capabilities shine
|
||||
// We can efficiently traverse tag relationships
|
||||
tagIndex := 0
|
||||
for _, tagValues := range *f.Tags {
|
||||
if len(tagValues.T) > 0 {
|
||||
tagVarName := fmt.Sprintf("t%d", tagIndex)
|
||||
tagTypeParam := fmt.Sprintf("tagType_%d", tagIndex)
|
||||
tagValuesParam := fmt.Sprintf("tagValues_%d", tagIndex)
|
||||
|
||||
// Add tag relationship to MATCH clause
|
||||
matchClause += fmt.Sprintf(" OPTIONAL MATCH (e)-[:TAGGED_WITH]->(%s:Tag)", tagVarName)
|
||||
|
||||
// The first element is the tag type (e.g., "e", "p", etc.)
|
||||
tagType := string(tagValues.T[0])
|
||||
|
||||
// Convert remaining tag values to strings (skip first element which is the type)
|
||||
tagValueStrings := make([]string, len(tagValues.T)-1)
|
||||
for i, tv := range tagValues.T[1:] {
|
||||
tagValueStrings[i] = string(tv)
|
||||
}
|
||||
|
||||
// Add WHERE conditions for this tag
|
||||
params[tagTypeParam] = tagType
|
||||
params[tagValuesParam] = tagValueStrings
|
||||
whereClauses = append(whereClauses,
|
||||
fmt.Sprintf("(%s.type = $%s AND %s.value IN $%s)",
|
||||
tagVarName, tagTypeParam, tagVarName, tagValuesParam))
|
||||
|
||||
tagIndex++
|
||||
}
|
||||
}
|
||||
|
||||
// Exclude delete events unless requested
|
||||
if !includeDeleteEvents {
|
||||
whereClauses = append(whereClauses, "e.kind <> 5")
|
||||
}
|
||||
|
||||
// Build WHERE clause
|
||||
whereClause := ""
|
||||
if len(whereClauses) > 0 {
|
||||
whereClause = " WHERE " + strings.Join(whereClauses, " AND ")
|
||||
}
|
||||
|
||||
// Build RETURN clause with all event properties
|
||||
returnClause := `
|
||||
RETURN e.id AS id,
|
||||
e.kind AS kind,
|
||||
e.created_at AS created_at,
|
||||
e.content AS content,
|
||||
e.sig AS sig,
|
||||
e.pubkey AS pubkey,
|
||||
e.tags AS tags,
|
||||
e.serial AS serial`
|
||||
|
||||
// Add ordering (most recent first)
|
||||
orderClause := " ORDER BY e.created_at DESC"
|
||||
|
||||
// Add limit if specified
|
||||
limitClause := ""
|
||||
if *f.Limit > 0 {
|
||||
params["limit"] = *f.Limit
|
||||
limitClause = " LIMIT $limit"
|
||||
}
|
||||
|
||||
// Combine all parts
|
||||
cypher := matchClause + whereClause + returnClause + orderClause + limitClause
|
||||
|
||||
return cypher, params
|
||||
}
|
||||
|
||||
// parseEventsFromResult converts Neo4j query results to Nostr events
|
||||
func (n *N) parseEventsFromResult(result any) ([]*event.E, error) {
|
||||
events := make([]*event.E, 0)
|
||||
ctx := context.Background()
|
||||
|
||||
// Type assert to the interface we actually use
|
||||
resultIter, ok := result.(interface {
|
||||
Next(context.Context) bool
|
||||
Record() *neo4j.Record
|
||||
Err() error
|
||||
})
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid result type")
|
||||
}
|
||||
|
||||
// Iterate through result records
|
||||
for resultIter.Next(ctx) {
|
||||
record := resultIter.Record()
|
||||
if record == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// Parse event fields
|
||||
idRaw, _ := record.Get("id")
|
||||
kindRaw, _ := record.Get("kind")
|
||||
createdAtRaw, _ := record.Get("created_at")
|
||||
contentRaw, _ := record.Get("content")
|
||||
sigRaw, _ := record.Get("sig")
|
||||
pubkeyRaw, _ := record.Get("pubkey")
|
||||
tagsRaw, _ := record.Get("tags")
|
||||
|
||||
idStr, _ := idRaw.(string)
|
||||
kind, _ := kindRaw.(int64)
|
||||
createdAt, _ := createdAtRaw.(int64)
|
||||
content, _ := contentRaw.(string)
|
||||
sigStr, _ := sigRaw.(string)
|
||||
pubkeyStr, _ := pubkeyRaw.(string)
|
||||
tagsStr, _ := tagsRaw.(string)
|
||||
|
||||
// Decode hex strings
|
||||
id, err := hex.Dec(idStr)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
sig, err := hex.Dec(sigStr)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
pubkey, err := hex.Dec(pubkeyStr)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// Parse tags from JSON
|
||||
tags := tag.NewS()
|
||||
if tagsStr != "" {
|
||||
_ = tags.UnmarshalJSON([]byte(tagsStr))
|
||||
}
|
||||
|
||||
// Create event
|
||||
e := &event.E{
|
||||
Kind: uint16(kind),
|
||||
CreatedAt: createdAt,
|
||||
Content: []byte(content),
|
||||
Tags: tags,
|
||||
}
|
||||
|
||||
// Copy fixed-size arrays
|
||||
copy(e.ID[:], id)
|
||||
copy(e.Sig[:], sig)
|
||||
copy(e.Pubkey[:], pubkey)
|
||||
|
||||
events = append(events, e)
|
||||
}
|
||||
|
||||
if err := resultIter.Err(); err != nil {
|
||||
return nil, fmt.Errorf("error iterating results: %w", err)
|
||||
}
|
||||
|
||||
return events, nil
|
||||
}
|
||||
|
||||
// QueryDeleteEventsByTargetId retrieves delete events targeting a specific event ID
|
||||
func (n *N) QueryDeleteEventsByTargetId(c context.Context, targetEventId []byte) (
|
||||
evs event.S, err error,
|
||||
) {
|
||||
targetIDStr := hex.Enc(targetEventId)
|
||||
|
||||
// Query for kind 5 events that reference this event
|
||||
// This uses Neo4j's graph traversal to find delete events
|
||||
cypher := `
|
||||
MATCH (target:Event {id: $targetId})
|
||||
MATCH (e:Event {kind: 5})-[:REFERENCES]->(target)
|
||||
RETURN e.id AS id,
|
||||
e.kind AS kind,
|
||||
e.created_at AS created_at,
|
||||
e.content AS content,
|
||||
e.sig AS sig,
|
||||
e.pubkey AS pubkey,
|
||||
e.tags AS tags,
|
||||
e.serial AS serial
|
||||
ORDER BY e.created_at DESC`
|
||||
|
||||
params := map[string]any{"targetId": targetIDStr}
|
||||
|
||||
result, err := n.ExecuteRead(c, cypher, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to query delete events: %w", err)
|
||||
}
|
||||
|
||||
evs, err = n.parseEventsFromResult(result)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse delete events: %w", err)
|
||||
}
|
||||
|
||||
return evs, nil
|
||||
}
|
||||
|
||||
// QueryForSerials retrieves event serials matching a filter
|
||||
func (n *N) QueryForSerials(c context.Context, f *filter.F) (
|
||||
serials types.Uint40s, err error,
|
||||
) {
|
||||
// Build query but only return serial numbers
|
||||
cypher, params := n.buildCypherQuery(f, false)
|
||||
|
||||
// Replace RETURN clause to only fetch serials
|
||||
returnClause := " RETURN e.serial AS serial"
|
||||
cypherParts := strings.Split(cypher, "RETURN")
|
||||
if len(cypherParts) < 2 {
|
||||
return nil, fmt.Errorf("invalid query structure")
|
||||
}
|
||||
|
||||
// Rebuild query with serial-only return
|
||||
cypher = cypherParts[0] + returnClause
|
||||
if strings.Contains(cypherParts[1], "ORDER BY") {
|
||||
orderPart := " ORDER BY" + strings.Split(cypherParts[1], "ORDER BY")[1]
|
||||
cypher += orderPart
|
||||
}
|
||||
|
||||
result, err := n.ExecuteRead(c, cypher, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to query serials: %w", err)
|
||||
}
|
||||
|
||||
// Parse serials from result
|
||||
serials = make([]*types.Uint40, 0)
|
||||
ctx := context.Background()
|
||||
|
||||
resultIter, ok := result.(interface {
|
||||
Next(context.Context) bool
|
||||
Record() *neo4j.Record
|
||||
Err() error
|
||||
})
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid result type")
|
||||
}
|
||||
|
||||
for resultIter.Next(ctx) {
|
||||
record := resultIter.Record()
|
||||
if record == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
serialRaw, found := record.Get("serial")
|
||||
if !found {
|
||||
continue
|
||||
}
|
||||
|
||||
serialVal, ok := serialRaw.(int64)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
serial := types.Uint40{}
|
||||
serial.Set(uint64(serialVal))
|
||||
serials = append(serials, &serial)
|
||||
}
|
||||
|
||||
return serials, nil
|
||||
}
|
||||
|
||||
// QueryForIds retrieves event IDs matching a filter
|
||||
func (n *N) QueryForIds(c context.Context, f *filter.F) (
|
||||
idPkTs []*store.IdPkTs, err error,
|
||||
) {
|
||||
// Build query but only return ID, pubkey, created_at, serial
|
||||
cypher, params := n.buildCypherQuery(f, false)
|
||||
|
||||
// Replace RETURN clause
|
||||
returnClause := `
|
||||
RETURN e.id AS id,
|
||||
e.pubkey AS pubkey,
|
||||
e.created_at AS created_at,
|
||||
e.serial AS serial`
|
||||
|
||||
cypherParts := strings.Split(cypher, "RETURN")
|
||||
if len(cypherParts) < 2 {
|
||||
return nil, fmt.Errorf("invalid query structure")
|
||||
}
|
||||
|
||||
cypher = cypherParts[0] + returnClause
|
||||
if strings.Contains(cypherParts[1], "ORDER BY") {
|
||||
orderPart := " ORDER BY" + strings.Split(cypherParts[1], "ORDER BY")[1]
|
||||
cypher += orderPart
|
||||
}
|
||||
|
||||
result, err := n.ExecuteRead(c, cypher, params)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to query IDs: %w", err)
|
||||
}
|
||||
|
||||
// Parse IDs from result
|
||||
idPkTs = make([]*store.IdPkTs, 0)
|
||||
ctx := context.Background()
|
||||
|
||||
resultIter, ok := result.(interface {
|
||||
Next(context.Context) bool
|
||||
Record() *neo4j.Record
|
||||
Err() error
|
||||
})
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid result type")
|
||||
}
|
||||
|
||||
for resultIter.Next(ctx) {
|
||||
record := resultIter.Record()
|
||||
if record == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
idRaw, _ := record.Get("id")
|
||||
pubkeyRaw, _ := record.Get("pubkey")
|
||||
createdAtRaw, _ := record.Get("created_at")
|
||||
serialRaw, _ := record.Get("serial")
|
||||
|
||||
idStr, _ := idRaw.(string)
|
||||
pubkeyStr, _ := pubkeyRaw.(string)
|
||||
createdAt, _ := createdAtRaw.(int64)
|
||||
serialVal, _ := serialRaw.(int64)
|
||||
|
||||
id, err := hex.Dec(idStr)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
pubkey, err := hex.Dec(pubkeyStr)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
idPkTs = append(idPkTs, &store.IdPkTs{
|
||||
Id: id,
|
||||
Pub: pubkey,
|
||||
Ts: createdAt,
|
||||
Ser: uint64(serialVal),
|
||||
})
|
||||
}
|
||||
|
||||
return idPkTs, nil
|
||||
}
|
||||
|
||||
// CountEvents counts events matching a filter
|
||||
func (n *N) CountEvents(c context.Context, f *filter.F) (
|
||||
count int, approximate bool, err error,
|
||||
) {
|
||||
// Build query but only count results
|
||||
cypher, params := n.buildCypherQuery(f, false)
|
||||
|
||||
// Replace RETURN clause with COUNT
|
||||
returnClause := " RETURN count(e) AS count"
|
||||
cypherParts := strings.Split(cypher, "RETURN")
|
||||
if len(cypherParts) < 2 {
|
||||
return 0, false, fmt.Errorf("invalid query structure")
|
||||
}
|
||||
|
||||
// Remove ORDER BY and LIMIT for count query
|
||||
cypher = cypherParts[0] + returnClause
|
||||
delete(params, "limit") // Remove limit parameter if it exists
|
||||
|
||||
result, err := n.ExecuteRead(c, cypher, params)
|
||||
if err != nil {
|
||||
return 0, false, fmt.Errorf("failed to count events: %w", err)
|
||||
}
|
||||
|
||||
// Parse count from result
|
||||
ctx := context.Background()
|
||||
resultIter, ok := result.(interface {
|
||||
Next(context.Context) bool
|
||||
Record() *neo4j.Record
|
||||
Err() error
|
||||
})
|
||||
if !ok {
|
||||
return 0, false, fmt.Errorf("invalid result type")
|
||||
}
|
||||
|
||||
if resultIter.Next(ctx) {
|
||||
record := resultIter.Record()
|
||||
if record != nil {
|
||||
countRaw, found := record.Get("count")
|
||||
if found {
|
||||
countVal, ok := countRaw.(int64)
|
||||
if ok {
|
||||
count = int(countVal)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return count, false, nil
|
||||
}
|
||||
256
pkg/neo4j/save-event.go
Normal file
256
pkg/neo4j/save-event.go
Normal file
@@ -0,0 +1,256 @@
|
||||
package neo4j
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"next.orly.dev/pkg/database/indexes/types"
|
||||
"next.orly.dev/pkg/encoders/event"
|
||||
"next.orly.dev/pkg/encoders/filter"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
)
|
||||
|
||||
// SaveEvent stores a Nostr event in the Neo4j database.
|
||||
// It creates event nodes and relationships for authors, tags, and references.
|
||||
// This method leverages Neo4j's graph capabilities to model Nostr's social graph naturally.
|
||||
func (n *N) SaveEvent(c context.Context, ev *event.E) (exists bool, err error) {
|
||||
eventID := hex.Enc(ev.ID[:])
|
||||
|
||||
// Check if event already exists
|
||||
checkCypher := "MATCH (e:Event {id: $id}) RETURN e.id AS id"
|
||||
checkParams := map[string]any{"id": eventID}
|
||||
|
||||
result, err := n.ExecuteRead(c, checkCypher, checkParams)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to check event existence: %w", err)
|
||||
}
|
||||
|
||||
// Check if we got a result
|
||||
ctx := context.Background()
|
||||
if result.Next(ctx) {
|
||||
return true, nil // Event already exists
|
||||
}
|
||||
|
||||
// Get next serial number
|
||||
serial, err := n.getNextSerial()
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to get serial number: %w", err)
|
||||
}
|
||||
|
||||
// Build and execute Cypher query to create event with all relationships
|
||||
cypher, params := n.buildEventCreationCypher(ev, serial)
|
||||
|
||||
if _, err = n.ExecuteWrite(c, cypher, params); err != nil {
|
||||
return false, fmt.Errorf("failed to save event: %w", err)
|
||||
}
|
||||
|
||||
return false, nil
|
||||
}
|
||||
|
||||
// buildEventCreationCypher constructs a Cypher query to create an event node with all relationships
|
||||
// This is a single atomic operation that creates:
|
||||
// - Event node with all properties
|
||||
// - Author node and AUTHORED_BY relationship
|
||||
// - Tag nodes and TAGGED_WITH relationships
|
||||
// - Reference relationships (REFERENCES for 'e' tags, MENTIONS for 'p' tags)
|
||||
func (n *N) buildEventCreationCypher(ev *event.E, serial uint64) (string, map[string]any) {
|
||||
params := make(map[string]any)
|
||||
|
||||
// Event properties
|
||||
eventID := hex.Enc(ev.ID[:])
|
||||
authorPubkey := hex.Enc(ev.Pubkey[:])
|
||||
|
||||
params["eventId"] = eventID
|
||||
params["serial"] = serial
|
||||
params["kind"] = int64(ev.Kind)
|
||||
params["createdAt"] = ev.CreatedAt
|
||||
params["content"] = string(ev.Content)
|
||||
params["sig"] = hex.Enc(ev.Sig[:])
|
||||
params["pubkey"] = authorPubkey
|
||||
|
||||
// Serialize tags as JSON string for storage
|
||||
tagsJSON, _ := ev.Tags.MarshalJSON()
|
||||
params["tags"] = string(tagsJSON)
|
||||
|
||||
// Start building the Cypher query
|
||||
// Use MERGE to ensure idempotency for author nodes
|
||||
cypher := `
|
||||
// Create or match author node
|
||||
MERGE (a:Author {pubkey: $pubkey})
|
||||
|
||||
// Create event node
|
||||
CREATE (e:Event {
|
||||
id: $eventId,
|
||||
serial: $serial,
|
||||
kind: $kind,
|
||||
created_at: $createdAt,
|
||||
content: $content,
|
||||
sig: $sig,
|
||||
pubkey: $pubkey,
|
||||
tags: $tags
|
||||
})
|
||||
|
||||
// Link event to author
|
||||
CREATE (e)-[:AUTHORED_BY]->(a)
|
||||
`
|
||||
|
||||
// Process tags to create relationships
|
||||
// Different tag types create different relationship patterns
|
||||
tagNodeIndex := 0
|
||||
eTagIndex := 0
|
||||
pTagIndex := 0
|
||||
|
||||
for _, tagItem := range *ev.Tags {
|
||||
if len(tagItem.T) < 2 {
|
||||
continue
|
||||
}
|
||||
|
||||
tagType := string(tagItem.T[0])
|
||||
tagValue := string(tagItem.T[1])
|
||||
|
||||
switch tagType {
|
||||
case "e": // Event reference - creates REFERENCES relationship
|
||||
// Create reference to another event (if it exists)
|
||||
paramName := fmt.Sprintf("eTag_%d", eTagIndex)
|
||||
params[paramName] = tagValue
|
||||
|
||||
cypher += fmt.Sprintf(`
|
||||
// Reference to event (e-tag)
|
||||
OPTIONAL MATCH (ref%d:Event {id: $%s})
|
||||
FOREACH (ignoreMe IN CASE WHEN ref%d IS NOT NULL THEN [1] ELSE [] END |
|
||||
CREATE (e)-[:REFERENCES]->(ref%d)
|
||||
)
|
||||
`, eTagIndex, paramName, eTagIndex, eTagIndex)
|
||||
|
||||
eTagIndex++
|
||||
|
||||
case "p": // Pubkey mention - creates MENTIONS relationship
|
||||
// Create mention to another author
|
||||
paramName := fmt.Sprintf("pTag_%d", pTagIndex)
|
||||
params[paramName] = tagValue
|
||||
|
||||
cypher += fmt.Sprintf(`
|
||||
// Mention of author (p-tag)
|
||||
MERGE (mentioned%d:Author {pubkey: $%s})
|
||||
CREATE (e)-[:MENTIONS]->(mentioned%d)
|
||||
`, pTagIndex, paramName, pTagIndex)
|
||||
|
||||
pTagIndex++
|
||||
|
||||
default: // Other tags - creates Tag nodes and TAGGED_WITH relationships
|
||||
// Create tag node and relationship
|
||||
typeParam := fmt.Sprintf("tagType_%d", tagNodeIndex)
|
||||
valueParam := fmt.Sprintf("tagValue_%d", tagNodeIndex)
|
||||
params[typeParam] = tagType
|
||||
params[valueParam] = tagValue
|
||||
|
||||
cypher += fmt.Sprintf(`
|
||||
// Generic tag relationship
|
||||
MERGE (tag%d:Tag {type: $%s, value: $%s})
|
||||
CREATE (e)-[:TAGGED_WITH]->(tag%d)
|
||||
`, tagNodeIndex, typeParam, valueParam, tagNodeIndex)
|
||||
|
||||
tagNodeIndex++
|
||||
}
|
||||
}
|
||||
|
||||
// Return the created event
|
||||
cypher += `
|
||||
RETURN e.id AS id`
|
||||
|
||||
return cypher, params
|
||||
}
|
||||
|
||||
// GetSerialsFromFilter returns event serials matching a filter
|
||||
func (n *N) GetSerialsFromFilter(f *filter.F) (serials types.Uint40s, err error) {
|
||||
// Use QueryForSerials with background context
|
||||
return n.QueryForSerials(context.Background(), f)
|
||||
}
|
||||
|
||||
// WouldReplaceEvent checks if an event would replace existing events
|
||||
// This handles replaceable events (kinds 0, 3, and 10000-19999)
|
||||
// and parameterized replaceable events (kinds 30000-39999)
|
||||
func (n *N) WouldReplaceEvent(ev *event.E) (bool, types.Uint40s, error) {
|
||||
// Check for replaceable events (kinds 0, 3, and 10000-19999)
|
||||
isReplaceable := ev.Kind == 0 || ev.Kind == 3 || (ev.Kind >= 10000 && ev.Kind < 20000)
|
||||
|
||||
// Check for parameterized replaceable events (kinds 30000-39999)
|
||||
isParameterizedReplaceable := ev.Kind >= 30000 && ev.Kind < 40000
|
||||
|
||||
if !isReplaceable && !isParameterizedReplaceable {
|
||||
return false, nil, nil
|
||||
}
|
||||
|
||||
authorPubkey := hex.Enc(ev.Pubkey[:])
|
||||
ctx := context.Background()
|
||||
|
||||
var cypher string
|
||||
params := map[string]any{
|
||||
"pubkey": authorPubkey,
|
||||
"kind": int64(ev.Kind),
|
||||
"createdAt": ev.CreatedAt,
|
||||
}
|
||||
|
||||
if isParameterizedReplaceable {
|
||||
// For parameterized replaceable events, we need to match on d-tag as well
|
||||
dTag := ev.Tags.GetFirst([]byte{'d'})
|
||||
if dTag == nil {
|
||||
return false, nil, nil
|
||||
}
|
||||
|
||||
dValue := ""
|
||||
if len(dTag.T) >= 2 {
|
||||
dValue = string(dTag.T[1])
|
||||
}
|
||||
|
||||
params["dValue"] = dValue
|
||||
|
||||
// Query for existing parameterized replaceable events with same kind, pubkey, and d-tag
|
||||
cypher = `
|
||||
MATCH (e:Event {kind: $kind, pubkey: $pubkey})-[:TAGGED_WITH]->(t:Tag {type: 'd', value: $dValue})
|
||||
WHERE e.created_at < $createdAt
|
||||
RETURN e.serial AS serial, e.created_at AS created_at
|
||||
ORDER BY e.created_at DESC`
|
||||
|
||||
} else {
|
||||
// Query for existing replaceable events with same kind and pubkey
|
||||
cypher = `
|
||||
MATCH (e:Event {kind: $kind, pubkey: $pubkey})
|
||||
WHERE e.created_at < $createdAt
|
||||
RETURN e.serial AS serial, e.created_at AS created_at
|
||||
ORDER BY e.created_at DESC`
|
||||
}
|
||||
|
||||
result, err := n.ExecuteRead(ctx, cypher, params)
|
||||
if err != nil {
|
||||
return false, nil, fmt.Errorf("failed to query replaceable events: %w", err)
|
||||
}
|
||||
|
||||
// Parse results
|
||||
var serials types.Uint40s
|
||||
wouldReplace := false
|
||||
|
||||
for result.Next(ctx) {
|
||||
record := result.Record()
|
||||
if record == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
serialRaw, found := record.Get("serial")
|
||||
if !found {
|
||||
continue
|
||||
}
|
||||
|
||||
serialVal, ok := serialRaw.(int64)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
wouldReplace = true
|
||||
serial := types.Uint40{}
|
||||
serial.Set(uint64(serialVal))
|
||||
serials = append(serials, &serial)
|
||||
}
|
||||
|
||||
return wouldReplace, serials, nil
|
||||
}
|
||||
108
pkg/neo4j/schema.go
Normal file
108
pkg/neo4j/schema.go
Normal file
@@ -0,0 +1,108 @@
|
||||
package neo4j
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// applySchema creates Neo4j constraints and indexes for Nostr events
|
||||
// Neo4j uses Cypher queries to define schema constraints and indexes
|
||||
func (n *N) applySchema(ctx context.Context) error {
|
||||
n.Logger.Infof("applying Nostr schema to neo4j")
|
||||
|
||||
// Create constraints and indexes using Cypher queries
|
||||
// Constraints ensure uniqueness and are automatically indexed
|
||||
constraints := []string{
|
||||
// Unique constraint on Event.id (event ID must be unique)
|
||||
"CREATE CONSTRAINT event_id_unique IF NOT EXISTS FOR (e:Event) REQUIRE e.id IS UNIQUE",
|
||||
|
||||
// Unique constraint on Author.pubkey (author public key must be unique)
|
||||
"CREATE CONSTRAINT author_pubkey_unique IF NOT EXISTS FOR (a:Author) REQUIRE a.pubkey IS UNIQUE",
|
||||
|
||||
// Unique constraint on Marker.key (marker key must be unique)
|
||||
"CREATE CONSTRAINT marker_key_unique IF NOT EXISTS FOR (m:Marker) REQUIRE m.key IS UNIQUE",
|
||||
}
|
||||
|
||||
// Additional indexes for query optimization
|
||||
indexes := []string{
|
||||
// Index on Event.kind for kind-based queries
|
||||
"CREATE INDEX event_kind IF NOT EXISTS FOR (e:Event) ON (e.kind)",
|
||||
|
||||
// Index on Event.created_at for time-range queries
|
||||
"CREATE INDEX event_created_at IF NOT EXISTS FOR (e:Event) ON (e.created_at)",
|
||||
|
||||
// Index on Event.serial for serial-based lookups
|
||||
"CREATE INDEX event_serial IF NOT EXISTS FOR (e:Event) ON (e.serial)",
|
||||
|
||||
// Composite index for common query patterns (kind + created_at)
|
||||
"CREATE INDEX event_kind_created_at IF NOT EXISTS FOR (e:Event) ON (e.kind, e.created_at)",
|
||||
|
||||
// Index on Tag.type for tag-type queries
|
||||
"CREATE INDEX tag_type IF NOT EXISTS FOR (t:Tag) ON (t.type)",
|
||||
|
||||
// Index on Tag.value for tag-value queries
|
||||
"CREATE INDEX tag_value IF NOT EXISTS FOR (t:Tag) ON (t.value)",
|
||||
|
||||
// Composite index for tag queries (type + value)
|
||||
"CREATE INDEX tag_type_value IF NOT EXISTS FOR (t:Tag) ON (t.type, t.value)",
|
||||
}
|
||||
|
||||
// Execute all constraint creation queries
|
||||
for _, constraint := range constraints {
|
||||
if _, err := n.ExecuteWrite(ctx, constraint, nil); err != nil {
|
||||
return fmt.Errorf("failed to create constraint: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Execute all index creation queries
|
||||
for _, index := range indexes {
|
||||
if _, err := n.ExecuteWrite(ctx, index, nil); err != nil {
|
||||
return fmt.Errorf("failed to create index: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
n.Logger.Infof("schema applied successfully")
|
||||
return nil
|
||||
}
|
||||
|
||||
// dropAll drops all data from neo4j (useful for testing)
|
||||
func (n *N) dropAll(ctx context.Context) error {
|
||||
n.Logger.Warningf("dropping all data from neo4j")
|
||||
|
||||
// Delete all nodes and relationships
|
||||
_, err := n.ExecuteWrite(ctx, "MATCH (n) DETACH DELETE n", nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to drop all data: %w", err)
|
||||
}
|
||||
|
||||
// Drop all constraints
|
||||
constraints := []string{
|
||||
"DROP CONSTRAINT event_id_unique IF EXISTS",
|
||||
"DROP CONSTRAINT author_pubkey_unique IF EXISTS",
|
||||
"DROP CONSTRAINT marker_key_unique IF EXISTS",
|
||||
}
|
||||
|
||||
for _, constraint := range constraints {
|
||||
_, _ = n.ExecuteWrite(ctx, constraint, nil)
|
||||
// Ignore errors as constraints may not exist
|
||||
}
|
||||
|
||||
// Drop all indexes
|
||||
indexes := []string{
|
||||
"DROP INDEX event_kind IF EXISTS",
|
||||
"DROP INDEX event_created_at IF EXISTS",
|
||||
"DROP INDEX event_serial IF EXISTS",
|
||||
"DROP INDEX event_kind_created_at IF EXISTS",
|
||||
"DROP INDEX tag_type IF EXISTS",
|
||||
"DROP INDEX tag_value IF EXISTS",
|
||||
"DROP INDEX tag_type_value IF EXISTS",
|
||||
}
|
||||
|
||||
for _, index := range indexes {
|
||||
_, _ = n.ExecuteWrite(ctx, index, nil)
|
||||
// Ignore errors as indexes may not exist
|
||||
}
|
||||
|
||||
// Reapply schema after dropping
|
||||
return n.applySchema(ctx)
|
||||
}
|
||||
99
pkg/neo4j/serial.go
Normal file
99
pkg/neo4j/serial.go
Normal file
@@ -0,0 +1,99 @@
|
||||
package neo4j
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// Serial number management
|
||||
// We use a special Marker node in Neo4j to track the next available serial number
|
||||
|
||||
const serialCounterKey = "serial_counter"
|
||||
|
||||
var (
|
||||
serialMutex sync.Mutex
|
||||
)
|
||||
|
||||
// getNextSerial atomically increments and returns the next serial number
|
||||
func (n *N) getNextSerial() (uint64, error) {
|
||||
serialMutex.Lock()
|
||||
defer serialMutex.Unlock()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
// Query current serial value
|
||||
cypher := "MATCH (m:Marker {key: $key}) RETURN m.value AS value"
|
||||
params := map[string]any{"key": serialCounterKey}
|
||||
|
||||
result, err := n.ExecuteRead(ctx, cypher, params)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("failed to query serial counter: %w", err)
|
||||
}
|
||||
|
||||
var currentSerial uint64 = 1
|
||||
if result.Next(ctx) {
|
||||
record := result.Record()
|
||||
if record != nil {
|
||||
valueRaw, found := record.Get("value")
|
||||
if found {
|
||||
if value, ok := valueRaw.(int64); ok {
|
||||
currentSerial = uint64(value)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Increment serial
|
||||
nextSerial := currentSerial + 1
|
||||
|
||||
// Update counter
|
||||
updateCypher := `
|
||||
MERGE (m:Marker {key: $key})
|
||||
SET m.value = $value`
|
||||
updateParams := map[string]any{
|
||||
"key": serialCounterKey,
|
||||
"value": int64(nextSerial),
|
||||
}
|
||||
|
||||
_, err = n.ExecuteWrite(ctx, updateCypher, updateParams)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("failed to update serial counter: %w", err)
|
||||
}
|
||||
|
||||
return currentSerial, nil
|
||||
}
|
||||
|
||||
// initSerialCounter initializes the serial counter if it doesn't exist
|
||||
func (n *N) initSerialCounter() error {
|
||||
ctx := context.Background()
|
||||
|
||||
// Check if counter exists
|
||||
cypher := "MATCH (m:Marker {key: $key}) RETURN m.value AS value"
|
||||
params := map[string]any{"key": serialCounterKey}
|
||||
|
||||
result, err := n.ExecuteRead(ctx, cypher, params)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to check serial counter: %w", err)
|
||||
}
|
||||
|
||||
if result.Next(ctx) {
|
||||
// Counter already exists
|
||||
return nil
|
||||
}
|
||||
|
||||
// Initialize counter at 1
|
||||
initCypher := "CREATE (m:Marker {key: $key, value: $value})"
|
||||
initParams := map[string]any{
|
||||
"key": serialCounterKey,
|
||||
"value": int64(1),
|
||||
}
|
||||
|
||||
_, err = n.ExecuteWrite(ctx, initCypher, initParams)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to initialize serial counter: %w", err)
|
||||
}
|
||||
|
||||
n.Logger.Infof("initialized serial counter")
|
||||
return nil
|
||||
}
|
||||
181
pkg/neo4j/subscriptions.go
Normal file
181
pkg/neo4j/subscriptions.go
Normal file
@@ -0,0 +1,181 @@
|
||||
package neo4j
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"next.orly.dev/pkg/database"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
)
|
||||
|
||||
// Subscription and payment methods
|
||||
// Simplified implementation using marker-based storage via Badger
|
||||
// For production graph-based storage, these could use Neo4j nodes with relationships
|
||||
|
||||
// GetSubscription retrieves subscription information for a pubkey
|
||||
func (n *N) GetSubscription(pubkey []byte) (*database.Subscription, error) {
|
||||
key := "sub_" + hex.Enc(pubkey)
|
||||
data, err := n.GetMarker(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var sub database.Subscription
|
||||
if err := json.Unmarshal(data, &sub); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal subscription: %w", err)
|
||||
}
|
||||
|
||||
return &sub, nil
|
||||
}
|
||||
|
||||
// IsSubscriptionActive checks if a pubkey has an active subscription
|
||||
func (n *N) IsSubscriptionActive(pubkey []byte) (bool, error) {
|
||||
sub, err := n.GetSubscription(pubkey)
|
||||
if err != nil {
|
||||
return false, nil // No subscription = not active
|
||||
}
|
||||
|
||||
return sub.PaidUntil.After(time.Now()), nil
|
||||
}
|
||||
|
||||
// ExtendSubscription extends a subscription by the specified number of days
|
||||
func (n *N) ExtendSubscription(pubkey []byte, days int) error {
|
||||
key := "sub_" + hex.Enc(pubkey)
|
||||
|
||||
// Get existing subscription or create new
|
||||
var sub database.Subscription
|
||||
data, err := n.GetMarker(key)
|
||||
if err == nil {
|
||||
if err := json.Unmarshal(data, &sub); err != nil {
|
||||
return fmt.Errorf("failed to unmarshal subscription: %w", err)
|
||||
}
|
||||
} else {
|
||||
// New subscription - set trial period
|
||||
sub.TrialEnd = time.Now()
|
||||
sub.PaidUntil = time.Now()
|
||||
}
|
||||
|
||||
// Extend expiration
|
||||
if sub.PaidUntil.Before(time.Now()) {
|
||||
sub.PaidUntil = time.Now()
|
||||
}
|
||||
sub.PaidUntil = sub.PaidUntil.Add(time.Duration(days) * 24 * time.Hour)
|
||||
|
||||
// Save
|
||||
data, err = json.Marshal(sub)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal subscription: %w", err)
|
||||
}
|
||||
|
||||
return n.SetMarker(key, data)
|
||||
}
|
||||
|
||||
// RecordPayment records a payment for subscription extension
|
||||
func (n *N) RecordPayment(
|
||||
pubkey []byte, amount int64, invoice, preimage string,
|
||||
) error {
|
||||
// Store payment in payments list
|
||||
key := "payments_" + hex.Enc(pubkey)
|
||||
|
||||
var payments []database.Payment
|
||||
data, err := n.GetMarker(key)
|
||||
if err == nil {
|
||||
if err := json.Unmarshal(data, &payments); err != nil {
|
||||
return fmt.Errorf("failed to unmarshal payments: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
payment := database.Payment{
|
||||
Amount: amount,
|
||||
Timestamp: time.Now(),
|
||||
Invoice: invoice,
|
||||
Preimage: preimage,
|
||||
}
|
||||
|
||||
payments = append(payments, payment)
|
||||
|
||||
data, err = json.Marshal(payments)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal payments: %w", err)
|
||||
}
|
||||
|
||||
return n.SetMarker(key, data)
|
||||
}
|
||||
|
||||
// GetPaymentHistory retrieves payment history for a pubkey
|
||||
func (n *N) GetPaymentHistory(pubkey []byte) ([]database.Payment, error) {
|
||||
key := "payments_" + hex.Enc(pubkey)
|
||||
|
||||
data, err := n.GetMarker(key)
|
||||
if err != nil {
|
||||
return nil, nil // No payments = empty list
|
||||
}
|
||||
|
||||
var payments []database.Payment
|
||||
if err := json.Unmarshal(data, &payments); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal payments: %w", err)
|
||||
}
|
||||
|
||||
return payments, nil
|
||||
}
|
||||
|
||||
// ExtendBlossomSubscription extends a Blossom storage subscription
|
||||
func (n *N) ExtendBlossomSubscription(
|
||||
pubkey []byte, tier string, storageMB int64, daysExtended int,
|
||||
) error {
|
||||
key := "blossom_" + hex.Enc(pubkey)
|
||||
|
||||
// Simple implementation - just store tier and expiry
|
||||
data := map[string]interface{}{
|
||||
"tier": tier,
|
||||
"storageMB": storageMB,
|
||||
"extended": daysExtended,
|
||||
"updated": time.Now(),
|
||||
}
|
||||
|
||||
jsonData, err := json.Marshal(data)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal blossom subscription: %w", err)
|
||||
}
|
||||
|
||||
return n.SetMarker(key, jsonData)
|
||||
}
|
||||
|
||||
// GetBlossomStorageQuota retrieves the storage quota for a pubkey
|
||||
func (n *N) GetBlossomStorageQuota(pubkey []byte) (quotaMB int64, err error) {
|
||||
key := "blossom_" + hex.Enc(pubkey)
|
||||
|
||||
data, err := n.GetMarker(key)
|
||||
if err != nil {
|
||||
return 0, nil // No subscription = 0 quota
|
||||
}
|
||||
|
||||
var subData map[string]interface{}
|
||||
if err := json.Unmarshal(data, &subData); err != nil {
|
||||
return 0, fmt.Errorf("failed to unmarshal blossom data: %w", err)
|
||||
}
|
||||
|
||||
if storageMB, ok := subData["storageMB"].(float64); ok {
|
||||
return int64(storageMB), nil
|
||||
}
|
||||
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
// IsFirstTimeUser checks if this is the first time a user is accessing the relay
|
||||
func (n *N) IsFirstTimeUser(pubkey []byte) (bool, error) {
|
||||
key := "first_seen_" + hex.Enc(pubkey)
|
||||
|
||||
// If marker exists, not first time
|
||||
if n.HasMarker(key) {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
// Mark as seen
|
||||
if err := n.SetMarker(key, []byte{1}); err != nil {
|
||||
return true, err
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
15
pkg/neo4j/testmain_test.go
Normal file
15
pkg/neo4j/testmain_test.go
Normal file
@@ -0,0 +1,15 @@
|
||||
package neo4j
|
||||
|
||||
import (
|
||||
"os"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// skipIfNeo4jNotAvailable skips the test if Neo4j is not available
|
||||
func skipIfNeo4jNotAvailable(t *testing.T) {
|
||||
// Check if Neo4j connection details are provided
|
||||
uri := os.Getenv("ORLY_NEO4J_URI")
|
||||
if uri == "" {
|
||||
t.Skip("Neo4j not available (set ORLY_NEO4J_URI to enable tests)")
|
||||
}
|
||||
}
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
"lol.mleku.dev/log"
|
||||
"next.orly.dev/pkg/encoders/event"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
"next.orly.dev/pkg/utils"
|
||||
)
|
||||
|
||||
// Kinds defines whitelist and blacklist policies for event kinds.
|
||||
@@ -70,6 +71,73 @@ type Rule struct {
|
||||
MaxAgeOfEvent *int64 `json:"max_age_of_event,omitempty"`
|
||||
// MaxAgeEventInFuture is the offset in seconds that is the newest timestamp allowed for an event's created_at time ahead of the current time.
|
||||
MaxAgeEventInFuture *int64 `json:"max_age_event_in_future,omitempty"`
|
||||
|
||||
// Binary caches for faster comparison (populated from hex strings above)
|
||||
// These are not exported and not serialized to JSON
|
||||
writeAllowBin [][]byte
|
||||
writeDenyBin [][]byte
|
||||
readAllowBin [][]byte
|
||||
readDenyBin [][]byte
|
||||
}
|
||||
|
||||
// populateBinaryCache converts hex-encoded pubkey strings to binary for faster comparison.
|
||||
// This should be called after unmarshaling the policy from JSON.
|
||||
func (r *Rule) populateBinaryCache() error {
|
||||
var err error
|
||||
|
||||
// Convert WriteAllow hex strings to binary
|
||||
if len(r.WriteAllow) > 0 {
|
||||
r.writeAllowBin = make([][]byte, 0, len(r.WriteAllow))
|
||||
for _, hexPubkey := range r.WriteAllow {
|
||||
binPubkey, decErr := hex.Dec(hexPubkey)
|
||||
if decErr != nil {
|
||||
log.W.F("failed to decode WriteAllow pubkey %q: %v", hexPubkey, decErr)
|
||||
continue
|
||||
}
|
||||
r.writeAllowBin = append(r.writeAllowBin, binPubkey)
|
||||
}
|
||||
}
|
||||
|
||||
// Convert WriteDeny hex strings to binary
|
||||
if len(r.WriteDeny) > 0 {
|
||||
r.writeDenyBin = make([][]byte, 0, len(r.WriteDeny))
|
||||
for _, hexPubkey := range r.WriteDeny {
|
||||
binPubkey, decErr := hex.Dec(hexPubkey)
|
||||
if decErr != nil {
|
||||
log.W.F("failed to decode WriteDeny pubkey %q: %v", hexPubkey, decErr)
|
||||
continue
|
||||
}
|
||||
r.writeDenyBin = append(r.writeDenyBin, binPubkey)
|
||||
}
|
||||
}
|
||||
|
||||
// Convert ReadAllow hex strings to binary
|
||||
if len(r.ReadAllow) > 0 {
|
||||
r.readAllowBin = make([][]byte, 0, len(r.ReadAllow))
|
||||
for _, hexPubkey := range r.ReadAllow {
|
||||
binPubkey, decErr := hex.Dec(hexPubkey)
|
||||
if decErr != nil {
|
||||
log.W.F("failed to decode ReadAllow pubkey %q: %v", hexPubkey, decErr)
|
||||
continue
|
||||
}
|
||||
r.readAllowBin = append(r.readAllowBin, binPubkey)
|
||||
}
|
||||
}
|
||||
|
||||
// Convert ReadDeny hex strings to binary
|
||||
if len(r.ReadDeny) > 0 {
|
||||
r.readDenyBin = make([][]byte, 0, len(r.ReadDeny))
|
||||
for _, hexPubkey := range r.ReadDeny {
|
||||
binPubkey, decErr := hex.Dec(hexPubkey)
|
||||
if decErr != nil {
|
||||
log.W.F("failed to decode ReadDeny pubkey %q: %v", hexPubkey, decErr)
|
||||
continue
|
||||
}
|
||||
r.readDenyBin = append(r.readDenyBin, binPubkey)
|
||||
}
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// PolicyEvent represents an event with additional context for policy scripts.
|
||||
@@ -191,6 +259,15 @@ func New(policyJSON []byte) (p *P, err error) {
|
||||
if p.DefaultPolicy == "" {
|
||||
p.DefaultPolicy = "allow"
|
||||
}
|
||||
|
||||
// Populate binary caches for all rules (including global rule)
|
||||
p.Global.populateBinaryCache()
|
||||
for kind := range p.Rules {
|
||||
rule := p.Rules[kind] // Get a copy
|
||||
rule.populateBinaryCache()
|
||||
p.Rules[kind] = rule // Store the modified copy back
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
@@ -457,9 +534,9 @@ func (sr *ScriptRunner) Start() error {
|
||||
// Stop stops the script gracefully.
|
||||
func (sr *ScriptRunner) Stop() error {
|
||||
sr.mutex.Lock()
|
||||
defer sr.mutex.Unlock()
|
||||
|
||||
if !sr.isRunning || sr.currentCmd == nil {
|
||||
sr.mutex.Unlock()
|
||||
return fmt.Errorf("script is not running")
|
||||
}
|
||||
|
||||
@@ -473,45 +550,49 @@ func (sr *ScriptRunner) Stop() error {
|
||||
sr.currentCancel()
|
||||
}
|
||||
|
||||
// Wait for graceful shutdown with timeout
|
||||
done := make(chan error, 1)
|
||||
go func() {
|
||||
done <- sr.currentCmd.Wait()
|
||||
}()
|
||||
// Get the process reference before releasing the lock
|
||||
process := sr.currentCmd.Process
|
||||
sr.mutex.Unlock()
|
||||
|
||||
select {
|
||||
case <-done:
|
||||
// Process exited gracefully
|
||||
log.I.F("policy script stopped: %s", sr.scriptPath)
|
||||
case <-time.After(5 * time.Second):
|
||||
// Force kill after 5 seconds
|
||||
// Wait for graceful shutdown with timeout
|
||||
// Note: monitorProcess() is the one that calls cmd.Wait() and cleans up
|
||||
// We just wait for it to finish by polling isRunning
|
||||
gracefulShutdown := false
|
||||
for i := 0; i < 50; i++ { // 5 seconds total (50 * 100ms)
|
||||
time.Sleep(100 * time.Millisecond)
|
||||
sr.mutex.RLock()
|
||||
running := sr.isRunning
|
||||
sr.mutex.RUnlock()
|
||||
if !running {
|
||||
gracefulShutdown = true
|
||||
log.I.F("policy script stopped gracefully: %s", sr.scriptPath)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !gracefulShutdown {
|
||||
// Force kill after timeout
|
||||
log.W.F(
|
||||
"policy script did not stop gracefully, sending SIGKILL: %s",
|
||||
sr.scriptPath,
|
||||
)
|
||||
if err := sr.currentCmd.Process.Kill(); chk.E(err) {
|
||||
log.E.F("failed to kill script process: %v", err)
|
||||
if process != nil {
|
||||
if err := process.Kill(); chk.E(err) {
|
||||
log.E.F("failed to kill script process: %v", err)
|
||||
}
|
||||
}
|
||||
<-done // Wait for the kill to complete
|
||||
}
|
||||
|
||||
// Clean up pipes
|
||||
if sr.stdin != nil {
|
||||
sr.stdin.Close()
|
||||
sr.stdin = nil
|
||||
// Wait a bit more for monitorProcess to clean up
|
||||
for i := 0; i < 30; i++ { // 3 more seconds
|
||||
time.Sleep(100 * time.Millisecond)
|
||||
sr.mutex.RLock()
|
||||
running := sr.isRunning
|
||||
sr.mutex.RUnlock()
|
||||
if !running {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if sr.stdout != nil {
|
||||
sr.stdout.Close()
|
||||
sr.stdout = nil
|
||||
}
|
||||
if sr.stderr != nil {
|
||||
sr.stderr.Close()
|
||||
sr.stderr = nil
|
||||
}
|
||||
|
||||
sr.isRunning = false
|
||||
sr.currentCmd = nil
|
||||
sr.currentCancel = nil
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -747,6 +828,13 @@ func (p *P) LoadFromFile(configPath string) error {
|
||||
return fmt.Errorf("failed to parse policy configuration JSON: %v", err)
|
||||
}
|
||||
|
||||
// Populate binary caches for all rules (including global rule)
|
||||
p.Global.populateBinaryCache()
|
||||
for kind, rule := range p.Rules {
|
||||
rule.populateBinaryCache()
|
||||
p.Rules[kind] = rule // Update the map with the modified rule
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -863,12 +951,24 @@ func (p *P) checkGlobalRulePolicy(
|
||||
func (p *P) checkRulePolicy(
|
||||
access string, ev *event.E, rule Rule, loggedInPubkey []byte,
|
||||
) (allowed bool, err error) {
|
||||
pubkeyHex := hex.Enc(ev.Pubkey)
|
||||
|
||||
// Check pubkey-based access control
|
||||
if access == "write" {
|
||||
// Check write allow/deny lists
|
||||
if len(rule.WriteAllow) > 0 {
|
||||
// Prefer binary cache for performance (3x faster than hex)
|
||||
// Fall back to hex comparison if cache not populated (for backwards compatibility with tests)
|
||||
if len(rule.writeAllowBin) > 0 {
|
||||
allowed = false
|
||||
for _, allowedPubkey := range rule.writeAllowBin {
|
||||
if utils.FastEqual(ev.Pubkey, allowedPubkey) {
|
||||
allowed = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !allowed {
|
||||
return false, nil
|
||||
}
|
||||
} else if len(rule.WriteAllow) > 0 {
|
||||
// Fallback: binary cache not populated, use hex comparison
|
||||
pubkeyHex := hex.Enc(ev.Pubkey)
|
||||
allowed = false
|
||||
for _, allowedPubkey := range rule.WriteAllow {
|
||||
if pubkeyHex == allowedPubkey {
|
||||
@@ -879,7 +979,17 @@ func (p *P) checkRulePolicy(
|
||||
if !allowed {
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
|
||||
if len(rule.writeDenyBin) > 0 {
|
||||
for _, deniedPubkey := range rule.writeDenyBin {
|
||||
if utils.FastEqual(ev.Pubkey, deniedPubkey) {
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
} else if len(rule.WriteDeny) > 0 {
|
||||
// Fallback: binary cache not populated, use hex comparison
|
||||
pubkeyHex := hex.Enc(ev.Pubkey)
|
||||
for _, deniedPubkey := range rule.WriteDeny {
|
||||
if pubkeyHex == deniedPubkey {
|
||||
return false, nil
|
||||
@@ -887,11 +997,14 @@ func (p *P) checkRulePolicy(
|
||||
}
|
||||
}
|
||||
} else if access == "read" {
|
||||
// Check read allow/deny lists
|
||||
if len(rule.ReadAllow) > 0 {
|
||||
// For read access, check the logged-in user's pubkey (who is trying to READ),
|
||||
// not the event author's pubkey
|
||||
// Prefer binary cache for performance (3x faster than hex)
|
||||
// Fall back to hex comparison if cache not populated (for backwards compatibility with tests)
|
||||
if len(rule.readAllowBin) > 0 {
|
||||
allowed = false
|
||||
for _, allowedPubkey := range rule.ReadAllow {
|
||||
if pubkeyHex == allowedPubkey {
|
||||
for _, allowedPubkey := range rule.readAllowBin {
|
||||
if utils.FastEqual(loggedInPubkey, allowedPubkey) {
|
||||
allowed = true
|
||||
break
|
||||
}
|
||||
@@ -899,9 +1012,32 @@ func (p *P) checkRulePolicy(
|
||||
if !allowed {
|
||||
return false, nil
|
||||
}
|
||||
} else if len(rule.ReadAllow) > 0 {
|
||||
// Fallback: binary cache not populated, use hex comparison
|
||||
loggedInPubkeyHex := hex.Enc(loggedInPubkey)
|
||||
allowed = false
|
||||
for _, allowedPubkey := range rule.ReadAllow {
|
||||
if loggedInPubkeyHex == allowedPubkey {
|
||||
allowed = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !allowed {
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
|
||||
if len(rule.readDenyBin) > 0 {
|
||||
for _, deniedPubkey := range rule.readDenyBin {
|
||||
if utils.FastEqual(loggedInPubkey, deniedPubkey) {
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
} else if len(rule.ReadDeny) > 0 {
|
||||
// Fallback: binary cache not populated, use hex comparison
|
||||
loggedInPubkeyHex := hex.Enc(loggedInPubkey)
|
||||
for _, deniedPubkey := range rule.ReadDeny {
|
||||
if pubkeyHex == deniedPubkey {
|
||||
if loggedInPubkeyHex == deniedPubkey {
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
|
||||
505
pkg/policy/read_access_test.go
Normal file
505
pkg/policy/read_access_test.go
Normal file
@@ -0,0 +1,505 @@
|
||||
package policy
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
"next.orly.dev/pkg/interfaces/signer/p8k"
|
||||
)
|
||||
|
||||
// TestReadAllowLogic tests the correct semantics of ReadAllow:
|
||||
// ReadAllow should control WHO can read events of a kind,
|
||||
// not which event authors can be read.
|
||||
func TestReadAllowLogic(t *testing.T) {
|
||||
// Set up: Create 3 different users
|
||||
// - alice: will author an event
|
||||
// - bob: will be allowed to read (in ReadAllow list)
|
||||
// - charlie: will NOT be allowed to read (not in ReadAllow list)
|
||||
|
||||
aliceSigner, alicePubkey := generateTestKeypair(t)
|
||||
_, bobPubkey := generateTestKeypair(t)
|
||||
_, charliePubkey := generateTestKeypair(t)
|
||||
|
||||
// Create an event authored by Alice (kind 30166)
|
||||
aliceEvent := createTestEvent(t, aliceSigner, "server heartbeat", 30166)
|
||||
|
||||
// Create policy: Only Bob can READ kind 30166 events
|
||||
policy := &P{
|
||||
DefaultPolicy: "allow",
|
||||
Rules: map[int]Rule{
|
||||
30166: {
|
||||
Description: "Private server heartbeat events",
|
||||
ReadAllow: []string{hex.Enc(bobPubkey)}, // Only Bob can read
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// Test 1: Bob (who is in ReadAllow) should be able to READ Alice's event
|
||||
t.Run("allowed_reader_can_read", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("read", aliceEvent, bobPubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if !allowed {
|
||||
t.Error("Bob should be allowed to READ Alice's event (Bob is in ReadAllow list)")
|
||||
}
|
||||
})
|
||||
|
||||
// Test 2: Charlie (who is NOT in ReadAllow) should NOT be able to READ Alice's event
|
||||
t.Run("disallowed_reader_cannot_read", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("read", aliceEvent, charliePubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Charlie should NOT be allowed to READ Alice's event (Charlie is not in ReadAllow list)")
|
||||
}
|
||||
})
|
||||
|
||||
// Test 3: Alice (the author) should NOT be able to READ her own event if she's not in ReadAllow
|
||||
t.Run("author_not_in_readallow_cannot_read", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("read", aliceEvent, alicePubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Alice should NOT be allowed to READ her own event (Alice is not in ReadAllow list)")
|
||||
}
|
||||
})
|
||||
|
||||
// Test 4: Unauthenticated user should NOT be able to READ
|
||||
t.Run("unauthenticated_cannot_read", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("read", aliceEvent, nil, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Unauthenticated user should NOT be allowed to READ (not in ReadAllow list)")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// TestReadDenyLogic tests the correct semantics of ReadDeny:
|
||||
// ReadDeny should control WHO cannot read events of a kind,
|
||||
// not which event authors cannot be read.
|
||||
func TestReadDenyLogic(t *testing.T) {
|
||||
// Set up: Create 3 different users
|
||||
aliceSigner, alicePubkey := generateTestKeypair(t)
|
||||
_, bobPubkey := generateTestKeypair(t)
|
||||
_, charliePubkey := generateTestKeypair(t)
|
||||
|
||||
// Create an event authored by Alice
|
||||
aliceEvent := createTestEvent(t, aliceSigner, "test content", 1)
|
||||
|
||||
// Create policy: Charlie cannot READ kind 1 events (but others can)
|
||||
policy := &P{
|
||||
DefaultPolicy: "allow",
|
||||
Rules: map[int]Rule{
|
||||
1: {
|
||||
Description: "Test events",
|
||||
ReadDeny: []string{hex.Enc(charliePubkey)}, // Charlie cannot read
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// Test 1: Bob (who is NOT in ReadDeny) should be able to READ Alice's event
|
||||
t.Run("non_denied_reader_can_read", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("read", aliceEvent, bobPubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if !allowed {
|
||||
t.Error("Bob should be allowed to READ Alice's event (Bob is not in ReadDeny list)")
|
||||
}
|
||||
})
|
||||
|
||||
// Test 2: Charlie (who IS in ReadDeny) should NOT be able to READ Alice's event
|
||||
t.Run("denied_reader_cannot_read", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("read", aliceEvent, charliePubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Charlie should NOT be allowed to READ Alice's event (Charlie is in ReadDeny list)")
|
||||
}
|
||||
})
|
||||
|
||||
// Test 3: Alice (the author, not in ReadDeny) should be able to READ her own event
|
||||
t.Run("author_not_denied_can_read", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("read", aliceEvent, alicePubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if !allowed {
|
||||
t.Error("Alice should be allowed to READ her own event (Alice is not in ReadDeny list)")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// TestSamplePolicyFromUser tests the exact policy configuration provided by the user
|
||||
func TestSamplePolicyFromUser(t *testing.T) {
|
||||
policyJSON := []byte(`{
|
||||
"kind": {
|
||||
"whitelist": [4678, 10306, 30520, 30919, 30166]
|
||||
},
|
||||
"rules": {
|
||||
"4678": {
|
||||
"description": "Zenotp message events",
|
||||
"write_allow": [
|
||||
"04eeb1ed409c0b9205e722f8bf1780f553b61876ef323aff16c9f80a9d8ee9f5",
|
||||
"e4101949fb0367c72f5105fc9bd810cde0e0e0f950da26c1f47a6af5f77ded31",
|
||||
"3f5fefcdc3fb41f3b299732acad7dc9c3649e8bde97d4f238380dde547b5e0e0"
|
||||
],
|
||||
"privileged": true
|
||||
},
|
||||
"10306": {
|
||||
"description": "End user whitelist change requests",
|
||||
"read_allow": [
|
||||
"04eeb1ed409c0b9205e722f8bf1780f553b61876ef323aff16c9f80a9d8ee9f5"
|
||||
],
|
||||
"privileged": true
|
||||
},
|
||||
"30520": {
|
||||
"description": "End user whitelist events",
|
||||
"write_allow": [
|
||||
"04eeb1ed409c0b9205e722f8bf1780f553b61876ef323aff16c9f80a9d8ee9f5"
|
||||
],
|
||||
"privileged": true
|
||||
},
|
||||
"30919": {
|
||||
"description": "Customer indexing events",
|
||||
"write_allow": [
|
||||
"04eeb1ed409c0b9205e722f8bf1780f553b61876ef323aff16c9f80a9d8ee9f5"
|
||||
],
|
||||
"privileged": true
|
||||
},
|
||||
"30166": {
|
||||
"description": "Private server heartbeat events",
|
||||
"write_allow": [
|
||||
"4d13154d82477a2d2e07a5c0d52def9035fdf379ae87cd6f0a5fb87801a4e5e4",
|
||||
"e400106ed10310ea28b039e81824265434bf86ece58722655c7a98f894406112"
|
||||
],
|
||||
"read_allow": [
|
||||
"04eeb1ed409c0b9205e722f8bf1780f553b61876ef323aff16c9f80a9d8ee9f5",
|
||||
"4d13154d82477a2d2e07a5c0d52def9035fdf379ae87cd6f0a5fb87801a4e5e4",
|
||||
"e400106ed10310ea28b039e81824265434bf86ece58722655c7a98f894406112"
|
||||
]
|
||||
}
|
||||
}
|
||||
}`)
|
||||
|
||||
policy, err := New(policyJSON)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create policy: %v", err)
|
||||
}
|
||||
|
||||
// Define the test users
|
||||
adminPubkeyHex := "04eeb1ed409c0b9205e722f8bf1780f553b61876ef323aff16c9f80a9d8ee9f5"
|
||||
server1PubkeyHex := "4d13154d82477a2d2e07a5c0d52def9035fdf379ae87cd6f0a5fb87801a4e5e4"
|
||||
server2PubkeyHex := "e400106ed10310ea28b039e81824265434bf86ece58722655c7a98f894406112"
|
||||
|
||||
adminPubkey, _ := hex.Dec(adminPubkeyHex)
|
||||
server1Pubkey, _ := hex.Dec(server1PubkeyHex)
|
||||
server2Pubkey, _ := hex.Dec(server2PubkeyHex)
|
||||
|
||||
// Create a random user not in any allow list
|
||||
randomSigner, randomPubkey := generateTestKeypair(t)
|
||||
|
||||
// Test Kind 30166 (Private server heartbeat events)
|
||||
t.Run("kind_30166_read_access", func(t *testing.T) {
|
||||
// We can't sign with the exact pubkey without the private key,
|
||||
// so we'll create a generic event and manually set the pubkey for testing
|
||||
heartbeatEvent := createTestEvent(t, randomSigner, "heartbeat data", 30166)
|
||||
heartbeatEvent.Pubkey = server1Pubkey // Set to server1's pubkey
|
||||
|
||||
// Test 1: Admin (in read_allow) should be able to READ the heartbeat
|
||||
allowed, err := policy.CheckPolicy("read", heartbeatEvent, adminPubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if !allowed {
|
||||
t.Error("Admin should be allowed to READ kind 30166 events (admin is in read_allow list)")
|
||||
}
|
||||
|
||||
// Test 2: Server1 (in read_allow) should be able to READ the heartbeat
|
||||
allowed, err = policy.CheckPolicy("read", heartbeatEvent, server1Pubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if !allowed {
|
||||
t.Error("Server1 should be allowed to READ kind 30166 events (server1 is in read_allow list)")
|
||||
}
|
||||
|
||||
// Test 3: Server2 (in read_allow) should be able to READ the heartbeat
|
||||
allowed, err = policy.CheckPolicy("read", heartbeatEvent, server2Pubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if !allowed {
|
||||
t.Error("Server2 should be allowed to READ kind 30166 events (server2 is in read_allow list)")
|
||||
}
|
||||
|
||||
// Test 4: Random user (NOT in read_allow) should NOT be able to READ the heartbeat
|
||||
allowed, err = policy.CheckPolicy("read", heartbeatEvent, randomPubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Random user should NOT be allowed to READ kind 30166 events (not in read_allow list)")
|
||||
}
|
||||
|
||||
// Test 5: Unauthenticated user should NOT be able to READ (privileged + read_allow)
|
||||
allowed, err = policy.CheckPolicy("read", heartbeatEvent, nil, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Unauthenticated user should NOT be allowed to READ kind 30166 events (privileged)")
|
||||
}
|
||||
})
|
||||
|
||||
// Test Kind 10306 (End user whitelist change requests)
|
||||
t.Run("kind_10306_read_access", func(t *testing.T) {
|
||||
// Create an event authored by a random user
|
||||
requestEvent := createTestEvent(t, randomSigner, "whitelist change request", 10306)
|
||||
// Add admin to p tag to satisfy privileged requirement
|
||||
addPTag(requestEvent, adminPubkey)
|
||||
|
||||
// Test 1: Admin (in read_allow) should be able to READ the request
|
||||
allowed, err := policy.CheckPolicy("read", requestEvent, adminPubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if !allowed {
|
||||
t.Error("Admin should be allowed to READ kind 10306 events (admin is in read_allow list)")
|
||||
}
|
||||
|
||||
// Test 2: Server1 (NOT in read_allow for kind 10306) should NOT be able to READ
|
||||
// Even though server1 might be allowed for kind 30166
|
||||
allowed, err = policy.CheckPolicy("read", requestEvent, server1Pubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Server1 should NOT be allowed to READ kind 10306 events (not in read_allow list for this kind)")
|
||||
}
|
||||
|
||||
// Test 3: Random user should NOT be able to READ
|
||||
allowed, err = policy.CheckPolicy("read", requestEvent, randomPubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Random user should NOT be allowed to READ kind 10306 events (not in read_allow list)")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// TestReadAllowWithPrivileged tests interaction between read_allow and privileged
|
||||
func TestReadAllowWithPrivileged(t *testing.T) {
|
||||
aliceSigner, alicePubkey := generateTestKeypair(t)
|
||||
_, bobPubkey := generateTestKeypair(t)
|
||||
_, charliePubkey := generateTestKeypair(t)
|
||||
|
||||
// Create policy: Kind 100 is privileged AND has read_allow
|
||||
policy := &P{
|
||||
DefaultPolicy: "allow",
|
||||
Rules: map[int]Rule{
|
||||
100: {
|
||||
Description: "Privileged with read_allow",
|
||||
Privileged: true,
|
||||
ReadAllow: []string{hex.Enc(bobPubkey)}, // Only Bob can read
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// Create event authored by Alice, with Bob in p tag
|
||||
ev := createTestEvent(t, aliceSigner, "secret message", 100)
|
||||
addPTag(ev, bobPubkey)
|
||||
|
||||
// Test 1: Bob (in ReadAllow AND in p tag) should be able to READ
|
||||
t.Run("bob_in_readallow_and_ptag", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("read", ev, bobPubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if !allowed {
|
||||
t.Error("Bob should be allowed to READ (in ReadAllow AND satisfies privileged)")
|
||||
}
|
||||
})
|
||||
|
||||
// Test 2: Alice (author, but NOT in ReadAllow) should NOT be able to READ
|
||||
// Even though she's the author (privileged check would pass), ReadAllow takes precedence
|
||||
t.Run("alice_author_but_not_in_readallow", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("read", ev, alicePubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Alice should NOT be allowed to READ (not in ReadAllow list, even though she's the author)")
|
||||
}
|
||||
})
|
||||
|
||||
// Test 3: Charlie (NOT in ReadAllow, NOT in p tag) should NOT be able to READ
|
||||
t.Run("charlie_not_authorized", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("read", ev, charliePubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Charlie should NOT be allowed to READ (not in ReadAllow)")
|
||||
}
|
||||
})
|
||||
|
||||
// Test 4: Create event with Charlie in p tag but Charlie not in ReadAllow
|
||||
evWithCharlie := createTestEvent(t, aliceSigner, "message for charlie", 100)
|
||||
addPTag(evWithCharlie, charliePubkey)
|
||||
|
||||
t.Run("charlie_in_ptag_but_not_readallow", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("read", evWithCharlie, charliePubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Charlie should NOT be allowed to READ (privileged check passes but not in ReadAllow)")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// TestReadAllowWriteAllowIndependent verifies that read_allow and write_allow are independent
|
||||
func TestReadAllowWriteAllowIndependent(t *testing.T) {
|
||||
aliceSigner, alicePubkey := generateTestKeypair(t)
|
||||
bobSigner, bobPubkey := generateTestKeypair(t)
|
||||
_, charliePubkey := generateTestKeypair(t)
|
||||
|
||||
// Create policy:
|
||||
// - Alice can WRITE
|
||||
// - Bob can READ
|
||||
// - Charlie can do neither
|
||||
policy := &P{
|
||||
DefaultPolicy: "allow",
|
||||
Rules: map[int]Rule{
|
||||
200: {
|
||||
Description: "Write/Read separation test",
|
||||
WriteAllow: []string{hex.Enc(alicePubkey)}, // Only Alice can write
|
||||
ReadAllow: []string{hex.Enc(bobPubkey)}, // Only Bob can read
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// Alice creates an event
|
||||
aliceEvent := createTestEvent(t, aliceSigner, "alice's message", 200)
|
||||
|
||||
// Test 1: Alice can WRITE her own event
|
||||
t.Run("alice_can_write", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("write", aliceEvent, alicePubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if !allowed {
|
||||
t.Error("Alice should be allowed to WRITE (in WriteAllow)")
|
||||
}
|
||||
})
|
||||
|
||||
// Test 2: Alice CANNOT READ her own event (not in ReadAllow)
|
||||
t.Run("alice_cannot_read", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("read", aliceEvent, alicePubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Alice should NOT be allowed to READ (not in ReadAllow, even though she wrote it)")
|
||||
}
|
||||
})
|
||||
|
||||
// Bob creates an event (will be denied on write)
|
||||
bobEvent := createTestEvent(t, bobSigner, "bob's message", 200)
|
||||
|
||||
// Test 3: Bob CANNOT WRITE (not in WriteAllow)
|
||||
t.Run("bob_cannot_write", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("write", bobEvent, bobPubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Bob should NOT be allowed to WRITE (not in WriteAllow)")
|
||||
}
|
||||
})
|
||||
|
||||
// Test 4: Bob CAN READ Alice's event (in ReadAllow)
|
||||
t.Run("bob_can_read", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("read", aliceEvent, bobPubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if !allowed {
|
||||
t.Error("Bob should be allowed to READ Alice's event (in ReadAllow)")
|
||||
}
|
||||
})
|
||||
|
||||
// Test 5: Charlie cannot write or read
|
||||
t.Run("charlie_cannot_write_or_read", func(t *testing.T) {
|
||||
// Create an event authored by Charlie
|
||||
charlieSigner := p8k.MustNew()
|
||||
charlieSigner.Generate()
|
||||
charlieEvent := createTestEvent(t, charlieSigner, "charlie's message", 200)
|
||||
charlieEvent.Pubkey = charliePubkey // Set to Charlie's pubkey
|
||||
|
||||
// Charlie's event should be denied for write (Charlie not in WriteAllow)
|
||||
allowed, err := policy.CheckPolicy("write", charlieEvent, charliePubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Charlie should NOT be allowed to WRITE events of kind 200 (not in WriteAllow)")
|
||||
}
|
||||
|
||||
// Charlie should not be able to READ Alice's event (not in ReadAllow)
|
||||
allowed, err = policy.CheckPolicy("read", aliceEvent, charliePubkey, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Charlie should NOT be allowed to READ (not in ReadAllow)")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// TestReadAccessEdgeCases tests edge cases like nil pubkeys
|
||||
func TestReadAccessEdgeCases(t *testing.T) {
|
||||
aliceSigner, _ := generateTestKeypair(t)
|
||||
|
||||
policy := &P{
|
||||
DefaultPolicy: "allow",
|
||||
Rules: map[int]Rule{
|
||||
300: {
|
||||
Description: "Test edge cases",
|
||||
ReadAllow: []string{"somepubkey"}, // Non-empty ReadAllow
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
event := createTestEvent(t, aliceSigner, "test", 300)
|
||||
|
||||
// Test 1: Nil loggedInPubkey with ReadAllow should be denied
|
||||
t.Run("nil_pubkey_with_readallow", func(t *testing.T) {
|
||||
allowed, err := policy.CheckPolicy("read", event, nil, "127.0.0.1")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
if allowed {
|
||||
t.Error("Nil pubkey should NOT be allowed when ReadAllow is set")
|
||||
}
|
||||
})
|
||||
|
||||
// Test 2: Verify hex.Enc(nil) doesn't accidentally match anything
|
||||
t.Run("hex_enc_nil_no_match", func(t *testing.T) {
|
||||
emptyStringHex := hex.Enc(nil)
|
||||
t.Logf("hex.Enc(nil) = %q (len=%d)", emptyStringHex, len(emptyStringHex))
|
||||
|
||||
// Verify it's empty string
|
||||
if emptyStringHex != "" {
|
||||
t.Errorf("Expected hex.Enc(nil) to be empty string, got %q", emptyStringHex)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1 +1 @@
|
||||
v0.29.2
|
||||
v0.29.8
|
||||
Reference in New Issue
Block a user