Fix web UI not showing cached events and add Blossom toggle (v0.48.13)

- Fix fetchEvents() discarding IndexedDB cached events instead of merging with relay results
- Add mergeAndDeduplicateEvents() helper to combine and dedupe events by ID
- Add ORLY_BLOSSOM_ENABLED config option to disable Blossom server
- Make fetch-kinds.js fall back to existing eventKinds.js when network unavailable

Files modified:
- app/web/src/nostr.js: Fix event caching, add merge helper
- app/web/scripts/fetch-kinds.js: Add fallback for network failures
- app/config/config.go: Add BlossomEnabled config field
- app/main.go: Check BlossomEnabled before initializing Blossom server
- pkg/version/version: Bump to v0.48.13

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
Author: woikos
Date: 2026-01-11 04:55:55 +01:00
Parent: be72b694eb
Commit: 604d759a6a
7 changed files with 81 additions and 36 deletions

app/config/config.go

```diff
@@ -72,7 +72,8 @@ type C struct {
 	FollowsThrottlePerEvent time.Duration `env:"ORLY_FOLLOWS_THROTTLE_INCREMENT" default:"200ms" usage:"delay added per event for non-followed users"`
 	FollowsThrottleMaxDelay time.Duration `env:"ORLY_FOLLOWS_THROTTLE_MAX" default:"60s" usage:"maximum throttle delay cap"`
-	// Blossom blob storage service level settings
+	// Blossom blob storage service settings
+	BlossomEnabled       bool   `env:"ORLY_BLOSSOM_ENABLED" default:"true" usage:"enable Blossom blob storage server (only works with Badger backend)"`
 	BlossomServiceLevels string `env:"ORLY_BLOSSOM_SERVICE_LEVELS" usage:"comma-separated list of service levels in format: name:storage_mb_per_sat_per_month (e.g., basic:1,premium:10)"`
 	// Web UI and dev mode settings
```

app/main.go

```diff
@@ -435,7 +435,7 @@ func Run(
 	// Initialize Blossom blob storage server (only for Badger backend)
 	// MUST be done before UserInterface() which registers routes
-	if badgerDB, ok := db.(*database.D); ok {
+	if badgerDB, ok := db.(*database.D); ok && cfg.BlossomEnabled {
 		log.I.F("Badger backend detected, initializing Blossom server...")
 		if l.blossomServer, err = initializeBlossomServer(ctx, cfg, badgerDB); err != nil {
 			log.E.F("failed to initialize blossom server: %v", err)
@@ -445,6 +445,8 @@ func Run(
 		} else {
 			log.W.F("blossom server initialization returned nil without error")
 		}
+	} else if !cfg.BlossomEnabled {
+		log.I.F("Blossom server disabled via ORLY_BLOSSOM_ENABLED=false")
 	} else {
 		log.I.F("Non-Badger backend detected (type: %T), Blossom server not available", db)
 	}
```

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

app/web/scripts/fetch-kinds.js

```diff
@@ -6,25 +6,35 @@
 import { fileURLToPath } from 'url';
 import { dirname, join } from 'path';
-import { writeFileSync } from 'fs';
+import { writeFileSync, existsSync } from 'fs';
 
 const __filename = fileURLToPath(import.meta.url);
 const __dirname = dirname(__filename);
 
 const KINDS_URL = 'https://git.mleku.dev/mleku/nostr/raw/branch/main/encoders/kind/kinds.json';
+const OUTPUT_PATH = join(__dirname, '..', 'src', 'eventKinds.js');
 
 async function fetchKinds() {
   console.log(`Fetching kinds from ${KINDS_URL}...`);
-  const response = await fetch(KINDS_URL);
-  if (!response.ok) {
-    throw new Error(`Failed to fetch kinds.json: ${response.status} ${response.statusText}`);
+  try {
+    const response = await fetch(KINDS_URL, { timeout: 10000 });
+    if (!response.ok) {
+      throw new Error(`HTTP ${response.status} ${response.statusText}`);
+    }
+    const data = await response.json();
+    console.log(`Fetched ${Object.keys(data.kinds).length} kinds (version: ${data.version})`);
+    return data;
+  } catch (error) {
+    // Check if we have an existing eventKinds.js we can use
+    if (existsSync(OUTPUT_PATH)) {
+      console.warn(`Warning: Could not fetch kinds.json (${error.message})`);
+      console.log(`Using existing ${OUTPUT_PATH}`);
+      return null; // Signal to skip generation
+    }
+    throw new Error(`Failed to fetch kinds.json and no existing file: ${error.message}`);
   }
-  const data = await response.json();
-  console.log(`Fetched ${Object.keys(data.kinds).length} kinds (version: ${data.version})`);
-  return data;
 }
 
 function generateEventKinds(data) {
```
```diff
@@ -202,14 +212,18 @@ export const kindCategories = [
 async function main() {
   try {
     const data = await fetchKinds();
+    // If fetchKinds returned null, we're using the existing file
+    if (data === null) {
+      console.log('Skipping generation, using existing eventKinds.js');
+      return;
+    }
     const kinds = generateEventKinds(data);
     const js = generateJS(kinds, data);
 
-    // Write to src/eventKinds.js
-    const outPath = join(__dirname, '..', 'src', 'eventKinds.js');
-    writeFileSync(outPath, js);
-    console.log(`Generated ${outPath} with ${kinds.length} kinds`);
+    writeFileSync(OUTPUT_PATH, js);
+    console.log(`Generated ${OUTPUT_PATH} with ${kinds.length} kinds`);
   } catch (error) {
     console.error('Error:', error.message);
     process.exit(1);
```
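One caveat in the new fetch call above: the WHATWG fetch() implemented by Node and browsers has no `timeout` option, so `{ timeout: 10000 }` is silently ignored and the request can hang until platform defaults kick in. A sketch of an actual 10-second timeout using the standard AbortSignal.timeout() (Node 17.3+ and modern browsers):

```js
// Sketch only: an abortable replacement for the fetch call above.
// On timeout, fetch rejects with a TimeoutError DOMException, which the
// surrounding try/catch already routes to the existing-file fallback.
const response = await fetch(KINDS_URL, { signal: AbortSignal.timeout(10000) });
```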

app/web/src/nostr.js

```diff
@@ -179,6 +179,28 @@ export class Nip07Signer {
   }
 }
 
+// Merge two event arrays, deduplicating by event id
+// Newer events (by created_at) take precedence for same id
+function mergeAndDeduplicateEvents(cached, relay) {
+  const eventMap = new Map();
+
+  // Add cached events first
+  for (const event of cached) {
+    eventMap.set(event.id, event);
+  }
+
+  // Add/update with relay events (they may be newer)
+  for (const event of relay) {
+    const existing = eventMap.get(event.id);
+    if (!existing || event.created_at >= existing.created_at) {
+      eventMap.set(event.id, event);
+    }
+  }
+
+  // Return sorted by created_at descending (newest first)
+  return Array.from(eventMap.values()).sort((a, b) => b.created_at - a.created_at);
+}
+
 // IndexedDB helpers for unified event storage
 // This provides a local cache that all components can access
 const DB_NAME = "nostrCache";
```
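For reference, a usage sketch of the new helper with hypothetical minimal event objects (real Nostr events carry the full signed-event fields, and two events sharing an id are identical, so the created_at tie-break simply prefers the relay copy):

```js
const cached = [
  { id: "a", created_at: 100 }, // also known to the relay
  { id: "b", created_at: 90 },  // present only in the IndexedDB cache
];
const relay = [
  { id: "a", created_at: 100 }, // duplicate of the cached copy
  { id: "c", created_at: 120 }, // newly seen on the relay
];

const merged = mergeAndDeduplicateEvents(cached, relay);
// => ids in order: "c" (120), "a" (100), "b" (90)
//    deduplicated by id, sorted newest-first by created_at
```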
```diff
@@ -573,9 +595,10 @@ export async function fetchEvents(filters, options = {}) {
   } = options;
 
   // Try to get cached events first if requested
+  let cachedEvents = [];
   if (useCache) {
     try {
-      const cachedEvents = await queryEventsFromDB(filters);
+      cachedEvents = await queryEventsFromDB(filters);
       if (cachedEvents.length > 0) {
         console.log(`Found ${cachedEvents.length} cached events in IndexedDB`);
       }
```
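The hoisted `let cachedEvents` is the heart of the bug fix: the old `const cachedEvents` was declared inside the `try` block, so it went out of scope before the Promise callbacks further down could merge it with relay results. A minimal standalone illustration of the scoping difference (hypothetical function names):

```js
function before() {
  if (true) {
    const cached = ["a"]; // block-scoped: gone at the closing brace
  }
  // console.log(cached); // ReferenceError: cached is not defined
}

function after() {
  let cached = [];   // declared at function scope
  if (true) {
    cached = ["a"];  // plain assignment, still visible below
  }
  console.log(cached); // ["a"]
}
```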
```diff
@@ -585,17 +608,19 @@ export async function fetchEvents(filters, options = {}) {
   }
 
   return new Promise((resolve, reject) => {
-    const events = [];
+    const relayEvents = [];
 
     const timeoutId = setTimeout(() => {
-      console.log(`Timeout reached after ${timeout}ms, returning ${events.length} events`);
+      console.log(`Timeout reached after ${timeout}ms, returning ${relayEvents.length} relay events`);
       sub.close();
       // Store all received events in IndexedDB before resolving
-      if (events.length > 0) {
-        putEvents(events).catch(e => console.warn("Failed to cache events", e));
+      if (relayEvents.length > 0) {
+        putEvents(relayEvents).catch(e => console.warn("Failed to cache events", e));
       }
-      resolve(events);
+      // Merge cached events with relay events, deduplicate by id
+      const mergedEvents = mergeAndDeduplicateEvents(cachedEvents, relayEvents);
+      resolve(mergedEvents);
     }, timeout);
 
     try {
```
```diff
@@ -615,22 +640,25 @@ export async function fetchEvents(filters, options = {}) {
           created_at: event.created_at,
           content_preview: event.content?.substring(0, 50)
         });
-        events.push(event);
+        relayEvents.push(event);
         // Store event immediately in IndexedDB
         putEvent(event).catch(e => console.warn("Failed to cache event", e));
       },
       oneose() {
-        console.log(`✅ EOSE received for REQ [${subId}], got ${events.length} events`);
+        console.log(`✅ EOSE received for REQ [${subId}], got ${relayEvents.length} relay events`);
         clearTimeout(timeoutId);
         sub.close();
         // Store all events in IndexedDB before resolving
-        if (events.length > 0) {
-          putEvents(events).catch(e => console.warn("Failed to cache events", e));
+        if (relayEvents.length > 0) {
+          putEvents(relayEvents).catch(e => console.warn("Failed to cache events", e));
         }
-        resolve(events);
+        // Merge cached events with relay events, deduplicate by id
+        const mergedEvents = mergeAndDeduplicateEvents(cachedEvents, relayEvents);
+        console.log(`Merged ${cachedEvents.length} cached + ${relayEvents.length} relay = ${mergedEvents.length} total events`);
+        resolve(mergedEvents);
       }
     }
   );
```
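Taken together, these hunks make fetchEvents() merge cached IndexedDB results with whatever the relay returns before EOSE or the timeout, instead of discarding them. A hedged usage sketch (assuming fetchEvents is exported from app/web/src/nostr.js and that useCache and timeout are supported options, as the destructuring above suggests):

```js
import { fetchEvents } from "./nostr.js";

// Filters follow the usual Nostr REQ shape; whether the function expects
// a single filter or an array is an assumption in this sketch.
const events = await fetchEvents(
  [{ kinds: [1], limit: 50 }],
  { useCache: true, timeout: 5000 },
);
console.log(`got ${events.length} events, newest first`);
```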

pkg/version/version

```diff
@@ -1 +1 @@
-v0.48.12
+v0.48.13
```