import { BIG_RELAY_URLS, ExtendedKind } from '@/constants'
import { getLatestEvent } from '@/lib/event'
import { getProfileFromEvent, getRelayListFromEvent } from '@/lib/event-metadata'
import { formatPubkey, pubkeyToNpub, userIdToPubkey } from '@/lib/pubkey'
import { getPubkeysFromPTags, getServersFromServerTags } from '@/lib/tag'
import { isLocalNetworkUrl, isWebsocketUrl, normalizeUrl } from '@/lib/url'
import { ISigner, TProfile, TRelayList } from '@/types'
import { sha256 } from '@noble/hashes/sha2'
import DataLoader from 'dataloader'
import dayjs from 'dayjs'
import FlexSearch from 'flexsearch'
import { LRUCache } from 'lru-cache'
import {
  EventTemplate,
  Filter,
  kinds,
  Event as NEvent,
  nip19,
  SimplePool,
  VerifiedEvent
} from 'nostr-tools'
import { AbstractRelay } from 'nostr-tools/abstract-relay'
import indexedDb from './indexed-db.service'

type TTimelineRef = [string, number]

class ClientService extends EventTarget {
  static instance: ClientService

  signer?: ISigner
  pubkey?: string

  private currentRelayUrls: string[] = []
  private pool: SimplePool

  private timelines: Record<
    string,
    | {
        refs: TTimelineRef[]
        filter: Omit<Filter, 'since' | 'until'> & { limit: number }
        urls: string[]
      }
    | string[]
    | undefined
  > = {}

  private eventCacheMap = new Map<string, Promise<NEvent | undefined>>()
  private eventDataLoader = new DataLoader<string, NEvent | undefined>(
    (ids) => Promise.all(ids.map((id) => this._fetchEvent(id))),
    { cacheMap: this.eventCacheMap }
  )
  private fetchEventFromBigRelaysDataloader = new DataLoader<string, NEvent | undefined>(
    this.fetchEventsFromBigRelays.bind(this),
    { cache: false, batchScheduleFn: (callback) => setTimeout(callback, 50) }
  )
  private userIndex = new FlexSearch.Index({ tokenize: 'forward' })

  constructor() {
    super()
    this.pool = new SimplePool()
    this.pool.trackRelays = true
  }

  public static getInstance(): ClientService {
    if (!ClientService.instance) {
      ClientService.instance = new ClientService()
      ClientService.instance.init()
    }
    return ClientService.instance
  }

  async init() {
    await indexedDb.iterateProfileEvents((profileEvent) => this.addUsernameToIndex(profileEvent))
  }

  setCurrentRelayUrls(urls: string[]) {
    this.currentRelayUrls = urls
  }

  getCurrentRelayUrls() {
    return this.currentRelayUrls
  }

  async publishEvent(relayUrls: string[], event: NEvent) {
    try {
      const uniqueRelayUrls = Array.from(new Set(relayUrls))
      const result = await Promise.any(
        uniqueRelayUrls.map(async (url) => {
          // eslint-disable-next-line @typescript-eslint/no-this-alias
          const that = this
          const relay = await this.pool.ensureRelay(url)
          return relay
            .publish(event)
            .catch((error) => {
              // retry once after authenticating, if the relay demands NIP-42 auth
              if (
                error instanceof Error &&
                error.message.startsWith('auth-required') &&
                !!that.signer
              ) {
                return relay
                  .auth((authEvt: EventTemplate) => that.signer!.signEvent(authEvt))
                  .then(() => relay.publish(event))
              } else {
                throw error
              }
            })
            .then((reason) => {
              this.trackEventSeenOn(event.id, relay)
              return reason
            })
        })
      )
      this.dispatchEvent(new CustomEvent('eventPublished', { detail: event }))
      return result
    } catch (error) {
      // Promise.any rejects with an AggregateError; surface the first underlying failure
      if (error instanceof AggregateError) {
        throw error.errors[0]
      }
      throw error
    }
  }

  async signHttpAuth(url: string, method: string, description = '') {
    if (!this.signer) {
      throw new Error('Please login first to sign the event')
    }
    const event = await this.signer?.signEvent({
      content: description,
      kind: kinds.HTTPAuth,
      created_at: dayjs().unix(),
      tags: [
        ['u', url],
        ['method', method]
      ]
    })
    return 'Nostr ' + btoa(JSON.stringify(event))
  }
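  // Usage sketch (hypothetical caller code; the upload URL is a placeholder, `client` is the
  // exported singleton): the returned string is a NIP-98 Authorization header value.
  //
  //   const auth = await client.signHttpAuth('https://media.example.com/upload', 'POST')
  //   await fetch('https://media.example.com/upload', {
  //     method: 'POST',
  //     headers: { Authorization: auth }
  //   })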
  /** =========== Timeline =========== */
  private generateTimelineKey(urls: string[], filter: Filter) {
    // build a stable representation so the same query always hashes to the same key
    const stableFilter: any = {}
    Object.entries(filter)
      .sort()
      .forEach(([key, value]) => {
        if (Array.isArray(value)) {
          stableFilter[key] = [...value].sort()
        } else {
          stableFilter[key] = value
        }
      })
    const paramsStr = JSON.stringify({ urls: [...urls].sort(), filter: stableFilter })
    const encoder = new TextEncoder()
    const data = encoder.encode(paramsStr)
    const hashBuffer = sha256(data)
    const hashArray = Array.from(new Uint8Array(hashBuffer))
    return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('')
  }

  private generateMultipleTimelinesKey(subRequests: { urls: string[]; filter: Filter }[]) {
    const keys = subRequests.map(({ urls, filter }) => this.generateTimelineKey(urls, filter))
    const encoder = new TextEncoder()
    const data = encoder.encode(JSON.stringify(keys.sort()))
    const hashBuffer = sha256(data)
    const hashArray = Array.from(new Uint8Array(hashBuffer))
    return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('')
  }

  async subscribeTimeline(
    subRequests: { urls: string[]; filter: Omit<Filter, 'since' | 'until'> & { limit: number } }[],
    {
      onEvents,
      onNew
    }: {
      onEvents: (events: NEvent[], eosed: boolean) => void
      onNew: (evt: NEvent) => void
    },
    {
      startLogin,
      needSort = true
    }: {
      startLogin?: () => void
      needSort?: boolean
    } = {}
  ) {
    const newEventIdSet = new Set<string>()
    const requestCount = subRequests.length
    const threshold = Math.floor(requestCount / 2)
    let eventIdSet = new Set<string>()
    let events: NEvent[] = []
    let eosedCount = 0
    const subs = await Promise.all(
      subRequests.map(({ urls, filter }) => {
        return this._subscribeTimeline(
          urls,
          filter,
          {
            onEvents: (_events, _eosed) => {
              if (_eosed) {
                eosedCount++
              }
              _events.forEach((evt) => {
                if (eventIdSet.has(evt.id)) return
                eventIdSet.add(evt.id)
                events.push(evt)
              })
              events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
              eventIdSet = new Set(events.map((evt) => evt.id))

              // only start emitting once at least half of the sub-requests have EOSEd
              if (eosedCount >= threshold) {
                onEvents(events, eosedCount >= requestCount)
              }
            },
            onNew: (evt) => {
              if (newEventIdSet.has(evt.id)) return
              newEventIdSet.add(evt.id)
              onNew(evt)
            }
          },
          { startLogin, needSort }
        )
      })
    )
    const key = this.generateMultipleTimelinesKey(subRequests)
    this.timelines[key] = subs.map((sub) => sub.timelineKey)
    return {
      closer: () => {
        onEvents = () => {}
        onNew = () => {}
        subs.forEach((sub) => {
          sub.closer()
        })
      },
      timelineKey: key
    }
  }

  async loadMoreTimeline(key: string, until: number, limit: number) {
    const timeline = this.timelines[key]
    if (!timeline) return []

    if (!Array.isArray(timeline)) {
      return this._loadMoreTimeline(key, until, limit)
    }

    const timelines = await Promise.all(
      timeline.map((key) => this._loadMoreTimeline(key, until, limit))
    )
    const eventIdSet = new Set<string>()
    const events: NEvent[] = []
    timelines.forEach((timeline) => {
      timeline.forEach((evt) => {
        if (eventIdSet.has(evt.id)) return
        eventIdSet.add(evt.id)
        events.push(evt)
      })
    })
    return events.sort((a, b) => b.created_at - a.created_at).slice(0, limit)
  }
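  // Usage sketch (hypothetical caller code; the relay URL, limit, and the render/
  // showNewNoteBanner/oldestCreatedAt helpers are placeholders, not part of this service):
  //
  //   const { closer, timelineKey } = await client.subscribeTimeline(
  //     [{ urls: ['wss://relay.example.com'], filter: { kinds: [1], limit: 50 } }],
  //     {
  //       onEvents: (events, eosed) => render(events), // initial page, possibly partial
  //       onNew: (evt) => showNewNoteBanner(evt)       // live events arriving after EOSE
  //     }
  //   )
  //   // later, paginate backwards from the oldest rendered event:
  //   const older = await client.loadMoreTimeline(timelineKey, oldestCreatedAt - 1, 50)
  //   closer()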
  subscribe(
    urls: string[],
    filter: Filter | Filter[],
    {
      onevent,
      oneose,
      onclose,
      startLogin
    }: {
      onevent?: (evt: NEvent) => void
      oneose?: (eosed: boolean) => void
      onclose?: (reasons: string[]) => void
      startLogin?: () => void
    }
  ) {
    const relays = Array.from(new Set(urls))
    const filters = Array.isArray(filter) ? filter : [filter]
    // eslint-disable-next-line @typescript-eslint/no-this-alias
    const that = this
    const _knownIds = new Set<string>()
    let startedCount = 0
    let eosedCount = 0
    let eosed = false
    let closedCount = 0
    const closeReasons: string[] = []
    const subPromises: Promise<{ close: () => void }>[] = []
    relays.forEach((url) => {
      let hasAuthed = false
      subPromises.push(startSub())

      async function startSub() {
        startedCount++
        const relay = await that.pool.ensureRelay(url, { connectionTimeout: 5000 }).catch(() => {
          return undefined
        })
        // cannot connect to relay; count it as EOSEd so it does not stall the others
        if (!relay) {
          if (!eosed) {
            eosedCount++
            eosed = eosedCount >= startedCount
            oneose?.(eosed)
          }
          return { close: () => {} }
        }

        return relay.subscribe(filters, {
          receivedEvent: (relay, id) => {
            that.trackEventSeenOn(id, relay)
          },
          alreadyHaveEvent: (id: string) => {
            const have = _knownIds.has(id)
            if (have) {
              return true
            }
            _knownIds.add(id)
            return false
          },
          onevent: (evt: NEvent) => {
            onevent?.(evt)
          },
          oneose: () => {
            // make sure oneose is not fired again once all relays have EOSEd
            if (eosed) return
            eosedCount++
            eosed = eosedCount >= startedCount
            oneose?.(eosed)
          },
          onclose: (reason: string) => {
            // auth-required
            if (reason.startsWith('auth-required') && !hasAuthed) {
              // already logged in
              if (that.signer) {
                relay
                  .auth(async (authEvt: EventTemplate) => {
                    const evt = await that.signer!.signEvent(authEvt)
                    if (!evt) {
                      throw new Error('sign event failed')
                    }
                    return evt as VerifiedEvent
                  })
                  .then(() => {
                    hasAuthed = true
                    // restart the subscription now that we are authenticated
                    if (!eosed) {
                      subPromises.push(startSub())
                    }
                  })
                  .catch(() => {
                    // ignore
                  })
                return
              }
              // open login dialog
              if (startLogin) {
                startLogin()
                return
              }
            }
            // close the subscription
            closedCount++
            closeReasons.push(reason)
            if (closedCount >= startedCount) {
              onclose?.(closeReasons)
            }
            return
          },
          eoseTimeout: 10_000 // 10s
        })
      }
    })

    return {
      close: () => {
        subPromises.forEach((subPromise) => {
          subPromise
            .then((sub) => {
              sub.close()
            })
            .catch((err) => {
              console.error(err)
            })
        })
      }
    }
  }
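  // Usage sketch for the low-level subscribe API (hypothetical caller code; noteId and
  // collectReaction are placeholders):
  //
  //   const sub = client.subscribe(['wss://relay.example.com'], { kinds: [7], '#e': [noteId] }, {
  //     onevent: (evt) => collectReaction(evt),
  //     oneose: (allEosed) => { if (allEosed) sub.close() }
  //   })
  //
  // oneose receives `true` only after every reachable relay has EOSEd; unreachable relays
  // are counted as EOSEd up front, so one dead relay cannot stall the whole subscription.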
  private async _subscribeTimeline(
    urls: string[],
    filter: Omit<Filter, 'since' | 'until'> & { limit: number }, // filter with limit
    {
      onEvents,
      onNew
    }: {
      onEvents: (events: NEvent[], eosed: boolean) => void
      onNew: (evt: NEvent) => void
    },
    {
      startLogin,
      needSort = true
    }: {
      startLogin?: () => void
      needSort?: boolean
    } = {}
  ) {
    const relays = Array.from(new Set(urls))
    const key = this.generateTimelineKey(relays, filter)
    const timeline = this.timelines[key]
    let cachedEvents: NEvent[] = []
    let since: number | undefined
    if (timeline && !Array.isArray(timeline) && timeline.refs.length && needSort) {
      cachedEvents = (
        await this.eventDataLoader.loadMany(timeline.refs.slice(0, filter.limit).map(([id]) => id))
      ).filter((evt) => !!evt && !(evt instanceof Error)) as NEvent[]
      if (cachedEvents.length) {
        onEvents([...cachedEvents], false)
        since = cachedEvents[0].created_at + 1
      }
    }

    // eslint-disable-next-line @typescript-eslint/no-this-alias
    const that = this
    let events: NEvent[] = []
    let eosedAt: number | null = null
    const subCloser = this.subscribe(relays, since ? { ...filter, since } : filter, {
      startLogin,
      onevent: (evt: NEvent) => {
        that.eventDataLoader.prime(evt.id, Promise.resolve(evt))
        // not eosed yet, push to events
        if (!eosedAt) {
          return events.push(evt)
        }
        // new event
        if (evt.created_at > eosedAt) {
          onNew(evt)
        }
        const timeline = that.timelines[key]
        if (!timeline || Array.isArray(timeline) || !timeline.refs.length) {
          return
        }

        // find the right position to insert
        let idx = 0
        for (const ref of timeline.refs) {
          if (evt.created_at > ref[1] || (evt.created_at === ref[1] && evt.id < ref[0])) {
            break
          }
          // the event is already in the cache
          if (evt.created_at === ref[1] && evt.id === ref[0]) {
            return
          }
          idx++
        }
        // the event is too old, ignore it
        if (idx >= timeline.refs.length) return

        // insert the event into the right position
        timeline.refs.splice(idx, 0, [evt.id, evt.created_at])
      },
      oneose: (eosed) => {
        if (eosed && !eosedAt) {
          eosedAt = dayjs().unix()
        }
        // (algo feeds) no need to sort and cache
        if (!needSort) {
          return onEvents([...events], !!eosedAt)
        }
        if (!eosed) {
          events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
          return onEvents([...events.concat(cachedEvents).slice(0, filter.limit)], false)
        }

        events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
        const timeline = that.timelines[key]
        // no cache yet
        if (!timeline || Array.isArray(timeline) || !timeline.refs.length) {
          that.timelines[key] = {
            refs: events.map((evt) => [evt.id, evt.created_at]),
            filter,
            urls
          }
          return onEvents([...events], true)
        }

        // Prevent concurrent requests from duplicating the same event
        const firstRefCreatedAt = timeline.refs[0][1]
        const newRefs = events
          .filter((evt) => evt.created_at > firstRefCreatedAt)
          .map((evt) => [evt.id, evt.created_at] as TTimelineRef)

        if (events.length >= filter.limit) {
          // if there are more new events than the limit, the old refs are too far behind; replace them
          timeline.refs = newRefs
          onEvents([...events], true)
        } else {
          // merge new refs with old refs
          timeline.refs = newRefs.concat(timeline.refs)
          onEvents([...events.concat(cachedEvents).slice(0, filter.limit)], true)
        }
      }
    })
    return {
      timelineKey: key,
      closer: () => {
        onEvents = () => {}
        onNew = () => {}
        subCloser.close()
      }
    }
  }
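  // The timeline cache keeps `refs` sorted newest-first as [eventId, created_at] pairs,
  // e.g. (illustrative values only):
  //
  //   refs = [['e3…', 1700000300], ['e2…', 1700000200], ['e1…', 1700000100]]
  //
  // Live events are spliced into position; events older than the last ref are dropped here
  // because the window below the cache is filled by _loadMoreTimeline instead.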
  private async _loadMoreTimeline(key: string, until: number, limit: number) {
    const timeline = this.timelines[key]
    if (!timeline || Array.isArray(timeline)) return []

    const { filter, urls, refs } = timeline
    const startIdx = refs.findIndex(([, createdAt]) => createdAt <= until)
    const cachedEvents =
      startIdx >= 0
        ? ((
            await this.eventDataLoader.loadMany(
              refs.slice(startIdx, startIdx + limit).map(([id]) => id)
            )
          ).filter((evt) => !!evt && !(evt instanceof Error)) as NEvent[])
        : []
    if (cachedEvents.length >= limit) {
      return cachedEvents
    }

    until = cachedEvents.length ? cachedEvents[cachedEvents.length - 1].created_at - 1 : until
    limit = limit - cachedEvents.length
    let events = await this.query(urls, { ...filter, until, limit })
    events.forEach((evt) => {
      this.eventDataLoader.prime(evt.id, Promise.resolve(evt))
    })
    events = events.sort((a, b) => b.created_at - a.created_at).slice(0, limit)

    // Prevent concurrent requests from duplicating the same event
    const lastRefCreatedAt = refs.length > 0 ? refs[refs.length - 1][1] : dayjs().unix()
    timeline.refs.push(
      ...events
        .filter((evt) => evt.created_at < lastRefCreatedAt)
        .map((evt) => [evt.id, evt.created_at] as TTimelineRef)
    )
    return [...cachedEvents, ...events]
  }

  /** =========== Event =========== */
  getSeenEventRelays(eventId: string) {
    return Array.from(this.pool.seenOn.get(eventId)?.values() || [])
  }

  getSeenEventRelayUrls(eventId: string) {
    return this.getSeenEventRelays(eventId).map((relay) => relay.url)
  }

  getEventHints(eventId: string) {
    return this.getSeenEventRelayUrls(eventId).filter((url) => !isLocalNetworkUrl(url))
  }

  getEventHint(eventId: string) {
    return this.getSeenEventRelayUrls(eventId).find((url) => !isLocalNetworkUrl(url)) ?? ''
  }

  trackEventSeenOn(eventId: string, relay: AbstractRelay) {
    let set = this.pool.seenOn.get(eventId)
    if (!set) {
      set = new Set<AbstractRelay>()
      this.pool.seenOn.set(eventId, set)
    }
    set.add(relay)
  }

  private async query(urls: string[], filter: Filter | Filter[], onevent?: (evt: NEvent) => void) {
    return await new Promise<NEvent[]>((resolve) => {
      const events: NEvent[] = []
      const sub = this.subscribe(urls, filter, {
        onevent(evt) {
          onevent?.(evt)
          events.push(evt)
        },
        oneose: (eosed) => {
          if (eosed) {
            sub.close()
            resolve(events)
          }
        },
        onclose: () => {
          resolve(events)
        }
      })
    })
  }

  async fetchEvents(
    urls: string[],
    filter: Filter | Filter[],
    {
      onevent,
      cache = false
    }: {
      onevent?: (evt: NEvent) => void
      cache?: boolean
    } = {}
  ) {
    const relays = Array.from(new Set(urls))
    const events = await this.query(
      relays.length > 0 ? relays : this.currentRelayUrls.concat(BIG_RELAY_URLS),
      filter,
      onevent
    )
    if (cache) {
      events.forEach((evt) => {
        this.eventDataLoader.prime(evt.id, Promise.resolve(evt))
      })
    }
    return events
  }

  async fetchEvent(id: string): Promise<NEvent | undefined> {
    if (!/^[0-9a-f]{64}$/.test(id)) {
      let eventId: string | undefined
      const { type, data } = nip19.decode(id)
      switch (type) {
        case 'note':
          eventId = data
          break
        case 'nevent':
          eventId = data.id
          break
      }
      if (eventId) {
        const cache = this.eventCacheMap.get(eventId)
        if (cache) {
          return cache
        }
      }
    }
    return this.eventDataLoader.load(id)
  }

  addEventToCache(event: NEvent) {
    this.eventDataLoader.prime(event.id, Promise.resolve(event))
  }

  private async fetchEventById(relayUrls: string[], id: string): Promise<NEvent | undefined> {
    const event = await this.fetchEventFromBigRelaysDataloader.load(id)
    if (event) {
      return event
    }

    return this.tryHarderToFetchEvent(relayUrls, { ids: [id], limit: 1 }, true)
  }

  private async _fetchEvent(id: string): Promise<NEvent | undefined> {
    let filter: Filter | undefined
    let relays: string[] = []
    let author: string | undefined
    if (/^[0-9a-f]{64}$/.test(id)) {
      filter = { ids: [id] }
    } else {
      const { type, data } = nip19.decode(id)
      switch (type) {
        case 'note':
          filter = { ids: [data] }
          break
        case 'nevent':
          filter = { ids: [data.id] }
          if (data.relays) relays = data.relays
          if (data.author) author = data.author
          break
        case 'naddr':
          filter = {
            authors: [data.pubkey],
            kinds: [data.kind],
            limit: 1
          }
          author = data.pubkey
          if (data.identifier) {
            filter['#d'] = [data.identifier]
          }
          if (data.relays) relays = data.relays
      }
    }
    if (!filter) {
      throw new Error('Invalid id')
    }

    let event: NEvent | undefined
    if (filter.ids) {
      event = await this.fetchEventById(relays, filter.ids[0])
    } else {
      if (author) {
        const relayList = await this.fetchRelayList(author)
        relays.push(...relayList.write.slice(0, 4))
      }
      event = await this.tryHarderToFetchEvent(relays, filter)
    }

    if (event && event.id !== id) {
      this.eventDataLoader.prime(event.id, Promise.resolve(event))
    }
    return event
  }
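  // fetchEvent accepts a 64-char hex event id or a NIP-19 identifier (ids elided here):
  //
  //   await client.fetchEvent('note1…')   // plain event id
  //   await client.fetchEvent('nevent1…') // event id plus relay/author hints
  //   await client.fetchEvent('naddr1…')  // addressable event: pubkey + kind + d tag
  //
  // For naddr ids the latest matching event wins, so the result's id can differ from the
  // requested id; _fetchEvent re-primes the loader under the real id in that case.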
  private async tryHarderToFetchEvent(
    relayUrls: string[],
    filter: Filter,
    alreadyFetchedFromBigRelays = false
  ) {
    if (!relayUrls.length && filter.authors?.length) {
      const relayList = await this.fetchRelayList(filter.authors[0])
      relayUrls = alreadyFetchedFromBigRelays
        ? relayList.write.filter((url) => !BIG_RELAY_URLS.includes(url)).slice(0, 4)
        : relayList.write.slice(0, 4)
    } else if (!relayUrls.length && !alreadyFetchedFromBigRelays) {
      relayUrls = BIG_RELAY_URLS
    }
    if (!relayUrls.length) return

    const events = await this.query(relayUrls, filter)
    return events.sort((a, b) => b.created_at - a.created_at)[0]
  }

  private async fetchEventsFromBigRelays(ids: readonly string[]) {
    const events = await this.query(BIG_RELAY_URLS, {
      ids: Array.from(new Set(ids)),
      limit: ids.length
    })
    const eventsMap = new Map<string, NEvent>()
    for (const event of events) {
      eventsMap.set(event.id, event)
    }

    return ids.map((id) => eventsMap.get(id))
  }
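  // Batching note: fetchEventFromBigRelaysDataloader schedules its batch with a 50 ms
  // setTimeout, so id lookups issued within the same window coalesce into a single REQ
  // against BIG_RELAY_URLS, e.g. (hypothetical ids):
  //
  //   const [a, b] = await Promise.all([client.fetchEvent(id1), client.fetchEvent(id2)])
  //   // -> one query: { ids: [id1, id2], limit: 2 }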
  /** =========== Following favorite relays =========== */
  private followingFavoriteRelaysCache = new LRUCache<string, [string, string[]][]>({
    max: 10,
    fetchMethod: this._fetchFollowingFavoriteRelays.bind(this)
  })

  async fetchFollowingFavoriteRelays(pubkey: string) {
    return this.followingFavoriteRelaysCache.fetch(pubkey)
  }

  private async _fetchFollowingFavoriteRelays(pubkey: string) {
    const fetchNewData = async () => {
      const followings = await this.fetchFollowings(pubkey)
      const events = await this.fetchEvents(BIG_RELAY_URLS, {
        authors: followings,
        kinds: [ExtendedKind.FAVORITE_RELAYS, kinds.Relaysets],
        limit: 1000
      })
      // keep only the latest favorite-relays event and the latest relay-set event per author
      const alreadyExistsFavoriteRelaysPubkeySet = new Set<string>()
      const alreadyExistsRelaySetsPubkeySet = new Set<string>()
      const uniqueEvents: NEvent[] = []
      events
        .sort((a, b) => b.created_at - a.created_at)
        .forEach((event) => {
          if (event.kind === ExtendedKind.FAVORITE_RELAYS) {
            if (alreadyExistsFavoriteRelaysPubkeySet.has(event.pubkey)) return
            alreadyExistsFavoriteRelaysPubkeySet.add(event.pubkey)
          } else if (event.kind === kinds.Relaysets) {
            if (alreadyExistsRelaySetsPubkeySet.has(event.pubkey)) return
            alreadyExistsRelaySetsPubkeySet.add(event.pubkey)
          } else {
            return
          }
          uniqueEvents.push(event)
        })

      const relayMap = new Map<string, Set<string>>()
      uniqueEvents.forEach((event) => {
        event.tags.forEach(([tagName, tagValue]) => {
          if (tagName === 'relay' && tagValue && isWebsocketUrl(tagValue)) {
            const url = normalizeUrl(tagValue)
            relayMap.set(url, (relayMap.get(url) || new Set<string>()).add(event.pubkey))
          }
        })
      })
      const relayMapEntries = Array.from(relayMap.entries())
        .sort((a, b) => b[1].size - a[1].size)
        .map(([url, pubkeys]) => [url, Array.from(pubkeys)]) as [string, string[]][]
      indexedDb.putFollowingFavoriteRelays(pubkey, relayMapEntries)
      return relayMapEntries
    }

    const cached = await indexedDb.getFollowingFavoriteRelays(pubkey)
    if (cached) {
      // serve stale data immediately and refresh it in the background
      fetchNewData()
      return cached
    }
    return fetchNewData()
  }

  /** =========== Followings =========== */
  async initUserIndexFromFollowings(pubkey: string, signal: AbortSignal) {
    const followings = await this.fetchFollowings(pubkey)
    // warm the profile cache in batches of 20, pausing one second between batches
    for (let i = 0; i * 20 < followings.length; i++) {
      if (signal.aborted) return
      await Promise.all(
        followings.slice(i * 20, (i + 1) * 20).map((pubkey) => this.fetchProfileEvent(pubkey))
      )
      await new Promise((resolve) => setTimeout(resolve, 1000))
    }
  }

  /** =========== Profile =========== */
  async searchProfiles(relayUrls: string[], filter: Filter): Promise<TProfile[]> {
    const events = await this.query(relayUrls, { ...filter, kinds: [kinds.Metadata] })
    const profileEvents = events.sort((a, b) => b.created_at - a.created_at)
    await Promise.allSettled(profileEvents.map((profile) => this.addUsernameToIndex(profile)))
    profileEvents.forEach((profile) => this.updateProfileEventCache(profile))
    return profileEvents.map((profileEvent) => getProfileFromEvent(profileEvent))
  }

  async searchNpubsFromLocal(query: string, limit: number = 100) {
    const result = await this.userIndex.searchAsync(query, { limit })
    return result.map((pubkey) => pubkeyToNpub(pubkey as string)).filter(Boolean) as string[]
  }

  async searchProfilesFromLocal(query: string, limit: number = 100) {
    const npubs = await this.searchNpubsFromLocal(query, limit)
    const profiles = await Promise.all(npubs.map((npub) => this.fetchProfile(npub)))
    return profiles.filter((profile) => !!profile) as TProfile[]
  }
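  // Local search sketch (hypothetical query): addUsernameToIndex feeds the FlexSearch index
  // with display_name, name, and the two halves of the nip05 address, tokenized by prefix,
  // so partial usernames match:
  //
  //   const profiles = await client.searchProfilesFromLocal('fia', 10) // matches "fiatjaf"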
  private async addUsernameToIndex(profileEvent: NEvent) {
    try {
      const profileObj = JSON.parse(profileEvent.content)
      const text = [
        profileObj.display_name?.trim() ?? '',
        profileObj.name?.trim() ?? '',
        profileObj.nip05
          ?.split('@')
          .map((s: string) => s.trim())
          .join(' ') ?? ''
      ].join(' ')
      if (!text) return

      await this.userIndex.addAsync(profileEvent.pubkey, text)
    } catch {
      return
    }
  }

  async fetchProfileEvent(id: string, skipCache: boolean = false): Promise<NEvent | undefined> {
    let pubkey: string | undefined
    let relays: string[] = []
    if (/^[0-9a-f]{64}$/.test(id)) {
      pubkey = id
    } else {
      const { data, type } = nip19.decode(id)
      switch (type) {
        case 'npub':
          pubkey = data
          break
        case 'nprofile':
          pubkey = data.pubkey
          if (data.relays) relays = data.relays
          break
      }
    }
    if (!pubkey) {
      throw new Error('Invalid id')
    }

    if (!skipCache) {
      const localProfile = await indexedDb.getReplaceableEvent(pubkey, kinds.Metadata)
      if (localProfile) {
        return localProfile
      }
    }

    const profileFromBigRelays = await this.replaceableEventFromBigRelaysDataloader.load({
      pubkey,
      kind: kinds.Metadata
    })
    if (profileFromBigRelays) {
      this.addUsernameToIndex(profileFromBigRelays)
      return profileFromBigRelays
    }

    if (!relays.length) {
      return undefined
    }

    const profileEvent = await this.tryHarderToFetchEvent(
      relays,
      {
        authors: [pubkey],
        kinds: [kinds.Metadata],
        limit: 1
      },
      true
    )
    if (profileEvent) {
      this.addUsernameToIndex(profileEvent)
      indexedDb.putReplaceableEvent(profileEvent)
    }

    return profileEvent
  }

  async fetchProfile(id: string, skipCache: boolean = false): Promise<TProfile | undefined> {
    const profileEvent = await this.fetchProfileEvent(id, skipCache)
    if (profileEvent) {
      return getProfileFromEvent(profileEvent)
    }

    // fall back to a bare profile derived from the pubkey alone
    try {
      const pubkey = userIdToPubkey(id)
      return { pubkey, npub: pubkeyToNpub(pubkey) ?? '', username: formatPubkey(pubkey) }
    } catch {
      return undefined
    }
  }

  async updateProfileEventCache(event: NEvent) {
    await this.updateReplaceableEventFromBigRelaysCache(event)
  }

  /** =========== Relay list =========== */
  async fetchRelayListEvent(pubkey: string) {
    const [relayEvent] = await this.fetchReplaceableEventsFromBigRelays([pubkey], kinds.RelayList)
    return relayEvent ?? null
  }

  async fetchRelayList(pubkey: string): Promise<TRelayList> {
    const [relayList] = await this.fetchRelayLists([pubkey])
    return relayList
  }

  async fetchRelayLists(pubkeys: string[]): Promise<TRelayList[]> {
    const relayEvents = await this.fetchReplaceableEventsFromBigRelays(pubkeys, kinds.RelayList)

    return relayEvents.map((event) => {
      if (event) {
        return getRelayListFromEvent(event)
      }
      // fall back to the big relays when a user has not published a relay list
      return {
        write: BIG_RELAY_URLS,
        read: BIG_RELAY_URLS,
        originalRelays: []
      }
    })
  }

  async forceUpdateRelayListEvent(pubkey: string) {
    await this.replaceableEventBatchLoadFn([{ pubkey, kind: kinds.RelayList }])
  }

  async updateRelayListCache(event: NEvent) {
    await this.updateReplaceableEventFromBigRelaysCache(event)
  }

  /** =========== Replaceable event from big relays dataloader =========== */
  private replaceableEventFromBigRelaysDataloader = new DataLoader<
    { pubkey: string; kind: number },
    NEvent | null,
    string
  >(this.replaceableEventFromBigRelaysBatchLoadFn.bind(this), {
    batchScheduleFn: (callback) => setTimeout(callback, 50),
    maxBatchSize: 500,
    cacheKeyFn: ({ pubkey, kind }) => `${pubkey}:${kind}`
  })

  private async replaceableEventFromBigRelaysBatchLoadFn(
    params: readonly { pubkey: string; kind: number }[]
  ) {
    // group the requested pubkeys by kind so each kind becomes a single query
    const groups = new Map<number, string[]>()
    params.forEach(({ pubkey, kind }) => {
      if (!groups.has(kind)) {
        groups.set(kind, [])
      }
      groups.get(kind)!.push(pubkey)
    })

    const eventsMap = new Map<string, NEvent>()
    await Promise.allSettled(
      Array.from(groups.entries()).map(async ([kind, pubkeys]) => {
        const events = await this.query(BIG_RELAY_URLS, {
          authors: pubkeys,
          kinds: [kind]
        })
        for (const event of events) {
          const key = `${event.pubkey}:${event.kind}`
          const existing = eventsMap.get(key)
          if (!existing || existing.created_at < event.created_at) {
            eventsMap.set(key, event)
          }
        }
      })
    )

    return params.map(({ pubkey, kind }) => {
      const key = `${pubkey}:${kind}`
      const event = eventsMap.get(key)
      if (event) {
        indexedDb.putReplaceableEvent(event)
        return event
      } else {
        indexedDb.putNullReplaceableEvent(pubkey, kind)
        return null
      }
    })
  }

  private async fetchReplaceableEventsFromBigRelays(pubkeys: string[], kind: number) {
    const events = await indexedDb.getManyReplaceableEvents(pubkeys, kind)
    const nonExistingPubkeyIndexMap = new Map<string, number>()
    pubkeys.forEach((pubkey, i) => {
      if (events[i] === undefined) {
        nonExistingPubkeyIndexMap.set(pubkey, i)
      }
    })

    const newEvents = await this.replaceableEventFromBigRelaysDataloader.loadMany(
      Array.from(nonExistingPubkeyIndexMap.keys()).map((pubkey) => ({ pubkey, kind }))
    )
    newEvents.forEach((event) => {
      if (event && !(event instanceof Error)) {
        const index = nonExistingPubkeyIndexMap.get(event.pubkey)
        if (index !== undefined) {
          events[index] = event
        }
      }
    })
    return events
  }

  private async updateReplaceableEventFromBigRelaysCache(event: NEvent) {
    this.replaceableEventFromBigRelaysDataloader.clear({ pubkey: event.pubkey, kind: event.kind })
    this.replaceableEventFromBigRelaysDataloader.prime(
      { pubkey: event.pubkey, kind: event.kind },
      Promise.resolve(event)
    )
    await indexedDb.putReplaceableEvent(event)
  }
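  // Design note: two replaceable-event loaders exist on purpose. The "big relays" loader
  // above batches many pubkeys into a few queries against BIG_RELAY_URLS and is used for
  // cheap bulk metadata (profiles, relay lists). The loader below resolves each author's
  // own write relays first, which costs an extra round trip but is more authoritative; it
  // backs the follow/mute/bookmark/blossom list fetchers at the end of this class.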
  /** =========== Replaceable event dataloader =========== */
  private replaceableEventDataLoader = new DataLoader<
    { pubkey: string; kind: number },
    NEvent | null,
    string
  >(this.replaceableEventBatchLoadFn.bind(this), {
    cacheKeyFn: ({ pubkey, kind }) => `${pubkey}:${kind}`
  })

  private async replaceableEventBatchLoadFn(params: readonly { pubkey: string; kind: number }[]) {
    const results = await Promise.allSettled(
      params.map(async ({ pubkey, kind }) => {
        const relayList = await this.fetchRelayList(pubkey)
        const events = await this.query(relayList.write.concat(BIG_RELAY_URLS).slice(0, 5), {
          authors: [pubkey],
          kinds: [kind]
        })
        const event = getLatestEvent(events) ?? null
        if (event) {
          indexedDb.putReplaceableEvent(event)
        } else {
          indexedDb.putNullReplaceableEvent(pubkey, kind)
        }
        return event
      })
    )
    return results.map((result) => {
      if (result.status === 'fulfilled') {
        return result.value
      } else {
        console.error('Failed to load replaceable event:', result.reason)
        return null
      }
    })
  }

  private async fetchReplaceableEvent(pubkey: string, kind: number) {
    const storedEvent = await indexedDb.getReplaceableEvent(pubkey, kind)
    if (storedEvent !== undefined) {
      return storedEvent
    }
    return await this.replaceableEventDataLoader.load({ pubkey, kind })
  }

  private async updateReplaceableEventCache(event: NEvent) {
    this.replaceableEventDataLoader.clear({ pubkey: event.pubkey, kind: event.kind })
    this.replaceableEventDataLoader.prime(
      { pubkey: event.pubkey, kind: event.kind },
      Promise.resolve(event)
    )
    await indexedDb.putReplaceableEvent(event)
  }

  /** =========== Replaceable event =========== */
  async fetchFollowListEvent(pubkey: string) {
    return await this.fetchReplaceableEvent(pubkey, kinds.Contacts)
  }

  async fetchFollowings(pubkey: string) {
    const followListEvent = await this.fetchFollowListEvent(pubkey)
    return followListEvent ? getPubkeysFromPTags(followListEvent.tags) : []
  }

  async updateFollowListCache(evt: NEvent) {
    await this.updateReplaceableEventCache(evt)
  }

  async fetchMuteListEvent(pubkey: string) {
    return await this.fetchReplaceableEvent(pubkey, kinds.Mutelist)
  }

  async fetchBookmarkListEvent(pubkey: string) {
    return this.fetchReplaceableEvent(pubkey, kinds.BookmarkList)
  }

  async fetchBlossomServerListEvent(pubkey: string) {
    return await this.fetchReplaceableEvent(pubkey, ExtendedKind.BLOSSOM_SERVER_LIST)
  }

  async fetchBlossomServerList(pubkey: string) {
    const evt = await this.fetchBlossomServerListEvent(pubkey)
    return evt ? getServersFromServerTags(evt.tags) : []
  }

  async updateBlossomServerListEventCache(evt: NEvent) {
    await this.updateReplaceableEventCache(evt)
  }
}

const instance = ClientService.getInstance()
export default instance