feat: add persistent cache for following feed and notifications

This commit is contained in:
codytseng 2026-01-02 00:39:10 +08:00
parent 7a9c777744
commit fd9f41c8f4
10 changed files with 268 additions and 102 deletions

View file

@ -40,6 +40,7 @@ class ClientService extends EventTarget {
pubkey?: string
currentRelays: string[] = []
private pool: SimplePool
private externalSeenOn = new Map<string, Set<string>>()
private timelines: Record<
string,
@ -306,20 +307,21 @@ class ClientService extends EventTarget {
},
{
startLogin,
needSort = true
needSort = true,
needSaveToDb = false
}: {
startLogin?: () => void
needSort?: boolean
needSaveToDb?: boolean
} = {}
) {
const newEventIdSet = new Set<string>()
const requestCount = subRequests.length
const threshold = Math.floor(requestCount / 2)
let events: NEvent[] = []
const timelines: NEvent[][] = new Array(requestCount).fill(0).map(() => [])
let eosedCount = 0
const subs = await Promise.all(
subRequests.map(({ urls, filter }) => {
subRequests.map(({ urls, filter }, i) => {
return this._subscribeTimeline(
urls,
filter,
@ -329,11 +331,9 @@ class ClientService extends EventTarget {
eosedCount++
}
events = this.mergeTimelines(events, _events)
if (eosedCount >= threshold) {
onEvents(events, eosedCount >= requestCount)
}
timelines[i] = _events
const events = this.mergeTimelines(timelines, filter.limit)
onEvents(events, eosedCount >= requestCount)
},
onNew: (evt) => {
if (newEventIdSet.has(evt.id)) return
@ -342,7 +342,7 @@ class ClientService extends EventTarget {
},
onClose
},
{ startLogin, needSort }
{ startLogin, needSort, needSaveToDb }
)
})
)
@ -362,14 +362,20 @@ class ClientService extends EventTarget {
}
}
private mergeTimelines(a: NEvent[], b: NEvent[]): NEvent[] {
if (a.length === 0) return [...b]
if (b.length === 0) return [...a]
/**
 * Merge multiple timelines (each sorted newest-first) into one newest-first
 * timeline truncated to `limit` events.
 *
 * `limit` defaults to Infinity: Filter.limit is optional in nostr-tools, so a
 * caller passing `filter.limit` may hand us undefined. The previous signature
 * let undefined reach `_mergeTimelines`, where `result.length < undefined` is
 * always false and the merge loop never ran, yielding unmerged output.
 */
private mergeTimelines(timelines: NEvent[][], limit: number = Infinity): NEvent[] {
  if (timelines.length === 0) return []
  if (timelines.length === 1) return timelines[0].slice(0, limit)
  // Explicit type argument: a bare `[]` initial value infers `never[]` under
  // strict mode and fails to unify with the NEvent[] callback result.
  return timelines.reduce<NEvent[]>(
    (merged, current) => this._mergeTimelines(merged, current, limit),
    []
  )
}
private _mergeTimelines(a: NEvent[], b: NEvent[], limit: number): NEvent[] {
if (a.length === 0) return b.slice(0, limit)
if (b.length === 0) return a.slice(0, limit)
const result: NEvent[] = []
let i = 0
let j = 0
while (i < a.length && j < b.length) {
while (i < a.length && j < b.length && result.length < limit) {
const cmp = compareEvents(a[i], b[j])
if (cmp > 0) {
result.push(a[i])
@ -384,6 +390,20 @@ class ClientService extends EventTarget {
}
}
if (result.length >= limit) {
return result
}
while (i < a.length) {
result.push(a[i])
i++
}
while (j < b.length) {
result.push(b[j])
j++
}
return result
}
@ -579,10 +599,12 @@ class ClientService extends EventTarget {
},
{
startLogin,
needSort = true
needSort = true,
needSaveToDb = false
}: {
startLogin?: () => void
needSort?: boolean
needSaveToDb?: boolean
} = {}
) {
const relays = Array.from(new Set(urls))
@ -598,6 +620,15 @@ class ClientService extends EventTarget {
onEvents([...cachedEvents], false)
since = cachedEvents[0].created_at + 1
}
} else if (needSaveToDb) {
const storedEvents: NEvent[] = []
const items = await indexedDb.getEvents(filter)
items.forEach((item) => {
this.trackEventExternalSeenOn(item.event.id, item.relays)
storedEvents.push(item.event)
this.addEventToCache(item.event)
})
onEvents([...storedEvents], false)
}
// eslint-disable-next-line @typescript-eslint/no-this-alias
@ -615,6 +646,9 @@ class ClientService extends EventTarget {
// new event
if (evt.created_at > eosedAt) {
onNew(evt)
if (needSaveToDb) {
indexedDb.putEvents([{ event: evt, relays: that.getEventHints(evt.id) }])
}
}
const timeline = that.timelines[key]
@ -654,6 +688,11 @@ class ClientService extends EventTarget {
}
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
if (needSaveToDb) {
indexedDb.putEvents(
events.map((evt) => ({ event: evt, relays: this.getEventHints(evt.id) }))
)
}
const timeline = that.timelines[key]
// no cache yet
if (!timeline || Array.isArray(timeline) || !timeline.refs.length) {
@ -675,6 +714,9 @@ class ClientService extends EventTarget {
// if new refs are more than limit, means old refs are too old, replace them
timeline.refs = newRefs
onEvents([...events], true)
if (needSaveToDb) {
indexedDb.deleteEvents({ ...filter, until: events[events.length - 1].created_at })
}
} else {
// merge new refs with old refs
timeline.refs = newRefs.concat(timeline.refs)
@ -737,7 +779,12 @@ class ClientService extends EventTarget {
}
getSeenEventRelayUrls(eventId: string) {
return this.getSeenEventRelays(eventId).map((relay) => relay.url)
return Array.from(
new Set([
...this.getSeenEventRelays(eventId).map((relay) => relay.url),
...(this.externalSeenOn.get(eventId) || [])
])
)
}
getEventHints(eventId: string) {
@ -757,6 +804,15 @@ class ClientService extends EventTarget {
set.add(relay)
}
/**
 * Record relay URLs on which an event was seen, as reported by an external
 * source (e.g. the persisted IndexedDB cache), merging into any URLs already
 * tracked for that event id.
 */
trackEventExternalSeenOn(eventId: string, relayUrls: string[]) {
  const seen = this.externalSeenOn.get(eventId) ?? new Set<string>()
  if (!this.externalSeenOn.has(eventId)) {
    this.externalSeenOn.set(eventId, seen)
  }
  for (const url of relayUrls) {
    seen.add(url)
  }
}
private async query(urls: string[], filter: Filter | Filter[], onevent?: (evt: NEvent) => void) {
return await new Promise<NEvent[]>((resolve) => {
const events: NEvent[] = []

View file

@ -1,7 +1,8 @@
import { ExtendedKind } from '@/constants'
import { tagNameEquals } from '@/lib/tag'
import { TRelayInfo } from '@/types'
import { Event, kinds } from 'nostr-tools'
import dayjs from 'dayjs'
import { Event, Filter, kinds, matchFilter } from 'nostr-tools'
type TValue<T = any> = {
key: string
@ -25,6 +26,7 @@ const StoreNames = {
RELAY_INFOS: 'relayInfos',
DECRYPTED_CONTENTS: 'decryptedContents',
PINNED_USERS_EVENTS: 'pinnedUsersEvents',
EVENTS: 'events',
MUTE_DECRYPTED_TAGS: 'muteDecryptedTags', // deprecated
RELAY_INFO_EVENTS: 'relayInfoEvents' // deprecated
}
@ -45,7 +47,7 @@ class IndexedDbService {
init(): Promise<void> {
if (!this.initPromise) {
this.initPromise = new Promise((resolve, reject) => {
const request = window.indexedDB.open('jumble', 10)
const request = window.indexedDB.open('jumble', 11)
request.onerror = (event) => {
reject(event)
@ -103,6 +105,12 @@ class IndexedDbService {
if (!db.objectStoreNames.contains(StoreNames.PINNED_USERS_EVENTS)) {
db.createObjectStore(StoreNames.PINNED_USERS_EVENTS, { keyPath: 'key' })
}
if (!db.objectStoreNames.contains(StoreNames.EVENTS)) {
const feedEventsStore = db.createObjectStore(StoreNames.EVENTS, {
keyPath: 'event.id'
})
feedEventsStore.createIndex('createdAtIndex', 'event.created_at')
}
if (db.objectStoreNames.contains(StoreNames.RELAY_INFO_EVENTS)) {
db.deleteObjectStore(StoreNames.RELAY_INFO_EVENTS)
@ -113,7 +121,10 @@ class IndexedDbService {
this.db = db
}
})
setTimeout(() => this.cleanUp(), 1000 * 60) // 1 minute
setTimeout(() => {
this.cleanUpOldEvents()
this.cleanUp()
}, 1000 * 30) // 30 seconds after initialization
}
return this.initPromise
}
@ -440,6 +451,99 @@ class IndexedDbService {
})
}
/**
 * Persist events (together with the relays they were seen on) into the
 * EVENTS object store. Resolves when the write transaction completes;
 * rejects if the transaction errors or aborts.
 */
async putEvents(items: { event: Event; relays: string[] }[]): Promise<void> {
  await this.initPromise
  return new Promise((resolve, reject) => {
    if (!this.db) {
      return reject('database not initialized')
    }
    // Resolve synchronously for an empty batch. Counting per-request success
    // callbacks (as before) never reaches items.length when items is empty,
    // leaving the promise pending forever — and callers do pass empty arrays
    // (e.g. `events.map(...)` over an empty timeline).
    if (items.length === 0) {
      return resolve()
    }
    const transaction = this.db.transaction(StoreNames.EVENTS, 'readwrite')
    const store = transaction.objectStore(StoreNames.EVENTS)
    // Settle on transaction lifecycle events instead of counting individual
    // put callbacks: this also covers aborts and cannot double-settle when a
    // later request fires after an earlier one already rejected.
    transaction.oncomplete = () => resolve()
    transaction.onerror = (event) => reject(event)
    transaction.onabort = (event) => reject(event)
    items.forEach((item) => store.put(item))
  })
}
/**
 * Read cached events matching `filter`, newest first, stopping after `limit`
 * matches (unbounded when limit is falsy). Walks the created_at index in
 * descending order and applies the remaining filter fields in memory with
 * matchFilter.
 */
async getEvents({ limit, ...filter }: Filter): Promise<{ event: Event; relays: string[] }[]> {
  await this.initPromise
  return new Promise((resolve, reject) => {
    const db = this.db
    if (!db) {
      return reject('database not initialized')
    }
    const transaction = db.transaction(StoreNames.EVENTS, 'readonly')
    const cursorRequest = transaction
      .objectStore(StoreNames.EVENTS)
      .index('createdAtIndex')
      .openCursor(null, 'prev') // 'prev' = newest created_at first
    const matched: { event: Event; relays: string[] }[] = []
    cursorRequest.onsuccess = (event) => {
      const cursor = (event.target as IDBRequest).result
      const wantMore = !limit || matched.length < limit
      if (!cursor || !wantMore) {
        transaction.commit()
        resolve(matched)
        return
      }
      const item = cursor.value as { event: Event; relays: string[] }
      if (matchFilter(filter, item.event)) {
        matched.push(item)
      }
      cursor.continue()
    }
    cursorRequest.onerror = (event) => {
      transaction.commit()
      reject(event)
    }
  })
}
/**
 * Delete cached events strictly older than `filter.until` that also match
 * the remaining filter fields. Events created exactly at `until` are kept
 * (exclusive upper bound).
 */
async deleteEvents(filter: Filter & { until: number }): Promise<void> {
  await this.initPromise
  return new Promise((resolve, reject) => {
    const db = this.db
    if (!db) {
      return reject('database not initialized')
    }
    const transaction = db.transaction(StoreNames.EVENTS, 'readwrite')
    const cursorRequest = transaction
      .objectStore(StoreNames.EVENTS)
      .index('createdAtIndex')
      // second argument `true` makes the bound exclusive
      .openCursor(IDBKeyRange.upperBound(filter.until, true))
    cursorRequest.onsuccess = (event) => {
      const cursor = (event.target as IDBRequest).result
      if (!cursor) {
        transaction.commit()
        resolve()
        return
      }
      const { event: evt } = cursor.value as { event: Event; relays: string[] }
      if (matchFilter(filter, evt)) {
        cursor.delete()
      }
      cursor.continue()
    }
    cursorRequest.onerror = (event) => {
      transaction.commit()
      reject(event)
    }
  })
}
private getReplaceableEventKeyFromEvent(event: Event): string {
if (
[kinds.Metadata, kinds.Contacts].includes(event.kind) ||
@ -559,6 +663,33 @@ class IndexedDbService {
})
)
}
/**
 * Fire-and-forget maintenance: delete all cached events older than 5 days.
 * Errors are logged rather than propagated, since this runs on a timer
 * shortly after initialization.
 */
private async cleanUpOldEvents() {
  await this.initPromise
  const db = this.db
  if (!db) {
    return
  }
  const cutoff = dayjs().subtract(5, 'days').unix()
  const transaction = db.transaction(StoreNames.EVENTS, 'readwrite')
  const cursorRequest = transaction
    .objectStore(StoreNames.EVENTS)
    .index('createdAtIndex')
    .openCursor(IDBKeyRange.upperBound(cutoff))
  cursorRequest.onsuccess = (event) => {
    const cursor = (event.target as IDBRequest).result
    if (!cursor) {
      transaction.commit()
      return
    }
    cursor.delete()
    cursor.continue()
  }
  cursorRequest.onerror = (event) => {
    transaction.commit()
    console.error('Failed to clean up old events:', event)
  }
}
}
const instance = IndexedDbService.getInstance()