/**
 * Synchronize user content (profile, series, publications) to IndexedDB cache
 * Called after key import to ensure all user content is cached locally
 */

import type { Event } from 'nostr-tools'
import { nostrService } from './nostr'
import { fetchAuthorPresentationFromPool } from './articlePublisherHelpersPresentation'
import { extractTagsFromEvent } from './nostrTagSystem'
import { extractSeriesFromEvent, extractPublicationFromEvent } from './metadataExtractor'
import { objectCache } from './objectCache'
import { getLatestVersion } from './versionManager'
import { buildTagFilter } from './nostrTagSystemFilter'
import { getPrimaryRelaySync } from './config'
import { PLATFORM_SERVICE, MIN_EVENT_DATE } from './platformConfig'
import type { SimplePoolWithSub } from '@/types/nostr-tools-extended'

/**
 * Fetch all publications by an author and cache them
 */
async function fetchAndCachePublications(
  pool: SimplePoolWithSub,
  authorPubkey: string
): Promise<void> {
  const filters = [
    {
      ...buildTagFilter({
        type: 'publication',
        authorPubkey,
        service: PLATFORM_SERVICE,
      }),
      since: MIN_EVENT_DATE,
      limit: 1000, // Get all publications
    },
  ]

  const relayUrl = getPrimaryRelaySync()
  const { createSubscription } = require('@/types/nostr-tools-extended')
  const sub = createSubscription(pool, [relayUrl], filters)

  const events: Event[] = []

  return new Promise((resolve) => {
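    // `finished` guards against running the completion logic twice: `done` can be
    // triggered by both the EOSE handler and the safety-net timeout below.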
    let finished = false

    const done = async () => {
      if (finished) {
        return
      }
      finished = true
      sub.unsub()

      // Group events by hash ID and cache the latest version of each
      const eventsByHashId = new Map<string, Event[]>()
      for (const event of events) {
        const tags = extractTagsFromEvent(event)
        if (tags.id) {
          const hashId = tags.id
          if (!eventsByHashId.has(hashId)) {
            eventsByHashId.set(hashId, [])
          }
          eventsByHashId.get(hashId)!.push(event)
        }
      }

      // Cache each publication
      for (const [hashId, hashEvents] of eventsByHashId.entries()) {
        const latestEvent = getLatestVersion(hashEvents)
        if (latestEvent) {
          const extracted = await extractPublicationFromEvent(latestEvent)
          if (extracted) {
            const tags = extractTagsFromEvent(latestEvent)
            await objectCache.set(
              'publication',
              hashId,
              latestEvent,
              extracted,
              tags.version ?? 0,
              tags.hidden ?? false
            )
          }
        }
      }

      resolve()
    }

    sub.on('event', (event: Event) => {
      const tags = extractTagsFromEvent(event)
      if (tags.type === 'publication' && !tags.hidden) {
        events.push(event)
      }
    })

    sub.on('eose', () => {
      void done()
    })

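    // Safety net: resolve after 10 seconds even if the relay never sends EOSE.
    // `unref?.()` only matters under Node, where the timer would otherwise keep the
    // process alive; in the browser setTimeout returns a number and the optional
    // call is a no-op.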
    setTimeout(() => {
      void done()
    }, 10000).unref?.()
  })
}

/**
 * Fetch all series by an author and cache them
 */
async function fetchAndCacheSeries(
  pool: SimplePoolWithSub,
  authorPubkey: string
): Promise<void> {
  // Fetch all series events so each series can be cached at its latest version
  const filters = [
    {
      ...buildTagFilter({
        type: 'series',
        authorPubkey,
        service: PLATFORM_SERVICE,
      }),
      since: MIN_EVENT_DATE,
      limit: 1000, // Get all series events
    },
  ]

  const relayUrl = getPrimaryRelaySync()
  const { createSubscription } = require('@/types/nostr-tools-extended')
  const sub = createSubscription(pool, [relayUrl], filters)

  const events: Event[] = []

  return new Promise((resolve) => {
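    // As in fetchAndCachePublications, `finished` prevents double completion from
    // the EOSE handler plus the safety-net timeout.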
    let finished = false

    const done = async () => {
      if (finished) {
        return
      }
      finished = true
      sub.unsub()

      // Group events by hash ID and cache the latest version of each
      const eventsByHashId = new Map<string, Event[]>()
      for (const event of events) {
        const tags = extractTagsFromEvent(event)
        if (tags.id) {
          const hashId = tags.id
          if (!eventsByHashId.has(hashId)) {
            eventsByHashId.set(hashId, [])
          }
          eventsByHashId.get(hashId)!.push(event)
        }
      }

      // Cache each series
      for (const [hashId, hashEvents] of eventsByHashId.entries()) {
        const latestEvent = getLatestVersion(hashEvents)
        if (latestEvent) {
          const extracted = await extractSeriesFromEvent(latestEvent)
          if (extracted) {
            const tags = extractTagsFromEvent(latestEvent)
            await objectCache.set(
              'series',
              hashId,
              latestEvent,
              extracted,
              tags.version ?? 0,
              tags.hidden ?? false
            )
          }
        }
      }

      resolve()
    }

    sub.on('event', (event: Event) => {
      const tags = extractTagsFromEvent(event)
      if (tags.type === 'series' && !tags.hidden) {
        events.push(event)
      }
    })

    sub.on('eose', () => {
      void done()
    })

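    // Safety-net timeout, as in fetchAndCachePublications: resolve after 10 seconds
    // if EOSE never arrives.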
    setTimeout(() => {
      void done()
    }, 10000).unref?.()
  })
}

/**
 * Synchronize all user content to IndexedDB cache
 * Fetches profile, series, and publications and caches them
 */
export async function syncUserContentToCache(userPubkey: string): Promise<void> {
  try {
    const pool = nostrService.getPool()
    if (!pool) {
      console.warn('Pool not initialized, cannot sync user content')
      return
    }

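    // getPool()'s return type does not line up with SimplePoolWithSub directly,
    // hence the double cast through `unknown`.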
    const poolWithSub = pool as unknown as SimplePoolWithSub

    // Fetch and cache author profile (already caches itself)
    await fetchAuthorPresentationFromPool(poolWithSub, userPubkey)

    // Fetch and cache all series
    await fetchAndCacheSeries(poolWithSub, userPubkey)

    // Fetch and cache all publications
    await fetchAndCachePublications(poolWithSub, userPubkey)
  } catch (error) {
    console.error('Error syncing user content to cache:', error)
    // Don't throw - this is a background operation
  }
}
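
// Usage sketch (hypothetical call site; only syncUserContentToCache is part of this
// module, the surrounding key-import step is assumed):
//
//   // after the user's key has been imported and their hex pubkey is known:
//   void syncUserContentToCache(userPubkeyHex)
//
// The call is safe to fire-and-forget: errors are logged inside the function rather
// than thrown, so a failed cache sync never blocks the key-import flow.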