story-research-zapwall/lib/userContentSync.ts

/**
* Synchronize user content (profile, series, publications, purchases, sponsoring, review tips) to the IndexedDB cache
* Called after key import to ensure all of the user's content is cached locally
*/
import type { Event } from 'nostr-tools'
import { nostrService } from './nostr'
import { fetchAuthorPresentationFromPool } from './articlePublisherHelpersPresentation'
import { extractTagsFromEvent } from './nostrTagSystem'
import { extractSeriesFromEvent, extractPublicationFromEvent, extractPurchaseFromEvent, extractSponsoringFromEvent, extractReviewTipFromEvent } from './metadataExtractor'
import { objectCache } from './objectCache'
import { getLatestVersion } from './versionManager'
import { buildTagFilter } from './nostrTagSystemFilter'
import { getPrimaryRelaySync } from './config'
import { PLATFORM_SERVICE, MIN_EVENT_DATE } from './platformConfig'
import { parseObjectId } from './urlGenerator'
import type { SimplePoolWithSub } from '@/types/nostr-tools-extended'
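// All fetchAndCache* helpers below follow the same pattern: open a subscription against
// the primary relay, collect matching events until EOSE or a 10-second timeout (whichever
// comes first), then cache the results in IndexedDB via objectCache. Publications and
// series are additionally grouped by object hash so only the latest version is cached.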
/**
* Fetch all publications by an author and cache them
*/
async function fetchAndCachePublications(
pool: SimplePoolWithSub,
authorPubkey: string
): Promise<void> {
const filters = [
{
...buildTagFilter({
type: 'publication',
authorPubkey,
service: PLATFORM_SERVICE,
}),
since: MIN_EVENT_DATE,
limit: 1000, // Get all publications
},
]
const relayUrl = getPrimaryRelaySync()
const { createSubscription } = require('@/types/nostr-tools-extended')
const sub = createSubscription(pool, [relayUrl], filters)
const events: Event[] = []
return new Promise((resolve) => {
let finished = false
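// done() can fire from both the EOSE handler and the timeout below; the finished flag makes it run only once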
const done = async () => {
if (finished) {
return
}
finished = true
sub.unsub()
// Group events by hash ID and cache the latest version of each
const eventsByHashId = new Map<string, Event[]>()
for (const event of events) {
const tags = extractTagsFromEvent(event)
if (tags.id) {
// Extract hash from id (can be <hash>_<index>_<version> or just hash)
const parsed = parseObjectId(tags.id)
const hash = parsed.hash ?? tags.id
if (!eventsByHashId.has(hash)) {
eventsByHashId.set(hash, [])
}
eventsByHashId.get(hash)!.push(event)
}
}
// Cache each publication
for (const hashEvents of eventsByHashId.values()) {
const latestEvent = getLatestVersion(hashEvents)
if (latestEvent) {
const extracted = await extractPublicationFromEvent(latestEvent)
if (extracted?.hash) {
const tags = extractTagsFromEvent(latestEvent)
await objectCache.set(
'publication',
extracted.hash,
latestEvent,
extracted,
tags.version ?? 0,
tags.hidden ?? false,
extracted.index
)
}
}
}
resolve()
}
sub.on('event', (event: Event) => {
const tags = extractTagsFromEvent(event)
if (tags.type === 'publication' && !tags.hidden) {
events.push(event)
}
})
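// EOSE: the relay has delivered all stored events matching the filters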
sub.on('eose', () => {
void done()
})
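// Safety net: resolve after 10 s even if EOSE never arrives; unref (present in Node, absent in browsers) keeps the timer from holding a process open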
setTimeout(() => {
void done()
}, 10000).unref?.()
})
}
/**
* Fetch all series by an author and cache them
*/
async function fetchAndCacheSeries(
pool: SimplePoolWithSub,
authorPubkey: string
): Promise<void> {
// Fetch all events for series to cache them properly
const filters = [
{
...buildTagFilter({
type: 'series',
authorPubkey,
service: PLATFORM_SERVICE,
}),
since: MIN_EVENT_DATE,
limit: 1000, // Get all series events
},
]
const relayUrl = getPrimaryRelaySync()
const { createSubscription } = require('@/types/nostr-tools-extended')
const sub = createSubscription(pool, [relayUrl], filters)
const events: Event[] = []
return new Promise((resolve) => {
let finished = false
const done = async () => {
if (finished) {
return
}
finished = true
sub.unsub()
// Group events by hash ID and cache the latest version of each
const eventsByHashId = new Map<string, Event[]>()
for (const event of events) {
const tags = extractTagsFromEvent(event)
if (tags.id) {
// Extract hash from id (can be <hash>_<index>_<version> or just hash)
const parsed = parseObjectId(tags.id)
const hash = parsed.hash ?? tags.id
if (!eventsByHashId.has(hash)) {
eventsByHashId.set(hash, [])
}
eventsByHashId.get(hash)!.push(event)
}
}
// Cache each series
for (const hashEvents of eventsByHashId.values()) {
const latestEvent = getLatestVersion(hashEvents)
if (latestEvent) {
const extracted = await extractSeriesFromEvent(latestEvent)
if (extracted?.hash) {
const tags = extractTagsFromEvent(latestEvent)
await objectCache.set(
'series',
extracted.hash,
latestEvent,
extracted,
tags.version ?? 0,
tags.hidden ?? false,
extracted.index
)
}
}
}
resolve()
}
sub.on('event', (event: Event) => {
const tags = extractTagsFromEvent(event)
if (tags.type === 'series' && !tags.hidden) {
events.push(event)
}
})
sub.on('eose', () => {
void done()
})
setTimeout(() => {
void done()
}, 10000).unref?.()
})
}
/**
* Fetch all purchases by a payer and cache them
*/
async function fetchAndCachePurchases(
pool: SimplePoolWithSub,
payerPubkey: string
): Promise<void> {
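// Note: '#kind_type' filters on a custom multi-character tag; this assumes the primary relay indexes non-single-letter tags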
const filters = [
{
kinds: [9735], // Zap receipt
authors: [payerPubkey],
'#kind_type': ['purchase'],
since: MIN_EVENT_DATE,
limit: 1000,
},
]
const relayUrl = getPrimaryRelaySync()
const { createSubscription } = require('@/types/nostr-tools-extended')
const sub = createSubscription(pool, [relayUrl], filters)
const events: Event[] = []
return new Promise((resolve) => {
let finished = false
const done = async () => {
if (finished) {
return
}
finished = true
sub.unsub()
// Load the parser once rather than re-importing it on every iteration
const { parsePurchaseFromEvent } = await import('./nostrEventParsing')
for (const event of events) {
const extracted = await extractPurchaseFromEvent(event)
if (extracted) {
// Parse to Purchase object for cache
const purchase = await parsePurchaseFromEvent(event)
if (purchase) {
await objectCache.set('purchase', purchase.hash, event, purchase, 0, false, purchase.index)
}
}
}
resolve()
}
sub.on('event', (event: Event) => {
events.push(event)
})
sub.on('eose', () => {
void done()
})
setTimeout(() => {
void done()
}, 10000).unref?.()
})
}
/**
* Fetch all sponsoring received by an author (author tagged as the zap recipient) and cache them
*/
async function fetchAndCacheSponsoring(
pool: SimplePoolWithSub,
authorPubkey: string
): Promise<void> {
const filters = [
{
kinds: [9735], // Zap receipt
'#p': [authorPubkey],
'#kind_type': ['sponsoring'],
since: MIN_EVENT_DATE,
limit: 1000,
},
]
const relayUrl = getPrimaryRelaySync()
const { createSubscription } = require('@/types/nostr-tools-extended')
const sub = createSubscription(pool, [relayUrl], filters)
const events: Event[] = []
return new Promise((resolve) => {
let finished = false
const done = async () => {
if (finished) {
return
}
finished = true
sub.unsub()
// Load the parser once rather than re-importing it on every iteration
const { parseSponsoringFromEvent } = await import('./nostrEventParsing')
for (const event of events) {
const extracted = await extractSponsoringFromEvent(event)
if (extracted) {
// Parse to Sponsoring object for cache
const sponsoring = await parseSponsoringFromEvent(event)
if (sponsoring) {
await objectCache.set('sponsoring', sponsoring.hash, event, sponsoring, 0, false, sponsoring.index)
}
}
}
resolve()
}
sub.on('event', (event: Event) => {
events.push(event)
})
sub.on('eose', () => {
void done()
})
setTimeout(() => {
void done()
}, 10000).unref?.()
})
}
/**
* Fetch all review tips received by an author (author tagged as the zap recipient) and cache them
*/
async function fetchAndCacheReviewTips(
pool: SimplePoolWithSub,
authorPubkey: string
): Promise<void> {
const filters = [
{
kinds: [9735], // Zap receipt
'#p': [authorPubkey],
'#kind_type': ['review_tip'],
since: MIN_EVENT_DATE,
limit: 1000,
},
]
const relayUrl = getPrimaryRelaySync()
const { createSubscription } = require('@/types/nostr-tools-extended')
const sub = createSubscription(pool, [relayUrl], filters)
const events: Event[] = []
return new Promise((resolve) => {
let finished = false
const done = async () => {
if (finished) {
return
}
finished = true
sub.unsub()
// Load the parser once rather than re-importing it on every iteration
const { parseReviewTipFromEvent } = await import('./nostrEventParsing')
for (const event of events) {
const extracted = await extractReviewTipFromEvent(event)
if (extracted) {
// Parse to ReviewTip object for cache
const reviewTip = await parseReviewTipFromEvent(event)
if (reviewTip) {
await objectCache.set('review_tip', reviewTip.hash, event, reviewTip, 0, false, reviewTip.index)
}
}
}
resolve()
}
sub.on('event', (event: Event) => {
events.push(event)
})
sub.on('eose', () => {
void done()
})
setTimeout(() => {
void done()
}, 10000).unref?.()
})
}
/**
* Synchronize all user content to the IndexedDB cache
* Fetches the user's profile, series, publications, purchases, sponsoring, and review tips, and caches each of them locally
*/
export async function syncUserContentToCache(userPubkey: string): Promise<void> {
try {
const pool = nostrService.getPool()
if (!pool) {
console.warn('Pool not initialized, cannot sync user content')
return
}
const poolWithSub = pool as unknown as SimplePoolWithSub
// Fetch and cache author profile (already caches itself)
await fetchAuthorPresentationFromPool(poolWithSub, userPubkey)
// Fetch and cache all series
await fetchAndCacheSeries(poolWithSub, userPubkey)
// Fetch and cache all publications
await fetchAndCachePublications(poolWithSub, userPubkey)
// Fetch and cache all purchases (as payer)
await fetchAndCachePurchases(poolWithSub, userPubkey)
// Fetch and cache all sponsoring (as author)
await fetchAndCacheSponsoring(poolWithSub, userPubkey)
// Fetch and cache all review tips (as author)
await fetchAndCacheReviewTips(poolWithSub, userPubkey)
} catch (error) {
console.error('Error syncing user content to cache:', error)
// Don't throw - this is a background operation
}
}
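/*
* Usage sketch (not part of this module): a plausible call site after key import,
* assuming a hypothetical onKeyImported handler. syncUserContentToCache never throws,
* so it can be fired without awaiting to keep the UI responsive.
*
* import { syncUserContentToCache } from '@/lib/userContentSync'
*
* async function onKeyImported(userPubkey: string): Promise<void> {
*   // ...store the imported key, update auth state...
*   void syncUserContentToCache(userPubkey) // background cache warm-up
* }
*/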