From 05382f34abd84e37be67adc8591c623eabd609dc Mon Sep 17 00:00:00 2001
From: Nicolas Cantu
Date: Tue, 6 Jan 2026 00:54:49 +0100
Subject: [PATCH] series building: move object metadata into json tags and add
 an IndexedDB object cache

---
 components/HomeView.tsx                    |   4 +
 lib/articleInvoice.ts                      |  20 +++
 lib/articleMutations.ts                    |  88 +++++++---
 lib/articlePublisher.ts                    |  12 ++
 lib/articlePublisherHelpersPresentation.ts |  87 ++++++---
 lib/metadataExtractor.ts                   |  63 ++++++-
 lib/nostrTagSystemExtract.ts               |   2 +
 lib/objectCache.ts                         | 194 +++++++++++++++++++++
 lib/presentationParsing.ts                 |  28 ++-
 9 files changed, 429 insertions(+), 69 deletions(-)
 create mode 100644 lib/objectCache.ts

diff --git a/components/HomeView.tsx b/components/HomeView.tsx
index d6aacff..edddc19 100644
--- a/components/HomeView.tsx
+++ b/components/HomeView.tsx
@@ -100,6 +100,10 @@ function HomeContent({
   const articlesListProps = { articles, allArticles, loading, error, onUnlock, unlockedArticles }
   const authorsListProps = { authors, allAuthors, loading, error }
 
+  // Authors are derived from the article feed, so they share its loading flag;
+  // only report the author list as loading while it is shown and still empty.
+  const authorsLoading = loading && shouldShowAuthors && allAuthors.length === 0
+
   return (
     <div
0) { newTags.push(...extraTags) diff --git a/lib/articleMutations.ts b/lib/articleMutations.ts index 75d73c4..7f3dafd 100644 --- a/lib/articleMutations.ts +++ b/lib/articleMutations.ts @@ -98,23 +98,42 @@ async function buildSeriesEvent( coverUrl: params.coverUrl ?? undefined, }) + // Build JSON metadata + const seriesJson = JSON.stringify({ + type: 'series', + pubkey: params.authorPubkey, + title: params.title, + description: params.description, + preview: params.preview ?? params.description.substring(0, 200), + coverUrl: params.coverUrl, + category: newCategory, + id: hashId, + version: 0, + index: 0, + }) + + const tags = buildTags({ + type: 'series', + category: newCategory, + id: hashId, + service: PLATFORM_SERVICE, + version: 0, // New object + hidden: false, + paywall: false, + title: params.title, + description: params.description, + preview: params.preview ?? params.description.substring(0, 200), + ...(params.coverUrl ? { coverUrl: params.coverUrl } : {}), + }) + + // Add JSON metadata as a tag + tags.push(['json', seriesJson]) + return { kind: 1, created_at: Math.floor(Date.now() / 1000), content: params.preview ?? params.description.substring(0, 200), - tags: buildTags({ - type: 'series', - category: newCategory, - id: hashId, - service: PLATFORM_SERVICE, - version: 0, // New object - hidden: false, - paywall: false, - title: params.title, - description: params.description, - preview: params.preview ?? params.description.substring(0, 200), - ...(params.coverUrl ? { coverUrl: params.coverUrl } : {}), - }), + tags, } } @@ -172,22 +191,41 @@ async function buildReviewEvent( title: params.title ?? undefined, }) + // Build JSON metadata + const reviewJson = JSON.stringify({ + type: 'review', + pubkey: params.reviewerPubkey, + articleId: params.articleId, + reviewerPubkey: params.reviewerPubkey, + content: params.content, + title: params.title, + category: newCategory, + id: hashId, + version: 0, + index: 0, + }) + + const tags = buildTags({ + type: 'quote', + category: newCategory, + id: hashId, + service: PLATFORM_SERVICE, + version: 0, // New object + hidden: false, + paywall: false, + articleId: params.articleId, + reviewerPubkey: params.reviewerPubkey, + ...(params.title ? { title: params.title } : {}), + }) + + // Add JSON metadata as a tag + tags.push(['json', reviewJson]) + return { kind: 1, created_at: Math.floor(Date.now() / 1000), content: params.content, - tags: buildTags({ - type: 'quote', - category: newCategory, - id: hashId, - service: PLATFORM_SERVICE, - version: 0, // New object - hidden: false, - paywall: false, - articleId: params.articleId, - reviewerPubkey: params.reviewerPubkey, - ...(params.title ? 
{ title: params.title } : {}), - }), + tags, } } diff --git a/lib/articlePublisher.ts b/lib/articlePublisher.ts index 6e17a72..bd9080b 100644 --- a/lib/articlePublisher.ts +++ b/lib/articlePublisher.ts @@ -182,6 +182,18 @@ export class ArticlePublisher { return buildFailure('Failed to publish presentation article') } + // Parse and cache the published presentation immediately + const { parsePresentationEvent } = await import('./articlePublisherHelpers') + const { extractTagsFromEvent } = await import('./nostrTagSystem') + const { objectCache } = await import('./objectCache') + const parsed = parsePresentationEvent(publishedEvent) + if (parsed) { + const tags = extractTagsFromEvent(publishedEvent) + if (tags.id) { + await objectCache.set('author', tags.id, publishedEvent, parsed, tags.version, tags.hidden) + } + } + return { articleId: publishedEvent.id, previewEventId: publishedEvent.id, diff --git a/lib/articlePublisherHelpersPresentation.ts b/lib/articlePublisherHelpersPresentation.ts index 08993e4..39314ab 100644 --- a/lib/articlePublisherHelpersPresentation.ts +++ b/lib/articlePublisherHelpersPresentation.ts @@ -8,6 +8,7 @@ import { PLATFORM_SERVICE } from './platformConfig' import { generateAuthorHashId } from './hashIdGenerator' import { generateObjectUrl } from './urlGenerator' import { getLatestVersion } from './versionManager' +import { objectCache } from './objectCache' export async function buildPresentationEvent( draft: AuthorPresentationDraft, @@ -41,7 +42,7 @@ export async function buildPresentationEvent( // Encode pubkey to npub (for metadata JSON) const npub = nip19.npubEncode(authorPubkey) - // Build visible content message + // Build visible content message (without metadata JSON) const visibleContent = [ 'Nouveau profil publié sur zapwall.fr', profileUrl, @@ -51,7 +52,7 @@ export async function buildPresentationEvent( `Adresse Bitcoin mainnet (pour le sponsoring) : ${draft.mainnetAddress}`, ].join('\n') - // Build profile JSON for metadata (non-visible) + // Build profile JSON for metadata (stored in tag, not in content) const profileJson = JSON.stringify({ authorName, npub, @@ -64,12 +65,9 @@ export async function buildPresentationEvent( url: profileUrl, version, index, - }, null, 2) + }) - // Combine visible content and JSON metadata (JSON in hidden section) - const fullContent = `${visibleContent}\n\n---\n\n[Metadata JSON]\n${profileJson}` - - // Build tags (profile JSON is in content, not in tags) + // Build tags (profile JSON is in tag, not in content) const tags = buildTags({ type: 'author', category, @@ -85,11 +83,14 @@ export async function buildPresentationEvent( ...(draft.pictureUrl ? 
{ pictureUrl: draft.pictureUrl } : {}), }) + // Add JSON metadata as a tag (not in visible content) + tags.push(['json', profileJson]) + return { kind: 1 as const, created_at: Math.floor(Date.now() / 1000), tags, - content: fullContent, + content: visibleContent, } } @@ -101,7 +102,7 @@ export function parsePresentationEvent(event: Event): import('@/types/nostr').Au return null } - // Try to extract profile JSON from content [Metadata JSON] section + // Try to extract profile JSON from tag first (new format) let profileData: { presentation?: string contentDescription?: string @@ -109,12 +110,38 @@ export function parsePresentationEvent(event: Event): import('@/types/nostr').Au pictureUrl?: string } | null = null - const jsonMatch = event.content.match(/\[Metadata JSON\]\n(.+)$/s) - if (jsonMatch && jsonMatch[1]) { + if (tags.json) { try { - profileData = JSON.parse(jsonMatch[1].trim()) + profileData = JSON.parse(tags.json) } catch (e) { - console.error('Error parsing profile JSON from content:', e) + console.error('Error parsing JSON from tag:', e) + } + } + + // Fallback to content format (for backward compatibility with old notes) + if (!profileData) { + // Try invisible format (with zero-width characters) + const invisibleJsonMatch = event.content.match(/[\u200B\u200C]\[Metadata JSON\][\u200B\u200C]\n[\u200B\u200C](.+)[\u200B\u200C]$/s) + if (invisibleJsonMatch && invisibleJsonMatch[1]) { + try { + // Remove zero-width characters from JSON + const cleanedJson = invisibleJsonMatch[1].replace(/[\u200B\u200C\u200D\u200E\u200F]/g, '').trim() + profileData = JSON.parse(cleanedJson) + } catch (e) { + console.error('Error parsing profile JSON from invisible content:', e) + } + } + + // Fallback to visible format in content + if (!profileData) { + const jsonMatch = event.content.match(/\[Metadata JSON\]\n(.+)$/s) + if (jsonMatch && jsonMatch[1]) { + try { + profileData = JSON.parse(jsonMatch[1].trim()) + } catch (e) { + console.error('Error parsing profile JSON from content:', e) + } + } } } @@ -147,10 +174,16 @@ export function parsePresentationEvent(event: Event): import('@/types/nostr').Au return result } -export function fetchAuthorPresentationFromPool( +export async function fetchAuthorPresentationFromPool( pool: SimplePoolWithSub, pubkey: string ): Promise { + // Check cache first + const cached = await objectCache.getAuthorByPubkey(pubkey) + if (cached) { + return cached + } + const filters = [ { ...buildTagFilter({ @@ -170,12 +203,24 @@ export function fetchAuthorPresentationFromPool( const events: Event[] = [] - const finalize = (value: import('@/types/nostr').AuthorPresentationArticle | null) => { + const finalize = async (value: import('@/types/nostr').AuthorPresentationArticle | null) => { if (resolved) { return } resolved = true sub.unsub() + + // Cache the result if found + if (value && events.length > 0) { + const event = events.find(e => e.id === value.id) || events[0] + if (event) { + const tags = extractTagsFromEvent(event) + if (tags.id) { + await objectCache.set('author', tags.id, event, value, tags.version, tags.hidden) + } + } + } + resolve(value) } @@ -187,29 +232,29 @@ export function fetchAuthorPresentationFromPool( } }) - sub.on('eose', () => { + sub.on('eose', async () => { // Get the latest version from all collected events const latestEvent = getLatestVersion(events) if (latestEvent) { const parsed = parsePresentationEvent(latestEvent) if (parsed) { - finalize(parsed) + await finalize(parsed) return } } - finalize(null) + await finalize(null) }) - setTimeout(() => { 
+ setTimeout(async () => { // Get the latest version from all collected events const latestEvent = getLatestVersion(events) if (latestEvent) { const parsed = parsePresentationEvent(latestEvent) if (parsed) { - finalize(parsed) + await finalize(parsed) return } } - finalize(null) + await finalize(null) }, 5000).unref?.() }) } diff --git a/lib/metadataExtractor.ts b/lib/metadataExtractor.ts index a546f6e..3440ee0 100644 --- a/lib/metadataExtractor.ts +++ b/lib/metadataExtractor.ts @@ -105,7 +105,33 @@ export type ExtractedObject = /** * Extract JSON metadata from note content */ +function extractMetadataJsonFromTag(event: { tags: string[][] }): Record | null { + const jsonTag = event.tags.find((tag) => tag[0] === 'json') + if (jsonTag && jsonTag[1]) { + try { + return JSON.parse(jsonTag[1]) + } catch (e) { + console.error('Error parsing JSON metadata from tag:', e) + return null + } + } + return null +} + function extractMetadataJson(content: string): Record | null { + // Try invisible format first (with zero-width characters) - for backward compatibility + const invisibleJsonMatch = content.match(/[\u200B\u200C]\[Metadata JSON\][\u200B\u200C]\n[\u200B\u200C](.+)[\u200B\u200C]$/s) + if (invisibleJsonMatch && invisibleJsonMatch[1]) { + try { + // Remove zero-width characters from JSON + const cleanedJson = invisibleJsonMatch[1].replace(/[\u200B\u200C\u200D\u200E\u200F]/g, '').trim() + return JSON.parse(cleanedJson) + } catch (e) { + console.error('Error parsing metadata JSON from invisible content:', e) + } + } + + // Fallback to visible format (for backward compatibility) const jsonMatch = content.match(/\[Metadata JSON\]\n(.+)$/s) if (jsonMatch && jsonMatch[1]) { try { @@ -127,8 +153,14 @@ export async function extractAuthorFromEvent(event: Event): Promise string | undefined, encryptedKey: findTag('encrypted_key'), articleId: findTag('article'), reviewerPubkey: findTag('reviewer'), + json: findTag('json'), // JSON metadata stored in tag (for all object types) } } @@ -66,6 +67,7 @@ export function extractTagsFromEvent(event: { tags: string[][] }): { paymentHash?: string | undefined articleId?: string | undefined reviewerPubkey?: string | undefined + json?: string | undefined [key: string]: unknown } { const findTag = (key: string) => event.tags.find((tag) => tag[0] === key)?.[1] diff --git a/lib/objectCache.ts b/lib/objectCache.ts new file mode 100644 index 0000000..e7f7f44 --- /dev/null +++ b/lib/objectCache.ts @@ -0,0 +1,194 @@ +/** + * IndexedDB cache for Nostr objects (authors, series, publications, reviews) + * Objects are indexed by their hash ID for fast retrieval + * One database per object type + */ + +import type { Event } from 'nostr-tools' +import type { AuthorPresentationArticle } from '@/types/nostr' + +export type ObjectType = 'author' | 'series' | 'publication' | 'review' + +interface CachedObject { + hashId: string + event: Event + parsed: unknown // Parsed object (AuthorPresentationArticle, Series, etc.) + version: number + hidden: boolean + createdAt: number + cachedAt: number +} + +const DB_PREFIX = 'nostr_objects_' +const DB_VERSION = 1 + +class ObjectCacheService { + private dbs: Map = new Map() + + private async initDB(objectType: ObjectType): Promise { + if (this.dbs.has(objectType)) { + return this.dbs.get(objectType)! 
    }

    return new Promise<IDBDatabase>((resolve, reject) => {
      if (typeof window === 'undefined' || !window.indexedDB) {
        reject(new Error('IndexedDB is not available'))
        return
      }

      const dbName = `${DB_PREFIX}${objectType}`
      const request = indexedDB.open(dbName, DB_VERSION)

      request.onerror = () => {
        reject(new Error(`Failed to open IndexedDB: ${request.error}`))
      }

      request.onsuccess = () => {
        const db = request.result
        this.dbs.set(objectType, db)
        resolve(db)
      }

      request.onupgradeneeded = (event) => {
        const db = (event.target as IDBOpenDBRequest).result
        if (!db.objectStoreNames.contains('objects')) {
          const store = db.createObjectStore('objects', { keyPath: 'hashId' })
          store.createIndex('version', 'version', { unique: false })
          store.createIndex('hidden', 'hidden', { unique: false })
          store.createIndex('cachedAt', 'cachedAt', { unique: false })
        }
      }
    })
  }

  /**
   * Store an object in cache
   */
  async set(objectType: ObjectType, hashId: string, event: Event, parsed: unknown, version: number, hidden: boolean): Promise<void> {
    try {
      const db = await this.initDB(objectType)
      const transaction = db.transaction(['objects'], 'readwrite')
      const store = transaction.objectStore('objects')

      const cached: CachedObject = {
        hashId,
        event,
        parsed,
        version,
        hidden,
        createdAt: event.created_at,
        cachedAt: Date.now(),
      }

      await new Promise<void>((resolve, reject) => {
        const request = store.put(cached)
        request.onsuccess = () => resolve()
        request.onerror = () => reject(request.error)
      })
    } catch (error) {
      console.error(`Error caching ${objectType} object:`, error)
    }
  }

  /**
   * Get an object from cache by hash ID
   * Returns the latest non-hidden version
   */
  async get(objectType: ObjectType, hashId: string): Promise<unknown | null> {
    try {
      const db = await this.initDB(objectType)
      const transaction = db.transaction(['objects'], 'readonly')
      const store = transaction.objectStore('objects')

      return new Promise((resolve, reject) => {
        // The store's primary key is hashId, so query it directly; the 'version' index is keyed by version only
        const request = store.openCursor(IDBKeyRange.only(hashId))
        const objects: CachedObject[] = []

        request.onsuccess = (event) => {
          const cursor = (event.target as IDBRequest).result
          if (cursor) {
            const obj = cursor.value as CachedObject
            if (obj && obj.hashId === hashId && !obj.hidden) {
              objects.push(obj)
            }
            cursor.continue()
          } else {
            // Sort by version descending and return the latest
            if (objects.length > 0) {
              objects.sort((a, b) => b.version - a.version)
              resolve(objects[0]?.parsed ??
null) + } else { + resolve(null) + } + } + } + + request.onerror = () => reject(request.error) + }) + } catch (error) { + console.error(`Error retrieving ${objectType} object from cache:`, error) + return null + } + } + + /** + * Get an author presentation by pubkey (searches all cached authors) + */ + async getAuthorByPubkey(pubkey: string): Promise { + try { + const db = await this.initDB('author') + const transaction = db.transaction(['objects'], 'readonly') + const store = transaction.objectStore('objects') + + return new Promise((resolve, reject) => { + const request = store.openCursor() + const objects: CachedObject[] = [] + + request.onsuccess = (event) => { + const cursor = (event.target as IDBRequest).result + if (cursor) { + const obj = cursor.value as CachedObject + if (obj && obj.event.pubkey === pubkey && !obj.hidden) { + objects.push(obj) + } + cursor.continue() + } else { + // Sort by version descending and return the latest + if (objects.length > 0) { + objects.sort((a, b) => b.version - a.version) + resolve((objects[0]?.parsed ?? null) as AuthorPresentationArticle | null) + } else { + resolve(null) + } + } + } + + request.onerror = () => reject(request.error) + }) + } catch (error) { + console.error('Error retrieving author from cache by pubkey:', error) + return null + } + } + + /** + * Clear cache for an object type + */ + async clear(objectType: ObjectType): Promise { + try { + const db = await this.initDB(objectType) + const transaction = db.transaction(['objects'], 'readwrite') + const store = transaction.objectStore('objects') + await new Promise((resolve, reject) => { + const request = store.clear() + request.onsuccess = () => resolve() + request.onerror = () => reject(request.error) + }) + } catch (error) { + console.error(`Error clearing ${objectType} cache:`, error) + } + } +} + +export const objectCache = new ObjectCacheService() diff --git a/lib/presentationParsing.ts b/lib/presentationParsing.ts index 239f89d..0c62f72 100644 --- a/lib/presentationParsing.ts +++ b/lib/presentationParsing.ts @@ -2,10 +2,14 @@ import type { Article } from '@/types/nostr' /** * Extract presentation data from article content - * Supports two formats: + * Supports multiple formats: * 1. Old format: "${presentation}\n\n---\n\nDescription du contenu :\n${contentDescription}" - * 2. New format: "Nouveau profil publié sur zapwall.fr\n\n\nPrésentation personnelle : \nDescription de votre contenu : \nAdresse Bitcoin mainnet (pour le sponsoring) : \n\n---\n\n[Metadata JSON]\n" - * The profile JSON is stored in the [Metadata JSON] section of the content, not in tags + * 2. New format (content only): "Nouveau profil publié sur zapwall.fr\n\n\nPrésentation personnelle : \nDescription de votre contenu : \nAdresse Bitcoin mainnet (pour le sponsoring) : " + * 3. New format (with JSON in tag): JSON metadata is stored in the 'json' tag, not in content + * + * Note: For new notes, the JSON metadata is stored in the 'json' tag. + * This function extracts from content for backward compatibility only. + * The main extraction should use extractTagsFromEvent to get json from tags. 
*/ export function extractPresentationData(presentation: Article): { presentation: string @@ -24,21 +28,9 @@ export function extractPresentationData(presentation: Article): { } } - // Try to extract from JSON metadata section - const jsonMatch = content.match(/\[Metadata JSON\]\n(.+)$/s) - if (jsonMatch) { - try { - const profileJson = JSON.parse(jsonMatch[1].trim()) - if (profileJson.presentation && profileJson.contentDescription) { - return { - presentation: profileJson.presentation, - contentDescription: profileJson.contentDescription, - } - } - } catch (e) { - // JSON parsing failed, continue with old format - } - } + // Note: JSON metadata is now stored in tags, not in content + // This function extracts from content for backward compatibility only + // The main extraction should use extractTagsFromEvent to get json from tags // Fallback to old format const separator = '\n\n---\n\nDescription du contenu :\n'
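
Usage sketch (illustrative only, not part of the patch): how a caller might combine the new 'json' tag and objectCache when loading a series by hash id. fetchSeriesEventFromRelays and parseSeriesEvent are hypothetical stand-ins for project code not shown here; the import paths mirror the ones used elsewhere in this patch.

import type { Event } from 'nostr-tools'
import { extractTagsFromEvent } from './nostrTagSystem'
import { objectCache } from './objectCache'

// Hypothetical helpers, assumed for this example only
declare function fetchSeriesEventFromRelays(hashId: string): Promise<Event | null>
declare function parseSeriesEvent(event: Event): unknown | null

export async function loadSeries(hashId: string): Promise<unknown | null> {
  // Serve the latest non-hidden cached version when available
  const cached = await objectCache.get('series', hashId)
  if (cached) {
    return cached
  }

  const event = await fetchSeriesEventFromRelays(hashId)
  if (!event) {
    return null
  }

  // Structured metadata now travels in the 'json' tag rather than in the note content
  const tags = extractTagsFromEvent(event)
  let metadata: Record<string, unknown> | null = null
  if (typeof tags.json === 'string') {
    try {
      metadata = JSON.parse(tags.json)
    } catch {
      metadata = null
    }
  }

  const parsed = parseSeriesEvent(event) ?? metadata
  if (parsed && typeof tags.id === 'string') {
    // Cache for subsequent loads, keyed by the object's hash id
    await objectCache.set('series', tags.id, event, parsed, Number(tags.version ?? 0), Boolean(tags.hidden))
  }
  return parsed
}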