series building

This commit is contained in:
Nicolas Cantu 2026-01-06 00:54:49 +01:00
parent ba0fcbfc96
commit 05382f34ab
9 changed files with 429 additions and 69 deletions

View File

@ -100,6 +100,10 @@ function HomeContent({
const articlesListProps = { articles, allArticles, loading, error, onUnlock, unlockedArticles } const articlesListProps = { articles, allArticles, loading, error, onUnlock, unlockedArticles }
const authorsListProps = { authors, allAuthors, loading, error } const authorsListProps = { authors, allAuthors, loading, error }
// Determine loading state: if showing authors, authors are loaded from articles, so loading is the same
// But we need to check if authors are actually being loaded (when no articles yet)
const authorsLoading = loading && shouldShowAuthors && allAuthors.length === 0
return ( return (
<div className="max-w-4xl mx-auto px-4 py-8"> <div className="max-w-4xl mx-auto px-4 py-8">
<ArticlesHero <ArticlesHero

View File

@ -109,6 +109,26 @@ async function buildPreviewTags(
...(encryptedKey ? { encryptedKey } : {}), ...(encryptedKey ? { encryptedKey } : {}),
}) })
// Build JSON metadata
const publicationJson = JSON.stringify({
type: 'publication',
pubkey: authorPubkey,
title: draft.title,
preview: draft.preview,
category,
seriesId: draft.seriesId,
bannerUrl: draft.bannerUrl,
zapAmount: draft.zapAmount,
invoice: invoice.invoice,
paymentHash: invoice.paymentHash,
id: hashId,
version: 0,
index: 0,
})
// Add JSON metadata as a tag
newTags.push(['json', publicationJson])
// Add any extra tags (for backward compatibility) // Add any extra tags (for backward compatibility)
if (extraTags.length > 0) { if (extraTags.length > 0) {
newTags.push(...extraTags) newTags.push(...extraTags)

View File

@ -98,11 +98,21 @@ async function buildSeriesEvent(
coverUrl: params.coverUrl ?? undefined, coverUrl: params.coverUrl ?? undefined,
}) })
return { // Build JSON metadata
kind: 1, const seriesJson = JSON.stringify({
created_at: Math.floor(Date.now() / 1000), type: 'series',
content: params.preview ?? params.description.substring(0, 200), pubkey: params.authorPubkey,
tags: buildTags({ title: params.title,
description: params.description,
preview: params.preview ?? params.description.substring(0, 200),
coverUrl: params.coverUrl,
category: newCategory,
id: hashId,
version: 0,
index: 0,
})
const tags = buildTags({
type: 'series', type: 'series',
category: newCategory, category: newCategory,
id: hashId, id: hashId,
@ -114,7 +124,16 @@ async function buildSeriesEvent(
description: params.description, description: params.description,
preview: params.preview ?? params.description.substring(0, 200), preview: params.preview ?? params.description.substring(0, 200),
...(params.coverUrl ? { coverUrl: params.coverUrl } : {}), ...(params.coverUrl ? { coverUrl: params.coverUrl } : {}),
}), })
// Add JSON metadata as a tag
tags.push(['json', seriesJson])
return {
kind: 1,
created_at: Math.floor(Date.now() / 1000),
content: params.preview ?? params.description.substring(0, 200),
tags,
} }
} }
@ -172,11 +191,21 @@ async function buildReviewEvent(
title: params.title ?? undefined, title: params.title ?? undefined,
}) })
return { // Build JSON metadata
kind: 1, const reviewJson = JSON.stringify({
created_at: Math.floor(Date.now() / 1000), type: 'review',
pubkey: params.reviewerPubkey,
articleId: params.articleId,
reviewerPubkey: params.reviewerPubkey,
content: params.content, content: params.content,
tags: buildTags({ title: params.title,
category: newCategory,
id: hashId,
version: 0,
index: 0,
})
const tags = buildTags({
type: 'quote', type: 'quote',
category: newCategory, category: newCategory,
id: hashId, id: hashId,
@ -187,7 +216,16 @@ async function buildReviewEvent(
articleId: params.articleId, articleId: params.articleId,
reviewerPubkey: params.reviewerPubkey, reviewerPubkey: params.reviewerPubkey,
...(params.title ? { title: params.title } : {}), ...(params.title ? { title: params.title } : {}),
}), })
// Add JSON metadata as a tag
tags.push(['json', reviewJson])
return {
kind: 1,
created_at: Math.floor(Date.now() / 1000),
content: params.content,
tags,
} }
} }

View File

@ -182,6 +182,18 @@ export class ArticlePublisher {
return buildFailure('Failed to publish presentation article') return buildFailure('Failed to publish presentation article')
} }
// Parse and cache the published presentation immediately
const { parsePresentationEvent } = await import('./articlePublisherHelpers')
const { extractTagsFromEvent } = await import('./nostrTagSystem')
const { objectCache } = await import('./objectCache')
const parsed = parsePresentationEvent(publishedEvent)
if (parsed) {
const tags = extractTagsFromEvent(publishedEvent)
if (tags.id) {
await objectCache.set('author', tags.id, publishedEvent, parsed, tags.version, tags.hidden)
}
}
return { return {
articleId: publishedEvent.id, articleId: publishedEvent.id,
previewEventId: publishedEvent.id, previewEventId: publishedEvent.id,

View File

@ -8,6 +8,7 @@ import { PLATFORM_SERVICE } from './platformConfig'
import { generateAuthorHashId } from './hashIdGenerator' import { generateAuthorHashId } from './hashIdGenerator'
import { generateObjectUrl } from './urlGenerator' import { generateObjectUrl } from './urlGenerator'
import { getLatestVersion } from './versionManager' import { getLatestVersion } from './versionManager'
import { objectCache } from './objectCache'
export async function buildPresentationEvent( export async function buildPresentationEvent(
draft: AuthorPresentationDraft, draft: AuthorPresentationDraft,
@ -41,7 +42,7 @@ export async function buildPresentationEvent(
// Encode pubkey to npub (for metadata JSON) // Encode pubkey to npub (for metadata JSON)
const npub = nip19.npubEncode(authorPubkey) const npub = nip19.npubEncode(authorPubkey)
// Build visible content message // Build visible content message (without metadata JSON)
const visibleContent = [ const visibleContent = [
'Nouveau profil publié sur zapwall.fr', 'Nouveau profil publié sur zapwall.fr',
profileUrl, profileUrl,
@ -51,7 +52,7 @@ export async function buildPresentationEvent(
`Adresse Bitcoin mainnet (pour le sponsoring) : ${draft.mainnetAddress}`, `Adresse Bitcoin mainnet (pour le sponsoring) : ${draft.mainnetAddress}`,
].join('\n') ].join('\n')
// Build profile JSON for metadata (non-visible) // Build profile JSON for metadata (stored in tag, not in content)
const profileJson = JSON.stringify({ const profileJson = JSON.stringify({
authorName, authorName,
npub, npub,
@ -64,12 +65,9 @@ export async function buildPresentationEvent(
url: profileUrl, url: profileUrl,
version, version,
index, index,
}, null, 2) })
// Combine visible content and JSON metadata (JSON in hidden section) // Build tags (profile JSON is in tag, not in content)
const fullContent = `${visibleContent}\n\n---\n\n[Metadata JSON]\n${profileJson}`
// Build tags (profile JSON is in content, not in tags)
const tags = buildTags({ const tags = buildTags({
type: 'author', type: 'author',
category, category,
@ -85,11 +83,14 @@ export async function buildPresentationEvent(
...(draft.pictureUrl ? { pictureUrl: draft.pictureUrl } : {}), ...(draft.pictureUrl ? { pictureUrl: draft.pictureUrl } : {}),
}) })
// Add JSON metadata as a tag (not in visible content)
tags.push(['json', profileJson])
return { return {
kind: 1 as const, kind: 1 as const,
created_at: Math.floor(Date.now() / 1000), created_at: Math.floor(Date.now() / 1000),
tags, tags,
content: fullContent, content: visibleContent,
} }
} }
@ -101,7 +102,7 @@ export function parsePresentationEvent(event: Event): import('@/types/nostr').Au
return null return null
} }
// Try to extract profile JSON from content [Metadata JSON] section // Try to extract profile JSON from tag first (new format)
let profileData: { let profileData: {
presentation?: string presentation?: string
contentDescription?: string contentDescription?: string
@ -109,6 +110,30 @@ export function parsePresentationEvent(event: Event): import('@/types/nostr').Au
pictureUrl?: string pictureUrl?: string
} | null = null } | null = null
if (tags.json) {
try {
profileData = JSON.parse(tags.json)
} catch (e) {
console.error('Error parsing JSON from tag:', e)
}
}
// Fallback to content format (for backward compatibility with old notes)
if (!profileData) {
// Try invisible format (with zero-width characters)
const invisibleJsonMatch = event.content.match(/[\u200B\u200C]\[Metadata JSON\][\u200B\u200C]\n[\u200B\u200C](.+)[\u200B\u200C]$/s)
if (invisibleJsonMatch && invisibleJsonMatch[1]) {
try {
// Remove zero-width characters from JSON
const cleanedJson = invisibleJsonMatch[1].replace(/[\u200B\u200C\u200D\u200E\u200F]/g, '').trim()
profileData = JSON.parse(cleanedJson)
} catch (e) {
console.error('Error parsing profile JSON from invisible content:', e)
}
}
// Fallback to visible format in content
if (!profileData) {
const jsonMatch = event.content.match(/\[Metadata JSON\]\n(.+)$/s) const jsonMatch = event.content.match(/\[Metadata JSON\]\n(.+)$/s)
if (jsonMatch && jsonMatch[1]) { if (jsonMatch && jsonMatch[1]) {
try { try {
@ -117,6 +142,8 @@ export function parsePresentationEvent(event: Event): import('@/types/nostr').Au
console.error('Error parsing profile JSON from content:', e) console.error('Error parsing profile JSON from content:', e)
} }
} }
}
}
// Map tag category to article category // Map tag category to article category
const articleCategory = tags.category === 'sciencefiction' ? 'science-fiction' : tags.category === 'research' ? 'scientific-research' : undefined const articleCategory = tags.category === 'sciencefiction' ? 'science-fiction' : tags.category === 'research' ? 'scientific-research' : undefined
@ -147,10 +174,16 @@ export function parsePresentationEvent(event: Event): import('@/types/nostr').Au
return result return result
} }
export function fetchAuthorPresentationFromPool( export async function fetchAuthorPresentationFromPool(
pool: SimplePoolWithSub, pool: SimplePoolWithSub,
pubkey: string pubkey: string
): Promise<import('@/types/nostr').AuthorPresentationArticle | null> { ): Promise<import('@/types/nostr').AuthorPresentationArticle | null> {
// Check cache first
const cached = await objectCache.getAuthorByPubkey(pubkey)
if (cached) {
return cached
}
const filters = [ const filters = [
{ {
...buildTagFilter({ ...buildTagFilter({
@ -170,12 +203,24 @@ export function fetchAuthorPresentationFromPool(
const events: Event[] = [] const events: Event[] = []
const finalize = (value: import('@/types/nostr').AuthorPresentationArticle | null) => { const finalize = async (value: import('@/types/nostr').AuthorPresentationArticle | null) => {
if (resolved) { if (resolved) {
return return
} }
resolved = true resolved = true
sub.unsub() sub.unsub()
// Cache the result if found
if (value && events.length > 0) {
const event = events.find(e => e.id === value.id) || events[0]
if (event) {
const tags = extractTagsFromEvent(event)
if (tags.id) {
await objectCache.set('author', tags.id, event, value, tags.version, tags.hidden)
}
}
}
resolve(value) resolve(value)
} }
@ -187,29 +232,29 @@ export function fetchAuthorPresentationFromPool(
} }
}) })
sub.on('eose', () => { sub.on('eose', async () => {
// Get the latest version from all collected events // Get the latest version from all collected events
const latestEvent = getLatestVersion(events) const latestEvent = getLatestVersion(events)
if (latestEvent) { if (latestEvent) {
const parsed = parsePresentationEvent(latestEvent) const parsed = parsePresentationEvent(latestEvent)
if (parsed) { if (parsed) {
finalize(parsed) await finalize(parsed)
return return
} }
} }
finalize(null) await finalize(null)
}) })
setTimeout(() => { setTimeout(async () => {
// Get the latest version from all collected events // Get the latest version from all collected events
const latestEvent = getLatestVersion(events) const latestEvent = getLatestVersion(events)
if (latestEvent) { if (latestEvent) {
const parsed = parsePresentationEvent(latestEvent) const parsed = parsePresentationEvent(latestEvent)
if (parsed) { if (parsed) {
finalize(parsed) await finalize(parsed)
return return
} }
} }
finalize(null) await finalize(null)
}, 5000).unref?.() }, 5000).unref?.()
}) })
} }

View File

@ -105,7 +105,33 @@ export type ExtractedObject =
/** /**
* Extract JSON metadata from note content * Extract JSON metadata from note content
*/ */
/**
 * Extract JSON metadata from the 'json' tag of a Nostr event (new format).
 *
 * @param event - Event-like object exposing its tag array (tag = [name, value, ...]).
 * @returns The parsed metadata object, or null when the 'json' tag is absent,
 *          holds invalid JSON, or holds JSON that is not a plain object
 *          (e.g. a number, string, boolean, or array).
 */
function extractMetadataJsonFromTag(event: { tags: string[][] }): Record<string, unknown> | null {
  const jsonTag = event.tags.find((tag) => tag[0] === 'json')
  if (!jsonTag || !jsonTag[1]) {
    return null
  }
  try {
    const parsed: unknown = JSON.parse(jsonTag[1])
    // JSON.parse can yield any JSON value; only a plain object matches the
    // declared Record<string, unknown> return type. Returning a primitive or
    // array here would silently poison downstream `metadata.type` checks.
    if (parsed !== null && typeof parsed === 'object' && !Array.isArray(parsed)) {
      return parsed as Record<string, unknown>
    }
    return null
  } catch (e) {
    console.error('Error parsing JSON metadata from tag:', e)
    return null
  }
}
function extractMetadataJson(content: string): Record<string, unknown> | null { function extractMetadataJson(content: string): Record<string, unknown> | null {
// Try invisible format first (with zero-width characters) - for backward compatibility
const invisibleJsonMatch = content.match(/[\u200B\u200C]\[Metadata JSON\][\u200B\u200C]\n[\u200B\u200C](.+)[\u200B\u200C]$/s)
if (invisibleJsonMatch && invisibleJsonMatch[1]) {
try {
// Remove zero-width characters from JSON
const cleanedJson = invisibleJsonMatch[1].replace(/[\u200B\u200C\u200D\u200E\u200F]/g, '').trim()
return JSON.parse(cleanedJson)
} catch (e) {
console.error('Error parsing metadata JSON from invisible content:', e)
}
}
// Fallback to visible format (for backward compatibility)
const jsonMatch = content.match(/\[Metadata JSON\]\n(.+)$/s) const jsonMatch = content.match(/\[Metadata JSON\]\n(.+)$/s)
if (jsonMatch && jsonMatch[1]) { if (jsonMatch && jsonMatch[1]) {
try { try {
@ -127,8 +153,14 @@ export async function extractAuthorFromEvent(event: Event): Promise<ExtractedAut
return null return null
} }
// Try to extract from metadata JSON first // Try to extract from tag first (new format)
const metadata = extractMetadataJson(event.content) let metadata = extractMetadataJsonFromTag(event)
// Fallback to content format (for backward compatibility)
if (!metadata) {
metadata = extractMetadataJson(event.content)
}
if (metadata && metadata.type === 'author') { if (metadata && metadata.type === 'author') {
const authorData = { const authorData = {
pubkey: (metadata.pubkey as string) ?? event.pubkey, pubkey: (metadata.pubkey as string) ?? event.pubkey,
@ -165,7 +197,14 @@ export async function extractSeriesFromEvent(event: Event): Promise<ExtractedSer
return null return null
} }
const metadata = extractMetadataJson(event.content) // Try to extract from tag first (new format)
let metadata = extractMetadataJsonFromTag(event)
// Fallback to content format (for backward compatibility)
if (!metadata) {
metadata = extractMetadataJson(event.content)
}
if (metadata && metadata.type === 'series') { if (metadata && metadata.type === 'series') {
const seriesData = { const seriesData = {
pubkey: (metadata.pubkey as string) ?? event.pubkey, pubkey: (metadata.pubkey as string) ?? event.pubkey,
@ -219,7 +258,14 @@ export async function extractPublicationFromEvent(event: Event): Promise<Extract
return null return null
} }
const metadata = extractMetadataJson(event.content) // Try to extract from tag first (new format)
let metadata = extractMetadataJsonFromTag(event)
// Fallback to content format (for backward compatibility)
if (!metadata) {
metadata = extractMetadataJson(event.content)
}
if (metadata && metadata.type === 'publication') { if (metadata && metadata.type === 'publication') {
const publicationData = { const publicationData = {
pubkey: (metadata.pubkey as string) ?? event.pubkey, pubkey: (metadata.pubkey as string) ?? event.pubkey,
@ -275,7 +321,14 @@ export async function extractReviewFromEvent(event: Event): Promise<ExtractedRev
return null return null
} }
const metadata = extractMetadataJson(event.content) // Try to extract from tag first (new format)
let metadata = extractMetadataJsonFromTag(event)
// Fallback to content format (for backward compatibility)
if (!metadata) {
metadata = extractMetadataJson(event.content)
}
if (metadata && metadata.type === 'review') { if (metadata && metadata.type === 'review') {
const reviewData = { const reviewData = {
pubkey: (metadata.pubkey as string) ?? event.pubkey, pubkey: (metadata.pubkey as string) ?? event.pubkey,

View File

@ -41,6 +41,7 @@ export function extractCommonTags(findTag: (key: string) => string | undefined,
encryptedKey: findTag('encrypted_key'), encryptedKey: findTag('encrypted_key'),
articleId: findTag('article'), articleId: findTag('article'),
reviewerPubkey: findTag('reviewer'), reviewerPubkey: findTag('reviewer'),
json: findTag('json'), // JSON metadata stored in tag (for all object types)
} }
} }
@ -66,6 +67,7 @@ export function extractTagsFromEvent(event: { tags: string[][] }): {
paymentHash?: string | undefined paymentHash?: string | undefined
articleId?: string | undefined articleId?: string | undefined
reviewerPubkey?: string | undefined reviewerPubkey?: string | undefined
json?: string | undefined
[key: string]: unknown [key: string]: unknown
} { } {
const findTag = (key: string) => event.tags.find((tag) => tag[0] === key)?.[1] const findTag = (key: string) => event.tags.find((tag) => tag[0] === key)?.[1]

194
lib/objectCache.ts Normal file
View File

@ -0,0 +1,194 @@
/**
* IndexedDB cache for Nostr objects (authors, series, publications, reviews)
* Objects are indexed by their hash ID for fast retrieval
* One database per object type
*/
import type { Event } from 'nostr-tools'
import type { AuthorPresentationArticle } from '@/types/nostr'
export type ObjectType = 'author' | 'series' | 'publication' | 'review'

// Shape of a record stored in the per-type 'objects' store (keyPath: 'hashId').
interface CachedObject {
  hashId: string
  event: Event
  parsed: unknown // Parsed object (AuthorPresentationArticle, Series, etc.)
  version: number
  hidden: boolean
  createdAt: number // event.created_at (Nostr timestamp, seconds)
  cachedAt: number // Date.now() at caching time (milliseconds)
}

const DB_PREFIX = 'nostr_objects_'
const DB_VERSION = 1

class ObjectCacheService {
  // One open connection per object type, created lazily on first use.
  private dbs: Map<ObjectType, IDBDatabase> = new Map()

  /**
   * Open (or reuse) the IndexedDB database for the given object type.
   * Rejects when IndexedDB is unavailable (e.g. server-side rendering);
   * every public method catches that and degrades to a no-op / null.
   * NOTE(review): two concurrent first calls for the same type may open the
   * database twice; harmless, but could be deduplicated with a pending-promise map.
   */
  private async initDB(objectType: ObjectType): Promise<IDBDatabase> {
    const existing = this.dbs.get(objectType)
    if (existing) {
      return existing
    }
    return new Promise((resolve, reject) => {
      if (typeof window === 'undefined' || !window.indexedDB) {
        reject(new Error('IndexedDB is not available'))
        return
      }
      const dbName = `${DB_PREFIX}${objectType}`
      const request = indexedDB.open(dbName, DB_VERSION)
      request.onerror = () => {
        reject(new Error(`Failed to open IndexedDB: ${request.error}`))
      }
      request.onsuccess = () => {
        const db = request.result
        this.dbs.set(objectType, db)
        resolve(db)
      }
      request.onupgradeneeded = (event) => {
        const db = (event.target as IDBOpenDBRequest).result
        if (!db.objectStoreNames.contains('objects')) {
          const store = db.createObjectStore('objects', { keyPath: 'hashId' })
          store.createIndex('version', 'version', { unique: false })
          store.createIndex('hidden', 'hidden', { unique: false })
          store.createIndex('cachedAt', 'cachedAt', { unique: false })
        }
      }
    })
  }

  /**
   * Store an object in cache. Because the store's keyPath is 'hashId',
   * this overwrites any previously cached record with the same hash ID.
   * Errors are logged and swallowed: caching is best-effort by design.
   */
  async set(objectType: ObjectType, hashId: string, event: Event, parsed: unknown, version: number, hidden: boolean): Promise<void> {
    try {
      const db = await this.initDB(objectType)
      const store = db.transaction(['objects'], 'readwrite').objectStore('objects')
      const cached: CachedObject = {
        hashId,
        event,
        parsed,
        version,
        hidden,
        createdAt: event.created_at,
        cachedAt: Date.now(),
      }
      await new Promise<void>((resolve, reject) => {
        const request = store.put(cached)
        request.onsuccess = () => resolve()
        request.onerror = () => reject(request.error)
      })
    } catch (error) {
      console.error(`Error caching ${objectType} object:`, error)
    }
  }

  /**
   * Get an object from cache by hash ID.
   * Returns the parsed object, or null when absent or flagged hidden.
   *
   * Bug fixed: the previous implementation opened a cursor on the
   * single-field 'version' index with IDBKeyRange.bound([hashId, 0],
   * [hashId, Infinity]). In IndexedDB key ordering, array keys never fall
   * within a range over numeric index keys, so the cursor matched nothing
   * and get() always resolved null. Since keyPath is 'hashId' there is at
   * most one record per hash ID anyway (put() overwrites), so a direct key
   * lookup is both correct and faster.
   */
  async get(objectType: ObjectType, hashId: string): Promise<unknown | null> {
    try {
      const db = await this.initDB(objectType)
      const store = db.transaction(['objects'], 'readonly').objectStore('objects')
      return new Promise((resolve, reject) => {
        const request = store.get(hashId)
        request.onsuccess = () => {
          const obj = request.result as CachedObject | undefined
          resolve(obj && !obj.hidden ? obj.parsed : null)
        }
        request.onerror = () => reject(request.error)
      })
    } catch (error) {
      console.error(`Error retrieving ${objectType} object from cache:`, error)
      return null
    }
  }

  /**
   * Get an author presentation by pubkey by scanning all cached author
   * records (several hash IDs may belong to the same pubkey).
   * Returns the highest-version non-hidden entry, or null when none exists.
   */
  async getAuthorByPubkey(pubkey: string): Promise<AuthorPresentationArticle | null> {
    try {
      const db = await this.initDB('author')
      const store = db.transaction(['objects'], 'readonly').objectStore('objects')
      return new Promise((resolve, reject) => {
        const request = store.openCursor()
        let best: CachedObject | null = null
        request.onsuccess = (event) => {
          const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result
          if (cursor) {
            const obj = cursor.value as CachedObject
            // Track the highest version seen instead of collecting + sorting.
            if (obj && obj.event.pubkey === pubkey && !obj.hidden && (best === null || obj.version > best.version)) {
              best = obj
            }
            cursor.continue()
          } else {
            resolve((best ? best.parsed : null) as AuthorPresentationArticle | null)
          }
        }
        request.onerror = () => reject(request.error)
      })
    } catch (error) {
      console.error('Error retrieving author from cache by pubkey:', error)
      return null
    }
  }

  /**
   * Clear all cached objects for an object type.
   * Errors are logged and swallowed (best-effort).
   */
  async clear(objectType: ObjectType): Promise<void> {
    try {
      const db = await this.initDB(objectType)
      const store = db.transaction(['objects'], 'readwrite').objectStore('objects')
      await new Promise<void>((resolve, reject) => {
        const request = store.clear()
        request.onsuccess = () => resolve()
        request.onerror = () => reject(request.error)
      })
    } catch (error) {
      console.error(`Error clearing ${objectType} cache:`, error)
    }
  }
}

export const objectCache = new ObjectCacheService()

View File

@ -2,10 +2,14 @@ import type { Article } from '@/types/nostr'
/** /**
* Extract presentation data from article content * Extract presentation data from article content
* Supports two formats: * Supports multiple formats:
* 1. Old format: "${presentation}\n\n---\n\nDescription du contenu :\n${contentDescription}" * 1. Old format: "${presentation}\n\n---\n\nDescription du contenu :\n${contentDescription}"
* 2. New format: "Nouveau profil publié sur zapwall.fr\n<url>\n<photo>\nPrésentation personnelle : <presentation>\nDescription de votre contenu : <description>\nAdresse Bitcoin mainnet (pour le sponsoring) : <adresse>\n\n---\n\n[Metadata JSON]\n<json>" * 2. New format (content only): "Nouveau profil publié sur zapwall.fr\n<url>\n<photo>\nPrésentation personnelle : <presentation>\nDescription de votre contenu : <description>\nAdresse Bitcoin mainnet (pour le sponsoring) : <adresse>"
* The profile JSON is stored in the [Metadata JSON] section of the content, not in tags * 3. New format (with JSON in tag): JSON metadata is stored in the 'json' tag, not in content
*
* Note: For new notes, the JSON metadata is stored in the 'json' tag.
* This function extracts from content for backward compatibility only.
* The main extraction should use extractTagsFromEvent to get json from tags.
*/ */
export function extractPresentationData(presentation: Article): { export function extractPresentationData(presentation: Article): {
presentation: string presentation: string
@ -24,21 +28,9 @@ export function extractPresentationData(presentation: Article): {
} }
} }
// Try to extract from JSON metadata section // Note: JSON metadata is now stored in tags, not in content
const jsonMatch = content.match(/\[Metadata JSON\]\n(.+)$/s) // This function extracts from content for backward compatibility only
if (jsonMatch) { // The main extraction should use extractTagsFromEvent to get json from tags
try {
const profileJson = JSON.parse(jsonMatch[1].trim())
if (profileJson.presentation && profileJson.contentDescription) {
return {
presentation: profileJson.presentation,
contentDescription: profileJson.contentDescription,
}
}
} catch (e) {
// JSON parsing failed, continue with old format
}
}
// Fallback to old format // Fallback to old format
const separator = '\n\n---\n\nDescription du contenu :\n' const separator = '\n\n---\n\nDescription du contenu :\n'