From cdd923e981cddec9b87b5964eb7772c65a5356b2 Mon Sep 17 00:00:00 2001 From: Nicolas Cantu Date: Tue, 6 Jan 2026 20:59:59 +0100 Subject: [PATCH] lint fix wip --- .cursor/rules/quality.mdc | 2 +- components/ArticleEditor.tsx | 4 +- components/ArticlePages.tsx | 51 +++- components/ArticlePreview.tsx | 2 +- components/ArticleReviews.tsx | 30 +- components/CreateSeriesModal.tsx | 8 +- components/GlobalSyncProgressBar.tsx | 78 +++++ components/ImageUploadField.tsx | 20 +- components/RelayManager.tsx | 398 ++++++++++++++++++++++++++ components/ReviewForm.tsx | 8 +- components/ReviewTipForm.tsx | 8 +- components/SponsoringForm.tsx | 8 +- components/SyncProgressBar.tsx | 14 +- docs/author-funds-specification.md | 49 ++++ docs/nostr-event-order.md | 82 ++++++ docs/nostr-event-uniqueness.md | 57 ++++ docs/payment-linking-system.md | 106 +++++++ lib/articlePublisher.ts | 2 +- lib/authorQueries.ts | 10 +- lib/configStorage.ts | 19 +- lib/configStorageTypes.ts | 30 +- lib/nostr.ts | 56 +++- lib/objectCache.ts | 30 +- lib/platformTracking.ts | 34 ++- lib/purchaseQueries.ts | 54 ++-- lib/relayRotation.ts | 116 ++++++++ lib/relaySessionManager.ts | 105 +++++++ lib/reviewTipQueries.ts | 54 ++-- lib/reviews.ts | 12 +- lib/seriesQueries.ts | 12 +- lib/sponsoringQueries.ts | 36 +-- lib/sponsoringTracking.ts | 33 ++- lib/syncProgressManager.ts | 33 +++ lib/userContentSync.ts | 411 +++++++++++++++++++++++---- pages/_app.tsx | 18 +- pages/api/nip95-upload.ts | 24 +- pages/series/[id].tsx | 23 ++ pages/series/[id]/publish.tsx | 180 ++++++++++++ pages/settings.tsx | 2 + public/locales/en.txt | 34 +++ public/locales/fr.txt | 38 ++- 41 files changed, 2056 insertions(+), 235 deletions(-) create mode 100644 components/GlobalSyncProgressBar.tsx create mode 100644 components/RelayManager.tsx create mode 100644 docs/author-funds-specification.md create mode 100644 docs/nostr-event-order.md create mode 100644 docs/nostr-event-uniqueness.md create mode 100644 docs/payment-linking-system.md create mode 100644 lib/relayRotation.ts create mode 100644 lib/relaySessionManager.ts create mode 100644 lib/syncProgressManager.ts create mode 100644 pages/series/[id]/publish.tsx diff --git a/.cursor/rules/quality.mdc b/.cursor/rules/quality.mdc index d651557..8a91dd8 100644 --- a/.cursor/rules/quality.mdc +++ b/.cursor/rules/quality.mdc @@ -666,4 +666,4 @@ Dans tous les cas, aucun changement ne doit rester non commité plus de quelques * **Commits avant PR** : Tous les commits doivent être faits avant la création d'une Pull Request * **Commits dans les PRs** : Les commits dans une PR doivent être organisés et logiques * **Squash si nécessaire** : Les commits peuvent être squashés dans une PR si cela améliore la lisibilité, mais chaque commit individuel doit rester valide -* **Historique propre** : Maintenir un historique Git propre et lisible pour les contributeurs externes \ No newline at end of file +* **Historique propre** : Maintenir un historique Git propre et lisible pour les contributeurs externe diff --git a/components/ArticleEditor.tsx b/components/ArticleEditor.tsx index 370ee55..9ebb2aa 100644 --- a/components/ArticleEditor.tsx +++ b/components/ArticleEditor.tsx @@ -9,6 +9,7 @@ interface ArticleEditorProps { onCancel?: () => void seriesOptions?: { id: string; title: string }[] onSelectSeries?: ((seriesId: string | undefined) => void) | undefined + defaultSeriesId?: string } @@ -21,7 +22,7 @@ function SuccessMessage(): React.ReactElement { ) } -export function ArticleEditor({ onPublishSuccess, onCancel, seriesOptions, 
onSelectSeries }: ArticleEditorProps): React.ReactElement { +export function ArticleEditor({ onPublishSuccess, onCancel, seriesOptions, onSelectSeries, defaultSeriesId }: ArticleEditorProps): React.ReactElement { const { connected, pubkey, connect } = useNostrAuth() const { loading, error, success, publishArticle } = useArticlePublishing(pubkey ?? null) const [draft, setDraft] = useState({ @@ -30,6 +31,7 @@ export function ArticleEditor({ onPublishSuccess, onCancel, seriesOptions, onSel content: '', zapAmount: 800, media: [], + ...(defaultSeriesId ? { seriesId: defaultSeriesId } : {}), }) const submit = buildSubmitHandler(publishArticle, draft, onPublishSuccess, connect, connected) diff --git a/components/ArticlePages.tsx b/components/ArticlePages.tsx index 50ca435..8456c74 100644 --- a/components/ArticlePages.tsx +++ b/components/ArticlePages.tsx @@ -1,15 +1,64 @@ import type { Page } from '@/types/nostr' import { t } from '@/lib/i18n' +import { useNostrAuth } from '@/hooks/useNostrAuth' +import { useEffect, useState } from 'react' +import { objectCache } from '@/lib/objectCache' interface ArticlePagesProps { pages: Page[] + articleId: string } -export function ArticlePages({ pages }: ArticlePagesProps): React.ReactElement | null { +export function ArticlePages({ pages, articleId }: ArticlePagesProps): React.ReactElement | null { + const { pubkey } = useNostrAuth() + const [hasPurchased, setHasPurchased] = useState(false) + + useEffect(() => { + const checkPurchase = async (): Promise => { + if (!pubkey || !articleId) { + setHasPurchased(false) + return + } + + try { + // Check if user has purchased this article from cache + const purchases = await objectCache.getAll('purchase') + const userPurchases = purchases.filter((p) => { + if (typeof p !== 'object' || p === null) { + return false + } + const purchase = p as { payerPubkey?: string; articleId?: string } + return purchase.payerPubkey === pubkey && purchase.articleId === articleId + }) + + setHasPurchased(userPurchases.length > 0) + } catch (error) { + console.error('Error checking purchase status:', error) + setHasPurchased(false) + } + } + + void checkPurchase() + }, [pubkey, articleId]) + if (!pages || pages.length === 0) { return null } + // If user hasn't purchased, show locked message + if (!hasPurchased) { + return ( +
+

{t('article.pages.title')}

+
+

{t('article.pages.locked.title')}

+

{t('article.pages.locked.message', { count: pages.length })}

+
+
+ ) + } + + // User has purchased, show all pages return (

{t('article.pages.title')}

diff --git a/components/ArticlePreview.tsx b/components/ArticlePreview.tsx index 77bdfbb..ee06fdd 100644 --- a/components/ArticlePreview.tsx +++ b/components/ArticlePreview.tsx @@ -13,7 +13,7 @@ export function ArticlePreview({ article, loading, onUnlock }: ArticlePreviewPro

{article.preview}

{article.content}

- {article.pages && article.pages.length > 0 && <ArticlePages pages={article.pages} />} + {article.pages && article.pages.length > 0 && <ArticlePages pages={article.pages} articleId={article.id} />}
) } diff --git a/components/ArticleReviews.tsx b/components/ArticleReviews.tsx index 1daca3c..b1fd525 100644 --- a/components/ArticleReviews.tsx +++ b/components/ArticleReviews.tsx @@ -1,4 +1,4 @@ -import { useEffect, useState } from 'react' +import { useCallback, useEffect, useState } from 'react' import type { Review, Article } from '@/types/nostr' import { getReviewsForArticle } from '@/lib/reviews' import { getReviewTipsForArticle } from '@/lib/reviewAggregation' @@ -19,7 +19,7 @@ export function ArticleReviews({ article, authorPubkey }: ArticleReviewsProps): const [showReviewForm, setShowReviewForm] = useState(false) const [selectedReviewForTip, setSelectedReviewForTip] = useState(null) - const loadReviews = async (): Promise => { + const loadReviews = useCallback(async (): Promise => { setLoading(true) setError(null) try { @@ -29,16 +29,16 @@ export function ArticleReviews({ article, authorPubkey }: ArticleReviewsProps): ]) setReviews(list) setTips(tipsTotal) - } catch (e) { - setError(e instanceof Error ? e.message : 'Erreur lors du chargement des critiques') + } catch (loadError) { + setError(loadError instanceof Error ? loadError.message : 'Erreur lors du chargement des critiques') } finally { setLoading(false) } - } + }, [article.id, authorPubkey]) useEffect(() => { void loadReviews() - }, [article.id, authorPubkey]) + }, [loadReviews]) return (
@@ -65,10 +65,15 @@ export function ArticleReviews({ article, authorPubkey }: ArticleReviewsProps): {!loading && !error && { setSelectedReviewForTip(reviewId) }} />} - {selectedReviewForTip && ( - r.id === selectedReviewForTip)!} - article={article} + {selectedReviewForTip && (() => { + const review = reviews.find((r) => r.id === selectedReviewForTip) + if (!review) { + return null + } + return ( + { setSelectedReviewForTip(null) void loadReviews() @@ -76,8 +81,9 @@ export function ArticleReviews({ article, authorPubkey }: ArticleReviewsProps): onCancel={() => { setSelectedReviewForTip(null) }} - /> - )} + /> + ) + })()}
) } diff --git a/components/CreateSeriesModal.tsx b/components/CreateSeriesModal.tsx index 940d290..3e20394 100644 --- a/components/CreateSeriesModal.tsx +++ b/components/CreateSeriesModal.tsx @@ -79,8 +79,8 @@ export function CreateSeriesModal({ isOpen, onClose, onSuccess, authorPubkey }: }) onSuccess() onClose() - } catch (e) { - setError(e instanceof Error ? e.message : t('series.create.error.publishFailed')) + } catch (submitError) { + setError(submitError instanceof Error ? submitError.message : t('series.create.error.publishFailed')) } finally { setLoading(false) } @@ -121,7 +121,9 @@ export function CreateSeriesModal({ isOpen, onClose, onSuccess, authorPubkey }:
)} -
+ { + void handleSubmit(e) + }} className="space-y-4">
{showUnlockModal && ( { + void handleUnlockSuccess() + }} onClose={() => { setShowUnlockModal(false) }} diff --git a/components/RelayManager.tsx b/components/RelayManager.tsx new file mode 100644 index 0000000..6cf7231 --- /dev/null +++ b/components/RelayManager.tsx @@ -0,0 +1,398 @@ +import { useState, useEffect } from 'react' +import { configStorage } from '@/lib/configStorage' +import type { RelayConfig } from '@/lib/configStorageTypes' +import { t } from '@/lib/i18n' +import { userConfirm } from '@/lib/userConfirm' + +interface RelayManagerProps { + onConfigChange?: () => void +} + +export function RelayManager({ onConfigChange }: RelayManagerProps): React.ReactElement { + const [relays, setRelays] = useState([]) + const [loading, setLoading] = useState(true) + const [error, setError] = useState(null) + const [editingId, setEditingId] = useState(null) + const [newUrl, setNewUrl] = useState('') + const [showAddForm, setShowAddForm] = useState(false) + const [draggedId, setDraggedId] = useState(null) + const [dragOverId, setDragOverId] = useState(null) + + useEffect(() => { + void loadRelays() + }, []) + + async function loadRelays(): Promise { + try { + setLoading(true) + setError(null) + const config = await configStorage.getConfig() + setRelays(config.relays.sort((a, b) => a.priority - b.priority)) + } catch (e) { + const errorMessage = e instanceof Error ? e.message : t('settings.relay.error.loadFailed') + setError(errorMessage) + console.error('Error loading relays:', e) + } finally { + setLoading(false) + } + } + + async function handleToggleEnabled(id: string, enabled: boolean): Promise { + try { + await configStorage.updateRelay(id, { enabled }) + await loadRelays() + onConfigChange?.() + } catch (e) { + const errorMessage = e instanceof Error ? e.message : t('settings.relay.error.updateFailed') + setError(errorMessage) + console.error('Error updating relay:', e) + } + } + + async function handleUpdatePriorities(newOrder: RelayConfig[]): Promise { + try { + const updatePromises = newOrder.map((relay, index) => { + const newPriority = index + 1 + if (relay.priority !== newPriority) { + return configStorage.updateRelay(relay.id, { priority: newPriority }) + } + return Promise.resolve() + }) + + await Promise.all(updatePromises) + await loadRelays() + onConfigChange?.() + } catch (e) { + const errorMessage = e instanceof Error ? 
e.message : t('settings.relay.error.priorityFailed') + setError(errorMessage) + console.error('Error updating priorities:', e) + } + } + + function handleDragStart(e: React.DragEvent, id: string): void { + setDraggedId(id) + e.dataTransfer.effectAllowed = 'move' + e.dataTransfer.setData('text/plain', id) + } + + function handleDragOver(e: React.DragEvent, id: string): void { + e.preventDefault() + e.dataTransfer.dropEffect = 'move' + setDragOverId(id) + } + + function handleDragLeave(): void { + setDragOverId(null) + } + + function handleDrop(e: React.DragEvent, targetId: string): void { + e.preventDefault() + setDragOverId(null) + + if (!draggedId || draggedId === targetId) { + setDraggedId(null) + return + } + + const draggedIndex = relays.findIndex((relay) => relay.id === draggedId) + const targetIndex = relays.findIndex((relay) => relay.id === targetId) + + if (draggedIndex === -1 || targetIndex === -1) { + setDraggedId(null) + return + } + + const newRelays = [...relays] + const removed = newRelays[draggedIndex] + if (!removed) { + setDraggedId(null) + return + } + newRelays.splice(draggedIndex, 1) + newRelays.splice(targetIndex, 0, removed) + + setRelays(newRelays) + setDraggedId(null) + + void handleUpdatePriorities(newRelays) + } + + function handleDragEnd(): void { + setDraggedId(null) + } + + function DragHandle(): React.ReactElement { + return ( +
+ + + + + + + + + + + +
+ ) + } + + async function handleUpdateUrl(id: string, url: string): Promise { + try { + await configStorage.updateRelay(id, { url }) + await loadRelays() + setEditingId(null) + onConfigChange?.() + } catch (e) { + const errorMessage = e instanceof Error ? e.message : t('settings.relay.error.urlFailed') + setError(errorMessage) + console.error('Error updating URL:', e) + } + } + + async function handleAddRelay(): Promise { + if (!newUrl.trim()) { + setError(t('settings.relay.error.urlRequired')) + return + } + + try { + // Normalize URL (add wss:// if missing) + let normalizedUrl = newUrl.trim() + if (!normalizedUrl.startsWith('ws://') && !normalizedUrl.startsWith('wss://')) { + normalizedUrl = `wss://${normalizedUrl}` + } + + // Validate URL format + new URL(normalizedUrl) + await configStorage.addRelay(normalizedUrl, true) + setNewUrl('') + setShowAddForm(false) + await loadRelays() + onConfigChange?.() + } catch (e) { + if (e instanceof TypeError && e.message.includes('Invalid URL')) { + setError(t('settings.relay.error.invalidUrl')) + } else { + const errorMessage = e instanceof Error ? e.message : t('settings.relay.error.addFailed') + setError(errorMessage) + } + console.error('Error adding relay:', e) + } + } + + async function handleRemoveRelay(id: string): Promise { + if (!userConfirm(t('settings.relay.remove.confirm'))) { + return + } + + try { + await configStorage.removeRelay(id) + await loadRelays() + onConfigChange?.() + } catch (e) { + const errorMessage = e instanceof Error ? e.message : t('settings.relay.error.removeFailed') + setError(errorMessage) + console.error('Error removing relay:', e) + } + } + + if (loading) { + return ( +
+
{t('settings.relay.loading')}
+
+ ) + } + + return ( +
+ {error && ( +
+ {error} + +
+ )} + +
+

{t('settings.relay.title')}

+ +
+ + {showAddForm && ( +
+
+ + { + setNewUrl(e.target.value) + }} + placeholder={t('settings.relay.add.placeholder')} + className="w-full px-4 py-2 bg-cyber-darker border border-cyber-accent/30 rounded text-cyber-light focus:border-neon-cyan focus:outline-none" + /> +
+
+ + +
+
+ )} + +
+ {relays.length === 0 ? ( +
+ {t('settings.relay.empty')} +
+ ) : ( + relays.map((relay, index) => ( +
{ + handleDragOver(e, relay.id) + }} + onDragLeave={handleDragLeave} + onDrop={(e) => { + handleDrop(e, relay.id) + }} + className={`bg-cyber-dark border rounded p-4 space-y-3 transition-all ${ + draggedId === relay.id + ? 'opacity-50 border-neon-cyan' + : dragOverId === relay.id + ? 'border-neon-green shadow-lg' + : 'border-neon-cyan/30' + }`} + > +
+
+
{ + handleDragStart(e, relay.id) + }} + onDragEnd={handleDragEnd} + onMouseDown={(e) => { + e.stopPropagation() + }} + > + +
+
+ {editingId === relay.id ? ( +
+ { + if (e.target.value !== relay.url) { + void handleUpdateUrl(relay.id, e.target.value) + } else { + setEditingId(null) + } + }} + onKeyDown={(e) => { + if (e.key === 'Enter') { + e.currentTarget.blur() + } else if (e.key === 'Escape') { + setEditingId(null) + } + }} + className="w-full px-3 py-2 bg-cyber-darker border border-neon-cyan/50 rounded text-cyber-light focus:border-neon-cyan focus:outline-none" + autoFocus + /> +
+ ) : ( +
{ + setEditingId(relay.id) + }} + title={t('settings.relay.list.editUrl')} + > + {relay.url} +
+ )} +
+
+
+ + +
+
+
+ + {t('settings.relay.list.priorityLabel', { priority: index + 1, id: relay.id })} + +
+
+ )) + )} +
+ +
+

+ {t('settings.relay.note.title')} {t('settings.relay.note.priority')} +

+

+ {t('settings.relay.note.rotation')} +

+
+
+ ) +} diff --git a/components/ReviewForm.tsx b/components/ReviewForm.tsx index faece29..6f0eaa9 100644 --- a/components/ReviewForm.tsx +++ b/components/ReviewForm.tsx @@ -60,8 +60,8 @@ export function ReviewForm({ article, onSuccess, onCancel }: ReviewFormProps): R setTitle('') setText('') onSuccess?.() - } catch (e) { - setError(e instanceof Error ? e.message : t('review.form.error.publishFailed')) + } catch (submitError) { + setError(submitError instanceof Error ? submitError.message : t('review.form.error.publishFailed')) } finally { setLoading(false) } @@ -84,7 +84,9 @@ export function ReviewForm({ article, onSuccess, onCancel }: ReviewFormProps): R } return ( - + { + void handleSubmit(e) + }} className="border border-neon-cyan/30 rounded-lg p-4 bg-cyber-dark space-y-4">

{t('review.form.title')}

diff --git a/components/ReviewTipForm.tsx b/components/ReviewTipForm.tsx index 39b885c..b66e8e8 100644 --- a/components/ReviewTipForm.tsx +++ b/components/ReviewTipForm.tsx @@ -62,8 +62,8 @@ export function ReviewTipForm({ review, article, onSuccess, onCancel }: ReviewTi setText('') onSuccess?.() - } catch (e) { - setError(e instanceof Error ? e.message : t('reviewTip.form.error.paymentFailed')) + } catch (submitError) { + setError(submitError instanceof Error ? submitError.message : t('reviewTip.form.error.paymentFailed')) } finally { setLoading(false) } @@ -88,7 +88,9 @@ export function ReviewTipForm({ review, article, onSuccess, onCancel }: ReviewTi const split = calculateReviewSplit() return ( - + { + void handleSubmit(e) + }} className="border border-neon-cyan/30 rounded-lg p-4 bg-cyber-dark space-y-4">

{t('reviewTip.form.title')}

{t('reviewTip.form.description', { amount: split.total, reviewer: split.reviewer, platform: split.platform })} diff --git a/components/SponsoringForm.tsx b/components/SponsoringForm.tsx index 59c44a6..2d971e3 100644 --- a/components/SponsoringForm.tsx +++ b/components/SponsoringForm.tsx @@ -77,8 +77,8 @@ export function SponsoringForm({ author, onSuccess, onCancel }: SponsoringFormPr setText('') onSuccess?.() - } catch (e) { - setError(e instanceof Error ? e.message : t('sponsoring.form.error.paymentFailed')) + } catch (submitError) { + setError(submitError instanceof Error ? submitError.message : t('sponsoring.form.error.paymentFailed')) } finally { setLoading(false) } @@ -109,7 +109,9 @@ export function SponsoringForm({ author, onSuccess, onCancel }: SponsoringFormPr } return ( - + { + void handleSubmit(e) + }} className="border border-neon-cyan/30 rounded-lg p-4 bg-cyber-dark space-y-4">

{t('sponsoring.form.title')}

{t('sponsoring.form.description', { amount: '0.046' })} diff --git a/components/SyncProgressBar.tsx b/components/SyncProgressBar.tsx index 946353b..7ccf7d0 100644 --- a/components/SyncProgressBar.tsx +++ b/components/SyncProgressBar.tsx @@ -30,8 +30,8 @@ export function SyncProgressBar(): React.ReactElement | null { setLastSyncDate(storedLastSyncDate) setTotalDays(days) - } catch (error) { - console.error('Error loading sync status:', error) + } catch (loadError) { + console.error('Error loading sync status:', loadError) } } @@ -103,10 +103,10 @@ export function SyncProgressBar(): React.ReactElement | null { }) // Check if sync completed successfully (if it didn't, isSyncing should still be false) setIsSyncing(false) - } catch (error) { - console.error('[SyncProgressBar] Error during auto-sync:', error) + } catch (autoSyncError) { + console.error('[SyncProgressBar] Error during auto-sync:', autoSyncError) setIsSyncing(false) - setError(error instanceof Error ? error.message : 'Erreur de synchronisation') + setError(autoSyncError instanceof Error ? autoSyncError.message : 'Erreur de synchronisation') } } else { console.log('[SyncProgressBar] Skipping auto-sync:', { isRecentlySynced, isSyncing, hasPubkey: Boolean(connectionState.pubkey) }) @@ -151,8 +151,8 @@ export function SyncProgressBar(): React.ReactElement | null { } }) } - } catch (error) { - console.error('Error resynchronizing:', error) + } catch (resyncError) { + console.error('Error resynchronizing:', resyncError) setIsSyncing(false) } } diff --git a/docs/author-funds-specification.md b/docs/author-funds-specification.md new file mode 100644 index 0000000..da0516e --- /dev/null +++ b/docs/author-funds-specification.md @@ -0,0 +1,49 @@ +# Author Funds Collection Specification + +**Author**: Équipe 4NK +**Date**: 2026-01-14 + +## Objectifs + +1. Pour chaque auteur, collecter : + - Fonds perçus sur le mempool (Bitcoin mainnet pour sponsoring) + - Fonds perçus par la plateforme (commission) + +2. Créer un lien entre : + - Paiement (zap receipt) ↔ Note de paiement ↔ Objet (publication/avis/auteur) + +3. Clarification de la structure : + - **Publications** (plusieurs pages d'une série) sont les objets achetés + - **Séries** sont les objets commentés (reviews) + - **Avis/Commentaires** sont les objets récompensés (review tips) + - **Auteurs** sont les objets sponsorisés + - Les **publications** sont des notes (kind 1) + - Les **pages** sont dans le JSON des notes de publications (pas de notes séparées) + +## Tags à ajouter + +### Payment Notes (kind 1, type='payment') +- `publication_id` ou `article`: ID de la publication (pour achat) +- `series_id` ou `series`: ID de la série (si applicable) +- `review_id`: ID de l'avis (pour review tip) +- `author`: Pubkey de l'auteur (receveur) +- `zap_receipt`: ID du zap receipt (si Lightning) +- `transaction_id`: ID de la transaction Bitcoin (si mainnet sponsoring) +- `platform_commission`: Montant de la commission +- `author_funds`: Fonds reçus par l'auteur + +### Zap Receipts (kind 9735) +Déjà présents : +- `#e`: Event ID (article ID pour purchases) +- `#p`: Pubkey (author pubkey) +- `kind_type`: Type de paiement +- `review_id`: Review ID (pour review tips) +- `series`: Series ID (optionnel) + +## Prochaines étapes + +1. ✅ Document de spécification créé +2. ⏳ Modifier les payment notes pour inclure les tags de liaison +3. ⏳ Créer un service pour collecter les fonds par auteur +4. ⏳ Intégrer la collecte des fonds du mempool pour sponsoring Bitcoin +5. 
⏳ Créer une interface pour afficher les fonds par auteur diff --git a/docs/nostr-event-order.md b/docs/nostr-event-order.md new file mode 100644 index 0000000..2900de4 --- /dev/null +++ b/docs/nostr-event-order.md @@ -0,0 +1,82 @@ +# Ordre d'arrivée des notes depuis les relais + +## Comment fonctionne la récupération avec rotation de relais + +### 1. Sélection du relai + +Avec `tryWithRelayRotation`, le système : + +1. **Essaie les relais un par un** dans l'ordre de priorité configuré +2. **Crée une subscription** avec **un seul relai à la fois** +3. Si le premier relai échoue (timeout ou erreur de connexion), passe au suivant +4. Continue jusqu'à trouver un relai qui répond + +### 2. Arrivée des événements + +Une fois qu'une subscription est établie avec un relai qui fonctionne : + +``` +Subscription créée → Relai commence à envoyer les événements +``` + +Les événements arrivent **de manière asynchrone** via le callback `sub.on('event')` : + +``` +Event 1 reçu +Event 2 reçu +Event 3 reçu +... +Event N reçu +EOSE (End of Stored Events) → Le relai a fini d'envoyer +``` + +### 3. Ordre des événements + +L'ordre d'arrivée des événements **n'est pas garanti** : + +- ❌ **Pas nécessairement chronologique** : Le relai peut envoyer dans n'importe quel ordre +- ❌ **Pas nécessairement par ID** : Les événements peuvent arriver dans un ordre aléatoire +- ✅ **Déduplication automatique** : Les événements avec le même ID ne sont traités qu'une seule fois + +### 4. Exemple concret + +Si vous avez 3 relais configurés : `relay1`, `relay2`, `relay3` + +**Scénario 1 : Relay1 fonctionne** +``` +1. Essaie relay1 → ✅ Connexion réussie +2. Reçoit événements depuis relay1 uniquement +3. Tous les événements viennent de relay1 +``` + +**Scénario 2 : Relay1 échoue, relay2 fonctionne** +``` +1. Essaie relay1 → ❌ Timeout après 5 secondes +2. Essaie relay2 → ✅ Connexion réussie +3. Reçoit événements depuis relay2 uniquement +4. Tous les événements viennent de relay2 +``` + +**Scénario 3 : Tous les relais fonctionnent** +``` +1. Essaie relay1 → ✅ Connexion réussie (premier qui répond) +2. Reçoit événements depuis relay1 uniquement +3. relay2 et relay3 ne sont pas utilisés pour cette subscription +``` + +### 5. Pourquoi un seul relai à la fois ? + +- **Efficacité** : Un seul relai suffit pour récupérer les événements +- **Simplicité** : Plus facile de gérer une seule source +- **Pas de doublons** : Les événements viennent d'une seule source, pas besoin de dédupliquer + +### 6. Problèmes de rotation pendant la réception + +⚠️ **Important** : Si un relai échoue **pendant** la réception des événements : +- La subscription est interrompue +- Les événements déjà reçus sont traités +- Un nouveau relai est essayé pour une nouvelle subscription + +## Conclusion + +Les notes arrivent **d'un seul relai à la fois**, dans un **ordre non garanti** (pas forcément chronologique), et sont traitées au fur et à mesure de leur arrivée jusqu'à recevoir EOSE (End of Stored Events). diff --git a/docs/nostr-event-uniqueness.md b/docs/nostr-event-uniqueness.md new file mode 100644 index 0000000..a3f86a3 --- /dev/null +++ b/docs/nostr-event-uniqueness.md @@ -0,0 +1,57 @@ +# Pourquoi publier sur plusieurs relais ne crée pas de doublons + +## Comment fonctionne l'identité d'un événement Nostr + +Dans Nostr, chaque événement est **uniquement identifié** par : + +1. 
**Son ID** : un hash SHA256 calculé à partir de : + - `kind` (type d'événement) + - `created_at` (timestamp) + - `pubkey` (clé publique de l'auteur) + - `tags` (tableau de tags) + - `content` (contenu de l'événement) + +2. **Sa signature** : signature cryptographique de l'ID par la clé privée de l'auteur + +## Pourquoi pas de doublons ? + +### 1. Identité unique +Le même contenu génère **exactement le même ID**. Si vous publiez le même événement (même contenu, même timestamp, mêmes tags) sur 100 relais, c'est **exactement le même événement** avec le même ID. + +### 2. Stockage par ID dans les relais +Les relais stockent les événements dans une base de données indexée par **ID d'événement**. Si un relais reçoit un événement avec un ID qu'il a déjà, il : +- **Ignore** l'événement (déjà stocké) +- **Ne crée pas de doublon** + +### 3. Déduplication automatique +Quand un utilisateur s'abonne à plusieurs relais : +- Il peut recevoir le même événement de plusieurs sources +- Mais c'est **le même événement** (même ID) +- Les clients Nostr dédupliquent automatiquement basé sur l'ID + +## Exemple concret + +``` +Événement publié sur relay1, relay2, relay3 : +- ID: abc123... +- Signature: def456... +- Contenu: "Mon article..." +``` + +Résultat : +- **Relay1** stocke : `abc123...` → "Mon article..." +- **Relay2** stocke : `abc123...` → "Mon article..." +- **Relay3** stocke : `abc123...` → "Mon article..." + +C'est **le même événement** stocké 3 fois pour redondance, pas 3 événements différents. + +## Avantages de publier sur plusieurs relais + +1. **Redondance** : si un relais tombe, l'événement reste disponible sur les autres +2. **Performance** : les utilisateurs proches de différents relais obtiennent une latence réduite +3. **Résilience** : protège contre la censure si un relais supprime l'événement +4. **Découverte** : augmente les chances que votre contenu soit trouvé + +## Conclusion + +**Publier le même événement sur plusieurs relais est la pratique recommandée dans Nostr.** Cela ne crée pas de doublons car les relais utilisent l'ID comme clé unique. C'est similaire à uploader le même fichier sur plusieurs serveurs : c'est le même fichier, pas des copies différentes. diff --git a/docs/payment-linking-system.md b/docs/payment-linking-system.md new file mode 100644 index 0000000..6a68dd2 --- /dev/null +++ b/docs/payment-linking-system.md @@ -0,0 +1,106 @@ +# Payment Linking System + +**Author**: Équipe 4NK +**Date**: 2026-01-14 + +## Overview + +This document describes the system for linking payments, payment notes, and related objects (publications, reviews, authors), and tracking funds received by authors and the platform. 
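+As a rough illustration of the linking described above, the sketch below assembles the tag set for a purchase payment note, using the tag names listed in the sections that follow. It is a minimal, hypothetical helper — the `PurchasePaymentLink` shape and `buildPurchasePaymentTags` function are not part of the codebase — shown only to make the tag layout concrete.
+
+```typescript
+// Hypothetical sketch: assemble the linking tags for a purchase payment note
+// (kind 1, type='payment'). Tag names follow the "Links" and "Tags Enhancement"
+// sections of this document; the input shape is illustrative, not an existing API.
+interface PurchasePaymentLink {
+  articleId: string          // publication being purchased
+  authorPubkey: string       // author receiving the funds
+  zapReceiptId: string       // id of the kind 9735 zap receipt
+  platformCommission: number // sats kept by the platform
+  authorFunds: number        // sats forwarded to the author
+  seriesId?: string          // optional series the publication belongs to
+}
+
+function buildPurchasePaymentTags(link: PurchasePaymentLink): string[][] {
+  const tags: string[][] = [
+    ['article', link.articleId],
+    ['author', link.authorPubkey], // written as `recipient` in the Links section below
+    ['zap_receipt', link.zapReceiptId],
+    ['platform_commission', String(link.platformCommission)],
+    ['author_funds', String(link.authorFunds)],
+  ]
+  if (link.seriesId) {
+    tags.push(['series', link.seriesId])
+  }
+  return tags
+}
+```
+
+For example, with the 800 sats purchase price and 100 sats platform commission noted under Funds Tracking below, `author_funds` would be 700 and `platform_commission` 100.
+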
+ +## Structure + +### Objects and Payments + +- **Publications** (kind 1, type='publication'): Objects that are purchased + - Contains pages in JSON metadata (not separate notes) + - Linked via `articleId` tag in payment events + - Purchased via zap receipts (kind 9735) with `kind_type: purchase` + +- **Series** (kind 1, type='series'): Objects that are reviewed + - Linked via `seriesId` tag in review payment events + - Reviews are about series + +- **Reviews** (kind 1, type='quote'): Objects that are rewarded + - Linked via `reviewId` tag in payment events + - Rewarded via zap receipts (kind 9735) with `kind_type: review_tip` + +- **Authors** (kind 1, type='author'): Objects that are sponsored + - Linked via `authorPubkey` / `#p` tag in payment events + - Sponsored via zap receipts (kind 9735) with `kind_type: sponsoring` (Lightning) + - Sponsored via Bitcoin mainnet transactions (mempool) + +### Payment Flow + +1. **Zap Receipt** (kind 9735) is created by Lightning wallet + - Contains payment hash, amount, recipient pubkey + - Tagged with `kind_type: purchase | review_tip | sponsoring` + - Links to object via tags: `article`, `review_id`, `author` (seriesId) + +2. **Payment Note** (kind 1, type='payment') is published by platform/user + - Contains same information as zap receipt + - Links to zap receipt via `zap_receipt` tag + - Links to object via tags: `article`, `review_id`, `series`, `author` + +3. **Funds Split**: + - **Author funds**: Amount received by author (after platform commission) + - **Platform funds**: Commission amount collected by platform + +## Links + +### Purchase Payment +- Zap Receipt (kind 9735) → `#e: [articleId]`, `#p: [authorPubkey]`, `kind_type: purchase` +- Payment Note (kind 1) → `article: [articleId]`, `recipient: [authorPubkey]`, `zap_receipt: [zapReceiptId]` +- Publication (kind 1) → `id: [articleId]` + +### Review Tip Payment +- Zap Receipt (kind 9735) → `#p: [authorPubkey]`, `#e: [articleId]`, `kind_type: review_tip`, `review_id: [reviewId]` +- Payment Note (kind 1) → `recipient: [authorPubkey]`, `article: [articleId]`, `review_id: [reviewId]`, `zap_receipt: [zapReceiptId]` +- Review (kind 1) → `id: [reviewId]`, `article: [articleId]` + +### Sponsoring Payment +- Zap Receipt (kind 9735) → `#p: [authorPubkey]`, `kind_type: sponsoring` +- OR Bitcoin Transaction (mempool) → verified via transaction ID +- Payment Note (kind 1) → `recipient: [authorPubkey]`, `zap_receipt: [zapReceiptId]` OR `transaction_id: [txId]` +- Author Presentation (kind 1) → `id: [authorPresentationId]`, `pubkey: [authorPubkey]` + +## Funds Tracking + +### For Each Author + +**Funds Received** (author portion): +- Purchase payments: Total from zap receipts - platform commission +- Review tips: Total from zap receipts - platform commission +- Sponsoring (Lightning): Total from zap receipts - platform commission +- Sponsoring (Bitcoin mainnet): Total from verified transactions - platform commission + +**Platform Funds** (commission): +- Purchase commission: 100 sats per purchase (from 800 sats total) +- Review tip commission: Variable per tip +- Sponsoring commission: Variable per sponsoring + +### Implementation + +1. Query all zap receipts for author (as recipient) +2. Query all payment notes for author (as recipient) +3. Query Bitcoin mainnet transactions for author's mainnet address +4. Calculate funds split for each payment +5. 
Aggregate totals + +## Tags Enhancement + +### Payment Notes should include: +- `publication_id` or `article`: Publication/Article ID +- `series_id` or `series`: Series ID (if applicable) +- `review_id`: Review ID (for review tips) +- `author`: Author pubkey (recipient) +- `zap_receipt`: Zap receipt ID (if Lightning) +- `transaction_id`: Bitcoin transaction ID (if mainnet sponsoring) +- `platform_commission`: Commission amount +- `author_funds`: Funds received by author + +### Zap Receipts already include: +- `#e`: Event ID (article ID for purchases) +- `#p`: Pubkey (author pubkey) +- `kind_type`: Payment type +- `review_id`: Review ID (for review tips) +- `series`: Series ID (optional) diff --git a/lib/articlePublisher.ts b/lib/articlePublisher.ts index 1e31d4c..763a57f 100644 --- a/lib/articlePublisher.ts +++ b/lib/articlePublisher.ts @@ -186,7 +186,7 @@ export class ArticlePublisher { const { parsePresentationEvent } = await import('./articlePublisherHelpers') const { extractTagsFromEvent } = await import('./nostrTagSystem') const { objectCache } = await import('./objectCache') - const parsed = parsePresentationEvent(publishedEvent) + const parsed = await parsePresentationEvent(publishedEvent) if (parsed) { const tags = extractTagsFromEvent(publishedEvent) const { id: tagId, version: tagVersion, hidden: tagHidden } = tags diff --git a/lib/authorQueries.ts b/lib/authorQueries.ts index 015f58b..d41e55d 100644 --- a/lib/authorQueries.ts +++ b/lib/authorQueries.ts @@ -103,11 +103,11 @@ export async function fetchAuthorByHashId( }) setTimeout(async (): Promise => { // Get the latest version from all collected events - const latestEvent = getLatestVersion(events) - if (latestEvent) { - const parsed = await parsePresentationEvent(latestEvent) - if (parsed) { - await finalize(parsed) + const timeoutLatestEvent = getLatestVersion(events) + if (timeoutLatestEvent) { + const timeoutParsed = await parsePresentationEvent(timeoutLatestEvent) + if (timeoutParsed) { + await finalize(timeoutParsed) return } } diff --git a/lib/configStorage.ts b/lib/configStorage.ts index 9bf1e72..e44379d 100644 --- a/lib/configStorage.ts +++ b/lib/configStorage.ts @@ -98,15 +98,28 @@ export class ConfigStorage { const store = transaction.objectStore(STORE_NAME) const request = store.get('config') - request.onsuccess = () => { + request.onsuccess = async () => { const result = request.result as { key: string; value: ConfigData } | undefined if (!result?.value) { - resolve(this.getDefaultConfig()) + // First time: initialize with defaults + const defaultConfig = this.getDefaultConfig() + await this.saveConfig(defaultConfig) + resolve(defaultConfig) return } - resolve(result.value) + // Migrate: if relays array is empty or only has old default, add all defaults + const existingConfig = result.value + if (existingConfig.relays.length === 0 || (existingConfig.relays.length === 1 && existingConfig.relays[0]?.id === 'default')) { + const defaultConfig = this.getDefaultConfig() + existingConfig.relays = defaultConfig.relays + await this.saveConfig(existingConfig) + resolve(existingConfig) + return + } + + resolve(existingConfig) } request.onerror = () => { diff --git a/lib/configStorageTypes.ts b/lib/configStorageTypes.ts index e04fc9a..c1ed416 100644 --- a/lib/configStorageTypes.ts +++ b/lib/configStorageTypes.ts @@ -27,15 +27,31 @@ export interface ConfigData { /** * Default configuration values (hardcoded in the code) + * All relays are enabled by default and sorted by priority */ +const now = Date.now() export const 
DEFAULT_RELAYS: RelayConfig[] = [ - { - id: 'default', - url: 'wss://relay.damus.io', - enabled: true, - priority: 1, - createdAt: Date.now(), - }, + { id: 'default_1', url: 'wss://relay.damus.io', enabled: true, priority: 1, createdAt: now }, + { id: 'default_2', url: 'wss://relay.nostr.band', enabled: true, priority: 2, createdAt: now }, + { id: 'default_3', url: 'wss://relay.nostr.wine', enabled: true, priority: 3, createdAt: now }, + { id: 'default_4', url: 'wss://cache1.primal.net', enabled: true, priority: 4, createdAt: now }, + { id: 'default_5', url: 'wss://relay.bitcoiner.social', enabled: true, priority: 5, createdAt: now }, + { id: 'default_6', url: 'wss://nostr.mutinywallet.com', enabled: true, priority: 6, createdAt: now }, + { id: 'default_7', url: 'wss://relay.current.fyi', enabled: true, priority: 7, createdAt: now }, + { id: 'default_8', url: 'wss://eden.nostr.land', enabled: true, priority: 8, createdAt: now }, + { id: 'default_9', url: 'wss://filter.nostr1.com', enabled: true, priority: 9, createdAt: now }, + { id: 'default_10', url: 'wss://relay.nos.social', enabled: true, priority: 10, createdAt: now }, + { id: 'default_11', url: 'wss://relay.nostr.dev.br', enabled: true, priority: 11, createdAt: now }, + { id: 'default_12', url: 'wss://relay.nostr.inosta.cc', enabled: true, priority: 12, createdAt: now }, + { id: 'default_13', url: 'wss://nostr.land', enabled: true, priority: 13, createdAt: now }, + { id: 'default_14', url: 'wss://relay.nostr.pub', enabled: true, priority: 14, createdAt: now }, + { id: 'default_15', url: 'wss://nostr.rocks', enabled: true, priority: 15, createdAt: now }, + { id: 'default_16', url: 'wss://purplepag.es', enabled: true, priority: 16, createdAt: now }, + { id: 'default_17', url: 'wss://relay.nostr.info', enabled: true, priority: 17, createdAt: now }, + { id: 'default_18', url: 'wss://relay.nostrich.de', enabled: true, priority: 18, createdAt: now }, + { id: 'default_19', url: 'wss://relay.snort.social', enabled: true, priority: 19, createdAt: now }, + { id: 'default_20', url: 'wss://relay.wellorder.net', enabled: true, priority: 20, createdAt: now }, + { id: 'default_21', url: 'wss://wot.nostr.party', enabled: true, priority: 21, createdAt: now }, ] export const DEFAULT_NIP95_APIS: Nip95Config[] = [ diff --git a/lib/nostr.ts b/lib/nostr.ts index 17a869f..378a547 100644 --- a/lib/nostr.ts +++ b/lib/nostr.ts @@ -76,12 +76,40 @@ class NostrService { const event = finalizeEvent(unsignedEvent, secretKey) try { - const relayUrl = await getPrimaryRelay() - const pubs = this.pool.publish([relayUrl], event) - await Promise.all(pubs) + // Publish to all active relays (enabled and not marked inactive for this session) + // Each event has a unique ID based on content, so publishing to multiple relays + // doesn't create duplicates - it's the same event stored redundantly + const { relaySessionManager } = await import('./relaySessionManager') + const activeRelays = await relaySessionManager.getActiveRelays() + + if (activeRelays.length === 0) { + // Fallback to primary relay if no active relays + const relayUrl = await getPrimaryRelay() + const pubs = this.pool.publish([relayUrl], event) + await Promise.all(pubs) + } else { + // Publish to all active relays + console.log(`[NostrService] Publishing event ${event.id} to ${activeRelays.length} active relay(s)`) + const pubs = this.pool.publish(activeRelays, event) + + // Track failed relays and mark them inactive for the session + const results = await Promise.allSettled(pubs) + results.forEach((result, 
index) => { + const relayUrl = activeRelays[index] + if (!relayUrl) { + return + } + if (result.status === 'rejected') { + const error = result.reason + console.error(`[NostrService] Relay ${relayUrl} failed during publish:`, error) + relaySessionManager.markRelayFailed(relayUrl) + } + }) + } + return event - } catch (e) { - throw new Error(`Publish failed: ${e}`) + } catch (publishError) { + throw new Error(`Publish failed: ${publishError}`) } } @@ -127,8 +155,9 @@ class NostrService { const sub = this.createArticleSubscription(this.pool, limit) - sub.on('event', async (event: Event): Promise => { - try { + sub.on('event', (event: Event): void => { + void (async (): Promise => { + try { // Try to parse as regular article first let article = await parseArticleFromEvent(event) // If not a regular article, try to parse as author presentation @@ -141,9 +170,10 @@ class NostrService { if (article) { callback(article) } - } catch (e) { - console.error('Error parsing article:', e) + } catch (parseError) { + console.error('Error parsing article:', parseError) } + })() }) return (): void => { @@ -200,11 +230,11 @@ class NostrService { } return decryptArticleContentWithKey(event.content, decryptionKey) - } catch (error) { + } catch (decryptError) { console.error('Error decrypting article content', { eventId, authorPubkey, - error: error instanceof Error ? error.message : 'Unknown error', + error: decryptError instanceof Error ? decryptError.message : 'Unknown error', }) return null } @@ -239,8 +269,8 @@ class NostrService { try { const profile = JSON.parse(event.content) as NostrProfile return { ...profile, pubkey } - } catch (error) { - console.error('Error parsing profile:', error) + } catch (parseProfileError) { + console.error('Error parsing profile:', parseProfileError) return null } } diff --git a/lib/objectCache.ts b/lib/objectCache.ts index 5cf9c0f..97ca9d2 100644 --- a/lib/objectCache.ts +++ b/lib/objectCache.ts @@ -8,7 +8,7 @@ import type { Event as NostrEvent } from 'nostr-tools' import type { AuthorPresentationArticle } from '@/types/nostr' import { buildObjectId } from './urlGenerator' -export type ObjectType = 'author' | 'series' | 'publication' | 'review' | 'purchase' | 'sponsoring' | 'review_tip' +export type ObjectType = 'author' | 'series' | 'publication' | 'review' | 'purchase' | 'sponsoring' | 'review_tip' | 'payment_note' interface CachedObject { id: string // Format: __ @@ -99,8 +99,8 @@ class ObjectCacheService { reject(request.error) } }) - } catch (error) { - console.error(`Error counting objects with hash ${hash}:`, error) + } catch (countError) { + console.error(`Error counting objects with hash ${hash}:`, countError) return 0 } } @@ -155,8 +155,8 @@ class ObjectCacheService { reject(request.error) } }) - } catch (error) { - console.error(`Error caching ${objectType} object:`, error) + } catch (cacheError) { + console.error(`Error caching ${objectType} object:`, cacheError) } } @@ -198,8 +198,8 @@ class ObjectCacheService { reject(request.error) } }) - } catch (error) { - console.error(`Error retrieving ${objectType} object from cache:`, error) + } catch (retrieveError) { + console.error(`Error retrieving ${objectType} object from cache:`, retrieveError) return null } } @@ -227,8 +227,8 @@ class ObjectCacheService { reject(request.error) } }) - } catch (error) { - console.error(`Error retrieving ${objectType} object by ID from cache:`, error) + } catch (retrieveByIdError) { + console.error(`Error retrieving ${objectType} object by ID from cache:`, retrieveByIdError) 
return null } } @@ -269,8 +269,8 @@ class ObjectCacheService { reject(request.error) } }) - } catch (error) { - console.error('Error retrieving author from cache by pubkey:', error) + } catch (authorRetrieveError) { + console.error('Error retrieving author from cache by pubkey:', authorRetrieveError) return null } } @@ -305,8 +305,8 @@ class ObjectCacheService { reject(request.error) } }) - } catch (error) { - console.error(`Error retrieving all ${objectType} objects from cache:`, error) + } catch (getAllError) { + console.error(`Error retrieving all ${objectType} objects from cache:`, getAllError) return [] } } @@ -328,8 +328,8 @@ class ObjectCacheService { reject(request.error) } }) - } catch (error) { - console.error(`Error clearing ${objectType} cache:`, error) + } catch (clearError) { + console.error(`Error clearing ${objectType} cache:`, clearError) } } } diff --git a/lib/platformTracking.ts b/lib/platformTracking.ts index 04fbbc6..26e08a6 100644 --- a/lib/platformTracking.ts +++ b/lib/platformTracking.ts @@ -21,10 +21,36 @@ export class PlatformTrackingService { if (!pool) { throw new Error('Pool not initialized') } - const { getPrimaryRelaySync } = await import('./config') - const relayUrl = getPrimaryRelaySync() - const pubs = pool.publish([relayUrl], event) - await Promise.all(pubs) + + // Publish to all active relays (enabled and not marked inactive for this session) + const { relaySessionManager } = await import('./relaySessionManager') + const activeRelays = await relaySessionManager.getActiveRelays() + + if (activeRelays.length === 0) { + // Fallback to primary relay if no active relays + const { getPrimaryRelaySync } = await import('./config') + const relayUrl = getPrimaryRelaySync() + const pubs = pool.publish([relayUrl], event) + await Promise.all(pubs) + } else { + // Publish to all active relays + console.log(`[PlatformTracking] Publishing tracking event ${event.id} to ${activeRelays.length} active relay(s)`) + const pubs = pool.publish(activeRelays, event) + + // Track failed relays and mark them inactive for the session + const results = await Promise.allSettled(pubs) + results.forEach((result, index) => { + const relayUrl = activeRelays[index] + if (!relayUrl) { + return + } + if (result.status === 'rejected') { + const error = result.reason + console.error(`[PlatformTracking] Relay ${relayUrl} failed during publish:`, error) + relaySessionManager.markRelayFailed(relayUrl) + } + }) + } } private validateTrackingPool(): { pool: SimplePoolWithSub; authorPubkey: string } | null { diff --git a/lib/purchaseQueries.ts b/lib/purchaseQueries.ts index 9a2ca36..f95eb14 100644 --- a/lib/purchaseQueries.ts +++ b/lib/purchaseQueries.ts @@ -85,15 +85,17 @@ export async function getPurchaseById(purchaseId: string, timeoutMs: number = 50 resolve(value) } - sub.on('event', async (event: Event): Promise => { - const purchaseParsed = await parsePurchaseFromEvent(event) - if (purchaseParsed?.id === purchaseId) { - // Cache the parsed purchase - if (purchaseParsed.hash) { - await objectCache.set('purchase', purchaseParsed.hash, event, purchaseParsed, 0, false, purchaseParsed.index) + sub.on('event', (event: Event): void => { + void (async (): Promise => { + const purchaseParsed = await parsePurchaseFromEvent(event) + if (purchaseParsed?.id === purchaseId) { + // Cache the parsed purchase + if (purchaseParsed.hash) { + await objectCache.set('purchase', purchaseParsed.hash, event, purchaseParsed, 0, false, purchaseParsed.index) + } + done(purchaseParsed) } - done(purchaseParsed) - } + })() 
}) sub.on('eose', (): void => { @@ -127,15 +129,17 @@ export function getPurchasesForArticle(articleId: string, timeoutMs: number = 50 resolve(results) } - sub.on('event', async (event: Event): Promise => { - const purchaseParsed = await parsePurchaseFromEvent(event) - if (purchaseParsed?.articleId === articleId) { - // Cache the parsed purchase - if (purchaseParsed.hash) { - await objectCache.set('purchase', purchaseParsed.hash, event, purchaseParsed, 0, false, purchaseParsed.index) + sub.on('event', (event: Event): void => { + void (async (): Promise => { + const purchaseParsed = await parsePurchaseFromEvent(event) + if (purchaseParsed?.articleId === articleId) { + // Cache the parsed purchase + if (purchaseParsed.hash) { + await objectCache.set('purchase', purchaseParsed.hash, event, purchaseParsed, 0, false, purchaseParsed.index) + } + results.push(purchaseParsed) } - results.push(purchaseParsed) - } + })() }) sub.on('eose', () => done()) @@ -167,15 +171,17 @@ export function getPurchasesByPayer(payerPubkey: string, timeoutMs: number = 500 resolve(results) } - sub.on('event', async (event: Event): Promise => { - const purchaseParsed = await parsePurchaseFromEvent(event) - if (purchaseParsed) { - // Cache the parsed purchase - if (purchaseParsed.hash) { - await objectCache.set('purchase', purchaseParsed.hash, event, purchaseParsed, 0, false, purchaseParsed.index ?? 0) + sub.on('event', (event: Event): void => { + void (async (): Promise => { + const purchaseParsed = await parsePurchaseFromEvent(event) + if (purchaseParsed) { + // Cache the parsed purchase + if (purchaseParsed.hash) { + await objectCache.set('purchase', purchaseParsed.hash, event, purchaseParsed, 0, false, purchaseParsed.index ?? 0) + } + results.push(purchaseParsed) } - results.push(purchaseParsed) - } + })() }) sub.on('eose', (): void => { diff --git a/lib/relayRotation.ts b/lib/relayRotation.ts new file mode 100644 index 0000000..e7af6e8 --- /dev/null +++ b/lib/relayRotation.ts @@ -0,0 +1,116 @@ +/** + * Relay rotation utility + * Tries relays in sequence, rotating through the list on failure + * No retry on individual relay, just move to next and loop + * Relays that fail are marked inactive for the session + */ + +import type { SimplePool } from 'nostr-tools' +import type { Filter } from 'nostr-tools' +import type { SimplePoolWithSub } from '@/types/nostr-tools-extended' +import { createSubscription } from '@/types/nostr-tools-extended' +import { relaySessionManager } from './relaySessionManager' + +/** + * Try to execute an operation with relay rotation + * Tries each relay in sequence, moving to next on failure + * Loops back to first relay after trying all + */ +export async function tryWithRelayRotation( + pool: SimplePool, + operation: (relayUrl: string, pool: SimplePoolWithSub) => Promise, + timeout: number = 10000 +): Promise { + // Get active relays (enabled and not marked inactive for this session) + const activeRelays = await relaySessionManager.getActiveRelays() + + if (activeRelays.length === 0) { + throw new Error('No active relays available') + } + + let lastError: Error | null = null + let attempts = 0 + const maxAttempts = activeRelays.length * 2 // Try all active relays twice (loop once) + + while (attempts < maxAttempts) { + // Get current active relays (may have changed if some were marked inactive) + const currentActiveRelays = await relaySessionManager.getActiveRelays() + + if (currentActiveRelays.length === 0) { + throw new Error('No active relays available') + } + + const relayIndex = attempts 
% currentActiveRelays.length + const relayUrl = currentActiveRelays[relayIndex] + if (!relayUrl) { + throw new Error('Invalid relay configuration') + } + + // Skip if relay was marked failed during the loop (it will be at the bottom now) + // We continue to use it but it's lower priority + + try { + console.log(`[RelayRotation] Trying relay ${relayIndex + 1}/${currentActiveRelays.length}: ${relayUrl}`) + + // Notify progress manager that we're switching to a new relay (reset to 0 for this relay) + const { syncProgressManager } = await import('./syncProgressManager') + const currentProgress = syncProgressManager.getProgress() + if (currentProgress) { + syncProgressManager.setProgress({ + ...currentProgress, + currentStep: 0, // Reset to 0 when changing relay + currentRelay: relayUrl, + }) + } + + const poolWithSub = pool as unknown as SimplePoolWithSub + const result = await Promise.race([ + operation(relayUrl, poolWithSub), + new Promise((_, reject) => + setTimeout(() => reject(new Error(`Timeout after ${timeout}ms`)), timeout) + ), + ]) + console.log(`[RelayRotation] Success with relay: ${relayUrl}`) + return result + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + console.warn(`[RelayRotation] Relay ${relayUrl} failed: ${errorMessage}`) + + // Mark relay as failed (move to bottom of priority list) + relaySessionManager.markRelayFailed(relayUrl) + + lastError = error instanceof Error ? error : new Error(String(error)) + attempts++ + + // If we've tried all relays once, loop back + if (attempts < maxAttempts) { + continue + } + } + } + + // If we get here, all relays failed + throw lastError ?? new Error('All relays failed') +} + +/** + * Create a subscription with relay rotation + * Tries each relay until one succeeds + */ +export async function createSubscriptionWithRotation( + pool: SimplePool, + filters: Filter[], + timeout: number = 10000 +): Promise<{ + subscription: import('@/types/nostr-tools-extended').Subscription + relayUrl: string +}> { + return tryWithRelayRotation( + pool, + async (relayUrl, poolWithSub) => { + const subscription = createSubscription(poolWithSub, [relayUrl], filters) + return { subscription, relayUrl } + }, + timeout + ) +} diff --git a/lib/relaySessionManager.ts b/lib/relaySessionManager.ts new file mode 100644 index 0000000..efca688 --- /dev/null +++ b/lib/relaySessionManager.ts @@ -0,0 +1,105 @@ +/** + * Relay session manager + * Tracks which relays are active/inactive during the current browser session + * Relays that fail (connection or publish errors) are moved to the bottom of the priority list + * All relays are reset to active on page load + */ + +import { getEnabledRelays } from './config' + +class RelaySessionManager { + private failedRelays: Set = new Set() // Relays that have failed (moved to bottom) + + /** + * Initialize: reset all relays to active at session start + */ + public async initialize(): Promise { + this.failedRelays.clear() + console.log('[RelaySessionManager] Session initialized - all relays active') + } + + /** + * Get active relays (enabled relays, with failed ones moved to the bottom) + * Failed relays are still active but prioritized last + */ + public async getActiveRelays(): Promise { + const enabledRelays = await getEnabledRelays() + + // Separate working relays from failed ones + const workingRelays: string[] = [] + const failedRelaysList: string[] = [] + + for (const relay of enabledRelays) { + if (this.failedRelays.has(relay)) { + failedRelaysList.push(relay) + } else { + 
workingRelays.push(relay) + } + } + + // Return working relays first, then failed ones at the bottom + return [...workingRelays, ...failedRelaysList] + } + + /** + * Mark a relay as failed (move it to the bottom of the priority list) + */ + public markRelayFailed(relayUrl: string): void { + if (!this.failedRelays.has(relayUrl)) { + this.failedRelays.add(relayUrl) + console.warn(`[RelaySessionManager] Relay moved to bottom of priority list: ${relayUrl}`) + } + } + + /** + * Check if a relay has failed (is at the bottom of the list) + */ + public isRelayFailed(relayUrl: string): boolean { + return this.failedRelays.has(relayUrl) + } + + /** + * Get list of failed relays for this session + */ + public getFailedRelays(): string[] { + return Array.from(this.failedRelays) + } + + /** + * Get count of active relays (including failed ones at the bottom) + */ + public async getActiveRelayCount(): Promise { + const activeRelays = await this.getActiveRelays() + return activeRelays.length + } + + /** + * Legacy method name for compatibility (now moves to bottom instead of deactivating) + */ + public markRelayInactive(relayUrl: string): void { + this.markRelayFailed(relayUrl) + } + + /** + * Legacy method name for compatibility (failed relays are still "active" but at bottom) + */ + public isRelayActive(_relayUrl: string): boolean { + // All enabled relays are considered "active", even if failed + // They're just at the bottom of the priority list + return true + } + + /** + * Legacy method name for compatibility + */ + public getInactiveRelays(): string[] { + return this.getFailedRelays() + } +} + +export const relaySessionManager = new RelaySessionManager() + +// Initialize on module load (page load) +if (typeof window !== 'undefined') { + void relaySessionManager.initialize() +} diff --git a/lib/reviewTipQueries.ts b/lib/reviewTipQueries.ts index 63a3c52..719e031 100644 --- a/lib/reviewTipQueries.ts +++ b/lib/reviewTipQueries.ts @@ -92,15 +92,17 @@ export async function getReviewTipById(reviewTipId: string, timeoutMs: number = resolve(value) } - sub.on('event', async (event: Event): Promise => { - const reviewTipParsed = await parseReviewTipFromEvent(event) - if (reviewTipParsed?.id === reviewTipId) { - // Cache the parsed review tip - if (reviewTipParsed.hash) { - await objectCache.set('review_tip', reviewTipParsed.hash, event, reviewTipParsed, 0, false, reviewTipParsed.index ?? 0) + sub.on('event', (event: Event): void => { + void (async (): Promise => { + const reviewTipParsed = await parseReviewTipFromEvent(event) + if (reviewTipParsed?.id === reviewTipId) { + // Cache the parsed review tip + if (reviewTipParsed.hash) { + await objectCache.set('review_tip', reviewTipParsed.hash, event, reviewTipParsed, 0, false, reviewTipParsed.index ?? 0) + } + done(reviewTipParsed) } - done(reviewTipParsed) - } + })() }) sub.on('eose', (): void => { @@ -134,15 +136,17 @@ export function getReviewTipsForArticle(articleId: string, timeoutMs: number = 5 resolve(results) } - sub.on('event', async (event: Event): Promise => { - const reviewTipParsed = await parseReviewTipFromEvent(event) - if (reviewTipParsed?.articleId === articleId) { - // Cache the parsed review tip - if (reviewTipParsed.hash) { - await objectCache.set('review_tip', reviewTipParsed.hash, event, reviewTipParsed, 0, false, reviewTipParsed.index ?? 
0) + sub.on('event', (event: Event): void => { + void (async (): Promise => { + const reviewTipParsed = await parseReviewTipFromEvent(event) + if (reviewTipParsed?.articleId === articleId) { + // Cache the parsed review tip + if (reviewTipParsed.hash) { + await objectCache.set('review_tip', reviewTipParsed.hash, event, reviewTipParsed, 0, false, reviewTipParsed.index ?? 0) + } + results.push(reviewTipParsed) } - results.push(reviewTipParsed) - } + })() }) sub.on('eose', (): void => { @@ -176,15 +180,17 @@ export function getReviewTipsForReview(reviewId: string, timeoutMs: number = 500 resolve(results) } - sub.on('event', async (event: Event): Promise => { - const reviewTipParsed = await parseReviewTipFromEvent(event) - if (reviewTipParsed?.reviewId === reviewId) { - // Cache the parsed review tip - if (reviewTipParsed.hash) { - await objectCache.set('review_tip', reviewTipParsed.hash, event, reviewTipParsed, 0, false, reviewTipParsed.index ?? 0) + sub.on('event', (event: Event): void => { + void (async (): Promise => { + const reviewTipParsed = await parseReviewTipFromEvent(event) + if (reviewTipParsed?.reviewId === reviewId) { + // Cache the parsed review tip + if (reviewTipParsed.hash) { + await objectCache.set('review_tip', reviewTipParsed.hash, event, reviewTipParsed, 0, false, reviewTipParsed.index ?? 0) + } + results.push(reviewTipParsed) } - results.push(reviewTipParsed) - } + })() }) sub.on('eose', (): void => { diff --git a/lib/reviews.ts b/lib/reviews.ts index 1ea0b0a..1606b93 100644 --- a/lib/reviews.ts +++ b/lib/reviews.ts @@ -59,11 +59,13 @@ export function getReviewsForArticle(articleId: string, timeoutMs: number = 5000 resolve(results) } - sub.on('event', async (event: Event): Promise => { - const parsed = await parseReviewFromEvent(event) - if (parsed) { - results.push(parsed) - } + sub.on('event', (event: Event): void => { + void (async (): Promise => { + const parsed = await parseReviewFromEvent(event) + if (parsed) { + results.push(parsed) + } + })() }) sub.on('eose', (): void => { diff --git a/lib/seriesQueries.ts b/lib/seriesQueries.ts index 070635f..07b1aae 100644 --- a/lib/seriesQueries.ts +++ b/lib/seriesQueries.ts @@ -53,11 +53,13 @@ export function getSeriesByAuthor(authorPubkey: string, timeoutMs: number = 5000 resolve(results) } - sub.on('event', async (event: Event): Promise => { - const parsed = await parseSeriesFromEvent(event) - if (parsed) { - results.push(parsed) - } + sub.on('event', (event: Event): void => { + void (async (): Promise => { + const parsed = await parseSeriesFromEvent(event) + if (parsed) { + results.push(parsed) + } + })() }) sub.on('eose', (): void => { diff --git a/lib/sponsoringQueries.ts b/lib/sponsoringQueries.ts index 1d853f4..f17f585 100644 --- a/lib/sponsoringQueries.ts +++ b/lib/sponsoringQueries.ts @@ -92,15 +92,17 @@ export async function getSponsoringById(sponsoringId: string, timeoutMs: number resolve(value) } - sub.on('event', async (event: Event) => { - const sponsoringParsed = await parseSponsoringFromEvent(event) - if (sponsoringParsed?.id === sponsoringId) { - // Cache the parsed sponsoring - if (sponsoringParsed.hash) { - await objectCache.set('sponsoring', sponsoringParsed.hash, event, sponsoringParsed, 0, false, sponsoringParsed.index ?? 
0) + sub.on('event', (event: Event): void => { + void (async (): Promise => { + const sponsoringParsed = await parseSponsoringFromEvent(event) + if (sponsoringParsed?.id === sponsoringId) { + // Cache the parsed sponsoring + if (sponsoringParsed.hash) { + await objectCache.set('sponsoring', sponsoringParsed.hash, event, sponsoringParsed, 0, false, sponsoringParsed.index ?? 0) + } + done(sponsoringParsed) } - done(sponsoringParsed) - } + })() }) sub.on('eose', (): void => { @@ -134,15 +136,17 @@ export function getSponsoringByAuthor(authorPubkey: string, timeoutMs: number = resolve(results) } - sub.on('event', async (event: Event): Promise => { - const sponsoringParsed = await parseSponsoringFromEvent(event) - if (sponsoringParsed?.authorPubkey === authorPubkey) { - // Cache the parsed sponsoring - if (sponsoringParsed.hash) { - await objectCache.set('sponsoring', sponsoringParsed.hash, event, sponsoringParsed, 0, false, sponsoringParsed.index ?? 0) + sub.on('event', (event: Event): void => { + void (async (): Promise => { + const sponsoringParsed = await parseSponsoringFromEvent(event) + if (sponsoringParsed?.authorPubkey === authorPubkey) { + // Cache the parsed sponsoring + if (sponsoringParsed.hash) { + await objectCache.set('sponsoring', sponsoringParsed.hash, event, sponsoringParsed, 0, false, sponsoringParsed.index ?? 0) + } + results.push(sponsoringParsed) } - results.push(sponsoringParsed) - } + })() }) sub.on('eose', (): void => { diff --git a/lib/sponsoringTracking.ts b/lib/sponsoringTracking.ts index 92f4fd5..5d246b4 100644 --- a/lib/sponsoringTracking.ts +++ b/lib/sponsoringTracking.ts @@ -74,10 +74,35 @@ export class SponsoringTrackingService { if (!pool) { throw new Error('Pool not initialized') } - const poolWithSub = pool - const relayUrl = getPrimaryRelaySync() - const pubs = poolWithSub.publish([relayUrl], event) - await Promise.all(pubs) + + // Publish to all active relays (enabled and not marked inactive for this session) + const { relaySessionManager } = await import('./relaySessionManager') + const activeRelays = await relaySessionManager.getActiveRelays() + + if (activeRelays.length === 0) { + // Fallback to primary relay if no active relays + const relayUrl = getPrimaryRelaySync() + const pubs = pool.publish([relayUrl], event) + await Promise.all(pubs) + } else { + // Publish to all active relays + console.log(`[SponsoringTracking] Publishing tracking event ${event.id} to ${activeRelays.length} active relay(s)`) + const pubs = pool.publish(activeRelays, event) + + // Track failed relays and mark them inactive for the session + const results = await Promise.allSettled(pubs) + results.forEach((result, index) => { + const relayUrl = activeRelays[index] + if (!relayUrl) { + return + } + if (result.status === 'rejected') { + const error = result.reason + console.error(`[SponsoringTracking] Relay ${relayUrl} failed during publish:`, error) + relaySessionManager.markRelayFailed(relayUrl) + } + }) + } } async trackSponsoringPayment( diff --git a/lib/syncProgressManager.ts b/lib/syncProgressManager.ts new file mode 100644 index 0000000..66d4131 --- /dev/null +++ b/lib/syncProgressManager.ts @@ -0,0 +1,33 @@ +/** + * Global sync progress manager + * Stores sync progress state that can be accessed from any component + */ + +import type { SyncProgress } from './userContentSync' + +type SyncProgressListener = (progress: SyncProgress | null) => void + +class SyncProgressManager { + private progress: SyncProgress | null = null + private listeners: Set = new Set() + + 
setProgress(progress: SyncProgress | null): void { + this.progress = progress + this.listeners.forEach((listener) => listener(progress)) + } + + getProgress(): SyncProgress | null { + return this.progress + } + + subscribe(listener: SyncProgressListener): () => void { + this.listeners.add(listener) + // Immediately call with current progress + listener(this.progress) + return () => { + this.listeners.delete(listener) + } + } +} + +export const syncProgressManager = new SyncProgressManager() diff --git a/lib/userContentSync.ts b/lib/userContentSync.ts index 03dc452..bccb4c8 100644 --- a/lib/userContentSync.ts +++ b/lib/userContentSync.ts @@ -12,6 +12,7 @@ import { objectCache } from './objectCache' import { getLatestVersion } from './versionManager' import { buildTagFilter } from './nostrTagSystemFilter' import { getPrimaryRelaySync } from './config' +import { tryWithRelayRotation } from './relayRotation' import { PLATFORM_SERVICE, MIN_EVENT_DATE } from './platformConfig' import { parseObjectId } from './urlGenerator' import type { SimplePoolWithSub } from '@/types/nostr-tools-extended' @@ -35,9 +36,40 @@ async function fetchAndCachePublications( }, ] - const relayUrl = getPrimaryRelaySync() + // Try relays with rotation (no retry on failure, just move to next) const { createSubscription } = require('@/types/nostr-tools-extended') - const sub = createSubscription(pool, [relayUrl], filters) + let sub: ReturnType | null = null + let usedRelayUrl = '' + + try { + const result = await tryWithRelayRotation( + pool as unknown as import('nostr-tools').SimplePool, + async (relayUrl, poolWithSub) => { + usedRelayUrl = relayUrl + // Notify progress manager that we're starting with a new relay (reset step counter) + const { syncProgressManager } = await import('./syncProgressManager') + const currentProgress = syncProgressManager.getProgress() + if (currentProgress) { + syncProgressManager.setProgress({ + ...currentProgress, + currentStep: 0, + currentRelay: relayUrl, + }) + } + return createSubscription(poolWithSub, [relayUrl], filters) + }, + 5000 // 5 second timeout per relay + ) + sub = result + } catch (rotationError) { + // Fallback to primary relay if rotation fails + usedRelayUrl = getPrimaryRelaySync() + sub = createSubscription(pool, [usedRelayUrl], filters) + } + + if (!sub) { + throw new Error('Failed to create subscription') + } const events: Event[] = [] @@ -72,9 +104,9 @@ async function fetchAndCachePublications( if (latestEvent) { const extracted = await extractPublicationFromEvent(latestEvent) if (extracted) { - const parsed = parseObjectId(extracted.id) - const extractedHash = parsed.hash ?? extracted.id - const extractedIndex = parsed.index ?? 0 + const publicationParsed = parseObjectId(extracted.id) + const extractedHash = publicationParsed.hash ?? extracted.id + const extractedIndex = publicationParsed.index ?? 
0 const tags = extractTagsFromEvent(latestEvent) await objectCache.set( 'publication', @@ -129,9 +161,40 @@ async function fetchAndCacheSeries( }, ] - const relayUrl = getPrimaryRelaySync() + // Try relays with rotation (no retry on failure, just move to next) const { createSubscription } = require('@/types/nostr-tools-extended') - const sub = createSubscription(pool, [relayUrl], filters) + let sub: ReturnType | null = null + let usedRelayUrl = '' + + try { + const result = await tryWithRelayRotation( + pool as unknown as import('nostr-tools').SimplePool, + async (relayUrl, poolWithSub) => { + usedRelayUrl = relayUrl + // Notify progress manager that we're starting with a new relay (reset step counter) + const { syncProgressManager } = await import('./syncProgressManager') + const currentProgress = syncProgressManager.getProgress() + if (currentProgress) { + syncProgressManager.setProgress({ + ...currentProgress, + currentStep: 0, + currentRelay: relayUrl, + }) + } + return createSubscription(poolWithSub, [relayUrl], filters) + }, + 5000 // 5 second timeout per relay + ) + sub = result + } catch (rotationError) { + // Fallback to primary relay if rotation fails + usedRelayUrl = getPrimaryRelaySync() + sub = createSubscription(pool, [usedRelayUrl], filters) + } + + if (!sub) { + throw new Error('Failed to create subscription') + } const events: Event[] = [] @@ -151,8 +214,8 @@ async function fetchAndCacheSeries( const tags = extractTagsFromEvent(event) if (tags.id) { // Extract hash from id (can be __ or just hash) - const parsed = parseObjectId(tags.id) - const hash = parsed.hash ?? tags.id + const seriesParsed = parseObjectId(tags.id) + const hash = seriesParsed.hash ?? tags.id if (!eventsByHashId.has(hash)) { eventsByHashId.set(hash, []) } @@ -166,9 +229,9 @@ async function fetchAndCacheSeries( if (latestEvent) { const extracted = await extractSeriesFromEvent(latestEvent) if (extracted) { - const parsed = parseObjectId(extracted.id) - const extractedHash = parsed.hash ?? extracted.id - const extractedIndex = parsed.index ?? 0 + const publicationParsed = parseObjectId(extracted.id) + const extractedHash = publicationParsed.hash ?? extracted.id + const extractedIndex = publicationParsed.index ?? 
0 const tags = extractTagsFromEvent(latestEvent) await objectCache.set( 'series', @@ -189,15 +252,20 @@ async function fetchAndCacheSeries( sub.on('event', (event: Event): void => { const tags = extractTagsFromEvent(event) if (tags.type === 'series' && !tags.hidden) { + console.log('[Sync] Received series event:', event.id) events.push(event) } }) sub.on('eose', (): void => { + console.log(`[Sync] EOSE for series, received ${events.length} events`) void done() }) setTimeout((): void => { + if (!finished) { + console.log(`[Sync] Timeout for series, received ${events.length} events`) + } void done() }, 10000).unref?.() }) @@ -220,9 +288,40 @@ async function fetchAndCachePurchases( }, ] - const relayUrl = getPrimaryRelaySync() + // Try relays with rotation (no retry on failure, just move to next) const { createSubscription } = require('@/types/nostr-tools-extended') - const sub = createSubscription(pool, [relayUrl], filters) + let sub: ReturnType | null = null + let usedRelayUrl = '' + + try { + const result = await tryWithRelayRotation( + pool as unknown as import('nostr-tools').SimplePool, + async (relayUrl, poolWithSub) => { + usedRelayUrl = relayUrl + // Notify progress manager that we're starting with a new relay (reset step counter) + const { syncProgressManager } = await import('./syncProgressManager') + const currentProgress = syncProgressManager.getProgress() + if (currentProgress) { + syncProgressManager.setProgress({ + ...currentProgress, + currentStep: 0, + currentRelay: relayUrl, + }) + } + return createSubscription(poolWithSub, [relayUrl], filters) + }, + 5000 // 5 second timeout per relay + ) + sub = result + } catch (rotationError) { + // Fallback to primary relay if rotation fails + usedRelayUrl = getPrimaryRelaySync() + sub = createSubscription(pool, [usedRelayUrl], filters) + } + + if (!sub) { + throw new Error('Failed to create subscription') + } const events: Event[] = [] @@ -252,14 +351,19 @@ async function fetchAndCachePurchases( } sub.on('event', (event: Event): void => { + console.log('[Sync] Received purchase event:', event.id) events.push(event) }) sub.on('eose', (): void => { + console.log(`[Sync] EOSE for purchases, received ${events.length} events`) void done() }) setTimeout((): void => { + if (!finished) { + console.log(`[Sync] Timeout for purchases, received ${events.length} events`) + } void done() }, 10000).unref?.() }) @@ -282,9 +386,40 @@ async function fetchAndCacheSponsoring( }, ] - const relayUrl = getPrimaryRelaySync() + // Try relays with rotation (no retry on failure, just move to next) const { createSubscription } = require('@/types/nostr-tools-extended') - const sub = createSubscription(pool, [relayUrl], filters) + let sub: ReturnType | null = null + let usedRelayUrl = '' + + try { + const result = await tryWithRelayRotation( + pool as unknown as import('nostr-tools').SimplePool, + async (relayUrl, poolWithSub) => { + usedRelayUrl = relayUrl + // Notify progress manager that we're starting with a new relay (reset step counter) + const { syncProgressManager } = await import('./syncProgressManager') + const currentProgress = syncProgressManager.getProgress() + if (currentProgress) { + syncProgressManager.setProgress({ + ...currentProgress, + currentStep: 0, + currentRelay: relayUrl, + }) + } + return createSubscription(poolWithSub, [relayUrl], filters) + }, + 5000 // 5 second timeout per relay + ) + sub = result + } catch (rotationError) { + // Fallback to primary relay if rotation fails + usedRelayUrl = getPrimaryRelaySync() + sub = 
createSubscription(pool, [usedRelayUrl], filters) + } + + if (!sub) { + throw new Error('Failed to create subscription') + } const events: Event[] = [] @@ -314,14 +449,19 @@ async function fetchAndCacheSponsoring( } sub.on('event', (event: Event): void => { + console.log('[Sync] Received sponsoring event:', event.id) events.push(event) }) sub.on('eose', (): void => { + console.log(`[Sync] EOSE for sponsoring, received ${events.length} events`) void done() }) setTimeout((): void => { + if (!finished) { + console.log(`[Sync] Timeout for sponsoring, received ${events.length} events`) + } void done() }, 10000).unref?.() }) @@ -344,9 +484,40 @@ async function fetchAndCacheReviewTips( }, ] - const relayUrl = getPrimaryRelaySync() + // Try relays with rotation (no retry on failure, just move to next) const { createSubscription } = require('@/types/nostr-tools-extended') - const sub = createSubscription(pool, [relayUrl], filters) + let sub: ReturnType | null = null + let usedRelayUrl = '' + + try { + const result = await tryWithRelayRotation( + pool as unknown as import('nostr-tools').SimplePool, + async (relayUrl, poolWithSub) => { + usedRelayUrl = relayUrl + // Notify progress manager that we're starting with a new relay (reset step counter) + const { syncProgressManager } = await import('./syncProgressManager') + const currentProgress = syncProgressManager.getProgress() + if (currentProgress) { + syncProgressManager.setProgress({ + ...currentProgress, + currentStep: 0, + currentRelay: relayUrl, + }) + } + return createSubscription(poolWithSub, [relayUrl], filters) + }, + 5000 // 5 second timeout per relay + ) + sub = result + } catch (rotationError) { + // Fallback to primary relay if rotation fails + usedRelayUrl = getPrimaryRelaySync() + sub = createSubscription(pool, [usedRelayUrl], filters) + } + + if (!sub) { + throw new Error('Failed to create subscription') + } const events: Event[] = [] @@ -376,14 +547,19 @@ async function fetchAndCacheReviewTips( } sub.on('event', (event: Event): void => { + console.log('[Sync] Received review tip event:', event.id) events.push(event) }) sub.on('eose', (): void => { + console.log(`[Sync] EOSE for review tips, received ${events.length} events`) void done() }) setTimeout((): void => { + if (!finished) { + console.log(`[Sync] Timeout for review tips, received ${events.length} events`) + } void done() }, 10000).unref?.() }) @@ -393,11 +569,135 @@ export interface SyncProgress { currentStep: number totalSteps: number completed: boolean + currentRelay?: string // URL of the relay currently being used +} + +/** + * Fetch all payment notes (kind 1 with type='payment') by a user and cache them + */ +async function fetchAndCachePaymentNotes( + pool: SimplePoolWithSub, + userPubkey: string +): Promise { + // Payment notes are kind 1 with type='payment' + // They can be: as payer (authors) or as recipient (#recipient tag) + const filters = [ + { + kinds: [1], + authors: [userPubkey], + '#payment': [''], + '#service': [PLATFORM_SERVICE], + since: MIN_EVENT_DATE, + limit: 1000, + }, + { + kinds: [1], + '#recipient': [userPubkey], + '#payment': [''], + '#service': [PLATFORM_SERVICE], + since: MIN_EVENT_DATE, + limit: 1000, + }, + ] + + // Try relays with rotation (no retry on failure, just move to next) + const { createSubscription } = require('@/types/nostr-tools-extended') + let subscriptions: Array> = [] + let usedRelayUrl = '' + + try { + const result = await tryWithRelayRotation( + pool as unknown as import('nostr-tools').SimplePool, + async (relayUrl, 
poolWithSub) => { + usedRelayUrl = relayUrl + // Notify progress manager that we're starting with a new relay (reset step counter) + const { syncProgressManager } = await import('./syncProgressManager') + const currentProgress = syncProgressManager.getProgress() + if (currentProgress) { + syncProgressManager.setProgress({ + ...currentProgress, + currentStep: 0, + currentRelay: relayUrl, + }) + } + // Create subscriptions for both filters (payer and recipient) + return filters.map((filter) => createSubscription(poolWithSub, [relayUrl], [filter])) + }, + 5000 // 5 second timeout per relay + ) + subscriptions = result.flat() + } catch (rotationError) { + // Fallback to primary relay if rotation fails + usedRelayUrl = getPrimaryRelaySync() + subscriptions = filters.map((filter) => createSubscription(pool, [usedRelayUrl], [filter])) + } + + if (subscriptions.length === 0) { + throw new Error('Failed to create subscriptions') + } + + const events: Event[] = [] + + return new Promise((resolve) => { + let finished = false + let eoseCount = 0 + + const done = async (): Promise => { + if (finished) { + return + } + finished = true + subscriptions.forEach((sub) => sub.unsub()) + + for (const event of events) { + const tags = extractTagsFromEvent(event) + if (tags.type === 'payment' && tags.payment) { + // Cache the payment note event + // Use event.id as hash since payment notes don't have a separate hash system + await objectCache.set('payment_note', event.id, event, { + id: event.id, + type: 'payment_note', + eventId: event.id, + }, 0, false, 0) + } + } + + resolve() + } + + subscriptions.forEach((sub) => { + sub.on('event', (event: Event): void => { + const tags = extractTagsFromEvent(event) + if (tags.type === 'payment' && tags.payment) { + console.log('[Sync] Received payment note event:', event.id) + // Deduplicate events (same event might match both filters) + if (!events.some((e) => e.id === event.id)) { + events.push(event) + } + } + }) + + sub.on('eose', (): void => { + eoseCount++ + if (eoseCount >= subscriptions.length) { + console.log(`[Sync] EOSE for payment notes, received ${events.length} events`) + void done() + } + }) + }) + + setTimeout((): void => { + if (!finished) { + console.log(`[Sync] Timeout for payment notes, received ${events.length} events`) + } + void done() + }, 10000).unref?.() + }) } /** * Synchronize all user content to IndexedDB cache - * Fetches profile, series, publications, purchases, sponsoring, and review tips and caches them + * Fetches profile, series, publications, purchases, sponsoring, review tips, and payment notes and caches them * @param userPubkey - The user's public key * @param onProgress - Optional callback to report progress (currentStep, totalSteps, completed) */ @@ -419,68 +719,79 @@ export async function syncUserContentToCache( const { setLastSyncDate, getCurrentTimestamp } = await import('./syncStorage') const currentTimestamp = getCurrentTimestamp() - const TOTAL_STEPS = 6 + const TOTAL_STEPS = 7 // Report initial progress + const { relaySessionManager } = await import('./relaySessionManager') + const { syncProgressManager } = await import('./syncProgressManager') + const activeRelays = await relaySessionManager.getActiveRelays() + const initialRelay = activeRelays[0] ?? 'Connecting...' 
+ if (onProgress) { - onProgress({ currentStep: 0, totalSteps: TOTAL_STEPS, completed: false }) + onProgress({ currentStep: 0, totalSteps: TOTAL_STEPS, completed: false, currentRelay: initialRelay }) } + syncProgressManager.setProgress({ currentStep: 0, totalSteps: TOTAL_STEPS, completed: false, currentRelay: initialRelay }) let currentStep = 0 - // Fetch and cache author profile (already caches itself) - console.log('[Sync] Step 1/6: Fetching author profile...') - await fetchAuthorPresentationFromPool(poolWithSub, userPubkey) - currentStep++ - if (onProgress) { - onProgress({ currentStep, totalSteps: TOTAL_STEPS, completed: false }) + // Helper function to update progress with current relay + const updateProgress = (step: number, completed: boolean = false): void => { + const currentRelay = syncProgressManager.getProgress()?.currentRelay ?? initialRelay + const progressUpdate = { currentStep: step, totalSteps: TOTAL_STEPS, completed, currentRelay } + if (onProgress) { + onProgress(progressUpdate) + } + syncProgressManager.setProgress(progressUpdate) } + // Fetch and cache author profile (already caches itself) + console.log('[Sync] Step 1/7: Fetching author profile...') + await fetchAuthorPresentationFromPool(poolWithSub, userPubkey) + console.log('[Sync] Step 1/7: Author profile fetch completed') + currentStep++ + updateProgress(currentStep) + // Fetch and cache all series - console.log('[Sync] Step 2/6: Fetching series...') + console.log('[Sync] Step 2/7: Fetching series...') await fetchAndCacheSeries(poolWithSub, userPubkey) currentStep++ - if (onProgress) { - onProgress({ currentStep, totalSteps: TOTAL_STEPS, completed: false }) - } + updateProgress(currentStep) // Fetch and cache all publications - console.log('[Sync] Step 3/6: Fetching publications...') + console.log('[Sync] Step 3/7: Fetching publications...') await fetchAndCachePublications(poolWithSub, userPubkey) currentStep++ - if (onProgress) { - onProgress({ currentStep, totalSteps: TOTAL_STEPS, completed: false }) - } + updateProgress(currentStep) // Fetch and cache all purchases (as payer) - console.log('[Sync] Step 4/6: Fetching purchases...') + console.log('[Sync] Step 4/7: Fetching purchases...') await fetchAndCachePurchases(poolWithSub, userPubkey) currentStep++ - if (onProgress) { - onProgress({ currentStep, totalSteps: TOTAL_STEPS, completed: false }) - } + updateProgress(currentStep) // Fetch and cache all sponsoring (as author) - console.log('[Sync] Step 5/6: Fetching sponsoring...') + console.log('[Sync] Step 5/7: Fetching sponsoring...') await fetchAndCacheSponsoring(poolWithSub, userPubkey) currentStep++ - if (onProgress) { - onProgress({ currentStep, totalSteps: TOTAL_STEPS, completed: false }) - } + updateProgress(currentStep) // Fetch and cache all review tips (as author) - console.log('[Sync] Step 6/6: Fetching review tips...') + console.log('[Sync] Step 6/7: Fetching review tips...') await fetchAndCacheReviewTips(poolWithSub, userPubkey) currentStep++ - if (onProgress) { - onProgress({ currentStep, totalSteps: TOTAL_STEPS, completed: true }) - } + updateProgress(currentStep) + + // Fetch and cache all payment notes (kind 1 with type='payment') + console.log('[Sync] Step 7/7: Fetching payment notes...') + await fetchAndCachePaymentNotes(poolWithSub, userPubkey) + currentStep++ + updateProgress(currentStep, true) // Store the current timestamp as last sync date await setLastSyncDate(currentTimestamp) console.log('[Sync] Synchronization completed successfully') - } catch (error) { - console.error('Error syncing 
user content to cache:', error) - throw error // Re-throw to allow UI to handle it + } catch (syncError) { + console.error('Error syncing user content to cache:', syncError) + throw syncError // Re-throw to allow UI to handle it } } diff --git a/pages/_app.tsx b/pages/_app.tsx index 3d11932..404db5d 100644 --- a/pages/_app.tsx +++ b/pages/_app.tsx @@ -6,6 +6,9 @@ import { platformSyncService } from '@/lib/platformSync' import { nostrAuthService } from '@/lib/nostrAuth' import { syncUserContentToCache } from '@/lib/userContentSync' import { getLastSyncDate, getCurrentTimestamp } from '@/lib/syncStorage' +import { syncProgressManager } from '@/lib/syncProgressManager' +import { GlobalSyncProgressBar } from '@/components/GlobalSyncProgressBar' +import { relaySessionManager } from '@/lib/relaySessionManager' function I18nProvider({ children }: { children: React.ReactNode }): React.ReactElement { // Get saved locale from localStorage or default to French @@ -45,6 +48,11 @@ function I18nProvider({ children }: { children: React.ReactNode }): React.ReactE } export default function App({ Component, pageProps }: AppProps): React.ReactElement { + // Initialize relay session manager on app mount (reset all relays to active) + React.useEffect(() => { + void relaySessionManager.initialize() + }, []) + // Start platform sync on app mount and resume on each page navigation React.useEffect(() => { // Start continuous sync (runs periodically in background) @@ -95,10 +103,17 @@ export default function App({ Component, pageProps }: AppProps): React.ReactElem console.log('[App] Starting user content sync...') try { - await syncUserContentToCache(state.pubkey) + await syncUserContentToCache(state.pubkey, (progress) => { + syncProgressManager.setProgress(progress) + if (progress.completed) { + syncProgressManager.setProgress(null) + } + }) console.log('[App] User content sync completed') + syncProgressManager.setProgress(null) } catch (error) { console.error('[App] Error during user content sync:', error) + syncProgressManager.setProgress(null) } finally { syncInProgress = false } @@ -121,6 +136,7 @@ export default function App({ Component, pageProps }: AppProps): React.ReactElem return ( + ) diff --git a/pages/api/nip95-upload.ts b/pages/api/nip95-upload.ts index 13cfe7c..31c40cf 100644 --- a/pages/api/nip95-upload.ts +++ b/pages/api/nip95-upload.ts @@ -64,7 +64,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) let response: { statusCode: number; statusMessage: string; body: string } try { response = await new Promise<{ statusCode: number; statusMessage: string; body: string }>((resolve, reject) => { - function makeRequest(url: URL, redirectCount: number, fileField: FormidableFile, authToken?: string): void { + function makeRequest(url: URL, redirectCount: number, file: FormidableFile, token?: string): void { if (redirectCount > MAX_REDIRECTS) { reject(new Error(`Too many redirects (max ${MAX_REDIRECTS})`)) return @@ -72,14 +72,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) // Recreate FormData for each request (needed for redirects) const requestFormData = new FormData() - const fileStream = fs.createReadStream(fileField.filepath) + const fileStream = fs.createReadStream(file.filepath) // Use 'file' as field name (standard for NIP-95, but some endpoints may use different names) // Note: nostrimg.com might expect a different field name - if issues persist, try 'image' or 'upload' const fieldName = 'file' 
requestFormData.append(fieldName, fileStream, { - filename: fileField.originalFilename || fileField.newFilename || 'upload', - contentType: fileField.mimetype || 'application/octet-stream', + filename: file.originalFilename ?? file.newFilename ?? 'upload', + contentType: file.mimetype ?? 'application/octet-stream', }) const isHttps = url.protocol === 'https:' @@ -91,8 +91,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) headers['User-Agent'] = 'zapwall.fr/1.0' // Add NIP-98 Authorization header if token is provided - if (authToken) { - headers['Authorization'] = `Nostr ${authToken}` + if (token) { + headers['Authorization'] = `Nostr ${token}` } // Log request details for debugging (only for problematic endpoints) @@ -101,14 +101,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) url: url.toString(), method: 'POST', fieldName, - filename: fileField.originalFilename || fileField.newFilename || 'upload', - contentType: fileField.mimetype || 'application/octet-stream', - fileSize: fileField.size, + filename: file.originalFilename ?? file.newFilename ?? 'upload', + contentType: file.mimetype ?? 'application/octet-stream', + fileSize: file.size, headers: { 'Content-Type': headers['content-type'], 'Accept': headers['Accept'], 'User-Agent': headers['User-Agent'], - 'Authorization': authToken ? '[present]' : '[absent]', + 'Authorization': token ? '[present]' : '[absent]', }, }) } @@ -140,7 +140,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) // Drain the response before redirecting proxyResponse.resume() // Make new request to redirect location (preserve auth token for redirects) - makeRequest(redirectUrl, redirectCount + 1, fileField, authToken) + makeRequest(redirectUrl, redirectCount + 1, file, token) return } catch (urlError) { console.error('NIP-95 proxy invalid redirect URL:', { @@ -221,7 +221,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) console.error('NIP-95 proxy file stream error:', { targetEndpoint, hostname: url.hostname, - filepath: fileField.filepath, + filepath: file.filepath, error: error instanceof Error ? error.message : 'Unknown file stream error', }) reject(error) diff --git a/pages/series/[id].tsx b/pages/series/[id].tsx index 5882abf..93e490b 100644 --- a/pages/series/[id].tsx +++ b/pages/series/[id].tsx @@ -10,6 +10,8 @@ import { ArticleCard } from '@/components/ArticleCard' import { t } from '@/lib/i18n' import Image from 'next/image' import { ArticleReviews } from '@/components/ArticleReviews' +import { useNostrAuth } from '@/hooks/useNostrAuth' +import Link from 'next/link' function SeriesHeader({ series }: { series: Series }): React.ReactElement { return ( @@ -60,6 +62,7 @@ export default function SeriesPage(): React.ReactElement | null { {series && ( <> + + + {t('series.createPublication')} + +

+ ) +} + function SeriesPublications({ articles }: { articles: Article[] }): React.ReactElement { if (articles.length === 0) { return

Aucune publication pour cette série.

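Only the {t('series.createPublication')} label of the new series-page call-to-action survives in the hunk above. A minimal sketch of the intended link, assuming it simply sends the series author to the new /series/[id]/publish page introduced below; the component name and the absence of styling are illustrative assumptions, not taken from this patch:

import Link from 'next/link'
import type { ReactElement } from 'react'
import { t } from '@/lib/i18n'
import type { Series } from '@/types/nostr'

// Illustrative sketch only, not the exact markup from this patch.
function CreatePublicationLink({ series, pubkey }: { series: Series; pubkey: string | null }): ReactElement | null {
  // Only the series author gets the call-to-action; it targets the publish page added below.
  if (!pubkey || pubkey !== series.pubkey) {
    return null
  }
  return <Link href={`/series/${series.id}/publish`}>{t('series.createPublication')}</Link>
}

The author check mirrors the isAuthor guard used by the publish page itself (series.pubkey === userPubkey).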
diff --git a/pages/series/[id]/publish.tsx b/pages/series/[id]/publish.tsx new file mode 100644 index 0000000..e8ca4b0 --- /dev/null +++ b/pages/series/[id]/publish.tsx @@ -0,0 +1,180 @@ +import Head from 'next/head' +import { useRouter } from 'next/router' +import { useEffect, useState } from 'react' +import { ArticleEditor } from '@/components/ArticleEditor' +import { useNostrAuth } from '@/hooks/useNostrAuth' +import { getSeriesById } from '@/lib/seriesQueries' +import type { Series } from '@/types/nostr' +import { t } from '@/lib/i18n' +import Image from 'next/image' + +function PublishHeader({ series }: { series: Series }): React.ReactElement { + return ( + + {t('series.publish.title', { series: series.title })} - zapwall.fr + + + ) +} + +function SeriesHeader({ series }: { series: Series }): React.ReactElement { + return ( +
+ {series.coverUrl && ( +
+ {series.title} +
+ )} +

{series.title}

+

{t('series.publish.subtitle')}

+
+ ) +} + +export default function SeriesPublishPage(): React.ReactElement | null { + const router = useRouter() + const { id } = router.query + const seriesId = typeof id === 'string' ? id : '' + const { pubkey } = useNostrAuth() + const { series, loading, error, isAuthor } = useSeriesPublishPageData(seriesId, pubkey ?? null) + + if (!seriesId) { + return null + } + + if (loading) { + return ( +
+
+

{t('common.loading')}

+
+
+ ) + } + + if (error || !series) { + return ( +
+
+

{error ?? 'Série introuvable'}

+ +
+
+ ) + } + + if (!isAuthor) { + return ( +
+
+

{t('series.publish.error.notAuthor')}

+ +
+
+ ) + } + + const handlePublishSuccess = (): void => { + setTimeout(() => { + void router.push(`/series/${seriesId}`) + }, 2000) + } + + return ( + <> + +
+
+
+

zapwall4Science

+
+
+ +
+ + + { + void router.push(`/series/${seriesId}`) + }} + seriesOptions={[{ id: series.id, title: series.title }]} + onSelectSeries={() => { + // Series is already selected and cannot be changed + }} + defaultSeriesId={series.id} + /> +
+
+
+  )
+}
+
+function useSeriesPublishPageData(
+  seriesId: string,
+  userPubkey: string | null
+): {
+  series: Series | null
+  loading: boolean
+  error: string | null
+  isAuthor: boolean
+} {
+  const [series, setSeries] = useState<Series | null>(null)
+  const [loading, setLoading] = useState(true)
+  const [error, setError] = useState<string | null>(null)
+
+  useEffect(() => {
+    if (!seriesId) {
+      return
+    }
+    const load = async (): Promise<void> => {
+      setLoading(true)
+      setError(null)
+      try {
+        const s = await getSeriesById(seriesId)
+        if (!s) {
+          setError('Série introuvable')
+          setLoading(false)
+          return
+        }
+        setSeries(s)
+      } catch (e) {
+        setError(e instanceof Error ? e.message : 'Erreur lors du chargement de la série')
+      } finally {
+        setLoading(false)
+      }
+    }
+    void load()
+  }, [seriesId])
+
+  const isAuthor = series !== null && userPubkey !== null && series.pubkey === userPubkey
+
+  return { series, loading, error, isAuthor }
+}
diff --git a/pages/settings.tsx b/pages/settings.tsx
index 98e235e..0cda128 100644
--- a/pages/settings.tsx
+++ b/pages/settings.tsx
@@ -5,6 +5,7 @@ import { Nip95ConfigManager } from '@/components/Nip95ConfigManager'
 import { KeyManagementManager } from '@/components/KeyManagementManager'
 import { CacheUpdateManager } from '@/components/CacheUpdateManager'
 import { LanguageSettingsManager } from '@/components/LanguageSettingsManager'
+import { RelayManager } from '@/components/RelayManager'
 import { t } from '@/lib/i18n'
 
 export default function SettingsPage(): React.ReactElement {
@@ -23,6 +24,7 @@ export default function SettingsPage(): React.ReactElement {
+        <RelayManager />
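The RelayManager section added to the settings page is the UI counterpart of the session-scoped relay handling introduced earlier in this patch (relaySessionManager, and the multi-relay publish in sponsoringTracking.ts). A minimal sketch of that publish pattern, assuming the getActiveRelays/markRelayFailed API shown above; the helper name publishToActiveRelays and the '@/lib/config' import path are illustrative assumptions:

import type { Event } from 'nostr-tools'
import type { SimplePoolWithSub } from '@/types/nostr-tools-extended'
import { relaySessionManager } from '@/lib/relaySessionManager'
import { getPrimaryRelaySync } from '@/lib/config'

// Illustrative sketch: publish to every active relay and demote the ones that fail,
// mirroring the pattern used in sponsoringTracking.ts in this patch.
export async function publishToActiveRelays(pool: SimplePoolWithSub, event: Event): Promise<void> {
  const activeRelays = await relaySessionManager.getActiveRelays()
  if (activeRelays.length === 0) {
    // No configured relays: fall back to the primary relay.
    await Promise.all(pool.publish([getPrimaryRelaySync()], event))
    return
  }
  const results = await Promise.allSettled(pool.publish(activeRelays, event))
  results.forEach((result, index) => {
    const relayUrl = activeRelays[index]
    if (relayUrl && result.status === 'rejected') {
      // Failed relays drop to the bottom of the priority list for the rest of the session.
      relaySessionManager.markRelayFailed(relayUrl)
    }
  })
}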
diff --git a/public/locales/en.txt b/public/locales/en.txt index 779c876..8a0e157 100644 --- a/public/locales/en.txt +++ b/public/locales/en.txt @@ -74,6 +74,11 @@ series.create.field.cover.help=Cover image for the series (optional, max 5MB, fo series.create.error.notAuthor=You must be the author of this page and have unlocked your account to create a series series.create.error.missingFields=Please fill in all required fields series.create.error.publishFailed=Error publishing series +series.createPublication=Create a publication +series.publish.title=Create a publication for {series} +series.publish.subtitle=Add pages to your series +series.publish.error.notAuthor=You are not the author of this series +series.publish.description=Create a new publication for this series # Author page author.title=Author page @@ -242,6 +247,8 @@ settings.sync.daysRange=From {{startDate}} to {{endDate}} ({{days}} days) settings.sync.progress=Step {{current}} of {{total}} settings.sync.completed=Everything is synchronized settings.sync.ready=Ready to synchronize +settings.sync.syncing=Synchronizing +settings.sync.connecting=Connecting... settings.nip95.title=NIP-95 Upload Endpoints settings.nip95.loading=Loading... settings.nip95.error.loadFailed=Failed to load NIP-95 APIs @@ -274,6 +281,31 @@ settings.nip95.list.editUrl=Click to edit URL settings.nip95.note.title=Note: settings.nip95.note.priority=Endpoints are tried in priority order (lower number = higher priority). Only enabled endpoints will be used for uploads. settings.nip95.note.fallback=If an endpoint fails, the next enabled endpoint will be tried automatically. +settings.relay.title=Nostr Relays +settings.relay.loading=Loading... +settings.relay.error.loadFailed=Failed to load relays +settings.relay.error.updateFailed=Failed to update relay +settings.relay.error.priorityFailed=Failed to update priority +settings.relay.error.urlFailed=Failed to update URL +settings.relay.error.addFailed=Failed to add relay +settings.relay.error.removeFailed=Failed to remove relay +settings.relay.error.invalidUrl=Invalid URL format +settings.relay.error.urlRequired=URL is required +settings.relay.addButton=Add relay +settings.relay.add.url=Relay URL +settings.relay.add.placeholder=wss://relay.example.com +settings.relay.add.add=Add +settings.relay.add.cancel=Cancel +settings.relay.list.enabled=Enabled +settings.relay.list.disabled=Disabled +settings.relay.list.priorityLabel=Priority {{priority}} (ID: {{id}}) +settings.relay.list.editUrl=Click to edit URL +settings.relay.list.remove=Remove +settings.relay.remove.confirm=Are you sure you want to remove this relay? +settings.relay.empty=No relays configured +settings.relay.note.title=Note: +settings.relay.note.priority=Relays are tried in priority order (lower number = higher priority). Only enabled relays will be used to fetch notes. +settings.relay.note.rotation=If a relay fails, the next enabled relay will be tried automatically. Once all relays have been tried, the system loops back to the first one. settings.language.title=Preferred Language settings.language.description=Choose your preferred language for the interface settings.language.loading=Loading... @@ -368,6 +400,8 @@ page.image.remove=Remove image page.image.alt=Page {{number}} image page.image.empty=No image article.pages.title=A5 Pages +article.pages.locked.title=Locked pages +article.pages.locked.message=This publication contains {count} page(s). Purchase it to unlock all pages. 
# Notification notification.empty=No notifications yet diff --git a/public/locales/fr.txt b/public/locales/fr.txt index 7edb4c6..19ccb6b 100644 --- a/public/locales/fr.txt +++ b/public/locales/fr.txt @@ -59,7 +59,12 @@ publication.price={{amount}} sats series.title=Séries series.empty=Aucune série publiée pour le moment. series.view=Voir la série -series.publications=Publications de la série +series.publications=Publications +series.createPublication=Créer une publication +series.publish.title=Créer une publication pour {series} +series.publish.subtitle=Ajoutez des pages à votre série +series.publish.error.notAuthor=Vous n'êtes pas l'auteur de cette série +series.publish.description=Créer une nouvelle publication pour cette série series.publications.empty=Aucune publication pour cette série. series.create.button=Créer une série series.create.title=Créer une nouvelle série @@ -242,6 +247,8 @@ settings.sync.daysRange=Du {{startDate}} au {{endDate}} ({{days}} jours) settings.sync.progress=Étape {{current}} sur {{total}} settings.sync.completed=Tout est synchronisé settings.sync.ready=Prêt à synchroniser +settings.sync.syncing=Synchronisation en cours +settings.sync.connecting=Connexion... settings.language.title=Langue de préférence settings.language.description=Choisissez votre langue préférée pour l'interface settings.language.loading=Chargement... @@ -279,6 +286,31 @@ settings.nip95.list.editUrl=Cliquer pour modifier l'URL settings.nip95.note.title=Note : settings.nip95.note.priority=Les endpoints sont essayés dans l'ordre de priorité (nombre plus bas = priorité plus haute). Seuls les endpoints activés seront utilisés pour les uploads. settings.nip95.note.fallback=Si un endpoint échoue, le prochain endpoint activé sera essayé automatiquement. +settings.relay.title=Relais Nostr +settings.relay.loading=Chargement... +settings.relay.error.loadFailed=Échec du chargement des relais +settings.relay.error.updateFailed=Échec de la mise à jour du relais +settings.relay.error.priorityFailed=Échec de la mise à jour de la priorité +settings.relay.error.urlFailed=Échec de la mise à jour de l'URL +settings.relay.error.addFailed=Échec de l'ajout du relais +settings.relay.error.removeFailed=Échec de la suppression du relais +settings.relay.error.invalidUrl=Format d'URL invalide +settings.relay.error.urlRequired=L'URL est requise +settings.relay.addButton=Ajouter un relais +settings.relay.add.url=URL du relais +settings.relay.add.placeholder=wss://relay.example.com +settings.relay.add.add=Ajouter +settings.relay.add.cancel=Annuler +settings.relay.list.enabled=Activé +settings.relay.list.disabled=Désactivé +settings.relay.list.priorityLabel=Priorité {{priority}} (ID: {{id}}) +settings.relay.list.editUrl=Cliquer pour modifier l'URL +settings.relay.list.remove=Supprimer +settings.relay.remove.confirm=Êtes-vous sûr de vouloir supprimer ce relais ? +settings.relay.empty=Aucun relais configuré +settings.relay.note.title=Note : +settings.relay.note.priority=Les relais sont essayés dans l'ordre de priorité (nombre plus bas = priorité plus haute). Seuls les relais activés seront utilisés pour récupérer les notes. +settings.relay.note.rotation=Si un relais échoue, le prochain relais activé sera essayé automatiquement. Une fois tous les relais essayés, le système repart du premier et boucle. 
 # Common UI
 common.repositoryGit=Repository Git
@@ -367,7 +399,9 @@ page.image.upload=Uploader une image
 page.image.remove=Supprimer l'image
 page.image.alt=Image page {{number}}
 page.image.empty=Aucune image
-article.pages.title=Pages A5
+article.pages.title=Pages
+article.pages.locked.title=Pages verrouillées
+article.pages.locked.message=Cette publication contient {count} page(s). Achetez-la pour débloquer toutes les pages.
 
 # Notification
 notification.empty=Aucune notification pour le moment
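The settings.sync.syncing and settings.sync.connecting strings added above are consumed by the new GlobalSyncProgressBar, whose body is not shown in this excerpt. A minimal sketch of how a component can follow the global sync state, based only on the subscribe()/setProgress() contract added in lib/syncProgressManager.ts; the hook name useSyncProgress is an illustrative assumption:

import { useEffect, useState } from 'react'
import { syncProgressManager } from '@/lib/syncProgressManager'
import type { SyncProgress } from '@/lib/userContentSync'

// Illustrative sketch: follow the global sync progress from any component.
export function useSyncProgress(): SyncProgress | null {
  const [progress, setProgress] = useState<SyncProgress | null>(null)

  useEffect(() => {
    // subscribe() immediately replays the current progress, then streams updates;
    // it returns an unsubscribe function, used here as the effect cleanup.
    return syncProgressManager.subscribe(setProgress)
  }, [])

  return progress
}

A consumer can then render currentStep / totalSteps and currentRelay, falling back to the settings.sync.connecting label while currentRelay is still unset.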