commit 9e76a9e18a (parent 526fb5af6f)
Nicolas Cantu, 2026-01-09 13:13:24 +01:00
25 changed files with 768 additions and 503 deletions

components/DragHandle.tsx (new file, +17)

@@ -0,0 +1,17 @@
+export function DragHandle(): React.ReactElement {
+  return (
+    <div className="flex flex-col gap-1 cursor-grab active:cursor-grabbing text-cyber-accent/50 hover:text-neon-cyan transition-colors">
+      <svg width="12" height="12" viewBox="0 0 12 12" fill="currentColor" aria-hidden="true">
+        <circle cx="2" cy="2" r="1.5" />
+        <circle cx="6" cy="2" r="1.5" />
+        <circle cx="10" cy="2" r="1.5" />
+        <circle cx="2" cy="6" r="1.5" />
+        <circle cx="6" cy="6" r="1.5" />
+        <circle cx="10" cy="6" r="1.5" />
+        <circle cx="2" cy="10" r="1.5" />
+        <circle cx="6" cy="10" r="1.5" />
+        <circle cx="10" cy="10" r="1.5" />
+      </svg>
+    </div>
+  )
+}
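The handle is now a single shared, prop-less presentational component. A typical call site in a draggable list row looks like the sketch below; the row markup and names are illustrative, only the DragHandle usage itself comes from this commit, and drag events are wired by the parent row (draggable, onDragStart), not by the handle.

import { DragHandle } from './DragHandle'

function DraggableRow({ label }: { label: string }): React.ReactElement {
  return (
    <div draggable className="flex items-center gap-2">
      <DragHandle />
      <span>{label}</span>
    </div>
  )
}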

View File

@@ -3,6 +3,7 @@ import { configStorage } from '@/lib/configStorage'
import type { Nip95Config } from '@/lib/configStorageTypes'
import { t } from '@/lib/i18n'
import { userConfirm } from '@/lib/userConfirm'
+import { DragHandle } from './DragHandle'

interface Nip95ConfigManagerProps {
onConfigChange?: () => void
@@ -122,24 +123,6 @@ export function Nip95ConfigManager({ onConfigChange }: Nip95ConfigManagerProps):
void handleUpdatePriorities(newApis)
}

-function DragHandle(): React.ReactElement {
-return (
-<div className="flex flex-col gap-1 cursor-grab active:cursor-grabbing text-cyber-accent/50 hover:text-neon-cyan transition-colors">
-<svg width="12" height="12" viewBox="0 0 12 12" fill="currentColor">
-<circle cx="2" cy="2" r="1.5" />
-<circle cx="6" cy="2" r="1.5" />
-<circle cx="10" cy="2" r="1.5" />
-<circle cx="2" cy="6" r="1.5" />
-<circle cx="6" cy="6" r="1.5" />
-<circle cx="10" cy="6" r="1.5" />
-<circle cx="2" cy="10" r="1.5" />
-<circle cx="6" cy="10" r="1.5" />
-<circle cx="10" cy="10" r="1.5" />
-</svg>
-</div>
-)
-}

async function handleUpdateUrl(id: string, url: string): Promise<void> {
try {
await configStorage.updateNip95Api(id, { url })

View File

@@ -4,6 +4,7 @@ import type { RelayConfig } from '@/lib/configStorageTypes'
import { t } from '@/lib/i18n'
import { userConfirm } from '@/lib/userConfirm'
import { relaySessionManager } from '@/lib/relaySessionManager'
+import { DragHandle } from './DragHandle'

interface RelayManagerProps {
onConfigChange?: () => void
@@ -147,24 +148,6 @@ export function RelayManager({ onConfigChange }: RelayManagerProps): React.React
setDraggedId(null)
}

-function DragHandle(): React.ReactElement {
-return (
-<div className="flex flex-col gap-1 cursor-grab active:cursor-grabbing text-cyber-accent/50 hover:text-neon-cyan transition-colors">
-<svg width="12" height="12" viewBox="0 0 12 12" fill="currentColor">
-<circle cx="2" cy="2" r="1.5" />
-<circle cx="6" cy="2" r="1.5" />
-<circle cx="10" cy="2" r="1.5" />
-<circle cx="2" cy="6" r="1.5" />
-<circle cx="6" cy="6" r="1.5" />
-<circle cx="10" cy="6" r="1.5" />
-<circle cx="2" cy="10" r="1.5" />
-<circle cx="6" cy="10" r="1.5" />
-<circle cx="10" cy="10" r="1.5" />
-</svg>
-</div>
-)
-}

async function handleUpdateUrl(id: string, url: string): Promise<void> {
try {
await configStorage.updateRelay(id, { url })

View File

@@ -1,4 +1,4 @@
-import { useState, useEffect } from 'react'
+import { useState, useEffect, useCallback } from 'react'
import { nostrAuthService } from '@/lib/nostrAuth'
import { getLastSyncDate, setLastSyncDate as setLastSyncDateStorage, getCurrentTimestamp, calculateDaysBetween } from '@/lib/syncStorage'
import { MIN_EVENT_DATE } from '@/lib/platformConfig'
@@ -15,13 +15,7 @@ export function SyncProgressBar(): React.ReactElement | null {
const [connectionState, setConnectionState] = useState<{ connected: boolean; pubkey: string | null }>({ connected: false, pubkey: null })
const [error, setError] = useState<string | null>(null)

-const { syncProgress, isSyncing, startMonitoring, stopMonitoring } = useSyncProgress({
-onComplete: async () => {
-await loadSyncStatus()
-},
-})
-
-async function loadSyncStatus(): Promise<void> {
+const loadSyncStatus = useCallback(async (): Promise<void> => {
try {
const state = nostrAuthService.getState()
if (!state.connected || !state.pubkey) {
@@ -37,7 +31,11 @@ export function SyncProgressBar(): React.ReactElement | null {
} catch (loadError) {
console.error('Error loading sync status:', loadError)
}
-}
+}, [])
+
+const { syncProgress, isSyncing, startMonitoring, stopMonitoring } = useSyncProgress({
+onComplete: loadSyncStatus,
+})

useEffect(() => {
// Check connection state
@@ -113,7 +111,7 @@ export function SyncProgressBar(): React.ReactElement | null {
console.warn('[SyncProgressBar] Skipping auto-sync:', { isRecentlySynced, isSyncing, hasPubkey: Boolean(connectionState.pubkey) })
}
})()
-}, [isInitialized, connectionState.connected, connectionState.pubkey, isSyncing])
+}, [isInitialized, connectionState.connected, connectionState.pubkey, isSyncing, loadSyncStatus, startMonitoring, stopMonitoring])

async function resynchronize(): Promise<void> {
try {
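Declaring loadSyncStatus with useCallback before the useSyncProgress call lets the same memoized function be passed as onComplete and added to the effect's dependency array without retriggering the effect on every render. A condensed sketch of the shape (names from the diff, bodies elided):

const loadSyncStatus = useCallback(async (): Promise<void> => {
  // read last sync date / connection state; body elided
}, [])

const { startMonitoring, stopMonitoring } = useSyncProgress({ onComplete: loadSyncStatus })

useEffect(() => {
  // safe to list these: loadSyncStatus, startMonitoring and stopMonitoring are
  // referentially stable (see the useSyncProgress change later in this commit)
}, [loadSyncStatus, startMonitoring, stopMonitoring])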

View File

@@ -1,4 +1,4 @@
-import { useState, useRef, useEffect } from 'react'
+import { useState, useRef, useMemo } from 'react'
import { nostrAuthService } from '@/lib/nostrAuth'
import { getWordSuggestions } from '@/lib/keyManagementBIP39'
@@ -20,26 +20,24 @@ function WordInputWithAutocomplete({
onFocus: () => void
onBlur: () => void
}): React.ReactElement {
-const [suggestions, setSuggestions] = useState<string[]>([])
const [showSuggestions, setShowSuggestions] = useState(false)
const [selectedIndex, setSelectedIndex] = useState(-1)
const inputRef = useRef<HTMLInputElement>(null)
const suggestionsRef = useRef<HTMLDivElement>(null)

-useEffect(() => {
-if (value.length > 0) {
-const newSuggestions = getWordSuggestions(value, 5)
-setSuggestions(newSuggestions)
-setShowSuggestions(newSuggestions.length > 0)
-setSelectedIndex(-1)
-} else {
-setSuggestions([])
-setShowSuggestions(false)
-}
+const suggestions = useMemo((): string[] => {
+if (value.length === 0) {
+return []
+}
+return getWordSuggestions(value, 5)
}, [value])

const handleChange = (event: React.ChangeEvent<HTMLInputElement>): void => {
const newValue = event.target.value.trim().toLowerCase()
+setSelectedIndex(-1)
+if (newValue.length === 0) {
+setShowSuggestions(false)
+}
onChange(newValue)
}

View File

@@ -1,4 +1,4 @@
-import { useEffect, useState, type Dispatch, type SetStateAction } from 'react'
+import { useMemo, useState, type Dispatch, type SetStateAction } from 'react'
import type { Article } from '@/types/nostr'
import type { ArticleDraft } from '@/lib/articlePublisherTypes'
import { useArticleEditing } from '@/hooks/useArticleEditing'
@@ -63,12 +63,17 @@ function useUserArticlesController({
submitEdit: () => Promise<import('@/lib/articleMutations').ArticleUpdateResult | null>
deleteArticle: (id: string) => Promise<boolean>
} {
-const [localArticles, setLocalArticles] = useState<Article[]>(articles)
+const [deletedArticleIds, setDeletedArticleIds] = useState<Set<string>>(new Set())
+const [articleOverridesById, setArticleOverridesById] = useState<Map<string, Article>>(new Map())
const [unlockedArticles, setUnlockedArticles] = useState<Set<string>>(new Set())
const [pendingDeleteId, setPendingDeleteId] = useState<string | null>(null)
const editingCtx = useArticleEditing(currentPubkey)

-useEffect(() => setLocalArticles(articles), [articles])
+const localArticles = useMemo((): Article[] => {
+return articles
+.filter((a) => !deletedArticleIds.has(a.id))
+.map((a) => articleOverridesById.get(a.id) ?? a)
+}, [articles, articleOverridesById, deletedArticleIds])

return {
localArticles,
@@ -76,12 +81,12 @@ function useUserArticlesController({
pendingDeleteId,
requestDelete: (id: string) => setPendingDeleteId(id),
handleUnlock: createHandleUnlock(onLoadContent, setUnlockedArticles),
-handleDelete: createHandleDelete(editingCtx.deleteArticle, setLocalArticles, setPendingDeleteId),
+handleDelete: createHandleDelete(editingCtx.deleteArticle, setDeletedArticleIds, setPendingDeleteId),
handleEditSubmit: createHandleEditSubmit(
editingCtx.submitEdit,
editingCtx.editingDraft,
currentPubkey,
-setLocalArticles
+setArticleOverridesById
),
...editingCtx,
}
@@ -101,13 +106,13 @@ function createHandleUnlock(
function createHandleDelete(
deleteArticle: (id: string) => Promise<boolean>,
-setLocalArticles: Dispatch<SetStateAction<Article[]>>,
+setDeletedArticleIds: Dispatch<SetStateAction<Set<string>>>,
setPendingDeleteId: Dispatch<SetStateAction<string | null>>
): (article: Article) => Promise<void> {
return async (article: Article): Promise<void> => {
const ok = await deleteArticle(article.id)
if (ok) {
-setLocalArticles((prev) => prev.filter((a) => a.id !== article.id))
+setDeletedArticleIds((prev) => new Set([...prev, article.id]))
}
setPendingDeleteId(null)
}
@@ -117,15 +122,16 @@ function createHandleEditSubmit(
submitEdit: () => Promise<import('@/lib/articleMutations').ArticleUpdateResult | null>,
draft: ReturnType<typeof useArticleEditing>['editingDraft'],
currentPubkey: string | null,
-setLocalArticles: Dispatch<SetStateAction<Article[]>>
+setArticleOverridesById: Dispatch<SetStateAction<Map<string, Article>>>
): () => Promise<void> {
return async (): Promise<void> => {
const result = await submitEdit()
if (result && draft) {
const updated = buildUpdatedArticle(draft, currentPubkey ?? '', result.articleId)
-setLocalArticles((prev) => {
-const filtered = prev.filter((a) => a.id !== result.originalArticleId)
-return [updated, ...filtered]
+setArticleOverridesById((prev) => {
+const next = new Map(prev)
+next.set(result.originalArticleId, { ...updated, id: result.originalArticleId })
+return next
})
}
}
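The hook no longer copies the articles prop into state; it derives the visible list from the prop plus two pieces of local state (a tombstone set for deletes, a per-id override map for edits), so a parent refetch can never be clobbered by a stale copy. The derivation in isolation, as a standalone sketch (Article comes from '@/types/nostr' as in the file):

function deriveLocalArticles(
  articles: Article[],
  deletedIds: Set<string>,
  overridesById: Map<string, Article>
): Article[] {
  return articles
    .filter((a) => !deletedIds.has(a.id))      // hide locally deleted articles
    .map((a) => overridesById.get(a.id) ?? a)  // substitute locally edited versions
}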

View File

@@ -20,10 +20,6 @@ export function useArticles(searchQuery: string = '', filters: ArticleFilters |
const hasArticlesRef = useRef(false)

useEffect(() => {
-setLoading(true)
-setError(null)
-
-// Load authors from cache first
const loadAuthorsFromCache = async (): Promise<boolean> => {
try {
const cachedAuthors = await objectCache.getAll('author')
@@ -60,11 +56,18 @@ export function useArticles(searchQuery: string = '', filters: ArticleFilters |
const sponsoringResults = await Promise.all(sponsoringPromises)

// Update articles with sponsoring amounts
+const sponsoringByAuthorId = new Map<string, number>()
+sponsoringResults.forEach((result) => {
+if (result) {
+sponsoringByAuthorId.set(result.authorId, result.totalSponsoring)
+}
+})
setArticles((prev) =>
prev.map((article) => {
-const sponsoringResult = sponsoringResults.find((r) => r?.authorId === article.id)
-if (sponsoringResult && article.isPresentation) {
-return { ...article, totalSponsoring: sponsoringResult.totalSponsoring }
+const totalSponsoring = sponsoringByAuthorId.get(article.id)
+if (totalSponsoring !== undefined && article.isPresentation) {
+return { ...article, totalSponsoring }
}
return article
})
@@ -78,19 +81,24 @@ export function useArticles(searchQuery: string = '', filters: ArticleFilters |
setLoading(false)
hasArticlesRef.current = false
return false
-} catch (error) {
-console.error('Error loading authors from cache:', error)
+} catch (loadError) {
+console.error('Error loading authors from cache:', loadError)
setLoading(false)
return false
}
}

-// Read only from IndexedDB cache - no network subscription
-void loadAuthorsFromCache().then((hasCachedAuthors) => {
+const load = async (): Promise<void> => {
+setLoading(true)
+setError(null)
+const hasCachedAuthors = await loadAuthorsFromCache()
if (!hasCachedAuthors) {
setError(t('common.error.noContent'))
}
-})
+}
+void load()

return () => {
// No cleanup needed - no network subscription
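Building sponsoringByAuthorId once replaces a linear find over sponsoringResults for every article with a constant-time Map lookup, i.e. O(n + m) overall instead of O(n x m). The indexing step on its own, as a sketch (the element shape is taken from the diff):

function indexSponsoringTotals(
  results: Array<{ authorId: string; totalSponsoring: number } | null>
): Map<string, number> {
  const totals = new Map<string, number>()
  for (const result of results) {
    if (result) {
      totals.set(result.authorId, result.totalSponsoring)
    }
  }
  return totals // later: totals.get(article.id) is undefined when no sponsoring was found
}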

View File

@@ -16,13 +16,13 @@ export function useAuthorsProfiles(authorPubkeys: string[]): {
const pubkeysKey = useMemo(() => [...authorPubkeys].sort().join(','), [authorPubkeys])

useEffect(() => {
+const loadProfiles = async (): Promise<void> => {
if (authorPubkeys.length === 0) {
setProfiles(new Map())
setLoading(false)
return
}

-const loadProfiles = async (): Promise<void> => {
setLoading(true)
const profilesMap = new Map<string, AuthorProfile>()
@@ -33,8 +33,8 @@ export function useAuthorsProfiles(authorPubkeys: string[]): {
pubkey,
profile: profile ?? { pubkey },
}
-} catch (error) {
-console.error(`Error loading profile for ${pubkey}:`, error)
+} catch (loadError) {
+console.error(`Error loading profile for ${pubkey}:`, loadError)
return {
pubkey,
profile: { pubkey },

View File

@@ -56,9 +56,8 @@ export function useNotifications(userPubkey: string | null): {
void (async (): Promise<void> => {
try {
await notificationService.markAsRead(notificationId)
-setNotifications((prev) =>
-prev.map((n) => (n.id === notificationId ? { ...n, read: true } : n))
-)
+const storedNotifications = await notificationService.getAllNotifications(100)
+setNotifications(storedNotifications)
} catch (error) {
console.error('[useNotifications] Error marking notification as read:', error)
}
@@ -75,7 +74,8 @@ export function useNotifications(userPubkey: string | null): {
void (async (): Promise<void> => {
try {
await notificationService.markAllAsRead()
-setNotifications((prev) => prev.map((n) => ({ ...n, read: true })))
+const storedNotifications = await notificationService.getAllNotifications(100)
+setNotifications(storedNotifications)
} catch (error) {
console.error('[useNotifications] Error marking all as read:', error)
}
@@ -91,7 +91,8 @@ export function useNotifications(userPubkey: string | null): {
void (async (): Promise<void> => {
try {
await notificationService.deleteNotification(notificationId)
-setNotifications((prev) => prev.filter((n) => n.id !== notificationId))
+const storedNotifications = await notificationService.getAllNotifications(100)
+setNotifications(storedNotifications)
} catch (error) {
console.error('[useNotifications] Error deleting notification:', error)
}
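All three mutations now re-read from notificationService instead of patching local state, so the hook always reflects what was actually persisted. The repeated two lines could be folded into one helper; a sketch under the assumption that getAllNotifications(100) stays the canonical read path (StoredNotification stands in for whatever element type it returns and is not a real type in the codebase):

async function refreshNotifications(
  setNotifications: (items: StoredNotification[]) => void
): Promise<void> {
  const stored = await notificationService.getAllNotifications(100)
  setNotifications(stored)
}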

View File

@@ -87,8 +87,7 @@ async function buildPreviewTags(
encryptedKey?: string
}
): Promise<string[][]> {
-// Map category to new system
-const category = params.draft.category === 'science-fiction' ? 'sciencefiction' : params.draft.category === 'scientific-research' ? 'research' : 'sciencefiction'
+const category = normalizePublicationCategory(params.draft.category)

// Generate hash ID from publication data
const hashId = await generatePublicationHashId({
@@ -148,3 +147,10 @@ async function buildPreviewTags(
return newTags
}
+
+function normalizePublicationCategory(category: ArticleDraft['category'] | undefined): 'sciencefiction' | 'research' {
+if (category === 'scientific-research') {
+return 'research'
+}
+return 'sciencefiction'
+}
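Behaviour is preserved: the old ternary also collapsed 'science-fiction' and any other value to 'sciencefiction', which is exactly what the helper's final return does; only 'scientific-research' maps to 'research'. For example:

normalizePublicationCategory('scientific-research') // 'research'
normalizePublicationCategory('science-fiction')     // 'sciencefiction'
normalizePublicationCategory(undefined)             // 'sciencefiction' (same fallback as before)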

View File

@@ -129,135 +129,243 @@ export async function parsePresentationEvent(event: Event): Promise<import('@/ty
return null return null
} }
// Try to extract profile JSON from tag first (new format) const profileData = readPresentationProfileData(tags.json, event.content)
let profileData: { const originalCategory = mapTagCategoryToOriginalCategory(tags.category)
const { hash, version, index } = await resolvePresentationIdParts({ tags, event, profileData })
const id = buildObjectId(hash, index, version)
return buildPresentationArticle({
id,
hash,
version,
index,
event,
tags,
profileData,
originalCategory,
})
}
type PresentationProfileData = {
authorName?: string authorName?: string
presentation?: string presentation?: string
contentDescription?: string contentDescription?: string
mainnetAddress?: string mainnetAddress?: string
pictureUrl?: string pictureUrl?: string
category?: string category?: string
} | null = null }
if (tags.json) { function readPresentationProfileData(jsonTag: string | undefined, content: string): PresentationProfileData | null {
profileData = parsePresentationProfileJson(tags.json) if (jsonTag) {
return parsePresentationProfileJson(jsonTag)
} }
// Fallback to content format (for backward compatibility with old notes) // Backward compatibility: invisible format (with zero-width characters)
if (!profileData) { const invisibleJsonMatch = content.match(/[\u200B\u200C]\[Metadata JSON\][\u200B\u200C]\n[\u200B\u200C](.+)[\u200B\u200C]$/s)
// Try invisible format (with zero-width characters)
const invisibleJsonMatch = event.content.match(/[\u200B\u200C]\[Metadata JSON\][\u200B\u200C]\n[\u200B\u200C](.+)[\u200B\u200C]$/s)
if (invisibleJsonMatch?.[1]) { if (invisibleJsonMatch?.[1]) {
try { try {
// Remove zero-width characters from JSON
const cleanedJson = invisibleJsonMatch[1].replace(/[\u200B\u200C\u200D\u200E\u200F]/g, '').trim() const cleanedJson = invisibleJsonMatch[1].replace(/[\u200B\u200C\u200D\u200E\u200F]/g, '').trim()
profileData = parsePresentationProfileJson(cleanedJson) return parsePresentationProfileJson(cleanedJson)
} catch (invisibleJsonError) { } catch (invisibleJsonError) {
console.error('Error parsing profile JSON from invisible content:', invisibleJsonError) console.error('Error parsing profile JSON from invisible content:', invisibleJsonError)
} }
} }
// Fallback to visible format in content // Backward compatibility: visible format
if (!profileData) { const jsonMatch = content.match(/\[Metadata JSON\]\n(.+)$/s)
const jsonMatch = event.content.match(/\[Metadata JSON\]\n(.+)$/s)
if (jsonMatch?.[1]) { if (jsonMatch?.[1]) {
profileData = parsePresentationProfileJson(jsonMatch[1].trim()) return parsePresentationProfileJson(jsonMatch[1].trim())
}
}
} }
// Map tag category to article category return null
let articleCategory: 'science-fiction' | 'scientific-research' | undefined }
if (tags.category === 'sciencefiction') {
articleCategory = 'science-fiction' function mapTagCategoryToOriginalCategory(category: unknown): 'science-fiction' | 'scientific-research' | undefined {
} else if (tags.category === 'research') { if (category === 'sciencefiction') {
articleCategory = 'scientific-research' return 'science-fiction'
} else { }
articleCategory = undefined if (category === 'research') {
return 'scientific-research'
}
return undefined
}
async function resolvePresentationIdParts(params: {
tags: ReturnType<typeof extractTagsFromEvent>
event: Event
profileData: PresentationProfileData | null
}): Promise<{ hash: string; version: number; index: number }> {
const version = typeof params.tags.version === 'number' ? params.tags.version : 0
const index = 0
const fromIdTag = resolvePresentationIdPartsFromIdTag(params.tags.id, version, index)
if (fromIdTag) {
return fromIdTag
} }
// Extract hash, version, index from id tag or parse it const mainnetAddress = resolvePresentationMainnetAddressCandidate(params.profileData, params.tags)
let hash: string const pictureUrl = resolvePresentationPictureUrlCandidate(params.profileData, params.tags)
let version = tags.version ?? 0
let index = 0
if (tags.id) { const hash = await generateAuthorHashId({
const parsed = parseObjectId(tags.id) pubkey: params.event.pubkey,
if (parsed.hash) { authorName: resolveOptionalString(params.profileData?.authorName),
;({ hash } = parsed) presentation: resolveOptionalString(params.profileData?.presentation),
version = parsed.version ?? version contentDescription: resolveOptionalString(params.profileData?.contentDescription),
index = parsed.index ?? index
} else {
// If id is just a hash, use it directly
hash = tags.id
}
} else {
// Generate hash from author data
const mainnetAddress = profileData?.mainnetAddress ?? (typeof tags.mainnetAddress === 'string' ? tags.mainnetAddress : undefined)
const pictureUrl = profileData?.pictureUrl ?? (typeof tags.pictureUrl === 'string' ? tags.pictureUrl : undefined)
hash = await generateAuthorHashId({
pubkey: event.pubkey,
authorName: profileData?.authorName ?? '',
presentation: profileData?.presentation ?? '',
contentDescription: profileData?.contentDescription ?? '',
mainnetAddress, mainnetAddress,
pictureUrl, pictureUrl,
category: profileData?.category ?? tags.category ?? 'sciencefiction', category: resolvePresentationHashCategory(params.profileData, params.tags),
}) })
return { hash, version, index }
}
function resolvePresentationIdPartsFromIdTag(
idTag: string | undefined,
defaultVersion: number,
defaultIndex: number
): { hash: string; version: number; index: number } | undefined {
if (!idTag) {
return undefined
} }
const id = buildObjectId(hash, index, version) const parsed = parseObjectId(idTag)
if (parsed.hash) {
return {
hash: parsed.hash,
version: parsed.version ?? defaultVersion,
index: parsed.index ?? defaultIndex,
}
}
// totalSponsoring is calculated from cache, not from tags return { hash: idTag, version: defaultVersion, index: defaultIndex }
// It will be set when the article is loaded from cache or calculated on demand }
const result: import('@/types/nostr').AuthorPresentationArticle = {
id, function buildPresentationArticle(params: {
hash, id: string
version, hash: string
index, version: number
pubkey: event.pubkey, index: number
title: tags.title ?? 'Présentation', event: Event
preview: tags.preview ?? event.content.substring(0, 200), tags: ReturnType<typeof extractTagsFromEvent>
content: event.content, profileData: PresentationProfileData | null
description: profileData?.presentation ?? tags.description ?? '', // Required field originalCategory: 'science-fiction' | 'scientific-research' | undefined
contentDescription: ((): string => { }): import('@/types/nostr').AuthorPresentationArticle {
const raw = profileData?.contentDescription ?? tags.description ?? '' const description = resolvePresentationDescription(params.profileData, params.tags)
// Remove Bitcoin address from contentDescription if present (should not be visible) const contentDescription = sanitizePresentationContentDescription(resolvePresentationContentDescriptionRaw(params.profileData, params.tags))
const thumbnailUrl = resolvePresentationThumbnailUrl(params.profileData, params.tags)
const mainnetAddress = resolvePresentationMainnetAddress(params.profileData, params.tags)
const bannerUrl = resolvePresentationBannerUrl(params.profileData, params.tags)
const title = resolvePresentationTitle(params.tags)
const preview = resolvePresentationPreview(params.tags, params.event.content)
return {
id: params.id,
hash: params.hash,
version: params.version,
index: params.index,
pubkey: params.event.pubkey,
title,
preview,
content: params.event.content,
description,
contentDescription,
thumbnailUrl,
createdAt: params.event.created_at,
zapAmount: 0,
paid: true,
category: 'author-presentation',
isPresentation: true,
mainnetAddress,
totalSponsoring: 0,
originalCategory: params.originalCategory ?? 'science-fiction',
...(bannerUrl ? { bannerUrl } : {}),
}
}
function resolvePresentationTitle(tags: ReturnType<typeof extractTagsFromEvent>): string {
return typeof tags.title === 'string' && tags.title.length > 0 ? tags.title : 'Présentation'
}
function resolvePresentationPreview(tags: ReturnType<typeof extractTagsFromEvent>, content: string): string {
if (typeof tags.preview === 'string' && tags.preview.length > 0) {
return tags.preview
}
return content.substring(0, 200)
}
function resolvePresentationDescription(profileData: PresentationProfileData | null, tags: ReturnType<typeof extractTagsFromEvent>): string {
if (typeof profileData?.presentation === 'string') {
return profileData.presentation
}
return typeof tags.description === 'string' ? tags.description : ''
}
function resolvePresentationContentDescriptionRaw(profileData: PresentationProfileData | null, tags: ReturnType<typeof extractTagsFromEvent>): string {
if (typeof profileData?.contentDescription === 'string') {
return profileData.contentDescription
}
return typeof tags.description === 'string' ? tags.description : ''
}
function resolveOptionalString(value: unknown): string {
return typeof value === 'string' ? value : ''
}
function resolvePresentationMainnetAddressCandidate(
profileData: PresentationProfileData | null,
tags: ReturnType<typeof extractTagsFromEvent>
): string | undefined {
if (typeof profileData?.mainnetAddress === 'string') {
return profileData.mainnetAddress
}
return typeof tags.mainnetAddress === 'string' ? tags.mainnetAddress : undefined
}
function resolvePresentationPictureUrlCandidate(
profileData: PresentationProfileData | null,
tags: ReturnType<typeof extractTagsFromEvent>
): string | undefined {
if (typeof profileData?.pictureUrl === 'string') {
return profileData.pictureUrl
}
return typeof tags.pictureUrl === 'string' ? tags.pictureUrl : undefined
}
function resolvePresentationHashCategory(profileData: PresentationProfileData | null, tags: ReturnType<typeof extractTagsFromEvent>): string {
if (typeof profileData?.category === 'string') {
return profileData.category
}
return typeof tags.category === 'string' ? tags.category : 'sciencefiction'
}
function sanitizePresentationContentDescription(raw: string): string {
return raw return raw
.split('\n') .split('\n')
.filter((line) => !line.includes('Adresse Bitcoin mainnet (pour le sponsoring)')) .filter((line) => !line.includes('Adresse Bitcoin mainnet (pour le sponsoring)'))
.join('\n') .join('\n')
.trim() .trim()
})(), // Required field }
thumbnailUrl: ((): string => {
function resolvePresentationThumbnailUrl(profileData: PresentationProfileData | null, tags: ReturnType<typeof extractTagsFromEvent>): string {
if (typeof profileData?.pictureUrl === 'string') { if (typeof profileData?.pictureUrl === 'string') {
return profileData.pictureUrl return profileData.pictureUrl
} }
if (typeof tags.pictureUrl === 'string') { return typeof tags.pictureUrl === 'string' ? tags.pictureUrl : ''
return tags.pictureUrl }
}
return ''
})(), // Required field
createdAt: event.created_at,
zapAmount: 0,
paid: true,
category: 'author-presentation',
isPresentation: true,
mainnetAddress: profileData?.mainnetAddress ?? tags.mainnetAddress ?? '',
totalSponsoring: 0, // Will be calculated from cache when needed
originalCategory: articleCategory ?? 'science-fiction', // Store original category for filtering
}
// Add bannerUrl if available function resolvePresentationBannerUrl(profileData: PresentationProfileData | null, tags: ReturnType<typeof extractTagsFromEvent>): string | undefined {
if (profileData?.pictureUrl !== undefined && profileData?.pictureUrl !== null) { if (typeof profileData?.pictureUrl === 'string' && profileData.pictureUrl.length > 0) {
result.bannerUrl = profileData.pictureUrl return profileData.pictureUrl
} else if (tags.pictureUrl !== undefined && tags.pictureUrl !== null && typeof tags.pictureUrl === 'string') {
result.bannerUrl = tags.pictureUrl
} }
return typeof tags.pictureUrl === 'string' ? tags.pictureUrl : undefined
}
return result function resolvePresentationMainnetAddress(profileData: PresentationProfileData | null, tags: ReturnType<typeof extractTagsFromEvent>): string {
const fromProfile = profileData?.mainnetAddress
if (typeof fromProfile === 'string') {
return fromProfile
}
return typeof tags.mainnetAddress === 'string' ? tags.mainnetAddress : ''
} }
function parsePresentationProfileJson(json: string): { function parsePresentationProfileJson(json: string): {
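readPresentationProfileData tries the 'json' tag first, then the zero-width-character format, then the visible legacy format. For reference, the visible legacy layout matched by the last regex looks like this (the content itself is illustrative):

const legacyContent = [
  'Texte de présentation visible',
  '[Metadata JSON]',
  '{"authorName":"Alice","presentation":"Autrice de SF","category":"sciencefiction"}',
].join('\n')
// legacyContent.match(/\[Metadata JSON\]\n(.+)$/s)?.[1] yields the JSON line above,
// which is then handed to parsePresentationProfileJson.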

View File

@@ -25,50 +25,77 @@ interface ExtractedObjectWithId {
index?: number index?: number
} }
/** function groupEventsByHashId(events: Event[]): Map<string, Event[]> {
* Group events by hash ID and cache the latest version of each
*/
export async function groupAndCacheEventsByHash(
events: Event[],
config: EventCacheConfig
): Promise<void> {
const { objectType, extractor, getHash, getIndex, getVersion, getHidden } = config
// Group events by hash ID
const eventsByHashId = new Map<string, Event[]>() const eventsByHashId = new Map<string, Event[]>()
for (const event of events) { for (const event of events) {
const tags = extractTagsFromEvent(event) const tags = extractTagsFromEvent(event)
if (tags.id) { const { id } = tags
// Extract hash from id (can be <hash>_<index>_<version> or just hash) if (id) {
const parsed = parseObjectId(tags.id) const parsed = parseObjectId(id)
const hash = parsed.hash ?? tags.id const hash = parsed.hash ?? id
if (!eventsByHashId.has(hash)) {
eventsByHashId.set(hash, []) const current = eventsByHashId.get(hash)
} if (current) {
const hashEvents = eventsByHashId.get(hash) current.push(event)
if (hashEvents) { } else {
hashEvents.push(event) eventsByHashId.set(hash, [event])
} }
} }
} }
// Cache each object (latest version) return eventsByHashId
for (const [_hash, hashEvents] of eventsByHashId.entries()) { }
const latestEvent = getLatestVersion(hashEvents)
if (latestEvent) { function resolveExtractedId(extracted: unknown, getHash: EventCacheConfig['getHash']): string | undefined {
const extracted = await extractor(latestEvent)
if (extracted) {
// Get hash, index, version, hidden
const extractedHash = getHash ? getHash(extracted) : null const extractedHash = getHash ? getHash(extracted) : null
const extractedObj = extracted as ExtractedObjectWithId const extractedObj = extracted as ExtractedObjectWithId
const extractedId = extractedHash ?? extractedObj.id return extractedHash ?? extractedObj.id
}
if (extractedId) { function resolveHashAndIndex(
const publicationParsed = parseObjectId(extractedId) extractedId: string,
const hash = publicationParsed.hash ?? extractedId extracted: unknown,
const index = getIndex ? getIndex(extracted) : publicationParsed.index ?? extractedObj.index ?? 0 getIndex: EventCacheConfig['getIndex']
const version = getVersion ? getVersion(latestEvent) : extractTagsFromEvent(latestEvent).version ?? 0 ): { hash: string; index: number } {
const hidden = getHidden ? getHidden(latestEvent) : extractTagsFromEvent(latestEvent).hidden ?? false const parsed = parseObjectId(extractedId)
const extractedObj = extracted as ExtractedObjectWithId
const hash = parsed.hash ?? extractedId
const index = getIndex ? getIndex(extracted) : (parsed.index ?? extractedObj.index ?? 0)
return { hash, index }
}
function resolveVersionAndHidden(
latestEvent: Event,
getVersion: EventCacheConfig['getVersion'],
getHidden: EventCacheConfig['getHidden']
): { version: number; hidden: boolean } {
const tags = extractTagsFromEvent(latestEvent)
const version = getVersion ? getVersion(latestEvent) : (tags.version ?? 0)
const hidden = getHidden ? getHidden(latestEvent) : (tags.hidden ?? false)
return { version, hidden }
}
async function cacheLatestEventForHashGroup(hashEvents: Event[], config: EventCacheConfig): Promise<void> {
const { objectType, extractor, getHash, getIndex, getVersion, getHidden } = config
const latestEvent = getLatestVersion(hashEvents)
if (!latestEvent) {
return
}
const extracted = await extractor(latestEvent)
if (!extracted) {
return
}
const extractedId = resolveExtractedId(extracted, getHash)
if (!extractedId) {
return
}
const { hash, index } = resolveHashAndIndex(extractedId, extracted, getIndex)
const { version, hidden } = resolveVersionAndHidden(latestEvent, getVersion, getHidden)
await writeObjectToCache({ await writeObjectToCache({
objectType, objectType,
@ -79,9 +106,18 @@ export async function groupAndCacheEventsByHash(
hidden, hidden,
index, index,
}) })
} }
}
} /**
* Group events by hash ID and cache the latest version of each
*/
export async function groupAndCacheEventsByHash(
events: Event[],
config: EventCacheConfig
): Promise<void> {
const eventsByHashId = groupEventsByHashId(events)
for (const hashEvents of eventsByHashId.values()) {
await cacheLatestEventForHashGroup(hashEvents, config)
} }
} }
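Several parsers in this commit fall back to the raw tag value when parseObjectId cannot split it; the id format referenced throughout is <hash>_<index>_<version>, per the comment removed from the old grouping code. A hedged sketch of that contract (the real parseObjectId/buildObjectId live in './urlGenerator' and are not shown in this diff):

function parseObjectIdSketch(id: string): { hash?: string; index?: number; version?: number } {
  const [hash, index, version] = id.split('_')
  if (hash === undefined || index === undefined || version === undefined) {
    return {} // bare hash or unknown format: callers fall back to the raw id
  }
  return { hash, index: Number(index), version: Number(version) }
}

parseObjectIdSketch('abc123_0_2') // { hash: 'abc123', index: 0, version: 2 }
parseObjectIdSketch('abc123')     // {}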

View File

@@ -3,7 +3,7 @@
 * Centralizes the pattern of polling syncProgressManager and updating state
 */
-import { useState, useEffect, useRef } from 'react'
+import { useState, useEffect, useRef, useCallback } from 'react'
import type { SyncProgress } from '../helpers/syncProgressHelper'

export interface UseSyncProgressOptions {
@@ -32,7 +32,7 @@ export function useSyncProgress(options: UseSyncProgressOptions = {}): UseSyncPr
const onCompleteRef = useRef(onComplete)
const isMonitoringRef = useRef(false)

-function stopMonitoring(): void {
+const stopMonitoring = useCallback((): void => {
if (!isMonitoringRef.current) {
return
}
@@ -49,14 +49,14 @@ export function useSyncProgress(options: UseSyncProgressOptions = {}): UseSyncPr
clearTimeout(timeoutRef.current)
timeoutRef.current = null
}
-}
+}, [])

// Update onComplete ref when it changes
useEffect(() => {
onCompleteRef.current = onComplete
}, [onComplete])

-const checkProgress = async (): Promise<void> => {
+const checkProgress = useCallback(async (): Promise<void> => {
const { syncProgressManager } = await import('../syncProgressManager')
const currentProgress = syncProgressManager.getProgress()
if (currentProgress) {
@@ -69,9 +69,9 @@ export function useSyncProgress(options: UseSyncProgressOptions = {}): UseSyncPr
stopMonitoring()
}
}
-}
+}, [stopMonitoring])

-const startMonitoring = (): void => {
+const startMonitoring = useCallback((): void => {
if (isMonitoringRef.current) {
return
}
@@ -89,14 +89,14 @@ export function useSyncProgress(options: UseSyncProgressOptions = {}): UseSyncPr
timeoutRef.current = setTimeout(() => {
stopMonitoring()
}, maxDuration)
-}
+}, [checkProgress, maxDuration, pollInterval, stopMonitoring])

// Cleanup on unmount
useEffect(() => {
return () => {
stopMonitoring()
}
-}, [])
+}, [stopMonitoring])

return {
syncProgress,
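The three callbacks are memoized in dependency order (stopMonitoring, then checkProgress, then startMonitoring), and the existing onCompleteRef keeps a changing onComplete prop from invalidating them. A sketch of that ref pattern under the same option names (bodies elided):

const onCompleteRef = useRef(onComplete)
useEffect(() => {
  onCompleteRef.current = onComplete
}, [onComplete])

const checkProgress = useCallback(async (): Promise<void> => {
  // ... when the sync is finished:
  await onCompleteRef.current?.() // always the latest onComplete, without re-creating the callback
  stopMonitoring()
}, [stopMonitoring])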

View File

@@ -109,7 +109,8 @@ function extractMetadataJsonFromTag(event: { tags: string[][] }): Record<string,
const jsonTag = event.tags.find((tag) => tag[0] === 'json')
if (jsonTag?.[1]) {
try {
-return JSON.parse(jsonTag[1])
+const parsed: unknown = JSON.parse(jsonTag[1])
+return isRecord(parsed) ? parsed : null
} catch (e) {
console.error('Error parsing JSON metadata from tag:', e)
return null
@@ -125,7 +126,8 @@ function extractMetadataJson(content: string): Record<string, unknown> | null {
try {
// Remove zero-width characters from JSON
const cleanedJson = invisibleJsonMatch[1].replace(/[\u200B\u200C\u200D\u200E\u200F]/g, '').trim()
-return JSON.parse(cleanedJson)
+const parsed: unknown = JSON.parse(cleanedJson)
+return isRecord(parsed) ? parsed : null
} catch (e) {
console.error('Error parsing metadata JSON from invisible content:', e)
}
@@ -135,7 +137,8 @@ function extractMetadataJson(content: string): Record<string, unknown> | null {
const jsonMatch = content.match(/\[Metadata JSON\]\n(.+)$/s)
if (jsonMatch?.[1]) {
try {
-return JSON.parse(jsonMatch[1].trim())
+const parsed: unknown = JSON.parse(jsonMatch[1].trim())
+return isRecord(parsed) ? parsed : null
} catch (e) {
console.error('Error parsing metadata JSON from content:', e)
return null
@@ -144,6 +147,10 @@ function extractMetadataJson(content: string): Record<string, unknown> | null {
return null
}

+function isRecord(value: unknown): value is Record<string, unknown> {
+return typeof value === 'object' && value !== null && !Array.isArray(value)
+}
+
/**
 * Extract author from event
 */
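Typing the JSON.parse result as unknown and narrowing through the isRecord guard means a payload that parses but is not a plain object (an array, string, number, or null) now yields null instead of leaking through typed as Record<string, unknown>. For instance:

isRecord(JSON.parse('{"a":1}')) // true  -> returned as metadata
isRecord(JSON.parse('[1,2,3]')) // false -> extractMetadataJson returns null
isRecord(JSON.parse('"hello"')) // false
isRecord(JSON.parse('null'))    // false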

View File

@@ -58,22 +58,19 @@ function parseUploadResponse(result: unknown, endpoint: string): string {
const obj = result as Record<string, unknown>

-// void.cat format: { ok: true, file: { id, url } }
-if ('ok' in obj && obj.ok === true && 'file' in obj) {
-const file = obj.file as Record<string, unknown>
-if (typeof file.url === 'string') {
-return file.url
-}
+const fromVoidCat = readVoidCatUploadUrl(obj)
+if (fromVoidCat) {
+return fromVoidCat
}

-// nostrcheck.me format: { status: 'success', url: string }
-if ('status' in obj && obj.status === 'success' && 'url' in obj && typeof obj.url === 'string') {
-return obj.url
+const fromNostrcheck = readNostrcheckUploadUrl(obj)
+if (fromNostrcheck) {
+return fromNostrcheck
}

-// Standard format: { url: string }
-if ('url' in obj && typeof obj.url === 'string') {
-return obj.url
+const fromStandard = readStandardUploadUrl(obj)
+if (fromStandard) {
+return fromStandard
}

console.error('NIP-95 upload missing URL:', {
@@ -83,6 +80,28 @@ function parseUploadResponse(result: unknown, endpoint: string): string {
})
throw new Error('Upload response missing URL')
}

+function readVoidCatUploadUrl(obj: Record<string, unknown>): string | undefined {
+if (!('ok' in obj) || obj.ok !== true || !('file' in obj)) {
+return undefined
+}
+if (typeof obj.file !== 'object' || obj.file === null) {
+return undefined
+}
+const file = obj.file as Record<string, unknown>
+return typeof file.url === 'string' ? file.url : undefined
+}
+
+function readNostrcheckUploadUrl(obj: Record<string, unknown>): string | undefined {
+if (!('status' in obj) || obj.status !== 'success') {
+return undefined
+}
+return typeof obj.url === 'string' ? obj.url : undefined
+}
+
+function readStandardUploadUrl(obj: Record<string, unknown>): string | undefined {
+return typeof obj.url === 'string' ? obj.url : undefined
+}
+
/**
 * Try uploading to a single endpoint
 * Uses proxy API route for endpoints that have CORS issues
@@ -139,70 +158,11 @@ export async function uploadNip95Media(file: File): Promise<MediaRef> {
let lastError: Error | null = null let lastError: Error | null = null
for (const endpoint of endpoints) { for (const endpoint of endpoints) {
try { const upload = await attemptUploadToEndpoint({ endpoint, formData, mediaType, file })
// Check if endpoint requires NIP-98 authentication (nostrcheck.me) if (upload) {
const needsAuth = endpoint.includes('nostrcheck.me') return upload
let authToken: string | undefined
if (needsAuth) {
if (!isNip98Available()) {
const pubkey = nostrService.getPublicKey()
const isUnlocked = nostrAuthService.isUnlocked()
if (!pubkey) {
console.warn('NIP-98 authentication required for nostrcheck.me but no account found. Please create or import an account.')
// Skip this endpoint
} else if (!isUnlocked) {
// Throw a special error that can be caught to trigger unlock modal
// This error should propagate to the caller, not be caught here
throw createUnlockRequiredError()
} else {
console.warn('NIP-98 authentication required for nostrcheck.me but not available. Skipping endpoint.')
// Skip this endpoint
}
} else {
try {
// Generate NIP-98 token for the actual endpoint (not the proxy)
// The token must be for the final destination URL
authToken = await generateNip98Token('POST', endpoint)
} catch (authError) {
console.error('Failed to generate NIP-98 token:', authError)
// Skip this endpoint if auth fails
}
}
}
// Only proceed if we have auth token when needed, or if auth is not needed
if (!needsAuth || authToken) {
// Always use proxy to avoid CORS, 405, and name resolution issues
// Pass endpoint and auth token as query parameters to proxy
const proxyUrlParams = new URLSearchParams({
endpoint,
})
if (authToken) {
proxyUrlParams.set('auth', authToken)
}
const proxyUrl = `/api/nip95-upload?${proxyUrlParams.toString()}`
const url = await tryUploadEndpoint(proxyUrl, formData, true)
return { url, type: mediaType }
}
} catch (e) {
const error = e instanceof Error ? e : new Error(String(e))
const errorMessage = error.message
// If unlock is required, propagate the error immediately
if (errorMessage === 'UNLOCK_REQUIRED' || isUnlockRequiredError(error)) {
throw error
}
console.error('NIP-95 upload endpoint error:', {
endpoint,
error: errorMessage,
fileSize: file.size,
fileType: file.type,
})
lastError = error
// Continue to next endpoint
} }
lastError = new Error(`Upload failed for endpoint: ${endpoint}`)
} }
// All endpoints failed // All endpoints failed
@@ -211,3 +171,72 @@ export async function uploadNip95Media(file: File): Promise<MediaRef> {
} }
throw new Error('Failed to upload: no endpoints available') throw new Error('Failed to upload: no endpoints available')
} }
async function attemptUploadToEndpoint(params: {
endpoint: string
formData: FormData
mediaType: MediaRef['type']
file: File
}): Promise<MediaRef | null> {
try {
const needsAuth = params.endpoint.includes('nostrcheck.me')
const authToken = await resolveNip98AuthToken({ endpoint: params.endpoint, needsAuth })
if (needsAuth && !authToken) {
return null
}
const proxyUrlParams = new URLSearchParams({ endpoint: params.endpoint })
if (authToken) {
proxyUrlParams.set('auth', authToken)
}
const proxyUrl = `/api/nip95-upload?${proxyUrlParams.toString()}`
const url = await tryUploadEndpoint(proxyUrl, params.formData, true)
return { url, type: params.mediaType }
} catch (e) {
const error = e instanceof Error ? e : new Error(String(e))
if (error.message === 'UNLOCK_REQUIRED' || isUnlockRequiredError(error)) {
throw error
}
console.error('NIP-95 upload endpoint error:', {
endpoint: params.endpoint,
error: error.message,
fileSize: params.file.size,
fileType: params.file.type,
})
return null
}
}
async function resolveNip98AuthToken(params: { endpoint: string; needsAuth: boolean }): Promise<string | undefined> {
if (!params.needsAuth) {
return undefined
}
if (!isNip98Available()) {
const pubkey = nostrService.getPublicKey()
if (!pubkey) {
console.warn('NIP-98 authentication required for nostrcheck.me but no account found. Please create or import an account.')
return undefined
}
const isUnlocked = nostrAuthService.isUnlocked()
if (!isUnlocked) {
throw createUnlockRequiredError()
}
console.warn('NIP-98 authentication required for nostrcheck.me but not available. Skipping endpoint.')
return undefined
}
try {
// Generate NIP-98 token for the actual endpoint (not the proxy)
// The token must be for the final destination URL
return await generateNip98Token('POST', params.endpoint)
} catch (authError) {
console.error('Failed to generate NIP-98 token:', authError)
return undefined
}
}
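The three reader helpers encode the provider formats that were previously documented only in comments. For reference, the shapes parseUploadResponse accepts (shapes taken from the removed comments; the URLs and endpoints below are illustrative):

parseUploadResponse({ ok: true, file: { id: 'abc', url: 'https://void.cat/d/abc' } }, 'https://void.cat')     // void.cat
parseUploadResponse({ status: 'success', url: 'https://nostrcheck.me/media/x.png' }, 'https://nostrcheck.me') // nostrcheck.me
parseUploadResponse({ url: 'https://example.com/file.png' }, 'https://example.com')                           // standard { url }
// Anything else logs "NIP-95 upload missing URL" and throws.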

View File

@@ -1,5 +1,5 @@
import type { Event } from 'nostr-tools'
-import type { Article, KindType, Page, Purchase, Review, ReviewTip, Series, Sponsoring } from '@/types/nostr'
+import type { Article, Page, Purchase, Review, ReviewTip, Series, Sponsoring } from '@/types/nostr'
import { extractTagsFromEvent } from './nostrTagSystem'
import { buildObjectId, parseObjectId } from './urlGenerator'
import { generateHashId } from './hashIdGenerator'
@@ -34,41 +34,21 @@ export async function parseSeriesFromEvent(event: Event): Promise<Series | null>
if (!tags.title || !tags.description) { if (!tags.title || !tags.description) {
return null return null
} }
// Map category from new system to old system const category = mapNostrCategoryToLegacy(tags.category) ?? 'science-fiction'
let category: 'science-fiction' | 'scientific-research' = 'science-fiction'
if (tags.category === 'sciencefiction') {
category = 'science-fiction'
} else if (tags.category === 'research') {
category = 'scientific-research'
}
// Extract hash, version, index from id tag or parse it const { hash, version, index } = await resolveObjectIdParts({
let hash: string ...(tags.id ? { idTag: tags.id } : {}),
let version = tags.version ?? 0 defaultVersion: tags.version ?? 0,
let index = 0 defaultIndex: 0,
generateHash: async (): Promise<string> => generateHashId({
if (tags.id) {
const parsed = parseObjectId(tags.id)
const { hash: parsedHash, version: parsedVersion, index: parsedIndex } = parsed
if (parsedHash) {
hash = parsedHash
version = parsedVersion ?? version
index = parsedIndex ?? index
} else {
// If id is just a hash, use it directly
hash = tags.id
}
} else {
// Generate hash from series data
hash = await generateHashId({
type: 'series', type: 'series',
pubkey: event.pubkey, pubkey: event.pubkey,
title: tags.title, title: tags.title,
description: tags.description, description: tags.description,
category: tags.category ?? 'sciencefiction', category: tags.category ?? 'sciencefiction',
coverUrl: tags.coverUrl ?? '', coverUrl: tags.coverUrl ?? '',
}),
}) })
}
const id = buildObjectId(hash, index, version) const id = buildObjectId(hash, index, version)
@ -172,36 +152,142 @@ function getPreviewContent(content: string, previewTag?: string): { previewConte
return { previewContent } return { previewContent }
} }
async function buildArticle(event: Event, tags: ReturnType<typeof extractTagsFromEvent>, preview: string): Promise<Article> { function mapNostrCategoryToLegacy(
// Map category from new system to old system category: string | undefined
let category: 'science-fiction' | 'scientific-research' | undefined ): 'science-fiction' | 'scientific-research' | undefined {
if (tags.category === 'sciencefiction') { if (category === 'sciencefiction') {
category = 'science-fiction' return 'science-fiction'
} else if (tags.category === 'research') {
category = 'scientific-research'
} else {
category = undefined
} }
if (category === 'research') {
return 'scientific-research'
}
return undefined
}
interface ObjectIdParts {
hash: string
version: number
index: number
}
function parseObjectIdPartsFromTag(params: { idTag: string; defaultVersion: number; defaultIndex: number }): ObjectIdParts {
const parsed = parseObjectId(params.idTag)
const hash = parsed.hash ?? params.idTag
const version = parsed.version ?? params.defaultVersion
const index = parsed.index ?? params.defaultIndex
return { hash, version, index }
}
async function resolveObjectIdParts(params: {
idTag?: string
defaultVersion: number
defaultIndex: number
generateHash: () => Promise<string>
}): Promise<ObjectIdParts> {
if (params.idTag) {
return parseObjectIdPartsFromTag({
idTag: params.idTag,
defaultVersion: params.defaultVersion,
defaultIndex: params.defaultIndex,
})
}
const hash = await params.generateHash()
return { hash, version: params.defaultVersion, index: params.defaultIndex }
}
function resolveThumbnailUrl(tags: ReturnType<typeof extractTagsFromEvent>): string {
if (typeof tags.bannerUrl === 'string') {
return tags.bannerUrl
}
if (typeof tags.pictureUrl === 'string') {
return tags.pictureUrl
}
return ''
}
function parsePagesFromEventJsonTag(event: Event): Page[] | undefined {
const jsonTag = event.tags.find((tag) => tag[0] === 'json')?.[1]
if (!jsonTag) {
return undefined
}
try {
const parsed: unknown = JSON.parse(jsonTag)
if (typeof parsed !== 'object' || parsed === null) {
return undefined
}
const metadata = parsed as { pages?: unknown }
if (!Array.isArray(metadata.pages)) {
return undefined
}
return metadata.pages as Page[]
} catch {
return undefined
}
}
function buildArticlePaymentFields(tags: ReturnType<typeof extractTagsFromEvent>): Partial<Article> {
const result: Partial<Article> = {}
if (tags.invoice) {
result.invoice = tags.invoice
}
if (tags.paymentHash) {
result.paymentHash = tags.paymentHash
}
return result
}
function buildArticleClassificationFields(params: {
tags: ReturnType<typeof extractTagsFromEvent>
category: 'science-fiction' | 'scientific-research' | undefined
isPresentation: boolean
}): Partial<Article> {
const result: Partial<Article> = {}
if (params.category) {
result.category = params.category
}
if (params.isPresentation) {
result.isPresentation = true
}
if (params.tags.type === 'publication' || params.tags.type === 'author') {
result.kindType = 'article'
}
return result
}
function buildArticleOptionalMetaFields(params: {
tags: ReturnType<typeof extractTagsFromEvent>
pages: Page[] | undefined
}): Partial<Article> {
const result: Partial<Article> = {}
if (params.tags.mainnetAddress) {
result.mainnetAddress = params.tags.mainnetAddress
}
if (params.tags.totalSponsoring) {
result.totalSponsoring = params.tags.totalSponsoring
}
if (params.tags.seriesId) {
result.seriesId = params.tags.seriesId
}
if (params.tags.bannerUrl) {
result.bannerUrl = params.tags.bannerUrl
}
if (params.pages && params.pages.length > 0) {
result.pages = params.pages
}
return result
}
 async function buildArticle(event: Event, tags: ReturnType<typeof extractTagsFromEvent>, preview: string): Promise<Article> {
   const category = mapNostrCategoryToLegacy(tags.category)
   const isPresentation = tags.type === 'author'
-  // Extract hash, version, index from id tag or parse it
-  let hash: string
-  let version = tags.version ?? 0
-  let index = 0
-  if (tags.id) {
-    const parsed = parseObjectId(tags.id)
-    if (parsed.hash) {
-      ;({ hash } = parsed)
-      version = parsed.version ?? version
-      index = parsed.index ?? index
-    } else {
-      // If id is just a hash, use it directly
-      hash = tags.id
-    }
-  } else {
-    // Generate hash from article data
-    hash = await generateHashId({
+  const { hash, version, index } = await resolveObjectIdParts({
+    ...(tags.id ? { idTag: tags.id } : {}),
+    defaultVersion: tags.version ?? 0,
+    defaultIndex: 0,
+    generateHash: async (): Promise<string> => generateHashId({
       type: isPresentation ? 'author' : 'publication',
       pubkey: event.pubkey,
       title: tags.title ?? 'Untitled',
@@ -210,25 +296,12 @@ async function buildArticle(event: Event, tags: ReturnType<typeof extractTagsFromEvent>
       seriesId: tags.seriesId ?? '',
       bannerUrl: tags.bannerUrl ?? '',
       zapAmount: tags.zapAmount ?? 800,
+    }),
   })
-  }
   const id = buildObjectId(hash, index, version)
-  // Extract pages from JSON metadata if present
-  let pages: Page[] | undefined
-  try {
-    const jsonTag = event.tags.find((tag) => tag[0] === 'json')?.[1]
-    if (jsonTag) {
-      const metadata = JSON.parse(jsonTag) as { pages?: Page[] }
-      const { pages: metadataPages } = metadata
-      if (metadataPages && Array.isArray(metadataPages)) {
-        pages = metadataPages
-      }
-    }
-  } catch {
-    // Ignore JSON parsing errors
-  }
+  const pages = parsePagesFromEventJsonTag(event)
   return {
     id,
@@ -244,25 +317,10 @@ async function buildArticle(event: Event, tags: ReturnType<typeof extractTagsFromEvent>
     createdAt: event.created_at,
     zapAmount: tags.zapAmount ?? 800,
     paid: false,
-    thumbnailUrl: ((): string => {
-      if (typeof tags.bannerUrl === 'string') {
-        return tags.bannerUrl
-      }
-      if (typeof tags.pictureUrl === 'string') {
-        return tags.pictureUrl
-      }
-      return ''
-    })(), // Required field with default
-    ...(tags.invoice ? { invoice: tags.invoice } : {}),
-    ...(tags.paymentHash ? { paymentHash: tags.paymentHash } : {}),
-    ...(category ? { category } : {}),
-    ...(isPresentation ? { isPresentation: true } : {}),
-    ...(tags.mainnetAddress ? { mainnetAddress: tags.mainnetAddress } : {}),
-    ...(tags.totalSponsoring ? { totalSponsoring: tags.totalSponsoring } : {}),
-    ...(tags.seriesId ? { seriesId: tags.seriesId } : {}),
-    ...(tags.bannerUrl ? { bannerUrl: tags.bannerUrl } : {}),
-    ...(pages && pages.length > 0 ? { pages } : {}),
-    ...(tags.type === 'publication' || tags.type === 'author' ? { kindType: 'article' as KindType } : {}),
+    thumbnailUrl: resolveThumbnailUrl(tags), // Required field with default
+    ...buildArticlePaymentFields(tags),
+    ...buildArticleClassificationFields({ tags, category, isPresentation }),
+    ...buildArticleOptionalMetaFields({ tags, pages }),
   }
 }
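
The refactor above delegates id parsing to resolveObjectIdParts, whose body is not shown in this hunk. A minimal sketch of what such a helper could look like, reconstructed from the removed inline logic — the name, signature and defaults are taken from the call site, not from the committed implementation:

// Hypothetical sketch, not the committed helper.
// Resolves hash/version/index from an optional id tag, generating a hash when no tag exists.
async function resolveObjectIdParts(params: {
  idTag?: string
  defaultVersion: number
  defaultIndex: number
  generateHash: () => Promise<string>
}): Promise<{ hash: string; version: number; index: number }> {
  let version = params.defaultVersion
  let index = params.defaultIndex
  if (params.idTag) {
    const parsed = parseObjectId(params.idTag)
    if (parsed.hash) {
      // id tag carries structured parts: reuse them
      version = parsed.version ?? version
      index = parsed.index ?? index
      return { hash: parsed.hash, version, index }
    }
    // id tag is a bare hash: use it directly
    return { hash: params.idTag, version, index }
  }
  // No id tag: derive a hash from the object data
  return { hash: await params.generateHash(), version, index }
}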

View File

@@ -162,29 +162,12 @@ class NotificationDetector {
       })
       for (const obj of userObjects) {
-        if (Array.isArray(obj.published) && obj.published.length > 0) {
-          const eventId = obj.id.split(':')[1] ?? obj.id
-          const existing = await notificationService.getNotificationByEventId(eventId)
-          const alreadyNotified = existing?.type === 'published'
-          const recentlyCreated = obj.createdAt * 1000 > oneHourAgo
-          if (!alreadyNotified && recentlyCreated) {
-            const relays = obj.published
-            await notificationService.createNotification({
-              type: 'published',
-              objectType,
-              objectId: obj.id,
-              eventId,
-              data: {
-                relays,
-                object: obj,
-                title: 'Publication réussie',
-                message: `Votre contenu a été publié sur ${relays.length} relais`,
-              },
-            })
-          }
-        }
+        await this.maybeCreatePublishedNotification({
+          obj,
+          objectType,
+          oneHourAgo,
+        })
       }
     } catch (error) {
       console.error(`[NotificationDetector] Error scanning published status for ${objectType}:`, error)
     }
@@ -194,6 +177,40 @@ class NotificationDetector {
     }
   }

+  private async maybeCreatePublishedNotification(params: {
+    obj: CachedObject
+    objectType: string
+    oneHourAgo: number
+  }): Promise<void> {
+    if (!Array.isArray(params.obj.published) || params.obj.published.length === 0) {
+      return
+    }
+    if (params.obj.createdAt * 1000 <= params.oneHourAgo) {
+      return
+    }
+    const eventId = params.obj.id.split(':')[1] ?? params.obj.id
+    const existing = await notificationService.getNotificationByEventId(eventId)
+    if (existing?.type === 'published') {
+      return
+    }
+    const relays = params.obj.published
+    await notificationService.createNotification({
+      type: 'published',
+      objectType: params.objectType,
+      objectId: params.obj.id,
+      eventId,
+      data: {
+        relays,
+        object: params.obj,
+        title: 'Publication réussie',
+        message: `Votre contenu a été publié sur ${relays.length} relais`,
+      },
+    })
+  }
+
   /**
    * Manually check for a specific object change
    */
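
The extracted helper keeps the three conditions of the removed inline block as early returns: no published relays, created more than an hour ago, or already notified for this event. A small illustration of the recency guard, with made-up timestamps (createdAt is in seconds, the cutoff in milliseconds):

const oneHourAgo = Date.now() - 60 * 60 * 1000
const obj = { createdAt: Math.floor(Date.now() / 1000) - 30 } // hypothetical object published 30 s ago
const recentEnough = obj.createdAt * 1000 > oneHourAgo // true, so the notification path continues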

View File

@@ -239,14 +239,14 @@ class ObjectCacheService {
    * Get an object from cache by hash
    * Returns the latest non-hidden version
    */
-  async get(objectType: ObjectType, hash: string): Promise<unknown | null> {
+  async get(objectType: ObjectType, hash: string): Promise<unknown> {
     try {
       const db = await this.initDB(objectType)
       const transaction = db.transaction(['objects'], 'readonly')
       const store = transaction.objectStore('objects')
       const hashIndex = store.index('hash')
-      return new Promise<unknown | null>((resolve, reject) => {
+      return new Promise<unknown>((resolve, reject) => {
         const request = hashIndex.openCursor(IDBKeyRange.only(hash))
         const objects: CachedObject[] = []
@@ -282,7 +282,7 @@ class ObjectCacheService {
   /**
    * Get an object from cache by ID
    */
-  async getById(objectType: ObjectType, id: string): Promise<unknown | null> {
+  async getById(objectType: ObjectType, id: string): Promise<unknown> {
     try {
       const helper = this.getDBHelper(objectType)
       const obj = await helper.get<CachedObject>(id)
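
Since null is already assignable to unknown, the `unknown | null` unions were redundant; dropping null changes nothing for callers, which still have to narrow the result before use. A hedged caller-side sketch (the exported instance name and the 'article' object type are assumptions):

const cached = await objectCacheService.get('article', hash)
if (cached !== null && cached !== undefined) {
  // narrow further with a real type guard before treating it as an Article
}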

View File

@@ -128,23 +128,28 @@ export function verifyPaymentSplit(
   authorAmount?: number,
   platformAmount?: number
 ): boolean {
-  switch (type) {
-    case 'article': {
-      const articleSplit = calculateArticleSplit(totalAmount)
-      return articleSplit.author === (authorAmount ?? 0) && articleSplit.platform === (platformAmount ?? 0)
-    }
-    case 'review': {
-      const reviewSplit = calculateReviewSplit(totalAmount)
-      return reviewSplit.reviewer === (authorAmount ?? 0) && reviewSplit.platform === (platformAmount ?? 0)
-    }
-    case 'sponsoring': {
-      const sponsoringSplit = calculateSponsoringSplit(totalAmount)
-      return sponsoringSplit.authorSats === (authorAmount ?? 0) && sponsoringSplit.platformSats === (platformAmount ?? 0)
-    }
-    default:
-      return false
-  }
+  const author = authorAmount ?? 0
+  const platform = platformAmount ?? 0
+  if (type === 'article') {
+    const articleSplit = calculateArticleSplit(totalAmount)
+    if (articleSplit.author !== author) {
+      return false
+    }
+    return articleSplit.platform === platform
+  }
+  if (type === 'review') {
+    const reviewSplit = calculateReviewSplit(totalAmount)
+    if (reviewSplit.reviewer !== author) {
+      return false
+    }
+    return reviewSplit.platform === platform
+  }
+  const sponsoringSplit = calculateSponsoringSplit(totalAmount)
+  if (sponsoringSplit.authorSats !== author) {
+    return false
+  }
+  return sponsoringSplit.platformSats === platform
 }
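
A quick usage sketch of the rewritten function, assuming the parameter order (type, totalAmount, authorAmount, platformAmount) implied by the body and the calculate*Split helpers exported from the same module:

const split = calculateArticleSplit(1000)
verifyPaymentSplit('article', 1000, split.author, split.platform) // true by construction
verifyPaymentSplit('article', 1000, split.author + 1, split.platform) // false: author share off by one
verifyPaymentSplit('review', 1000) // missing amounts default to 0

Note that the rewrite routes every type other than 'article' and 'review' through the sponsoring branch, whereas the old switch returned false for unknown types; that is only equivalent if the type union is closed to those three values.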

View File

@@ -364,9 +364,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       const errorText = titleMatch?.[1] ?? h1Match?.[1] ?? 'HTML error page returned'
       // Check if it's a 404 or other error page
-      const is404 = response.body.includes('404') || response.body.includes('Not Found') || titleMatch?.[1]?.includes('404')
-      const is403 = response.body.includes('403') || response.body.includes('Forbidden') || titleMatch?.[1]?.includes('403')
-      const is500 = response.body.includes('500') || response.body.includes('Internal Server Error') || titleMatch?.[1]?.includes('500')
+      const is404 = response.body.includes('404') || response.body.includes('Not Found') || titleMatch?.[1]?.includes('404') === true
+      const is403 = response.body.includes('403') || response.body.includes('Forbidden') || titleMatch?.[1]?.includes('403') === true
+      const is500 = response.body.includes('500') || response.body.includes('Internal Server Error') || titleMatch?.[1]?.includes('500') === true
       console.error('NIP-95 proxy HTML response error:', {
         targetEndpoint,
@@ -378,13 +378,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
         is500,
         bodyPreview: response.body.substring(0, 500),
         contentType: 'HTML (expected JSON)',
-        suggestion: is404
-          ? 'The endpoint URL may be incorrect or the endpoint does not exist'
-          : is403
-            ? 'The endpoint may require authentication or have access restrictions'
-            : is500
-              ? 'The endpoint server encountered an error'
-              : 'The endpoint may not be a valid NIP-95 upload endpoint or may require specific headers',
+        suggestion: buildHtmlErrorSuggestion({ is404, is403, is500 }),
       })
       let userMessage = `Endpoint returned an HTML error page instead of JSON`
@@ -427,6 +421,19 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   }
 }

+function buildHtmlErrorSuggestion(params: { is404: boolean; is403: boolean; is500: boolean }): string {
+  if (params.is404) {
+    return 'The endpoint URL may be incorrect or the endpoint does not exist'
+  }
+  if (params.is403) {
+    return 'The endpoint may require authentication or have access restrictions'
+  }
+  if (params.is500) {
+    return 'The endpoint server encountered an error'
+  }
+  return 'The endpoint may not be a valid NIP-95 upload endpoint or may require specific headers'
+}
+
 function getErrnoCode(error: unknown): string | undefined {
   if (typeof error !== 'object' || error === null) {
     return undefined
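
The extracted helper preserves the precedence of the old nested ternary (404, then 403, then 500, then the generic fallback), for example:

buildHtmlErrorSuggestion({ is404: true, is403: true, is500: false })
// -> 'The endpoint URL may be incorrect or the endpoint does not exist' (404 wins)
buildHtmlErrorSuggestion({ is404: false, is403: false, is500: false })
// -> 'The endpoint may not be a valid NIP-95 upload endpoint or may require specific headers'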

View File

@@ -1,6 +1,6 @@
 import { useRouter } from 'next/router'
 import Head from 'next/head'
-import { useEffect, useState } from 'react'
+import { useEffect, useState, useCallback } from 'react'
 import { fetchAuthorByHashId } from '@/lib/authorQueries'
 import { getSeriesByAuthor } from '@/lib/seriesQueries'
 import { getAuthorSponsoring } from '@/lib/sponsoring'
@@ -194,7 +194,7 @@ function useAuthorData(hashIdOrPubkey: string): {
   const [loading, setLoading] = useState(true)
   const [error, setError] = useState<string | null>(null)
-  const reload = async (): Promise<void> => {
+  const reload = useCallback(async (): Promise<void> => {
     if (!hashIdOrPubkey) {
       return
     }
@@ -212,7 +212,7 @@ function useAuthorData(hashIdOrPubkey: string): {
     } finally {
       setLoading(false)
     }
-  }
+  }, [hashIdOrPubkey])
   useEffect(() => {
     void reload()
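
Wrapping reload in useCallback gives it a stable identity between renders, so anything that lists it as a dependency only re-runs when hashIdOrPubkey changes. A hedged sketch of the consuming effect, assuming reload is (or can be) added to its dependency array:

useEffect(() => {
  void reload()
}, [reload]) // stable thanks to useCallback; re-runs only when hashIdOrPubkey changes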

View File

@@ -8,17 +8,15 @@ import type { ArticleFilters } from '@/components/ArticleFilters'
 import { HomeView } from '@/components/HomeView'

 function usePresentationArticles(allArticles: Article[]): Map<string, Article> {
-  const [presentationArticles, setPresentationArticles] = useState<Map<string, Article>>(new Map())
-  useEffect(() => {
+  return useMemo(() => {
     const presentations = new Map<string, Article>()
     allArticles.forEach((article) => {
       if (article.isPresentation && article.pubkey) {
         presentations.set(article.pubkey, article)
       }
     })
-    setPresentationArticles(presentations)
+    return presentations
   }, [allArticles])
-  return presentationArticles
 }

 function useHomeState(): {
@@ -117,7 +115,7 @@ function useHomeController(): {
   unlockedArticles: Set<string>
   handleUnlock: (article: Article) => Promise<void>
 } {
-  const { } = useNostrAuth()
+  useNostrAuth()
   const {
     searchQuery,
     setSearchQuery,
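
Replacing the useState + useEffect pair with useMemo removes the extra render the setState-in-effect pattern caused and keeps the derived map in sync within the same render. A hypothetical call site, illustrating why a Map keyed by pubkey is convenient here:

const presentationArticles = usePresentationArticles(allArticles)
const presentation = article.pubkey ? presentationArticles.get(article.pubkey) : undefined // O(1) lookup per author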

View File

@@ -43,11 +43,11 @@ export default function PublishPage(): React.ReactElement {
   }

   useEffect(() => {
+    const load = async (): Promise<void> => {
       if (!pubkey) {
         setSeriesOptions([])
         return
       }
-    const load = async (): Promise<void> => {
       const items = await getSeriesByAuthor(pubkey)
       setSeriesOptions(items.map((s) => ({ id: s.id, title: s.title })))
     }

View File

@@ -55,7 +55,7 @@ export default function PurchasePage(): React.ReactElement | null {
         {error && <p className="text-sm text-red-400">{error}</p>}
         {purchase && (
           <div className="bg-cyber-darker border border-neon-cyan/30 rounded-lg p-6 space-y-4">
-            <h1 className="text-3xl font-bold text-neon-cyan">Paiement d'article</h1>
+            <h1 className="text-3xl font-bold text-neon-cyan">Paiement d&apos;article</h1>
             <div className="space-y-2">
               <p className="text-cyber-accent">
                 <span className="font-semibold">Montant :</span> {purchase.amount} sats

View File

@@ -46,7 +46,7 @@ export default function ReviewTipPage(): React.ReactElement | null {
   return (
     <>
       <Head>
-        <title>Remerciement d'avis - zapwall.fr</title>
+        <title>Remerciement d&apos;avis - zapwall.fr</title>
       </Head>
       <main className="min-h-screen bg-cyber-darker">
         <PageHeader />
@@ -55,7 +55,7 @@ export default function ReviewTipPage(): React.ReactElement | null {
         {error && <p className="text-sm text-red-400">{error}</p>}
         {reviewTip && (
           <div className="bg-cyber-darker border border-neon-cyan/30 rounded-lg p-6 space-y-4">
-            <h1 className="text-3xl font-bold text-neon-cyan">Remerciement d'avis</h1>
+            <h1 className="text-3xl font-bold text-neon-cyan">Remerciement d&apos;avis</h1>
             <div className="space-y-2">
               <p className="text-cyber-accent">
                 <span className="font-semibold">Montant :</span> {reviewTip.amount} sats