diff --git a/hooks/useUserArticles.ts b/hooks/useUserArticles.ts
index 2990587..45594a8 100644
--- a/hooks/useUserArticles.ts
+++ b/hooks/useUserArticles.ts
@@ -51,8 +51,8 @@ export function useUserArticles(
       if (sortedArticles.length === 0) {
         setError('Aucun contenu trouvé')
       }
-    } catch (error) {
-      console.error('Error loading user articles from cache:', error)
+    } catch (loadError) {
+      console.error('Error loading user articles from cache:', loadError)
       setError('Erreur lors du chargement des articles')
     } finally {
       setLoading(false)
diff --git a/lib/articlePublisher.ts b/lib/articlePublisher.ts
index 78ff201..6a0206a 100644
--- a/lib/articlePublisher.ts
+++ b/lib/articlePublisher.ts
@@ -284,8 +284,8 @@ export class ArticlePublisher {
   async getAuthorPresentation(pubkey: string): Promise {
     try {
       // Read only from IndexedDB cache
-      const { objectCache } = await import('./objectCache')
-      const cached = await objectCache.getAuthorByPubkey(pubkey)
+      const { objectCache: objectCacheService } = await import('./objectCache')
+      const cached = await objectCacheService.getAuthorByPubkey(pubkey)
       if (cached) {
         const presentation = cached
         // Calculate totalSponsoring from cache
diff --git a/lib/articlePublisherHelpersPresentation.ts b/lib/articlePublisherHelpersPresentation.ts
index b5d3a2b..5f7507e 100644
--- a/lib/articlePublisherHelpersPresentation.ts
+++ b/lib/articlePublisherHelpersPresentation.ts
@@ -192,13 +192,27 @@ export async function parsePresentationEvent(event: Event): Promise
 Promise
+  extractor: (event: Event) => Promise
   getHash?: (extracted: unknown) => string | null
   getIndex?: (extracted: unknown) => number
   getVersion?: (event: Event) => number
@@ -45,46 +45,43 @@ export async function groupAndCacheEventsByHash(
       if (!eventsByHashId.has(hash)) {
         eventsByHashId.set(hash, [])
       }
-      eventsByHashId.get(hash)!.push(event)
+      const hashEvents = eventsByHashId.get(hash)
+      if (hashEvents) {
+        hashEvents.push(event)
+      }
     }
   }

   // Cache each object (latest version)
   for (const [_hash, hashEvents] of eventsByHashId.entries()) {
     const latestEvent = getLatestVersion(hashEvents)
-    if (!latestEvent) {
-      continue
+    if (latestEvent) {
+      const extracted = await extractor(latestEvent)
+      if (extracted) {
+        // Get hash, index, version, hidden
+        const extractedHash = getHash ? getHash(extracted) : null
+        const extractedObj = extracted as ExtractedObjectWithId
+        const extractedId = extractedHash ?? extractedObj.id
+
+        if (extractedId) {
+          const publicationParsed = parseObjectId(extractedId)
+          const hash = publicationParsed.hash ?? extractedId
+          const index = getIndex ? getIndex(extracted) : publicationParsed.index ?? extractedObj.index ?? 0
+          const version = getVersion ? getVersion(latestEvent) : extractTagsFromEvent(latestEvent).version ?? 0
+          const hidden = getHidden ? getHidden(latestEvent) : extractTagsFromEvent(latestEvent).hidden ?? false
+
+          await writeObjectToCache({
+            objectType,
+            hash,
+            event: latestEvent,
+            parsed: extracted,
+            version,
+            hidden,
+            index,
+          })
+        }
+      }
     }
-
-    const extracted = await extractor(latestEvent)
-    if (!extracted) {
-      continue
-    }
-
-    // Get hash, index, version, hidden
-    const extractedHash = getHash ? getHash(extracted) : null
-    const extractedObj = extracted as ExtractedObjectWithId
-    const extractedId = extractedHash ?? extractedObj.id
-
-    if (!extractedId) {
-      continue
-    }
-
-    const publicationParsed = parseObjectId(extractedId)
-    const hash = publicationParsed.hash ?? extractedId
-    const index = getIndex ? getIndex(extracted) : publicationParsed.index ?? extractedObj.index ?? 0
-    const version = getVersion ? getVersion(latestEvent) : extractTagsFromEvent(latestEvent).version ?? 0
-    const hidden = getHidden ? getHidden(latestEvent) : extractTagsFromEvent(latestEvent).hidden ?? false
-
-    await writeObjectToCache({
-      objectType,
-      hash,
-      event: latestEvent,
-      parsed: extracted,
-      version,
-      hidden,
-      index,
-    })
   }
 }
@@ -95,7 +92,7 @@ export async function cacheEventAsObject(
   event: Event,
   objectType: ObjectType,
-  extractor: (event: Event) => Promise
+  extractor: (event: Event) => Promise
 ): Promise {
   const extracted = await extractor(event)
   if (!extracted) {
diff --git a/lib/helpers/indexedDBHelper.ts b/lib/helpers/indexedDBHelper.ts
index 6612c5c..1fe06d2 100644
--- a/lib/helpers/indexedDBHelper.ts
+++ b/lib/helpers/indexedDBHelper.ts
@@ -21,7 +21,7 @@ export interface IndexedDBConfig {
 export class IndexedDBError extends Error {
   public readonly operation: string
   public readonly storeName: string | undefined
-  public override readonly cause: unknown | undefined
+  public override readonly cause: unknown

   public override readonly name = 'IndexedDBError'
diff --git a/lib/keyManagementTwoLevel.ts b/lib/keyManagementTwoLevel.ts
index 168c43b..5b43c24 100644
--- a/lib/keyManagementTwoLevel.ts
+++ b/lib/keyManagementTwoLevel.ts
@@ -433,7 +433,7 @@ export async function deleteAccountTwoLevel(): Promise {
   await indexedDBStorage.delete('nostr_account_exists')

   // Try to remove credential (may not be possible via API)
-  if (navigator.credentials && navigator.credentials.preventSilentAccess) {
-    navigator.credentials.preventSilentAccess()
+  if (navigator.credentials?.preventSilentAccess) {
+    void navigator.credentials.preventSilentAccess()
   }
 }
diff --git a/lib/mnemonicIcons.ts b/lib/mnemonicIcons.ts
index 8889666..17d5b14 100644
--- a/lib/mnemonicIcons.ts
+++ b/lib/mnemonicIcons.ts
@@ -65,17 +65,15 @@ function expandDictionary(): MnemonicIcon[] {
     const variantIndex = Math.floor(i / base) % variants.length
     const baseIcon = ALL_ICONS[baseIndex]

-    if (!baseIcon) {
-      continue
-    }
-
-    if (variantIndex === 0) {
-      expanded.push(baseIcon)
-    } else {
-      expanded.push({
-        name: `${baseIcon.name}_${variantIndex}`,
-        emoji: `${baseIcon.emoji}${variants[variantIndex]}`,
-      })
+    if (baseIcon) {
+      if (variantIndex === 0) {
+        expanded.push(baseIcon)
+      } else {
+        expanded.push({
+          name: `${baseIcon.name}_${variantIndex}`,
+          emoji: `${baseIcon.emoji}${variants[variantIndex]}`,
+        })
+      }
     }
   }

@@ -100,6 +98,7 @@ export function generateMnemonicIcons(pubkey: string): string[] {
   for (let i = 0; i < 4; i++) {
     const segment = pubkey.slice(i * 8, (i + 1) * 8) || pubkey.slice(-8)
+    // Note: slice returns empty string, not null/undefined, so || is appropriate here
     const segmentHash = hashString(segment)
     const combinedHash = (baseHash + segmentHash + i * 1000) % DICTIONARY.length
     const icon = DICTIONARY[combinedHash]
diff --git a/lib/nip95.ts b/lib/nip95.ts
index 9274264..dc7eff3 100644
--- a/lib/nip95.ts
+++ b/lib/nip95.ts
@@ -87,7 +87,7 @@ async function tryUploadEndpoint(endpoint: string, formData: FormData, useProxy:
     let errorMessage = 'Upload failed'
     try {
       const text = await response.text()
-      errorMessage = text || `HTTP ${response.status} ${response.statusText}`
+      errorMessage = text ?? `HTTP ${response.status} ${response.statusText}`
     } catch {
       errorMessage = `HTTP ${response.status} ${response.statusText}`
     }
@@ -137,7 +137,7 @@ export async function uploadNip95Media(file: File): Promise {
        const isUnlocked = nostrAuthService.isUnlocked()
        if (!pubkey) {
          console.warn('NIP-98 authentication required for nostrcheck.me but no account found. Please create or import an account.')
-          continue
+          // Skip this endpoint
        } else if (!isUnlocked) {
          // Throw a special error that can be caught to trigger unlock modal
          // This error should propagate to the caller, not be caught here
@@ -146,31 +146,34 @@ export async function uploadNip95Media(file: File): Promise {
          throw unlockError
        } else {
          console.warn('NIP-98 authentication required for nostrcheck.me but not available. Skipping endpoint.')
-          continue
+          // Skip this endpoint
+        }
+      } else {
+        try {
+          // Generate NIP-98 token for the actual endpoint (not the proxy)
+          // The token must be for the final destination URL
+          authToken = await generateNip98Token('POST', endpoint)
+        } catch (authError) {
+          console.error('Failed to generate NIP-98 token:', authError)
+          // Skip this endpoint if auth fails
        }
-        }
-        try {
-          // Generate NIP-98 token for the actual endpoint (not the proxy)
-          // The token must be for the final destination URL
-          authToken = await generateNip98Token('POST', endpoint)
-        } catch (authError) {
-          console.error('Failed to generate NIP-98 token:', authError)
-          // Continue to next endpoint if auth fails
-          continue
        }
      }

-      // Always use proxy to avoid CORS, 405, and name resolution issues
-      // Pass endpoint and auth token as query parameters to proxy
-      const proxyUrlParams = new URLSearchParams({
-        endpoint,
-      })
-      if (authToken) {
-        proxyUrlParams.set('auth', authToken)
+      // Only proceed if we have auth token when needed, or if auth is not needed
+      if (!needsAuth || authToken) {
+        // Always use proxy to avoid CORS, 405, and name resolution issues
+        // Pass endpoint and auth token as query parameters to proxy
+        const proxyUrlParams = new URLSearchParams({
+          endpoint,
+        })
+        if (authToken) {
+          proxyUrlParams.set('auth', authToken)
+        }
+        const proxyUrl = `/api/nip95-upload?${proxyUrlParams.toString()}`
+        const url = await tryUploadEndpoint(proxyUrl, formData, true)
+        return { url, type: mediaType }
      }
-      const proxyUrl = `/api/nip95-upload?${proxyUrlParams.toString()}`
-      const url = await tryUploadEndpoint(proxyUrl, formData, true)
-      return { url, type: mediaType }
    } catch (e) {
      const error = e instanceof Error ? e : new Error(String(e))
      const errorMessage = error.message
diff --git a/lib/nostr.ts b/lib/nostr.ts
index b5fa8e9..5bc500f 100644
--- a/lib/nostr.ts
+++ b/lib/nostr.ts
@@ -479,7 +479,7 @@ class NostrService {
        }
      } catch (error) {
        console.warn(`[NostrService] Error checking unpublished in ${objectType}:`, error)
-        continue
+        // Continue to next object type on error
      }
    }

@@ -499,7 +499,7 @@ class NostrService {
        }
      } catch (error) {
        console.warn(`[NostrService] Error searching for event in ${objectType}:`, error)
-        continue
+        // Continue to next object type on error
      }
    }
  }
diff --git a/lib/objectCache.ts b/lib/objectCache.ts
index c04d1a7..d01eed4 100644
--- a/lib/objectCache.ts
+++ b/lib/objectCache.ts
@@ -68,7 +68,11 @@ class ObjectCacheService {
       })
       this.dbHelpers.set(objectType, helper)
     }
-    return this.dbHelpers.get(objectType)!
+    const helper = this.dbHelpers.get(objectType)
+    if (!helper) {
+      throw new Error(`Database helper not found for ${objectType}`)
+    }
+    return helper
   }

   /**
diff --git a/lib/platformSync.ts b/lib/platformSync.ts
index cd3078f..4db8834 100644
--- a/lib/platformSync.ts
+++ b/lib/platformSync.ts
@@ -97,12 +97,9 @@ class PlatformSyncService {
     // Synchronize from all active relays
     for (let i = 0; i < activeRelays.length; i++) {
       const relayUrl = activeRelays[i]
-      if (!relayUrl) {
-        continue
-      }
-
-      // Update progress with current relay
-      syncProgressManager.setProgress({
+      if (relayUrl) {
+        // Update progress with current relay
+        syncProgressManager.setProgress({
         currentStep: 0,
         totalSteps: activeRelays.length,
         completed: false,
@@ -183,11 +180,9 @@ class PlatformSyncService {
           if (event.id === '527d83e0af20bf23c3e104974090ccc21536ece72c24eb784b3642890f63b763') {
             console.warn(`[PlatformSync] Target event accepted and added to relayEvents`)
           }
-        } else {
+        } else if (event.id === '527d83e0af20bf23c3e104974090ccc21536ece72c24eb784b3642890f63b763') {
           // Log events that match filter but don't have service tag
-          if (event.id === '527d83e0af20bf23c3e104974090ccc21536ece72c24eb784b3642890f63b763') {
-            console.warn(`[PlatformSync] Event ${event.id} rejected: service tag is "${tags.service}", expected "${PLATFORM_SERVICE}"`)
-          }
+          console.warn(`[PlatformSync] Event ${event.id} rejected: service tag is "${tags.service}", expected "${PLATFORM_SERVICE}"`)
         }
       })
@@ -228,6 +223,7 @@ class PlatformSyncService {
           currentRelay: relayUrl,
         })
       }
+      }
     }

     // Process all collected events
diff --git a/lib/publishWorker.ts b/lib/publishWorker.ts
index c5256c7..57fe2d1 100644
--- a/lib/publishWorker.ts
+++ b/lib/publishWorker.ts
@@ -118,25 +118,22 @@ class PublishWorkerService {
       const key = `${objectType}:${id}`
       const existing = this.unpublishedObjects.get(key)

-      // Skip if recently retried
-      if (existing && Date.now() - existing.lastRetryAt < RETRY_DELAY_MS) {
-        continue
-      }
+      // Skip if recently retried or max retries reached
+      const recentlyRetried = existing && Date.now() - existing.lastRetryAt < RETRY_DELAY_MS
+      const maxRetriesReached = existing && existing.retryCount >= MAX_RETRIES_PER_OBJECT

-      // Skip if max retries reached
-      if (existing && existing.retryCount >= MAX_RETRIES_PER_OBJECT) {
+      if (maxRetriesReached) {
         console.warn(`[PublishWorker] Max retries reached for ${objectType}:${id}, skipping`)
-        continue
+      } else if (!recentlyRetried) {
+        // Add or update in map
+        this.unpublishedObjects.set(key, {
+          objectType,
+          id,
+          event,
+          retryCount: existing?.retryCount ?? 0,
+          lastRetryAt: Date.now(),
+        })
       }
-
-      // Add or update in map
-      this.unpublishedObjects.set(key, {
-        objectType,
-        id,
-        event,
-        retryCount: existing?.retryCount ?? 0,
-        lastRetryAt: Date.now(),
-      })
     }
   }
diff --git a/lib/relayRotation.ts b/lib/relayRotation.ts
index 14973a2..adce9bc 100644
--- a/lib/relayRotation.ts
+++ b/lib/relayRotation.ts
@@ -83,8 +83,8 @@ export async function tryWithRelayRotation(
       attempts++

       // If we've tried all relays once, loop back
-      if (attempts < maxAttempts) {
-        continue
+      if (attempts >= maxAttempts) {
+        break
       }
     }
   }
diff --git a/lib/swClient.ts b/lib/swClient.ts
index 09b85a7..cc09472 100644
--- a/lib/swClient.ts
+++ b/lib/swClient.ts
@@ -115,7 +115,9 @@ class ServiceWorkerClient {
         resolve(event.data)
       }

-      this.registration!.active!.postMessage(message, [messageChannel.port2])
+      if (this.registration?.active) {
+        this.registration.active.postMessage(message, [messageChannel.port2])
+      }
     })
   }

@@ -142,15 +144,18 @@ class ServiceWorkerClient {
     if (!this.messageHandlers.has(type)) {
       this.messageHandlers.set(type, [])
     }
-    this.messageHandlers.get(type)!.push(handler)
+    const handlers = this.messageHandlers.get(type)
+    if (handlers) {
+      handlers.push(handler)
+    }

     // Return unsubscribe function
     return () => {
-      const handlers = this.messageHandlers.get(type)
-      if (handlers) {
-        const index = handlers.indexOf(handler)
+      const typeHandlers = this.messageHandlers.get(type)
+      if (typeHandlers) {
+        const index = typeHandlers.indexOf(handler)
         if (index > -1) {
-          handlers.splice(index, 1)
+          typeHandlers.splice(index, 1)
         }
       }
     }
diff --git a/lib/versionManager.ts b/lib/versionManager.ts
index 3ede8e4..f83119b 100644
--- a/lib/versionManager.ts
+++ b/lib/versionManager.ts
@@ -29,21 +29,22 @@ export function getLatestVersion(events: Event[]): Event | null {
   for (const event of events) {
     const tags = extractTagsFromEvent(event)
-    if (!tags.id) {
-      continue
-    }
+    if (tags.id) {
+      if (!byId.has(tags.id)) {
+        byId.set(tags.id, [])
+      }

-    if (!byId.has(tags.id)) {
-      byId.set(tags.id, [])
+      const idArray = byId.get(tags.id)
+      if (idArray) {
+        idArray.push({
+          event,
+          version: tags.version,
+          hidden: tags.hidden,
+          pubkey: event.pubkey,
+          id: tags.id,
+        })
+      }
     }
-
-    byId.get(tags.id)!.push({
-      event,
-      version: tags.version,
-      hidden: tags.hidden,
-      pubkey: event.pubkey,
-      id: tags.id,
-    })
   }

   // For each ID, find the latest non-hidden version
@@ -53,16 +54,13 @@
     // Filter out hidden objects
     const visible = objects.filter((obj) => !obj.hidden)

-    if (visible.length === 0) {
-      // All versions are hidden, skip this object
-      continue
-    }
-
-    // Sort by version (descending) and take the first (latest)
-    visible.sort((a, b) => b.version - a.version)
-    const latest = visible[0]
-    if (latest) {
-      latestVersions.push(latest)
+    if (visible.length > 0) {
+      // Sort by version (descending) and take the first (latest)
+      visible.sort((a, b) => b.version - a.version)
+      const latest = visible[0]
+      if (latest) {
+        latestVersions.push(latest)
+      }
     }
   }
@@ -86,17 +84,15 @@ export function getAllVersions(events: Event[]): VersionedObject[] {
   for (const event of events) {
     const tags = extractTagsFromEvent(event)

-    if (!tags.id) {
-      continue
+    if (tags.id) {
+      versions.push({
+        event,
+        version: tags.version,
+        hidden: tags.hidden,
+        pubkey: event.pubkey,
+        id: tags.id,
+      })
     }
-
-    versions.push({
-      event,
-      version: tags.version,
-      hidden: tags.hidden,
-      pubkey: event.pubkey,
-      id: tags.id,
-    })
   }

   // Sort by version (descending)
diff --git a/lib/writeOrchestrator.ts b/lib/writeOrchestrator.ts
index 7d0a4db..43365f8 100644
--- a/lib/writeOrchestrator.ts
+++ b/lib/writeOrchestrator.ts
@@ -49,7 +49,7 @@ class WriteOrchestrator {
       // 1. Publish to network via WebSocket service (en parallèle)
       websocketService.publishEvent(event, relays).then((statuses) => {
         return statuses
-          .map((status, index) => (status.success ? relays[index] : null))
+          .map((status, statusIndex) => (status.success ? relays[statusIndex] : null))
           .filter((relay): relay is string => relay !== null)
       }),
       // 2. Write to IndexedDB via Web Worker (en parallèle, avec published: false initialement)
@@ -106,14 +106,14 @@ class WriteOrchestrator {
     }

     const secretKey = hexToBytes(this.privateKey)
-    const event = finalizeEvent(unsignedEvent, secretKey)
+    const finalizedEvent = finalizeEvent(unsignedEvent, secretKey)

     // Write and publish
     const result = await this.writeAndPublish(
       {
         objectType,
         hash,
-        event,
+        event: finalizedEvent,
         parsed,
         version,
         hidden,
diff --git a/lib/writeService.ts b/lib/writeService.ts
index e57d203..aac4dab 100644
--- a/lib/writeService.ts
+++ b/lib/writeService.ts
@@ -124,9 +124,10 @@ class WriteService {
         }
       }

-      this.writeWorker!.addEventListener('message', handler)
-      this.writeWorker!.postMessage({
-        type: 'WRITE_OBJECT',
+      if (this.writeWorker) {
+        this.writeWorker.addEventListener('message', handler)
+        this.writeWorker.postMessage({
+          type: 'WRITE_OBJECT',
         data: {
           objectType,
           hash,
@@ -138,6 +139,7 @@
           published,
         },
       })
+      }
     })
   }
   // Fallback: direct write
@@ -181,11 +183,13 @@
       }
     }

-      this.writeWorker!.addEventListener('message', handler)
-      this.writeWorker!.postMessage({
-        type: 'UPDATE_PUBLISHED',
-        data: { objectType, id, published },
-      })
+      if (this.writeWorker) {
+        this.writeWorker.addEventListener('message', handler)
+        this.writeWorker.postMessage({
+          type: 'UPDATE_PUBLISHED',
+          data: { objectType, id, published },
+        })
+      }
     })
   }
   // Fallback: direct write
@@ -231,11 +235,13 @@
       }
     }

-      this.writeWorker!.addEventListener('message', handler)
-      this.writeWorker!.postMessage({
-        type: 'CREATE_NOTIFICATION',
-        data: { type, objectType, objectId, eventId, notificationData: data },
-      })
+      if (this.writeWorker) {
+        this.writeWorker.addEventListener('message', handler)
+        this.writeWorker.postMessage({
+          type: 'CREATE_NOTIFICATION',
+          data: { type, objectType, objectId, eventId, notificationData: data },
+        })
+      }
     })
   }
   // Fallback: direct write
@@ -284,8 +290,8 @@
       const { publishLog } = await import('./publishLog')
       await publishLog.logPublicationDirect(eventId, relayUrl, success, error, objectType, objectId)
     }
-  } catch (error) {
-    console.error('[WriteService] Error logging publication:', error)
+  } catch (logError) {
+    console.error('[WriteService] Error logging publication:', logError)
     // Don't throw for logs
   }
 }
diff --git a/lib/zapAggregation.ts b/lib/zapAggregation.ts
index bbb1e52..450abd0 100644
--- a/lib/zapAggregation.ts
+++ b/lib/zapAggregation.ts
@@ -89,7 +89,7 @@ function collectZap(
     sub.on('eose', () => done())
     setTimeout(() => done(), timeout).unref?.()
     if (typeof (sub as unknown as { on?: unknown }).on === 'function') {
-      const subWithError = sub as unknown as { on: (event: string, handler: (error: Error) => void) => void }
+      const subWithError = sub as unknown as { on: (event: string, handler: (err: Error) => void) => void }
       subWithError.on('error', onError)
     }
   })
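
Note on the recurring pattern in the hunks above: most of them replace a non-null assertion on Map.get() (map.get(key)!) with an explicit undefined check, and turn continue-based early exits into nested conditionals. Below is a minimal standalone TypeScript sketch of the Map variant only; the helper name and generics are illustrative and not taken from this codebase.

// Illustrative sketch only - not code from this repository.
// Instead of asserting that the value just inserted is present (groups.get(key)!),
// re-read it and guard, so strict lint rules against non-null assertions are satisfied.
function appendToGroup<K, V>(groups: Map<K, V[]>, key: K, value: V): void {
  if (!groups.has(key)) {
    groups.set(key, [])
  }
  const bucket = groups.get(key) // typed as V[] | undefined
  if (bucket) {
    bucket.push(value)
  }
}

The trade-off is a small amount of extra code in exchange for no runtime assumptions; the thrown-error variant used in objectCache.ts is an alternative when a missing entry is genuinely unreachable.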