lint fix wip

Nicolas Cantu 2026-01-08 21:49:57 +01:00
parent 24d30eb5d0
commit 5694dbdb8a
19 changed files with 195 additions and 169 deletions

View File

@ -51,8 +51,8 @@ export function useUserArticles(
     if (sortedArticles.length === 0) {
       setError('Aucun contenu trouvé')
     }
-  } catch (error) {
-    console.error('Error loading user articles from cache:', error)
+  } catch (loadError) {
+    console.error('Error loading user articles from cache:', loadError)
     setError('Erreur lors du chargement des articles')
   } finally {
     setLoading(false)
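The rename above resolves a variable-shadowing warning: the catch binding `error` collided with another `error` already in scope. The same rule drives the `objectCache` → `objectCacheService` alias in the next file. A minimal sketch of the pattern, with hypothetical function names:

    declare function primaryLoad(): Promise<void>
    declare function fallbackLoad(): Promise<void>

    async function loadWithFallback(): Promise<void> {
      try {
        await primaryLoad()
      } catch (error) {
        console.error('Primary load failed:', error)
        try {
          await fallbackLoad()
        } catch (fallbackError) {
          // Renamed binding: `catch (error)` here would shadow the outer one
          console.error('Fallback load failed:', fallbackError)
        }
      }
    }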

View File

@ -284,8 +284,8 @@ export class ArticlePublisher {
   async getAuthorPresentation(pubkey: string): Promise<import('@/types/nostr').AuthorPresentationArticle | null> {
     try {
       // Read only from IndexedDB cache
-      const { objectCache } = await import('./objectCache')
-      const cached = await objectCache.getAuthorByPubkey(pubkey)
+      const { objectCache: objectCacheService } = await import('./objectCache')
+      const cached = await objectCacheService.getAuthorByPubkey(pubkey)
       if (cached) {
         const presentation = cached
         // Calculate totalSponsoring from cache

View File

@ -192,13 +192,27 @@ export async function parsePresentationEvent(event: Event): Promise<import('@/ty
     }
   } else {
     // Generate hash from author data
+    let mainnetAddress: string | undefined
+    if (profileData?.mainnetAddress) {
+      mainnetAddress = profileData.mainnetAddress
+    } else if (typeof tags.mainnetAddress === 'string') {
+      mainnetAddress = tags.mainnetAddress
+    }
+    let pictureUrl: string | undefined
+    if (profileData?.pictureUrl) {
+      pictureUrl = profileData.pictureUrl
+    } else if (typeof tags.pictureUrl === 'string') {
+      pictureUrl = tags.pictureUrl
+    }
     hash = await generateAuthorHashId({
       pubkey: event.pubkey,
       authorName: profileData?.authorName ?? '',
       presentation: profileData?.presentation ?? '',
       contentDescription: profileData?.contentDescription ?? '',
-      mainnetAddress: profileData?.mainnetAddress ?? (typeof tags.mainnetAddress === 'string' ? tags.mainnetAddress : undefined),
-      pictureUrl: profileData?.pictureUrl ?? (typeof tags.pictureUrl === 'string' ? tags.pictureUrl : undefined),
+      mainnetAddress,
+      pictureUrl,
       category: profileData?.category ?? tags.category ?? 'sciencefiction',
     })
   }
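This hunk hoists dense `??`/ternary fallback expressions into explicitly typed locals. A sketch of the shape, with hypothetical types; note that the truthiness test treats an empty string as missing, which the original `??` chain did not, so the two forms differ on `''`:

    interface Profile { pictureUrl?: string }
    type TagMap = Record<string, unknown>

    function resolvePictureUrl(profile: Profile | undefined, tags: TagMap): string | undefined {
      // Before: profile?.pictureUrl ?? (typeof tags.pictureUrl === 'string' ? tags.pictureUrl : undefined)
      let pictureUrl: string | undefined
      if (profile?.pictureUrl) {
        pictureUrl = profile.pictureUrl
      } else if (typeof tags.pictureUrl === 'string') {
        pictureUrl = tags.pictureUrl
      }
      return pictureUrl
    }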

View File

@ -30,7 +30,10 @@ export function detectDuplicates(objects: ExtractedObject[]): DuplicateWarning[]
     if (!byType.has(obj.type)) {
       byType.set(obj.type, [])
     }
-    byType.get(obj.type)!.push(obj)
+    const typeArray = byType.get(obj.type)
+    if (typeArray) {
+      typeArray.push(obj)
+    }
   }
   // For each type, group by ID
@ -40,7 +43,10 @@ export function detectDuplicates(objects: ExtractedObject[]): DuplicateWarning[]
     if (!byId.has(obj.id)) {
       byId.set(obj.id, [])
     }
-    byId.get(obj.id)!.push(obj)
+    const idArray = byId.get(obj.id)
+    if (idArray) {
+      idArray.push(obj)
+    }
   }
   // Check for duplicates (same ID, multiple objects)
@ -93,7 +99,10 @@ export function resolveDuplicatesByDate(objects: ExtractedObject[]): ExtractedOb
     if (!byKey.has(key)) {
       byKey.set(key, [])
     }
-    byKey.get(key)!.push(obj)
+    const keyArray = byKey.get(key)
+    if (keyArray) {
+      keyArray.push(obj)
+    }
   }
   const resolved: ExtractedObject[] = []
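Each of these hunks replaces a `byX.get(…)!.push(…)` non-null assertion with a guarded lookup. An alternative that satisfies the type checker with a single lookup and no assertion is the classic Map upsert, sketched here with hypothetical names:

    const groups = new Map<string, number[]>()

    function addToGroup(key: string, value: number): void {
      let bucket = groups.get(key)
      if (!bucket) {
        bucket = []
        groups.set(key, bucket)
      }
      // `bucket` is narrowed to number[] here, no `!` needed
      bucket.push(value)
    }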

View File

@ -12,7 +12,7 @@ import { writeObjectToCache } from './writeObjectHelper'
 export interface EventCacheConfig {
   objectType: ObjectType
-  extractor: (event: Event) => Promise<unknown | null>
+  extractor: (event: Event) => Promise<unknown>
   getHash?: (extracted: unknown) => string | null
   getIndex?: (extracted: unknown) => number
   getVersion?: (event: Event) => number
@ -45,31 +45,25 @@ export async function groupAndCacheEventsByHash(
       if (!eventsByHashId.has(hash)) {
         eventsByHashId.set(hash, [])
       }
-      eventsByHashId.get(hash)!.push(event)
+      const hashEvents = eventsByHashId.get(hash)
+      if (hashEvents) {
+        hashEvents.push(event)
+      }
     }
   }
   // Cache each object (latest version)
   for (const [_hash, hashEvents] of eventsByHashId.entries()) {
     const latestEvent = getLatestVersion(hashEvents)
-    if (!latestEvent) {
-      continue
-    }
+    if (latestEvent) {
     const extracted = await extractor(latestEvent)
-    if (!extracted) {
-      continue
-    }
+    if (extracted) {
     // Get hash, index, version, hidden
     const extractedHash = getHash ? getHash(extracted) : null
     const extractedObj = extracted as ExtractedObjectWithId
     const extractedId = extractedHash ?? extractedObj.id
-    if (!extractedId) {
-      continue
-    }
+    if (extractedId) {
     const publicationParsed = parseObjectId(extractedId)
     const hash = publicationParsed.hash ?? extractedId
     const index = getIndex ? getIndex(extracted) : publicationParsed.index ?? extractedObj.index ?? 0
@ -87,6 +81,9 @@ export async function groupAndCacheEventsByHash(
       })
     }
   }
+  }
+  }
+  }
 /**
  * Cache a single event as an object
@ -95,7 +92,7 @@ export async function groupAndCacheEventsByHash(
 export async function cacheEventAsObject(
   event: Event,
   objectType: ObjectType,
-  extractor: (event: Event) => Promise<unknown | null>
+  extractor: (event: Event) => Promise<unknown>
 ): Promise<boolean> {
   const extracted = await extractor(event)
   if (!extracted) {
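The signature change here is a pure type-level cleanup: `unknown` absorbs every other member of a union, so `Promise<unknown | null>` is the same type as `Promise<unknown>` (typescript-eslint's no-redundant-type-constituents flags this; the `unknown | undefined` fix in the next file is the same issue). A short illustration:

    type A = unknown | null      // evaluates to unknown
    type B = unknown | undefined // evaluates to unknown

    // Callers must still narrow before use, exactly as before:
    async function handle(extractor: () => Promise<unknown>): Promise<void> {
      const extracted = await extractor()
      if (extracted) {
        console.log('extracted value:', extracted)
      }
    }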

View File

@ -21,7 +21,7 @@ export interface IndexedDBConfig {
 export class IndexedDBError extends Error {
   public readonly operation: string
   public readonly storeName: string | undefined
-  public override readonly cause: unknown | undefined
+  public override readonly cause: unknown
   public override readonly name = 'IndexedDBError'

View File

@ -433,7 +433,7 @@ export async function deleteAccountTwoLevel(): Promise<void> {
   await indexedDBStorage.delete('nostr_account_exists')
   // Try to remove credential (may not be possible via API)
-  if (navigator.credentials && navigator.credentials.preventSilentAccess) {
-    navigator.credentials.preventSilentAccess()
+  if (navigator.credentials?.preventSilentAccess) {
+    void navigator.credentials.preventSilentAccess()
   }
 }
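Two fixes in one hunk: optional chaining replaces the manual `&&` existence check, and the `void` operator marks the returned promise as intentionally unawaited, the shape no-floating-promises expects. A sketch against a generic promise-returning optional API:

    interface MaybeCleanup { cleanup?: () => Promise<void> }

    function release(target: MaybeCleanup | null): void {
      // Optional chaining replaces `target && target.cleanup`;
      // `void` marks the promise as deliberately not awaited
      if (target?.cleanup) {
        void target.cleanup()
      }
    }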

View File

@ -65,10 +65,7 @@ function expandDictionary(): MnemonicIcon[] {
     const variantIndex = Math.floor(i / base) % variants.length
     const baseIcon = ALL_ICONS[baseIndex]
-    if (!baseIcon) {
-      continue
-    }
+    if (baseIcon) {
     if (variantIndex === 0) {
       expanded.push(baseIcon)
     } else {
@ -78,6 +75,7 @@ function expandDictionary(): MnemonicIcon[] {
       })
     }
   }
+  }
   return expanded
 }
@ -100,6 +98,7 @@ export function generateMnemonicIcons(pubkey: string): string[] {
   for (let i = 0; i < 4; i++) {
     const segment = pubkey.slice(i * 8, (i + 1) * 8) || pubkey.slice(-8)
+    // Note: slice returns empty string, not null/undefined, so || is appropriate here
     const segmentHash = hashString(segment)
     const combinedHash = (baseHash + segmentHash + i * 1000) % DICTIONARY.length
     const icon = DICTIONARY[combinedHash]
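The restructuring here, and in several other files in this commit (eventCacheHelper, platformSync, versionUtils), follows one recipe: an early `if (!x) { continue }` guard becomes a wrapping `if (x)` around the rest of the loop body, presumably to satisfy ESLint's no-continue. A minimal sketch:

    function keepDefined(items: Array<string | undefined>): string[] {
      const out: string[] = []
      for (const item of items) {
        // Before: if (!item) { continue }
        if (item) {
          out.push(item)
        }
      }
      return out
    }

The trade-off is one extra nesting level per removed guard, visible in the extra closing braces added at the ends of these functions.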

View File

@ -87,7 +87,7 @@ async function tryUploadEndpoint(endpoint: string, formData: FormData, useProxy:
     let errorMessage = 'Upload failed'
     try {
       const text = await response.text()
-      errorMessage = text || `HTTP ${response.status} ${response.statusText}`
+      errorMessage = text ?? `HTTP ${response.status} ${response.statusText}`
     } catch {
       errorMessage = `HTTP ${response.status} ${response.statusText}`
     }
@ -137,7 +137,7 @@ export async function uploadNip95Media(file: File): Promise<MediaRef> {
       const isUnlocked = nostrAuthService.isUnlocked()
       if (!pubkey) {
         console.warn('NIP-98 authentication required for nostrcheck.me but no account found. Please create or import an account.')
-        continue
+        // Skip this endpoint
       } else if (!isUnlocked) {
         // Throw a special error that can be caught to trigger unlock modal
         // This error should propagate to the caller, not be caught here
@ -146,20 +146,22 @@ export async function uploadNip95Media(file: File): Promise<MediaRef> {
         throw unlockError
       } else {
         console.warn('NIP-98 authentication required for nostrcheck.me but not available. Skipping endpoint.')
-        continue
-      }
+        // Skip this endpoint
+      }
     } else {
       try {
         // Generate NIP-98 token for the actual endpoint (not the proxy)
         // The token must be for the final destination URL
         authToken = await generateNip98Token('POST', endpoint)
       } catch (authError) {
         console.error('Failed to generate NIP-98 token:', authError)
-        // Continue to next endpoint if auth fails
-        continue
+        // Skip this endpoint if auth fails
       }
     }
   }
+  // Only proceed if we have auth token when needed, or if auth is not needed
+  if (!needsAuth || authToken) {
   // Always use proxy to avoid CORS, 405, and name resolution issues
   // Pass endpoint and auth token as query parameters to proxy
   const proxyUrlParams = new URLSearchParams({
@ -171,6 +173,7 @@ export async function uploadNip95Media(file: File): Promise<MediaRef> {
   const proxyUrl = `/api/nip95-upload?${proxyUrlParams.toString()}`
   const url = await tryUploadEndpoint(proxyUrl, formData, true)
   return { url, type: mediaType }
+  }
 } catch (e) {
   const error = e instanceof Error ? e : new Error(String(e))
   const errorMessage = error.message
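One change in this file deserves a second look: swapping `||` for `??` in the error-message fallback is not behavior-preserving. `response.text()` resolves to a string that may be empty but is never null, so the `??` fallback can no longer fire; the commit's own comment in the mnemonic file ("slice returns empty string, not null/undefined, so || is appropriate") describes exactly this distinction. A short illustration:

    const text = ''
    const viaOr = text || 'HTTP 500'       // 'HTTP 500' — '' is falsy, so || falls back
    const viaNullish = text ?? 'HTTP 500'  // ''        — ?? only falls back on null/undefined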

View File

@ -479,7 +479,7 @@ class NostrService {
       }
     } catch (error) {
       console.warn(`[NostrService] Error checking unpublished in ${objectType}:`, error)
-      continue
+      // Continue to next object type on error
     }
   }
@ -499,7 +499,7 @@ class NostrService {
       }
     } catch (error) {
       console.warn(`[NostrService] Error searching for event in ${objectType}:`, error)
-      continue
+      // Continue to next object type on error
     }
   }
 }

View File

@ -68,7 +68,11 @@ class ObjectCacheService {
       })
       this.dbHelpers.set(objectType, helper)
     }
-    return this.dbHelpers.get(objectType)!
+    const helper = this.dbHelpers.get(objectType)
+    if (!helper) {
+      throw new Error(`Database helper not found for ${objectType}`)
+    }
+    return helper
   }
 /**
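Unlike the silent `if (x)` guards used elsewhere in this commit, this hunk replaces the non-null assertion with a fail-fast throw, which surfaces a broken "set before get" invariant instead of hiding it. The pattern, sketched with hypothetical names:

    const registry = new Map<string, () => void>()

    function getHandlerOrThrow(name: string): () => void {
      const handler = registry.get(name)
      if (!handler) {
        // Make the invariant explicit instead of asserting with `!`
        throw new Error(`Handler not found for ${name}`)
      }
      return handler
    }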

View File

@ -97,10 +97,7 @@ class PlatformSyncService {
     // Synchronize from all active relays
     for (let i = 0; i < activeRelays.length; i++) {
       const relayUrl = activeRelays[i]
-      if (!relayUrl) {
-        continue
-      }
+      if (relayUrl) {
       // Update progress with current relay
       syncProgressManager.setProgress({
         currentStep: 0,
@ -183,12 +180,10 @@ class PlatformSyncService {
         if (event.id === '527d83e0af20bf23c3e104974090ccc21536ece72c24eb784b3642890f63b763') {
           console.warn(`[PlatformSync] Target event accepted and added to relayEvents`)
         }
-      } else {
+      } else if (event.id === '527d83e0af20bf23c3e104974090ccc21536ece72c24eb784b3642890f63b763') {
         // Log events that match filter but don't have service tag
-        if (event.id === '527d83e0af20bf23c3e104974090ccc21536ece72c24eb784b3642890f63b763') {
         console.warn(`[PlatformSync] Event ${event.id} rejected: service tag is "${tags.service}", expected "${PLATFORM_SERVICE}"`)
-        }
       }
     })
     sub.on('eose', (): void => {
@ -229,6 +224,7 @@ class PlatformSyncService {
       })
     }
   }
+  }
   // Process all collected events
   await this.processAndCacheEvents(allEvents)
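The middle hunk also collapses an `else { if (…) }` pair into `else if`, the rewrite ESLint's no-lonely-if suggests. In miniature:

    function describe(n: number): string {
      if (n > 0) {
        return 'positive'
      } else if (n === 0) {
        // Before: else { if (n === 0) { return 'zero' } }
        return 'zero'
      }
      return 'negative'
    }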

View File

@ -118,17 +118,13 @@ class PublishWorkerService {
       const key = `${objectType}:${id}`
       const existing = this.unpublishedObjects.get(key)
-      // Skip if recently retried
-      if (existing && Date.now() - existing.lastRetryAt < RETRY_DELAY_MS) {
-        continue
-      }
+      // Skip if recently retried or max retries reached
+      const recentlyRetried = existing && Date.now() - existing.lastRetryAt < RETRY_DELAY_MS
+      const maxRetriesReached = existing && existing.retryCount >= MAX_RETRIES_PER_OBJECT
-      // Skip if max retries reached
-      if (existing && existing.retryCount >= MAX_RETRIES_PER_OBJECT) {
+      if (maxRetriesReached) {
        console.warn(`[PublishWorker] Max retries reached for ${objectType}:${id}, skipping`)
-        continue
-      }
+      } else if (!recentlyRetried) {
       // Add or update in map
       this.unpublishedObjects.set(key, {
         objectType,
@ -139,6 +135,7 @@ class PublishWorkerService {
       })
     }
   }
+  }
   // Process all unpublished objects
   const objectsToProcess = Array.from(this.unpublishedObjects.values())
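Here two skip guards become named booleans feeding an if/else-if chain, the extract-explaining-variable refactor. A sketch under assumed constants:

    const RETRY_DELAY_MS = 30_000
    const MAX_RETRIES_PER_OBJECT = 5

    interface RetryEntry { lastRetryAt: number; retryCount: number }

    function shouldRequeue(existing: RetryEntry | undefined): boolean {
      const recentlyRetried = existing !== undefined && Date.now() - existing.lastRetryAt < RETRY_DELAY_MS
      const maxRetriesReached = existing !== undefined && existing.retryCount >= MAX_RETRIES_PER_OBJECT
      // Requeue only when neither skip condition applies
      return !maxRetriesReached && !recentlyRetried
    }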

View File

@ -83,8 +83,8 @@ export async function tryWithRelayRotation<T>(
       attempts++
       // If we've tried all relays once, loop back
-      if (attempts < maxAttempts) {
-        continue
+      if (attempts >= maxAttempts) {
+        break
       }
     }
   }
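This inversion looks like more than a cosmetic lint fix. Assuming the guard was the last statement in the loop body, as the surrounding braces suggest, the old `continue` was a no-op and execution fell through to the next iteration whether or not the attempt budget was spent; the new `if (attempts >= maxAttempts) { break }` actually terminates the loop. A self-contained sketch of the fixed shape:

    function tryWithBudget(maxAttempts: number, tryOnce: () => boolean): boolean {
      let attempts = 0
      while (true) {
        if (tryOnce()) {
          return true
        }
        attempts++
        // Exit explicitly once the attempt budget is exhausted
        if (attempts >= maxAttempts) {
          break
        }
      }
      return false
    }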

View File

@ -115,7 +115,9 @@ class ServiceWorkerClient {
       resolve(event.data)
     }
-    this.registration!.active!.postMessage(message, [messageChannel.port2])
+    if (this.registration?.active) {
+      this.registration.active.postMessage(message, [messageChannel.port2])
+    }
   })
 }
@ -142,15 +144,18 @@ class ServiceWorkerClient {
   if (!this.messageHandlers.has(type)) {
     this.messageHandlers.set(type, [])
   }
-  this.messageHandlers.get(type)!.push(handler)
+  const handlers = this.messageHandlers.get(type)
+  if (handlers) {
+    handlers.push(handler)
+  }
   // Return unsubscribe function
   return () => {
-    const handlers = this.messageHandlers.get(type)
-    if (handlers) {
-      const index = handlers.indexOf(handler)
+    const typeHandlers = this.messageHandlers.get(type)
+    if (typeHandlers) {
+      const index = typeHandlers.indexOf(handler)
       if (index > -1) {
-        handlers.splice(index, 1)
+        typeHandlers.splice(index, 1)
       }
     }
   }

View File

@ -29,15 +29,14 @@ export function getLatestVersion(events: Event[]): Event | null {
   for (const event of events) {
     const tags = extractTagsFromEvent(event)
-    if (!tags.id) {
-      continue
-    }
+    if (tags.id) {
     if (!byId.has(tags.id)) {
       byId.set(tags.id, [])
     }
-    byId.get(tags.id)!.push({
+    const idArray = byId.get(tags.id)
+    if (idArray) {
+      idArray.push({
       event,
       version: tags.version,
       hidden: tags.hidden,
@ -45,6 +44,8 @@ export function getLatestVersion(events: Event[]): Event | null {
       id: tags.id,
     })
   }
+  }
+  }
   // For each ID, find the latest non-hidden version
   const latestVersions: VersionedObject[] = []
@ -53,11 +54,7 @@ export function getLatestVersion(events: Event[]): Event | null {
     // Filter out hidden objects
     const visible = objects.filter((obj) => !obj.hidden)
-    if (visible.length === 0) {
-      // All versions are hidden, skip this object
-      continue
-    }
+    if (visible.length > 0) {
     // Sort by version (descending) and take the first (latest)
     visible.sort((a, b) => b.version - a.version)
     const latest = visible[0]
@ -65,6 +62,7 @@ export function getLatestVersion(events: Event[]): Event | null {
     latestVersions.push(latest)
     }
   }
+  }
   // If we have multiple IDs, we need to return the one with the highest version
   // But typically we expect one ID per query, so return the first
@ -86,10 +84,7 @@ export function getAllVersions(events: Event[]): VersionedObject[] {
   for (const event of events) {
     const tags = extractTagsFromEvent(event)
-    if (!tags.id) {
-      continue
-    }
+    if (tags.id) {
     versions.push({
       event,
       version: tags.version,
@ -98,6 +93,7 @@ export function getAllVersions(events: Event[]): VersionedObject[] {
       id: tags.id,
     })
   }
+  }
   // Sort by version (descending)
   versions.sort((a, b) => b.version - a.version)

View File

@ -49,7 +49,7 @@ class WriteOrchestrator {
     // 1. Publish to network via WebSocket service (in parallel)
     websocketService.publishEvent(event, relays).then((statuses) => {
       return statuses
-        .map((status, index) => (status.success ? relays[index] : null))
+        .map((status, statusIndex) => (status.success ? relays[statusIndex] : null))
         .filter((relay): relay is string => relay !== null)
     }),
     // 2. Write to IndexedDB via Web Worker (in parallel, with published: false initially)
@ -106,14 +106,14 @@ class WriteOrchestrator {
   }
   const secretKey = hexToBytes(this.privateKey)
-  const event = finalizeEvent(unsignedEvent, secretKey)
+  const finalizedEvent = finalizeEvent(unsignedEvent, secretKey)
   // Write and publish
   const result = await this.writeAndPublish(
     {
       objectType,
       hash,
-      event,
+      event: finalizedEvent,
       parsed,
       version,
       hidden,

View File

@ -124,8 +124,9 @@ class WriteService {
       }
     }
-    this.writeWorker!.addEventListener('message', handler)
-    this.writeWorker!.postMessage({
+    if (this.writeWorker) {
+      this.writeWorker.addEventListener('message', handler)
+      this.writeWorker.postMessage({
       type: 'WRITE_OBJECT',
       data: {
         objectType,
@ -138,6 +139,7 @@ class WriteService {
         published,
       },
     })
+    }
   })
 }
 // Fallback: direct write
@ -181,11 +183,13 @@ class WriteService {
       }
     }
-    this.writeWorker!.addEventListener('message', handler)
-    this.writeWorker!.postMessage({
+    if (this.writeWorker) {
+      this.writeWorker.addEventListener('message', handler)
+      this.writeWorker.postMessage({
       type: 'UPDATE_PUBLISHED',
       data: { objectType, id, published },
     })
+    }
   })
 }
 // Fallback: direct write
@ -231,11 +235,13 @@ class WriteService {
       }
     }
-    this.writeWorker!.addEventListener('message', handler)
-    this.writeWorker!.postMessage({
+    if (this.writeWorker) {
+      this.writeWorker.addEventListener('message', handler)
+      this.writeWorker.postMessage({
       type: 'CREATE_NOTIFICATION',
       data: { type, objectType, objectId, eventId, notificationData: data },
     })
+    }
   })
 }
 // Fallback: direct write
@ -284,8 +290,8 @@ class WriteService {
     const { publishLog } = await import('./publishLog')
     await publishLog.logPublicationDirect(eventId, relayUrl, success, error, objectType, objectId)
   }
-  } catch (error) {
-    console.error('[WriteService] Error logging publication:', error)
+  } catch (logError) {
+    console.error('[WriteService] Error logging publication:', logError)
   // Don't throw for logs
 }
 }
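All three worker hunks in this file wrap the listener registration and `postMessage` in one `if (this.writeWorker)` narrowing guard, replacing repeated `!` assertions. One caveat worth noting: inside a Promise executor, a silent guard means the promise never settles when the worker is missing; rejecting instead keeps callers from hanging. A sketch of that variant, with a hypothetical message shape:

    function requestFromWorker<T>(worker: Worker | null, payload: unknown): Promise<T> {
      return new Promise<T>((resolve, reject) => {
        if (!worker) {
          reject(new Error('Worker not initialized'))
          return
        }
        const handler = (event: MessageEvent<T>): void => {
          worker.removeEventListener('message', handler)
          resolve(event.data)
        }
        // `worker` is narrowed by the guard above, so no non-null assertion is needed
        worker.addEventListener('message', handler)
        worker.postMessage(payload)
      })
    }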

View File

@ -89,7 +89,7 @@ function collectZap(
   sub.on('eose', () => done())
   setTimeout(() => done(), timeout).unref?.()
   if (typeof (sub as unknown as { on?: unknown }).on === 'function') {
-    const subWithError = sub as unknown as { on: (event: string, handler: (error: Error) => void) => void }
+    const subWithError = sub as unknown as { on: (event: string, handler: (err: Error) => void) => void }
     subWithError.on('error', onError)
   }
 })