lint fix wip

Nicolas Cantu 2026-01-08 21:49:57 +01:00
parent 24d30eb5d0
commit 5694dbdb8a
19 changed files with 195 additions and 169 deletions

View File

@@ -51,8 +51,8 @@ export function useUserArticles(
       if (sortedArticles.length === 0) {
        setError('Aucun contenu trouvé')
      }
-    } catch (error) {
-      console.error('Error loading user articles from cache:', error)
+    } catch (loadError) {
+      console.error('Error loading user articles from cache:', loadError)
      setError('Erreur lors du chargement des articles')
    } finally {
      setLoading(false)
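
The rename above swaps the catch binding `error` for `loadError`, presumably to satisfy a no-shadow lint rule. A minimal sketch of what the rule guards against (names are illustrative, not from this codebase):

    const error = new Error('outer')
    try {
      throw new Error('inner')
    } catch (error) {
      // This `error` shadows the outer one; the lint rule flags the
      // ambiguity because the two bindings resolve differently by scope.
      console.error(error) // inner error
    }
    console.error(error) // outer error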

View File

@@ -284,8 +284,8 @@ export class ArticlePublisher {
   async getAuthorPresentation(pubkey: string): Promise<import('@/types/nostr').AuthorPresentationArticle | null> {
    try {
      // Read only from IndexedDB cache
-      const { objectCache } = await import('./objectCache')
-      const cached = await objectCache.getAuthorByPubkey(pubkey)
+      const { objectCache: objectCacheService } = await import('./objectCache')
+      const cached = await objectCacheService.getAuthorByPubkey(pubkey)
      if (cached) {
        const presentation = cached
        // Calculate totalSponsoring from cache

View File

@@ -192,13 +192,27 @@ export async function parsePresentationEvent(event: Event): Promise<import('@/ty
       }
    } else {
      // Generate hash from author data
+      let mainnetAddress: string | undefined
+      if (profileData?.mainnetAddress) {
+        mainnetAddress = profileData.mainnetAddress
+      } else if (typeof tags.mainnetAddress === 'string') {
+        mainnetAddress = tags.mainnetAddress
+      }
+      let pictureUrl: string | undefined
+      if (profileData?.pictureUrl) {
+        pictureUrl = profileData.pictureUrl
+      } else if (typeof tags.pictureUrl === 'string') {
+        pictureUrl = tags.pictureUrl
+      }
      hash = await generateAuthorHashId({
        pubkey: event.pubkey,
        authorName: profileData?.authorName ?? '',
        presentation: profileData?.presentation ?? '',
        contentDescription: profileData?.contentDescription ?? '',
-        mainnetAddress: profileData?.mainnetAddress ?? (typeof tags.mainnetAddress === 'string' ? tags.mainnetAddress : undefined),
-        pictureUrl: profileData?.pictureUrl ?? (typeof tags.pictureUrl === 'string' ? tags.pictureUrl : undefined),
+        mainnetAddress,
+        pictureUrl,
        category: profileData?.category ?? tags.category ?? 'sciencefiction',
      })
    }

View File

@@ -30,7 +30,10 @@ export function detectDuplicates(objects: ExtractedObject[]): DuplicateWarning[]
     if (!byType.has(obj.type)) {
      byType.set(obj.type, [])
    }
-    byType.get(obj.type)!.push(obj)
+    const typeArray = byType.get(obj.type)
+    if (typeArray) {
+      typeArray.push(obj)
+    }
  }

  // For each type, group by ID
@@ -40,7 +43,10 @@ export function detectDuplicates(objects: ExtractedObject[]): DuplicateWarning[]
     if (!byId.has(obj.id)) {
      byId.set(obj.id, [])
    }
-    byId.get(obj.id)!.push(obj)
+    const idArray = byId.get(obj.id)
+    if (idArray) {
+      idArray.push(obj)
+    }
  }

  // Check for duplicates (same ID, multiple objects)
@@ -93,7 +99,10 @@ export function resolveDuplicatesByDate(objects: ExtractedObject[]): ExtractedOb
     if (!byKey.has(key)) {
      byKey.set(key, [])
    }
-    byKey.get(key)!.push(obj)
+    const keyArray = byKey.get(key)
+    if (keyArray) {
+      keyArray.push(obj)
+    }
  }

  const resolved: ExtractedObject[] = []
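
Each hunk in this file replaces a `map.get(key)!.push(obj)` non-null assertion with a checked lookup. A hedged alternative that drops both the assertion and the extra branch is a get-or-create helper; `getOrCreate` is a hypothetical name, not part of this diff:

    function getOrCreate<K, V>(map: Map<K, V[]>, key: K): V[] {
      let list = map.get(key)
      if (!list) {
        list = []
        map.set(key, list) // create on first use, so the return is always defined
      }
      return list
    }

    // Usage: getOrCreate(byType, obj.type).push(obj)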

View File

@@ -12,7 +12,7 @@ import { writeObjectToCache } from './writeObjectHelper'

 export interface EventCacheConfig {
   objectType: ObjectType
-  extractor: (event: Event) => Promise<unknown | null>
+  extractor: (event: Event) => Promise<unknown>
   getHash?: (extracted: unknown) => string | null
   getIndex?: (extracted: unknown) => number
   getVersion?: (event: Event) => number
@@ -45,31 +45,25 @@ export async function groupAndCacheEventsByHash(
       if (!eventsByHashId.has(hash)) {
        eventsByHashId.set(hash, [])
      }
-      eventsByHashId.get(hash)!.push(event)
+      const hashEvents = eventsByHashId.get(hash)
+      if (hashEvents) {
+        hashEvents.push(event)
+      }
    }
  }

  // Cache each object (latest version)
  for (const [_hash, hashEvents] of eventsByHashId.entries()) {
    const latestEvent = getLatestVersion(hashEvents)
-    if (!latestEvent) {
-      continue
-    }
+    if (latestEvent) {
      const extracted = await extractor(latestEvent)
-    if (!extracted) {
-      continue
-    }
+      if (extracted) {
        // Get hash, index, version, hidden
        const extractedHash = getHash ? getHash(extracted) : null
        const extractedObj = extracted as ExtractedObjectWithId
        const extractedId = extractedHash ?? extractedObj.id
-    if (!extractedId) {
-      continue
-    }
+        if (extractedId) {
          const publicationParsed = parseObjectId(extractedId)
          const hash = publicationParsed.hash ?? extractedId
          const index = getIndex ? getIndex(extracted) : publicationParsed.index ?? extractedObj.index ?? 0
@@ -87,6 +81,9 @@ export async function groupAndCacheEventsByHash(
           })
        }
      }
+      }
+    }
+  }

 /**
  * Cache a single event as an object
@@ -95,7 +92,7 @@ export async function groupAndCacheEventsByHash(
 export async function cacheEventAsObject(
   event: Event,
   objectType: ObjectType,
-  extractor: (event: Event) => Promise<unknown | null>
+  extractor: (event: Event) => Promise<unknown>
 ): Promise<boolean> {
   const extracted = await extractor(event)
   if (!extracted) {
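
The middle hunk replaces early `continue` guards with positive conditions that nest the remaining body, the commit's recurring pattern. A minimal sketch of the equivalence (values are illustrative):

    const items: Array<string | null> = ['a', null, 'b']

    for (const item of items) {
      // Before: if (!item) { continue } with the body at top level.
      // After: the body sits inside a positive guard; falsy entries
      // simply fall through to the next iteration.
      if (item) {
        console.log(item.toUpperCase())
      }
    }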

View File

@@ -21,7 +21,7 @@ export interface IndexedDBConfig {

 export class IndexedDBError extends Error {
   public readonly operation: string
   public readonly storeName: string | undefined
-  public override readonly cause: unknown | undefined
+  public override readonly cause: unknown
   public override readonly name = 'IndexedDBError'
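
`unknown | undefined` collapses to `unknown`, since `unknown` already admits `undefined`; the change presumably satisfies a redundant-type-constituents rule. A compile-time sketch:

    type Cause = unknown | undefined  // the compiler resolves this to unknown
    const a: Cause = undefined        // still assignable
    const b: Cause = new Error('x')   // so is anything else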

View File

@@ -433,7 +433,7 @@ export async function deleteAccountTwoLevel(): Promise<void> {
   await indexedDBStorage.delete('nostr_account_exists')

   // Try to remove credential (may not be possible via API)
-  if (navigator.credentials && navigator.credentials.preventSilentAccess) {
-    navigator.credentials.preventSilentAccess()
+  if (navigator.credentials?.preventSilentAccess) {
+    void navigator.credentials.preventSilentAccess()
   }
 }
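
Two fixes land in this hunk: optional chaining collapses the double truthiness check, and the `void` operator marks the promise returned by preventSilentAccess() as deliberately unawaited, the usual way to satisfy a no-floating-promises rule. A sketch of the idiom:

    async function teardown(): Promise<void> {
      // ...asynchronous cleanup
    }

    // A bare `teardown()` leaves a floating promise that the lint rule
    // flags; `void` documents that the result is intentionally dropped.
    void teardown()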

View File

@@ -65,10 +65,7 @@ function expandDictionary(): MnemonicIcon[] {
     const variantIndex = Math.floor(i / base) % variants.length
    const baseIcon = ALL_ICONS[baseIndex]
-    if (!baseIcon) {
-      continue
-    }
+    if (baseIcon) {
      if (variantIndex === 0) {
        expanded.push(baseIcon)
      } else {
@@ -78,6 +75,7 @@ function expandDictionary(): MnemonicIcon[] {
         })
      }
    }
+    }
  }

  return expanded
}
@@ -100,6 +98,7 @@ export function generateMnemonicIcons(pubkey: string): string[] {
   for (let i = 0; i < 4; i++) {
    const segment = pubkey.slice(i * 8, (i + 1) * 8) || pubkey.slice(-8)
+    // Note: slice returns empty string, not null/undefined, so || is appropriate here
    const segmentHash = hashString(segment)
    const combinedHash = (baseHash + segmentHash + i * 1000) % DICTIONARY.length
    const icon = DICTIONARY[combinedHash]

View File

@@ -87,7 +87,7 @@ async function tryUploadEndpoint(endpoint: string, formData: FormData, useProxy:
     let errorMessage = 'Upload failed'
    try {
      const text = await response.text()
-      errorMessage = text || `HTTP ${response.status} ${response.statusText}`
+      errorMessage = text ?? `HTTP ${response.status} ${response.statusText}`
    } catch {
      errorMessage = `HTTP ${response.status} ${response.statusText}`
    }
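
Note that `||` and `??` are not interchangeable here: with `??` an empty response body is kept as the error message, while `||` falls through to the HTTP status line. Whether the empty-string case matters depends on the endpoints involved. A small illustration:

    const text = ''
    const withOr = text || 'HTTP 500 Internal Server Error'      // 'HTTP 500 ...'
    const withNullish = text ?? 'HTTP 500 Internal Server Error' // '' (kept)
    console.log(withOr, JSON.stringify(withNullish))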
@@ -137,7 +137,7 @@ export async function uploadNip95Media(file: File): Promise<MediaRef> {
     const isUnlocked = nostrAuthService.isUnlocked()
    if (!pubkey) {
      console.warn('NIP-98 authentication required for nostrcheck.me but no account found. Please create or import an account.')
-      continue // Skip this endpoint
    } else if (!isUnlocked) {
      // Throw a special error that can be caught to trigger unlock modal
      // This error should propagate to the caller, not be caught here
@@ -146,20 +146,22 @@ export async function uploadNip95Media(file: File): Promise<MediaRef> {
         throw unlockError
      } else {
        console.warn('NIP-98 authentication required for nostrcheck.me but not available. Skipping endpoint.')
-        continue // Skip this endpoint
+      }
      }
+      } else {
      try {
        // Generate NIP-98 token for the actual endpoint (not the proxy)
        // The token must be for the final destination URL
        authToken = await generateNip98Token('POST', endpoint)
      } catch (authError) {
        console.error('Failed to generate NIP-98 token:', authError)
-        // Continue to next endpoint if auth fails
-        continue
+        // Skip this endpoint if auth fails
+      }
      }
    }
+
+      // Only proceed if we have auth token when needed, or if auth is not needed
+      if (!needsAuth || authToken) {
      // Always use proxy to avoid CORS, 405, and name resolution issues
      // Pass endpoint and auth token as query parameters to proxy
      const proxyUrlParams = new URLSearchParams({
@@ -171,6 +173,7 @@ export async function uploadNip95Media(file: File): Promise<MediaRef> {
       const proxyUrl = `/api/nip95-upload?${proxyUrlParams.toString()}`
      const url = await tryUploadEndpoint(proxyUrl, formData, true)
      return { url, type: mediaType }
+      }
    } catch (e) {
      const error = e instanceof Error ? e : new Error(String(e))
      const errorMessage = error.message

View File

@@ -479,7 +479,7 @@ class NostrService {
       }
    } catch (error) {
      console.warn(`[NostrService] Error checking unpublished in ${objectType}:`, error)
-      continue // Continue to next object type on error
    }
  }
@@ -499,7 +499,7 @@ class NostrService {
       }
    } catch (error) {
      console.warn(`[NostrService] Error searching for event in ${objectType}:`, error)
-      continue // Continue to next object type on error
    }
  }
}
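
The dropped lines were `continue` statements sitting last in their catch blocks; at the end of a loop body `continue` is a no-op, so removing it is behavior-preserving. Sketch:

    for (const objectType of ['article', 'author']) {
      try {
        console.log('checking', objectType)
      } catch (error) {
        console.warn('failed for', objectType, error)
        // A `continue` here would be redundant: control reaches the
        // end of the iteration and advances either way.
      }
    }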

View File

@@ -68,7 +68,11 @@ class ObjectCacheService {
       })
      this.dbHelpers.set(objectType, helper)
    }
-    return this.dbHelpers.get(objectType)!
+    const helper = this.dbHelpers.get(objectType)
+    if (!helper) {
+      throw new Error(`Database helper not found for ${objectType}`)
+    }
+    return helper
  }

  /**
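
Unlike the silent guards elsewhere in this commit, this hunk fails loudly: the assertion becomes an explicit throw that names the missing key. A generic sketch of that trade-off (`getRequired` is a hypothetical helper, not part of this diff):

    function getRequired<K, V>(map: Map<K, V>, key: K): V {
      const value = map.get(key)
      if (value === undefined) {
        // Throwing preserves the old "never undefined" contract while
        // giving a diagnosable error instead of a downstream crash.
        throw new Error(`No entry found for key: ${String(key)}`)
      }
      return value
    }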

View File

@@ -97,10 +97,7 @@ class PlatformSyncService {
     // Synchronize from all active relays
    for (let i = 0; i < activeRelays.length; i++) {
      const relayUrl = activeRelays[i]
-      if (!relayUrl) {
-        continue
-      }
+      if (relayUrl) {
      // Update progress with current relay
      syncProgressManager.setProgress({
        currentStep: 0,
@@ -183,12 +180,10 @@ class PlatformSyncService {
           if (event.id === '527d83e0af20bf23c3e104974090ccc21536ece72c24eb784b3642890f63b763') {
            console.warn(`[PlatformSync] Target event accepted and added to relayEvents`)
          }
-        } else {
+        } else if (event.id === '527d83e0af20bf23c3e104974090ccc21536ece72c24eb784b3642890f63b763') {
          // Log events that match filter but don't have service tag
-          if (event.id === '527d83e0af20bf23c3e104974090ccc21536ece72c24eb784b3642890f63b763') {
          console.warn(`[PlatformSync] Event ${event.id} rejected: service tag is "${tags.service}", expected "${PLATFORM_SERVICE}"`)
        }
-        }
      })

      sub.on('eose', (): void => {
@@ -229,6 +224,7 @@ class PlatformSyncService {
         })
      }
    }
+    }

    // Process all collected events
    await this.processAndCacheEvents(allEvents)

View File

@@ -118,17 +118,13 @@ class PublishWorkerService {
       const key = `${objectType}:${id}`
      const existing = this.unpublishedObjects.get(key)

-      // Skip if recently retried
-      if (existing && Date.now() - existing.lastRetryAt < RETRY_DELAY_MS) {
-        continue
-      }
-      // Skip if max retries reached
-      if (existing && existing.retryCount >= MAX_RETRIES_PER_OBJECT) {
+      // Skip if recently retried or max retries reached
+      const recentlyRetried = existing && Date.now() - existing.lastRetryAt < RETRY_DELAY_MS
+      const maxRetriesReached = existing && existing.retryCount >= MAX_RETRIES_PER_OBJECT
+
+      if (maxRetriesReached) {
        console.warn(`[PublishWorker] Max retries reached for ${objectType}:${id}, skipping`)
-        continue
-      }
+      } else if (!recentlyRetried) {
      // Add or update in map
      this.unpublishedObjects.set(key, {
        objectType,
@@ -139,6 +135,7 @@ class PublishWorkerService {
       })
    }
  }
+  }

  // Process all unpublished objects
  const objectsToProcess = Array.from(this.unpublishedObjects.values())
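
The refactor hoists the two skip conditions into named booleans, so the control flow reads as one if/else chain instead of two early `continue`s. A self-contained sketch mirroring the diff's names (constants and types are illustrative):

    const RETRY_DELAY_MS = 60_000
    const MAX_RETRIES_PER_OBJECT = 5

    interface UnpublishedEntry { lastRetryAt: number; retryCount: number }

    function shouldSchedule(existing: UnpublishedEntry | undefined, now: number): boolean {
      const recentlyRetried = existing !== undefined && now - existing.lastRetryAt < RETRY_DELAY_MS
      const maxRetriesReached = existing !== undefined && existing.retryCount >= MAX_RETRIES_PER_OBJECT
      return !maxRetriesReached && !recentlyRetried
    }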

View File

@@ -83,8 +83,8 @@ export async function tryWithRelayRotation<T>(
       attempts++

      // If we've tried all relays once, loop back
-      if (attempts < maxAttempts) {
-        continue
+      if (attempts >= maxAttempts) {
+        break
      }
    }
  }

View File

@@ -115,7 +115,9 @@ class ServiceWorkerClient {
         resolve(event.data)
      }

-      this.registration!.active!.postMessage(message, [messageChannel.port2])
+      if (this.registration?.active) {
+        this.registration.active.postMessage(message, [messageChannel.port2])
+      }
    })
  }
@@ -142,15 +144,18 @@ class ServiceWorkerClient {
     if (!this.messageHandlers.has(type)) {
      this.messageHandlers.set(type, [])
    }
-    this.messageHandlers.get(type)!.push(handler)
+    const handlers = this.messageHandlers.get(type)
+    if (handlers) {
+      handlers.push(handler)
+    }

    // Return unsubscribe function
    return () => {
-      const handlers = this.messageHandlers.get(type)
-      if (handlers) {
-        const index = handlers.indexOf(handler)
+      const typeHandlers = this.messageHandlers.get(type)
+      if (typeHandlers) {
+        const index = typeHandlers.indexOf(handler)
        if (index > -1) {
-          handlers.splice(index, 1)
+          typeHandlers.splice(index, 1)
        }
      }
    }
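
Where the old code asserted `registration!.active!`, the new code guards first; optional chaining can also collapse the whole check into one expression. A sketch against the standard DOM types:

    function send(registration: ServiceWorkerRegistration | undefined, message: unknown): void {
      const channel = new MessageChannel()
      // No-op when either the registration or its active worker is
      // missing, with no non-null assertions involved.
      registration?.active?.postMessage(message, [channel.port2])
    }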

View File

@@ -29,15 +29,14 @@ export function getLatestVersion(events: Event[]): Event | null {
   for (const event of events) {
    const tags = extractTagsFromEvent(event)
-    if (!tags.id) {
-      continue
-    }
+    if (tags.id) {
      if (!byId.has(tags.id)) {
        byId.set(tags.id, [])
      }
-    byId.get(tags.id)!.push({
+      const idArray = byId.get(tags.id)
+      if (idArray) {
+        idArray.push({
          event,
          version: tags.version,
          hidden: tags.hidden,
@@ -45,6 +44,8 @@ export function getLatestVersion(events: Event[]): Event | null {
           id: tags.id,
        })
      }
+    }
+  }

  // For each ID, find the latest non-hidden version
  const latestVersions: VersionedObject[] = []
@@ -53,11 +54,7 @@ export function getLatestVersion(events: Event[]): Event | null {
     // Filter out hidden objects
    const visible = objects.filter((obj) => !obj.hidden)
-    if (visible.length === 0) {
-      // All versions are hidden, skip this object
-      continue
-    }
+    if (visible.length > 0) {
      // Sort by version (descending) and take the first (latest)
      visible.sort((a, b) => b.version - a.version)
      const latest = visible[0]
@@ -65,6 +62,7 @@ export function getLatestVersion(events: Event[]): Event | null {
       latestVersions.push(latest)
    }
  }
+  }

  // If we have multiple IDs, we need to return the one with the highest version
  // But typically we expect one ID per query, so return the first
@@ -86,10 +84,7 @@ export function getAllVersions(events: Event[]): VersionedObject[] {
   for (const event of events) {
    const tags = extractTagsFromEvent(event)
-    if (!tags.id) {
-      continue
-    }
+    if (tags.id) {
      versions.push({
        event,
        version: tags.version,
@@ -98,6 +93,7 @@ export function getAllVersions(events: Event[]): VersionedObject[] {
         id: tags.id,
      })
    }
+  }

  // Sort by version (descending)
  versions.sort((a, b) => b.version - a.version)

View File

@@ -49,7 +49,7 @@ class WriteOrchestrator {
       // 1. Publish to network via WebSocket service (in parallel)
      websocketService.publishEvent(event, relays).then((statuses) => {
        return statuses
-          .map((status, index) => (status.success ? relays[index] : null))
+          .map((status, statusIndex) => (status.success ? relays[statusIndex] : null))
          .filter((relay): relay is string => relay !== null)
      }),
      // 2. Write to IndexedDB via Web Worker (in parallel, with published: false initially)
@@ -106,14 +106,14 @@ class WriteOrchestrator {
     }

    const secretKey = hexToBytes(this.privateKey)
-    const event = finalizeEvent(unsignedEvent, secretKey)
+    const finalizedEvent = finalizeEvent(unsignedEvent, secretKey)

    // Write and publish
    const result = await this.writeAndPublish(
      {
        objectType,
        hash,
-        event,
+        event: finalizedEvent,
        parsed,
        version,
        hidden,

View File

@@ -124,8 +124,9 @@ class WriteService {
         }
      }

-      this.writeWorker!.addEventListener('message', handler)
-      this.writeWorker!.postMessage({
+      if (this.writeWorker) {
+        this.writeWorker.addEventListener('message', handler)
+        this.writeWorker.postMessage({
        type: 'WRITE_OBJECT',
        data: {
          objectType,
@@ -138,6 +139,7 @@ class WriteService {
           published,
        },
      })
+      }
    })
  }
  // Fallback: direct write
@@ -181,11 +183,13 @@ class WriteService {
         }
      }

-      this.writeWorker!.addEventListener('message', handler)
-      this.writeWorker!.postMessage({
+      if (this.writeWorker) {
+        this.writeWorker.addEventListener('message', handler)
+        this.writeWorker.postMessage({
        type: 'UPDATE_PUBLISHED',
        data: { objectType, id, published },
      })
+      }
    })
  }
  // Fallback: direct write
@@ -231,11 +235,13 @@ class WriteService {
         }
      }

-      this.writeWorker!.addEventListener('message', handler)
-      this.writeWorker!.postMessage({
+      if (this.writeWorker) {
+        this.writeWorker.addEventListener('message', handler)
+        this.writeWorker.postMessage({
        type: 'CREATE_NOTIFICATION',
        data: { type, objectType, objectId, eventId, notificationData: data },
      })
+      }
    })
  }
  // Fallback: direct write
@@ -284,8 +290,8 @@ class WriteService {
       const { publishLog } = await import('./publishLog')
      await publishLog.logPublicationDirect(eventId, relayUrl, success, error, objectType, objectId)
    }
-  } catch (error) {
-    console.error('[WriteService] Error logging publication:', error)
+  } catch (logError) {
+    console.error('[WriteService] Error logging publication:', logError)
    // Don't throw for logs
  }
}

View File

@@ -89,7 +89,7 @@ function collectZap(
     sub.on('eose', () => done())
    setTimeout(() => done(), timeout).unref?.()

    if (typeof (sub as unknown as { on?: unknown }).on === 'function') {
-      const subWithError = sub as unknown as { on: (event: string, handler: (error: Error) => void) => void }
+      const subWithError = sub as unknown as { on: (event: string, handler: (err: Error) => void) => void }
      subWithError.on('error', onError)
    }
  })