lint fix wip

Nicolas Cantu 2026-01-06 18:47:12 +01:00
parent 07b9d9d7bb
commit 9e364d0313
5 changed files with 31 additions and 32 deletions

View File

@@ -124,8 +124,8 @@ export async function parsePresentationEvent(event: Event): Promise<import('@/ty
   if (tags.json) {
     try {
       profileData = JSON.parse(tags.json)
-    } catch (e) {
-      console.error('Error parsing JSON from tag:', e)
+    } catch (jsonError) {
+      console.error('Error parsing JSON from tag:', jsonError)
     }
   }
@@ -138,8 +138,8 @@ export async function parsePresentationEvent(event: Event): Promise<import('@/ty
       // Remove zero-width characters from JSON
       const cleanedJson = invisibleJsonMatch[1].replace(/[\u200B\u200C\u200D\u200E\u200F]/g, '').trim()
       profileData = JSON.parse(cleanedJson)
-    } catch (e) {
-      console.error('Error parsing profile JSON from invisible content:', e)
+    } catch (invisibleJsonError) {
+      console.error('Error parsing profile JSON from invisible content:', invisibleJsonError)
     }
   }
@@ -149,8 +149,8 @@ export async function parsePresentationEvent(event: Event): Promise<import('@/ty
   if (jsonMatch?.[1]) {
     try {
       profileData = JSON.parse(jsonMatch[1].trim())
-    } catch (e) {
-      console.error('Error parsing profile JSON from content:', e)
+    } catch (contentJsonError) {
+      console.error('Error parsing profile JSON from content:', contentJsonError)
     }
   }
 }
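
Note: the three hunks above rename the reused catch binding `e` to a distinct, descriptive name per parse attempt, which is what catch-variable naming and shadowing lint rules typically ask for, and which makes each error log unambiguous about which JSON source failed. A minimal sketch of the resulting pattern; the function name and inputs here are hypothetical, only the catch bindings mirror the diff:

    // Sketch only: hypothetical helper showing one distinct catch binding per
    // JSON source, so each log line names the source that failed to parse.
    function parseProfileJsonSketch(tagJson?: string, contentJson?: string): unknown {
      let profileData: unknown
      if (tagJson) {
        try {
          profileData = JSON.parse(tagJson)
        } catch (jsonError) {
          console.error('Error parsing JSON from tag:', jsonError)
        }
      }
      if (profileData === undefined && contentJson) {
        try {
          profileData = JSON.parse(contentJson.trim())
        } catch (contentJsonError) {
          console.error('Error parsing profile JSON from content:', contentJsonError)
        }
      }
      return profileData
    }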

View File

@@ -24,7 +24,6 @@ async function getOrCreateMasterKey(): Promise<string> {
   if (typeof window === 'undefined') {
     throw new Error('Storage encryption requires browser environment')
   }
-  const { storageService } = await import('./storage/indexedDB')
   const existing = await storageService.get<string>(MASTER_KEY_STORAGE_KEY, 'article_storage')
   if (existing) {
     return existing
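
Note: the deleted line drops a redundant dynamic import of `storageService` inside `getOrCreateMasterKey`; the next line still calls `storageService.get`, so the binding is presumably already in scope from an import earlier in the file (an assumption, since the file header is not part of this hunk). A sketch of the shape this leaves, assuming a single module-scope import:

    // Sketch under the assumption that storageService is imported once at module
    // scope; the key constant is declared elsewhere in the real file.
    import { storageService } from './storage/indexedDB'

    declare const MASTER_KEY_STORAGE_KEY: string

    async function getOrCreateMasterKeySketch(): Promise<string> {
      if (typeof window === 'undefined') {
        throw new Error('Storage encryption requires browser environment')
      }
      // No per-call dynamic import needed: the module-level binding is reused.
      const existing = await storageService.get<string>(MASTER_KEY_STORAGE_KEY, 'article_storage')
      if (existing) {
        return existing
      }
      // Key generation and persistence continue in the real file (omitted here).
      throw new Error('Key generation omitted from this sketch')
    }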

View File

@@ -379,8 +379,8 @@ export async function unlockAccountTwoLevel(
   recoveryPhrase.fill('')

   // Step 4: Get encrypted private key from IndexedDB
-  const { storageService } = await import('./storage/indexedDB')
-  const encryptedPrivateKey = await storageService.get<EncryptedPayload>('nostr_encrypted_key', 'nostr_key_storage')
+  const { storageService: indexedDBStorage } = await import('./storage/indexedDB')
+  const encryptedPrivateKey = await indexedDBStorage.get<EncryptedPayload>('nostr_encrypted_key', 'nostr_key_storage')
   if (!encryptedPrivateKey) {
     throw new Error('No encrypted private key found in IndexedDB')
   }
@@ -409,8 +409,8 @@ export async function unlockAccountTwoLevel(
  */
 export async function accountExistsTwoLevel(): Promise<boolean> {
   try {
-    const { storageService } = await import('./storage/indexedDB')
-    const exists = await storageService.get<boolean>('nostr_account_exists', 'nostr_key_storage')
+    const { storageService: indexedDBStorage } = await import('./storage/indexedDB')
+    const exists = await indexedDBStorage.get<boolean>('nostr_account_exists', 'nostr_key_storage')
     return exists === true
   } catch {
     return false
@@ -422,8 +422,8 @@ export async function accountExistsTwoLevel(): Promise<boolean> {
  */
 export async function getPublicKeysTwoLevel(): Promise<{ publicKey: string; npub: string } | null> {
   try {
-    const { storageService } = await import('./storage/indexedDB')
-    return storageService.get<{ publicKey: string; npub: string }>('nostr_public_key', 'nostr_key_storage')
+    const { storageService: indexedDBStorage } = await import('./storage/indexedDB')
+    return indexedDBStorage.get<{ publicKey: string; npub: string }>('nostr_public_key', 'nostr_key_storage')
   } catch {
     return null
   }
@@ -433,10 +433,10 @@ export async function getPublicKeysTwoLevel(): Promise<{ publicKey: string; npub
  * Delete account (remove all stored data)
  */
 export async function deleteAccountTwoLevel(): Promise<void> {
-  const { storageService } = await import('./storage/indexedDB')
-  await storageService.delete('nostr_encrypted_key')
-  await storageService.delete('nostr_public_key')
-  await storageService.delete('nostr_account_exists')
+  const { storageService: indexedDBStorage } = await import('./storage/indexedDB')
+  await indexedDBStorage.delete('nostr_encrypted_key')
+  await indexedDBStorage.delete('nostr_public_key')
+  await indexedDBStorage.delete('nostr_account_exists')

   // Try to remove credential (may not be possible via API)
   if (navigator.credentials && navigator.credentials.preventSilentAccess) {
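
Note: in this file the lint fix keeps the dynamic imports but renames the destructured binding to `indexedDBStorage` at every call site, presumably so it cannot shadow another `storageService` identifier elsewhere in the module and so the lazily imported IndexedDB-backed service is named explicitly; the rest of the module is not shown, so that motivation is an assumption. A sketch of the aliasing pattern in isolation, with a hypothetical function name:

    // Sketch: the rename happens at the destructuring site of the dynamic import,
    // so the local binding (indexedDBStorage) cannot collide with any other
    // storageService identifier in the module (assumed, not visible in the diff).
    export async function accountExistsSketch(): Promise<boolean> {
      const { storageService: indexedDBStorage } = await import('./storage/indexedDB')
      const exists = await indexedDBStorage.get<boolean>('nostr_account_exists', 'nostr_key_storage')
      return exists === true
    }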

View File

@@ -168,13 +168,13 @@ export function getPurchasesByPayer(payerPubkey: string, timeoutMs: number = 500
     }

     sub.on('event', async (event: Event): Promise<void> => {
-      const parsed = await parsePurchaseFromEvent(event)
-      if (parsed) {
+      const purchaseParsed = await parsePurchaseFromEvent(event)
+      if (purchaseParsed) {
         // Cache the parsed purchase
-        if (parsed.hash) {
-          await objectCache.set('purchase', parsed.hash, event, parsed, 0, false, parsed.index)
+        if (purchaseParsed.hash) {
+          await objectCache.set('purchase', purchaseParsed.hash, event, purchaseParsed, 0, false, purchaseParsed.index ?? 0)
         }
-        results.push(parsed)
+        results.push(purchaseParsed)
       }
     })
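
Note: besides the descriptive rename from `parsed` to `purchaseParsed`, the cache call now passes `purchaseParsed.index ?? 0`, which suggests `objectCache.set` expects a definite number where `index` may be undefined on the parsed object. A sketch with a hypothetical signature showing why the nullish-coalescing fallback type-checks; the interfaces below are illustrative, not the project's actual API:

    // Hypothetical cache interface for illustration; the real objectCache API is not shown.
    interface ObjectCacheLike {
      set(
        kind: string,
        hash: string,
        event: unknown,
        parsed: unknown,
        ttl: number,
        pinned: boolean,
        index: number, // requires a definite number
      ): Promise<void>
    }

    interface ParsedPurchaseLike {
      hash?: string
      index?: number // may be absent on the parsed event
    }

    async function cachePurchaseSketch(
      cache: ObjectCacheLike,
      event: unknown,
      purchaseParsed: ParsedPurchaseLike,
    ): Promise<void> {
      if (purchaseParsed.hash) {
        // `?? 0` substitutes 0 only when index is null or undefined; a defined
        // index, including 0 itself, passes through unchanged.
        await cache.set('purchase', purchaseParsed.hash, event, purchaseParsed, 0, false, purchaseParsed.index ?? 0)
      }
    }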

View File

@@ -135,13 +135,13 @@ export function getReviewTipsForArticle(articleId: string, timeoutMs: number = 5
     }

     sub.on('event', async (event: Event): Promise<void> => {
-      const parsed = await parseReviewTipFromEvent(event)
-      if (parsed?.articleId === articleId) {
+      const reviewTipParsed = await parseReviewTipFromEvent(event)
+      if (reviewTipParsed?.articleId === articleId) {
         // Cache the parsed review tip
-        if (parsed.hash) {
-          await objectCache.set('review_tip', parsed.hash, event, parsed, 0, false, parsed.index)
+        if (reviewTipParsed.hash) {
+          await objectCache.set('review_tip', reviewTipParsed.hash, event, reviewTipParsed, 0, false, reviewTipParsed.index ?? 0)
         }
-        results.push(parsed)
+        results.push(reviewTipParsed)
       }
     })
@@ -177,13 +177,13 @@ export function getReviewTipsForReview(reviewId: string, timeoutMs: number = 500
     }

     sub.on('event', async (event: Event): Promise<void> => {
-      const parsed = await parseReviewTipFromEvent(event)
-      if (parsed?.reviewId === reviewId) {
+      const reviewTipParsed = await parseReviewTipFromEvent(event)
+      if (reviewTipParsed?.reviewId === reviewId) {
         // Cache the parsed review tip
-        if (parsed.hash) {
-          await objectCache.set('review_tip', parsed.hash, event, parsed, 0, false, parsed.index)
+        if (reviewTipParsed.hash) {
+          await objectCache.set('review_tip', reviewTipParsed.hash, event, reviewTipParsed, 0, false, reviewTipParsed.index ?? 0)
         }
-        results.push(parsed)
+        results.push(reviewTipParsed)
       }
     })
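
Note: the two review-tip hunks apply the same pattern as the purchase handler above: `parsed` becomes `reviewTipParsed` (filtered once by `articleId`, once by `reviewId`), and the cache index falls back to 0 via `??` when the parsed tip has no index.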