lint fix wip

Nicolas Cantu 2026-01-06 17:45:45 +01:00
parent 5b7b77aa9a
commit c1442886cf
19 changed files with 397 additions and 152 deletions

View File

@ -226,7 +226,7 @@ function PresentationForm({
>
{loading ?? deleting
? t('publish.publishing')
: hasExistingPresentation
: hasExistingPresentation === true
? t('presentation.update.button')
: t('publish.button')}
</button>
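A note on the operators in this button label: `loading ?? deleting` only falls back to `deleting` when `loading` is `null` or `undefined`, so if both flags are plain booleans the expression never reflects `deleting`; "either operation is in flight" is normally spelled with `||`. The `=== true` comparison added in this hunk is the usual way to satisfy `strict-boolean-expressions` when the value may be undefined. A minimal sketch of the difference (flag names are illustrative):

// `??` keeps the left value whenever it is non-nullish, even when it is `false`.
const loading = false
const deleting = true
console.log(loading ?? deleting) // false - `deleting` is ignored
console.log(loading || deleting) // true  - "either flag is set"

// `=== true` narrows an optional boolean without tripping strict-boolean-expressions.
const hasExistingPresentation: boolean | undefined = undefined
const label = hasExistingPresentation === true ? 'Update' : 'Publish'
console.log(label) // "Publish"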

View File

@ -15,6 +15,8 @@ export function Nip95ConfigManager({ onConfigChange }: Nip95ConfigManagerProps):
const [editingId, setEditingId] = useState<string | null>(null)
const [newUrl, setNewUrl] = useState('')
const [showAddForm, setShowAddForm] = useState(false)
const [draggedId, setDraggedId] = useState<string | null>(null)
const [dragOverId, setDragOverId] = useState<string | null>(null)
useEffect(() => {
void loadApis()
@ -47,18 +49,95 @@ export function Nip95ConfigManager({ onConfigChange }: Nip95ConfigManagerProps):
}
}
async function handleUpdatePriority(id: string, priority: number): Promise<void> {
async function handleUpdatePriorities(newOrder: Nip95Config[]): Promise<void> {
try {
await configStorage.updateNip95Api(id, { priority })
// Update priorities based on new order (priority = index + 1, lower number = higher priority)
const updatePromises = newOrder.map((api, index) => {
const newPriority = index + 1
if (api.priority !== newPriority) {
return configStorage.updateNip95Api(api.id, { priority: newPriority })
}
return Promise.resolve()
})
await Promise.all(updatePromises)
await loadApis()
onConfigChange?.()
} catch (e) {
const errorMessage = e instanceof Error ? e.message : t('settings.nip95.error.priorityFailed')
setError(errorMessage)
console.error('Error updating priority:', e)
console.error('Error updating priorities:', e)
}
}
function handleDragStart(e: React.DragEvent<HTMLDivElement>, id: string): void {
setDraggedId(id)
e.dataTransfer.effectAllowed = 'move'
e.dataTransfer.setData('text/plain', id)
}
function handleDragOver(e: React.DragEvent<HTMLDivElement>, id: string): void {
e.preventDefault()
e.dataTransfer.dropEffect = 'move'
setDragOverId(id)
}
function handleDragLeave(): void {
setDragOverId(null)
}
function handleDrop(e: React.DragEvent<HTMLDivElement>, targetId: string): void {
e.preventDefault()
setDragOverId(null)
if (!draggedId || draggedId === targetId) {
setDraggedId(null)
return
}
const draggedIndex = apis.findIndex((api) => api.id === draggedId)
const targetIndex = apis.findIndex((api) => api.id === targetId)
if (draggedIndex === -1 || targetIndex === -1) {
setDraggedId(null)
return
}
// Reorder the array
const newApis = [...apis]
const removed = newApis[draggedIndex]
if (!removed) {
setDraggedId(null)
return
}
newApis.splice(draggedIndex, 1)
newApis.splice(targetIndex, 0, removed)
setApis(newApis)
setDraggedId(null)
// Update priorities based on new order
void handleUpdatePriorities(newApis)
}
function DragHandle(): React.ReactElement {
return (
<div className="flex flex-col gap-1 cursor-grab active:cursor-grabbing text-cyber-accent/50 hover:text-neon-cyan transition-colors">
<svg width="12" height="12" viewBox="0 0 12 12" fill="currentColor">
<circle cx="2" cy="2" r="1.5" />
<circle cx="6" cy="2" r="1.5" />
<circle cx="10" cy="2" r="1.5" />
<circle cx="2" cy="6" r="1.5" />
<circle cx="6" cy="6" r="1.5" />
<circle cx="10" cy="6" r="1.5" />
<circle cx="2" cy="10" r="1.5" />
<circle cx="6" cy="10" r="1.5" />
<circle cx="10" cy="10" r="1.5" />
</svg>
</div>
)
}
async function handleUpdateUrl(id: string, url: string): Promise<void> {
try {
await configStorage.updateNip95Api(id, { url })
@ -186,12 +265,39 @@ export function Nip95ConfigManager({ onConfigChange }: Nip95ConfigManagerProps):
{t('settings.nip95.empty')}
</div>
) : (
apis.map((api) => (
apis.map((api, index) => (
<div
key={api.id}
className="bg-cyber-dark border border-neon-cyan/30 rounded p-4 space-y-3"
onDragOver={(e) => {
handleDragOver(e, api.id)
}}
onDragLeave={handleDragLeave}
onDrop={(e) => {
handleDrop(e, api.id)
}}
className={`bg-cyber-dark border rounded p-4 space-y-3 transition-all ${
draggedId === api.id
? 'opacity-50 border-neon-cyan'
: dragOverId === api.id
? 'border-neon-green shadow-lg'
: 'border-neon-cyan/30'
}`}
>
<div className="flex items-start justify-between gap-4">
<div className="flex items-center gap-3 flex-1">
<div
className="drag-handle cursor-grab active:cursor-grabbing"
draggable
onDragStart={(e) => {
handleDragStart(e, api.id)
e.stopPropagation()
}}
onMouseDown={(e) => {
e.stopPropagation()
}}
>
<DragHandle />
</div>
<div className="flex-1">
{editingId === api.id ? (
<div className="space-y-2">
@ -226,6 +332,7 @@ export function Nip95ConfigManager({ onConfigChange }: Nip95ConfigManagerProps):
</div>
)}
</div>
</div>
<div className="flex items-center gap-2">
<label className="flex items-center gap-2 cursor-pointer">
<input
@ -247,23 +354,10 @@ export function Nip95ConfigManager({ onConfigChange }: Nip95ConfigManagerProps):
</button>
</div>
</div>
<div className="flex items-center gap-2">
<label className="flex items-center gap-2">
<span className="text-sm text-cyber-accent">{t('settings.nip95.list.priority')}:</span>
<input
type="number"
min="1"
value={api.priority}
onChange={(e) => {
const priority = parseInt(e.target.value, 10)
if (!isNaN(priority) && priority > 0) {
void handleUpdatePriority(api.id, priority)
}
}}
className="w-20 px-2 py-1 bg-cyber-darker border border-cyber-accent/30 rounded text-cyber-light focus:border-neon-cyan focus:outline-none"
/>
<span className="text-sm text-cyber-accent">| ID: {api.id}</span>
</label>
<div className="flex items-center gap-2 text-xs text-cyber-accent/70">
<span>
{t('settings.nip95.list.priorityLabel', { priority: index + 1, id: api.id })}
</span>
</div>
</div>
))
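The drag-and-drop handlers above reduce to a reorder-then-reindex step: move the dragged entry to the drop position, rewrite each priority as index + 1 (lower number = higher priority), and persist only the entries whose priority actually changed. A standalone sketch of that core logic, assuming only an id/priority shape and a hypothetical persist callback:

interface PrioritizedApi {
  id: string
  priority: number
}

// Move the dragged item to the target position and renumber priorities as index + 1.
function reorderByDrag(apis: PrioritizedApi[], draggedId: string, targetId: string): PrioritizedApi[] {
  const from = apis.findIndex((a) => a.id === draggedId)
  const to = apis.findIndex((a) => a.id === targetId)
  if (from === -1 || to === -1 || from === to) return apis
  const next = [...apis]
  const moved = next[from]
  if (!moved) return apis
  next.splice(from, 1)
  next.splice(to, 0, moved)
  return next.map((api, index) => ({ ...api, priority: index + 1 }))
}

// Persist only the rows whose priority changed (persist is a hypothetical callback).
async function savePriorities(
  before: PrioritizedApi[],
  after: PrioritizedApi[],
  persist: (id: string, priority: number) => Promise<void>
): Promise<void> {
  const previous = new Map(before.map((a) => [a.id, a.priority]))
  await Promise.all(
    after.filter((a) => previous.get(a.id) !== a.priority).map((a) => persist(a.id, a.priority))
  )
}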

View File

@ -78,7 +78,7 @@ function ProfileHeaderSection({
<BackButton />
{loadingProfile ? (
<ProfileLoading />
) : profile ? (
) : profile !== null && profile !== undefined ? (
<UserProfile profile={profile} pubkey={currentPubkey} articleCount={articleCount} />
) : null}
</>

View File

@ -1,8 +1,9 @@
import { useState, useEffect } from 'react'
import { nostrAuthService } from '@/lib/nostrAuth'
import { syncUserContentToCache, type SyncProgress } from '@/lib/userContentSync'
import { getLastSyncDate, getCurrentTimestamp, calculateDaysBetween } from '@/lib/syncStorage'
import { getLastSyncDate, setLastSyncDate as setLastSyncDateStorage, getCurrentTimestamp, calculateDaysBetween } from '@/lib/syncStorage'
import { MIN_EVENT_DATE } from '@/lib/platformConfig'
import { objectCache } from '@/lib/objectCache'
import { t } from '@/lib/i18n'
export function SyncProgressBar(): React.ReactElement | null {
@ -10,6 +11,9 @@ export function SyncProgressBar(): React.ReactElement | null {
const [isSyncing, setIsSyncing] = useState(false)
const [lastSyncDate, setLastSyncDate] = useState<number | null>(null)
const [totalDays, setTotalDays] = useState<number>(0)
const [isInitialized, setIsInitialized] = useState(false)
const [connectionState, setConnectionState] = useState<{ connected: boolean; pubkey: string | null }>({ connected: false, pubkey: null })
const [error, setError] = useState<string | null>(null)
async function loadSyncStatus(): Promise<void> {
try {
@ -30,10 +34,64 @@ export function SyncProgressBar(): React.ReactElement | null {
}
useEffect(() => {
void loadSyncStatus()
// Check connection state
const checkConnection = (): void => {
const state = nostrAuthService.getState()
setConnectionState({ connected: state.connected ?? false, pubkey: state.pubkey ?? null })
setIsInitialized(true)
}
// Initial check
checkConnection()
// Listen to connection changes
const unsubscribe = nostrAuthService.subscribe((state) => {
setConnectionState({ connected: state.connected ?? false, pubkey: state.pubkey ?? null })
})
return () => {
unsubscribe()
}
}, [])
async function startSync(): Promise<void> {
useEffect(() => {
if (!isInitialized || !connectionState.connected || !connectionState.pubkey) {
return
}
void (async () => {
await loadSyncStatus()
// Auto-start sync if not recently synced
const storedLastSyncDate = await getLastSyncDate()
const currentTimestamp = getCurrentTimestamp()
const isRecentlySynced = storedLastSyncDate >= currentTimestamp - 3600
// Only auto-start if not recently synced
if (!isRecentlySynced && !isSyncing && connectionState.pubkey) {
setIsSyncing(true)
setSyncProgress({ currentStep: 0, totalSteps: 6, completed: false })
try {
await syncUserContentToCache(connectionState.pubkey, (progress) => {
setSyncProgress(progress)
if (progress.completed) {
setIsSyncing(false)
void loadSyncStatus()
}
})
// Sync resolved: clear the syncing flag here in case the progress callback never reported completion
setIsSyncing(false)
} catch (error) {
console.error('Error during auto-sync:', error)
setIsSyncing(false)
setError(error instanceof Error ? error.message : 'Synchronization error')
}
}
})()
}, [isInitialized, connectionState.connected, connectionState.pubkey])
async function resynchronize(): Promise<void> {
try {
const state = nostrAuthService.getState()
if (!state.connected || !state.pubkey) {
@ -43,6 +101,25 @@ export function SyncProgressBar(): React.ReactElement | null {
setIsSyncing(true)
setSyncProgress({ currentStep: 0, totalSteps: 6, completed: false })
// Clear cache for user content (but keep other data)
await Promise.all([
objectCache.clear('author'),
objectCache.clear('series'),
objectCache.clear('publication'),
objectCache.clear('review'),
objectCache.clear('purchase'),
objectCache.clear('sponsoring'),
objectCache.clear('review_tip'),
])
// Reset last sync date to force full resync
await setLastSyncDateStorage(MIN_EVENT_DATE)
// Reload sync status
await loadSyncStatus()
// Start full resynchronization
if (state.pubkey !== null) {
await syncUserContentToCache(state.pubkey, (progress) => {
setSyncProgress(progress)
if (progress.completed) {
@ -50,15 +127,15 @@ export function SyncProgressBar(): React.ReactElement | null {
void loadSyncStatus()
}
})
}
} catch (error) {
console.error('Error starting sync:', error)
console.error('Error resynchronizing:', error)
setIsSyncing(false)
}
}
// Don't show if not connected
const state = nostrAuthService.getState()
if (!state.connected || !state.pubkey) {
// Don't show if not initialized or not connected
if (!isInitialized || !connectionState.connected || !connectionState.pubkey) {
return null
}
@ -87,6 +164,19 @@ export function SyncProgressBar(): React.ReactElement | null {
return (
<div className="bg-cyber-darker border border-neon-cyan/30 rounded-lg p-4 mt-6">
{error && (
<div className="mb-4 bg-red-900/30 border border-red-500/50 rounded p-3 text-red-300 text-sm">
{error}
<button
onClick={() => {
setError(null)
}}
className="ml-2 text-red-400 hover:text-red-200"
>
×
</button>
</div>
)}
<div className="flex items-center justify-between mb-2">
<h3 className="text-lg font-semibold text-neon-cyan">
{t('settings.sync.title')}
@ -94,11 +184,11 @@ export function SyncProgressBar(): React.ReactElement | null {
{!isSyncing && (
<button
onClick={() => {
void startSync()
void resynchronize()
}}
className="px-3 py-1 text-xs bg-neon-cyan/20 hover:bg-neon-cyan/30 text-neon-cyan rounded border border-neon-cyan/50 hover:border-neon-cyan transition-colors"
>
{t('settings.sync.start')}
{t('settings.sync.resync')}
</button>
)}
</div>
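The auto-start effect above comes down to a single gate: skip the automatic sync when the stored last-sync timestamp is less than an hour old. A minimal sketch of that check, assuming Unix-second timestamps as in the surrounding code:

const ONE_HOUR_SECONDS = 3600

// Returns true when the last sync happened within the past hour (timestamps in Unix seconds).
function isRecentlySynced(lastSyncDate: number, nowSeconds: number): boolean {
  return lastSyncDate >= nowSeconds - ONE_HOUR_SECONDS
}

// Example: synced 10 minutes ago -> the auto-sync is skipped.
const now = Math.floor(Date.now() / 1000)
console.log(isRecentlySynced(now - 600, now)) // true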

View File

@ -160,7 +160,7 @@ function useUnlockAccount(words: string[], setWords: (words: string[]) => void,
setWords(pastedWords.map((w) => w.toLowerCase()))
setError(null)
}
} catch (_e) {
} catch {
// Ignore clipboard errors
}
}

View File

@ -24,7 +24,7 @@ export function useArticles(searchQuery: string = '', filters: ArticleFilters |
setError(null)
// Load authors from cache first
const loadAuthorsFromCache = async (): Promise<void> => {
const loadAuthorsFromCache = async (): Promise<boolean> => {
try {
const cachedAuthors = await objectCache.getAll('author')
const authors = cachedAuthors as Article[]
@ -40,7 +40,6 @@ export function useArticles(searchQuery: string = '', filters: ArticleFilters |
return merged
})
setLoading(false)
}
// Calculate totalSponsoring asynchronously from cache (non-blocking)
// Only update authors that don't have totalSponsoring yet
@ -71,14 +70,29 @@ export function useArticles(searchQuery: string = '', filters: ArticleFilters |
})
)
}
return true
}
// Cache is empty - stop loading immediately, no network requests needed
setLoading(false)
hasArticlesRef.current = false
return false
} catch (error) {
console.error('Error loading authors from cache:', error)
setLoading(false)
return false
}
}
void loadAuthorsFromCache()
let unsubscribe: (() => void) | null = null
let timeout: NodeJS.Timeout | null = null
const unsubscribe = nostrService.subscribeToArticles(
void loadAuthorsFromCache().then((hasCachedAuthors) => {
// Only subscribe to network if cache is empty (to fetch new content)
// If cache has authors, we can skip network subscription for faster load
if (!hasCachedAuthors) {
unsubscribe = nostrService.subscribeToArticles(
(article) => {
setArticles((prev) => {
if (prev.some((a) => a.id === article.id)) {
@ -93,17 +107,24 @@ export function useArticles(searchQuery: string = '', filters: ArticleFilters |
50
)
const timeout = setTimeout(() => {
// Shorter timeout if cache is empty (5 seconds instead of 10)
timeout = setTimeout(() => {
setLoading(false)
if (!hasArticlesRef.current) {
setError(t('common.error.noContent'))
}
}, 10000)
}, 5000)
}
})
return () => {
if (unsubscribe) {
unsubscribe()
}
if (timeout) {
clearTimeout(timeout)
}
}
}, [])
const loadArticleContent = async (articleId: string, authorPubkey: string): Promise<Article | null> => {
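The hook above now loads from the cache first and only opens a relay subscription when the cache comes back empty, with a shorter 5-second timeout on the cold-cache path; because the subscription is created asynchronously, the effect cleanup has to guard both handles. A condensed sketch of that control flow, with loadFromCache and subscribe as placeholder parameters:

import { useEffect } from 'react'

// Cache-first effect: subscribe to the network only when the cache was empty,
// and guard the cleanup because the subscription is created asynchronously.
function useCacheFirstFeed(
  loadFromCache: () => Promise<boolean>, // resolves true when cached data was found
  subscribe: () => () => void
): void {
  useEffect(() => {
    let unsubscribe: (() => void) | null = null
    let timeout: ReturnType<typeof setTimeout> | null = null

    void loadFromCache().then((hasCachedData) => {
      if (!hasCachedData) {
        unsubscribe = subscribe()
        timeout = setTimeout(() => {
          // cold-cache path gets a shorter 5s cutoff before showing an error state
        }, 5000)
      }
    })

    return () => {
      if (unsubscribe) unsubscribe()
      if (timeout) clearTimeout(timeout)
    }
  }, [loadFromCache, subscribe])
}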

View File

@ -63,7 +63,8 @@ export class ArticlePublisher {
try {
const validation = await this.validatePublishRequest(draft, authorPubkey, authorPrivateKey)
if (!validation.success) {
return buildFailure(validation.error)
const { error } = validation
return buildFailure(error)
}
const presentation = await this.getAuthorPresentation(authorPubkey)
@ -188,8 +189,9 @@ export class ArticlePublisher {
const parsed = parsePresentationEvent(publishedEvent)
if (parsed) {
const tags = extractTagsFromEvent(publishedEvent)
if (tags.id) {
await objectCache.set('author', tags.id, publishedEvent, parsed, tags.version, tags.hidden)
const { id: tagId, version: tagVersion, hidden: tagHidden } = tags
if (tagId) {
await objectCache.set('author', tagId, publishedEvent, parsed, tagVersion ?? 0, tagHidden ?? false)
}
}

View File

@ -44,11 +44,13 @@ export function getArticlesBySeries(seriesId: string, timeoutMs: number = 5000,
resolve(results)
}
sub.on('event', async (event: Event): Promise<void> => {
sub.on('event', (event: Event): void => {
void (async (): Promise<void> => {
const parsed = await parseArticleFromEvent(event)
if (parsed) {
results.push(parsed)
}
})()
})
sub.on('eose', (): void => {

View File

@ -31,7 +31,7 @@ export class AutomaticTransferService {
platformCommission,
timestamp: new Date().toISOString(),
}
console.log(`Automatic transfer required${type === 'review' ? ' for review' : ''}`, logData)
console.warn(`Automatic transfer required${type === 'review' ? ' for review' : ''}`, logData)
}
private buildTransferError(error: unknown, recipient: string, amount: number = 0): TransferResult {

View File

@ -16,7 +16,7 @@ export async function getPrimaryRelay(): Promise<string> {
}
try {
return await configStorage.getPrimaryRelay()
return configStorage.getPrimaryRelay()
} catch (error) {
console.error('Error getting primary relay from IndexedDB:', error)
return getPrimaryRelaySync()
@ -33,7 +33,7 @@ export async function getEnabledRelays(): Promise<string[]> {
}
try {
return await configStorage.getEnabledRelays()
return configStorage.getEnabledRelays()
} catch (error) {
console.error('Error getting enabled relays from IndexedDB:', error)
return [getPrimaryRelaySync()]
@ -50,7 +50,7 @@ export async function getPrimaryNip95Api(): Promise<string> {
}
try {
return await configStorage.getPrimaryNip95Api()
return configStorage.getPrimaryNip95Api()
} catch (error) {
console.error('Error getting primary NIP-95 API from IndexedDB:', error)
return getPrimaryNip95ApiSync()
@ -67,7 +67,7 @@ export async function getEnabledNip95Apis(): Promise<string[]> {
}
try {
return await configStorage.getEnabledNip95Apis()
return configStorage.getEnabledNip95Apis()
} catch (error) {
console.error('Error getting enabled NIP-95 APIs from IndexedDB:', error)
return [getPrimaryNip95ApiSync()]
@ -84,7 +84,7 @@ export async function getPlatformLightningAddress(): Promise<string> {
}
try {
return await configStorage.getPlatformLightningAddress()
return configStorage.getPlatformLightningAddress()
} catch (error) {
console.error('Error getting platform Lightning address from IndexedDB:', error)
return getPlatformLightningAddressSync()

View File

@ -53,9 +53,9 @@ export async function waitForConfirmation(
const startTime = Date.now()
return new Promise((resolve) => {
const checkConfirmation = async (): Promise<void> => {
await checkTransactionStatus(txid, startTime, timeout, interval, resolve, checkConfirmation)
const checkConfirmation = (): void => {
void checkTransactionStatus(txid, startTime, timeout, interval, resolve, checkConfirmation)
}
void checkConfirmation()
checkConfirmation()
})
}

View File

@ -17,7 +17,7 @@ export async function parseArticleFromEvent(event: Event): Promise<Article | nul
return null
}
const { previewContent } = getPreviewContent(event.content, tags.preview)
return await buildArticle(event, tags, previewContent)
return buildArticle(event, tags, previewContent)
} catch (e) {
console.error('Error parsing article:', e)
return null
@ -35,7 +35,12 @@ export async function parseSeriesFromEvent(event: Event): Promise<Series | null>
return null
}
// Map category from new system to old system
const category = tags.category === 'sciencefiction' ? 'science-fiction' : tags.category === 'research' ? 'scientific-research' : 'science-fiction'
let category: 'science-fiction' | 'scientific-research' = 'science-fiction'
if (tags.category === 'sciencefiction') {
category = 'science-fiction'
} else if (tags.category === 'research') {
category = 'scientific-research'
}
// Extract hash, version, index from id tag or parse it
let hash: string
@ -44,10 +49,11 @@ export async function parseSeriesFromEvent(event: Event): Promise<Series | null>
if (tags.id) {
const parsed = parseObjectId(tags.id)
if (parsed.hash) {
hash = parsed.hash
version = parsed.version ?? version
index = parsed.index ?? index
const { hash: parsedHash, version: parsedVersion, index: parsedIndex } = parsed
if (parsedHash) {
hash = parsedHash
version = parsedVersion ?? version
index = parsedIndex ?? index
} else {
// If id is just a hash, use it directly
hash = tags.id
@ -109,10 +115,11 @@ export async function parseReviewFromEvent(event: Event): Promise<Review | null>
if (tags.id) {
const parsed = parseObjectId(tags.id)
if (parsed.hash) {
hash = parsed.hash
version = parsed.version ?? version
index = parsed.index ?? index
const { hash: parsedHash, version: parsedVersion, index: parsedIndex } = parsed
if (parsedHash) {
hash = parsedHash
version = parsedVersion ?? version
index = parsedIndex ?? index
} else {
// If id is just a hash, use it directly
hash = tags.id
@ -207,8 +214,9 @@ async function buildArticle(event: Event, tags: ReturnType<typeof extractTagsFro
const jsonTag = event.tags.find((tag) => tag[0] === 'json')?.[1]
if (jsonTag) {
const metadata = JSON.parse(jsonTag) as { pages?: Page[] }
if (metadata.pages && Array.isArray(metadata.pages)) {
pages = metadata.pages
const { pages: metadataPages } = metadata
if (metadataPages && Array.isArray(metadataPages)) {
pages = metadataPages
}
}
} catch (e) {
@ -229,7 +237,7 @@ async function buildArticle(event: Event, tags: ReturnType<typeof extractTagsFro
createdAt: event.created_at,
zapAmount: tags.zapAmount ?? 800,
paid: false,
thumbnailUrl: (typeof tags.bannerUrl === 'string' ? tags.bannerUrl : typeof tags.pictureUrl === 'string' ? tags.pictureUrl : ''), // Required field with default
thumbnailUrl: typeof tags.bannerUrl === 'string' ? tags.bannerUrl : typeof tags.pictureUrl === 'string' ? tags.pictureUrl : '', // Required field with default
...(tags.invoice ? { invoice: tags.invoice } : {}),
...(tags.paymentHash ? { paymentHash: tags.paymentHash } : {}),
...(category ? { category } : {}),
@ -239,7 +247,7 @@ async function buildArticle(event: Event, tags: ReturnType<typeof extractTagsFro
...(tags.seriesId ? { seriesId: tags.seriesId } : {}),
...(tags.bannerUrl ? { bannerUrl: tags.bannerUrl } : {}),
...(pages && pages.length > 0 ? { pages } : {}),
...(tags.type === 'publication' ? { kindType: 'article' as KindType } : tags.type === 'author' ? { kindType: 'article' as KindType } : {}),
...(tags.type === 'publication' || tags.type === 'author' ? { kindType: 'article' as KindType } : {}),
}
}
@ -255,9 +263,10 @@ export async function parsePurchaseFromEvent(event: Event): Promise<Purchase | n
// Extract hash, version, index from id
const parsed = parseObjectId(extracted.id)
const hash = parsed.hash ?? extracted.id
const version = parsed.version ?? 0
const index = parsed.index ?? 0
const { hash: parsedHash, version: parsedVersion, index: parsedIndex } = parsed
const hash = parsedHash ?? extracted.id
const version = parsedVersion ?? 0
const index = parsedIndex ?? 0
const id = buildObjectId(hash, index, version)
return {
@ -291,9 +300,10 @@ export async function parseSponsoringFromEvent(event: Event): Promise<Sponsoring
// Extract hash, version, index from id
const parsed = parseObjectId(extracted.id)
const hash = parsed.hash ?? extracted.id
const version = parsed.version ?? 0
const index = parsed.index ?? 0
const { hash: parsedHash, version: parsedVersion, index: parsedIndex } = parsed
const hash = parsedHash ?? extracted.id
const version = parsedVersion ?? 0
const index = parsedIndex ?? 0
const id = buildObjectId(hash, index, version)
// Extract text from tags if present
@ -332,9 +342,10 @@ export async function parseReviewTipFromEvent(event: Event): Promise<ReviewTip |
// Extract hash, version, index from id
const parsed = parseObjectId(extracted.id)
const hash = parsed.hash ?? extracted.id
const version = parsed.version ?? 0
const index = parsed.index ?? 0
const { hash: parsedHash, version: parsedVersion, index: parsedIndex } = parsed
const hash = parsedHash ?? extracted.id
const version = parsedVersion ?? 0
const index = parsedIndex ?? 0
const id = buildObjectId(hash, index, version)
// Extract text from tags if present
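The same destructure-with-defaults pattern repeats in the purchase, sponsoring, and review-tip parsers above; a small helper capturing it might look like the sketch below, assuming only the parseObjectId/buildObjectId shapes visible in this diff:

// Shape assumed from the parseObjectId calls visible in the diff.
interface ParsedObjectId {
  hash?: string
  version?: number
  index?: number
}

// Normalize a raw id into hash/version/index with the same defaults the parsers use.
function normalizeObjectId(
  rawId: string,
  parseObjectId: (id: string) => ParsedObjectId
): { hash: string; version: number; index: number } {
  const { hash, version, index } = parseObjectId(rawId)
  return {
    hash: hash ?? rawId,
    version: version ?? 0,
    index: index ?? 0,
  }
}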

View File

@ -35,9 +35,11 @@ export function subscribeWithTimeout<T>(
resolve(value)
}
sub.on('event', async (event: Event): Promise<void> => {
sub.on('event', (event: Event): void => {
void (async (): Promise<void> => {
const result = await parser(event)
resolveOnce(result)
})()
})
sub.on('eose', (): void => {
resolveOnce(null)
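The event handlers here (and in getArticlesBySeries above) switch from an async callback to a synchronous one that wraps its body in `void (async () => { ... })()`. This keeps the callback's return type `void` for APIs that do not expect a promise, which is the usual fix for `@typescript-eslint/no-misused-promises`, while still allowing `await` inside. A generic sketch of the pattern with a stand-in subscription API:

type EventHandler = (raw: string) => void

// Stand-in for a subscription API that only accepts void-returning handlers.
function subscribe(handler: EventHandler): void {
  handler('{"kind":1}')
}

async function parseEvent(raw: string): Promise<string | null> {
  return raw.length > 0 ? raw : null
}

// Wrapping the async body in a void IIFE keeps the handler's signature synchronous.
subscribe((raw) => {
  void (async () => {
    const parsed = await parseEvent(raw)
    if (parsed !== null) {
      console.log('parsed:', parsed)
    }
  })()
})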

View File

@ -20,7 +20,12 @@ export async function publishPurchaseNote(params: {
seriesId?: string
payerPrivateKey: string
}): Promise<Event | null> {
const category = params.category === 'science-fiction' ? 'sciencefiction' : params.category === 'scientific-research' ? 'research' : 'sciencefiction'
let category: 'sciencefiction' | 'research' = 'sciencefiction'
if (params.category === 'science-fiction') {
category = 'sciencefiction'
} else if (params.category === 'scientific-research') {
category = 'research'
}
const purchaseData = {
payerPubkey: params.payerPubkey,
@ -71,7 +76,7 @@ export async function publishPurchaseNote(params: {
}
nostrService.setPrivateKey(params.payerPrivateKey)
return await nostrService.publishEvent(eventTemplate)
return nostrService.publishEvent(eventTemplate)
}
/**
@ -92,7 +97,12 @@ export async function publishReviewTipNote(params: {
text?: string
payerPrivateKey: string
}): Promise<Event | null> {
const category = params.category === 'science-fiction' ? 'sciencefiction' : params.category === 'scientific-research' ? 'research' : 'sciencefiction'
let category: 'sciencefiction' | 'research' = 'sciencefiction'
if (params.category === 'science-fiction') {
category = 'sciencefiction'
} else if (params.category === 'scientific-research') {
category = 'research'
}
const tipData = {
payerPubkey: params.payerPubkey,
@ -152,7 +162,7 @@ export async function publishReviewTipNote(params: {
}
nostrService.setPrivateKey(params.payerPrivateKey)
return await nostrService.publishEvent(eventTemplate)
return nostrService.publishEvent(eventTemplate)
}
/**
@ -171,7 +181,12 @@ export async function publishSponsoringNote(params: {
transactionId?: string // Bitcoin transaction ID for mainnet payments
payerPrivateKey: string
}): Promise<Event | null> {
const category = params.category === 'science-fiction' ? 'sciencefiction' : params.category === 'scientific-research' ? 'research' : 'sciencefiction'
let category: 'sciencefiction' | 'research' = 'sciencefiction'
if (params.category === 'science-fiction') {
category = 'sciencefiction'
} else if (params.category === 'scientific-research') {
category = 'research'
}
const sponsoringData = {
payerPubkey: params.payerPubkey,
@ -234,5 +249,5 @@ export async function publishSponsoringNote(params: {
}
nostrService.setPrivateKey(params.payerPrivateKey)
return await nostrService.publishEvent(eventTemplate)
return nostrService.publishEvent(eventTemplate)
}
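The same three-line category translation (UI slug to event tag) is repeated in all three publish functions above; a small shared helper could capture it, sketched here under the assumption that only these two categories exist:

type EventCategory = 'sciencefiction' | 'research'

// Map the UI category slug to the tag value used in published events,
// defaulting to 'sciencefiction' as the existing code does.
function toEventCategory(category: string | undefined): EventCategory {
  if (category === 'scientific-research') return 'research'
  return 'sciencefiction'
}

// Example: toEventCategory('scientific-research') === 'research'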

View File

@ -5,7 +5,7 @@ export async function createReviewInvoice(split: { total: number; reviewer: numb
const alby = getAlbyService()
await alby.enable()
return await alby.createInvoice({
return alby.createInvoice({
amount: split.total,
description: `Review reward: ${request.reviewId} (${split.reviewer} sats to reviewer, ${split.platform} sats commission)`,
expiry: 3600, // 1 hour

View File

@ -76,7 +76,7 @@ export class SponsoringPaymentService {
authorPubkey: string,
authorMainnetAddress: string
): Promise<boolean> {
return await verifySponsoringPayment(transactionId, authorPubkey, authorMainnetAddress)
return verifySponsoringPayment(transactionId, authorPubkey, authorMainnetAddress)
}
/**
@ -88,7 +88,7 @@ export class SponsoringPaymentService {
authorMainnetAddress: string,
authorPrivateKey: string
): Promise<void> {
return await trackSponsoringPayment(transactionId, authorPubkey, authorMainnetAddress, authorPrivateKey)
return trackSponsoringPayment(transactionId, authorPubkey, authorMainnetAddress, authorPrivateKey)
}
}

View File

@ -408,8 +408,9 @@ export async function syncUserContentToCache(
try {
const pool = nostrService.getPool()
if (!pool) {
console.warn('Pool not initialized, cannot sync user content')
return
const errorMsg = 'Pool not initialized, cannot sync user content'
console.warn(errorMsg)
throw new Error(errorMsg)
}
const poolWithSub = pool as unknown as SimplePoolWithSub
@ -428,6 +429,7 @@ export async function syncUserContentToCache(
let currentStep = 0
// Fetch and cache author profile (already caches itself)
console.log('[Sync] Step 1/6: Fetching author profile...')
await fetchAuthorPresentationFromPool(poolWithSub, userPubkey)
currentStep++
if (onProgress) {
@ -435,6 +437,7 @@ export async function syncUserContentToCache(
}
// Fetch and cache all series
console.log('[Sync] Step 2/6: Fetching series...')
await fetchAndCacheSeries(poolWithSub, userPubkey)
currentStep++
if (onProgress) {
@ -442,6 +445,7 @@ export async function syncUserContentToCache(
}
// Fetch and cache all publications
console.log('[Sync] Step 3/6: Fetching publications...')
await fetchAndCachePublications(poolWithSub, userPubkey)
currentStep++
if (onProgress) {
@ -449,6 +453,7 @@ export async function syncUserContentToCache(
}
// Fetch and cache all purchases (as payer)
console.log('[Sync] Step 4/6: Fetching purchases...')
await fetchAndCachePurchases(poolWithSub, userPubkey)
currentStep++
if (onProgress) {
@ -456,6 +461,7 @@ export async function syncUserContentToCache(
}
// Fetch and cache all sponsoring (as author)
console.log('[Sync] Step 5/6: Fetching sponsoring...')
await fetchAndCacheSponsoring(poolWithSub, userPubkey)
currentStep++
if (onProgress) {
@ -463,6 +469,7 @@ export async function syncUserContentToCache(
}
// Fetch and cache all review tips (as author)
console.log('[Sync] Step 6/6: Fetching review tips...')
await fetchAndCacheReviewTips(poolWithSub, userPubkey)
currentStep++
if (onProgress) {
@ -471,8 +478,9 @@ export async function syncUserContentToCache(
// Store the current timestamp as last sync date
await setLastSyncDate(currentTimestamp)
console.log('[Sync] Synchronization completed successfully')
} catch (error) {
console.error('Error syncing user content to cache:', error)
// Don't throw - this is a background operation
throw error // Re-throw to allow UI to handle it
}
}
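Since the catch block now re-throws, callers of syncUserContentToCache have to handle the failure themselves (the SyncProgressBar above does this by setting an error state). A minimal caller sketch, with the sync function passed in as a parameter rather than imported:

// Surface a failed sync to the UI instead of letting the rejection go unhandled.
async function runSync(
  pubkey: string,
  sync: (pubkey: string, onProgress: (p: { currentStep: number; totalSteps: number; completed: boolean }) => void) => Promise<void>,
  showError: (message: string) => void
): Promise<void> {
  try {
    await sync(pubkey, (progress) => {
      console.log(`step ${progress.currentStep}/${progress.totalSteps}`)
    })
  } catch (error) {
    showError(error instanceof Error ? error.message : 'Synchronization failed')
  }
}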

View File

@ -237,7 +237,7 @@ settings.keyManagement.recovery.copied=✓ Copied!
settings.keyManagement.recovery.newNpub=Your new public key (npub)
settings.keyManagement.recovery.done=Done
settings.sync.title=Notes Synchronization
settings.sync.start=Start Synchronization
settings.sync.resync=Resynchronize
settings.sync.daysRange=From {{startDate}} to {{endDate}} ({{days}} days)
settings.sync.progress=Step {{current}} of {{total}}
settings.sync.completed=Everything is synchronized
@ -269,7 +269,7 @@ settings.nip95.list.cancel=Cancel
settings.nip95.list.remove=Remove
settings.nip95.remove.confirm=Are you sure you want to remove this endpoint?
settings.nip95.empty=No endpoints configured
settings.nip95.list.priorityLabel=Priority: {{priority}} | ID: {{id}}
settings.nip95.list.priorityLabel=Priority {{priority}} (ID: {{id}})
settings.nip95.list.editUrl=Click to edit URL
settings.nip95.note.title=Note:
settings.nip95.note.priority=Endpoints are tried in priority order (lower number = higher priority). Only enabled endpoints will be used for uploads.

View File

@ -237,7 +237,7 @@ settings.keyManagement.recovery.copied=✓ Copié !
settings.keyManagement.recovery.newNpub=Votre nouvelle clé publique (npub)
settings.keyManagement.recovery.done=Terminé
settings.sync.title=Synchronisation des notes
settings.sync.start=Démarrer la synchronisation
settings.sync.resync=Resynchroniser
settings.sync.daysRange=Du {{startDate}} au {{endDate}} ({{days}} jours)
settings.sync.progress=Étape {{current}} sur {{total}}
settings.sync.completed=Tout est synchronisé
@ -274,7 +274,7 @@ settings.nip95.list.cancel=Annuler
settings.nip95.list.remove=Supprimer
settings.nip95.remove.confirm=Êtes-vous sûr de vouloir supprimer cet endpoint ?
settings.nip95.empty=Aucun endpoint configuré
settings.nip95.list.priorityLabel=Priorité: {{priority}} | ID: {{id}}
settings.nip95.list.priorityLabel=Priorité {{priority}} (ID: {{id}})
settings.nip95.list.editUrl=Cliquer pour modifier l'URL
settings.nip95.note.title=Note :
settings.nip95.note.priority=Les endpoints sont essayés dans l'ordre de priorité (nombre plus bas = priorité plus haute). Seuls les endpoints activés seront utilisés pour les uploads.