lint fix wip

Nicolas Cantu 2026-01-07 03:10:40 +01:00
parent ff7b83d829
commit 17e4b10b1f
26 changed files with 2409 additions and 1725 deletions

View File

@@ -0,0 +1,658 @@
# Code duplication, consolidation, and optimization analysis
**Date**: 2024-12-19
**Author**: Équipe 4NK
## Executive summary
This analysis identifies code duplication, opportunities for consolidation/centralization, and areas for reorganization and optimization in the codebase. The goal is to reduce duplication, improve maintainability, and optimize the architecture without degrading performance.
## 1. Identified duplications
### 1.1 IndexedDB initialization (Critical - heavy duplication)
**Location**: Multiple files with an identical pattern
**Affected files**:
- `lib/notificationService.ts` (lines 39-89)
- `lib/publishLog.ts` (lines 24-72)
- `lib/storage/indexedDB.ts` (lines 25-72)
- `lib/objectCache.ts` (lines 33-86)
- `lib/localeStorage.ts` (lines 14-43)
- `lib/settingsCache.ts` (lines 24-54)
- `public/writeWorker.js` (lines 404-479)
**Duplicated pattern**:
```typescript
private async init(): Promise<void> {
if (this.db) return
if (this.initPromise) return this.initPromise
this.initPromise = this.openDatabase()
try {
await this.initPromise
} catch (error) {
this.initPromise = null
throw error
}
}
private openDatabase(): Promise<void> {
return new Promise((resolve, reject) => {
if (typeof window === 'undefined' || !window.indexedDB) {
reject(new Error('IndexedDB is not available'))
return
}
const request = window.indexedDB.open(DB_NAME, DB_VERSION)
request.onerror = () => reject(...)
request.onsuccess = () => { this.db = request.result; resolve() }
request.onupgradeneeded = (event) => { /* schema creation */ }
})
}
```
**Impact**:
- ~200 duplicated lines of code
- Hard to maintain (changes must be applied in 7+ places)
- Risk of inconsistencies between implementations
**Proposed solution**: Create a generic `lib/indexedDBHelper.ts` utility using a factory pattern
---
### 1.2 Subscription pattern with relay rotation (Critical - heavy duplication)
**Location**: `lib/userContentSync.ts`
**Affected functions**:
- `fetchAndCachePublications` (lines 22-146)
- `fetchAndCacheSeries` (lines 151-281)
- `fetchAndCachePurchases` (lines 286-383)
- `fetchAndCacheSponsoring` (lines 388-485)
- `fetchAndCacheReviewTips` (lines 490-587)
- `fetchAndCachePaymentNotes` (lines 599-721)
**Duplicated pattern** (~50 lines per function):
```typescript
// 1. Fetch lastSyncDate
const { getLastSyncDate } = await import('./syncStorage')
const lastSyncDate = await getLastSyncDate()
// 2. Build the filters
const filters = [{ ...buildTagFilter(...), since: lastSyncDate, limit: 1000 }]
// 3. Attempt with relay rotation
const { createSubscription } = require('@/types/nostr-tools-extended')
let sub = null
let usedRelayUrl = ''
try {
const result = await tryWithRelayRotation(
pool as unknown as import('nostr-tools').SimplePool,
async (relayUrl, poolWithSub) => {
usedRelayUrl = relayUrl
// Notify syncProgressManager
const { syncProgressManager } = await import('./syncProgressManager')
const currentProgress = syncProgressManager.getProgress()
if (currentProgress) {
syncProgressManager.setProgress({
...currentProgress,
currentStep: 0,
currentRelay: relayUrl,
})
}
return createSubscription(poolWithSub, [relayUrl], filters)
},
5000
)
sub = result
} catch {
// Fallback to primary relay
usedRelayUrl = getPrimaryRelaySync()
sub = createSubscription(pool, [usedRelayUrl], filters)
}
// 4. Handle events with a Promise + timeout
const events: Event[] = []
return new Promise<void>((resolve) => {
let finished = false
const done = async () => { /* ... */ }
sub.on('event', (event) => { events.push(event) })
sub.on('eose', () => void done())
setTimeout(() => void done(), 10000).unref?.()
})
```
**Impact**:
- ~300 duplicated lines of code
- Event-handling logic repeated 6 times
- Risk of inconsistencies in error and timeout handling
**Proposed solution**: Create a generic `createSyncSubscription` function in `lib/syncSubscriptionHelper.ts`
---
### 1.3 Event processing pattern with grouping by hash (Medium duplication)
**Location**: `lib/userContentSync.ts`
**Affected functions**:
- `fetchAndCachePublications` (lines 88-126)
- `fetchAndCacheSeries` (lines 218-256)
**Duplicated pattern**:
```typescript
// Group events by hash ID and cache the latest version of each
const eventsByHashId = new Map<string, Event[]>()
for (const event of events) {
const tags = extractTagsFromEvent(event)
if (tags.id) {
const parsed = parseObjectId(tags.id)
const hash = parsed.hash ?? tags.id
if (!eventsByHashId.has(hash)) {
eventsByHashId.set(hash, [])
}
eventsByHashId.get(hash)!.push(event)
}
}
// Cache each publication/series
for (const [_hash, hashEvents] of eventsByHashId.entries()) {
const latestEvent = getLatestVersion(hashEvents)
if (latestEvent) {
const extracted = await extractPublicationFromEvent(latestEvent) // or extractSeriesFromEvent
if (extracted) {
const publicationParsed = parseObjectId(extracted.id)
const extractedHash = publicationParsed.hash ?? extracted.id
const extractedIndex = publicationParsed.index ?? 0
const tags = extractTagsFromEvent(latestEvent)
const { writeService } = await import('./writeService')
await writeService.writeObject(
'publication', // or 'series'
extractedHash,
latestEvent,
extracted,
tags.version ?? 0,
tags.hidden ?? false,
extractedIndex,
false
)
}
}
}
```
**Impact**:
- ~40 duplicated lines
- Grouping and caching logic repeated
**Proposed solution**: Create a generic `groupAndCacheEventsByHash` function in `lib/eventCacheHelper.ts`
---
### 1.4 Query pattern with objectCache (Low but repetitive duplication)
**Location**: `*Queries.ts` files
**Affected files**:
- `lib/purchaseQueries.ts`
- `lib/seriesQueries.ts`
- `lib/articleQueries.ts`
- (and probably other query files)
**Repeated pattern**:
```typescript
export async function getXxxById(id: string, _timeoutMs: number = 5000): Promise<Xxx | null> {
const parsed = parseObjectId(id)
const hash = parsed.hash ?? id
// Read only from IndexedDB cache
const cached = await objectCache.get('xxx', hash)
if (cached) {
return cached as Xxx
}
// Also try by ID if hash lookup failed
const cachedById = await objectCache.getById('xxx', id)
if (cachedById) {
return cachedById as Xxx
}
// Not found in cache - return null (no network request)
return null
}
```
**Impact**:
- Pattern repeated across several query files
- Identical fallback logic
**Proposed solution**: Create a `getCachedObjectById` helper function in `lib/queryHelpers.ts`
---
### 1.5 writeObject pattern with extraction (Medium duplication)
**Location**: Multiple files
**Affected files**:
- `lib/userContentSync.ts` (lines 107-123, 237-253, 351-359, 453-461, 555-563)
- `lib/platformSync.ts` (lines 304-305, 310-311, 316-317)
**Repeated pattern**:
```typescript
const extracted = await extractXxxFromEvent(event)
if (extracted) {
const { writeService } = await import('./writeService')
await writeService.writeObject(
'xxx',
extracted.hash,
event,
extracted,
tags.version ?? 0,
tags.hidden ?? false,
extracted.index ?? 0,
false
)
}
```
**Impact**:
- Repeated calls with the same default parameters
- Extraction + write logic repeated
**Proposed solution**: Create a `cacheEventAsObject` helper function in `lib/eventCacheHelper.ts`
---
### 1.6 IndexedDB transaction handling pattern (Medium duplication)
**Location**: Multiple files
**Repeated pattern**:
```typescript
const transaction = db.transaction([STORE_NAME], 'readonly' | 'readwrite')
const store = transaction.objectStore(STORE_NAME)
const index = store.index('xxx')
return new Promise((resolve, reject) => {
const request = index.get(key) // or openCursor, getAll, etc.
request.onsuccess = () => resolve(request.result)
request.onerror = () => reject(request.error)
})
```
**Impact**:
- Promise wrapper repeated in many places
- Similar error handling everywhere
**Proposed solution**: Create helpers in `lib/indexedDBHelper.ts`: `getFromStore`, `getAllFromStore`, `putToStore`, `deleteFromStore`, `openCursor`
---
### 1.7 Progress handling pattern in SyncProgressBar (Internal duplication)
**Location**: `components/SyncProgressBar.tsx`
**Duplicated pattern** (lines 104-126 and 177-199):
```typescript
const { syncProgressManager } = await import('@/lib/syncProgressManager')
const checkProgress = (): void => {
const currentProgress = syncProgressManager.getProgress()
if (currentProgress) {
setSyncProgress(currentProgress)
if (currentProgress.completed) {
setIsSyncing(false)
void loadSyncStatus()
}
}
}
// Check progress periodically
const progressInterval = setInterval(() => {
checkProgress()
const currentProgress = syncProgressManager.getProgress()
if (currentProgress?.completed) {
clearInterval(progressInterval)
}
}, 500)
// Cleanup after 60 seconds max
setTimeout(() => {
clearInterval(progressInterval)
setIsSyncing(false)
}, 60000)
```
**Impact**:
- Code duplicated across two functions (auto-sync and resynchronize)
- Polling logic repeated
**Proposed solution**: Extract it into a custom `useSyncProgress` hook or a helper function, as sketched below
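A minimal sketch of what such a hook could look like (the hook name, polling interval, and progress shape are assumptions carried over from the snippet above, not an existing API):
```typescript
// Hypothetical useSyncProgress hook: polls syncProgressManager until the sync completes.
import { useEffect, useState } from 'react'

interface SyncProgress {
  currentStep: number
  totalSteps: number
  completed: boolean
  currentRelay?: string
}

export function useSyncProgress(pollMs = 500, maxMs = 60000): SyncProgress | null {
  const [progress, setProgress] = useState<SyncProgress | null>(null)

  useEffect(() => {
    let interval: ReturnType<typeof setInterval> | undefined
    let timeout: ReturnType<typeof setTimeout> | undefined

    void (async () => {
      const { syncProgressManager } = await import('@/lib/syncProgressManager')
      interval = setInterval(() => {
        const current = syncProgressManager.getProgress()
        if (current) {
          setProgress(current)
          if (current.completed && interval) {
            clearInterval(interval)
          }
        }
      }, pollMs)
      // Hard stop after maxMs, mirroring the 60-second cleanup above
      timeout = setTimeout(() => { if (interval) clearInterval(interval) }, maxMs)
    })()

    return () => {
      if (interval) clearInterval(interval)
      if (timeout) clearTimeout(timeout)
    }
  }, [pollMs, maxMs])

  return progress
}
```
Both call sites in `SyncProgressBar.tsx` would then consume the returned progress instead of re-implementing the polling loop.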
---
## 2. Consolidation/centralization opportunities
### 2.1 Unified IndexedDB initialization service
**Goal**: Centralize all IndexedDB initialization logic
**Proposed structure**:
```typescript
// lib/indexedDBHelper.ts
export interface IndexedDBConfig {
dbName: string
version: number
storeName: string
keyPath: string
indexes?: Array<{ name: string; keyPath: string; unique?: boolean }>
onUpgrade?: (db: IDBDatabase, event: IDBVersionChangeEvent) => void
}
export class IndexedDBHelper {
private static instances = new Map<string, IndexedDBHelper>()
private db: IDBDatabase | null = null
private initPromise: Promise<void> | null = null
static getInstance(config: IndexedDBConfig): IndexedDBHelper {
// Singleton per dbName
}
async init(): Promise<IDBDatabase> { /* ... */ }
async getStore(mode: 'readonly' | 'readwrite'): Promise<IDBObjectStore> { /* ... */ }
// Helpers for common operations
}
```
**Benefits**:
- Removes ~200 lines of duplicated code
- Centralized maintenance
- Guaranteed consistency across services
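For illustration, a consuming service could then shrink to something like the following (a sketch mirroring the `localeStorage.ts` refactor included later in this commit):
```typescript
// Sketch: a storage service delegating all IndexedDB plumbing to the shared helper.
import { createIndexedDBHelper, type IndexedDBHelper } from './helpers/indexedDBHelper'

class LocaleStorageService {
  private readonly dbHelper: IndexedDBHelper

  constructor() {
    this.dbHelper = createIndexedDBHelper({
      dbName: 'nostr_paywall_settings',
      version: 2,
      storeName: 'locale',
      keyPath: 'key',
    })
  }

  async getLocale(): Promise<string | null> {
    // init(), the transaction, and the request wrapper all live in the helper
    const row = await this.dbHelper.get<{ key: string; value: string }>('locale')
    return row?.value ?? null
  }

  async saveLocale(value: string): Promise<void> {
    await this.dbHelper.put({ key: 'locale', value })
  }
}
```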
---
### 2.2 Subscription helper with relay rotation
**Goal**: Centralize the subscription-with-relay-rotation pattern
**Proposed structure**:
```typescript
// lib/syncSubscriptionHelper.ts
export interface SyncSubscriptionConfig {
pool: SimplePoolWithSub
filters: Filter[]
onEvent: (event: Event) => void | Promise<void>
onComplete?: (events: Event[]) => void | Promise<void>
timeout?: number
updateProgress?: (relayUrl: string) => void
}
export async function createSyncSubscription(
config: SyncSubscriptionConfig
): Promise<{ subscription: Subscription; relayUrl: string; events: Event[] }> {
// Centralizes all rotation, event handling, and timeout logic
}
```
**Benefits**:
- Removes ~300 lines of duplicated code
- Unified error handling
- Easier to test
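A sync function could then collapse to roughly the following (a sketch assuming the `createSyncSubscription` helper above; module paths are illustrative, and the filter is simplified where the real code uses `buildTagFilter(...)`):
```typescript
// Sketch: one sync step expressed through the shared subscription helper.
import type { SimplePoolWithSub } from '@/types/nostr-tools-extended'
import { createSyncSubscription } from './helpers/syncSubscriptionHelper'
import { cachePublicationsByHash } from './helpers/syncCacheHelpers'

export async function fetchAndCachePublications(pool: SimplePoolWithSub): Promise<void> {
  const { getLastSyncDate } = await import('./syncStorage')
  const lastSyncDate = await getLastSyncDate()
  // Real code builds the tag portion of this filter via buildTagFilter(...)
  const filters = [{ kinds: [1], since: lastSyncDate, limit: 1000 }]

  // Relay rotation, progress updates, EOSE handling, and the timeout live in the helper
  const { events } = await createSyncSubscription({ pool, filters, timeout: 10000 })

  // Only the type-specific caching step remains here
  await cachePublicationsByHash(events)
}
```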
---
### 2.3 Event grouping and caching helper
**Goal**: Centralize the group-by-hash and caching logic
**Proposed structure**:
```typescript
// lib/eventCacheHelper.ts
export interface EventCacheConfig {
objectType: ObjectType
extractor: (event: Event) => Promise<ExtractedObject | null>
getHash: (extracted: ExtractedObject) => string
getIndex: (extracted: ExtractedObject) => number
}
export async function groupAndCacheEventsByHash(
events: Event[],
config: EventCacheConfig
): Promise<void> {
// Group by hash, pick the latest version, cache it
}
```
**Benefits**:
- Removes ~40 lines of duplicated code
- Centralized versioning logic
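For example, the publication branch could become a single call (sketch; the extractor is the one already used in `lib/userContentSync.ts`, and hash/index/version/hidden fall back to the parsed id and event tags when the callbacks are omitted):
```typescript
// Sketch: caching publication events through the generic grouping helper.
import type { Event } from 'nostr-tools'
import { groupAndCacheEventsByHash } from './helpers/eventCacheHelper'
import { extractPublicationFromEvent } from './metadataExtractor'

export async function cachePublicationsByHash(events: Event[]): Promise<void> {
  await groupAndCacheEventsByHash(events, {
    objectType: 'publication',
    extractor: extractPublicationFromEvent,
    // hash, index, version, and hidden are derived from the parsed id and event tags by default
  })
}
```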
---
### 2.4 Unified query helper
**Goal**: Simplify queries with hash/ID fallback
**Proposed structure**:
```typescript
// lib/queryHelpers.ts
export async function getCachedObjectById<T>(
objectType: ObjectType,
id: string
): Promise<T | null> {
// Centralized hash/ID fallback logic
}
```
**Benefits**:
- Less repetitive code in the query files
- Consistent fallbacks
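A typical query then reduces to a one-liner (sketch; the `Purchase` type and object-type string are illustrative):
```typescript
// Sketch: a typed query delegating the cache lookup and hash/ID fallback to the helper.
import { getCachedObjectById } from './helpers/queryHelpers'
import type { Purchase } from '@/types/nostr' // assumed type, for illustration only

export async function getPurchaseById(id: string): Promise<Purchase | null> {
  // Cache-only lookup: hash parsed from the id first, then the full id; no network request
  return getCachedObjectById<Purchase>('purchase', id)
}
```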
---
### 2.5 Event caching helper
**Goal**: Simplify writing objects after extraction
**Proposed structure**:
```typescript
// lib/eventCacheHelper.ts
export async function cacheEventAsObject(
event: Event,
objectType: ObjectType,
extractor: (event: Event) => Promise<ExtractedObject | null>
): Promise<boolean> {
// Extraction + write with default parameters
}
```
**Benefits**:
- Less repetitive code
- Consistent default parameters
---
## 3. Organization and optimization areas
### 3.1 Helper organization
**Proposed structure**:
```
lib/
helpers/
    indexedDBHelper.ts        # IndexedDB initialization and operations
    syncSubscriptionHelper.ts # Subscriptions with relay rotation
    eventCacheHelper.ts       # Event grouping and caching
    queryHelpers.ts           # Query helpers
    transactionHelpers.ts     # IndexedDB transaction wrappers
```
**Benefits**:
- Clear organization by responsibility
- Easier discovery and reuse
- Separation of concerns
---
### 3.2 Optimizing dynamic imports
**Identified problem**: Dynamic imports repeated inside loops
**Examples**:
- `const { writeService } = await import('./writeService')` inside loops
- `const { syncProgressManager } = await import('./syncProgressManager')` inside callbacks
**Solution**: Import once at the start of the function, or use static imports where possible (see the sketch below)
**Impact**: Lower latency and better performance
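A minimal before/after sketch of the hoisting (the `writeObject` arguments are placeholders mirroring the calls above):
```typescript
import type { Event } from 'nostr-tools'

// Before: the dynamic import is awaited on every iteration
async function cacheAllBefore(events: Event[]): Promise<void> {
  for (const event of events) {
    const { writeService } = await import('./writeService')
    await writeService.writeObject('publication', event.id, event, { id: event.id }, 0, false, 0, false)
  }
}

// After: resolve the module once before the loop. ES modules are cached after the first
// load, so the gain is mostly the removed per-iteration await/promise indirection.
async function cacheAllAfter(events: Event[]): Promise<void> {
  const { writeService } = await import('./writeService')
  for (const event of events) {
    await writeService.writeObject('publication', event.id, event, { id: event.id }, 0, false, 0, false)
  }
}
```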
---
### 3.3 Optimizing event handling
**Identified problem**: Events accumulate in memory before being processed
**Solution**: Streaming processing with backpressure
**Proposed structure**:
```typescript
export async function createStreamingSyncSubscription<T>(
config: SyncSubscriptionConfig & {
processor: (event: Event) => Promise<T>
batchSize?: number
}
): Promise<{ results: T[] }> {
// Process in batches instead of accumulating everything
}
```
**Benefits**:
- Lower memory usage
- Faster processing for large volumes
---
### 3.4 Centralizing IndexedDB error handling
**Identified problem**: Error handling is scattered and sometimes inconsistent
**Solution**: Create an IndexedDB error wrapper with structured logging
**Proposed structure**:
```typescript
// lib/indexedDBHelper.ts
export class IndexedDBError extends Error {
constructor(
message: string,
public readonly operation: string,
public readonly storeName?: string,
public readonly cause?: unknown
) {
super(message)
// Automatic structured logging
}
}
```
**Benefits**:
- Better traceability
- Consistent error handling
---
### 3.5 Optimizing IndexedDB transactions
**Identified problem**: Multiple transactions for related operations
**Solution**: Group related operations into a single transaction where possible (see the sketch below)
**Example**: In `writeWorker.js`, `handleWriteMultiTable` could batch its transactions by type
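A sketch of the idea, independent of the `writeWorker.js` internals (store name and records are placeholders):
```typescript
// Sketch: write several related records in one readwrite transaction
// instead of opening a new transaction per put().
function putAllInOneTransaction(db: IDBDatabase, storeName: string, records: unknown[]): Promise<void> {
  return new Promise((resolve, reject) => {
    const tx = db.transaction([storeName], 'readwrite')
    const store = tx.objectStore(storeName)
    for (const record of records) {
      store.put(record)
    }
    // Resolve on transaction completion rather than on each request's success event
    tx.oncomplete = () => resolve()
    tx.onerror = () => reject(tx.error)
    tx.onabort = () => reject(tx.error)
  })
}
```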
---
### 3.6 Strict typing for the helpers
**Identified problem**: `unknown` and `any` are used in some helpers
**Solution**: Strict TypeScript generics for all helpers (see the sketch below)
**Benefits**:
- Better type safety
- Better autocompletion
- Errors caught at compile time
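A sketch of what stricter typing could look like for the event-cache config (a generic parameter instead of `unknown`, so callbacks see the concrete extracted type; declarations only):
```typescript
import type { Event } from 'nostr-tools'

export interface EventCacheConfig<T> {
  objectType: string
  extractor: (event: Event) => Promise<T | null>
  getHash?: (extracted: T) => string | null
  getIndex?: (extracted: T) => number
}

// Declaration-only sketch of the generic helper signature
export declare function groupAndCacheEventsByHash<T>(
  events: Event[],
  config: EventCacheConfig<T>
): Promise<void>

// Call site: T is inferred from the extractor, so getIndex receives a typed object
interface Publication { id: string; index?: number }
declare function extractPublicationFromEvent(event: Event): Promise<Publication | null>

async function example(events: Event[]): Promise<void> {
  await groupAndCacheEventsByHash(events, {
    objectType: 'publication',
    extractor: extractPublicationFromEvent,
    getIndex: (pub) => pub.index ?? 0, // pub is Publication, no cast needed
  })
}
```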
---
### 3.7 Documentation and examples
**Identified problem**: Lack of documentation on which patterns to use
**Solution**: Create `docs/patterns/` with:
- `indexedDB-patterns.md`: IndexedDB usage patterns
- `subscription-patterns.md`: Subscription patterns
- `caching-patterns.md`: Caching patterns
**Benefits**:
- Easier onboarding
- Consistent future implementations
---
## 4. Action prioritization
### Priority 1 (Critical - high impact)
1. **Unified IndexedDB initialization service** (~200 lines saved)
2. **Subscription helper with relay rotation** (~300 lines saved)
### Priority 2 (Important - medium impact)
3. **Event grouping and caching helper** (~40 lines saved)
4. **Event caching helper** (less repetitive code)
5. **Unified query helper** (simpler queries)
### Priority 3 (Improvement - low but worthwhile impact)
6. **Optimize dynamic imports**
7. **Centralize error handling**
8. **Document the patterns**
---
## 5. Risks and precautions
### Identified risks
1. **Functional regression**: refactoring critical code (IndexedDB, subscriptions)
   - **Mitigation**: unit tests before refactoring, incremental migration
2. **Performance**: abstraction can introduce overhead
   - **Mitigation**: before/after benchmarks, optimize if needed
3. **Compatibility**: API changes can break existing code
   - **Mitigation**: gradual deprecation, migration guides
### Precautions
- Validate each refactoring with tests
- Keep backward compatibility where possible
- Document breaking changes
- Measure the performance impact
---
## 6. Success metrics
### Code reduction
- **Goal**: remove ~600 lines of duplicated code
- **Measure**: before/after comparison with `cloc` or a similar tool
### Maintainability
- **Goal**: reduce the time needed to modify common patterns
- **Measure**: average time to apply a change (before/after)
### Quality
- **Goal**: fewer bugs caused by inconsistencies
- **Measure**: number of duplication-related bugs (before/after)
---
## Conclusion
This analysis identifies significant opportunities to reduce duplication and improve code organization. Priorities 1 and 2 should be tackled first to maximize the impact on maintainability and reduce the risk of inconsistencies.
The proposed refactorings respect the existing architecture and the separation-of-responsibilities principle. They should be carried out incrementally, with validation at each step.

View File

@@ -33,20 +33,10 @@ export async function fetchAuthorByHashId(
   // Otherwise, treat as hash ID
   const hashId = hashIdOrPubkey
-  // Read only from IndexedDB cache
-  const cached = await objectCache.get('author', hashId)
+  const { getCachedObjectById } = await import('./helpers/queryHelpers')
+  const cached = await getCachedObjectById<import('@/types/nostr').AuthorPresentationArticle>('author', hashId)
   if (cached) {
-    const presentation = cached as import('@/types/nostr').AuthorPresentationArticle
-    // Calculate totalSponsoring from cache
-    const { getAuthorSponsoring } = await import('./sponsoring')
-    presentation.totalSponsoring = await getAuthorSponsoring(presentation.pubkey)
-    return presentation
-  }
-  // Also try by ID if hash lookup failed
-  const cachedById = await objectCache.getById('author', hashId)
-  if (cachedById) {
-    const presentation = cachedById as import('@/types/nostr').AuthorPresentationArticle
+    const presentation = cached
     // Calculate totalSponsoring from cache
     const { getAuthorSponsoring } = await import('./sponsoring')
     presentation.totalSponsoring = await getAuthorSponsoring(presentation.pubkey)

View File

@@ -0,0 +1,111 @@
/**
* Helper for grouping and caching events by hash
* Centralizes the pattern of grouping events by hash ID and caching the latest version
*/
import type { Event } from 'nostr-tools'
import { extractTagsFromEvent } from '../nostrTagSystem'
import { parseObjectId } from '../urlGenerator'
import { getLatestVersion } from '../versionManager'
import type { ObjectType } from '../objectCache'
export interface EventCacheConfig {
objectType: ObjectType
extractor: (event: Event) => Promise<unknown | null>
getHash?: (extracted: unknown) => string | null
getIndex?: (extracted: unknown) => number
getVersion?: (event: Event) => number
getHidden?: (event: Event) => boolean
}
interface ExtractedObjectWithId {
id?: string
index?: number
}
/**
* Group events by hash ID and cache the latest version of each
*/
export async function groupAndCacheEventsByHash(
events: Event[],
config: EventCacheConfig
): Promise<void> {
const { objectType, extractor, getHash, getIndex, getVersion, getHidden } = config
// Group events by hash ID
const eventsByHashId = new Map<string, Event[]>()
for (const event of events) {
const tags = extractTagsFromEvent(event)
if (tags.id) {
// Extract hash from id (can be <hash>_<index>_<version> or just hash)
const parsed = parseObjectId(tags.id)
const hash = parsed.hash ?? tags.id
if (!eventsByHashId.has(hash)) {
eventsByHashId.set(hash, [])
}
eventsByHashId.get(hash)!.push(event)
}
}
// Cache each object (latest version)
for (const [_hash, hashEvents] of eventsByHashId.entries()) {
const latestEvent = getLatestVersion(hashEvents)
if (!latestEvent) {
continue
}
const extracted = await extractor(latestEvent)
if (!extracted) {
continue
}
// Get hash, index, version, hidden
const extractedHash = getHash ? getHash(extracted) : null
const extractedObj = extracted as ExtractedObjectWithId
const extractedId = extractedHash ?? extractedObj.id
if (!extractedId) {
continue
}
const publicationParsed = parseObjectId(extractedId)
const hash = publicationParsed.hash ?? extractedId
const index = getIndex ? getIndex(extracted) : publicationParsed.index ?? extractedObj.index ?? 0
const version = getVersion ? getVersion(latestEvent) : extractTagsFromEvent(latestEvent).version ?? 0
const hidden = getHidden ? getHidden(latestEvent) : extractTagsFromEvent(latestEvent).hidden ?? false
const { writeService } = await import('../writeService')
await writeService.writeObject(objectType, hash, latestEvent, extracted, version, hidden, index, false)
}
}
/**
* Cache a single event as an object
* Simplified version for direct event caching
*/
export async function cacheEventAsObject(
event: Event,
objectType: ObjectType,
extractor: (event: Event) => Promise<unknown | null>
): Promise<boolean> {
const extracted = await extractor(event)
if (!extracted) {
return false
}
const tags = extractTagsFromEvent(event)
const hash = (extracted as { hash?: string })?.hash ?? (extracted as { id?: string })?.id ?? ''
if (!hash) {
return false
}
const index = (extracted as { index?: number })?.index ?? 0
const version = tags.version ?? 0
const hidden = tags.hidden ?? false
const { writeService } = await import('../writeService')
await writeService.writeObject(objectType, hash, event, extracted, version, hidden, index, false)
return true
}

View File

@@ -0,0 +1,616 @@
/**
* Centralized IndexedDB helper for initialization and transaction management
* Provides unified API for all IndexedDB operations across the application
*/
export interface IndexedDBIndex {
name: string
keyPath: string | string[]
unique?: boolean
}
export interface IndexedDBConfig {
dbName: string
version: number
storeName: string
keyPath: string
indexes?: IndexedDBIndex[]
onUpgrade?: (db: IDBDatabase, event: IDBVersionChangeEvent) => void
}
export class IndexedDBError extends Error {
public readonly operation: string
public readonly storeName: string | undefined
public override readonly cause: unknown | undefined
public override readonly name = 'IndexedDBError'
constructor(message: string, operation: string, storeName?: string, cause?: unknown) {
super(message)
this.operation = operation
this.storeName = storeName
this.cause = cause
console.error(`[IndexedDBError] ${operation}${storeName ? ` on ${storeName}` : ''}: ${message}`, cause)
}
}
class IndexedDBHelper {
private db: IDBDatabase | null = null
private initPromise: Promise<void> | null = null
private readonly config: IndexedDBConfig
constructor(config: IndexedDBConfig) {
this.config = config
}
/**
* Initialize the IndexedDB database
*/
async init(): Promise<IDBDatabase> {
if (this.db) {
return this.db
}
if (this.initPromise) {
await this.initPromise
if (this.db) {
return this.db
}
throw new IndexedDBError('Database initialization failed', 'init', this.config.storeName)
}
this.initPromise = this.openDatabase()
try {
await this.initPromise
if (!this.db) {
throw new IndexedDBError('Database not initialized after open', 'init', this.config.storeName)
}
return this.db
} catch (error) {
this.initPromise = null
throw new IndexedDBError(
error instanceof Error ? error.message : 'Unknown error',
'init',
this.config.storeName,
error
)
}
}
private openDatabase(): Promise<void> {
return new Promise((resolve, reject) => {
if (typeof window === 'undefined' || !window.indexedDB) {
reject(new IndexedDBError('IndexedDB is not available', 'openDatabase', this.config.storeName))
return
}
const request = window.indexedDB.open(this.config.dbName, this.config.version)
request.onerror = (): void => {
reject(
new IndexedDBError(
`Failed to open IndexedDB: ${request.error}`,
'openDatabase',
this.config.storeName,
request.error
)
)
}
request.onsuccess = (): void => {
this.db = request.result
resolve()
}
request.onupgradeneeded = (event: IDBVersionChangeEvent): void => {
this.handleUpgrade(event)
}
})
}
private handleUpgrade(event: IDBVersionChangeEvent): void {
const db = (event.target as IDBOpenDBRequest).result
// Create object store if it doesn't exist
if (!db.objectStoreNames.contains(this.config.storeName)) {
this.createObjectStore(db)
} else {
// Store exists, check for missing indexes
this.createMissingIndexes(db, event)
}
// Call custom upgrade handler if provided
if (this.config.onUpgrade) {
this.config.onUpgrade(db, event)
}
}
private createObjectStore(db: IDBDatabase): void {
const store = db.createObjectStore(this.config.storeName, { keyPath: this.config.keyPath })
// Create indexes
if (this.config.indexes) {
for (const index of this.config.indexes) {
if (!store.indexNames.contains(index.name)) {
store.createIndex(index.name, index.keyPath, { unique: index.unique ?? false })
}
}
}
}
private createMissingIndexes(_db: IDBDatabase, event: IDBVersionChangeEvent): void {
const target = event.target as IDBOpenDBRequest
const { transaction } = target
if (!transaction) {
return
}
const store = transaction.objectStore(this.config.storeName)
if (this.config.indexes) {
for (const index of this.config.indexes) {
if (!store.indexNames.contains(index.name)) {
store.createIndex(index.name, index.keyPath, { unique: index.unique ?? false })
}
}
}
}
/**
* Get object store for read operations
*/
async getStore(mode: 'readonly'): Promise<IDBObjectStore> {
const db = await this.init()
const transaction = db.transaction([this.config.storeName], mode)
return transaction.objectStore(this.config.storeName)
}
/**
* Get object store for write operations
*/
async getStoreWrite(mode: 'readwrite'): Promise<IDBObjectStore> {
const db = await this.init()
const transaction = db.transaction([this.config.storeName], mode)
return transaction.objectStore(this.config.storeName)
}
/**
* Get a value from the store by key
*/
async get<T = unknown>(key: string | number): Promise<T | null> {
try {
const store = await this.getStore('readonly')
return new Promise<T | null>((resolve, reject) => {
const request = store.get(key)
request.onsuccess = (): void => {
resolve((request.result as T) ?? null)
}
request.onerror = (): void => {
reject(
new IndexedDBError(
`Failed to get value: ${request.error}`,
'get',
this.config.storeName,
request.error
)
)
}
})
} catch (error) {
if (error instanceof IndexedDBError) {
throw error
}
throw new IndexedDBError(
error instanceof Error ? error.message : 'Unknown error',
'get',
this.config.storeName,
error
)
}
}
/**
* Get a value from an index
*/
async getByIndex<T = unknown>(indexName: string, key: string | number): Promise<T | null> {
try {
const store = await this.getStore('readonly')
const index = store.index(indexName)
return new Promise<T | null>((resolve, reject) => {
const request = index.get(key)
request.onsuccess = (): void => {
resolve((request.result as T) ?? null)
}
request.onerror = (): void => {
reject(
new IndexedDBError(
`Failed to get value by index: ${request.error}`,
'getByIndex',
this.config.storeName,
request.error
)
)
}
})
} catch (error) {
if (error instanceof IndexedDBError) {
throw error
}
throw new IndexedDBError(
error instanceof Error ? error.message : 'Unknown error',
'getByIndex',
this.config.storeName,
error
)
}
}
/**
* Get all values from an index
*/
async getAllByIndex<T = unknown>(indexName: string, key?: IDBValidKey | IDBKeyRange): Promise<T[]> {
try {
const store = await this.getStore('readonly')
const index = store.index(indexName)
return new Promise<T[]>((resolve, reject) => {
const request = key !== undefined ? index.getAll(key) : index.getAll()
request.onsuccess = (): void => {
resolve((request.result as T[]) ?? [])
}
request.onerror = (): void => {
reject(
new IndexedDBError(
`Failed to get all values by index: ${request.error}`,
'getAllByIndex',
this.config.storeName,
request.error
)
)
}
})
} catch (error) {
if (error instanceof IndexedDBError) {
throw error
}
throw new IndexedDBError(
error instanceof Error ? error.message : 'Unknown error',
'getAllByIndex',
this.config.storeName,
error
)
}
}
/**
* Put a value in the store
*/
async put<T = unknown>(value: T): Promise<void> {
try {
const store = await this.getStoreWrite('readwrite')
return new Promise<void>((resolve, reject) => {
const request = store.put(value)
request.onsuccess = (): void => {
resolve()
}
request.onerror = (): void => {
reject(
new IndexedDBError(
`Failed to put value: ${request.error}`,
'put',
this.config.storeName,
request.error
)
)
}
})
} catch (error) {
if (error instanceof IndexedDBError) {
throw error
}
throw new IndexedDBError(
error instanceof Error ? error.message : 'Unknown error',
'put',
this.config.storeName,
error
)
}
}
/**
* Add a value to the store (fails if key exists)
*/
async add<T = unknown>(value: T): Promise<void> {
try {
const store = await this.getStoreWrite('readwrite')
return new Promise<void>((resolve, reject) => {
const request = store.add(value)
request.onsuccess = (): void => {
resolve()
}
request.onerror = (): void => {
reject(
new IndexedDBError(
`Failed to add value: ${request.error}`,
'add',
this.config.storeName,
request.error
)
)
}
})
} catch (error) {
if (error instanceof IndexedDBError) {
throw error
}
throw new IndexedDBError(
error instanceof Error ? error.message : 'Unknown error',
'add',
this.config.storeName,
error
)
}
}
/**
* Delete a value from the store by key
*/
async delete(key: string | number): Promise<void> {
try {
const store = await this.getStoreWrite('readwrite')
return new Promise<void>((resolve, reject) => {
const request = store.delete(key)
request.onsuccess = (): void => {
resolve()
}
request.onerror = (): void => {
reject(
new IndexedDBError(
`Failed to delete value: ${request.error}`,
'delete',
this.config.storeName,
request.error
)
)
}
})
} catch (error) {
if (error instanceof IndexedDBError) {
throw error
}
throw new IndexedDBError(
error instanceof Error ? error.message : 'Unknown error',
'delete',
this.config.storeName,
error
)
}
}
/**
* Clear all values from the store
*/
async clear(): Promise<void> {
try {
const store = await this.getStoreWrite('readwrite')
return new Promise<void>((resolve, reject) => {
const request = store.clear()
request.onsuccess = (): void => {
resolve()
}
request.onerror = (): void => {
reject(
new IndexedDBError(
`Failed to clear store: ${request.error}`,
'clear',
this.config.storeName,
request.error
)
)
}
})
} catch (error) {
if (error instanceof IndexedDBError) {
throw error
}
throw new IndexedDBError(
error instanceof Error ? error.message : 'Unknown error',
'clear',
this.config.storeName,
error
)
}
}
/**
* Open a cursor on the store
*/
async openCursor(
direction?: IDBCursorDirection,
range?: IDBKeyRange
): Promise<IDBCursorWithValue | null> {
try {
const store = await this.getStore('readonly')
return new Promise<IDBCursorWithValue | null>((resolve, reject) => {
// Pass null when no range is given so `direction` is not misinterpreted as the key query
const request = store.openCursor(range ?? null, direction)
request.onsuccess = (): void => {
resolve(request.result)
}
request.onerror = (): void => {
reject(
new IndexedDBError(
`Failed to open cursor: ${request.error}`,
'openCursor',
this.config.storeName,
request.error
)
)
}
})
} catch (error) {
if (error instanceof IndexedDBError) {
throw error
}
throw new IndexedDBError(
error instanceof Error ? error.message : 'Unknown error',
'openCursor',
this.config.storeName,
error
)
}
}
/**
* Open a cursor on an index
*/
async openCursorOnIndex(
indexName: string,
direction?: IDBCursorDirection,
range?: IDBKeyRange
): Promise<IDBCursorWithValue | null> {
try {
const store = await this.getStore('readonly')
const index = store.index(indexName)
return new Promise<IDBCursorWithValue | null>((resolve, reject) => {
// Pass null when no range is given so `direction` is not misinterpreted as the key query
const request = index.openCursor(range ?? null, direction)
request.onsuccess = (): void => {
resolve(request.result)
}
request.onerror = (): void => {
reject(
new IndexedDBError(
`Failed to open cursor on index: ${request.error}`,
'openCursorOnIndex',
this.config.storeName,
request.error
)
)
}
})
} catch (error) {
if (error instanceof IndexedDBError) {
throw error
}
throw new IndexedDBError(
error instanceof Error ? error.message : 'Unknown error',
'openCursorOnIndex',
this.config.storeName,
error
)
}
}
/**
* Count records in the store
*/
async count(range?: IDBKeyRange): Promise<number> {
try {
const store = await this.getStore('readonly')
return new Promise<number>((resolve, reject) => {
const request = range ? store.count(range) : store.count()
request.onsuccess = (): void => {
resolve(request.result)
}
request.onerror = (): void => {
reject(
new IndexedDBError(
`Failed to count records: ${request.error}`,
'count',
this.config.storeName,
request.error
)
)
}
})
} catch (error) {
if (error instanceof IndexedDBError) {
throw error
}
throw new IndexedDBError(
error instanceof Error ? error.message : 'Unknown error',
'count',
this.config.storeName,
error
)
}
}
/**
* Count records in an index
*/
async countByIndex(indexName: string, range?: IDBKeyRange): Promise<number> {
try {
const store = await this.getStore('readonly')
const index = store.index(indexName)
return new Promise<number>((resolve, reject) => {
const request = range ? index.count(range) : index.count()
request.onsuccess = (): void => {
resolve(request.result)
}
request.onerror = (): void => {
reject(
new IndexedDBError(
`Failed to count records by index: ${request.error}`,
'countByIndex',
this.config.storeName,
request.error
)
)
}
})
} catch (error) {
if (error instanceof IndexedDBError) {
throw error
}
throw new IndexedDBError(
error instanceof Error ? error.message : 'Unknown error',
'countByIndex',
this.config.storeName,
error
)
}
}
/**
* Get all values from the store
*/
async getAll<T = unknown>(range?: IDBKeyRange, count?: number): Promise<T[]> {
try {
const store = await this.getStore('readonly')
return new Promise<T[]>((resolve, reject) => {
const request = range ? store.getAll(range, count) : store.getAll(undefined, count)
request.onsuccess = (): void => {
resolve((request.result as T[]) ?? [])
}
request.onerror = (): void => {
reject(
new IndexedDBError(
`Failed to get all values: ${request.error}`,
'getAll',
this.config.storeName,
request.error
)
)
}
})
} catch (error) {
if (error instanceof IndexedDBError) {
throw error
}
throw new IndexedDBError(
error instanceof Error ? error.message : 'Unknown error',
'getAll',
this.config.storeName,
error
)
}
}
}
/**
* Create a new IndexedDB helper instance
*/
export function createIndexedDBHelper(config: IndexedDBConfig): IndexedDBHelper {
return new IndexedDBHelper(config)
}
export { IndexedDBHelper }

View File

@@ -0,0 +1,132 @@
/**
* Helper for syncing payment notes
* Handles the special case of payment notes which require multiple subscriptions
*/
import type { Event, Filter } from 'nostr-tools'
import type { SimplePoolWithSub } from '@/types/nostr-tools-extended'
import { PLATFORM_SERVICE } from '../platformConfig'
import { getPrimaryRelaySync } from '../config'
import { createSubscription } from '@/types/nostr-tools-extended'
import { tryWithRelayRotation } from '../relayRotation'
import { extractTagsFromEvent } from '../nostrTagSystem'
import { cachePaymentNotes } from './syncCacheHelpers'
export function buildPaymentNoteFilters(userPubkey: string, lastSyncDate: number): Filter[] {
return [
{
kinds: [1],
authors: [userPubkey],
'#payment': [''],
'#service': [PLATFORM_SERVICE],
since: lastSyncDate,
limit: 1000,
},
{
kinds: [1],
'#recipient': [userPubkey],
'#payment': [''],
'#service': [PLATFORM_SERVICE],
since: lastSyncDate,
limit: 1000,
},
]
}
export async function createPaymentNoteSubscriptions(
pool: SimplePoolWithSub,
filters: Filter[]
): Promise<Array<ReturnType<typeof createSubscription>>> {
let subscriptions: Array<ReturnType<typeof createSubscription>> = []
try {
const result = await tryWithRelayRotation(
pool as unknown as import('nostr-tools').SimplePool,
async (relayUrl, poolWithSub) => {
await updateProgressForRelay(relayUrl)
return filters.map((filter) => createSubscription(poolWithSub, [relayUrl], [filter]))
},
5000
)
subscriptions = result.flat()
} catch {
const usedRelayUrl = getPrimaryRelaySync()
subscriptions = filters.map((filter) => createSubscription(pool, [usedRelayUrl], [filter]))
}
if (subscriptions.length === 0) {
throw new Error('Failed to create subscriptions')
}
return subscriptions
}
async function updateProgressForRelay(relayUrl: string): Promise<void> {
const { syncProgressManager } = await import('../syncProgressManager')
const currentProgress = syncProgressManager.getProgress()
if (currentProgress) {
syncProgressManager.setProgress({
...currentProgress,
currentStep: 0,
currentRelay: relayUrl,
})
}
}
export async function processPaymentNoteEvents(
subscriptions: Array<ReturnType<typeof createSubscription>>
): Promise<void> {
const events: Event[] = []
return new Promise<void>((resolve) => {
let finished = false
let eoseCount = 0
const done = async (): Promise<void> => {
if (finished) {
return
}
finished = true
subscriptions.forEach((sub) => sub.unsub())
await cachePaymentNotes(events)
resolve()
}
const handleEose = (): void => {
eoseCount++
if (eoseCount >= subscriptions.length) {
console.warn(`[Sync] EOSE for payment notes, received ${events.length} events`)
void done()
}
}
setupPaymentNoteSubscriptions(subscriptions, events, handleEose)
setTimeout((): void => {
if (!finished) {
console.warn(`[Sync] Timeout for payment notes, received ${events.length} events`)
}
void done()
}, 10000).unref?.()
})
}
function setupPaymentNoteSubscriptions(
subscriptions: Array<ReturnType<typeof createSubscription>>,
events: Event[],
onEose: () => void
): void {
subscriptions.forEach((sub) => {
sub.on('event', (event: Event): void => {
const tags = extractTagsFromEvent(event)
if (tags.type === 'payment' && tags.payment) {
console.warn('[Sync] Received payment note event:', event.id)
if (!events.some((e) => e.id === event.id)) {
events.push(event)
}
}
})
sub.on('eose', onEose)
})
}

View File

@@ -0,0 +1,36 @@
/**
* Helper for querying cached objects
* Centralizes the pattern of querying with fallback hash/ID lookup
*/
import { objectCache } from '../objectCache'
import { parseObjectId } from '../urlGenerator'
import type { ObjectType } from '../objectCache'
/**
* Get a cached object by ID with fallback to hash lookup
* Tries by hash first, then by full ID
*/
export async function getCachedObjectById<T>(
objectType: ObjectType,
id: string
): Promise<T | null> {
// Try to parse id as id format (<hash>_<index>_<version>) or use it as hash
const parsed = parseObjectId(id)
const hash = parsed.hash ?? id
// Read only from IndexedDB cache
const cached = await objectCache.get(objectType, hash)
if (cached) {
return cached as T
}
// Also try by ID if hash lookup failed
const cachedById = await objectCache.getById(objectType, id)
if (cachedById) {
return cachedById as T
}
// Not found in cache - return null (no network request)
return null
}

View File

@@ -0,0 +1,73 @@
/**
* Helper functions for caching synced events
* Centralizes the logic for caching purchases, sponsoring, review tips, and payment notes
*/
import type { Event } from 'nostr-tools'
import { extractTagsFromEvent } from '../nostrTagSystem'
import { extractPurchaseFromEvent, extractSponsoringFromEvent, extractReviewTipFromEvent } from '../metadataExtractor'
export async function cachePurchases(events: Event[]): Promise<void> {
const { writeService } = await import('../writeService')
const { parsePurchaseFromEvent } = await import('../nostrEventParsing')
for (const event of events) {
const extracted = await extractPurchaseFromEvent(event)
if (extracted) {
const purchase = await parsePurchaseFromEvent(event)
if (purchase) {
const purchaseTyped = purchase
if (purchaseTyped.hash) {
await writeService.writeObject('purchase', purchaseTyped.hash, event, purchaseTyped, 0, false, purchaseTyped.index ?? 0, false)
}
}
}
}
}
export async function cacheSponsoring(events: Event[]): Promise<void> {
const { writeService } = await import('../writeService')
const { parseSponsoringFromEvent } = await import('../nostrEventParsing')
for (const event of events) {
const extracted = await extractSponsoringFromEvent(event)
if (extracted) {
const sponsoring = await parseSponsoringFromEvent(event)
if (sponsoring) {
const sponsoringTyped = sponsoring
if (sponsoringTyped.hash) {
await writeService.writeObject('sponsoring', sponsoringTyped.hash, event, sponsoringTyped, 0, false, sponsoringTyped.index ?? 0, false)
}
}
}
}
}
export async function cacheReviewTips(events: Event[]): Promise<void> {
const { writeService } = await import('../writeService')
const { parseReviewTipFromEvent } = await import('../nostrEventParsing')
for (const event of events) {
const extracted = await extractReviewTipFromEvent(event)
if (extracted) {
const reviewTip = await parseReviewTipFromEvent(event)
if (reviewTip) {
const reviewTipTyped = reviewTip
if (reviewTipTyped.hash) {
await writeService.writeObject('review_tip', reviewTipTyped.hash, event, reviewTipTyped, 0, false, reviewTipTyped.index ?? 0, false)
}
}
}
}
}
export async function cachePaymentNotes(events: Event[]): Promise<void> {
const { writeService } = await import('../writeService')
for (const event of events) {
const tags = extractTagsFromEvent(event)
if (tags.type === 'payment' && tags.payment) {
await writeService.writeObject('payment_note', event.id, event, {
id: event.id,
type: 'payment_note',
eventId: event.id,
}, 0, false, 0, false)
}
}
}

View File

@@ -0,0 +1,57 @@
/**
* Helper functions for caching publications and series by hash
*/
import type { Event } from 'nostr-tools'
import { parseObjectId } from '../urlGenerator'
import { extractTagsFromEvent } from '../nostrTagSystem'
import { extractPublicationFromEvent, extractSeriesFromEvent } from '../metadataExtractor'
import { groupAndCacheEventsByHash } from './eventCacheHelper'
export async function cachePublicationsByHash(events: Event[]): Promise<void> {
await groupAndCacheEventsByHash(events, {
objectType: 'publication',
extractor: extractPublicationFromEvent,
getHash: (extracted: unknown): string | null => {
const id = (extracted as { id?: string })?.id
if (!id) {
return null
}
const parsed = parseObjectId(id)
return parsed.hash ?? id
},
getIndex: (extracted: unknown): number => {
return (extracted as { index?: number })?.index ?? 0
},
getVersion: (event: Event): number => {
return extractTagsFromEvent(event).version ?? 0
},
getHidden: (event: Event): boolean => {
return extractTagsFromEvent(event).hidden ?? false
},
})
}
export async function cacheSeriesByHash(events: Event[]): Promise<void> {
await groupAndCacheEventsByHash(events, {
objectType: 'series',
extractor: extractSeriesFromEvent,
getHash: (extracted: unknown): string | null => {
const id = (extracted as { id?: string })?.id
if (!id) {
return null
}
const parsed = parseObjectId(id)
return parsed.hash ?? id
},
getIndex: (extracted: unknown): number => {
return (extracted as { index?: number })?.index ?? 0
},
getVersion: (event: Event): number => {
return extractTagsFromEvent(event).version ?? 0
},
getHidden: (event: Event): boolean => {
return extractTagsFromEvent(event).hidden ?? false
},
})
}

View File

@@ -0,0 +1,54 @@
/**
* Helper for managing sync progress
*/
export interface SyncProgress {
currentStep: number
totalSteps: number
completed: boolean
currentRelay?: string // URL of the relay currently being used
}
export async function initializeSyncProgress(
onProgress?: (progress: SyncProgress) => void
): Promise<{ updateProgress: (step: number, completed?: boolean) => void }> {
const TOTAL_STEPS = 7
const { relaySessionManager } = await import('../relaySessionManager')
const { syncProgressManager } = await import('../syncProgressManager')
const activeRelays = await relaySessionManager.getActiveRelays()
const initialRelay = activeRelays[0] ?? 'Connecting...'
if (onProgress) {
onProgress({ currentStep: 0, totalSteps: TOTAL_STEPS, completed: false, currentRelay: initialRelay })
}
syncProgressManager.setProgress({ currentStep: 0, totalSteps: TOTAL_STEPS, completed: false, currentRelay: initialRelay })
const updateProgress = (step: number, completed: boolean = false): void => {
const currentRelay = syncProgressManager.getProgress()?.currentRelay ?? initialRelay
const progressUpdate = { currentStep: step, totalSteps: TOTAL_STEPS, completed, currentRelay }
if (onProgress) {
onProgress(progressUpdate)
}
syncProgressManager.setProgress(progressUpdate)
}
return { updateProgress }
}
export async function finalizeSync(currentTimestamp: number): Promise<void> {
const { setLastSyncDate } = await import('../syncStorage')
await setLastSyncDate(currentTimestamp)
const { syncProgressManager } = await import('../syncProgressManager')
const { configStorage } = await import('../configStorage')
const config = await configStorage.getConfig()
const currentRelay = syncProgressManager.getProgress()?.currentRelay
if (currentRelay) {
const relayConfig = config.relays.find((r) => r.url === currentRelay)
if (relayConfig) {
await configStorage.updateRelay(relayConfig.id, { lastSyncDate: Date.now() })
}
}
console.warn('[Sync] Synchronization completed successfully')
}

View File

@@ -0,0 +1,127 @@
/**
* Helper for creating sync subscriptions with relay rotation
* Centralizes the pattern of subscription creation, event handling, and timeout management
*/
import type { Event, Filter } from 'nostr-tools'
import type { SimplePool } from 'nostr-tools'
import type { SimplePoolWithSub } from '@/types/nostr-tools-extended'
import { tryWithRelayRotation } from '../relayRotation'
import { getPrimaryRelaySync } from '../config'
import { createSubscription } from '@/types/nostr-tools-extended'
export interface SyncSubscriptionConfig {
pool: SimplePoolWithSub
filters: Filter[]
onEvent?: (event: Event) => void | Promise<void>
onComplete?: (events: Event[]) => void | Promise<void>
timeout?: number
updateProgress?: (relayUrl: string) => void
eventFilter?: (event: Event) => boolean
}
export interface SyncSubscriptionResult {
subscription: ReturnType<typeof createSubscription>
relayUrl: string
events: Event[]
}
/**
* Create a sync subscription with relay rotation
* Handles relay rotation, progress updates, event collection, and timeout
*/
export async function createSyncSubscription(
config: SyncSubscriptionConfig
): Promise<SyncSubscriptionResult> {
const { pool, filters, onEvent, onComplete, timeout = 10000, updateProgress, eventFilter } = config
const events: Event[] = []
let sub: ReturnType<typeof createSubscription> | null = null
let usedRelayUrl = ''
// Try relays with rotation
try {
const result = await tryWithRelayRotation(
pool as unknown as SimplePool,
async (relayUrl, poolWithSub) => {
usedRelayUrl = relayUrl
// Update progress if callback provided
if (updateProgress) {
updateProgress(relayUrl)
} else {
// Default: notify progress manager
const { syncProgressManager } = await import('../syncProgressManager')
const currentProgress = syncProgressManager.getProgress()
if (currentProgress) {
syncProgressManager.setProgress({
...currentProgress,
currentStep: 0,
currentRelay: relayUrl,
})
}
}
return createSubscription(poolWithSub, [relayUrl], filters)
},
5000 // 5 second timeout per relay
)
sub = result
} catch {
// Fallback to primary relay if rotation fails
usedRelayUrl = getPrimaryRelaySync()
sub = createSubscription(pool, [usedRelayUrl], filters)
}
if (!sub) {
throw new Error('Failed to create subscription')
}
return new Promise<SyncSubscriptionResult>((resolve) => {
let finished = false
const done = async (): Promise<void> => {
if (finished) {
return
}
finished = true
sub?.unsub()
// Call onComplete callback if provided
if (onComplete) {
await onComplete(events)
}
resolve({
subscription: sub,
relayUrl: usedRelayUrl,
events,
})
}
// Handle events
sub.on('event', (event: Event): void => {
// Apply event filter if provided
if (eventFilter && !eventFilter(event)) {
return
}
events.push(event)
// Call onEvent callback if provided
if (onEvent) {
void onEvent(event)
}
})
// Handle end of stream
sub.on('eose', (): void => {
void done()
})
// Timeout fallback
setTimeout((): void => {
void done()
}, timeout).unref?.()
})
}

View File

@@ -2,6 +2,8 @@
  * IndexedDB storage for locale preference
  */
+import { createIndexedDBHelper, type IndexedDBHelper } from './helpers/indexedDBHelper'
 const DB_NAME = 'nostr_paywall_settings'
 const DB_VERSION = 2 // Incremented to add locale store
 const STORE_NAME = 'locale'
@@ -9,36 +11,14 @@ const STORE_NAME = 'locale'
 export type Locale = 'fr' | 'en'
 class LocaleStorageService {
-  private db: IDBDatabase | null = null
-  private async initDB(): Promise<IDBDatabase> {
-    if (this.db) {
-      return this.db
-    }
-    return new Promise((resolve, reject) => {
-      if (typeof window === 'undefined' || !window.indexedDB) {
-        reject(new Error('IndexedDB is not available'))
-        return
-      }
-      const request = indexedDB.open(DB_NAME, DB_VERSION)
-      request.onerror = () => {
-        reject(new Error(`Failed to open IndexedDB: ${request.error}`))
-      }
-      request.onsuccess = () => {
-        this.db = request.result
-        resolve(this.db)
-      }
-      request.onupgradeneeded = (event) => {
-        const db = (event.target as IDBOpenDBRequest).result
-        if (!db.objectStoreNames.contains(STORE_NAME)) {
-          db.createObjectStore(STORE_NAME, { keyPath: 'key' })
-        }
-      }
+  private readonly dbHelper: IndexedDBHelper
+  constructor() {
+    this.dbHelper = createIndexedDBHelper({
+      dbName: DB_NAME,
+      version: DB_VERSION,
+      storeName: STORE_NAME,
+      keyPath: 'key',
     })
   }
@@ -47,25 +27,12 @@ class LocaleStorageService {
    */
   async getLocale(): Promise<Locale | null> {
     try {
-      const db = await this.initDB()
-      const transaction = db.transaction([STORE_NAME], 'readonly')
-      const store = transaction.objectStore(STORE_NAME)
-      return new Promise((resolve, reject) => {
-        const request = store.get('locale')
-        request.onsuccess = () => {
-          const result = request.result as { key: string; value: Locale } | undefined
-          const locale = result?.value
-          if (locale === 'fr' || locale === 'en') {
-            resolve(locale)
-          } else {
-            resolve(null)
-          }
-        }
-        request.onerror = () => reject(request.error)
-      })
+      const result = await this.dbHelper.get<{ key: string; value: Locale }>('locale')
+      const locale = result?.value
+      if (locale === 'fr' || locale === 'en') {
+        return locale
+      }
+      return null
     } catch (error) {
       console.error('Error retrieving locale from IndexedDB:', error)
       return null
@@ -77,19 +44,7 @@ class LocaleStorageService {
    */
   async saveLocale(locale: Locale): Promise<void> {
     try {
-      const db = await this.initDB()
-      const transaction = db.transaction([STORE_NAME], 'readwrite')
-      const store = transaction.objectStore(STORE_NAME)
-      return new Promise((resolve, reject) => {
-        const request = store.put({ key: 'locale', value: locale })
-        request.onsuccess = () => {
-          resolve()
-        }
-        request.onerror = () => reject(request.error)
-      })
+      await this.dbHelper.put({ key: 'locale', value: locale })
     } catch (error) {
       console.error('Error saving locale to IndexedDB:', error)
       throw error

View File

@@ -14,7 +14,7 @@ import { getPrimaryRelay, getPrimaryRelaySync } from './config'
 import { buildTagFilter } from './nostrTagSystem'
 import { PLATFORM_SERVICE, MIN_EVENT_DATE } from './platformConfig'
 import type { PublishResult, RelayPublishStatus } from './publishResult'
-import { objectCache } from './objectCache'
+import { objectCache, type ObjectType } from './objectCache'
 class NostrService {
   private pool: SimplePool | null = null
@@ -484,7 +484,8 @@ class NostrService {
       // If not found in unpublished, search all objects
       for (const objectType of objectTypes) {
         try {
-          const db = await objectCache['initDB'](objectType)
+          // Use private method via type assertion for direct database access
+          const db = await (objectCache as unknown as { initDB: (objectType: ObjectType) => Promise<IDBDatabase> }).initDB(objectType)
           const transaction = db.transaction(['objects'], 'readonly')
           const store = transaction.objectStore('objects')
           const request = store.openCursor()

View File

@@ -238,51 +238,6 @@ class NotificationDetector {
     }
   }
-  /**
-   * Get notification title based on type
-   */
-  private _getNotificationTitle(type: NotificationType, _obj: CachedObject): string {
-    switch (type) {
-      case 'purchase':
-        return 'Nouvel achat'
-      case 'review':
-        return 'Nouvel avis'
-      case 'sponsoring':
-        return 'Nouveau sponsoring'
-      case 'review_tip':
-        return 'Nouveau remerciement'
-      case 'payment_note':
-        return 'Nouvelle note de paiement'
-      case 'published':
-        return 'Publication réussie'
-      default:
-        return 'Nouvelle notification'
-    }
-  }
-  /**
-   * Get notification message based on type
-   */
-  private _getNotificationMessage(type: NotificationType, _obj: CachedObject): string {
-    switch (type) {
-      case 'purchase':
-        return `Vous avez acheté un article`
-      case 'review':
-        return `Un nouvel avis a été publié`
-      case 'sponsoring':
-        return `Vous avez reçu un sponsoring`
-      case 'review_tip':
-        return `Vous avez reçu un remerciement`
-      case 'payment_note':
-        return `Une note de paiement a été ajoutée`
-      case 'published':
-        const cachedObj = _obj
-        const relays = Array.isArray(cachedObj.published) ? cachedObj.published : []
-        return `Votre contenu a été publié sur ${relays.length} relais`
-      default:
-        return 'Nouvelle notification'
-    }
-  }
 }
 export const notificationDetector = new NotificationDetector()

View File

@@ -2,6 +2,8 @@
 * Notification service - stores and manages notifications in IndexedDB
 */

+import { createIndexedDBHelper, type IndexedDBHelper } from './helpers/indexedDBHelper'
+
const DB_NAME = 'nostr_notifications'
const DB_VERSION = 1
const STORE_NAME = 'notifications'
@@ -33,58 +35,22 @@ export interface Notification {
}

class NotificationService {
-  private db: IDBDatabase | null = null
-  private initPromise: Promise<void> | null = null
-
-  private async init(): Promise<void> {
-    if (this.db) {
-      return
-    }
-
-    if (this.initPromise) {
-      return this.initPromise
-    }
-
-    this.initPromise = this.openDatabase()
-
-    try {
-      await this.initPromise
-    } catch (error) {
-      this.initPromise = null
-      throw error
-    }
-  }
-
-  private openDatabase(): Promise<void> {
-    return new Promise((resolve, reject) => {
-      if (typeof window === 'undefined' || !window.indexedDB) {
-        reject(new Error('IndexedDB is not available'))
-        return
-      }
-
-      const request = window.indexedDB.open(DB_NAME, DB_VERSION)
-
-      request.onerror = (): void => {
-        reject(new Error(`Failed to open IndexedDB: ${request.error}`))
-      }
-      request.onsuccess = (): void => {
-        this.db = request.result
-        resolve()
-      }
-      request.onupgradeneeded = (event: IDBVersionChangeEvent): void => {
-        const db = (event.target as IDBOpenDBRequest).result
-        if (!db.objectStoreNames.contains(STORE_NAME)) {
-          const store = db.createObjectStore(STORE_NAME, { keyPath: 'id' })
-          store.createIndex('type', 'type', { unique: false })
-          store.createIndex('objectId', 'objectId', { unique: false })
-          store.createIndex('eventId', 'eventId', { unique: false })
-          store.createIndex('timestamp', 'timestamp', { unique: false })
-          store.createIndex('read', 'read', { unique: false })
-          store.createIndex('objectType', 'objectType', { unique: false })
-        }
-      }
+  private readonly dbHelper: IndexedDBHelper
+
+  constructor() {
+    this.dbHelper = createIndexedDBHelper({
+      dbName: DB_NAME,
+      version: DB_VERSION,
+      storeName: STORE_NAME,
+      keyPath: 'id',
+      indexes: [
+        { name: 'type', keyPath: 'type', unique: false },
+        { name: 'objectId', keyPath: 'objectId', unique: false },
+        { name: 'eventId', keyPath: 'eventId', unique: false },
+        { name: 'timestamp', keyPath: 'timestamp', unique: false },
+        { name: 'read', keyPath: 'read', unique: false },
+        { name: 'objectType', keyPath: 'objectType', unique: false },
+      ],
    })
  }
@@ -120,25 +86,7 @@ class NotificationService {
   */
  async getNotificationByEventId(eventId: string): Promise<Notification | null> {
    try {
-      await this.init()
-
-      if (!this.db) {
-        return null
-      }
-
-      const transaction = this.db.transaction([STORE_NAME], 'readonly')
-      const store = transaction.objectStore(STORE_NAME)
-      const index = store.index('eventId')
-
-      return new Promise((resolve, reject) => {
-        const request = index.get(eventId)
-        request.onsuccess = (): void => {
-          resolve((request.result as Notification) ?? null)
-        }
-        request.onerror = (): void => {
-          reject(request.error)
-        }
-      })
+      return await this.dbHelper.getByIndex<Notification>('eventId', eventId)
    } catch (error) {
      console.error('[NotificationService] Error getting notification by event ID:', error)
      return null
@@ -150,19 +98,12 @@ class NotificationService {
   */
  async getAllNotifications(limit: number = 100): Promise<Notification[]> {
    try {
-      await this.init()
-
-      if (!this.db) {
-        return []
-      }
-
-      const transaction = this.db.transaction([STORE_NAME], 'readonly')
-      const store = transaction.objectStore(STORE_NAME)
+      const notifications: Notification[] = []
+      const store = await this.dbHelper.getStore('readonly')
      const index = store.index('timestamp')

-      return new Promise((resolve, reject) => {
+      return new Promise<Notification[]>((resolve, reject) => {
        const request = index.openCursor(null, 'prev') // Descending order (newest first)
-        const notifications: Notification[] = []

        request.onsuccess = (event: globalThis.Event): void => {
          const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result
@@ -177,8 +118,13 @@ class NotificationService {
            resolve(notifications)
          }
        }

        request.onerror = (): void => {
-          reject(request.error)
+          if (request.error) {
+            reject(request.error)
+          } else {
+            reject(new Error('Unknown error opening cursor'))
+          }
        }
      })
    } catch (error) {
@@ -192,33 +138,7 @@ class NotificationService {
   */
  async getUnreadCount(): Promise<number> {
    try {
-      await this.init()
-
-      if (!this.db) {
-        return 0
-      }
-
-      const transaction = this.db.transaction([STORE_NAME], 'readonly')
-      const store = transaction.objectStore(STORE_NAME)
-      const index = store.index('read')
-
-      return new Promise((resolve, reject) => {
-        const request = index.openCursor(IDBKeyRange.only(false))
-        let count = 0
-
-        request.onsuccess = (event: globalThis.Event): void => {
-          const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result
-          if (cursor) {
-            count++
-            cursor.continue()
-          } else {
-            resolve(count)
-          }
-        }
-
-        request.onerror = (): void => {
-          reject(request.error)
-        }
-      })
+      return await this.dbHelper.countByIndex('read', IDBKeyRange.only(false))
    } catch (error) {
      console.error('[NotificationService] Error getting unread count:', error)
      return 0
@@ -230,41 +150,17 @@ class NotificationService {
   */
  async markAsRead(notificationId: string): Promise<void> {
    try {
-      await this.init()
-
-      if (!this.db) {
-        throw new Error('Database not initialized')
-      }
-
-      const transaction = this.db.transaction([STORE_NAME], 'readwrite')
-      const store = transaction.objectStore(STORE_NAME)
-      const request = store.get(notificationId)
-
-      await new Promise<void>((resolve, reject) => {
-        request.onsuccess = (): void => {
-          const notification = request.result as Notification | undefined
-          if (!notification) {
-            reject(new Error('Notification not found'))
-            return
-          }
-
-          const updatedNotification: Notification = {
-            ...notification,
-            read: true,
-          }
-
-          const updateRequest = store.put(updatedNotification)
-          updateRequest.onsuccess = (): void => {
-            resolve()
-          }
-          updateRequest.onerror = (): void => {
-            reject(new Error(`Failed to update notification: ${updateRequest.error}`))
-          }
-        }
-
-        request.onerror = (): void => {
-          reject(request.error)
-        }
-      })
+      const notification = await this.dbHelper.get<Notification>(notificationId)
+      if (!notification) {
+        throw new Error('Notification not found')
+      }
+
+      const updatedNotification: Notification = {
+        ...notification,
+        read: true,
+      }
+
+      await this.dbHelper.put(updatedNotification)
    } catch (error) {
      console.error('[NotificationService] Error marking notification as read:', error)
      throw error
@@ -276,35 +172,17 @@ class NotificationService {
   */
  async markAllAsRead(): Promise<void> {
    try {
-      await this.init()
-
-      if (!this.db) {
-        throw new Error('Database not initialized')
-      }
-
      const notifications = await this.getAllNotifications(10000)
-      const transaction = this.db.transaction([STORE_NAME], 'readwrite')
-      const store = transaction.objectStore(STORE_NAME)
+      const unreadNotifications = notifications.filter((n) => !n.read)

      await Promise.all(
-        notifications
-          .filter((n) => !n.read)
-          .map(
-            (notification) =>
-              new Promise<void>((resolve, reject) => {
-                const updatedNotification: Notification = {
-                  ...notification,
-                  read: true,
-                }
-
-                const request = store.put(updatedNotification)
-                request.onsuccess = (): void => {
-                  resolve()
-                }
-                request.onerror = (): void => {
-                  reject(request.error)
-                }
-              })
-          )
+        unreadNotifications.map((notification) => {
+          const updatedNotification: Notification = {
+            ...notification,
+            read: true,
+          }
+          return this.dbHelper.put(updatedNotification)
+        })
      )
    } catch (error) {
      console.error('[NotificationService] Error marking all notifications as read:', error)
@@ -317,24 +195,7 @@ class NotificationService {
   */
  async deleteNotification(notificationId: string): Promise<void> {
    try {
-      await this.init()
-
-      if (!this.db) {
-        throw new Error('Database not initialized')
-      }
-
-      const transaction = this.db.transaction([STORE_NAME], 'readwrite')
-      const store = transaction.objectStore(STORE_NAME)
-
-      await new Promise<void>((resolve, reject) => {
-        const request = store.delete(notificationId)
-        request.onsuccess = (): void => {
-          resolve()
-        }
-        request.onerror = (): void => {
-          reject(new Error(`Failed to delete notification: ${request.error}`))
-        }
-      })
+      await this.dbHelper.delete(notificationId)
    } catch (error) {
      console.error('[NotificationService] Error deleting notification:', error)
      throw error
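
The `createIndexedDBHelper` / `IndexedDBHelper` module referenced above is not included in this excerpt. Judging only from how it is called in these diffs, its surface would presumably look roughly like the sketch below; every name and signature here is inferred from usage and should be treated as an assumption, not as the actual helper.

```typescript
// Hypothetical shape of lib/helpers/indexedDBHelper.ts, inferred from the call sites above.
export interface IndexedDBIndexConfig {
  name: string
  keyPath: string
  unique: boolean
}

export interface IndexedDBHelperConfig {
  dbName: string
  version: number
  storeName: string
  keyPath: string
  indexes: IndexedDBIndexConfig[]
  // Optional hook for schema migrations beyond simple store/index creation
  onUpgrade?: (db: IDBDatabase, event: IDBVersionChangeEvent) => void
}

export interface IndexedDBHelper {
  init(): Promise<IDBDatabase>
  get<T>(key: IDBValidKey): Promise<T | null>
  getByIndex<T>(indexName: string, value: IDBValidKey): Promise<T | null>
  getAllByIndex<T>(indexName: string, value: IDBValidKey): Promise<T[]>
  countByIndex(indexName: string, query: IDBValidKey | IDBKeyRange): Promise<number>
  add(value: unknown): Promise<void>
  put(value: unknown): Promise<void>
  delete(key: IDBValidKey): Promise<void>
  clear(): Promise<void>
  getStore(mode: IDBTransactionMode): Promise<IDBObjectStore>
  getStoreWrite(mode: IDBTransactionMode): Promise<IDBObjectStore>
}

export declare function createIndexedDBHelper(config: IndexedDBHelperConfig): IndexedDBHelper
```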

View File

@@ -7,6 +7,7 @@
import type { Event as NostrEvent } from 'nostr-tools'
import type { AuthorPresentationArticle } from '@/types/nostr'
import { buildObjectId } from './urlGenerator'
+import { createIndexedDBHelper, type IndexedDBHelper } from './helpers/indexedDBHelper'

export type ObjectType = 'author' | 'series' | 'publication' | 'review' | 'purchase' | 'sponsoring' | 'review_tip' | 'payment_note'
@@ -26,50 +27,33 @@ export interface CachedObject {
const DB_PREFIX = 'nostr_objects_'
const DB_VERSION = 3 // Incremented to add published field
+const STORE_NAME = 'objects'

class ObjectCacheService {
-  private dbs: Map<ObjectType, IDBDatabase> = new Map()
-
-  private async initDB(objectType: ObjectType): Promise<IDBDatabase> {
-    if (this.dbs.has(objectType)) {
-      return this.dbs.get(objectType)!
-    }
-
-    return new Promise((resolve, reject) => {
-      if (typeof window === 'undefined' || !window.indexedDB) {
-        reject(new Error('IndexedDB is not available'))
-        return
-      }
-
-      const dbName = `${DB_PREFIX}${objectType}`
-      const request = window.indexedDB.open(dbName, DB_VERSION)
-
-      request.onerror = (): void => {
-        reject(new Error(`Failed to open IndexedDB: ${request.error}`))
-      }
-      request.onsuccess = (): void => {
-        const db = request.result
-        this.dbs.set(objectType, db)
-        resolve(db)
-      }
-      request.onupgradeneeded = (event: IDBVersionChangeEvent): void => {
-        const db = (event.target as IDBOpenDBRequest).result
-        if (!db.objectStoreNames.contains('objects')) {
-          const store = db.createObjectStore('objects', { keyPath: 'id' })
-          store.createIndex('hash', 'hash', { unique: false })
-          store.createIndex('hashId', 'hashId', { unique: false }) // Legacy index
-          store.createIndex('version', 'version', { unique: false })
-          store.createIndex('index', 'index', { unique: false })
-          store.createIndex('hidden', 'hidden', { unique: false })
-          store.createIndex('cachedAt', 'cachedAt', { unique: false })
-          store.createIndex('published', 'published', { unique: false })
-        } else {
+  private readonly dbHelpers: Map<ObjectType, IndexedDBHelper> = new Map()
+
+  private getDBHelper(objectType: ObjectType): IndexedDBHelper {
+    if (!this.dbHelpers.has(objectType)) {
+      const helper = createIndexedDBHelper({
+        dbName: `${DB_PREFIX}${objectType}`,
+        version: DB_VERSION,
+        storeName: STORE_NAME,
+        keyPath: 'id',
+        indexes: [
+          { name: 'hash', keyPath: 'hash', unique: false },
+          { name: 'hashId', keyPath: 'hashId', unique: false }, // Legacy index
+          { name: 'version', keyPath: 'version', unique: false },
+          { name: 'index', keyPath: 'index', unique: false },
+          { name: 'hidden', keyPath: 'hidden', unique: false },
+          { name: 'cachedAt', keyPath: 'cachedAt', unique: false },
+          { name: 'published', keyPath: 'published', unique: false },
+        ],
+        onUpgrade: (_db: IDBDatabase, event: IDBVersionChangeEvent): void => {
          // Migration: add new indexes if they don't exist
-          const {transaction} = (event.target as IDBOpenDBRequest)
+          const target = event.target as IDBOpenDBRequest
+          const { transaction } = target
          if (transaction) {
-            const store = transaction.objectStore('objects')
+            const store = transaction.objectStore(STORE_NAME)
            if (!store.indexNames.contains('hash')) {
              store.createIndex('hash', 'hash', { unique: false })
            }
@@ -80,9 +64,20 @@ class ObjectCacheService {
            store.createIndex('published', 'published', { unique: false })
          }
        }
-      }
-      }
-    })
+        },
+      })
+      this.dbHelpers.set(objectType, helper)
+    }
+    return this.dbHelpers.get(objectType)!
+  }
+
+  /**
+   * Initialize database and return IDBDatabase instance
+   * Used for direct database access when needed
+   */
+  private async initDB(objectType: ObjectType): Promise<IDBDatabase> {
+    const helper = this.getDBHelper(objectType)
+    return await helper.init()
  }

  /**
@@ -90,20 +85,8 @@ class ObjectCacheService {
   */
  private async countObjectsWithHash(objectType: ObjectType, hash: string): Promise<number> {
    try {
-      const db = await this.initDB(objectType)
-      const transaction = db.transaction(['objects'], 'readonly')
-      const store = transaction.objectStore('objects')
-      const index = store.index('hash')
-
-      return new Promise((resolve, reject) => {
-        const request = index.count(IDBKeyRange.only(hash))
-        request.onsuccess = (): void => {
-          resolve(request.result)
-        }
-        request.onerror = (): void => {
-          reject(request.error)
-        }
-      })
+      const helper = this.getDBHelper(objectType)
+      return await helper.countByIndex('hash', IDBKeyRange.only(hash))
    } catch (countError) {
      console.error(`Error counting objects with hash ${hash}:`, countError)
      return 0
@@ -126,7 +109,7 @@ class ObjectCacheService {
    published: false | string[] = false
  ): Promise<void> {
    try {
-      const db = await this.initDB(objectType)
+      const helper = this.getDBHelper(objectType)

      // If index is not provided, calculate it by counting objects with the same hash
      let finalIndex = index
@@ -137,19 +120,8 @@ class ObjectCacheService {

      const id = buildObjectId(hash, finalIndex, version)

-      const transaction = db.transaction(['objects'], 'readwrite')
-      const store = transaction.objectStore('objects')
-
      // Check if object already exists to preserve published status if updating
-      const existing = await new Promise<CachedObject | null>((resolve, reject) => {
-        const getRequest = store.get(id)
-        getRequest.onsuccess = (): void => {
-          resolve((getRequest.result as CachedObject) ?? null)
-        }
-        getRequest.onerror = (): void => {
-          reject(getRequest.error)
-        }
-      }).catch(() => null)
+      const existing = await helper.get<CachedObject>(id).catch(() => null)

      // If updating and published is not provided, preserve existing published status
      const finalPublished = existing && published === false ? existing.published : published
@@ -168,15 +140,7 @@ class ObjectCacheService {
        published: finalPublished,
      }

-      await new Promise<void>((resolve, reject) => {
-        const request = store.put(cached)
-        request.onsuccess = (): void => {
-          resolve()
-        }
-        request.onerror = (): void => {
-          reject(request.error)
-        }
-      })
+      await helper.put(cached)
    } catch (cacheError) {
      console.error(`Error caching ${objectType} object:`, cacheError)
    }
@@ -191,19 +155,8 @@ class ObjectCacheService {
    published: false | string[]
  ): Promise<void> {
    try {
-      const db = await this.initDB(objectType)
-      const transaction = db.transaction(['objects'], 'readwrite')
-      const store = transaction.objectStore('objects')
-
-      const existing = await new Promise<CachedObject | null>((resolve, reject) => {
-        const request = store.get(id)
-        request.onsuccess = (): void => {
-          resolve((request.result as CachedObject) ?? null)
-        }
-        request.onerror = (): void => {
-          reject(request.error)
-        }
-      })
+      const helper = this.getDBHelper(objectType)
+      const existing = await helper.get<CachedObject>(id)

      if (!existing) {
        console.warn(`Object ${id} not found in cache, cannot update published status`)
@@ -216,15 +169,7 @@ class ObjectCacheService {
        published,
      }

-      await new Promise<void>((resolve, reject) => {
-        const request = store.put(updated)
-        request.onsuccess = (): void => {
-          resolve()
-        }
-        request.onerror = (): void => {
-          reject(request.error)
-        }
-      })
+      await helper.put(updated)

      // Notify about published status change (false -> array of relays)
      if (oldPublished === false && Array.isArray(published) && published.length > 0) {
@@ -329,24 +274,12 @@ class ObjectCacheService {
   */
  async getById(objectType: ObjectType, id: string): Promise<unknown | null> {
    try {
-      const db = await this.initDB(objectType)
-      const transaction = db.transaction(['objects'], 'readonly')
-      const store = transaction.objectStore('objects')
-
-      return new Promise<unknown | null>((resolve, reject) => {
-        const request = store.get(id)
-        request.onsuccess = (): void => {
-          const obj = request.result as CachedObject | undefined
-          if (obj && !obj.hidden) {
-            resolve(obj.parsed)
-          } else {
-            resolve(null)
-          }
-        }
-        request.onerror = (): void => {
-          reject(request.error)
-        }
-      })
+      const helper = this.getDBHelper(objectType)
+      const obj = await helper.get<CachedObject>(id)
+      if (obj && !obj.hidden) {
+        return obj.parsed
+      }
+      return null
    } catch (retrieveByIdError) {
      console.error(`Error retrieving ${objectType} object by ID from cache:`, retrieveByIdError)
      return null
@@ -358,24 +291,12 @@ class ObjectCacheService {
   */
  async getEventById(objectType: ObjectType, id: string): Promise<NostrEvent | null> {
    try {
-      const db = await this.initDB(objectType)
-      const transaction = db.transaction(['objects'], 'readonly')
-      const store = transaction.objectStore('objects')
-
-      return new Promise<NostrEvent | null>((resolve, reject) => {
-        const request = store.get(id)
-        request.onsuccess = (): void => {
-          const obj = request.result as CachedObject | undefined
-          if (obj && !obj.hidden) {
-            resolve(obj.event)
-          } else {
-            resolve(null)
-          }
-        }
-        request.onerror = (): void => {
-          reject(request.error)
-        }
-      })
+      const helper = this.getDBHelper(objectType)
+      const obj = await helper.get<CachedObject>(id)
+      if (obj && !obj.hidden) {
+        return obj.event
+      }
+      return null
    } catch (retrieveByIdError) {
      console.error(`Error retrieving ${objectType} event by ID from cache:`, retrieveByIdError)
      return null
@@ -465,18 +386,8 @@ class ObjectCacheService {
   */
  async clear(objectType: ObjectType): Promise<void> {
    try {
-      const db = await this.initDB(objectType)
-      const transaction = db.transaction(['objects'], 'readwrite')
-      const store = transaction.objectStore('objects')
-
-      await new Promise<void>((resolve, reject) => {
-        const request = store.clear()
-        request.onsuccess = (): void => {
-          resolve()
-        }
-        request.onerror = (): void => {
-          reject(request.error)
-        }
-      })
+      const helper = this.getDBHelper(objectType)
+      await helper.clear()
    } catch (clearError) {
      console.error(`Error clearing ${objectType} cache:`, clearError)
    }

View File

@@ -173,7 +173,6 @@ export class PlatformTrackingService {
    try {
      const { websocketService } = await import('./websocketService')
      const { getPrimaryRelaySync } = await import('./config')
-      const { getTrackingKind } = await import('./platformTrackingEvents')
      const { swClient } = await import('./swClient')

      const filters = [

View File

@@ -2,6 +2,8 @@
 * Publication log service - stores publication attempts and results in IndexedDB
 */

+import { createIndexedDBHelper, type IndexedDBHelper } from './helpers/indexedDBHelper'
+
const DB_NAME = 'nostr_publish_log'
const DB_VERSION = 1
const STORE_NAME = 'publications'
@@ -18,48 +20,23 @@ interface PublicationLogEntry {
}

class PublishLogService {
-  private db: IDBDatabase | null = null
-  private initPromise: Promise<void> | null = null
-
-  private async init(): Promise<void> {
-    if (this.db) {
-      return
-    }
-
-    if (this.initPromise) {
-      return this.initPromise
-    }
-
-    this.initPromise = this.openDatabase()
-
-    try {
-      await this.initPromise
-    } catch (error) {
-      this.initPromise = null
-      throw error
-    }
-  }
-
-  private openDatabase(): Promise<void> {
-    return new Promise((resolve, reject) => {
-      if (typeof window === 'undefined' || !window.indexedDB) {
-        reject(new Error('IndexedDB is not available'))
-        return
-      }
-
-      const request = indexedDB.open(DB_NAME, DB_VERSION)
-
-      request.onerror = (): void => {
-        reject(new Error(`Failed to open IndexedDB: ${request.error}`))
-      }
-      request.onsuccess = (): void => {
-        this.db = request.result
-        resolve()
-      }
-      request.onupgradeneeded = (event: IDBVersionChangeEvent): void => {
-        const db = (event.target as IDBOpenDBRequest).result
+  private readonly dbHelper: IndexedDBHelper
+
+  constructor() {
+    this.dbHelper = createIndexedDBHelper({
+      dbName: DB_NAME,
+      version: DB_VERSION,
+      storeName: STORE_NAME,
+      keyPath: 'id',
+      indexes: [
+        { name: 'eventId', keyPath: 'eventId', unique: false },
+        { name: 'relayUrl', keyPath: 'relayUrl', unique: false },
+        { name: 'timestamp', keyPath: 'timestamp', unique: false },
+        { name: 'success', keyPath: 'success', unique: false },
+      ],
+      onUpgrade: (db: IDBDatabase): void => {
+        // Note: autoIncrement is handled in the store creation, but IndexedDBHelper doesn't support it directly
+        // We need to handle this in the upgrade handler
        if (!db.objectStoreNames.contains(STORE_NAME)) {
          const store = db.createObjectStore(STORE_NAME, { keyPath: 'id', autoIncrement: true })
          store.createIndex('eventId', 'eventId', { unique: false })
@@ -67,7 +44,7 @@ class PublishLogService {
          store.createIndex('timestamp', 'timestamp', { unique: false })
          store.createIndex('success', 'success', { unique: false })
        }
-      }
+      },
    })
  }
@@ -102,12 +79,6 @@ class PublishLogService {
    objectId?: string
  ): Promise<void> {
    try {
-      await this.init()
-
-      if (!this.db) {
-        throw new Error('Database not initialized')
-      }
-
      const entry: PublicationLogEntry = {
        id: `${eventId}_${relayUrl}_${Date.now()}`, // Unique ID
        eventId,
@@ -119,18 +90,7 @@ class PublishLogService {
        ...(objectId !== undefined ? { objectId } : {}),
      }

-      const transaction = this.db.transaction([STORE_NAME], 'readwrite')
-      const store = transaction.objectStore(STORE_NAME)
-
-      await new Promise<void>((resolve, reject) => {
-        const request = store.add(entry)
-        request.onsuccess = (): void => {
-          resolve()
-        }
-        request.onerror = (): void => {
-          reject(new Error(`Failed to log publication: ${request.error}`))
-        }
-      })
+      await this.dbHelper.add(entry)
    } catch (logError) {
      console.error('[PublishLog] Error logging publication:', logError)
    }
@@ -141,25 +101,7 @@ class PublishLogService {
   */
  async getLogsForEvent(eventId: string): Promise<PublicationLogEntry[]> {
    try {
-      await this.init()
-
-      if (!this.db) {
-        return []
-      }
-
-      const transaction = this.db.transaction([STORE_NAME], 'readonly')
-      const store = transaction.objectStore(STORE_NAME)
-      const index = store.index('eventId')
-
-      return new Promise((resolve, reject) => {
-        const request = index.getAll(eventId)
-        request.onsuccess = (): void => {
-          resolve((request.result as PublicationLogEntry[]) ?? [])
-        }
-        request.onerror = (): void => {
-          reject(request.error)
-        }
-      })
+      return await this.dbHelper.getAllByIndex<PublicationLogEntry>('eventId', eventId)
    } catch (error) {
      console.error('[PublishLog] Error getting logs for event:', error)
      return []
@@ -171,19 +113,12 @@ class PublishLogService {
   */
  async getLogsForRelay(relayUrl: string, limit: number = 100): Promise<PublicationLogEntry[]> {
    try {
-      await this.init()
-
-      if (!this.db) {
-        return []
-      }
-
-      const transaction = this.db.transaction([STORE_NAME], 'readonly')
-      const store = transaction.objectStore(STORE_NAME)
+      const entries: PublicationLogEntry[] = []
+      const store = await this.dbHelper.getStore('readonly')
      const index = store.index('relayUrl')

-      return new Promise((resolve, reject) => {
+      return new Promise<PublicationLogEntry[]>((resolve, reject) => {
        const request = index.openCursor(IDBKeyRange.only(relayUrl))
-        const entries: PublicationLogEntry[] = []

        request.onsuccess = (event: globalThis.Event): void => {
          const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result
@@ -198,8 +133,13 @@ class PublishLogService {
            resolve(entries.sort((a, b) => b.timestamp - a.timestamp))
          }
        }

        request.onerror = (): void => {
-          reject(request.error)
+          if (request.error) {
+            reject(request.error)
+          } else {
+            reject(new Error('Unknown error opening cursor'))
+          }
        }
      })
    } catch (error) {
@@ -213,19 +153,12 @@ class PublishLogService {
   */
  async getAllLogs(limit: number = 1000): Promise<PublicationLogEntry[]> {
    try {
-      await this.init()
-
-      if (!this.db) {
-        return []
-      }
-
-      const transaction = this.db.transaction([STORE_NAME], 'readonly')
-      const store = transaction.objectStore(STORE_NAME)
+      const entries: PublicationLogEntry[] = []
+      const store = await this.dbHelper.getStore('readonly')
      const index = store.index('timestamp')

-      return new Promise((resolve, reject) => {
+      return new Promise<PublicationLogEntry[]>((resolve, reject) => {
        const request = index.openCursor(null, 'prev') // Descending order
-        const entries: PublicationLogEntry[] = []

        request.onsuccess = (event: globalThis.Event): void => {
          const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result
@@ -240,8 +173,13 @@ class PublishLogService {
            resolve(entries)
          }
        }

        request.onerror = (): void => {
-          reject(request.error)
+          if (request.error) {
+            reject(request.error)
+          } else {
+            reject(new Error('Unknown error opening cursor'))
+          }
        }
      })
    } catch (error) {
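
After this refactor, `getLogsForRelay` and `getAllLogs` (like `getAllNotifications` above) still hand-roll the same descending, limit-bounded cursor walk. If the helper were later extended, a small generic routine along these lines could absorb that remaining duplication; this is a sketch of a possible addition, not something present in the commit:

```typescript
// Hypothetical helper: collect up to `limit` records from an IndexedDB index cursor.
export function collectFromCursor<T>(
  index: IDBIndex,
  limit: number,
  query: IDBKeyRange | null = null,
  direction: IDBCursorDirection = 'prev'
): Promise<T[]> {
  return new Promise<T[]>((resolve, reject) => {
    const results: T[] = []
    const request = index.openCursor(query, direction)
    request.onsuccess = (event: globalThis.Event): void => {
      const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result
      if (cursor && results.length < limit) {
        results.push(cursor.value as T)
        cursor.continue()
      } else {
        resolve(results)
      }
    }
    request.onerror = (): void => {
      reject(request.error ?? new Error('Unknown error opening cursor'))
    }
  })
}
```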

View File

@@ -1,25 +1,9 @@
import type { Purchase } from '@/types/nostr'
+import { getCachedObjectById } from './helpers/queryHelpers'
import { objectCache } from './objectCache'
-import { parseObjectId } from './urlGenerator'

export async function getPurchaseById(purchaseId: string, _timeoutMs: number = 5000): Promise<Purchase | null> {
-  const parsed = parseObjectId(purchaseId)
-  const hash = parsed.hash ?? purchaseId
-
-  // Read only from IndexedDB cache
-  const cached = await objectCache.get('purchase', hash)
-  if (cached) {
-    return cached as Purchase
-  }
-
-  // Also try by ID if hash lookup failed
-  const cachedById = await objectCache.getById('purchase', purchaseId)
-  if (cachedById) {
-    return cachedById as Purchase
-  }
-
-  // Not found in cache - return null (no network request)
-  return null
+  return await getCachedObjectById<Purchase>('purchase', purchaseId)
}

export async function getPurchasesForArticle(articleId: string, _timeoutMs: number = 5000): Promise<Purchase[]> {
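
The `getCachedObjectById` helper that this file (and the review-tip, series and sponsoring files below) now delegates to is not shown in the diff. Reconstructed from the code it replaces, it would presumably look close to the following; the import paths are assumptions:

```typescript
// Hypothetical lib/helpers/queryHelpers.ts, reconstructed from the removed code above.
import { objectCache, type ObjectType } from '../objectCache'
import { parseObjectId } from '../urlGenerator'

export async function getCachedObjectById<T>(objectType: ObjectType, objectId: string): Promise<T | null> {
  const parsed = parseObjectId(objectId)
  const hash = parsed.hash ?? objectId

  // Read only from IndexedDB cache
  const cached = await objectCache.get(objectType, hash)
  if (cached) {
    return cached as T
  }

  // Also try by ID if hash lookup failed
  const cachedById = await objectCache.getById(objectType, objectId)
  if (cachedById) {
    return cachedById as T
  }

  // Not found in cache - return null (no network request)
  return null
}
```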

View File

@@ -1,25 +1,9 @@
import type { ReviewTip } from '@/types/nostr'
import { objectCache } from './objectCache'
-import { parseObjectId } from './urlGenerator'
+import { getCachedObjectById } from './helpers/queryHelpers'

export async function getReviewTipById(reviewTipId: string, _timeoutMs: number = 5000): Promise<ReviewTip | null> {
-  const parsed = parseObjectId(reviewTipId)
-  const hash = parsed.hash ?? reviewTipId
-
-  // Read only from IndexedDB cache
-  const cached = await objectCache.get('review_tip', hash)
-  if (cached) {
-    return cached as ReviewTip
-  }
-
-  // Also try by ID if hash lookup failed
-  const cachedById = await objectCache.getById('review_tip', reviewTipId)
-  if (cachedById) {
-    return cachedById as ReviewTip
-  }
-
-  // Not found in cache - return null (no network request)
-  return null
+  return await getCachedObjectById<ReviewTip>('review_tip', reviewTipId)
}

export async function getReviewTipsForArticle(articleId: string, _timeoutMs: number = 5000): Promise<ReviewTip[]> {

View File

@@ -1,6 +1,6 @@
import type { Series } from '@/types/nostr'
import { objectCache } from './objectCache'
-import { parseObjectId } from './urlGenerator'
+import { getCachedObjectById } from './helpers/queryHelpers'

export async function getSeriesByAuthor(authorPubkey: string, _timeoutMs: number = 5000): Promise<Series[]> {
  // Read only from IndexedDB cache
@@ -22,22 +22,5 @@ export async function getSeriesByAuthor(authorPubkey: string, _timeoutMs: number
}

export async function getSeriesById(seriesId: string, _timeoutMs: number = 2000): Promise<Series | null> {
-  // Try to parse seriesId as id format (<hash>_<index>_<version>) or use it as hash
-  const parsed = parseObjectId(seriesId)
-  const hash = parsed.hash ?? seriesId
-
-  // Read only from IndexedDB cache
-  const cached = await objectCache.get('series', hash)
-  if (cached) {
-    return cached as Series
-  }
-
-  // Also try by ID if hash lookup failed
-  const cachedById = await objectCache.getById('series', seriesId)
-  if (cachedById) {
-    return cachedById as Series
-  }
-
-  // Not found in cache - return null (no network request)
-  return null
+  return await getCachedObjectById<Series>('series', seriesId)
}

View File

@@ -3,6 +3,8 @@
 * Stores settings and last sync date for background synchronization
 */

+import { createIndexedDBHelper, type IndexedDBHelper } from './helpers/indexedDBHelper'
+
const DB_NAME = 'nostr_paywall_settings'
const DB_VERSION = 1
const STORE_NAME = 'settings'
@@ -19,37 +21,15 @@ export interface SettingsData {
}

class SettingsCacheService {
-  private db: IDBDatabase | null = null
-
-  private async initDB(): Promise<IDBDatabase> {
-    if (this.db) {
-      return this.db
-    }
-
-    return new Promise((resolve, reject) => {
-      if (typeof window === 'undefined' || !window.indexedDB) {
-        reject(new Error('IndexedDB is not available'))
-        return
-      }
-
-      const request = indexedDB.open(DB_NAME, DB_VERSION)
-
-      request.onerror = () => {
-        reject(new Error(`Failed to open IndexedDB: ${request.error}`))
-      }
-      request.onsuccess = () => {
-        this.db = request.result
-        resolve(this.db)
-      }
-      request.onupgradeneeded = (event) => {
-        const db = (event.target as IDBOpenDBRequest).result
-        if (!db.objectStoreNames.contains(STORE_NAME)) {
-          const store = db.createObjectStore(STORE_NAME, { keyPath: 'key' })
-          store.createIndex('updatedAt', 'updatedAt', { unique: false })
-        }
-      }
+  private readonly dbHelper: IndexedDBHelper
+
+  constructor() {
+    this.dbHelper = createIndexedDBHelper({
+      dbName: DB_NAME,
+      version: DB_VERSION,
+      storeName: STORE_NAME,
+      keyPath: 'key',
+      indexes: [{ name: 'updatedAt', keyPath: 'updatedAt', unique: false }],
    })
  }
@@ -58,20 +38,8 @@ class SettingsCacheService {
   */
  async getSettings(): Promise<SettingsData | null> {
    try {
-      const db = await this.initDB()
-      const transaction = db.transaction([STORE_NAME], 'readonly')
-      const store = transaction.objectStore(STORE_NAME)
-
-      return new Promise((resolve, reject) => {
-        const request = store.get('settings')
-        request.onsuccess = () => {
-          const result = request.result as { key: string; value: SettingsData } | undefined
-          resolve(result?.value ?? null)
-        }
-        request.onerror = () => reject(request.error)
-      })
+      const result = await this.dbHelper.get<{ key: string; value: SettingsData }>('settings')
+      return result?.value ?? null
    } catch (error) {
      console.error('Error retrieving settings from cache:', error)
      return null
@@ -83,21 +51,12 @@ class SettingsCacheService {
   */
  async saveSettings(settings: SettingsData): Promise<void> {
    try {
-      const db = await this.initDB()
-      const transaction = db.transaction([STORE_NAME], 'readwrite')
-      const store = transaction.objectStore(STORE_NAME)
-
-      await new Promise<void>((resolve, reject) => {
-        const request = store.put({
-          key: 'settings',
-          value: {
-            ...settings,
-            updatedAt: Date.now(),
-          },
-        })
-        request.onsuccess = () => resolve()
-        request.onerror = () => reject(request.error)
+      await this.dbHelper.put({
+        key: 'settings',
+        value: {
+          ...settings,
+          updatedAt: Date.now(),
+        },
      })
    } catch (error) {
      console.error('Error saving settings to cache:', error)

View File

@@ -1,25 +1,9 @@
import type { Sponsoring } from '@/types/nostr'
import { objectCache } from './objectCache'
-import { parseObjectId } from './urlGenerator'
+import { getCachedObjectById } from './helpers/queryHelpers'

export async function getSponsoringById(sponsoringId: string, _timeoutMs: number = 5000): Promise<Sponsoring | null> {
-  const parsed = parseObjectId(sponsoringId)
-  const hash = parsed.hash ?? sponsoringId
-
-  // Read only from IndexedDB cache
-  const cached = await objectCache.get('sponsoring', hash)
-  if (cached) {
-    return cached as Sponsoring
-  }
-
-  // Also try by ID if hash lookup failed
-  const cachedById = await objectCache.getById('sponsoring', sponsoringId)
-  if (cachedById) {
-    return cachedById as Sponsoring
-  }
-
-  // Not found in cache - return null (no network request)
-  return null
+  return await getCachedObjectById<Sponsoring>('sponsoring', sponsoringId)
}

export async function getSponsoringByAuthor(authorPubkey: string, _timeoutMs: number = 5000): Promise<Sponsoring[]> {

View File

@@ -1,4 +1,5 @@
import { decryptPayload, encryptPayload, type EncryptedPayload } from './cryptoHelpers'
+import { createIndexedDBHelper, type IndexedDBHelper } from '../helpers/indexedDBHelper'

const DB_NAME = 'nostr_paywall'
const DB_VERSION = 1
@@ -16,58 +17,18 @@ interface DBData {
 * More robust than localStorage and supports larger data sizes
 */
export class IndexedDBStorage {
-  private db: IDBDatabase | null = null
-  private initPromise: Promise<void> | null = null
-
-  /**
-   * Initialize the IndexedDB database
-   */
-  private async init(): Promise<void> {
-    if (this.db) {
-      return
-    }
-
-    if (this.initPromise) {
-      return this.initPromise
-    }
-
-    this.initPromise = this.openDatabase()
-
-    try {
-      await this.initPromise
-    } catch (error) {
-      this.initPromise = null
-      throw error
-    }
-  }
-
-  private openDatabase(): Promise<void> {
-    return new Promise((resolve, reject) => {
-      if (typeof window === 'undefined' || !window.indexedDB) {
-        reject(new Error('IndexedDB is not available. This application requires IndexedDB support.'))
-        return
-      }
-
-      const request = window.indexedDB.open(DB_NAME, DB_VERSION)
-
-      request.onerror = (): void => {
-        reject(new Error(`Failed to open IndexedDB: ${request.error}`))
-      }
-      request.onsuccess = (): void => {
-        this.db = request.result
-        resolve()
-      }
-      request.onupgradeneeded = (event): void => {
-        const db = (event.target as IDBOpenDBRequest).result
-        if (!db.objectStoreNames.contains(STORE_NAME)) {
-          const store = db.createObjectStore(STORE_NAME, { keyPath: 'id' })
-          store.createIndex('createdAt', 'createdAt', { unique: false })
-          store.createIndex('expiresAt', 'expiresAt', { unique: false })
-        }
-      }
+  private readonly dbHelper: IndexedDBHelper
+
+  constructor() {
+    this.dbHelper = createIndexedDBHelper({
+      dbName: DB_NAME,
+      version: DB_VERSION,
+      storeName: STORE_NAME,
+      keyPath: 'id',
+      indexes: [
+        { name: 'createdAt', keyPath: 'createdAt', unique: false },
+        { name: 'expiresAt', keyPath: 'expiresAt', unique: false },
+      ],
    })
  }
@@ -76,12 +37,6 @@ export class IndexedDBStorage {
   */
  async set(key: string, value: unknown, secret: string, expiresIn?: number): Promise<void> {
    try {
-      await this.init()
-
-      if (!this.db) {
-        throw new Error('Database not initialized')
-      }
-
      const encrypted = await encryptPayload(secret, value)
      const now = Date.now()
      const data: DBData = {
@@ -91,23 +46,7 @@ export class IndexedDBStorage {
        ...(expiresIn ? { expiresAt: now + expiresIn } : {}),
      }

-      const {db} = this
-      if (!db) {
-        throw new Error('Database not initialized')
-      }
-
-      return new Promise((resolve, reject) => {
-        const transaction = db.transaction([STORE_NAME], 'readwrite')
-        const store = transaction.objectStore(STORE_NAME)
-        const request = store.put(data)
-
-        request.onsuccess = (): void => {
-          resolve()
-        }
-        request.onerror = (): void => {
-          reject(new Error(`Failed to store data: ${request.error}`))
-        }
-      })
+      await this.dbHelper.put(data)
    } catch (error) {
      console.error('Error storing in IndexedDB:', error)
      throw error
@@ -119,85 +58,35 @@ export class IndexedDBStorage {
   */
  async get<T = unknown>(key: string, secret: string): Promise<T | null> {
    try {
-      await this.init()
-      if (!this.db) {
-        throw new Error('Database not initialized')
-      }
-      return this.readValue<T>(key, secret)
+      const result = await this.dbHelper.get<DBData>(key)
+      if (!result) {
+        return null
+      }
+
+      if (result.expiresAt && result.expiresAt < Date.now()) {
+        await this.delete(key).catch(console.error)
+        return null
+      }
+
+      try {
+        return await decryptPayload<T>(secret, result.data)
+      } catch (decryptError) {
+        console.error('Error decrypting from IndexedDB:', decryptError)
+        return null
+      }
    } catch (error) {
      console.error('Error getting from IndexedDB:', error)
      return null
    }
  }

-  private readValue<T>(key: string, secret: string): Promise<T | null> {
-    const {db} = this
-    if (!db) {
-      throw new Error('Database not initialized')
-    }
-
-    return new Promise((resolve, reject) => {
-      const transaction = db.transaction([STORE_NAME], 'readonly')
-      const store = transaction.objectStore(STORE_NAME)
-      const request = store.get(key)
-
-      request.onsuccess = (): void => {
-        const result = request.result as DBData | undefined
-        if (!result) {
-          resolve(null)
-          return
-        }
-
-        if (result.expiresAt && result.expiresAt < Date.now()) {
-          this.delete(key).catch(console.error)
-          resolve(null)
-          return
-        }
-
-        decryptPayload<T>(secret, result.data)
-          .then((value) => {
-            resolve(value)
-          })
-          .catch((error) => {
-            console.error('Error decrypting from IndexedDB:', error)
-            resolve(null)
-          })
-      }
-
-      request.onerror = (): void => {
-        reject(new Error(`Failed to get data: ${request.error}`))
-      }
-    })
-  }
-
  /**
   * Delete data from IndexedDB
   */
  async delete(key: string): Promise<void> {
    try {
-      await this.init()
-      if (!this.db) {
-        throw new Error('Database not initialized')
-      }
-
-      const {db} = this
-      return new Promise((resolve, reject) => {
-        const transaction = db.transaction([STORE_NAME], 'readwrite')
-        const store = transaction.objectStore(STORE_NAME)
-        const request = store.delete(key)
-
-        request.onsuccess = (): void => {
-          resolve()
-        }
-        request.onerror = (): void => {
-          reject(new Error(`Failed to delete data: ${request.error}`))
-        }
-      })
+      await this.dbHelper.delete(key)
    } catch (error) {
      console.error('Error deleting from IndexedDB:', error)
      throw error
@@ -209,22 +98,14 @@ export class IndexedDBStorage {
   */
  async clearExpired(): Promise<void> {
    try {
-      await this.init()
-
-      if (!this.db) {
-        throw new Error('Database not initialized')
-      }
-
-      const {db} = this
-      return new Promise((resolve, reject) => {
-        const transaction = db.transaction([STORE_NAME], 'readwrite')
-        const store = transaction.objectStore(STORE_NAME)
-        const index = store.index('expiresAt')
+      const store = await this.dbHelper.getStoreWrite('readwrite')
+      const index = store.index('expiresAt')
+
+      return new Promise<void>((resolve, reject) => {
        const request = index.openCursor(IDBKeyRange.upperBound(Date.now()))

        request.onsuccess = (event): void => {
-          const cursor = (event.target as IDBRequest).result
+          const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result
          if (cursor) {
            cursor.delete()
            cursor.continue()
@@ -234,7 +115,11 @@ export class IndexedDBStorage {
          }
        }

        request.onerror = (): void => {
-          reject(new Error(`Failed to clear expired: ${request.error}`))
+          if (request.error) {
+            reject(new Error(`Failed to clear expired: ${request.error}`))
+          } else {
+            reject(new Error('Unknown error clearing expired entries'))
+          }
        }
      })
    } catch (error) {
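
The public `IndexedDBStorage` API is unchanged by this refactor, so callers keep the same shape; an illustrative call sequence (key, value and secret are made up):

```typescript
// Illustrative usage of the encrypted IndexedDB storage; values are made up.
const storage = new IndexedDBStorage()

async function demo(): Promise<void> {
  // Store a value encrypted with the secret, expiring after one hour
  await storage.set('session', { token: 'abc123' }, 'my-secret', 60 * 60 * 1000)

  // Read it back; returns null if missing, expired, or decryption fails
  const session = await storage.get<{ token: string }>('session', 'my-secret')
  console.log(session?.token)

  // Housekeeping: drop entries whose expiresAt is in the past
  await storage.clearExpired()
}
```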

View File

@@ -0,0 +1,205 @@
/**
* Individual sync steps for user content
* Each function handles fetching and caching a specific type of content
*/
import type { SimplePoolWithSub } from '@/types/nostr-tools-extended'
import type { Event } from 'nostr-tools'
import { fetchAuthorPresentationFromPool } from '../articlePublisherHelpersPresentation'
import { buildTagFilter } from '../nostrTagSystemFilter'
import { PLATFORM_SERVICE } from '../platformConfig'
import { extractTagsFromEvent } from '../nostrTagSystem'
import { createSyncSubscription } from '../helpers/syncSubscriptionHelper'
import { cachePublicationsByHash, cacheSeriesByHash } from '../helpers/syncContentCacheHelpers'
import { cachePurchases, cacheSponsoring, cacheReviewTips } from '../helpers/syncCacheHelpers'
import { buildPaymentNoteFilters, createPaymentNoteSubscriptions, processPaymentNoteEvents } from '../helpers/paymentNoteSyncHelper'
export async function syncAuthorProfile(
poolWithSub: SimplePoolWithSub,
userPubkey: string
): Promise<void> {
console.warn('[Sync] Step 1/7: Fetching author profile...')
await fetchAuthorPresentationFromPool(poolWithSub, userPubkey)
console.warn('[Sync] Step 1/7: Author profile fetch completed')
}
export async function syncSeries(
poolWithSub: SimplePoolWithSub,
userPubkey: string
): Promise<void> {
console.warn('[Sync] Step 2/7: Fetching series...')
const { getLastSyncDate } = await import('../syncStorage')
const lastSyncDate = await getLastSyncDate()
const filters = [
{
...buildTagFilter({
type: 'series',
authorPubkey: userPubkey,
service: PLATFORM_SERVICE,
}),
since: lastSyncDate,
limit: 1000,
},
]
const result = await createSyncSubscription({
pool: poolWithSub,
filters,
eventFilter: (event: Event): boolean => {
const tags = extractTagsFromEvent(event)
return tags.type === 'series' && !tags.hidden
},
timeout: 10000,
onEvent: (event: Event): void => {
console.warn('[Sync] Received series event:', event.id)
},
onComplete: (events: Event[]): void => {
console.warn(`[Sync] EOSE for series, received ${events.length} events`)
},
})
await cacheSeriesByHash(result.events)
}
export async function syncPublications(
poolWithSub: SimplePoolWithSub,
userPubkey: string
): Promise<void> {
console.warn('[Sync] Step 3/7: Fetching publications...')
const { getLastSyncDate } = await import('../syncStorage')
const lastSyncDate = await getLastSyncDate()
const filters = [
{
...buildTagFilter({
type: 'publication',
authorPubkey: userPubkey,
service: PLATFORM_SERVICE,
}),
since: lastSyncDate,
limit: 1000,
},
]
const result = await createSyncSubscription({
pool: poolWithSub,
filters,
eventFilter: (event: Event): boolean => {
const tags = extractTagsFromEvent(event)
return tags.type === 'publication' && !tags.hidden
},
timeout: 10000,
})
await cachePublicationsByHash(result.events)
}
export async function syncPurchases(
poolWithSub: SimplePoolWithSub,
userPubkey: string
): Promise<void> {
console.warn('[Sync] Step 4/7: Fetching purchases...')
const { getLastSyncDate } = await import('../syncStorage')
const lastSyncDate = await getLastSyncDate()
const filters = [
{
kinds: [9735],
authors: [userPubkey],
'#kind_type': ['purchase'],
since: lastSyncDate,
limit: 1000,
},
]
await createSyncSubscription({
pool: poolWithSub,
filters,
timeout: 10000,
onEvent: (event: Event): void => {
console.warn('[Sync] Received purchase event:', event.id)
},
onComplete: async (events: Event[]): Promise<void> => {
console.warn(`[Sync] EOSE for purchases, received ${events.length} events`)
await cachePurchases(events)
},
})
}
export async function syncSponsoring(
poolWithSub: SimplePoolWithSub,
userPubkey: string
): Promise<void> {
console.warn('[Sync] Step 5/7: Fetching sponsoring...')
const { getLastSyncDate } = await import('../syncStorage')
const lastSyncDate = await getLastSyncDate()
const filters = [
{
kinds: [9735],
'#p': [userPubkey],
'#kind_type': ['sponsoring'],
since: lastSyncDate,
limit: 1000,
},
]
await createSyncSubscription({
pool: poolWithSub,
filters,
timeout: 10000,
onEvent: (event: Event): void => {
console.warn('[Sync] Received sponsoring event:', event.id)
},
onComplete: async (events: Event[]): Promise<void> => {
console.warn(`[Sync] EOSE for sponsoring, received ${events.length} events`)
await cacheSponsoring(events)
},
})
}
export async function syncReviewTips(
poolWithSub: SimplePoolWithSub,
userPubkey: string
): Promise<void> {
console.warn('[Sync] Step 6/7: Fetching review tips...')
const { getLastSyncDate } = await import('../syncStorage')
const lastSyncDate = await getLastSyncDate()
const filters = [
{
kinds: [9735],
'#p': [userPubkey],
'#kind_type': ['review_tip'],
since: lastSyncDate,
limit: 1000,
},
]
await createSyncSubscription({
pool: poolWithSub,
filters,
timeout: 10000,
onEvent: (event: Event): void => {
console.warn('[Sync] Received review tip event:', event.id)
},
onComplete: async (events: Event[]): Promise<void> => {
console.warn(`[Sync] EOSE for review tips, received ${events.length} events`)
await cacheReviewTips(events)
},
})
}
export async function syncPaymentNotes(
poolWithSub: SimplePoolWithSub,
userPubkey: string
): Promise<void> {
console.warn('[Sync] Step 7/7: Fetching payment notes...')
const { getLastSyncDate } = await import('../syncStorage')
const lastSyncDate = await getLastSyncDate()
const filters = buildPaymentNoteFilters(userPubkey, lastSyncDate)
const subscriptions = await createPaymentNoteSubscriptions(poolWithSub, filters)
await processPaymentNoteEvents(subscriptions)
}
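
`createSyncSubscription` lives in `../helpers/syncSubscriptionHelper`, which is outside this excerpt. Based on the call sites above and on the relay-rotation code it replaces (see the removed `userContentSync.ts` functions below), its contract is presumably roughly the following sketch; the option and result shapes are inferred, not confirmed:

```typescript
// Hypothetical contract of createSyncSubscription, inferred from the call sites above.
import type { Event } from 'nostr-tools'
import type { SimplePoolWithSub } from '@/types/nostr-tools-extended'

export interface SyncSubscriptionOptions {
  pool: SimplePoolWithSub
  filters: Array<Record<string, unknown>>
  timeout: number
  eventFilter?: (event: Event) => boolean
  onEvent?: (event: Event) => void
  onComplete?: (events: Event[]) => void | Promise<void>
}

export interface SyncSubscriptionResult {
  events: Event[]
}

// Expected behaviour: pick a relay (rotating through the configured list, with a
// primary-relay fallback), subscribe with `filters`, keep events that pass
// `eventFilter`, and resolve on EOSE or after `timeout` milliseconds.
export declare function createSyncSubscription(options: SyncSubscriptionOptions): Promise<SyncSubscriptionResult>
```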

View File

@@ -3,722 +3,21 @@
 * Called after key import to ensure all user content is cached locally
 */
-import type { Event } from 'nostr-tools'
import { nostrService } from './nostr'
-import { fetchAuthorPresentationFromPool } from './articlePublisherHelpersPresentation'
-import { extractTagsFromEvent } from './nostrTagSystem'
-import { extractSeriesFromEvent, extractPublicationFromEvent, extractPurchaseFromEvent, extractSponsoringFromEvent, extractReviewTipFromEvent } from './metadataExtractor'
-import { getLatestVersion } from './versionManager'
-import { buildTagFilter } from './nostrTagSystemFilter'
-import { getPrimaryRelaySync } from './config'
-import { tryWithRelayRotation } from './relayRotation'
-import { PLATFORM_SERVICE } from './platformConfig'
-import { parseObjectId } from './urlGenerator'
import type { SimplePoolWithSub } from '@/types/nostr-tools-extended'
+import type { SyncProgress } from './helpers/syncProgressHelper'
+import { initializeSyncProgress, finalizeSync } from './helpers/syncProgressHelper'
+import {
+  syncAuthorProfile,
+  syncSeries,
+  syncPublications,
+  syncPurchases,
+  syncSponsoring,
+  syncReviewTips,
+  syncPaymentNotes,
+} from './sync/userContentSyncSteps'
+
+export type { SyncProgress }

-/**
- * Fetch all publications by an author and cache them
- */
-async function fetchAndCachePublications(
-  pool: SimplePoolWithSub,
-  authorPubkey: string
-): Promise<void> {
-  const { getLastSyncDate } = await import('./syncStorage')
-  const lastSyncDate = await getLastSyncDate()
-
-  const filters = [
-    {
...buildTagFilter({
type: 'publication',
authorPubkey,
service: PLATFORM_SERVICE,
}),
since: lastSyncDate,
limit: 1000, // Get all publications
},
]
// Try relays with rotation (no retry on failure, just move to next)
const { createSubscription } = require('@/types/nostr-tools-extended')
let sub: ReturnType<typeof createSubscription> | null = null
let usedRelayUrl = ''
try {
const result = await tryWithRelayRotation(
pool as unknown as import('nostr-tools').SimplePool,
async (relayUrl, poolWithSub) => {
usedRelayUrl = relayUrl
// Notify progress manager that we're starting with a new relay (reset step counter)
const { syncProgressManager } = await import('./syncProgressManager')
const currentProgress = syncProgressManager.getProgress()
if (currentProgress) {
syncProgressManager.setProgress({
...currentProgress,
currentStep: 0,
currentRelay: relayUrl,
})
}
return createSubscription(poolWithSub, [relayUrl], filters)
},
5000 // 5 second timeout per relay
)
sub = result
} catch {
// Fallback to primary relay if rotation fails
usedRelayUrl = getPrimaryRelaySync()
sub = createSubscription(pool, [usedRelayUrl], filters)
}
if (!sub) {
throw new Error('Failed to create subscription')
}
const events: Event[] = []
return new Promise<void>((resolve) => {
let finished = false
const done = async (): Promise<void> => {
if (finished) {
return
}
finished = true
sub.unsub()
// Group events by hash ID and cache the latest version of each
const eventsByHashId = new Map<string, Event[]>()
for (const event of events) {
const tags = extractTagsFromEvent(event)
if (tags.id) {
// Extract hash from id (can be <hash>_<index>_<version> or just hash)
const parsed = parseObjectId(tags.id)
const hash = parsed.hash ?? tags.id
if (!eventsByHashId.has(hash)) {
eventsByHashId.set(hash, [])
}
eventsByHashId.get(hash)!.push(event)
}
}
// Cache each publication
for (const [_hash, hashEvents] of eventsByHashId.entries()) {
const latestEvent = getLatestVersion(hashEvents)
if (latestEvent) {
const extracted = await extractPublicationFromEvent(latestEvent)
if (extracted) {
const publicationParsed = parseObjectId(extracted.id)
const extractedHash = publicationParsed.hash ?? extracted.id
const extractedIndex = publicationParsed.index ?? 0
const tags = extractTagsFromEvent(latestEvent)
const { writeService } = await import('./writeService')
await writeService.writeObject(
'publication',
extractedHash,
latestEvent,
extracted,
tags.version ?? 0,
tags.hidden ?? false,
extractedIndex,
false
)
}
}
}
resolve()
}
sub.on('event', (event: Event): void => {
const tags = extractTagsFromEvent(event)
if (tags.type === 'publication' && !tags.hidden) {
events.push(event)
}
})
sub.on('eose', (): void => {
void done()
})
setTimeout((): void => {
void done()
}, 10000).unref?.()
})
}
/**
* Fetch all series by an author and cache them
*/
async function fetchAndCacheSeries(
pool: SimplePoolWithSub,
authorPubkey: string
): Promise<void> {
const { getLastSyncDate } = await import('./syncStorage')
const lastSyncDate = await getLastSyncDate()
// Fetch all events for series to cache them properly
const filters = [
{
...buildTagFilter({
type: 'series',
authorPubkey,
service: PLATFORM_SERVICE,
}),
since: lastSyncDate,
limit: 1000, // Get all series events
},
]
// Try relays with rotation (no retry on failure, just move to next)
const { createSubscription } = require('@/types/nostr-tools-extended')
let sub: ReturnType<typeof createSubscription> | null = null
let usedRelayUrl = ''
try {
const result = await tryWithRelayRotation(
pool as unknown as import('nostr-tools').SimplePool,
async (relayUrl, poolWithSub) => {
usedRelayUrl = relayUrl
// Notify progress manager that we're starting with a new relay (reset step counter)
const { syncProgressManager } = await import('./syncProgressManager')
const currentProgress = syncProgressManager.getProgress()
if (currentProgress) {
syncProgressManager.setProgress({
...currentProgress,
currentStep: 0,
currentRelay: relayUrl,
})
}
return createSubscription(poolWithSub, [relayUrl], filters)
},
5000 // 5 second timeout per relay
)
sub = result
} catch {
// Fallback to primary relay if rotation fails
usedRelayUrl = getPrimaryRelaySync()
sub = createSubscription(pool, [usedRelayUrl], filters)
}
if (!sub) {
throw new Error('Failed to create subscription')
}
const events: Event[] = []
return new Promise<void>((resolve) => {
let finished = false
const done = async (): Promise<void> => {
if (finished) {
return
}
finished = true
sub.unsub()
// Group events by hash ID and cache the latest version of each
const eventsByHashId = new Map<string, Event[]>()
for (const event of events) {
const tags = extractTagsFromEvent(event)
if (tags.id) {
// Extract hash from id (can be <hash>_<index>_<version> or just hash)
const seriesParsed = parseObjectId(tags.id)
const hash = seriesParsed.hash ?? tags.id
if (!eventsByHashId.has(hash)) {
eventsByHashId.set(hash, [])
}
eventsByHashId.get(hash)!.push(event)
}
}
// Cache each series
for (const [_hash, hashEvents] of eventsByHashId.entries()) {
const latestEvent = getLatestVersion(hashEvents)
if (latestEvent) {
const extracted = await extractSeriesFromEvent(latestEvent)
if (extracted) {
const publicationParsed = parseObjectId(extracted.id)
const extractedHash = publicationParsed.hash ?? extracted.id
const extractedIndex = publicationParsed.index ?? 0
const tags = extractTagsFromEvent(latestEvent)
const { writeService } = await import('./writeService')
await writeService.writeObject(
'series',
extractedHash,
latestEvent,
extracted,
tags.version ?? 0,
tags.hidden ?? false,
extractedIndex,
false
)
}
}
}
resolve()
}
sub.on('event', (event: Event): void => {
const tags = extractTagsFromEvent(event)
if (tags.type === 'series' && !tags.hidden) {
console.warn('[Sync] Received series event:', event.id)
events.push(event)
}
})
sub.on('eose', (): void => {
console.warn(`[Sync] EOSE for series, received ${events.length} events`)
void done()
})
setTimeout((): void => {
if (!finished) {
console.warn(`[Sync] Timeout for series, received ${events.length} events`)
}
void done()
}, 10000).unref?.()
})
}
/**
* Fetch all purchases by a payer and cache them
*/
async function fetchAndCachePurchases(
pool: SimplePoolWithSub,
payerPubkey: string
): Promise<void> {
const { getLastSyncDate } = await import('./syncStorage')
const lastSyncDate = await getLastSyncDate()
const filters = [
{
kinds: [9735], // Zap receipt
authors: [payerPubkey],
'#kind_type': ['purchase'],
since: lastSyncDate,
limit: 1000,
},
]
// Try relays with rotation (no retry on failure, just move to next)
const { createSubscription } = require('@/types/nostr-tools-extended')
let sub: ReturnType<typeof createSubscription> | null = null
let usedRelayUrl = ''
try {
const result = await tryWithRelayRotation(
pool as unknown as import('nostr-tools').SimplePool,
async (relayUrl, poolWithSub) => {
usedRelayUrl = relayUrl
// Notify progress manager that we're starting with a new relay (reset step counter)
const { syncProgressManager } = await import('./syncProgressManager')
const currentProgress = syncProgressManager.getProgress()
if (currentProgress) {
syncProgressManager.setProgress({
...currentProgress,
currentStep: 0,
currentRelay: relayUrl,
})
}
return createSubscription(poolWithSub, [relayUrl], filters)
},
5000 // 5 second timeout per relay
)
sub = result
} catch {
// Fallback to primary relay if rotation fails
usedRelayUrl = getPrimaryRelaySync()
sub = createSubscription(pool, [usedRelayUrl], filters)
}
if (!sub) {
throw new Error('Failed to create subscription')
}
const events: Event[] = []
return new Promise<void>((resolve) => {
let finished = false
const done = async (): Promise<void> => {
if (finished) {
return
}
finished = true
sub.unsub()
for (const event of events) {
const extracted = await extractPurchaseFromEvent(event)
if (extracted) {
// Parse to Purchase object for cache
const { parsePurchaseFromEvent } = await import('./nostrEventParsing')
const purchase = await parsePurchaseFromEvent(event)
if (purchase) {
const { writeService } = await import('./writeService')
await writeService.writeObject('purchase', purchase.hash, event, purchase, 0, false, purchase.index, false)
}
}
}
resolve()
}
sub.on('event', (event: Event): void => {
console.warn('[Sync] Received purchase event:', event.id)
events.push(event)
})
sub.on('eose', (): void => {
console.warn(`[Sync] EOSE for purchases, received ${events.length} events`)
void done()
})
setTimeout((): void => {
if (!finished) {
console.warn(`[Sync] Timeout for purchases, received ${events.length} events`)
}
void done()
}, 10000).unref?.()
})
}
/**
* Fetch all sponsoring by an author and cache them
*/
async function fetchAndCacheSponsoring(
pool: SimplePoolWithSub,
authorPubkey: string
): Promise<void> {
const { getLastSyncDate } = await import('./syncStorage')
const lastSyncDate = await getLastSyncDate()
const filters = [
{
kinds: [9735], // Zap receipt
'#p': [authorPubkey],
'#kind_type': ['sponsoring'],
since: lastSyncDate,
limit: 1000,
},
]
// Try relays with rotation (no retry on failure, just move to next)
const { createSubscription } = require('@/types/nostr-tools-extended')
let sub: ReturnType<typeof createSubscription> | null = null
let usedRelayUrl = ''
try {
const result = await tryWithRelayRotation(
pool as unknown as import('nostr-tools').SimplePool,
async (relayUrl, poolWithSub) => {
usedRelayUrl = relayUrl
// Notify progress manager that we're starting with a new relay (reset step counter)
const { syncProgressManager } = await import('./syncProgressManager')
const currentProgress = syncProgressManager.getProgress()
if (currentProgress) {
syncProgressManager.setProgress({
...currentProgress,
currentStep: 0,
currentRelay: relayUrl,
})
}
return createSubscription(poolWithSub, [relayUrl], filters)
},
5000 // 5 second timeout per relay
)
sub = result
} catch {
// Fallback to primary relay if rotation fails
usedRelayUrl = getPrimaryRelaySync()
sub = createSubscription(pool, [usedRelayUrl], filters)
}
if (!sub) {
throw new Error('Failed to create subscription')
}
const events: Event[] = []
return new Promise<void>((resolve) => {
let finished = false
const done = async (): Promise<void> => {
if (finished) {
return
}
finished = true
sub.unsub()
for (const event of events) {
const extracted = await extractSponsoringFromEvent(event)
if (extracted) {
// Parse to Sponsoring object for cache
const { parseSponsoringFromEvent } = await import('./nostrEventParsing')
const sponsoring = await parseSponsoringFromEvent(event)
if (sponsoring) {
const { writeService } = await import('./writeService')
await writeService.writeObject('sponsoring', sponsoring.hash, event, sponsoring, 0, false, sponsoring.index, false)
}
}
}
resolve()
}
sub.on('event', (event: Event): void => {
console.warn('[Sync] Received sponsoring event:', event.id)
events.push(event)
})
sub.on('eose', (): void => {
console.warn(`[Sync] EOSE for sponsoring, received ${events.length} events`)
void done()
})
setTimeout((): void => {
if (!finished) {
console.warn(`[Sync] Timeout for sponsoring, received ${events.length} events`)
}
void done()
}, 10000).unref?.()
})
}
/**
* Fetch all review tips by an author and cache them
*/
async function fetchAndCacheReviewTips(
pool: SimplePoolWithSub,
authorPubkey: string
): Promise<void> {
const { getLastSyncDate } = await import('./syncStorage')
const lastSyncDate = await getLastSyncDate()
const filters = [
{
kinds: [9735], // Zap receipt
'#p': [authorPubkey],
'#kind_type': ['review_tip'],
since: lastSyncDate,
limit: 1000,
},
]
// Try relays with rotation (no retry on failure, just move to next)
const { createSubscription } = require('@/types/nostr-tools-extended')
let sub: ReturnType<typeof createSubscription> | null = null
let usedRelayUrl = ''
try {
const result = await tryWithRelayRotation(
pool as unknown as import('nostr-tools').SimplePool,
async (relayUrl, poolWithSub) => {
usedRelayUrl = relayUrl
// Notify progress manager that we're starting with a new relay (reset step counter)
const { syncProgressManager } = await import('./syncProgressManager')
const currentProgress = syncProgressManager.getProgress()
if (currentProgress) {
syncProgressManager.setProgress({
...currentProgress,
currentStep: 0,
currentRelay: relayUrl,
})
}
return createSubscription(poolWithSub, [relayUrl], filters)
},
5000 // 5 second timeout per relay
)
sub = result
} catch {
// Fallback to primary relay if rotation fails
usedRelayUrl = getPrimaryRelaySync()
sub = createSubscription(pool, [usedRelayUrl], filters)
}
if (!sub) {
throw new Error('Failed to create subscription')
}
const events: Event[] = []
return new Promise<void>((resolve) => {
let finished = false
const done = async (): Promise<void> => {
if (finished) {
return
}
finished = true
sub.unsub()
for (const event of events) {
const extracted = await extractReviewTipFromEvent(event)
if (extracted) {
// Parse to ReviewTip object for cache
const { parseReviewTipFromEvent } = await import('./nostrEventParsing')
const reviewTip = await parseReviewTipFromEvent(event)
if (reviewTip) {
const { writeService } = await import('./writeService')
await writeService.writeObject('review_tip', reviewTip.hash, event, reviewTip, 0, false, reviewTip.index, false)
}
}
}
resolve()
}
sub.on('event', (event: Event): void => {
console.warn('[Sync] Received review tip event:', event.id)
events.push(event)
})
sub.on('eose', (): void => {
console.warn(`[Sync] EOSE for review tips, received ${events.length} events`)
void done()
})
setTimeout((): void => {
if (!finished) {
console.warn(`[Sync] Timeout for review tips, received ${events.length} events`)
}
void done()
}, 10000).unref?.()
})
}
export interface SyncProgress {
currentStep: number
totalSteps: number
completed: boolean
currentRelay?: string // URL of the relay currently being used
}
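For reference, a minimal sketch (not part of the diff) of how a caller might consume these SyncProgress updates through the onProgress callback used further down; the logging target is an assumption, any UI handler works the same way.

```typescript
// Hypothetical consumer of SyncProgress (illustrative only).
const onProgress = (progress: SyncProgress): void => {
  const percent = Math.round((progress.currentStep / progress.totalSteps) * 100)
  const relay = progress.currentRelay ?? 'unknown relay'
  console.warn(`[Sync] ${percent}% via ${relay}${progress.completed ? ' (done)' : ''}`)
}
```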
/**
* Fetch all payment notes (kind 1 with type='payment') by a user and cache them
*/
async function fetchAndCachePaymentNotes(
pool: SimplePoolWithSub,
userPubkey: string
): Promise<void> {
const { getLastSyncDate } = await import('./syncStorage')
const lastSyncDate = await getLastSyncDate()
// Payment notes are kind 1 with type='payment'
// They can be: as payer (authors) or as recipient (#recipient tag)
const filters = [
{
kinds: [1],
authors: [userPubkey],
'#payment': [''],
'#service': [PLATFORM_SERVICE],
since: lastSyncDate,
limit: 1000,
},
{
kinds: [1],
'#recipient': [userPubkey],
'#payment': [''],
'#service': [PLATFORM_SERVICE],
since: lastSyncDate,
limit: 1000,
},
]
// Try relays with rotation (no retry on failure, just move to next)
const { createSubscription } = require('@/types/nostr-tools-extended')
let subscriptions: Array<ReturnType<typeof createSubscription>> = []
let usedRelayUrl = ''
try {
const result = await tryWithRelayRotation(
pool as unknown as import('nostr-tools').SimplePool,
async (relayUrl, poolWithSub) => {
usedRelayUrl = relayUrl
// Notify progress manager that we're starting with a new relay (reset step counter)
const { syncProgressManager } = await import('./syncProgressManager')
const currentProgress = syncProgressManager.getProgress()
if (currentProgress) {
syncProgressManager.setProgress({
...currentProgress,
currentStep: 0,
currentRelay: relayUrl,
})
}
// Create subscriptions for both filters (payer and recipient)
return filters.map((filter) => createSubscription(poolWithSub, [relayUrl], [filter]))
},
5000 // 5 second timeout per relay
)
subscriptions = result.flat()
} catch {
// Fallback to primary relay if rotation fails
usedRelayUrl = getPrimaryRelaySync()
subscriptions = filters.map((filter) => createSubscription(pool, [usedRelayUrl], [filter]))
}
if (subscriptions.length === 0) {
throw new Error('Failed to create subscriptions')
}
const events: Event[] = []
return new Promise<void>((resolve) => {
let finished = false
let eoseCount = 0
const done = async (): Promise<void> => {
if (finished) {
return
}
finished = true
subscriptions.forEach((sub) => sub.unsub())
for (const event of events) {
const tags = extractTagsFromEvent(event)
if (tags.type === 'payment' && tags.payment) {
// Cache the payment note event
// Use event.id as hash since payment notes don't have a separate hash system
const { writeService } = await import('./writeService')
await writeService.writeObject('payment_note', event.id, event, {
id: event.id,
type: 'payment_note',
eventId: event.id,
}, 0, false, 0, false)
}
}
resolve()
}
subscriptions.forEach((sub) => {
sub.on('event', (event: Event): void => {
const tags = extractTagsFromEvent(event)
if (tags.type === 'payment' && tags.payment) {
console.warn('[Sync] Received payment note event:', event.id)
// Deduplicate events (same event might match both filters)
if (!events.some((e) => e.id === event.id)) {
events.push(event)
}
}
})
sub.on('eose', (): void => {
eoseCount++
if (eoseCount >= subscriptions.length) {
console.warn(`[Sync] EOSE for payment notes, received ${events.length} events`)
void done()
}
})
})
setTimeout((): void => {
if (!finished) {
console.warn(`[Sync] Timeout for payment notes, received ${events.length} events`)
}
void done()
}, 10000).unref?.()
})
}
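The six fetchAndCache* helpers above repeat the same boilerplate: build filters with since/limit, try relays via tryWithRelayRotation, fall back to the primary relay, then collect events until EOSE or a 10-second timeout. A minimal sketch of one way that boilerplate could be shared follows; subscribeWithRotation is a hypothetical helper, not part of this diff, and it deliberately omits the syncProgressManager updates and per-type event filtering that the functions above perform.

```typescript
// Hypothetical shared helper (not in the current codebase): one subscription
// with relay rotation and a primary-relay fallback, resolving on EOSE or timeout.
async function subscribeWithRotation(
  pool: SimplePoolWithSub,
  filters: object[],
  timeoutMs = 10000
): Promise<Event[]> {
  const { createSubscription } = require('@/types/nostr-tools-extended')
  let sub: ReturnType<typeof createSubscription> | null = null
  try {
    sub = await tryWithRelayRotation(
      pool as unknown as import('nostr-tools').SimplePool,
      async (relayUrl, poolWithSub) => createSubscription(poolWithSub, [relayUrl], filters),
      5000 // 5 second timeout per relay
    )
  } catch {
    // Fallback to primary relay if rotation fails
    sub = createSubscription(pool, [getPrimaryRelaySync()], filters)
  }
  if (!sub) {
    throw new Error('Failed to create subscription')
  }
  const events: Event[] = []
  return new Promise<Event[]>((resolve) => {
    let finished = false
    const done = (): void => {
      if (finished) {
        return
      }
      finished = true
      sub.unsub()
      resolve(events)
    }
    sub.on('event', (event: Event): void => {
      events.push(event)
    })
    sub.on('eose', done)
    setTimeout(done, timeoutMs).unref?.()
  })
}
```

Each fetchAndCache* function would then reduce to building its filters, awaiting the collected events, and writing the parsed objects through writeService.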
/**
 * Synchronize all user content to IndexedDB cache
@@ -739,96 +38,45 @@ export async function syncUserContentToCache(
    }
    const poolWithSub = pool as unknown as SimplePoolWithSub
-    // Get current timestamp for last sync date
-    const { setLastSyncDate, getCurrentTimestamp } = await import('./syncStorage')
+    const { getCurrentTimestamp } = await import('./syncStorage')
    const currentTimestamp = getCurrentTimestamp()
-    const TOTAL_STEPS = 7
-    // Report initial progress
-    const { relaySessionManager } = await import('./relaySessionManager')
-    const { syncProgressManager } = await import('./syncProgressManager')
-    const activeRelays = await relaySessionManager.getActiveRelays()
-    const initialRelay = activeRelays[0] ?? 'Connecting...'
-    if (onProgress) {
-      onProgress({ currentStep: 0, totalSteps: TOTAL_STEPS, completed: false, currentRelay: initialRelay })
-    }
-    syncProgressManager.setProgress({ currentStep: 0, totalSteps: TOTAL_STEPS, completed: false, currentRelay: initialRelay })
-    let currentStep = 0
-    // Helper function to update progress with current relay
-    const updateProgress = (step: number, completed: boolean = false): void => {
-      const currentRelay = syncProgressManager.getProgress()?.currentRelay ?? initialRelay
-      const progressUpdate = { currentStep: step, totalSteps: TOTAL_STEPS, completed, currentRelay }
-      if (onProgress) {
-        onProgress(progressUpdate)
-      }
-      syncProgressManager.setProgress(progressUpdate)
-    }
-    // Fetch and cache author profile (already caches itself)
-    console.warn('[Sync] Step 1/7: Fetching author profile...')
-    await fetchAuthorPresentationFromPool(poolWithSub, userPubkey)
-    console.warn('[Sync] Step 1/7: Author profile fetch completed')
-    currentStep++
-    updateProgress(currentStep)
-    // Fetch and cache all series
-    console.warn('[Sync] Step 2/7: Fetching series...')
-    await fetchAndCacheSeries(poolWithSub, userPubkey)
-    currentStep++
-    updateProgress(currentStep)
-    // Fetch and cache all publications
-    console.warn('[Sync] Step 3/7: Fetching publications...')
-    await fetchAndCachePublications(poolWithSub, userPubkey)
-    currentStep++
-    updateProgress(currentStep)
-    // Fetch and cache all purchases (as payer)
-    console.warn('[Sync] Step 4/7: Fetching purchases...')
-    await fetchAndCachePurchases(poolWithSub, userPubkey)
-    currentStep++
-    updateProgress(currentStep)
-    // Fetch and cache all sponsoring (as author)
-    console.warn('[Sync] Step 5/7: Fetching sponsoring...')
-    await fetchAndCacheSponsoring(poolWithSub, userPubkey)
-    currentStep++
-    updateProgress(currentStep)
-    // Fetch and cache all review tips (as author)
-    console.warn('[Sync] Step 6/7: Fetching review tips...')
-    await fetchAndCacheReviewTips(poolWithSub, userPubkey)
-    currentStep++
-    updateProgress(currentStep)
-    // Fetch and cache all payment notes (kind 1 with type='payment')
-    console.warn('[Sync] Step 7/7: Fetching payment notes...')
-    await fetchAndCachePaymentNotes(poolWithSub, userPubkey)
-    currentStep++
-    updateProgress(currentStep, true)
-    // Store the current timestamp as last sync date
-    await setLastSyncDate(currentTimestamp)
-    // Update lastSyncDate for all relays used during sync
-    const { configStorage } = await import('./configStorage')
-    const config = await configStorage.getConfig()
-    const currentRelay = syncProgressManager.getProgress()?.currentRelay
-    if (currentRelay) {
-      const relayConfig = config.relays.find((r) => r.url === currentRelay)
-      if (relayConfig) {
-        await configStorage.updateRelay(relayConfig.id, { lastSyncDate: Date.now() })
-      }
-    }
-    console.warn('[Sync] Synchronization completed successfully')
+    const { updateProgress } = await initializeSyncProgress(onProgress)
+    await executeSyncSteps(poolWithSub, userPubkey, updateProgress)
+    await finalizeSync(currentTimestamp)
  } catch (syncError) {
    console.error('Error syncing user content to cache:', syncError)
-    throw syncError // Re-throw to allow UI to handle it
+    throw syncError
  }
}
+async function executeSyncSteps(
+  poolWithSub: SimplePoolWithSub,
+  userPubkey: string,
+  updateProgress: (step: number, completed?: boolean) => void
+): Promise<void> {
+  let currentStep = 0
+  await syncAuthorProfile(poolWithSub, userPubkey)
+  updateProgress(++currentStep)
+  await syncSeries(poolWithSub, userPubkey)
+  updateProgress(++currentStep)
+  await syncPublications(poolWithSub, userPubkey)
+  updateProgress(++currentStep)
+  await syncPurchases(poolWithSub, userPubkey)
+  updateProgress(++currentStep)
+  await syncSponsoring(poolWithSub, userPubkey)
+  updateProgress(++currentStep)
+  await syncReviewTips(poolWithSub, userPubkey)
+  updateProgress(++currentStep)
+  await syncPaymentNotes(poolWithSub, userPubkey)
+  updateProgress(++currentStep, true)
+}
View File
@@ -120,11 +120,7 @@ async function handleWriteObject(data, taskId) {
    const store = transaction.objectStore('objects')
    // Check whether the object already exists, to preserve published
-    const existing = await new Promise((resolve, reject) => {
-      const request = store.get(finalId)
-      request.onsuccess = () => resolve(request.result)
-      request.onerror = () => reject(request.error)
-    }).catch(() => null)
+    const existing = await executeTransactionOperation(store, (s) => s.get(finalId)).catch(() => null)
    // Preserve published if it already exists and none was provided
    const finalPublished = existing && published === false ? existing.published : (published ?? false)
@@ -144,11 +140,7 @@ async function handleWriteObject(data, taskId) {
      pubkey: event.pubkey,
    }
-    await new Promise((resolve, reject) => {
-      const request = store.put(object)
-      request.onsuccess = () => resolve()
-      request.onerror = () => reject(request.error)
-    })
+    await executeTransactionOperation(store, (s) => s.put(object))
    self.postMessage({
      type: 'WRITE_OBJECT_SUCCESS',
@@ -195,11 +187,7 @@ async function handleUpdatePublished(data, taskId) {
    const transaction = db.transaction(['objects'], 'readwrite')
    const store = transaction.objectStore('objects')
-    const existing = await new Promise((resolve, reject) => {
-      const request = store.get(id)
-      request.onsuccess = () => resolve(request.result)
-      request.onerror = () => reject(request.error)
-    })
+    const existing = await executeTransactionOperation(store, (s) => s.get(id))
    if (!existing) {
      throw new Error(`Object ${id} not found`)
@@ -262,11 +250,7 @@ async function handleWriteMultiTable(data, taskId) {
      finalId = `${hash}:${count}:${version}`
    }
-    const existing = await new Promise((resolve, reject) => {
-      const request = store.get(finalId)
-      request.onsuccess = () => resolve(request.result)
-      request.onerror = () => reject(request.error)
-    }).catch(() => null)
+    const existing = await executeTransactionOperation(store, (s) => s.get(finalId)).catch(() => null)
    const finalPublished = existing && published === false ? existing.published : (published ?? false)
@@ -317,11 +301,7 @@ async function handleCreateNotification(data, taskId) {
    // Check whether the notification already exists
    const index = store.index('eventId')
-    const existing = await new Promise((resolve, reject) => {
-      const request = index.get(eventId)
-      request.onsuccess = () => resolve(request.result)
-      request.onerror = () => reject(request.error)
-    }).catch(() => null)
+    const existing = await executeTransactionOperation(index, (idx) => idx.get(eventId)).catch(() => null)
    if (existing) {
      // Notification already exists
@@ -349,11 +329,7 @@ async function handleCreateNotification(data, taskId) {
      fromPubkey: notificationData?.fromPubkey,
    }
-    await new Promise((resolve, reject) => {
-      const request = store.add(notification)
-      request.onsuccess = () => resolve()
-      request.onerror = () => reject(request.error)
-    })
+    await executeTransactionOperation(store, (s) => s.add(notification))
    self.postMessage({
      type: 'CREATE_NOTIFICATION_SUCCESS',
@@ -386,11 +362,7 @@ async function handleLogPublication(data, taskId) {
      objectId,
    }
-    await new Promise((resolve, reject) => {
-      const request = store.add(entry)
-      request.onsuccess = () => resolve()
-      request.onerror = () => reject(request.error)
-    })
+    await executeTransactionOperation(store, (s) => s.add(entry))
    // No response for logs (fire and forget)
  } catch (error) {
    // Don't throw for logs, just log the error
@@ -399,34 +371,45 @@
  }
}
/**
- * Open IndexedDB for object type
+ * Generic helper to open IndexedDB database
 */
-function openDB(objectType) {
+function openIndexedDB(dbName, version, upgradeHandler) {
  return new Promise((resolve, reject) => {
-    const dbName = `nostr_${objectType}_cache`
-    const version = DB_VERSIONS[objectType] ?? 1
    const request = indexedDB.open(dbName, version)
    request.onerror = () => reject(request.error)
    request.onsuccess = () => resolve(request.result)
    request.onupgradeneeded = (event) => {
      const db = event.target.result
-      if (!db.objectStoreNames.contains('objects')) {
-        const store = db.createObjectStore('objects', { keyPath: 'id' })
-        store.createIndex('hash', 'hash', { unique: false })
-        store.createIndex('index', 'index', { unique: false })
-        store.createIndex('published', 'published', { unique: false })
-      } else {
-        // Migration: add the published index if needed
-        const transaction = event.target.transaction
-        const store = transaction.objectStore('objects')
-        if (!store.indexNames.contains('published')) {
-          store.createIndex('published', 'published', { unique: false })
-        }
-      }
+      if (upgradeHandler) {
+        upgradeHandler(db, event)
+      }
    }
  })
}
+/**
+ * Open IndexedDB for object type
+ */
+function openDB(objectType) {
+  const dbName = `nostr_${objectType}_cache`
+  const version = DB_VERSIONS[objectType] ?? 1
+  return openIndexedDB(dbName, version, (db) => {
+    if (!db.objectStoreNames.contains('objects')) {
+      const store = db.createObjectStore('objects', { keyPath: 'id' })
+      store.createIndex('hash', 'hash', { unique: false })
+      store.createIndex('index', 'index', { unique: false })
+      store.createIndex('published', 'published', { unique: false })
+    } else {
+      // Migration: add the published index if needed
+      const transaction = db.transaction(['objects'], 'readwrite')
+      const store = transaction.objectStore('objects')
+      if (!store.indexNames.contains('published')) {
+        store.createIndex('published', 'published', { unique: false })
+      }
+    }
+  })
+}
@@ -434,23 +417,15 @@ function openDB(objectType) {
 * Open IndexedDB for notifications
 */
function openNotificationDB() {
-  return new Promise((resolve, reject) => {
-    const request = indexedDB.open('nostr_notifications', 1)
-    request.onerror = () => reject(request.error)
-    request.onsuccess = () => resolve(request.result)
-    request.onupgradeneeded = (event) => {
-      const db = event.target.result
-      if (!db.objectStoreNames.contains('notifications')) {
-        const store = db.createObjectStore('notifications', { keyPath: 'id' })
-        store.createIndex('type', 'type', { unique: false })
-        store.createIndex('objectId', 'objectId', { unique: false })
-        store.createIndex('eventId', 'eventId', { unique: false })
-        store.createIndex('timestamp', 'timestamp', { unique: false })
-        store.createIndex('read', 'read', { unique: false })
-        store.createIndex('objectType', 'objectType', { unique: false })
-      }
-    }
+  return openIndexedDB('nostr_notifications', 1, (db) => {
+    if (!db.objectStoreNames.contains('notifications')) {
+      const store = db.createObjectStore('notifications', { keyPath: 'id' })
+      store.createIndex('type', 'type', { unique: false })
+      store.createIndex('objectId', 'objectId', { unique: false })
+      store.createIndex('eventId', 'eventId', { unique: false })
+      store.createIndex('timestamp', 'timestamp', { unique: false })
+      store.createIndex('read', 'read', { unique: false })
+      store.createIndex('objectType', 'objectType', { unique: false })
+    }
  })
}
@@ -459,25 +434,28 @@ function openNotificationDB() {
 * Open IndexedDB for publish log
 */
function openPublishLogDB() {
-  return new Promise((resolve, reject) => {
-    const request = indexedDB.open('nostr_publish_log', 1)
-    request.onerror = () => reject(request.error)
-    request.onsuccess = () => resolve(request.result)
-    request.onupgradeneeded = (event) => {
-      const db = event.target.result
-      if (!db.objectStoreNames.contains('publications')) {
-        const store = db.createObjectStore('publications', { keyPath: 'id', autoIncrement: true })
-        store.createIndex('eventId', 'eventId', { unique: false })
-        store.createIndex('relayUrl', 'relayUrl', { unique: false })
-        store.createIndex('timestamp', 'timestamp', { unique: false })
-        store.createIndex('success', 'success', { unique: false })
-      }
-    }
+  return openIndexedDB('nostr_publish_log', 1, (db) => {
+    if (!db.objectStoreNames.contains('publications')) {
+      const store = db.createObjectStore('publications', { keyPath: 'id', autoIncrement: true })
+      store.createIndex('eventId', 'eventId', { unique: false })
+      store.createIndex('relayUrl', 'relayUrl', { unique: false })
+      store.createIndex('timestamp', 'timestamp', { unique: false })
+      store.createIndex('success', 'success', { unique: false })
+    }
  })
}
+/**
+ * Helper to execute a transaction operation
+ */
+function executeTransactionOperation(store, operation) {
+  return new Promise((resolve, reject) => {
+    const request = operation(store)
+    request.onsuccess = () => resolve(request.result)
+    request.onerror = () => reject(request.error)
+  })
+}
// Notify main thread that worker is ready
self.postMessage({ type: 'WORKER_READY' })
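A brief usage sketch (illustrative, not part of the diff) of how the openDB and executeTransactionOperation helpers introduced above compose for a read inside the worker; the readObject wrapper and its signature are assumptions.

```typescript
// Hypothetical read path built on the helpers above (worker context assumed).
async function readObject(objectType: string, id: string): Promise<unknown | null> {
  // openDB resolves with the opened IndexedDB database for this object type.
  const db = await openDB(objectType)
  const store = db.transaction(['objects'], 'readonly').objectStore('objects')
  // Resolves with the stored record, or null if the get request fails.
  return executeTransactionOperation(store, (s) => s.get(id)).catch(() => null)
}
```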