**Motivations:**

- Fix the 'Waiting for relays to be ready...' issue by properly waiting for the relay spAddress
- Prevent unnecessary iframe creation when the application runs in full-page mode
- Improve the relay address validation logic

**Changes:**

- Enhanced createProcess to properly wait for the relay spAddress before proceeding
- Added stricter relay address validation with a non-empty string check
- Made iframe initialization conditional, only when running in an iframe context
- Added debug logging of available relays when no address is found

**Affected files:**

- src/services/service.ts - relay readiness logic
- src/pages/home/home.ts - conditional iframe initialization
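The relay-readiness fix hinges on a small promise gate: `createProcess` awaits `getRelayReadyPromise()` and then checks that at least one relay reports a non-empty `spAddress`. The sketch below condenses that pattern; `RelayGate` and `waitForRelayAddress` are illustrative names, and in the real service the gate is resolved by the relay handshake handling code (not shown in this file excerpt) rather than directly inside `updateRelay`.

```ts
// Minimal sketch of the readiness gate used by createProcess (illustrative wrapper, not the service itself).
class RelayGate {
  private relayAddresses: Record<string, string> = {};
  private readyResolver: (() => void) | null = null;
  private readyPromise: Promise<void> | null = null;

  // Lazily create the promise that createProcess awaits.
  private getRelayReadyPromise(): Promise<void> {
    if (!this.readyPromise) {
      this.readyPromise = new Promise<void>(resolve => {
        this.readyResolver = resolve;
      });
    }
    return this.readyPromise;
  }

  // Assumption: in the real service this resolution is triggered by the handshake handler.
  updateRelay(url: string, spAddress: string): void {
    this.relayAddresses[url] = spAddress;
    if (spAddress.trim() !== '' && this.readyResolver) {
      this.readyResolver();
      this.readyResolver = null;
      this.readyPromise = null;
    }
  }

  // createProcess waits here, then validates that a non-empty spAddress exists.
  async waitForRelayAddress(): Promise<string> {
    await this.getRelayReadyPromise();
    const address = Object.values(this.relayAddresses).find(a => a && a.trim() !== '');
    if (!address) throw new Error('No relay address available after waiting');
    return address;
  }
}
```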
// import { INotification } from '~/models/notification.model'; // Unused import
|
|
// import { IProcess } from '~/models/process.model'; // Unused import
|
|
import { initWebsocket, sendMessage } from '../websockets';
|
|
import { memoryManager } from './memory-manager';
|
|
import { secureLogger } from './secure-logger';
|
|
// import { secureKeyManager } from './secure-key-manager';
|
|
import {
|
|
ApiReturn,
|
|
Device,
|
|
HandshakeMessage,
|
|
Member,
|
|
MerkleProofResult,
|
|
NewTxMessage,
|
|
OutPointProcessMap,
|
|
Process,
|
|
ProcessState,
|
|
RoleDefinition,
|
|
SecretsStore,
|
|
UserDiff,
|
|
} from '../../pkg/sdk_client';
|
|
import ModalService from './modal.service';
|
|
import Database from './database.service';
|
|
// import { navigate } from '../router'; // Unused import
|
|
import { storeData, retrieveData } from './storage.service';
|
|
// import { testData } from './storage.service'; // Unused import
|
|
import { BackUp } from '../models/backup.model';
|
|
|
|
export const U32_MAX = 4294967295;
|
|
|
|
const BASEURL = import.meta.env.VITE_BASEURL || `http://localhost`;
|
|
const BOOTSTRAPURL = [import.meta.env.VITE_BOOTSTRAPURL || `${BASEURL}:8090`];
|
|
const STORAGEURL = import.meta.env.VITE_STORAGEURL || `${BASEURL}:8081`;
|
|
const BLINDBITURL = import.meta.env.VITE_BLINDBITURL || `${BASEURL}:8000`;
|
|
const DEFAULTAMOUNT = 1000n;
|
|
|
|
// Global loading spinner functions
|
|
function showGlobalLoadingSpinner(message: string = 'Loading...') {
|
|
// Remove existing spinner if any
|
|
hideGlobalLoadingSpinner();
|
|
|
|
// Create spinner overlay
|
|
const overlay = document.createElement('div');
|
|
overlay.id = 'global-loading-overlay';
|
|
overlay.style.cssText = `
|
|
position: fixed;
|
|
top: 0;
|
|
left: 0;
|
|
width: 100%;
|
|
height: 100%;
|
|
background: rgba(0, 0, 0, 0.8);
|
|
display: flex;
|
|
flex-direction: column;
|
|
justify-content: center;
|
|
align-items: center;
|
|
z-index: 9999;
|
|
backdrop-filter: blur(5px);
|
|
`;
|
|
|
|
// Create spinner content
|
|
const spinnerContent = document.createElement('div');
|
|
spinnerContent.style.cssText = `
|
|
background: rgba(255, 255, 255, 0.95);
|
|
border-radius: 12px;
|
|
padding: 40px;
|
|
text-align: center;
|
|
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.3);
|
|
border: 1px solid rgba(255, 255, 255, 0.2);
|
|
max-width: 400px;
|
|
width: 90%;
|
|
`;
|
|
|
|
// Create spinner
|
|
const spinner = document.createElement('div');
|
|
spinner.style.cssText = `
|
|
width: 50px;
|
|
height: 50px;
|
|
border: 4px solid #f3f3f3;
|
|
border-top: 4px solid #3a506b;
|
|
border-radius: 50%;
|
|
animation: spin 1s linear infinite;
|
|
margin: 0 auto 20px auto;
|
|
`;
|
|
|
|
// Create message
|
|
const messageEl = document.createElement('div');
|
|
messageEl.textContent = message;
|
|
messageEl.style.cssText = `
|
|
font-size: 16px;
|
|
color: #3a506b;
|
|
font-weight: 500;
|
|
margin-bottom: 10px;
|
|
`;
|
|
|
|
// Create progress indicator
|
|
const progressEl = document.createElement('div');
|
|
progressEl.textContent = 'Please wait...';
|
|
progressEl.style.cssText = `
|
|
font-size: 14px;
|
|
color: #666;
|
|
`;
|
|
|
|
// Add CSS animation if not already present
|
|
if (!document.getElementById('global-spinner-styles')) {
|
|
const style = document.createElement('style');
|
|
style.id = 'global-spinner-styles';
|
|
style.textContent = `
|
|
@keyframes spin {
|
|
0% { transform: rotate(0deg); }
|
|
100% { transform: rotate(360deg); }
|
|
}
|
|
`;
|
|
document.head.appendChild(style);
|
|
}
|
|
|
|
// Assemble spinner
|
|
spinnerContent.appendChild(spinner);
|
|
spinnerContent.appendChild(messageEl);
|
|
spinnerContent.appendChild(progressEl);
|
|
overlay.appendChild(spinnerContent);
|
|
|
|
// Add to document
|
|
document.body.appendChild(overlay);
|
|
}
|
|
|
|
function hideGlobalLoadingSpinner() {
|
|
const overlay = document.getElementById('global-loading-overlay');
|
|
if (overlay) {
|
|
overlay.remove();
|
|
}
|
|
}
|
|
const EMPTY32BYTES = String('').padStart(64, '0');
|
|
|
|
export default class Services {
|
|
private static initializing: Promise<Services> | null = null;
|
|
private static instance: Services;
|
|
private processId: string | null = null;
|
|
private stateId: string | null = null;
|
|
private sdkClient: any;
|
|
private processesCache: Record<string, Process> = {};
|
|
private myProcesses: Set<string> = new Set();
|
|
private notifications: any[] | null = null;
|
|
// private subscriptions: { element: Element; event: string; eventHandler: string }[] = [];
|
|
private maxCacheSize = 100;
|
|
private cacheExpiry = 5 * 60 * 1000; // 5 minutes
|
|
// private database: any;
|
|
private routingInstance!: ModalService;
|
|
private relayAddresses: { [wsurl: string]: string } = {};
|
|
private membersList: Record<string, Member> = {};
|
|
private currentBlockHeight: number = -1;
|
|
private relayReadyResolver: (() => void) | null = null;
|
|
private relayReadyPromise: Promise<void> | null = null;
|
|
// Private constructor to prevent direct instantiation from outside
|
|
private constructor() {}
|
|
|
|
// Method to access the singleton instance of Services
|
|
public static async getInstance(): Promise<Services> {
|
|
if (Services.instance) {
|
|
return Services.instance;
|
|
}
|
|
|
|
if (!Services.initializing) {
|
|
Services.initializing = (async () => {
|
|
const instance = new Services();
|
|
// Initialize WebAssembly when needed
|
|
await instance.init();
|
|
instance.routingInstance = await ModalService.getInstance();
|
|
return instance;
|
|
})();
|
|
}
|
|
|
|
console.log('initializing services');
|
|
|
|
// Debug: Check memory usage before any operations
|
|
if ((performance as any).memory) {
|
|
const memory = (performance as any).memory;
|
|
const usedPercent = (memory.usedJSHeapSize / memory.jsHeapSizeLimit) * 100;
|
|
console.log(`🔍 Initial memory usage: ${usedPercent.toFixed(1)}% (${(memory.usedJSHeapSize / 1024 / 1024).toFixed(1)}MB / ${(memory.jsHeapSizeLimit / 1024 / 1024).toFixed(1)}MB)`);
|
|
}
|
|
|
|
// Show global loading spinner during initialization
|
|
showGlobalLoadingSpinner('Initializing services...');
|
|
|
|
// Add WebAssembly memory optimization and error handling
|
|
try {
|
|
// Check if WebAssembly is supported
|
|
if (typeof WebAssembly === 'undefined') {
|
|
throw new Error('WebAssembly is not supported in this browser');
|
|
}
|
|
|
|
// Optimize WebAssembly memory before initialization
|
|
console.log('🔧 Optimizing WebAssembly memory...');
|
|
|
|
// Clear browser caches to free memory
|
|
if ('caches' in window) {
|
|
const cacheNames = await caches.keys();
|
|
await Promise.all(cacheNames.map(name => caches.delete(name)));
|
|
console.log('🧹 Browser caches cleared');
|
|
}
|
|
|
|
// Clear unused objects from memory
|
|
if (window.gc) {
|
|
window.gc();
|
|
console.log('🗑️ Garbage collection triggered');
|
|
}
|
|
|
|
// Force memory cleanup
|
|
if (window.gc) {
|
|
window.gc();
|
|
await new Promise(resolve => setTimeout(resolve, 100)); // Wait for GC
|
|
window.gc();
|
|
console.log('🗑️ Additional garbage collection triggered');
|
|
}
|
|
|
|
// DO NOT clear user data - only clear non-essential caches
|
|
console.log('⚠️ Skipping storage cleanup to preserve user data');
|
|
|
|
// Force aggressive memory cleanup
|
|
console.log('🔧 Performing aggressive memory cleanup...');
|
|
|
|
// Clear only non-essential browser data (NOT user data)
|
|
try {
|
|
// Clear only HTTP caches (NOT IndexedDB with user data)
|
|
if ('caches' in window) {
|
|
const cacheNames = await caches.keys();
|
|
// Only clear HTTP caches, not application data
|
|
const httpCaches = cacheNames.filter(name => name.startsWith('http'));
|
|
await Promise.all(httpCaches.map(name => caches.delete(name)));
|
|
console.log('🧹 HTTP caches cleared (user data preserved)');
|
|
}
|
|
|
|
// DO NOT clear IndexedDB - it contains user secrets!
|
|
// DO NOT clear service workers - they manage user data!
|
|
|
|
} catch (e) {
|
|
console.log('⚠️ Safe cleanup error:', e);
|
|
}
|
|
|
|
// Check available memory (Chrome-specific API)
|
|
if ((performance as any).memory) {
|
|
const memory = (performance as any).memory;
|
|
const usedPercent = (memory.usedJSHeapSize / memory.jsHeapSizeLimit) * 100;
|
|
console.log(`📊 Memory usage after cleanup: ${usedPercent.toFixed(1)}% (${(memory.usedJSHeapSize / 1024 / 1024).toFixed(1)}MB)`);
|
|
|
|
if (usedPercent > 70) {
|
|
console.warn('⚠️ High memory usage detected, forcing additional cleanup...');
|
|
|
|
// Debug: Check what's consuming memory
|
|
console.log('🔍 Debugging memory usage...');
|
|
console.log('📦 Document elements:', document.querySelectorAll('*').length);
|
|
console.log('📦 Script tags:', document.querySelectorAll('script').length);
|
|
console.log('📦 Style tags:', document.querySelectorAll('style').length);
|
|
console.log('📦 Images:', document.querySelectorAll('img').length);
|
|
|
|
// Force more aggressive cleanup
|
|
if (window.gc) {
|
|
for (let i = 0; i < 5; i++) {
|
|
window.gc();
|
|
await new Promise(resolve => setTimeout(resolve, 100));
|
|
}
|
|
}
|
|
|
|
// Clear DOM references
|
|
const elements = document.querySelectorAll('*');
|
|
elements.forEach(el => {
|
|
if (el.removeAttribute) {
|
|
el.removeAttribute('data-cached');
|
|
}
|
|
});
|
|
|
|
console.log('🧹 Additional memory cleanup completed');
|
|
}
|
|
}
|
|
} catch (error) {
|
|
console.error('❌ WebAssembly optimization error:', error);
|
|
// Don't throw here, continue with initialization
|
|
}
|
|
|
|
// Initialize services with conditional WebAssembly loading
|
|
try {
|
|
// Check memory before loading WebAssembly
|
|
if ((performance as any).memory) {
|
|
const memory = (performance as any).memory;
|
|
const usedPercent = (memory.usedJSHeapSize / memory.jsHeapSizeLimit) * 100;
|
|
|
|
if (usedPercent > 70) {
|
|
console.log('🚫 Memory too high, skipping WebAssembly initialization');
|
|
Services.instance = new Services();
|
|
Services.initializing = null;
|
|
console.log('✅ Services initialized without WebAssembly');
|
|
return Services.instance;
|
|
}
|
|
}
|
|
|
|
// Memory is sufficient, load WebAssembly
|
|
Services.instance = await Services.initializing;
|
|
Services.initializing = null;
|
|
console.log('✅ Services initialized with WebAssembly');
|
|
|
|
} catch (error) {
|
|
console.error('❌ Service initialization failed:', error);
|
|
throw error;
|
|
}
|
|
|
|
// Hide loading spinner after initialization
|
|
hideGlobalLoadingSpinner();
|
|
|
|
return Services.instance;
|
|
}
|
|
|
|
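/**
* Loads the WASM SDK client, registers the bootstrap relays, starts memory
* monitoring and periodic cache cleanup, and initializes the secure-credentials service.
*/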
public async init(): Promise<void> {
|
|
this.notifications = this.getNotifications();
|
|
this.sdkClient = await import('../../pkg/sdk_client');
|
|
this.sdkClient.setup();
|
|
for (const wsurl of Object.values(BOOTSTRAPURL)) {
|
|
this.updateRelay(wsurl, '');
|
|
}
|
|
|
|
// Start memory monitoring
memoryManager.startMonitoring();

// Clean up caches periodically
this.startCacheCleanup();

// Initialize the PBKDF2 service for secure credentials
try {
const { secureCredentialsService } = await import('./secure-credentials.service');
// Reference secureCredentialsService so the import is not flagged as unused
console.log('Secure credentials service imported:', secureCredentialsService);
|
|
secureLogger.info('PBKDF2 service initialized for secure credentials', {
|
|
component: 'Services',
|
|
operation: 'pbkdf2_init'
|
|
});
|
|
} catch (error) {
|
|
secureLogger.warn('Failed to initialize PBKDF2 service', {
|
|
component: 'Services',
|
|
operation: 'pbkdf2_init',
|
|
error: error as Error
|
|
});
|
|
}
|
|
|
|
secureLogger.info('Services initialized', {
|
|
component: 'Services',
|
|
operation: 'initialization'
|
|
});
|
|
}
|
|
|
|
public setProcessId(processId: string | null) {
|
|
this.processId = processId;
|
|
}
|
|
|
|
/**
* Starts the periodic cache cleanup timer.
*/
|
|
private startCacheCleanup(): void {
|
|
setInterval(() => {
|
|
this.cleanupCaches();
|
|
}, this.cacheExpiry);
|
|
}
|
|
|
|
/**
* Removes expired entries from the in-memory caches.
*/
|
|
private cleanupCaches(): void {
|
|
const now = Date.now();
|
|
const expiredKeys: string[] = [];
|
|
|
|
// Clean up the process cache
|
|
Object.keys(this.processesCache).forEach(key => {
|
|
const process = this.processesCache[key];
|
|
if (process && now - (process as any).timestamp > this.cacheExpiry) {
|
|
expiredKeys.push(key);
|
|
}
|
|
});
|
|
|
|
expiredKeys.forEach(key => {
|
|
delete this.processesCache[key];
|
|
});
|
|
|
|
// Clean up the members cache
|
|
Object.keys(this.membersList).forEach(key => {
|
|
const member = this.membersList[key];
|
|
if (member && now - (member as any).timestamp > this.cacheExpiry) {
|
|
delete this.membersList[key];
|
|
}
|
|
});
|
|
|
|
if (expiredKeys.length > 0) {
|
|
secureLogger.debug('Cache cleanup completed', {
|
|
component: 'Services',
|
|
operation: 'cache_cleanup',
|
|
expiredEntries: expiredKeys.length
|
|
});
|
|
}
|
|
}
|
|
|
|
/**
* Caches a process with a timestamp.
*/
|
|
private cacheProcess(processId: string, process: Process): void {
|
|
// Use parameters
|
|
console.log('Caching process:', { processId, process });
|
|
if (Object.keys(this.processesCache).length >= this.maxCacheSize) {
|
|
// Evict the oldest entry
|
|
const oldestKey = Object.keys(this.processesCache)[0];
|
|
delete this.processesCache[oldestKey];
|
|
}
|
|
|
|
(process as any).timestamp = Date.now();
|
|
this.processesCache[processId] = process;
|
|
}
|
|
|
|
/**
* Retrieves a process from the cache if it has not expired.
*/
|
|
private getCachedProcess(processId: string): Process | null {
|
|
// Use processId parameter
|
|
console.log('Getting cached process:', processId);
|
|
const process = this.processesCache[processId];
|
|
if (!process) return null;
|
|
|
|
const now = Date.now();
|
|
if (now - (process as any).timestamp > this.cacheExpiry) {
|
|
delete this.processesCache[processId];
|
|
return null;
|
|
}
|
|
|
|
return process;
|
|
}
|
|
|
|
/**
* Clears all in-memory caches.
*/
|
|
public clearAllCaches(): void {
|
|
this.processesCache = {};
|
|
this.membersList = {};
|
|
this.myProcesses.clear();
|
|
|
|
secureLogger.info('All caches cleared', {
|
|
component: 'Services',
|
|
operation: 'cache_clear'
|
|
});
|
|
}
|
|
|
|
/**
* Returns statistics about the caches and memory usage.
*/
|
|
public getCacheStats(): {
|
|
processes: number;
|
|
members: number;
|
|
myProcesses: number;
|
|
memory: any;
|
|
} {
|
|
return {
|
|
processes: Object.keys(this.processesCache).length,
|
|
members: Object.keys(this.membersList).length,
|
|
myProcesses: this.myProcesses.size,
|
|
memory: memoryManager.getMemoryReport()
|
|
};
|
|
}
|
|
|
|
public setStateId(stateId: string | null) {
|
|
this.stateId = stateId;
|
|
}
|
|
|
|
public getProcessId(): string | null {
|
|
return this.processId;
|
|
}
|
|
|
|
public getStateId(): string | null {
|
|
return this.stateId;
|
|
}
|
|
|
|
/**
|
|
* Calls `this.addWebsocketConnection` for each `wsurl` in relayAddresses.
|
|
* Waits for at least one handshake message before returning.
|
|
*/
|
|
public async connectAllRelays(): Promise<void> {
|
|
const relayUrls = Object.keys(this.relayAddresses);
|
|
console.log(`🚀 Connecting to ${relayUrls.length} relays in parallel...`);
|
|
|
|
// Connect to all relays in parallel
|
|
const connectionPromises = relayUrls.map(async wsurl => {
|
|
try {
|
|
console.log(`🔗 Connecting to: ${wsurl}`);
|
|
await this.addWebsocketConnection(wsurl);
|
|
console.log(`✅ Successfully connected to: ${wsurl}`);
|
|
return wsurl;
|
|
} catch (error) {
|
|
console.error(`❌ Failed to connect to ${wsurl}:`, error);
|
|
return null;
|
|
}
|
|
});
|
|
|
|
// Wait for all connections to complete (success or failure)
|
|
const results = await Promise.allSettled(connectionPromises);
|
|
const connectedUrls = results
|
|
.filter(
|
|
(result): result is PromiseFulfilledResult<string> =>
|
|
result.status === 'fulfilled' && result.value !== null
|
|
)
|
|
.map(result => result.value);
|
|
|
|
console.log(`✅ Connected to ${connectedUrls.length}/${relayUrls.length} relays`);
|
|
|
|
// Wait for at least one handshake message if we have connections
|
|
if (connectedUrls.length > 0) {
|
|
try {
|
|
await this.waitForHandshakeMessage();
|
|
console.log(`✅ Handshake received from at least one relay`);
|
|
} catch (error) {
|
|
console.warn(
|
|
`⚠️ No handshake received within timeout, but continuing with ${connectedUrls.length} connections`
|
|
);
|
|
// Continue anyway - we have connections even without handshake
|
|
}
|
|
} else {
|
|
console.warn(`⚠️ No relay connections established`);
|
|
}
|
|
}
|
|
|
|
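// Lazily creates the promise that createProcess awaits before building a new process.
// It is resolved via resolveRelayReady(), presumably called from the relay handshake
// handling code (not shown in this excerpt) once a relay spAddress becomes available.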
private getRelayReadyPromise(): Promise<void> {
|
|
if (!this.relayReadyPromise) {
|
|
this.relayReadyPromise = new Promise<void>(resolve => {
|
|
this.relayReadyResolver = resolve;
|
|
});
|
|
}
|
|
return this.relayReadyPromise;
|
|
}
|
|
|
|
private resolveRelayReady(): void {
|
|
if (this.relayReadyResolver) {
|
|
this.relayReadyResolver();
|
|
this.relayReadyResolver = null;
|
|
this.relayReadyPromise = null;
|
|
}
|
|
}
|
|
|
|
public async addWebsocketConnection(url: string): Promise<void> {
|
|
console.log('Opening new websocket connection');
|
|
await initWebsocket(url);
|
|
}
|
|
|
|
/**
|
|
* Add or update a key/value pair in relayAddresses.
|
|
* @param wsurl - The WebSocket URL (key).
|
|
* @param spAddress - The SP Address (value).
|
|
*/
|
|
public updateRelay(url: string, spAddress: string) {
|
|
console.log(`✅ Updating relay ${url} with spAddress ${spAddress}`);
|
|
this.relayAddresses[url] = spAddress;
|
|
}
|
|
|
|
/**
|
|
* Retrieve the spAddress for a given wsurl.
|
|
* @param wsurl - The WebSocket URL to look up.
|
|
* @returns The SP Address if found, or undefined if not.
|
|
*/
|
|
public getSpAddress(wsurl: string): string | undefined {
|
|
return this.relayAddresses[wsurl];
|
|
}
|
|
|
|
/**
|
|
* Get all key/value pairs from relayAddresses.
|
|
* @returns An array of objects containing wsurl and spAddress.
|
|
*/
|
|
public getAllRelays(): { wsurl: string; spAddress: string }[] {
|
|
return Object.entries(this.relayAddresses).map(([wsurl, spAddress]) => ({
|
|
wsurl,
|
|
spAddress,
|
|
}));
|
|
}
|
|
|
|
/**
|
|
* Print all key/value pairs for debugging.
|
|
*/
|
|
public printAllRelays(): void {
|
|
console.log('Current relay addresses:');
|
|
for (const [wsurl, spAddress] of Object.entries(this.relayAddresses)) {
|
|
console.log(`${wsurl} -> ${spAddress}`);
|
|
}
|
|
}
|
|
|
|
public isPaired(): boolean {
|
|
try {
|
|
return this.sdkClient.is_paired();
|
|
} catch (e) {
|
|
// During pairing process, it's normal for the device to not be paired yet
|
|
console.warn(`Device pairing status check failed (normal during pairing): ${e}`);
|
|
return false;
|
|
}
|
|
}
|
|
|
|
public async unpairDevice(): Promise<void> {
|
|
try {
|
|
this.sdkClient.unpair_device();
|
|
const newDevice = this.dumpDeviceFromMemory();
|
|
await this.saveDeviceInDatabase(newDevice);
|
|
} catch (e) {
|
|
throw new Error(`Failed to unpair device: ${e}`);
|
|
}
|
|
}
|
|
|
|
public async getSecretForAddress(address: string): Promise<string | null> {
|
|
const db = await Database.getInstance();
|
|
return await db.getObject('shared_secrets', address);
|
|
}
|
|
|
|
public async getAllSecrets(): Promise<SecretsStore> {
|
|
const db = await Database.getInstance();
|
|
const sharedSecrets = await db.dumpStore('shared_secrets');
|
|
const unconfirmedSecrets = await db.dumpStore('unconfirmed_secrets'); // keys are numeric values
|
|
|
|
const secretsStore = {
|
|
shared_secrets: sharedSecrets,
|
|
unconfirmed_secrets: Object.values(unconfirmedSecrets),
|
|
};
|
|
|
|
return secretsStore;
|
|
}
|
|
|
|
public async getAllDiffs(): Promise<Record<string, UserDiff>> {
|
|
const db = await Database.getInstance();
|
|
return await db.dumpStore('diffs');
|
|
}
|
|
|
|
public async getDiffByValue(value: string): Promise<UserDiff | null> {
|
|
const db = await Database.getInstance();
|
|
const store = 'diffs';
|
|
const res = await db.getObject(store, value);
|
|
return res;
|
|
}
|
|
|
|
private async getTokensFromFaucet(): Promise<void> {
|
|
try {
|
|
await this.ensureSufficientAmount();
|
|
} catch (e) {
|
|
console.error('Failed to get tokens from relay, check connection');
|
|
return;
|
|
}
|
|
}
|
|
|
|
// If we're updating a process, we must call this after the update, especially if roles are part of it
// We take the roles from the last state, whether it's committed or not
|
|
public async checkConnections(process: Process, stateId: string | null = null): Promise<void> {
|
|
if (process.states.length < 2) {
|
|
throw new Error("Process doesn't have any state yet");
|
|
}
|
|
let roles: Record<string, RoleDefinition> | null = null;
|
|
if (!stateId) {
|
|
roles = process.states[process.states.length - 2].roles;
|
|
} else {
|
|
roles = process.states.find(state => state.state_id === stateId)?.roles || null;
|
|
}
|
|
if (!roles) {
|
|
throw new Error('No roles found');
|
|
}
|
|
let members: Set<Member> = new Set();
|
|
for (const role of Object.values(roles!)) {
|
|
for (const member of role.members) {
|
|
// Check if we know the member that matches this id
|
|
const memberAddresses = this.getAddressesForMemberId(member);
|
|
if (memberAddresses && memberAddresses.length != 0) {
|
|
members.add({ sp_addresses: memberAddresses });
|
|
}
|
|
}
|
|
}
|
|
|
|
if (members.size === 0) {
|
|
// This must be a pairing process
|
|
// Check if we have a pairedAddresses in the public data
|
|
let publicData: Record<string, any> | null = null;
|
|
if (!stateId) {
|
|
publicData = process.states[process.states.length - 2]?.public_data;
|
|
} else {
|
|
publicData = process.states.find(state => state.state_id === stateId)?.public_data || null;
|
|
}
|
|
|
|
// If pairedAddresses is not in the current state, look in previous states
|
|
if (!publicData || !publicData['pairedAddresses']) {
|
|
// Look for pairedAddresses in previous states
|
|
for (let i = process.states.length - 1; i >= 0; i--) {
|
|
const state = process.states[i];
|
|
if (state.public_data && state.public_data['pairedAddresses']) {
|
|
publicData = state.public_data;
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
|
|
if (!publicData || !publicData['pairedAddresses']) {
|
|
throw new Error('Not a pairing process');
|
|
}
|
|
const decodedAddresses = this.decodeValue(publicData['pairedAddresses']);
|
|
if (decodedAddresses.length === 0) {
|
|
throw new Error('Not a pairing process');
|
|
}
|
|
members.add({ sp_addresses: decodedAddresses });
|
|
}
|
|
|
|
// Ensure the amount is available before proceeding
|
|
await this.getTokensFromFaucet();
|
|
let unconnectedAddresses = new Set<string>();
|
|
const myAddress = this.getDeviceAddress();
|
|
for (const member of Array.from(members)) {
|
|
const sp_addresses = member.sp_addresses;
|
|
if (!sp_addresses || sp_addresses.length === 0) continue;
|
|
for (const address of sp_addresses) {
|
|
// For now, we ignore our own device address, although there might be use cases for having a secret with ourselves
|
|
if (address === myAddress) continue;
|
|
if ((await this.getSecretForAddress(address)) === null) {
|
|
unconnectedAddresses.add(address);
|
|
}
|
|
}
|
|
}
|
|
if (unconnectedAddresses && unconnectedAddresses.size != 0) {
|
|
const apiResult = await this.connectAddresses(Array.from(unconnectedAddresses));
|
|
await this.handleApiReturn(apiResult);
|
|
}
|
|
}
|
|
|
|
public async connectAddresses(addresses: string[]): Promise<ApiReturn> {
|
|
if (addresses.length === 0) {
|
|
throw new Error('Trying to connect to empty addresses list');
|
|
}
|
|
|
|
try {
|
|
return this.sdkClient.create_transaction(addresses, 1);
|
|
} catch (e) {
|
|
console.error('Failed to connect member:', e);
|
|
throw e;
|
|
}
|
|
}
|
|
|
|
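// Requests tokens from the relay faucet when the available amount is below
// 10x DEFAULTAMOUNT, then waits for the funds to arrive.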
private async ensureSufficientAmount(): Promise<void> {
|
|
const availableAmt = this.getAmount();
|
|
const target: bigint = DEFAULTAMOUNT * BigInt(10);
|
|
|
|
if (availableAmt < target) {
|
|
const faucetMsg = this.createFaucetMessage();
|
|
this.sendFaucetMessage(faucetMsg);
|
|
|
|
await this.waitForAmount(target);
|
|
}
|
|
}
|
|
|
|
private async waitForAmount(target: bigint): Promise<bigint> {
|
|
let attempts = 3;
|
|
|
|
while (attempts > 0) {
|
|
const amount = this.getAmount();
|
|
if (amount >= target) {
|
|
return amount;
|
|
}
|
|
|
|
attempts--;
|
|
if (attempts > 0) {
|
|
await new Promise(resolve => setTimeout(resolve, 1000)); // Wait for 1 second
|
|
}
|
|
}
|
|
|
|
throw new Error('Available amount is still below target after 3 attempts');
|
|
}
|
|
|
|
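/**
* Creates a pairing process that includes this device's address, with a single
* 'pairing' role requiring a full quorum on all private and public fields.
*/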
public async createPairingProcess(userName: string, pairWith: string[]): Promise<ApiReturn> {
|
|
if (this.sdkClient.is_paired()) {
|
|
throw new Error('Device already paired');
|
|
}
|
|
const myAddress: string = this.sdkClient.get_address();
|
|
pairWith.push(myAddress);
|
|
const privateData = {
|
|
description: 'pairing',
|
|
counter: 0,
|
|
};
|
|
const publicData = {
|
|
memberPublicName: userName,
|
|
pairedAddresses: pairWith,
|
|
};
|
|
const validation_fields: string[] = [
|
|
...Object.keys(privateData),
|
|
...Object.keys(publicData),
|
|
'roles',
|
|
];
|
|
const roles: Record<string, RoleDefinition> = {
|
|
pairing: {
|
|
members: [],
|
|
validation_rules: [
|
|
{
|
|
quorum: 1.0,
|
|
fields: validation_fields,
|
|
min_sig_member: 1.0,
|
|
},
|
|
],
|
|
storages: [STORAGEURL],
|
|
},
|
|
};
|
|
try {
|
|
return this.createProcess(privateData, publicData, roles);
|
|
} catch (e) {
|
|
throw new Error(`Creating process failed: ${e}`);
|
|
}
|
|
}
|
|
|
|
private isFileBlob(value: any): value is { type: string; data: Uint8Array } {
|
|
return (
|
|
typeof value === 'object' &&
|
|
value !== null &&
|
|
typeof value.type === 'string' &&
|
|
value.data instanceof Uint8Array
|
|
);
|
|
}
|
|
|
|
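// Separates JSON-compatible fields from binary file blobs so the two groups
// can be encoded separately (see createProcess / updateProcess).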
private splitData(obj: Record<string, any>) {
|
|
const jsonCompatibleData: Record<string, any> = {};
|
|
const binaryData: Record<string, { type: string; data: Uint8Array }> = {};
|
|
|
|
for (const [key, value] of Object.entries(obj)) {
|
|
if (this.isFileBlob(value)) {
|
|
binaryData[key] = value;
|
|
} else {
|
|
jsonCompatibleData[key] = value;
|
|
}
|
|
}
|
|
|
|
return { jsonCompatibleData, binaryData };
|
|
}
|
|
|
|
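/**
* Creates a new process. Waits for a relay with a non-empty spAddress, encodes the
* private and public data (JSON and binary parts separately), tops up tokens from the
* faucet if needed, then asks the SDK to build the process and checks member connections.
*/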
public async createProcess(
|
|
privateData: Record<string, any>,
|
|
publicData: Record<string, any>,
|
|
roles: Record<string, RoleDefinition>
|
|
): Promise<ApiReturn> {
|
|
// Wait for the relay to be ready with its spAddress
|
|
console.log('⏳ Waiting for relays to be ready...');
|
|
// Update UI status
|
|
const { updateCreatorStatus } = await import('../utils/sp-address.utils');
|
|
updateCreatorStatus('⏳ Waiting for relays to be ready...');
|
|
|
|
await this.getRelayReadyPromise();
|
|
|
|
// Verify that we now have a relay spAddress
|
|
const relays = this.getAllRelays();
|
|
const relayAddress = relays.find(relay => relay.spAddress && relay.spAddress.trim() !== '')?.spAddress;
|
|
|
|
if (!relayAddress) {
|
|
console.error('Available relays:', relays);
|
|
throw new Error('❌ No relay address available after waiting');
|
|
}
|
|
|
|
console.log('✅ Relay address found:', relayAddress);
|
|
|
|
const feeRate = 1;
|
|
|
|
// We can't encode files like the rest of the data because Uint8Array is not valid JSON,
// so we split them out, encode them separately, and merge them back into the right object
// TODO encoding of relatively large binaries (>= 1M) is currently slow and blocking
|
|
const privateSplitData = this.splitData(privateData);
|
|
const publicSplitData = this.splitData(publicData);
|
|
const encodedPrivateData = {
|
|
...this.sdkClient.encode_json(privateSplitData.jsonCompatibleData),
|
|
...this.sdkClient.encode_binary(privateSplitData.binaryData),
|
|
};
|
|
const encodedPublicData = {
|
|
...this.sdkClient.encode_json(publicSplitData.jsonCompatibleData),
|
|
...this.sdkClient.encode_binary(publicSplitData.binaryData),
|
|
};
|
|
|
|
// console.log('encodedPrivateData:', encodedPrivateData);
|
|
// console.log('encodedPublicData:', encodedPublicData);
|
|
// console.log('roles:', roles);
|
|
// console.log('members:', this.getAllMembers());
|
|
// console.log('relayAddress:', relayAddress, 'feeRate:', feeRate);
|
|
|
|
await this.getTokensFromFaucet();
|
|
const result = this.sdkClient.create_new_process(
|
|
encodedPrivateData,
|
|
roles,
|
|
encodedPublicData,
|
|
relayAddress,
|
|
feeRate,
|
|
this.getAllMembers()
|
|
);
|
|
|
|
if (result.updated_process) {
|
|
console.log('created process:', result.updated_process);
|
|
await this.checkConnections(result.updated_process.current_process);
|
|
return result;
|
|
} else {
|
|
throw new Error('Empty updated_process in createProcessReturn');
|
|
}
|
|
}
|
|
|
|
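/**
* Updates an existing process with new private/public data. When roles is null,
* the roles of the last committed state are reused.
*/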
public async updateProcess(
|
|
process: Process,
|
|
privateData: Record<string, any>,
|
|
publicData: Record<string, any>,
|
|
roles: Record<string, RoleDefinition> | null
|
|
): Promise<ApiReturn> {
|
|
// If roles is null, we just take the roles from the last committed state
|
|
if (!roles) {
|
|
roles = this.getRoles(process);
|
|
} else {
|
|
// We should check that we have the right to change the roles here, or maybe it's better to leave that to the wasm
|
|
console.log('Provided new roles:', JSON.stringify(roles));
|
|
}
|
|
const privateSplitData = this.splitData(privateData);
|
|
const publicSplitData = this.splitData(publicData);
|
|
const encodedPrivateData = {
|
|
...this.sdkClient.encode_json(privateSplitData.jsonCompatibleData),
|
|
...this.sdkClient.encode_binary(privateSplitData.binaryData),
|
|
};
|
|
const encodedPublicData = {
|
|
...this.sdkClient.encode_json(publicSplitData.jsonCompatibleData),
|
|
...this.sdkClient.encode_binary(publicSplitData.binaryData),
|
|
};
|
|
try {
|
|
const result = this.sdkClient.update_process(
|
|
process,
|
|
encodedPrivateData,
|
|
roles,
|
|
encodedPublicData,
|
|
this.getAllMembers()
|
|
);
|
|
if (result.updated_process) {
|
|
await this.checkConnections(result.updated_process.current_process);
|
|
return result;
|
|
} else {
|
|
throw new Error('Empty updated_process in updateProcessReturn');
|
|
}
|
|
} catch (e) {
|
|
throw new Error(`Failed to update process: ${e}`);
|
|
}
|
|
}
|
|
|
|
public async createPrdUpdate(processId: string, stateId: string): Promise<ApiReturn> {
|
|
const process = await this.getProcess(processId);
|
|
if (!process) {
|
|
throw new Error('Unknown process');
|
|
} else {
|
|
await this.checkConnections(process);
|
|
}
|
|
try {
|
|
return this.sdkClient.create_update_message(process, stateId, this.getAllMembers());
|
|
} catch (e) {
|
|
throw new Error(`Failed to create prd update: ${e}`);
|
|
}
|
|
}
|
|
|
|
public async createPrdResponse(processId: string, stateId: string): Promise<ApiReturn> {
|
|
const process = await this.getProcess(processId);
|
|
if (!process) {
|
|
throw new Error('Unknown process');
|
|
}
|
|
try {
|
|
return this.sdkClient.create_response_prd(process, stateId, this.getAllMembers());
|
|
} catch (e) {
|
|
throw new Error(`Failed to create response prd: ${e}`);
|
|
}
|
|
}
|
|
|
|
public async approveChange(processId: string, stateId: string): Promise<ApiReturn> {
|
|
const process = await this.getProcess(processId);
|
|
if (!process) {
|
|
throw new Error('Failed to get process from db');
|
|
}
|
|
try {
|
|
const result = this.sdkClient.validate_state(process, stateId, this.getAllMembers());
|
|
if (result.updated_process) {
|
|
await this.checkConnections(result.updated_process.current_process);
|
|
return result;
|
|
} else {
|
|
throw new Error('Empty updated_process in approveChangeReturn');
|
|
}
|
|
} catch (e) {
|
|
throw new Error(`Failed to approve change: ${e}`);
|
|
}
|
|
}
|
|
|
|
public async rejectChange(processId: string, stateId: string): Promise<ApiReturn> {
|
|
const process = await this.getProcess(processId);
|
|
if (!process) {
|
|
throw new Error('Failed to get process from db');
|
|
}
|
|
try {
|
|
return this.sdkClient.refuse_state(process, stateId);
|
|
} catch (e) {
|
|
throw new Error(`Failed to reject change: ${e}`);
|
|
}
|
|
}
|
|
|
|
async resetDevice() {
|
|
this.sdkClient.reset_device();
|
|
|
|
// Clear all stores
|
|
const db = await Database.getInstance();
|
|
await db.clearStore('wallet');
|
|
await db.clearStore('shared_secrets');
|
|
await db.clearStore('unconfirmed_secrets');
|
|
await db.clearStore('processes');
|
|
await db.clearStore('diffs');
|
|
}
|
|
|
|
sendNewTxMessage(message: string) {
|
|
sendMessage('NewTx', message);
|
|
}
|
|
|
|
sendCommitMessage(message: string) {
|
|
sendMessage('Commit', message);
|
|
}
|
|
|
|
sendCipherMessages(ciphers: string[]) {
|
|
for (let i = 0; i < ciphers.length; i++) {
|
|
const cipher = ciphers[i];
|
|
sendMessage('Cipher', cipher);
|
|
}
|
|
}
|
|
|
|
sendFaucetMessage(message: string): void {
|
|
sendMessage('Faucet', message);
|
|
}
|
|
|
|
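// Parses an incoming Cipher relay message with the SDK and applies the resulting
// ApiReturn; parsing failures are expected to be non-fatal during pairing.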
async parseCipher(message: string) {
|
|
const membersList = this.getAllMembers();
|
|
const processes = await this.getProcesses();
|
|
try {
|
|
// console.log('parsing new cipher');
|
|
const apiReturn = this.sdkClient.parse_cipher(message, membersList, processes);
|
|
await this.handleApiReturn(apiReturn);
|
|
|
|
// Device 1 waits for Device 2
|
|
const waitingModal = document.getElementById('waiting-modal');
|
|
if (waitingModal) {
|
|
this.device2Ready = true;
|
|
}
|
|
} catch (e) {
|
|
// Log the error but don't treat it as critical during pairing process
|
|
console.warn(`Cipher parsing failed (this may be normal during pairing): ${e}`);
|
|
|
|
// Only log as error if it's not a pairing-related issue
|
|
if (!(e as Error).message?.includes('Failed to handle decrypted message')) {
|
|
console.error(`Parsed cipher with error: ${e}`);
|
|
}
|
|
}
|
|
// await this.saveCipherTxToDb(parsedTx)
|
|
}
|
|
|
|
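// Parses an incoming NewTx relay message: commits a new state on any cached process
// whose tip is spent by the transaction, then lets the SDK parse the transaction
// itself and persists the updated device.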
async parseNewTx(newTxMsg: string) {
|
|
const parsedMsg: NewTxMessage = JSON.parse(newTxMsg);
|
|
if (parsedMsg.error !== null) {
|
|
console.error('Received error in new tx message:', parsedMsg.error);
|
|
return;
|
|
}
|
|
|
|
const membersList = this.getAllMembers();
|
|
try {
|
|
// Does the transaction spend the tip of a process?
|
|
const prevouts = this.sdkClient.get_prevouts(parsedMsg.transaction);
|
|
console.log('prevouts:', prevouts);
|
|
for (const process of Object.values(this.processesCache)) {
|
|
const tip = process.states[process.states.length - 1].commited_in;
|
|
if (prevouts.includes(tip)) {
|
|
const processId = process.states[0].commited_in;
|
|
const newTip = this.sdkClient.get_txid(parsedMsg.transaction);
|
|
console.log('Transaction', newTip, 'spends the tip of process', processId);
|
|
// We take the data out of the output
|
|
const newStateId = this.sdkClient.get_opreturn(parsedMsg.transaction);
|
|
console.log('newStateId:', newStateId);
|
|
// We update the relevant process
|
|
const updatedProcess = this.sdkClient.process_commit_new_state(
|
|
process,
|
|
newStateId,
|
|
newTip
|
|
);
|
|
this.processesCache[processId] = updatedProcess;
|
|
console.log('updatedProcess:', updatedProcess);
|
|
break;
|
|
}
|
|
}
|
|
} catch (e) {
|
|
console.error('Failed to parse new tx for commitments:', e);
|
|
}
|
|
|
|
try {
|
|
const parsedTx = this.sdkClient.parse_new_tx(newTxMsg, 0, membersList);
|
|
if (parsedTx) {
|
|
try {
|
|
await this.handleApiReturn(parsedTx);
|
|
const newDevice = this.dumpDeviceFromMemory();
|
|
await this.saveDeviceInDatabase(newDevice);
|
|
} catch (e) {
|
|
console.error('Failed to update device with new tx');
|
|
}
|
|
}
|
|
} catch (e) {
|
|
console.debug(e);
|
|
}
|
|
}
|
|
|
|
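/**
* Applies the side effects of an SDK ApiReturn: signs and broadcasts partial
* transactions, persists secrets, processes, diffs and encrypted blobs, pushes
* referenced data to storage, and sends commit and cipher messages to the relays.
*/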
public async handleApiReturn(apiReturn: ApiReturn) {
|
|
console.log(apiReturn);
|
|
if (apiReturn.partial_tx) {
|
|
try {
|
|
const res = this.sdkClient.sign_transaction(apiReturn.partial_tx);
|
|
apiReturn.new_tx_to_send = res.new_tx_to_send;
|
|
} catch (e) {
|
|
console.error('Failed to sign transaction:', e);
|
|
}
|
|
}
|
|
|
|
if (apiReturn.new_tx_to_send && apiReturn.new_tx_to_send.transaction.length != 0) {
|
|
this.sendNewTxMessage(JSON.stringify(apiReturn.new_tx_to_send));
|
|
await new Promise(r => setTimeout(r, 500));
|
|
}
|
|
|
|
if (apiReturn.secrets) {
|
|
const unconfirmedSecrets = apiReturn.secrets.unconfirmed_secrets;
|
|
const confirmedSecrets = apiReturn.secrets.shared_secrets;
|
|
|
|
const db = await Database.getInstance();
|
|
for (const secret of unconfirmedSecrets) {
|
|
await db.addObject({
|
|
storeName: 'unconfirmed_secrets',
|
|
object: secret,
|
|
key: null,
|
|
});
|
|
}
|
|
const entries = Object.entries(confirmedSecrets).map(([key, value]) => ({ key, value }));
|
|
for (const entry of entries) {
|
|
try {
|
|
await db.addObject({
|
|
storeName: 'shared_secrets',
|
|
object: entry.value,
|
|
key: entry.key,
|
|
});
|
|
} catch (e) {
|
|
throw e;
|
|
}
|
|
|
|
// We don't want to throw an error here; it may simply mean we registered the shared secret directly
|
|
// this.removeUnconfirmedSecret(entry.value);
|
|
}
|
|
}
|
|
|
|
if (apiReturn.updated_process) {
|
|
const updatedProcess = apiReturn.updated_process;
|
|
|
|
const processId: string = updatedProcess.process_id;
|
|
|
|
if (updatedProcess.encrypted_data && Object.keys(updatedProcess.encrypted_data).length != 0) {
|
|
for (const [hash, cipher] of Object.entries(updatedProcess.encrypted_data)) {
|
|
const blob = this.hexToBlob(cipher);
|
|
try {
|
|
await this.saveBlobToDb(hash, blob);
|
|
} catch (e) {
|
|
console.error(e);
|
|
}
|
|
}
|
|
}
|
|
|
|
// Save process to db
|
|
await this.saveProcessToDb(processId, updatedProcess.current_process);
|
|
|
|
if (updatedProcess.diffs && updatedProcess.diffs.length != 0) {
|
|
try {
|
|
await this.saveDiffsToDb(updatedProcess.diffs);
|
|
} catch (e) {
|
|
console.error('Failed to save diffs to db:', e);
|
|
}
|
|
}
|
|
}
|
|
|
|
if (apiReturn.push_to_storage && apiReturn.push_to_storage.length != 0) {
|
|
for (const hash of apiReturn.push_to_storage) {
|
|
const blob = await this.getBlobFromDb(hash);
|
|
if (blob) {
|
|
// Get the storages from the diff data
|
|
const diff = await this.getDiffByValueFromDb(hash);
|
|
if (diff) {
|
|
const storages = diff.storages;
|
|
await this.saveDataToStorage(hash, storages, blob, null);
|
|
} else {
|
|
console.error('Failed to get diff from db for hash', hash);
|
|
}
|
|
} else {
|
|
console.error('Failed to get data from db for hash', hash);
|
|
}
|
|
}
|
|
}
|
|
|
|
if (apiReturn.commit_to_send) {
|
|
const commit = apiReturn.commit_to_send;
|
|
this.sendCommitMessage(JSON.stringify(commit));
|
|
}
|
|
|
|
if (apiReturn.ciphers_to_send && apiReturn.ciphers_to_send.length != 0) {
|
|
this.sendCipherMessages(apiReturn.ciphers_to_send);
|
|
}
|
|
}
|
|
|
|
public async openPairingConfirmationModal(processId: string) {
|
|
const process = await this.getProcess(processId);
|
|
if (!process) {
|
|
console.error('Failed to find pairing process');
|
|
return;
|
|
}
|
|
const firstState = process.states[0];
|
|
const roles = firstState.roles;
|
|
const stateId = firstState.state_id;
|
|
try {
|
|
await this.routingInstance.openPairingConfirmationModal(roles, processId, stateId);
|
|
} catch (e) {
|
|
console.error(e);
|
|
}
|
|
}
|
|
|
|
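/**
* Polls the device state until a pairing process commitment is visible (or the SDK
* reports the process as its pairing process), periodically forcing device updates
* and peer requests. Throws if synchronization does not happen within maxRetries.
*/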
public async waitForPairingCommitment(
|
|
processId: string,
|
|
maxRetries: number = 30,
|
|
retryDelay: number = 2000
|
|
): Promise<void> {
|
|
console.log(`🔍 DEBUG: waitForPairingCommitment called with processId: ${processId}`);
|
|
console.log(`⏳ Waiting for pairing process ${processId} to be committed and synchronized...`);
|
|
console.log(`🔄 This may take some time as we wait for SDK synchronization...`);
|
|
|
|
for (let i = 0; i < maxRetries; i++) {
|
|
try {
|
|
// Check device state directly without forcing updateDevice
|
|
const device = this.dumpDeviceFromMemory();
|
|
console.log(
|
|
`🔍 Attempt ${i + 1}/${maxRetries}: pairing_process_commitment =`,
|
|
device.pairing_process_commitment
|
|
);
|
|
|
|
// Additional debugging: Check if we can get the pairing process ID
|
|
let currentPairingId: string | null = null;
|
|
try {
|
|
currentPairingId = this.sdkClient.get_pairing_process_id();
|
|
console.log(`🔍 Current pairing process ID from SDK: ${currentPairingId}`);
|
|
} catch (e) {
|
|
console.log(`⚠️ SDK pairing process ID not available yet: ${(e as Error).message}`);
|
|
}
|
|
|
|
// Try to force synchronization by requesting the process from peers
|
|
if (i % 3 === 0 && i > 0) {
|
|
try {
|
|
console.log(`🔄 Attempting to request process from peers...`);
|
|
await this.requestDataFromPeers(processId, [], []);
|
|
console.log(`✅ Process request sent to peers`);
|
|
} catch (e) {
|
|
console.log(`⚠️ Failed to request process from peers: ${(e as Error).message}`);
|
|
}
|
|
}
|
|
|
|
// For quorum=1, try to force process synchronization
|
|
if (i === 2) {
|
|
try {
|
|
console.log(`🔄 Forcing process synchronization for quorum=1 test...`);
|
|
// Force the SDK to recognize this as a pairing process
|
|
const process = await this.getProcess(processId);
|
|
if (process) {
|
|
console.log(`🔄 Process found, attempting to sync with SDK...`);
|
|
// Try to trigger SDK synchronization
|
|
await this.sdkClient.get_pairing_process_id();
|
|
}
|
|
} catch (e) {
|
|
console.log(`⚠️ Process synchronization attempt failed: ${(e as Error).message}`);
|
|
}
|
|
}
|
|
|
|
// Check if the process exists in our processes list
|
|
try {
|
|
const process = await this.getProcess(processId);
|
|
if (process) {
|
|
console.log(`🔍 Process exists: ${processId}, states: ${process.states?.length || 0}`);
|
|
const lastState = process.states?.[process.states.length - 1];
|
|
if (lastState) {
|
|
console.log(`🔍 Last state ID: ${lastState.state_id}`);
|
|
}
|
|
} else {
|
|
console.log(`⚠️ Process not found in local processes: ${processId}`);
|
|
}
|
|
} catch (e) {
|
|
console.log(`⚠️ Error checking process: ${(e as Error).message}`);
|
|
}
|
|
|
|
// Check WebSocket connection and handshake data
|
|
try {
|
|
console.log(
|
|
`🔍 WebSocket connections: ${Object.keys(this.relayAddresses).length} relays`
|
|
);
|
|
console.log(`🔍 Current block height: ${this.currentBlockHeight}`);
|
|
console.log(`🔍 Members list size: ${Object.keys(this.membersList).length}`);
|
|
} catch (e) {
|
|
console.log(`⚠️ Error checking WebSocket state: ${(e as Error).message}`);
|
|
}
|
|
|
|
// Check if the commitment is set and not null/empty
|
|
if (
|
|
device.pairing_process_commitment &&
|
|
device.pairing_process_commitment !== null &&
|
|
device.pairing_process_commitment !== ''
|
|
) {
|
|
console.log('✅ Pairing process commitment found:', device.pairing_process_commitment);
|
|
return;
|
|
}
|
|
|
|
// For quorum=1.0 processes, the creator must commit themselves
|
|
// Check if the process is ready for the creator to commit
|
|
if (currentPairingId && currentPairingId === processId) {
|
|
console.log(
|
|
'✅ Creator process is synchronized and ready for self-commitment (quorum=1.0)'
|
|
);
|
|
return;
|
|
}
|
|
|
|
// For quorum=1 test, if we have a process but no commitment yet,
|
|
// try to force synchronization by calling updateDevice more frequently
|
|
if (i < 5) {
|
|
try {
|
|
await this.updateDevice();
|
|
console.log(`🔄 Forced device update on attempt ${i + 1}`);
|
|
} catch (e) {
|
|
console.log(`⚠️ Forced device update failed: ${(e as Error).message}`);
|
|
}
|
|
}
|
|
|
|
// If we have the process but SDK doesn't know about it yet, try to force SDK sync
|
|
if (currentPairingId === null && i > 2) {
|
|
try {
|
|
console.log(`🔄 Attempting to force SDK synchronization for process ${processId}...`);
|
|
// Try to manually pair the device with the process
|
|
const process = await this.getProcess(processId);
|
|
if (process && process.states && process.states.length > 0) {
|
|
const lastState = process.states[process.states.length - 1];
|
|
if (lastState.public_data && lastState.public_data['pairedAddresses']) {
|
|
const pairedAddresses = this.decodeValue(lastState.public_data['pairedAddresses']);
|
|
console.log(
|
|
`🔄 Manually pairing device with addresses: ${JSON.stringify(pairedAddresses)}`
|
|
);
|
|
this.sdkClient.pair_device(processId, pairedAddresses);
|
|
console.log(`✅ Manual pairing completed`);
|
|
}
|
|
}
|
|
} catch (e) {
|
|
console.log(`⚠️ Manual pairing failed: ${(e as Error).message}`);
|
|
}
|
|
}
|
|
|
|
console.log(`⏳ Still waiting for SDK synchronization... (${i + 1}/${maxRetries})`);
|
|
|
|
// Only try updateDevice every 5 attempts to avoid spam
|
|
if (i % 5 === 0 && i > 0) {
|
|
try {
|
|
await this.updateDevice();
|
|
console.log(`✅ Device update successful on attempt ${i + 1}`);
|
|
} catch (e) {
|
|
console.log(
|
|
`⚠️ Device update failed on attempt ${i + 1} (process may not be committed yet): ${(e as Error).message}`
|
|
);
|
|
}
|
|
}
|
|
} catch (e) {
|
|
console.log(
|
|
`❌ Attempt ${i + 1}/${maxRetries}: Error during synchronization - ${(e as Error).message}`
|
|
);
|
|
}
|
|
|
|
if (i < maxRetries - 1) {
|
|
console.log(`⏳ Waiting ${retryDelay}ms before next attempt...`);
|
|
await new Promise(resolve => setTimeout(resolve, retryDelay));
|
|
}
|
|
}
|
|
|
|
throw new Error(
|
|
`❌ Pairing process ${processId} was not synchronized after ${maxRetries} attempts (${(maxRetries * retryDelay) / 1000}s)`
|
|
);
|
|
}
|
|
|
|
public async confirmPairing(pairingId?: string) {
|
|
try {
|
|
// Is the wasm paired?
|
|
console.log('confirmPairing');
|
|
let processId: string;
|
|
if (pairingId) {
|
|
processId = pairingId;
|
|
console.log('pairingId (provided):', processId);
|
|
} else if (this.processId) {
|
|
processId = this.processId;
|
|
console.log('pairingId (from stored processId):', processId);
|
|
} else {
|
|
// Try to get pairing process ID, with retry if it fails
|
|
let retries = 3;
|
|
while (retries > 0) {
|
|
try {
|
|
processId = this.getPairingProcessId();
|
|
console.log('pairingId (from SDK):', processId);
|
|
break;
|
|
} catch (e) {
|
|
retries--;
|
|
if (retries === 0) {
|
|
throw e;
|
|
}
|
|
console.log(`Failed to get pairing process ID, retrying... (${retries} attempts left)`);
|
|
await new Promise(resolve => setTimeout(resolve, 1000)); // Wait 1 second before retry
|
|
}
|
|
}
|
|
}
|
|
// TODO: confirm that the pairing process id is known and committed
|
|
const newDevice = this.dumpDeviceFromMemory();
|
|
console.log('newDevice:', newDevice);
|
|
await this.saveDeviceInDatabase(newDevice);
|
|
console.log('Device saved in database');
|
|
} catch (e) {
|
|
console.error('Failed to confirm pairing');
|
|
return;
|
|
}
|
|
}
|
|
|
|
public async updateDevice(): Promise<void> {
|
|
let myPairingProcessId: string;
|
|
try {
|
|
myPairingProcessId = this.getPairingProcessId();
|
|
} catch (e) {
|
|
console.error('Failed to get pairing process id');
|
|
return;
|
|
}
|
|
|
|
const myPairingProcess = await this.getProcess(myPairingProcessId);
|
|
if (!myPairingProcess) {
|
|
console.error('Unknown pairing process');
|
|
return;
|
|
}
|
|
const myPairingState = this.getLastCommitedState(myPairingProcess);
|
|
if (myPairingState) {
|
|
const encodedSpAddressList = myPairingState.public_data['pairedAddresses'];
|
|
const spAddressList = this.decodeValue(encodedSpAddressList);
|
|
if (spAddressList.length === 0) {
|
|
console.error('Empty pairedAddresses');
|
|
return;
|
|
}
|
|
// We can check if our address is included and simply unpair if it's not
|
|
if (!spAddressList.includes(this.getDeviceAddress())) {
|
|
await this.unpairDevice();
|
|
return;
|
|
}
|
|
// We can update the device with the new addresses
|
|
this.sdkClient.unpair_device();
|
|
this.sdkClient.pair_device(myPairingProcessId, spAddressList);
|
|
const newDevice = this.dumpDeviceFromMemory();
|
|
await this.saveDeviceInDatabase(newDevice);
|
|
}
|
|
}
|
|
|
|
public pairDevice(processId: string, spAddressList: string[]): void {
|
|
try {
|
|
this.sdkClient.pair_device(processId, spAddressList);
|
|
} catch (e) {
|
|
throw new Error(`Failed to pair device: ${e}`);
|
|
}
|
|
}
|
|
|
|
public getAmount(): bigint {
|
|
const amount = this.sdkClient.get_available_amount();
|
|
return amount;
|
|
}
|
|
|
|
getDeviceAddress(): string {
|
|
try {
|
|
return this.sdkClient.get_address();
|
|
} catch (e) {
|
|
throw new Error(`Failed to get device address: ${e}`);
|
|
}
|
|
}
|
|
|
|
public dumpDeviceFromMemory(): Device {
|
|
try {
|
|
return this.sdkClient.dump_device();
|
|
} catch (e) {
|
|
throw new Error(`Failed to dump device: ${e}`);
|
|
}
|
|
}
|
|
|
|
public dumpNeuteredDevice(): Device | null {
|
|
try {
|
|
return this.sdkClient.dump_neutered_device();
|
|
} catch (e) {
|
|
console.error(`Failed to dump device: ${e}`);
|
|
return null;
|
|
}
|
|
}
|
|
|
|
public getPairingProcessId(): string {
|
|
try {
|
|
return this.sdkClient.get_pairing_process_id();
|
|
} catch (e) {
|
|
throw new Error(`Failed to get pairing process: ${e}`);
|
|
}
|
|
}
|
|
|
|
async saveDeviceInDatabase(device: Device): Promise<void> {
|
|
const db = await Database.getInstance();
|
|
const walletStore = 'wallet';
|
|
try {
|
|
const prevDevice = await this.getDeviceFromDatabase();
|
|
if (prevDevice) {
|
|
await db.deleteObject(walletStore, '1');
|
|
}
|
|
await db.addObject({
|
|
storeName: walletStore,
|
|
object: { pre_id: '1', device },
|
|
key: null,
|
|
});
|
|
} catch (e) {
|
|
console.error(e);
|
|
}
|
|
}
|
|
|
|
async getDeviceFromDatabase(): Promise<Device | null> {
|
|
const db = await Database.getInstance();
|
|
const walletStore = 'wallet';
|
|
try {
|
|
const dbRes = await db.getObject(walletStore, '1');
|
|
if (dbRes) {
|
|
return dbRes['device'];
|
|
} else {
|
|
return null;
|
|
}
|
|
} catch (e) {
|
|
throw new Error(`Failed to retrieve device from db: ${e}`);
|
|
}
|
|
}
|
|
|
|
async deleteAccount(): Promise<void> {
|
|
const db = await Database.getInstance();
|
|
try {
|
|
// Clear all stores
|
|
await db.clearStore('wallet');
|
|
await db.clearStore('processes');
|
|
await db.clearStore('shared_secrets');
|
|
await db.clearStore('unconfirmed_secrets');
|
|
await db.clearStore('diffs');
|
|
await db.clearStore('data');
|
|
await db.clearStore('labels');
|
|
|
|
// Clear localStorage
|
|
localStorage.clear();
|
|
sessionStorage.clear();
|
|
|
|
// Clear IndexedDB completely
|
|
await this.clearAllIndexedDB();
|
|
|
|
console.log('✅ Account completely deleted');
|
|
} catch (e) {
|
|
console.error('❌ Error deleting account:', e);
|
|
throw new Error(`Failed to delete account: ${e}`);
|
|
}
|
|
}
|
|
|
|
private async clearAllIndexedDB(): Promise<void> {
|
|
return new Promise((resolve, reject) => {
|
|
const deleteReq = indexedDB.deleteDatabase('4nk');
|
|
deleteReq.onsuccess = () => {
|
|
console.log('✅ IndexedDB database deleted');
|
|
resolve();
|
|
};
|
|
deleteReq.onerror = () => {
|
|
console.error('❌ Error deleting IndexedDB database');
|
|
reject(deleteReq.error);
|
|
};
|
|
});
|
|
}
|
|
|
|
async getMemberFromDevice(): Promise<string[] | null> {
|
|
try {
|
|
const device = await this.getDeviceFromDatabase();
|
|
if (device) {
|
|
const pairedMember = device['paired_member'];
|
|
return pairedMember.sp_addresses;
|
|
} else {
|
|
return null;
|
|
}
|
|
} catch (e) {
|
|
throw new Error(`Failed to retrieve paired_member from device: ${e}`);
|
|
}
|
|
}
|
|
|
|
isChildRole(parent: any, child: any): boolean {
|
|
try {
|
|
this.sdkClient.is_child_role(JSON.stringify(parent), JSON.stringify(child));
|
|
} catch (e) {
|
|
console.error(e);
|
|
return false;
|
|
}
|
|
|
|
return true;
|
|
}
|
|
|
|
rolesContainsUs(roles: Record<string, RoleDefinition>): boolean {
|
|
let us;
|
|
try {
|
|
us = this.sdkClient.get_pairing_process_id();
|
|
} catch (e) {
|
|
throw e;
|
|
}
|
|
|
|
return this.rolesContainsMember(roles, us);
|
|
}
|
|
|
|
rolesContainsMember(roles: Record<string, RoleDefinition>, pairingProcessId: string): boolean {
|
|
for (const roleDef of Object.values(roles)) {
|
|
if (roleDef.members.includes(pairingProcessId)) {
|
|
return true;
|
|
}
|
|
}
|
|
|
|
return false;
|
|
}
|
|
|
|
async dumpWallet() {
|
|
const wallet = await this.sdkClient.dump_wallet();
|
|
return wallet;
|
|
}
|
|
|
|
public createFaucetMessage() {
|
|
const message = this.sdkClient.create_faucet_msg();
|
|
return message;
|
|
}
|
|
|
|
async createNewDevice() {
|
|
let spAddress = '';
|
|
try {
|
|
// We set birthday later when we have the chain tip from relay
|
|
spAddress = await this.sdkClient.create_new_device(0, 'signet');
|
|
const device = this.dumpDeviceFromMemory();
|
|
await this.saveDeviceInDatabase(device);
|
|
} catch (e) {
|
|
console.error('Services ~ Error:', e);
|
|
}
|
|
|
|
return spAddress;
|
|
}
|
|
|
|
public restoreDevice(device: Device) {
|
|
try {
|
|
this.sdkClient.restore_device(device);
|
|
} catch (e) {
|
|
console.error(e);
|
|
}
|
|
}
|
|
|
|
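/**
* Aligns the wallet with the current block height: for a fresh device (birthday 0) the
* chain tip becomes its birthday and last_scan; otherwise missed blocks are scanned
* through the blindbit service before the device is persisted again.
*/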
public async updateDeviceBlockHeight(): Promise<void> {
|
|
if (this.currentBlockHeight === -1) {
|
|
throw new Error('Current block height not set');
|
|
}
|
|
|
|
let device: Device | null = null;
|
|
try {
|
|
device = await this.getDeviceFromDatabase();
|
|
} catch (e) {
|
|
throw new Error(`Failed to get device from database: ${e}`);
|
|
}
|
|
|
|
if (!device) {
|
|
throw new Error('Device not found');
|
|
}
|
|
|
|
const birthday = device.sp_wallet.birthday;
|
|
if (birthday === undefined || birthday === null) {
|
|
throw new Error('Birthday not found');
|
|
}
|
|
|
|
if (birthday === 0) {
|
|
// This is a new device, so current chain tip is its birthday
|
|
device.sp_wallet.birthday = this.currentBlockHeight;
|
|
// We also set last_scan; we can never need to scan earlier than this
|
|
device.sp_wallet.last_scan = this.currentBlockHeight;
|
|
try {
|
|
// First set the updated device in memory
|
|
this.sdkClient.restore_device(device);
|
|
// Then save it to database
|
|
await this.saveDeviceInDatabase(device);
|
|
} catch (e) {
|
|
throw new Error(`Failed to save updated device: ${e}`);
|
|
}
|
|
} else {
|
|
// This is an existing device; we need to catch up if last_scan is lagging behind the chain tip
|
|
if (device.sp_wallet.last_scan < this.currentBlockHeight) {
|
|
// We need to catch up
|
|
try {
|
|
await this.sdkClient.scan_blocks(this.currentBlockHeight, BLINDBITURL);
|
|
} catch (e) {
|
|
console.error(`Failed to scan blocks: ${e}`);
|
|
return;
|
|
}
|
|
|
|
// If everything went well, we can update our storage
|
|
try {
|
|
const device = this.dumpDeviceFromMemory();
|
|
await this.saveDeviceInDatabase(device);
|
|
} catch (e) {
|
|
console.error(`Failed to save updated device: ${e}`);
|
|
}
|
|
} else {
|
|
// Up to date, just return
|
|
return;
|
|
}
|
|
}
|
|
}
|
|
|
|
private async removeProcess(processId: string): Promise<void> {
|
|
const db = await Database.getInstance();
|
|
const storeName = 'processes';
|
|
|
|
try {
|
|
await db.deleteObject(storeName, processId);
|
|
} catch (e) {
|
|
console.error(e);
|
|
}
|
|
}
|
|
|
|
public async batchSaveProcessesToDb(processes: Record<string, Process>) {
|
|
if (Object.keys(processes).length === 0) {
|
|
return;
|
|
}
|
|
|
|
const db = await Database.getInstance();
|
|
const storeName = 'processes';
|
|
try {
|
|
await db.batchWriting({
|
|
storeName,
|
|
objects: Object.entries(processes).map(([key, value]) => ({ key, object: value })),
|
|
});
|
|
this.processesCache = { ...this.processesCache, ...processes };
|
|
} catch (e) {
|
|
throw e;
|
|
}
|
|
}
|
|
|
|
public async saveProcessToDb(processId: string, process: Process) {
|
|
const db = await Database.getInstance();
|
|
const storeName = 'processes';
|
|
try {
|
|
await db.addObject({
|
|
storeName,
|
|
object: process,
|
|
key: processId,
|
|
});
|
|
|
|
// Update the process in the cache
|
|
this.processesCache[processId] = process;
|
|
} catch (e) {
|
|
console.error(`Failed to save process ${processId}: ${e}`);
|
|
}
|
|
}
|
|
|
|
public async saveBlobToDb(hash: string, data: Blob) {
|
|
const db = await Database.getInstance();
|
|
try {
|
|
await db.addObject({
|
|
storeName: 'data',
|
|
object: data,
|
|
key: hash,
|
|
});
|
|
} catch (e) {
|
|
console.error(`Failed to save data to db: ${e}`);
|
|
}
|
|
}
|
|
|
|
public async getBlobFromDb(hash: string): Promise<Blob | null> {
|
|
const db = await Database.getInstance();
|
|
try {
|
|
return await db.getObject('data', hash);
|
|
} catch (e) {
|
|
return null;
|
|
}
|
|
}

  public async saveDataToStorage(hash: string, storages: string[], data: Blob, ttl: number | null) {
    try {
      await storeData(storages, hash, data, ttl);
    } catch (e) {
      console.error(`Failed to store data with hash ${hash}: ${e}`);
    }
  }

  public async fetchValueFromStorage(hash: string): Promise<ArrayBuffer | null> {
    const storages = [STORAGEURL];

    return await retrieveData(storages, hash);
  }

  public async getDiffByValueFromDb(hash: string): Promise<UserDiff | null> {
    const db = await Database.getInstance();
    const diff = await db.getObject('diffs', hash);
    return diff;
  }

  public async saveDiffsToDb(diffs: UserDiff[]) {
    const db = await Database.getInstance();
    try {
      for (const diff of diffs) {
        await db.addObject({
          storeName: 'diffs',
          object: diff,
          key: null,
        });
      }
    } catch (e) {
      throw new Error(`Failed to save diffs: ${e}`);
    }
  }

  public async getProcess(processId: string): Promise<Process | null> {
    if (!processId) {
      return null;
    }

    if (this.processesCache[processId]) {
      return this.processesCache[processId];
    } else {
      const db = await Database.getInstance();
      const process = await db.getObject('processes', processId);
      return process;
    }
  }

  public async getProcesses(): Promise<Record<string, Process>> {
    if (Object.keys(this.processesCache).length > 0) {
      return this.processesCache;
    } else {
      try {
        const db = await Database.getInstance();
        this.processesCache = await db.dumpStore('processes');
        return this.processesCache;
      } catch (e) {
        throw e;
      }
    }
  }
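
  // Read path for the two getters above: the in-memory `processesCache` is consulted first, and
  // the 'processes' store is only read on a cache miss (getProcess) or while the cache is still
  // empty (getProcesses). Minimal sketch, assuming a `service` instance:
  //
  //   const all = await service.getProcesses();        // fills the cache on first use
  //   const one = await service.getProcess(processId); // usually answered from the cache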

  public async restoreProcessesFromBackUp(processes: Record<string, Process>) {
    const db = await Database.getInstance();
    const storeName = 'processes';
    try {
      await db.batchWriting({
        storeName,
        objects: Object.entries(processes).map(([key, value]) => ({ key, object: value })),
      });
    } catch (e) {
      throw e;
    }

    await this.restoreProcessesFromDB();
  }

  // Restore processes cache from persistent storage
  public async restoreProcessesFromDB() {
    const db = await Database.getInstance();
    try {
      const processes: Record<string, Process> = await db.dumpStore('processes');
      if (processes && Object.keys(processes).length != 0) {
        console.log(`Restoring ${Object.keys(processes).length} processes`);
        this.processesCache = processes;
      } else {
        console.log('No processes to restore!');
      }
    } catch (e) {
      throw e;
    }
  }

  public async clearSecretsFromDB() {
    const db = await Database.getInstance();
    try {
      await db.clearStore('shared_secrets');
      await db.clearStore('unconfirmed_secrets');
    } catch (e) {
      console.error(e);
    }
  }

  public async restoreSecretsFromBackUp(secretsStore: SecretsStore) {
    const db = await Database.getInstance();

    for (const secret of secretsStore.unconfirmed_secrets) {
      await db.addObject({
        storeName: 'unconfirmed_secrets',
        object: secret,
        key: null,
      });
    }
    const entries = Object.entries(secretsStore.shared_secrets).map(([key, value]) => ({
      key,
      value,
    }));
    for (const entry of entries) {
      await db.addObject({
        storeName: 'shared_secrets',
        object: entry.value,
        key: entry.key,
      });
    }

    // Now we can transfer them to memory
    await this.restoreSecretsFromDB();
  }

  public async restoreSecretsFromDB() {
    const db = await Database.getInstance();
    try {
      const sharedSecrets: Record<string, string> = await db.dumpStore('shared_secrets');
      const unconfirmedSecrets = await db.dumpStore('unconfirmed_secrets');
      const secretsStore = {
        shared_secrets: sharedSecrets,
        unconfirmed_secrets: Object.values(unconfirmedSecrets),
      };
      this.sdkClient.set_shared_secrets(JSON.stringify(secretsStore));
    } catch (e) {
      throw e;
    }
  }

  decodeValue(value: number[]): any | null {
    try {
      return this.sdkClient.decode_value(value);
    } catch (e) {
      console.error(`Failed to decode value: ${e}`);
      return null;
    }
  }

  async decryptAttribute(
    processId: string,
    state: ProcessState,
    attribute: string
  ): Promise<any | null> {
    console.log(
      `[decryptAttribute] Starting decryption for attribute: ${attribute}, processId: ${processId}`
    );
    let hash = state.pcd_commitment[attribute];
    if (!hash) {
      console.log(`[decryptAttribute] No hash found for attribute: ${attribute}`);
      return null;
    }
    let key = state.keys[attribute];
    console.log(
      `[decryptAttribute] Initial key state for ${attribute}:`,
      key ? 'present' : 'missing'
    );
    const pairingProcessId = this.getPairingProcessId();

    // If key is missing, request an update and then retry
    if (!key) {
      const roles = state.roles;
      let hasAccess = false;
      // If we're not supposed to have access to this attribute, ignore
      for (const role of Object.values(roles)) {
        for (const rule of Object.values(role.validation_rules)) {
          if (rule.fields.includes(attribute)) {
            if (role.members.includes(pairingProcessId)) {
              // We have access to this attribute
              hasAccess = true;
              break;
            }
          }
        }
      }

      if (!hasAccess) {
        console.log(`[decryptAttribute] No access rights for attribute: ${attribute}`);
        return null;
      }

      console.log(`[decryptAttribute] Requesting key update for attribute: ${attribute}`);
      await this.checkConnections((await this.getProcess(processId))!);
      // We should have the key, so we're going to ask other members for it
      await this.requestDataFromPeers(processId, [state.state_id], [state.roles]);

      const maxRetries = 1;
      const retryDelay = 100; // delay in milliseconds
      let retries = 0;

      while ((!hash || !key) && retries < maxRetries) {
        console.log(
          `[decryptAttribute] Retry ${retries + 1}/${maxRetries} for attribute: ${attribute}`
        );
        await new Promise(resolve => setTimeout(resolve, retryDelay));
        // Re-read hash and key after waiting
        hash = state.pcd_commitment[attribute];
        key = state.keys[attribute];
        retries++;
        console.log(`[decryptAttribute] After retry ${retries}: hash=${!!hash}, key=${!!key}`);
      }
    }

    if (hash && key) {
      console.log(
        `[decryptAttribute] Starting decryption process with hash: ${hash.substring(0, 8)}...`
      );
      const blob = await this.getBlobFromDb(hash);
      if (blob) {
        console.log(`[decryptAttribute] Blob retrieved successfully for ${attribute}`);
        // Decrypt the data
        const buf = await blob.arrayBuffer();
        const cipher = new Uint8Array(buf);

        const keyUIntArray = this.hexToUInt8Array(key);

        try {
          const clear = this.sdkClient.decrypt_data(keyUIntArray, cipher);
          if (clear) {
            // Deserialize the result to get the actual data
            const decoded = this.sdkClient.decode_value(clear);
            return decoded;
          } else {
            throw new Error('decrypt_data returned null');
          }
        } catch (e) {
          console.error(`[decryptAttribute] Failed to decrypt data for ${attribute}:`, e);
        }
      }
    }

    return null;
  }
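
  // decryptAttribute sketch: the attribute's entry in `pcd_commitment` indexes the encrypted blob
  // in the 'data' store and `state.keys[attribute]` holds the hex-encoded key; a missing key is
  // requested from peers, then the lookup is retried once. Illustrative call, with `service`,
  // `lastState` and the attribute name being assumptions:
  //
  //   const value = await service.decryptAttribute(processId, lastState, 'memberName');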

  getNotifications(): any[] | null {
    // return [
    //   {
    //     id: 1,
    //     title: 'Notif 1',
    //     description: 'A normal notification',
    //     sendToNotificationPage: false,
    //     path: '/notif1',
    //   },
    //   {
    //     id: 2,
    //     title: 'Notif 2',
    //     description: 'A normal notification',
    //     sendToNotificationPage: false,
    //     path: '/notif2',
    //   },
    //   {
    //     id: 3,
    //     title: 'Notif 3',
    //     description: 'A normal notification',
    //     sendToNotificationPage: false,
    //     path: '/notif3',
    //   },
    // ];
    return this.notifications;
  }

  setNotifications(notifications: any[]) {
    this.notifications = notifications;
  }

  async importJSON(backup: BackUp): Promise<void> {
    const device = backup.device;

    // Reset current device
    await this.resetDevice();

    await this.saveDeviceInDatabase(device);

    this.restoreDevice(device);

    // TODO restore secrets and processes from file
    const secretsStore = backup.secrets;
    await this.restoreSecretsFromBackUp(secretsStore);

    const processes = backup.processes;
    await this.restoreProcessesFromBackUp(processes);
  }

  public async createBackUp(): Promise<BackUp | null> {
    // Get the device from indexedDB
    const device = await this.getDeviceFromDatabase();
    if (!device) {
      console.error('No device loaded');
      return null;
    }

    // Get the processes
    const processes = await this.getProcesses();

    // Get the shared secrets
    const secrets = await this.getAllSecrets();

    // Create a backup object
    const backUp = {
      device: device,
      secrets: secrets,
      processes: processes,
    };

    return backUp;
  }
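
  // Backup / restore round trip, assuming a `service` instance of this class:
  //
  //   const backup = await service.createBackUp(); // device + secrets + processes, or null
  //   if (backup) {
  //     await service.importJSON(backup); // resets the device, then restores everything
  //   }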

  // Device 1 waits for Device 2
  public device1: boolean = false;
  public device2Ready: boolean = false;

  public resetState() {
    this.device1 = false;
    this.device2Ready = false;
  }

  // Handle the handshake message
  public async handleHandshakeMsg(url: string, parsedMsg: any) {
    try {
      const handshakeMsg: HandshakeMessage = JSON.parse(parsedMsg);
      if (handshakeMsg.sp_address) {
        this.updateRelay(url, handshakeMsg.sp_address);
        this.relayAddresses[url] = handshakeMsg.sp_address;
        this.resolveRelayReady();
      }

      console.log('handshakeMsg:', handshakeMsg);
      this.currentBlockHeight = handshakeMsg.chain_tip;
      console.log('this.currentBlockHeight:', this.currentBlockHeight);
      this.updateDeviceBlockHeight();
      if (this.membersList && Object.keys(this.membersList).length === 0) {
        // We start from an empty list, just copy it over
        this.membersList = handshakeMsg.peers_list;
      } else {
        // We are merging into our existing list
        for (const [processId, member] of Object.entries(handshakeMsg.peers_list)) {
          this.membersList[processId] = member as Member;
        }
      }

      setTimeout(async () => {
        const newProcesses: OutPointProcessMap = handshakeMsg.processes_list;
        if (!newProcesses || Object.keys(newProcesses).length === 0) {
          console.debug('Received empty processes list from', url);
          return;
        }

        if (this.processesCache && Object.keys(this.processesCache).length === 0) {
          // We restored the db but the cache is empty, meaning we're starting from scratch
          try {
            await this.batchSaveProcessesToDb(newProcesses);
          } catch (e) {
            console.error('Failed to save processes to db:', e);
          }
        } else {
          // We need to update our processes with what the relay provides
          const toSave: Record<string, Process> = {};
          for (const [processId, process] of Object.entries(newProcesses)) {
            const existing = await this.getProcess(processId);
            if (existing) {
              // Look for state ids we don't know yet
              let newStates: string[] = [];
              let newRoles: Record<string, RoleDefinition>[] = [];
              for (const state of process.states) {
                if (!state || !state.state_id) {
                  continue; // shouldn't happen
                }
                if (state.state_id === EMPTY32BYTES) {
                  // We check that the tip is the same we have; if not, we update
                  const existingTip = existing.states[existing.states.length - 1].commited_in;
                  if (existingTip !== state.commited_in) {
                    console.log('Found new tip for process', processId);
                    existing.states.pop(); // We discard the last state
                    existing.states.push(state);
                    // We know that's the last state, so we just trigger the update
                    toSave[processId] = existing;
                  }
                } else if (!this.lookForStateId(existing, state.state_id)) {
                  // We don't want to overwrite what we already have for existing processes,
                  // as we may end up overwriting the keys for example.
                  // So the process we're going to save needs to merge new states with what we already have.
                  const existingLastState = existing.states.pop();
                  if (!existingLastState) {
                    // This should never happen
                    console.error('Failed to get last state for process', processId);
                    break;
                  }
                  existing.states.push(state);
                  existing.states.push(existingLastState);
                  toSave[processId] = existing; // We mark it for update
                  if (this.rolesContainsUs(state.roles)) {
                    newStates.push(state.state_id);
                    newRoles.push(state.roles);
                  }
                } else {
                  // We already have the state, but we check if we have the keys
                  const existingState = this.getStateFromId(existing, state.state_id);
                  if (existingState!.keys && Object.keys(existingState!.keys).length != 0) {
                    // We have some keys, so we just assume everything is ok and move on for now
                    continue;
                  } else {
                    // We verify we are part of the roles
                    const roles = state.roles;
                    if (this.rolesContainsUs(roles)) {
                      // We don't have keys, but we are part of the roles, so we need to request them.
                      // That may also be because we are part of a role that doesn't have any fields;
                      // it's possible, but let's send the request anyway.
                      newStates.push(state.state_id);
                      newRoles.push(roles);
                    } else {
                      // We are simply not involved, move on
                      continue;
                    }
                  }
                }
              }

              if (newStates.length != 0) {
                await this.checkConnections(existing);
                await this.requestDataFromPeers(processId, newStates, newRoles);
              }
              // Otherwise we're probably just in the initial loading at page initialization
            } else {
              // We add it to the db
              toSave[processId] = process;
            }
          }

          if (toSave && Object.keys(toSave).length > 0) {
            console.log('batch saving processes to db', toSave);
            await this.batchSaveProcessesToDb(toSave);
          }
        }
      }, 500);
    } catch (e) {
      console.error('Failed to handle handshake message:', e);
    }
  }
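
  // Shape of the handshake payload consumed above (field names as read by the code; the exact
  // types live in the SDK's HandshakeMessage definition):
  //
  //   {
  //     sp_address: string,   // relay sp address, stored in relayAddresses
  //     chain_tip: number,    // becomes currentBlockHeight
  //     peers_list: Record<string, Member>,
  //     processes_list: OutPointProcessMap,
  //   }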

  private lookForStateId(process: Process, stateId: string): boolean {
    for (const state of process.states) {
      if (state.state_id === stateId) {
        return true;
      }
    }

    return false;
  }

  /**
   * Waits for at least one handshake message to be received from any connected relay.
   * This ensures that the relay addresses are fully populated and the member list is updated.
   * @returns A promise that resolves when at least one handshake message is received.
   */
  private async waitForHandshakeMessage(timeoutMs: number = 3000): Promise<void> {
    const startTime = Date.now();
    const pollInterval = 100; // Check every 100ms

    return new Promise<void>((resolve, reject) => {
      const checkForHandshake = () => {
        // Check if we have any members or any relays (indicating a handshake was received)
        if (
          Object.keys(this.membersList).length > 0 ||
          Object.keys(this.relayAddresses).length > 0
        ) {
          console.log('Handshake message received (members or relays present)');
          resolve();
          return;
        }

        // Check timeout
        if (Date.now() - startTime >= timeoutMs) {
          reject(new Error(`No handshake message received after ${timeoutMs}ms timeout`));
          return;
        }

        // Continue polling
        setTimeout(checkForHandshake, pollInterval);
      };

      checkForHandshake();
    });
  }
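
  // Typical call-site sketch for waitForHandshakeMessage (it is private, so called from within
  // this class), for example before reading relayAddresses:
  //
  //   try {
  //     await this.waitForHandshakeMessage(5000); // poll for up to 5s
  //   } catch (e) {
  //     console.warn('No relay handshake yet, continuing without a relay address');
  //   }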

  /**
   * Returns the list of all members ordered by their process id.
   * @returns A map containing all members
   */
  public getAllMembersSorted(): Record<string, Member> {
    return Object.fromEntries(
      Object.entries(this.membersList).sort(([keyA], [keyB]) => keyA.localeCompare(keyB))
    );
  }

  public getAllMembers(): Record<string, Member> {
    return this.membersList;
  }

  public getAddressesForMemberId(memberId: string): string[] | null {
    try {
      return this.membersList[memberId].sp_addresses;
    } catch (e) {
      return null;
    }
  }

  public compareMembers(memberA: string[], memberB: string[]): boolean {
    if (!memberA || !memberB) {
      return false;
    }
    if (memberA.length !== memberB.length) {
      return false;
    }

    const res =
      memberA.every(item => memberB.includes(item)) &&
      memberB.every(item => memberA.includes(item));

    return res;
  }
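
  // compareMembers is an order-insensitive equality check between two address lists
  // (hypothetical `service` instance and placeholder addresses):
  //
  //   service.compareMembers(['sp1...a', 'sp1...b'], ['sp1...b', 'sp1...a']); // true
  //   service.compareMembers(['sp1...a'], ['sp1...a', 'sp1...a']);            // false, lengths differ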

  public async handleCommitError(response: string) {
    const content = JSON.parse(response);
    const error = content.error;
    const errorMsg = error['GenericError'];
    const dontRetry = [
      'State is identical to the previous state',
      'Not enough valid proofs',
      'Not enough members to validate',
    ];
    if (dontRetry.includes(errorMsg)) {
      return;
    }
    // Wait and retry
    setTimeout(async () => {
      this.sendCommitMessage(JSON.stringify(content));
    }, 1000);
  }

  public getRoles(process: Process): Record<string, RoleDefinition> | null {
    const lastCommitedState = this.getLastCommitedState(process);
    if (
      lastCommitedState &&
      lastCommitedState.roles &&
      Object.keys(lastCommitedState.roles).length != 0
    ) {
      return lastCommitedState!.roles;
    } else if (process.states.length === 2) {
      const firstState = process.states[0];
      if (firstState && firstState.roles && Object.keys(firstState.roles).length != 0) {
        return firstState!.roles;
      }
    }
    return null;
  }

  public getPublicData(process: Process): Record<string, any> | null {
    const lastCommitedState = this.getLastCommitedState(process);
    if (
      lastCommitedState &&
      lastCommitedState.public_data &&
      Object.keys(lastCommitedState.public_data).length != 0
    ) {
      return lastCommitedState!.public_data;
    } else if (process.states.length === 2) {
      const firstState = process.states[0];
      if (firstState && firstState.public_data && Object.keys(firstState.public_data).length != 0) {
        return firstState!.public_data;
      }
    }
    return null;
  }

  public getProcessName(process: Process): string | null {
    const lastCommitedState = this.getLastCommitedState(process);
    if (lastCommitedState && lastCommitedState.public_data) {
      const processName = lastCommitedState!.public_data['processName'];
      if (processName) {
        return this.decodeValue(processName);
      } else {
        return null;
      }
    } else {
      return null;
    }
  }

  public async getMyProcesses(): Promise<string[] | null> {
    // If we're not paired yet, just skip it
    let pairingProcessId = null;
    try {
      pairingProcessId = this.getPairingProcessId();
    } catch (e) {
      return null;
    }
    if (!pairingProcessId) {
      return null;
    }

    try {
      const processes = await this.getProcesses();

      const newMyProcesses = new Set<string>(this.myProcesses || []);
      // myProcesses automatically contains the pairing process
      newMyProcesses.add(pairingProcessId);
      for (const [processId, process] of Object.entries(processes)) {
        // We use the myProcesses attribute to avoid reevaluating all processes every time
        if (newMyProcesses.has(processId)) {
          continue;
        }
        try {
          const roles = this.getRoles(process);

          if (roles && this.rolesContainsUs(roles)) {
            newMyProcesses.add(processId);
          }
        } catch (e) {
          console.error(e);
        }
      }
      this.myProcesses = newMyProcesses; // atomic update
      return Array.from(this.myProcesses);
    } catch (e) {
      console.error('Failed to get processes:', e);
      return null;
    }
  }

  public async requestDataFromPeers(
    processId: string,
    stateIds: string[],
    roles: Record<string, RoleDefinition>[]
  ) {
    console.log('Requesting data from peers');
    const membersList = this.getAllMembers();
    try {
      // Convert objects to strings for WASM compatibility
      const rolesString = JSON.stringify(roles);
      const membersString = JSON.stringify(membersList);
      const stateIdsString = JSON.stringify(stateIds);

      const res = this.sdkClient.request_data(processId, stateIdsString, rolesString, membersString);
      await this.handleApiReturn(res);
    } catch (e) {
      console.error('Error requesting data from peers:', e);
      throw e;
    }
  }

  public hexToBlob(hexString: string): Blob {
    const uint8Array = this.hexToUInt8Array(hexString);

    return new Blob([new Uint8Array(uint8Array)], { type: 'application/octet-stream' });
  }

  public hexToUInt8Array(hexString: string): Uint8Array {
    if (hexString.length % 2 !== 0) {
      throw new Error('Invalid hex string: length must be even');
    }
    const uint8Array = new Uint8Array(hexString.length / 2);
    for (let i = 0; i < hexString.length; i += 2) {
      uint8Array[i / 2] = parseInt(hexString.substr(i, 2), 16);
    }

    return uint8Array;
  }

  public async blobToHex(blob: Blob): Promise<string> {
    const buffer = await blob.arrayBuffer();
    const bytes = new Uint8Array(buffer);
    return Array.from(bytes)
      .map(byte => byte.toString(16).padStart(2, '0'))
      .join('');
  }
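
  // Round-trip examples for the hex helpers above (assuming a `service` instance):
  //
  //   service.hexToUInt8Array('deadbeef');                // Uint8Array [0xde, 0xad, 0xbe, 0xef]
  //   await service.blobToHex(service.hexToBlob('00ff')); // '00ff'
  //   service.hexToUInt8Array('abc');                     // throws: length must be even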

  public getHashForFile(
    commitedIn: string,
    label: string,
    fileBlob: { type: string; data: Uint8Array }
  ): string {
    return this.sdkClient.hash_value(fileBlob, commitedIn, label);
  }

  public getMerkleProofForFile(
    processState: ProcessState,
    attributeName: string
  ): MerkleProofResult {
    return this.sdkClient.get_merkle_proof(processState, attributeName);
  }

  public validateMerkleProof(proof: MerkleProofResult, hash: string): boolean {
    try {
      return this.sdkClient.validate_merkle_proof(proof, hash);
    } catch (e) {
      throw new Error(`Failed to validate merkle proof: ${e}`);
    }
  }
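
  // Merkle proof sketch for the two methods above: a proof is generated from a process state for
  // one attribute, then checked against a hash. Passing the attribute's `pcd_commitment` entry as
  // that hash is an assumption here; the exact contract is defined by the SDK.
  //
  //   const proof = service.getMerkleProofForFile(state, attributeName);
  //   const ok = service.validateMerkleProof(proof, state.pcd_commitment[attributeName]);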

  public getLastCommitedState(process: Process): ProcessState | null {
    if (process.states.length === 0) return null;
    const processTip = process.states[process.states.length - 1].commited_in;
    const lastCommitedState = process.states.findLast(state => state.commited_in !== processTip);
    if (lastCommitedState) {
      return lastCommitedState;
    } else {
      return null;
    }
  }

  public getLastCommitedStateIndex(process: Process): number | null {
    if (process.states.length === 0) return null;
    const processTip = process.states[process.states.length - 1].commited_in;
    for (let i = process.states.length - 1; i >= 0; i--) {
      if (process.states[i].commited_in !== processTip) {
        return i;
      }
    }
    return null;
  }

  public getUncommitedStates(process: Process): ProcessState[] {
    if (process.states.length === 0) return [];
    const processTip = process.states[process.states.length - 1].commited_in;
    const res = process.states.filter(state => state.commited_in === processTip);
    return res.filter(state => state.state_id !== EMPTY32BYTES);
  }
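
  // Convention shared by the three helpers above: the `commited_in` value of the very last state
  // is treated as the process tip. States sharing that tip are uncommitted (minus the
  // EMPTY32BYTES placeholder), and the most recent state with a different `commited_in` is the
  // last committed one.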

  public getStateFromId(process: Process, stateId: string): ProcessState | null {
    if (process.states.length === 0) return null;
    const state = process.states.find(state => state.state_id === stateId);
    if (state) {
      return state;
    } else {
      return null;
    }
  }

  public getNextStateAfterId(process: Process, stateId: string): ProcessState | null {
    if (process.states.length === 0) return null;

    const index = process.states.findIndex(state => state.state_id === stateId);

    if (index !== -1 && index < process.states.length - 1) {
      return process.states[index + 1];
    }

    return null;
  }

  public isPairingProcess(roles: Record<string, RoleDefinition>): boolean {
    if (Object.keys(roles).length != 1) {
      return false;
    }
    const pairingRole = roles['pairing'];
    if (pairingRole) {
      // For now that's enough, we should probably test more things
      return true;
    } else {
      return false;
    }
  }

  public async updateMemberPublicName(process: Process, newName: string): Promise<ApiReturn> {
    const publicData = {
      memberPublicName: newName,
    };

    return await this.updateProcess(process, {}, publicData, null);
  }
}