diff --git a/public/database.worker.js b/public/database.worker.js index a9e0548..3feb189 100755 --- a/public/database.worker.js +++ b/public/database.worker.js @@ -1,281 +1,106 @@ -const EMPTY32BYTES = String('').padStart(64, '0'); - -self.addEventListener('install', (event) => { - event.waitUntil(self.skipWaiting()); // Activate worker immediately -}); - -self.addEventListener('activate', (event) => { - event.waitUntil(self.clients.claim()); // Become available to all pages -}); - -// Event listener for messages from clients -self.addEventListener('message', async (event) => { - const data = event.data; - console.log(data); - - if (data.type === 'SCAN') { - try { - const myProcessesId = data.payload; - if (myProcessesId && myProcessesId.length != 0) { - const toDownload = await scanMissingData(myProcessesId); - if (toDownload.length != 0) { - console.log('Sending TO_DOWNLOAD message'); - event.source.postMessage({ type: 'TO_DOWNLOAD', data: toDownload}); - } - } else { - event.source.postMessage({ status: 'error', message: 'Empty lists' }); - } - } catch (error) { - event.source.postMessage({ status: 'error', message: error.message }); - } - } else if (data.type === 'ADD_OBJECT') { - try { - const { storeName, object, key } = data.payload; - const db = await openDatabase(); - const tx = db.transaction(storeName, 'readwrite'); - const store = tx.objectStore(storeName); - - if (key) { - await store.put(object, key); - } else { - await store.put(object); - } - - event.ports[0].postMessage({ status: 'success', message: '' }); - } catch (error) { - event.ports[0].postMessage({ status: 'error', message: error.message }); - } - } else if (data.type === 'BATCH_WRITING') { - const { storeName, objects } = data.payload; - const db = await openDatabase(); - const tx = db.transaction(storeName, 'readwrite'); - const store = tx.objectStore(storeName); - - for (const { key, object } of objects) { - if (key) { - await store.put(object, key); - } else { - await 
store.put(object); - } - } - - await tx.done; - } -}); - -async function scanMissingData(processesToScan) { - console.log('Scanning for missing data...'); - const myProcesses = await getProcesses(processesToScan); - - let toDownload = new Set(); - // Iterate on each process - if (myProcesses && myProcesses.length != 0) { - for (const process of myProcesses) { - // Iterate on states - const firstState = process.states[0]; - const processId = firstState.commited_in; - for (const state of process.states) { - if (state.state_id === EMPTY32BYTES) continue; - // iterate on pcd_commitment - for (const [field, hash] of Object.entries(state.pcd_commitment)) { - // Skip public fields - if (state.public_data[field] !== undefined || field === 'roles') continue; - // Check if we have the data in db - const existingData = await getBlob(hash); - if (!existingData) { - toDownload.add(hash); - // We also add an entry in diff, in case it doesn't already exist - await addDiff(processId, state.state_id, hash, state.roles, field); - } else { - // We remove it if we have it in the set - if (toDownload.delete(hash)) { - console.log(`Removing ${hash} from the set`); - } - } - } - } - } - } - - console.log(toDownload); - return Array.from(toDownload); -} - -async function openDatabase() { - return new Promise((resolve, reject) => { - const request = indexedDB.open('4nk', 1); - request.onerror = (event) => { - reject(request.error); - }; - request.onsuccess = (event) => { - resolve(request.result); - }; - request.onupgradeneeded = (event) => { - const db = event.target.result; - if (!db.objectStoreNames.contains('wallet')) { - db.createObjectStore('wallet', { keyPath: 'pre_id' }); - } - }; - }); -} - -// Function to get all processes because it is asynchronous -async function getAllProcesses() { - const db = await openDatabase(); - return new Promise((resolve, reject) => { - if (!db) { - reject(new Error('Database is not available')); - return; - } - const tx = db.transaction('processes', 
'readonly'); - const store = tx.objectStore('processes'); - const request = store.getAll(); - - request.onsuccess = () => { - resolve(request.result); - }; - - request.onerror = () => { - reject(request.error); - }; - }); -}; - -async function getProcesses(processIds) { - if (!processIds || processIds.length === 0) { - return []; - } - - const db = await openDatabase(); - if (!db) { - throw new Error('Database is not available'); - } - - const tx = db.transaction('processes', 'readonly'); - const store = tx.objectStore('processes'); - - const requests = Array.from(processIds).map((processId) => { - return new Promise((resolve) => { - const request = store.get(processId); - request.onsuccess = () => resolve(request.result); - request.onerror = () => { - console.error(`Error fetching process ${processId}:`, request.error); - resolve(undefined); - }; - }); - }); - - const results = await Promise.all(requests); - return results.filter(result => result !== undefined); -} - -async function getAllDiffsNeedValidation() { - const db = await openDatabase(); - - const allProcesses = await getAllProcesses(); - const tx = db.transaction('diffs', 'readonly'); - const store = tx.objectStore('diffs'); - - return new Promise((resolve, reject) => { - const request = store.getAll(); - request.onsuccess = (event) => { - const allItems = event.target.result; - const itemsWithFlag = allItems.filter((item) => item.need_validation); - - const processMap = {}; - - for (const diff of itemsWithFlag) { - const currentProcess = allProcesses.find((item) => { - return item.states.some((state) => state.merkle_root === diff.new_state_merkle_root); - }); - - if (currentProcess) { - const processKey = currentProcess.merkle_root; - - if (!processMap[processKey]) { - processMap[processKey] = { - process: currentProcess.states, - processId: currentProcess.key, - diffs: [], - }; - } - processMap[processKey].diffs.push(diff); - } - } - - const results = Object.values(processMap).map((entry) => { - const 
diffs = [] - for(const state of entry.process) { - const filteredDiff = entry.diffs.filter(diff => diff.new_state_merkle_root === state.merkle_root); - if(filteredDiff && filteredDiff.length) { - diffs.push(filteredDiff) - } - } - return { - process: entry.process, - processId: entry.processId, - diffs: diffs, - }; - }); - - resolve(results); - }; - - request.onerror = (event) => { - reject(event.target.error); - }; - }); -} - -async function getBlob(hash) { - const db = await openDatabase(); - const storeName = 'data'; - const tx = db.transaction(storeName, 'readonly'); - const store = tx.objectStore(storeName); - const result = await new Promise((resolve, reject) => { - const getRequest = store.get(hash); - getRequest.onsuccess = () => resolve(getRequest.result); - getRequest.onerror = () => reject(getRequest.error); - }); - return result; -} - -async function addDiff(processId, stateId, hash, roles, field) { - const db = await openDatabase(); - const storeName = 'diffs'; - const tx = db.transaction(storeName, 'readwrite'); - const store = tx.objectStore(storeName); - - // Check if the diff already exists - const existingDiff = await new Promise((resolve, reject) => { - const getRequest = store.get(hash); - getRequest.onsuccess = () => resolve(getRequest.result); - getRequest.onerror = () => reject(getRequest.error); - }); - - if (!existingDiff) { - const newDiff = { - process_id: processId, - state_id: stateId, - value_commitment: hash, - roles: roles, - field: field, - description: null, - previous_value: null, - new_value: null, - notify_user: false, - need_validation: false, - validation_status: 'None' - }; - - const insertResult = await new Promise((resolve, reject) => { - const putRequest = store.put(newDiff); - putRequest.onsuccess = () => resolve(putRequest.result); - putRequest.onerror = () => reject(putRequest.error); - }); - - return insertResult; - } - - return existingDiff; -} +const EMPTY32BYTES = String('').padStart(64, '0'); + 
+self.addEventListener('install', (event) => { + event.waitUntil(self.skipWaiting()); // Activate worker immediately +}); + +self.addEventListener('activate', (event) => { + event.waitUntil(self.clients.claim()); // Become available to all pages +}); + +// Event listener for messages from clients +self.addEventListener('message', async (event) => { + const data = event.data; + console.log(data); + + if (data.type === 'SCAN') { + try { + const myProcessesId = data.payload; + if (myProcessesId && myProcessesId.length != 0) { + const toDownload = await scanMissingData(myProcessesId); + if (toDownload.length != 0) { + console.log('Sending TO_DOWNLOAD message'); + event.source.postMessage({ type: 'TO_DOWNLOAD', data: toDownload}); + } + } else { + event.source.postMessage({ status: 'error', message: 'Empty lists' }); + } + } catch (error) { + event.source.postMessage({ status: 'error', message: error.message }); + } + } else if (data.type === 'ADD_OBJECT') { + try { + const { storeName, object, key } = data.payload; + const db = await openDatabase(); + const tx = db.transaction(storeName, 'readwrite'); + const store = tx.objectStore(storeName); + + if (key) { + await store.put(object, key); + } else { + await store.put(object); + } + + event.ports[0].postMessage({ status: 'success', message: '' }); + } catch (error) { + event.ports[0].postMessage({ status: 'error', message: error.message }); + } + } else if (data.type === 'BATCH_WRITING') { + const { storeName, objects } = data.payload; + const db = await openDatabase(); + const tx = db.transaction(storeName, 'readwrite'); + const store = tx.objectStore(storeName); + + for (const { key, object } of objects) { + if (key) { + await store.put(object, key); + } else { + await store.put(object); + } + } + + await tx.done; + } +}); + +async function scanMissingData(processesToScan) { + console.log('Scanning for missing data...'); + const myProcesses = await getProcesses(processesToScan); + + let toDownload = new Set(); + // 
Iterate on each process + if (myProcesses && myProcesses.length != 0) { + for (const process of myProcesses) { + // Iterate on states + const firstState = process.states[0]; + const processId = firstState.commited_in; + for (const state of process.states) { + if (state.state_id === EMPTY32BYTES) continue; + // iterate on pcd_commitment + for (const [field, hash] of Object.entries(state.pcd_commitment)) { + // Skip public fields + if (state.public_data[field] !== undefined || field === 'roles') continue; + // Check if we have the data in db + const existingData = await getBlob(hash); + if (!existingData) { + toDownload.add(hash); + // We also add an entry in diff, in case it doesn't already exist + await addDiff(processId, state.state_id, hash, state.roles, field); + } else { + // We remove it if we have it in the set + if (toDownload.delete(hash)) { + console.log(`Removing ${hash} from the set`); + } + } + } + } + } + } + + console.log('[Service Worker] Scan complete:', { toDownload: toDownload.size }); + return Array.from(toDownload); +} +