Add UPDATE_PROCESSES logic to the worker
parent 51664e62ec
commit 3813f542f2
@@ -1,3 +1,6 @@
+let processesToScan = new Set();
+let toDownload = [];
+
 self.addEventListener('install', (event) => {
   event.waitUntil(self.skipWaiting()); // Activate worker immediately
 });
@@ -11,7 +14,7 @@ self.addEventListener('message', async (event) => {
   const data = event.data;
   if (data.type === 'START') {
     const fetchNotifications = async () => {
-      const itemsWithFlag = await getAllItemsWithFlag();
+      const itemsWithFlag = await getAllDiffsNeedValidation();

       // Process items with the specific flag
       itemsWithFlag?.forEach((item) => {
@@ -23,64 +26,55 @@ self.addEventListener('message', async (event) => {
         data: itemsWithFlag,
       });
     };
-    fetchNotifications();
-    setInterval(fetchNotifications, 2 * 60 * 1000);
+    const scanMissingData = async () => {
+      console.log('Scanning for missing data...');
+      const myProcesses = await getProcesses(processesToScan);

+      let toDownload = [];
+      // Iterate on each process
+      if (myProcesses && myProcesses.length != 0) {
+        console.log(myProcesses);
+        for (const process of myProcesses) {
+          console.log(process);
+          // Iterate on states
+          for (const state of process.states) {
+            if (!state.pcd_commitment) continue;
+            console.log(state);
+            // iterate on pcd_commitment
+            for (const hash of Object.values(state.pcd_commitment)) {
+              console.log(hash);
+              toDownload.push(hash);
+            }
           }

-  if (data.type === 'SCAN_PROCESS') {
-    try {
-      const { myProcessesId } = data.payload;
-      const db = await openDatabase();
-
-      // Create an array to store all the process promises
-      const processPromises = myProcessesId.map(async (processId) => {
-        // Fetch the process
-        const process = await new Promise((resolve, reject) => {
-          const tx = db.transaction('processes', 'readonly');
-          const store = tx.objectStore('processes');
-          const request = store.get(processId);
-          request.onsuccess = () => resolve(request.result);
-          request.onerror = () => reject(request.error);
-        });
-
-        if (!process || !process.states || process.states.length === 0) {
-          throw new Error(`Process ${processId} not found or invalid`);
         }
-
-        // Fetch the diffs for each state
-        const diffPromises = process.states.map(async (state) => {
-          return new Promise((resolve, reject) => {
-            const tx = db.transaction('diffs', 'readonly');
-            const store = tx.objectStore('diffs');
-            for (const hash of state.pcd_commitment) {
-              const request = store.get(hash);
-
-              request.onsuccess = () => resolve(request.result);
-              request.onerror = () => reject(request.error);
             }
-          });
-        });
-
-        const diffs = await Promise.all(diffPromises);
-        process.diffs = diffs.filter(diff => diff != null);
-
-
-        return process;
-      });
-
-      const results = await Promise.all(processPromises);
-
       event.ports[0].postMessage({
-        status: 'success',
-        message: 'All processes scanned',
-        data: results
+        type: 'TO_DOWNLOAD',
+        data: toDownload,
       });
+    }
+    fetchNotifications();
+    setInterval(fetchNotifications, 2 * 60 * 1000);
+    scanMissingData();
+    setInterval(scanMissingData, 2 * 1000);
+  }

+  if (data.type === 'UPDATE_PROCESSES') {
+    try {
+      const { processIds } = data.payload;
+      console.log(processIds);
+      if (processIds && processIds.length != 0) {
+        for (const processId of processIds) {
+          processesToScan.add(processId);
+        }
+        console.log(processesToScan);
+      } else {
+        event.ports[0].postMessage({ status: 'error', message: 'Empty lists' });
+      }
     } catch (error) {
       event.ports[0].postMessage({ status: 'error', message: error.message });
     }
-    setInterval(fetchNotifications, 2 * 1000);
   }

   if (data.type === 'ADD_OBJECT') {
@@ -123,31 +117,55 @@ async function openDatabase() {

 // Function to get all processes because it is asynchronous
 async function getAllProcesses() {
+  const db = await openDatabase();
   return new Promise((resolve, reject) => {
+    if (!db) {
+      reject(new Error('Database is not available'));
+      return;
+    }
     const tx = db.transaction('processes', 'readonly');
     const store = tx.objectStore('processes');
-    // const request = store.openCursor();
-    // const processes = [];
+    const request = store.getAll();

-    request.onsuccess = (event) => {
-      // const cursor = event.target.result;
-      // if (cursor) {
-      // processes.push({ key: cursor.key, ...cursor.value });
-      // cursor.continue();
-      // } else {
-      // resolve(processes);
-      // }
-      const allProcesses = store.getAll();
-      resolve(allProcesses);
+    request.onsuccess = () => {
+      resolve(request.result);
     };

-    request.onerror = (event) => {
-      reject(event.target.error);
+    request.onerror = () => {
+      reject(request.error);
     };
   });
 };

-async function getAllItemsWithFlag() {
+async function getProcesses(processIds) {
+  if (!processIds || processIds.length === 0) {
+    return [];
+  }
+
+  const db = await openDatabase();
+  if (!db) {
+    throw new Error('Database is not available');
+  }
+
+  const tx = db.transaction('processes', 'readonly');
+  const store = tx.objectStore('processes');
+
+  const requests = Array.from(processIds).map((processId) => {
+    return new Promise((resolve) => {
+      const request = store.get(processId);
+      request.onsuccess = () => resolve(request.result);
+      request.onerror = () => {
+        console.error(`Error fetching process ${processId}:`, request.error);
+        resolve(undefined);
+      };
+    });
+  });
+
+  const results = await Promise.all(requests);
+  return results.filter(result => result !== undefined);
+}
+
+async function getAllDiffsNeedValidation() {
   const db = await openDatabase();

   const allProcesses = await getAllProcesses();
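Note for reviewers: every branch of the message handler above replies on event.ports[0], so the page has to transfer a MessagePort along with each postMessage. A minimal sketch of how a client page might drive the new UPDATE_PROCESSES path and then start the scan loop; the controller lookup and the example process IDs are illustrative assumptions, not part of this commit:

    // Sketch only: assumes the worker above is already registered and controlling the page.
    const worker = navigator.serviceWorker.controller;

    // The worker answers on event.ports[0], i.e. the port2 transferred below.
    const updateChannel = new MessageChannel();
    updateChannel.port1.onmessage = (event) => {
      if (event.data.status === 'error') console.error('Worker error:', event.data.message);
    };
    worker.postMessage(
      { type: 'UPDATE_PROCESSES', payload: { processIds: ['process-1', 'process-2'] } }, // example IDs
      [updateChannel.port2]
    );

    // START kicks off fetchNotifications and scanMissingData; TO_DOWNLOAD replies arrive here.
    // A MessagePort can only be transferred once, hence a fresh channel for this message.
    const startChannel = new MessageChannel();
    startChannel.port1.onmessage = (event) => {
      if (event.data.type === 'TO_DOWNLOAD') console.log('Hashes to download:', event.data.data);
    };
    worker.postMessage({ type: 'START' }, [startChannel.port2]);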
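The new getProcesses and getAllDiffsNeedValidation helpers also rely on the pre-existing openDatabase() function named in the last hunk header, which is not shown in this diff. For orientation only, an IndexedDB wrapper of the shape this code expects, using the 'processes' and 'diffs' stores referenced above (the database name and version are placeholders, not taken from the commit):

    function openDatabase() {
      return new Promise((resolve, reject) => {
        const request = indexedDB.open('app-db', 1); // name and version are assumptions
        request.onupgradeneeded = () => {
          const db = request.result;
          // Object stores referenced by the worker code in this diff
          if (!db.objectStoreNames.contains('processes')) db.createObjectStore('processes');
          if (!db.objectStoreNames.contains('diffs')) db.createObjectStore('diffs');
        };
        request.onsuccess = () => resolve(request.result);
        request.onerror = () => reject(request.error);
      });
    }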