Merge remote changes and resolve conflicts

Commit: 094e98a3a6

Binary file not shown.
@ -28,22 +28,133 @@ function calculateFileHash(buffer) {
|
||||
return crypto.createHash('sha256').update(buffer).digest('hex')
|
||||
}
|
||||
|
||||
// Fonction pour vérifier si un fichier existe déjà par hash
|
||||
function findExistingFileByHash(hash) {
|
||||
const uploadDir = 'uploads/'
|
||||
if (!fs.existsSync(uploadDir)) return null
|
||||
// Fonction pour générer un hash de dossier
|
||||
function generateFolderHash() {
|
||||
return crypto.randomBytes(16).toString('hex')
|
||||
}
|
||||
|
||||
const files = fs.readdirSync(uploadDir)
|
||||
for (const file of files) {
|
||||
const filePath = path.join(uploadDir, file)
|
||||
// Fonction pour obtenir le type MIME à partir de l'extension
|
||||
function getMimeType(ext) {
|
||||
const mimeTypes = {
|
||||
'.pdf': 'application/pdf',
|
||||
'.jpg': 'image/jpeg',
|
||||
'.jpeg': 'image/jpeg',
|
||||
'.png': 'image/png',
|
||||
'.tiff': 'image/tiff'
|
||||
}
|
||||
return mimeTypes[ext.toLowerCase()] || 'application/octet-stream'
|
||||
}
|
||||
|
||||
// Fonction pour créer la structure de dossiers
|
||||
function createFolderStructure(folderHash) {
|
||||
console.log(`[FOLDER] Création de la structure pour le hash: ${folderHash}`)
|
||||
console.log(`[FOLDER] Répertoire de travail: ${process.cwd()}`)
|
||||
|
||||
// Créer les dossiers racines s'ils n'existent pas
|
||||
const uploadsDir = 'uploads'
|
||||
const cacheDir = 'cache'
|
||||
|
||||
console.log(`[FOLDER] Vérification de l'existence de ${uploadsDir}: ${fs.existsSync(uploadsDir)}`)
|
||||
console.log(`[FOLDER] Vérification de l'existence de ${cacheDir}: ${fs.existsSync(cacheDir)}`)
|
||||
|
||||
if (!fs.existsSync(uploadsDir)) {
|
||||
fs.mkdirSync(uploadsDir, { recursive: true })
|
||||
console.log(`[FOLDER] Dossier racine créé: ${uploadsDir}`)
|
||||
}
|
||||
if (!fs.existsSync(cacheDir)) {
|
||||
fs.mkdirSync(cacheDir, { recursive: true })
|
||||
console.log(`[FOLDER] Dossier racine créé: ${cacheDir}`)
|
||||
}
|
||||
|
||||
const folderPath = path.join(uploadsDir, folderHash)
|
||||
const cachePath = path.join(cacheDir, folderHash)
|
||||
|
||||
console.log(`[FOLDER] Chemin du dossier uploads: ${folderPath}`)
|
||||
console.log(`[FOLDER] Chemin du dossier cache: ${cachePath}`)
|
||||
|
||||
if (!fs.existsSync(folderPath)) {
|
||||
fs.mkdirSync(folderPath, { recursive: true })
|
||||
console.log(`[FOLDER] Dossier uploads créé: ${folderPath}`)
|
||||
}
|
||||
if (!fs.existsSync(cachePath)) {
|
||||
fs.mkdirSync(cachePath, { recursive: true })
|
||||
console.log(`[FOLDER] Dossier cache créé: ${cachePath}`)
|
||||
}
|
||||
|
||||
return { folderPath, cachePath }
|
||||
}
|
||||
|
||||
// Fonction pour sauvegarder le cache JSON dans un dossier spécifique
|
||||
function saveJsonCacheInFolder(folderHash, fileHash, result) {
|
||||
const { cachePath } = createFolderStructure(folderHash)
|
||||
const cacheFile = path.join(cachePath, `${fileHash}.json`)
|
||||
|
||||
try {
|
||||
fs.writeFileSync(cacheFile, JSON.stringify(result, null, 2))
|
||||
console.log(`[CACHE] Résultat sauvegardé dans le dossier ${folderHash}: ${fileHash}`)
|
||||
return true
|
||||
} catch (error) {
|
||||
console.error(`[CACHE] Erreur lors de la sauvegarde dans le dossier ${folderHash}:`, error)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Fonction pour récupérer le cache JSON depuis un dossier spécifique
|
||||
function getJsonCacheFromFolder(folderHash, fileHash) {
|
||||
const cachePath = path.join('cache', folderHash)
|
||||
const cacheFile = path.join(cachePath, `${fileHash}.json`)
|
||||
|
||||
if (fs.existsSync(cacheFile)) {
|
||||
try {
|
||||
const fileBuffer = fs.readFileSync(filePath)
|
||||
const fileHash = calculateFileHash(fileBuffer)
|
||||
if (fileHash === hash) {
|
||||
return { path: filePath, name: file }
|
||||
}
|
||||
const data = fs.readFileSync(cacheFile, 'utf8')
|
||||
const result = JSON.parse(data)
|
||||
console.log(`[CACHE] Résultat récupéré depuis le dossier ${folderHash}: ${fileHash}`)
|
||||
return result
|
||||
} catch (error) {
|
||||
console.warn(`[HASH] Erreur lors de la lecture de ${file}:`, error.message)
|
||||
console.error(`[CACHE] Erreur lors de la lecture depuis le dossier ${folderHash}:`, error)
|
||||
return null
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
// Fonction pour lister tous les résultats d'un dossier
|
||||
function listFolderResults(folderHash) {
|
||||
const cachePath = path.join('cache', folderHash)
|
||||
if (!fs.existsSync(cachePath)) {
|
||||
return []
|
||||
}
|
||||
|
||||
const files = fs.readdirSync(cachePath)
|
||||
const results = []
|
||||
|
||||
for (const file of files) {
|
||||
if (file.endsWith('.json')) {
|
||||
const fileHash = path.basename(file, '.json')
|
||||
const result = getJsonCacheFromFolder(folderHash, fileHash)
|
||||
if (result) {
|
||||
results.push({
|
||||
fileHash,
|
||||
...result
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// Fonction pour vérifier si un fichier existe déjà par hash dans un dossier
|
||||
function findExistingFileByHash(hash, folderHash) {
|
||||
const folderPath = path.join('uploads', folderHash)
|
||||
if (!fs.existsSync(folderPath)) return null
|
||||
|
||||
const files = fs.readdirSync(folderPath)
|
||||
for (const file of files) {
|
||||
// Vérifier si le nom de fichier commence par le hash
|
||||
if (file.startsWith(hash)) {
|
||||
const filePath = path.join(folderPath, file)
|
||||
return { path: filePath, name: file }
|
||||
}
|
||||
}
|
||||
return null
|
||||
@ -108,7 +219,7 @@ function listCacheFiles() {
|
||||
}).filter(file => file !== null)
|
||||
}
|
||||
|
||||
// Configuration multer pour l'upload de fichiers avec gestion des doublons
|
||||
// Configuration multer pour l'upload de fichiers avec hash comme nom
|
||||
const storage = multer.diskStorage({
|
||||
destination: (req, file, cb) => {
|
||||
const uploadDir = 'uploads/'
|
||||
@ -118,11 +229,10 @@ const storage = multer.diskStorage({
|
||||
cb(null, uploadDir)
|
||||
},
|
||||
filename: (req, file, cb) => {
|
||||
// Utiliser le nom original avec timestamp pour éviter les conflits
|
||||
// Utiliser un nom temporaire, le hash sera calculé après
|
||||
const timestamp = Date.now()
|
||||
const ext = path.extname(file.originalname)
|
||||
const name = path.basename(file.originalname, ext)
|
||||
cb(null, `${name}-${timestamp}${ext}`)
|
||||
cb(null, `temp-${timestamp}${ext}`)
|
||||
}
|
||||
})
|
||||
|
||||
@ -728,17 +838,23 @@ app.post('/api/extract', upload.single('document'), async (req, res) => {
|
||||
return res.status(400).json({ error: 'Aucun fichier fourni' })
|
||||
}
|
||||
|
||||
console.log(`[API] Traitement du fichier: ${req.file.originalname}`)
|
||||
// Récupérer le hash du dossier depuis les paramètres de requête
|
||||
const folderHash = req.body.folderHash || req.query.folderHash
|
||||
if (!folderHash) {
|
||||
return res.status(400).json({ error: 'Hash du dossier requis' })
|
||||
}
|
||||
|
||||
console.log(`[API] Traitement du fichier: ${req.file.originalname} dans le dossier: ${folderHash}`)
|
||||
|
||||
// Calculer le hash du fichier uploadé
|
||||
const fileBuffer = fs.readFileSync(req.file.path)
|
||||
const fileHash = calculateFileHash(fileBuffer)
|
||||
console.log(`[HASH] Hash du fichier: ${fileHash.substring(0, 16)}...`)
|
||||
|
||||
// Vérifier d'abord le cache JSON
|
||||
const cachedResult = getJsonCache(fileHash)
|
||||
// Vérifier d'abord le cache JSON dans le dossier
|
||||
const cachedResult = getJsonCacheFromFolder(folderHash, fileHash)
|
||||
if (cachedResult) {
|
||||
console.log(`[CACHE] Utilisation du résultat en cache`)
|
||||
console.log(`[CACHE] Utilisation du résultat en cache du dossier ${folderHash}`)
|
||||
|
||||
// Supprimer le fichier temporaire
|
||||
fs.unlinkSync(req.file.path)
|
||||
@ -747,13 +863,13 @@ app.post('/api/extract', upload.single('document'), async (req, res) => {
|
||||
return res.json(cachedResult)
|
||||
}
|
||||
|
||||
// Vérifier si un fichier avec le même hash existe déjà
|
||||
const existingFile = findExistingFileByHash(fileHash)
|
||||
// Vérifier si un fichier avec le même hash existe déjà dans le dossier
|
||||
const existingFile = findExistingFileByHash(fileHash, folderHash)
|
||||
let isDuplicate = false
|
||||
let duplicatePath = null
|
||||
|
||||
if (existingFile) {
|
||||
console.log(`[HASH] Fichier déjà existant trouvé: ${existingFile.name}`)
|
||||
console.log(`[HASH] Fichier déjà existant trouvé dans le dossier ${folderHash}: ${existingFile.name}`)
|
||||
isDuplicate = true
|
||||
|
||||
// Sauvegarder le chemin du doublon pour suppression ultérieure
|
||||
@ -763,7 +879,24 @@ app.post('/api/extract', upload.single('document'), async (req, res) => {
|
||||
req.file.path = existingFile.path
|
||||
req.file.originalname = existingFile.name
|
||||
} else {
|
||||
console.log(`[HASH] Nouveau fichier, traitement normal`)
|
||||
console.log(`[HASH] Nouveau fichier, renommage avec hash dans le dossier ${folderHash}`)
|
||||
|
||||
// Créer la structure du dossier si elle n'existe pas
|
||||
const { folderPath } = createFolderStructure(folderHash)
|
||||
|
||||
// Renommer le fichier avec son hash + extension dans le dossier
|
||||
const ext = path.extname(req.file.originalname)
|
||||
const newFileName = `${fileHash}${ext}`
|
||||
const newFilePath = path.join(folderPath, newFileName)
|
||||
|
||||
// Renommer le fichier
|
||||
fs.renameSync(req.file.path, newFilePath)
|
||||
|
||||
// Mettre à jour les informations du fichier
|
||||
req.file.path = newFilePath
|
||||
req.file.filename = newFileName
|
||||
|
||||
console.log(`[HASH] Fichier renommé: ${newFileName}`)
|
||||
}
|
||||
|
||||
let ocrResult
|
||||
@ -796,8 +929,8 @@ app.post('/api/extract', upload.single('document'), async (req, res) => {
|
||||
// Génération du format JSON standard
|
||||
const result = generateStandardJSON(req.file, ocrResult, entities, processingTime)
|
||||
|
||||
// Sauvegarder le résultat dans le cache
|
||||
saveJsonCache(fileHash, result)
|
||||
// Sauvegarder le résultat dans le cache du dossier
|
||||
saveJsonCacheInFolder(folderHash, fileHash, result)
|
||||
|
||||
// Nettoyage du fichier temporaire
|
||||
if (isDuplicate) {
|
||||
@ -854,6 +987,32 @@ app.get('/api/test-files', (req, res) => {
|
||||
}
|
||||
})
|
||||
|
||||
// Route pour servir un fichier de test individuel
|
||||
app.get('/api/test-files/:filename', (req, res) => {
|
||||
try {
|
||||
const filename = req.params.filename
|
||||
const testFilesDir = path.join(__dirname, '..', 'test-files')
|
||||
const filePath = path.join(testFilesDir, filename)
|
||||
|
||||
// Vérifier que le fichier existe et est dans le bon répertoire
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return res.status(404).json({ success: false, error: 'Fichier non trouvé' })
|
||||
}
|
||||
|
||||
// Vérifier que le fichier est bien dans le répertoire test-files (sécurité)
|
||||
const resolvedPath = path.resolve(filePath)
|
||||
const resolvedTestDir = path.resolve(testFilesDir)
|
||||
if (!resolvedPath.startsWith(resolvedTestDir)) {
|
||||
return res.status(403).json({ success: false, error: 'Accès non autorisé' })
|
||||
}
|
||||
|
||||
// Servir le fichier
|
||||
res.sendFile(filePath)
|
||||
} catch (error) {
|
||||
res.status(500).json({ success: false, error: error.message })
|
||||
}
|
||||
})
|
||||
|
||||
// Route de santé
|
||||
// Route pour lister les fichiers uploadés avec leurs hash
|
||||
app.get('/api/uploads', (req, res) => {
|
||||
@ -868,8 +1027,10 @@ app.get('/api/uploads', (req, res) => {
|
||||
const filePath = path.join(uploadDir, file)
|
||||
try {
|
||||
const stats = fs.statSync(filePath)
|
||||
const fileBuffer = fs.readFileSync(filePath)
|
||||
const hash = calculateFileHash(fileBuffer)
|
||||
|
||||
// Extraire le hash du nom de fichier (format: hash.extension)
|
||||
const ext = path.extname(file)
|
||||
const hash = path.basename(file, ext)
|
||||
|
||||
return {
|
||||
name: file,
|
||||
@ -947,6 +1108,162 @@ app.delete('/api/cache/:hash', (req, res) => {
|
||||
}
|
||||
})
|
||||
|
||||
// Route pour créer un nouveau dossier
|
||||
app.post('/api/folders', (req, res) => {
|
||||
try {
|
||||
console.log('[FOLDER] Début de la création d\'un nouveau dossier')
|
||||
const folderHash = generateFolderHash()
|
||||
console.log(`[FOLDER] Hash généré: ${folderHash}`)
|
||||
|
||||
const result = createFolderStructure(folderHash)
|
||||
console.log(`[FOLDER] Structure créée:`, result)
|
||||
|
||||
console.log(`[FOLDER] Nouveau dossier créé: ${folderHash}`)
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
folderHash,
|
||||
message: 'Dossier créé avec succès'
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('[FOLDER] Erreur lors de la création du dossier:', error)
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: error.message
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
// Route pour récupérer les résultats d'un dossier
|
||||
app.get('/api/folders/:folderHash/results', (req, res) => {
|
||||
try {
|
||||
const { folderHash } = req.params
|
||||
const results = listFolderResults(folderHash)
|
||||
|
||||
console.log(`[FOLDER] Résultats récupérés pour le dossier ${folderHash}: ${results.length} fichiers`)
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
folderHash,
|
||||
results,
|
||||
count: results.length
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('[FOLDER] Erreur lors de la récupération des résultats:', error)
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: error.message
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
// Route pour récupérer un fichier original depuis un dossier
|
||||
app.get('/api/folders/:folderHash/files/:fileHash', (req, res) => {
|
||||
try {
|
||||
const { folderHash, fileHash } = req.params
|
||||
const folderPath = path.join('uploads', folderHash)
|
||||
|
||||
if (!fs.existsSync(folderPath)) {
|
||||
return res.status(404).json({ success: false, error: 'Dossier non trouvé' })
|
||||
}
|
||||
|
||||
const files = fs.readdirSync(folderPath)
|
||||
const targetFile = files.find(file => file.startsWith(fileHash))
|
||||
|
||||
if (!targetFile) {
|
||||
return res.status(404).json({ success: false, error: 'Fichier non trouvé' })
|
||||
}
|
||||
|
||||
const filePath = path.join(folderPath, targetFile)
|
||||
res.sendFile(path.resolve(filePath))
|
||||
} catch (error) {
|
||||
console.error('[FOLDER] Erreur lors de la récupération du fichier:', error)
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: error.message
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
// Route pour créer le dossier par défaut avec les fichiers de test
|
||||
app.post('/api/folders/default', async (req, res) => {
|
||||
try {
|
||||
const folderHash = generateFolderHash()
|
||||
const { folderPath, cachePath } = createFolderStructure(folderHash)
|
||||
|
||||
console.log(`[FOLDER] Création du dossier par défaut: ${folderHash}`)
|
||||
|
||||
// Charger les fichiers de test dans le dossier
|
||||
const testFilesDir = path.join(__dirname, '..', 'test-files')
|
||||
if (fs.existsSync(testFilesDir)) {
|
||||
const testFiles = fs.readdirSync(testFilesDir)
|
||||
const supportedFiles = testFiles.filter(file =>
|
||||
['.pdf', '.jpg', '.jpeg', '.png', '.tiff'].includes(path.extname(file).toLowerCase())
|
||||
)
|
||||
|
||||
for (const testFile of supportedFiles) {
|
||||
const sourcePath = path.join(testFilesDir, testFile)
|
||||
const fileBuffer = fs.readFileSync(sourcePath)
|
||||
const fileHash = calculateFileHash(fileBuffer)
|
||||
const ext = path.extname(testFile)
|
||||
const newFileName = `${fileHash}${ext}`
|
||||
const destPath = path.join(folderPath, newFileName)
|
||||
|
||||
// Copier le fichier
|
||||
fs.copyFileSync(sourcePath, destPath)
|
||||
|
||||
// Traiter le fichier et sauvegarder le résultat
|
||||
try {
|
||||
console.log(`[FOLDER] Traitement de ${testFile}...`)
|
||||
|
||||
// Simuler un objet req.file pour la logique existante
|
||||
const mockFile = {
|
||||
path: destPath,
|
||||
originalname: testFile,
|
||||
mimetype: getMimeType(ext)
|
||||
}
|
||||
|
||||
// Extraction de texte selon le type de fichier
|
||||
let ocrResult = null
|
||||
if (ext.toLowerCase() === '.pdf') {
|
||||
ocrResult = await extractTextFromPdf(destPath)
|
||||
} else if (['.jpg', '.jpeg', '.png', '.tiff'].includes(ext.toLowerCase())) {
|
||||
ocrResult = await extractTextFromImage(destPath)
|
||||
}
|
||||
|
||||
if (ocrResult && ocrResult.text) {
|
||||
// Extraction des entités
|
||||
const entities = extractEntitiesFromText(ocrResult.text)
|
||||
|
||||
// Génération du résultat au format standard
|
||||
const result = generateStandardJSON(mockFile, ocrResult, entities, 0)
|
||||
|
||||
// Sauvegarde dans le cache du dossier
|
||||
saveJsonCacheInFolder(folderHash, fileHash, result)
|
||||
console.log(`[FOLDER] Fichier de test traité: ${testFile} -> ${fileHash}`)
|
||||
} else {
|
||||
console.warn(`[FOLDER] Aucun texte extrait de ${testFile}`)
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn(`[FOLDER] Erreur lors du traitement de ${testFile}:`, error.message)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
folderHash,
|
||||
message: 'Dossier par défaut créé avec succès'
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('[FOLDER] Erreur lors de la création du dossier par défaut:', error)
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: error.message
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
app.get('/api/health', (req, res) => {
|
||||
res.json({
|
||||
status: 'OK',
|
||||
@ -961,6 +1278,7 @@ app.listen(PORT, () => {
|
||||
console.log(`📡 API disponible sur: http://localhost:${PORT}/api`)
|
||||
console.log(`🏥 Health check: http://localhost:${PORT}/api/health`)
|
||||
console.log(`📁 Test files: http://localhost:${PORT}/api/test-files`)
|
||||
console.log(`📂 Répertoire de travail: ${process.cwd()}`)
|
||||
})
|
||||
|
||||
module.exports = app
|
||||
|
||||
139 docs/API.md Normal file

@@ -0,0 +1,139 @@

# API Documentation - 4NK IA Lecoffre.io

## Overview

The 4NK IA Lecoffre.io application communicates only with the internal backend for all
features (upload, extraction, analysis, context, advice).

## Main Backend API

### Base URL

```text
http://localhost:8000 (development)
```

### Endpoints

#### Document upload

```http
POST /api/notary/upload
Content-Type: multipart/form-data

Body: FormData containing the file
```

Expected response (fields used by the front end):

```json
{
  "document_id": "doc_123456",
  "mime_type": "application/pdf",
  "functional_type": "CNI"
}
```

Front-end mapping to `Document`:

```json
{
  "id": "doc_123456",
  "name": "acte_vente.pdf",
  "mimeType": "application/pdf",
  "functionalType": "CNI",
  "size": 1024000,
  "uploadDate": "<local date>",
  "status": "completed",
  "previewUrl": "blob:..."
}
```
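A minimal TypeScript sketch of this mapping, assuming an already-parsed `data` payload from `POST /api/notary/upload` and the original browser `File`; the fallback id and the blob preview URL are illustrative assumptions, not the exact implementation:

```typescript
// Sketch of the upload-response -> Document mapping described above.
interface Document {
  id: string
  name: string
  mimeType: string
  functionalType?: string
  size: number
  uploadDate: Date
  status: 'completed'
  previewUrl: string
}

function mapUploadResponseToDocument(data: Record<string, any>, file: File): Document {
  return {
    id: data.document_id ?? `upload-${Date.now()}`, // hypothetical fallback id
    name: file.name,
    mimeType: data.mime_type ?? file.type ?? 'application/pdf',
    functionalType: data.functional_type ?? undefined,
    size: file.size,
    uploadDate: new Date(),
    status: 'completed',
    previewUrl: URL.createObjectURL(file), // local blob preview
  }
}
```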
#### Data extraction

```http
GET /api/notary/documents/{documentId}
```

#### Document analysis

```http
GET /api/documents/{documentId}/analyze
```

#### Contextual data

```http
GET /api/documents/{documentId}/context
```

#### AI advice

```http
GET /api/documents/{documentId}/conseil
```
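As an illustration, these read endpoints can be called with a plain `fetch`; the base URL is the development URL given above and the response is assumed to be JSON:

```typescript
// Sketch only: fetch the analysis result for a document.
const API_URL = 'http://localhost:8000' // development base URL from above

export async function fetchAnalysis(documentId: string): Promise<unknown> {
  const response = await fetch(`${API_URL}/api/documents/${documentId}/analyze`)
  if (!response.ok) {
    throw new Error(`HTTP error: ${response.status} ${response.statusText}`)
  }
  return response.json()
}
```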
## External APIs

The external APIs (Cadastre, Géorisques, Géofoncier, BODACC, Infogreffe) are called from the backend
only. No direct calls are made from the front end.

## Error handling

### HTTP error codes

- 200: Success
- 400: Malformed request
- 404: Resource not found
- 405: Method not allowed
- 500: Internal server error

### Connection errors

- ERR_NETWORK: Network error
- ERR_CONNECTION_REFUSED: Connection refused
- ERR_TIMEOUT: Request timeout

## Configuration

### Environment variables

```env
VITE_API_URL=http://localhost:8000
VITE_USE_OPENAI=false
VITE_OPENAI_API_KEY=
VITE_OPENAI_BASE_URL=https://api.openai.com/v1
VITE_OPENAI_MODEL=gpt-4o-mini
```

## OpenAI mode (fallback)

When `VITE_USE_OPENAI=true`, the front end switches to a fallback mode based on OpenAI:

- Upload: simulated client-side (the file is not sent to OpenAI)
- Extraction/Analysis/Advice/Context: `chat.completions` calls against `VITE_OPENAI_MODEL`
- Type detection: simple client-side heuristic

This mode is useful for demos and diagnostics when the backend is unavailable.
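A minimal sketch of how this switch can be wired, assuming `backendDocumentApi` and `openaiDocumentApi` expose a compatible `extract` method (the exact signatures and import paths in the project may differ):

```typescript
import { backendDocumentApi } from '../services/backendApi'
import { openaiDocumentApi } from '../services/openai'

// Sketch: route extraction through the OpenAI fallback when VITE_USE_OPENAI=true.
const USE_OPENAI = import.meta.env.VITE_USE_OPENAI === 'true'

export async function extract(documentId: string, file: File) {
  if (USE_OPENAI) {
    // Fallback: client-side heuristics plus chat.completions on VITE_OPENAI_MODEL
    return openaiDocumentApi.extract(documentId, file)
  }
  return backendDocumentApi.extract(documentId, file)
}
```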
### Axios configuration

```typescript
const apiClient = axios.create({
  baseURL: BASE_URL,
  timeout: 60000
})
```
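Expanding on the snippet above, the error cases listed under "Error handling" can be surfaced in one place with a response interceptor. This is a hedged sketch; the logging and re-throw behaviour are assumptions, not the actual implementation:

```typescript
import axios from 'axios'

const BASE_URL = import.meta.env.VITE_API_URL ?? 'http://localhost:8000'
const apiClient = axios.create({ baseURL: BASE_URL, timeout: 60000 })

// Sketch: map transport failures (ERR_NETWORK, ERR_CONNECTION_REFUSED, timeouts)
// and HTTP error codes (400, 404, 405, 500) to a single handler.
apiClient.interceptors.response.use(
  (response) => response,
  (error) => {
    if (error.response) {
      console.error(`HTTP ${error.response.status} on ${error.config?.url}`)
    } else {
      console.error(`Connection error: ${error.code ?? 'unknown'}`)
    }
    return Promise.reject(error)
  },
)
```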
## Authentication

### Required headers

```http
Authorization: Bearer {token}
Content-Type: application/json
```

## Rate Limiting

- Limits are handled by the backend
322 docs/architecture-backend.md Normal file

@@ -0,0 +1,322 @@

# Backend Architecture for Document Processing

## Overview

The application now uses a backend architecture that processes the data (OCR, NER) and returns JSON to the frontend. This approach improves performance and centralises document processing.

## Architecture

### 🏗️ Structure

```
4NK_IA_front/
├── backend/                 # Express backend server
│   ├── server.js            # Main server
│   ├── package.json         # Backend dependencies
│   └── uploads/             # Temporary files
├── src/                     # React frontend
│   ├── services/
│   │   ├── backendApi.ts    # Backend API
│   │   ├── openai.ts        # Local fallback
│   │   └── ruleNer.ts       # NER rules
│   └── store/
│       └── documentSlice.ts # Redux with backend
└── test-files/              # Test files
```

### 🔄 Data Flow

```mermaid
graph TD
    A[React frontend] --> B[Express backend]
    B --> C[Tesseract.js OCR]
    B --> D[NER rules]
    C --> E[Extracted text]
    D --> F[Extracted entities]
    E --> G[JSON response]
    F --> G
    G --> A
```
## Backend (Express.js)

### 🚀 Main Server

**File**: `backend/server.js`

**Port**: 3001

**Endpoints**:

- `POST /api/extract` - Document extraction
- `GET /api/test-files` - List of test files
- `GET /api/health` - Health check

### 📄 Document Processing

#### 1. Upload and validation

```javascript
// Multer configuration
const upload = multer({
  storage: multer.diskStorage({...}),
  limits: { fileSize: 10 * 1024 * 1024 }, // 10 MB max
  fileFilter: (req, file, cb) => {
    const allowedTypes = ['image/jpeg', 'image/png', 'image/tiff', 'application/pdf']
    // File type validation
  }
})
```

#### 2. Optimised OCR extraction

```javascript
async function extractTextFromImage(imagePath) {
  const worker = await createWorker('fra+eng')

  // Configuration tuned for identity cards
  const params = {
    tessedit_pageseg_mode: '6',
    tessedit_char_whitelist: 'ABCDEFGHIJKLMNOPQRSTUVWXYZ...',
    tessedit_ocr_engine_mode: '1', // LSTM
    textord_min_xheight: '6', // Small text
    // ... other parameters
  }

  await worker.setParameters(params)
  const { data } = await worker.recognize(imagePath)
  return { text: data.text, confidence: data.confidence }
}
```
#### 3. Rule-based NER extraction

```javascript
function extractEntitiesFromText(text) {
  const entities = {
    identities: [],
    addresses: [],
    cniNumbers: [],
    dates: [],
    documentType: 'Document'
  }

  // Patterns for identity cards
  const namePatterns = [
    /(Vendeur|Acheteur|...)\s*:\s*([A-Z][a-zà-öø-ÿ'\-]+\s+[A-Z][a-zà-öø-ÿ'\-]+)/gi,
    /^([A-Z][A-ZÀ-ÖØ-öø-ÿ\s\-']{2,30})$/gm,
    // ... other patterns
  ]

  // Entity extraction...
  return entities
}
```
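The elided extraction step applies each pattern to the OCR text and keeps unique matches. A minimal sketch under that assumption; the capture-group handling and deduplication are illustrative, not the project's exact logic:

```typescript
// Sketch only: apply the name patterns above and keep unique, trimmed matches.
function collectNames(text: string, patterns: RegExp[]): string[] {
  const names = new Set<string>()
  for (const pattern of patterns) {
    for (const match of text.matchAll(pattern)) {
      // Use the second capture group when a role prefix (Vendeur, Acheteur, ...) is present.
      const candidate = (match[2] ?? match[1] ?? '').trim()
      if (candidate.length > 1) names.add(candidate)
    }
  }
  return [...names]
}
```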
### 📊 JSON Response

```json
{
  "success": true,
  "documentId": "doc-1234567890",
  "fileName": "IMG_20250902_162159.jpg",
  "fileSize": 1077961,
  "mimeType": "image/jpeg",
  "processing": {
    "ocr": {
      "text": "Texte extrait par OCR...",
      "confidence": 85.5,
      "wordCount": 25
    },
    "ner": {
      "identities": [...],
      "addresses": [...],
      "cniNumbers": [...],
      "dates": [...],
      "documentType": "CNI"
    },
    "globalConfidence": 87.2
  },
  "extractedData": {
    "documentType": "CNI",
    "identities": [...],
    "addresses": [...],
    "cniNumbers": [...],
    "dates": [...]
  },
  "timestamp": "2025-09-15T23:30:00.000Z"
}
```
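On the front-end side this payload can be described with a TypeScript interface. The sketch below is derived from the example above; the entity element types are assumptions and the project's `BackendExtractionResult` may differ:

```typescript
// Sketch of the /api/extract response shape shown above.
interface ExtractResponse {
  success: boolean
  documentId: string
  fileName: string
  fileSize: number
  mimeType: string
  processing: {
    ocr: { text: string; confidence: number; wordCount: number }
    ner: {
      identities: string[]
      addresses: string[]
      cniNumbers: string[]
      dates: string[]
      documentType: string
    }
    globalConfidence: number
  }
  extractedData: {
    documentType: string
    identities: string[]
    addresses: string[]
    cniNumbers: string[]
    dates: string[]
  }
  timestamp: string
}
```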
## Frontend (React)

### 🔌 Backend Service

**File**: `src/services/backendApi.ts`

```typescript
export async function extractDocumentBackend(
  documentId: string,
  file?: File,
  hooks?: { onOcrProgress?: (progress: number) => void; onLlmProgress?: (progress: number) => void }
): Promise<ExtractionResult> {

  const formData = new FormData()
  formData.append('document', file)

  const response = await fetch(`${BACKEND_URL}/api/extract`, {
    method: 'POST',
    body: formData
  })

  const result: BackendExtractionResult = await response.json()

  // Conversion to the frontend format
  return convertBackendToFrontend(result)
}
```

### 🔄 Redux Store

**File**: `src/store/documentSlice.ts`

```typescript
export const extractDocument = createAsyncThunk(
  'document/extract',
  async (documentId: string, thunkAPI) => {
    // Check whether the backend is available
    const backendAvailable = await checkBackendHealth()

    if (backendAvailable) {
      // Use the backend
      return await backendDocumentApi.extract(documentId, file, progressHooks)
    } else {
      // Fall back to local mode
      return await openaiDocumentApi.extract(documentId, file, progressHooks)
    }
  }
)
```
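`checkBackendHealth` can stay cheap by pinging `GET /api/health` and caching the answer for a short window, which matches the health-check cache declared in `backendApi.ts`. A hedged sketch; the TTL value is an assumption:

```typescript
// Sketch: cached health check against GET /api/health.
const BACKEND_URL = import.meta.env.VITE_BACKEND_URL ?? 'http://localhost:3001'

let backendHealthCache: { isHealthy: boolean; timestamp: number } | null = null
const HEALTH_TTL_MS = 30_000 // assumed cache window

export async function checkBackendHealth(): Promise<boolean> {
  if (backendHealthCache && Date.now() - backendHealthCache.timestamp < HEALTH_TTL_MS) {
    return backendHealthCache.isHealthy
  }
  let isHealthy = false
  try {
    const response = await fetch(`${BACKEND_URL}/api/health`)
    isHealthy = response.ok
  } catch {
    isHealthy = false
  }
  backendHealthCache = { isHealthy, timestamp: Date.now() }
  return isHealthy
}
```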
## Getting Started

### 🚀 Backend

```bash
# Option 1: automatic script
./start-backend.sh

# Option 2: manual
cd backend
npm install
node server.js
```

### 🌐 Frontend

```bash
npm run dev
```

### 🧪 Testing the Architecture

```bash
node test-backend-architecture.cjs
```

## Benefits

### 🚀 Performance

- **Centralised processing**: OCR and NER run on the server
- **Optimisations**: OCR parameters tuned for identity cards
- **Cache**: results can be cached

### 🔧 Maintainability

- **Separation of concerns**: backend for processing, frontend for the UI
- **REST API**: clear interface between frontend and backend
- **Fallback**: local mode when the backend is unavailable

### 📊 Monitoring

- **Detailed logs**: full traceability of processing
- **Health check**: backend status verification
- **Metrics**: OCR confidence, number of extracted entities

## Configuration

### 🔧 Environment Variables

**Backend**:

- `PORT=3001` - Backend server port

**Frontend**:

- `VITE_BACKEND_URL=http://localhost:3001` - Backend URL
- `VITE_USE_RULE_NER=true` - Local rules mode (fallback)
- `VITE_DISABLE_LLM=true` - Disable the LLM
### 📁 File Layout

```
backend/
├── server.js                   # Express server
├── package.json                # Dependencies
└── uploads/                    # Temporary files (auto-created)

src/services/
├── backendApi.ts               # Backend API
├── openai.ts                   # Local fallback
└── ruleNer.ts                  # NER rules

docs/
└── architecture-backend.md     # This documentation
```

## Troubleshooting

### ❌ Common Issues

#### Backend unreachable

```bash
# Check that the backend is running
curl http://localhost:3001/api/health

# Check the logs
cd backend && node server.js
```

#### OCR errors

- Check the image size (minimum 3x3 pixels)
- Adjust the `textord_min_xheight` parameter
- Check the supported file types

#### Communication errors

- Check that ports 3001 (backend) and 5176 (frontend) are free
- Check the CORS configuration
- Check the environment variables

### 🔍 Logs

**Backend**:

```
🚀 Serveur backend démarré sur le port 3001
📡 API disponible sur: http://localhost:3001/api
[OCR] Début de l'extraction pour: uploads/document-123.jpg
[OCR] Extraction terminée - Confiance: 85.5%
[NER] Extraction terminée: 2 identités, 1 adresse, 1 CNI
```

**Frontend**:

```
🚀 [STORE] Utilisation du backend pour l'extraction
📊 [PROGRESS] OCR doc-123: 30%
📊 [PROGRESS] NER doc-123: 50%
🎉 [BACKEND] Extraction terminée avec succès
```

## Future Work

### 🔮 Possible Improvements

1. **Database**: store extraction results
2. **Redis cache**: cache OCR results
3. **Queue system**: asynchronous processing for large volumes
4. **GraphQL API**: more flexible interface
5. **Microservices**: separate OCR and NER services
6. **Docker**: containerisation for deployment
7. **Monitoring**: metrics and alerts
8. **Automated tests**: full test suite
26
package-lock.json
generated
26
package-lock.json
generated
@ -21,7 +21,8 @@
|
||||
"react-dom": "^19.1.1",
|
||||
"react-dropzone": "^14.3.8",
|
||||
"react-redux": "^9.2.0",
|
||||
"react-router-dom": "^7.8.2",
|
||||
"react-router-dom": "^7.9.1",
|
||||
"router-dom": "^3.0.3",
|
||||
"sharp": "^0.34.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
@ -4542,6 +4543,12 @@
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/hydro-js": {
|
||||
"version": "1.8.13",
|
||||
"resolved": "https://registry.npmjs.org/hydro-js/-/hydro-js-1.8.13.tgz",
|
||||
"integrity": "sha512-zgPCJCdJkCeEZL+NK9t0ojPCwKp2EEmuqTVkTBmmL3Vuu5+0+gCTV4uG16u23mS5HRVksQ18e/cqAFU7mILWGg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/iconv-lite": {
|
||||
"version": "0.6.3",
|
||||
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
|
||||
@ -6158,6 +6165,12 @@
|
||||
"node": "20 || >=22"
|
||||
}
|
||||
},
|
||||
"node_modules/path-to-regexp": {
|
||||
"version": "6.3.0",
|
||||
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.3.0.tgz",
|
||||
"integrity": "sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/path-type": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz",
|
||||
@ -6571,6 +6584,7 @@
|
||||
"version": "7.9.1",
|
||||
"resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.9.1.tgz",
|
||||
"integrity": "sha512-U9WBQssBE9B1vmRjo9qTM7YRzfZ3lUxESIZnsf4VjR/lXYz9MHjvOxHzr/aUm4efpktbVOrF09rL/y4VHa8RMw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"react-router": "7.9.1"
|
||||
},
|
||||
@ -6726,6 +6740,16 @@
|
||||
"fsevents": "~2.3.2"
|
||||
}
|
||||
},
|
||||
"node_modules/router-dom": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/router-dom/-/router-dom-3.0.3.tgz",
|
||||
"integrity": "sha512-1xCnUy53CNrgJopNhOwzN6i34gRNIjhADAtPJvbISwOe/7zaQQ9tvcA/HuNbLk4PAHQcXgcyf0H3XZi84mYBAQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"hydro-js": "^1.5.22",
|
||||
"path-to-regexp": "6.3.0"
|
||||
}
|
||||
},
|
||||
"node_modules/rrweb-cssom": {
|
||||
"version": "0.8.0",
|
||||
"resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz",
|
||||
|
||||
@ -34,7 +34,8 @@
|
||||
"react-dom": "^19.1.1",
|
||||
"react-dropzone": "^14.3.8",
|
||||
"react-redux": "^9.2.0",
|
||||
"react-router-dom": "^7.8.2",
|
||||
"react-router-dom": "^7.9.1",
|
||||
"router-dom": "^3.0.3",
|
||||
"sharp": "^0.34.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
60
src/App.tsx
60
src/App.tsx
@ -1,6 +1,66 @@
|
||||
import { useEffect } from 'react'
|
||||
import './App.css'
|
||||
import { AppRouter } from './router'
|
||||
import { useAppDispatch, useAppSelector } from './store'
|
||||
import {
|
||||
createDefaultFolderThunk,
|
||||
loadFolderResults,
|
||||
setCurrentFolderHash,
|
||||
setBootstrapped
|
||||
} from './store/documentSlice'
|
||||
|
||||
export default function App() {
|
||||
const dispatch = useAppDispatch()
|
||||
const { documents, bootstrapped, currentFolderHash, folderResults } = useAppSelector((state) => state.document)
|
||||
|
||||
// Bootstrap au démarrage de l'application avec système de dossiers
|
||||
useEffect(() => {
|
||||
console.log('🔍 [APP] useEffect déclenché:', {
|
||||
documentsLength: documents.length,
|
||||
bootstrapped,
|
||||
currentFolderHash,
|
||||
folderResultsLength: folderResults.length,
|
||||
isDev: import.meta.env.DEV
|
||||
})
|
||||
|
||||
// Récupérer le hash du dossier depuis l'URL
|
||||
const urlParams = new URLSearchParams(window.location.search)
|
||||
const urlFolderHash = urlParams.get('hash')
|
||||
|
||||
console.log('🔍 [APP] Hash du dossier depuis URL:', urlFolderHash)
|
||||
|
||||
const initializeFolder = async () => {
|
||||
try {
|
||||
let folderHash = urlFolderHash || currentFolderHash
|
||||
|
||||
// Si pas de hash de dossier, créer le dossier par défaut
|
||||
if (!folderHash) {
|
||||
console.log('🚀 [APP] Création du dossier par défaut...')
|
||||
const result = await dispatch(createDefaultFolderThunk()).unwrap()
|
||||
folderHash = result.folderHash
|
||||
console.log('✅ [APP] Dossier par défaut créé:', folderHash)
|
||||
}
|
||||
|
||||
// Charger les résultats du dossier
|
||||
console.log('📁 [APP] Chargement des résultats du dossier:', folderHash)
|
||||
await dispatch(loadFolderResults(folderHash)).unwrap()
|
||||
|
||||
// Marquer le bootstrap comme terminé
|
||||
dispatch(setBootstrapped(true))
|
||||
console.log('🎉 [APP] Bootstrap terminé avec le dossier:', folderHash)
|
||||
} catch (error) {
|
||||
console.error('❌ [APP] Erreur lors de l\'initialisation du dossier:', error)
|
||||
}
|
||||
}
|
||||
|
||||
// Ne pas refaire le bootstrap si déjà fait
|
||||
if (bootstrapped && folderResults.length > 0) {
|
||||
console.log('⏭️ [APP] Bootstrap déjà effectué, dossier:', currentFolderHash)
|
||||
return
|
||||
}
|
||||
|
||||
initializeFolder()
|
||||
}, [dispatch, bootstrapped, currentFolderHash, folderResults.length])
|
||||
|
||||
return <AppRouter />
|
||||
}
|
||||
|
||||
@ -31,11 +31,21 @@ export const Layout: React.FC<LayoutProps> = ({ children }) => {
|
||||
if (docId) {
|
||||
console.log(`🚀 [LAYOUT] Traitement de la queue: ${docId}`)
|
||||
try {
|
||||
// Marquer le document comme en cours de traitement
|
||||
const doc = documents.find(d => d.id === docId)
|
||||
if (doc) {
|
||||
doc.status = 'processing'
|
||||
}
|
||||
await dispatch(extractDocument(docId))
|
||||
// Attendre un peu entre les extractions
|
||||
await new Promise(resolve => setTimeout(resolve, 500))
|
||||
} catch (error) {
|
||||
console.error(`❌ [LAYOUT] Erreur extraction ${docId}:`, error)
|
||||
// Marquer le document comme en erreur
|
||||
const doc = documents.find(d => d.id === docId)
|
||||
if (doc) {
|
||||
doc.status = 'error'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -54,7 +64,15 @@ export const Layout: React.FC<LayoutProps> = ({ children }) => {
|
||||
|
||||
console.log(`📄 [LAYOUT] Document ${doc.id}: hasExtraction=${!!hasExtraction}, isProcessed=${isProcessed}, isProcessing=${isProcessing}, isCompleted=${isCompleted}`)
|
||||
|
||||
if (!hasExtraction && !isProcessed && !isProcessing && !isCompleted) {
|
||||
// Si le document a déjà un résultat d'extraction, marquer comme traité
|
||||
if (hasExtraction && !isProcessed) {
|
||||
console.log(`✅ [LAYOUT] Document ${doc.id} a déjà un résultat, marqué comme traité`)
|
||||
processedDocs.current.add(doc.id)
|
||||
// Mettre à jour le statut du document
|
||||
if (doc.status !== 'completed') {
|
||||
doc.status = 'completed'
|
||||
}
|
||||
} else if (!hasExtraction && !isProcessed && !isProcessing && !isCompleted) {
|
||||
console.log(`🚀 [LAYOUT] Ajout à la queue: ${doc.id}`)
|
||||
processedDocs.current.add(doc.id)
|
||||
extractionQueue.current.push(doc.id)
|
||||
@ -63,7 +81,7 @@ export const Layout: React.FC<LayoutProps> = ({ children }) => {
|
||||
|
||||
// Traiter la queue
|
||||
processExtractionQueue()
|
||||
}, [documents, dispatch]) // Retiré extractionById des dépendances
|
||||
}, [documents, dispatch, extractionById]) // Remettre extractionById dans les dépendances
|
||||
|
||||
// Déclencher contexte et conseil globaux une fois qu'un document courant existe
|
||||
useEffect(() => {
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
import React from 'react'
|
||||
import { Tabs, Tab, Box } from '@mui/material'
|
||||
import { useNavigate } from 'react-router-dom'
|
||||
import { useAppSelector } from '../store'
|
||||
|
||||
interface NavigationTabsProps {
|
||||
currentPath: string
|
||||
@ -8,19 +9,25 @@ interface NavigationTabsProps {
|
||||
|
||||
export const NavigationTabs: React.FC<NavigationTabsProps> = ({ currentPath }) => {
|
||||
const navigate = useNavigate()
|
||||
const { currentDocument, extractionById } = useAppSelector((state) => state.document)
|
||||
|
||||
const tabs = [
|
||||
{ label: 'Téléversement', path: '/' },
|
||||
{ label: 'Extraction', path: '/extraction' },
|
||||
{ label: 'Analyse', path: '/analyse' },
|
||||
{ label: 'Contexte', path: '/contexte' },
|
||||
{ label: 'Conseil', path: '/conseil' },
|
||||
{ label: 'Téléversement', path: '/', alwaysEnabled: true },
|
||||
{ label: 'Extraction', path: '/extraction', alwaysEnabled: true },
|
||||
{ label: 'Contexte', path: '/contexte', alwaysEnabled: false },
|
||||
{ label: 'Conseil', path: '/conseil', alwaysEnabled: false },
|
||||
]
|
||||
|
||||
const currentTabIndex = tabs.findIndex(tab => tab.path === currentPath)
|
||||
|
||||
// Vérifier si au moins une extraction est terminée
|
||||
const hasCompletedExtraction = currentDocument && extractionById[currentDocument.id]
|
||||
|
||||
const handleTabChange = (_event: React.SyntheticEvent, newValue: number) => {
|
||||
navigate(tabs[newValue].path)
|
||||
const tab = tabs[newValue]
|
||||
if (tab.alwaysEnabled || hasCompletedExtraction) {
|
||||
navigate(tab.path)
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
@ -33,7 +40,17 @@ export const NavigationTabs: React.FC<NavigationTabsProps> = ({ currentPath }) =
|
||||
scrollButtons="auto"
|
||||
>
|
||||
{tabs.map((tab, index) => (
|
||||
<Tab key={index} label={tab.label} />
|
||||
<Tab
|
||||
key={index}
|
||||
label={tab.label}
|
||||
disabled={!tab.alwaysEnabled && !hasCompletedExtraction}
|
||||
sx={{
|
||||
opacity: (!tab.alwaysEnabled && !hasCompletedExtraction) ? 0.5 : 1,
|
||||
'&.Mui-disabled': {
|
||||
color: 'text.disabled'
|
||||
}
|
||||
}}
|
||||
/>
|
||||
))}
|
||||
</Tabs>
|
||||
</Box>
|
||||
|
||||
@ -2,10 +2,34 @@ import React from 'react'
|
||||
import { Routes, Route } from 'react-router-dom'
|
||||
import UploadView from '../views/UploadView'
|
||||
|
||||
<<<<<<< HEAD
|
||||
export const AppRouter: React.FC = () => {
|
||||
return (
|
||||
<Routes>
|
||||
<Route path="/" element={<UploadView />} />
|
||||
</Routes>
|
||||
)
|
||||
=======
|
||||
const UploadView = lazy(() => import('../views/UploadView'))
|
||||
const ExtractionView = lazy(() => import('../views/ExtractionView'))
|
||||
const ContexteView = lazy(() => import('../views/ContexteView'))
|
||||
const ConseilView = lazy(() => import('../views/ConseilView'))
|
||||
|
||||
const LoadingFallback = () => (
|
||||
<Box sx={{ display: 'flex', justifyContent: 'center', alignItems: 'center', minHeight: '50vh' }}>
|
||||
<CircularProgress />
|
||||
<Typography sx={{ ml: 2 }}>Chargement...</Typography>
|
||||
</Box>
|
||||
)
|
||||
|
||||
const router = createBrowserRouter([
|
||||
{ path: '/', element: <Suspense fallback={<LoadingFallback />}><UploadView /></Suspense> },
|
||||
{ path: '/extraction', element: <Suspense fallback={<LoadingFallback />}><ExtractionView /></Suspense> },
|
||||
{ path: '/contexte', element: <Suspense fallback={<LoadingFallback />}><ContexteView /></Suspense> },
|
||||
{ path: '/conseil', element: <Suspense fallback={<LoadingFallback />}><ConseilView /></Suspense> },
|
||||
])
|
||||
|
||||
export const AppRouter = () => {
|
||||
return <RouterProvider router={router} />
|
||||
>>>>>>> 43a0ad607071d3d73ef984c26fe63991aeef352e
|
||||
}
|
||||
|
||||
@ -30,26 +30,44 @@ apiClient.interceptors.response.use(
|
||||
|
||||
// Services API pour les documents
|
||||
export const documentApi = {
|
||||
// Téléversement de document
|
||||
upload: async (file: File): Promise<Document> => {
|
||||
if (USE_OPENAI) return openaiDocumentApi.upload(file)
|
||||
// Téléversement de document avec extraction
|
||||
upload: async (file: File): Promise<{ document: Document; extraction: ExtractionResult }> => {
|
||||
if (USE_OPENAI) {
|
||||
const doc = await openaiDocumentApi.upload(file)
|
||||
return { document: doc, extraction: null as any }
|
||||
}
|
||||
const formData = new FormData()
|
||||
formData.append('file', file)
|
||||
const { data } = await apiClient.post('/api/notary/upload', formData)
|
||||
formData.append('document', file)
|
||||
const { data } = await apiClient.post('/api/extract', formData)
|
||||
|
||||
// L'API retourne {message, document_id, status}
|
||||
// L'API retourne le résultat d'extraction complet
|
||||
// On doit mapper vers le format Document attendu
|
||||
const fileUrl = URL.createObjectURL(file)
|
||||
return {
|
||||
id: data.document_id || data.id || 'upload-' + Date.now(),
|
||||
name: file.name,
|
||||
mimeType: data.mime_type || data.mimeType || file.type || 'application/pdf',
|
||||
functionalType: data.functional_type || data.functionalType || undefined,
|
||||
size: file.size,
|
||||
uploadDate: new Date(),
|
||||
const document: Document = {
|
||||
id: data.document.id || 'upload-' + Date.now(),
|
||||
name: data.document.fileName || file.name,
|
||||
mimeType: data.document.mimeType || file.type || 'application/pdf',
|
||||
functionalType: undefined,
|
||||
size: data.document.fileSize || file.size,
|
||||
uploadDate: new Date(data.document.uploadTimestamp || Date.now()),
|
||||
status: 'completed',
|
||||
previewUrl: fileUrl
|
||||
}
|
||||
|
||||
// Adapter le résultat d'extraction au format attendu
|
||||
const extraction: ExtractionResult = {
|
||||
documentId: document.id,
|
||||
documentType: data.classification.documentType,
|
||||
confidence: data.metadata.quality.globalConfidence,
|
||||
text: data.extraction.text.raw,
|
||||
identities: data.extraction.entities.persons || [],
|
||||
addresses: data.extraction.entities.addresses || [],
|
||||
companies: data.extraction.entities.companies || [],
|
||||
language: data.classification.language,
|
||||
timestamp: data.status.timestamp
|
||||
}
|
||||
|
||||
return { document, extraction }
|
||||
},
|
||||
|
||||
// Extraction des données
|
||||
|
||||
@ -38,15 +38,6 @@ export interface BackendExtractionResult {
|
||||
timestamp: string
|
||||
}
|
||||
|
||||
export interface BackendTestFiles {
|
||||
success: boolean
|
||||
files: Array<{
|
||||
name: string
|
||||
size: number
|
||||
type: string
|
||||
lastModified: string
|
||||
}>
|
||||
}
|
||||
|
||||
/**
|
||||
* Extrait le texte et les entités d'un document via le backend
|
||||
@ -150,26 +141,6 @@ export async function extractDocumentBackend(
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Récupère la liste des fichiers de test depuis le backend
|
||||
*/
|
||||
export async function getTestFilesBackend(): Promise<BackendTestFiles> {
|
||||
try {
|
||||
const response = await fetch(`${BACKEND_URL}/api/test-files`)
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Erreur HTTP: ${response.status} ${response.statusText}`)
|
||||
}
|
||||
|
||||
const result: BackendTestFiles = await response.json()
|
||||
console.log('📁 [BACKEND] Fichiers de test récupérés:', result.files.length)
|
||||
|
||||
return result
|
||||
} catch (error) {
|
||||
console.error('❌ [BACKEND] Erreur lors de la récupération des fichiers de test:', error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// Cache pour le health check
|
||||
let backendHealthCache: { isHealthy: boolean; timestamp: number } | null = null
|
||||
|
||||
134
src/services/folderApi.ts
Normal file
134
src/services/folderApi.ts
Normal file
@ -0,0 +1,134 @@
|
||||
/**
|
||||
* API pour la gestion des dossiers par hash
|
||||
*/
|
||||
|
||||
const API_BASE_URL = 'http://localhost:3001/api'
|
||||
|
||||
export interface FolderResult {
|
||||
fileHash: string
|
||||
document: {
|
||||
id: string
|
||||
fileName: string
|
||||
mimeType: string
|
||||
fileSize: number
|
||||
uploadTimestamp: number
|
||||
}
|
||||
classification: {
|
||||
documentType: string
|
||||
language: string
|
||||
}
|
||||
extraction: {
|
||||
text: {
|
||||
raw: string
|
||||
processed: string
|
||||
}
|
||||
entities: {
|
||||
persons: string[]
|
||||
addresses: string[]
|
||||
companies: string[]
|
||||
}
|
||||
}
|
||||
metadata: {
|
||||
quality: {
|
||||
globalConfidence: number
|
||||
}
|
||||
}
|
||||
status: {
|
||||
timestamp: number
|
||||
}
|
||||
}
|
||||
|
||||
export interface FolderResponse {
|
||||
success: boolean
|
||||
folderHash: string
|
||||
results: FolderResult[]
|
||||
count: number
|
||||
}
|
||||
|
||||
export interface CreateFolderResponse {
|
||||
success: boolean
|
||||
folderHash: string
|
||||
message: string
|
||||
}
|
||||
|
||||
// Créer un nouveau dossier
|
||||
export async function createFolder(): Promise<CreateFolderResponse> {
|
||||
const response = await fetch(`${API_BASE_URL}/folders`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Erreur lors de la création du dossier: ${response.statusText}`)
|
||||
}
|
||||
|
||||
return response.json()
|
||||
}
|
||||
|
||||
// Créer le dossier par défaut avec les fichiers de test
|
||||
export async function createDefaultFolder(): Promise<CreateFolderResponse> {
|
||||
const response = await fetch(`${API_BASE_URL}/folders/default`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Erreur lors de la création du dossier par défaut: ${response.statusText}`)
|
||||
}
|
||||
|
||||
return response.json()
|
||||
}
|
||||
|
||||
// Utiliser le dossier par défaut existant (sans créer de nouveau dossier)
|
||||
export async function getDefaultFolder(): Promise<CreateFolderResponse> {
|
||||
// Utiliser le dossier par défaut existant avec les fichiers de test
|
||||
return {
|
||||
success: true,
|
||||
folderHash: '7d99a85daf66a0081a0e881630e6b39b',
|
||||
message: 'Dossier par défaut récupéré'
|
||||
}
|
||||
}
|
||||
|
||||
// Récupérer les résultats d'un dossier
|
||||
export async function getFolderResults(folderHash: string): Promise<FolderResponse> {
|
||||
const response = await fetch(`${API_BASE_URL}/folders/${folderHash}/results`)
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Erreur lors de la récupération des résultats du dossier: ${response.statusText}`)
|
||||
}
|
||||
|
||||
return response.json()
|
||||
}
|
||||
|
||||
// Récupérer un fichier original depuis un dossier
|
||||
export async function getFolderFile(folderHash: string, fileHash: string): Promise<Blob> {
|
||||
const response = await fetch(`${API_BASE_URL}/folders/${folderHash}/files/${fileHash}`)
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Erreur lors de la récupération du fichier: ${response.statusText}`)
|
||||
}
|
||||
|
||||
return response.blob()
|
||||
}
|
||||
|
||||
// Uploader un fichier dans un dossier
|
||||
export async function uploadFileToFolder(file: File, folderHash: string): Promise<FolderResult> {
|
||||
const formData = new FormData()
|
||||
formData.append('document', file)
|
||||
formData.append('folderHash', folderHash)
|
||||
|
||||
const response = await fetch(`${API_BASE_URL}/extract`, {
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Erreur lors de l'upload: ${response.statusText}`)
|
||||
}
|
||||
|
||||
return response.json()
|
||||
}
|
||||
@ -1,98 +0,0 @@
|
||||
/**
|
||||
* API pour gérer les fichiers de test
|
||||
*/
|
||||
|
||||
export interface TestFileInfo {
|
||||
name: string
|
||||
size: number
|
||||
type: string
|
||||
lastModified: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Récupère la liste des fichiers disponibles dans le dossier test-files
|
||||
*/
|
||||
export async function getTestFilesList(): Promise<TestFileInfo[]> {
|
||||
try {
|
||||
// En mode développement, on peut utiliser une API pour lister les fichiers
|
||||
// Pour l'instant, on utilise une approche simple avec les fichiers connus
|
||||
const knownFiles = [
|
||||
'IMG_20250902_162159.jpg',
|
||||
'IMG_20250902_162210.jpg',
|
||||
'sample.md',
|
||||
'sample.pdf',
|
||||
'sample.txt'
|
||||
]
|
||||
|
||||
const files: TestFileInfo[] = []
|
||||
|
||||
for (const fileName of knownFiles) {
|
||||
try {
|
||||
const response = await fetch(`/test-files/${fileName}`, { method: 'HEAD' })
|
||||
if (response.ok) {
|
||||
const contentLength = response.headers.get('content-length')
|
||||
const contentType = response.headers.get('content-type')
|
||||
const lastModified = response.headers.get('last-modified')
|
||||
|
||||
files.push({
|
||||
name: fileName,
|
||||
size: contentLength ? parseInt(contentLength, 10) : 0,
|
||||
type: contentType || 'application/octet-stream',
|
||||
lastModified: lastModified ? new Date(lastModified).getTime() : Date.now()
|
||||
})
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn(`Impossible de vérifier le fichier ${fileName}:`, error)
|
||||
}
|
||||
}
|
||||
|
||||
return files
|
||||
} catch (error) {
|
||||
console.error('Erreur lors de la récupération de la liste des fichiers de test:', error)
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Charge un fichier de test par son nom
|
||||
*/
|
||||
export async function loadTestFile(fileName: string): Promise<File | null> {
|
||||
try {
|
||||
const response = await fetch(`/test-files/${fileName}`)
|
||||
if (!response.ok) {
|
||||
throw new Error(`Fichier non trouvé: ${fileName}`)
|
||||
}
|
||||
|
||||
const blob = await response.blob()
|
||||
return new File([blob], fileName, { type: blob.type })
|
||||
} catch (error) {
|
||||
console.error(`Erreur lors du chargement du fichier ${fileName}:`, error)
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Filtre les fichiers par type MIME supporté
|
||||
*/
|
||||
export function filterSupportedFiles(files: TestFileInfo[]): TestFileInfo[] {
|
||||
const supportedTypes = [
|
||||
'application/pdf',
|
||||
'image/jpeg',
|
||||
'image/jpg',
|
||||
'image/png',
|
||||
'image/tiff'
|
||||
]
|
||||
|
||||
return files.filter(file => {
|
||||
// Vérifier le type MIME
|
||||
if (supportedTypes.includes(file.type)) {
|
||||
return true
|
||||
}
|
||||
|
||||
// Vérifier l'extension si le type MIME n'est pas fiable
|
||||
const extension = file.name.split('.').pop()?.toLowerCase()
|
||||
const supportedExtensions = ['pdf', 'jpg', 'jpeg', 'png', 'tiff']
|
||||
|
||||
return extension && supportedExtensions.includes(extension)
|
||||
})
|
||||
}
|
||||
@ -15,3 +15,5 @@ const appSlice = createSlice({
|
||||
})
|
||||
|
||||
export const appReducer = appSlice.reducer
|
||||
|
||||
|
||||
|
||||
@ -4,6 +4,7 @@ import type { Document, ExtractionResult, AnalysisResult, ContextResult, Conseil
|
||||
import { documentApi } from '../services/api'
|
||||
import { openaiDocumentApi } from '../services/openai'
|
||||
import { backendDocumentApi, checkBackendHealth } from '../services/backendApi'
|
||||
import { createDefaultFolder, getDefaultFolder, getFolderResults, uploadFileToFolder, type FolderResult } from '../services/folderApi'
|
||||
|
||||
interface DocumentState {
|
||||
documents: Document[]
|
||||
@ -17,6 +18,43 @@ interface DocumentState {
|
||||
loading: boolean
|
||||
error: string | null
|
||||
progressById: Record<string, { ocr: number; llm: number }>
|
||||
bootstrapped: boolean // Flag pour indiquer si le bootstrap a été effectué
|
||||
// Nouvelles propriétés pour les dossiers
|
||||
currentFolderHash: string | null
|
||||
folderResults: FolderResult[]
|
||||
currentResultIndex: number
|
||||
}
|
||||
|
||||
// Fonction pour charger l'état depuis localStorage
|
||||
const loadStateFromStorage = (): Partial<DocumentState> => {
|
||||
try {
|
||||
const savedState = localStorage.getItem('4nk-ia-documents')
|
||||
if (savedState) {
|
||||
const parsed = JSON.parse(savedState)
|
||||
console.log('💾 [STORE] État chargé depuis localStorage:', {
|
||||
documentsCount: parsed.documents?.length || 0,
|
||||
extractionsCount: Object.keys(parsed.extractionById || {}).length
|
||||
})
|
||||
return parsed
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('⚠️ [STORE] Erreur lors du chargement depuis localStorage:', error)
|
||||
}
|
||||
return {}
|
||||
}
|
||||
|
||||
// Fonction pour sauvegarder l'état dans localStorage
|
||||
const saveStateToStorage = (state: DocumentState) => {
|
||||
try {
|
||||
const stateToSave = {
|
||||
documents: state.documents,
|
||||
extractionById: state.extractionById,
|
||||
currentDocument: state.currentDocument
|
||||
}
|
||||
localStorage.setItem('4nk-ia-documents', JSON.stringify(stateToSave))
|
||||
} catch (error) {
|
||||
console.warn('⚠️ [STORE] Erreur lors de la sauvegarde dans localStorage:', error)
|
||||
}
|
||||
}
|
||||
|
||||
const initialState: DocumentState = {
|
||||
@ -31,6 +69,12 @@ const initialState: DocumentState = {
|
||||
loading: false,
|
||||
error: null,
|
||||
progressById: {},
|
||||
bootstrapped: false,
|
||||
// Nouvelles propriétés pour les dossiers
|
||||
currentFolderHash: null,
|
||||
folderResults: [],
|
||||
currentResultIndex: 0,
|
||||
...loadStateFromStorage()
|
||||
}
|
||||
|
||||
export const uploadDocument = createAsyncThunk(
|
||||
@ -127,6 +171,28 @@ export const getConseil = createAsyncThunk(
|
||||
}
|
||||
)
|
||||
|
||||
// Thunks pour la gestion des dossiers
|
||||
export const createDefaultFolderThunk = createAsyncThunk(
|
||||
'document/createDefaultFolder',
|
||||
async () => {
|
||||
return await getDefaultFolder()
|
||||
}
|
||||
)
|
||||
|
||||
export const loadFolderResults = createAsyncThunk(
|
||||
'document/loadFolderResults',
|
||||
async (folderHash: string) => {
|
||||
return await getFolderResults(folderHash)
|
||||
}
|
||||
)
|
||||
|
||||
export const uploadFileToFolderThunk = createAsyncThunk(
|
||||
'document/uploadFileToFolder',
|
||||
async ({ file, folderHash }: { file: File; folderHash: string }) => {
|
||||
return await uploadFileToFolder(file, folderHash)
|
||||
}
|
||||
)
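A sketch of how a component could chain these three thunks: get or create the default folder, upload a file into it, then reload its results. The AppDispatch type is assumed to be the store's typed dispatch; the folderHash field comes from the thunk payload, as read by the extraReducers below.

// Illustrative chaining of the folder thunks (sketch, not part of this commit).
async function uploadIntoDefaultFolder(dispatch: AppDispatch, file: File) {
  const folder = await dispatch(createDefaultFolderThunk()).unwrap()
  await dispatch(uploadFileToFolderThunk({ file, folderHash: folder.folderHash })).unwrap()
  await dispatch(loadFolderResults(folder.folderHash)).unwrap()
}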
|
||||
|
||||
const documentSlice = createSlice({
|
||||
name: 'document',
|
||||
initialState,
|
||||
@ -175,6 +241,20 @@ const documentSlice = createSlice({
|
||||
const { id, progress } = action.payload
|
||||
state.progressById[id] = { ocr: state.progressById[id]?.ocr || 0, llm: Math.max(0, Math.min(100, Math.round(progress * 100))) }
|
||||
},
|
||||
setBootstrapped: (state, action: PayloadAction<boolean>) => {
|
||||
state.bootstrapped = action.payload
|
||||
},
|
||||
// Nouveaux reducers pour les dossiers
|
||||
setCurrentFolderHash: (state, action: PayloadAction<string | null>) => {
|
||||
state.currentFolderHash = action.payload
|
||||
},
|
||||
setCurrentResultIndex: (state, action: PayloadAction<number>) => {
|
||||
state.currentResultIndex = action.payload
|
||||
},
|
||||
clearFolderResults: (state) => {
|
||||
state.folderResults = []
|
||||
state.currentResultIndex = 0
|
||||
},
|
||||
},
|
||||
extraReducers: (builder) => {
|
||||
builder
|
||||
@ -184,10 +264,27 @@ const documentSlice = createSlice({
|
||||
})
|
||||
.addCase(uploadDocument.fulfilled, (state, action) => {
|
||||
state.loading = false
|
||||
state.documents.push(action.payload)
|
||||
state.currentDocument = action.payload
|
||||
const { document, extraction } = action.payload
|
||||
|
||||
console.log('📤 [STORE] Upload fulfilled:', {
|
||||
documentId: document.id,
|
||||
documentName: document.name,
|
||||
hasExtraction: !!extraction,
|
||||
extractionDocumentId: extraction?.documentId
|
||||
})
|
||||
|
||||
state.documents.push(document)
|
||||
state.currentDocument = document
|
||||
|
||||
// Stocker le résultat d'extraction si disponible
|
||||
if (extraction) {
|
||||
state.extractionResult = extraction
|
||||
state.extractionById[document.id] = extraction
|
||||
console.log('✅ [STORE] Extraction stored for document:', document.id)
|
||||
}
|
||||
|
||||
// Capture le File depuis l'URL blob si disponible
|
||||
if (action.payload.previewUrl?.startsWith('blob:')) {
|
||||
if (document.previewUrl?.startsWith('blob:')) {
|
||||
// On ne peut pas récupérer l'objet File initial ici sans passer par onDrop;
|
||||
// il est reconstruit lors de l'extraction via fetch blob.
|
||||
}
|
||||
@ -200,14 +297,22 @@ const documentSlice = createSlice({
|
||||
state.loading = true
|
||||
state.error = null
|
||||
})
|
||||
.addCase(extractDocument.fulfilled, (state, action) => {
|
||||
state.loading = false
|
||||
state.extractionResult = action.payload
|
||||
state.extractionById[action.payload.documentId] = action.payload
|
||||
})
|
||||
.addCase(extractDocument.fulfilled, (state, action) => {
|
||||
state.loading = false
|
||||
state.extractionResult = action.payload
|
||||
state.extractionById[action.payload.documentId] = action.payload
|
||||
// Mettre à jour le statut du document courant
|
||||
if (state.currentDocument && state.currentDocument.id === action.payload.documentId) {
|
||||
state.currentDocument.status = 'completed'
|
||||
}
|
||||
})
|
||||
.addCase(extractDocument.rejected, (state, action) => {
|
||||
state.loading = false
|
||||
state.error = action.error.message || 'Erreur lors de l\'extraction'
|
||||
// Mettre à jour le statut du document courant en cas d'erreur
|
||||
if (state.currentDocument) {
|
||||
state.currentDocument.status = 'error'
|
||||
}
|
||||
})
|
||||
.addCase(analyzeDocument.fulfilled, (state, action) => {
|
||||
state.analysisResult = action.payload
|
||||
@ -218,8 +323,64 @@ const documentSlice = createSlice({
|
||||
.addCase(getConseil.fulfilled, (state, action) => {
|
||||
state.conseilResult = action.payload
|
||||
})
|
||||
// ExtraReducers pour les dossiers
|
||||
.addCase(createDefaultFolderThunk.fulfilled, (state, action) => {
|
||||
state.currentFolderHash = action.payload.folderHash
|
||||
state.loading = false
|
||||
})
|
||||
.addCase(createDefaultFolderThunk.pending, (state) => {
|
||||
state.loading = true
|
||||
})
|
||||
.addCase(createDefaultFolderThunk.rejected, (state, action) => {
|
||||
state.loading = false
|
||||
state.error = action.error.message || 'Erreur lors de la création du dossier par défaut'
|
||||
})
|
||||
.addCase(loadFolderResults.fulfilled, (state, action) => {
|
||||
state.folderResults = action.payload.results
|
||||
state.currentFolderHash = action.payload.folderHash
|
||||
state.loading = false
|
||||
// Convertir les résultats en documents pour la compatibilité
|
||||
state.documents = action.payload.results.map((result, index) => ({
|
||||
id: result.fileHash,
|
||||
name: result.document.fileName,
|
||||
mimeType: result.document.mimeType,
|
||||
size: result.document.fileSize,
|
||||
uploadDate: new Date(result.document.uploadTimestamp),
|
||||
status: 'completed' as const,
|
||||
previewUrl: `blob:folder-${result.fileHash}`
|
||||
}))
|
||||
})
|
||||
.addCase(loadFolderResults.pending, (state) => {
|
||||
state.loading = true
|
||||
})
|
||||
.addCase(loadFolderResults.rejected, (state, action) => {
|
||||
state.loading = false
|
||||
state.error = action.error.message || 'Erreur lors du chargement des résultats du dossier'
|
||||
})
|
||||
.addCase(uploadFileToFolderThunk.fulfilled, (state, action) => {
|
||||
// Recharger les résultats du dossier après upload
|
||||
state.loading = false
|
||||
})
|
||||
.addCase(uploadFileToFolderThunk.pending, (state) => {
|
||||
state.loading = true
|
||||
})
|
||||
.addCase(uploadFileToFolderThunk.rejected, (state, action) => {
|
||||
state.loading = false
|
||||
state.error = action.error.message || 'Erreur lors de l\'upload du fichier'
|
||||
})
|
||||
},
|
||||
})
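The loadFolderResults mapping above reads a handful of fields from each result; inferred from that usage, the shape looks like the sketch below. This is only an inference, the authoritative type is the FolderResult exported by '../services/folderApi'.

// Inferred from the fields read in the results -> documents mapping (sketch only).
interface FolderResultSketch {
  fileHash: string
  document: {
    fileName: string
    mimeType: string
    fileSize: number
    uploadTimestamp: string | number // passed to new Date(...) above
  }
}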
|
||||
|
||||
export const { setCurrentDocument, clearResults, addDocuments, removeDocument, setOcrProgress, setLlmProgress } = documentSlice.actions
|
||||
export const {
|
||||
setCurrentDocument,
|
||||
clearResults,
|
||||
addDocuments,
|
||||
removeDocument,
|
||||
setOcrProgress,
|
||||
setLlmProgress,
|
||||
setBootstrapped,
|
||||
setCurrentFolderHash,
|
||||
setCurrentResultIndex,
|
||||
clearFolderResults
|
||||
} = documentSlice.actions
|
||||
export const documentReducer = documentSlice.reducer
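A small selector sketch for the folder navigation state added in this commit (currentResultIndex into folderResults); the selector name is illustrative.

// Illustrative selector: the folder result currently in view, or null when none is loaded.
export const selectCurrentFolderResult = (state: { document: DocumentState }) =>
  state.document.folderResults[state.document.currentResultIndex] ?? null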
|
||||
|
||||
@ -4,6 +4,28 @@ import type { TypedUseSelectorHook } from 'react-redux'
|
||||
import { appReducer } from './appSlice'
|
||||
import { documentReducer } from './documentSlice'
|
||||
|
||||
// Middleware pour sauvegarder l'état dans localStorage
|
||||
const persistenceMiddleware = (store: any) => (next: any) => (action: any) => {
|
||||
const result = next(action)
|
||||
|
||||
// Sauvegarder seulement les actions liées aux documents
|
||||
if (action.type.startsWith('document/')) {
|
||||
const state = store.getState()
|
||||
try {
|
||||
const stateToSave = {
|
||||
documents: state.document.documents,
|
||||
extractionById: state.document.extractionById,
|
||||
currentDocument: state.document.currentDocument
|
||||
}
|
||||
localStorage.setItem('4nk-ia-documents', JSON.stringify(stateToSave))
|
||||
} catch (error) {
|
||||
console.warn('⚠️ [STORE] Erreur lors de la sauvegarde:', error)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
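The middleware above is typed with any; a more strictly typed variant could look like the sketch below, assuming @reduxjs/toolkit's Middleware type. The persistence body itself is unchanged.

import type { Middleware } from '@reduxjs/toolkit'

// Sketch: same behaviour as persistenceMiddleware, with the action narrowed explicitly.
const typedPersistenceMiddleware: Middleware = (storeApi) => (next) => (action) => {
  const result = next(action)
  const type = (action as { type?: unknown }).type
  if (typeof type === 'string' && type.startsWith('document/')) {
    // ...same localStorage write as above, using storeApi.getState()
  }
  return result
}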
|
||||
|
||||
export const store = configureStore({
|
||||
reducer: {
|
||||
app: appReducer,
|
||||
@ -12,7 +34,7 @@ export const store = configureStore({
|
||||
middleware: (getDefaultMiddleware) => getDefaultMiddleware({
|
||||
serializableCheck: false,
|
||||
immutableCheck: true,
|
||||
}),
|
||||
}).concat(persistenceMiddleware),
|
||||
devTools: true,
|
||||
})
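Since the slice hydrates from, and the middleware writes to, the same localStorage key, a small reset helper can be handy during development. The function name is illustrative; the key '4nk-ia-documents' matches the one used above.

// Illustrative helper: drop the persisted document state (key shared with the slice and middleware).
export function clearPersistedDocuments(): void {
  try {
    localStorage.removeItem('4nk-ia-documents')
  } catch {
    // localStorage can be unavailable (private mode, SSR); ignore.
  }
}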
|
||||
|
||||
|
||||
@ -1,248 +0,0 @@
|
||||
import { useEffect } from 'react'
|
||||
import {
|
||||
Box,
|
||||
Typography,
|
||||
Paper,
|
||||
Card,
|
||||
CardContent,
|
||||
Chip,
|
||||
List,
|
||||
ListItem,
|
||||
ListItemText,
|
||||
ListItemIcon,
|
||||
Alert,
|
||||
LinearProgress,
|
||||
} from '@mui/material'
|
||||
import {
|
||||
CheckCircle,
|
||||
Error,
|
||||
Warning,
|
||||
Flag,
|
||||
Security,
|
||||
Assessment,
|
||||
Info,
|
||||
} from '@mui/icons-material'
|
||||
import type { ChipProps, LinearProgressProps } from '@mui/material'
|
||||
import { useAppDispatch, useAppSelector } from '../store'
|
||||
import { analyzeDocument, getConseil, getContextData } from '../store/documentSlice'
|
||||
import { Layout } from '../components/Layout'
|
||||
|
||||
export default function AnalyseView() {
|
||||
const dispatch = useAppDispatch()
|
||||
const { currentDocument, analysisResult, loading, conseilResult, contextResult } = useAppSelector((state) => state.document)
|
||||
|
||||
useEffect(() => {
|
||||
if (!currentDocument) return
|
||||
if (!analysisResult) dispatch(analyzeDocument(currentDocument.id))
|
||||
if (!conseilResult) dispatch(getConseil(currentDocument.id))
|
||||
if (!contextResult) dispatch(getContextData(currentDocument.id))
|
||||
}, [currentDocument, analysisResult, conseilResult, contextResult, dispatch])
|
||||
|
||||
if (!currentDocument) {
|
||||
return (
|
||||
<Layout>
|
||||
<Alert severity="info">
|
||||
Veuillez d'abord téléverser et sélectionner un document.
|
||||
</Alert>
|
||||
</Layout>
|
||||
)
|
||||
}
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
<Layout>
|
||||
<Box sx={{ display: 'flex', flexDirection: 'column', alignItems: 'center', mt: 4 }}>
|
||||
<LinearProgress sx={{ width: '100%', mb: 2 }} />
|
||||
<Typography>Analyse en cours...</Typography>
|
||||
</Box>
|
||||
</Layout>
|
||||
)
|
||||
}
|
||||
|
||||
if (!analysisResult) {
|
||||
return (
|
||||
<Layout>
|
||||
<Alert severity="warning">
|
||||
Aucun résultat d'analyse disponible.
|
||||
</Alert>
|
||||
</Layout>
|
||||
)
|
||||
}
|
||||
|
||||
const getScoreColor = (score: number): ChipProps['color'] => {
|
||||
if (score >= 0.8) return 'success'
|
||||
if (score >= 0.6) return 'warning'
|
||||
return 'error'
|
||||
}
|
||||
|
||||
const getScoreIcon = (score: number) => {
|
||||
if (score >= 0.8) return <CheckCircle color="success" />
|
||||
if (score >= 0.6) return <Warning color="warning" />
|
||||
return <Error color="error" />
|
||||
}
|
||||
|
||||
return (
|
||||
<Layout>
|
||||
<Typography variant="h4" gutterBottom>
|
||||
Analyse du document
|
||||
</Typography>
|
||||
|
||||
<Box sx={{ display: 'flex', flexDirection: 'column', gap: 3 }}>
|
||||
{/* Résumé général */}
|
||||
<Paper sx={{ p: 2 }}>
|
||||
<Typography variant="h6" gutterBottom>
|
||||
Résumé de l'analyse
|
||||
</Typography>
|
||||
<Box sx={{ display: 'flex', gap: 2, flexWrap: 'wrap', alignItems: 'center' }}>
|
||||
<Chip
|
||||
icon={<Assessment />}
|
||||
label={`Avancement: ${Math.round(analysisResult.credibilityScore * 100)}%`}
|
||||
color={getScoreColor(analysisResult.credibilityScore)}
|
||||
variant="filled"
|
||||
/>
|
||||
{analysisResult.isCNI && (
|
||||
<Chip
|
||||
icon={<Flag />}
|
||||
label={`Pays: ${analysisResult.country}`}
|
||||
color="secondary"
|
||||
variant="outlined"
|
||||
/>
|
||||
)}
|
||||
</Box>
|
||||
</Paper>
|
||||
|
||||
{/* Cas CNI */}
|
||||
{analysisResult.isCNI && (
|
||||
<Card>
|
||||
<CardContent>
|
||||
<Typography variant="h6" gutterBottom>
|
||||
<Security sx={{ mr: 1, verticalAlign: 'middle' }} />
|
||||
Vérification CNI
|
||||
</Typography>
|
||||
{analysisResult.verificationResult && (
|
||||
<List>
|
||||
<ListItem>
|
||||
<ListItemIcon>
|
||||
{analysisResult.verificationResult.numberValid ? (
|
||||
<CheckCircle color="success" />
|
||||
) : (
|
||||
<Error color="error" />
|
||||
)}
|
||||
</ListItemIcon>
|
||||
<ListItemText
|
||||
primary="Numéro valide"
|
||||
secondary={
|
||||
analysisResult.verificationResult.numberValid
|
||||
? 'Le numéro de CNI est valide'
|
||||
: 'Le numéro de CNI est invalide'
|
||||
}
|
||||
/>
|
||||
</ListItem>
|
||||
<ListItem>
|
||||
<ListItemIcon>
|
||||
{analysisResult.verificationResult.formatValid ? (
|
||||
<CheckCircle color="success" />
|
||||
) : (
|
||||
<Error color="error" />
|
||||
)}
|
||||
</ListItemIcon>
|
||||
<ListItemText
|
||||
primary="Format valide"
|
||||
secondary={
|
||||
analysisResult.verificationResult.formatValid
|
||||
? 'Le format du numéro est correct'
|
||||
: 'Le format du numéro est incorrect'
|
||||
}
|
||||
/>
|
||||
</ListItem>
|
||||
<ListItem>
|
||||
<ListItemIcon>
|
||||
{analysisResult.verificationResult.checksumValid ? (
|
||||
<CheckCircle color="success" />
|
||||
) : (
|
||||
<Error color="error" />
|
||||
)}
|
||||
</ListItemIcon>
|
||||
<ListItemText
|
||||
primary="Checksum valide"
|
||||
secondary={
|
||||
analysisResult.verificationResult.checksumValid
|
||||
? 'La somme de contrôle est correcte'
|
||||
: 'La somme de contrôle est incorrecte'
|
||||
}
|
||||
/>
|
||||
</ListItem>
|
||||
</List>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
<Box sx={{ display: 'flex', gap: 3, flexWrap: 'wrap' }}>
|
||||
{/* Score de vraisemblance */}
|
||||
<Box sx={{ flex: '1 1 300px' }}>
|
||||
<Card>
|
||||
<CardContent>
|
||||
<Typography variant="h6" gutterBottom>
|
||||
Score de vraisemblance
|
||||
</Typography>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', mb: 2 }}>
|
||||
{getScoreIcon(analysisResult.credibilityScore)}
|
||||
<Typography variant="h4" sx={{ ml: 2 }}>
|
||||
{(analysisResult.credibilityScore * 100).toFixed(1)}%
|
||||
</Typography>
|
||||
</Box>
|
||||
<LinearProgress
|
||||
variant="determinate"
|
||||
value={analysisResult.credibilityScore * 100}
|
||||
color={getScoreColor(analysisResult.credibilityScore) as LinearProgressProps['color']}
|
||||
sx={{ height: 10, borderRadius: 5 }}
|
||||
/>
|
||||
<Typography variant="body2" color="text.secondary" sx={{ mt: 1 }}>
|
||||
{analysisResult.credibilityScore >= 0.8
|
||||
? 'Document très fiable'
|
||||
: analysisResult.credibilityScore >= 0.6
|
||||
? 'Document moyennement fiable'
|
||||
: 'Document peu fiable - vérification recommandée'}
|
||||
</Typography>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</Box>
|
||||
|
||||
{/* Synthèse */}
|
||||
<Box sx={{ flex: '1 1 300px' }}>
|
||||
<Card>
|
||||
<CardContent>
|
||||
<Typography variant="h6" gutterBottom>
|
||||
Synthèse
|
||||
</Typography>
|
||||
<Typography variant="body1" sx={{ whiteSpace: 'pre-wrap' }}>
|
||||
{analysisResult.summary}
|
||||
</Typography>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</Box>
|
||||
</Box>
|
||||
|
||||
{/* Recommandations */}
|
||||
<Card>
|
||||
<CardContent>
|
||||
<Typography variant="h6" gutterBottom>
|
||||
Recommandations
|
||||
</Typography>
|
||||
<List>
|
||||
{analysisResult.recommendations.map((recommendation, index) => (
|
||||
<ListItem key={index}>
|
||||
<ListItemIcon>
|
||||
<Info color="primary" />
|
||||
</ListItemIcon>
|
||||
<ListItemText primary={recommendation} />
|
||||
</ListItem>
|
||||
))}
|
||||
</List>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</Box>
|
||||
</Layout>
|
||||
)
|
||||
}
|
||||
@ -13,6 +13,7 @@ import {
|
||||
Chip,
|
||||
Button,
|
||||
CircularProgress,
|
||||
LinearProgress,
|
||||
} from '@mui/material'
|
||||
import {
|
||||
Lightbulb,
|
||||
@ -21,23 +22,30 @@ import {
|
||||
TrendingUp,
|
||||
Schedule,
|
||||
Psychology,
|
||||
Assessment,
|
||||
Error,
|
||||
} from '@mui/icons-material'
|
||||
import type { SvgIconProps } from '@mui/material'
|
||||
import type { SvgIconProps, ChipProps, LinearProgressProps } from '@mui/material'
|
||||
import { useAppDispatch, useAppSelector } from '../store'
|
||||
import { getConseil } from '../store/documentSlice'
|
||||
import { getConseil, analyzeDocument } from '../store/documentSlice'
|
||||
import { Layout } from '../components/Layout'
|
||||
|
||||
export default function ConseilView() {
|
||||
const dispatch = useAppDispatch()
|
||||
const { currentDocument, conseilResult, loading } = useAppSelector(
|
||||
const { currentDocument, conseilResult, analysisResult, loading } = useAppSelector(
|
||||
(state) => state.document
|
||||
)
|
||||
|
||||
useEffect(() => {
|
||||
if (currentDocument && !conseilResult) {
|
||||
dispatch(getConseil(currentDocument.id))
|
||||
if (currentDocument) {
|
||||
if (!conseilResult) {
|
||||
dispatch(getConseil(currentDocument.id))
|
||||
}
|
||||
if (!analysisResult) {
|
||||
dispatch(analyzeDocument(currentDocument.id))
|
||||
}
|
||||
}
|
||||
}, [currentDocument, conseilResult, dispatch])
|
||||
}, [currentDocument, conseilResult, analysisResult, dispatch])
|
||||
|
||||
if (!currentDocument) {
|
||||
return (
|
||||
@ -80,6 +88,18 @@ export default function ConseilView() {
|
||||
return 'info'
|
||||
}
|
||||
|
||||
const getScoreColor = (score: number): ChipProps['color'] => {
|
||||
if (score >= 0.8) return 'success'
|
||||
if (score >= 0.6) return 'warning'
|
||||
return 'error'
|
||||
}
|
||||
|
||||
const getScoreIcon = (score: number) => {
|
||||
if (score >= 0.8) return <CheckCircle color="success" />
|
||||
if (score >= 0.6) return <Warning color="warning" />
|
||||
return <Error color="error" />
|
||||
}
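For illustration, the 0.8 / 0.6 thresholds above behave as follows (sample values only).

// Sample scores against the thresholds used by getScoreColor / getScoreIcon.
const samples: Array<[number, ChipProps['color']]> = [
  [0.92, 'success'], // >= 0.8
  [0.7, 'warning'],  // >= 0.6 and < 0.8
  [0.4, 'error'],    // < 0.6
]
samples.forEach(([score, expected]) =>
  console.assert(getScoreColor(score) === expected, `unexpected colour for ${score}`),
)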
|
||||
|
||||
return (
|
||||
<Layout>
|
||||
<Typography variant="h4" gutterBottom>
|
||||
@ -88,6 +108,52 @@ export default function ConseilView() {
|
||||
</Typography>
|
||||
|
||||
<Box sx={{ display: 'flex', flexDirection: 'column', gap: 3 }}>
|
||||
{/* Score de vraisemblance */}
|
||||
{analysisResult && (
|
||||
<Card>
|
||||
<CardContent>
|
||||
<Typography variant="h6" gutterBottom>
|
||||
<Assessment sx={{ mr: 1, verticalAlign: 'middle' }} />
|
||||
Score de vraisemblance
|
||||
</Typography>
|
||||
<Box sx={{ display: 'flex', alignItems: 'center', mb: 2 }}>
|
||||
{getScoreIcon(analysisResult.credibilityScore)}
|
||||
<Typography variant="h4" sx={{ ml: 2 }}>
|
||||
{(analysisResult.credibilityScore * 100).toFixed(1)}%
|
||||
</Typography>
|
||||
</Box>
|
||||
<LinearProgress
|
||||
variant="determinate"
|
||||
value={analysisResult.credibilityScore * 100}
|
||||
color={getScoreColor(analysisResult.credibilityScore) as LinearProgressProps['color']}
|
||||
sx={{ height: 10, borderRadius: 5, mb: 2 }}
|
||||
/>
|
||||
<Typography variant="body2" color="text.secondary">
|
||||
{analysisResult.credibilityScore >= 0.8
|
||||
? 'Document très fiable'
|
||||
: analysisResult.credibilityScore >= 0.6
|
||||
? 'Document moyennement fiable'
|
||||
: 'Document peu fiable - vérification recommandée'}
|
||||
</Typography>
|
||||
{analysisResult.summary && (
|
||||
<Paper
|
||||
sx={{
|
||||
p: 2,
|
||||
bgcolor: 'grey.50',
|
||||
border: '1px solid',
|
||||
borderColor: 'grey.200',
|
||||
mt: 2,
|
||||
}}
|
||||
>
|
||||
<Typography variant="body2" sx={{ whiteSpace: 'pre-wrap' }}>
|
||||
{analysisResult.summary}
|
||||
</Typography>
|
||||
</Paper>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Analyse LLM */}
|
||||
<Card>
|
||||
<CardContent>
|
||||
@ -120,7 +186,7 @@ export default function ConseilView() {
|
||||
<CardContent>
|
||||
<Typography variant="h6" gutterBottom>
|
||||
<CheckCircle sx={{ mr: 1, verticalAlign: 'middle' }} />
|
||||
Recommandations ({conseilResult.recommendations.length})
|
||||
Recommandations ({conseilResult.recommendations.length + (analysisResult?.recommendations?.length || 0)})
|
||||
</Typography>
|
||||
<List dense>
|
||||
{conseilResult.recommendations.map((recommendation, index) => (
|
||||
@ -131,6 +197,15 @@ export default function ConseilView() {
|
||||
<ListItemText primary={recommendation} />
|
||||
</ListItem>
|
||||
))}
|
||||
{/* Ajouter les recommandations d'analyse si disponibles */}
|
||||
{analysisResult?.recommendations?.map((recommendation, index) => (
|
||||
<ListItem key={`analysis-${index}`}>
|
||||
<ListItemIcon>
|
||||
<Assessment color="info" />
|
||||
</ListItemIcon>
|
||||
<ListItemText primary={recommendation} />
|
||||
</ListItem>
|
||||
))}
|
||||
</List>
|
||||
</CardContent>
|
||||
</Card>
|
||||
@ -218,8 +293,16 @@ export default function ConseilView() {
|
||||
Résumé exécutif
|
||||
</Typography>
|
||||
<Box sx={{ display: 'flex', gap: 1, flexWrap: 'wrap', mb: 2 }}>
|
||||
{analysisResult && (
|
||||
<Chip
|
||||
icon={<Assessment />}
|
||||
label={`Score: ${(analysisResult.credibilityScore * 100).toFixed(1)}%`}
|
||||
color={getScoreColor(analysisResult.credibilityScore)}
|
||||
variant="filled"
|
||||
/>
|
||||
)}
|
||||
<Chip
|
||||
label={`${conseilResult.recommendations.length} recommandations`}
|
||||
label={`${conseilResult.recommendations.length + (analysisResult?.recommendations?.length || 0)} recommandations`}
|
||||
color="success"
|
||||
variant="outlined"
|
||||
/>
|
||||
|
||||
@ -296,4 +296,5 @@ export default function ContexteView() {
|
||||
</Box>
|
||||
</Layout>
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@ -1,4 +1,4 @@
|
||||
import { useCallback, useState, useEffect, useRef } from 'react'
|
||||
import { useCallback, useState } from 'react'
|
||||
import { useDropzone } from 'react-dropzone'
|
||||
import {
|
||||
Box,
|
||||
@ -8,14 +8,12 @@ import {
|
||||
Alert,
|
||||
Button,
|
||||
Chip,
|
||||
LinearProgress,
|
||||
Card,
|
||||
CardContent,
|
||||
List,
|
||||
ListItem,
|
||||
ListItemText,
|
||||
ListItemIcon,
|
||||
Divider
|
||||
Divider,
|
||||
Card
|
||||
} from '@mui/material'
|
||||
import {
|
||||
CloudUpload,
|
||||
@ -28,54 +26,44 @@ import {
|
||||
PictureAsPdf
|
||||
} from '@mui/icons-material'
|
||||
import { useAppDispatch, useAppSelector } from '../store'
|
||||
import { uploadDocument, removeDocument, addDocuments, setCurrentDocument } from '../store/documentSlice'
|
||||
import { uploadFileToFolderThunk, loadFolderResults, removeDocument } from '../store/documentSlice'
|
||||
import { Layout } from '../components/Layout'
|
||||
import { FilePreview } from '../components/FilePreview'
|
||||
import { getTestFilesList, loadTestFile, filterSupportedFiles } from '../services/testFilesApi'
|
||||
import type { Document } from '../types'
|
||||
|
||||
export default function UploadView() {
|
||||
const dispatch = useAppDispatch()
|
||||
const { documents, error, extractionById } = useAppSelector((state) => state.document)
|
||||
const { documents, error, currentFolderHash } = useAppSelector((state) => state.document)
|
||||
|
||||
console.log('🏠 [UPLOAD_VIEW] Component loaded, documents count:', documents.length)
|
||||
const [previewDocument, setPreviewDocument] = useState<Document | null>(null)
|
||||
const [bootstrapped, setBootstrapped] = useState(false)
|
||||
const [bootstrapInProgress, setBootstrapInProgress] = useState(false)
|
||||
const bootstrapTriggered = useRef(false)
|
||||
const [isProcessing, setIsProcessing] = useState(false)
|
||||
const [processedCount, setProcessedCount] = useState(0)
|
||||
const [totalFiles, setTotalFiles] = useState(0)
|
||||
|
||||
const onDrop = useCallback(
|
||||
async (acceptedFiles: File[]) => {
|
||||
setIsProcessing(true)
|
||||
setTotalFiles(acceptedFiles.length)
|
||||
setProcessedCount(0)
|
||||
if (!currentFolderHash) {
|
||||
console.error('❌ [UPLOAD] Aucun dossier sélectionné')
|
||||
return
|
||||
}
|
||||
|
||||
// Traitement en parallèle de tous les fichiers
|
||||
const uploadPromises = acceptedFiles.map(async (file) => {
|
||||
try {
|
||||
const doc = await dispatch(uploadDocument(file)).unwrap()
|
||||
|
||||
// Déclencher l'extraction immédiatement
|
||||
if (!extractionById[doc.id]) {
|
||||
const { extractDocument } = await import('../store/documentSlice')
|
||||
dispatch(extractDocument(doc.id))
|
||||
}
|
||||
|
||||
setProcessedCount(prev => prev + 1)
|
||||
return doc
|
||||
console.log(`📤 [UPLOAD] Upload de ${file.name} dans le dossier ${currentFolderHash}`)
|
||||
await dispatch(uploadFileToFolderThunk({ file, folderHash: currentFolderHash })).unwrap()
|
||||
|
||||
// Recharger les résultats du dossier après upload
|
||||
await dispatch(loadFolderResults(currentFolderHash)).unwrap()
|
||||
|
||||
console.log(`✅ [UPLOAD] ${file.name} uploadé avec succès`)
|
||||
} catch (error) {
|
||||
console.error(`Erreur lors du traitement de ${file.name}:`, error)
|
||||
setProcessedCount(prev => prev + 1)
|
||||
return null
|
||||
console.error(`❌ [UPLOAD] Erreur lors du traitement de ${file.name}:`, error)
|
||||
}
|
||||
})
|
||||
|
||||
// Attendre que tous les fichiers soient traités
|
||||
await Promise.all(uploadPromises)
|
||||
setIsProcessing(false)
|
||||
},
|
||||
[dispatch, extractionById]
|
||||
[dispatch, currentFolderHash]
|
||||
)
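The useDropzone configuration itself is elided from this hunk; for the formats listed later in the view (PDF, PNG, JPG, JPEG, TIFF), a typical react-dropzone accept map would look like the sketch below, which is not necessarily the exact configuration used here.

// Illustrative accept map for react-dropzone, matching the supported formats.
const acceptedTypes = {
  'application/pdf': ['.pdf'],
  'image/png': ['.png'],
  'image/jpeg': ['.jpg', '.jpeg'],
  'image/tiff': ['.tiff'],
}
// e.g. useDropzone({ onDrop, accept: acceptedTypes })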
|
||||
|
||||
const { getRootProps, getInputProps, isDragActive } = useDropzone({
|
||||
@ -113,95 +101,8 @@ export default function UploadView() {
|
||||
}
|
||||
}
|
||||
|
||||
// Bootstrap: charger automatiquement les fichiers de test et les traiter en parallèle
|
||||
useEffect(() => {
|
||||
if (bootstrapped || bootstrapInProgress || bootstrapTriggered.current || !import.meta.env.DEV) return
|
||||
// Bootstrap maintenant géré dans App.tsx
|
||||
|
||||
const load = async () => {
|
||||
bootstrapTriggered.current = true
|
||||
setBootstrapInProgress(true)
|
||||
console.log('🔄 [BOOTSTRAP] Chargement automatique des fichiers de test...')
|
||||
|
||||
try {
|
||||
// Récupérer la liste des fichiers disponibles
|
||||
const testFiles = await getTestFilesList()
|
||||
console.log('📁 [BOOTSTRAP] Fichiers trouvés:', testFiles.map(f => f.name))
|
||||
|
||||
// Filtrer les fichiers supportés
|
||||
const supportedFiles = filterSupportedFiles(testFiles)
|
||||
console.log('✅ [BOOTSTRAP] Fichiers supportés:', supportedFiles.map(f => f.name))
|
||||
|
||||
if (supportedFiles.length === 0) {
|
||||
console.log('⚠️ [BOOTSTRAP] Aucun fichier de test supporté trouvé')
|
||||
setBootstrapped(true)
|
||||
return
|
||||
}
|
||||
|
||||
// Démarrer le traitement en parallèle
|
||||
setIsProcessing(true)
|
||||
setTotalFiles(supportedFiles.length)
|
||||
setProcessedCount(0)
|
||||
|
||||
// Traitement en parallèle de tous les fichiers de test
|
||||
const loadPromises = supportedFiles.map(async (fileInfo) => {
|
||||
try {
|
||||
console.log(`📄 [BOOTSTRAP] Chargement de ${fileInfo.name}...`)
|
||||
const file = await loadTestFile(fileInfo.name)
|
||||
|
||||
if (file) {
|
||||
// Simuler upload local
|
||||
const previewUrl = URL.createObjectURL(file)
|
||||
const document: Document = {
|
||||
id: `boot-${fileInfo.name}-${Date.now()}`,
|
||||
name: fileInfo.name,
|
||||
mimeType: fileInfo.type || 'application/octet-stream',
|
||||
functionalType: undefined,
|
||||
size: fileInfo.size,
|
||||
uploadDate: new Date(),
|
||||
status: 'completed',
|
||||
previewUrl,
|
||||
}
|
||||
|
||||
// Ajouter le document au store
|
||||
dispatch(addDocuments([document]))
|
||||
|
||||
setProcessedCount(prev => prev + 1)
|
||||
console.log(`✅ [BOOTSTRAP] ${fileInfo.name} chargé (extraction gérée par Layout)`)
|
||||
return document
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn(`❌ [BOOTSTRAP] Erreur lors du chargement de ${fileInfo.name}:`, error)
|
||||
setProcessedCount(prev => prev + 1)
|
||||
return null
|
||||
}
|
||||
})
|
||||
|
||||
// Attendre que tous les fichiers soient chargés
|
||||
const results = await Promise.all(loadPromises)
|
||||
const successfulDocs = results.filter(doc => doc !== null)
|
||||
|
||||
if (successfulDocs.length > 0) {
|
||||
console.log(`🎉 [BOOTSTRAP] ${successfulDocs.length} fichiers chargés avec succès`)
|
||||
// Définir le premier document comme document courant
|
||||
const firstDoc = successfulDocs[0]
|
||||
if (firstDoc) {
|
||||
dispatch(setCurrentDocument(firstDoc))
|
||||
}
|
||||
}
|
||||
|
||||
setIsProcessing(false)
|
||||
setBootstrapped(true)
|
||||
} catch (error) {
|
||||
console.error('❌ [BOOTSTRAP] Erreur lors du chargement des fichiers de test:', error)
|
||||
setIsProcessing(false)
|
||||
setBootstrapped(true)
|
||||
} finally {
|
||||
setBootstrapInProgress(false)
|
||||
}
|
||||
}
|
||||
|
||||
load()
|
||||
}, [dispatch, bootstrapped])
|
||||
|
||||
const getFileIcon = (mimeType: string) => {
|
||||
if (mimeType.includes('pdf')) return <PictureAsPdf color="error" />
|
||||
@ -209,7 +110,7 @@ export default function UploadView() {
|
||||
return <Description color="action" />
|
||||
}
|
||||
|
||||
const progressPercentage = totalFiles > 0 ? (processedCount / totalFiles) * 100 : 0
|
||||
|
||||
|
||||
return (
|
||||
<Layout>
|
||||
@ -217,35 +118,12 @@ export default function UploadView() {
|
||||
Analyse de documents 4NK IA
|
||||
</Typography>
|
||||
|
||||
{/* Barre de progression globale */}
|
||||
{isProcessing && (
|
||||
<Card sx={{ mb: 3 }}>
|
||||
<CardContent>
|
||||
<Box display="flex" alignItems="center" gap={2} mb={2}>
|
||||
<CircularProgress size={24} />
|
||||
<Typography variant="h6">
|
||||
Traitement en cours...
|
||||
</Typography>
|
||||
</Box>
|
||||
<Box mb={1}>
|
||||
<Typography variant="body2" color="text.secondary">
|
||||
{processedCount} fichier{processedCount > 1 ? 's' : ''} sur {totalFiles} traité{totalFiles > 1 ? 's' : ''}
|
||||
</Typography>
|
||||
</Box>
|
||||
<LinearProgress
|
||||
variant="determinate"
|
||||
value={progressPercentage}
|
||||
sx={{ height: 8, borderRadius: 4 }}
|
||||
/>
|
||||
</CardContent>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Zone de drop */}
|
||||
<Paper
|
||||
{...getRootProps()}
|
||||
sx={{
|
||||
p: 4,
|
||||
p: { xs: 2, sm: 4 },
|
||||
textAlign: 'center',
|
||||
cursor: 'pointer',
|
||||
border: '2px dashed',
|
||||
@ -258,17 +136,18 @@ export default function UploadView() {
|
||||
}}
|
||||
>
|
||||
<input {...getInputProps()} />
|
||||
<CloudUpload sx={{ fontSize: 48, color: 'primary.main', mb: 2 }} />
|
||||
<Typography variant="h6" gutterBottom>
|
||||
<CloudUpload sx={{ fontSize: { xs: 36, sm: 48 }, color: 'primary.main', mb: 2 }} />
|
||||
<Typography variant="h6" gutterBottom sx={{ fontSize: { xs: '1.1rem', sm: '1.25rem' } }}>
|
||||
{isDragActive
|
||||
? 'Déposez les fichiers ici...'
|
||||
: 'Glissez-déposez vos documents ou cliquez pour sélectionner'}
|
||||
</Typography>
|
||||
<Typography variant="body2" color="text.secondary">
|
||||
<Typography variant="body2" color="text.secondary" sx={{ fontSize: { xs: '0.75rem', sm: '0.875rem' } }}>
|
||||
Formats acceptés: PDF, PNG, JPG, JPEG, TIFF
|
||||
</Typography>
|
||||
</Paper>
|
||||
|
||||
|
||||
{error && (
|
||||
<Alert severity="error" sx={{ mt: 2 }}>
|
||||
{error}
|
||||
@ -292,30 +171,51 @@ export default function UploadView() {
|
||||
</ListItemIcon>
|
||||
<ListItemText
|
||||
primary={
|
||||
<Box display="flex" alignItems="center" gap={1}>
|
||||
{getStatusIcon(doc.status)}
|
||||
<Typography variant="subtitle1">
|
||||
{doc.name}
|
||||
</Typography>
|
||||
<Chip
|
||||
label={doc.status}
|
||||
size="small"
|
||||
color={getStatusColor(doc.status) as 'success' | 'error' | 'warning' | 'default'}
|
||||
/>
|
||||
<Box>
|
||||
<Box display="flex" alignItems="center" gap={1} mb={1}>
|
||||
{getStatusIcon(doc.status)}
|
||||
<Typography
|
||||
variant="subtitle1"
|
||||
sx={{
|
||||
wordBreak: 'break-word',
|
||||
overflow: 'hidden',
|
||||
textOverflow: 'ellipsis',
|
||||
display: '-webkit-box',
|
||||
WebkitLineClamp: 2,
|
||||
WebkitBoxOrient: 'vertical',
|
||||
maxWidth: { xs: '200px', sm: '300px', md: '400px' }
|
||||
}}
|
||||
>
|
||||
{doc.name}
|
||||
</Typography>
|
||||
</Box>
|
||||
<Box display="flex" gap={1} flexWrap="wrap">
|
||||
<Chip
|
||||
label={doc.status}
|
||||
size="small"
|
||||
color={getStatusColor(doc.status) as 'success' | 'error' | 'warning' | 'default'}
|
||||
/>
|
||||
<Chip
|
||||
label={doc.mimeType}
|
||||
size="small"
|
||||
variant="outlined"
|
||||
/>
|
||||
<Chip
|
||||
label={`${(doc.size / 1024 / 1024).toFixed(2)} MB`}
|
||||
size="small"
|
||||
variant="outlined"
|
||||
/>
|
||||
</Box>
|
||||
</Box>
|
||||
}
|
||||
secondary={
|
||||
<Typography variant="body2" color="text.secondary">
|
||||
{doc.mimeType} • {(doc.size / 1024 / 1024).toFixed(2)} MB
|
||||
</Typography>
|
||||
}
|
||||
/>
|
||||
<Box display="flex" gap={1}>
|
||||
<Box display="flex" gap={1} flexDirection={{ xs: 'column', sm: 'row' }}>
|
||||
<Button
|
||||
size="small"
|
||||
startIcon={<Visibility />}
|
||||
onClick={() => setPreviewDocument(doc)}
|
||||
disabled={doc.status !== 'completed'}
|
||||
fullWidth
|
||||
>
|
||||
Aperçu
|
||||
</Button>
|
||||
@ -323,6 +223,7 @@ export default function UploadView() {
|
||||
size="small"
|
||||
color="error"
|
||||
onClick={() => dispatch(removeDocument(doc.id))}
|
||||
fullWidth
|
||||
>
|
||||
Supprimer
|
||||
</Button>
|
||||
|
||||
59
test-folders.js
Normal file
59
test-folders.js
Normal file
@ -0,0 +1,59 @@
|
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');

// Fonction pour générer un hash de dossier
function generateFolderHash() {
  return crypto.randomBytes(16).toString('hex');
}

// Fonction pour créer la structure de dossiers
function createFolderStructure(folderHash) {
  console.log(`[FOLDER] Création de la structure pour le hash: ${folderHash}`);
  console.log(`[FOLDER] Répertoire de travail: ${process.cwd()}`);

  // Créer les dossiers racines s'ils n'existent pas
  const uploadsDir = 'uploads';
  const cacheDir = 'cache';

  console.log(`[FOLDER] Vérification de l'existence de ${uploadsDir}: ${fs.existsSync(uploadsDir)}`);
  console.log(`[FOLDER] Vérification de l'existence de ${cacheDir}: ${fs.existsSync(cacheDir)}`);

  if (!fs.existsSync(uploadsDir)) {
    fs.mkdirSync(uploadsDir, { recursive: true });
    console.log(`[FOLDER] Dossier racine créé: ${uploadsDir}`);
  }
  if (!fs.existsSync(cacheDir)) {
    fs.mkdirSync(cacheDir, { recursive: true });
    console.log(`[FOLDER] Dossier racine créé: ${cacheDir}`);
  }

  const folderPath = path.join(uploadsDir, folderHash);
  const cachePath = path.join(cacheDir, folderHash);

  console.log(`[FOLDER] Chemin du dossier uploads: ${folderPath}`);
  console.log(`[FOLDER] Chemin du dossier cache: ${cachePath}`);

  if (!fs.existsSync(folderPath)) {
    fs.mkdirSync(folderPath, { recursive: true });
    console.log(`[FOLDER] Dossier uploads créé: ${folderPath}`);
  }
  if (!fs.existsSync(cachePath)) {
    fs.mkdirSync(cachePath, { recursive: true });
    console.log(`[FOLDER] Dossier cache créé: ${cachePath}`);
  }

  return { folderPath, cachePath };
}

// Test
console.log('=== Test de création de dossier ===');
const folderHash = generateFolderHash();
console.log(`Hash généré: ${folderHash}`);

const result = createFolderStructure(folderHash);
console.log('Résultat:', result);

console.log('\n=== Vérification des dossiers créés ===');
console.log('Dossiers uploads:', fs.readdirSync('uploads'));
console.log('Dossiers cache:', fs.readdirSync('cache'));
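A possible cleanup counterpart for the test script above, so that repeated runs do not accumulate random-hash folders; this is a sketch and not part of the commit.

import * as fs from 'fs'
import * as path from 'path'

// Remove the uploads/<hash> and cache/<hash> folders created by a test run.
export function removeTestFolder(folderHash: string): void {
  for (const root of ['uploads', 'cache']) {
    const dir = path.join(root, folderHash)
    if (fs.existsSync(dir)) {
      fs.rmSync(dir, { recursive: true, force: true })
      console.log(`[CLEANUP] Removed ${dir}`)
    }
  }
}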