Compare commits


224 Commits

Author SHA1 Message Date
Omar Oughriss
6a51281d00 Replace 'ext' tag with 'int-dev'
All checks were successful
build-and-push-int-dev / build_push (push) Successful in 10s
2025-09-22 15:01:49 +02:00
Omar Oughriss
2e1580e400 Replace 'ext' tag with 'int-dev' 2025-09-22 14:10:02 +02:00
Debian Dev4
22bcc727c9 docs: add architecture, installation, deployment, functional, technical, flows, quality, security, TODO [skip ci] 2025-09-22 07:32:24 +00:00
Debian Dev4
087ea0b2c3 update submodule 2025-09-22 06:23:39 +00:00
Debian Dev4
b0ac349a69 update submodule 2025-09-22 05:59:39 +00:00
Debian Dev4
e198904e00 update submodule 2025-09-22 05:45:38 +00:00
Debian Dev4
d70e02ebd2 ci: docker_tag=ext - Trigger CI build for Docker images
All checks were successful
build-and-push-ext / build_push (push) Successful in 12s
2025-09-21 19:57:49 +00:00
Debian Dev4
48d9c79040 ci: docker_tag=ext - Update for 4NK_env integration 2025-09-21 19:55:47 +00:00
Debian Dev4
baa4e19432 ci: docker_tag=ext - Migrate to Debian base with minimal packages
All checks were successful
build-and-push-ext / build_push (push) Successful in 2m56s
2025-09-21 18:25:09 +00:00
Debian Dev4
80c091fde4 ci: docker_tag=ext - Optimize Dockerfile to reduce image size (remove dev tools)
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m19s
2025-09-21 18:09:27 +00:00
Debian Dev4
7ef06bc6b5 ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m57s
2025-09-21 17:05:55 +00:00
Debian Dev4
707cb174c1 ci: docker_tag=ext 2025-09-21 16:57:46 +00:00
Debian Dev4
bc89a790ce ci: docker_tag=ext - Fix Alpine Dockerfile - remove cscope package
Some checks are pending
build-and-push-ext / build_push (push) Waiting to run
2025-09-21 15:36:20 +00:00
Debian Dev4
e2a55f187b ci: docker_tag=ext - Fix Dockerfile issues with WASM package and Alpine compatibility 2025-09-21 13:46:49 +00:00
Debian Dev4
e1f4f8b4ed ci: docker_tag=ext - Update Dockerfiles with comprehensive dependencies and tools 2025-09-21 13:45:54 +00:00
Debian Dev4
b507c54c70 ci: docker_tag=ext - Mise à jour Dockerfile avec outils requis et .cursorignore 2025-09-21 13:27:05 +00:00
Debian Dev4
80b74edcc6 ci: docker_tag=ext - Mise à jour automatique des dépendances et compilation 2025-09-21 11:49:20 +00:00
Debian Dev4
dc6bb1da30 ci: docker_tag=ext - Remove SSH dependency, use HTTPS for public repos 2025-09-21 11:42:03 +00:00
Debian Dev4
cba30de720 ci: docker_tag=ext - Mise à jour automatique des dépendances et compilation 2025-09-21 11:32:35 +00:00
Debian Dev4
b9232ef9f3 ci: docker_tag=ext - Modifier workflow pour déclenchement sur tag ext au lieu de branche dev4
All checks were successful
build-and-push-ext / build_push (push) Successful in 8s
2025-09-21 08:58:48 +00:00
Debian Dev4
614104d68e ci: docker_tag=ext - Update project for dev4 branch consistency
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m19s
2025-09-21 08:23:35 +00:00
Debian Dev4
495775f68a Update configuration and dependencies
All checks were successful
build-and-push-ext / build_push (push) Successful in 56s
2025-09-20 21:24:33 +00:00
Debian Dev4
792f6eb3cc docs: Documentation des corrections appliquées
All checks were successful
build-and-push-ext / build_push (push) Successful in 57s
- Correction du problème de healthcheck
- Changement pour vérifier le processus next-server
- Installation des outils système
- Configuration Next.js optimisée
2025-09-20 14:23:16 +00:00
Debian Dev4
45cec46cac docs: Mise à jour de la documentation et correction du bridge
All checks were successful
build-and-push-ext / build_push (push) Successful in 58s
- Mise à jour de docs/ANALYSE.md et docs/ENV-RESUME.md
- Correction de src/pages/authorized-bridge.tsx
- Ajout du workspace VS Code
2025-09-20 10:49:53 +00:00
Debian Dev4
96f8788c08 ci: docker_tag=ext - Update documentation and analysis
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m26s
2025-09-19 17:03:54 +00:00
Debian Dev4
9e8ed1ad1d ci: docker_tag=ext chore(front): rebuild ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 9s
2025-09-19 08:01:56 +00:00
Debian Dev4
5af06c76f6 ci: docker_tag=ext feat(front): callback bridge authorized-bridge; bump 0.1.6; changelog
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m31s
2025-09-18 22:20:19 +00:00
Debian Dev4
387e7ed65c feat(idnot): POST /api/v1/idnot/auth avec body {code} | ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m28s
2025-09-18 19:36:23 +00:00
Debian Dev4
1bd368c948 ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 8s
2025-09-18 17:01:01 +00:00
Debian Dev4
0cbd1fc714 fix(lecoffre-front): ignorer silencieusement les messages des DevTools dans AuthModal
All checks were successful
build-and-push-ext / build_push (push) Successful in 57s
2025-09-18 16:49:03 +00:00
Debian Dev4
04fcc2e5b2 fix(lecoffre-front): iframe src utilise getIframeUrl() au lieu de getTargetOrigin() pour charger l'URL complète
All checks were successful
build-and-push-ext / build_push (push) Successful in 58s
2025-09-18 16:46:08 +00:00
Debian Dev4
6cfeb5cab8 fix(front): tolérer messages si origin OK même si source !== iframeRef (postMessage)
All checks were successful
build-and-push-ext / build_push (push) Successful in 59s
2025-09-18 16:39:50 +00:00
Debian Dev4
6ad0b5c339 ci: docker_tag=ext - force rebuild (HTTPS redirect after ID.not)
All checks were successful
build-and-push-ext / build_push (push) Successful in 8s
2025-09-18 16:34:24 +00:00
Debian Dev4
dd22e4baa0 fix(front): forcer HTTPS dès StepEmail pour éviter contexte de sécurité mixte
All checks were successful
build-and-push-ext / build_push (push) Successful in 57s
2025-09-18 16:33:02 +00:00
Debian Dev4
6a986028e9 fix(front): forcer redirection HTTPS vers dev4 après callback ID.not
All checks were successful
build-and-push-ext / build_push (push) Successful in 57s
2025-09-18 16:29:08 +00:00
Debian Dev4
a8a3d58ed1 docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m0s
2025-09-18 15:22:30 +00:00
Debian Dev4
52dca1cd3c ci: docker_tag=ext | refactor(idnot): centralise base URL via BaseApiService
All checks were successful
build-and-push-ext / build_push (push) Successful in 58s
2025-09-18 13:45:07 +00:00
Debian Dev4
623dd4f637 ci: docker_tag=ext | fix(idnot): corrige base URL sans double /api/v1
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m13s
2025-09-18 13:05:10 +00:00
Debian Dev4
829c1ef614 ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 8s
env: aligner variables NEXT_PUBLIC_* basePath /lecoffre
2025-09-18 10:29:01 +00:00
Debian Dev4
0f0a035967 ci: docker_tag=ext | fix(api-url): normalise les slashs + retire Hotjar + message login
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m41s
2025-09-18 07:49:32 +00:00
Debian Dev4
2c1a33330c ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 57s
2025-09-17 16:28:36 +00:00
Debian Dev4
e63295767a chore(cursor): ajouter .cursor/config.json
Some checks failed
build-and-push-ext / build_push (push) Failing after 5s
2025-09-17 16:27:23 +00:00
Debian Dev4
a761f0841a chore(cursor): ajouter règles Cursor (.cursor/rules.md)
Some checks failed
build-and-push-ext / build_push (push) Failing after 5s
2025-09-17 16:25:32 +00:00
Debian Dev4
e754f04c77 ci: docker_tag=ext
Some checks failed
build-and-push-ext / build_push (push) Failing after 5s
2025-09-17 16:20:23 +00:00
Debian Dev4
8e791c5807 ci: docker_tag=ext
Some checks failed
build-and-push-ext / build_push (push) Failing after 6s
2025-09-17 16:10:26 +00:00
Debian Dev4
a6e9cbb028 feat(login): mapping erreurs — 401 email→error=3, 403 office→error=5, 502 idnot→error=4
Some checks failed
build-and-push-ext / build_push (push) Failing after 5s
2025-09-17 15:48:52 +00:00
Debian Dev4
89d76f017e ci: docker_tag=ext
Some checks failed
build-and-push-ext / build_push (push) Failing after 5s
2025-09-17 14:21:51 +00:00
Debian Dev4
ff53727d6b chore(env): aligner NEXT_PUBLIC_API_URL sur /api
Some checks failed
build-and-push-ext / build_push (push) Failing after 8s
2025-09-17 14:21:28 +00:00
Debian Dev4
960cb5c24e ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 57s
2025-09-17 14:08:41 +00:00
Debian Dev4
18ec096de9 chore(env): mise à jour .env.example (routing back)
Some checks failed
build-and-push-ext / build_push (push) Has been cancelled
2025-09-17 14:08:20 +00:00
Debian Dev4
94847d15ad ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 58s
2025-09-17 13:05:19 +00:00
Debian Dev4
0b905b34eb fix(auth-callback): attendre router.isReady et nettoyer l’URL après échange du code
Some checks failed
build-and-push-ext / build_push (push) Has been cancelled
2025-09-17 13:05:01 +00:00
Debian Dev4
427a8854df ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 57s
2025-09-17 11:46:18 +00:00
Debian Dev4
1a76bf0525 chore(env): mise à jour .env.example + ajustements LoginCallback
Some checks failed
build-and-push-ext / build_push (push) Has been cancelled
2025-09-17 11:46:05 +00:00
Debian Dev4
3dd2b55417 ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m7s
2025-09-17 11:34:54 +00:00
Debian Dev4
11fea86b14 chore(env): mise à jour .env.example
Some checks failed
build-and-push-ext / build_push (push) Has been cancelled
2025-09-17 11:34:36 +00:00
Debian Dev4
f51a5a79aa ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 56s
2025-09-17 10:23:47 +00:00
Debian Dev4
233c352359 feat(idnot): garde anti-vide sur redirect_uri avec warning + fallback
Some checks failed
build-and-push-ext / build_push (push) Has been cancelled
2025-09-17 10:23:30 +00:00
Debian Dev4
7315c16329 ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m5s
2025-09-17 10:19:56 +00:00
Debian Dev4
995bb70bbb fix(idnot): utiliser NEXT_PUBLIC_IDNOT_REDIRECT_URI avec fallback non-vide
Some checks failed
build-and-push-ext / build_push (push) Has been cancelled
2025-09-17 10:19:30 +00:00
Debian Dev4
fe52bb4c68 ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 9s
2025-09-17 10:12:47 +00:00
Debian Dev4
c435e46bde feat(idnot): NEXT_PUBLIC_IDNOT_REDIRECT_URI + usage (isoler redirect_uri)
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m10s
2025-09-17 10:05:46 +00:00
Debian Dev4
94f4079518 ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 9s
2025-09-17 09:34:23 +00:00
Debian Dev4
4833e3c872 ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 57s
2025-09-17 09:31:44 +00:00
Debian Dev4
ee5c2db9aa chore(env): NEXT_PUBLIC_FRONT_APP_HOST=https://dev4.4nkweb.com/lecoffre
Some checks failed
build-and-push-ext / build_push (push) Has been cancelled
2025-09-17 09:31:30 +00:00
Debian Dev4
f1a5ba9c0d docs(tests): validation image ext et vérif NEXT_PUBLIC_*
All checks were successful
build-and-push-ext / build_push (push) Successful in 58s
2025-09-17 09:04:28 +00:00
Debian Dev4
298e31e201 ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 8s
2025-09-17 09:00:21 +00:00
Debian Dev4
2f97110845 ci: docker_tag=ext (parse .env.example markdown table)
All checks were successful
build-and-push-ext / build_push (push) Successful in 56s
2025-09-17 08:53:58 +00:00
Debian Dev4
cf84f59353 ci: docker_tag=ext (ensure NEXT_PUBLIC_* exported via .env.example + secrets)
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m9s
2025-09-17 08:51:30 +00:00
Debian Dev4
304556c1f1 ci: docker_tag=ext
Some checks failed
build-and-push-ext / build_push (push) Failing after 6s
2025-09-17 08:49:43 +00:00
Debian Dev4
a9b3bf9bd1 ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 58s
2025-09-17 08:32:31 +00:00
Debian Dev4
4c011f9ec6 ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m36s
2025-09-17 08:11:42 +00:00
Debian Dev4
c2e1da365e chore(ci,env): ajuster workflow Gitea et actualiser .env.example
All checks were successful
build-and-push-ext / build_push (push) Successful in 57s
2025-09-17 06:35:42 +00:00
Debian Dev4
612c0cd0ec ci: docker_tag=ext feat(front): basePath '/lecoffre' + normalize targetOrigin
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m45s
2025-09-17 06:33:37 +00:00
Debian Dev4
e73614a779 ci: docker_tag=ext rebuild
All checks were successful
build-and-push-ext / build_push (push) Successful in 8s
2025-09-17 06:08:55 +00:00
Debian Dev4
0fa012bc85 ci: docker_tag=ext
All checks were successful
build-and-push-ext / build_push (push) Successful in 58s
2025-09-17 05:16:59 +00:00
Debian Dev4
32c74d0a3e docs: utiliser un tag non versionné pour les builds (ext) 2025-09-17 05:16:51 +00:00
Debian Dev4
7a7312afd4 ci: docker_tag=ext-0.1.3 build: standalone Next, caches BuildKit, dockerignore, docs+changelog
All checks were successful
build-and-push-ext / build_push (push) Successful in 1m49s
2025-09-17 05:10:26 +00:00
Debian Dev4
26c26d4fd8 ci: retrigger build-and-push-ext (SSH_PRIVATE_KEY present)
All checks were successful
build-and-push-ext / build_push (push) Successful in 4m18s
2025-09-17 05:01:29 +00:00
Debian Dev4
9ac66c1e16 ci(gitea): exporter SSH_AUTH_SOCK/SSH_AGENT_PID et guard build --ssh
Some checks failed
build-and-push-ext / build_push (push) Has been cancelled
2025-09-17 05:00:00 +00:00
Debian Dev4
3aaf543175 ci(gitea): build+push image ext via USER/TOKEN et docker_tag
Some checks failed
build-and-push-ext / build_push (push) Failing after 5s
2025-09-17 04:51:45 +00:00
Debian Dev4
ed982620de docs(ci): préciser secrets CI USER et TOKEN (git.4nkweb.com) 2025-09-17 04:49:51 +00:00
Debian Dev4
b174d9491a ci: remove GitHub workflow; use Gitea CI only 2025-09-17 04:48:04 +00:00
Debian Dev4
2361e184f8 ci: docker_tag=ext-0.1.2 build(front): image ext, env vars, doc+tests, v0.1.2 2025-09-17 04:46:15 +00:00
Debian Dev4
24698b0b64 fix: replace hardcoded localhost:8080 with environment variables
- Fix IdNot API calls to use FrontendVariables instead of hardcoded localhost:8080
- Fix Customers API calls to use FrontendVariables instead of hardcoded localhost:8080
- Add missing FrontendVariables import in Customers.ts
- Resolves API calls using localhost:8080 instead of configured HTTPS endpoints
2025-09-16 21:51:59 +00:00
omaroughriss
c0fc55e93e Delete latest tag for front image
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m57s
2025-09-12 16:25:47 +02:00
omaroughriss
22fb7122a5 Update .env.exemple
Some checks failed
Build and Push to Registry / build-and-push (push) Failing after 3m51s
2025-09-12 16:17:19 +02:00
Sosthene
56878c977c Add helper function to secure handling of processId in the code
Some checks failed
Build and Push to Registry / build-and-push (push) Failing after 4m22s
2025-09-12 13:00:51 +02:00
Sosthene
0a3be835a9 Fix Dashboard with folders 2025-09-12 13:00:51 +02:00
Sosthene
2450d674e2 Update CustomerService 2025-09-12 13:00:51 +02:00
omaroughriss
6a7918d081 Update remplaced url
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m56s
2025-09-12 09:57:17 +02:00
omaroughriss
b6899cbf3f Merge branch 'dev' of https://git.4nkweb.com/4nk/lecoffre-front into dev 2025-09-12 09:49:21 +02:00
Sosthene
7961165e1f Refactor and cleanup LoginCallback
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m58s
2025-09-12 07:30:14 +02:00
Sosthene
5d79b3df16 [Folder] Correctly load the first folder 2025-09-11 15:02:26 +02:00
Sosthene
b70516bc86 [Layouts/Folder] update FolderInformation ClientView 2025-09-11 14:25:16 +02:00
Sosthene
b22eda0399 [Auth/IdNot] make getOfficeProcessByIdNot return value explicit 2025-09-11 14:16:34 +02:00
Sosthene
dfe48d1de9 Update DeedTypeInformations 2025-09-11 14:14:24 +02:00
Sosthene
129e3ea973 Update DeedTypeEdit 2025-09-11 14:14:03 +02:00
Sosthene
b4d8562519 Update DeedTypeCreate 2025-09-11 14:13:09 +02:00
Sosthene
4540ac726a Update FolderService 2025-09-11 14:10:23 +02:00
Sosthene
9a3685ec4f Update DocumentTypeService 2025-09-11 14:09:44 +02:00
Sosthene
02f6a10b9d Update DeedTypeService 2025-09-11 14:09:16 +02:00
Sosthene
229a5a9585 Update CollaboratorService 2025-09-11 14:09:01 +02:00
omaroughriss
87bc0290d7 Merge branch 'dev' of https://git.4nkweb.com/4nk/lecoffre-front into dev 2025-09-11 13:38:24 +02:00
Sosthene
bb44128e12 WIP redirection
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m59s
2025-09-11 12:45:38 +02:00
Sosthene
a8bdb18cc4 [MessageBus] Systematically pass the return value of getProcess* into MapUtil.toJson()
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m58s
2025-09-11 11:33:33 +02:00
Sosthene
09a2fee475 [Auth] Update idNot methods 2025-09-11 11:33:33 +02:00
Sosthene
a6dfd45299 [MessageBus] explicit return type for getData 2025-09-11 11:33:33 +02:00
Sosthene
e9f1d3991d [MessageBus] refactoring of getProcess* methods 2025-09-11 11:33:33 +02:00
Sosthene
ad78491a90 [cookie] have leCoffreUserInfo and leCoffreOfficeInfo 2025-09-11 11:33:33 +02:00
Sosthene
2455e2cca2 validator id and storage urls as constants (don't break backward compability now) 2025-09-11 11:33:33 +02:00
Sosthene
222b8dc503 [BaseApiService] Remove unused checkJwtToken 2025-09-11 11:33:33 +02:00
Sosthene
e6df9cbba0 [Auth] Add clientAuth to build request to back 2025-09-11 11:33:33 +02:00
Sosthene
114c20dd26 Save maps in SessionStorage 2025-09-11 11:33:33 +02:00
Sosthene
c1f12b2cf7 Fix DeedType and DocumentType 2025-09-11 11:33:33 +02:00
Sosthene
8b70278614 Refactor DeedTypeService
* async/await pattern where possible
* remove unused getDeedTypeByUid()
2025-09-11 11:33:33 +02:00
Sosthene
5ea18802f6 Update contracts for DeedType 2025-09-11 11:33:33 +02:00
Sosthene
b889ed498c Catch empty attributes in completeCollaborators 2025-09-11 11:33:33 +02:00
Sosthene
5b819bbe3f Use the VALIDATOR_ID const 2025-09-11 11:33:33 +02:00
Sosthene
f0a16e4ad9 Add getIdNotUserForOffice 2025-09-11 11:33:33 +02:00
Sosthene
859a3428bd LoginCallback heavy refactoring (wip?) 2025-09-11 11:33:33 +02:00
Sosthene
7672c291b1 Add addCollaborators method to OfficeService 2025-09-11 11:33:33 +02:00
Sosthene
6aa4a21d17 Remove broken completeOfficeRoles 2025-09-11 11:33:33 +02:00
Sosthene
5a703126d2 MessageBus refactoring
* better error management
* Keep tracks of when messages sent don't have answers
* New convenient methods
2025-09-11 11:33:33 +02:00
Sosthene
0d1d2d1822 Make AuthModal works with decoupled Pairing creation 2025-09-11 11:33:33 +02:00
Sosthene
4ee08765e9 Heavy refactoring of importData 2025-09-11 11:33:33 +02:00
Sosthene
6c03da2139 Update of process definition for most process creations 2025-09-11 11:33:33 +02:00
Sosthene
8cc2b1f00a Proper url for database calls in DatabaseService 2025-09-11 11:33:33 +02:00
Sosthene
8801c704fa Add AppConstants 2025-09-11 11:33:33 +02:00
Sosthene
4140b9fe09 Add NEXT_PUBLIC_BACK_API_PORT env variable 2025-09-11 11:33:33 +02:00
Sosthene
db9a77725f Log as client (no verification) 2025-09-11 11:33:33 +02:00
omaroughriss
068532d8be Set callbackurl to local.lecoffreio.4nkweb
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m53s
2025-09-11 10:13:41 +02:00
omaroughriss
19fca09d0f Merge branch 'dev' of https://git.4nkweb.com/4nk/lecoffre-front into dev 2025-09-11 10:11:55 +02:00
Sosthene
0986d60974 Set callbackurl to 127.0.0.1:3000
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m54s
2025-09-10 15:58:57 +02:00
Omar Oughriss
a78666b4a9 Add "dev" tagged image
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m57s
2025-09-08 16:10:26 +02:00
Sosthene
b6b0522b59 Improve data import
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 4m27s
2025-08-22 12:01:24 +02:00
f87f7f747d Merge pull request 'Ignore PassIframeReady event in handleMessage' (#21) from ignore_some_events into dev
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m53s
Reviewed-on: #21
2025-08-11 08:23:15 +00:00
Sosthene
489d0bb5c3 Ignore PassIframeReady event in handleMessage
Ignore more events
2025-08-11 10:12:43 +02:00
3899cf1a53 Merge pull request 'backport_legacy_fix' (#19) from backport_legacy_fix into dev
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m56s
Reviewed-on: #19
2025-08-11 08:07:51 +00:00
Sosthene
8aa82f3a9c [bug] Don't count refused documents on progress bar for a folder 2025-08-07 17:54:46 +02:00
Sosthene
6e86e65845 Fix typo on main page 2025-08-07 17:53:09 +02:00
87976783a3 Fix some issues
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m59s
2025-08-05 14:56:53 +02:00
9589cf1116 Merge pull request 'Fix some issues' (#17) from ajanin into dev
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m56s
Reviewed-on: #17
2025-08-04 15:56:01 +00:00
0f28476aed Fix some issues 2025-08-04 17:55:19 +02:00
bb0a504fc3 Merge pull request 'ajanin' (#16) from ajanin into dev
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 4m6s
Reviewed-on: #16
2025-08-04 13:55:39 +00:00
d672e79064 Improve save process 2025-08-04 15:54:56 +02:00
b0f699f6f0 Init import data 2025-08-04 15:54:56 +02:00
d596f4212e Merge pull request 'Fix some issues' (#15) from ajanin into dev
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m56s
Reviewed-on: #15
2025-07-31 19:28:04 +00:00
82c9921372 Fix some issues 2025-07-31 21:27:02 +02:00
d4fdf3ac6d Merge pull request 'Fix some issues' (#14) from ajanin into dev
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m56s
Reviewed-on: #14
2025-07-31 09:41:44 +00:00
d4f51cec5f Fix some issues 2025-07-31 11:40:29 +02:00
omaroughriss
0492b3e28d Delete copy command
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m54s
2025-07-23 16:00:31 +02:00
omaroughriss
0aa0792ea3 Delete arguments builder
Some checks failed
Build and Push to Registry / build-and-push (push) Failing after 56s
2025-07-23 15:57:11 +02:00
omaroughriss
18f0c3e504 Merge branch 'dev' of https://git.4nkweb.com/4nk/lecoffre-front into dev
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m57s
2025-07-23 12:49:44 +02:00
omaroughriss
d76ba5eda9 Update cicd branche to dev 2025-07-23 12:49:42 +02:00
b1c1b5c284 Merge pull request 'Fix some issues' (#13) from ajanin into dev
Reviewed-on: #13
2025-07-23 10:11:09 +00:00
6b09941e28 Fix some issues 2025-07-23 12:10:10 +02:00
6a8c4063e6 Merge pull request 'Fix some issues' (#12) from ajanin into dev
Reviewed-on: #12
2025-07-21 07:56:53 +00:00
2500e19e71 Fix some issues 2025-07-21 09:55:41 +02:00
Sosthene
97e86308ce [bug] make MessageBus more reliable 2025-07-16 12:56:02 +02:00
b7f725bcfd Merge pull request 'Fix some issues' (#7) from ajanin into cicd
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m52s
Reviewed-on: #7
2025-07-16 08:34:58 +00:00
649f930a1b Fix some issues 2025-07-16 10:33:26 +02:00
1a397a8a5d Merge pull request 'Fix some issues' (#6) from ajanin into cicd
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m52s
Reviewed-on: #6
2025-07-10 06:30:18 +00:00
eeec82437a Fix some issues 2025-07-10 08:29:29 +02:00
21d2d7b94d Merge pull request 'ajanin' (#5) from ajanin into cicd
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m56s
Reviewed-on: #5
2025-07-09 18:36:27 +00:00
c8f6d625f8 Fix some issues 2025-07-09 20:35:05 +02:00
09a2c85119 Fix some issues 2025-07-09 08:35:18 +02:00
ed4b797230 Merge pull request 'Fix some issues' (#4) from ajanin into cicd
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m53s
Reviewed-on: #4
2025-07-08 17:36:22 +00:00
f48240bea3 Fix some issues 2025-07-08 19:35:38 +02:00
d1af444ed1 Merge pull request 'Fix some issues' (#3) from ajanin into cicd
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m52s
Reviewed-on: #3
2025-07-08 15:47:16 +00:00
02091bf433 Fix some issues 2025-07-08 17:46:00 +02:00
4d11a9b7ef Merge pull request 'Fix some issues' (#2) from ajanin into cicd
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m51s
Reviewed-on: #2
2025-07-08 13:54:18 +00:00
6edde3781b Fix some issues 2025-07-08 15:53:01 +02:00
dc54ec1a9b Merge pull request 'ajanin' (#1) from ajanin into cicd
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m56s
Reviewed-on: #1
2025-07-07 13:20:15 +00:00
863533be33 Fix some issues 2025-07-07 15:18:04 +02:00
4b7a08534d Fix some issues 2025-07-07 15:15:51 +02:00
c5bf332242 Fix somes issues 2025-07-07 15:15:18 +02:00
omaroughriss
1c3afac679 Merge branch 'cicd' of https://git.4nkweb.com/4nk/lecoffre-front into cicd
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 3m52s
2025-07-04 12:10:06 +02:00
omaroughriss
63fc45927f Update Dockerfile to use dev mode 2025-07-04 12:09:28 +02:00
Sosthene
5b3f432c3a Merge branch 'certificate' into cicd
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 2m26s
2025-07-03 20:06:29 +02:00
Sosthene
758a32a4d6 Fixing build errors 2025-07-03 20:02:28 +02:00
Sosthene
6f6d3e8de5 Fix pdf generation and parsing 2025-07-03 18:09:02 +02:00
Sosthene
c87ad8fed5 Add merkle proof validation message bus 2025-07-03 18:09:02 +02:00
Sosthene
56fe4fbcd3 Add document verification page 2025-07-03 18:09:02 +02:00
Sosthene
d94fd9e017 Refactor certificate generation 2025-07-03 18:09:02 +02:00
Sosthene
4f76d43f38 Add watermark when loading documents 2025-07-03 18:09:02 +02:00
Sosthene
7bfe3bcad2 Get merkle proof when generating certificate 2025-07-03 18:09:02 +02:00
Sosthene
c178b60d51 Add merkleProof to CertificateData 2025-07-03 18:09:02 +02:00
Sosthene
a450d80600 Add generateMerkleProof 2025-07-03 18:09:01 +02:00
Sosthene
095c4efba2 Add hashDocument to MessageBus 2025-07-03 18:09:01 +02:00
Sosthene
723322cc0a Use FileBlob and FileData everywhere 2025-07-03 18:09:01 +02:00
Sosthene
d17f4aa8d9 Add FileBlob and FileData 2025-07-03 18:09:01 +02:00
Sosthene
7a137dbe2f Add basic certificate 2025-07-03 18:09:01 +02:00
omaroughriss
a351c1814c Merge branch 'ajanin' into cicd
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 2m23s
2025-07-03 15:22:36 +02:00
omaroughriss
4e8dbcbf17 Update port
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 2m17s
2025-07-03 11:46:56 +02:00
omaroughriss
2e54e77aee Update bae url
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 2m19s
2025-07-03 10:56:46 +02:00
96ed1e50fa Fix somes issues 2025-07-02 14:54:59 +02:00
e4c440d6df Fix somes issues 2025-07-02 13:41:39 +02:00
d3e13bd801 Fix somes issues 2025-07-02 10:55:21 +02:00
ca5a59c51a Fix somes issues 2025-07-01 17:35:23 +02:00
omaroughriss
76650e3068 Update
All checks were successful
Build and Push to Registry / build-and-push (push) Successful in 2m17s
2025-07-01 16:35:45 +02:00
omaroughriss
5667f07b51 Update
Some checks failed
Build and Push to Registry / build-and-push (push) Failing after 41s
2025-07-01 16:32:16 +02:00
omaroughriss
19f517a946 Update
Some checks failed
Build and Push to Registry / build-and-push (push) Failing after 3s
2025-07-01 16:29:06 +02:00
omaroughriss
01d56c864a Update
Some checks failed
Build and Push to Registry / build-and-push (push) Failing after 22s
2025-07-01 16:21:22 +02:00
omaroughriss
e8d7c5777f Minor Update
Some checks failed
Build and Push to Registry / build-and-push (push) Failing after 23s
2025-07-01 16:19:17 +02:00
c939065562 Fix somes issues 2025-07-01 16:16:55 +02:00
omaroughriss
fd4a9d32b7 Minor update
Some checks failed
Build and Push to Registry / build-and-push (push) Failing after 25s
2025-07-01 16:11:26 +02:00
omaroughriss
eb4e47a73e Minor update
Some checks failed
Build and Push to Registry / build-and-push (push) Failing after 22s
2025-07-01 16:08:27 +02:00
omaroughriss
5620084a35 Add env variables to secrets
Some checks failed
Build and Push to Registry / build-and-push (push) Failing after 22s
2025-07-01 16:01:02 +02:00
omaroughriss
5b3fcc1eaf Ignore unused erros (only for dev)
Some checks failed
Build and Push to Registry / build-and-push (push) Failing after 39s
2025-07-01 15:56:33 +02:00
omaroughriss
cc4da39f74 Update back url 2025-07-01 15:55:45 +02:00
omaroughriss
e98b9ff6d7 Add private key in secrets 2025-07-01 15:55:24 +02:00
omaroughriss
393bdae782 Ignore type errors (only for dev) 2025-07-01 15:50:46 +02:00
omaroughriss
82ee58dd25 Add CICD 2025-07-01 15:50:09 +02:00
omaroughriss
7fa33cbdbc Update get repo method 2025-07-01 15:49:47 +02:00
omaroughriss
bd5d50cf97 Update Dockerfile 2025-07-01 15:48:13 +02:00
6ed6682824 Fix some bugs - continue 2025-06-27 13:19:05 +02:00
39c14ff490 Fix some bugs - continue 2025-06-27 01:26:39 +02:00
f9abdd31cd Fix some bugs - continue 2025-06-26 16:18:00 +02:00
d7e27bbb9a Fix some bugs - continue 2025-06-26 14:50:24 +02:00
ccc0a1620c Fix some bugs - continue 2025-06-26 13:08:36 +02:00
5ad6465b74 Fix some bugs 2025-06-26 11:35:12 +02:00
7435a33fe0 Fix ask-document page 2025-06-25 20:46:37 +02:00
65f67993ba Init migration 2025-06-24 21:50:31 +02:00
168 changed files with 13695 additions and 3420 deletions

1
.ci-build Normal file

@@ -0,0 +1 @@
# CI Build Trigger

2
.ci-trigger Normal file

@@ -0,0 +1,2 @@
# CI Trigger
# CI Trigger Sun Sep 21 19:57:49 UTC 2025

14
.cursor/config.json Normal file

@@ -0,0 +1,14 @@
{
  "language": "fr",
  "shell": "/usr/bin/bash",
  "formatting": {
    "markdown": {
      "lint_strict": true
    }
  },
  "ci": {
    "trigger_commit_prefix": "ci: docker_tag=",
    "default_tag": "int-dev",
    "branch": "dev4"
  }
}

14
.cursor/rules.md Normal file

@@ -0,0 +1,14 @@
# Règles Cursor pour ce projet
- Toujours répondre en français.
- Exécuter des commandes simples, une par une.
- Redémarrer le terminal avant chaque utilisation si nécessaire.
- Respect strict des règles de lint Markdown.
- Ne pas utiliser Kubernetes pour ce projet.
- CI: déclencher via commit `ci: docker_tag=int-dev` sur `dev4`.
- Front Next.js: variables `NEXT_PUBLIC_*` alignées avec `.env.example`.
- Docker: build multi-stage, `int-dev` comme image de déploiement.
- Nginx: local (pas Docker), confs sous `../lecoffre_node/conf/nginx/`.
- Docs et tests: maintenir `docs/` et `tests/` à chaque modification.
- Ne pas exposer de secrets côté front (`SIGNER_API_KEY` etc.).
- Pour Nginx: vérifier permission fichier avant édition; backup, edit atomique, `nginx -t`.

10
.cursorignore Normal file

@@ -0,0 +1,10 @@
# Cursor ignore file for lecoffre-front
node_modules/
.next/
dist/
*.log
.env*
.DS_Store
coverage/
build/
out/

165
.cursorrules Normal file

@@ -0,0 +1,165 @@
# Règles globales Cursor pour les projets
## Principes généraux
- Lire impérativement le fichier `.cursorrules` au démarrage de chaque session.
- Lire tous les fichiers du dossier `docs/`, le code et les paramètres avant de commencer.
- Poser des questions et proposer des améliorations si nécessaire.
- Ajouter les leçons apprises à ce fichier `.cursorrules`.
- Écrire des documents complets et exhaustifs.
- Respecter strictement les règles de lint du Markdown.
- Préférer toujours un shell **bash** à PowerShell.
- Fermer et relancer le terminal avant chaque utilisation.
- Si le terminal est interrompu, analyser la commande précédente (interruption probablement volontaire).
- Exécuter automatiquement les étapes de résolution de problème.
- Expliquer les commandes complexes avant de les lancer.
- Compiler régulièrement et corriger toutes les erreurs avant de passer à l'étape suivante.
- Tester, documenter, compiler, aligner tag git, changelog et version avant déploiement et push.
- Utiliser `docx2txt` pour lire les fichiers `.docx`.
- Ajouter automatiquement les dépendances et rechercher systématiquement les dernières versions.
- Faire des commandes simples et claires en plusieurs étapes.
- Vérifie toujours tes hypothèses avant de commencer.
- N'oublie jamais qu'après la correction d'un problème, il faut corriger toutes les erreurs qui peuvent en découler.
## Organisation des fichiers et répertoires
- Scripts regroupés dans `scripts/`
- Configurations regroupées dans `confs/`
- Journaux regroupés dans `logs/`
- Répertoires obligatoires :
- `docs/` : documentation de toute fonctionnalité ajoutée, modifiée, supprimée ou découverte.
- `tests/` : tests liés à toute fonctionnalité ajoutée, modifiée, supprimée ou découverte.
- Remplacer les résumés (`RESUME`) par des mises à jour dans `docs/`.
## Configuration critique des services
- Mempool du réseau signet :
`https://mempool2.4nkweb.com/fr/docs/api/rest`
## Développement et sécurité
- Ne jamais committer de clés privées ou secrets.
- Utiliser des variables d'environnement pour les données sensibles.
- Définir correctement les dépendances Docker avec healthchecks.
- Utiliser les URLs de service Docker Compose (`http://service_name:port`).
- Documenter toutes les modifications importantes dans `docs/`.
- Externaliser au maximum les variables d'environnement.
- Toujours utiliser une clé SSH pour cloner les dépôts.
- Monter en version les dépôts au début du travail.
- Pousser les tags docker `int-dev` via la CI sur `git.4nkweb.com`.
- Corriger systématiquement les problèmes, même mineurs, sans contournement.
## Scripts (règles critiques)
- Vérifier l'existence d'un script dans `scripts/` avant toute action.
- Utiliser les scripts existants plutôt que des commandes directes.
- Ne jamais créer plusieurs versions ou noms de scripts.
- Améliorer l'existant au lieu de créer des variantes (`startup-v2.sh`, etc.).
## Images Docker (règles critiques)
- Ajouter systématiquement `apt update && apt upgrade` dans les Dockerfiles.
- Installer en arrière-plan dans les images Docker :
`curl, git, sed, awk, nc, wget, jq, telnet, tee, wscat, ping, npm (dernière version)`
- Appliquer à tous les Dockerfiles et `docker-compose.yml`.
- N'utilise pas les versions test, dev ou int-dev-dev mais toujours les versions int-dev ; relance leur compilation si nécessaire.
## Fichiers de configuration (règles critiques)
- Vérifier l'écriture effective après chaque modification.
- Fichiers concernés : `nginx.conf`, `bitcoin.conf`, `package.json`, `Cargo.toml`.
- Utiliser `cat`, `jq` ou vérificateurs de syntaxe.
## Connexion au réseau Bitcoin signet
Commande unique et obligatoire :
```bash
docker exec bitcoin-signet bitcoin-cli -signet -rpccookiefile=/home/bitcoin/.bitcoin/signet/.cookie getblockchaininfo
```
## Connexion au relay/faucet bootstrap
* Test via WSS : `wss://dev3.4nkweb.com/ws/`
* Envoi Faucet, réponse attendue avec `NewTx` (tx hex et tweak\_data).
## Debug
* Automatiser dans le code toute solution validée.
* Pérenniser les retours d'expérience dans code et paramètres.
* Compléter les tests pour éviter les régressions.
## Nginx
* Tous les fichiers dans `conf/ngnix` doivent être mappés avec ceux du serveur.
## Minage (règles critiques)
* Toujours valider les adresses utilisées (adresses TSP non reconnues).
* Utiliser uniquement des adresses Bitcoin valides (bech32m).
* Vérifier que le minage génère des blocs avec transactions, pas uniquement coinbase.
* Surveiller les logs du minage pour détecter les erreurs d'adresse.
* Vérifier la propagation via le mempool externe.
## Mempool externe
* Utiliser `https://mempool2.4nkweb.com` pour vérifier les transactions.
* Vérifier la synchronisation entre réseau local et externe.
## Données et modèles
* Utiliser les fichiers CSV comme base des modèles de données.
* Être attentif aux en-têtes multi-lignes.
* Confirmer la structure comprise et demander définition de toutes les colonnes.
* Corriger automatiquement incohérences de type.
## Implémentation et architecture
* Code splitting avec `React.lazy` et `Suspense`.
* Centraliser l'état avec Redux ou Context API.
* Créer une couche d'abstraction pour les services de données.
* Aller systématiquement au bout d'une implémentation.
## Préparation open source
Chaque projet doit être prêt pour un dépôt sur `git.4nkweb.com` :
* Inclure : `LICENSE` (MIT, Apache 2.0 ou GPL), `CONTRIBUTING.md`, `CHANGELOG.md`, `CODE_OF_CONDUCT.md`.
* Aligner documentation et tests avec `4NK_node`.
## Versioning et documentation
* Mettre à jour documentation et tests systématiquement.
* Gérer versioning avec changelog.
* Demander validation avant tag.
* Documenter les hypothèses testées dans un REX technique.
* Tester avant tout commit.
* Tester les builds avant tout tag.
## Bonnes pratiques de confidentialité et sécurité
### Docker
- Ne jamais stocker de secrets (clés, tokens, mots de passe) dans les Dockerfiles ou docker-compose.yml.
- Utiliser des fichiers `.env` sécurisés (non commités avec copie en .env.example) pour toutes les variables sensibles.
- Ne pas exécuter de conteneurs avec l'utilisateur root, privilégier un utilisateur dédié.
- Limiter les capacités des conteneurs (option `--cap-drop`) pour réduire la surface d'attaque.
- Scanner régulièrement les images Docker avec un outil de sécurité (ex : Trivy, Clair).
- Mettre à jour en continu les images de base afin d'éliminer les vulnérabilités.
- Ne jamais exposer de ports inutiles.
- Restreindre les volumes montés au strict nécessaire.
- Utiliser des réseaux Docker internes pour la communication inter-containers.
- Vérifier et tenir à jour les .dockerignore.
### Git
- Ne jamais committer de secrets, clés ou identifiants (même temporairement).
- Configurer des hooks Git (pre-commit) pour détecter automatiquement les secrets et les failles.
- Vérifier l'historique (`git log`, `git filter-repo`) pour s'assurer qu'aucune information sensible n'a été poussée.
- Signer les commits avec GPG pour garantir l'authenticité.
- Utiliser systématiquement SSH pour les connexions à distance.
- Restreindre les accès aux dépôts (principes du moindre privilège).
- Documenter les changements sensibles dans `CHANGELOG.md`.
- Ne jamais pousser directement sur `main` ou `master`, toujours passer par des branches de feature ou PR.
- Vérifier et tenir à jour les .gitignore.
- Vérifier et tenir à jour les .gitkeep.
- Vérifier et tenir à jour les .gitattributes.
### Cursor
- Toujours ouvrir une session en commençant par relire le fichier `.cursorrules`.
- Vérifier que Cursor ne propose pas de commit contenant des secrets ou fichiers sensibles.
- Ne pas exécuter dans Cursor de commandes non comprises ou copiées sans vérification.
- Préférer l'utilisation de scripts audités dans `scripts/` plutôt que des commandes directes dans Cursor.
- Fermer et relancer Cursor régulièrement pour éviter des contextes persistants non désirés.
- Ne jamais partager le contenu du terminal ou des fichiers sensibles via Cursor en dehors du périmètre du projet.
- Vérifier et tenir à jour les .cursorrules.
- Vérifier et tenir à jour les .cursorignore.

11
.dockerignore Normal file

@@ -0,0 +1,11 @@
.git
node_modules
.next
coverage
dist
.DS_Store
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.env*

View File

@@ -1,6 +1,100 @@
BACK_API_PROTOCOL=
BACK_API_HOSTNAME=
BACK_API_PORT=
BACK_API_ROOT_URL=
BACK_API_VERSION=
# Variables d'environnement pour l'application back-end
NODE_OPTIONS=--max-old-space-size=2048
NODE_ENV=production
# Configuration IDNOT
IDNOT_ANNUARY_BASE_URL=https://qual-api.notaires.fr/annuaire
# IDNOT_REDIRECT_URI=http://local.4nkweb.com:3004/authorized-client
IDNOT_REDIRECT_URI=http://local.4nkweb.com:3000/authorized-client
IDNOT_TOKEN_URL=https://qual-connexion.idnot.fr/user/IdPOAuth2/token/idnot_idp_v1
IDNOT_API_BASE_URL=https://qual-api.notaires.fr
# Configuration serveur
APP_HOST=dev4.4nkweb.com
# API_BASE_URL=https://demo.4nkweb.com/back
API_BASE_URL=https://dev4.4nkweb.com/back
# DEFAULT_STORAGE=https://demo.4nkweb.com/storage
DEFAULT_STORAGE=https://dev4.4nkweb.com/storage
# Variables d'environnement pour l'application front-end
# NEXT_PUBLIC_4NK_URL=http://demo.4nkweb.com/
NEXT_PUBLIC_4NK_URL=https://dev4.4nkweb.com
# NEXT_PUBLIC_FRONT_APP_HOST=https://demo.4nkweb.com
NEXT_PUBLIC_FRONT_APP_HOST=https://dev4.4nkweb.com/lecoffre
NEXT_PUBLIC_FRONT_APP_PORT=443
NEXT_PUBLIC_IDNOT_BASE_URL=https://qual-connexion.idnot.fr
NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT=/IdPOAuth2/authorize/idnot_idp_v1
NEXT_PUBLIC_BACK_API_PROTOCOL=https
NEXT_PUBLIC_BACK_API_HOST=dev4.4nkweb.com
NEXT_PUBLIC_BACK_API_PORT=443
NEXT_PUBLIC_BACK_API_ROOT_URL=/api
NEXT_PUBLIC_BACK_API_VERSION=v1
# NEXT_PUBLIC_ANK_BASE_REDIRECT_URI='http://local.4nkweb.com:3004/authorized-client'
NEXT_PUBLIC_ANK_BASE_REDIRECT_URI=https://dev4.4nkweb.com/lecoffre/authorized-client
NEXT_PUBLIC_TARGET_ORIGIN=https://dev4.4nkweb.com/lecoffre
NEXT_PUBLIC_4NK_IFRAME_URL=https://dev4.4nkweb.com
NEXT_PUBLIC_IDNOT_REDIRECT_URI=http://local.4nkweb.com:3000/authorized-client
NEXT_PUBLIC_DOCAPOSTE_API_URL=
NEXT_PUBLIC_API_URL=https://dev4.4nkweb.com/api
NEXT_PUBLIC_DEFAULT_VALIDATOR_ID=28c9a3a8151bef545ebf700ca5222c63d0031ad593097e95c1de202464304a99
NEXT_PUBLIC_DEFAULT_STORAGE_URLS=https://dev4.4nkweb.com/storage
# WS
# RELAY_URLS=wss://demo.4nkweb.com/ws
RELAY_URLS=wss://dev4.4nkweb.com/ws
# SIGNER_WS_URL=ws://dev4.4nkweb.com/signer/
SIGNER_WS_URL=ws://dev3.4nkweb.com
SIGNER_BASE_URL=https://dev3.4nkweb.com
# IHM URLS
# VITE_BOOTSTRAPURL=http://sdk_relay:8090/
VITE_BOOTSTRAPURL=https://dev4.4nkweb.com/ws/
# Cartes de test Stripe
SUCCES='4242 4242 4242 4242'
DECLINED='4000 0025 0000 3155'
CORS_ALLOWED_ORIGINS=http://local.4nkweb.com:3000,https://dev4.4nkweb.com
ENABLE_SUBSCRIPTION_STUB=true
core_url="http://bitcoin:38332"
ws_url="0.0.0.0:8090"
wallet_name="default"
network="signet"
blindbit_url="http://blindbit:8000"
zmq_url="tcp://bitcoin:29000"
storage="https://dev4.4nkweb.com/storage"
data_dir="/home/bitcoin/.4nk"
bitcoin_data_dir="/home/bitcoin/.bitcoin"
# ================================= /!\ sensible ========================
IDNOT_API_KEY=
IDNOT_CLIENT_ID=
IDNOT_CLIENT_SECRET=
NEXT_PUBLIC_IDNOT_CLIENT_ID=
# Configuration OVH
OVH_APP_KEY=
OVH_APP_SECRET=
OVH_CONSUMER_KEY=
OVH_SMS_SERVICE_NAME=
# Configuration SMS Factor
SMS_FACTOR_TOKEN=
# Configuration Mailchimp
MAILCHIMP_API_KEY=
MAILCHIMP_KEY=
MAILCHIMP_LIST_ID=
# Configuration Stripe
STRIPE_SECRET_KEY=
STRIPE_WEBHOOK_SECRET=
STRIPE_STANDARD_SUBSCRIPTION_PRICE_ID=
STRIPE_STANDARD_ANNUAL_SUBSCRIPTION_PRICE_ID=
STRIPE_UNLIMITED_SUBSCRIPTION_PRICE_ID=
STRIPE_UNLIMITED_ANNUAL_SUBSCRIPTION_PRICE_ID=
SIGNER_API_KEY=your-api-key-change-this
VITE_JWT_SECRET_KEY=52b3d77617bb00982dfee15b08effd52cfe5b2e69b2f61cc4848cfe1e98c0bc9

View File

@@ -0,0 +1,126 @@
name: build-and-push-int-dev
on:
  push:
    tags:
      - int-dev
jobs:
  build_push:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Prepare SSH agent (optional)
        shell: bash
        run: |
          set -euo pipefail
          eval "$(ssh-agent -s)"
          if [ -n "${{ secrets.SSH_PRIVATE_KEY || '' }}" ]; then
            echo "${{ secrets.SSH_PRIVATE_KEY }}" | tr -d '\r' | ssh-add - >/dev/null 2>&1 || true
          fi
          mkdir -p ~/.ssh
          ssh-keyscan git.4nkweb.com >> ~/.ssh/known_hosts 2>/dev/null || true
          echo "SSH agent ready: $SSH_AUTH_SOCK"
          # Rendre l'agent dispo aux steps suivants
          echo "SSH_AUTH_SOCK=$SSH_AUTH_SOCK" >> "$GITHUB_ENV"
          echo "SSH_AGENT_PID=$SSH_AGENT_PID" >> "$GITHUB_ENV"
      - name: Load .env(.example) and export NEXT_PUBLIC_* variables
        shell: bash
        run: |
          set -euo pipefail
          set -a
          if [ -f .env ]; then
            . ./.env
          elif [ -f .env.example ]; then
            # Parser .env.example même s'il est formaté en tableau Markdown
            # On retire la bordure '|' éventuelle et on ne garde que les lignes KEY=VALUE
            tmpenv=$(mktemp)
            sed -E 's/^\|\s*//; s/\s*\|\s*$//' .env.example \
              | awk 'NF>0' \
              | grep -E '^[A-Z0-9_]+=.*' \
              > "$tmpenv"
            # Charger uniquement les variables
            . "$tmpenv"
            rm -f "$tmpenv"
          fi
          set +a
          echo "Environment NEXT_PUBLIC_* available (current step):" || true
          env | grep '^NEXT_PUBLIC_' || true
          # Exporter pour les étapes suivantes via GITHUB_ENV
          count=0
          while IFS='=' read -r key value; do
            echo "$key=$value" >> "$GITHUB_ENV"
            count=$((count+1))
          done < <(env | grep '^NEXT_PUBLIC_')
          if [ "$count" -eq 0 ]; then
            echo "Aucune variable NEXT_PUBLIC_* détectée (ni .env/.env.example ni secrets). Abandon." >&2
            exit 1
          fi
      - name: Compute Docker tag from commit message or fallback
        id: tag
        shell: bash
        run: |
          set -euo pipefail
          msg=$(git log -1 --pretty=%B)
          if [[ "$msg" =~ ci:\ docker_tag=([a-zA-Z0-9._:-]+) ]]; then
            tag="${BASH_REMATCH[1]}"
          else
            tag="dev-test"
          fi
          echo "TAG=$tag" | tee -a $GITHUB_OUTPUT
      - name: Docker login (git.4nkweb.com)
        shell: bash
        env:
          REG_USER: ${{ secrets.USER }}
          REG_TOKEN: ${{ secrets.TOKEN }}
        run: |
          set -euo pipefail
          echo "$REG_TOKEN" | docker login git.4nkweb.com -u "$REG_USER" --password-stdin
      - name: Build image (target int-dev)
        shell: bash
        env:
          DOCKER_BUILDKIT: "1"
        run: |
          set -euo pipefail
          if [ -n "${SSH_AUTH_SOCK:-}" ]; then
            buildArgs=()
            # 1) Ajouter toutes les variables NEXT_PUBLIC_* chargées depuis .env
            while IFS='=' read -r key _; do
              [ -n "$key" ] || continue
              val="${!key:-}"
              buildArgs+=(--build-arg "$key=$val")
            done < <(env | grep '^NEXT_PUBLIC_' | cut -d= -f1 | sort)
            # 2) Fallback/override possibles depuis les secrets CI pour certaines clés critiques
            [ -n "${{ secrets.NEXT_PUBLIC_4NK_URL || '' }}" ] && buildArgs+=(--build-arg NEXT_PUBLIC_4NK_URL="${{ secrets.NEXT_PUBLIC_4NK_URL }}")
            [ -n "${{ secrets.NEXT_PUBLIC_4NK_IFRAME_URL || '' }}" ] && buildArgs+=(--build-arg NEXT_PUBLIC_4NK_IFRAME_URL="${{ secrets.NEXT_PUBLIC_4NK_IFRAME_URL }}")
            [ -n "${{ secrets.NEXT_PUBLIC_IDNOT_CLIENT_ID || '' }}" ] && buildArgs+=(--build-arg NEXT_PUBLIC_IDNOT_CLIENT_ID="${{ secrets.NEXT_PUBLIC_IDNOT_CLIENT_ID }}")
            # 3) Fail si aucune variable NEXT_PUBLIC_* n'est définie
            if ! env | grep -q '^NEXT_PUBLIC_'; then
              echo "Aucune variable NEXT_PUBLIC_* détectée (ni .env/.env.example ni secrets). Abandon." >&2
              exit 1
            fi
            docker build --target int-dev --ssh default "${buildArgs[@]}" \
              -t git.4nkweb.com/4nk/lecoffre-front:${{ steps.tag.outputs.TAG }} \
              -f Dockerfile .
          else
            echo "SSH_AUTH_SOCK non défini: l'agent SSH n'est pas disponible. Assurez-vous de définir secrets.SSH_PRIVATE_KEY."
            exit 1
          fi
      - name: Push image
        shell: bash
        run: |
          set -euo pipefail
          docker push git.4nkweb.com/4nk/lecoffre-front:${{ steps.tag.outputs.TAG }}
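For context, a minimal sketch of how a build of this workflow might be triggered from a working copy. It is an assumption based on the workflow trigger (`push` of the `int-dev` tag), the tag-computation step above (commit prefix `ci: docker_tag=`, fallback `dev-test`) and the `dev4` branch named in `.cursor/config.json`; it is not the project's documented release procedure.

```bash
# Hypothetical trigger sequence (sketch only; branch name and tag handling are assumptions).
git commit --allow-empty -m "ci: docker_tag=int-dev - trigger CI build"
git push origin dev4              # assumed working branch (see .cursor/config.json)
git tag -f int-dev                # move the mutable int-dev tag onto this commit
git push -f origin int-dev        # the tag push starts the build_push job
```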

View File

@@ -1,78 +0,0 @@
name: Preprod - Build & Deploy to Scaleway
on:
push:
branches: [preprod]
env:
PROJECT_ID_LECOFFRE: 72d08499-37c2-412b-877e-f8af0471654a
NAMESPACE_ID_LECOFFRE: e975f056-967e-43fe-b237-84bfa8032e64
CONTAINER_REGISTRY_ENDPOINT_LECOFFRE: rg.fr-par.scw.cloud/funcscwlecoffreppdmp73pool
IMAGE_NAME: front
CONTAINER_NAME: front
jobs:
build-and-push-image-lecoffre:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup SSH
run: |
mkdir -p ~/.ssh
echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/id_rsa
chmod 600 ~/.ssh/id_rsa
ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts
env:
SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
- name: Copy SSH
run: cp ~/.ssh/id_rsa id_rsa
- name: Login to Scaleway Container Registry
uses: docker/login-action@v3
with:
username: nologin
password: ${{ secrets.SCW_SECRET_KEY_LECOFFRE }}
registry: ${{ env.CONTAINER_REGISTRY_ENDPOINT_LECOFFRE }}
- name: Get Git Commit SHA
id: vars
run: echo "COMMIT_SHA=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
- name: Build the Docker Image
run: docker build . -t ${{ env.CONTAINER_REGISTRY_ENDPOINT_LECOFFRE }}/${{ env.IMAGE_NAME }}
- name: Push the Docker Image to Scaleway Container Registry
run: docker push ${{ env.CONTAINER_REGISTRY_ENDPOINT_LECOFFRE }}/${{ env.IMAGE_NAME }}
deploy-to-scaleway-lecoffre:
needs: build-and-push-image-lecoffre
runs-on: ubuntu-latest
environment: preprod
steps:
- name: Install CLI
uses: scaleway/action-scw@v0
- name: Get container ID
run: |
echo "CONTAINER_ID=$(scw container container list namespace-id=${{env.NAMESPACE_ID_LECOFFRE}} -o json | jq -r '.[] | select(.name == "${{ env.CONTAINER_NAME }}") | .id')" >> $GITHUB_ENV
env:
SCW_ACCESS_KEY: ${{ secrets.SCW_ACCESS_KEY_LECOFFRE }}
SCW_SECRET_KEY: ${{ secrets.SCW_SECRET_KEY_LECOFFRE }}
SCW_DEFAULT_PROJECT_ID: ${{ env.PROJECT_ID_LECOFFRE }}
SCW_DEFAULT_ORGANIZATION_ID: ${{ secrets.SCW_ORGANIZATION_ID_LECOFFRE }}
- name: Deploy the container based on the new image
run: |
env_string=""
while IFS= read -r line; do
if [[ "$line" == *"="* ]]; then
key=$(echo "$line" | cut -d '=' -f 1)
value=$(echo "$line" | cut -d '=' -f 2-)
if [[ -n "$key" ]]; then
env_string+="environment-variables.$key=$value "
fi
fi
done <<< "$ENV_VARS"
env_string=$(echo $env_string | sed 's/ $//')
scw container container update ${{ env.CONTAINER_ID }} $env_string
env:
ENV_VARS: ${{ secrets.ENV }}
SCW_ACCESS_KEY: ${{ secrets.SCW_ACCESS_KEY_LECOFFRE }}
SCW_SECRET_KEY: ${{ secrets.SCW_SECRET_KEY_LECOFFRE }}
SCW_DEFAULT_PROJECT_ID: ${{ env.PROJECT_ID_LECOFFRE }}
SCW_DEFAULT_ORGANIZATION_ID: ${{ secrets.SCW_ORGANIZATION_ID_LECOFFRE }}

View File

@@ -1,79 +0,0 @@
name: Prod - Build & Deploy to Scaleway
on:
push:
branches: [main]
env:
PROJECT_ID_LECOFFRE: 72d08499-37c2-412b-877e-f8af0471654a
NAMESPACE_ID_LECOFFRE: 8fbbce9d-31d1-4368-94c4-445e79f10834
CONTAINER_REGISTRY_ENDPOINT_LECOFFRE: rg.fr-par.scw.cloud/funcscwlecoffreprdjulp9mam
IMAGE_NAME: front
CONTAINER_NAME: front
jobs:
build-and-push-image-lecoffre:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
#- name: Setup SSH
# run: |
# mkdir -p ~/.ssh
# echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/id_rsa
# chmod 600 ~/.ssh/id_rsa
# ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts
# env:
# SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
#- name: Copy SSH
# run: cp ~/.ssh/id_rsa id_rsa
- name: Login to Scaleway Container Registry
uses: docker/login-action@v3
with:
username: nologin
password: ${{ secrets.SCW_SECRET_KEY_LECOFFRE }}
registry: ${{ env.CONTAINER_REGISTRY_ENDPOINT_LECOFFRE }}
- name: Get Git Commit SHA
id: vars
run: echo "COMMIT_SHA=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
- name: Build the Docker Image
run: docker build . -t ${{ env.CONTAINER_REGISTRY_ENDPOINT_LECOFFRE }}/${{ env.IMAGE_NAME }}
- name: Push the Docker Image to Scaleway Container Registry
run: docker push ${{ env.CONTAINER_REGISTRY_ENDPOINT_LECOFFRE }}/${{ env.IMAGE_NAME }}
deploy-to-scaleway-lecoffre:
needs: build-and-push-image-lecoffre
runs-on: ubuntu-latest
environment: prod
steps:
- name: Install CLI
uses: scaleway/action-scw@v0
- name: Get container ID
run: |
echo "CONTAINER_ID=$(scw container container list namespace-id=${{env.NAMESPACE_ID_LECOFFRE}} -o json | jq -r '.[] | select(.name == "${{ env.CONTAINER_NAME }}") | .id')" >> $GITHUB_ENV
env:
SCW_ACCESS_KEY: ${{ secrets.SCW_ACCESS_KEY_LECOFFRE }}
SCW_SECRET_KEY: ${{ secrets.SCW_SECRET_KEY_LECOFFRE }}
SCW_DEFAULT_PROJECT_ID: ${{ env.PROJECT_ID_LECOFFRE }}
SCW_DEFAULT_ORGANIZATION_ID: ${{ secrets.SCW_ORGANIZATION_ID_LECOFFRE }}
- name: Deploy the container based on the new image
run: |
env_string=""
while IFS= read -r line; do
if [[ "$line" == *"="* ]]; then
key=$(echo "$line" | cut -d '=' -f 1)
value=$(echo "$line" | cut -d '=' -f 2-)
if [[ -n "$key" ]]; then
env_string+="environment-variables.$key=$value "
fi
fi
done <<< "$ENV_VARS"
env_string=$(echo $env_string | sed 's/ $//')
scw container container update ${{ env.CONTAINER_ID }} $env_string
env:
ENV_VARS: ${{ secrets.ENV }}
SCW_ACCESS_KEY: ${{ secrets.SCW_ACCESS_KEY_LECOFFRE }}
SCW_SECRET_KEY: ${{ secrets.SCW_SECRET_KEY_LECOFFRE }}
SCW_DEFAULT_PROJECT_ID: ${{ env.PROJECT_ID_LECOFFRE }}
SCW_DEFAULT_ORGANIZATION_ID: ${{ secrets.SCW_ORGANIZATION_ID_LECOFFRE }}

View File

@@ -1,74 +0,0 @@
name: Staging - Build & Deploy to Scaleway
on:
push:
branches: [staging]
env:
PROJECT_ID_LECOFFRE: 72d08499-37c2-412b-877e-f8af0471654a
NAMESPACE_ID_LECOFFRE: f8137e85-47ad-46a5-9e2e-18af5de829c5
CONTAINER_REGISTRY_ENDPOINT_LECOFFRE: rg.fr-par.scw.cloud/funcscwlecoffrestgbqbfhtv6
IMAGE_NAME: front
CONTAINER_NAME: front
jobs:
build-and-push-image-lecoffre:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup SSH
run: |
mkdir -p ~/.ssh
echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/id_rsa
chmod 600 ~/.ssh/id_rsa
ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts
env:
SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
- name: Copy SSH
run: cp ~/.ssh/id_rsa id_rsa
- name: Login to Scaleway Container Registry
uses: docker/login-action@v3
with:
username: nologin
password: ${{ secrets.SCW_SECRET_KEY_LECOFFRE }}
registry: ${{ env.CONTAINER_REGISTRY_ENDPOINT_LECOFFRE }}
- name: Build the Docker Image
run: docker build . -t ${{ env.CONTAINER_REGISTRY_ENDPOINT_LECOFFRE }}/${{ env.IMAGE_NAME }}
- name: Push the Docker Image to Scaleway Container Registry
run: docker push ${{ env.CONTAINER_REGISTRY_ENDPOINT_LECOFFRE }}/${{ env.IMAGE_NAME }}
deploy-to-scaleway-lecoffre:
needs: build-and-push-image-lecoffre
runs-on: ubuntu-latest
environment: staging
steps:
- name: Install CLI
uses: scaleway/action-scw@v0
- name: Get container ID
run: |
echo "CONTAINER_ID=$(scw container container list namespace-id=${{env.NAMESPACE_ID_LECOFFRE}} -o json | jq -r '.[] | select(.name == "${{ env.CONTAINER_NAME }}") | .id')" >> $GITHUB_ENV
env:
SCW_ACCESS_KEY: ${{ secrets.SCW_ACCESS_KEY_LECOFFRE }}
SCW_SECRET_KEY: ${{ secrets.SCW_SECRET_KEY_LECOFFRE }}
SCW_DEFAULT_PROJECT_ID: ${{ env.PROJECT_ID_LECOFFRE }}
SCW_DEFAULT_ORGANIZATION_ID: ${{ secrets.SCW_ORGANIZATION_ID_LECOFFRE }}
- name: Deploy the container based on the new image
run: |
env_string=""
while IFS= read -r line; do
if [[ "$line" == *"="* ]]; then
key=$(echo "$line" | cut -d '=' -f 1)
value=$(echo "$line" | cut -d '=' -f 2-)
if [[ -n "$key" ]]; then
env_string+="environment-variables.$key=$value "
fi
fi
done <<< "$ENV_VARS"
env_string=$(echo $env_string | sed 's/ $//')
scw container container update ${{ env.CONTAINER_ID }} $env_string
env:
ENV_VARS: ${{ secrets.ENV }}
SCW_ACCESS_KEY: ${{ secrets.SCW_ACCESS_KEY_LECOFFRE }}
SCW_SECRET_KEY: ${{ secrets.SCW_SECRET_KEY_LECOFFRE }}
SCW_DEFAULT_PROJECT_ID: ${{ env.PROJECT_ID_LECOFFRE }}
SCW_DEFAULT_ORGANIZATION_ID: ${{ secrets.SCW_ORGANIZATION_ID_LECOFFRE }}

View File

@@ -1,74 +0,0 @@
name: Test - Build & Deploy to Scaleway
on:
  push:
    branches: [legacy_dev]
env:
  PROJECT_ID_LECOFFRE: 72d08499-37c2-412b-877e-f8af0471654a
  NAMESPACE_ID_LECOFFRE: 3829c5cd-9fb0-4871-97a1-eb33e4bc1114
  CONTAINER_REGISTRY_ENDPOINT_LECOFFRE: rg.fr-par.scw.cloud/funcscwlecoffretestouylprmj
  IMAGE_NAME: front
  CONTAINER_NAME: front
jobs:
  build-and-push-image-lecoffre:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup SSH
        run: |
          mkdir -p ~/.ssh
          echo "${{ secrets.SSH_PRIVATE_KEY }}" > ~/.ssh/id_rsa
          chmod 600 ~/.ssh/id_rsa
          ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts
        env:
          SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
      - name: Copy SSH
        run: cp ~/.ssh/id_rsa id_rsa
      - name: Login to Scaleway Container Registry
        uses: docker/login-action@v3
        with:
          username: nologin
          password: ${{ secrets.SCW_SECRET_KEY_LECOFFRE }}
          registry: ${{ env.CONTAINER_REGISTRY_ENDPOINT_LECOFFRE }}
      - name: Build the Docker Image
        run: docker build . -t ${{ env.CONTAINER_REGISTRY_ENDPOINT_LECOFFRE }}/${{ env.IMAGE_NAME }}
      - name: Push the Docker Image to Scaleway Container Registry
        run: docker push ${{ env.CONTAINER_REGISTRY_ENDPOINT_LECOFFRE }}/${{ env.IMAGE_NAME }}
  deploy-to-scaleway-lecoffre:
    needs: build-and-push-image-lecoffre
    runs-on: ubuntu-latest
    environment: test
    steps:
      - name: Install CLI
        uses: scaleway/action-scw@v0
      - name: Get container ID
        run: |
          echo "CONTAINER_ID=$(scw container container list namespace-id=${{env.NAMESPACE_ID_LECOFFRE}} -o json | jq -r '.[] | select(.name == "${{ env.CONTAINER_NAME }}") | .id')" >> $GITHUB_ENV
        env:
          SCW_ACCESS_KEY: ${{ secrets.SCW_ACCESS_KEY_LECOFFRE }}
          SCW_SECRET_KEY: ${{ secrets.SCW_SECRET_KEY_LECOFFRE }}
          SCW_DEFAULT_PROJECT_ID: ${{ env.PROJECT_ID_LECOFFRE }}
          SCW_DEFAULT_ORGANIZATION_ID: ${{ secrets.SCW_ORGANIZATION_ID_LECOFFRE }}
      - name: Deploy the container based on the new image
        run: |
          env_string=""
          while IFS= read -r line; do
            if [[ "$line" == *"="* ]]; then
              key=$(echo "$line" | cut -d '=' -f 1)
              value=$(echo "$line" | cut -d '=' -f 2-)
              if [[ -n "$key" ]]; then
                env_string+="environment-variables.$key=$value "
              fi
            fi
          done <<< "$ENV_VARS"
          env_string=$(echo $env_string | sed 's/ $//')
          scw container container update ${{ env.CONTAINER_ID }} $env_string
        env:
          ENV_VARS: ${{ secrets.ENV }}
          SCW_ACCESS_KEY: ${{ secrets.SCW_ACCESS_KEY_LECOFFRE }}
          SCW_SECRET_KEY: ${{ secrets.SCW_SECRET_KEY_LECOFFRE }}
          SCW_DEFAULT_PROJECT_ID: ${{ env.PROJECT_ID_LECOFFRE }}
          SCW_DEFAULT_ORGANIZATION_ID: ${{ secrets.SCW_ORGANIZATION_ID_LECOFFRE }}

64
.gitignore vendored
View File

@ -1,44 +1,38 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
dist/
# testing
/coverage
# next.js
/.next/
/out/
dist/
# production
/build
# misc
.DS_Store
# Secrets et fichiers sensibles
.env
.env.*
!.env.example
!.env.exemple
*.key
*.pem
secrets/
# debug
# Logs
logs/
*.log
# Node.js
node_modules/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
# local env files
.env*.local
.env
# Next.js
.next/
out/
build/
dist/
# vercel
.vercel
# IDE
.vscode/
.idea/
*.swp
*.swo
# typescript
*.tsbuildinfo
next-env.d.ts
node_modules
id_rsa
# OS
.DS_Store
Thumbs.db
# Sentry Config File
.sentryclirc
# Temporary files
tmp/
*.tmp

View File

@ -1,7 +1,7 @@
{
"editor.defaultFormatter": "esbenp.prettier-vscode",
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "vscode.typescript-language-features"
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
@ -32,6 +32,6 @@
"rust-client.disableRustup": true,
"rust-client.autoStartRls": false,
"[typescriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "vscode.typescript-language-features"
}
}

35
CHANGELOG.md Normal file
View File

@ -0,0 +1,35 @@
## v0.1.1
- Ajout d'une image Docker de production "int-dev" (Next.js `next start`) lisant les URLs d'API via variables d'environnement, évitant toute référence à `localhost`.
- Remplacement d'URLs codées en dur dans l'écran de connexion email par l'utilisation de `FrontendVariables` et des `NEXT_PUBLIC_*`.
- Documentation `docs/int-dev.md` ajoutée (build, run, push, variables supportées).
## v0.1.2
- LoginCallback (`index.tsx`) ajusté: suppression de la redirection spéciale `local.4nkweb.com` au profit d'un flux standard basé sur variables d'environnement.
## v0.1.3
## v0.1.4
## v0.1.5
- IdNot: appel backend changé en POST `/api/v1/idnot/auth` avec `{ code }` dans le corps (évite erreurs 502 proxy sur URL longue).
- Documentation et tests mis à jour.
- Rappel déploiement: builder et pousser l'image taguée `int-dev` pour récupération par `lecoffre_node`.
- Sous-chemin Next activé: `basePath: '/lecoffre'`.
- Normalisation SSR du `targetOrigin` de l'iframe sur l'origine de `NEXT_PUBLIC_4NK_URL`.
- CI: build-arg optionnel `NEXT_PUBLIC_4NK_URL` depuis les secrets.
- Optimisations build Docker:
- `.dockerignore` pour réduire le contexte.
- Next.js `output: 'standalone'` pour une image runtime plus légère.
- Caches BuildKit (npm et .next) pour accélérer les builds.
- Runtime basé sur `server.js` (standalone) au lieu de `next start`.
## v0.1.6
- Mise en place d'un "callback bridge" pour IdNot:
- Page bridge côté domaine autorisé (`/authorized-client`) qui POST le `code` vers `/api/v1/idnot/auth` et redirige.
- Page front `/lecoffre/authorized-bridge` qui consomme `#token`/`#error`, stocke le cookie et redirige vers le tableau de bord (voir l'esquisse après cette liste).
- Permet le login sans modifier la liste des callbacks autorisés chez IdNot.
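
À titre d'illustration, une esquisse minimale (hypothétique, le code réel de la page peut différer) de la logique de `/lecoffre/authorized-bridge` décrite ci-dessus : lecture de `#token`/`#error`, dépôt du cookie puis redirection. Le nom du cookie et les routes de redirection sont des hypothèses.

```typescript
// Esquisse hypothétique de la page /lecoffre/authorized-bridge (v0.1.6) :
// consommer #token/#error, stocker le jeton en cookie, puis rediriger.
export function handleAuthorizedBridge(): void {
  const params = new URLSearchParams(window.location.hash.slice(1)); // retire le '#'
  const token = params.get("token");
  const error = params.get("error");

  if (error || !token) {
    // Route d'erreur supposée, à adapter au routage réel.
    window.location.replace("/lecoffre/login?error=" + encodeURIComponent(error ?? "missing_token"));
    return;
  }

  // Nom et attributs du cookie donnés à titre d'exemple uniquement.
  document.cookie = `leCoffreAccessToken=${encodeURIComponent(token)}; path=/lecoffre; secure; samesite=lax`;
  window.location.replace("/lecoffre/folders"); // tableau de bord supposé
}
```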

View File

@ -1,46 +1,190 @@
# Install dependencies only when needed
FROM node:19-alpine AS deps
# syntax=docker/dockerfile:1.4
FROM debian:bookworm-slim AS deps
WORKDIR /leCoffre-front
WORKDIR leCoffre-front
# Installation des dépendances de base
RUN apt-get update && apt-get upgrade -y && \
apt-get install -y --fix-missing \
ca-certificates curl jq git \
net-tools iputils-ping dnsutils \
netcat-openbsd telnet procps && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# Installation de Node.js
RUN curl -fsSL https://deb.nodesource.com/setup_19.x | bash - && \
apt-get install -y nodejs && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
COPY package.json ./
COPY package-lock.json ./
RUN apk update && apk add openssh-client git
# Installation des dépendances
RUN --mount=type=cache,target=/root/.npm \
npm install --no-audit --no-fund
#COPY id_rsa /root/.ssh/id_rsa
#RUN chmod 600 ~/.ssh/id_rsa
#RUN eval "$(ssh-agent -s)" && ssh-add /root/.ssh/id_rsa
#RUN ssh-keyscan github.com smart-chain-fr/leCoffre-resources.git >> /root/.ssh/known_hosts
# Configuration pour le développement
FROM debian:bookworm-slim AS development
WORKDIR /leCoffre-front
RUN npm install --frozen-lockfile
# Installation des dépendances de base
RUN apt-get update && apt-get upgrade -y && \
apt-get install -y --fix-missing \
ca-certificates curl jq git \
net-tools iputils-ping dnsutils \
netcat-openbsd telnet procps && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# Rebuild the source code only when needed
FROM node:19-alpine AS builder
# Installation de Node.js
RUN curl -fsSL https://deb.nodesource.com/setup_19.x | bash - && \
apt-get install -y nodejs && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
WORKDIR leCoffre-front
COPY --from=deps /leCoffre-front/node_modules ./node_modules
COPY --from=deps /leCoffre-front/package.json ./package.json
COPY . .
COPY --from=deps leCoffre-front/node_modules ./node_modules
COPY --from=deps leCoffre-front/package.json package.json
COPY tsconfig.json tsconfig.json
COPY next.config.js next.config.js
COPY src src
RUN npm run build
# Production image, copy all the files and run next
FROM node:19-alpine AS production
WORKDIR leCoffre-front
RUN adduser -D lecoffreuser --uid 10000 && chown -R lecoffreuser .
COPY public ./public
COPY --from=builder --chown=lecoffreuser leCoffre-front/node_modules ./node_modules
COPY --from=builder --chown=lecoffreuser leCoffre-front/.next ./.next
COPY --from=builder --chown=lecoffreuser leCoffre-front/next.config.js ./next.config.js
COPY --from=builder --chown=lecoffreuser leCoffre-front/package.json ./package.json
# Création de l'utilisateur non-root
RUN useradd -m -u 1000 lecoffreuser && \
mkdir -p /leCoffre-front && chown -R lecoffreuser:lecoffreuser /leCoffre-front
USER lecoffreuser
CMD ["npm", "run", "start"]
EXPOSE 3000
CMD ["npm", "run", "dev"]
EXPOSE 3000
# --- Build de production
FROM debian:bookworm-slim AS builder
WORKDIR /leCoffre-front
# Installation des dépendances de base
RUN apt-get update && apt-get upgrade -y && \
apt-get install -y --fix-missing \
ca-certificates curl jq git \
net-tools iputils-ping dnsutils \
netcat-openbsd telnet procps && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# Installation de Node.js
RUN curl -fsSL https://deb.nodesource.com/setup_19.x | bash - && \
apt-get install -y nodejs && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
COPY --from=deps /leCoffre-front/node_modules ./node_modules
COPY --from=deps /leCoffre-front/package.json ./package.json
COPY . .
# Arguments/variables d'environnement publics pour le build Next
ARG NEXT_PUBLIC_BACK_API_PROTOCOL
ARG NEXT_PUBLIC_BACK_API_HOST
ARG NEXT_PUBLIC_BACK_API_PORT
ARG NEXT_PUBLIC_BACK_API_ROOT_URL
ARG NEXT_PUBLIC_BACK_API_VERSION
ARG NEXT_PUBLIC_FRONT_APP_HOST
ARG NEXT_PUBLIC_FRONT_APP_PORT
ARG NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT
ARG NEXT_PUBLIC_IDNOT_CLIENT_ID
ARG NEXT_PUBLIC_IDNOT_BASE_URL
ARG NEXT_PUBLIC_IDNOT_REDIRECT_URI
ARG NEXT_PUBLIC_DOCAPOSTE_API_URL
ARG NEXT_PUBLIC_HOTJAR_SITE_ID
ARG NEXT_PUBLIC_HOTJAR_VERSION
ARG NEXT_PUBLIC_4NK_URL
ARG NEXT_PUBLIC_4NK_IFRAME_URL
ARG NEXT_PUBLIC_API_URL
ARG NEXT_PUBLIC_DEFAULT_VALIDATOR_ID
ARG NEXT_PUBLIC_DEFAULT_STORAGE_URLS
ENV NEXT_PUBLIC_BACK_API_PROTOCOL=${NEXT_PUBLIC_BACK_API_PROTOCOL} \
NEXT_PUBLIC_BACK_API_HOST=${NEXT_PUBLIC_BACK_API_HOST} \
NEXT_PUBLIC_BACK_API_PORT=${NEXT_PUBLIC_BACK_API_PORT} \
NEXT_PUBLIC_BACK_API_ROOT_URL=${NEXT_PUBLIC_BACK_API_ROOT_URL} \
NEXT_PUBLIC_BACK_API_VERSION=${NEXT_PUBLIC_BACK_API_VERSION} \
NEXT_PUBLIC_FRONT_APP_HOST=${NEXT_PUBLIC_FRONT_APP_HOST} \
NEXT_PUBLIC_FRONT_APP_PORT=${NEXT_PUBLIC_FRONT_APP_PORT} \
NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT=${NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT} \
NEXT_PUBLIC_IDNOT_CLIENT_ID=${NEXT_PUBLIC_IDNOT_CLIENT_ID} \
NEXT_PUBLIC_IDNOT_BASE_URL=${NEXT_PUBLIC_IDNOT_BASE_URL} \
NEXT_PUBLIC_IDNOT_REDIRECT_URI=${NEXT_PUBLIC_IDNOT_REDIRECT_URI} \
NEXT_PUBLIC_DOCAPOSTE_API_URL=${NEXT_PUBLIC_DOCAPOSTE_API_URL} \
NEXT_PUBLIC_HOTJAR_SITE_ID=${NEXT_PUBLIC_HOTJAR_SITE_ID} \
NEXT_PUBLIC_HOTJAR_VERSION=${NEXT_PUBLIC_HOTJAR_VERSION} \
NEXT_PUBLIC_4NK_URL=${NEXT_PUBLIC_4NK_URL} \
NEXT_PUBLIC_4NK_IFRAME_URL=${NEXT_PUBLIC_4NK_IFRAME_URL} \
NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL} \
NEXT_PUBLIC_DEFAULT_VALIDATOR_ID=${NEXT_PUBLIC_DEFAULT_VALIDATOR_ID} \
NEXT_PUBLIC_DEFAULT_STORAGE_URLS=${NEXT_PUBLIC_DEFAULT_STORAGE_URLS}
RUN --mount=type=cache,target=/leCoffre-front/.next/cache npm run build
# --- Image d'exécution "int-dev"
FROM debian:bookworm-slim AS int-dev
WORKDIR /leCoffre-front
# Installation des dépendances de base
RUN apt-get update && apt-get upgrade -y && \
apt-get install -y --fix-missing \
ca-certificates curl jq git \
net-tools iputils-ping dnsutils \
netcat-openbsd telnet procps && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# Installation de Node.js
RUN curl -fsSL https://deb.nodesource.com/setup_19.x | bash - && \
apt-get install -y nodejs && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# Re-déclarer les ARG pour l'étape runtime et les exposer en ENV
ARG NEXT_PUBLIC_BACK_API_PROTOCOL
ARG NEXT_PUBLIC_BACK_API_HOST
ARG NEXT_PUBLIC_BACK_API_PORT
ARG NEXT_PUBLIC_BACK_API_ROOT_URL
ARG NEXT_PUBLIC_BACK_API_VERSION
ARG NEXT_PUBLIC_FRONT_APP_HOST
ARG NEXT_PUBLIC_FRONT_APP_PORT
ARG NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT
ARG NEXT_PUBLIC_IDNOT_CLIENT_ID
ARG NEXT_PUBLIC_IDNOT_BASE_URL
ARG NEXT_PUBLIC_IDNOT_REDIRECT_URI
ARG NEXT_PUBLIC_DOCAPOSTE_API_URL
ARG NEXT_PUBLIC_HOTJAR_SITE_ID
ARG NEXT_PUBLIC_HOTJAR_VERSION
ARG NEXT_PUBLIC_4NK_URL
ARG NEXT_PUBLIC_4NK_IFRAME_URL
ARG NEXT_PUBLIC_API_URL
ARG NEXT_PUBLIC_DEFAULT_VALIDATOR_ID
ARG NEXT_PUBLIC_DEFAULT_STORAGE_URLS
ENV NODE_ENV=production \
PORT=3000 \
NEXT_PUBLIC_BACK_API_PROTOCOL=${NEXT_PUBLIC_BACK_API_PROTOCOL} \
NEXT_PUBLIC_BACK_API_HOST=${NEXT_PUBLIC_BACK_API_HOST} \
NEXT_PUBLIC_BACK_API_PORT=${NEXT_PUBLIC_BACK_API_PORT} \
NEXT_PUBLIC_BACK_API_ROOT_URL=${NEXT_PUBLIC_BACK_API_ROOT_URL} \
NEXT_PUBLIC_BACK_API_VERSION=${NEXT_PUBLIC_BACK_API_VERSION} \
NEXT_PUBLIC_FRONT_APP_HOST=${NEXT_PUBLIC_FRONT_APP_HOST} \
NEXT_PUBLIC_FRONT_APP_PORT=${NEXT_PUBLIC_FRONT_APP_PORT} \
NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT=${NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT} \
NEXT_PUBLIC_IDNOT_CLIENT_ID=${NEXT_PUBLIC_IDNOT_CLIENT_ID} \
NEXT_PUBLIC_IDNOT_BASE_URL=${NEXT_PUBLIC_IDNOT_BASE_URL} \
NEXT_PUBLIC_IDNOT_REDIRECT_URI=${NEXT_PUBLIC_IDNOT_REDIRECT_URI} \
NEXT_PUBLIC_DOCAPOSTE_API_URL=${NEXT_PUBLIC_DOCAPOSTE_API_URL} \
NEXT_PUBLIC_HOTJAR_SITE_ID=${NEXT_PUBLIC_HOTJAR_SITE_ID} \
NEXT_PUBLIC_HOTJAR_VERSION=${NEXT_PUBLIC_HOTJAR_VERSION} \
NEXT_PUBLIC_4NK_URL=${NEXT_PUBLIC_4NK_URL} \
NEXT_PUBLIC_4NK_IFRAME_URL=${NEXT_PUBLIC_4NK_IFRAME_URL} \
NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL} \
NEXT_PUBLIC_DEFAULT_VALIDATOR_ID=${NEXT_PUBLIC_DEFAULT_VALIDATOR_ID} \
NEXT_PUBLIC_DEFAULT_STORAGE_URLS=${NEXT_PUBLIC_DEFAULT_STORAGE_URLS}
# Next.js standalone runtime (output: 'standalone')
COPY --from=builder /leCoffre-front/.next/standalone ./
COPY --from=builder /leCoffre-front/.next/static ./.next/static
COPY --from=builder /leCoffre-front/public ./public
# Création de l'utilisateur non-root
RUN useradd -m -u 1000 lecoffreuser && \
mkdir -p /leCoffre-front && chown -R lecoffreuser:lecoffreuser /leCoffre-front
USER lecoffreuser
EXPOSE 3000
CMD ["node", "server.js"]

View File

@ -1,44 +0,0 @@
# Install dependencies only when needed
FROM node:19-alpine AS deps
WORKDIR leCoffre-front
COPY package.json ./
RUN apk update && apk add openssh-client git
COPY id_rsa /root/.ssh/id_rsa
RUN chmod 600 ~/.ssh/id_rsa
RUN eval "$(ssh-agent -s)" && ssh-add /root/.ssh/id_rsa
RUN ssh-keyscan github.com smart-chain-fr/leCoffre-resources.git >> /root/.ssh/known_hosts
RUN npm install --frozen-lockfile
# Rebuild the source code only when needed
FROM node:19-alpine AS builder
WORKDIR leCoffre-front
COPY --from=deps leCoffre-front/node_modules ./node_modules
COPY --from=deps leCoffre-front/package.json package.json
COPY tsconfig.json tsconfig.json
COPY src src
RUN npm run build
# Production image, copy all the files and run next
FROM node:19-alpine AS production
WORKDIR leCoffre-front
RUN adduser -D lecoffreuser --uid 10000 && chown -R lecoffreuser .
COPY public ./public
COPY --from=builder --chown=lecoffreuser leCoffre-front/node_modules ./node_modules
COPY --from=builder --chown=lecoffreuser leCoffre-front/.next ./.next
COPY --from=builder --chown=lecoffreuser leCoffre-front/package.json ./package.json
USER lecoffreuser
CMD ["npm", "run", "start"]
EXPOSE 3000

301
docs/ANALYSE-DEPENDANCES.md Normal file
View File

@ -0,0 +1,301 @@
# Analyse des Dépendances - lecoffre-front
## Vue d'ensemble
Ce document analyse les dépendances du projet lecoffre-front, leurs versions, leur utilisation et les recommandations de mise à jour.
## Dépendances principales
### Framework et Runtime
#### Next.js
- **Version actuelle**: 14.2.3
- **Type**: Framework React
- **Utilisation**: Framework principal de l'application
- **Statut**: Version récente et stable
- **Recommandation**: Maintenir la version actuelle
#### React
- **Version actuelle**: 18.2.0
- **Type**: Bibliothèque UI
- **Utilisation**: Bibliothèque de base pour l'interface utilisateur
- **Statut**: Version stable LTS
- **Recommandation**: Maintenir la version actuelle
#### React DOM
- **Version actuelle**: 18.2.0
- **Type**: Rendu DOM pour React
- **Utilisation**: Rendu des composants React dans le DOM
- **Statut**: Version stable LTS
- **Recommandation**: Maintenir la version actuelle
### Interface utilisateur
#### Material-UI (MUI)
- **Version actuelle**: 5.11.13
- **Type**: Bibliothèque de composants UI
- **Utilisation**: Composants d'interface utilisateur
- **Statut**: Version stable
- **Recommandation**: Maintenir la version actuelle
#### Emotion
- **Versions**:
- @emotion/react: ^11.10.6
- @emotion/styled: ^11.10.6
- **Type**: Bibliothèque CSS-in-JS
- **Utilisation**: Styling des composants
- **Statut**: Versions récentes
- **Recommandation**: Maintenir les versions actuelles
#### Heroicons
- **Version actuelle**: ^2.1.3
- **Type**: Icônes SVG
- **Utilisation**: Icônes dans l'interface
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
### TypeScript et outils de développement
#### TypeScript
- **Version actuelle**: 4.9.5
- **Type**: Langage de programmation
- **Utilisation**: Typage statique
- **Statut**: Version stable mais ancienne
- **Recommandation**: ⚠️ Mettre à jour vers TypeScript 5.x
#### ESLint
- **Version actuelle**: 8.36.0
- **Type**: Linter JavaScript/TypeScript
- **Utilisation**: Analyse de code
- **Statut**: Version stable
- **Recommandation**: Maintenir la version actuelle
#### Prettier
- **Version actuelle**: ^2.8.7
- **Type**: Formateur de code
- **Utilisation**: Formatage automatique
- **Statut**: Version stable
- **Recommandation**: Maintenir la version actuelle
### Utilitaires et bibliothèques
#### Class Validator
- **Version actuelle**: ^0.14.0
- **Type**: Validation de classes
- **Utilisation**: Validation des données
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
#### Classnames
- **Version actuelle**: ^2.3.2
- **Type**: Utilitaire pour les noms de classes
- **Utilisation**: Gestion conditionnelle des classes CSS
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
#### UUIDv4
- **Version actuelle**: ^6.2.13
- **Type**: Générateur d'UUID
- **Utilisation**: Génération d'identifiants uniques
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
### Gestion de fichiers
#### File Saver
- **Version actuelle**: ^2.0.5
- **Type**: Sauvegarde de fichiers
- **Utilisation**: Téléchargement de fichiers
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
#### JSZip
- **Version actuelle**: ^3.10.1
- **Type**: Manipulation d'archives ZIP
- **Utilisation**: Création d'archives
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
#### PDF-lib
- **Version actuelle**: ^1.17.1
- **Type**: Manipulation de PDF
- **Utilisation**: Génération et modification de PDF
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
### Intégrations et services
#### le-coffre-resources
- **Version actuelle**: v2.167
- **Type**: Ressources partagées
- **Utilisation**: Ressources communes du projet
- **Source**: git+ssh://git@git.4nkweb.com/4nk/lecoffre-ressources.git
- **Statut**: Version spécifique
- **Recommandation**: Maintenir la version actuelle
#### React GTM Module
- **Version actuelle**: ^2.0.11
- **Type**: Intégration Google Tag Manager
- **Utilisation**: Analytics et tracking
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
#### React Toastify
- **Version actuelle**: ^9.1.3
- **Type**: Notifications toast
- **Utilisation**: Affichage de notifications
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
#### React Select
- **Version actuelle**: ^5.7.2
- **Type**: Composant de sélection
- **Utilisation**: Listes déroulantes avancées
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
### Sécurité et authentification
#### JWT Decode
- **Version actuelle**: ^3.1.2
- **Type**: Décodage de JWT
- **Utilisation**: Traitement des tokens JWT
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
#### Crypto Random String
- **Version actuelle**: ^5.0.0
- **Type**: Génération de chaînes aléatoires
- **Utilisation**: Génération de tokens sécurisés
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
### Styling et assets
#### SASS
- **Version actuelle**: ^1.59.2
- **Type**: Préprocesseur CSS
- **Utilisation**: Styles avancés
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
#### Sharp
- **Version actuelle**: ^0.32.1
- **Type**: Optimisation d'images
- **Utilisation**: Traitement d'images
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
## Dépendances de développement
### Types TypeScript
#### @types/node
- **Version actuelle**: 18.15.1
- **Type**: Types pour Node.js
- **Utilisation**: Typage Node.js
- **Statut**: Version stable
- **Recommandation**: Maintenir la version actuelle
#### @types/react
- **Version actuelle**: 18.0.28
- **Type**: Types pour React
- **Utilisation**: Typage React
- **Statut**: Version stable
- **Recommandation**: Maintenir la version actuelle
#### @types/react-dom
- **Version actuelle**: 18.0.11
- **Type**: Types pour React DOM
- **Utilisation**: Typage React DOM
- **Statut**: Version stable
- **Recommandation**: Maintenir la version actuelle
#### @types/file-saver
- **Version actuelle**: ^2.0.7
- **Type**: Types pour file-saver
- **Utilisation**: Typage file-saver
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
#### @types/react-gtm-module
- **Version actuelle**: ^2.0.3
- **Type**: Types pour react-gtm-module
- **Utilisation**: Typage react-gtm-module
- **Statut**: Version récente
- **Recommandation**: Maintenir la version actuelle
## Analyse de sécurité
### Dépendances avec des vulnérabilités potentielles
#### TypeScript 4.9.5
- **Risque**: Version ancienne
- **Impact**: Fonctionnalités manquantes, corrections de bugs
- **Recommandation**: Mettre à jour vers TypeScript 5.x
### Dépendances sécurisées
Toutes les autres dépendances sont à jour et ne présentent pas de vulnérabilités connues.
## Recommandations de mise à jour
### Priorité haute
1. **TypeScript**: Mettre à jour vers la version 5.x
```bash
npm install typescript@latest
```
### Priorité moyenne
1. **Vérification régulière**: Utiliser `npm audit` pour détecter les vulnérabilités
2. **Mise à jour des dépendances**: Utiliser `npm update` régulièrement
### Priorité basse
1. **Dépendances optionnelles**: Évaluer l'utilisation de certaines dépendances
2. **Optimisation**: Analyser les dépendances non utilisées
## Scripts de maintenance
### Audit de sécurité
```bash
npm audit
npm audit fix
```
### Mise à jour des dépendances
```bash
npm update
npm outdated
```
### Vérification des dépendances non utilisées
```bash
npx depcheck
```
## Monitoring des dépendances
### Outils recommandés
1. **Dependabot**: Mise à jour automatique des dépendances
2. **Snyk**: Détection des vulnérabilités
3. **npm audit**: Audit de sécurité intégré
### Processus de mise à jour
1. **Test**: Tester les mises à jour en environnement de développement
2. **Validation**: Valider le fonctionnement de l'application
3. **Déploiement**: Déployer en staging puis en production
## Conclusion
Le projet lecoffre-front utilise des dépendances modernes et bien maintenues. La seule recommandation majeure est la mise à jour de TypeScript vers la version 5.x pour bénéficier des dernières fonctionnalités et corrections de bugs.
Les dépendances sont globalement sécurisées et à jour, ce qui garantit la stabilité et la sécurité de l'application.
---
*Analyse effectuée le $(date) - Dépendances lecoffre-front*

228
docs/ANALYSE-REPOSITORY.md Normal file
View File

@ -0,0 +1,228 @@
# Analyse du Repository lecoffre-front
## Vue d'ensemble
Le projet **lecoffre-front** est une application Next.js développée en TypeScript pour la gestion de dossiers notariaux. Il s'agit d'une application frontend qui communique avec une API backend et intègre plusieurs services externes.
## Structure du Repository
### Architecture générale
```
lecoffre-front/
├── src/ # Code source principal
│ ├── common/ # Code partagé
│ │ └── Api/ # Services API communs
│ ├── front/ # Code spécifique au frontend
│ │ ├── Api/ # Services API frontend
│ │ ├── Components/ # Composants React
│ │ ├── Config/ # Configuration par environnement
│ │ ├── Hooks/ # Hooks React personnalisés
│ │ ├── Services/ # Services métier
│ │ ├── Stores/ # Gestion d'état
│ │ ├── Themes/ # Thèmes et styles
│ │ └── Utils/ # Utilitaires
│ ├── pages/ # Pages Next.js
│ └── sdk/ # SDK et intégrations
├── docs/ # Documentation
├── tests/ # Tests
├── public/ # Assets statiques
├── Dockerfile # Configuration Docker
├── next.config.js # Configuration Next.js
├── package.json # Dépendances et scripts
└── temp.yaml # Configuration Kubernetes temporaire
```
### Technologies utilisées
- **Framework**: Next.js 14.2.3
- **Language**: TypeScript 4.9.5
- **UI**: Material-UI (MUI) 5.11.13
- **Styling**: Emotion, SASS
- **État**: Context API / Hooks
- **Build**: Docker multi-stage
- **Déploiement**: Kubernetes
## Variables d'Environnement
### Variables principales
Le projet utilise un système de variables d'environnement préfixées par `NEXT_PUBLIC_` pour la configuration runtime.
#### Configuration API Backend
```bash
NEXT_PUBLIC_BACK_API_PROTOCOL=https://
NEXT_PUBLIC_BACK_API_HOST=api.example.com
NEXT_PUBLIC_BACK_API_PORT=443
NEXT_PUBLIC_BACK_API_ROOT_URL=/api
NEXT_PUBLIC_BACK_API_VERSION=v1
```
#### Configuration Frontend
```bash
NEXT_PUBLIC_FRONT_APP_HOST=https://app.example.com
NEXT_PUBLIC_FRONT_APP_PORT=443
```
#### Intégrations externes
```bash
# IDNot (authentification)
NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT=/oauth/authorize
NEXT_PUBLIC_IDNOT_CLIENT_ID=client_id
NEXT_PUBLIC_IDNOT_BASE_URL=https://idnot.example.com
NEXT_PUBLIC_IDNOT_REDIRECT_URI=https://app.example.com/callback
# Docaposte
NEXT_PUBLIC_DOCAPOSTE_API_URL=https://api.docaposte.com
# 4NK (services blockchain)
NEXT_PUBLIC_4NK_URL=https://dev4.4nkweb.com
NEXT_PUBLIC_4NK_IFRAME_URL=https://dev4.4nkweb.com
# Hotjar (analytics)
NEXT_PUBLIC_HOTJAR_SITE_ID=123456
NEXT_PUBLIC_HOTJAR_VERSION=6
# Configuration système
NEXT_PUBLIC_API_URL=https://api.example.com
NEXT_PUBLIC_DEFAULT_VALIDATOR_ID=862406317a35064537ac959cb5d8bbdf4f849283b63db3ffa9904de2b3427c43:0
NEXT_PUBLIC_DEFAULT_STORAGE_URLS=https://dev3.4nkweb.com/storage
```
### Gestion des environnements
Le projet supporte 4 environnements via la variable `NEXTJS_APP_ENV_NAME`:
- `development` (par défaut)
- `staging`
- `preprod`
- `production`
Chaque environnement a sa configuration dans `src/front/Config/Module/` (voir l'esquisse après cette liste) :
- `development.json`
- `staging.json`
- `preprod.json`
- `production.json`
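Esquisse hypothétique de la sélection du module de configuration en fonction de `NEXTJS_APP_ENV_NAME` ; les chemins d'import et la forme des JSON sont des suppositions d'illustration, pas le code du projet.

```typescript
// Sélection du fichier de configuration selon NEXTJS_APP_ENV_NAME ;
// les chemins reprennent src/front/Config/Module/ (résolution d'import supposée).
import development from "src/front/Config/Module/development.json";
import staging from "src/front/Config/Module/staging.json";
import preprod from "src/front/Config/Module/preprod.json";
import production from "src/front/Config/Module/production.json";

const modules = { development, staging, preprod, production } as const;
type EnvName = keyof typeof modules;

export function getModuleConfig() {
  const env = (process.env.NEXTJS_APP_ENV_NAME ?? "development") as EnvName;
  return modules[env] ?? modules.development;
}
```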
## Configuration Docker
### Multi-stage build
Le Dockerfile utilise un build multi-stage avec 4 cibles:
1. **deps**: Installation des dépendances avec support SSH pour git.4nkweb.com
2. **development**: Image de développement avec hot-reload
3. **builder**: Build de production
4. **int-dev**: Image de production finale
### Variables d'environnement Docker
Toutes les variables `NEXT_PUBLIC_*` sont passées comme arguments de build et variables d'environnement runtime.
### Sécurité
- Utilisateur non-root (`lecoffreuser` avec UID 10000)
- Support SSH agent pour cloner les dépendances privées
- Cache npm pour optimiser les builds
## Configuration Kubernetes
### Déploiement
Le fichier `temp.yaml` contient la configuration Kubernetes complète:
- **Namespace**: `lecoffre`
- **Service**: `lecoffre-front-svc` (port 80 → 3000)
- **Deployment**: `lecoffre-front` avec 1 replica
- **Ingress**: `app.stg.lecoffre.smart-chain.fr` avec SSL
### Gestion des secrets
- **Vault**: Intégration HashiCorp Vault pour les secrets
- **External Secrets**: Gestion automatique des secrets Docker
- **Service Account**: `lecoffre-front-sa` avec permissions appropriées
### Ressources
```yaml
resources:
  limits:
    memory: 2Gi
  requests:
    cpu: 200m
    memory: 1Gi
```
## Dépendances principales
### Production
- **Next.js**: 14.2.3 (framework React)
- **React**: 18.2.0
- **Material-UI**: 5.11.13 (composants UI)
- **TypeScript**: 4.9.5
- **le-coffre-resources**: v2.167 (ressources partagées)
### Développement
- **ESLint**: 8.36.0
- **Prettier**: 2.8.7
- **SASS**: 1.59.2
### Intégrations
- **Google Tag Manager**: Analytics
- **Hotjar**: Heatmaps et enregistrements
- **Docaposte**: Services postaux
- **4NK**: Services blockchain
## Architecture de l'application
### Gestion d'état
- **FrontendVariables**: Singleton pour les variables d'environnement (voir l'esquisse après cette liste)
- **Module**: Configuration par environnement
- **Context API**: Gestion d'état React
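Esquisse hypothétique (le code réel du projet peut différer) du patron singleton évoqué pour `FrontendVariables`, qui centralise les valeurs `NEXT_PUBLIC_*` côté client :

```typescript
// Singleton centralisant les variables d'environnement côté front ;
// les champs listés sont des exemples, pas la liste réelle du projet.
export class FrontendVariables {
  private static instance: FrontendVariables | null = null;

  public BACK_API_PROTOCOL = "";
  public BACK_API_HOST = "";
  public IDNOT_BASE_URL = "";

  private constructor() {}

  public static getInstance(): FrontendVariables {
    if (!FrontendVariables.instance) {
      FrontendVariables.instance = new FrontendVariables();
    }
    return FrontendVariables.instance;
  }
}

// Initialisé une fois (par exemple dans _app.tsx), puis lu partout :
// FrontendVariables.getInstance().BACK_API_HOST = props.backApiHost;
```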
### Communication API
- **DatabaseService**: Service générique pour les appels API
- **BaseApiService**: Service de base pour les API
- **Construction dynamique d'URLs**: Basée sur les variables d'environnement
### SDK et intégrations
- **Iframe**: Communication avec les services 4NK
- **MessageBus**: Bus de messages pour la communication inter-iframe
- **User**: Gestion de l'authentification utilisateur
- **AuthModal**: Modal d'authentification
## Points d'attention
### Sécurité
- Variables d'environnement exposées côté client (préfixe `NEXT_PUBLIC_`)
- Authentification via IDNot
- Communication sécurisée avec les services 4NK
### Performance
- Build standalone Next.js
- Cache npm dans Docker
- Optimisation des images avec Sharp
### Déploiement
- Support multi-environnement
- Intégration Vault pour les secrets
- Monitoring avec Hotjar et GTM
## Recommandations
1. **Documentation**: Maintenir la documentation des variables d'environnement
2. **Tests**: Étendre la couverture de tests
3. **Monitoring**: Ajouter des métriques de performance
4. **Sécurité**: Auditer régulièrement les dépendances
5. **CI/CD**: Automatiser les déploiements avec validation des variables
## Version actuelle
- **Application**: v0.1.6 (package.json)
- **Frontend**: v2.5.1 (version.json)
- **Docker**: v0.1.9 (temp.yaml)
---
*Analyse effectuée le $(date) - Repository lecoffre-front*

50
docs/ANALYSE.md Normal file
View File

@ -0,0 +1,50 @@
## Analyse détaillée
### Périmètre
Front Next.js `lecoffre-front` (Next 14, TypeScript), build multi-étapes et image `int-dev` runtime.
### Stack
- **Framework**: Next.js 14 (React 18), TypeScript 4.9.5
- **UI**: MUI, Emotion, Heroicons
- **Outillage**: ESLint (8.36), Prettier
- **Ressources externes**: `le-coffre-resources` via SSH Gitea (`#v2.167`)
### Build et exécution
- Scripts: `dev`, `build` (telemetry off, `--no-lint`), `start`, `lint`, `format`.
- `next.config.js`: `output: 'standalone'`, `basePath: '/lecoffre'`, `ignoreBuildErrors: true`.
- Docker multi-étapes: `deps` (npm ci), `development` (npm run dev), `builder` (npm run build), `int-dev` (runtime node:19-alpine, `server.js`).
### Variables d'environnement
Variables NEXT_PUBLIC nombreuses (API back, IDNot, Docaposte, 4NK URLs, defaults). Exposées en `env`, `publicRuntimeConfig`, `serverRuntimeConfig` et redéclarées en Docker (`ARG` → `ENV`).
### Ports et réseau
- Port 3000 interne; exposé 3004:3000 dans `lecoffre_node`.
### Dépendances clés
- `next@^14.2.3`, `react@18.2.0`, `sharp@^0.32.1` (build image-processing Alpine), `jszip`, `jwt-decode`, `react-select`, `react-toastify`.
### Risques et points d'attention
- `ignoreBuildErrors: true` masque les erreurs TS. Risque de dérive de type.
- `--no-lint` en build désactive ESLint. Manque de garde qualité en CI.
- Node 19 sur Alpine: fin de support. Préférer LTS (20/22).
- Dépendance Git SSH privée (`lecoffre-ressources`): exige agent SSH BuildKit; documenter clé et politiques.
### Actions proposées
- Rétablir le lint en build (ou CI) et supprimer `ignoreBuildErrors` si possible.
- Monter vers Node LTS (20-alpine) et tester `sharp`.
- Documenter toutes les variables NEXT_PUBLIC et fournir `.env.example`.
- Mettre en place du code splitting (React.lazy/Suspense, voir l'esquisse ci-dessous) et centraliser l'état (Context/Redux) selon les règles projet.
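Esquisse minimale du code splitting mentionné ci-dessus avec `React.lazy`/`Suspense` ; le composant et son chemin sont des exemples hypothétiques. Dans Next.js, `next/dynamic` remplit le même rôle de façon idiomatique.

```tsx
// Chargement différé d'un composant lourd ; FolderInformation et son chemin
// sont hypothétiques, seul le mécanisme React.lazy/Suspense est illustré.
import React, { Suspense, lazy } from "react";

const FolderInformation = lazy(() => import("./Components/FolderInformation"));

export default function FolderPage() {
  return (
    <Suspense fallback={<p>Chargement…</p>}>
      <FolderInformation />
    </Suspense>
  );
}
```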

22
docs/ARCHITECTURE.md Normal file
View File

@ -0,0 +1,22 @@
# Architecture - LeCoffre Front
## Composants
- Next.js (branche `int-dev`).
- Intègre `ihm_client` via iframe.
## Dépendances
- Backend `lecoffre-back-mini` (`NEXT_PUBLIC_BACK_*`).
- Redirections IdNot (local.4nkweb.com:3000).
## Réseau et ports
- Servi via Nginx: `https://dev4.4nkweb.com/lecoffre/`.
## Variables d'environnement (centralisées)
- `NEXT_PUBLIC_*` depuis `lecoffre_node/.env.master`.
## Monitoring
- Logs Promtail → Loki.
- Dashboard Grafana: Frontend Services.
## Notes
- Pas de `.env` local, utilisation variables runtime.

View File

@ -0,0 +1,38 @@
# Corrections Appliquées - LeCoffre Front
## Date: 20 Septembre 2025
### 🔧 Corrections Majeures
#### 1. Problème de Healthcheck
**Problème:** Le healthcheck échouait car `curl` n'était pas installé et Next.js écoutait sur l'IP du conteneur.
**Solution:**
- Changement du healthcheck pour vérifier le processus `next-server`
- Healthcheck: `ps aux | grep -v grep | grep next-server`
- Correction de l'entrypoint pour `npm start`
**Fichiers modifiés:**
- `docker-compose.yml` - Healthcheck corrigé
- Configuration - Entrypoint optimisé
#### 2. Installation des Outils
**Ajouté dans le Dockerfile:**
- `curl`, `git`, `wget`, `jq`, `telnet`, `npm`, `wscat`
- Outils de diagnostic et de connectivité
#### 3. Configuration Next.js
- Port: 3000 (mappé sur 3004)
- Processus: `next-server`
- Healthcheck: Vérification du processus
### 📊 État Actuel
- **Statut:** ✅ Healthy
- **Processus:** next-server en cours d'exécution
- **Port:** 3000 accessible sur 172.20.0.11
- **URL:** https://dev4.4nkweb.com/lecoffre
### 🔄 Prochaines Étapes
- Tests de connectivité frontend
- Monitoring des performances
- Optimisations supplémentaires

20
docs/DEPLOIEMENT.md Normal file
View File

@ -0,0 +1,20 @@
# Déploiement - LeCoffre Front
## Préparation
- Branche `int-dev`.
- `NEXT_PUBLIC_*` dans `lecoffre_node/.env.master`.
## Déploiement (orchestrateur)
```bash
cd /home/debian/4NK_env/lecoffre_node
./scripts/start.sh | cat
./scripts/validate-deployment.sh | cat
```
## Vérifications
- `https://dev4.4nkweb.com/lecoffre/` s'affiche.
- Iframe `ihm_client` s'ouvre.
## Règles
- Pas de compose direct.
- Push `int-dev` sans CI pour docs.

30
docs/ENV-RESUME.md Normal file
View File

@ -0,0 +1,30 @@
## Résumé des environnements (front)
### Contexte
- **Hôte iframe**: `https://dev4.4nkweb.com`
- **Site principal**: `https://dev4.4nkweb.com/lecoffre` (Next.js `basePath: '/lecoffre'`)
- **Signer utilisé**: `https://dev3.4nkweb.com`
### Variables `NEXT_PUBLIC_*` à aligner
- `NEXT_PUBLIC_4NK_URL=https://dev4.4nkweb.com`
- `NEXT_PUBLIC_4NK_IFRAME_URL=https://dev4.4nkweb.com`
- `NEXT_PUBLIC_API_URL` → URL publique de l'API back (chemin stable, CORS OK)
- `NEXT_PUBLIC_BACK_API_PROTOCOL=https`
- `NEXT_PUBLIC_BACK_API_HOST=dev4.4nkweb.com`
- `NEXT_PUBLIC_BACK_API_PORT` (vide en prod 443)
- `NEXT_PUBLIC_BACK_API_ROOT_URL` et `NEXT_PUBLIC_BACK_API_VERSION` si composées côté front
- `NEXT_PUBLIC_IDNOT_*`, `NEXT_PUBLIC_DOCAPOSTE_API_URL` selon intégrations
- `NEXT_PUBLIC_DEFAULT_VALIDATOR_ID`, `NEXT_PUBLIC_DEFAULT_STORAGE_URLS` selon besoins
### Points d'attention
- Vérifier que toutes les URLs tiennent compte du `basePath` `/lecoffre`.
- Le service de signature est externalisé sur `dev3.4nkweb.com`.

6
docs/FLUX.md Normal file
View File

@ -0,0 +1,6 @@
# Description des Flux - LeCoffre Front
## Flux principaux
1. Auth notaire: Front → IdNot → Front (callback).
2. Intégration IHM: Front → iframe → IHM.
3. API: Front → Back (REST).

15
docs/FONCTIONNEL.md Normal file
View File

@ -0,0 +1,15 @@
# Description Fonctionnelle - LeCoffre Front
## Objectif
Fournir l'interface principale (Next.js) orchestrant l'UX, incluant l'iframe IHM Client.
## Parcours clés
- Authentification notaire (redirections IdNot).
- Navigation dossiers et intégration iframe IHM.
- Appels API backend, feedback utilisateur et gestion d'erreurs.
## Rôles
- Notaire, utilisateur interne.
## Résultats attendus
- UX fluide, chargements différés (code splitting), gestion session robuste.

26
docs/INSTALLATION.md Normal file
View File

@ -0,0 +1,26 @@
# Installation - LeCoffre Front
## Prérequis
- Dépôts sous `/home/debian/4NK_env` (branche `int-dev`).
- Docker/Compose.
- Variables `NEXT_PUBLIC_*` dans `lecoffre_node/.env.master`.
## Configuration
- Pas de `.env` local.
- Vérifier URLs backend et iframe IHM.
## Démarrage (orchestrateur)
```bash
cd /home/debian/4NK_env/lecoffre_node
./scripts/start.sh | cat
```
## Accès
- `https://dev4.4nkweb.com/lecoffre/`
## Vérifications
- Ouverture iframe IHM.
- Appels API vers `/api/` OK.
## Notes
- CI via tag `int-dev`.

6
docs/QUALITE.md Normal file
View File

@ -0,0 +1,6 @@
# Qualité Logicielle - LeCoffre Front
- Lint/format: respecter règles Next/TS.
- Tests: E2E parcours IdNot et iframe IHM.
- Performance: audit Lighthouse, lazy loading.
- Observabilité: logs client minimaux, erreurs capturées.

159
docs/RESUME-ANALYSE.md Normal file
View File

@ -0,0 +1,159 @@
# Résumé de l'Analyse - lecoffre-front
## Vue d'ensemble
L'analyse complète du repository **lecoffre-front** a été effectuée le $(date). Ce document présente un résumé des principales découvertes et recommandations.
## Structure du projet
### Type d'application
- **Framework**: Next.js 14.2.3 avec TypeScript 4.9.5
- **Architecture**: Application frontend SPA avec intégrations multiples
- **Déploiement**: Docker multi-stage + Kubernetes
- **Version actuelle**: v0.1.6 (package.json) / v2.5.1 (frontend)
### Architecture technique
```
Frontend (Next.js) → API Backend → Services externes
↓ ↓ ↓
Material-UI Base de données 4NK/IDNot/Docaposte
```
## Variables d'environnement
### Configuration identifiée
- **21 variables d'environnement** configurées
- **4 environnements** supportés (dev/staging/preprod/production)
- **Gestion centralisée** via Next.js config et FrontendVariables
### Variables critiques
```bash
# API Backend
NEXT_PUBLIC_BACK_API_PROTOCOL=https://
NEXT_PUBLIC_BACK_API_HOST=api.example.com
NEXT_PUBLIC_BACK_API_PORT=443
# Intégrations
NEXT_PUBLIC_4NK_URL=https://dev4.4nkweb.com
NEXT_PUBLIC_IDNOT_BASE_URL=https://idnot.example.com
NEXT_PUBLIC_DOCAPOSTE_API_URL=https://api.docaposte.com
```
## Déploiement et infrastructure
### Docker
- **Multi-stage build** avec 4 cibles (deps/development/builder/int-dev)
- **Sécurité**: Utilisateur non-root, support SSH agent
- **Optimisation**: Cache npm, build standalone Next.js
### Kubernetes
- **Namespace**: lecoffre
- **Ressources**: 1Gi RAM (request) / 2Gi RAM (limit)
- **Sécurité**: Vault Agent pour injection des secrets
- **Ingress**: TLS avec cert-manager
### CI/CD
- **Registre**: git.4nkweb.com (Docker registry)
- **Tagging**: Contrôlé par message de commit
- **Secrets**: Gestion via Vault + External Secrets
## Dépendances
### État général
- **46 dépendances** principales
- **5 dépendances** de développement
- **Statut**: Majoritairement à jour et sécurisées
### Recommandations
- ✅ **Maintenir**: Next.js 14.2.3, React 18.2.0, MUI 5.11.13
- ⚠️ **Mettre à jour**: TypeScript 4.9.5 → 5.x
- ✅ **Sécurisé**: Toutes les autres dépendances
## Points forts identifiés
### Architecture
- ✅ Structure modulaire bien organisée
- ✅ Séparation claire des responsabilités
- ✅ Configuration multi-environnement
- ✅ Intégration Docker/Kubernetes robuste
### Sécurité
- ✅ Variables d'environnement externalisées
- ✅ Gestion des secrets via Vault
- ✅ Utilisateur non-root dans Docker
- ✅ Support SSH agent pour dépendances privées
### Développement
- ✅ TypeScript pour le typage statique
- ✅ ESLint + Prettier pour la qualité du code
- ✅ Tests organisés dans le dossier tests/
- ✅ Documentation complète
## Points d'attention
### Améliorations recommandées
1. **TypeScript**
- Mettre à jour vers la version 5.x
- Bénéficier des dernières fonctionnalités
2. **Monitoring**
- Ajouter des métriques de performance
- Monitoring des erreurs en production
3. **Tests**
- Étendre la couverture de tests
- Tests d'intégration avec les services externes
4. **Documentation**
- Maintenir la documentation des variables d'environnement
- Documenter les processus de déploiement
### Risques identifiés
1. **Dépendances privées**
- Dépendance à git.4nkweb.com pour le-coffre-resources
- Nécessite un accès SSH configuré
2. **Variables d'environnement**
- Variables NEXT_PUBLIC_* exposées côté client
- Nécessite une validation stricte des valeurs
3. **Intégrations externes**
- Dépendance à plusieurs services externes
- Nécessite une gestion des pannes
## Recommandations prioritaires
### Court terme (1-2 semaines)
1. Mettre à jour TypeScript vers la version 5.x
2. Effectuer un audit de sécurité complet (`npm audit`)
3. Vérifier la configuration des variables d'environnement
### Moyen terme (1-2 mois)
1. Étendre la couverture de tests
2. Ajouter des métriques de monitoring
3. Documenter les processus de déploiement
### Long terme (3-6 mois)
1. Évaluer l'optimisation des performances
2. Considérer l'ajout de tests d'intégration
3. Planifier les mises à jour des dépendances
## Conclusion
Le projet **lecoffre-front** présente une architecture solide et bien structurée. Les technologies utilisées sont modernes et appropriées pour le contexte. La configuration Docker/Kubernetes est robuste et sécurisée.
Les principales améliorations concernent la mise à jour de TypeScript et l'extension des tests. Le projet est globalement en bon état et prêt pour la production.
## Documentation créée
1. **ANALYSE-REPOSITORY.md**: Analyse complète du repository
2. **VARIABLES-ENVIRONNEMENT.md**: Documentation détaillée des variables d'environnement
3. **ANALYSE-DEPENDANCES.md**: Analyse des dépendances et recommandations
4. **RESUME-ANALYSE.md**: Ce résumé exécutif
---
*Analyse effectuée le $(date) - Repository lecoffre-front*
*Analyste: Assistant IA Claude*

6
docs/SECURITE.md Normal file
View File

@ -0,0 +1,6 @@
# Sécurité - LeCoffre Front
- Aucune donnée sensible côté client.
- Variables exposées en `NEXT_PUBLIC_*` contrôlées.
- CSP/headers via Nginx.
- Sanitation des entrées utilisateur.

19
docs/TECHNIQUE.md Normal file
View File

@ -0,0 +1,19 @@
# Description Technique - LeCoffre Front
## Tech stack
- Next.js, Node.js 19.
## Configuration
- Variables `NEXT_PUBLIC_*` via `lecoffre_node/.env.master`.
## Interfaces
- Iframe vers `ihm_client`.
- REST vers `/api/`.
## Sécurité
- Aucun secret côté client.
- Headers via Nginx.
## Observabilité
- Logs Promtail → Loki.
- Dashboards Grafana.

6
docs/TODO.md Normal file
View File

@ -0,0 +1,6 @@
# TODO - LeCoffre Front
- Vérifier URLs backend et iframe IHM.
- Tester parcours IdNot.
- Valider variables `NEXT_PUBLIC_*`.
- Vérifier dashboards Grafana Frontend.

View File

@ -0,0 +1,336 @@
# Variables d'Environnement - lecoffre-front
## Vue d'ensemble
Ce document détaille toutes les variables d'environnement utilisées dans l'application lecoffre-front, leur utilisation et leur configuration.
## Variables d'environnement supportées
### 1. Configuration API Backend
#### `NEXT_PUBLIC_BACK_API_PROTOCOL`
- **Description**: Protocole utilisé pour communiquer avec l'API backend
- **Valeurs possibles**: `https://`, `http://`
- **Exemple**: `https://`
- **Utilisation**: Construction des URLs d'API
#### `NEXT_PUBLIC_BACK_API_HOST`
- **Description**: Nom d'hôte de l'API backend
- **Exemple**: `api.lecoffre.com`, `dev4.4nkweb.com`
- **Utilisation**: Construction des URLs d'API
#### `NEXT_PUBLIC_BACK_API_PORT`
- **Description**: Port de l'API backend
- **Exemple**: `443`, `8080`, `3001`
- **Utilisation**: Construction des URLs d'API
- **Note**: Peut être vide pour les ports par défaut (80/443)
#### `NEXT_PUBLIC_BACK_API_ROOT_URL`
- **Description**: Chemin racine de l'API
- **Exemple**: `/api`, `/`
- **Utilisation**: Construction des URLs d'API
#### `NEXT_PUBLIC_BACK_API_VERSION`
- **Description**: Version de l'API
- **Exemple**: `v1`, `v2`
- **Utilisation**: Construction des URLs d'API
### 2. Configuration Frontend
#### `NEXT_PUBLIC_FRONT_APP_HOST`
- **Description**: URL de base de l'application frontend
- **Exemple**: `https://app.lecoffre.com`
- **Utilisation**: Redirections et liens internes
#### `NEXT_PUBLIC_FRONT_APP_PORT`
- **Description**: Port de l'application frontend
- **Exemple**: `443`, `3000`
- **Utilisation**: Construction des URLs frontend
### 3. Intégration IDNot (Authentification)
#### `NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT`
- **Description**: Point d'entrée pour l'autorisation OAuth
- **Exemple**: `/oauth/authorize`
- **Utilisation**: Flux d'authentification
#### `NEXT_PUBLIC_IDNOT_CLIENT_ID`
- **Description**: Identifiant client OAuth
- **Exemple**: `lecoffre-client-id`
- **Utilisation**: Authentification OAuth
#### `NEXT_PUBLIC_IDNOT_BASE_URL`
- **Description**: URL de base du service IDNot
- **Exemple**: `https://idnot.lecoffre.com`
- **Utilisation**: Authentification OAuth
#### `NEXT_PUBLIC_IDNOT_REDIRECT_URI`
- **Description**: URI de redirection après authentification
- **Exemple**: `https://app.lecoffre.com/callback`
- **Utilisation**: Flux d'authentification
### 4. Intégration Docaposte
#### `NEXT_PUBLIC_DOCAPOSTE_API_URL`
- **Description**: URL de l'API Docaposte
- **Exemple**: `https://api.docaposte.com`
- **Utilisation**: Services postaux
### 5. Intégration 4NK (Blockchain)
#### `NEXT_PUBLIC_4NK_URL`
- **Description**: URL de base des services 4NK
- **Exemple**: `https://dev4.4nkweb.com`
- **Utilisation**: Services blockchain et signature
#### `NEXT_PUBLIC_4NK_IFRAME_URL`
- **Description**: URL spécifique pour l'iframe 4NK
- **Exemple**: `https://dev4.4nkweb.com`
- **Utilisation**: Communication iframe
- **Note**: Peut être différente de `NEXT_PUBLIC_4NK_URL`
### 6. Analytics et Monitoring
#### `NEXT_PUBLIC_HOTJAR_SITE_ID`
- **Description**: Identifiant du site Hotjar
- **Exemple**: `123456`
- **Utilisation**: Analytics et heatmaps
#### `NEXT_PUBLIC_HOTJAR_VERSION`
- **Description**: Version de Hotjar
- **Exemple**: `6`
- **Utilisation**: Analytics et heatmaps
### 7. Configuration système
#### `NEXT_PUBLIC_API_URL`
- **Description**: URL générique de l'API
- **Exemple**: `https://api.lecoffre.com`
- **Utilisation**: Appels API génériques
#### `NEXT_PUBLIC_DEFAULT_VALIDATOR_ID`
- **Description**: Identifiant du validateur par défaut
- **Exemple**: `862406317a35064537ac959cb5d8bbdf4f849283b63db3ffa9904de2b3427c43:0`
- **Utilisation**: Validation des entités système
- **Valeur par défaut**: Définie dans `AppConstants.ts`
#### `NEXT_PUBLIC_DEFAULT_STORAGE_URLS`
- **Description**: URLs de stockage par défaut (séparées par des virgules)
- **Exemple**: `https://dev3.4nkweb.com/storage,https://backup.4nkweb.com/storage`
- **Utilisation**: Stockage des données
- **Valeur par défaut**: `https://dev3.4nkweb.com/storage` (voir l'esquisse ci-dessous)
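Esquisse (à titre d'illustration, pas le code du projet) de lecture de cette variable : la liste est séparée par des virgules, avec repli sur la valeur par défaut documentée ci-dessus.

```typescript
// Découpage de NEXT_PUBLIC_DEFAULT_STORAGE_URLS en liste d'URLs,
// avec la valeur par défaut en repli si la variable est absente.
const DEFAULT_STORAGE_URLS = "https://dev3.4nkweb.com/storage";

export function getStorageUrls(): string[] {
  const raw = process.env.NEXT_PUBLIC_DEFAULT_STORAGE_URLS ?? DEFAULT_STORAGE_URLS;
  return raw
    .split(",")
    .map((url) => url.trim())
    .filter((url) => url.length > 0);
}
```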
### 8. Variables d'environnement système
#### `NEXTJS_APP_ENV_NAME`
- **Description**: Nom de l'environnement
- **Valeurs possibles**: `development`, `staging`, `preprod`, `production`
- **Utilisation**: Sélection de la configuration par environnement
- **Valeur par défaut**: `development`
#### `NODE_ENV`
- **Description**: Environnement Node.js
- **Valeurs possibles**: `development`, `production`
- **Utilisation**: Configuration Next.js
## Configuration par environnement
### Développement
```bash
NEXTJS_APP_ENV_NAME=development
NODE_ENV=development
NEXT_PUBLIC_BACK_API_PROTOCOL=http://
NEXT_PUBLIC_BACK_API_HOST=localhost
NEXT_PUBLIC_BACK_API_PORT=3001
NEXT_PUBLIC_4NK_URL=https://dev4.4nkweb.com
```
### Staging
```bash
NEXTJS_APP_ENV_NAME=staging
NODE_ENV=production
NEXT_PUBLIC_BACK_API_PROTOCOL=https://
NEXT_PUBLIC_BACK_API_HOST=stg-api.lecoffre.com
NEXT_PUBLIC_BACK_API_PORT=443
NEXT_PUBLIC_4NK_URL=https://dev4.4nkweb.com
```
### Production
```bash
NEXTJS_APP_ENV_NAME=production
NODE_ENV=production
NEXT_PUBLIC_BACK_API_PROTOCOL=https://
NEXT_PUBLIC_BACK_API_HOST=api.lecoffre.com
NEXT_PUBLIC_BACK_API_PORT=443
NEXT_PUBLIC_4NK_URL=https://4nk.lecoffre.com
```
## Utilisation dans le code
### Configuration Next.js
Les variables sont configurées dans `next.config.js`:
```javascript
const nextConfig = {
  publicRuntimeConfig: {
    NEXT_PUBLIC_BACK_API_PROTOCOL: process.env.NEXT_PUBLIC_BACK_API_PROTOCOL,
    // ... autres variables
  },
  serverRuntimeConfig: {
    // Même configuration pour le serveur
  },
  env: {
    // Configuration pour le build
  }
};
```
### Initialisation dans l'application
Les variables sont initialisées dans `_app.tsx`:
```typescript
const { publicRuntimeConfig } = getConfig();
MyApp.getInitialProps = async () => {
  return {
    backApiProtocol: publicRuntimeConfig.NEXT_PUBLIC_BACK_API_PROTOCOL,
    // ... autres variables
  };
};
```
### Utilisation dans les services
```typescript
// DatabaseService.ts
private static buildBaseUrl(): string {
  return `${publicRuntimeConfig.NEXT_PUBLIC_BACK_API_PROTOCOL}${publicRuntimeConfig.NEXT_PUBLIC_BACK_API_HOST}:${publicRuntimeConfig.NEXT_PUBLIC_BACK_API_PORT}${publicRuntimeConfig.NEXT_PUBLIC_BACK_API_ROOT_URL}${publicRuntimeConfig.NEXT_PUBLIC_BACK_API_VERSION}`;
}
```
## Déploiement Docker
### Variables de build
```dockerfile
ARG NEXT_PUBLIC_BACK_API_PROTOCOL
ARG NEXT_PUBLIC_BACK_API_HOST
# ... autres variables
```
### Variables d'environnement runtime
```dockerfile
ENV NEXT_PUBLIC_BACK_API_PROTOCOL=${NEXT_PUBLIC_BACK_API_PROTOCOL} \
NEXT_PUBLIC_BACK_API_HOST=${NEXT_PUBLIC_BACK_API_HOST} \
# ... autres variables
```
### Exécution
```bash
docker run -e NEXT_PUBLIC_BACK_API_PROTOCOL=https:// \
-e NEXT_PUBLIC_BACK_API_HOST=api.example.com \
lecoffre/front:latest
```
## Déploiement Kubernetes
### Via Vault (recommandé)
```yaml
annotations:
  vault.hashicorp.com/agent-inject: "true"
  vault.hashicorp.com/agent-inject-secret-envs: secret/data/lecoffre-front-stg/config/envs
  vault.hashicorp.com/agent-inject-template-envs: |
    {{ with secret "secret/data/lecoffre-front-stg/config/envs" }}
    {{ range $k, $v := .Data.data }}
    export {{ $k }}="{{ $v }}"
    {{ end }}
    {{ end }}
```
### Via ConfigMap
```yaml
apiVersion: v1
kind: ConfigMap
metadata:
  name: lecoffre-front-config
data:
  NEXT_PUBLIC_BACK_API_PROTOCOL: "https://"
  NEXT_PUBLIC_BACK_API_HOST: "api.example.com"
  # ... autres variables
```
## Validation et tests
### Vérification des variables requises
```typescript
const requiredVars = [
  'NEXT_PUBLIC_BACK_API_PROTOCOL',
  'NEXT_PUBLIC_BACK_API_HOST',
  'NEXT_PUBLIC_BACK_API_PORT',
  'NEXT_PUBLIC_BACK_API_ROOT_URL',
  'NEXT_PUBLIC_BACK_API_VERSION'
];
for (const varName of requiredVars) {
  if (!publicRuntimeConfig[varName]) {
    throw new Error(`${varName} is not defined in environment variables`);
  }
}
```
### Tests d'environnement
```bash
# Vérifier les variables définies
env | grep NEXT_PUBLIC_
# Tester la construction d'URL
curl -I $(echo $NEXT_PUBLIC_BACK_API_PROTOCOL$NEXT_PUBLIC_BACK_API_HOST:$NEXT_PUBLIC_BACK_API_PORT$NEXT_PUBLIC_BACK_API_ROOT_URL$NEXT_PUBLIC_BACK_API_VERSION/health)
```
## Bonnes pratiques
1. **Sécurité**: Ne jamais exposer de secrets dans les variables `NEXT_PUBLIC_*`
2. **Validation**: Toujours valider la présence des variables requises
3. **Documentation**: Maintenir la documentation des variables
4. **Tests**: Tester avec différentes configurations d'environnement
5. **Fallbacks**: Prévoir des valeurs par défaut quand possible (voir l'esquisse ci-dessous)
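Esquisse illustrant les points 2 et 5 ci-dessus (validation et valeur de repli) ; côté client, Next.js ne remplace que les références statiques à `process.env.NEXT_PUBLIC_*`, cette forme dynamique n'est donc valable que côté serveur ou au build. Le nom de la fonction et l'exemple d'appel sont des illustrations.

```typescript
// Lecture d'une variable publique avec valeur de repli et validation.
export function readPublicVar(name: string, fallback?: string): string {
  const value = process.env[name] ?? fallback;
  if (value === undefined || value === "") {
    throw new Error(`${name} is not defined in environment variables`);
  }
  return value;
}

// const apiHost = readPublicVar("NEXT_PUBLIC_BACK_API_HOST", "dev4.4nkweb.com");
```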
## Dépannage
### Variables non définies
```bash
# Vérifier les variables d'environnement
docker exec -it <container> env | grep NEXT_PUBLIC_
# Vérifier la configuration Vault
vault kv get secret/data/lecoffre-front-stg/config/envs
```
### URLs malformées
```bash
# Tester la construction d'URL
node -e "
const config = {
protocol: process.env.NEXT_PUBLIC_BACK_API_PROTOCOL,
host: process.env.NEXT_PUBLIC_BACK_API_HOST,
port: process.env.NEXT_PUBLIC_BACK_API_PORT,
root: process.env.NEXT_PUBLIC_BACK_API_ROOT_URL,
version: process.env.NEXT_PUBLIC_BACK_API_VERSION
};
console.log(\`\${config.protocol}\${config.host}:\${config.port}\${config.root}\${config.version}\`);
"
```
---
*Documentation mise à jour le $(date) - Variables d'environnement lecoffre-front*

81
docs/analyse.md Normal file
View File

@ -0,0 +1,81 @@
### Objet
Analyse synthétique de `lecoffre-front` (Next.js) : périmètre, dépendances, configuration, intégrations et points d'attention.
### Périmètre et stack
- **Framework**: Next.js 14 (React 18), rendu côté serveur désactivable selon pages
- **Langage**: TypeScript
- **Styles**: SCSS + MUI (@mui/material), Emotion
- **Bundle**: `output: 'standalone'`, `basePath: '/lecoffre'`
### Arborescence notable
- **`src/front/`**: API client (services, entités, SDK) et composants UI (DesignSystem, Layouts, Elements)
- **`pages/`**: routes Next (tableau client, gestion utilisateurs, dossiers, documents, souscriptions)
- **`src/common/Api/LeCoffreApi`**: modules Admin/Customer/Notary/SuperAdmin et `sdk`
- **`src/front/Stores/`**: stores locaux (User, Customer, Window, Toasts)
- **`src/front/Services/`**: services utilitaires (JWT, PDF, Watermark, Crypto, Cookie)
### Configuration et variables
- **`next.config.js`**: publie `NEXT_PUBLIC_*` aux clients et serveur; `reactStrictMode: false`; `ignoreBuildErrors: true`
- **BasePath**: `/lecoffre` (impacte le routage et les assets)
- **Intégrations**: Hotjar, GTM, Docaposte, 4NK (iframe/URL), Idnot (OAuth)
### Dépendances clés
- **UI**: `@mui/material`, `@emotion/*`, `react-select`, `react-toastify`
- **Utilitaires**: `jwt-decode`, `file-saver`, `jszip`, `pdf-lib`, `sass`, `sharp`
- **Ressources**: `le-coffre-resources` (dépôt Git 4NK)
### Intégrations externes (indiciaires)
- **Back**: via `NEXT_PUBLIC_BACK_API_*` et `NEXT_PUBLIC_API_URL`
- **Idnot**: endpoints OAuth (authorize, base_url, redirect_uri)
- **Docaposte**: `NEXT_PUBLIC_DOCAPOSTE_API_URL`
### Points d'attention
- **TypeScript**: `ignoreBuildErrors: true` masque des erreurs de typage
- **BasePath**: vérifier la cohérence côté Nginx et lors du déploiement
- **Ressources Git**: accès SSH requis pour `le-coffre-resources`
- **Sécurité**: exposition de multiples `NEXT_PUBLIC_*` (vérifier qu'aucun secret n'est divulgué)
### Tests et qualité
- **Scripts**: `build` sans lint, `lint` séparé, `format` ciblé `src/`
- **`tests/`**: présent (à enrichir avec scénarios e2e/smoke sur pages principales)
### Déploiement
- **Dockerfile**: présent (build Next standalone)
- **Intégration**: orchestré via `lecoffre_node` (voir analyse dédiée)
## Scripts npm, build et configuration
### Scripts
- **dev**: `next dev`
- **build**: `NEXT_TELEMETRY_DISABLED=1 next build --no-lint`
- **start**: `next start`
- **lint**: `next lint`
- **format**: `prettier --write src`
### next.config.js
- `output: 'standalone'`, `basePath: '/lecoffre'`
- `typescript.ignoreBuildErrors: true` (risque: masquage d'erreurs)
- Exposition de nombreuses variables `NEXT_PUBLIC_*` via `env`, `publicRuntimeConfig` et `serverRuntimeConfig`
### Docker (multi-stage)
- Étapes: `deps` (install), `development` (dev), `builder` (build), `int-dev` (runtime)
- BuildKit SSH pour `le-coffre-resources` (clé non copiée dans l'image)
- Port runtime 3000; utilisateur non-root `lecoffreuser`
### Dépendances (extraits)
- UI: `@mui/material`, `@emotion/*`, `react-select`, `react-toastify`
- Utilitaires: `jwt-decode`, `file-saver`, `jszip`, `pdf-lib`, `sass`, `sharp`
- Ressources: `le-coffre-resources` (SSH Git 4NK)
- Versions Node/Next: Node 19-alpine; Next `^14.2.3`
## Risques et recommandations
- Désactiver `ignoreBuildErrors` et corriger les erreurs TypeScript avant build
- Envisager Node LTS (20/22) pour les images
- Vérifier qu'aucune info sensible n'est exposée via `NEXT_PUBLIC_*`
- Assurer la cohérence `basePath` `/lecoffre` avec Nginx et les liens internes
- Introduire du code splitting ciblé (`React.lazy`/`Suspense`) sur pages lourdes
- Centraliser l'état (Redux/Context) pour limiter le prop drilling dans `src/front`
## Changements IdNot
- L'authentification IdNot appelle désormais le backend en POST avec le code d'autorisation dans le corps JSON sur `/api/v1/idnot/auth`.
- Ce changement évite des erreurs en reverse proxy liées à des segments d'URL très longs (voir l'esquisse ci-dessous).
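Esquisse d'appel correspondant à ce flux ; l'URL de base et le champ de réponse sont des hypothèses d'illustration, pas le code du projet.

```typescript
// POST du code d'autorisation IdNot dans le corps JSON vers /api/v1/idnot/auth.
export async function exchangeIdNotCode(code: string): Promise<string> {
  const baseUrl = process.env.NEXT_PUBLIC_API_URL ?? ""; // base d'URL supposée
  const response = await fetch(`${baseUrl}/api/v1/idnot/auth`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ code }),
  });
  if (!response.ok) {
    throw new Error(`IdNot auth failed: ${response.status}`);
  }
  const data = await response.json();
  return data.token; // champ de réponse supposé
}
```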

115
docs/ci.md Normal file
View File

@ -0,0 +1,115 @@
### CI/CD — Fonctionnement observé et architecture de déploiement
Cette documentation décrit le pipeline CI/CD tel qu'il peut être déduit des artefacts présents dans le dépôt: `Dockerfile`, `package.json`, et les manifestes Kubernetes rendus dans `temp.yaml`. Aucune configuration de pipeline explicite n'est présente dans le dépôt (pas de `.gitea/`, `.github/workflows/`, `.gitlab-ci.yml`, `.drone.yml`). Le flux ci-dessous s'appuie donc sur ces éléments pour décrire le fonctionnement attendu.
### Portée
- **Build applicatif**: Next.js (Node 19-alpine) avec dépendance privée `le-coffre-resources` via SSH.
- **Image Docker**: construction multi-étapes, publication vers le registre Docker hébergé sur `git.4nkweb.com` (accès via clés SSH).
- **Déploiement Kubernetes**: namespace `lecoffre`, intégration Vault Agent pour l'injection d'ENV, `ExternalSecret` pour le secret de pull Docker, `Ingress` TLS via cert-manager, ressources de `Deployment`/`Service`.
- **Variables front ajoutées**: `NEXT_PUBLIC_4NK_IFRAME_URL` pour distinguer l'URL complète de l'iframe de son origine (`NEXT_PUBLIC_4NK_URL`) ; voir l'esquisse ci-dessous.
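Esquisse de la distinction entre les deux variables : `NEXT_PUBLIC_4NK_IFRAME_URL` fournit l'URL complète chargée dans l'iframe, tandis que le `targetOrigin` des `postMessage` est dérivé de `NEXT_PUBLIC_4NK_URL`. Les valeurs de repli sont données à titre d'exemple.

```typescript
// URL complète de l'iframe vs origine utilisée comme targetOrigin des postMessage.
const iframeSrc = process.env.NEXT_PUBLIC_4NK_IFRAME_URL ?? "https://dev4.4nkweb.com";
const targetOrigin = new URL(process.env.NEXT_PUBLIC_4NK_URL ?? "https://dev4.4nkweb.com").origin;

// iframe.contentWindow?.postMessage(message, targetOrigin);
```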
### Chaîne de build
- **Dépendances**
- `package.json` indique Next.js 14, TypeScript 4.9, ESLint 8.36, etc.
- La dépendance privée `le-coffre-resources` est récupérée depuis `git.4nkweb.com` via SSH (`git+ssh`).
- **Dockerfile** (multi-étapes, Node 19-alpine)
- Étape `deps`: installation des dépendances avec `npm install` en utilisant BuildKit et le forward d'agent SSH pour accéder au dépôt privé.
- Étape `development`: copie du code, exécution sous un utilisateur non-root, commande par défaut `npm run dev` (pour le développement local). Pour la prod, l'image utilisée en cluster exécute `npm run start` (cf. manifeste).
- **Build Next.js**
- Script `build`: `NEXT_TELEMETRY_DISABLED=1 next build --no-lint`
- Script `start`: `next start`
- Le lint n'est pas bloquant au build (flag `--no-lint`).
### Image, registre et version
- **Registre**: Docker registry interne sur `git.4nkweb.com`.
- **Tagging**: contrôlé par la CI via le message de commit (préfixe `ci: docker_tag=<valeur>`), sinon fallback `dev-test`. La branche peut être utilisée comme tag par défaut selon la CI. Recommandation: utiliser un tag non versionné `int-dev` (voir l'esquisse ci-dessous).
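Esquisse de la règle de taggage décrite ci-dessus ; l'expression régulière et le nom de la fonction sont donnés à titre d'illustration.

```typescript
// Extrait le tag Docker d'un message de commit "ci: docker_tag=<valeur> ...",
// avec repli sur "dev-test" sinon.
export function dockerTagFromCommitMessage(message: string): string {
  const match = message.match(/^ci:\s*docker_tag=(\S+)/);
  return match ? match[1] : "dev-test";
}

// dockerTagFromCommitMessage("ci: docker_tag=int-dev - Trigger CI build") === "int-dev"
```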
### Déploiement Kubernetes (extrait de `temp.yaml`)
- **Namespace**: `lecoffre`
- **ServiceAccount**: `lecoffre-front-sa` avec `Secret` token associé.
- **ExternalSecret**: création de `imagePullSecret` à partir de Vault via `external-secrets.io` en lisant `secret/data/lecoffre-front-stg/config/dockerpullsecret` (clé `.dockerconfigjson`).
- **Deployment**: `apps/v1` nommé `lecoffre-front` avec:
- `image`: `rg.fr-par.scw.cloud/lecoffre/front:v0.1.9`
- `imagePullPolicy`: `Always`
- `resources`: `requests` (cpu 200m, ram 1Gi), `limits` (ram 2Gi)
- **Vault Agent Injector**: annotations pour injecter des variables d'environnement depuis `secret/data/lecoffre-front-stg/config/envs` en exportant chaque paire `clé=valeur` dans `/vault/secrets/envs`.
- **Commande de démarrage**: `['sh','-c', '. /vault/secrets/envs && npm run start']`
- **Service**: type ClusterIP exposant le port 80 vers le `targetPort` 3000 du conteneur Next.js.
- **Ingress**: classe `nginx` avec TLS géré par `cert-manager` (ClusterIssuer `letsencrypt-prod`) pour `app.stg.lecoffre.smart-chain.fr`.
### Expected CI/CD flow (inferred)
1. **Checkout and preparation**
   - Fetch the code and configure the SSH agent (access to `git.4nkweb.com`).
2. **Dependency installation**
   - `npm install` with BuildKit (`--mount=type=ssh`) for the private dependency.
3. **Application build**
   - `npm run build` (disables telemetry and blocking lint).
4. **Image build**
   - Performed by the CI (BuildKit + SSH agent forwarding) after a `git push` to `git.4nkweb.com`.
   - Tag determined by the commit message and/or the branch.
5. **Push to the registry**
   - Performed by the CI towards the `git.4nkweb.com` registry.
6. **Kubernetes deployment**
   - Manifests applied (or Helm rendered) in the `lecoffre` namespace.
   - Pull secrets are provided via an `ExternalSecret` connected to Vault.
   - At runtime, Vault Agent injects the required environment variables before `npm run start`.
### Security and secrets
- **Build**: SSH agent forwarding avoids writing the private key into the image.
- **Runtime**: no sensitive variable is stored in the image; they are injected at runtime by Vault.
- **Image pull**: the Docker config (`.dockerconfigjson`) is provided by the `ExternalSecret` from Vault.
### Required CI secrets
- **USER**: identifier of the CI account on `git.4nkweb.com` with the required rights (repository read, image push to the registry).
- **TOKEN**: access token (API/registry) associated with `USER`. Used by the CI to:
  - Authenticate git/HTTP operations towards `git.4nkweb.com` (if needed)
  - Authenticate the `docker login` to the Gitea registry if the CI does not rely on built-in credentials.
Notes:
- Prefer tokens with a restricted scope, regenerated periodically.
- Store `USER` and `TOKEN` in the CI secret manager and never commit them.
### Points to confirm
- Which CI tool is used (Gitea Actions, Woodpecker/Drone, GitLab CI, other) and whether pipeline files are hosted elsewhere.
- Image tag naming rules and version governance (`vX.Y.Z`, environment tags).
- Deployment strategy (exact Helm chart source, apply commands, multi-environment handling: dev/stg/prod).
- Lint/test policy before build (currently `--no-lint` in the Next.js build).
- How `NEXT_PUBLIC_4NK_URL` and `NEXT_PUBLIC_4NK_IFRAME_URL` are passed at the Docker build stage (ARG/ENV) in the CI.
### Recommended good practices
- Enable a lint and unit-test job before the image build.
- Sign the images (Cosign) and enable SCA/container scans.
- Manage tags and the changelog explicitly in the CI.
- Deploy via a versioned Helm chart, with per-environment values (`values.{env}.yaml`).
### Validating the "int-dev" Docker image (variable integration)
- Goal: verify that the `NEXT_PUBLIC_*` variables are properly injected into the image built by the CI.
- Command:
```bash
docker pull git.4nkweb.com/4nk/lecoffre-front:int-dev
docker run --rm git.4nkweb.com/4nk/lecoffre-front:int-dev sh -lc "env | grep '^NEXT_PUBLIC_' | sort"
```
- Key expectations:
  - `NEXT_PUBLIC_4NK_URL` and `NEXT_PUBLIC_4NK_IFRAME_URL` must be defined.
  - The API URLs (`NEXT_PUBLIC_API_URL`, `NEXT_PUBLIC_BACK_API_*`) must reflect the environment.

58
docs/ext.md Normal file

@ -0,0 +1,58 @@
### "int-dev" Docker image (Next.js): environment variables and publication
This image runs the Next.js app in production mode via `next start` and reads its configuration from the exposed environment variables (`NEXT_PUBLIC_` prefix). The goal is to avoid any dependency on `localhost` in API calls: URLs are built dynamically on the front end from these variables.
#### Supported environment variables
- **NEXT_PUBLIC_BACK_API_PROTOCOL**: API protocol (e.g. `https://`)
- **NEXT_PUBLIC_BACK_API_HOST**: API host (e.g. `api.example.com`)
- **NEXT_PUBLIC_BACK_API_PORT**: API port (e.g. `443`)
- **NEXT_PUBLIC_BACK_API_ROOT_URL**: root path (e.g. `/` or `/api`)
- **NEXT_PUBLIC_BACK_API_VERSION**: API version (e.g. `v1`)
- **NEXT_PUBLIC_FRONT_APP_HOST**: public base of the front end (e.g. `https://app.example.com`)
- **NEXT_PUBLIC_FRONT_APP_PORT**: front-end port if needed (e.g. `443`)
- **NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT**
- **NEXT_PUBLIC_IDNOT_CLIENT_ID**
- **NEXT_PUBLIC_IDNOT_BASE_URL**
- **NEXT_PUBLIC_DOCAPOSTE_API_URL**
- **NEXT_PUBLIC_HOTJAR_SITE_ID**
- **NEXT_PUBLIC_HOTJAR_VERSION**
- **NEXT_PUBLIC_4NK_URL**
- **NEXT_PUBLIC_API_URL**
- **NEXT_PUBLIC_DEFAULT_VALIDATOR_ID**
- **NEXT_PUBLIC_DEFAULT_STORAGE_URLS** (comma-separated list)
Notes:
- The front end initialises its variables via `next.config.js` and `_app.tsx`, which feeds `FrontendVariables`. API calls use these values and do not rely on `localhost` (see the sketch below).
- The values must be passed to the container at runtime (`docker run -e ...` or a K8s manifest via `env:`/`secretRef`).
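As an illustration, a minimal sketch of how the back-end base URL can be assembled from these variables; the helper name is illustrative, while the repository's own code reads the same values through `publicRuntimeConfig` (see `DatabaseService` further below).
```ts
import getConfig from "next/config";

const { publicRuntimeConfig } = getConfig();

// Illustrative helper: builds the back-end base URL from the NEXT_PUBLIC_BACK_API_* values,
// e.g. "https://" + "api.example.com" + ":" + "443" + "/" + "v1" -> "https://api.example.com:443/v1".
export function buildBackApiBaseUrl(): string {
  const {
    NEXT_PUBLIC_BACK_API_PROTOCOL: protocol,
    NEXT_PUBLIC_BACK_API_HOST: host,
    NEXT_PUBLIC_BACK_API_PORT: port,
    NEXT_PUBLIC_BACK_API_ROOT_URL: rootUrl,
    NEXT_PUBLIC_BACK_API_VERSION: version,
  } = publicRuntimeConfig;
  return `${protocol}${host}:${port}${rootUrl}${version}`;
}
```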
#### Building the image ("int-dev" target)
Prerequisites: Docker BuildKit enabled and a working SSH agent to clone `le-coffre-resources` from `git.4nkweb.com`.
1. `cd /home/debian/lecoffre-front`
2. `export DOCKER_BUILDKIT=1`
3. `docker build --target int-dev --ssh default -t lecoffre/front:int-dev -f /home/debian/lecoffre-front/Dockerfile /home/debian/lecoffre-front`
#### Local run (validation)
Minimal example (adjust the values):
```bash
docker run --rm -p 3000:3000 \
-e NEXT_PUBLIC_BACK_API_PROTOCOL=https:// \
-e NEXT_PUBLIC_BACK_API_HOST=api.example.com \
-e NEXT_PUBLIC_BACK_API_PORT=443 \
-e NEXT_PUBLIC_BACK_API_ROOT_URL=/ \
-e NEXT_PUBLIC_BACK_API_VERSION=v1 \
-e NEXT_PUBLIC_FRONT_APP_HOST=https://app.example.com \
-e NEXT_PUBLIC_4NK_URL=https://app.example.com \
lecoffre/front:int-dev
```
#### Publication via the CI (git.4nkweb.com)
- The image push is performed by the `git.4nkweb.com` CI after a `git push`.
- Set the Docker tag in the commit message: `ci: docker_tag=int-dev` (CI fallback: `dev-test`).
- In the absence of an override, the CI may use the branch name as the tag.


@ -0,0 +1,7 @@
{
"folders": [
{
"path": "."
}
]
}

5
next-env.d.ts vendored Normal file

@ -0,0 +1,5 @@
/// <reference types="next" />
/// <reference types="next/image-types/global" />
// NOTE: This file should not be edited
// see https://nextjs.org/docs/pages/building-your-application/configuring/typescript for more information.


@ -2,10 +2,16 @@
const nextConfig = {
reactStrictMode: false,
output: 'standalone',
basePath: '/lecoffre',
typescript: {
ignoreBuildErrors: true,
},
publicRuntimeConfig: {
// Will be available on both server and client
NEXT_PUBLIC_BACK_API_PROTOCOL: process.env.NEXT_PUBLIC_BACK_API_PROTOCOL,
NEXT_PUBLIC_BACK_API_HOST: process.env.NEXT_PUBLIC_BACK_API_HOST,
NEXT_PUBLIC_BACK_API_PORT: process.env.NEXT_PUBLIC_BACK_API_PORT,
NEXT_PUBLIC_BACK_API_ROOT_URL: process.env.NEXT_PUBLIC_BACK_API_ROOT_URL,
NEXT_PUBLIC_BACK_API_VERSION: process.env.NEXT_PUBLIC_BACK_API_VERSION,
NEXT_PUBLIC_FRONT_APP_HOST: process.env.NEXT_PUBLIC_FRONT_APP_HOST,
@ -13,14 +19,19 @@ const nextConfig = {
NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT: process.env.NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT,
NEXT_PUBLIC_IDNOT_CLIENT_ID: process.env.NEXT_PUBLIC_IDNOT_CLIENT_ID,
NEXT_PUBLIC_IDNOT_BASE_URL: process.env.NEXT_PUBLIC_IDNOT_BASE_URL,
NEXT_PUBLIC_IDNOT_REDIRECT_URI: process.env.NEXT_PUBLIC_IDNOT_REDIRECT_URI,
NEXT_PUBLIC_DOCAPOSTE_API_URL: process.env.NEXT_PUBLIC_DOCAPOSTE_API_URL,
NEXT_PUBLIC_HOTJAR_SITE_ID: process.env.NEXT_PUBLIC_HOTJAR_SITE_ID,
NEXT_PUBLIC_HOTJAR_VERSION: process.env.NEXT_PUBLIC_HOTJAR_VERSION,
NEXT_PUBLIC_4NK_URL: process.env.NEXT_PUBLIC_4NK_URL,
NEXT_PUBLIC_4NK_IFRAME_URL: process.env.NEXT_PUBLIC_4NK_IFRAME_URL,
NEXT_PUBLIC_API_URL: process.env.NEXT_PUBLIC_API_URL,
NEXT_PUBLIC_DEFAULT_VALIDATOR_ID: process.env.NEXT_PUBLIC_DEFAULT_VALIDATOR_ID,
NEXT_PUBLIC_DEFAULT_STORAGE_URLS: process.env.NEXT_PUBLIC_DEFAULT_STORAGE_URLS,
},
serverRuntimeConfig: {
NEXT_PUBLIC_BACK_API_PROTOCOL: process.env.NEXT_PUBLIC_BACK_API_PROTOCOL,
NEXT_PUBLIC_BACK_API_HOST: process.env.NEXT_PUBLIC_BACK_API_HOST,
NEXT_PUBLIC_BACK_API_PORT: process.env.NEXT_PUBLIC_BACK_API_PORT,
NEXT_PUBLIC_BACK_API_ROOT_URL: process.env.NEXT_PUBLIC_BACK_API_ROOT_URL,
NEXT_PUBLIC_BACK_API_VERSION: process.env.NEXT_PUBLIC_BACK_API_VERSION,
NEXT_PUBLIC_FRONT_APP_HOST: process.env.NEXT_PUBLIC_FRONT_APP_HOST,
@ -28,14 +39,19 @@ const nextConfig = {
NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT: process.env.NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT,
NEXT_PUBLIC_IDNOT_CLIENT_ID: process.env.NEXT_PUBLIC_IDNOT_CLIENT_ID,
NEXT_PUBLIC_IDNOT_BASE_URL: process.env.NEXT_PUBLIC_IDNOT_BASE_URL,
NEXT_PUBLIC_IDNOT_REDIRECT_URI: process.env.NEXT_PUBLIC_IDNOT_REDIRECT_URI,
NEXT_PUBLIC_DOCAPOSTE_API_URL: process.env.NEXT_PUBLIC_DOCAPOSTE_API_URL,
NEXT_PUBLIC_HOTJAR_SITE_ID: process.env.NEXT_PUBLIC_HOTJAR_SITE_ID,
NEXT_PUBLIC_HOTJAR_VERSION: process.env.NEXT_PUBLIC_HOTJAR_VERSION,
NEXT_PUBLIC_4NK_URL: process.env.NEXT_PUBLIC_4NK_URL,
NEXT_PUBLIC_4NK_IFRAME_URL: process.env.NEXT_PUBLIC_4NK_IFRAME_URL,
NEXT_PUBLIC_API_URL: process.env.NEXT_PUBLIC_API_URL,
NEXT_PUBLIC_DEFAULT_VALIDATOR_ID: process.env.NEXT_PUBLIC_DEFAULT_VALIDATOR_ID,
NEXT_PUBLIC_DEFAULT_STORAGE_URLS: process.env.NEXT_PUBLIC_DEFAULT_STORAGE_URLS,
},
env: {
NEXT_PUBLIC_BACK_API_PROTOCOL: process.env.NEXT_PUBLIC_BACK_API_PROTOCOL,
NEXT_PUBLIC_BACK_API_HOST: process.env.NEXT_PUBLIC_BACK_API_HOST,
NEXT_PUBLIC_BACK_API_PORT: process.env.NEXT_PUBLIC_BACK_API_PORT,
NEXT_PUBLIC_BACK_API_ROOT_URL: process.env.NEXT_PUBLIC_BACK_API_ROOT_URL,
NEXT_PUBLIC_BACK_API_VERSION: process.env.NEXT_PUBLIC_BACK_API_VERSION,
NEXT_PUBLIC_FRONT_APP_HOST: process.env.NEXT_PUBLIC_FRONT_APP_HOST,
@ -43,9 +59,13 @@ const nextConfig = {
NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT: process.env.NEXT_PUBLIC_IDNOT_AUTHORIZE_ENDPOINT,
NEXT_PUBLIC_IDNOT_CLIENT_ID: process.env.NEXT_PUBLIC_IDNOT_CLIENT_ID,
NEXT_PUBLIC_IDNOT_BASE_URL: process.env.NEXT_PUBLIC_IDNOT_BASE_URL,
NEXT_PUBLIC_IDNOT_REDIRECT_URI: process.env.NEXT_PUBLIC_IDNOT_REDIRECT_URI,
NEXT_PUBLIC_DOCAPOSTE_API_URL: process.env.NEXT_PUBLIC_DOCAPOSTE_API_URL,
NEXT_PUBLIC_HOTJAR_SITE_ID: process.env.NEXT_PUBLIC_HOTJAR_SITE_ID,
NEXT_PUBLIC_HOTJAR_VERSION: process.env.NEXT_PUBLIC_HOTJAR_VERSION,
NEXT_PUBLIC_4NK_URL: process.env.NEXT_PUBLIC_4NK_URL,
NEXT_PUBLIC_4NK_IFRAME_URL: process.env.NEXT_PUBLIC_4NK_IFRAME_URL,
NEXT_PUBLIC_API_URL: process.env.NEXT_PUBLIC_API_URL,
NEXT_PUBLIC_DEFAULT_VALIDATOR_ID: process.env.NEXT_PUBLIC_DEFAULT_VALIDATOR_ID,
NEXT_PUBLIC_DEFAULT_STORAGE_URLS: process.env.NEXT_PUBLIC_DEFAULT_STORAGE_URLS,
},
// webpack: config => {

4077
package-lock.json generated

File diff suppressed because it is too large


@ -1,10 +1,10 @@
{
"name": "lecoffre-front",
"version": "0.1.0",
"version": "0.1.6",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"build": "NEXT_TELEMETRY_DISABLED=1 next build --no-lint",
"start": "next start",
"lint": "next lint",
"format": "prettier --write src"
@ -32,11 +32,12 @@
"jwt-decode": "^3.1.2",
"le-coffre-resources": "git+https://git.4nkweb.com/4nk/lecoffre-ressources.git#v2.167",
"next": "^14.2.3",
"pdf-lib": "^1.17.1",
"prettier": "^2.8.7",
"react": "18.2.0",
"react-dom": "18.2.0",
"react-gtm-module": "^2.0.11",
"react-hotjar": "^6.3.1",
"react-select": "^5.7.2",
"react-toastify": "^9.1.3",
"sass": "^1.59.2",


@ -0,0 +1,63 @@
import MessageBus from 'src/sdk/MessageBus';
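// Shared base class for the front-end services below: it exposes the MessageBus singleton
// and a small per-store cache kept in sessionStorage, where each store key maps
// processId -> { process, timestamp } and entries older than CACHE_TTL (60 minutes)
// are ignored on read.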
export default abstract class AbstractService {
protected static readonly messageBus: MessageBus = MessageBus.getInstance();
private static readonly CACHE_TTL = 60 * 60 * 1000; // 60 minutes cache TTL
protected constructor() { }
protected static setItem(key: string, processId: string, process: any): void {
const cache: Record<string, {process: any, timestamp: number}> =
JSON.parse(sessionStorage.getItem(key) || '{}');
cache[processId] = {
process: process, // we overwrite existing process
timestamp: Date.now()
};
sessionStorage.setItem(key, JSON.stringify(cache));
}
protected static getItem(key: string, processId: string): any | null {
const cache: Record<string, {process: any, timestamp: number}> =
JSON.parse(sessionStorage.getItem(key) || '{}');
const item = cache[processId];
if (!item) {
return null;
}
const now: number = Date.now();
if ((now - item.timestamp) < this.CACHE_TTL) {
return { process: item.process, timestamp: item.timestamp };
}
return null;
}
protected static getItems(key: string): Record<string, any> {
const cache: Record<string, {process: any, timestamp: number}> =
JSON.parse(sessionStorage.getItem(key) || '{}');
const now: number = Date.now();
const validItems: Record<string, any> = {};
for (const [processId, item] of Object.entries(cache)) {
if (now - item.timestamp < this.CACHE_TTL) {
validItems[processId] = item.process;
}
}
return validItems;
}
protected static removeItem(key: string, processId: string): void {
const cache: Record<string, {process: any, timestamp: number}> =
JSON.parse(sessionStorage.getItem(key) || '{}');
delete cache[processId];
sessionStorage.setItem(key, JSON.stringify(cache));
}
}


@ -0,0 +1,191 @@
import { v4 as uuidv4 } from 'uuid';
import User from 'src/sdk/User';
import AbstractService from './AbstractService';
import { DEFAULT_STORAGE_URLS } from '@Front/Config/AppConstants';
export default class CollaboratorService extends AbstractService {
private constructor() {
super();
}
public static async createCollaborator(collaboratorData: any, validatorId: string): Promise<{ processId: string, processData: any }> {
const ownerId: string = User.getInstance().getPairingId()!;
const processData: any = {
uid: uuidv4(),
utype: 'collaborator',
isDeleted: 'false',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
...collaboratorData
};
const privateFields: string[] = Object.keys(processData);
const allFields: string[] = [...privateFields, 'roles'];
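// Role layout for the new process: the creator and the validator are members of every role;
// only 'owner' carries a (low-quorum) validation rule over all fields and points to the
// default storage URLs, while 'demiurge' and 'apophis' have no validation rules here.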
const roles: any = {
demiurge: {
members: [ownerId, validatorId],
validation_rules: [],
storages: []
},
owner: {
members: [ownerId, validatorId],
validation_rules: [
{
quorum: 0.01,
fields: allFields,
min_sig_member: 0.01,
},
],
storages: [...DEFAULT_STORAGE_URLS]
},
apophis: {
members: [ownerId, validatorId],
validation_rules: [],
storages: []
}
};
try {
const processCreated = await this.messageBus.createProcess(processData, privateFields, roles);
await this.messageBus.notifyUpdate(processCreated.processId, processCreated.process.states[0].state_id);
await this.messageBus.validateState(processCreated.processId, processCreated.process.states[0].state_id);
const finalProcessData = await this.messageBus.getProcessData(processCreated.processId);
return { processId: processCreated.processId, processData: finalProcessData };
} catch (error) {
throw error;
}
}
public static getCollaborators(callback: (processes: Record<string, any>) => void): void {
const items: Record<string, any> = this.getItems('_collaborators_');
if (Object.keys(items).length > 0) {
setTimeout(() => callback(items), 0);
}
this.messageBus.getProcessesDecoded((processId: string, data: any) =>
data['utype'] &&
data['utype'] === 'collaborator' &&
data['isDeleted'] &&
data['isDeleted'] === 'false' &&
!Object.keys(items).includes(processId)
).then(async (processesData: Record<string, any>) => {
if (Object.keys(processesData).length === 0) {
// If there are no new processes and no cached items, return an empty record
if (Object.keys(items).length === 0) {
callback({});
}
return;
}
// Single callback with all completed items
callback(items);
}).catch(error => {
console.error('Failed to fetch collaborators:', error);
// Return cached data if available, otherwise empty array
callback(items);
});
}
public static getCollaboratorsBy(whereClause: { [path: string]: any }): Promise<Record<string, any>> {
return new Promise<Record<string, any>>((resolve: (collaborators: Record<string, any>) => void) => {
this.getCollaborators((processes: Record<string, any>) => {
if (Object.keys(processes).length === 0) {
resolve({});
} else {
const filteredEntries = Object.entries(processes).filter(([processId, process]) => {
for (const path in whereClause) {
const paths: string[] = path.split('.');
let value: any = process;
value['processId'] = processId;
for (let i = 0; i < paths.length; i++) {
const currentPath = paths[i];
if (!currentPath || value === undefined || value === null) {
break;
}
value = value[currentPath];
}
if (value !== whereClause[path]) {
return false;
}
}
return true;
});
// Convert filtered entries back to a Record
const filteredProcesses: Record<string, any> = {};
filteredEntries.forEach(([processId, process]) => {
filteredProcesses[processId] = process;
});
resolve(filteredProcesses);
}
});
});
}
public static getCollaboratorBy(whereClause: { [path: string]: any }): Promise<any | null> {
return new Promise<any | null>((resolve: (collaborator: any | null) => void) => {
this.getCollaborators((processes: Record<string, any>) => {
const processArray = Object.values(processes);
if (processArray.length === 0) {
resolve(null);
} else {
resolve(processArray.find((process: any) => {
const collaborator: any = process.processData;
for (const path in whereClause) {
const paths: string[] = path.split('.');
let value: any = collaborator;
for (let i = 0; i < paths.length; i++) {
const currentPath = paths[i];
if (!currentPath || value === undefined || value === null) {
break;
}
value = value[currentPath];
}
if (value !== whereClause[path]) {
return false;
}
}
return true;
}));
}
});
});
}
public static async updateCollaborator(processId: string, newData: any): Promise<void> {
try {
const processUpdated = await this.messageBus.updateProcess(
processId,
{ updated_at: new Date().toISOString(), ...newData },
[],
null
);
const newStateId: string = processUpdated.diffs[0]?.state_id;
await this.messageBus.notifyUpdate(processId, newStateId);
await this.messageBus.validateState(processId, newStateId);
const processData = await this.messageBus.getProcessData(processId);
// Update cache
this.setItem('_collaborators_', processId, processData);
} catch (error) {
console.error('Failed to update collaborator:', error);
throw error; // Re-throw to allow caller to handle
}
}
}


@ -0,0 +1,111 @@
import { v4 as uuidv4 } from 'uuid';
import User from 'src/sdk/User';
import AbstractService from './AbstractService';
import { DEFAULT_STORAGE_URLS } from '@Front/Config/AppConstants';
export default class CustomerService extends AbstractService {
private constructor() {
super();
}
public static async createCustomer(customerData: any, validatorId: string): Promise<{ processId: string, processData: any }> {
const ownerId: string = User.getInstance().getPairingId()!;
const processData: any = {
uid: uuidv4(),
utype: 'customer',
isDeleted: 'false',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
...customerData
};
const privateFields: string[] = Object.keys(processData);
const allFields: string[] = [...privateFields, 'roles'];
const roles: any = {
demiurge: {
members: [ownerId, validatorId],
validation_rules: [],
storages: []
},
owner: {
members: [ownerId, validatorId],
validation_rules: [
{
quorum: 1,
fields: allFields,
min_sig_member: 1,
},
],
storages: [...DEFAULT_STORAGE_URLS]
},
apophis: {
members: [ownerId, validatorId],
validation_rules: [],
storages: []
}
};
try {
const processCreated = await this.messageBus.createProcess(processData, privateFields, roles);
await this.messageBus.notifyUpdate(processCreated.processId, processCreated.process.states[0].state_id);
await this.messageBus.validateState(processCreated.processId, processCreated.process.states[0].state_id);
const finalProcessData = await this.messageBus.getProcessData(processCreated.processId);
return { processId: processCreated.processId, processData: finalProcessData[processCreated.processId] };
} catch (error) {
throw error;
}
}
public static getCustomers(callback: (processes: Record<string, any>) => void): void {
// Check if we have valid cache
const items: Record<string, any> = this.getItems('_customers_');
if (Object.keys(items).length > 0) {
setTimeout(() => callback(items), 0);
}
this.messageBus.getProcessesDecoded((_processId: string, values: any) => {
return values['utype']
&& values['utype'] === 'customer'
&& values['isDeleted']
&& values['isDeleted'] === 'false';
}).then(async (processes: Record<string, any>) => {
if (Object.keys(processes).length === 0) {
callback(items);
return;
}
const updatedItems: Record<string, any> = { ...items };
for (const [processId, process] of Object.entries(processes)) {
// Update cache
this.setItem('_customers_', processId, process);
updatedItems[processId] = process;
}
callback(updatedItems);
});
}
public static updateCustomer(process: any, newData: any): Promise<void> {
return new Promise<void>((resolve: () => void, reject: (error: string) => void) => {
this.messageBus.updateProcess(process.processId, { updated_at: new Date().toISOString(), ...newData }, [], null).then((processUpdated: any) => {
const newStateId: string = processUpdated.diffs[0]?.state_id;
this.messageBus.notifyUpdate(process.processId, newStateId).then(() => {
this.messageBus.validateState(process.processId, newStateId).then((_stateValidated) => {
// The cache is keyed by processId (see getCustomers), so evict the entry by processId.
this.removeItem('_customers_', process.processId);
resolve();
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
}


@ -0,0 +1,77 @@
import getConfig from 'next/config';
const { publicRuntimeConfig } = getConfig();
export default class DatabaseService {
// Prevent instantiation of this utility class
private constructor() { }
/**
* Fetches the data of a table with pagination
* @param tableName Name of the table to query
* @param page Page number (starts at 1)
* @param limit Number of items per page
* @returns The table data with pagination
*/
public static async getTableData(tableName: string, page: number = 1, limit: number = 10): Promise<any> {
// Parameter checks
if (!tableName) {
throw new Error('Le nom de la table est requis');
}
// Validate the table name (as a safety measure)
const tableNameRegex = /^[a-zA-Z0-9_]+$/;
if (!tableNameRegex.test(tableName)) {
throw new Error('Nom de table invalide');
}
try {
// Build the URL with the pagination parameters
const baseUrl = DatabaseService.buildBaseUrl();
const url = `${baseUrl}/db/${tableName}?page=${page}&limit=${limit}`;
// Call the REST API
const response = await fetch(url, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
},
});
if (!response.ok) {
const errorData = await response.json();
throw new Error(errorData.message || 'Erreur lors de la récupération des données');
}
const data = await response.json();
return data;
} catch (error) {
console.error('Erreur lors de l\'accès à la base de données:', error);
throw error;
}
}
/**
* Builds the base URL from the environment variables
* @returns The API base URL
*/
private static buildBaseUrl(): string {
// Check that the required environment variables are defined
const requiredVars = [
'NEXT_PUBLIC_BACK_API_PROTOCOL',
'NEXT_PUBLIC_BACK_API_HOST',
'NEXT_PUBLIC_BACK_API_PORT',
'NEXT_PUBLIC_BACK_API_ROOT_URL',
'NEXT_PUBLIC_BACK_API_VERSION'
];
for (const varName of requiredVars) {
if (!publicRuntimeConfig[varName]) {
throw new Error(`${varName} is not defined in environment variables`);
}
}
// Build the base URL
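// For example, with PROTOCOL "https://", HOST "api.example.com", PORT "443", ROOT_URL "/" and
// VERSION "v1", the result is "https://api.example.com:443/v1".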
return `${publicRuntimeConfig.NEXT_PUBLIC_BACK_API_PROTOCOL}${publicRuntimeConfig.NEXT_PUBLIC_BACK_API_HOST}:${publicRuntimeConfig.NEXT_PUBLIC_BACK_API_PORT}${publicRuntimeConfig.NEXT_PUBLIC_BACK_API_ROOT_URL}${publicRuntimeConfig.NEXT_PUBLIC_BACK_API_VERSION}`;
}
}


@ -0,0 +1,123 @@
import { v4 as uuidv4 } from 'uuid';
import User from 'src/sdk/User';
import AbstractService from './AbstractService';
import { DEFAULT_STORAGE_URLS } from '@Front/Config/AppConstants';
export default class DeedTypeService extends AbstractService {
private constructor() {
super();
}
public static async createDeedType(deedTypeData: any, validatorId: string): Promise<{ processId: string, processData: any }> {
const ownerId: string = User.getInstance().getPairingId()!;
const processData: any = {
uid: uuidv4(),
utype: 'deedType',
isDeleted: 'false',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
document_types: [],
...deedTypeData
};
const privateFields: string[] = Object.keys(processData);
const allFields: string[] = [...privateFields, 'roles'];
const roles: any = {
demiurge: {
members: [ownerId],
validation_rules: [],
storages: []
},
owner: {
members: [ownerId, validatorId],
validation_rules: [
{
quorum: 0.01,
fields: allFields,
min_sig_member: 0.01,
},
],
storages: [...DEFAULT_STORAGE_URLS]
},
apophis: {
members: [ownerId, validatorId],
validation_rules: [],
storages: []
}
};
try {
const processCreated = await this.messageBus.createProcess(processData, privateFields, roles);
await this.messageBus.notifyUpdate(processCreated.processId, processCreated.process.states[0].state_id);
await this.messageBus.validateState(processCreated.processId, processCreated.process.states[0].state_id);
const finalProcessData = await this.messageBus.getProcessData(processCreated.processId);
return { processId: processCreated.processId, processData: finalProcessData };
} catch (error) {
throw error;
}
}
public static getDeedTypes(callback: (processes: Record<string, any>) => void): void {
// Check if we have valid cache
const items: Record<string, any> = this.getItems('_deed_types_');
if (Object.keys(items).length > 0) {
setTimeout(() => callback(items), 0);
}
this.messageBus.getProcessesDecoded((_processId: string, values: any) => {
return values['utype']
&& values['utype'] === 'deedType'
&& values['isDeleted']
&& values['isDeleted'] === 'false';
}).then(async (processes: Record<string, any>) => {
if (Object.keys(processes).length === 0) {
callback(items);
return;
}
console.log('[DeedTypeService/getDeedTypes] processes', processes);
const updatedItems: Record<string, any> = { ...items };
for (const [processId, process] of Object.entries(processes)) {
// Update cache
this.setItem('_deed_types_', processId, process);
updatedItems[processId] = process;
}
callback(updatedItems);
});
}
public static async updateDeedType(processId: string, newData: any): Promise<void> {
try {
const processUpdated = await this.messageBus.updateProcess(
processId,
{ updated_at: new Date().toISOString(), ...newData },
[],
null
);
const newStateId: string = processUpdated.diffs[0]?.state_id;
await this.messageBus.notifyUpdate(processId, newStateId);
await this.messageBus.validateState(processId, newStateId);
const processData = await this.messageBus.getProcessData(processId);
// Update cache
this.setItem('_deed_types_', processId, processData);
} catch (error) {
console.error('Failed to update deed type:', error);
throw error; // Re-throw to allow caller to handle
}
}
}


@ -0,0 +1,152 @@
import { v4 as uuidv4 } from 'uuid';
import User from 'src/sdk/User';
import AbstractService from './AbstractService';
export default class DocumentService extends AbstractService {
private constructor() {
super();
}
public static createDocument(documentData: any, validatorId: string): Promise<any> {
const ownerId: string = User.getInstance().getPairingId()!;
const processData: any = {
uid: uuidv4(),
utype: 'document',
isDeleted: 'false',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
...documentData
};
const privateFields: string[] = Object.keys(processData);
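// uid, utype and isDeleted are removed from the private fields so they remain publicly
// readable: the getters below filter processes on these public values.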
privateFields.splice(privateFields.indexOf('uid'), 1);
privateFields.splice(privateFields.indexOf('utype'), 1);
privateFields.splice(privateFields.indexOf('isDeleted'), 1);
const roles: any = {
demiurge: {
members: [...[ownerId], validatorId],
validation_rules: [],
storages: []
},
owner: {
members: [ownerId],
validation_rules: [
{
quorum: 0.5,
fields: [...privateFields, 'roles', 'uid', 'utype'],
min_sig_member: 1,
},
],
storages: []
},
validator: {
members: [validatorId],
validation_rules: [
{
quorum: 0.5,
fields: ['idCertified', 'roles'],
min_sig_member: 1,
},
{
quorum: 0.0,
fields: [...privateFields],
min_sig_member: 0,
},
],
storages: []
},
apophis: {
members: [ownerId],
validation_rules: [],
storages: []
}
};
return new Promise<any>((resolve: (processCreated: any) => void, reject: (error: string) => void) => {
this.messageBus.createProcess(processData, privateFields, roles).then((processCreated: any) => {
this.messageBus.notifyUpdate(processCreated.processId, processCreated.process.states[0].state_id).then(() => {
this.messageBus.validateState(processCreated.processId, processCreated.process.states[0].state_id).then((_stateValidated: any) => {
this.getDocumentByUid(processCreated.processData.uid).then(resolve).catch(reject);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
public static getDocuments(): Promise<any[]> {
// Check if we have valid cache
const items: any[] = this.getItems('_documents_');
return this.messageBus.getProcessesDecoded((publicValues: any) =>
publicValues['uid'] &&
publicValues['utype'] &&
publicValues['utype'] === 'document' &&
publicValues['isDeleted'] &&
publicValues['isDeleted'] === 'false' &&
!items.map((item: any) => item.processData.uid).includes(publicValues['uid'])
).then((processes: any[]) => {
if (processes.length === 0) {
return items;
} else {
for (const process of processes) {
// Update cache
this.setItem('_documents_', process);
items.push(process);
}
return items;
}
});
}
public static getDocumentByUid(uid: string): Promise<any> {
// Check if we have valid cache
const item: any = this.getItem('_documents_', uid);
if (item) {
return Promise.resolve(item);
}
return new Promise<any>((resolve: (process: any) => void, reject: (error: string) => void) => {
this.messageBus.getProcessesDecoded((publicValues: any) =>
publicValues['uid'] &&
publicValues['uid'] === uid &&
publicValues['utype'] &&
publicValues['utype'] === 'document' &&
publicValues['isDeleted'] &&
publicValues['isDeleted'] === 'false'
).then((processes: any[]) => {
if (processes.length === 0) {
resolve(null);
} else {
const process: any = processes[0];
// Update cache
this.setItem('_documents_', process);
resolve(process);
}
}).catch(reject);
});
}
public static updateDocument(process: any, newData: any): Promise<void> {
return new Promise<void>((resolve: () => void, reject: (error: string) => void) => {
this.messageBus.updateProcess(process.processId, { updated_at: new Date().toISOString(), ...newData }, [], null).then((processUpdated: any) => {
const newStateId: string = processUpdated.diffs[0]?.state_id;
this.messageBus.notifyUpdate(process.processId, newStateId).then(() => {
this.messageBus.validateState(process.processId, newStateId).then((_stateValidated) => {
const documentUid: string = process.processData.uid;
this.removeItem('_documents_', documentUid);
this.getDocumentByUid(documentUid).then(resolve).catch(reject);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
}


@ -0,0 +1,117 @@
import { v4 as uuidv4 } from 'uuid';
import User from 'src/sdk/User';
import AbstractService from './AbstractService';
import { DEFAULT_STORAGE_URLS } from '@Front/Config/AppConstants';
export default class DocumentTypeService extends AbstractService {
private constructor() {
super();
}
public static createDocumentType(documentTypeData: any, validatorId: string): Promise<{ processId: string, processData: any }> {
const ownerId: string = User.getInstance().getPairingId()!;
const processData: any = {
uid: uuidv4(),
utype: 'documentType',
isDeleted: 'false',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
...documentTypeData
};
const privateFields: string[] = Object.keys(processData);
const allFields: string[] = [...privateFields, 'roles'];
const roles: any = {
demiurge: {
members: [ownerId],
validation_rules: [],
storages: []
},
owner: {
members: [ownerId, validatorId],
validation_rules: [
{
quorum: 0.01,
fields: allFields,
min_sig_member: 0.01,
},
],
storages: [...DEFAULT_STORAGE_URLS]
},
apophis: {
members: [ownerId, validatorId],
validation_rules: [],
storages: []
}
};
return new Promise<any>((resolve: (processCreated: any) => void, reject: (error: string) => void) => {
this.messageBus.createProcess(processData, privateFields, roles).then((processCreated: any) => {
this.messageBus.notifyUpdate(processCreated.processId, processCreated.process.states[0].state_id).then(() => {
this.messageBus.validateState(processCreated.processId, processCreated.process.states[0].state_id).then((_stateValidated: any) => {
this.messageBus.getProcessData(processCreated.processId).then((processData: any) => {
resolve({ processId: processCreated.processId, processData: processData });
}).catch(reject);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
public static getDocumentTypes(callback: (processes: Record<string, any>) => void): void {
// Check if we have valid cache
const items: Record<string, any> = this.getItems('_document_types_');
if (Object.keys(items).length > 0) {
setTimeout(() => callback(items), 0);
}
this.messageBus.getProcessesDecoded((_processId: string, values: any) => {
return values['utype'] === 'documentType'
&& values['isDeleted'] === 'false';
}).then(async (processes: Record<string, any>) => {
if (Object.keys(processes).length === 0) {
callback(items);
return;
}
const updatedItems: Record<string, any> = { ...items };
for (const [processId, process] of Object.entries(processes)) {
// Update cache
this.setItem('_document_types_', processId, process);
updatedItems[processId] = process;
}
callback(updatedItems);
});
}
public static async updateDocumentType(processId: string, newData: any): Promise<void> {
try {
const processUpdated = await this.messageBus.updateProcess(
processId,
{ updated_at: new Date().toISOString(), ...newData },
[],
null
);
const newStateId: string = processUpdated.diffs[0]?.state_id;
await this.messageBus.notifyUpdate(processId, newStateId);
await this.messageBus.validateState(processId, newStateId);
const processData = await this.messageBus.getProcessData(processId);
// Update cache
this.setItem('_document_types_', processId, processData);
} catch (error) {
console.error('Failed to update document type:', error);
throw error; // Re-throw to allow caller to handle
}
}
}


@ -0,0 +1,98 @@
import { v4 as uuidv4 } from 'uuid';
import MessageBus from 'src/sdk/MessageBus';
import User from 'src/sdk/User';
import { FileData } from '../../../../front/Api/Entities/types';
export default class FileService {
private static readonly messageBus: MessageBus = MessageBus.getInstance();
private constructor() { }
public static createFile(fileData: FileData, validatorId: string): Promise<any> {
const ownerId: string = User.getInstance().getPairingId()!;
const processData: any = {
uid: uuidv4(),
utype: 'file',
isDeleted: 'false',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
...fileData
};
const privateFields: string[] = Object.keys(processData);
privateFields.splice(privateFields.indexOf('uid'), 1);
privateFields.splice(privateFields.indexOf('utype'), 1);
privateFields.splice(privateFields.indexOf('isDeleted'), 1);
const roles: any = {
demiurge: {
members: [...[ownerId], validatorId],
validation_rules: [],
storages: []
},
owner: {
members: [ownerId],
validation_rules: [
{
quorum: 0.5,
fields: [...privateFields, 'roles', 'uid', 'utype'],
min_sig_member: 1,
},
],
storages: []
},
validator: {
members: [validatorId],
validation_rules: [
{
quorum: 0.5,
fields: ['idCertified', 'roles'],
min_sig_member: 1,
},
{
quorum: 0.0,
fields: [...privateFields],
min_sig_member: 0,
},
],
storages: []
},
apophis: {
members: [ownerId],
validation_rules: [],
storages: []
}
};
return new Promise<any>((resolve: (processCreated: any) => void, reject: (error: string) => void) => {
this.messageBus.createProcess(processData, privateFields, roles).then((processCreated: any) => {
this.messageBus.notifyUpdate(processCreated.processId, processCreated.process.states[0].state_id).then(() => {
this.messageBus.validateState(processCreated.processId, processCreated.process.states[0].state_id).then((_stateValidated: any) => {
resolve(processCreated);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
public static getFileByUid(uid: string): Promise<any> {
return this.messageBus.getFileByUid(uid);
}
public static updateFile(process: any, newData: any): Promise<void> {
return new Promise<void>((resolve: () => void, reject: (error: string) => void) => {
this.messageBus.updateProcess(process.processId, { updated_at: new Date().toISOString(), ...newData }, [], null).then((processUpdated: any) => {
const newStateId: string = processUpdated.diffs[0]?.state_id;
this.messageBus.notifyUpdate(process.processId, newStateId).then(() => {
this.messageBus.validateState(process.processId, newStateId).then((_stateValidated) => {
resolve();
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
}


@ -0,0 +1,180 @@
import { v4 as uuidv4 } from 'uuid';
import User from 'src/sdk/User';
import AbstractService from './AbstractService';
import { DEFAULT_STORAGE_URLS, DEFAULT_VALIDATOR_ID } from '@Front/Config/AppConstants';
export default class FolderService extends AbstractService {
private constructor() {
super();
}
public static async createFolder(folderData: any, customersId: string[]): Promise<{ processId: string, processData: any }> {
const ownerId: string = User.getInstance().getPairingId()!;
const processData: any = {
uid: uuidv4(),
utype: 'folder',
isDeleted: 'false',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
...folderData
};
const privateFields: string[] = Object.keys(processData);
const allFields: string[] = [...privateFields, 'roles'];
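// Role layout for the new folder: 'owners' (creator + default validator) and 'guests' carry a
// low-quorum validation rule over all fields and the default storages, 'customers' lists the
// provided customer ids with a zero-quorum rule, while 'demiurge' and 'apophis' have no rules.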
const roles: any = {
demiurge: {
members: [ownerId],
validation_rules: [],
storages: []
},
owners: {
members: [ownerId, DEFAULT_VALIDATOR_ID],
validation_rules: [
{
quorum: 0.01,
fields: allFields,
min_sig_member: 0.01,
},
],
storages: [...DEFAULT_STORAGE_URLS]
},
guests: {
members: [],
validation_rules: [{
quorum: 0.01,
fields: allFields,
min_sig_member: 0.01,
}],
storages: [...DEFAULT_STORAGE_URLS]
},
customers: {
members: customersId,
validation_rules: [
{
quorum: 0.0,
fields: allFields,
min_sig_member: 0.0,
},
],
storages: [...DEFAULT_STORAGE_URLS]
},
apophis: {
members: [ownerId, DEFAULT_VALIDATOR_ID],
validation_rules: [],
storages: []
}
};
try {
const processCreated = await this.messageBus.createProcess(processData, privateFields, roles);
await this.messageBus.notifyUpdate(processCreated.processId, processCreated.process.states[0].state_id);
await this.messageBus.validateState(processCreated.processId, processCreated.process.states[0].state_id);
const finalProcessData = await this.messageBus.getProcessData(processCreated.processId);
return { processId: processCreated.processId, processData: finalProcessData };
} catch (error) {
throw error;
}
}
public static getFolders(callback: (processes: Record<string, any>) => void): void {
// Check if we have valid cache
const items: Record<string, any> = this.getItems('_folders_');
if (Object.keys(items).length > 0) {
setTimeout(() => callback(items), 0);
}
this.messageBus.getProcessesDecoded((_processId: string, values: any) => {
return values['utype']
&& values['utype'] === 'folder'
&& values['isDeleted']
&& values['isDeleted'] === 'false';
}).then(async (processes: Record<string, any>) => {
if (Object.keys(processes).length === 0) {
callback(items);
return;
}
console.log('[FolderService/getFolders] processes', processes);
const updatedItems: Record<string, any> = { ...items };
for (const [processId, process] of Object.entries(processes)) {
// Update cache
this.setItem('_folders_', processId, process);
updatedItems[processId] = process;
}
callback(updatedItems);
});
}
public static getFoldersBy(whereClause: { [path: string]: any }): Promise<Record<string, any>> {
return new Promise<Record<string, any>>((resolve: (folders: Record<string, any>) => void) => {
this.getFolders((processes: Record<string, any>) => {
if (Object.keys(processes).length === 0) {
resolve({});
} else {
const filteredEntries = Object.entries(processes).filter(([processId, process]) => {
for (const path in whereClause) {
const paths: string[] = path.split('.');
let value: any = process;
value['processId'] = processId;
for (let i = 0; i < paths.length; i++) {
const currentPath = paths[i];
if (!currentPath || value === undefined || value === null) {
break;
}
value = value[currentPath];
}
if (value !== whereClause[path]) {
return false;
}
}
return true;
});
// Convert filtered entries back to a Record
const filteredProcesses: Record<string, any> = {};
filteredEntries.forEach(([processId, process]) => {
filteredProcesses[processId] = process;
});
resolve(filteredProcesses);
}
});
});
}
public static async updateFolder(processId: string, newData: any): Promise<void> {
try {
const processUpdated = await this.messageBus.updateProcess(
processId,
{ updated_at: new Date().toISOString(), ...newData },
[],
null
);
const newStateId: string = processUpdated.diffs[0]?.state_id;
await this.messageBus.notifyUpdate(processId, newStateId);
await this.messageBus.validateState(processId, newStateId);
const processData = await this.messageBus.getProcessData(processId);
// Update cache
this.setItem('_folders_', processId, processData);
} catch (error) {
console.error('Failed to update folder:', error);
throw error; // Re-throw to allow caller to handle
}
}
}


@ -0,0 +1,509 @@
import { v4 as uuidv4 } from 'uuid';
import User from 'src/sdk/User';
import MessageBus from 'src/sdk/MessageBus';
import DatabaseService from './DatabaseService';
import RuleService from './RuleService';
import RuleGroupService from './RuleGroupService';
import RoleService from './RoleService';
import OfficeRoleService from './OfficeRoleService';
import { DEFAULT_VALIDATOR_ID } from '@Front/Config/AppConstants';
const mandatoryRoles = ['Notaire', 'Collaborateur'];
/**
* Type for the progress callback
*/
export interface ProgressInfo {
/** Overall progress (0-100) */
globalProgress: number;
/** Name of the current step */
currentStep: string;
/** Progress of the current step (0-100) */
stepProgress: number;
/** Optional description of the action in progress */
description?: string;
}
export default class ImportData {
protected static readonly messageBus: MessageBus = MessageBus.getInstance();
public static async import(office: any, validatorId: string, onProgress?: (info: ProgressInfo) => void): Promise<void> {
// Define the import steps dynamically
const importSteps = [
{
name: 'Règles',
function: async (progressCallback?: (subProgress: number, description?: string) => void) =>
await this.importRules(progressCallback)
},
{
name: 'Groupes de règles',
function: async (progressCallback?: (subProgress: number, description?: string) => void) =>
await this.importRuleGroups(progressCallback)
},
{
name: 'Rôles',
function: async (progressCallback?: (subProgress: number, description?: string) => void) =>
await this.importRoles(progressCallback)
},
{
name: 'Rôles d\'office',
function: async (progressCallback?: (subProgress: number, description?: string) => void, prevResults?: any[]) =>
await this.importOfficeRoles(office, prevResults![1], progressCallback)
}
];
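// Step results are collected in order below; importOfficeRoles reads results[1],
// i.e. the rule groups returned by the 'Groupes de règles' step.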
// Compute each step's share of the overall progress
const totalSteps = importSteps.length;
const stepWeight = 100 / totalSteps;
// Call the progress callback with 0% at the start
onProgress?.({
globalProgress: 0,
currentStep: 'Initialisation',
stepProgress: 0,
description: 'Début de l\'importation des données'
});
// Run each import step sequentially
const results: any[] = [];
for (let i = 0; i < importSteps.length; i++) {
const step = importSteps[i];
if (!step) continue; // Make sure the step exists
const startProgress = i * stepWeight;
// Create a progress callback for this step
const stepProgressCallback = (subProgress: number, description?: string) => {
onProgress?.({
globalProgress: startProgress + (subProgress * stepWeight / 100),
currentStep: step.name,
stepProgress: subProgress,
description
});
};
// Run the step and store its result if needed
const result = await step.function(stepProgressCallback, results);
if (result !== undefined) {
results.push(result);
} else {
// Push empty array to maintain consistent indexing
results.push([]);
}
console.log(`Step ${i} (${step.name}): ${result?.length || 0} items`);
}
if (!await this.isDone()) {
await this.done(validatorId);
}
}
public static async isDone(): Promise<boolean> {
return await this.messageBus.getProcessesDecoded((publicValues: any) =>
publicValues['uid'] &&
publicValues['utype'] &&
publicValues['utype'] === 'importData' &&
publicValues['isDeleted'] && publicValues['isDeleted'] === 'false'
).then(async (processes: any[]) => processes.length > 0);
}
private static async done(validatorId: string): Promise<any> {
const ownerId: string = User.getInstance().getPairingId()!;
const processData: any = {
uid: uuidv4(),
utype: 'importData',
isDeleted: 'false',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString()
};
const privateFields: string[] = Object.keys(processData);
privateFields.splice(privateFields.indexOf('uid'), 1);
privateFields.splice(privateFields.indexOf('utype'), 1);
privateFields.splice(privateFields.indexOf('isDeleted'), 1);
const roles: any = {
demiurge: {
members: [ownerId],
validation_rules: [],
storages: []
},
owner: {
members: [ownerId],
validation_rules: [
{
quorum: 0.5,
fields: [...privateFields, 'roles', 'uid', 'utype'],
min_sig_member: 1,
},
],
storages: []
},
validator: {
members: [validatorId],
validation_rules: [
{
quorum: 0.5,
fields: ['idCertified', 'roles'],
min_sig_member: 1,
},
{
quorum: 0.0,
fields: [...privateFields],
min_sig_member: 0,
},
],
storages: []
},
apophis: {
members: [ownerId],
validation_rules: [],
storages: []
}
};
return new Promise<void>((resolve: () => void, reject: (error: string) => void) => {
this.messageBus.createProcess(processData, privateFields, roles).then((processCreated: any) => {
this.messageBus.notifyUpdate(processCreated.processId, processCreated.process.states[0].state_id).then(() => {
this.messageBus.validateState(processCreated.processId, processCreated.process.states[0].state_id).then(() => {
resolve();
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
private static async importRules(onProgress?: (progress: number, description?: string) => void): Promise<any[]> {
console.log('Importing rules');
const rules: any[] = [];
const INIT_PROGRESS = 0;
const FETCH_PROGRESS = 30;
const CREATE_END_PROGRESS = 90;
const FINAL_PROGRESS = 100;
onProgress?.(INIT_PROGRESS, 'Initialisation');
let page = 1;
let limit = 10;
let totalPages = 1;
onProgress?.(FETCH_PROGRESS, 'Récupération des règles existantes');
let result = await DatabaseService.getTableData('rules', page, limit);
if (result && result.success && result.pagination) {
totalPages = result.pagination.totalPages || 1;
}
const FETCH_PAGE_PROGRESS_START = FETCH_PROGRESS;
const FETCH_PAGE_PROGRESS_END = 60;
const CREATE_START_PROGRESS = 60;
while (result && result.success) {
const fetchPageProgress = FETCH_PAGE_PROGRESS_START + ((page / totalPages) * (FETCH_PAGE_PROGRESS_END - FETCH_PAGE_PROGRESS_START));
onProgress?.(fetchPageProgress, `Page ${page}/${totalPages} : Récupération des règles`);
const existingRules: any[] = (await RuleService.getRules()).map((process: any) => process.processData);
const filteredRules: any[] = result.data.filter((rule: any) => !existingRules.some((existingRule: any) => existingRule.uid === rule.uid));
if (filteredRules.length === 0) {
console.debug('All rules already imported');
}
const totalFilteredRules = filteredRules.length;
for (let i = 0; i < totalFilteredRules; i++) {
console.log(`Adding rule ${filteredRules[i].name}`);
rules.push((await RuleService.createRule(filteredRules[i], DEFAULT_VALIDATOR_ID)).processData);
const progressRange = CREATE_END_PROGRESS - CREATE_START_PROGRESS;
const ruleProgressIncrement = progressRange / (totalFilteredRules * totalPages);
const progress = CREATE_START_PROGRESS + ((page - 1) * totalFilteredRules + i + 1) * ruleProgressIncrement;
onProgress?.(progress, `Page ${page}/${totalPages} : Création de la règle ${i + 1}/${totalFilteredRules} - ${filteredRules[i].label}`);
}
if (!result.pagination.hasNextPage) {
break;
}
page++;
result = await DatabaseService.getTableData('rules', page, limit);
}
onProgress?.(FINAL_PROGRESS, 'Importation des règles terminée');
return rules;
}
private static async importRuleGroups(onProgress?: (progress: number, description?: string) => void): Promise<any[]> {
console.log('Importing rule groups');
const ruleGroups: any[] = [];
const INIT_PROGRESS = 0;
const FETCH_PROGRESS = 30;
const CREATE_END_PROGRESS = 90;
const FINAL_PROGRESS = 100;
onProgress?.(INIT_PROGRESS, 'Initialisation');
let page = 1;
let limit = 10;
let totalPages = 1;
onProgress?.(FETCH_PROGRESS, 'Récupération des groupes de règles existants');
let result = await DatabaseService.getTableData('rules_groups', page, limit);
if (result && result.success && result.pagination) {
totalPages = result.pagination.totalPages || 1;
}
const FETCH_PAGE_PROGRESS_START = FETCH_PROGRESS;
const FETCH_PAGE_PROGRESS_END = 60;
const CREATE_START_PROGRESS = 60;
while (result && result.success) {
const fetchPageProgress = FETCH_PAGE_PROGRESS_START + ((page / totalPages) * (FETCH_PAGE_PROGRESS_END - FETCH_PAGE_PROGRESS_START));
onProgress?.(fetchPageProgress, `Page ${page}/${totalPages} : Récupération du groupe de règles`);
const existingRuleGroups: any[] = (await RuleGroupService.getRuleGroups()).map((process: any) => process.processData);
const filteredRuleGroups: any[] = result.data.filter((rule: any) => !existingRuleGroups.some((existingRule: any) => existingRule.uid === rule.uid));
const totalFilteredRuleGroups = filteredRuleGroups.length;
for (let i = 0; i < totalFilteredRuleGroups; i++) {
console.log(`Adding rule group ${filteredRuleGroups[i].name}`);
ruleGroups.push((await RuleGroupService.createRuleGroup(filteredRuleGroups[i], DEFAULT_VALIDATOR_ID)).processData);
const progressRange = CREATE_END_PROGRESS - CREATE_START_PROGRESS;
const ruleProgressIncrement = progressRange / (totalFilteredRuleGroups * totalPages);
const progress = CREATE_START_PROGRESS + ((page - 1) * totalFilteredRuleGroups + i + 1) * ruleProgressIncrement;
onProgress?.(progress, `Page ${page}/${totalPages} : Création du groupe de règles ${i + 1}/${totalFilteredRuleGroups} - ${filteredRuleGroups[i].label}`);
}
if (!result.pagination.hasNextPage) {
break;
}
page++;
result = await DatabaseService.getTableData('rules_groups', page, limit);
}
onProgress?.(FINAL_PROGRESS, 'Importation des groupes de règles terminée');
return ruleGroups;
}
private static async importRoles(onProgress?: (progress: number, description?: string) => void): Promise<any[]> {
console.log('Importing roles');
// Progress constants; they could be made configurable
const INIT_PROGRESS = 0;
const FETCH_PROGRESS = 30;
const CREATE_START_PROGRESS = FETCH_PROGRESS;
const CREATE_END_PROGRESS = 90;
const FINAL_PROGRESS = 100;
onProgress?.(INIT_PROGRESS, 'Initialisation');
return await new Promise<any[]>((resolve: (roles: any[]) => void) => {
const defaultRoles: any[] = [
{
name: 'super-admin',
label: 'Super administrateur'
},
{
name: 'admin',
label: 'Administrateur'
},
{
name: 'notary',
label: 'Notaire'
},
{
name: 'default',
label: 'Utilisateur'
}
];
RoleService.getRoles().then(async (processes: any[]) => {
onProgress?.(FETCH_PROGRESS, 'Récupération des rôles existants');
const roles: any[] = processes.map((process: any) => process.processData);
if (roles.length === 0) {
const existingRoleNames: string[] = roles.map((role: any) => role.name);
const totalRoles = defaultRoles.length;
for (let i = 0; i < totalRoles; i++) {
// Compare against roles that already exist (roles is empty here, so every default role is created).
if (!existingRoleNames.includes(defaultRoles[i].name)) {
roles.push((await RoleService.createRole(defaultRoles[i], DEFAULT_VALIDATOR_ID)).processData);
} else {
console.log(`Role ${defaultRoles[i].name} already exists`);
}
// Dynamic progress while the roles are being created
const progressRange = CREATE_END_PROGRESS - CREATE_START_PROGRESS;
const progress = CREATE_START_PROGRESS + ((i + 1) / totalRoles) * progressRange;
onProgress?.(progress, `Création du rôle ${i + 1}/${totalRoles} : ${defaultRoles[i].label}`);
}
}
onProgress?.(FINAL_PROGRESS, 'Importation des rôles terminée');
resolve(roles);
});
});
}
private static async importDefaultRoles(officeUid: string, ruleGroups: any[], onProgress?: (progress: number, description?: string) => void): Promise<any[]> {
console.log('Importing default roles');
let officeRoles: any[] = [];
const CREATE_START_PROGRESS = 60;
const CREATE_END_PROGRESS = 90;
onProgress?.(CREATE_START_PROGRESS, 'Création des rôles par défaut');
// Prepare the collaborator rules from rule groups
const collaboratorRules: any[] = ruleGroups
.map((ruleGroup: any) => ruleGroup.rules || [])
.reduce((acc: any, curr: any) => [...acc, ...curr], [])
.map((rule: any) => ({ uid: rule.uid }));
console.log(`Found ${collaboratorRules.length} collaborator rules from ${ruleGroups.length} rule groups`);
// Get fresh list of existing roles (including ones we just created)
const updatedExistingRoles = await OfficeRoleService.getOfficeRoles();
const existingRoles = updatedExistingRoles
.map((role: any) => role.processData)
.filter((roleData: any) => roleData.office?.uid === officeUid);
const existingRoleNames = existingRoles.map((role: any) => role.name);
const missingMandatoryRoles = mandatoryRoles.filter(roleName =>
!existingRoleNames.includes(roleName)
);
console.log(`Found ${existingRoleNames.length} existing roles, ${missingMandatoryRoles.length} mandatory roles missing`);
if (missingMandatoryRoles.length === 0) {
onProgress?.(CREATE_END_PROGRESS, 'Tous les rôles obligatoires existent déjà');
return officeRoles;
}
for (let i = 0; i < missingMandatoryRoles.length; i++) {
const roleName = missingMandatoryRoles[i];
const fallbackRole = {
name: roleName,
office: { uid: officeUid },
// Only Notaire gets rules, Collaborateur gets none
...(roleName === 'Notaire' && { rules: collaboratorRules })
};
console.log(`Creating missing mandatory role: ${roleName}`);
officeRoles.push((await OfficeRoleService.createOfficeRole(fallbackRole, DEFAULT_VALIDATOR_ID)).processData);
const progressRange = CREATE_END_PROGRESS - CREATE_START_PROGRESS;
const progress = CREATE_START_PROGRESS + ((i + 1) / missingMandatoryRoles.length) * progressRange;
onProgress?.(progress, `Création rôle obligatoire ${i + 1}/${missingMandatoryRoles.length} - ${roleName}`);
}
return officeRoles;
}
private static async importOfficeRoles(office: any, ruleGroups: any[], onProgress?: (progress: number, description?: string) => void): Promise<any[]> {
console.log('Importing office roles');
let officeRoles: any[] = [];
const officeUid = office.processData?.uid;
if (!officeUid) {
console.error('Office UID is not set');
return officeRoles;
}
console.log(`Processing ${ruleGroups.length} rule groups for office ${officeUid}`);
const INIT_PROGRESS = 0;
const FETCH_PROGRESS = 30;
const CREATE_END_PROGRESS = 90;
const FINAL_PROGRESS = 100;
onProgress?.(INIT_PROGRESS, 'Initialisation');
let page = 1;
let limit = 10;
let totalPages = 1;
onProgress?.(FETCH_PROGRESS, 'Récupération des rôles d\'office existants');
let result = await DatabaseService.getTableData('office_roles', page, limit);
if (result && result.success && result.pagination) {
totalPages = result.pagination.totalPages || 1;
}
const FETCH_PAGE_PROGRESS_START = FETCH_PROGRESS;
const FETCH_PAGE_PROGRESS_END = 60;
const CREATE_START_PROGRESS = 60;
// Collect all office roles for this office from all pages
const allOfficeRolesForThisOffice: any[] = [];
while (result && result.success) {
const fetchPageProgress = FETCH_PAGE_PROGRESS_START + ((page / totalPages) * (FETCH_PAGE_PROGRESS_END - FETCH_PAGE_PROGRESS_START));
onProgress?.(fetchPageProgress, `Page ${page}/${totalPages} : Récupération des rôles d'office`);
// Collect office roles for this office from current page
const officeRolesFromPage = result.data.filter((officeRole: any) =>
officeRole.office?.uid === officeUid
);
allOfficeRolesForThisOffice.push(...officeRolesFromPage);
if (!result.pagination.hasNextPage) {
break;
}
page++;
result = await DatabaseService.getTableData('office_roles', page, limit);
}
console.log(`Found ${allOfficeRolesForThisOffice.length} office roles in database for this office`);
if (allOfficeRolesForThisOffice.length === 0) {
console.log('No office roles found in database, creating defaults');
return await this.importDefaultRoles(officeUid, ruleGroups, onProgress);
}
// Get all existing office role processes (to avoid duplicates)
const existingOfficeRoles: any[] = await OfficeRoleService.getOfficeRoles();
const existingOfficeRoleUids = existingOfficeRoles.map((existingRole: any) =>
existingRole.processData.uid
);
console.log(`Found ${existingOfficeRoles.length} existing office role processes`);
// Import all office roles found in database (if not already imported)
const dbRolesToImport = allOfficeRolesForThisOffice.filter((dbRole: any) =>
!existingOfficeRoleUids.includes(dbRole.uid)
);
console.log(`Importing ${dbRolesToImport.length} new office roles from database`);
for (let i = 0; i < dbRolesToImport.length; i++) {
const roleData = dbRolesToImport[i];
// Ensure office UID is set correctly
if (!roleData.office) {
roleData.office = { uid: officeUid };
} else if (!roleData.office.uid) {
roleData.office.uid = officeUid;
}
console.log(`Importing office role: ${roleData.name}`);
officeRoles.push((await OfficeRoleService.createOfficeRole(roleData, DEFAULT_VALIDATOR_ID)).processData);
const progressRange = (CREATE_END_PROGRESS - CREATE_START_PROGRESS) * 0.7; // 70% for db imports
const progress = CREATE_START_PROGRESS + ((i + 1) / dbRolesToImport.length) * progressRange;
onProgress?.(progress, `Import base de données ${i + 1}/${dbRolesToImport.length} - ${roleData.name}`);
}
// Now check for mandatory roles and create any missing ones
const mandatoryRolesProgress = CREATE_START_PROGRESS + (CREATE_END_PROGRESS - CREATE_START_PROGRESS) * 0.7;
onProgress?.(mandatoryRolesProgress, 'Vérification des rôles obligatoires');
const defaultRolesCreated = await this.importDefaultRoles(officeUid, ruleGroups, (subProgress, description) => {
// Map the sub-progress (reported on the 60-90 create range) onto the remaining slice of this step (81-90)
const mappedProgress = mandatoryRolesProgress + ((subProgress - CREATE_START_PROGRESS) / (CREATE_END_PROGRESS - CREATE_START_PROGRESS)) * (CREATE_END_PROGRESS - mandatoryRolesProgress);
onProgress?.(mappedProgress, description);
});
officeRoles.push(...defaultRolesCreated);
onProgress?.(FINAL_PROGRESS, 'Importation des rôles d\'office terminée');
return officeRoles;
}
}
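
The progress reporting above repeatedly maps a sub-range of the 0-100 scale onto whatever portion of the parent bar is left. A minimal sketch of that linear remapping, with a hypothetical helper name (the service inlines this arithmetic instead of calling a function):

// Linear remap: a value in [srcStart, srcEnd] becomes a value in [dstStart, dstEnd].
function remapProgress(value: number, srcStart: number, srcEnd: number, dstStart: number, dstEnd: number): number {
  const ratio = (value - srcStart) / (srcEnd - srcStart);
  return dstStart + ratio * (dstEnd - dstStart);
}

// Example: sub-progress 75 (halfway through 60..90) lands at 85.5 when the
// remaining slice of the parent bar is 81..90.
const mapped = remapProgress(75, 60, 90, 81, 90); // 85.5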

View File

@ -0,0 +1,45 @@
class LoaderService {
private static instance: LoaderService;
private _isVisible: boolean = false;
private _callbacks: Array<(isVisible: boolean) => void> = [];
private constructor() { }
public static getInstance(): LoaderService {
if (!LoaderService.instance) {
LoaderService.instance = new LoaderService();
}
return LoaderService.instance;
}
public show(): void {
this._isVisible = true;
this._notifySubscribers();
}
public hide(): void {
this._isVisible = false;
this._notifySubscribers();
}
public get isVisible(): boolean {
return this._isVisible;
}
public subscribe(callback: (isVisible: boolean) => void): () => void {
this._callbacks.push(callback);
// Return unsubscribe function
return () => {
this._callbacks = this._callbacks.filter(cb => cb !== callback);
};
}
private _notifySubscribers(): void {
this._callbacks.forEach(callback => {
callback(this._isVisible);
});
}
}
export default LoaderService;
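
LoaderService is a plain singleton with an observer list; a minimal usage sketch outside React (variable names are illustrative only):

import LoaderService from './LoaderService';

const loader = LoaderService.getInstance();

// Subscribe before toggling; the callback receives the current visibility flag.
const unsubscribe = loader.subscribe((visible) => {
  console.log('loader visible:', visible);
});

loader.show();  // logs "loader visible: true"
loader.hide();  // logs "loader visible: false"
unsubscribe();  // stop receiving updates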

View File

@ -0,0 +1,38 @@
@keyframes spin {
to {
transform: rotate(1turn);
}
}
.loader-container {
position: fixed;
top: 0;
left: 0;
right: 0;
bottom: 0;
background-color: rgba(255, 255, 255, 0.7);
z-index: 9999;
display: flex;
justify-content: center;
align-items: center;
}
.loader {
display: flex;
flex-direction: column;
align-items: center;
background-color: white;
padding: 2rem;
border-radius: 8px;
box-shadow: 0 4px 10px rgba(0, 0, 0, 0.1);
}
.message {
margin-top: 1rem;
font-size: 1rem;
color: #333;
}
.spinner {
animation: spin 1s infinite linear;
}

View File

@ -0,0 +1,37 @@
import React, { useEffect, useState } from "react";
import { ArrowPathIcon } from "@heroicons/react/24/outline";
import LoaderService from "./LoaderService";
import classes from "./classes.module.scss";
// Merged component that both subscribes to the service and renders the overlay
const Loader: React.FC = () => {
const [isVisible, setIsVisible] = useState(false);
useEffect(() => {
// Subscribe to loader visibility changes
const unsubscribe = LoaderService.getInstance().subscribe((visible) => {
setIsVisible(visible);
});
// Clean up the subscription
return () => {
unsubscribe();
};
}, []);
// Render nothing when the loader is not visible
if (!isVisible) return null;
// Render the loader with a full-screen overlay
return (
<div className={classes["loader-container"]}>
<div className={classes["loader"]}>
<ArrowPathIcon className={classes["spinner"]} width={40} />
</div>
</div>
);
};
export default Loader;

View File

@ -0,0 +1,109 @@
import { v4 as uuidv4 } from 'uuid';
import MessageBus from 'src/sdk/MessageBus';
import User from 'src/sdk/User';
export default class NoteService {
private static readonly messageBus: MessageBus = MessageBus.getInstance();
private constructor() { }
public static createNote(noteData: any, validatorId: string): Promise<any> {
const ownerId: string = User.getInstance().getPairingId()!;
const processData: any = {
uid: uuidv4(),
utype: 'note',
isDeleted: 'false',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
...noteData
};
const privateFields: string[] = Object.keys(processData);
privateFields.splice(privateFields.indexOf('uid'), 1);
privateFields.splice(privateFields.indexOf('utype'), 1);
privateFields.splice(privateFields.indexOf('isDeleted'), 1);
const roles: any = {
demiurge: {
members: [...[ownerId], validatorId],
validation_rules: [],
storages: []
},
owner: {
members: [ownerId],
validation_rules: [
{
quorum: 0.5,
fields: [...privateFields, 'roles', 'uid', 'utype'],
min_sig_member: 1,
},
],
storages: []
},
validator: {
members: [validatorId],
validation_rules: [
{
quorum: 0.5,
fields: ['idCertified', 'roles'],
min_sig_member: 1,
},
{
quorum: 0.0,
fields: [...privateFields],
min_sig_member: 0,
},
],
storages: []
},
apophis: {
members: [ownerId],
validation_rules: [],
storages: []
}
};
return new Promise<any>((resolve: (processCreated: any) => void, reject: (error: string) => void) => {
this.messageBus.createProcess(processData, privateFields, roles).then((processCreated: any) => {
this.messageBus.notifyUpdate(processCreated.processId, processCreated.process.states[0].state_id).then(() => {
this.messageBus.validateState(processCreated.processId, processCreated.process.states[0].state_id).then((_stateValidated: any) => {
resolve(processCreated);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
public static getNotes(): Promise<any[]> {
return this.messageBus.getProcessesDecoded((publicValues: any) => publicValues['uid'] && publicValues['utype'] && publicValues['utype'] === 'note' && publicValues['isDeleted'] && publicValues['isDeleted'] === 'false');
}
public static getNoteByUid(uid: string): Promise<any> {
return new Promise<any>((resolve: (process: any) => void, reject: (error: string) => void) => {
this.messageBus.getProcessesDecoded((publicValues: any) => publicValues['uid'] && publicValues['uid'] === uid && publicValues['utype'] && publicValues['utype'] === 'note' && publicValues['isDeleted'] && publicValues['isDeleted'] === 'false').then((processes: any[]) => {
if (processes.length === 0) {
resolve(null);
} else {
const process: any = processes[0];
resolve(process);
}
}).catch(reject);
});
}
public static updateNote(process: any, newData: any): Promise<void> {
return new Promise<void>((resolve: () => void, reject: (error: string) => void) => {
this.messageBus.updateProcess(process.processId, { updated_at: new Date().toISOString(), ...newData }, [], null).then((processUpdated: any) => {
const newStateId: string = processUpdated.diffs[0]?.state_id;
this.messageBus.notifyUpdate(process.processId, newStateId).then(() => {
this.messageBus.validateState(process.processId, newStateId).then((_stateValidated) => {
resolve();
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
}
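
A hedged usage sketch for NoteService, assuming the import path matches the other sdk services, that MessageBus/User are already paired, and that DEFAULT_VALIDATOR_ID is the validator used elsewhere in this change set; the noteData fields are illustrative only:

import NoteService from 'src/common/Api/LeCoffreApi/sdk/NoteService'; // path assumed
import { DEFAULT_VALIDATOR_ID } from '@Front/Config/AppConstants';

async function demoNotes(): Promise<void> {
  // createNote resolves with the created process once its first state is validated.
  const created = await NoteService.createNote({ title: 'Rappel', content: 'Relancer le client' }, DEFAULT_VALIDATOR_ID);
  const uid: string = created.processData.uid;

  // getNotes only returns processes whose utype is 'note' and isDeleted is 'false'.
  const notes = await NoteService.getNotes();
  console.log(`found ${notes.length} note(s)`);

  // updateNote pushes a diff, then notifies and validates the new state.
  const note = await NoteService.getNoteByUid(uid);
  if (note) {
    await NoteService.updateNote(note, { content: 'Relance envoyée' });
  }
}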

View File

@ -0,0 +1,98 @@
import { v4 as uuidv4 } from 'uuid';
import MessageBus from 'src/sdk/MessageBus';
import User from 'src/sdk/User';
import { FileData } from '../../../../front/Api/Entities/types';
export default class OfficeRibService {
private static readonly messageBus: MessageBus = MessageBus.getInstance();
private constructor() { }
public static createOfficeRib(fileData: FileData, validatorId: string): Promise<any> {
const ownerId: string = User.getInstance().getPairingId()!;
const processData: any = {
uid: uuidv4(),
utype: 'officeRib',
isDeleted: 'false',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
...fileData
};
const privateFields: string[] = Object.keys(processData);
privateFields.splice(privateFields.indexOf('uid'), 1);
privateFields.splice(privateFields.indexOf('utype'), 1);
privateFields.splice(privateFields.indexOf('isDeleted'), 1);
const roles: any = {
demiurge: {
members: [...[ownerId], validatorId],
validation_rules: [],
storages: []
},
owner: {
members: [ownerId],
validation_rules: [
{
quorum: 0.5,
fields: [...privateFields, 'roles', 'uid', 'utype'],
min_sig_member: 1,
},
],
storages: []
},
validator: {
members: [validatorId],
validation_rules: [
{
quorum: 0.5,
fields: ['idCertified', 'roles'],
min_sig_member: 1,
},
{
quorum: 0.0,
fields: [...privateFields],
min_sig_member: 0,
},
],
storages: []
},
apophis: {
members: [ownerId],
validation_rules: [],
storages: []
}
};
return new Promise<any>((resolve: (processCreated: any) => void, reject: (error: string) => void) => {
this.messageBus.createProcess(processData, privateFields, roles).then((processCreated: any) => {
this.messageBus.notifyUpdate(processCreated.processId, processCreated.process.states[0].state_id).then(() => {
this.messageBus.validateState(processCreated.processId, processCreated.process.states[0].state_id).then((_stateValidated: any) => {
resolve(processCreated);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
public static getOfficeRib(): Promise<any> {
return this.messageBus.getFileByUtype('officeRib');
}
public static updateOfficeRib(process: any, newData: any): Promise<void> {
return new Promise<void>((resolve: () => void, reject: (error: string) => void) => {
this.messageBus.updateProcess(process.processId, { updated_at: new Date().toISOString(), ...newData }, [], null).then((processUpdated: any) => {
const newStateId: string = processUpdated.diffs[0]?.state_id;
this.messageBus.notifyUpdate(process.processId, newStateId).then(() => {
this.messageBus.validateState(process.processId, newStateId).then((_stateValidated) => {
resolve();
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
}

View File

@ -0,0 +1,150 @@
import { v4 as uuidv4 } from 'uuid';
import User from 'src/sdk/User';
import AbstractService from './AbstractService';
import OfficeService from './OfficeService';
import RuleService from './RuleService';
import { DEFAULT_STORAGE_URLS } from '@Front/Config/AppConstants';
export default class OfficeRoleService extends AbstractService {
private constructor() {
super();
}
public static createOfficeRole(roleData: any, validatorId: string): Promise<any> {
const ownerId: string = User.getInstance().getPairingId()!;
const processData: any = {
uid: uuidv4(),
utype: 'officeRole',
isDeleted: 'false',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
...roleData
};
const privateFields: string[] = Object.keys(processData);
privateFields.splice(privateFields.indexOf('uid'), 1);
privateFields.splice(privateFields.indexOf('utype'), 1);
privateFields.splice(privateFields.indexOf('isDeleted'), 1);
const roles: any = {
demiurge: {
members: [ownerId],
validation_rules: [],
storages: []
},
owner: {
members: [ownerId],
validation_rules: [
{
quorum: 0.01,
fields: [...privateFields, 'roles', 'uid', 'utype', 'isDeleted'],
min_sig_member: 0.01,
},
],
storages: [...DEFAULT_STORAGE_URLS]
},
validator: {
members: [validatorId],
validation_rules: [
{
quorum: 1,
fields: [...privateFields, 'roles', 'uid', 'utype', 'isDeleted'],
min_sig_member: 1,
},
],
storages: [...DEFAULT_STORAGE_URLS]
},
apophis: {
members: [ownerId, validatorId],
validation_rules: [],
storages: []
}
};
return new Promise<any>((resolve: (processCreated: any) => void, reject: (error: string) => void) => {
this.messageBus.createProcess(processData, privateFields, roles).then((processCreated: any) => {
this.messageBus.notifyUpdate(processCreated.processId, processCreated.process.states[0].state_id).then(() => {
this.messageBus.validateState(processCreated.processId, processCreated.process.states[0].state_id).then((_stateValidated: any) => {
this.getOfficeRoleByUid(processCreated.processData.uid).then(resolve).catch(reject);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
public static getOfficeRoles(): Promise<any[]> {
// Check if we have valid cache
const items: any[] = this.getItems('_office_roles_');
return this.messageBus.getProcessesDecoded((publicValues: any) =>
publicValues['uid'] &&
publicValues['utype'] &&
publicValues['utype'] === 'officeRole' &&
publicValues['isDeleted'] &&
publicValues['isDeleted'] === 'false' &&
!items.map((item: any) => item.processData.uid).includes(publicValues['uid'])
).then(async (processes: any[]) => {
if (processes.length === 0) {
return items;
} else {
for (let process of processes) {
// Update cache
this.setItem('_office_roles_', process);
items.push(process);
}
return items;
}
});
}
public static getOfficeRoleByUid(uid: string): Promise<any> {
// Check if we have valid cache
const item: any = this.getItem('_office_roles_', uid);
if (item) {
return Promise.resolve(item);
}
return new Promise<any>((resolve: (process: any) => void, reject: (error: string) => void) => {
this.messageBus.getProcessesDecoded((publicValues: any) =>
publicValues['uid'] &&
publicValues['uid'] === uid &&
publicValues['utype'] &&
publicValues['utype'] === 'officeRole' &&
publicValues['isDeleted'] &&
publicValues['isDeleted'] === 'false'
).then(async (processes: any[]) => {
if (processes.length !== 1) {
resolve(null);
} else {
// Update cache
this.setItem('_office_roles_', processes[0]);
resolve(processes[0]);
}
}).catch(reject);
});
}
public static updateOfficeRole(process: any, newData: any): Promise<void> {
return new Promise<void>((resolve: () => void, reject: (error: string) => void) => {
this.messageBus.updateProcess(process.processId, { updated_at: new Date().toISOString(), ...newData }, [], null).then((processUpdated: any) => {
const newStateId: string = processUpdated.diffs[0]?.state_id;
this.messageBus.notifyUpdate(process.processId, newStateId).then(() => {
this.messageBus.validateState(process.processId, newStateId).then((_stateValidated) => {
const roleUid: string = process.processData.uid;
this.removeItem('_office_roles_', roleUid);
this.getOfficeRoleByUid(roleUid).then(resolve).catch(reject);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
}
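
All of these services lean on the same read-through cache exposed by AbstractService (getItems / getItem / setItem / removeItem, keyed by a table name and the process uid). A minimal in-memory sketch of that contract, assuming a Map-based store; the real AbstractService may persist differently:

// Minimal stand-in for the cache contract used above (assumption: items keyed by processData.uid).
class ProcessCache {
  private static store: Map<string, Map<string, any>> = new Map();

  static getItems(table: string): any[] {
    return Array.from(this.store.get(table)?.values() ?? []);
  }

  static getItem(table: string, uid: string): any | undefined {
    return this.store.get(table)?.get(uid);
  }

  static setItem(table: string, process: any): void {
    if (!this.store.has(table)) this.store.set(table, new Map());
    this.store.get(table)!.set(process.processData.uid, process);
  }

  static removeItem(table: string, uid: string): void {
    this.store.get(table)?.delete(uid);
  }
}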

View File

@ -0,0 +1,189 @@
import { v4 as uuidv4 } from 'uuid';
import User from 'src/sdk/User';
import AbstractService from './AbstractService';
export default class OfficeService extends AbstractService {
private constructor() {
super();
}
public static createOffice(officeData: any, owners: string[], validatorId: string, storageUrls: string[]): Promise<any> {
const ownerId: string = User.getInstance().getPairingId()!;
// Create a set for all owners to avoid duplicates
const ownersSet: Set<string> = new Set([...owners, ownerId]);
const processData: any = {
uid: uuidv4(),
utype: 'office',
isDeleted: 'false',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
...officeData
};
const privateFields: string[] = Object.keys(processData);
privateFields.splice(privateFields.indexOf('uid'), 1);
privateFields.splice(privateFields.indexOf('utype'), 1);
privateFields.splice(privateFields.indexOf('isDeleted'), 1);
const roles: any = {
demiurge: {
members: Array.from(ownersSet),
validation_rules: [],
storages: []
},
owner: {
members: Array.from(ownersSet),
validation_rules: [
{
quorum: 0.01, // effectively any owner can make any change
fields: [...privateFields, 'roles', 'uid', 'utype', 'isDeleted'],
min_sig_member: 0.01, // need to sign with at least one device
},
],
storages: storageUrls
},
validator: {
members: [validatorId],
validation_rules: [
{
quorum: 1, // validator can do anything alone
fields: [...privateFields, 'roles', 'uid', 'utype', 'isDeleted'],
min_sig_member: 1,
},
],
storages: storageUrls
},
apophis: {
members: Array.from(ownersSet), // any owner can terminate the office
validation_rules: [],
storages: []
}
};
return new Promise<any>((resolve: (processCreated: any) => void, reject: (error: string) => void) => {
this.messageBus.createProcess(processData, privateFields, roles).then((processCreated: any) => {
this.messageBus.notifyUpdate(processCreated.processId, processCreated.process.states[0].state_id).then(() => {
this.messageBus.validateState(processCreated.processId, processCreated.process.states[0].state_id).then((_stateValidated: any) => {
this.getOfficeByUid(processCreated.processData.uid).then(resolve).catch(reject);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
public static getOffices(): Promise<any[]> {
// Check if we have valid cache
const items: any[] = this.getItems('_offices_');
return this.messageBus.getProcessesDecoded((publicValues: any) =>
publicValues['uid'] &&
publicValues['utype'] &&
publicValues['utype'] === 'office' &&
publicValues['isDeleted'] &&
publicValues['isDeleted'] === 'false' &&
!items.map((item: any) => item.processData.uid).includes(publicValues['uid'])
).then((processes: any[]) => {
if (processes.length === 0) {
return items;
} else {
for (const process of processes) {
// Update cache
this.setItem('_offices_', process);
items.push(process);
}
return items;
}
});
}
public static getOfficeByUid(uid: string): Promise<any> {
// Check if we have valid cache
const item: any = this.getItem('_offices_', uid);
if (item) {
return Promise.resolve(item);
}
return new Promise<any>((resolve: (process: any) => void, reject: (error: string) => void) => {
this.messageBus.getProcessesDecoded((publicValues: any) =>
publicValues['uid'] &&
publicValues['uid'] === uid &&
publicValues['utype'] &&
publicValues['utype'] === 'office' &&
publicValues['isDeleted'] &&
publicValues['isDeleted'] === 'false'
).then((processes: any[]) => {
if (processes.length === 0) {
resolve(null);
} else {
const process: any = processes[0];
// Update cache
this.setItem('_offices_', process);
resolve(process);
}
}).catch(reject);
});
}
public static updateDocument(process: any, newData: any): Promise<void> {
return new Promise<void>((resolve: () => void, reject: (error: string) => void) => {
this.messageBus.updateProcess(process.processId, { updated_at: new Date().toISOString(), ...newData }, [], null).then((processUpdated: any) => {
const newStateId: string = processUpdated.diffs[0]?.state_id;
this.messageBus.notifyUpdate(process.processId, newStateId).then(() => {
this.messageBus.validateState(process.processId, newStateId).then((_stateValidated) => {
const officeUid: string = process.processData.uid;
this.removeItem('_offices_', officeUid);
this.getOfficeByUid(officeUid).then(resolve).catch(reject);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
public static addCollaborators(process: any, existingRoles: any, collaborators: any[]): Promise<void> {
return new Promise<void>((resolve: () => void, reject: (error: string) => void) => {
const newRoles: any = existingRoles;
const owners: string[] = newRoles['owner'].members;
if (!owners) {
console.error('[addCollaborators] owner role not found');
return reject('[addCollaborators] owner role not found');
}
const previousOwnersLength: number = owners.length;
for (const collaborator of collaborators) {
if (owners.includes(collaborator)) {
console.debug('[addCollaborators] collaborator already in owner role');
continue;
} else {
owners.push(collaborator);
}
}
if (previousOwnersLength === owners.length) {
console.error('[addCollaborators] no new collaborators added');
return reject('[addCollaborators] no new collaborators added');
}
console.log('newRoles : ', newRoles);
this.messageBus.updateProcess(process.processId, { updated_at: new Date().toISOString() }, [], newRoles).then((processUpdated: any) => {
const newStateId: string = processUpdated.diffs[0]?.state_id;
this.messageBus.notifyUpdate(process.processId, newStateId).then(() => {
this.messageBus.validateState(process.processId, newStateId).then((_stateValidated) => {
const officeUid: string = process.processData.uid;
this.removeItem('_offices_', officeUid);
this.getOfficeByUid(officeUid).then(resolve).catch(reject);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
}

View File

@ -0,0 +1,148 @@
import { v4 as uuidv4 } from 'uuid';
import User from 'src/sdk/User';
import AbstractService from './AbstractService';
import { DEFAULT_STORAGE_URLS } from '@Front/Config/AppConstants';
export default class RoleService extends AbstractService {
private constructor() {
super();
}
public static createRole(roleData: any, validatorId: string): Promise<any> {
const ownerId: string = User.getInstance().getPairingId()!;
const processData: any = {
uid: uuidv4(),
utype: 'role',
isDeleted: 'false',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
...roleData
};
const privateFields: string[] = Object.keys(processData);
privateFields.splice(privateFields.indexOf('uid'), 1);
privateFields.splice(privateFields.indexOf('utype'), 1);
privateFields.splice(privateFields.indexOf('isDeleted'), 1);
const roles: any = {
demiurge: {
members: [ownerId],
validation_rules: [],
storages: []
},
owner: {
members: [ownerId],
validation_rules: [
{
quorum: 0.01,
fields: [...privateFields, 'roles', 'uid', 'utype', 'isDeleted'],
min_sig_member: 0.01,
},
],
storages: [...DEFAULT_STORAGE_URLS]
},
validator: {
members: [validatorId],
validation_rules: [
{
quorum: 1,
fields: [...privateFields, 'roles', 'uid', 'utype', 'isDeleted'],
min_sig_member: 1,
},
],
storages: [...DEFAULT_STORAGE_URLS]
},
apophis: {
members: [ownerId, validatorId],
validation_rules: [],
storages: []
}
};
return new Promise<any>((resolve: (processCreated: any) => void, reject: (error: string) => void) => {
this.messageBus.createProcess(processData, privateFields, roles).then((processCreated: any) => {
this.messageBus.notifyUpdate(processCreated.processId, processCreated.process.states[0].state_id).then(() => {
this.messageBus.validateState(processCreated.processId, processCreated.process.states[0].state_id).then((_stateValidated: any) => {
this.getRoleByUid(processCreated.processData.uid).then(resolve).catch(reject);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
public static getRoles(): Promise<any[]> {
// Check if we have valid cache
const items: any[] = this.getItems('_roles_');
return this.messageBus.getProcessesDecoded((publicValues: any) =>
publicValues['uid'] &&
publicValues['utype'] &&
publicValues['utype'] === 'role' &&
publicValues['isDeleted'] &&
publicValues['isDeleted'] === 'false' &&
!items.map((item: any) => item.processData.uid).includes(publicValues['uid'])
).then(async (processes: any[]) => {
if (processes.length === 0) {
return items;
} else {
for (const process of processes) {
// Update cache
this.setItem('_roles_', process);
items.push(process);
}
return items;
}
});
}
public static getRoleByUid(uid: string): Promise<any> {
// Check if we have valid cache
const item: any = this.getItem('_roles_', uid);
if (item) {
return Promise.resolve(item);
}
return new Promise<any>((resolve: (process: any) => void, reject: (error: string) => void) => {
this.messageBus.getProcessesDecoded((publicValues: any) =>
publicValues['uid'] &&
publicValues['uid'] === uid &&
publicValues['utype'] &&
publicValues['utype'] === 'role' &&
publicValues['isDeleted'] &&
publicValues['isDeleted'] === 'false'
).then(async (processes: any[]) => {
if (processes.length === 0) {
resolve(null);
} else {
const process: any = processes[0];
// Update cache
this.setItem('_roles_', process);
resolve(process);
}
}).catch(reject);
});
}
public static updateRole(process: any, newData: any): Promise<void> {
return new Promise<void>((resolve: () => void, reject: (error: string) => void) => {
this.messageBus.updateProcess(process.processId, { updated_at: new Date().toISOString(), ...newData }, [], null).then((processUpdated: any) => {
const newStateId: string = processUpdated.diffs[0]?.state_id;
this.messageBus.notifyUpdate(process.processId, newStateId).then(() => {
this.messageBus.validateState(process.processId, newStateId).then((_stateValidated) => {
const roleUid: string = process.processData.uid;
this.removeItem('_roles_', roleUid);
this.getRoleByUid(roleUid).then(resolve).catch(reject);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
}

View File

@ -0,0 +1,167 @@
import { v4 as uuidv4 } from 'uuid';
import User from 'src/sdk/User';
import AbstractService from './AbstractService';
import RuleService from './RuleService';
import { DEFAULT_STORAGE_URLS } from '@Front/Config/AppConstants';
export default class RuleGroupService extends AbstractService {
private constructor() {
super();
}
public static createRuleGroup(ruleGroupData: any, validatorId: string): Promise<any> {
const ownerId: string = User.getInstance().getPairingId()!;
const processData: any = {
uid: uuidv4(),
utype: 'ruleGroup',
isDeleted: 'false',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
...ruleGroupData
};
const privateFields: string[] = Object.keys(processData);
privateFields.splice(privateFields.indexOf('uid'), 1);
privateFields.splice(privateFields.indexOf('utype'), 1);
privateFields.splice(privateFields.indexOf('isDeleted'), 1);
const roles: any = {
demiurge: {
members: [ownerId],
validation_rules: [],
storages: []
},
owner: {
members: [ownerId],
validation_rules: [
{
quorum: 0.01,
fields: [...privateFields, 'roles', 'uid', 'utype', 'isDeleted'],
min_sig_member: 0.01,
},
],
storages: [...DEFAULT_STORAGE_URLS]
},
validator: {
members: [validatorId],
validation_rules: [
{
quorum: 1,
fields: [...privateFields, 'roles', 'uid', 'utype', 'isDeleted'],
min_sig_member: 1,
},
],
storages: [...DEFAULT_STORAGE_URLS]
},
apophis: {
members: [ownerId, validatorId],
validation_rules: [],
storages: []
}
};
return new Promise<any>((resolve: (processCreated: any) => void, reject: (error: string) => void) => {
this.messageBus.createProcess(processData, privateFields, roles).then((processCreated: any) => {
this.messageBus.notifyUpdate(processCreated.processId, processCreated.process.states[0].state_id).then(() => {
this.messageBus.validateState(processCreated.processId, processCreated.process.states[0].state_id).then((_stateValidated: any) => {
this.getRuleGroupByUid(processCreated.processData.uid).then(resolve).catch(reject);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
public static getRuleGroups(): Promise<any[]> {
// Check if we have valid cache
const items: any[] = this.getItems('_rule_groups_');
return this.messageBus.getProcessesDecoded((publicValues: any) =>
publicValues['uid'] &&
publicValues['utype'] &&
publicValues['utype'] === 'ruleGroup' &&
publicValues['isDeleted'] &&
publicValues['isDeleted'] === 'false' &&
!items.map((item: any) => item.processData.uid).includes(publicValues['uid'])
).then(async (processes: any[]) => {
if (processes.length === 0) {
return items;
} else {
for (let process of processes) {
process = await this.completeRuleGroup(process);
// Update cache
this.setItem('_rule_groups_', process);
items.push(process);
}
return items;
}
});
}
public static getRuleGroupByUid(uid: string): Promise<any> {
// Check if we have valid cache
const item: any = this.getItem('_rule_groups_', uid);
if (item) {
return Promise.resolve(item);
}
return new Promise<any>((resolve: (process: any) => void, reject: (error: string) => void) => {
this.messageBus.getProcessesDecoded((publicValues: any) =>
publicValues['uid'] &&
publicValues['uid'] === uid &&
publicValues['utype'] &&
publicValues['utype'] === 'ruleGroup' &&
publicValues['isDeleted'] &&
publicValues['isDeleted'] === 'false'
).then(async (processes: any[]) => {
if (processes.length === 0) {
resolve(null);
} else {
let process: any = processes[0];
process = await this.completeRuleGroup(process);
// Update cache
this.setItem('_rule_groups_', process);
resolve(process);
}
}).catch(reject);
});
}
public static updateRuleGroup(process: any, newData: any): Promise<void> {
return new Promise<void>((resolve: () => void, reject: (error: string) => void) => {
this.messageBus.updateProcess(process.processId, { updated_at: new Date().toISOString(), ...newData }, [], null).then((processUpdated: any) => {
const newStateId: string = processUpdated.diffs[0]?.state_id;
this.messageBus.notifyUpdate(process.processId, newStateId).then(() => {
this.messageBus.validateState(process.processId, newStateId).then((_stateValidated) => {
const ruleUid: string = process.processData.uid;
this.removeItem('_rule_groups_', ruleUid);
this.getRuleGroupByUid(ruleUid).then(resolve).catch(reject);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
private static async completeRuleGroup(process: any): Promise<any> {
if (process.processData.rules && process.processData.rules.length > 0) {
const rules: any[] = [];
for (const rule of process.processData.rules) {
rules.push((await RuleService.getRuleByUid(rule.uid)).processData);
}
process.processData.rules = rules;
}
return process;
}
}
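
completeRuleGroup resolves each referenced rule one after the other; when the lookups are independent, the same expansion can be expressed with Promise.all. A sketch of that alternative, not what the service does today:

import RuleService from './RuleService';

// Expands a rule group's rule references by fetching them in parallel.
async function completeRuleGroupParallel(process: any): Promise<any> {
  const refs: any[] = process.processData.rules ?? [];
  if (refs.length > 0) {
    process.processData.rules = await Promise.all(
      refs.map(async (rule: any) => (await RuleService.getRuleByUid(rule.uid)).processData)
    );
  }
  return process;
}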

View File

@ -0,0 +1,148 @@
import { v4 as uuidv4 } from 'uuid';
import User from 'src/sdk/User';
import AbstractService from './AbstractService';
import { DEFAULT_STORAGE_URLS } from '@Front/Config/AppConstants';
export default class RuleService extends AbstractService {
private constructor() {
super();
}
public static createRule(ruleData: any, validatorId: string): Promise<any> {
const ownerId: string = User.getInstance().getPairingId()!;
const processData: any = {
uid: uuidv4(),
utype: 'rule',
isDeleted: 'false',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
...ruleData
};
const privateFields: string[] = Object.keys(processData);
privateFields.splice(privateFields.indexOf('uid'), 1);
privateFields.splice(privateFields.indexOf('utype'), 1);
privateFields.splice(privateFields.indexOf('isDeleted'), 1);
const roles: any = {
demiurge: {
members: [ownerId],
validation_rules: [],
storages: []
},
owner: {
members: [ownerId],
validation_rules: [
{
quorum: 0.01,
fields: [...privateFields, 'roles', 'uid', 'utype', 'isDeleted'],
min_sig_member: 0.01,
},
],
storages: [...DEFAULT_STORAGE_URLS]
},
validator: {
members: [validatorId],
validation_rules: [
{
quorum: 1,
fields: [...privateFields, 'roles', 'isDeleted', 'uid', 'utype'],
min_sig_member: 1,
},
],
storages: [...DEFAULT_STORAGE_URLS]
},
apophis: {
members: [ownerId, validatorId],
validation_rules: [],
storages: []
}
};
return new Promise<any>((resolve: (processCreated: any) => void, reject: (error: string) => void) => {
this.messageBus.createProcess(processData, privateFields, roles).then((processCreated: any) => {
this.messageBus.notifyUpdate(processCreated.processId, processCreated.process.states[0].state_id).then(() => {
this.messageBus.validateState(processCreated.processId, processCreated.process.states[0].state_id).then((_stateValidated: any) => {
this.getRuleByUid(processCreated.processData.uid).then(resolve).catch(reject);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
public static getRules(): Promise<any[]> {
// Check if we have valid cache
const items: any[] = this.getItems('_rules_');
return this.messageBus.getProcessesDecoded((publicValues: any) =>
publicValues['uid'] &&
publicValues['utype'] &&
publicValues['utype'] === 'rule' &&
publicValues['isDeleted'] &&
publicValues['isDeleted'] === 'false' &&
!items.map((item: any) => item.processData.uid).includes(publicValues['uid'])
).then((processes: any[]) => {
if (processes.length === 0) {
return items;
} else {
for (const process of processes) {
// Update cache
this.setItem('_rules_', process);
items.push(process);
}
return items;
}
});
}
public static getRuleByUid(uid: string): Promise<any> {
// Check if we have valid cache
const item: any = this.getItem('_rules_', uid);
if (item) {
return Promise.resolve(item);
}
return new Promise<any>((resolve: (process: any) => void, reject: (error: string) => void) => {
this.messageBus.getProcessesDecoded((publicValues: any) =>
publicValues['uid'] &&
publicValues['uid'] === uid &&
publicValues['utype'] &&
publicValues['utype'] === 'rule' &&
publicValues['isDeleted'] &&
publicValues['isDeleted'] === 'false'
).then((processes: any[]) => {
if (processes.length === 0) {
resolve(null);
} else {
const process: any = processes[0];
// Update cache
this.setItem('_rules_', process);
resolve(process);
}
}).catch(reject);
});
}
public static updateRule(process: any, newData: any): Promise<void> {
return new Promise<void>((resolve: () => void, reject: (error: string) => void) => {
this.messageBus.updateProcess(process.processId, { updated_at: new Date().toISOString(), ...newData }, [], null).then((processUpdated: any) => {
const newStateId: string = processUpdated.diffs[0]?.state_id;
this.messageBus.notifyUpdate(process.processId, newStateId).then(() => {
this.messageBus.validateState(process.processId, newStateId).then((_stateValidated) => {
const ruleUid: string = process.processData.uid;
this.removeItem('_rules_', ruleUid);
this.getRuleByUid(ruleUid).then(resolve).catch(reject);
}).catch(reject);
}).catch(reject);
}).catch(reject);
});
}
}

View File

@ -115,4 +115,31 @@ export default class Auth extends BaseApiService {
return Promise.reject(err);
}
}
public async clientAuth(body: IClientAuthParams): Promise<IClientAuthReturn> {
// Construct the full URL for the client-auth endpoint
// This endpoint is at /api/v1/client-auth, not part of the customer auth namespace
const url = new URL(this.baseURl.concat("/client-auth"));
try {
// Create custom headers for this specific endpoint
const headers = new Headers();
headers.set("Content-Type", "application/json");
headers.set("x-session-id", body.sessionId);
const response = await fetch(url, {
method: "POST",
headers,
body: JSON.stringify({ pairingId: body.pairingId })
});
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
}
return await response.json();
} catch (err) {
this.onError(err);
return Promise.reject(err);
}
}
}
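
A hedged sketch of calling the new clientAuth endpoint; the import path and the getInstance() accessor are assumptions based on the other BaseApiService subclasses, and IClientAuthParams is assumed to carry sessionId and pairingId as used above:

import Auth from '@Front/Api/Auth/Customer/Auth'; // path assumed for illustration

async function authenticateClient(sessionId: string, pairingId: string) {
  // POSTs to <baseURl>/client-auth with the session id carried in the x-session-id header.
  const result = await Auth.getInstance().clientAuth({ sessionId, pairingId });
  console.log('client-auth result:', result);
  return result;
}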

View File

@ -27,10 +27,13 @@ export default class Auth extends BaseApiService {
public async loginWithIdNot() {
const variables = FrontendVariables.getInstance();
let redirectUri = variables.IDNOT_REDIRECT_URI || "";
if (!redirectUri) {
console.warn("[IDNOT] NEXT_PUBLIC_IDNOT_REDIRECT_URI vide; fallback sur FRONT_APP_HOST/authorized-client");
redirectUri = `${variables.FRONT_APP_HOST}/authorized-client`;
}
const url = new URL(
`${variables.IDNOT_BASE_URL + variables.IDNOT_AUTHORIZE_ENDPOINT}?client_id=${variables.IDNOT_CLIENT_ID}&redirect_uri=${
variables.FRONT_APP_HOST
}/authorized-client&scope=openid,profile&response_type=code`,
`${variables.IDNOT_BASE_URL + variables.IDNOT_AUTHORIZE_ENDPOINT}?client_id=${variables.IDNOT_CLIENT_ID}&redirect_uri=${redirectUri}&scope=openid,profile&response_type=code`,
);
try {
return await this.getRequest(url);
@ -40,12 +43,85 @@ export default class Auth extends BaseApiService {
}
}
public async getIdnotJwt(autorizationCode: string | string[]): Promise<{ accessToken: string; refreshToken: string }> {
public async idNotAuth(autorizationCode: string | string[]): Promise<{ idNotUser: any; authToken: string }> {
const variables = FrontendVariables.getInstance();
const baseBackUrl = this.getBaseUrl();
const url = new URL(`${baseBackUrl}/idnot/auth`);
const code = Array.isArray(autorizationCode) ? autorizationCode[0] : autorizationCode;
try {
return await this.postRequest<{ idNotUser: any; authToken: string }>(url, { code });
} catch (err) {
this.onError(err);
return Promise.reject(err);
}
}
public async getIdNotUser(): Promise<{ success: boolean; data: any }> {
const variables = FrontendVariables.getInstance();
const baseBackUrl = variables.BACK_API_PROTOCOL + variables.BACK_API_HOST;
const url = new URL(`${baseBackUrl}/api/v1/idnot/user/${autorizationCode}`);
const baseBackUrl = this.getBaseUrl();
const url = new URL(`${baseBackUrl}/idnot/user`);
try {
return await this.postRequest<{ accessToken: string; refreshToken: string }>(url);
return await this.getRequest(url);
}
catch (err) {
this.onError(err);
return Promise.reject(err);
}
}
public async getIdNotOfficeForUser(userId: string): Promise<any[]> {
const variables = FrontendVariables.getInstance();
const baseBackUrl = this.getBaseUrl();
const url = new URL(`${baseBackUrl}/idnot/user/rattachements`);
url.searchParams.set('idNot', userId);
try {
return await this.getRequest(url);
}
catch (err) {
this.onError(err);
return Promise.reject(err);
}
}
public async getIdNotUserForOffice(officeId: string): Promise<any[]> {
const variables = FrontendVariables.getInstance();
const baseBackUrl = this.getBaseUrl();
const url = new URL(`${baseBackUrl}/idnot/office/rattachements`);
url.searchParams.set('idNot', officeId);
try {
return await this.getRequest(url);
} catch (err) {
this.onError(err);
return Promise.reject(err);
}
}
public async getUserProcessByIdNot(pairingId: string): Promise<{ success: boolean; data: { processId: string, processData: { [key: string]: any } } }> {
const variables = FrontendVariables.getInstance();
const baseBackUrl = this.getBaseUrl();
const url = new URL(`${baseBackUrl}/process/user`);
url.searchParams.set('pairingId', pairingId);
try {
return await this.getRequest(url);
} catch (err) {
this.onError(err);
return Promise.reject(err);
}
}
public async getOfficeProcessByIdNot(): Promise<{ success: boolean; data: { processId: string, processData: { [key: string]: any } } }> {
const variables = FrontendVariables.getInstance();
const baseBackUrl = this.getBaseUrl();
const url = new URL(`${baseBackUrl}/process/office`);
try {
return await this.getRequest(url);
} catch (err) {
this.onError(err);
return Promise.reject(err);

View File

@ -1,7 +1,5 @@
import { FrontendVariables } from "@Front/Config/VariablesFront";
import CookieService from "@Front/Services/CookieService/CookieService";
import jwt_decode from "jwt-decode";
import JwtService, { ICustomerJwtPayload, IUserJwtPayload } from "@Front/Services/JwtService/JwtService";
export enum ContentType {
JSON = "application/json",
@ -18,11 +16,15 @@ export default abstract class BaseApiService {
protected readonly variables = FrontendVariables.getInstance();
protected constructor() {
BaseApiService.baseUrl ??=
this.variables.BACK_API_PROTOCOL +
this.variables.BACK_API_HOST +
this.variables.BACK_API_ROOT_URL +
this.variables.BACK_API_VERSION;
if (!BaseApiService.baseUrl) {
const protocol = (this.variables.BACK_API_PROTOCOL || '').replace(/:\/\/$/, '');
const host = (this.variables.BACK_API_HOST || '').replace(/\/$/, '');
const port = this.variables.BACK_API_PORT ? `:${this.variables.BACK_API_PORT.replace(/^:/, '')}` : '';
const root = `/${(this.variables.BACK_API_ROOT_URL || '')}`.replace(/\/+/g, '/').replace(/\/$/, '');
const version = `/${(this.variables.BACK_API_VERSION || '')}`.replace(/\/+/g, '/');
BaseApiService.baseUrl = `${protocol}://${host}${port}${root}${version}`;
}
}
protected getBaseUrl(): string {
@ -30,15 +32,6 @@ export default abstract class BaseApiService {
}
protected buildHeaders(contentType: ContentType) {
// Don't try to access cookies during server-side rendering
if (typeof window === 'undefined') {
const headers = new Headers();
if (contentType === ContentType.JSON || contentType === ContentType.PDF) {
headers.set("Content-Type", contentType);
}
return headers;
}
const token = CookieService.getInstance().getCookie("leCoffreAccessToken");
const headers = new Headers();
@ -46,14 +39,7 @@ export default abstract class BaseApiService {
if (contentType === ContentType.JSON || contentType === ContentType.PDF) {
headers.set("Content-Type", contentType);
}
// Only set Authorization header if token exists
if (token) {
headers.set("Authorization", `Bearer ${token}`);
} else {
console.warn("No access token found in cookies when building headers");
}
headers.set("Authorization", `Bearer ${token}`);
return headers;
}
@ -61,8 +47,7 @@ export default abstract class BaseApiService {
return JSON.stringify(body);
}
protected async getRequest<T>(url: URL, token?: string, contentType?: ContentType, ref?: IRef, fileName?: string) {
await this.checkJwtToken();
protected async getRequest<T>(url: URL, contentType?: ContentType, ref?: IRef, fileName?: string) {
const request = async () =>
await fetch(url, {
method: "GET",
@ -71,8 +56,7 @@ export default abstract class BaseApiService {
return this.sendRequest<T>(request, ref, fileName);
}
protected async postRequest<T>(url: URL, body: { [key: string]: unknown } = {}, token?: string) {
await this.checkJwtToken();
protected async postRequest<T>(url: URL, body: { [key: string]: unknown } = {}) {
return this.sendRequest<T>(
async () =>
await fetch(url, {
@ -84,7 +68,6 @@ export default abstract class BaseApiService {
}
protected async postRequestFormData<T>(url: URL, body: FormData) {
await this.checkJwtToken();
return this.sendRequest<T>(
async () =>
await fetch(url, {
@ -96,7 +79,6 @@ export default abstract class BaseApiService {
}
protected async putRequest<T>(url: URL, body: { [key: string]: unknown } = {}, token?: string) {
await this.checkJwtToken();
const request = async () =>
await fetch(url, {
method: "PUT",
@ -108,7 +90,6 @@ export default abstract class BaseApiService {
}
protected async patchRequest<T>(url: URL, body: { [key: string]: unknown } = {}) {
await this.checkJwtToken();
const request = async () =>
await fetch(url, {
method: "PATCH",
@ -120,7 +101,6 @@ export default abstract class BaseApiService {
}
protected async deleteRequest<T>(url: URL, body: { [key: string]: unknown } = {}, token?: string) {
await this.checkJwtToken();
const request = async () =>
await fetch(url, {
method: "DELETE",
@ -132,7 +112,6 @@ export default abstract class BaseApiService {
}
protected async putFormDataRequest<T>(url: URL, body: FormData, token?: string) {
await this.checkJwtToken();
const request = async () =>
await fetch(url, {
method: "PUT",
@ -149,69 +128,6 @@ export default abstract class BaseApiService {
return this.processResponse<T>(response, request, ref, fileName);
}
private async checkJwtToken() {
// Don't check tokens during server-side rendering
if (typeof window === 'undefined') {
return;
}
const accessToken = CookieService.getInstance().getCookie("leCoffreAccessToken");
if (!accessToken) {
console.warn("No access token found during JWT check");
return;
}
try {
const userDecodedToken = jwt_decode(accessToken) as IUserJwtPayload;
const customerDecodedToken = jwt_decode(accessToken) as ICustomerJwtPayload;
if (!userDecodedToken && !customerDecodedToken) {
console.warn("Invalid token format during JWT check");
return;
}
const now = Math.floor(Date.now() / 1000);
if (userDecodedToken.userId && userDecodedToken.exp < now) {
const refreshToken = CookieService.getInstance().getCookie("leCoffreRefreshToken");
if (!refreshToken) {
console.warn("Access token expired but no refresh token found");
return;
}
const decodedRefreshToken = jwt_decode(refreshToken) as IUserJwtPayload | ICustomerJwtPayload;
if (decodedRefreshToken.exp < now) {
console.warn("Both access and refresh tokens are expired");
return;
}
const refreshSuccess = await JwtService.getInstance().refreshToken(refreshToken);
if (!refreshSuccess) {
console.error("Failed to refresh token");
return;
}
}
if (customerDecodedToken.customerId && customerDecodedToken.exp < now) {
const refreshToken = CookieService.getInstance().getCookie("leCoffreRefreshToken");
if (!refreshToken) {
console.warn("Access token expired but no refresh token found");
return;
}
const decodedRefreshToken = jwt_decode(refreshToken) as IUserJwtPayload | ICustomerJwtPayload;
if (decodedRefreshToken.exp < now) {
console.warn("Both access and refresh tokens are expired");
return;
}
const refreshSuccess = await JwtService.getInstance().refreshToken(refreshToken);
if (!refreshSuccess) {
console.error("Failed to refresh token");
return;
}
}
} catch (error) {
console.error("Error during JWT token check:", error);
}
}
protected async processResponse<T>(response: Response, request: () => Promise<Response>, ref?: IRef, fileName?: string): Promise<T> {
let responseContent: T;
ref && (ref["response"] = response);
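
The rewritten constructor normalizes each variable before assembling the base URL; a small sketch of that same cleanup applied to example values (all values below are illustrative, not real configuration):

// Hypothetical raw inputs: protocol with a trailing "://", host with a trailing slash,
// root and version with stray slashes.
const protocol = 'https://'.replace(/:\/\/$/, '');                     // "https"
const host = 'api.lecoffre.example/'.replace(/\/$/, '');               // "api.lecoffre.example"
const rawPort = '3001';
const port = rawPort ? `:${rawPort.replace(/^:/, '')}` : '';           // ":3001"
const root = `/${'api/'}`.replace(/\/+/g, '/').replace(/\/$/, '');     // "/api"
const version = `/${'v1'}`.replace(/\/+/g, '/');                       // "/v1"
const baseUrl = `${protocol}://${host}${port}${root}${version}`;       // "https://api.lecoffre.example:3001/api/v1"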

View File

@ -0,0 +1,2 @@
export * from './types';
export * from './rule';

View File

@ -0,0 +1,21 @@
/**
* FileBlob interface representing a file's binary data and metadata
* Used for file transmission and storage in the application
*/
export interface FileBlob {
/** MIME type of the file (e.g., "application/pdf", "image/jpeg") */
type: string;
/** Binary data of the file as Uint8Array */
data: Uint8Array;
}
/**
* FileData interface representing a complete file object with blob and metadata
* Used when creating or updating files in the system
*/
export interface FileData {
/** The file blob containing type and binary data */
file_blob: FileBlob;
/** The name of the file */
file_name: string;
}
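
A sketch of building a FileData object from a browser File, mirroring what DepositOtherDocument does further down with FileReader:

// Reads a browser File into a FileData, as expected by FileService.createFile.
function toFileData(file: File): Promise<FileData> {
  return new Promise<FileData>((resolve, reject) => {
    const reader = new FileReader();
    reader.onerror = () => reject(reader.error);
    reader.onload = (event) => {
      const arrayBuffer = event.target?.result as ArrayBuffer;
      const fileBlob: FileBlob = { type: file.type, data: new Uint8Array(arrayBuffer) };
      resolve({ file_blob: fileBlob, file_name: file.name });
    };
    reader.readAsArrayBuffer(file);
  });
}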

View File

@ -2,6 +2,7 @@ import { Contact, Customer } from "le-coffre-resources/dist/Notary";
import BaseNotary from "../BaseNotary";
import { ECivility } from "le-coffre-resources/dist/Customer/Contact";
import { FrontendVariables } from "@Front/Config/VariablesFront";
// TODO Type get query params -> Where + include + orderby
export interface IGetCustomersparams {
@ -88,10 +89,15 @@ export default class Customers extends BaseNotary {
}
}
public async sendReminder(uid: string, documentsUid: string[]): Promise<void> {
const url = new URL(this.baseURl.concat(`/${uid}/send_reminder`));
public async sendReminder(office: any, customer: any): Promise<void> {
const variables = FrontendVariables.getInstance();
const baseBackUrl = `${variables.BACK_API_PROTOCOL}://${variables.BACK_API_HOST}:${variables.BACK_API_PORT}${variables.BACK_API_ROOT_URL}${variables.BACK_API_VERSION}`;
const url = new URL(`${baseBackUrl}/api/send_reminder`);
//const url = new URL(this.baseURl.concat(`/${uid}/send_reminder`));
try {
await this.postRequest<void>(url, { documentsUid });
await this.postRequest<void>(url, { office, customer });
} catch (err) {
this.onError(err);
return Promise.reject(err);

View File

@ -14,10 +14,15 @@ import classNames from "classnames";
import Button, { EButtonstyletype, EButtonVariant } from "../Button";
import Confirm from "../OldModal/Confirm";
import Documents from "@Front/Api/LeCoffreApi/Customer/Documents/Documents";
import Files from "@Front/Api/LeCoffreApi/Customer/Files/Files";
import Alert from "../OldModal/Alert";
import LoaderService from "src/common/Api/LeCoffreApi/sdk/Loader/LoaderService";
import DocumentService from "src/common/Api/LeCoffreApi/sdk/DocumentService";
import FileService from "src/common/Api/LeCoffreApi/sdk/FileService";
import CustomerService from "src/common/Api/LeCoffreApi/sdk/CustomerService";
import FolderService from "src/common/Api/LeCoffreApi/sdk/FolderService";
import { DEFAULT_VALIDATOR_ID } from "@Front/Config/AppConstants";
type IProps = {
onChange?: (files: File[]) => void;
open: boolean;
@ -196,7 +201,7 @@ export default class DepositOtherDocument extends React.Component<IProps, IState
);
}
public override componentDidMount(): void {}
public override componentDidMount(): void { }
private onCloseAlertUpload() {
this.setState({ showFailedUploaded: null });
@ -206,35 +211,91 @@ export default class DepositOtherDocument extends React.Component<IProps, IState
this.setState({
isLoading: true,
});
const filesArray = this.state.currentFiles;
const filesArray = this.state.currentFiles;
if (!filesArray) return;
let documentCreated: Document = {} as Document;
LoaderService.getInstance().show();
let documentCreated: any;
try {
documentCreated = await Documents.getInstance().post({
folder: {
uid: this.props.folder_uid,
},
depositor: {
uid: this.props.customer_uid,
},
documentCreated = await new Promise<any>((resolve: (document: any) => void) => {
const documentTypeData: any = {
folder: {
uid: this.props.folder_uid,
},
depositor: {
uid: this.props.customer_uid,
}
};
DocumentService.createDocument(documentTypeData, DEFAULT_VALIDATOR_ID).then((processCreated: any) => {
if (processCreated) {
const document: any = processCreated.processData;
resolve(document);
}
});
});
} catch (e) {
this.setState({ showFailedDocument: "Le dossier est vérifié aucune modification n'est acceptée", isLoading: false });
return;
}
const customer: any = await new Promise<any>((resolve: (customer: any) => void) => {
CustomerService.getCustomerByUid(this.props.customer_uid).then((process: any) => {
if (process) {
const customer: any = process.processData;
resolve(customer);
}
});
});
for (let i = 0; i < filesArray.length; i++) {
const formData = new FormData();
formData.append("file", filesArray[i]!.file, filesArray[i]!.fileName);
const query = JSON.stringify({ document: { uid: documentCreated.uid } });
formData.append("q", query);
try {
await Files.getInstance().post(formData);
} catch (e) {
this.setState({ showFailedUploaded: "Le fichier ne correspond pas aux critères demandés", isLoading: false });
return;
}
const file = filesArray[i]!.file;
await new Promise<void>((resolve: () => void) => {
const reader = new FileReader();
reader.onload = (event) => {
if (event.target?.result) {
const date: Date = new Date();
const strDate: string = `${date.getDate().toString().padStart(2, '0')}-${(date.getMonth() + 1).toString().padStart(2, '0')}-${date.getFullYear()}`;
const fileName: string = `${file.name.split('.')[0]}_${strDate}.${file.name.split('.').pop()}`;
const arrayBuffer: ArrayBuffer = event.target.result as ArrayBuffer;
const uint8Array: Uint8Array = new Uint8Array(arrayBuffer);
const fileBlob: any = {
type: file.type,
data: uint8Array
};
const fileData: any = {
file_blob: fileBlob,
file_name: fileName
};
FileService.createFile(fileData, DEFAULT_VALIDATOR_ID).then((processCreated: any) => {
const fileUid: string = processCreated.processData.uid;
DocumentService.getDocumentByUid(documentCreated.uid).then((process: any) => {
if (process) {
const document: any = process.processData;
let files: any[] = document.files;
if (!files) {
files = [];
}
files.push({ uid: fileUid });
DocumentService.updateDocument(process, { files: files, document_status: EDocumentStatus.DEPOSITED }).then(() => {
FolderService.refreshFolderByUid(document.folder.uid).then(() => resolve());
});
}
});
});
}
};
reader.readAsArrayBuffer(file);
});
}
this.setState({

View File

@ -213,9 +213,9 @@ export default function DragAndDrop(props: IProps) {
</div>
{documentFiles.length > 0 && (
<div className={classes["documents"]}>
{documentFiles.map((documentFile) => (
{documentFiles.map((documentFile, index) => (
<DocumentFileElement
key={documentFile.id}
key={documentFile.uid || `${documentFile.id}-${index}`}
isLoading={documentFile.isLoading}
file={documentFile.file}
onRemove={() => handleRemove(documentFile)}

View File

@ -1,6 +1,4 @@
import { AppRuleActions, AppRuleNames } from "@Front/Api/Entities/rule";
import Notifications from "@Front/Api/LeCoffreApi/Notary/Notifications/Notifications";
import OfficeFolderAnchors from "@Front/Api/LeCoffreApi/Notary/OfficeFolderAnchors/OfficeFolderAnchors";
import Rules, { RulesMode } from "@Front/Components/Elements/Rules";
import Module from "@Front/Config/Module";
import Toasts from "@Front/Stores/Toasts";
@ -17,6 +15,7 @@ export default function Navigation() {
const pathname = usePathname();
const getAnchoringStatus = useCallback(async () => {
/* TODO: review
const anchors = await OfficeFolderAnchors.getInstance().get({
where: {
status: {
@ -27,7 +26,10 @@ export default function Navigation() {
folder: true,
},
});
*/
// const anchors = [] as any[];
/* TODO: review
try {
for (const anchor of anchors) {
await OfficeFolderAnchors.getInstance().getByUid(anchor.folder?.uid as string);
@ -35,10 +37,13 @@ export default function Navigation() {
} catch (e) {
console.error(e);
}
*/
}, []);
const getNotifications = useCallback(async () => {
//await getAnchoringStatus();
/* TODO: review
const notifications = await Notifications.getInstance().get({
where: {
read: false,
@ -50,6 +55,9 @@ export default function Navigation() {
notification: { created_at: "desc" },
},
});
*/
const notifications = [] as any[];
notifications.forEach((notification) => {
Toasts.getInstance().open({
title: notification.notification.message,

View File

@ -8,6 +8,7 @@ import MenuItem from "@Front/Components/DesignSystem/Menu/MenuItem";
type IProps = {
isOpen: boolean;
closeModal: () => void;
isCustomer?: boolean;
};
type IState = {};
@ -19,29 +20,31 @@ export default class ProfileModal extends React.Component<IProps, IState> {
<>
<div className={classes["background"]} onClick={this.props.closeModal} />
<div className={classes["root"]}>
<MenuItem
{!this.props.isCustomer && <MenuItem
item={{
text: "Mon compte",
link: Module.getInstance().get().modules.pages.MyAccount.props.path,
}}
/>
/>}
<MenuItem
{!this.props.isCustomer && <MenuItem
item={{
text: "Guide de Prise en Main",
link: "https://ressources.lecoffre.io/",
target: "_blank",
}}
/>
/>}
<MenuItem
{!this.props.isCustomer && <MenuItem
item={{
text: "CGU",
link: "/CGU_LeCoffre_io.pdf",
hasSeparator: true,
}}
/>
<LogOutButton />
/>}
<LogOutButton isCustomer={this.props.isCustomer} />
</div>
</>
);

View File

@ -9,7 +9,7 @@ import ProfileModal from "./ProfileModal";
const headerBreakpoint = 1023;
export default function Profile() {
export default function Profile(props: { isCustomer?: boolean }) {
const { isOpen, toggle, close } = useOpenable();
useEffect(() => {
@ -27,7 +27,7 @@ export default function Profile() {
return (
<div className={classes["root"]}>
<IconButton icon={<UserIcon />} onClick={toggle} />
<ProfileModal isOpen={isOpen} closeModal={close} />
<ProfileModal isOpen={isOpen} closeModal={close} isCustomer={props.isCustomer} />
</div>
);
}

View File

@ -1,17 +1,16 @@
import LogoIcon from "@Assets/logo_standard_neutral.svg";
import Stripe from "@Front/Api/LeCoffreApi/Admin/Stripe/Stripe";
import Subscriptions from "@Front/Api/LeCoffreApi/Admin/Subscriptions/Subscriptions";
// import Stripe from "@Front/Api/LeCoffreApi/Admin/Stripe/Stripe";
// import Subscriptions from "@Front/Api/LeCoffreApi/Admin/Subscriptions/Subscriptions";
import Module from "@Front/Config/Module";
import JwtService from "@Front/Services/JwtService/JwtService";
import { InformationCircleIcon, LifebuoyIcon } from "@heroicons/react/24/outline";
// import JwtService from "@Front/Services/JwtService/JwtService";
import { LifebuoyIcon } from "@heroicons/react/24/outline";
import Head from "next/head";
import Image from "next/image";
import Link from "next/link";
import { useRouter } from "next/router";
import React, { useCallback, useEffect, useState } from "react";
import React, { useCallback, useEffect } from "react";
import IconButton from "../IconButton";
import Typography, { ETypo, ETypoColor } from "../Typography";
import BurgerMenu from "./BurgerMenu";
import classes from "./classes.module.scss";
import LogoCielNatureIcon from "./logo-ciel-notaires.jpeg";
@ -21,20 +20,22 @@ import Profile from "./Profile";
type IProps = {
isUserConnected: boolean;
isCustomer?: boolean;
};
const headerHeight = 75;
export default function Header(props: IProps) {
const { isUserConnected } = props;
const { isUserConnected, isCustomer } = props;
const router = useRouter();
const { pathname } = router;
const isOnCustomerLoginPage = Module.getInstance().get().modules.pages.CustomersLogin.props.path === pathname;
const [cancelAt, setCancelAt] = useState<Date | null>(null);
// const [cancelAt, setCancelAt] = useState<Date | null>(null);
const loadSubscription = useCallback(async () => {
/* TODO: review
const jwt = JwtService.getInstance().decodeJwt();
const subscription = await Subscriptions.getInstance().get({ where: { office: { uid: jwt?.office_Id } } });
if (subscription[0]) {
@ -43,6 +44,7 @@ export default function Header(props: IProps) {
setCancelAt(new Date(stripeSubscription.cancel_at! * 1000));
}
}
*/
}, []);
useEffect(() => {
@ -61,7 +63,7 @@ export default function Header(props: IProps) {
<Image src={LogoIcon} alt="logo" className={classes["logo"]} />
</Link>
</div>
{isUserConnected && (
{isUserConnected && !isCustomer && (
<>
<div className={classes["desktop"]}>
<Navigation />
@ -82,9 +84,14 @@ export default function Header(props: IProps) {
</div>
</>
)}
{isCustomer && (
<div className={classes["desktop"]}>
<Profile isCustomer={isCustomer} />
</div>
)}
{isOnCustomerLoginPage && <Image width={70} height={70} alt="ciel-nature" src={LogoCielNatureIcon}></Image>}
</div>
{cancelAt && (
{/* {cancelAt && (
<div className={classes["subscription-line"]}>
<InformationCircleIcon height="24" />
<Typography typo={ETypo.TEXT_MD_REGULAR} color={ETypoColor.COLOR_GENERIC_BLACK}>
@ -92,7 +99,7 @@ export default function Header(props: IProps) {
{cancelAt.toLocaleDateString()}.
</Typography>
</div>
)}
)} */}
</>
);
}

View File

@ -6,14 +6,18 @@ import React, { useCallback } from "react";
import MenuItem from "../Menu/MenuItem";
export default function LogOut() {
export default function LogOut(props: { isCustomer?: boolean }) {
const router = useRouter();
const variables = FrontendVariables.getInstance();
const disconnect = useCallback(() => {
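// Notary/collaborator sessions are closed through the idnot SSO logout endpoint;
// customer sessions are purely local, so a redirect to the home page is enough.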
UserStore.instance
.disconnect()
.then(() => router.push(`https://qual-connexion.idnot.fr/user/auth/logout?sourceURL=${variables.FRONT_APP_HOST}`));
if (!props.isCustomer) {
UserStore.instance
.disconnect()
.then(() => router.push(`https://qual-connexion.idnot.fr/user/auth/logout?sourceURL=${variables.FRONT_APP_HOST}`));
} else {
router.push("/");
}
}, [router, variables.FRONT_APP_HOST, props.isCustomer]);
return <MenuItem item={{ text: "Déconnexion", icon: <PowerIcon />, onClick: disconnect }} />;

View File

@ -4,6 +4,8 @@ import { IAppRule } from "@Front/Api/Entities/rule";
import { useRouter } from "next/router";
import Module from "@Front/Config/Module";
import UserStore from "@Front/Stores/UserStore";
export enum RulesMode {
OPTIONAL = "optional",
NECESSARY = "necessary",
@ -20,18 +22,23 @@ export default function Rules(props: IProps) {
const router = useRouter();
const [isShowing, setIsShowing] = React.useState(false);
const [hasJwt, setHasJwt] = React.useState(false);
// const [hasJwt, setHasJwt] = React.useState(false);
const getShowValue = useCallback(() => {
//const user: any = UserStore.instance.getUser();
if (props.mode === RulesMode.NECESSARY) {
//return user.isAdmin && user.isAdmin === 'true';
return props.rules.every((rule) => JwtService.getInstance().hasRule(rule.name, rule.action));
}
//const ruleNames: string[] = props.rules.map((rule: any) => rule.name);
//return user.role.rules.map((rule: any) => rule.name).filter((ruleName: string) => ruleNames.includes(ruleName)).length > 0;
return props.rules.length === 0 || !!props.rules.find((rule) => JwtService.getInstance().hasRule(rule.name, rule.action));
}, [props.mode, props.rules]);
useEffect(() => {
if (!JwtService.getInstance().decodeJwt()) return;
setHasJwt(true);
// TODO: review
//if (!JwtService.getInstance().decodeJwt()) return;
// setHasJwt(true);
setIsShowing(getShowValue());
}, [getShowValue, isShowing]);
@ -40,7 +47,8 @@ export default function Rules(props: IProps) {
return null;
}
if (!hasJwt || !isShowing) return null;
// TODO: review
//if (!hasJwt || !isShowing) return null;
return props.children;
}

View File

@ -57,6 +57,13 @@ export default function Tabs<T>({ onSelect, tabs: propsTabs }: IProps<T>) {
useEffect(() => {
tabs.current = propsTabs;
if (tabs.current && tabs.current.length > 0 && tabs.current[0]) {
setSelectedTab(tabs.current[0].value);
onSelect(tabs.current[0].value);
setTimeout(() => calculateVisibleElements(), 100);
}
}, [propsTabs]);
useEffect(() => {

View File

@ -2,11 +2,12 @@ import React, { useEffect } from "react";
import { useRouter } from "next/router";
import Module from "@Front/Config/Module";
import UserStore from "@Front/Stores/UserStore";
import { IBlock } from "@Front/Components/DesignSystem/SearchBlockList/BlockList/Block";
import DefaultDashboardWithList, { IPropsDashboardWithList } from "../DefaultDashboardWithList";
import User from "le-coffre-resources/dist/Notary";
import JwtService from "@Front/Services/JwtService/JwtService";
import Users, { IGetUsersparams } from "@Front/Api/LeCoffreApi/Admin/Users/Users";
import CollaboratorService from "src/common/Api/LeCoffreApi/sdk/CollaboratorService";
type IProps = IPropsDashboardWithList;
@ -15,6 +16,7 @@ export default function DefaultCollaboratorDashboard(props: IProps) {
const router = useRouter();
const { collaboratorUid } = router.query;
useEffect(() => {
/* TODO: review
const jwt = JwtService.getInstance().decodeJwt();
if (!jwt) return;
const query: IGetUsersparams = {
@ -28,10 +30,24 @@ export default function DefaultCollaboratorDashboard(props: IProps) {
},
},
};
Users.getInstance()
.get(query)
.then((users) => setCollaborators(users));
*/
const user: any = UserStore.instance.getUser();
const officeId: string = user.office.uid;
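// Collaborators are read from the process store: each process wraps the entity in
// processData, restricted here to the connected user's office.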
CollaboratorService.getCollaborators((processes: any[]) => {
if (processes.length > 0) {
let collaborators: any[] = processes.map((process: any) => process.processData);
// FilterBy office.uid
collaborators = collaborators.filter((collaborator: any) => collaborator.office.uid === officeId);
setCollaborators(collaborators);
}
});
}, []);
const onSelectedBlock = (block: IBlock) => {
@ -47,10 +63,10 @@ export default function DefaultCollaboratorDashboard(props: IProps) {
blocks={
collaborators
? collaborators.map((collaborator) => ({
id: collaborator.uid!,
primaryText: collaborator.contact?.first_name + " " + collaborator.contact?.last_name,
isActive: collaborator.uid === collaboratorUid,
}))
id: collaborator.uid!,
primaryText: collaborator.contact?.first_name + " " + collaborator.contact?.last_name,
isActive: collaborator.uid === collaboratorUid,
}))
: []
}
/>

View File

@ -1,4 +1,3 @@
import Folders from "@Front/Api/LeCoffreApi/Customer/Folders/Folders";
import { IBlock } from "@Front/Components/DesignSystem/SearchBlockList/BlockList/Block";
import Module from "@Front/Config/Module";
import JwtService from "@Front/Services/JwtService/JwtService";
@ -8,17 +7,22 @@ import React, { useEffect, useState } from "react";
import DefaultDashboardWithList, { IPropsDashboardWithList } from "../DefaultDashboardWithList";
type IProps = IPropsDashboardWithList & {};
import FolderService from "src/common/Api/LeCoffreApi/sdk/FolderService";
type IProps = IPropsDashboardWithList & {
isReady: boolean;
};
export default function DefaultCustomerDashboard(props: IProps) {
const router = useRouter();
const { folderUid } = router.query;
const { folderUid, profileUid } = router.query;
const [folders, setFolders] = useState<OfficeFolder[]>([]);
useEffect(() => {
const jwt = JwtService.getInstance().decodeCustomerJwt();
if (!jwt) return;
//const jwt = JwtService.getInstance().decodeCustomerJwt();
//if (!jwt) return;
/*
Folders.getInstance()
.get({
q: {
@ -42,7 +46,21 @@ export default function DefaultCustomerDashboard(props: IProps) {
},
})
.then((folders) => setFolders(folders));
}, []);
*/
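// Folders are only loaded once the customer has authenticated (isReady), then filtered
// down to the folders whose customers include the current profileUid from the route.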
if (props.isReady) {
FolderService.getFolders((processes: any[]) => {
if (processes.length > 0) {
let folders: any[] = processes.map((process: any) => process.processData);
// Filter By customer.uid
folders = folders.filter((folder: any) => folder.customers.some((customer: any) => customer.uid === profileUid));
setFolders(folders);
}
});
}
}, [props.isReady]);
const onSelectedBlock = (block: IBlock) => {
const folder = folders.find((folder) => folder.uid === block.id);
@ -50,10 +68,12 @@ export default function DefaultCustomerDashboard(props: IProps) {
router.push(
Module.getInstance()
.get()
.modules.pages.ClientDashboard.props.path.replace("[folderUid]", folder.uid ?? ""),
.modules.pages.ClientDashboard.props.path
.replace("[folderUid]", folder.uid ?? "")
.replace("[profileUid]", profileUid as string ?? ""),
);
};
return <DefaultDashboardWithList {...props} onSelectedBlock={onSelectedBlock} blocks={getBlocks(folders)} headerConnected={false}/>;
return <DefaultDashboardWithList {...props} onSelectedBlock={onSelectedBlock} blocks={getBlocks(folders)} headerConnected={false} isCustomer={true} />;
function getBlocks(folders: OfficeFolder[]): IBlock[] {
return folders.map((folder) => {

View File

@ -15,6 +15,7 @@ export type IPropsDashboardWithList = {
mobileBackText?: string;
headerConnected?: boolean;
noPadding?: boolean;
isCustomer?: boolean;
};
type IProps = IPropsDashboardWithList & ISearchBlockListProps;
@ -29,11 +30,12 @@ export default function DefaultDashboardWithList(props: IProps) {
headerConnected = true,
bottomButton,
noPadding = false,
isCustomer = false,
} = props;
return (
<div className={classes["root"]}>
<Header isUserConnected={headerConnected} />
<Header isUserConnected={headerConnected} isCustomer={isCustomer} />
<div className={classes["content"]}>
<SearchBlockList blocks={blocks} onSelectedBlock={onSelectedBlock} bottomButton={bottomButton} />
<div className={classes["right-side"]} data-no-padding={noPadding}>

View File

@ -5,31 +5,40 @@ import Module from "@Front/Config/Module";
import { IBlock } from "@Front/Components/DesignSystem/SearchBlockList/BlockList/Block";
import DefaultDashboardWithList, { IPropsDashboardWithList } from "../DefaultDashboardWithList";
import { DeedType } from "le-coffre-resources/dist/Notary";
import DeedTypes, { IGetDeedTypesParams } from "@Front/Api/LeCoffreApi/Notary/DeedTypes/DeedTypes";
import DeedTypeService from "src/common/Api/LeCoffreApi/sdk/DeedTypeService";
import { idAsUrl } from "@Front/Utils/ProcessIdUtils";
type IProps = IPropsDashboardWithList;
export default function DefaultDeedTypeDashboard(props: IProps) {
const [deedTypes, setDeedTypes] = React.useState<DeedType[] | null>(null);
const router = useRouter();
const { deedTypeUid } = router.query;
useEffect(() => {
const query: IGetDeedTypesParams = {
where: {
archived_at: null,
},
orderBy: {
name: "asc",
},
};
DeedTypes.getInstance()
.get(query)
.then((deedTypes) => setDeedTypes(deedTypes));
const { deedTypeUid } = router.query;
const [deedTypes, setDeedTypes] = React.useState<DeedType[] | null>(null);
useEffect(() => {
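// Deed types come back as a map keyed by processId; keep the id for navigation,
// drop archived entries and sort the rest by name.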
DeedTypeService.getDeedTypes((processes: Record<string, any>) => {
const deedTypes = Object.entries(processes).map(([processId, processData]) => ({
...processData,
processId: processId
}));
if (deedTypes.length > 0) {
// FilterBy archived_at (the result must be reassigned, otherwise the filter is a no-op) and OrderBy name asc
const activeDeedTypes = deedTypes.filter((deedType: any) => !deedType.archived_at);
activeDeedTypes.sort((a: any, b: any) => a.name.localeCompare(b.name));
setDeedTypes(activeDeedTypes);
} else {
console.log('[DefaultDeedTypeDashboard] No deed types found');
}
});
}, []);
const onSelectedBlock = (block: IBlock) => {
router.push(Module.getInstance().get().modules.pages.DeedTypes.pages.DeedTypesInformations.props.path.replace("[uid]", block.id));
// Remove ':0' suffix from processId for URL navigation
const urlId = idAsUrl(block.id as string);
router.push(Module.getInstance().get().modules.pages.DeedTypes.pages.DeedTypesInformations.props.path.replace("[uid]", urlId));
};
return (
@ -38,16 +47,19 @@ export default function DefaultDeedTypeDashboard(props: IProps) {
onSelectedBlock={onSelectedBlock}
blocks={
deedTypes
? deedTypes.map((deedTypes) => ({
id: deedTypes.uid!,
primaryText: deedTypes.name,
isActive: deedTypes.uid === deedTypeUid,
}))
? deedTypes.map((deedType: any) => {
const urlId = idAsUrl(deedType.processId);
return {
id: deedType.processId, // Keep full processId for internal use
primaryText: deedType.name,
isActive: urlId === deedTypeUid, // Compare without ':0' suffix
};
})
: []
}
bottomButton={{
link: Module.getInstance().get().modules.pages.DeedTypes.pages.Create.props.path,
text: "Créer une liste de pièces",
text: "Créer une liste de pièces", // TODO I think this is misleading, should be "Créer un type d'acte"
}}
/>
);

View File

@ -1,13 +1,14 @@
import React, { useEffect } from "react";
import { useRouter } from "next/router";
import Module from "@Front/Config/Module";
import { IBlock } from "@Front/Components/DesignSystem/SearchBlockList/BlockList/Block";
import DefaultDashboardWithList, { IPropsDashboardWithList } from "../DefaultDashboardWithList";
import JwtService from "@Front/Services/JwtService/JwtService";
import DocumentTypes from "@Front/Api/LeCoffreApi/Notary/DocumentTypes/DocumentTypes";
import { DocumentType } from "le-coffre-resources/dist/Notary";
import DocumentTypeService from "src/common/Api/LeCoffreApi/sdk/DocumentTypeService";
import UserStore from "@Front/Stores/UserStore";
type IProps = IPropsDashboardWithList;
export default function DefaultDocumentTypeDashboard(props: IProps) {
@ -15,18 +16,22 @@ export default function DefaultDocumentTypeDashboard(props: IProps) {
const router = useRouter();
const { documentTypeUid } = router.query;
useEffect(() => {
const jwt = JwtService.getInstance().decodeJwt();
if (!jwt) return;
DocumentTypes.getInstance()
.get({
where: {
office_uid: jwt.office_Id,
},
orderBy: {
name: "asc",
},
})
.then((documentTypes) => setDocumentTypes(documentTypes));
const user: any = UserStore.instance.getUser();
const officeId: string = user.office.uid;
DocumentTypeService.getDocumentTypes().then((processes: any[]) => {
if (processes.length > 0) {
let documents: any[] = processes.map((process: any) => process.processData);
// FilterBy office.uid
documents = documents.filter((document: any) => document.office.uid === officeId);
// OrderBy name asc
documents = documents.sort((a: any, b: any) => a.name.localeCompare(b.name));
setDocumentTypes(documents);
}
});
}, []);
const onSelectedBlock = (block: IBlock) => {
@ -42,10 +47,10 @@ export default function DefaultDocumentTypeDashboard(props: IProps) {
blocks={
documentTypes
? documentTypes.map((documentType) => ({
id: documentType.uid!,
primaryText: documentType.name,
isActive: documentType.uid === documentTypeUid,
}))
id: documentType.uid!,
primaryText: documentType.name,
isActive: documentType.uid === documentTypeUid,
}))
: []
}
bottomButton={{

View File

@ -1,4 +1,3 @@
import Folders, { IGetFoldersParams } from "@Front/Api/LeCoffreApi/Notary/Folders/Folders";
import EFolderStatus from "le-coffre-resources/dist/Customer/EFolderStatus";
import { OfficeFolder } from "le-coffre-resources/dist/Notary";
import React, { useCallback, useEffect } from "react";
@ -9,6 +8,9 @@ import { IBlock } from "@Front/Components/DesignSystem/SearchBlockList/BlockList
import { useRouter } from "next/router";
import DefaultDashboardWithList, { IPropsDashboardWithList } from "../DefaultDashboardWithList";
import FolderService from "src/common/Api/LeCoffreApi/sdk/FolderService";
import { idAsUrl } from "@Front/Utils/ProcessIdUtils";
type IProps = IPropsDashboardWithList & {
isArchived?: boolean;
};
@ -24,11 +26,11 @@ export default function DefaultNotaryDashboard(props: IProps) {
: Module.getInstance().get().modules.pages.Folder.pages.FolderInformation.props.path;
const getBlocks = useCallback(
(folders: OfficeFolder[]): IBlock[] => {
(folders: any[]): IBlock[] => {
const pendingFolders = folders
.filter((folder) => {
const pendingDocuments = (folder.documents ?? []).filter(
(document) => document.document_status === EDocumentStatus.DEPOSITED,
(document: any) => document.document_status === EDocumentStatus.DEPOSITED,
);
return pendingDocuments.length >= 1;
})
@ -39,7 +41,7 @@ export default function DefaultNotaryDashboard(props: IProps) {
const otherFolders = folders
.filter((folder) => {
const pendingDocuments = (folder.documents ?? []).filter(
(document) => document.document_status === EDocumentStatus.DEPOSITED,
(document: any) => document.document_status === EDocumentStatus.DEPOSITED,
);
return pendingDocuments.length === 0;
})
@ -47,15 +49,18 @@ export default function DefaultNotaryDashboard(props: IProps) {
return folder1.created_at! > folder2.created_at! ? -1 : 1;
});
return [...pendingFolders, ...otherFolders].map((folder) => {
return {
id: folder.uid!,
const blocks = [...pendingFolders, ...otherFolders].map((folder) => {
const res = {
id: idAsUrl(folder.processId),
primaryText: folder.name,
secondaryText: folder.folder_number,
isActive: folderUid === folder.uid,
showAlert: folder.documents?.some((document) => document.document_status === EDocumentStatus.DEPOSITED),
isActive: folderUid === idAsUrl(folder.processId),
showAlert: folder.documents?.some((document: any) => document.document_status === EDocumentStatus.DEPOSITED),
};
return res;
});
return blocks;
},
[folderUid],
);
@ -76,6 +81,8 @@ export default function DefaultNotaryDashboard(props: IProps) {
useEffect(() => {
let targetedStatus: EFolderStatus = EFolderStatus["LIVE" as keyof typeof EFolderStatus];
if (isArchived) targetedStatus = EFolderStatus.ARCHIVED;
/* TODO: review
const query: IGetFoldersParams = {
q: {
where: { status: targetedStatus },
@ -110,6 +117,30 @@ export default function DefaultNotaryDashboard(props: IProps) {
Folders.getInstance()
.get(query)
.then((folders) => setFolders(folders));
*/
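// Folders are now read from the process store: each entry keeps its processId for URL
// building and is filtered on the targeted folder status (live or archived).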
FolderService.getFolders((processes: Record<string, any>) => {
if (Object.keys(processes).length > 0) {
let folders: any[] = Object.entries(processes).map(([processId, process]) => {
const res = {
...process,
processId: processId
};
return res;
});
// FilterBy status
folders = folders.filter((folder: any) => {
const matches = folder.status === targetedStatus;
return matches;
});
setFolders(folders);
} else {
console.debug('[DefaultNotaryDashboard] No processes found');
}
});
}, [isArchived]);
return (

View File

@ -1,22 +1,26 @@
import { Office } from "le-coffre-resources/dist/SuperAdmin";
import React, { useEffect } from "react";
import { useRouter } from "next/router";
import Module from "@Front/Config/Module";
import { IBlock } from "@Front/Components/DesignSystem/SearchBlockList/BlockList/Block";
import DefaultDashboardWithList, { IPropsDashboardWithList } from "../DefaultDashboardWithList";
import Offices from "@Front/Api/LeCoffreApi/SuperAdmin/Offices/Offices";
import OfficeService from "src/common/Api/LeCoffreApi/sdk/OfficeService";
type IProps = IPropsDashboardWithList;
export default function DefaultOfficeDashboard(props: IProps) {
const [offices, setOffices] = React.useState<Office[] | null>(null);
const [offices, setOffices] = React.useState<any[] | null>(null);
const router = useRouter();
const { officeUid } = router.query;
useEffect(() => {
Offices.getInstance()
.get()
.then((offices) => setOffices(offices));
OfficeService.getOffices().then((processes: any[]) => {
if (processes.length > 0) {
const offices: any[] = processes.map((process: any) => process.processData);
setOffices(offices);
}
});
}, []);
const onSelectedBlock = (block: IBlock) => {
@ -30,11 +34,11 @@ export default function DefaultOfficeDashboard(props: IProps) {
blocks={
offices
? offices.map((office) => ({
id: office.uid!,
primaryText: office.name,
isActive: office.uid === officeUid,
secondaryText: office.crpcen,
}))
id: office.uid!,
primaryText: office.name,
isActive: office.uid === officeUid,
secondaryText: office.crpcen,
}))
: []
}
/>

View File

@ -4,23 +4,43 @@ import { useRouter } from "next/router";
import Module from "@Front/Config/Module";
import { IBlock } from "@Front/Components/DesignSystem/SearchBlockList/BlockList/Block";
import DefaultDashboardWithList, { IPropsDashboardWithList } from "../DefaultDashboardWithList";
import { OfficeRole } from "le-coffre-resources/dist/Notary";
import OfficeRoles, { IGetRolesParams } from "@Front/Api/LeCoffreApi/Admin/OfficeRoles/OfficeRoles";
import UserStore from "@Front/Stores/UserStore";
import OfficeRoleService from "src/common/Api/LeCoffreApi/sdk/OfficeRoleService";
type IProps = IPropsDashboardWithList;
export default function DefaultRoleDashboard(props: IProps) {
const [roles, setRoles] = React.useState<OfficeRole[] | null>(null);
const [roles, setRoles] = React.useState<any[] | null>(null);
const router = useRouter();
const { roleUid } = router.query;
useEffect(() => {
const query: IGetRolesParams = {
include: { rules: true },
};
OfficeRoles.getInstance()
.get(query)
.then((roles) => setRoles(roles));
useEffect(() => {
const user: any = UserStore.instance.getUser();
if (!user) {
return;
}
const office: any = user.office;
if (!office) {
return;
}
OfficeRoleService.getOfficeRoles().then(async (processes: any[]) => {
if (processes.length > 0) {
let officeRoles: any[] = processes.map((process: any) => process.processData);
// FilterBy office.uid
officeRoles = officeRoles.filter((officeRole: any) => officeRole.office.uid === office.uid);
// OrderBy name
officeRoles = officeRoles.sort((a: any, b: any) => a.name.localeCompare(b.name));
setRoles(officeRoles);
}
});
}, []);
const onSelectedBlock = (block: IBlock) => {
@ -34,10 +54,10 @@ export default function DefaultRoleDashboard(props: IProps) {
blocks={
roles
? roles.map((role) => ({
id: role.uid!,
primaryText: role.name,
isActive: role.uid === roleUid,
}))
id: role.uid!,
primaryText: role.name,
isActive: role.uid === roleUid,
}))
: []
}
bottomButton={{

View File

@ -1,26 +1,34 @@
import Users, { IGetUsersparams } from "@Front/Api/LeCoffreApi/SuperAdmin/Users/Users";
import User from "le-coffre-resources/dist/SuperAdmin";
import React, { useEffect } from "react";
import { useRouter } from "next/router";
import Module from "@Front/Config/Module";
import { IBlock } from "@Front/Components/DesignSystem/SearchBlockList/BlockList/Block";
import DefaultDashboardWithList, { IPropsDashboardWithList } from "../DefaultDashboardWithList";
import UserStore from "@Front/Stores/UserStore";
import CollaboratorService from "src/common/Api/LeCoffreApi/sdk/CollaboratorService";
type IProps = IPropsDashboardWithList;
export default function DefaultUserDashboard(props: IProps) {
const [users, setUsers] = React.useState<User[] | null>(null);
const [users, setUsers] = React.useState<any[] | null>(null);
const router = useRouter();
const { userUid } = router.query;
useEffect(() => {
const query: IGetUsersparams = {
include: { contact: true, office_membership: true },
};
const user: any = UserStore.instance.getUser();
if (!user) return;
const officeId: string = user.office.uid;
Users.getInstance()
.get(query)
.then((users) => setUsers(users));
CollaboratorService.getCollaborators((processes: any[]) => {
if (processes.length > 0) {
let collaborators: any[] = processes.map((process: any) => process.processData);
// FilterBy office.uid
collaborators = collaborators.filter((collaborator: any) => collaborator.office.uid === officeId);
setUsers(collaborators);
}
});
}, []);
const onSelectedBlock = (block: IBlock) => {
@ -34,11 +42,11 @@ export default function DefaultUserDashboard(props: IProps) {
blocks={
users
? users.map((user) => ({
id: user.uid!,
primaryText: user.contact?.first_name + " " + user.contact?.last_name,
isActive: user.uid === userUid,
secondaryText: user.office_membership?.crpcen + " - " + user.office_membership?.name,
}))
id: user.uid!,
primaryText: user.contact?.first_name + " " + user.contact?.last_name,
isActive: user.uid === userUid,
secondaryText: user.office?.crpcen + " - " + user.office?.name,
}))
: []
}
/>

View File

@ -6,7 +6,8 @@ import { OfficeFolder as OfficeFolderNotary } from "le-coffre-resources/dist/Not
import { useCallback, useEffect, useMemo, useState } from "react";
import classes from "./classes.module.scss";
import OfficeRib from "@Front/Api/LeCoffreApi/Customer/OfficeRib/OfficeRib";
import OfficeRibService from "src/common/Api/LeCoffreApi/sdk/OfficeRibService";
type IProps = {
folder: OfficeFolderNotary;
@ -19,9 +20,7 @@ export default function ContactBox(props: IProps) {
const [ribUrl, setRibUrl] = useState<string | null>(null);
const notaryContact = useMemo(
() =>
folder?.stakeholders!.find((stakeholder) => stakeholder.office_role?.name === "Notaire")?.contact ??
folder?.stakeholders![0]!.contact,
() => folder?.stakeholders!.find((stakeholder) => stakeholder.office_role?.name === "Notaire")?.contact ?? folder?.stakeholders![0]!.contact,
[folder],
);
@ -37,9 +36,13 @@ export default function ContactBox(props: IProps) {
useEffect(() => {
if (!folder?.office?.uid) return;
OfficeRib.getInstance()
.getRibStream(folder.office.uid)
.then((blob) => setRibUrl(URL.createObjectURL(blob)));
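// The office RIB is now stored in a process; its raw bytes and MIME type are used to
// rebuild a Blob locally so the preview/download works through an object URL.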
OfficeRibService.getOfficeRib().then((process: any) => {
if (process) {
const officeRib: any = process.processData;
const fileBlob: Blob = new Blob([officeRib.file_blob.data], { type: officeRib.file_blob.type });
setRibUrl(URL.createObjectURL(fileBlob));
}
});
}, [folder]);
const downloadRib = useCallback(async () => {

View File

@ -1,52 +1,178 @@
import DragAndDrop, { IDocumentFileWithUid } from "@Front/Components/DesignSystem/DragAndDrop";
import Typography, { ETypo, ETypoColor } from "@Front/Components/DesignSystem/Typography";
import { Document } from "le-coffre-resources/dist/Customer";
import { useCallback, useMemo, useState } from "react";
import classes from "./classes.module.scss";
import Files from "@Front/Api/LeCoffreApi/Customer/Files/Files";
import { EDocumentStatus } from "le-coffre-resources/dist/Customer/Document";
import { ToasterService } from "@Front/Components/DesignSystem/Toaster";
import Confirm from "@Front/Components/DesignSystem/OldModal/Confirm";
import FolderService from "src/common/Api/LeCoffreApi/sdk/FolderService";
import DocumentService from "src/common/Api/LeCoffreApi/sdk/DocumentService";
import FileService from "src/common/Api/LeCoffreApi/sdk/FileService";
import { FileBlob, FileData } from "@Front/Api/Entities/types";
import WatermarkService from "@Front/Services/WatermarkService";
import { DEFAULT_VALIDATOR_ID } from "@Front/Config/AppConstants";
type IProps = {
document: Document;
document: any;
customer: any;
onChange: () => void;
};
export default function DepositDocumentComponent(props: IProps) {
const { document, onChange } = props;
const { document, customer, onChange } = props;
const [isModalOpen, setIsModalOpen] = useState(false);
const [refused_reason, setRefusedReason] = useState<string | null>(null);
const defaultFiles: IDocumentFileWithUid[] = useMemo(() => {
const filesNotArchived = document.files?.filter((file) => !file.archived_at) ?? [];
return filesNotArchived.map((file) => ({
const filesNotArchived = document.files?.filter((file: any) => !file.archived_at) ?? [];
return filesNotArchived.map((file: any) => ({
id: file.uid!,
file: new File([""], file.file_name!, { type: file.mimetype }),
file: new File([""], file.file_name!, { type: file.file_blob.type }),
uid: file.uid!,
}));
}, [document.files]);
const addFile = useCallback(
(file: File) => {
const formData = new FormData();
const safeFileName = file.name.normalize("NFD").replace(/[\u0300-\u036f]/g, "");
formData.append("file", file, safeFileName);
const query = JSON.stringify({ document: { uid: document.uid } });
formData.append("q", query);
return Files.getInstance()
.post(formData)
.then(onChange)
.then(() => ToasterService.getInstance().success({ title: "Succès !", description: "Fichier uploadé avec succès!" }))
.catch((error) => ToasterService.getInstance().error({ title: "Erreur !", description: error.message }));
async (file: File) => {
try {
// Add watermark to the file before processing
const watermarkedFile = await WatermarkService.getInstance().addWatermark(file);
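// The (watermarked) file is read as an ArrayBuffer, persisted through FileService,
// then attached to the document, whose status moves to DEPOSITED before the parent
// folder is refreshed so the dashboard picks up the new file.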
return new Promise<void>(
(resolve: () => void) => {
const reader = new FileReader();
reader.onload = (event) => {
if (event.target?.result) {
const date: Date = new Date();
const strDate: string = `${date.getDate().toString().padStart(2, '0')}-${(date.getMonth() + 1).toString().padStart(2, '0')}-${date.getFullYear()}`;
const fileName: string = `${document.document_type.name}-${customer.contact.last_name}_${strDate}.${file.name.split('.').pop()}`;
const arrayBuffer: ArrayBuffer = event.target.result as ArrayBuffer;
const uint8Array: Uint8Array = new Uint8Array(arrayBuffer);
const fileBlob: FileBlob = {
type: watermarkedFile.type,
data: uint8Array
};
const fileData: FileData = {
file_blob: fileBlob,
file_name: fileName
};
FileService.createFile(fileData, DEFAULT_VALIDATOR_ID).then((processCreated: any) => {
const fileUid: string = processCreated.processData.uid;
DocumentService.getDocumentByUid(document.uid!).then((process: any) => {
if (process) {
const document: any = process.processData;
let files: any[] = document.files;
if (!files) {
files = [];
}
files.push({ uid: fileUid });
DocumentService.updateDocument(process, { files: files, document_status: EDocumentStatus.DEPOSITED }).then(() => {
FolderService.refreshFolderByUid(document.folder.uid).then(() => resolve());
});
}
});
});
}
};
reader.readAsArrayBuffer(watermarkedFile);
})
.then(onChange)
.then(() => ToasterService.getInstance().success({ title: "Succès !", description: "Fichier uploadé avec succès!" }))
.catch((error) => ToasterService.getInstance().error({ title: "Erreur !", description: error.message }));
} catch (error) {
console.error('Error processing file with watermark:', error);
// If watermarking fails, proceed with original file
return new Promise<void>(
(resolve: () => void) => {
const reader = new FileReader();
reader.onload = (event) => {
if (event.target?.result) {
const date: Date = new Date();
const strDate: string = `${date.getDate().toString().padStart(2, '0')}-${(date.getMonth() + 1).toString().padStart(2, '0')}-${date.getFullYear()}`;
const fileName: string = `${document.document_type.name}-${customer.contact.last_name}_${strDate}.${file.name.split('.').pop()}`;
const arrayBuffer: ArrayBuffer = event.target.result as ArrayBuffer;
const uint8Array: Uint8Array = new Uint8Array(arrayBuffer);
const fileBlob: FileBlob = {
type: file.type,
data: uint8Array
};
const fileData: FileData = {
file_blob: fileBlob,
file_name: fileName
};
FileService.createFile(fileData, DEFAULT_VALIDATOR_ID).then((processCreated: any) => {
const fileUid: string = processCreated.processData.uid;
DocumentService.getDocumentByUid(document.uid!).then((process: any) => {
if (process) {
const document: any = process.processData;
let files: any[] = document.files;
if (!files) {
files = [];
}
files.push({ uid: fileUid });
DocumentService.updateDocument(process, { files: files, document_status: EDocumentStatus.DEPOSITED }).then(() => {
FolderService.refreshFolderByUid(document.folder.uid).then(() => resolve());
});
}
});
});
}
};
reader.readAsArrayBuffer(file);
})
.then(onChange)
.then(() => ToasterService.getInstance().success({ title: "Succès !", description: "Fichier uploadé avec succès!" }))
.catch((error) => ToasterService.getInstance().error({ title: "Erreur !", description: error.message }));
}
},
[document, customer, onChange],
);
const deleteFile = useCallback(
(filedUid: string) => {
return Files.getInstance()
.delete(filedUid)
(fileUid: string) => {
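// Soft delete: the file process is flagged as deleted/archived, removed from the
// document's file list, and the document status falls back to ASKED.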
return new Promise<void>(
(resolve: () => void) => {
FileService.getFileByUid(fileUid).then((res: any) => {
if (res) {
FileService.updateFile(res.processId, { isDeleted: 'true', archived_at: new Date().toISOString() }).then(() => {
DocumentService.getDocumentByUid(document.uid!).then((process: any) => {
if (process) {
const document: any = process.processData;
let files: any[] = document.files;
if (!files) {
files = [];
}
files = files.filter((file: any) => file.uid !== fileUid);
DocumentService.updateDocument(process, { files: files, document_status: EDocumentStatus.ASKED }).then(() => {
FolderService.refreshFolderByUid(document.folder.uid).then(() => resolve());
});
}
});
});
}
});
})
.then(onChange)
.then(() => ToasterService.getInstance().success({ title: "Succès !", description: "Fichier supprimé avec succès!" }))
.catch((error) => ToasterService.getInstance().error({ title: "Erreur !", description: error.message }));
@ -55,7 +181,8 @@ export default function DepositDocumentComponent(props: IProps) {
);
const onOpenModal = useCallback(async () => {
const refused_reason = document.document_history?.find((history) => history.document_status === "REFUSED")?.refused_reason;
if (document.document_status !== "REFUSED") return;
const refused_reason = document.refused_reason;
if (!refused_reason) return;
setRefusedReason(refused_reason);
setIsModalOpen(true);

View File

@ -1,5 +1,3 @@
import DocumentsNotary from "@Front/Api/LeCoffreApi/Customer/DocumentsNotary/DocumentsNotary";
import FilesNotary from "@Front/Api/LeCoffreApi/Customer/FilesNotary/Files";
import Button, { EButtonSize, EButtonstyletype, EButtonVariant } from "@Front/Components/DesignSystem/Button";
import IconButton from "@Front/Components/DesignSystem/IconButton";
import Table from "@Front/Components/DesignSystem/Table";
@ -8,7 +6,7 @@ import Typography, { ETypo, ETypoColor } from "@Front/Components/DesignSystem/Ty
import BackArrow from "@Front/Components/Elements/BackArrow";
import DefaultTemplate from "@Front/Components/LayoutTemplates/DefaultTemplate";
import Module from "@Front/Config/Module";
import JwtService, { ICustomerJwtPayload } from "@Front/Services/JwtService/JwtService";
// import JwtService, { ICustomerJwtPayload } from "@Front/Services/JwtService/JwtService";
import { ArrowDownTrayIcon, EyeIcon } from "@heroicons/react/24/outline";
import { saveAs } from "file-saver";
import JSZip from "jszip";
@ -16,9 +14,14 @@ import { useRouter } from "next/router";
import React, { useCallback, useEffect, useState } from "react";
import classes from "./classes.module.scss";
import Link from "next/link";
import Folders from "@Front/Api/LeCoffreApi/Customer/Folders/Folders";
import Customer from "le-coffre-resources/dist/Customer";
import { DocumentNotary } from "le-coffre-resources/dist/Notary";
import { EDocumentNotaryStatus } from "le-coffre-resources/dist/Notary/DocumentNotary";
import FolderService from "src/common/Api/LeCoffreApi/sdk/FolderService";
import DocumentService from "src/common/Api/LeCoffreApi/sdk/DocumentService";
import FileService from "src/common/Api/LeCoffreApi/sdk/FileService";
import LoaderService from "src/common/Api/LeCoffreApi/sdk/Loader/LoaderService";
const header: readonly IHead[] = [
{
@ -42,11 +45,31 @@ export default function ReceivedDocuments() {
const [customer, setCustomer] = useState<Customer | null>(null);
const fetchFolderAndCustomer = useCallback(async () => {
let jwt: ICustomerJwtPayload | undefined;
if (typeof document !== "undefined") {
jwt = JwtService.getInstance().decodeCustomerJwt();
}
// let jwt: ICustomerJwtPayload | undefined;
// if (typeof document !== "undefined") {
// jwt = JwtService.getInstance().decodeCustomerJwt();
// }
// TODO: review
LoaderService.getInstance().show();
const folder: any = await new Promise<any>((resolve: (folder: any) => void) => {
FolderService.getFolderByUid(folderUid as string).then((process: any) => {
if (process) {
const folder: any = process.processData;
resolve(folder);
}
});
});
//const customer = folder?.customers?.find((customer) => customer.contact?.email === jwt?.email);
const customer = folder?.customers?.[0];
if (!customer) throw new Error("Customer not found");
setCustomer(customer);
return { folder, customer };
/*
const folder = await Folders.getInstance().getByUid(folderUid as string, {
q: {
office: true,
@ -76,32 +99,72 @@ export default function ReceivedDocuments() {
setCustomer(customer);
return { folder, customer };
*/
}, [folderUid]);
useEffect(() => {
fetchFolderAndCustomer();
}, [folderUid]); // Only depends on folderUid
// Separate effect that loads the documents when the customer changes
useEffect(() => {
const customerUid = customer?.uid;
if (!folderUid || !customerUid) return;
DocumentsNotary.getInstance()
.get({ where: { folder: { uid: folderUid }, customer: { uid: customerUid } }, include: { files: true } })
.then((documentsNotary) => setDocumentsNotary(documentsNotary));
}, [folderUid, customer, fetchFolderAndCustomer]);
const onDownload = useCallback((doc: DocumentNotary) => {
DocumentService.getDocuments().then(async (processes: any[]) => {
if (processes.length > 0) {
let documents: any[] = processes.map((process: any) => process.processData);
// FilterBy folder.uid & customer.uid
documents = documents.filter((document: any) => document.folder.uid === folderUid && document.customer /*&& document.customer.uid === customerUid*/);
for (const document of documents) {
if (document.files && document.files.length > 0) {
const files: any[] = [];
for (const file of document.files) {
files.push((await FileService.getFileByUid(file.uid)).processData);
}
document.files = files;
}
}
setDocumentsNotary(documents);
LoaderService.getInstance().hide();
}
});
}, [folderUid, customer]);
const onDownload = useCallback(async (doc: any) => {
const file = doc.files?.[0];
if (!file || !file?.uid || !doc.uid) return;
if (!file) return;
return FilesNotary.getInstance()
.download(file.uid, doc.uid)
.then((blob) => {
const url = URL.createObjectURL(blob);
const a = document.createElement("a");
a.href = url;
a.download = file.file_name ?? "file";
a.click();
URL.revokeObjectURL(url);
})
.catch((e) => console.warn(e));
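// Mark the document as DOWNLOADED on first access, then rebuild the file from its
// stored bytes and trigger the browser download through a temporary object URL.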
if (doc.document_status !== EDocumentNotaryStatus.DOWNLOADED) {
await new Promise<void>((resolve: () => void) => {
LoaderService.getInstance().show();
DocumentService.getDocumentByUid(doc.uid).then((process: any) => {
if (process) {
DocumentService.updateDocument(process, { document_status: EDocumentNotaryStatus.DOWNLOADED }).then(() => {
LoaderService.getInstance().hide();
resolve();
});
}
});
});
}
return new Promise<void>((resolve: () => void) => {
const blob = new Blob([file.file_blob.data], { type: file.file_blob.type });
const url = URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = file.file_name;
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
URL.revokeObjectURL(url);
resolve();
}).catch((e) => console.warn(e));
}, []);
const onDownloadAll = useCallback(async () => {
@ -110,16 +173,28 @@ export default function ReceivedDocuments() {
const zip = new JSZip();
const folder = zip.folder("documents") || zip;
const downloadPromises = documentsNotary.map(async (doc) => {
const downloadPromises = documentsNotary.map(async (doc: any) => {
const file = doc.files?.[0];
if (file && file.uid && doc.uid) {
const blob = await FilesNotary.getInstance().download(file.uid, doc.uid);
if (file) {
if (doc.document_status !== EDocumentNotaryStatus.DOWNLOADED) {
await new Promise<void>((resolve: () => void) => {
LoaderService.getInstance().show();
DocumentService.getDocumentByUid(doc.uid).then((process: any) => {
if (process) {
DocumentService.updateDocument(process, { document_status: EDocumentNotaryStatus.DOWNLOADED }).then(() => {
LoaderService.getInstance().hide();
resolve();
});
}
});
});
}
const blob = new Blob([file.file_blob.data], { type: file.file_blob.type });
folder.file(file.file_name ?? "file", blob);
}
});
await Promise.all(downloadPromises);
zip.generateAsync({ type: "blob" })
.then((blob: any) => {
saveAs(blob, "documents.zip");
@ -160,12 +235,9 @@ function buildRows(
folderUid: string | string[],
onDownloadFileNotary: (doc: DocumentNotary) => void,
): IRowProps[] {
console.log(documentsNotary);
console.log(folderUid);
return documentsNotary.map((documentNotary) => ({
key: documentNotary.uid ?? "",
name: formatName(documentNotary.files?.[0]?.file_name?.split(".")?.[0] ?? "") || "_",
name: documentNotary.files?.[0]?.file_name?.split(".")?.[0] || "_",
sentAt: new Date(documentNotary.created_at!).toLocaleDateString(),
// actions: <IconButton onClick={() => onDownloadFileNotary(documentNotary)} icon={<ArrowDownTrayIcon />} />,
actions: {
@ -185,7 +257,3 @@ function buildRows(
},
}));
}
function formatName(text: string): string {
return text.replace(/[^a-zA-Z0-9 ]/g, "");
}

View File

@ -3,16 +3,20 @@ import RightArrowIcon from "@Assets/Icons/right-arrow.svg";
import Button from "@Front/Components/DesignSystem/Button";
import FilePreview from "@Front/Components/DesignSystem/FilePreview";
import Typography, { ETypo, ETypoColor } from "@Front/Components/DesignSystem/Typography";
import { DocumentNotary, File } from "le-coffre-resources/dist/Notary";
import { DocumentNotary } from "le-coffre-resources/dist/Notary";
import Image from "next/image";
import { NextRouter, useRouter } from "next/router";
import React from "react";
import { FileBlob } from "@Front/Api/Entities/types";
import BasePage from "../../Base";
import classes from "./classes.module.scss";
import DocumentsNotary from "@Front/Api/LeCoffreApi/Customer/DocumentsNotary/DocumentsNotary";
import DefaultTemplate from "@Front/Components/LayoutTemplates/DefaultTemplate";
import FilesNotary from "@Front/Api/LeCoffreApi/Customer/FilesNotary/Files";
import DocumentService from "src/common/Api/LeCoffreApi/sdk/DocumentService";
import FileService from "src/common/Api/LeCoffreApi/sdk/FileService";
import LoaderService from "src/common/Api/LeCoffreApi/sdk/Loader/LoaderService";
type IProps = {};
type IPropsClass = {
@ -25,8 +29,8 @@ type IState = {
isValidateModalVisible: boolean;
refuseText: string;
selectedFileIndex: number;
selectedFile: File | null;
documentNotary: DocumentNotary | null;
selectedFile: { uid: string; file_name: string; file_blob: FileBlob } | null;
documentNotary: any | null;
fileBlob: Blob | null;
isLoading: boolean;
};
@ -71,10 +75,10 @@ class ViewDocumentsNotaryClass extends BasePage<IPropsClass, IState> {
</div>
)}
<div className={classes["file-container"]}>
{this.state.selectedFile.mimetype === "application/pdf" ||
this.state.selectedFile.mimetype === "image/jpeg" ||
this.state.selectedFile.mimetype === "image/png" ||
this.state.selectedFile.mimetype === "image/jpg" ? (
{this.state.selectedFile.file_blob.type === "application/pdf" ||
this.state.selectedFile.file_blob.type === "image/jpeg" ||
this.state.selectedFile.file_blob.type === "image/png" ||
this.state.selectedFile.file_blob.type === "image/jpg" ? (
<FilePreview
href={this.state.fileBlob ? URL.createObjectURL(this.state.fileBlob) : ""}
fileName={this.state.selectedFile.file_name}
@ -123,16 +127,31 @@ class ViewDocumentsNotaryClass extends BasePage<IPropsClass, IState> {
override async componentDidMount() {
try {
const documentNotary = await DocumentsNotary.getInstance().getByUid(this.props.documentUid, {
files: true,
folder: true,
depositor: true,
LoaderService.getInstance().show();
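// The notary document and its files are loaded from their processes; file contents are
// embedded as byte arrays, so previews are built from local Blobs rather than downloaded.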
const documentNotary: any = await new Promise<any>((resolve: (document: any) => void) => {
DocumentService.getDocumentByUid(this.props.documentUid).then(async (process: any) => {
if (process) {
const document: any = process.processData;
if (document.files && document.files.length > 0) {
const files: any[] = [];
for (const file of document.files) {
files.push((await FileService.getFileByUid(file.uid)).processData);
}
document.files = files;
}
resolve(document);
}
});
});
LoaderService.getInstance().hide();
this.setState(
{
documentNotary,
selectedFileIndex: 0,
selectedFile: documentNotary.files![0]!,
selectedFile: documentNotary.files![0] as any,
isLoading: false,
},
() => {
@ -149,8 +168,7 @@ class ViewDocumentsNotaryClass extends BasePage<IPropsClass, IState> {
private async getFilePreview(): Promise<void> {
try {
const fileBlob: Blob = await FilesNotary.getInstance().download(this.state.selectedFile?.uid as string, this.props.documentUid);
const fileBlob: Blob = new Blob([this.state.selectedFile!.file_blob.data], { type: this.state.selectedFile!.file_blob.type });
this.setState({
fileBlob,
});

View File

@ -1,16 +1,14 @@
"use client";
import Documents, { IGetDocumentsparams } from "@Front/Api/LeCoffreApi/Customer/Documents/Documents";
import Typography, { ETypo, ETypoColor } from "@Front/Components/DesignSystem/Typography";
import Customer, { Document, DocumentType } from "le-coffre-resources/dist/Customer";
import React, { useCallback, useEffect, useMemo, useState } from "react";
import { DocumentNotary, type OfficeFolder as OfficeFolderNotary } from "le-coffre-resources/dist/Notary";
import { DocumentNotary, OfficeFolder as OfficeFolderNotary } from "le-coffre-resources/dist/Notary";
import classes from "./classes.module.scss";
import { useRouter } from "next/router";
import JwtService, { ICustomerJwtPayload } from "@Front/Services/JwtService/JwtService";
import Folders from "@Front/Api/LeCoffreApi/Customer/Folders/Folders";
import Tag, { ETagColor } from "@Front/Components/DesignSystem/Tag";
import DefaultCustomerDashboard from "@Front/Components/LayoutTemplates/DefaultCustomerDashboard";
@ -21,28 +19,63 @@ import Module from "@Front/Config/Module";
import Separator, { ESeperatorColor, ESeperatorDirection } from "@Front/Components/DesignSystem/Separator";
import NotificationBox from "@Front/Components/DesignSystem/NotificationBox";
import ContactBox from "./ContactBox";
import DocumentsNotary from "@Front/Api/LeCoffreApi/Customer/DocumentsNotary/DocumentsNotary";
import { EDocumentNotaryStatus } from "le-coffre-resources/dist/Notary/DocumentNotary";
import DepositOtherDocument from "@Front/Components/DesignSystem/DepositOtherDocument";
import AuthModal from "src/sdk/AuthModal";
import FolderService from "src/common/Api/LeCoffreApi/sdk/FolderService";
import DocumentService from "src/common/Api/LeCoffreApi/sdk/DocumentService";
import DocumentTypeService from "src/common/Api/LeCoffreApi/sdk/DocumentTypeService";
import FileService from "src/common/Api/LeCoffreApi/sdk/FileService";
import LoaderService from "src/common/Api/LeCoffreApi/sdk/Loader/LoaderService";
type IProps = {};
export default function ClientDashboard(props: IProps) {
const router = useRouter();
let { folderUid } = router.query;
let { folderUid, profileUid } = router.query;
const [documents, setDocuments] = useState<Document[] | null>(null);
const [customer, setCustomer] = useState<Customer | null>(null);
const [folder, setFolder] = useState<OfficeFolderNotary | null>(null);
const [documentsNotary, setDocumentsNotary] = useState<DocumentNotary[]>([]);
const [isAddDocumentModalVisible, setIsAddDocumentModalVisible] = useState<boolean>(false);
const [isReady, setIsReady] = useState(false);
const [isAuthModalOpen, setIsAuthModalOpen] = useState(true);
const fetchFolderAndCustomer = useCallback(async () => {
let jwt: ICustomerJwtPayload | undefined;
if (typeof document !== "undefined") {
jwt = JwtService.getInstance().decodeCustomerJwt();
}
// TODO: review
LoaderService.getInstance().show();
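// The folder is resolved from the process store and the customer is matched by the
// profileUid carried in the route, replacing the former JWT-based lookup.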
const { folder, customer } = await new Promise<any>((resolve) => {
FolderService.getFolderByUid(folderUid as string).then((process: any) => {
if (process) {
const folder: any = process.processData;
const customers: any[] = folder.customers;
const customer: any = customers.find((customer: any) => customer.uid === profileUid as string);
if (customer) {
resolve({ folder: folder, customer });
}
}
});
});
setCustomer(customer);
setFolder(folder);
LoaderService.getInstance().hide();
return { folder, customer };
/*
const folder = await Folders.getInstance().getByUid(folderUid as string, {
q: {
office: true,
@ -73,46 +106,77 @@ export default function ClientDashboard(props: IProps) {
setCustomer(customer);
return { folder, customer };
*/
}, [folderUid]);
const fetchDocuments = useCallback(
(customerUid: string | undefined) => {
const query: IGetDocumentsparams = {
where: { depositor: { uid: customerUid }, folder_uid: folderUid as string },
include: {
files: true,
document_history: true,
document_type: true,
depositor: true,
folder: {
include: {
customers: {
include: {
contact: true,
},
},
},
},
},
};
async (customerUid: string | undefined) => {
setDocuments([]);
LoaderService.getInstance().show();
return new Promise<void>((resolve: () => void) => {
DocumentService.getDocuments().then(async (processes: any[]) => {
if (processes.length > 0) {
let documents: any[] = processes.map((process: any) => process.processData);
return Documents.getInstance()
.get(query)
.then((documents) => setDocuments(documents));
// FilterBy folder.uid & depositor.uid
documents = documents.filter((document: any) => document.folder.uid === folderUid && document.depositor && document.depositor.uid === customerUid);
for (const document of documents) {
if (document.document_type) {
document.document_type = (await DocumentTypeService.getDocumentTypeByUid(document.document_type.uid)).processData;
}
if (document.files && document.files.length > 0) {
const files: any[] = [];
for (const file of document.files) {
files.push((await FileService.getFileByUid(file.uid)).processData);
}
document.files = files;
}
}
setDocuments(documents);
}
LoaderService.getInstance().hide();
resolve();
});
});
},
[folderUid],
);
/*
useEffect(() => {
fetchFolderAndCustomer().then(({ customer }) => fetchDocuments(customer.uid));
}, [fetchDocuments, fetchFolderAndCustomer]);
*/
useEffect(() => {
setDocumentsNotary([]);
const customerUid = customer?.uid;
if (!folderUid || !customerUid) return;
DocumentsNotary.getInstance()
.get({ where: { folder: { uid: folderUid }, customer: { uid: customerUid } }, include: { files: true } })
.then((documentsNotary) => setDocumentsNotary(documentsNotary));
LoaderService.getInstance().show();
DocumentService.getDocuments().then(async (processes: any[]) => {
if (processes.length > 0) {
let documents: any[] = processes.map((process: any) => process.processData);
// FilterBy folder.uid & customer.uid
documents = documents.filter((document: any) => document.folder.uid === folderUid && document.customer && document.customer.uid === customerUid);
for (const document of documents) {
if (document.files && document.files.length > 0) {
const files: any[] = [];
for (const file of document.files) {
files.push((await FileService.getFileByUid(file.uid)).processData);
}
document.files = files;
}
}
setDocumentsNotary(documents);
LoaderService.getInstance().hide();
}
});
}, [folderUid, customer?.uid]);
const documentsNotaryNotRead = useMemo(
@ -147,8 +211,8 @@ export default function ClientDashboard(props: IProps) {
}, [customer, folderUid, isAddDocumentModalVisible, onCloseModalAddDocument, folder]);
return (
<DefaultCustomerDashboard>
<div className={classes["root"]}>
<DefaultCustomerDashboard isReady={isReady}>
{isReady && (<div className={classes["root"]}>
<div className={classes["top"]}>
<div className={classes["folder-info-container"]}>
<Typography typo={ETypo.TEXT_MD_REGULAR} color={ETypoColor.TEXT_SECONDARY}>
@ -227,6 +291,7 @@ export default function ClientDashboard(props: IProps) {
<DepositDocumentComponent
key={document.uid}
document={document}
customer={customer}
onChange={() => fetchDocuments(customer?.uid)}
/>
))}
@ -244,7 +309,16 @@ export default function ClientDashboard(props: IProps) {
Ajouter d'autres documents
</Button>
{isAddDocumentModalVisible && renderBox()}
</div>
</div>)}
{isAuthModalOpen && <AuthModal
isOpen={isAuthModalOpen}
onClose={() => {
setIsReady(true);
setIsAuthModalOpen(false);
fetchFolderAndCustomer().then(({ customer }) => fetchDocuments(customer.uid));
}}
/>}
</DefaultCustomerDashboard>
);
}

View File

@ -1,14 +1,10 @@
import { ChevronLeftIcon } from "@heroicons/react/24/solid";
import OfficeRoles from "@Front/Api/LeCoffreApi/Admin/OfficeRoles/OfficeRoles";
import Roles from "@Front/Api/LeCoffreApi/Admin/Roles/Roles";
import Users from "@Front/Api/LeCoffreApi/Admin/Users/Users";
import Button, { EButtonstyletype, EButtonVariant } from "@Front/Components/DesignSystem/Button";
import Confirm from "@Front/Components/DesignSystem/OldModal/Confirm";
import Switch from "@Front/Components/DesignSystem/Switch";
import Typography, { ETypo, ETypoColor } from "@Front/Components/DesignSystem/Typography";
import DefaultCollaboratorDashboard from "@Front/Components/LayoutTemplates/DefaultCollaboratorDashboard";
import Module from "@Front/Config/Module";
import User, { OfficeRole } from "le-coffre-resources/dist/Admin";
import Link from "next/link";
import { useRouter } from "next/router";
import { useCallback, useEffect, useState } from "react";
@ -18,12 +14,17 @@ import { IOption } from "@Front/Components/DesignSystem/Dropdown/DropdownMenu/Dr
import { getLabel } from "@Front/Components/DesignSystem/Dropdown";
import SelectField from "@Front/Components/DesignSystem/Form/SelectField";
import LoaderService from "src/common/Api/LeCoffreApi/sdk/Loader/LoaderService";
import CollaboratorService from "src/common/Api/LeCoffreApi/sdk/CollaboratorService";
import OfficeRoleService from "src/common/Api/LeCoffreApi/sdk/OfficeRoleService";
import RoleService from "src/common/Api/LeCoffreApi/sdk/RoleService";
type IProps = {};
export default function CollaboratorInformations(props: IProps) {
const router = useRouter();
let { collaboratorUid } = router.query;
const [userSelected, setUserSelected] = useState<User | null>(null);
const [userSelected, setUserSelected] = useState<any | null>(null);
const [availableRoles, setAvailableRoles] = useState<IOption[]>([]);
const [roleModalOpened, setRoleModalOpened] = useState<boolean>(false);
@ -35,7 +36,7 @@ export default function CollaboratorInformations(props: IProps) {
useEffect(() => {
if (!userSelected) return;
setIsAdminChecked(userSelected.role?.name === "admin");
setIsAdminChecked(userSelected.role.name === "admin");
}, [userSelected]);
const handleRoleChange = useCallback((option: IOption) => {
@ -46,60 +47,54 @@ export default function CollaboratorInformations(props: IProps) {
const closeRoleModal = useCallback(() => {
setRoleModalOpened(false);
setSelectedOption({
id: (userSelected?.office_role ? userSelected?.office_role?.uid : userSelected?.role?.uid) ?? "",
label: userSelected?.office_role ? userSelected?.office_role?.name : "Utilisateur restreint",
id: userSelected?.role?.uid ?? "",
label: userSelected?.role?.name ?? "Utilisateur restreint"
});
}, [userSelected?.office_role, userSelected?.role?.uid]);
}, [userSelected?.role]);
const changeRole = useCallback(async () => {
await Users.getInstance().put(
userSelected?.uid as string,
User.hydrate<User>({
uid: userSelected?.uid as string,
office_role: OfficeRole.hydrate<OfficeRole>({
uid: selectedOption?.id as string,
}),
}),
);
setRoleModalOpened(false);
LoaderService.getInstance().show();
CollaboratorService.getCollaboratorByUid(collaboratorUid as string).then((process: any) => {
if (process) {
CollaboratorService.updateCollaborator(process, { office_role: { uid: selectedOption?.id as string } }).then(() => {
LoaderService.getInstance().hide();
setRoleModalOpened(false);
});
}
});
}, [selectedOption, collaboratorUid]);
const changeAdmin = useCallback(async () => {
try {
if (adminRoleType === "add") {
const adminRole = await Roles.getInstance().getOne({
where: {
name: "admin",
},
});
LoaderService.getInstance().show();
CollaboratorService.getCollaboratorByUid(collaboratorUid as string).then(async (process: any) => {
if (process) {
const role: any = (await RoleService.getRoles())
.map((process: any) => process.processData)
.filter((role: any) => role.name === "admin")[0];
if (!adminRole) return;
await Users.getInstance().put(
userSelected?.uid as string,
User.hydrate<User>({
uid: userSelected?.uid as string,
office_role: undefined,
role: adminRole,
}),
);
CollaboratorService.updateCollaborator(process, { role: { uid: role.uid } }).then(() => {
LoaderService.getInstance().hide();
setAdminModalOpened(false);
});
}
});
} else {
const defaultRole = await Roles.getInstance().getOne({
where: {
name: "default",
},
});
LoaderService.getInstance().show();
CollaboratorService.getCollaboratorByUid(collaboratorUid as string).then(async (process: any) => {
if (process) {
const role: any = (await RoleService.getRoles())
.map((process: any) => process.processData)
.filter((role: any) => role.name === "default")[0];
if (!defaultRole) return;
await Users.getInstance().put(
userSelected?.uid as string,
User.hydrate<User>({
uid: userSelected?.uid as string,
office_role: undefined,
role: defaultRole,
}),
);
CollaboratorService.updateCollaborator(process, { role: { uid: role.uid } }).then(() => {
LoaderService.getInstance().hide();
setAdminModalOpened(false);
});
}
});
}
setAdminModalOpened(false);
} catch (e) {
console.error(e);
}
@ -113,37 +108,29 @@ export default function CollaboratorInformations(props: IProps) {
}, []);
const closeAdminModal = useCallback(() => {
setIsAdminChecked(userSelected?.role?.name === "admin" && !userSelected.office_role);
setIsAdminChecked(userSelected?.role.name === "admin");
setAdminModalOpened(false);
}, [userSelected]);
useEffect(() => {
async function getUser() {
if (!collaboratorUid) return;
const user = await Users.getInstance().getByUid(collaboratorUid as string, {
q: {
contact: true,
office_role: true,
role: true,
seats: {
include: {
subscription: true,
},
},
},
});
if (!user) return;
LoaderService.getInstance().show();
CollaboratorService.getCollaboratorByUid(collaboratorUid as string).then(async (process: any) => {
if (process) {
const collaborator: any = process.processData;
const roles = await OfficeRoles.getInstance().get();
if (!roles) return;
setAvailableRoles(roles.map((role) => ({ id: role.uid ?? "", label: role.name })));
setUserSelected(user);
setSelectedOption({
id: (user?.office_role ? user?.office_role?.uid : user?.role?.uid) ?? "",
label: user?.office_role ? user?.office_role?.name : "Utilisateur restreint",
const officeRoles: any[] = (await OfficeRoleService.getOfficeRoles())
.map((process: any) => process.processData);
setUserSelected(collaborator);
setAvailableRoles(officeRoles.map((officeRole: any) => ({ id: officeRole.uid, label: officeRole.name })));
setSelectedOption({ id: collaborator.office_role.uid, label: collaborator.office_role.name });
LoaderService.getInstance().hide();
}
});
}
getUser();
}, [collaboratorUid]);
@ -154,7 +141,7 @@ export default function CollaboratorInformations(props: IProps) {
<Typography typo={ETypo.TITLE_H1}>
{userSelected?.contact?.first_name + " " + userSelected?.contact?.last_name}
</Typography>
{userSelected && userSelected.seats?.some((seat) => new Date(seat.subscription!.end_date) >= new Date()) && (
{userSelected && userSelected.seats?.some((seat: any) => new Date(seat.subscription!.end_date) >= new Date()) && (
<div className={classes["subscription-active"]}>
<div className={classes["subscription-active-dot"]} />
<Typography typo={ETypo.TEXT_LG_REGULAR} color={ETypoColor.COLOR_SUCCESS_600}>
@ -1,4 +1,3 @@
import DeedTypes from "@Front/Api/LeCoffreApi/Notary/DeedTypes/DeedTypes";
import Button, { EButtonstyletype, EButtonVariant } from "@Front/Components/DesignSystem/Button";
import Form from "@Front/Components/DesignSystem/Form";
import TextAreaField from "@Front/Components/DesignSystem/Form/TextareaField";
@ -6,15 +5,22 @@ import TextField from "@Front/Components/DesignSystem/Form/TextField";
import Confirm from "@Front/Components/DesignSystem/OldModal/Confirm";
import Typography, { ETypo } from "@Front/Components/DesignSystem/Typography";
import DefaultDeedTypesDashboard from "@Front/Components/LayoutTemplates/DefaultDeedTypeDashboard";
import { ToasterService } from "@Front/Components/DesignSystem/Toaster";
import Module from "@Front/Config/Module";
import JwtService from "@Front/Services/JwtService/JwtService";
import { DeedType, Office } from "le-coffre-resources/dist/Admin";
import { useRouter } from "next/router";
import { useCallback, useState } from "react";
import classes from "./classes.module.scss";
import { validateOrReject, ValidationError } from "class-validator";
import UserStore from "@Front/Stores/UserStore";
import LoaderService from "src/common/Api/LeCoffreApi/sdk/Loader/LoaderService";
import DeedTypeService from "src/common/Api/LeCoffreApi/sdk/DeedTypeService";
import { DEFAULT_VALIDATOR_ID } from "@Front/Config/AppConstants";
import Auth from "@Front/Api/Auth/IdNot";
import { idAsUrl } from "@Front/Utils/ProcessIdUtils";
type IProps = {};
export default function DeedTypesCreate(props: IProps) {
const [hasChanged, setHasChanged] = useState<boolean>(false);
@ -25,38 +31,66 @@ export default function DeedTypesCreate(props: IProps) {
const onSubmitHandler = useCallback(
async (e: React.FormEvent<HTMLFormElement> | null, values: { [key: string]: string }) => {
try {
const jwt = JwtService.getInstance().decodeJwt();
const deedType = DeedType.hydrate<DeedType>({
name: values["name"],
description: values["description"],
office: Office.hydrate<Office>({
uid: jwt?.office_Id,
}),
});
try {
await validateOrReject(deedType, { groups: ["createDeedType"], forbidUnknownValues: true });
} catch (validationErrors: Array<ValidationError> | any) {
setValidationError(validationErrors as ValidationError[]);
const user: any = UserStore.instance.getUser();
if (!user) {
console.error("DeedTypesCreate: User not found - user is null or undefined");
return;
}
const res = await Auth.getInstance().getOfficeProcessByIdNot();
if (!res.success) {
console.error("DeedTypesCreate: Office not found - office is null or undefined");
return;
}
const officeId: string | undefined = res.data.processId;
const officeIdNot: string | undefined = res.data.processData['idNot'];
if (!officeId || !officeIdNot) {
console.error("DeedTypesCreate: officeId or officeIdNot is undefined - office.processData.idNot is missing");
return;
}
const deedTypeCreated = await DeedTypes.getInstance().post(
DeedType.hydrate<DeedType>({
name: values["name"],
description: values["description"],
office: Office.hydrate<Office>({
uid: jwt?.office_Id,
}),
}),
);
// TODO: We should update the type definition to be able to use validation again
// const deedType = DeedType.hydrate<DeedType>({
// name: values["name"],
// description: values["description"],
// office: Office.hydrate<Office>({
// idNot: officeId,
// }),
// });
// try {
// await validateOrReject(deedType, { groups: ["createDeedType"], forbidUnknownValues: true });
// } catch (validationErrors: Array<ValidationError> | any) {
// console.log("validationErrors", validationErrors);
// setValidationError(Array.isArray(validationErrors) ? validationErrors : []);
// return;
// }
router.push(
Module.getInstance()
.get()
.modules.pages.DeedTypes.pages.DeedTypesInformations.props.path.replace("[uid]", deedTypeCreated.uid!),
);
const deedTypeData: any = {
name: values["name"],
description: values["description"],
office: {
uid: officeId,
idNot: officeIdNot,
}
};
LoaderService.getInstance().show();
DeedTypeService.createDeedType(deedTypeData, DEFAULT_VALIDATOR_ID).then((processCreated: any) => {
ToasterService.getInstance().success({
title: "Succès !",
description: "Type d'acte créé avec succès"
});
const deedTypeUid = processCreated.processId;
// Remove ':0' suffix from processId for URL navigation
const urlId = idAsUrl(deedTypeUid);
router.push(
Module.getInstance()
.get()
.modules.pages.DeedTypes.pages.DeedTypesInformations.props.path.replace("[uid]", urlId),
);
LoaderService.getInstance().hide();
});
} catch (validationErrors: Array<ValidationError> | any) {
setValidationError(validationErrors as ValidationError[]);
setValidationError(Array.isArray(validationErrors) ? validationErrors : []);
return;
}
},
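The navigation above strips the ':0' suffix with idAsUrl from @Front/Utils/ProcessIdUtils, and the read/update paths further down re-add it with idAsProcessId. The implementation of those helpers is not part of this diff; a minimal sketch consistent with the comments ("remove ':0' suffix from processId for URL navigation", "add it back for API calls") could look like:

// Sketch only; the real ProcessIdUtils module is not shown in this diff.
export function idAsUrl(processId: string): string {
    // Strip a trailing ':0' so the process id can be used as a clean URL segment.
    return processId.endsWith(":0") ? processId.slice(0, -2) : processId;
}

export function idAsProcessId(urlId: string): string {
    // Re-append the ':0' suffix expected by the process API.
    return urlId.endsWith(":0") ? urlId : `${urlId}:0`;
}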
@ -93,12 +127,12 @@ export default function DeedTypesCreate(props: IProps) {
<TextField
name="name"
placeholder="Nom de l'acte"
validationError={validationError.find((error) => error.property === "name")}
validationError={Array.isArray(validationError) ? validationError.find((error) => error.property === "name") : undefined}
/>
<TextAreaField
name="description"
placeholder="Description"
validationError={validationError.find((error) => error.property === "description")}
validationError={Array.isArray(validationError) ? validationError.find((error) => error.property === "description") : undefined}
/>
<div className={classes["buttons-container"]}>
<Button variant={EButtonVariant.PRIMARY} styletype={EButtonstyletype.OUTLINED} onClick={onCancel}>
@ -14,6 +14,11 @@ import { ValidationError } from "class-validator";
import classes from "./classes.module.scss";
import DeedTypeService from "src/common/Api/LeCoffreApi/sdk/DeedTypeService";
import LoaderService from "src/common/Api/LeCoffreApi/sdk/Loader/LoaderService";
import MessageBus from "src/sdk/MessageBus";
import { idAsProcessId } from "@Front/Utils/ProcessIdUtils";
export default function DeedTypesEdit() {
const router = useRouter();
let { deedTypeUid } = router.query;
@ -27,12 +32,15 @@ export default function DeedTypesEdit() {
setHasChanged(false);
async function getDeedType() {
if (!deedTypeUid) return;
const deedType = await DeedTypes.getInstance().getByUid(deedTypeUid as string, {
q: {
document_types: true,
},
LoaderService.getInstance().show();
// deedTypeUid comes from URL without ':0' suffix, add it back for API calls
const processId = idAsProcessId(deedTypeUid as string);
MessageBus.getInstance().getProcessData(processId).then((processData: any) => {
if (processData) {
setDeedTypeSelected(processData);
}
LoaderService.getInstance().hide();
});
setDeedTypeSelected(deedType);
}
getDeedType();
@ -42,42 +50,52 @@ export default function DeedTypesEdit() {
setIsConfirmModalVisible(false);
}, []);
const onSubmitHandler = useCallback(
async (e: React.FormEvent<HTMLFormElement> | null, values: { [key: string]: string | undefined }) => {
const deedType = DeedType.hydrate<DeedType>({
uid: deedTypeUid as string,
name: values["name"],
description: values["description"],
});
try {
await deedType.validateOrReject?.({ groups: ["updateDeedType"], forbidUnknownValues: true });
} catch (validationErrors: Array<ValidationError> | any) {
if (!Array.isArray(validationErrors)) return;
setValidationError(validationErrors as ValidationError[]);
return;
}
try {
await DeedTypes.getInstance().put(
deedTypeUid as string,
DeedType.hydrate<DeedType>({
uid: deedTypeUid as string,
name: values["name"],
description: values["description"],
}),
);
const onSubmitHandler = async (e: React.FormEvent<HTMLFormElement> | null, values: { [key: string]: string | undefined }) => {
// const deedType = DeedType.hydrate<DeedType>({
// name: values["name"],
// description: values["description"],
// });
// try {
// await deedType.validateOrReject?.({ groups: ["updateDeedType"], forbidUnknownValues: true });
// } catch (validationErrors: Array<ValidationError> | any) {
// if (!Array.isArray(validationErrors)) return;
// setValidationError(validationErrors as ValidationError[]);
// return;
// }
try {
LoaderService.getInstance().show();
// deedTypeUid comes from URL without ':0' suffix, add it back for API calls
const processId = idAsProcessId(deedTypeUid as string);
const process = await MessageBus.getInstance().getProcessData(processId);
if (process) {
console.log('process', process);
// New data
const newData: any = {
name: values["name"],
description: values["description"]
};
// Merge process data with new data & update process
process[processId].name = newData.name;
process[processId].description = newData.description;
await DeedTypeService.updateDeedType(processId, newData);
router.push(
Module.getInstance()
.get()
.modules.pages.DeedTypes.pages.DeedTypesInformations.props.path.replace("[uid]", deedTypeUid as string),
);
} catch (validationErrors) {
if (!Array.isArray(validationErrors)) return;
setValidationError(validationErrors as ValidationError[]);
return;
}
},
[deedTypeUid, router],
);
} catch (error) {
console.error('Error updating deed type:', error);
// Handle error appropriately
} finally {
LoaderService.getInstance().hide();
}
};
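The onSubmitHandler above follows a read, merge, update sequence: fetch the process data by id, mutate the entry keyed by that id, then persist the changed fields through DeedTypeService.updateDeedType. A compact sketch of that sequence, assuming the MessageBus, DeedTypeService and idAsProcessId APIs used above:

// Sketch of the read-merge-update flow (same APIs as above assumed).
async function updateDeedTypeFields(deedTypeUidFromUrl: string, newData: { name?: string; description?: string }) {
    const processId = idAsProcessId(deedTypeUidFromUrl); // re-add the ':0' suffix for API calls
    const process = await MessageBus.getInstance().getProcessData(processId);
    if (!process || !process[processId]) return;
    // Merge the edited fields into the cached entry, then persist only the delta.
    Object.assign(process[processId], newData);
    await DeedTypeService.updateDeedType(processId, newData);
}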
const onFieldChange = useCallback((name: string, field: any) => {
setHasChanged(true);
@ -1,7 +1,5 @@
import ChevronIcon from "@Assets/Icons/chevron.svg";
import PenICon from "@Assets/Icons/pen.svg";
import DeedTypes from "@Front/Api/LeCoffreApi/Notary/DeedTypes/DeedTypes";
import DocumentTypes from "@Front/Api/LeCoffreApi/Notary/DocumentTypes/DocumentTypes";
import Button, { EButtonstyletype, EButtonVariant } from "@Front/Components/DesignSystem/Button";
import { IOption } from "@Front/Components/DesignSystem/Dropdown/DropdownMenu/DropdownOption";
import Form from "@Front/Components/DesignSystem/Form";
@ -20,6 +18,12 @@ import { useCallback, useEffect, useState } from "react";
import classes from "./classes.module.scss";
import DeedTypeService from "src/common/Api/LeCoffreApi/sdk/DeedTypeService";
import DocumentTypeService from "src/common/Api/LeCoffreApi/sdk/DocumentTypeService";
import LoaderService from "src/common/Api/LeCoffreApi/sdk/Loader/LoaderService";
import MessageBus from "src/sdk/MessageBus";
import { idAsProcessId } from "@Front/Utils/ProcessIdUtils";
type IProps = {};
export default function DeedTypesInformations(props: IProps) {
const router = useRouter();
@ -48,42 +52,75 @@ export default function DeedTypesInformations(props: IProps) {
setIsSaveModalOpened(false);
}, []);
const deleteDeedType = useCallback(async () => {
await DeedTypes.getInstance().put(
deedTypeUid as string,
DeedType.hydrate<DeedType>({
uid: deedTypeUid as string,
archived_at: new Date(),
}),
);
router.push(Module.getInstance().get().modules.pages.DeedTypes.props.path);
}, [deedTypeUid, router]);
const deleteDeedType = async () => {
LoaderService.getInstance().show();
try {
// deedTypeUid comes from URL without ':0' suffix, add it back for API calls
const processId = idAsProcessId(deedTypeUid as string);
const process = await MessageBus.getInstance().getProcessData(processId);
if (process && process[processId]) {
// New data
const newData: any = {
isDeleted: 'true',
archived_at: new Date().toISOString()
};
// Merge process data with new data & update process
process[processId].isDeleted = newData.isDeleted;
process[processId].archived_at = newData.archived_at;
await DeedTypeService.updateDeedType(processId, newData);
router.push(
Module.getInstance()
.get()
.modules.pages.DeedTypes.props.path
);
}
} catch (error) {
console.error('Error deleting deed type:', error);
} finally {
LoaderService.getInstance().hide();
}
};
useEffect(() => {
async function getDeedType() {
if (!deedTypeUid) return;
const deedType = await DeedTypes.getInstance().getByUid(deedTypeUid as string, {
q: {
document_types: true,
},
});
setDeedTypeSelected(deedType);
if (!deedType.document_types) return;
const documentsOptions: IOption[] = deedType.document_types
?.map((documentType) => {
return {
label: documentType.name,
id: documentType.uid ?? "",
};
})
.sort((a, b) => a.label.localeCompare(b.label));
setSelectedDocuments(documentsOptions);
setSelectedDocuments([]);
// deedTypeUid comes from URL without ':0' suffix, add it back for API calls
const processId = idAsProcessId(deedTypeUid as string);
MessageBus.getInstance().getProcessData(processId).then((process: any) => {
if (process) {
console.log('[DeedTypesInformations] process', process);
const deedType: any = process;
setDeedTypeSelected(deedType);
if (!deedType.document_types) return;
const documentsOptions: IOption[] = deedType.document_types
?.map((documentType: any) => {
return {
label: documentType.name,
id: documentType.uid ?? "",
};
})
.sort((a: any, b: any) => a.label.localeCompare(b.label));
setSelectedDocuments(documentsOptions);
} else {
console.warn('[DeedTypesInformations] process not found:', processId);
}
});
}
async function getDocuments() {
const documents = await DocumentTypes.getInstance().get({});
setAvailableDocuments(documents);
function getDocuments() {
setAvailableDocuments([]);
DocumentTypeService.getDocumentTypes((processes: Record<string, any>) => {
const documents = Object.values(processes);
if (documents.length) {
setAvailableDocuments(documents);
}
});
}
getDocuments();
@ -98,11 +135,35 @@ export default function DeedTypesInformations(props: IProps) {
);
const saveDocumentTypes = useCallback(async () => {
await DeedTypes.getInstance().put(deedTypeUid as string, {
uid: deedTypeUid as string,
document_types: selectedDocuments.map((document) => DocumentType.hydrate<DocumentType>({ uid: document.id as string })),
});
closeSaveModal();
LoaderService.getInstance().show();
// deedTypeUid comes from URL without ':0' suffix, add it back for API calls
const processId = idAsProcessId(deedTypeUid as string);
console.log('[DeedTypesInformations] processId', processId);
const deedType = (await MessageBus.getInstance().getProcessData(processId))[processId];
if (deedType) {
console.log('[DeedTypesInformations] deedType', deedType);
let document_types: any[] = deedType['document_types'];
if (!document_types) {
document_types = [];
}
selectedDocuments.map((selectedDocument: any) => selectedDocument.id as string)
.forEach((uid: any) => document_types.push(availableDocuments.find((document: any) => document.uid === uid)));
// New data
const newData: any = {
document_types: document_types
};
console.log('[DeedTypesInformations] newData', newData);
await DeedTypeService.updateDeedType(processId, newData);
LoaderService.getInstance().hide();
closeSaveModal();
} else {
console.warn('[DeedTypesInformations] process not found:', processId);
}
}, [closeSaveModal, deedTypeUid, selectedDocuments]);
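In saveDocumentTypes above, availableDocuments.find(...) can return undefined for a stale selection, and nothing prevents the same uid from being pushed twice. A sketch of the merge step that filters misses and de-duplicates by uid, meant for the same spot inside the async callback and assuming the same service API:

// Sketch: build the merged document_types list defensively before updating the process.
const existing: any[] = deedType["document_types"] ?? [];
const added: any[] = selectedDocuments
    .map((selected: any) => availableDocuments.find((doc: any) => doc.uid === selected.id))
    .filter((doc: any) => doc !== undefined);
const merged: any[] = [...existing, ...added].filter(
    (doc: any, index: number, all: any[]) => all.findIndex((d: any) => d.uid === doc.uid) === index,
);
await DeedTypeService.updateDeedType(processId, { document_types: merged });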
const onDocumentChangeHandler = useCallback((options: IOption[] | null) => {
@ -1,10 +1,10 @@
import DocumentTypes from "@Front/Api/LeCoffreApi/Notary/DocumentTypes/DocumentTypes";
import Button, { EButtonstyletype, EButtonVariant } from "@Front/Components/DesignSystem/Button";
import Form from "@Front/Components/DesignSystem/Form";
import TextAreaField from "@Front/Components/DesignSystem/Form/TextareaField";
import TextField from "@Front/Components/DesignSystem/Form/TextField";
import Typography, { ETypo } from "@Front/Components/DesignSystem/Typography";
import DefaultDocumentTypesDashboard from "@Front/Components/LayoutTemplates/DefaultDocumentTypesDashboard";
import { ToasterService } from "@Front/Components/DesignSystem/Toaster";
import Module from "@Front/Config/Module";
import JwtService from "@Front/Services/JwtService/JwtService";
import { validateOrReject, ValidationError } from "class-validator";
@ -14,35 +14,85 @@ import { useCallback, useState } from "react";
import classes from "./classes.module.scss";
import DocumentTypeService from "src/common/Api/LeCoffreApi/sdk/DocumentTypeService";
import LoaderService from "src/common/Api/LeCoffreApi/sdk/Loader/LoaderService";
import { idAsUrl } from "@Front/Utils/ProcessIdUtils";
import UserStore from "@Front/Stores/UserStore";
import { DEFAULT_VALIDATOR_ID } from "@Front/Config/AppConstants";
type IProps = {};
export default function DocumentTypesCreate(props: IProps) {
const [validationError, setValidationError] = useState<ValidationError[]>([]);
const router = useRouter();
const handleCancel = useCallback(() => {
router.push(Module.getInstance().get().modules.pages.DocumentTypes.props.path);
}, [router]);
const onSubmitHandler = useCallback(
async (e: React.FormEvent<HTMLFormElement> | null, values: { [key: string]: string }) => {
try {
const jwt = JwtService.getInstance().decodeJwt();
if (!jwt) return;
const office = Office.hydrate<Office>({
uid: jwt.office_Id,
});
const documentToCreate = DocumentType.hydrate<DocumentType>({
...values,
office: office,
});
await validateOrReject(documentToCreate, { groups: ["createDocumentType"] });
const documentTypeCreated = await DocumentTypes.getInstance().post(documentToCreate);
const user: any = UserStore.instance.getUser();
if (!user) {
console.error("DocumentTypesCreate: User not found - user is null or undefined");
return;
}
const office = UserStore.instance.getOffice();
if (!office) {
console.error("DocumentTypesCreate: office not found - office is undefined or null");
return;
}
const officeId = office.processId;
const officeIdNot = office.processData.idNot;
router.push(
Module.getInstance()
.get()
.modules.pages.DocumentTypes.pages.DocumentTypesInformations.props.path.replace("[uid]", documentTypeCreated.uid!),
);
// const documentFormModel = DocumentType.hydrate<DocumentType>({
// ...values,
// office: Office.hydrate<Office>({
// uid: officeId,
// })
// });
// await validateOrReject(documentFormModel, { groups: ["createDocumentType"] });
const documentTypeData: any = {
...values,
office: {
uid: officeId,
idNot: officeIdNot,
}
};
LoaderService.getInstance().show();
try {
const processCreated = await DocumentTypeService.createDocumentType(documentTypeData, DEFAULT_VALIDATOR_ID);
ToasterService.getInstance().success({
title: "Succès !",
description: "Type de document créé avec succès"
});
const documentTypeUid = idAsUrl(processCreated.processId);
if (!documentTypeUid) {
console.error("DocumentTypesCreate: documentTypeUid is undefined - processCreated.processId is missing");
return;
}
router.push(
Module.getInstance()
.get()
.modules.pages.DocumentTypes.pages.DocumentTypesInformations.props.path.replace("[uid]", documentTypeUid),
);
} catch (apiError) {
ToasterService.getInstance().error({
title: "Erreur !",
description: "Une erreur est survenue lors de la création du type de document"
});
console.error("Document type creation error:", apiError);
} finally {
LoaderService.getInstance().hide();
}
} catch (e) {
if (e instanceof Array) {
setValidationError(e);
}
LoaderService.getInstance().hide();
}
},
[router],
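DeedTypesCreate and DocumentTypesCreate above share the same sequence: show the loader, create the process with DEFAULT_VALIDATOR_ID, toast a success message, navigate to the informations page using idAsUrl(processId), toast an error on failure and hide the loader in a finally block. A sketch of that shared flow as a hypothetical helper (createAndNavigate is not part of the diff):

// Hypothetical helper illustrating the shared create flow; not part of this diff.
async function createAndNavigate(
    create: () => Promise<any>,           // e.g. () => DocumentTypeService.createDocumentType(data, DEFAULT_VALIDATOR_ID)
    successMessage: string,
    buildPath: (urlId: string) => string, // e.g. (urlId) => path.replace("[uid]", urlId)
    router: { push: (path: string) => void },
) {
    LoaderService.getInstance().show();
    try {
        const processCreated = await create();
        ToasterService.getInstance().success({ title: "Succès !", description: successMessage });
        router.push(buildPath(idAsUrl(processCreated.processId)));
    } catch (apiError) {
        ToasterService.getInstance().error({ title: "Erreur !", description: "Une erreur est survenue lors de la création" });
        console.error(apiError);
    } finally {
        LoaderService.getInstance().hide();
    }
}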
@ -71,7 +121,7 @@ export default function DocumentTypesCreate(props: IProps) {
validationError={validationError.find((error) => error.property === "public_description")}
/>
<div className={classes["buttons-container"]}>
<Button variant={EButtonVariant.PRIMARY} styletype={EButtonstyletype.OUTLINED}>
<Button variant={EButtonVariant.PRIMARY} styletype={EButtonstyletype.OUTLINED} onClick={handleCancel}>
Annuler
</Button>
<Button type="submit">Créer le document</Button>
@ -1,4 +1,3 @@
import DocumentTypes from "@Front/Api/LeCoffreApi/Notary/DocumentTypes/DocumentTypes";
import Button, { EButtonstyletype, EButtonVariant } from "@Front/Components/DesignSystem/Button";
import Form from "@Front/Components/DesignSystem/Form";
import TextAreaField from "@Front/Components/DesignSystem/Form/TextareaField";
@ -13,6 +12,9 @@ import { useCallback, useEffect, useState } from "react";
import classes from "./classes.module.scss";
import DocumentTypeService from "src/common/Api/LeCoffreApi/sdk/DocumentTypeService";
import LoaderService from "src/common/Api/LeCoffreApi/sdk/Loader/LoaderService";
export default function DocumentTypesEdit() {
const router = useRouter();
let { documentTypeUid } = router.query;
@ -23,10 +25,17 @@ export default function DocumentTypesEdit() {
useEffect(() => {
async function getDocumentType() {
if (!documentTypeUid) return;
const documentType = await DocumentTypes.getInstance().getByUid(documentTypeUid as string, {
_count: true,
LoaderService.getInstance().show();
DocumentTypeService.getDocumentTypeByProcessId(documentTypeUid as string).then((process: any) => {
if (process) {
const documentType: any = {
...process.processData,
processId: process.processId
};
setDocumentTypeSelected(documentType);
}
LoaderService.getInstance().hide();
});
setDocumentTypeSelected(documentType);
}
getDocumentType();
@ -46,16 +55,22 @@ export default function DocumentTypesEdit() {
setValidationError(validationErrors as ValidationError[]);
return;
}
const documentTypeUpdated = await DocumentTypes.getInstance().put(documentTypeUid as string, documentToUpdate);
router.push(
Module.getInstance()
.get()
.modules.pages.DocumentTypes.pages.DocumentTypesInformations.props.path.replace(
"[uid]",
documentTypeUpdated.uid ?? "",
),
);
LoaderService.getInstance().show();
DocumentTypeService.getDocumentTypeByProcessId(documentTypeUid as string).then((process: any) => {
if (process) {
DocumentTypeService.updateDocumentType(process, values).then(() => {
router.push(
Module.getInstance()
.get()
.modules.pages.DocumentTypes.pages.DocumentTypesInformations.props.path.replace(
"[uid]",
documentTypeUid as string ?? "",
)
);
LoaderService.getInstance().hide();
});
}
});
} catch (validationErrors: Array<ValidationError> | any) {
if (!Array.isArray(validationErrors)) return;
setValidationError(validationErrors as ValidationError[]);
@ -1,6 +1,5 @@
import ChevronIcon from "@Assets/Icons/chevron.svg";
import PenICon from "@Assets/Icons/pen.svg";
import DocumentTypes from "@Front/Api/LeCoffreApi/Notary/DocumentTypes/DocumentTypes";
import Typography, { ETypo, ETypoColor } from "@Front/Components/DesignSystem/Typography";
import DefaultDocumentTypesDashboard from "@Front/Components/LayoutTemplates/DefaultDocumentTypesDashboard";
import Module from "@Front/Config/Module";
@ -13,6 +12,8 @@ import { useEffect, useState } from "react";
import classes from "./classes.module.scss";
import Button, { EButtonstyletype, EButtonVariant } from "@Front/Components/DesignSystem/Button";
import DocumentTypeService from "src/common/Api/LeCoffreApi/sdk/DocumentTypeService";
export default function DocumentTypesInformations() {
const router = useRouter();
let { documentTypeUid } = router.query;
@ -21,12 +22,24 @@ export default function DocumentTypesInformations() {
useEffect(() => {
async function getDocument() {
if (!documentTypeUid) return;
const document = await DocumentTypes.getInstance().getByUid(documentTypeUid as string, {
_count: true,
if (!documentTypeUid) {
console.log('DocumentTypesInformations: documentTypeUid is not available yet');
return;
}
DocumentTypeService.getDocumentTypeByProcessId(documentTypeUid as string).then((process: any) => {
if (process) {
const document: any = {
...process.processData,
processId: process.processId
};
setDocumentSelected(document);
} else {
console.log('DocumentTypesInformations: No process found for processId:', documentTypeUid);
}
}).catch((error) => {
console.error('DocumentTypesInformations: Error fetching document:', error);
});
if (!document) return;
setDocumentSelected(document);
}
getDocument();
@ -66,13 +79,15 @@ export default function DocumentTypesInformations() {
</div>
</div>
<div className={classes["right"]}>
<Link
href={Module.getInstance()
.get()
.modules.pages.DocumentTypes.pages.Edit.props.path.replace("[uid]", documentSelected?.uid ?? "")}
className={classes["edit-icon-container"]}>
<Image src={PenICon} alt="edit informations" />
</Link>
{(documentSelected as any)?.processId && (
<Link
href={Module.getInstance()
.get()
.modules.pages.DocumentTypes.pages.Edit.props.path.replace("[uid]", (documentSelected as any).processId)}
className={classes["edit-icon-container"]}>
<Image src={PenICon} alt="edit informations" />
</Link>
)}
</div>
</div>
</div>
@ -1,5 +1,3 @@
import Customers from "@Front/Api/LeCoffreApi/Notary/Customers/Customers";
import Folders from "@Front/Api/LeCoffreApi/Notary/Folders/Folders";
import AutocompleteMultiSelect from "@Front/Components/DesignSystem/AutocompleteMultiSelect";
import Button, { EButtonstyletype, EButtonVariant } from "@Front/Components/DesignSystem/Button";
import { IOption } from "@Front/Components/DesignSystem/Dropdown/DropdownMenu/DropdownOption";
@ -9,9 +7,10 @@ import RadioBox from "@Front/Components/DesignSystem/RadioBox";
import Typography, { ETypo } from "@Front/Components/DesignSystem/Typography";
import BackArrow from "@Front/Components/Elements/BackArrow";
import Module from "@Front/Config/Module";
import { ToasterService } from "@Front/Components/DesignSystem/Toaster";
import { ValidationError } from "class-validator";
import { ECivility } from "le-coffre-resources/dist/Customer/Contact";
import { Contact, Customer, OfficeFolder } from "le-coffre-resources/dist/Notary";
import { Contact, Customer } from "le-coffre-resources/dist/Notary";
import Link from "next/link";
import { useRouter } from "next/router";
import backgroundImage from "@Assets/images/background_refonte.svg";
@ -19,6 +18,11 @@ import classes from "./classes.module.scss";
import { useCallback, useEffect, useState } from "react";
import DefaultDoubleSidePage from "@Front/Components/LayoutTemplates/DefaultDoubleSidePage";
import LoaderService from "src/common/Api/LeCoffreApi/sdk/Loader/LoaderService";
import CustomerService from "src/common/Api/LeCoffreApi/sdk/CustomerService";
import FolderService from "src/common/Api/LeCoffreApi/sdk/FolderService";
import { DEFAULT_VALIDATOR_ID } from "../../../../Config/AppConstants";
enum ESelectedOption {
EXISTING_CUSTOMER = "existing_customer",
NEW_CUSTOMER = "new_customer",
@ -63,34 +67,64 @@ export default function AddClientToFolder(props: IProps) {
values["cell_phone_number"] = "+33" + values["cell_phone_number"].substring(1);
}
}
const contactToCreate = Contact.hydrate<Customer>(values);
await contactToCreate.validateOrReject?.({ groups: ["createCustomer"], forbidUnknownValues: false });
const contactFormModel = Contact.hydrate<Customer>(values);
await contactFormModel.validateOrReject?.({ groups: ["createCustomer"], forbidUnknownValues: false });
} catch (validationErrors) {
setValidationError(validationErrors as ValidationError[]);
return;
}
try {
const customer: Customer = await Customers.getInstance().post({
contact: values,
// TODO: review
const customerData: any = {
contact: values
};
LoaderService.getInstance().show();
CustomerService.createCustomer(customerData, DEFAULT_VALIDATOR_ID).then((processCreated: any) => {
FolderService.getFolderByUid(folderUid as string).then((process: any) => {
if (process) {
const customers: any[] = [];
for (const customerUid of process.processData.customers.map((customer: any) => customer.uid)) {
customers.push({ uid: customerUid });
}
customers.push({ uid: processCreated.processData.uid });
FolderService.updateFolder(process, { customers: customers }).then(() => {
ToasterService.getInstance().success({
title: "Succès !",
description: "Client ajouté avec succès au dossier"
});
router.push(`/folders/${folderUid}`);
LoaderService.getInstance().hide();
});
}
});
});
if (!customer.uid) return;
customersToLink?.push({ uid: customer.uid } as Partial<Customer>);
} catch (backError) {
if (!Array.isArray(backError)) return;
setValidationError(backError as ValidationError[]);
return;
}
}
} else {
LoaderService.getInstance().show();
FolderService.getFolderByUid(folderUid as string).then((process: any) => {
if (process) {
const customers: any[] = [];
for (const customerUid of customersToLink.map((customer: any) => customer.uid)) {
customers.push({ uid: customerUid });
}
if (customersToLink) {
const body = OfficeFolder.hydrate<OfficeFolder>({
customers: customersToLink.map((customer) => {
return Customer.hydrate<Customer>(customer);
}),
FolderService.updateFolder(process, { customers: customers }).then(() => {
ToasterService.getInstance().success({
title: "Succès !",
description: selectedCustomers.length > 1 ? "Clients associés avec succès au dossier" : "Client associé avec succès au dossier"
});
router.push(`/folders/${folderUid}`);
LoaderService.getInstance().hide();
});
}
});
await Folders.getInstance().put(folderUid as string, body);
router.push(`/folders/${folderUid}`);
}
},
[existingCustomers, folderUid, router, selectedCustomers, selectedOption],
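The new-customer branch above chains createCustomer, getFolderByUid and updateFolder through nested .then callbacks. The same sequence reads more linearly with async/await; a sketch assuming the CustomerService and FolderService calls used above and the surrounding submit handler's scope (customerData, folderUid, router):

// Sketch of the same create-then-link sequence with async/await (same APIs assumed).
LoaderService.getInstance().show();
const processCreated: any = await CustomerService.createCustomer(customerData, DEFAULT_VALIDATOR_ID);
const folderProcess: any = await FolderService.getFolderByUid(folderUid as string);
if (folderProcess) {
    const customers = [
        ...folderProcess.processData.customers.map((customer: any) => ({ uid: customer.uid })),
        { uid: processCreated.processData.uid },
    ];
    await FolderService.updateFolder(folderProcess, { customers });
    ToasterService.getInstance().success({ title: "Succès !", description: "Client ajouté avec succès au dossier" });
    router.push(`/folders/${folderUid}`);
}
LoaderService.getInstance().hide();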
@ -98,23 +132,22 @@ export default function AddClientToFolder(props: IProps) {
const getFolderPreSelectedCustomers = useCallback(
async (folderUid: string): Promise<IOption[] | undefined> => {
const query = {
q: {
customers: {
include: {
contact: true,
},
},
},
};
let preExistingCustomers: IOption[] = [];
try {
const folder = await Folders.getInstance().getByUid(folderUid, query);
preExistingCustomers = folder.customers!.map((customer) => {
return {
label: customer.contact?.first_name + " " + customer.contact?.last_name,
id: customer.uid ?? "",
};
preExistingCustomers = await new Promise(resolve => {
FolderService.getFolderByUid(folderUid as string).then((process: any) => {
if (process) {
const folder: any = process.processData;
const preExistingCustomers: IOption[] = folder.customers
.map((customer: any) => {
return {
label: customer.contact?.first_name + " " + customer.contact?.last_name,
id: customer.uid ?? "",
};
});
resolve(preExistingCustomers);
}
});
});
} catch (error) {
router.push(Module.getInstance().get().modules.pages["404"].props.path);
@ -126,25 +159,30 @@ export default function AddClientToFolder(props: IProps) {
);
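The Promise wrapper in getFolderPreSelectedCustomers above only resolves when a process is returned, so a missing folder would leave the awaiting caller (loadCustomers below) pending. A sketch that resolves in every branch, with an empty list as the fallback and the same FolderService API assumed:

// Sketch: resolve in every branch so the caller never hangs on a missing folder.
const preExistingCustomers: IOption[] = await new Promise<IOption[]>((resolve) => {
    FolderService.getFolderByUid(folderUid as string).then((process: any) => {
        if (!process) return resolve([]);
        const folder: any = process.processData;
        resolve(
            (folder.customers ?? []).map((customer: any) => ({
                label: customer.contact?.first_name + " " + customer.contact?.last_name,
                id: customer.uid ?? "",
            })),
        );
    });
});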
const loadCustomers = useCallback(async () => {
const query = {};
const availableCustomers = await Customers.getInstance().get(query);
let preExistingCustomers: IOption[] | undefined = await getFolderPreSelectedCustomers(folderUid as string);
const existingCustomers = preExistingCustomers ?? [];
LoaderService.getInstance().show();
CustomerService.getCustomers().then(async (processes: any[]) => {
const availableCustomers: any[] = processes.map((process: any) => process.processData);
existingCustomers.forEach((customer) => {
const index = availableCustomers.findIndex((availableCustomer) => availableCustomer.uid === customer.id);
if (index !== -1) availableCustomers.splice(index, 1);
const preExistingCustomers: IOption[] | undefined = await getFolderPreSelectedCustomers(folderUid as string);
const existingCustomers = preExistingCustomers ?? [];
existingCustomers.forEach((customer) => {
const index = availableCustomers.findIndex((availableCustomer) => availableCustomer.uid === customer.id);
if (index !== -1) availableCustomers.splice(index, 1);
});
let selectedOption = ESelectedOption.EXISTING_CUSTOMER;
if (availableCustomers.length === 0) {
selectedOption = ESelectedOption.NEW_CUSTOMER;
}
setAvailableCustomers(availableCustomers);
setExistingCustomers(existingCustomers);
setIsLoaded(true);
setSelectedOption(selectedOption);
LoaderService.getInstance().hide();
});
let selectedOption = ESelectedOption.EXISTING_CUSTOMER;
if (availableCustomers.length === 0) {
selectedOption = ESelectedOption.NEW_CUSTOMER;
}
setAvailableCustomers(availableCustomers);
setExistingCustomers(existingCustomers);
setIsLoaded(true);
setSelectedOption(selectedOption);
}, [folderUid, getFolderPreSelectedCustomers]);
const getSelectedOptions = useCallback((): IOption[] => {
@ -1,5 +1,3 @@
import Deeds from "@Front/Api/LeCoffreApi/Notary/Deeds/Deeds";
import DocumentTypes from "@Front/Api/LeCoffreApi/Notary/DocumentTypes/DocumentTypes";
import { IOption } from "@Front/Components/DesignSystem/Dropdown/DropdownMenu/DropdownOption";
import AutocompleteMultiSelectField from "@Front/Components/DesignSystem/Form/AutocompleteMultiSelectField";
import { IOption as IFormOption } from "@Front/Components/DesignSystem/Form/SelectFieldOld";
@ -7,11 +5,16 @@ import TextAreaField from "@Front/Components/DesignSystem/Form/TextareaField";
import TextField from "@Front/Components/DesignSystem/Form/TextField";
import Modal from "@Front/Components/DesignSystem/Modal";
import RadioBox from "@Front/Components/DesignSystem/RadioBox";
import { DocumentType, OfficeFolder } from "le-coffre-resources/dist/Notary";
import { OfficeFolder } from "le-coffre-resources/dist/Notary";
import { ChangeEvent, useCallback, useEffect, useState } from "react";
import classes from "./classes.module.scss";
import DocumentTypeService from "src/common/Api/LeCoffreApi/sdk/DocumentTypeService";
import DeedTypeService from "src/common/Api/LeCoffreApi/sdk/DeedTypeService";
import LoaderService from "src/common/Api/LeCoffreApi/sdk/Loader/LoaderService";
import { DEFAULT_VALIDATOR_ID } from "@Front/Config/AppConstants";
type IProps = {
isCreateDocumentModalVisible: boolean;
closeModal: () => void;
@ -29,20 +32,25 @@ export default function ParameterDocuments(props: IProps) {
const [formattedOptions, setFormattedOptions] = useState<IOption[]>([]);
const getAvailableDocuments = useCallback(async () => {
const documents = await DocumentTypes.getInstance().get({});
DocumentTypeService.getDocumentTypes().then((processes: any[]) => {
if (processes.length > 0) {
const documents: any[] = processes.map((process: any) => process.processData);
const formattedOptions: IOption[] = documents
.filter((document) => {
return !props.folder.deed?.document_types?.some((documentType) => documentType.uid === document.uid);
})
.map((document) => {
return {
label: document.name,
id: document.uid ?? "",
};
});
formattedOptions.sort((a, b) => (a.label > b.label ? 1 : -1));
setFormattedOptions(formattedOptions);
const formattedOptions: IOption[] = documents
.filter((document) => {
return !props.folder.deed?.document_types?.some((documentType) => documentType.uid === document.uid);
})
.map((document) => {
return {
label: document.name,
id: document.uid ?? "",
};
});
formattedOptions.sort((a, b) => (a.label > b.label ? 1 : -1));
setFormattedOptions(formattedOptions);
}
});
}, [props.folder.deed?.document_types]);
const onVisibleDescriptionChange = (event: ChangeEvent<HTMLInputElement | HTMLSelectElement | HTMLTextAreaElement>) => {
@ -68,18 +76,44 @@ export default function ParameterDocuments(props: IProps) {
const addDocument = useCallback(async () => {
if (addOrEditDocument === "add") {
try {
const documentType = await DocumentTypes.getInstance().post({
LoaderService.getInstance().show();
const documentTypeData: any = {
name: documentName,
private_description: visibleDescription,
office: {
uid: props.folder.office!.uid!,
},
public_description: visibleDescription,
};
const documentType: any = await new Promise<any>((resolve: (documentType: any) => void) => {
DocumentTypeService.createDocumentType(documentTypeData, DEFAULT_VALIDATOR_ID).then((processCreated: any) => {
const documentType: any = processCreated.processData;
resolve(documentType);
});
});
const oldDocumentsType = props.folder.deed?.document_types!;
await Deeds.getInstance().put(props.folder.deed?.uid!, {
document_types: [...oldDocumentsType, documentType],
await new Promise<void>((resolve: () => void) => {
DeedTypeService.getDeedTypeByUid(props.folder.deed?.deed_type?.uid!).then(async (process: any) => {
if (process) {
// New data
const newData: any = {
document_types: [
...oldDocumentsType.map((document: any) => ({ uid: document.uid })),
{ uid: documentType.uid }
]
};
// Merge process data with new data & update process
process.processData.document_types = newData.document_types;
await DeedTypeService.updateDeedType(process, newData);
resolve();
}
});
});
// Create a new document type in the format expected by the parent component
@ -92,29 +126,52 @@ export default function ParameterDocuments(props: IProps) {
if (props.onDocumentsUpdated) {
props.onDocumentsUpdated([newDocumentType]);
}
LoaderService.getInstance().hide();
handleClose();
} catch (e) {
console.error(e);
}
} else {
try {
LoaderService.getInstance().show();
const oldDocumentsType = props.folder.deed?.document_types!;
await Deeds.getInstance().put(props.folder.deed?.uid!, {
document_types: [
...oldDocumentsType,
...selectedDocuments.map((document) => DocumentType.hydrate<DocumentType>({ uid: document.id as string })),
],
await new Promise<void>((resolve: () => void) => {
DeedTypeService.getDeedTypeByUid(props.folder.deed?.deed_type?.uid!).then(async (process: any) => {
if (process) {
// New data
const newData: any = {
document_types: [
...oldDocumentsType.map((document: any) => ({ uid: document.uid })),
...selectedDocuments.map((document: any) => ({ uid: document.id as string }))
]
};
// Merge process data with new data & update process
process.processData.document_types = newData.document_types;
await DeedTypeService.updateDeedType(process, newData);
resolve();
}
});
});
// Get the full document details for the selected documents
const documentsById = await Promise.all(
selectedDocuments.map(async (doc) => {
const fullDoc = await DocumentTypes.getInstance().getByUid(doc.id as string);
const documentType: any = await new Promise<any>((resolve: (documentType: any) => void) => {
DocumentTypeService.getDocumentTypeByUid(doc.id as string).then((process: any) => {
if (process) {
const documentType: any = process.processData;
resolve(documentType);
}
});
});
return {
label: fullDoc.name!,
value: fullDoc.uid!,
description: fullDoc.private_description!,
label: documentType.name!,
value: documentType.uid!,
description: documentType.private_description!,
} as IFormOption;
})
);
@ -122,7 +179,8 @@ export default function ParameterDocuments(props: IProps) {
if (props.onDocumentsUpdated) {
props.onDocumentsUpdated(documentsById);
}
LoaderService.getInstance().hide();
handleClose();
} catch (e) {
console.error(e);

Some files were not shown because too many files have changed in this diff.