From 58cc2493e56dacec468cbe8353595cf7dd87dbf4 Mon Sep 17 00:00:00 2001 From: Nicolas Cantu Date: Sat, 4 Apr 2026 18:36:43 +0200 Subject: [PATCH] chore: consolidate ia_dev module, sync tooling, and harden gateways (0.0.5) Initial state: - ia_dev was historically referenced as ./ia_dev in docs and integrations, while the vendored module lives under services/ia_dev. - AnythingLLM sync and hook installation had error masking / weak exit signaling. - Proxy layers did not validate proxy path segments, allowing path normalization tricks. Motivation: - Make the IDE-oriented workflow usable (sync -> act -> deploy/preview) with explicit errors. - Reduce security footguns in proxying and script automation. Resolution: - Standardize IA_DEV_ROOT usage and documentation to services/ia_dev. - Add SSH remote data mirroring + optional AnythingLLM ingestion. - Extend AnythingLLM pull sync to support upload-all/prefix and fail on upload errors. - Harden smart-ide-sso-gateway and smart-ide-global-api proxying with safe-path checks and non-leaking error responses. - Improve ia-dev-gateway runner validation and reduce sensitive path leakage. - Add site scaffold tool (Vite/React) with OIDC + chat via sso-gateway -> orchestrator. Root cause: - Historical layout changes (submodule -> vendored tree) and missing central contracts for path resolution. - Missing validation for proxy path traversal patterns. - Overuse of silent fallbacks (|| true, exit 0 on partial failures) in automation scripts. Impacted features: - Project sync: git pull + AnythingLLM sync + remote data mirror ingestion. - Site frontends: SSO gateway proxy and orchestrator intents (rag.query, chat.local). - Agent execution: ia-dev-gateway script runner and SSE output. 
Code modified: - scripts/remote-data-ssh-sync.sh - scripts/anythingllm-pull-sync/sync.mjs - scripts/install-anythingllm-post-merge-hook.sh - cron/git-pull-project-clones.sh - services/smart-ide-sso-gateway/src/server.ts - services/smart-ide-global-api/src/server.ts - services/smart-ide-orchestrator/src/server.ts - services/ia-dev-gateway/src/server.ts - services/ia_dev/tools/site-generate.sh Documentation modified: - docs/** (architecture, API docs, ia_dev module + integration, scripts) Configurations modified: - config/services.local.env.example - services/*/.env.example Files in deploy modified: - services/ia_dev/deploy/* Files in logs impacted: - logs/ia_dev.log (runtime only) - .logs/* (runtime only) Databases and other sources modified: - None Off-project modifications: - None Files in .smartIde modified: - .smartIde/agents/*.md - services/ia_dev/.smartIde/** Files in .secrets modified: - None New patch version in VERSION: - 0.0.5 CHANGELOG.md updated: - yes --- .gitignore | 5 + .smartIde/agents/ia-dev-agent-loop.md | 10 +- ...ia-dev-branch-align-by-script-from-test.md | 10 +- .../agents/ia-dev-change-to-all-branches.md | 10 +- .../ia-dev-closure-point-7-justification.md | 10 +- .smartIde/agents/ia-dev-code.md | 10 +- .smartIde/agents/ia-dev-deploy-by-script.md | 10 +- .../agents/ia-dev-deploy-pprod-or-prod.md | 10 +- .smartIde/agents/ia-dev-docupdate.md | 10 +- .smartIde/agents/ia-dev-evol.md | 10 +- .smartIde/agents/ia-dev-fix-lint.md | 10 +- .smartIde/agents/ia-dev-fix-search.md | 10 +- .smartIde/agents/ia-dev-fix.md | 10 +- .smartIde/agents/ia-dev-git-issues-process.md | 10 +- .smartIde/agents/ia-dev-notary-ai-loop.md | 10 +- .smartIde/agents/ia-dev-notary-ai-process.md | 10 +- .smartIde/agents/ia-dev-push-by-script.md | 10 +- .smartIde/agents/ia-dev-setup-host.md | 12 +- .vscode/settings.json | 1 + CHANGELOG.md | 21 + VERSION | 2 +- builazoo/README.md | 5 +- config/services.local.env.example | 1 + cron/git-pull-project-clones.sh | 15 +- 
docs/API/global-api.md | 4 +- docs/API/sso-gateway-api.md | 7 +- docs/README.md | 4 +- docs/core-ide.md | 17 + docs/deployment-target.md | 12 + docs/ecosystem-architecture-and-sync.md | 14 +- .../anythingllm-pull-sync-after-pull.md | 48 +- docs/features/docv-ai-integration.md | 2 +- docs/features/e2e-browser.md | 42 ++ docs/features/ia-dev-service.md | 12 +- docs/ia_dev-module.md | 18 +- docs/ia_dev-project-smart_ide.md | 10 +- docs/repo/README.md | 5 +- docs/repo/ia-dev-deploy-lib.md | 4 +- docs/repo/ia-dev-project-conf-schema.md | 8 +- docs/repo/ia-dev-repository-overview.md | 12 +- docs/repo/ia-dev-shared-lib.md | 4 +- docs/repo/ia-dev-smart-ide-integration.md | 12 +- docs/repo/logs-directory.md | 6 +- docs/repo/projects-directory.md | 10 +- docs/repo/script-anythingllm-pull-sync.md | 76 +- docs/repo/script-remote-data-ssh-sync.md | 61 ++ docs/repo/service-ia-dev-gateway.md | 39 +- docs/repo/smart-ide-overview.md | 2 +- docs/system-architecture.md | 6 +- ia_dev | 1 - patches/lapce/README.md | 44 ++ patches/lapce/series | 3 + projects/builazoo/conf.json | 2 +- scripts/anythingllm-pull-sync/sync.mjs | 201 ++++- scripts/core-ide-apply-patches.sh | 56 ++ scripts/core-ide-export-patches.sh | 114 +++ scripts/ensure-core-ide.sh | 84 +++ scripts/ensure-ia-dev-project-link.sh | 15 +- .../install-anythingllm-post-merge-hook.sh | 157 +++- scripts/remote-data-ssh-sync.sh | 318 ++++++++ scripts/smart-ide-ssh-tunnel-plan.sh | 175 +++++ services/ia-dev-gateway/src/paths.ts | 41 +- services/ia-dev-gateway/src/server.ts | 424 ++++++++++- services/ia_dev/.editorconfig | 40 + services/ia_dev/.gitattributes | 41 + services/ia_dev/.gitignore | 72 ++ services/ia_dev/.gitmessage | 47 ++ services/ia_dev/.hintrc | 5 + services/ia_dev/.markdownlint.json | 6 + services/ia_dev/.markdownlintignore | 6 + services/ia_dev/.prettierignore | 56 ++ .../ia_dev/.smartIde/agents/agent-loop.md | 109 +++ .../branch-align-by-script-from-test.md | 81 ++ .../agents/change-to-all-branches.md | 110 +++ 
.../agents/closure-point-7-justification.md | 37 + services/ia_dev/.smartIde/agents/code.md | 125 ++++ .../.smartIde/agents/deploy-by-script.md | 86 +++ .../.smartIde/agents/deploy-pprod-or-prod.md | 101 +++ services/ia_dev/.smartIde/agents/docupdate.md | 106 +++ services/ia_dev/.smartIde/agents/evol.md | 51 ++ services/ia_dev/.smartIde/agents/fix-lint.md | 183 +++++ .../ia_dev/.smartIde/agents/fix-search.md | 83 +++ services/ia_dev/.smartIde/agents/fix.md | 61 ++ .../.smartIde/agents/git-issues-process.md | 104 +++ .../ia_dev/.smartIde/agents/notary-ai-loop.md | 88 +++ .../.smartIde/agents/notary-ai-process.md | 85 +++ .../ia_dev/.smartIde/agents/push-by-script.md | 177 +++++ .../ia_dev/.smartIde/agents/setup-host.md | 32 + .../ia_dev/.smartIde/agents/site-generate.md | 26 + services/ia_dev/.smartIde/hooks.json | 10 + .../ia_dev/.smartIde/hooks/remonter-mails.sh | 42 ++ .../pousse-commit-msg-lecoffreio-now.txt | 36 + .../pousse-commit-msg-lecoffreio.txt | 54 ++ .../.smartIde/rules/cloture-evolution.mdc | 100 +++ .../ia_dev/.smartIde/rules/cloture-lint.mdc | 45 ++ services/ia_dev/.smartIde/rules/rules.mdc | 77 ++ services/ia_dev/CLAUDE.md | 227 ++++++ services/ia_dev/README.md | 10 +- .../business-qa/anon/anonymize.js | 85 +++ .../ia_dev/ai_working_help/business-qa/api.js | 82 ++ .../business-qa/config/default.json | 26 + .../business-qa/example/index.html | 142 ++++ .../ai_working_help/business-qa/interfaces.md | 88 +++ .../business-qa/recontext/recontextualize.js | 37 + .../ia_dev/ai_working_help/notary-ai/lib.sh | 39 + .../notary-ai/list-pending-notary-ai.sh | 19 + .../notary-ai/write-response-notary-ai.sh | 81 ++ services/ia_dev/ai_working_help/package.json | 13 + services/ia_dev/ai_working_help/server.js | 180 +++++ .../ia_dev/deploy/README-lpldf-https-watch.md | 32 + services/ia_dev/deploy/_lib/colors.sh | 17 + services/ia_dev/deploy/_lib/env-map.sh | 66 ++ services/ia_dev/deploy/_lib/git-flow.sh | 293 ++++++++ services/ia_dev/deploy/_lib/ssh.sh | 95 +++ 
services/ia_dev/deploy/branch-align.sh | 135 ++++ services/ia_dev/deploy/bump-version.sh | 111 +++ .../ia_dev/deploy/change-to-all-branches.sh | 61 ++ services/ia_dev/deploy/deploy-by-script-to.sh | 122 +++ services/ia_dev/deploy/deploy.sh | 35 + services/ia_dev/deploy/lib/README.md | 3 + .../ia_dev/deploy/lib/deploy-conf-handling.sh | 60 ++ services/ia_dev/deploy/lib/deploy-log.sh | 32 + .../ia_dev/deploy/lib/deploy-methodology.sh | 49 ++ services/ia_dev/deploy/lib/ssh.sh | 89 +++ services/ia_dev/deploy/orchestrator.sh | 101 +++ services/ia_dev/deploy/pousse.sh | 225 ++++++ .../proxy-units/lpldf-https-watch.service | 8 + .../proxy-units/lpldf-https-watch.timer | 10 + services/ia_dev/deploy/run-project-hooks.sh | 13 + .../deploy/scripts/add-ssh-key-anthony.sh | 52 ++ .../install-lpldf-https-watch-on-proxy.sh | 45 ++ .../git-issues/agent-loop-chat-iterations.sh | 82 ++ .../git-issues/agent-loop-lock-acquire.sh | 39 + .../git-issues/agent-loop-lock-release.sh | 35 + .../ia_dev/git-issues/agent-loop-n-cycles.sh | 72 ++ .../git-issues/agent-loop-retrieval-once.sh | 46 ++ .../git-issues/agent-loop-stop-requested.sh | 27 + services/ia_dev/git-issues/agent-loop-stop.sh | 28 + .../ia_dev/git-issues/agent-loop-treatment.sh | 52 ++ .../ia_dev/git-issues/agent-loop.env.example | 19 + services/ia_dev/git-issues/agent-loop.sh | 91 +++ services/ia_dev/git-issues/comment-issue.sh | 50 ++ .../git-issues/create-branch-for-issue.sh | 45 ++ services/ia_dev/git-issues/get-issue.sh | 39 + .../ia_dev/git-issues/imap-bridge.env.example | 43 ++ services/ia_dev/git-issues/lib.sh | 134 ++++ .../ia_dev/git-issues/list-open-issues.sh | 35 + .../ia_dev/git-issues/list-pending-spooler.sh | 31 + .../mail-create-issue-from-email.py | 110 +++ .../mail-create-issue-from-email.sh | 14 + services/ia_dev/git-issues/mail-get-thread.py | 208 ++++++ services/ia_dev/git-issues/mail-get-thread.sh | 18 + .../ia_dev/git-issues/mail-list-unread.py | 118 +++ .../ia_dev/git-issues/mail-list-unread.sh | 14 + 
services/ia_dev/git-issues/mail-mark-read.py | 41 + services/ia_dev/git-issues/mail-mark-read.sh | 14 + services/ia_dev/git-issues/mail-send-reply.py | 108 +++ services/ia_dev/git-issues/mail-send-reply.sh | 14 + services/ia_dev/git-issues/mail-thread-log.py | 333 +++++++++ services/ia_dev/git-issues/mail-thread-log.sh | 19 + services/ia_dev/git-issues/mail-to-issue.py | 116 +++ services/ia_dev/git-issues/mail-to-issue.sh | 18 + services/ia_dev/git-issues/mail_common.py | 144 ++++ .../ia_dev/git-issues/print-issue-prompt.sh | 35 + services/ia_dev/git-issues/project_config.py | 186 +++++ .../ia_dev/git-issues/tickets-fetch-inbox.py | 321 ++++++++ .../ia_dev/git-issues/tickets-fetch-inbox.sh | 17 + services/ia_dev/git-issues/wiki-api-test.sh | 88 +++ services/ia_dev/git-issues/wiki-get-page.sh | 34 + .../ia_dev/git-issues/wiki-migrate-docs.sh | 100 +++ services/ia_dev/git-issues/wiki-put-page.sh | 46 ++ .../git-issues/write-response-spooler.py | 66 ++ .../git-issues/write-response-spooler.sh | 14 + services/ia_dev/lib/README.md | 3 + services/ia_dev/lib/conf_path_resolve.sh | 48 ++ services/ia_dev/lib/project_config.sh | 73 ++ .../ia_dev/lib/project_git_root_from_conf.sh | 43 ++ services/ia_dev/lib/smart_ide_logs.sh | 58 ++ services/ia_dev/projects/builazoo | 1 + services/ia_dev/projects/enso | 1 + services/ia_dev/projects/smart_ide | 1 + .../tools/proxy-https-watch-lpldf.env.example | 11 + .../ia_dev/tools/proxy-https-watch-lpldf.sh | 106 +++ services/ia_dev/tools/site-generate.sh | 699 ++++++++++++++++++ services/ia_dev/tree.txt | 62 ++ services/smart-ide-global-api/src/server.ts | 43 +- services/smart-ide-orchestrator/.env.example | 9 + services/smart-ide-orchestrator/src/server.ts | 229 +++++- services/smart-ide-sso-gateway/.env.example | 4 + services/smart-ide-sso-gateway/src/server.ts | 78 +- 190 files changed, 11517 insertions(+), 273 deletions(-) create mode 100644 CHANGELOG.md create mode 100644 docs/features/e2e-browser.md create mode 100644 
docs/repo/script-remote-data-ssh-sync.md delete mode 160000 ia_dev create mode 100644 patches/lapce/README.md create mode 100644 patches/lapce/series create mode 100755 scripts/core-ide-apply-patches.sh create mode 100755 scripts/core-ide-export-patches.sh create mode 100755 scripts/ensure-core-ide.sh create mode 100755 scripts/remote-data-ssh-sync.sh create mode 100755 scripts/smart-ide-ssh-tunnel-plan.sh create mode 100644 services/ia_dev/.editorconfig create mode 100644 services/ia_dev/.gitattributes create mode 100644 services/ia_dev/.gitignore create mode 100644 services/ia_dev/.gitmessage create mode 100644 services/ia_dev/.hintrc create mode 100644 services/ia_dev/.markdownlint.json create mode 100644 services/ia_dev/.markdownlintignore create mode 100644 services/ia_dev/.prettierignore create mode 100644 services/ia_dev/.smartIde/agents/agent-loop.md create mode 100644 services/ia_dev/.smartIde/agents/branch-align-by-script-from-test.md create mode 100644 services/ia_dev/.smartIde/agents/change-to-all-branches.md create mode 100644 services/ia_dev/.smartIde/agents/closure-point-7-justification.md create mode 100644 services/ia_dev/.smartIde/agents/code.md create mode 100644 services/ia_dev/.smartIde/agents/deploy-by-script.md create mode 100644 services/ia_dev/.smartIde/agents/deploy-pprod-or-prod.md create mode 100644 services/ia_dev/.smartIde/agents/docupdate.md create mode 100644 services/ia_dev/.smartIde/agents/evol.md create mode 100644 services/ia_dev/.smartIde/agents/fix-lint.md create mode 100644 services/ia_dev/.smartIde/agents/fix-search.md create mode 100644 services/ia_dev/.smartIde/agents/fix.md create mode 100644 services/ia_dev/.smartIde/agents/git-issues-process.md create mode 100644 services/ia_dev/.smartIde/agents/notary-ai-loop.md create mode 100644 services/ia_dev/.smartIde/agents/notary-ai-process.md create mode 100644 services/ia_dev/.smartIde/agents/push-by-script.md create mode 100644 services/ia_dev/.smartIde/agents/setup-host.md 
create mode 100644 services/ia_dev/.smartIde/agents/site-generate.md create mode 100644 services/ia_dev/.smartIde/hooks.json create mode 100755 services/ia_dev/.smartIde/hooks/remonter-mails.sh create mode 100644 services/ia_dev/.smartIde/pousse-commit-msg-lecoffreio-now.txt create mode 100644 services/ia_dev/.smartIde/pousse-commit-msg-lecoffreio.txt create mode 100644 services/ia_dev/.smartIde/rules/cloture-evolution.mdc create mode 100644 services/ia_dev/.smartIde/rules/cloture-lint.mdc create mode 100644 services/ia_dev/.smartIde/rules/rules.mdc create mode 100644 services/ia_dev/CLAUDE.md create mode 100644 services/ia_dev/ai_working_help/business-qa/anon/anonymize.js create mode 100644 services/ia_dev/ai_working_help/business-qa/api.js create mode 100644 services/ia_dev/ai_working_help/business-qa/config/default.json create mode 100644 services/ia_dev/ai_working_help/business-qa/example/index.html create mode 100644 services/ia_dev/ai_working_help/business-qa/interfaces.md create mode 100644 services/ia_dev/ai_working_help/business-qa/recontext/recontextualize.js create mode 100644 services/ia_dev/ai_working_help/notary-ai/lib.sh create mode 100755 services/ia_dev/ai_working_help/notary-ai/list-pending-notary-ai.sh create mode 100755 services/ia_dev/ai_working_help/notary-ai/write-response-notary-ai.sh create mode 100644 services/ia_dev/ai_working_help/package.json create mode 100644 services/ia_dev/ai_working_help/server.js create mode 100644 services/ia_dev/deploy/README-lpldf-https-watch.md create mode 100644 services/ia_dev/deploy/_lib/colors.sh create mode 100644 services/ia_dev/deploy/_lib/env-map.sh create mode 100644 services/ia_dev/deploy/_lib/git-flow.sh create mode 100644 services/ia_dev/deploy/_lib/ssh.sh create mode 100755 services/ia_dev/deploy/branch-align.sh create mode 100644 services/ia_dev/deploy/bump-version.sh create mode 100755 services/ia_dev/deploy/change-to-all-branches.sh create mode 100755 
services/ia_dev/deploy/deploy-by-script-to.sh create mode 100755 services/ia_dev/deploy/deploy.sh create mode 100644 services/ia_dev/deploy/lib/README.md create mode 100644 services/ia_dev/deploy/lib/deploy-conf-handling.sh create mode 100644 services/ia_dev/deploy/lib/deploy-log.sh create mode 100644 services/ia_dev/deploy/lib/deploy-methodology.sh create mode 100644 services/ia_dev/deploy/lib/ssh.sh create mode 100755 services/ia_dev/deploy/orchestrator.sh create mode 100755 services/ia_dev/deploy/pousse.sh create mode 100644 services/ia_dev/deploy/proxy-units/lpldf-https-watch.service create mode 100644 services/ia_dev/deploy/proxy-units/lpldf-https-watch.timer create mode 100755 services/ia_dev/deploy/run-project-hooks.sh create mode 100755 services/ia_dev/deploy/scripts/add-ssh-key-anthony.sh create mode 100755 services/ia_dev/deploy/scripts/install-lpldf-https-watch-on-proxy.sh create mode 100755 services/ia_dev/git-issues/agent-loop-chat-iterations.sh create mode 100755 services/ia_dev/git-issues/agent-loop-lock-acquire.sh create mode 100755 services/ia_dev/git-issues/agent-loop-lock-release.sh create mode 100755 services/ia_dev/git-issues/agent-loop-n-cycles.sh create mode 100755 services/ia_dev/git-issues/agent-loop-retrieval-once.sh create mode 100755 services/ia_dev/git-issues/agent-loop-stop-requested.sh create mode 100755 services/ia_dev/git-issues/agent-loop-stop.sh create mode 100755 services/ia_dev/git-issues/agent-loop-treatment.sh create mode 100644 services/ia_dev/git-issues/agent-loop.env.example create mode 100755 services/ia_dev/git-issues/agent-loop.sh create mode 100755 services/ia_dev/git-issues/comment-issue.sh create mode 100755 services/ia_dev/git-issues/create-branch-for-issue.sh create mode 100755 services/ia_dev/git-issues/get-issue.sh create mode 100644 services/ia_dev/git-issues/imap-bridge.env.example create mode 100755 services/ia_dev/git-issues/lib.sh create mode 100755 services/ia_dev/git-issues/list-open-issues.sh create mode 
100755 services/ia_dev/git-issues/list-pending-spooler.sh create mode 100755 services/ia_dev/git-issues/mail-create-issue-from-email.py create mode 100755 services/ia_dev/git-issues/mail-create-issue-from-email.sh create mode 100644 services/ia_dev/git-issues/mail-get-thread.py create mode 100755 services/ia_dev/git-issues/mail-get-thread.sh create mode 100755 services/ia_dev/git-issues/mail-list-unread.py create mode 100755 services/ia_dev/git-issues/mail-list-unread.sh create mode 100755 services/ia_dev/git-issues/mail-mark-read.py create mode 100755 services/ia_dev/git-issues/mail-mark-read.sh create mode 100755 services/ia_dev/git-issues/mail-send-reply.py create mode 100755 services/ia_dev/git-issues/mail-send-reply.sh create mode 100644 services/ia_dev/git-issues/mail-thread-log.py create mode 100755 services/ia_dev/git-issues/mail-thread-log.sh create mode 100755 services/ia_dev/git-issues/mail-to-issue.py create mode 100755 services/ia_dev/git-issues/mail-to-issue.sh create mode 100644 services/ia_dev/git-issues/mail_common.py create mode 100755 services/ia_dev/git-issues/print-issue-prompt.sh create mode 100644 services/ia_dev/git-issues/project_config.py create mode 100644 services/ia_dev/git-issues/tickets-fetch-inbox.py create mode 100755 services/ia_dev/git-issues/tickets-fetch-inbox.sh create mode 100755 services/ia_dev/git-issues/wiki-api-test.sh create mode 100755 services/ia_dev/git-issues/wiki-get-page.sh create mode 100755 services/ia_dev/git-issues/wiki-migrate-docs.sh create mode 100755 services/ia_dev/git-issues/wiki-put-page.sh create mode 100644 services/ia_dev/git-issues/write-response-spooler.py create mode 100755 services/ia_dev/git-issues/write-response-spooler.sh create mode 100644 services/ia_dev/lib/README.md create mode 100644 services/ia_dev/lib/conf_path_resolve.sh create mode 100644 services/ia_dev/lib/project_config.sh create mode 100644 services/ia_dev/lib/project_git_root_from_conf.sh create mode 100644 
services/ia_dev/lib/smart_ide_logs.sh create mode 120000 services/ia_dev/projects/builazoo create mode 120000 services/ia_dev/projects/enso create mode 120000 services/ia_dev/projects/smart_ide create mode 100644 services/ia_dev/tools/proxy-https-watch-lpldf.env.example create mode 100755 services/ia_dev/tools/proxy-https-watch-lpldf.sh create mode 100755 services/ia_dev/tools/site-generate.sh create mode 100644 services/ia_dev/tree.txt diff --git a/.gitignore b/.gitignore index a0ad980..f56ee42 100644 --- a/.gitignore +++ b/.gitignore @@ -18,6 +18,9 @@ logs/**/*.log # .logs/ : journaux SSO / API globale (README + .gitignore versionnés) .logs/**/*.log +# Local mirrors of deployed data (SSH pull, RAG ingestion) +.data/ + # projects/ : ignorer tout répertoire d'id sous projects/ sauf les squelettes versionnés (conf.json par id, gabarit example/) projects/* !projects/README.md @@ -37,3 +40,5 @@ builazoo/* # Projet IDE actif (copie locale de active-project.json.example) projects/active-project.json +.workspace +*.code-workspace \ No newline at end of file diff --git a/.smartIde/agents/ia-dev-agent-loop.md b/.smartIde/agents/ia-dev-agent-loop.md index a9a2eae..1e81479 100644 --- a/.smartIde/agents/ia-dev-agent-loop.md +++ b/.smartIde/agents/ia-dev-agent-loop.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. -- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. 
+- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). -- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). +- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/agent-loop.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. +Lire le fichier **`services/ia_dev/.smartIde/agents/agent-loop.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. 
diff --git a/.smartIde/agents/ia-dev-branch-align-by-script-from-test.md b/.smartIde/agents/ia-dev-branch-align-by-script-from-test.md index b270fcd..9b6c04d 100644 --- a/.smartIde/agents/ia-dev-branch-align-by-script-from-test.md +++ b/.smartIde/agents/ia-dev-branch-align-by-script-from-test.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. -- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). 
-- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). +- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/branch-align-by-script-from-test.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. +Lire le fichier **`services/ia_dev/.smartIde/agents/branch-align-by-script-from-test.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. diff --git a/.smartIde/agents/ia-dev-change-to-all-branches.md b/.smartIde/agents/ia-dev-change-to-all-branches.md index f036ec3..44a90ee 100644 --- a/.smartIde/agents/ia-dev-change-to-all-branches.md +++ b/.smartIde/agents/ia-dev-change-to-all-branches.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. 
-- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). -- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). +- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/change-to-all-branches.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. 
+Lire le fichier **`services/ia_dev/.smartIde/agents/change-to-all-branches.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. diff --git a/.smartIde/agents/ia-dev-closure-point-7-justification.md b/.smartIde/agents/ia-dev-closure-point-7-justification.md index 4ade9f4..bfcfcd3 100644 --- a/.smartIde/agents/ia-dev-closure-point-7-justification.md +++ b/.smartIde/agents/ia-dev-closure-point-7-justification.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. -- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. 
-- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). -- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). +- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/closure-point-7-justification.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. +Lire le fichier **`services/ia_dev/.smartIde/agents/closure-point-7-justification.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. diff --git a/.smartIde/agents/ia-dev-code.md b/.smartIde/agents/ia-dev-code.md index 82ecc96..710c076 100644 --- a/.smartIde/agents/ia-dev-code.md +++ b/.smartIde/agents/ia-dev-code.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. 
Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. -- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). -- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). +- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/code.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. 
+Lire le fichier **`services/ia_dev/.smartIde/agents/code.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. diff --git a/.smartIde/agents/ia-dev-deploy-by-script.md b/.smartIde/agents/ia-dev-deploy-by-script.md index d425f6b..e7facb4 100644 --- a/.smartIde/agents/ia-dev-deploy-by-script.md +++ b/.smartIde/agents/ia-dev-deploy-by-script.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. -- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. 
Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). -- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). +- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/deploy-by-script.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. +Lire le fichier **`services/ia_dev/.smartIde/agents/deploy-by-script.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. diff --git a/.smartIde/agents/ia-dev-deploy-pprod-or-prod.md b/.smartIde/agents/ia-dev-deploy-pprod-or-prod.md index 78d0a03..edc5d1e 100644 --- a/.smartIde/agents/ia-dev-deploy-pprod-or-prod.md +++ b/.smartIde/agents/ia-dev-deploy-pprod-or-prod.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. 
-- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). -- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). +- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/deploy-pprod-or-prod.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. 
+Lire le fichier **`services/ia_dev/.smartIde/agents/deploy-pprod-or-prod.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. diff --git a/.smartIde/agents/ia-dev-docupdate.md b/.smartIde/agents/ia-dev-docupdate.md index 16f693f..6df960b 100644 --- a/.smartIde/agents/ia-dev-docupdate.md +++ b/.smartIde/agents/ia-dev-docupdate.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. -- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. 
Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). -- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). +- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/docupdate.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. +Lire le fichier **`services/ia_dev/.smartIde/agents/docupdate.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. diff --git a/.smartIde/agents/ia-dev-evol.md b/.smartIde/agents/ia-dev-evol.md index 029fca7..1656c0b 100644 --- a/.smartIde/agents/ia-dev-evol.md +++ b/.smartIde/agents/ia-dev-evol.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. -- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. 
Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). -- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). +- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/evol.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. +Lire le fichier **`services/ia_dev/.smartIde/agents/evol.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. 
**Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. diff --git a/.smartIde/agents/ia-dev-fix-lint.md b/.smartIde/agents/ia-dev-fix-lint.md index 524de9c..4c8fc16 100644 --- a/.smartIde/agents/ia-dev-fix-lint.md +++ b/.smartIde/agents/ia-dev-fix-lint.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. -- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). 
-- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). +- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/fix-lint.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. +Lire le fichier **`services/ia_dev/.smartIde/agents/fix-lint.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. diff --git a/.smartIde/agents/ia-dev-fix-search.md b/.smartIde/agents/ia-dev-fix-search.md index 8f0632c..2a5ed69 100644 --- a/.smartIde/agents/ia-dev-fix-search.md +++ b/.smartIde/agents/ia-dev-fix-search.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. -- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. 
: se placer dans ce répertoire avant d'exécuter. +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). -- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). +- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/fix-search.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. +Lire le fichier **`services/ia_dev/.smartIde/agents/fix-search.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. 
diff --git a/.smartIde/agents/ia-dev-fix.md b/.smartIde/agents/ia-dev-fix.md index 60d0675..32b5dcb 100644 --- a/.smartIde/agents/ia-dev-fix.md +++ b/.smartIde/agents/ia-dev-fix.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. -- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). -- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). 
+- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/fix.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. +Lire le fichier **`services/ia_dev/.smartIde/agents/fix.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. diff --git a/.smartIde/agents/ia-dev-git-issues-process.md b/.smartIde/agents/ia-dev-git-issues-process.md index d1fd045..d63bd95 100644 --- a/.smartIde/agents/ia-dev-git-issues-process.md +++ b/.smartIde/agents/ia-dev-git-issues-process.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. -- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. 
: se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). -- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). +- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/git-issues-process.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. +Lire le fichier **`services/ia_dev/.smartIde/agents/git-issues-process.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. 
diff --git a/.smartIde/agents/ia-dev-notary-ai-loop.md b/.smartIde/agents/ia-dev-notary-ai-loop.md index 7f10b35..ca258c0 100644 --- a/.smartIde/agents/ia-dev-notary-ai-loop.md +++ b/.smartIde/agents/ia-dev-notary-ai-loop.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. -- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). -- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). 
+- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/notary-ai-loop.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. +Lire le fichier **`services/ia_dev/.smartIde/agents/notary-ai-loop.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. diff --git a/.smartIde/agents/ia-dev-notary-ai-process.md b/.smartIde/agents/ia-dev-notary-ai-process.md index 7d0e6af..cf0b530 100644 --- a/.smartIde/agents/ia-dev-notary-ai-process.md +++ b/.smartIde/agents/ia-dev-notary-ai-process.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. -- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. 
Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). -- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). +- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/notary-ai-process.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. +Lire le fichier **`services/ia_dev/.smartIde/agents/notary-ai-process.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. 
diff --git a/.smartIde/agents/ia-dev-push-by-script.md b/.smartIde/agents/ia-dev-push-by-script.md index 0bcd3e5..361042e 100644 --- a/.smartIde/agents/ia-dev-push-by-script.md +++ b/.smartIde/agents/ia-dev-push-by-script.md @@ -7,17 +7,17 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. -- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). -- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). 
+- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). ## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/push-by-script.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. +Lire le fichier **`services/ia_dev/.smartIde/agents/push-by-script.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. diff --git a/.smartIde/agents/ia-dev-setup-host.md b/.smartIde/agents/ia-dev-setup-host.md index dd6ad11..ba86f34 100644 --- a/.smartIde/agents/ia-dev-setup-host.md +++ b/.smartIde/agents/ia-dev-setup-host.md @@ -7,19 +7,19 @@ is_background: false ## Contexte obligatoire (dépôt smart_ide → ia_dev) -- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `ia_dev/projects/smart_ide`). +- **Identifiant projet ia_dev :** `smart_ide` (conf : `projects/smart_ide/` à la racine ; lien sous `services/ia_dev/projects/smart_ide`). - **Environnement cible :** `test`, `pprod` ou `prod`. Le reprendre dans le message utilisateur ; **si absent, le demander** avant d'exécuter des scripts dépendants du env. -- **Racine des scripts ia_dev :** le dossier `ia_dev/` à la racine du workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. : se placer dans ce répertoire avant d'exécuter. +- **Racine des scripts ia_dev :** le dossier `services/ia_dev/` dans le workspace **smart_ide**. Pour `deploy/`, `git-issues/`, etc. 
: se placer dans ce répertoire avant d'exécuter. - **Variables / arguments :** `IA_PROJECT_ID=smart_ide` et/ou `--project smart_ide` selon le script ; référence : `docs/repo/ia-dev-project-conf-schema.md`. - **MAIL_TO (ticketing / mails) :** exporter `MAIL_TO` avec l'adresse pour l'environnement choisi, lue dans `projects/smart_ide/conf.json` (racine workspace) → `tickets.authorized_emails.to` : test → `AI.SMART_IDE.TEST@4nkweb.com`, pprod → `AI.SMART_IDE.PPROD@4nkweb.com`, prod → `AI.SMART_IDE.PROD@4nkweb.com`. -- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous ia_dev souvent absent). -- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `ia_dev/` et chemins issus de `conf.json`). +- **Dépôt applicatif :** racine du workspace smart_ide ; `project_path` dans `conf.json` doit y pointer. Doc principale du monorepo : `docs/` à la racine (`projects/smart_ide/docs` sous services/ia_dev souvent absent). +- **Chemins machines** dans les agents ia_dev (autre machine utilisateur) : les remplacer par le clone réel (workspace + `services/ia_dev/` et chemins issus de `conf.json`). - **`projects/*/conf.json` :** ne pas modifier sans validation utilisateur (règle ia_dev). -- **Périmètre hôte :** scripts et doc smart_ide pour le socle (Ollama, AnythingLLM, systemd, `setup/`, `scripts/`) ; les commandes définies dans l'agent ia_dev s'exécutent depuis `ia_dev/` lorsque la procédure l'exige. +- **Périmètre hôte :** scripts et doc smart_ide pour le socle (Ollama, AnythingLLM, systemd, `setup/`, `scripts/`) ; les commandes définies dans l'agent ia_dev s'exécutent depuis `services/ia_dev/` lorsque la procédure l'exige. 
## Délégation -Lire le fichier **`ia_dev/.smartIde/agents/setup-host.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. +Lire le fichier **`services/ia_dev/.smartIde/agents/setup-host.md`** et appliquer **intégralement** sa procédure et ses contraintes, en respectant le contexte ci-dessus. **Référence résolution projet / env :** `docs/repo/ia-dev-project-conf-schema.md`. diff --git a/.vscode/settings.json b/.vscode/settings.json index 85b9e03..c211dea 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,3 +1,4 @@ { "smartIde.activeProjectId": "smart_ide" } + diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..efff5dc --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,21 @@ +# Changelog + +## 0.0.5 - 2026-04-04 + +### Added + +- `scripts/remote-data-ssh-sync.sh`: mirror deployed data over SSH into `.data/remote-data/` and optionally ingest into AnythingLLM. +- AnythingLLM sync enhancements: `--upload-all` + `--upload-prefix` modes for non-git directory ingestion. +- `site-generate` ia_dev tool + gateway runner to scaffold Vite/React sites with OIDC + Smart IDE chat (via sso-gateway → orchestrator). +- Documentation: remote data SSH sync, E2E browser (Carbonyl manual mode). + +### Changed + +- Canonical `ia_dev` module location: `services/ia_dev/` (with `IA_DEV_ROOT` resolution); docs and integration updated accordingly. +- SSO gateway: upstream allowlist support via `SSO_ALLOWED_UPSTREAMS`. +- Global API: explicit `503` when an upstream requires a token but the token is not configured. +- `install-anythingllm-post-merge-hook.sh`: supports `--all` and `--project ` based on `projects//conf.json`. + +### Fixed + +- `cron/git-pull-project-clones.sh`: error propagation and summary exit code for failed pulls. 
diff --git a/VERSION b/VERSION index bcab45a..bbdeab6 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.0.3 +0.0.5 diff --git a/builazoo/README.md b/builazoo/README.md index 7fbf03a..16e8de1 100644 --- a/builazoo/README.md +++ b/builazoo/README.md @@ -1,3 +1,6 @@ # builazoo -Racine du dépôt applicatif **builazoo** (clone Git ici). Configuration **smart_ide** / **ia_dev** : [`../projects/builazoo/conf.json`](../projects/builazoo/conf.json). +This directory is intentionally empty. + +The application repository is expected as a sibling checkout (default: `../builazoo`), configured via `projects/builazoo/conf.json`. + diff --git a/config/services.local.env.example b/config/services.local.env.example index 026d4a6..a9e12f0 100644 --- a/config/services.local.env.example +++ b/config/services.local.env.example @@ -18,6 +18,7 @@ GLOBAL_API_URL=http://127.0.0.1:37149 # OIDC_JWKS_URI= # SSO_CORS_ORIGIN= # SSO_GATEWAY_MAX_BODY_BYTES=33554432 +# SSO_ALLOWED_UPSTREAMS=orchestrator # --- Jetons / hôtes micro-services (consommés par smart-ide-global-api) --- ORCHESTRATOR_HOST=127.0.0.1 diff --git a/cron/git-pull-project-clones.sh b/cron/git-pull-project-clones.sh index dd8ac1b..0e7d2ec 100755 --- a/cron/git-pull-project-clones.sh +++ b/cron/git-pull-project-clones.sh @@ -20,6 +20,8 @@ elif [[ -n "${1:-}" && "$1" != "--all" ]]; then usage fi +err_count=0 + command -v jq >/dev/null 2>&1 || { echo "$LOG_PREFIX jq not found; install jq." 
>&2 exit 1 @@ -77,6 +79,10 @@ for conf in "$PROJECTS_CONF"/*/conf.json; do if [[ -n "$filter_id" && "$id" != "$filter_id" ]]; then continue fi + if [[ "$id" == "example" ]]; then + echo "$LOG_PREFIX skip $id: template project" + continue + fi if [[ "$(jq -r '.cron.git_pull // true' "$conf")" == "false" ]]; then echo "$LOG_PREFIX skip $id: cron.git_pull is false" continue @@ -89,5 +95,12 @@ for conf in "$PROJECTS_CONF"/*/conf.json; do if [[ "$path" != /* ]]; then path="$(cd "$ROOT" && realpath -m "$path" 2>/dev/null || echo "$ROOT/$path")" fi - pull_clone "$id" "$path" || true + if ! pull_clone "$id" "$path"; then + err_count=$((err_count + 1)) + fi done + +if [[ "$err_count" -gt 0 ]]; then + echo "$LOG_PREFIX errors: $err_count (see output above)" >&2 + exit 1 +fi diff --git a/docs/API/global-api.md b/docs/API/global-api.md index 6165f9e..faeca5d 100644 --- a/docs/API/global-api.md +++ b/docs/API/global-api.md @@ -23,14 +23,14 @@ Réponse `200` : `{ "status": "ok", "service": "smart-ide-global-api" }`. ### `GET /v1/upstreams` -Liste les clés d’amont : `{ "upstreams": [ "orchestrator", ... ] }` (même liste que côté SSO). +Liste les clés d’amont connues : `{ "upstreams": [ "orchestrator", ... ] }`. La passerelle SSO peut renvoyer un sous-ensemble (allowlist via `SSO_ALLOWED_UPSTREAMS`). ### Proxy — `ANY /v1/upstream/<key>/<path>` - **`<key>`** : `orchestrator`, `repos_devtools`, `ia_dev_gateway`, `anythingllm_devtools`, `tools_bridge`, `langextract`, `regex_search`, `claw_proxy`, `local_office`. - **`<path>`** : transmis à l’URL de base du service (ex. `/v1/upstream/orchestrator/v1/...` → `http://127.0.0.1:37145/v1/...`). - **Corps** : relayé (limite `GLOBAL_API_MAX_BODY_BYTES`, défaut 32 MiB). -- **Erreurs** : `401` si Bearer interne absent ou incorrect ; `404` si clé inconnue ; `503` si `local_office` sans `LOCAL_OFFICE_API_KEY`. +- **Erreurs** : `401` si Bearer interne absent ou incorrect ; `404` si clé inconnue ; `503` si jeton manquant pour l’amont (ex.
`ORCHESTRATOR_TOKEN`, `LOCAL_OFFICE_API_KEY`, etc.). ## Journaux diff --git a/docs/API/sso-gateway-api.md b/docs/API/sso-gateway-api.md index 3eb2ec9..2dd058f 100644 --- a/docs/API/sso-gateway-api.md +++ b/docs/API/sso-gateway-api.md @@ -22,14 +22,14 @@ Vérifie le Bearer utilisateur. Réponse `200` : `{ "valid": true, "claims": { . ### `GET /v1/upstreams` -Liste les clés de proxy disponibles : `{ "upstreams": [ "orchestrator", ... ] }`. +Liste les clés de proxy disponibles (allowlist) : `{ "upstreams": [ "orchestrator", ... ] }`. ### Proxy — `ANY /proxy/<key>/<path>` - **`<key>`** : voir liste ci-dessus (`repos_devtools`, `orchestrator`, etc.). - **`<path>`** : relayé vers **smart-ide-global-api** sous `/v1/upstream/<key>/<path>` (ex. `/proxy/orchestrator/v1/...` → `GLOBAL_API_URL/v1/upstream/orchestrator/v1/...`). - **Corps** : relayé pour les méthodes avec body (limite `SSO_GATEWAY_MAX_BODY_BYTES`, défaut 32 MiB). -- **Réponses d’erreur** : `401` si Bearer utilisateur absent ou invalide ; `404` si clé inconnue ; erreurs amont si l’API globale ou un micro-service refuse la requête. +- **Réponses d’erreur** : `401` si Bearer utilisateur absent ou invalide ; `403` si l’upstream est hors allowlist (`SSO_ALLOWED_UPSTREAMS`) ; `404` si route inconnue ; erreurs amont si l’API globale ou un micro-service refuse la requête. L’en-tête `Authorization` utilisateur n’est **pas** transmis à l’API globale ; il est remplacé par `GLOBAL_API_INTERNAL_TOKEN`. Les claims OIDC sont transmis en `X-OIDC-Sub` / `X-OIDC-Email` jusqu’aux micro-services. Voir [sso-gateway-service.md](../features/sso-gateway-service.md) et [global-api.md](./global-api.md). @@ -51,11 +51,12 @@ Aucun stockage d’**utilisateurs** ou de **comptes par projet** dans ce service | `SSO_GATEWAY_HOST` / `SSO_GATEWAY_PORT` | Bind HTTP | | `SSO_CORS_ORIGIN` | Si défini, en-têtes CORS sur les réponses | | `SSO_GATEWAY_MAX_BODY_BYTES` | Taille max du corps en entrée | +| `SSO_ALLOWED_UPSTREAMS` | Allowlist des upstreams (CSV). Défaut: `orchestrator`.
`*`/`all` pour autoriser tout. | | `GLOBAL_API_URL` | Base HTTP de smart-ide-global-api (défaut `http://127.0.0.1:37149`) | | `GLOBAL_API_INTERNAL_TOKEN` | Obligatoire — même valeur que sur smart-ide-global-api | | `SMART_IDE_MONOREPO_ROOT` | Optionnel — racine pour écrire sous `.logs/sso-gateway/` | -Les jetons et hôtes des micro-services sont lus par **smart-ide-global-api** ; la liste des clés `GET /v1/upstreams` provient du module **`@4nk/smart-ide-upstreams`** (`packages/smart-ide-upstreams/`). Voir `config/services.local.env.example` et [global-api.md](./global-api.md). +Les jetons et hôtes des micro-services sont lus par **smart-ide-global-api** ; la liste des clés connues provient du module **`@4nk/smart-ide-upstreams`** (`packages/smart-ide-upstreams/`). `GET /v1/upstreams` applique ensuite `SSO_ALLOWED_UPSTREAMS`. Voir `config/services.local.env.example` et [global-api.md](./global-api.md). ## Voir aussi diff --git a/docs/README.md b/docs/README.md index 9216ea6..b3500af 100644 --- a/docs/README.md +++ b/docs/README.md @@ -26,8 +26,8 @@ Vue d’ensemble et index complet : **[repo/README.md](./repo/README.md)**. 
Règ | [repo/ia-dev-smart-ide-integration.md](./repo/ia-dev-smart-ide-integration.md) | Module `ia_dev` dans smart_ide | | [repo/ia-dev-repository-overview.md](./repo/ia-dev-repository-overview.md) | Dépôt ia_dev : agents, deploy | | [repo/ia-dev-project-conf-schema.md](./repo/ia-dev-project-conf-schema.md) | Schéma `conf.json`, résolution projet | -| [repo/ia-dev-deploy-lib.md](./repo/ia-dev-deploy-lib.md) | `ia_dev/deploy/lib/` | -| [repo/ia-dev-shared-lib.md](./repo/ia-dev-shared-lib.md) | `ia_dev/lib/project_config.sh` | +| [repo/ia-dev-deploy-lib.md](./repo/ia-dev-deploy-lib.md) | `IA_DEV_ROOT/deploy/lib/` | +| [repo/ia-dev-shared-lib.md](./repo/ia-dev-shared-lib.md) | `IA_DEV_ROOT/lib/project_config.sh` | | [repo/service-*.md](./repo/README.md) | Exploitation de chaque micro-service (voir index `repo/README`) | | [repo/script-anythingllm-pull-sync.md](./repo/script-anythingllm-pull-sync.md) | Hook post-merge → AnythingLLM | | [repo/service-anythingllm-devtools.md](./repo/service-anythingllm-devtools.md) | Service HTTP AnythingLLM + devtools | diff --git a/docs/core-ide.md b/docs/core-ide.md index ae2a16c..006c140 100644 --- a/docs/core-ide.md +++ b/docs/core-ide.md @@ -39,6 +39,23 @@ git clone https://github.com/lapce/lapce.git core_ide (Ou l’URL / remote interne retenu par l’équipe ; SSH si configuré.) +Alternative (recommandé) : utiliser le script de dépôt, qui vérifie aussi les remotes : + +```bash +./scripts/ensure-core-ide.sh +``` + +## Patches Smart IDE (Lapce) + +Le dossier `core_ide/` est ignoré par Git côté monorepo. 
Les modifications Smart IDE appliquées à Lapce sont donc **versionnées sous forme de patches** dans : + +- `patches/lapce/` (fichiers `*.patch` + liste ordonnée `series`) + +Scripts associés : + +- `./scripts/core-ide-apply-patches.sh` : applique `patches/lapce/series` sur `core_ide/` +- `./scripts/core-ide-export-patches.sh` : exporte les commits `core_ide` (base..HEAD) vers `patches/lapce/*.patch` et régénère `series` + ### Migration depuis l’ancien emplacement Si un clone Lapce existait sous `forks/lapce/`, le renommer une fois : diff --git a/docs/deployment-target.md b/docs/deployment-target.md index 19f8b06..98af53b 100644 --- a/docs/deployment-target.md +++ b/docs/deployment-target.md @@ -25,6 +25,18 @@ L’utilisateur travaille depuis un **Linux client** ; le **calcul**, les **mod - **Local Office** : données sous `services/local-office/data/` (ou chemins surchargés par `STORAGE_PATH` / `DATABASE_PATH`) sur l’**hôte qui exécute l’API** ; à sauvegarder et à protéger comme toute instance de fichiers métier. - Le client doit disposer d’une **identité SSH** autorisée sur le serveur (voir `add-ssh-key.sh` et [infrastructure.md](./infrastructure.md)). +## Tunnels SSH (poste client) + +Le poste client peut exposer localement les ports « loopback » du serveur via `ssh -L ...`. + +Script d’aide (génère la commande à exécuter, sans la daemoniser) : + +```bash +./scripts/smart-ide-ssh-tunnel-plan.sh --mode minimal +``` + +Ce script résout `--project/--env` via `projects/active-project.json` (local, gitignoré) ou variables, puis lit `projects//conf.json` → `smart_ide.remote_data_access.environments..ssh_host_alias`. 
+ ## Documentation liée - Vision produit et envs : [platform-target.md](./platform-target.md) diff --git a/docs/ecosystem-architecture-and-sync.md b/docs/ecosystem-architecture-and-sync.md index 955e205..ee1b6b1 100644 --- a/docs/ecosystem-architecture-and-sync.md +++ b/docs/ecosystem-architecture-and-sync.md @@ -8,7 +8,7 @@ Références complémentaires : [services-functional-scope.md](./services-functi | Élément | Rôle | Où vit la vérité opérationnelle | |---------|------|----------------------------------| -| **smart_ide** | Socle : doc, `services/*`, scripts, systemd, confs `ia_dev` (`./projects//conf.json`), module **`ia_dev/`**, journaux **`logs/`** | Dépôt Git **smart_ide** (forge interne) | +| **smart_ide** | Socle : doc, `services/*`, scripts, systemd, confs `ia_dev` (`./projects//conf.json`), module **`services/ia_dev/`** (via `IA_DEV_ROOT`), journaux **`logs/`** | Dépôt Git **smart_ide** (forge interne) | | **Projets développés** | Code métier (docv, autres produits) : sources, builds, tests | **Autres** dépôts Git ; clones sur disque en dehors de `./projects/` (convention : répertoire frère `../projects//` ou équivalent) | | **Couche API IA** | Routage HTTP, auth, appels vers LLM, RAG, outils, agents | Processus sur l’**hôte** (systemd, ports locaux) ; contrats décrits sous `docs/API/` | | **Git (hôte)** | Historique des dépôts, hooks, branches par environnement | Chaque dépôt ; politique de branche documentée par projet | @@ -67,7 +67,7 @@ flowchart TB - **Source de vérité** pour le code et la documentation versionnée des projets. - **Branches** : alignement avec les environnements (test / pprod / prod) selon la politique du projet ; smart_ide documente les cibles dans [platform-target.md](./platform-target.md) et [deployment-target.md](./deployment-target.md). 
-- **Module `ia_dev`** : présent dans l’arborescence du dépôt **smart_ide** ; liens `ia_dev/projects/*` et scripts documentés ([ia_dev-module.md](./ia_dev-module.md), [repo/projects-directory.md](./repo/projects-directory.md)). +- **Module `ia_dev`** : présent dans l’arborescence du dépôt **smart_ide** sous `services/ia_dev/` ; liens `IA_DEV_ROOT/projects/*` et scripts documentés ([ia_dev-module.md](./ia_dev-module.md), [repo/projects-directory.md](./repo/projects-directory.md)). ### Ollama @@ -89,8 +89,8 @@ Objectif : après un changement **tracé dans Git**, les systèmes en aval (Anyt ### 4.1 Ordre de référence (nouvelle machine ou post-clone) -1. Cloner **smart_ide** : `git clone …` (le répertoire **`ia_dev/`** suit le même historique Git que le monorepo). -2. Recréer les liens `ia_dev/projects/` si besoin : `./scripts/ensure-ia-dev-project-link.sh smart_ide` (ou le wrapper `ensure-ia-dev-smart-ide-project-link.sh`) — voir [repo/projects-directory.md](./repo/projects-directory.md). +1. Cloner **smart_ide** : `git clone …` (le répertoire **`services/ia_dev/`** suit le même historique Git que le monorepo). +2. Recréer les liens `IA_DEV_ROOT/projects/` si besoin (ex. `services/ia_dev/projects/`) : `./scripts/ensure-ia-dev-project-link.sh smart_ide` (ou le wrapper `ensure-ia-dev-smart-ide-project-link.sh`) — voir [repo/projects-directory.md](./repo/projects-directory.md). 3. Cloner les **projets applicatifs** à l’emplacement convenu (ex. `../projects//`) et vérifier les chemins **absolus** dans `projects//conf.json` si `ia_dev` doit les piloter. 4. Démarrer **Ollama** et **AnythingLLM** sur l’hôte ([services.md](./services.md)) ; créer les **workspaces** et noter les **slugs**. 5. Configurer l’environnement de synchro AnythingLLM : `~/.config/4nk/anythingllm-sync.env` (URL, clé API) — ne pas commiter les secrets. @@ -104,13 +104,13 @@ Objectif : après un changement **tracé dans Git**, les systèmes en aval (Anyt ### 4.3 Cycle de travail sur smart_ide -1. 
`git pull` sur smart_ide (inclut les mises à jour sous **`ia_dev/`**). -2. Réexécuter **`ensure-ia-dev-project-link.sh`** pour chaque id versionné (`smart_ide`, `enso`, `builazoo`, …) si `ia_dev/projects/` a été réinitialisé. +1. `git pull` sur smart_ide (inclut les mises à jour sous **`services/ia_dev/`**). +2. Réexécuter **`ensure-ia-dev-project-link.sh`** pour chaque id versionné (`smart_ide`, `enso`, `builazoo`, …) si `IA_DEV_ROOT/projects/` a été réinitialisé. 3. Option : installer le même hook **post-merge** sur le dépôt **smart_ide** si un workspace AnythingLLM est dédié au monorepo (fichier `.anythingllm.json` + slug). ### 4.4 Agents, déploiement, ticketing (`ia_dev`) -- Exécution depuis la **racine** de `ia_dev/` ; résolution du projet : `IA_PROJECT_ID`, `--project`, `MAIL_TO`, `AI_AGENT_TOKEN` — voir [repo/ia-dev-project-conf-schema.md](./repo/ia-dev-project-conf-schema.md). +- Exécution depuis la **racine** de `IA_DEV_ROOT` (ex. `services/ia_dev/`) ; résolution du projet : `IA_PROJECT_ID`, `--project`, `MAIL_TO`, `AI_AGENT_TOKEN` — voir [repo/ia-dev-project-conf-schema.md](./repo/ia-dev-project-conf-schema.md). - Les scripts **ne remplacent pas** Git : ils **lisent** `conf.json` pour savoir où sont les dépôts et comment déployer. ### 4.5 Cohérence « clone présent + workspace aligné » diff --git a/docs/features/anythingllm-pull-sync-after-pull.md b/docs/features/anythingllm-pull-sync-after-pull.md index d601cec..457294a 100644 --- a/docs/features/anythingllm-pull-sync-after-pull.md +++ b/docs/features/anythingllm-pull-sync-after-pull.md @@ -1,33 +1,39 @@ -# AnythingLLM — synchronisation après `git pull` +# AnythingLLM — sync after `git pull` -## Objectif +## Goal -Déclencher un envoi vers AnythingLLM des fichiers **modifiés ou ajoutés** par un `git pull` (merge fast-forward ou merge classique), sans action manuelle dans l’éditeur. +Upload files **added or modified** by a `git pull` (fast-forward or merge) to AnythingLLM without manual IDE actions. 
-## Impacts +## What it changes -- Chaque dépôt concerné peut installer un hook Git **`post-merge`** qui appelle `scripts/anythingllm-pull-sync/sync.mjs`. -- Les mêmes exclusions que **`.4nkaiignore`** (et quelques motifs système) s’appliquent. -- Les suppressions ou renommages ne sont pas reflétés comme suppressions côté AnythingLLM dans cette version (upload uniquement). +- Each target repo can install a Git **`post-merge`** hook that calls `scripts/anythingllm-pull-sync/sync.mjs`. +- The same exclusions as **`.4nkaiignore`** (plus a few system patterns) apply. +- Deletions/renames are not mirrored as deletions in AnythingLLM in this version (upload only). -## Modifications (dépôt smart_ide) +## Implementation (smart_ide repo) -- `scripts/anythingllm-pull-sync/` : script Node (ESM), dépendance `ignore`, `package.json`, `README.md`. -- `scripts/install-anythingllm-post-merge-hook.sh` : pose le hook dans `.git/hooks/post-merge` avec le chemin absolu vers `sync.mjs`. +- `scripts/anythingllm-pull-sync/`: Node (ESM) script + dependency `ignore`. +- `scripts/install-anythingllm-post-merge-hook.sh`: installs `.git/hooks/post-merge` with an absolute path to `sync.mjs`. -## Configuration par dépôt +## Repo configuration (workspace slug) -- Fichier optionnel **`.anythingllm.json`** à la racine : `{ "workspaceSlug": "" }`. -- Ou variable d’environnement **`ANYTHINGLLM_WORKSPACE_SLUG`** (priorité documentée dans le README du script). +Slug resolution order (first match wins): -## Modalités de déploiement +1. `ANYTHINGLLM_WORKSPACE_SLUG` (env) +2. `.anythingllm.json` at repo root: `{ "workspaceSlug": "" }` +3. smart_ide `projects//conf.json` (matches `project_path` to the repo root; reads `smart_ide.anythingllm_workspace_slug[SMART_IDE_ENV]`, default env `test`) -1. Sur la machine de développement : `npm install` dans `scripts/anythingllm-pull-sync`. -2. Créer `~/.config/4nk/anythingllm-sync.env` avec `ANYTHINGLLM_BASE_URL` et `ANYTHINGLLM_API_KEY` (ne pas commiter la clé). 
-3. Exécuter `install-anythingllm-post-merge-hook.sh ` pour chaque dépôt à synchroniser. -4. S’assurer qu’AnythingLLM (collector) est joignable depuis cette machine. +## Deployment on the host -## Modalités d’analyse +1. Run `npm install` in `scripts/anythingllm-pull-sync`. +2. Create `~/.config/4nk/anythingllm-sync.env` with: + - `ANYTHINGLLM_BASE_URL` + - `ANYTHINGLLM_API_KEY` +3. Install the hook: + - per repo: `./scripts/install-anythingllm-post-merge-hook.sh /path/to/repo` + - or for configured clones: `./scripts/install-anythingllm-post-merge-hook.sh --all` -- Messages sur **stderr** : `uploaded=`, `skipped=`, `errors=`, détail des erreurs d’upload (tronqué au-delà de 20 lignes). -- Si `ORIG_HEAD` est absent, ou si URL / clé / slug manquent : message explicite et **code de sortie 0** pour ne pas bloquer le pull. +## Observability + +- stderr summary: `uploaded=`, `skipped=`, `errors=`, plus up to 20 error lines. +- If `ORIG_HEAD` is missing, or if URL/key/slug is missing: explicit message and exit code `0` (does not block the pull). diff --git a/docs/features/docv-ai-integration.md b/docs/features/docv-ai-integration.md index 7a269be..99e0f64 100644 --- a/docs/features/docv-ai-integration.md +++ b/docs/features/docv-ai-integration.md @@ -16,7 +16,7 @@ Pour chaque **projet logique** (ex. périmètre docv, autre produit) : 1. **Clone Git** : le dépôt applicatif doit être **déjà cloné** au même titre que les autres projets de l’espace de travail, en général sous une **racine de clones** **distincte** du dossier `./projects/` du monorepo (voir [repo/projects-directory.md](../repo/projects-directory.md)) — convention fréquente : répertoire **frère** `../projects//` par rapport à la racine `smart_ide`. 2. **AnythingLLM** : le projet doit être **rattaché à un workspace** AnythingLLM (un workspace par projet). 
L’alimentation du workspace repose sur un corpus **aligné sur les données déployées** : récupération via **SSH** depuis test / pprod / prod puis pipeline de synchro (voir [remote-deployed-data-ssh.md](./remote-deployed-data-ssh.md), [anythingllm-workspaces.md](../anythingllm-workspaces.md), scripts sous `scripts/`). -3. **Configuration ia_dev** : lorsqu’un id projet est enregistré pour les agents ou le ticketing, un `conf.json` peut être versionné sous **`smart_ide/projects//conf.json`** ; les scripts `ia_dev` y accèdent via le lien `ia_dev/projects/` lorsque le script [`ensure-ia-dev-project-link.sh`](../../scripts/ensure-ia-dev-project-link.sh) `` (ou le wrapper `ensure-ia-dev-smart-ide-project-link.sh` pour `smart_ide`) a été exécuté. +3. **Configuration ia_dev** : lorsqu’un id projet est enregistré pour les agents ou le ticketing, un `conf.json` peut être versionné sous **`smart_ide/projects//conf.json`** ; les scripts `ia_dev` y accèdent via le lien `IA_DEV_ROOT/projects/` (ex. `services/ia_dev/projects/`) lorsque le script [`ensure-ia-dev-project-link.sh`](../../scripts/ensure-ia-dev-project-link.sh) `` (ou le wrapper `ensure-ia-dev-smart-ide-project-link.sh` pour `smart_ide`) a été exécuté. ## Flux cible (vue simplifiée) diff --git a/docs/features/e2e-browser.md b/docs/features/e2e-browser.md new file mode 100644 index 0000000..d9896f7 --- /dev/null +++ b/docs/features/e2e-browser.md @@ -0,0 +1,42 @@ +# E2E browser mode (current + next) + +## Current mode (manual / assisted) + +Short-term E2E is **manual** and uses a terminal browser for fast feedback: + +- **Carbonyl** for previewing the **test** URL without a GUI browser: + - script: `scripts/open-carbonyl-preview-test.sh` + - per project config: `projects//conf.json` → `smart_ide.preview_urls.test` + +Typical workflow: + +1. Deploy to `test` (project workflow / `ia_dev`). +2. Open the `test` URL in Carbonyl: + +```bash +./scripts/open-carbonyl-preview-test.sh --project +``` + +3. 
Validate critical flows manually (login, navigation, chat proxy calls). + +## Optional next step (automation) + +If manual Carbonyl + system browser is not sufficient, introduce a dedicated service `browser-automation-api` **only** when at least one criterion is met (see [browser-automation-criteria.md](./browser-automation-criteria.md)): + +- agent-driven reproducible E2E with timeouts + domain allowlist +- controlled scraping (pre-approved URLs) +- headless visual regression on infra without GUI +- rendering capture (PDF/images) for internal pages + +Constraints for the future service: + +- isolated process (`services/browser-automation-api/`) +- queue + concurrency limits + strict timeouts +- **network allowlist** (no arbitrary browsing) +- Bearer auth (service-to-service) + URL logging + +## Related docs + +- [carbonyl-terminal-browser.md](./carbonyl-terminal-browser.md) +- [browser-automation-criteria.md](./browser-automation-criteria.md) + diff --git a/docs/features/ia-dev-service.md b/docs/features/ia-dev-service.md index c326d09..92e04d6 100644 --- a/docs/features/ia-dev-service.md +++ b/docs/features/ia-dev-service.md @@ -19,9 +19,7 @@ Remplacer à terme l’appel **direct** au répertoire module [`ia_dev`](../ia_d ## Cohabitation avec le sous-module -Aujourd’hui `./ia_dev` reste le **checkout canonique** sur l’hôte. Le binaire `ia-dev-gateway` reçoit `IA_DEV_ROOT` (défaut : répertoire parent du service ou chemin absolu vers `./ia_dev`). - -**Trajectoire** : module `ia_dev` dans le monorepo jusqu’à ce qu’un fork soit **vendored** ou **cloné par le service** au déploiement ; documentation de migration dans [ia_dev-module.md](../ia_dev-module.md). +Le binaire `ia-dev-gateway` reçoit `IA_DEV_ROOT` (chemin racine du checkout `ia_dev`). Si `IA_DEV_ROOT` n’est pas défini, il tente une résolution locale dans le monorepo (priorité `./ia_dev`, puis `./services/ia_dev`). 
## API (spécification) @@ -49,7 +47,13 @@ Les codes d’erreur **401/403/404/409/422** sont explicites ; pas de fallback s ## Implémentation -Le répertoire [`services/ia-dev-gateway/`](../../services/ia-dev-gateway/) contient un **serveur Node/TypeScript** (`npm run build && npm start`) : scan des agents `.md`, runs en mémoire avec statut stub `completed`, flux SSE minimal. Brancher le **runner** réel (`ia_dev` scripts) sur `POST /v1/runs` reste à faire. L’orchestrateur [orchestrator-api.md](./orchestrator-api.md) peut cibler ce service pour `agent.run`. +Le répertoire [`services/ia-dev-gateway/`](../../services/ia-dev-gateway/) contient un **serveur Node/TypeScript** (`npm run build && npm start`) : + +- scan des agents `.md` depuis `IA_DEV_ROOT/.smartIde/agents` +- `POST /v1/runs` : **runner script-backed** (subset allowlist) qui spawn des scripts `bash` sous `IA_DEV_ROOT/` et stream `stdout/stderr` via SSE +- `GET /v1/runs/{runId}/events` : SSE avec replay (`Last-Event-ID`) + keep-alive + +L’orchestrateur peut cibler ce service via l’intent `agent.run`. ## Voir aussi diff --git a/docs/ia_dev-module.md b/docs/ia_dev-module.md index 64d931d..2ad52ff 100644 --- a/docs/ia_dev-module.md +++ b/docs/ia_dev-module.md @@ -1,8 +1,8 @@ # Module `ia_dev` dans smart_ide -Le répertoire **`./ia_dev`** à la racine du monorepo **smart_ide** contient l’**équipe d’agents** (définitions sous `.smartIde/agents/`, `.smartIde/rules/`), **`deploy/`**, **`git-issues/`**, etc. Il est **versionné dans ce dépôt** (plus de sous-module Git séparé pour le checkout standard). +Le répertoire **`./services/ia_dev`** dans le monorepo **smart_ide** contient l’**équipe d’agents** (définitions sous `.smartIde/agents/`, `.smartIde/rules/`), **`deploy/`**, **`git-issues/`**, etc. Il est **versionné dans ce dépôt**. 
-Les **`conf.json` par projet** pour ce monorepo restent sous **`./projects//`** à la racine de **smart_ide** (voir [repo/projects-directory.md](./repo/projects-directory.md)), pas mélangés avec les clones applicatifs (`../projects/` ou autre). Des **liens symboliques** `ia_dev/projects/` → `../../projects/` (pour chaque id versionné, ex. `smart_ide`, `enso`, `builazoo`) permettent aux scripts `ia_dev` de résoudre les `conf.json` sous `projects/`. Après un clone neuf ou une réorganisation des dossiers : +Les **`conf.json` par projet** pour ce monorepo restent sous **`./projects//`** à la racine de **smart_ide** (voir [repo/projects-directory.md](./repo/projects-directory.md)), pas mélangés avec les clones applicatifs (`../projects/` ou autre). Des **liens symboliques** `services/ia_dev/projects/` → `../../projects/` (pour chaque id versionné, ex. `smart_ide`, `enso`, `builazoo`) permettent aux scripts `ia_dev` de résoudre les `conf.json` sous `projects/`. Après un clone neuf ou une réorganisation des dossiers : ```bash ./scripts/ensure-ia-dev-project-link.sh smart_ide @@ -12,12 +12,12 @@ Les **`conf.json` par projet** pour ce monorepo restent sous **`./projects// Le script `ensure-ia-dev-smart-ide-project-link.sh` appelle `ensure-ia-dev-project-link.sh smart_ide`. -L’exécution des scripts reste **depuis la racine `ia_dev/`**, comme dans la documentation amont du dépôt historique [4nk/ia_dev](https://git.4nkweb.com/4nk/ia_dev.git) ; **smart_ide** fournit l’environnement IDE, les scripts hôte, les unités systemd et les journaux sous `logs/` ([repo/logs-directory.md](./repo/logs-directory.md)). +L’exécution des scripts reste **depuis la racine `services/ia_dev/`**, comme dans la documentation amont du dépôt historique [4nk/ia_dev](https://git.4nkweb.com/4nk/ia_dev.git) ; **smart_ide** fournit l’environnement IDE, les scripts hôte, les unités systemd et les journaux sous `logs/` ([repo/logs-directory.md](./repo/logs-directory.md)). 
## Journaux - Tirage Git planifié des clones : `logs/git-pull-projects.log` ([repo/logs-directory.md](./repo/logs-directory.md), via [`cron/git-pull-wrapper.sh`](../cron/git-pull-wrapper.sh)). -- Scripts `ia_dev` : `logs/ia_dev.log` ([repo/logs-directory.md](./repo/logs-directory.md), via [`ia_dev/lib/smart_ide_logs.sh`](../ia_dev/lib/smart_ide_logs.sh)). +- Scripts `ia_dev` : `logs/ia_dev.log` ([repo/logs-directory.md](./repo/logs-directory.md), via [`services/ia_dev/lib/smart_ide_logs.sh`](../services/ia_dev/lib/smart_ide_logs.sh)). ## Service d’intégration @@ -28,14 +28,14 @@ Le périmètre « service » côté monorepo est documenté sous [repo/ia-de | Élément | Rôle | |---------|------| | **smart_ide** | Cible UX IDE, scripts socle, systemd, doc de déploiement, **`logs/`** | -| **`ia_dev/`** (module dans ce dépôt) | Agents, déploiements, ticketing ; confs projet dans `./projects/` + liens sous `ia_dev/projects/` | +| **`services/ia_dev/`** (module dans ce dépôt) | Agents, déploiements, ticketing ; confs projet dans `./projects/` + liens sous `services/ia_dev/projects/` | -Le futur **agent gateway** doit traiter **`./ia_dev`** comme chemin canonique sur le serveur sauf configuration contraire. Voir [system-architecture.md](./system-architecture.md). +Le futur **agent gateway** doit traiter `IA_DEV_ROOT` comme chemin canonique sur le serveur (résolution par défaut : `./ia_dev` puis `./services/ia_dev`). Voir [system-architecture.md](./system-architecture.md). ## Trajectoire : service `ia-dev-gateway` -Un service HTTP dédié ([features/ia-dev-service.md](./features/ia-dev-service.md), [API/ia-dev-gateway.md](./API/ia-dev-gateway.md)) prendra le relais pour les **clients** (Lapce, front, orchestrateur) : le répertoire **`./ia_dev`** reste la **source de fichiers** côté hôte jusqu’à une éventuelle extraction ou conteneurisation documentée ailleurs. 
+Un service HTTP dédié ([features/ia-dev-service.md](./features/ia-dev-service.md), [API/ia-dev-gateway.md](./API/ia-dev-gateway.md)) prendra le relais pour les **clients** (Lapce, front, orchestrateur) : le répertoire référencé par **`IA_DEV_ROOT`** reste la **source de fichiers** côté hôte jusqu’à une éventuelle extraction ou conteneurisation documentée ailleurs. -1. **Phase actuelle** : module dans **smart_ide** + exécution manuelle / scripts depuis la racine `ia_dev`. -2. **Phase gateway** : binaire `ia-dev-gateway` sur l’hôte, `IA_DEV_ROOT` pointant vers `./ia_dev`. +1. **Phase actuelle** : module dans **smart_ide** + exécution manuelle / scripts depuis la racine `services/ia_dev`. +2. **Phase gateway** : binaire `ia-dev-gateway` sur l’hôte, `IA_DEV_ROOT` pointant vers `./services/ia_dev` (ou `./ia_dev` si présent). 3. **Phase fork / mirror** : le dépôt amont `ia_dev` peut continuer à exister sur la forge ; la copie de travail dans **smart_ide** suit le flux Git habituel du monorepo. diff --git a/docs/ia_dev-project-smart_ide.md b/docs/ia_dev-project-smart_ide.md index fdd8df3..49724ca 100644 --- a/docs/ia_dev-project-smart_ide.md +++ b/docs/ia_dev-project-smart_ide.md @@ -1,17 +1,17 @@ # Projet `ia_dev` : `smart_ide` -Le dépôt **smart_ide** est enregistré dans le sous-module **`ia_dev`** sous l’identifiant de projet **`smart_ide`**, pour les agents, le ticketing Gitea et la doc wiki alignés sur la forge **4nk/smart_ide**. +Le dépôt **smart_ide** est enregistré pour le module **`ia_dev`** (racine `IA_DEV_ROOT`, checkout intégré sous `services/ia_dev/`) sous l’identifiant de projet **`smart_ide`**, pour les agents, le ticketing Gitea et la doc wiki alignés sur la forge **4nk/smart_ide**. 
## Fichier de configuration - **Source de vérité (versionnée dans ce monorepo) :** [`projects/smart_ide/conf.json`](../projects/smart_ide/conf.json) — chemins machine (`project_path`), URLs wiki et issues (`https://git.4nkweb.com/4nk/smart_ide/...`), boîtes mail autorisées pour le ticketing (envs test / pprod / prod). -- **Sous-module `ia_dev` :** le chemin `ia_dev/projects/smart_ide/conf.json` doit résoudre le même fichier via le lien créé par [`scripts/ensure-ia-dev-project-link.sh`](../scripts/ensure-ia-dev-project-link.sh) `smart_ide` (wrapper : [`ensure-ia-dev-smart-ide-project-link.sh`](../scripts/ensure-ia-dev-smart-ide-project-link.sh)). +- **Module `ia_dev` :** le chemin `IA_DEV_ROOT/projects/smart_ide/conf.json` (ex. `services/ia_dev/projects/smart_ide/conf.json`) doit résoudre le même fichier via le lien créé par [`scripts/ensure-ia-dev-project-link.sh`](../scripts/ensure-ia-dev-project-link.sh) `smart_ide` (wrapper : [`ensure-ia-dev-smart-ide-project-link.sh`](../scripts/ensure-ia-dev-smart-ide-project-link.sh)). Adapter **`project_path`** (et champs dérivés si vous ajoutez `build_dirs` / `deploy`) sur chaque poste ou serveur où `ia_dev` exécute des commandes sur ce dépôt. Les **clones** d’autres apps (docv, etc.) ne vont **pas** dans `./projects/` : voir [repo/projects-directory.md](./repo/projects-directory.md). ## Agents Cursor (pont smart_ide → ia_dev) -Dans ce dépôt, les définitions **Cursor** sous [`.smartIde/agents/`](../.smartIde/agents/) utilisent le préfixe **`ia-dev-*`** : elles fixent le projet **`smart_ide`**, imposent d’indiquer l’**environnement** (`test`, `pprod`, `prod`), puis renvoient à la procédure complète dans le sous-module : `ia_dev/.smartIde/agents/.md`. 
+Dans ce dépôt, les définitions **Cursor** sous [`.smartIde/agents/`](../.smartIde/agents/) utilisent le préfixe **`ia-dev-*`** : elles fixent le projet **`smart_ide`**, imposent d’indiquer l’**environnement** (`test`, `pprod`, `prod`), puis renvoient à la procédure complète dans le module : `IA_DEV_ROOT/.smartIde/agents/.md` (ex. `services/ia_dev/.smartIde/agents/.md`). | Agent Cursor (smart_ide) | Définition ia_dev | |--------------------------|-------------------| @@ -33,9 +33,9 @@ Dans ce dépôt, les définitions **Cursor** sous [`.smartIde/agents/`](../.smar | `ia-dev-notary-ai-process` | `notary-ai-process.md` | | `ia-dev-closure-point-7-justification` | `closure-point-7-justification.md` | -Règle de contexte (fichiers sous `ia_dev/`, `docs/`, `services/`, etc.) : [`.smartIde/rules/smart-ide-ia-dev-bridge.mdc`](../.smartIde/rules/smart-ide-ia-dev-bridge.mdc). +Règle de contexte (fichiers sous `IA_DEV_ROOT/` (ex. `services/ia_dev/`), `docs/`, `services/`, etc.) : [`.smartIde/rules/smart-ide-ia-dev-bridge.mdc`](../.smartIde/rules/smart-ide-ia-dev-bridge.mdc). -**Usage :** choisir l’agent `ia-dev-*` dans Cursor et préciser dans le message l’environnement cible ; pour les scripts, `cd ia_dev` et `IA_PROJECT_ID=smart_ide` (ou `--project smart_ide`). Détail schéma conf : [repo/ia-dev-project-conf-schema.md](./repo/ia-dev-project-conf-schema.md). Conf **smart_ide** : `projects/smart_ide/conf.json` à la racine du monorepo. +**Usage :** choisir l’agent `ia-dev-*` dans Cursor et préciser dans le message l’environnement cible ; pour les scripts, `cd services/ia_dev` et `IA_PROJECT_ID=smart_ide` (ou `--project smart_ide`). Détail schéma conf : [repo/ia-dev-project-conf-schema.md](./repo/ia-dev-project-conf-schema.md). Conf **smart_ide** : `projects/smart_ide/conf.json` à la racine du monorepo. 
## Liens diff --git a/docs/repo/README.md b/docs/repo/README.md index 5610575..a28ce55 100644 --- a/docs/repo/README.md +++ b/docs/repo/README.md @@ -24,8 +24,8 @@ Toute la documentation **opérationnelle** qui vivait auparavant sous des `READM | [ia-dev-smart-ide-integration.md](./ia-dev-smart-ide-integration.md) | Module `ia_dev` dans smart_ide, journaux, liens architecture | | [ia-dev-repository-overview.md](./ia-dev-repository-overview.md) | Dépôt ia_dev : agents, scripts deploy, usage standalone | | [ia-dev-project-conf-schema.md](./ia-dev-project-conf-schema.md) | Schéma `projects//conf.json`, résolution du projet, règles agents | -| [ia-dev-deploy-lib.md](./ia-dev-deploy-lib.md) | Bibliothèques partagées `ia_dev/deploy/lib/` | -| [ia-dev-shared-lib.md](./ia-dev-shared-lib.md) | `ia_dev/lib/project_config.sh` et résolution projet | +| [ia-dev-deploy-lib.md](./ia-dev-deploy-lib.md) | Bibliothèques partagées `IA_DEV_ROOT/deploy/lib/` | +| [ia-dev-shared-lib.md](./ia-dev-shared-lib.md) | `IA_DEV_ROOT/lib/project_config.sh` et résolution projet | | **Services HTTP (exploitation)** | | | [service-repos-devtools.md](./service-repos-devtools.md) | Clone / liste / load Git sous racine contrôlée | | [service-anythingllm-devtools.md](./service-anythingllm-devtools.md) | AnythingLLM + repos-devtools + RAG initial (HTTP) | @@ -37,6 +37,7 @@ Toute la documentation **opérationnelle** qui vivait auparavant sous des `READM | [service-langextract.md](./service-langextract.md) | Wrapper LangExtract | | **Scripts et extensions** | | | [script-anythingllm-pull-sync.md](./script-anythingllm-pull-sync.md) | Hook post-merge → upload AnythingLLM | +| [script-remote-data-ssh-sync.md](./script-remote-data-ssh-sync.md) | SSH pull deployed data → local mirror → optional AnythingLLM ingest | | [service-carbonyl.md](./service-carbonyl.md) | Carbonyl (navigateur terminal), sous-module amont | | [service-pageindex.md](./service-pageindex.md) | PageIndex (index sémantique vectorless), 
sous-module amont | | [service-chandra.md](./service-chandra.md) | Chandra OCR, sous-module amont | diff --git a/docs/repo/ia-dev-deploy-lib.md b/docs/repo/ia-dev-deploy-lib.md index dc7ecb0..c7bae2a 100644 --- a/docs/repo/ia-dev-deploy-lib.md +++ b/docs/repo/ia-dev-deploy-lib.md @@ -1,9 +1,9 @@ -# Bibliothèques partagées `ia_dev/deploy/lib/` +# Bibliothèques partagées `IA_DEV_ROOT/deploy/lib/` ## `ssh.sh` Helpers SSH/SCP canoniques (`ssh_run`, `scp_copy`, `require_ssh_key`, `ssh_common_opts`). -**LeCoffre** : `deploy/scripts_v2/_lib/ssh.sh` du projet peut sourcer `ia_dev/deploy/lib/ssh.sh` lorsque ce chemin existe depuis la racine du monorepo. +**LeCoffre** : `deploy/scripts_v2/_lib/ssh.sh` du projet peut sourcer `IA_DEV_ROOT/deploy/lib/ssh.sh` lorsque ce chemin existe depuis la racine du monorepo. ## `deploy-log.sh` diff --git a/docs/repo/ia-dev-project-conf-schema.md b/docs/repo/ia-dev-project-conf-schema.md index 4201ded..dd9af2c 100644 --- a/docs/repo/ia-dev-project-conf-schema.md +++ b/docs/repo/ia-dev-project-conf-schema.md @@ -1,13 +1,13 @@ # Schéma `projects//conf.json` (ia_dev) -Ce document est la **copie canonique** dans **smart_ide** du schéma de configuration ia_dev. Le fichier amont peut exister sous **`ia_dev/projects/README.md`** dans un checkout qui suit le dépôt ia_dev seul. +Ce document est la **copie canonique** dans **smart_ide** du schéma de configuration ia_dev. Le fichier amont peut exister sous **`IA_DEV_ROOT/projects/README.md`** (ex. `services/ia_dev/projects/README.md`) dans un checkout qui suit le dépôt ia_dev seul. Le dépôt **ia_dev** est un **dépôt autonome**. Les paramètres par projet sont dans `projects//conf.json`. Le **``** est le nom du répertoire sous `projects/`. **Chemins dans conf.json** Les scripts dans `deploy/` déploient les **projets configurés** dans leurs répertoires ; ils ne déploient pas ia_dev. -- **Chemins projet / deploy** (`project_path`, `deploy.repository_root`, etc.) 
: **absolus** ou **relatifs à la racine du monorepo smart_ide**. Résolution : `ia_dev/lib/conf_path_resolve.sh` lorsque la conf vit sous `…/projects//conf.json` ou `…/ia_dev/projects//conf.json`. +- **Chemins projet / deploy** (`project_path`, `deploy.repository_root`, etc.) : **absolus** ou **relatifs à la racine du monorepo smart_ide**. Résolution : `IA_DEV_ROOT/lib/conf_path_resolve.sh` (ex. `services/ia_dev/lib/conf_path_resolve.sh`) lorsque la conf vit sous `…/projects//conf.json` ou `…/IA_DEV_ROOT/projects//conf.json`. - **`build_dirs`** : **absolu** ; **relatif au monorepo** si la valeur commence par `../` ; sinon **relatif à la racine Git du projet** (`repository_root`). - **`version.package_json_paths`** : **relatif à la racine du dépôt projet** sauf absolu. - **Relatif à la racine ia_dev** : `mail.imap_bridge_env`, `git.token_file` → fichiers sous `.secrets/` de ia_dev. @@ -34,13 +34,13 @@ Les agents ne modifient pas `projects//conf.json` sans validation humaine ex | `name` | no | Nom lisible. | | `project_path` | no | Racine Git du clone pour cron / outillage. | | `build_dirs` | no | Répertoires `npm run build` (règles de relatif / absolu ci-dessus). | -| `deploy.*` | no | Chemins scripts, secrets, orchestrateur — voir tableau complet dans l’historique Git de ce fichier ou `ia_dev/projects/README.md` amont. | +| `deploy.*` | no | Chemins scripts, secrets, orchestrateur — voir tableau complet dans l’historique Git de ce fichier ou `IA_DEV_ROOT/projects/README.md` amont. | | `deploy.host_stays_on_test` | no | Comportement `deploy-by-script-to.sh` (branche test vs pprod/prod). | | `tickets` | no | URL issues, `authorized_emails` ; le **to** sert à résoudre le projet. | | `cron` | no | Extension **smart_ide** : `{ "git_pull": false }` pour désactiver le pull planifié. 
| | `smart_ide` | no | Extension **smart_ide** : `remote_data_access`, `anythingllm_workspace_slug`, `workspace` (`folders` + `settings`, équivalent `.code-workspace` ; y placer `smartIde.activeProjectId`), `preview_urls` (`test`, … URLs pour prévisualisation ex. Carbonyl), etc. | -Détail ticketing : `ia_dev/projects/ia_dev/docs/TICKETS_SPOOL_FORMAT.md`. +Détail ticketing : `IA_DEV_ROOT/projects/ia_dev/docs/TICKETS_SPOOL_FORMAT.md`. **.secrets à la racine ia_dev** : `token`, `git-issues/agent-loop.env`, `git-issues/imap-bridge.env`. diff --git a/docs/repo/ia-dev-repository-overview.md b/docs/repo/ia-dev-repository-overview.md index 3905160..9812b32 100644 --- a/docs/repo/ia-dev-repository-overview.md +++ b/docs/repo/ia-dev-repository-overview.md @@ -6,29 +6,29 @@ Dépôt de pilotage par l'IA pour les projets : **équipe d'agents IA** dont le ## Usage (standalone) -- **Racine d'exécution** : tous les scripts sont lancés depuis la **racine de ia_dev** (ce dépôt). L'id projet est résolu par **MAIL_TO**, **AI_AGENT_TOKEN**, **`IA_PROJECT_ID`**, **`--project`**, ou premier argument selon le script (voir [ia-dev-project-conf-schema.md](./ia-dev-project-conf-schema.md)). -- **Config** : dans `projects//conf.json`, les chemins vers les dépôts projet peuvent être **absolus** ou **relatifs à la racine du monorepo smart_ide** lorsque ia_dev y est intégré (`lib/conf_path_resolve.sh`). Les champs `mail.imap_bridge_env` et `git.token_file` sont **relatifs à la racine de ia_dev**. Le répertoire `.secrets` à la racine de ia_dev contient `token` et `git-issues/agent-loop.env`, `git-issues/imap-bridge.env`. +- **Racine d'exécution** : tous les scripts sont lancés depuis la **racine `IA_DEV_ROOT`**. L'id projet est résolu par **MAIL_TO**, **AI_AGENT_TOKEN**, **`IA_PROJECT_ID`**, **`--project`**, ou premier argument selon le script (voir [ia-dev-project-conf-schema.md](./ia-dev-project-conf-schema.md)). 
+- **Config** : dans `projects//conf.json`, les chemins vers les dépôts projet peuvent être **absolus** ou **relatifs à la racine du monorepo smart_ide** lorsque ia_dev y est intégré (`lib/conf_path_resolve.sh`). Les champs `mail.imap_bridge_env` et `git.token_file` sont **relatifs à `IA_DEV_ROOT`**. Le répertoire `.secrets` à la racine de `IA_DEV_ROOT` contient `token` et `git-issues/agent-loop.env`, `git-issues/imap-bridge.env`. ## Agents et domaines Les **agents** ont leur **code et définitions** dans ia_dev (`.smartIde/agents/`, `.smartIde/rules/`) et sont **lancés de façon centralisée** depuis ce dépôt pour **tous les projets**. Ils sont **dédiés aux projets configurés** : ils agissent sur ces projets (doc, code, déploiement, ticketing), pas sur ia_dev. -Chaque agent indique où se trouve la doc : **projets gérés** → `projects//docs` ; **ia_dev** → `ia_dev/projects/ia_dev/docs` (copie embarquée selon checkout). +Chaque agent indique où se trouve la doc : **projets gérés** → `projects//docs` ; **ia_dev** → `IA_DEV_ROOT/projects/ia_dev/docs` (copie embarquée selon checkout). | Domaine | Agents / composants | |---------|---------------------| -| **Doc** | `docupdate` ; `ia_dev/projects/ia_dev/docs/` ; migration wiki (`git-issues/wiki-migrate-docs.sh`). | +| **Doc** | `docupdate` ; `IA_DEV_ROOT/projects/ia_dev/docs/` ; migration wiki (`git-issues/wiki-migrate-docs.sh`). | | **Code** | `fix`, `evol`, `code`, `fix-search` ; workflow correctifs/évolutions. | | **Ticketing** | `git-issues-process`, `agent-loop` ; spooler `projects//data/issues` ; scripts `git-issues/`. | | **IA notaire (ai_working_help)** | `notary-ai-loop`, `notary-ai-process` ; API `ai_working_help/server.js` ; spooler `projects//data/notary-ai/{pending,responded}`. | | **DevOps** | `push-by-script`, `deploy-by-script`, `deploy-pprod-or-prod`, `branch-align-by-script-from-test`, `change-to-all-branches` ; scripts `deploy/`. 
| | **Sécurité / Qualité** | Règles `.smartIde/rules/` ; pas de secrets en dur ; `fix-lint` ; clôture obligatoire (`.smartIde/rules/cloture-evolution.mdc`). | -Référence détaillée : `ia_dev/projects/ia_dev/docs/GIT_ISSUES_SCRIPTS_AGENTS.md`. Index : `ia_dev/projects/ia_dev/docs/README.md`. +Référence détaillée : `IA_DEV_ROOT/projects/ia_dev/docs/GIT_ISSUES_SCRIPTS_AGENTS.md`. Index : `IA_DEV_ROOT/projects/ia_dev/docs/README.md`. ## Répertoire d'exécution (standalone) -Tous les scripts sont invoqués depuis la **racine de ia_dev**. +Tous les scripts sont invoqués depuis la **racine `IA_DEV_ROOT`**. - **deploy/** : déploient les **projets configurés**, pas ia_dev. - **git-issues/** : logs et data par projet sous `projects//logs/` et `projects//data/issues/`. diff --git a/docs/repo/ia-dev-shared-lib.md b/docs/repo/ia-dev-shared-lib.md index 9a48263..5cff3be 100644 --- a/docs/repo/ia-dev-shared-lib.md +++ b/docs/repo/ia-dev-shared-lib.md @@ -1,10 +1,10 @@ -# Bibliothèque partagée `ia_dev/lib/` +# Bibliothèque partagée `IA_DEV_ROOT/lib/` ## `project_config.sh` Sourcé par les scripts **deploy** et **git-issues** pour résoudre l’**id** projet et le chemin vers son JSON. -**Usage standalone** : exécution depuis la racine **ia_dev** ; définir **`IA_DEV_ROOT`** avant source si besoin. +**Usage standalone** : exécution depuis la racine **`IA_DEV_ROOT`** (ex. `services/ia_dev/` dans smart_ide). **Après source** : `PROJECT_ID`, `PROJECT_CONFIG_PATH` (souvent `projects//conf.json`). Avec résolution par token : `PROJECT_ENV`. 
diff --git a/docs/repo/ia-dev-smart-ide-integration.md b/docs/repo/ia-dev-smart-ide-integration.md index 3b480e7..2c3e4ce 100644 --- a/docs/repo/ia-dev-smart-ide-integration.md +++ b/docs/repo/ia-dev-smart-ide-integration.md @@ -1,15 +1,17 @@ -# ia_dev — module smart_ide (`services/ia_dev/` + `ia_dev/`) +# ia_dev — module smart_ide (`services/ia_dev/`) -Le répertoire **`ia_dev/`** à la racine du monorepo **smart_ide** est le **module agents / déploiement / ticketing** (équivalent historique du dépôt [4nk/ia_dev](https://git.4nkweb.com/4nk/ia_dev.git)). Il est **versionné dans ce dépôt**. +Le répertoire **`services/ia_dev/`** dans le monorepo **smart_ide** est le **module agents / déploiement / ticketing** (équivalent historique du dépôt [4nk/ia_dev](https://git.4nkweb.com/4nk/ia_dev.git)). Il est **versionné dans ce dépôt**. + +La racine du checkout `ia_dev` est traitée comme **`IA_DEV_ROOT`** par les scripts et services. Par défaut, les services tentent `./ia_dev` puis `./services/ia_dev` si `IA_DEV_ROOT` n’est pas défini. ## Rôle -- Scripts **`ia_dev/deploy/`**, **`ia_dev/git-issues/`**, outillage **`ia_dev/tools/`**, définitions **`ia_dev/.smartIde/`**. -- Résolution des projets via **`projects//conf.json`** à la racine **smart_ide** et liens sous `ia_dev/projects/` (voir [projects-directory.md](./projects-directory.md), [ia_dev-module.md](../ia_dev-module.md)). +- Scripts **`IA_DEV_ROOT/deploy/`**, **`IA_DEV_ROOT/git-issues/`**, outillage **`IA_DEV_ROOT/tools/`**, définitions **`IA_DEV_ROOT/.smartIde/`**. +- Résolution des projets via **`projects//conf.json`** à la racine **smart_ide** et liens sous `IA_DEV_ROOT/projects/` (voir [projects-directory.md](./projects-directory.md), [ia_dev-module.md](../ia_dev-module.md)). ## Journaux smart_ide -Les exécutions shell concernées écrivent dans **`logs/ia_dev.log`** à la racine **smart_ide** lorsque la détection du monorepo réussit. 
Détail : [logs-directory.md](./logs-directory.md), implémentation `ia_dev/lib/smart_ide_logs.sh`. +Les exécutions shell concernées écrivent dans **`logs/ia_dev.log`** à la racine **smart_ide** lorsque la détection du monorepo réussit. Détail : [logs-directory.md](./logs-directory.md), implémentation `services/ia_dev/lib/smart_ide_logs.sh`. ## Variables (référence) diff --git a/docs/repo/logs-directory.md b/docs/repo/logs-directory.md index 545e54a..27190c2 100644 --- a/docs/repo/logs-directory.md +++ b/docs/repo/logs-directory.md @@ -7,13 +7,13 @@ Le répertoire **`logs/`** est **versionné** comme conteneur (fichiers `README. | Fichier | Origine | |---------|---------| | `git-pull-projects.log` | Tirage Git planifié (`cron/git-pull-wrapper.sh`), variable `PULL_SYNC_LOG` dans `cron/config.env` | -| `ia_dev.log` | Exécutions shell sous `ia_dev/` lorsque le checkout est détecté comme monorepo **smart_ide** (`cron/git-pull-wrapper.sh` ou `projects/smart_ide/` au-dessus de `ia_dev/`) | +| `ia_dev.log` | Exécutions shell sous `services/ia_dev/` (ou `ia_dev/` si présent) lorsque le checkout est détecté comme monorepo **smart_ide** (détection en remontant vers `cron/git-pull-wrapper.sh` ou `projects/smart_ide/`) | -Variable interne : **`SMART_IDE_LOG_IA_DEV_ROOT`** (racine `ia_dev`), posée par `ia_dev/lib/smart_ide_logs.sh`. +Variable interne : **`SMART_IDE_LOG_IA_DEV_ROOT`** (racine ia_dev), posée par `services/ia_dev/lib/smart_ide_logs.sh`. ## Intégration -Contrat service : [ia-dev-smart-ide-integration.md](./ia-dev-smart-ide-integration.md), implémentation `ia_dev/lib/smart_ide_logs.sh`. +Contrat service : [ia-dev-smart-ide-integration.md](./ia-dev-smart-ide-integration.md), implémentation `services/ia_dev/lib/smart_ide_logs.sh`. Configuration du pull planifié : [cron-git-pull.md](./cron-git-pull.md). 
diff --git a/docs/repo/projects-directory.md b/docs/repo/projects-directory.md index 2d90c32..e19d012 100644 --- a/docs/repo/projects-directory.md +++ b/docs/repo/projects-directory.md @@ -9,23 +9,23 @@ Les **sous-répertoires** `projects//` non listés dans le **`.gitignore`** Les dépôts sources des produits (ex. backend **docv** sous un chemin du type `…/enso/docv`, autres apps) doivent vivre **ailleurs**, selon la convention du poste — typiquement un répertoire **frère** du monorepo, par ex. **`../projects//`** (relatif à la racine `smart_ide`), **distinct** de `./projects/` ici. - **`./projects/`** → **un seul fichier de configuration par id** : `projects//conf.json` (y compris le gabarit [`projects/example/conf.json`](../../projects/example/conf.json)). Pas de fichier `.code-workspace` versionné à côté : l’équivalent multi-root IDE est **`smart_ide.workspace`** dans ce même `conf.json`. `ia_dev` résout via le lien symbolique décrit dans [ia_dev-module.md](../ia_dev-module.md). -- **`../projects/`** (ou autre racine configurée) → arborescences Git complètes, builds, etc. ; dans chaque `conf.json`, **`project_path`** et les chemins **`deploy.*`** utiles sont **relatifs à la racine du monorepo smart_ide** (ex. `../enso`, `.`) ou **absolus** ; le script `cron/git-pull-project-clones.sh` et ia_dev les résolvent depuis cette racine (`ia_dev/lib/conf_path_resolve.sh`). +- **`../projects/`** (ou autre racine configurée) → arborescences Git complètes, builds, etc. ; dans chaque `conf.json`, **`project_path`** et les chemins **`deploy.*`** utiles sont **relatifs à la racine du monorepo smart_ide** (ex. `../enso`, `.`) ou **absolus** ; le script `cron/git-pull-project-clones.sh` et ia_dev les résolvent depuis cette racine (`IA_DEV_ROOT/lib/conf_path_resolve.sh`). 
## `smart_ide` - Fichier : `projects/smart_ide/conf.json` -- Après clone ou réorganisation des dossiers : `./scripts/ensure-ia-dev-project-link.sh smart_ide` (ou le wrapper `scripts/ensure-ia-dev-smart-ide-project-link.sh`) pour recréer le lien `ia_dev/projects/smart_ide` → `../../projects/smart_ide`. +- Après clone ou réorganisation des dossiers : `./scripts/ensure-ia-dev-project-link.sh smart_ide` (ou le wrapper `scripts/ensure-ia-dev-smart-ide-project-link.sh`) pour recréer le lien sous `IA_DEV_ROOT/projects/smart_ide` (ex. `services/ia_dev/projects/smart_ide`) → `../../projects/smart_ide` (chemin relatif variant selon `IA_DEV_ROOT`). ## `enso` - Fichier : `projects/enso/conf.json` — clone **enso** frère du monorepo (`project_path` typique `../enso`), déploiement `deploy/scripts_v2`, forge **4nk/enso** (wiki / issues), mails ticketing `AI.ENSO.*@4nkweb.com`. - Chemins **absolus sur les serveurs** sous **`smart_ide.remote_data_access`** : alignés sur **`ENSO_REMOTE_ROOT`**, **`ENSO_SSH_HOST`** et **`data/dossiers-permanents`** (dépôt enso, `enso-deploy.env`). Valeurs réelles : fichiers **`enso-deploy.env`** non versionnés. - Cron fragment : `cron/fragments/enso.cron` -- Pour **`ia_dev`** : lien symbolique `ia_dev/projects/enso` → `../../projects/enso` (recréer avec `./scripts/ensure-ia-dev-project-link.sh enso`). +- Pour **`ia_dev`** : lien symbolique `IA_DEV_ROOT/projects/enso` → `../../projects/enso` (recréer avec `./scripts/ensure-ia-dev-project-link.sh enso` ; chemin relatif variant selon `IA_DEV_ROOT`). ## `builazoo` -- Fichier : `projects/builazoo/conf.json` — dépôt **sous la racine monorepo** (`project_path` : `builazoo`, soit `smart_ide/builazoo/`), forge **4nk/builazoo** (wiki / issues à ajuster si le dépôt diffère), mails ticketing `AI.BUILAZOO.*@4nkweb.com`. 
+- Fichier : `projects/builazoo/conf.json` — dépôt **frère du monorepo** (`project_path` typique `../builazoo`), forge **4nk/builazoo** (wiki / issues à ajuster si le dépôt diffère), mails ticketing `AI.BUILAZOO.*@4nkweb.com`. - **`smart_ide.remote_data_access`** : alias SSH `builazoo-test` / `builazoo-pprod` / `builazoo-prod` (à déclarer dans `~/.ssh/config` comme pour les autres ids). - Multi-root IDE : **`smart_ide.workspace`** dans `conf.json` (dossiers + `smartIde.activeProjectId`). - Cron fragment : `cron/fragments/builazoo.cron` @@ -61,4 +61,4 @@ Convention agents : `.smartIde/rules/smart-ide-ia-dev-bridge.mdc`. ## Référence amont (schéma conf) -Schéma détaillé des champs `conf.json` : [ia-dev-project-conf-schema.md](./ia-dev-project-conf-schema.md) (aligné sur `ia_dev/projects/README.md` amont). Extensions **`cron`** et **`smart_ide`** : spécifiques smart_ide ; `ia_dev` peut ignorer les champs non lus. +Schéma détaillé des champs `conf.json` : [ia-dev-project-conf-schema.md](./ia-dev-project-conf-schema.md) (aligné sur `IA_DEV_ROOT/projects/README.md` amont). Extensions **`cron`** et **`smart_ide`** : spécifiques smart_ide ; `ia_dev` peut ignorer les champs non lus. diff --git a/docs/repo/script-anythingllm-pull-sync.md b/docs/repo/script-anythingllm-pull-sync.md index 0c6a3a8..9bb1630 100644 --- a/docs/repo/script-anythingllm-pull-sync.md +++ b/docs/repo/script-anythingllm-pull-sync.md @@ -1,51 +1,77 @@ # anythingllm-pull-sync (`scripts/anythingllm-pull-sync/`) -S’exécute après **`git pull`** via le hook Git **`post-merge`** : envoie les **fichiers modifiés** entre `ORIG_HEAD` et `HEAD` vers un workspace AnythingLLM (`POST /api/v1/document/upload`). +Triggered after `git pull` via the Git `post-merge` hook: uploads files changed between `ORIG_HEAD` and `HEAD` to an AnythingLLM workspace (`POST /api/v1/document/upload`). -## Prérequis +## Requirements -- Processeur de documents AnythingLLM en ligne. 
-- Mêmes règles **`.4nkaiignore`** que l’extension VS Code (racine du dépôt cible). -- Variables d’environnement ou fichier **`repo/.anythingllm.json`** (`workspaceSlug`). +- AnythingLLM document processor reachable from the host. +- Same `.4nkaiignore` rules as the legacy IDE extension (in the target repo root). -## Variables +## Environment -| Variable | Obligatoire | Description | -|----------|-------------|-------------| -| `ANYTHINGLLM_BASE_URL` | oui | Sans `/` final | -| `ANYTHINGLLM_API_KEY` | oui | Clé API développeur | -| `ANYTHINGLLM_WORKSPACE_SLUG` | non* | Slug du workspace | -| `ANYTHINGLLM_SYNC_MAX_FILES` | non | Défaut `200` | -| `ANYTHINGLLM_SYNC_MAX_FILE_BYTES` | non | Défaut `5242880` | +The generated hook sources `~/.config/4nk/anythingllm-sync.env` if present. -\* Si absent : lecture de **`repo/.anythingllm.json`**. +Required variables: -Fichier optionnel **`~/.config/4nk/anythingllm-sync.env`** sourcé par le hook généré. +- `ANYTHINGLLM_BASE_URL` (no trailing `/`) +- `ANYTHINGLLM_API_KEY` -## Installation du hook sur un clone +Optional variables: + +- `ANYTHINGLLM_SYNC_MAX_FILES` (default `200`) +- `ANYTHINGLLM_SYNC_MAX_FILE_BYTES` (default `5242880`) +- `SMART_IDE_ENV` (`test|pprod|prod`, default `test`) for smart_ide project config resolution (see below) + +## Workspace slug resolution + +Order (first match wins): + +1. `ANYTHINGLLM_WORKSPACE_SLUG` (env) +2. `repo/.anythingllm.json` with `{ "workspaceSlug": "…" }` +3. smart_ide `projects/<id>/conf.json`: + - finds the project by matching `project_path` (resolved relative to the smart_ide root) to `--repo-root` + - reads `smart_ide.anythingllm_workspace_slug[SMART_IDE_ENV]` (default env `test`) + +If no slug can be resolved, the script prints an explicit message and exits with code `0` (does not block the pull).
+ +## Installing the hook + +Single repo: ```bash -/path/vers/smart_ide/scripts/install-anythingllm-post-merge-hook.sh /path/vers/repo-cible +./scripts/install-anythingllm-post-merge-hook.sh /path/to/repo ``` -Une fois par machine, depuis ce dépôt : +All configured project clones (from `projects/*/conf.json`): + +```bash +./scripts/install-anythingllm-post-merge-hook.sh --all +``` + +Single configured project: + +```bash +./scripts/install-anythingllm-post-merge-hook.sh --project enso +``` + +One-time setup on the host (from this repo): ```bash cd scripts/anythingllm-pull-sync && npm install ``` -## Comportement +## Behavior -- Uniquement les chemins de `git diff --name-only --diff-filter=ACMRT ORIG_HEAD HEAD`. -- Si `ORIG_HEAD` ou config manque → **exit 0** avec message (ne bloque pas le pull). -- Pas de suppression miroir des fichiers supprimés dans AnythingLLM dans cette version (upload seul). +- Only uploads paths from `git diff --name-only --diff-filter=ACMRT ORIG_HEAD HEAD`. +- If `ORIG_HEAD` is missing, or if AnythingLLM config is missing: explicit message, exit `0`. +- Deletions/renames are not mirrored as deletions in AnythingLLM in this version (upload only). -## Désinstallation +## Uninstall ```bash -rm -f /path/vers/repo/.git/hooks/post-merge +rm -f /path/to/repo/.git/hooks/post-merge ``` -## Liens +## Links [features/anythingllm-pull-sync-after-pull.md](../features/anythingllm-pull-sync-after-pull.md), [anythingllm-workspaces.md](../anythingllm-workspaces.md), [service-anythingllm-devtools.md](./service-anythingllm-devtools.md). 
diff --git a/docs/repo/script-remote-data-ssh-sync.md b/docs/repo/script-remote-data-ssh-sync.md new file mode 100644 index 0000000..3fc4243 --- /dev/null +++ b/docs/repo/script-remote-data-ssh-sync.md @@ -0,0 +1,61 @@ +# remote-data-ssh-sync (`scripts/remote-data-ssh-sync.sh`) + +Pulls **deployed environment data** over SSH into a **local mirror** (not versioned in Git), then optionally ingests that mirror into **AnythingLLM**. + +## Configuration source (per project) + +`projects/<id>/conf.json`: + +- `smart_ide.remote_data_access.environments.<env>.ssh_host_alias` +- `smart_ide.remote_data_access.environments.<env>.remote_data_directories[]` +- `smart_ide.anythingllm_workspace_slug[env]` (optional; required for ingestion) + +## Mirror location + +Default: + +- `<smart_ide_root>/.data/remote-data/<project_id>/<env>/<role>/` + +This directory is ignored by Git (see `.gitignore`). + +Override: + +- `SMART_IDE_REMOTE_DATA_MIRROR_ROOT=/abs/path` + +## AnythingLLM ingestion + +By default, the script attempts ingestion and skips explicitly if config is missing.
+ +Inputs: + +- `~/.config/4nk/anythingllm-sync.env` (optional): provides `ANYTHINGLLM_BASE_URL` + `ANYTHINGLLM_API_KEY` +- `projects/<id>/conf.json`: provides the workspace slug for the selected env + +Implementation: + +- calls `scripts/anythingllm-pull-sync/sync.mjs` with `--upload-all` on each mirrored role directory + +## Usage + +```bash +./scripts/remote-data-ssh-sync.sh --project enso --env test +``` + +Fetch only (no ingestion): + +```bash +./scripts/remote-data-ssh-sync.sh --project enso --env test --no-anythingllm +``` + +Ingest only specific roles: + +```bash +./scripts/remote-data-ssh-sync.sh --project enso --env test --roles docv_dp_git_data +``` + +Dry-run (prints rsync command lines): + +```bash +./scripts/remote-data-ssh-sync.sh --project enso --env test --dry-run +``` + diff --git a/docs/repo/service-ia-dev-gateway.md b/docs/repo/service-ia-dev-gateway.md index 67185f0..549c0a8 100644 --- a/docs/repo/service-ia-dev-gateway.md +++ b/docs/repo/service-ia-dev-gateway.md @@ -1,6 +1,6 @@ # ia-dev-gateway (`services/ia-dev-gateway/`) -API HTTP pour le checkout **ia_dev** : liste des agents (`.smartIde/agents/*.md`), `POST /v1/runs` (complétion stub), SSE sur `/v1/runs/:id/events`. Branchement futur sur les scripts deploy / agents réels. +HTTP API for the **ia_dev** checkout: agent registry (`.smartIde/agents/*.md`), `POST /v1/runs` (script-backed runner), and SSE streaming on `/v1/runs/:id/events`. ## Build / run @@ -9,12 +9,47 @@ cd services/ia-dev-gateway npm install npm run build export IA_DEV_GATEWAY_TOKEN='your-secret' -# optionnel : IA_DEV_ROOT=/chemin/vers/ia_dev +# optional: IA_DEV_ROOT=/absolute/path/to/ia_dev npm start ``` Écoute par défaut : **`127.0.0.1:37144`**.
+## IA_DEV_ROOT resolution + +If `IA_DEV_ROOT` is not set, the gateway resolves it in this order: + +- `./ia_dev` +- `./services/ia_dev` + +When using `./services/ia_dev`, you typically need to create project links so scripts can resolve `projects//conf.json`: + +```bash +./scripts/ensure-ia-dev-project-link.sh smart_ide +./scripts/ensure-ia-dev-project-link.sh enso +./scripts/ensure-ia-dev-project-link.sh builazoo +``` + +## Script-backed runner (current) + +The v1 runner executes a subset of agent IDs by spawning `bash` scripts under `IA_DEV_ROOT/deploy/` and streaming stdout/stderr as SSE events. + +Supported agent IDs: + +- `change-to-all-branches` → `deploy/change-to-all-branches.sh` +- `branch-align-by-script-from-test` → `deploy/branch-align.sh` (defaults to `env=test` if omitted) +- `deploy-by-script` → `deploy/deploy.sh` (env `test`) or `deploy/deploy-by-script-to.sh` (env `pprod|prod`) +- `push-by-script` → `deploy/pousse.sh` +- `site-generate` → `tools/site-generate.sh` + +### `payload` conventions + +- **`payload.args`**: `string[]` appended to the script arguments. +- **`payload.stdin`**: `string` written to the script stdin. +- **`payload.commitMessage`**: `string` alias for stdin for `push-by-script`. +- **`payload.remote`**: `string` passed as `--remote ` for `push-by-script`. +- **`payload.bumpVersion`**: `boolean` adds `--bump-version` for `push-by-script`. + ## Contrats [API/ia-dev-gateway.md](../API/ia-dev-gateway.md), [features/ia-dev-service.md](../features/ia-dev-service.md). diff --git a/docs/repo/smart-ide-overview.md b/docs/repo/smart-ide-overview.md index 2330287..e9fb8d5 100644 --- a/docs/repo/smart-ide-overview.md +++ b/docs/repo/smart-ide-overview.md @@ -27,7 +27,7 @@ Voir [anythingllm-workspaces.md](../anythingllm-workspaces.md) et [script-anythi ## Module `ia_dev` -Le répertoire **`ia_dev/`** contient l’**équipe d’agents**, les scripts `deploy/`, le ticketing Gitea, etc. 
(référence forge : [**ia_dev**](https://git.4nkweb.com/4nk/ia_dev.git)). Les **`conf.json`** pour ce monorepo sont sous **`projects//`** (voir [projects-directory.md](./projects-directory.md)). Détail : [ia_dev-module.md](../ia_dev-module.md), [ia-dev-smart-ide-integration.md](./ia-dev-smart-ide-integration.md), [ia-dev-repository-overview.md](./ia-dev-repository-overview.md), [ia_dev-project-smart_ide.md](../ia_dev-project-smart_ide.md). +Le répertoire **`services/ia_dev/`** (racine `IA_DEV_ROOT`) contient l’**équipe d’agents**, les scripts `deploy/`, le ticketing Gitea, etc. (référence forge : [**ia_dev**](https://git.4nkweb.com/4nk/ia_dev.git)). Les **`conf.json`** pour ce monorepo sont sous **`projects//`** (voir [projects-directory.md](./projects-directory.md)). Détail : [ia_dev-module.md](../ia_dev-module.md), [ia-dev-smart-ide-integration.md](./ia-dev-smart-ide-integration.md), [ia-dev-repository-overview.md](./ia-dev-repository-overview.md), [ia_dev-project-smart_ide.md](../ia_dev-project-smart_ide.md). ## Documentation centralisée diff --git a/docs/system-architecture.md b/docs/system-architecture.md index dfdcc67..2524ff2 100644 --- a/docs/system-architecture.md +++ b/docs/system-architecture.md @@ -19,7 +19,7 @@ Conséquences : - Les répertoires sous `services/` font partie du **même cycle de vie** que le reste du monorepo (revue, déploiement, systemd). - **`core_ide/`** est un **clone local** de l’éditeur **Lapce** (socle applicatif), présent **dans l’arborescence du monorepo** sur disque ; il est **exclu de l’index Git du parent** par volumétrie (voir racine `.gitignore`). Mise à jour : procédure dans [core-ide.md](./core-ide.md). 
-- `ia_dev` est un **répertoire versionné** dans ce monorepo (évolution historique depuis le dépôt forge [4nk/ia_dev](https://git.4nkweb.com/4nk/ia_dev.git)) ; intégration et journaux : [ia_dev-module.md](./ia_dev-module.md), [repo/ia-dev-smart-ide-integration.md](./repo/ia-dev-smart-ide-integration.md), [repo/logs-directory.md](./repo/logs-directory.md). Un service HTTP **`ia-dev-gateway`** ([features/ia-dev-service.md](./features/ia-dev-service.md)) exposera le registre et les exécutions agents. +- `ia_dev` est un **répertoire versionné** dans ce monorepo (évolution historique depuis le dépôt forge [4nk/ia_dev](https://git.4nkweb.com/4nk/ia_dev.git)) sous **`services/ia_dev/`** ; intégration et journaux : [ia_dev-module.md](./ia_dev-module.md), [repo/ia-dev-smart-ide-integration.md](./repo/ia-dev-smart-ide-integration.md), [repo/logs-directory.md](./repo/logs-directory.md). Un service HTTP **`ia-dev-gateway`** ([features/ia-dev-service.md](./features/ia-dev-service.md)) expose le registre et les exécutions agents. - **Orchestrateur** HTTP : [features/orchestrator-api.md](./features/orchestrator-api.md) — serveur stub sous `services/smart-ide-orchestrator/` ; routage intentions → Ollama, AnythingLLM, micro-services, `ia-dev-gateway` (forward HTTP à compléter). 
## Cartographie des ressources (arborescence) @@ -41,7 +41,7 @@ Conséquences : | `services/local-office/` | **API REST** Office (upload, commandes docx, stockage SQLite + fichiers) ; complément programmatique à ONLYOFFICE | | `services/docv/` | **Contrat d’intégration** docv (hors monorepo) ; données projet sous `../projects//data/` ; pas de code applicatif docv ici — [features/docv-service-integration.md](./features/docv-service-integration.md) | | `projects//` (racine monorepo) | **Confs seules** pour `ia_dev` (`conf.json`) — **pas** les clones Git ; clones typiquement sous `../projects/` ou autre racine ; voir [repo/projects-directory.md](./repo/projects-directory.md) | -| `ia_dev/` | Agents, déploiements — exécution sous policy ; `ia_dev/projects/` peut pointer vers `../../projects/` (lien) ; voir [ia_dev-module.md](./ia_dev-module.md) | +| `services/ia_dev/` | Agents, déploiements — exécution sous policy ; `services/ia_dev/projects/` peut pointer vers `../../projects/` (lien) ; voir [ia_dev-module.md](./ia_dev-module.md) | | `services/ia-dev-gateway/` | Gateway HTTP (stub runner) : registre agents `.md`, runs, SSE — [features/ia-dev-service.md](./features/ia-dev-service.md) | | `services/smart-ide-orchestrator/` | Routage intentions (stub forward) — [features/orchestrator-api.md](./features/orchestrator-api.md) | | `services/smart-ide-tools-bridge/` | API IDE : registre des services + Carbonyl / PageIndex / Chandra — [repo/service-smart-ide-tools-bridge.md](./repo/service-smart-ide-tools-bridge.md) | @@ -56,7 +56,7 @@ Chaque environnement possède ses **URLs**, **secrets** et **politiques** (Anyth ## Module `ia_dev` dans ce dépôt -Le répertoire **`./ia_dev`** fait partie du dépôt **smart_ide** (référence historique : [4nk/ia_dev](https://git.4nkweb.com/4nk/ia_dev.git) sur la forge). Sur le serveur SSH, l’**agent gateway** et les outils peuvent pointer vers ce chemin comme racine d’exécution des agents (scripts invoqués depuis la racine `ia_dev`). 
Voir [ia_dev-module.md](./ia_dev-module.md) et [repo/ia-dev-smart-ide-integration.md](./repo/ia-dev-smart-ide-integration.md). +Le répertoire **`./services/ia_dev`** fait partie du dépôt **smart_ide** (référence historique : [4nk/ia_dev](https://git.4nkweb.com/4nk/ia_dev.git) sur la forge). Sur le serveur SSH, l’**agent gateway** et les outils peuvent pointer vers `IA_DEV_ROOT` (résolution par défaut : `./ia_dev` puis `./services/ia_dev`) comme racine d’exécution des agents (scripts invoqués depuis la racine `ia_dev`). Voir [ia_dev-module.md](./ia_dev-module.md) et [repo/ia-dev-smart-ide-integration.md](./repo/ia-dev-smart-ide-integration.md). ## Répartition physique (première cible) diff --git a/ia_dev b/ia_dev deleted file mode 160000 index b4ce818..0000000 --- a/ia_dev +++ /dev/null @@ -1 +0,0 @@ -Subproject commit b4ce81858cf4ba10b5a5ac16ea4ebc6f94e85a5d diff --git a/patches/lapce/README.md b/patches/lapce/README.md new file mode 100644 index 0000000..12ec740 --- /dev/null +++ b/patches/lapce/README.md @@ -0,0 +1,44 @@ +# Lapce patches (Smart IDE) + +This folder versions the **Smart IDE** modifications applied to **Lapce**. + +Why this exists: + +- The Lapce checkout lives in `core_ide/` (and is ignored by the parent `smart_ide` git repository). +- We still want a **reviewable**, **replayable** history of the core editor changes. + +## Files + +- `series`: ordered list of patch filenames to apply (one per line, `#` comments allowed). +- `*.patch`: `git format-patch` output generated from `core_ide/`. + +## Workflow + +### Apply patches to `core_ide/` + +From the `smart_ide` repo root: + +```bash +./scripts/core-ide-apply-patches.sh +``` + +### Export patches from `core_ide/` + +1. Make commits inside `core_ide/` (in a dedicated branch, e.g. `smart-ide`). +2. Export the patch series: + +```bash +./scripts/core-ide-export-patches.sh +``` + +This regenerates `patches/lapce/*.patch` and rewrites `patches/lapce/series`. 
+ +## Remotes + +Recommended convention inside `core_ide/`: + +- `upstream`: official Lapce (`https://github.com/lapce/lapce.git`) +- `origin`: internal fork (team forge) + +Use `./scripts/ensure-core-ide.sh --fork-url <...>` to set this up. + diff --git a/patches/lapce/series b/patches/lapce/series new file mode 100644 index 0000000..74d7bfa --- /dev/null +++ b/patches/lapce/series @@ -0,0 +1,3 @@ +# List Lapce patch files (one per line). +# This file is consumed by: ./scripts/core-ide-apply-patches.sh + diff --git a/projects/builazoo/conf.json b/projects/builazoo/conf.json index da95d85..3d05360 100644 --- a/projects/builazoo/conf.json +++ b/projects/builazoo/conf.json @@ -4,7 +4,7 @@ "cron": { "git_pull": true }, - "project_path": "builazoo", + "project_path": "../builazoo", "build_dirs": [], "deploy": {}, "version": { diff --git a/scripts/anythingllm-pull-sync/sync.mjs b/scripts/anythingllm-pull-sync/sync.mjs index 260992c..84ab4cd 100755 --- a/scripts/anythingllm-pull-sync/sync.mjs +++ b/scripts/anythingllm-pull-sync/sync.mjs @@ -28,17 +28,54 @@ const git = (repoRoot, args) => { }; const parseArgs = () => { - const out = { repoRoot: process.cwd() }; + const out = { repoRoot: process.cwd(), uploadAll: false, uploadPrefix: "" }; const argv = process.argv.slice(2); for (let i = 0; i < argv.length; i += 1) { if (argv[i] === "--repo-root" && argv[i + 1]) { out.repoRoot = path.resolve(argv[i + 1]); i += 1; + continue; + } + if (argv[i] === "--upload-all") { + out.uploadAll = true; + continue; + } + if (argv[i] === "--upload-prefix" && argv[i + 1]) { + out.uploadPrefix = String(argv[i + 1] ?? 
"").trim(); + i += 1; + continue; } } return out; }; +const walkFiles = async (dir) => { + const out = []; + const scan = async (d) => { + const entries = await fsPromises.readdir(d, { withFileTypes: true }); + for (const e of entries) { + const p = path.join(d, e.name); + if (e.isSymbolicLink()) { + continue; + } + if (e.isDirectory()) { + await scan(p); + continue; + } + if (e.isFile()) { + out.push(p); + } + } + }; + await scan(dir); + return out; +}; + +const toPosixRel = (root, abs) => { + const rel = path.relative(root, abs); + return rel.split(path.sep).join("/"); +}; + const loadWorkspaceSlug = (repoRoot) => { const env = process.env.ANYTHINGLLM_WORKSPACE_SLUG?.trim(); if (env) { @@ -53,6 +90,63 @@ const loadWorkspaceSlug = (repoRoot) => { } catch { /* missing */ } + // smart_ide integration: resolve slug from projects//conf.json when available. + // This avoids having to write per-repo config files into the target clones. + try { + const smartIdeRoot = path.resolve(__dirname, "..", ".."); + const projectsDir = path.join(smartIdeRoot, "projects"); + if (!fs.existsSync(projectsDir)) { + return ""; + } + const repoReal = fs.realpathSync(repoRoot); + const envNameRaw = process.env.SMART_IDE_ENV?.trim() ?? ""; + const envName = envNameRaw === "test" || envNameRaw === "pprod" || envNameRaw === "prod" ? envNameRaw : "test"; + + for (const ent of fs.readdirSync(projectsDir, { withFileTypes: true })) { + if (!ent.isDirectory()) { + continue; + } + const confPath = path.join(projectsDir, ent.name, "conf.json"); + if (!fs.existsSync(confPath)) { + continue; + } + let conf; + try { + conf = readJson(confPath); + } catch { + continue; + } + const projectPath = typeof conf?.project_path === "string" ? conf.project_path.trim() : ""; + if (!projectPath) { + continue; + } + const absProjectPath = path.isAbsolute(projectPath) + ? 
projectPath + : path.resolve(smartIdeRoot, projectPath); + let projectReal; + try { + projectReal = fs.realpathSync(absProjectPath); + } catch { + continue; + } + if (projectReal !== repoReal) { + continue; + } + + const slugCfg = conf?.smart_ide?.anythingllm_workspace_slug; + if (typeof slugCfg === "string" && slugCfg.trim().length > 0) { + return slugCfg.trim(); + } + if (slugCfg && typeof slugCfg === "object") { + const slug = slugCfg?.[envName]; + if (typeof slug === "string" && slug.trim().length > 0) { + return slug.trim(); + } + } + } + } catch { + // ignore and fall back to empty (explicit skip handled by caller) + } return ""; }; @@ -62,6 +156,28 @@ const normalizeApiKey = (raw) => { return m ? t.slice(m[0].length).trim() : t; }; +const readPositiveIntEnv = (name, fallback) => { + const raw = process.env[name]; + if (!raw || raw.trim().length === 0) { + return fallback; + } + const s = raw.trim(); + if (!/^\d+$/.test(s)) { + console.error( + `anythingllm-pull-sync: invalid ${name}=${JSON.stringify(raw)}; using default ${fallback}`, + ); + return fallback; + } + const n = Number(s); + if (!Number.isFinite(n) || n <= 0) { + console.error( + `anythingllm-pull-sync: invalid ${name}=${JSON.stringify(raw)}; using default ${fallback}`, + ); + return fallback; + } + return n; +}; + const uploadOne = async (baseUrl, apiKey, slug, absPath, uploadName) => { const root = baseUrl.replace(/\/+$/, ""); const buf = await fsPromises.readFile(absPath); @@ -86,11 +202,11 @@ const uploadOne = async (baseUrl, apiKey, slug, absPath, uploadName) => { }; const main = async () => { - const { repoRoot } = parseArgs(); + const { repoRoot, uploadAll, uploadPrefix } = parseArgs(); const baseUrl = process.env.ANYTHINGLLM_BASE_URL?.trim() ?? ""; const apiKeyRaw = process.env.ANYTHINGLLM_API_KEY?.trim() ?? ""; - const maxBytes = Number(process.env.ANYTHINGLLM_SYNC_MAX_FILE_BYTES ?? 5242880); - const maxFiles = Number(process.env.ANYTHINGLLM_SYNC_MAX_FILES ?? 
200); + const maxBytes = readPositiveIntEnv("ANYTHINGLLM_SYNC_MAX_FILE_BYTES", 5_242_880); + const maxFiles = readPositiveIntEnv("ANYTHINGLLM_SYNC_MAX_FILES", 200); if (!baseUrl || !apiKeyRaw) { console.error( @@ -107,6 +223,76 @@ const main = async () => { process.exit(0); } + if (uploadAll === true) { + const ignorePath = path.join(repoRoot, ".4nkaiignore"); + let userRules = ""; + try { + userRules = await fsPromises.readFile(ignorePath, "utf8"); + } catch { + userRules = ""; + } + const ig = ignore(); + ig.add(ALWAYS_IGNORE); + ig.add(userRules); + + let uploaded = 0; + let skipped = 0; + const errors = []; + + const absFiles = await walkFiles(repoRoot); + for (const abs of absFiles) { + const rel = toPosixRel(repoRoot, abs); + if (rel.length === 0 || rel.startsWith("..")) { + skipped += 1; + continue; + } + if (ig.ignores(rel)) { + skipped += 1; + continue; + } + let st; + try { + st = await fsPromises.stat(abs); + } catch { + skipped += 1; + continue; + } + if (!st.isFile()) { + skipped += 1; + continue; + } + if (st.size > maxBytes) { + skipped += 1; + continue; + } + if (uploaded >= maxFiles) { + console.error("anythingllm-pull-sync: cap reached (ANYTHINGLLM_SYNC_MAX_FILES)."); + break; + } + const relPosix = rel.split(path.sep).join("/"); + const baseName = relPosix.split("/").join("__"); + const uploadName = + uploadPrefix && uploadPrefix.length > 0 ? `${uploadPrefix}__${baseName}` : baseName; + try { + await uploadOne(baseUrl, apiKey, slug, abs, uploadName); + uploaded += 1; + } catch (e) { + errors.push(`${relPosix}: ${e instanceof Error ? e.message : String(e)}`); + } + } + + console.error( + `anythingllm-pull-sync: mode=upload-all uploaded=${uploaded} skipped=${skipped} errors=${errors.length}`, + ); + for (const line of errors.slice(0, 20)) { + console.error(line); + } + if (errors.length > 20) { + console.error(`… ${errors.length - 20} more`); + } + process.exit(errors.length > 0 ? 
1 : 0); + } + try { git(repoRoot, ["rev-parse", "-q", "--verify", "ORIG_HEAD"]); } catch { @@ -125,7 +311,10 @@ const main = async () => { ]); names = out.length > 0 ? out.split("\n").filter(Boolean) : []; } catch (e) { - console.error("anythingllm-pull-sync: git diff failed — skip.", e.message); + console.error( + "anythingllm-pull-sync: git diff failed — skip.", + e instanceof Error ? e.message : String(e), + ); process.exit(0); } @@ -197,7 +386,7 @@ const main = async () => { if (errors.length > 20) { console.error(`… ${errors.length - 20} more`); } - process.exit(0); + process.exit(errors.length > 0 ? 1 : 0); }; main().catch((e) => { diff --git a/scripts/core-ide-apply-patches.sh b/scripts/core-ide-apply-patches.sh new file mode 100755 index 0000000..ce803f7 --- /dev/null +++ b/scripts/core-ide-apply-patches.sh @@ -0,0 +1,56 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" +CORE_IDE_DIR="${ROOT}/core_ide" +PATCH_DIR="${ROOT}/patches/lapce" +SERIES_FILE="${PATCH_DIR}/series" + +if [[ ! -d "${CORE_IDE_DIR}/.git" ]]; then + echo "Missing core_ide git checkout: ${CORE_IDE_DIR}" >&2 + echo "Run: ./scripts/ensure-core-ide.sh" >&2 + exit 1 +fi + +if [[ ! -f "${SERIES_FILE}" ]]; then + echo "Missing patch series file: ${SERIES_FILE}" >&2 + exit 1 +fi + +if [[ -d "${CORE_IDE_DIR}/.git/rebase-apply" ]] || [[ -d "${CORE_IDE_DIR}/.git/rebase-merge" ]]; then + echo "core_ide has an in-progress rebase/am. Resolve it first." >&2 + exit 1 +fi + +if [[ -n "$(git -C "${CORE_IDE_DIR}" status --porcelain)" ]]; then + echo "core_ide working tree is not clean. Commit/stash changes before applying patches." >&2 + exit 1 +fi + +mapfile -t PATCHES < <( + sed -e 's/[[:space:]]\+$//' "${SERIES_FILE}" \ + | awk 'NF && $1 !~ /^#/' \ + | cat +) + +if [[ ${#PATCHES[@]} -eq 0 ]]; then + echo "No patches listed in ${SERIES_FILE}. Nothing to apply." + exit 0 +fi + +for rel in "${PATCHES[@]}"; do + patch_path="${PATCH_DIR}/${rel}" + if [[ ! 
-f "${patch_path}" ]]; then + echo "Patch file not found: ${patch_path}" >&2 + exit 1 + fi + echo "Applying: ${rel}" + if ! git -C "${CORE_IDE_DIR}" am --3way "${patch_path}"; then + echo "Patch failed: ${rel}" >&2 + echo "To abort: (cd core_ide && git am --abort)" >&2 + exit 1 + fi +done + +echo "OK: applied ${#PATCHES[@]} patch(es) to core_ide." + diff --git a/scripts/core-ide-export-patches.sh b/scripts/core-ide-export-patches.sh new file mode 100755 index 0000000..4d6b4bd --- /dev/null +++ b/scripts/core-ide-export-patches.sh @@ -0,0 +1,114 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" +CORE_IDE_DIR="${ROOT}/core_ide" +PATCH_DIR="${ROOT}/patches/lapce" +SERIES_FILE="${PATCH_DIR}/series" + +BASE_REF="" + +usage() { + cat <<'EOF' +Usage: + ./scripts/core-ide-export-patches.sh [--base ] + +Exports the commits in core_ide (BASE..HEAD) as patch files into: + patches/lapce/*.patch +and rewrites: + patches/lapce/series + +If --base is not provided, the script tries (first match): + upstream/master, upstream/main, origin/master, origin/main +EOF +} + +while [[ $# -gt 0 ]]; do + case "$1" in + -h|--help) + usage + exit 0 + ;; + --base) + BASE_REF="${2:-}" + shift 2 + ;; + *) + echo "Unknown argument: $1" >&2 + usage >&2 + exit 2 + ;; + esac +done + +if [[ ! -d "${CORE_IDE_DIR}/.git" ]]; then + echo "Missing core_ide git checkout: ${CORE_IDE_DIR}" >&2 + exit 1 +fi + +mkdir -p "${PATCH_DIR}" + +if [[ ! -f "${SERIES_FILE}" ]]; then + echo "Missing series file: ${SERIES_FILE}" >&2 + exit 1 +fi + +if [[ -n "$(git -C "${CORE_IDE_DIR}" status --porcelain)" ]]; then + echo "core_ide working tree is not clean. Commit changes before exporting patches." 
>&2 + exit 1 +fi + +ref_exists() { + git -C "${CORE_IDE_DIR}" show-ref --verify --quiet "refs/remotes/$1" \ + || git -C "${CORE_IDE_DIR}" show-ref --verify --quiet "refs/heads/$1" \ + || git -C "${CORE_IDE_DIR}" rev-parse --verify --quiet "$1" >/dev/null 2>&1 +} + +if [[ -z "${BASE_REF}" ]]; then + for candidate in "upstream/master" "upstream/main" "origin/master" "origin/main"; do + if ref_exists "${candidate}"; then + BASE_REF="${candidate}" + break + fi + done +fi + +if [[ -z "${BASE_REF}" ]]; then + echo "Could not auto-detect a base ref. Provide --base ." >&2 + exit 1 +fi + +HEAD_SHA="$(git -C "${CORE_IDE_DIR}" rev-parse HEAD)" +BASE_SHA="$(git -C "${CORE_IDE_DIR}" rev-parse "${BASE_REF}")" + +if [[ "${HEAD_SHA}" == "${BASE_SHA}" ]]; then + echo "No commits to export (HEAD equals base ${BASE_REF})." + rm -f "${PATCH_DIR}"/*.patch + printf "%s\n" \ + "# List Lapce patch files (one per line)." \ + "# This file is consumed by: ./scripts/core-ide-apply-patches.sh" \ + "" \ + >"${SERIES_FILE}" + exit 0 +fi + +rm -f "${PATCH_DIR}"/*.patch + +git -C "${CORE_IDE_DIR}" format-patch \ + --output-directory "${PATCH_DIR}" \ + --no-stat \ + --no-signature \ + "${BASE_REF}..HEAD" + +( + cd "${PATCH_DIR}" + { + echo "# List Lapce patch files (one per line)." + echo "# This file is consumed by: ./scripts/core-ide-apply-patches.sh" + echo + ls -1 *.patch 2>/dev/null | sort + } >"${SERIES_FILE}" +) + +echo "OK: exported patches to ${PATCH_DIR} (base=${BASE_REF})." + diff --git a/scripts/ensure-core-ide.sh b/scripts/ensure-core-ide.sh new file mode 100755 index 0000000..9b26be2 --- /dev/null +++ b/scripts/ensure-core-ide.sh @@ -0,0 +1,84 @@ +#!/usr/bin/env bash +set -euo pipefail + +usage() { + cat <<'EOF' +Usage: + ./scripts/ensure-core-ide.sh [--fork-url ] [--upstream-url ] + +What it does: + - Ensures the Lapce checkout exists at ./core_ide (ignored by the parent repo). 
+ - Optionally configures remotes for a fork workflow: + - origin = fork (if --fork-url is provided) + - upstream = official Lapce upstream (default: https://github.com/lapce/lapce.git) + +Notes: + - This script does not write any secrets. + - If you cloned with --depth 1 and need full history later: + (cd core_ide && git fetch --unshallow) +EOF +} + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" +CORE_IDE_DIR="${ROOT}/core_ide" + +FORK_URL="" +UPSTREAM_URL="https://github.com/lapce/lapce.git" + +while [[ $# -gt 0 ]]; do + case "$1" in + -h|--help) + usage + exit 0 + ;; + --fork-url) + FORK_URL="${2:-}" + shift 2 + ;; + --upstream-url) + UPSTREAM_URL="${2:-}" + shift 2 + ;; + *) + echo "Unknown argument: $1" >&2 + usage >&2 + exit 2 + ;; + esac +done + +if [[ -d "${CORE_IDE_DIR}/.git" ]]; then + echo "core_ide already exists: ${CORE_IDE_DIR}" +else + echo "core_ide missing; cloning..." + mkdir -p "${ROOT}" + if [[ -n "${FORK_URL}" ]]; then + git clone --depth 1 "${FORK_URL}" "${CORE_IDE_DIR}" + else + git clone --depth 1 "${UPSTREAM_URL}" "${CORE_IDE_DIR}" + fi +fi + +if [[ ! -f "${CORE_IDE_DIR}/Cargo.toml" ]]; then + echo "core_ide does not look like a Lapce checkout (missing Cargo.toml): ${CORE_IDE_DIR}" >&2 + exit 1 +fi + +( + cd "${CORE_IDE_DIR}" + + # Ensure upstream remote exists (official Lapce). + if ! git remote get-url upstream >/dev/null 2>&1; then + git remote add upstream "${UPSTREAM_URL}" + fi + + if [[ -n "${FORK_URL}" ]]; then + git remote set-url origin "${FORK_URL}" + fi + + echo "Remotes:" + git remote -v +) + +echo "OK" + diff --git a/scripts/ensure-ia-dev-project-link.sh b/scripts/ensure-ia-dev-project-link.sh index 484e9d9..6a6b3a5 100755 --- a/scripts/ensure-ia-dev-project-link.sh +++ b/scripts/ensure-ia-dev-project-link.sh @@ -11,8 +11,21 @@ if [[ ! 
-f "${CONF}" ]]; then exit 1 fi -LINK_PARENT="${ROOT}/ia_dev/projects" +IA_DEV_DIR="" +if [[ -d "${ROOT}/ia_dev" ]]; then + IA_DEV_DIR="${ROOT}/ia_dev" +elif [[ -d "${ROOT}/services/ia_dev" ]]; then + IA_DEV_DIR="${ROOT}/services/ia_dev" +else + echo "Missing ia_dev directory: expected '${ROOT}/ia_dev' or '${ROOT}/services/ia_dev'" >&2 + exit 1 +fi + +LINK_PARENT="${IA_DEV_DIR}/projects" TARGET="../../projects/${PROJECT_ID}" +if [[ "${IA_DEV_DIR}" == "${ROOT}/services/ia_dev" ]]; then + TARGET="../../../projects/${PROJECT_ID}" +fi LINK_NAME="${LINK_PARENT}/${PROJECT_ID}" mkdir -p "${LINK_PARENT}" if [[ -e "${LINK_NAME}" && ! -L "${LINK_NAME}" ]]; then diff --git a/scripts/install-anythingllm-post-merge-hook.sh b/scripts/install-anythingllm-post-merge-hook.sh index 8847ab0..4bcda36 100755 --- a/scripts/install-anythingllm-post-merge-hook.sh +++ b/scripts/install-anythingllm-post-merge-hook.sh @@ -1,33 +1,86 @@ #!/usr/bin/env bash set -euo pipefail -if [[ $# -lt 1 ]]; then - echo "Usage: $0 []" >&2 - exit 1 +usage() { + cat <<'EOF' >&2 +Usage: + install hook for one repo: + ./scripts/install-anythingllm-post-merge-hook.sh [] + + install hook for configured project clones: + ./scripts/install-anythingllm-post-merge-hook.sh --all [--smart-ide-root ] + ./scripts/install-anythingllm-post-merge-hook.sh --project [--smart-ide-root ] + +Notes: + - Project clones are resolved from smart_ide `projects//conf.json` -> `project_path`. + - The hook sources `${HOME}/.config/4nk/anythingllm-sync.env` if present. 
+EOF +} + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +SMART_IDE_ROOT="" + +repo_paths=() +mode="single" +filter_project_id="" + +if [[ $# -ge 1 && "${1:-}" != --* ]]; then + # Backward-compatible mode: [] + repo_paths+=( "$1" ) + SMART_IDE_ROOT="${2:-}" +else + while [[ $# -gt 0 ]]; do + case "$1" in + --smart-ide-root) + SMART_IDE_ROOT="${2:-}" + shift 2 + ;; + --all) + mode="all" + shift 1 + ;; + --project) + filter_project_id="${2:-}" + [[ -n "$filter_project_id" ]] || { echo "Missing value for --project" >&2; usage; exit 2; } + mode="project" + shift 2 + ;; + -h|--help) + usage + exit 0 + ;; + *) + echo "Unknown arg: $1" >&2 + usage + exit 2 + ;; + esac + done fi -REPO=$(cd "$1" && pwd) -SMART_IDE_ROOT=${2:-} if [[ -z "$SMART_IDE_ROOT" ]]; then - SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) - SMART_IDE_ROOT=$(cd "$SCRIPT_DIR/.." && pwd) + SMART_IDE_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" +else + SMART_IDE_ROOT="$(cd "$SMART_IDE_ROOT" && pwd)" fi SYNC_DIR="$SMART_IDE_ROOT/scripts/anythingllm-pull-sync" -HOOK="$REPO/.git/hooks/post-merge" - -if [[ ! -d "$REPO/.git" ]]; then - echo "Not a git repository: $REPO" >&2 - exit 1 -fi - if [[ ! -f "$SYNC_DIR/sync.mjs" ]]; then echo "Missing $SYNC_DIR/sync.mjs" >&2 exit 1 fi -mkdir -p "$(dirname "$HOOK")" -cat >"$HOOK" <&2 + return 1 + fi + local hook="$repo/.git/hooks/post-merge" + mkdir -p "$(dirname "$hook")" + cat >"$hook" </dev/null 2>&1 || { + echo "jq not found; install jq." >&2 + exit 1 +} + +projects_dir="$SMART_IDE_ROOT/projects" +if [[ ! 
-d "$projects_dir" ]]; then + echo "Missing projects dir: $projects_dir" >&2 + exit 1 +fi + +count_ok=0 +count_skip=0 +count_err=0 + +for conf in "$projects_dir"/*/conf.json; do + [[ -f "$conf" ]] || continue + project_id="$(basename "$(dirname "$conf")")" + if [[ "$mode" == "project" && "$project_id" != "$filter_project_id" ]]; then + continue + fi + if [[ "$project_id" == "example" ]]; then + echo "skip $project_id: template project" + count_skip=$((count_skip + 1)) + continue + fi + if ! project_path="$(jq -r '.project_path // empty' "$conf")"; then + echo "err $project_id: failed to read project_path from $conf" >&2 + count_err=$((count_err + 1)) + continue + fi + if [[ -z "$project_path" || "$project_path" == "null" ]]; then + echo "skip $project_id: empty project_path" + count_skip=$((count_skip + 1)) + continue + fi + if [[ "$project_path" != /* ]]; then + project_path="$(cd "$SMART_IDE_ROOT" && realpath -m "$project_path" 2>/dev/null || echo "$SMART_IDE_ROOT/$project_path")" + fi + if [[ ! -d "$project_path" ]]; then + echo "skip $project_id: not a directory: $project_path" + count_skip=$((count_skip + 1)) + continue + fi + if install_one "$project_path"; then + count_ok=$((count_ok + 1)) + else + count_err=$((count_err + 1)) + fi +done + +echo "Done: ok=$count_ok skip=$count_skip err=$count_err" echo "Run: (cd $SYNC_DIR && npm install) if node_modules is missing." +if [[ "$count_err" -gt 0 ]]; then + exit 1 +fi diff --git a/scripts/remote-data-ssh-sync.sh b/scripts/remote-data-ssh-sync.sh new file mode 100755 index 0000000..f83c562 --- /dev/null +++ b/scripts/remote-data-ssh-sync.sh @@ -0,0 +1,318 @@ +#!/usr/bin/env bash +# Pull deployed data directories over SSH into a local mirror, then optionally ingest into AnythingLLM. +# +# Source of truth: +# projects//conf.json -> smart_ide.remote_data_access.environments. +# +# This script never writes to remote databases. It only performs SSH/rsync reads. 
+set -euo pipefail + +usage() { + cat <<'EOF' +Usage: + ./scripts/remote-data-ssh-sync.sh [--project ] [--env ] + [--mirror-root ] + [--roles ] + [--no-anythingllm] + [--max-files ] [--max-bytes ] + [--dry-run] + +Project/env resolution (first match): + - --project / --env + - SMART_IDE_PROJECT_ID / SMART_IDE_ENV + - projects/active-project.json (local, gitignored) + +Mirror root: + - SMART_IDE_REMOTE_DATA_MIRROR_ROOT, else /.data/remote-data + +AnythingLLM ingestion: + - enabled by default (skip if AnythingLLM config or workspace slug is missing) + - reads workspace slug from projects//conf.json -> smart_ide.anythingllm_workspace_slug[env] + - reads ANYTHINGLLM_BASE_URL / ANYTHINGLLM_API_KEY from ~/.config/4nk/anythingllm-sync.env if present + - uses scripts/anythingllm-pull-sync/sync.mjs in --upload-all mode + +Notes: + - Requires: jq, ssh, rsync, node (>=20). + - Output is not filtered; rsync output remains visible. +EOF +} + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" +ACTIVE_PROJECT_FILE="${ROOT}/projects/active-project.json" + +PROJECT_ID="${SMART_IDE_PROJECT_ID:-}" +ENV_NAME="${SMART_IDE_ENV:-}" +MIRROR_ROOT="${SMART_IDE_REMOTE_DATA_MIRROR_ROOT:-${ROOT}/.data/remote-data}" + +INGEST_ANYTHINGLLM="true" +ROLES_CSV="" +DRY_RUN="false" +MAX_FILES="${ANYTHINGLLM_SYNC_MAX_FILES:-200}" +MAX_BYTES="${ANYTHINGLLM_SYNC_MAX_FILE_BYTES:-5242880}" + +while [[ $# -gt 0 ]]; do + case "$1" in + -h|--help) + usage + exit 0 + ;; + --project) + PROJECT_ID="${2:-}" + shift 2 + ;; + --env) + ENV_NAME="${2:-}" + shift 2 + ;; + --mirror-root) + MIRROR_ROOT="${2:-}" + shift 2 + ;; + --roles) + ROLES_CSV="${2:-}" + shift 2 + ;; + --no-anythingllm) + INGEST_ANYTHINGLLM="false" + shift 1 + ;; + --max-files) + MAX_FILES="${2:-}" + shift 2 + ;; + --max-bytes) + MAX_BYTES="${2:-}" + shift 2 + ;; + --dry-run) + DRY_RUN="true" + shift 1 + ;; + *) + echo "[remote-data-ssh-sync][ERROR] Unknown arg: $1" >&2 + usage >&2 + exit 2 + ;; + esac +done + +command -v jq >/dev/null 2>&1 
|| { echo "[remote-data-ssh-sync][ERROR] Missing dependency: jq" >&2; exit 1; } +command -v ssh >/dev/null 2>&1 || { echo "[remote-data-ssh-sync][ERROR] Missing dependency: ssh" >&2; exit 1; } +command -v rsync >/dev/null 2>&1 || { echo "[remote-data-ssh-sync][ERROR] Missing dependency: rsync" >&2; exit 1; } +command -v node >/dev/null 2>&1 || { echo "[remote-data-ssh-sync][ERROR] Missing dependency: node" >&2; exit 1; } + +if [[ -z "${PROJECT_ID}" && -f "${ACTIVE_PROJECT_FILE}" ]]; then + PROJECT_ID="$(jq -r '.id // empty' "${ACTIVE_PROJECT_FILE}")" +fi +if [[ -z "${ENV_NAME}" && -f "${ACTIVE_PROJECT_FILE}" ]]; then + ENV_NAME="$(jq -r '.default_env // empty' "${ACTIVE_PROJECT_FILE}")" +fi +if [[ -z "${PROJECT_ID}" ]]; then + echo "[remote-data-ssh-sync][ERROR] Missing project id. Provide --project or create projects/active-project.json" >&2 + exit 1 +fi +if [[ -z "${ENV_NAME}" ]]; then + ENV_NAME="test" +fi +case "${ENV_NAME}" in + test|pprod|prod) ;; + *) + echo "[remote-data-ssh-sync][ERROR] Invalid --env: ${ENV_NAME} (expected test|pprod|prod)" >&2 + exit 1 + ;; +esac + +CONF_FILE="${ROOT}/projects/${PROJECT_ID}/conf.json" +if [[ ! -f "${CONF_FILE}" ]]; then + echo "[remote-data-ssh-sync][ERROR] Missing project conf: ${CONF_FILE}" >&2 + exit 1 +fi + +SSH_HOST_ALIAS="$( + jq -r ".smart_ide.remote_data_access.environments.${ENV_NAME}.ssh_host_alias // empty" "${CONF_FILE}" +)" +if [[ -z "${SSH_HOST_ALIAS}" || "${SSH_HOST_ALIAS}" == "null" ]]; then + echo "[remote-data-ssh-sync][ERROR] Missing ssh_host_alias for ${PROJECT_ID}/${ENV_NAME} in ${CONF_FILE}" >&2 + exit 1 +fi + +mapfile -t ITEMS < <(jq -c ".smart_ide.remote_data_access.environments.${ENV_NAME}.remote_data_directories[]? // empty" "${CONF_FILE}") +if [[ ${#ITEMS[@]} -eq 0 ]]; then + echo "[remote-data-ssh-sync] No remote_data_directories configured for ${PROJECT_ID}/${ENV_NAME} (nothing to do)." 
+ exit 0 +fi + +MIRROR_ROOT_ABS="${MIRROR_ROOT}" +if [[ "${MIRROR_ROOT_ABS}" != /* ]]; then + MIRROR_ROOT_ABS="$(cd "${ROOT}" && realpath -m "${MIRROR_ROOT_ABS}" 2>/dev/null || echo "${ROOT}/${MIRROR_ROOT_ABS}")" +fi + +base_dir="${MIRROR_ROOT_ABS}/${PROJECT_ID}/${ENV_NAME}" +mkdir -p "${base_dir}" + +tmp_items="$(mktemp -t remote-data-items.XXXXXX)" +cleanup() { + rm -f "${tmp_items}" +} +trap cleanup EXIT + +echo "[remote-data-ssh-sync] projectId=${PROJECT_ID}" +echo "[remote-data-ssh-sync] env=${ENV_NAME}" +echo "[remote-data-ssh-sync] sshHostAlias=${SSH_HOST_ALIAS}" +echo "[remote-data-ssh-sync] mirrorRoot=${MIRROR_ROOT_ABS}" +echo "[remote-data-ssh-sync] items=${#ITEMS[@]}" +echo + +should_ingest_role() { + local role="$1" + if [[ -z "${ROLES_CSV}" ]]; then + return 0 + fi + local IFS=, + read -r -a allowed <<<"${ROLES_CSV}" + for r in "${allowed[@]}"; do + if [[ "$(echo "$r" | sed 's/[[:space:]]//g')" == "$role" ]]; then + return 0 + fi + done + return 1 +} + +for item in "${ITEMS[@]}"; do + role="$(echo "$item" | jq -r '.role // empty')" + remote_path="$(echo "$item" | jq -r '.path_on_server // empty')" + if [[ -z "$role" || -z "$remote_path" || "$role" == "null" || "$remote_path" == "null" ]]; then + echo "[remote-data-ssh-sync][WARN] Skip invalid item: ${item}" >&2 + continue + fi + if [[ "${remote_path}" != /* ]]; then + echo "[remote-data-ssh-sync][WARN] Skip non-absolute path_on_server for role '${role}': ${remote_path}" >&2 + continue + fi + + dest="${base_dir}/${role}" + + echo "[remote-data-ssh-sync] rsync role=${role}" + echo " from: ${SSH_HOST_ALIAS}:${remote_path}" + echo " to : ${dest}" + + # Trailing slash to mirror directory contents into dest/ + src="${SSH_HOST_ALIAS}:${remote_path%/}/" + synced_json="false" + if [[ "${DRY_RUN}" == "true" ]]; then + echo " dry-run: rsync -a --delete -e \"ssh -o BatchMode=yes\" \"${src}\" \"${dest}/\"" + else + mkdir -p "${dest}" + rsync -a --delete -e "ssh -o BatchMode=yes" "${src}" "${dest}/" + 
synced_json="true" + fi + echo "$item" | jq -c --arg dest "$dest" --argjson synced "${synced_json}" '{ role: (.role // ""), path_on_server: (.path_on_server // ""), dest: $dest, synced: $synced }' >>"${tmp_items}" + echo +done + +manifest="${base_dir}/manifest.json" +started_at="$(date -Iseconds)" +if [[ -s "${tmp_items}" ]]; then + jq -s \ + --arg projectId "${PROJECT_ID}" \ + --arg env "${ENV_NAME}" \ + --arg sshHostAlias "${SSH_HOST_ALIAS}" \ + --arg startedAt "${started_at}" \ + --argjson dryRun "$( [[ "${DRY_RUN}" == "true" ]] && echo true || echo false )" \ + '{ projectId: $projectId, env: $env, sshHostAlias: $sshHostAlias, startedAt: $startedAt, dryRun: $dryRun, items: . }' \ + "${tmp_items}" >"${manifest}" +else + jq -n \ + --arg projectId "${PROJECT_ID}" \ + --arg env "${ENV_NAME}" \ + --arg sshHostAlias "${SSH_HOST_ALIAS}" \ + --arg startedAt "${started_at}" \ + --argjson dryRun "$( [[ "${DRY_RUN}" == "true" ]] && echo true || echo false )" \ + '{ projectId: $projectId, env: $env, sshHostAlias: $sshHostAlias, startedAt: $startedAt, dryRun: $dryRun, items: [] }' \ + >"${manifest}" +fi +echo "[remote-data-ssh-sync] manifest=${manifest}" + +if [[ "${INGEST_ANYTHINGLLM}" != "true" ]]; then + echo "[remote-data-ssh-sync] AnythingLLM ingestion disabled (--no-anythingllm)." + exit 0 +fi + +workspace_slug="$( + jq -r --arg env "${ENV_NAME}" ' + .smart_ide.anythingllm_workspace_slug as $s + | if ($s|type) == "string" then $s + elif ($s|type) == "object" then ($s[$env] // empty) + else empty end + ' "${CONF_FILE}" +)" +workspace_slug="$(echo "${workspace_slug}" | sed 's/[[:space:]]//g')" +if [[ -z "${workspace_slug}" || "${workspace_slug}" == "null" ]]; then + echo "[remote-data-ssh-sync] AnythingLLM: missing smart_ide.anythingllm_workspace_slug for ${PROJECT_ID}/${ENV_NAME} in ${CONF_FILE} — skip." + exit 0 +fi + +# Optional host-level AnythingLLM env file (same convention as the post-merge hook). 
+if [[ -f "${HOME}/.config/4nk/anythingllm-sync.env" ]]; then + set -a + # shellcheck source=/dev/null + source "${HOME}/.config/4nk/anythingllm-sync.env" + set +a +fi + +if [[ -z "${ANYTHINGLLM_BASE_URL:-}" || -z "${ANYTHINGLLM_API_KEY:-}" ]]; then + echo "[remote-data-ssh-sync] AnythingLLM: missing ANYTHINGLLM_BASE_URL or ANYTHINGLLM_API_KEY — skip." + exit 0 +fi + +echo +echo "[remote-data-ssh-sync] AnythingLLM ingest workspaceSlug=${workspace_slug}" +echo "[remote-data-ssh-sync] limits: maxFiles=${MAX_FILES} maxBytes=${MAX_BYTES}" + +sync_script="${ROOT}/scripts/anythingllm-pull-sync/sync.mjs" +if [[ ! -f "${sync_script}" ]]; then + echo "[remote-data-ssh-sync][ERROR] Missing ${sync_script}" >&2 + exit 1 +fi + +sanitize() { + echo "$1" | sed 's/[^A-Za-z0-9._-]/_/g' +} + +ingest_ok=0 +ingest_err=0 + +for item in "${ITEMS[@]}"; do + role="$(echo "$item" | jq -r '.role // empty')" + [[ -n "$role" && "$role" != "null" ]] || continue + if ! should_ingest_role "$role"; then + echo "[remote-data-ssh-sync] AnythingLLM: skip role=${role} (not in --roles)" + continue + fi + dest="${base_dir}/${role}" + if [[ ! -d "${dest}" ]]; then + echo "[remote-data-ssh-sync] AnythingLLM: skip role=${role} (missing dest dir: ${dest})" >&2 + continue + fi + prefix="$(sanitize "${PROJECT_ID}")__$(sanitize "${ENV_NAME}")__$(sanitize "${role}")" + echo "[remote-data-ssh-sync] AnythingLLM: upload-all role=${role} (prefix=${prefix})" + + if [[ "${DRY_RUN}" == "true" ]]; then + echo " dry-run: ANYTHINGLLM_WORKSPACE_SLUG=... 
node ${sync_script} --repo-root \"${dest}\" --upload-all --upload-prefix \"${prefix}\"" + continue + fi + + ANYTHINGLLM_WORKSPACE_SLUG="${workspace_slug}" \ + ANYTHINGLLM_SYNC_MAX_FILES="${MAX_FILES}" \ + ANYTHINGLLM_SYNC_MAX_FILE_BYTES="${MAX_BYTES}" \ + node "${sync_script}" --repo-root "${dest}" --upload-all --upload-prefix "${prefix}" \ + && ingest_ok=$((ingest_ok + 1)) \ + || ingest_err=$((ingest_err + 1)) +done + +if [[ "${ingest_err}" -gt 0 ]]; then + echo "[remote-data-ssh-sync][ERROR] AnythingLLM ingestion failed for ${ingest_err} role(s); ok=${ingest_ok}" >&2 + exit 1 +fi +echo "[remote-data-ssh-sync] OK (AnythingLLM ok=${ingest_ok})" + diff --git a/scripts/smart-ide-ssh-tunnel-plan.sh b/scripts/smart-ide-ssh-tunnel-plan.sh new file mode 100755 index 0000000..340bf47 --- /dev/null +++ b/scripts/smart-ide-ssh-tunnel-plan.sh @@ -0,0 +1,175 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" +ACTIVE_PROJECT_FILE="${ROOT}/projects/active-project.json" + +usage() { + cat <<'EOF' +Usage: + ./scripts/smart-ide-ssh-tunnel-plan.sh [--project ] [--env ] [--mode ] [--json] + +Purpose: + Print an SSH tunnel command to reach the Smart IDE services running on a remote host. + +Project/env resolution (first match): + - --project / --env + - SMART_IDE_PROJECT_ID / SMART_IDE_ENV + - projects/active-project.json (local, gitignored) + +The SSH target is read from: + projects//conf.json -> smart_ide.remote_data_access.environments..ssh_host_alias + +Notes: + - This prints a command; it does not daemonize anything. + - Bind is limited to 127.0.0.1 on the client. 
+EOF +} + +PROJECT_ID="${SMART_IDE_PROJECT_ID:-}" +ENV_NAME="${SMART_IDE_ENV:-}" +MODE="minimal" +AS_JSON="false" + +while [[ $# -gt 0 ]]; do + case "$1" in + -h|--help) + usage + exit 0 + ;; + --project) + PROJECT_ID="${2:-}" + shift 2 + ;; + --env) + ENV_NAME="${2:-}" + shift 2 + ;; + --mode) + MODE="${2:-}" + shift 2 + ;; + --json) + AS_JSON="true" + shift 1 + ;; + *) + echo "Unknown argument: $1" >&2 + usage >&2 + exit 2 + ;; + esac +done + +if ! command -v jq >/dev/null 2>&1; then + echo "Missing dependency: jq" >&2 + exit 1 +fi + +if [[ -z "${PROJECT_ID}" ]] && [[ -f "${ACTIVE_PROJECT_FILE}" ]]; then + PROJECT_ID="$(jq -r '.id // empty' "${ACTIVE_PROJECT_FILE}")" +fi + +if [[ -z "${ENV_NAME}" ]] && [[ -f "${ACTIVE_PROJECT_FILE}" ]]; then + ENV_NAME="$(jq -r '.default_env // empty' "${ACTIVE_PROJECT_FILE}")" +fi + +if [[ -z "${PROJECT_ID}" ]]; then + echo "Missing project id. Provide --project or create projects/active-project.json" >&2 + exit 1 +fi + +if [[ -z "${ENV_NAME}" ]]; then + ENV_NAME="test" +fi + +case "${ENV_NAME}" in + test|pprod|prod) ;; + *) + echo "Invalid --env: ${ENV_NAME} (expected test|pprod|prod)" >&2 + exit 1 + ;; +esac + +case "${MODE}" in + minimal|all) ;; + *) + echo "Invalid --mode: ${MODE} (expected minimal|all)" >&2 + exit 1 + ;; +esac + +CONF_FILE="${ROOT}/projects/${PROJECT_ID}/conf.json" +if [[ ! -f "${CONF_FILE}" ]]; then + echo "Missing project conf: ${CONF_FILE}" >&2 + exit 1 +fi + +SSH_HOST_ALIAS="$( + jq -r ".smart_ide.remote_data_access.environments.${ENV_NAME}.ssh_host_alias // empty" "${CONF_FILE}" +)" +if [[ -z "${SSH_HOST_ALIAS}" ]] || [[ "${SSH_HOST_ALIAS}" == "null" ]]; then + echo "Missing ssh_host_alias for ${PROJECT_ID}/${ENV_NAME} in ${CONF_FILE}" >&2 + exit 1 +fi + +declare -a forwards=() + +add_forward() { + local port="$1" + forwards+=("-L" "127.0.0.1:${port}:127.0.0.1:${port}") +} + +# Minimal: enough for Lapce to talk to orchestrator + agents + tools jobs via tunnel. 
+add_forward 37145 # smart-ide-orchestrator +add_forward 37144 # ia-dev-gateway +add_forward 37147 # smart-ide-tools-bridge + +if [[ "${MODE}" == "all" ]]; then + add_forward 37149 # smart-ide-global-api (internal; mostly for debugging) + add_forward 37148 # smart-ide-sso-gateway (OIDC front) + add_forward 37146 # anythingllm-devtools + add_forward 37140 # repos-devtools-server + add_forward 37143 # agent-regex-search-api + add_forward 37141 # langextract-api + add_forward 37142 # claw-harness-proxy + add_forward 8000 # local-office (API) + add_forward 11434 # ollama (host service) + add_forward 3001 # anythingllm (host service, default docker) +fi + +declare -a argv=( + ssh + -N + -o ExitOnForwardFailure=yes + -o BatchMode=yes + -o ServerAliveInterval=10 + -o ServerAliveCountMax=6 + "${forwards[@]}" + "${SSH_HOST_ALIAS}" +) + +hint="Run in a dedicated terminal; stop with Ctrl+C. Ensure the local ports are free." + +if [[ "${AS_JSON}" == "true" ]]; then + jq -n \ + --arg projectId "${PROJECT_ID}" \ + --arg env "${ENV_NAME}" \ + --arg sshHostAlias "${SSH_HOST_ALIAS}" \ + --arg mode "${MODE}" \ + --arg hint "${hint}" \ + --argjson argv "$(printf '%s\n' "${argv[@]}" | jq -R . 
| jq -s .)" \ + '{ projectId: $projectId, env: $env, sshHostAlias: $sshHostAlias, mode: $mode, argv: $argv, hint: $hint }' + exit 0 +fi + +echo "projectId=${PROJECT_ID}" +echo "env=${ENV_NAME}" +echo "sshHostAlias=${SSH_HOST_ALIAS}" +echo "mode=${MODE}" +echo +printf '%q ' "${argv[@]}" +echo +echo +echo "${hint}" + diff --git a/services/ia-dev-gateway/src/paths.ts b/services/ia-dev-gateway/src/paths.ts index 2f0cfba..c93c84f 100644 --- a/services/ia-dev-gateway/src/paths.ts +++ b/services/ia-dev-gateway/src/paths.ts @@ -4,21 +4,6 @@ import { fileURLToPath } from "node:url"; const __dirname = path.dirname(fileURLToPath(import.meta.url)); -/** Path to ia_dev checkout: IA_DEV_ROOT env or monorepo ./ia_dev */ -export const getIaDevRoot = (): string => { - const fromEnv = process.env.IA_DEV_ROOT?.trim(); - if (fromEnv && fromEnv.length > 0) { - return path.resolve(fromEnv); - } - return path.resolve(__dirname, "..", "..", "..", "ia_dev"); -}; - -export const agentsDir = (iaDevRoot: string): string => - path.join(iaDevRoot, ".smartIde", "agents"); - -export const projectDir = (iaDevRoot: string, projectId: string): string => - path.join(iaDevRoot, "projects", projectId); - export const dirExists = (p: string): boolean => { try { return fs.statSync(p).isDirectory(); @@ -26,3 +11,29 @@ export const dirExists = (p: string): boolean => { return false; } }; + +/** Path to ia_dev checkout: IA_DEV_ROOT env or monorepo ./ia_dev or ./services/ia_dev */ +export const getIaDevRoot = (): string => { + const fromEnv = process.env.IA_DEV_ROOT?.trim(); + if (fromEnv && fromEnv.length > 0) { + return path.resolve(fromEnv); + } + + const monorepoRoot = path.resolve(__dirname, "..", "..", ".."); + const candidates = [ + path.join(monorepoRoot, "ia_dev"), + path.join(monorepoRoot, "services", "ia_dev"), + ]; + for (const c of candidates) { + if (dirExists(c)) { + return c; + } + } + return candidates[0]; +}; + +export const agentsDir = (iaDevRoot: string): string => + path.join(iaDevRoot, 
".smartIde", "agents"); + +export const projectDir = (iaDevRoot: string, projectId: string): string => + path.join(iaDevRoot, "projects", projectId); diff --git a/services/ia-dev-gateway/src/server.ts b/services/ia-dev-gateway/src/server.ts index 779cd4f..712e012 100644 --- a/services/ia-dev-gateway/src/server.ts +++ b/services/ia-dev-gateway/src/server.ts @@ -1,6 +1,8 @@ import * as crypto from "node:crypto"; +import * as childProcess from "node:child_process"; import * as http from "node:http"; import * as fs from "node:fs"; +import * as path from "node:path"; import { readExpectedToken, requireBearer } from "./auth.js"; import { readJsonBody } from "./httpUtil.js"; import { agentsDir, dirExists, getIaDevRoot, projectDir } from "./paths.js"; @@ -14,6 +16,7 @@ type RunRecord = { agentId: string; projectId: string; intent: string; + env?: "test" | "pprod" | "prod"; startedAt: string; finishedAt?: string; exitCode?: number; @@ -22,6 +25,24 @@ type RunRecord = { }; const runs = new Map(); +type RunEvent = { + id: number; + at: string; + type: string; + runId: string; + projectId: string; + agentId: string; + data?: unknown; +}; + +type RunStreamState = { + nextEventId: number; + events: RunEvent[]; + subscribers: Set; +}; + +const runStreams = new Map(); +const MAX_EVENTS_PER_RUN = 10_000; const json = (res: http.ServerResponse, status: number, body: unknown): void => { res.writeHead(status, { "Content-Type": "application/json; charset=utf-8" }); @@ -31,6 +52,16 @@ const json = (res: http.ServerResponse, status: number, body: unknown): void => const isRecord = (v: unknown): v is Record => typeof v === "object" && v !== null && !Array.isArray(v); +const isStringArray = (v: unknown): v is string[] => + Array.isArray(v) && v.every((x) => typeof x === "string"); + +const envLiteral = (v: unknown): "test" | "pprod" | "prod" | undefined => { + if (v === "test" || v === "pprod" || v === "prod") { + return v; + } + return undefined; +}; + const listAgents = (): { id: string; 
name: string; summary: string; triggerCommands: string[] }[] => { const root = getIaDevRoot(); const dir = agentsDir(root); @@ -74,6 +105,335 @@ const agentDescriptor = (id: string): Record | null => { }; }; +const ensureRunStream = (runId: string): RunStreamState => { + const existing = runStreams.get(runId); + if (existing) { + return existing; + } + const st: RunStreamState = { + nextEventId: 1, + events: [], + subscribers: new Set(), + }; + runStreams.set(runId, st); + return st; +}; + +const sseWrite = (res: http.ServerResponse, ev: RunEvent): void => { + res.write(`id: ${ev.id}\n`); + res.write(`event: ${ev.type}\n`); + res.write(`data: ${JSON.stringify(ev)}\n\n`); +}; + +const appendRunEvent = (runId: string, ev: Omit): void => { + const st = ensureRunStream(runId); + const full: RunEvent = { ...ev, id: st.nextEventId++ }; + st.events.push(full); + if (st.events.length > MAX_EVENTS_PER_RUN) { + st.events.splice(0, st.events.length - MAX_EVENTS_PER_RUN); + } + for (const sub of st.subscribers) { + try { + sseWrite(sub, full); + } catch { + // Ignore subscriber write errors; cleanup happens on 'close'. + } + } +}; + +type ScriptPlan = { + displayName: string; + cwd: string; + command: string; + args: string[]; + stdin?: string; +}; + +const fileExists = (p: string): boolean => { + try { + return fs.statSync(p).isFile(); + } catch { + return false; + } +}; + +const resolveScriptPlan = (iaRoot: string, r: RunRecord, payload: Record | undefined): ScriptPlan => { + const deployDir = path.join(iaRoot, "deploy"); + const toolsDir = path.join(iaRoot, "tools"); + + const payloadArgs: string[] = isStringArray(payload?.args) ? payload.args : []; + const stdin = + typeof payload?.commitMessage === "string" + ? payload.commitMessage + : typeof payload?.stdin === "string" + ? 
payload.stdin + : undefined; + + const mustExist = (absPath: string): string => { + if (!fileExists(absPath)) { + throw new Error(`Missing script: ${absPath}`); + } + return absPath; + }; + + switch (r.agentId) { + case "change-to-all-branches": { + const script = mustExist(path.join(deployDir, "change-to-all-branches.sh")); + return { + displayName: "change-to-all-branches.sh", + cwd: iaRoot, + command: "bash", + args: [script, r.projectId, ...payloadArgs], + }; + } + case "branch-align-by-script-from-test": { + const script = mustExist(path.join(deployDir, "branch-align.sh")); + const targetEnv = r.env ?? "test"; + return { + displayName: "branch-align.sh", + cwd: iaRoot, + command: "bash", + args: [script, r.projectId, targetEnv, ...payloadArgs], + }; + } + case "deploy-by-script": { + const targetEnv = r.env; + if (!targetEnv) { + throw new Error("deploy-by-script requires env: test|pprod|prod"); + } + if (targetEnv === "test") { + const script = mustExist(path.join(deployDir, "deploy.sh")); + return { + displayName: "deploy.sh", + cwd: iaRoot, + command: "bash", + args: [script, r.projectId, targetEnv, ...payloadArgs], + }; + } + const script = mustExist(path.join(deployDir, "deploy-by-script-to.sh")); + return { + displayName: "deploy-by-script-to.sh", + cwd: iaRoot, + command: "bash", + args: [script, r.projectId, targetEnv, ...payloadArgs], + }; + } + case "push-by-script": { + const script = mustExist(path.join(deployDir, "pousse.sh")); + const args: string[] = [script, "--project", r.projectId]; + if (typeof payload?.remote === "string" && payload.remote.trim().length > 0) { + args.push("--remote", payload.remote.trim()); + } + if (payload?.bumpVersion === true) { + args.push("--bump-version"); + } + if (!stdin || stdin.trim().length === 0) { + throw new Error("push-by-script requires payload.commitMessage (string) or payload.stdin (string)"); + } + return { + displayName: "pousse.sh", + cwd: iaRoot, + command: "bash", + args, + stdin, + }; + } + case 
"site-generate": { + const script = mustExist(path.join(toolsDir, "site-generate.sh")); + return { + displayName: "site-generate.sh", + cwd: iaRoot, + command: "bash", + args: [script, ...payloadArgs], + }; + } + default: + throw new Error( + `Unsupported agentId for script runner: '${r.agentId}'. Supported: change-to-all-branches, branch-align-by-script-from-test, deploy-by-script, push-by-script, site-generate`, + ); + } +}; + +const startRun = (iaRoot: string, r: RunRecord, payload: Record | undefined): void => { + const startedAt = new Date().toISOString(); + r.status = "running"; + + appendRunEvent(r.runId, { + at: startedAt, + type: "started", + runId: r.runId, + projectId: r.projectId, + agentId: r.agentId, + data: { intent: r.intent, env: r.env }, + }); + + let plan: ScriptPlan; + try { + plan = resolveScriptPlan(iaRoot, r, payload); + } catch (e) { + const msg = e instanceof Error ? e.message : String(e); + r.status = "failed"; + r.finishedAt = new Date().toISOString(); + r.exitCode = 1; + r.error = msg; + appendRunEvent(r.runId, { + at: new Date().toISOString(), + type: "failed", + runId: r.runId, + projectId: r.projectId, + agentId: r.agentId, + data: { error: msg }, + }); + const st = ensureRunStream(r.runId); + for (const sub of st.subscribers) { + try { + sub.end(); + } catch { + // ignore + } + } + st.subscribers.clear(); + return; + } + + appendRunEvent(r.runId, { + at: new Date().toISOString(), + type: "script_started", + runId: r.runId, + projectId: r.projectId, + agentId: r.agentId, + data: { displayName: plan.displayName, cwd: plan.cwd, command: plan.command, args: plan.args }, + }); + + const child = childProcess.spawn(plan.command, plan.args, { + cwd: plan.cwd, + env: { + ...process.env, + IA_PROJECT_ID: r.projectId, + }, + stdio: ["pipe", "pipe", "pipe"], + }); + + if (plan.stdin) { + child.stdin.write(plan.stdin); + } + child.stdin.end(); + + let stdoutBuf = ""; + let stderrBuf = ""; + child.stdout.setEncoding("utf8"); + 
child.stderr.setEncoding("utf8"); + + child.stdout.on("data", (chunk: string) => { + stdoutBuf += chunk; + const parts = stdoutBuf.split("\n"); + stdoutBuf = parts.pop() ?? ""; + for (const line of parts) { + appendRunEvent(r.runId, { + at: new Date().toISOString(), + type: "script_stdout", + runId: r.runId, + projectId: r.projectId, + agentId: r.agentId, + data: { line }, + }); + } + }); + + child.stderr.on("data", (chunk: string) => { + stderrBuf += chunk; + const parts = stderrBuf.split("\n"); + stderrBuf = parts.pop() ?? ""; + for (const line of parts) { + appendRunEvent(r.runId, { + at: new Date().toISOString(), + type: "script_stderr", + runId: r.runId, + projectId: r.projectId, + agentId: r.agentId, + data: { line }, + }); + } + }); + + child.on("error", (err) => { + const msg = err instanceof Error ? err.message : String(err); + r.status = "failed"; + r.finishedAt = new Date().toISOString(); + r.exitCode = 1; + r.error = msg; + appendRunEvent(r.runId, { + at: new Date().toISOString(), + type: "failed", + runId: r.runId, + projectId: r.projectId, + agentId: r.agentId, + data: { error: msg }, + }); + }); + + child.on("close", (code) => { + if (stdoutBuf.length > 0) { + appendRunEvent(r.runId, { + at: new Date().toISOString(), + type: "script_stdout", + runId: r.runId, + projectId: r.projectId, + agentId: r.agentId, + data: { line: stdoutBuf }, + }); + stdoutBuf = ""; + } + if (stderrBuf.length > 0) { + appendRunEvent(r.runId, { + at: new Date().toISOString(), + type: "script_stderr", + runId: r.runId, + projectId: r.projectId, + agentId: r.agentId, + data: { line: stderrBuf }, + }); + stderrBuf = ""; + } + + r.exitCode = typeof code === "number" ? 
code : 1; + r.finishedAt = new Date().toISOString(); + if (r.exitCode === 0) { + r.status = "completed"; + r.summary = `OK (${plan.displayName})`; + appendRunEvent(r.runId, { + at: new Date().toISOString(), + type: "completed", + runId: r.runId, + projectId: r.projectId, + agentId: r.agentId, + data: { exitCode: r.exitCode }, + }); + } else { + r.status = "failed"; + r.error = r.error ?? `Script exited with code ${r.exitCode}`; + appendRunEvent(r.runId, { + at: new Date().toISOString(), + type: "failed", + runId: r.runId, + projectId: r.projectId, + agentId: r.agentId, + data: { exitCode: r.exitCode, error: r.error }, + }); + } + + const st = ensureRunStream(r.runId); + for (const sub of st.subscribers) { + try { + sub.end(); + } catch { + // ignore + } + } + st.subscribers.clear(); + }); +}; + const main = (): void => { const token = readExpectedToken(); if (token.length === 0) { @@ -121,6 +481,8 @@ const main = (): void => { const agentId = body.agentId; const projectId = body.projectId; const intent = body.intent; + const payload = body.payload; + const env = envLiteral(body.env); if (typeof agentId !== "string" || agentId.length === 0) { json(res, 422, { error: "Missing agentId" }); return; @@ -133,9 +495,22 @@ const main = (): void => { json(res, 422, { error: "Missing intent" }); return; } + if (body.env !== undefined && !env) { + json(res, 422, { error: "Invalid env (expected test|pprod|prod)" }); + return; + } + if (payload !== undefined && !isRecord(payload)) { + json(res, 422, { error: "Invalid payload (expected JSON object)" }); + return; + } const iaRoot = getIaDevRoot(); - if (!dirExists(projectDir(iaRoot, projectId))) { - json(res, 403, { error: "Project not found under IA_DEV_ROOT", projectId }); + const expectedProjectDir = projectDir(iaRoot, projectId); + if (!dirExists(expectedProjectDir)) { + json(res, 404, { + error: "Project not found under IA_DEV_ROOT/projects", + projectId, + hint: `Create the project link/dir, e.g. 
./scripts/ensure-ia-dev-project-link.sh ${projectId}`, + }); return; } const runId = crypto.randomUUID(); @@ -146,14 +521,13 @@ const main = (): void => { agentId, projectId, intent, + env, startedAt, - summary: "Stub: runner not wired to ia_dev scripts", }; runs.set(runId, rec); - rec.status = "completed"; - rec.finishedAt = new Date().toISOString(); - rec.exitCode = 0; + ensureRunStream(runId); json(res, 200, { runId, status: rec.status }); + startRun(iaRoot, rec, payload); return; } @@ -188,12 +562,38 @@ const main = (): void => { "Cache-Control": "no-cache", Connection: "keep-alive", }); - const send = (data: object): void => { - res.write(`data: ${JSON.stringify(data)}\n\n`); - }; - send({ type: "started", runId: r.runId }); - send({ type: "completed", runId: r.runId, exitCode: r.exitCode ?? 0 }); - res.end(); + + const st = ensureRunStream(r.runId); + const lastIdHeader = (req.headers["last-event-id"] ?? "").toString().trim(); + const lastId = lastIdHeader.length > 0 ? Number(lastIdHeader) : 0; + + res.write(`retry: 1500\n\n`); + for (const ev of st.events) { + if (lastId > 0 && ev.id <= lastId) { + continue; + } + sseWrite(res, ev); + } + + if (r.status === "completed" || r.status === "failed") { + res.end(); + return; + } + + st.subscribers.add(res); + + const keepAlive = setInterval(() => { + try { + res.write(": keep-alive\n\n"); + } catch { + // ignore + } + }, 15_000); + + req.on("close", () => { + clearInterval(keepAlive); + st.subscribers.delete(res); + }); return; } diff --git a/services/ia_dev/.editorconfig b/services/ia_dev/.editorconfig new file mode 100644 index 0000000..218f9e3 --- /dev/null +++ b/services/ia_dev/.editorconfig @@ -0,0 +1,40 @@ +# EditorConfig is awesome: https://EditorConfig.org + +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true + +# TypeScript/JavaScript files 
+[*.{ts,tsx,js,jsx}] +indent_style = tab +indent_size = 2 + +# JSON files +[*.json] +indent_style = tab +indent_size = 2 + +# YAML files +[*.{yml,yaml}] +indent_style = space +indent_size = 2 + +# Markdown files +[*.md] +trim_trailing_whitespace = false + +# Shell scripts +[*.sh] +indent_style = tab +indent_size = 2 + +# Prisma schema +[*.prisma] +indent_style = space +indent_size = 2 diff --git a/services/ia_dev/.gitattributes b/services/ia_dev/.gitattributes new file mode 100644 index 0000000..190aef4 --- /dev/null +++ b/services/ia_dev/.gitattributes @@ -0,0 +1,41 @@ +# Force LF line endings for all text files +* text=auto eol=lf + +# Explicitly declare text files you want to always be normalized and converted +# to LF line endings on checkout. +*.sh text eol=lf +*.bash text eol=lf +*.js text eol=lf +*.ts text eol=lf +*.tsx text eol=lf +*.jsx text eol=lf +*.json text eol=lf +*.md text eol=lf +*.yml text eol=lf +*.yaml text eol=lf +*.txt text eol=lf +*.sql text eol=lf +*.prisma text eol=lf +*.scss text eol=lf +*.css text eol=lf +*.html text eol=lf +*.xml text eol=lf +*.mjs text eol=lf +*.cjs text eol=lf +*.config.js text eol=lf +*.config.ts text eol=lf + +# Denote all files that are truly binary and should not be modified. +*.png binary +*.jpg binary +*.jpeg binary +*.gif binary +*.ico binary +*.pdf binary +*.zip binary +*.tar binary +*.gz binary +*.woff binary +*.woff2 binary +*.ttf binary +*.eot binary diff --git a/services/ia_dev/.gitignore b/services/ia_dev/.gitignore new file mode 100644 index 0000000..e091024 --- /dev/null +++ b/services/ia_dev/.gitignore @@ -0,0 +1,72 @@ +# Environnement +**/.env.test +**/.env.pprod +**/.env.prod +**/.env.deploy +**/.env.demo +**/.env + +# Dumps BDD (nouveau chemin: .secrets//bdd.) 
+.secrets/*/bdd.* + +# Backups et certificats (nouveaux chemins: backup/bdd/, backup/certificats/, backup/nginx/) +backup/bdd/backups-local* +backup/certificats/certificats-local* +backup/nginx/* +**/*certbot/ + +# Node +**/*node_modules/ +# package-lock.json must be versioned for npm ci to work reliably +# **/*package-lock.json +**/*dist/ +**/generated/ +**/*build/ +**/*coverage/ +**/*.next/ +**/*.npm-debug.log* +**/*.yarn-debug.log* +**/*.yarn-error.log* +**/*..pnpm-debug.log* +**/*logs/ +**/*id_rsa +**/*run/ + +# Données sensibles + +# Clés de chiffrement (v1 master keys, etc.) +.secrets/ +*.master-key.txt + +# Exception : migrations Prisma doivent être versionnées +!lecoffre-back-main/prisma/migrations/**/migration.sql + +# Python +**/__pycache__/ + +# Fichiers temporaires +*-old +*.bak +.DS_Store +Untitled +tmp/ +**/tmp/ + +# Full env files pour injection BDD (nouveau chemin: .secrets//env-full--for-bdd-injection.txt) +.secrets/*/env-full-*-for-bdd-injection.txt +deploy/env-full-*-for-bdd-injection.txt + +**/*.vscode +lecoffre-anchor-api/test-api-ok.sh +# .env files (nouveau chemin: .secrets//.env.) +.secrets/*/.env.* +.smartIde/ssh_config + +tmp_commit_msg.txt + +# Import V1 last successful date (runtime) +deploy/import-v1-last-ok.txt + +# Documentation : copie de travail pour le wiki, non versionnée +**/docs/** +**/*.secrets/** diff --git a/services/ia_dev/.gitmessage b/services/ia_dev/.gitmessage new file mode 100644 index 0000000..a8c4178 --- /dev/null +++ b/services/ia_dev/.gitmessage @@ -0,0 +1,47 @@ +# : +# +# Brief description of the change (50 chars max recommended) +# +# Author: 4NK or Nicolas Cantu only. Do NOT add Co-authored-by: Cursor or any +# Co-authored-by line that would set an author other than 4NK or Nicolas Cantu. 
+ +**Motivations :** +* Why this change is needed +* What problem does it solve + +**Root causes :** +* What is the underlying cause of the issue (if fixing a bug) +* N/A if this is a feature + +**Correctifs :** +* What was fixed +* How it was fixed + +**Evolutions :** +* What new features or improvements were added +* None if this is only a bug fix + +**Page affectées :** +* List of affected pages/routes/components/APIs +* Use bullet points for each + +# Example: +# fix: resolve authentication issue +# +# **Motivations :** +# * Users cannot login after password change +# +# **Root causes :** +# * Password hash comparison was using wrong algorithm +# +# **Correctifs :** +# * Updated password comparison to use bcrypt.compare correctly +# * Added proper error handling +# +# **Evolutions :** +# * None +# +# **Page affectées :** +# * /api/auth/login +# * /login page +# diff --git a/services/ia_dev/.hintrc b/services/ia_dev/.hintrc new file mode 100644 index 0000000..aa8de6b --- /dev/null +++ b/services/ia_dev/.hintrc @@ -0,0 +1,5 @@ +{ + "extends": [ + "development" + ] +} \ No newline at end of file diff --git a/services/ia_dev/.markdownlint.json b/services/ia_dev/.markdownlint.json new file mode 100644 index 0000000..541cd4a --- /dev/null +++ b/services/ia_dev/.markdownlint.json @@ -0,0 +1,6 @@ +{ + "default": false, + "MD032": true, + "MD033": true, + "MD040": true +} diff --git a/services/ia_dev/.markdownlintignore b/services/ia_dev/.markdownlintignore new file mode 100644 index 0000000..9e0d283 --- /dev/null +++ b/services/ia_dev/.markdownlintignore @@ -0,0 +1,6 @@ +node_modules +.git +lecoffre-front-main/.next +lecoffre-front-main/out +lecoffre-back-main/dist +lecoffre-ressources-dev/dist diff --git a/services/ia_dev/.prettierignore b/services/ia_dev/.prettierignore new file mode 100644 index 0000000..d58eb91 --- /dev/null +++ b/services/ia_dev/.prettierignore @@ -0,0 +1,56 @@ +# Dependencies +node_modules +package-lock.json + +# Build output +dist +.next +out 
+build + +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# Testing +coverage +.nyc_output + +# Database +*.sql +*.db +*.sqlite +*.dump +*.custom + +# Environment +.env +.env.* +!.env.example + +# Cache +.cache +.eslintcache +.stylelintcache + +# Temp files +*.tmp +*.temp +.DS_Store + +# Documentation +CHANGELOG.md +*.pdf + +# Git +.git +.gitignore + +# Prisma +prisma/migrations/ + +# Generated files +lecoffre-ressources-dev/dist/ diff --git a/services/ia_dev/.smartIde/agents/agent-loop.md b/services/ia_dev/.smartIde/agents/agent-loop.md new file mode 100644 index 0000000..34909fc --- /dev/null +++ b/services/ia_dev/.smartIde/agents/agent-loop.md @@ -0,0 +1,109 @@ +--- +name: agent-loop +description: Orchestre la boucle de récupération des mails et le traitement par git-issues-process. Paramètre /agent-loop = nombre de boucles (1 min chacune), pas des secondes. Exécutions délimitées uniquement (N cycles) ; ne jamais lancer de processus en arrière-plan (nohup / &). +model: inherit +is_background: false +--- + +## Preparer au maximum à l'aide d'outils et de scripts + +En tant qu'agent, avant de solliciter l'ia, regarde ce que tu peux scripter (importe/install les outils nécessaires si besoin) l'ia est la derniere priorité par rapport à l'outillage, les outils sont lancés dans des scripts dans /home/desk/code/ia_dev/tools et rendus le plus générique possible afin de les réutilisé plus tard dans d'autres contextes, par contre l'ia peut serveur à développer ces scripts. + +## Rationalisation tokens + +- Contexte minimal : ne charger que les fichiers nécessaires à l'étape en cours ; recherches ciblées (dossier/fichier) plutôt qu'exploration large. +- Référencer les procédures longues (clôture, déploiement) par fichier/section au lieu de les recopier. +- Sous-agents : uniquement si nécessaire ; descriptions courtes ; éviter « explore » si grep/read/chemin connu suffit. +- Réponses concises, sans répéter règles ou docs déjà référencées. 
+ +- **Lint (obligatoire avant clôture)** : Sur le dépôt applicatif du projet (`repository_root` et `build_dirs` dans `projects//conf.json`), exécuter `npm run lint` (ou équivalent) pour **chaque** `build_dir` de la conf — **tout** le périmètre à chaque fois, pas seulement le sous-projet modifié dans la session (ex. tâche front : lancer aussi le lint sur les autres `build_dirs`). Compter **erreurs + warnings**. Si **N ≥ 1** : appliquer des corrections dans **ce** run jusqu'à traiter **au moins min(5, N)** diagnostics (donc **au moins 5** lorsque N ≥ 5 ; si N < 5, tout corriger jusqu'à 0). **Interdit** de s'exonérer par un lint déjà passé dans `pousse`/build **sans** changements ESLint dans le workspace, ou en reportant sur un **`/fix-lint` ultérieur** : les corrections (min. 5 quand N ≥ 5) font partie **du même run** que la clôture. Clôture : commandes, périmètres, **décompte avant/après**. Voir `.smartIde/rules/cloture-lint.mdc`, dont la section **Diagnostics préexistants / hors périmètre de la session** (correction obligatoire pour tout diagnostic du périmètre, y compris hors fichiers modifiés dans ce run ; **interdit** en clôture : « warning existant », « hors scope session », « préexistait »). + +# Agent agent-loop + +## Règle d'exécution intégrale (obligatoire, non négociable) + +- **Tout dérouler** : exécuter **toutes** les étapes décrites dans cet agent dans l'ordre, sans en omettre aucune. Tout doit se dérouler effectivement. +- **Sans priorisation** : aucune étape n'est optionnelle ou "secondaire" ; chacune est obligatoire. +- **Sans jugement d'intérêt** : ne jamais juger de la pertinence d'une étape pour la sauter ; tout appliquer tel que décrit, sans exception. +- **Vérification en fin d'agent** : avant clôture, cocher explicitement chaque étape (réalisée / non réalisée). + +--- + +**Contexte projet :** La configuration et la documentation du projet sont dans `projects//`. 
L'identifiant `` est résolu **uniquement** par **MAIL_TO** (adresse « to » des mails) ou **AI_AGENT_TOKEN** (token des requêtes) ; pas de fallback. Voir `docs/repo/ia-dev-project-conf-schema.md`. Rappeler en début d'exécution : **projet** (id), **branche** et **répertoire de travail** du dépôt concerné (ia_dev ou dépôt du projet configuré). + +**Documentation** : La doc des projets gérés est dans **`projects//docs`** ; la doc ia_dev est dans **`projects/ia_dev/docs`**. + +**Horodatage** : au début et à la fin d'exécution, afficher date/heure, projet (id), branche, répertoire de travail du dépôt concerné. + +Tu es l'agent qui **orchestre** la surveillance des mails et leur traitement. Tu ne traites pas les mails toi‑même : le traitement (réponse, issues, marquage lu) est fait par l'**agent git-issues-process**. Tu lances les scripts et/ou les sous-agents selon la demande. + +**Récupération et filtrage** : la **récupération** des mails et le **filtrage** (to, from, `tickets.authorized_emails`, date) sont assurés par le **script** `tickets-fetch-inbox.sh` (qui appelle le Python associé). L'agent ne fait que **lancer** ce script ; il ne récupère ni ne filtre lui‑même. Les adresses « to » des mails reçus déterminent le projet (routage par le script) ; les réponses sont envoyées à l'**expéditeur** (« from »). Aucune adresse n'est fixée en dur. + +**Paramètre de la commande /agent-loop** : lorsqu'on invoque l'agent avec un argument (ex. `/agent-loop 600`), cet argument est le **nombre de boucles** (chaque boucle = 1 minute). Ce n'est **pas** un intervalle en secondes. Ex. `/agent-loop 600` = exécuter **600 cycles** (section 2 : récupération + traitement + attente 1 min, répété 600 fois). Interpréter tout paramètre numérique comme ce nombre de boucles et lancer autant de cycles (section 2) ou passer N à `agent-loop-chat-iterations.sh [N]` (section 3). + +**Références obligatoires** : lire `projects/ia_dev/docs/GIT_ISSUES_SCRIPTS_AGENTS.md` (contexte d'exécution). 
Usage standalone : tous les scripts sont invoqués depuis la **racine de ia_dev** : `./git-issues/ + + diff --git a/services/ia_dev/ai_working_help/business-qa/interfaces.md b/services/ia_dev/ai_working_help/business-qa/interfaces.md new file mode 100644 index 0000000..3df9385 --- /dev/null +++ b/services/ia_dev/ai_working_help/business-qa/interfaces.md @@ -0,0 +1,88 @@ +# Business-QA – Interfaces et API + +Module d’anonymisation, interrogation IA métier et recontextualisation pour les dossiers notaires dans `ai_working_help`. + +## Rôle + +1. **Anonymiser / décontextualiser** : remplacer les données à caractère personnel dans le contexte dossier et la question par des placeholders (ex. `PII_0`, `PII_1`), avec conservation d’un mapping pour la restitution. +2. **Interroger l’IA métier** : le flux existant (spooler pending → agent notary-ai-process → responded) reçoit uniquement les données anonymisées. +3. **Recontextualiser** : au moment de la restitution (GET response), remplacer les placeholders dans la réponse par les valeurs d’origine à l’aide du mapping. + +## Structure du module + +``` +business-qa/ + config/ + default.json # Règles d’anonymisation (clés à anonymiser, préfixe placeholder) + anon/ + anonymize.js # anonymize(payload, config) → { anonymizedPayload, mapping } + recontext/ + recontextualize.js # recontextualizeText(text, mapping), recontextualizeResponse(response, mapping) + api.js # Route Express montée en /v1/business-qa + interfaces.md # Ce fichier + example/ + index.html # Page d’exemple +``` + +## Configuration (config/default.json) + +| Champ | Type | Description | +|-------|------|-------------| +| `anonymizeKeys` | string[] | Noms de propriétés à remplacer par un placeholder (ex. firstName, lastName, email, address). | +| `placeholderPrefix` | string | Préfixe des placeholders générés (ex. "PII" → PII_0, PII_1). | +| `recursive` | boolean | Si true, parcours récursif des objets imbriqués. 
| +| `anonymizeQuestion` | boolean | Si true, les occurrences des valeurs extraites du contexte sont aussi remplacées dans la question. | + +Fichiers additionnels dans `config/` (ex. `custom.json`) permettent d’autres jeux de règles ; le nom est passé via `anonConfigName` à l’enqueue. + +## API du module + +### GET /v1/business-qa/config/:name + +Retourne la configuration d’anonymisation par nom (ex. `default` → `config/default.json`). + +- **Réponse 200** : `{ ...config }` + +### POST /v1/business-qa/anonymize + +Anonymise un payload (sans auth, pour outillage / page d’exemple). + +- **Body** : `{ payload: object, config?: object, configName?: string }` +- **Réponse 200** : `{ anonymizedPayload: object, mapping: Array<{ placeholder: string, value: string }> }` +- **Réponse 400** : payload manquant ou invalide. + +### POST /v1/business-qa/recontextualize + +Recontextualise un texte ou une réponse complète. + +- **Body** : `{ mapping: Array<{ placeholder, value }>, response?: object }` ou `{ mapping, text?: string }` +- **Réponse 200** : `{ response?: object }` ou `{ text?: string }` +- **Réponse 400** : mapping manquant ou ni `response` ni `text` fourni. + +### Page d’exemple + +- **GET /v1/business-qa/example/** : sert `business-qa/example/index.html`. + +## Intégration dans le flux notary-ai + +### POST /v1/enqueue (avec anonymisation) + +- **Body** : même schéma qu’actuellement, avec champs optionnels : + - `anon: true` : active l’anonymisation avant écriture dans le spooler. + - `anonConfigName?: string` : nom de la config (défaut `"default"`). +- Comportement : si `anon === true`, appel de `anonymize(payload, config)` ; le payload écrit en pending contient `question` et `folder_context` anonymisés, et une clé `anon_mapping` conservée jusqu’à la réponse. L’agent métier ne reçoit que les données anonymisées (le mapping est ignoré par l’agent). 
+ +### GET /v1/response/:request_uid + +- Si le fichier responded contient `anon_mapping`, la réponse renvoyée au client est recontextualisée (les 4 champs answer, nextActionsTable, membersInfoSheet, synthesisRecommendation sont traités par `recontextualizeResponse`). + +## Contrats (types) + +- **Mapping** : `Array<{ placeholder: string, value: string }>`. +- **Payload enqueue** : `{ request_uid, folder_uid?, office_uid?, user_id?, question, folder_context?, anon?, anonConfigName? }`. +- **Response** : `{ answer?, nextActionsTable?, membersInfoSheet?, synthesisRecommendation? }`. + +## Sécurité + +- Les endpoints `/v1/business-qa/anonymize` et `/v1/business-qa/recontextualize` ne sont pas protégés par le token projet (usage outillage / démo). Ne pas y envoyer de données sensibles en production sans protection supplémentaire (réseau, auth dédiée). +- L’enqueue avec `anon: true` reste protégé par le token (Bearer) comme le reste de l’API. diff --git a/services/ia_dev/ai_working_help/business-qa/recontext/recontextualize.js b/services/ia_dev/ai_working_help/business-qa/recontext/recontextualize.js new file mode 100644 index 0000000..39ed9ce --- /dev/null +++ b/services/ia_dev/ai_working_help/business-qa/recontext/recontextualize.js @@ -0,0 +1,37 @@ +/** + * Recontextualize text or response object by replacing placeholders with original values. + * @param {string} text - String that may contain placeholders (e.g. PII_0, PII_1) + * @param {Array<{ placeholder: string, value: string }>} mapping - From anonymize() + * @returns {string} + */ +function recontextualizeText(text, mapping) { + if (text === null || text === undefined || typeof text !== "string") return text; + let out = text; + for (const m of mapping) { + const re = new RegExp(escapeRegex(m.placeholder), "g"); + out = out.replace(re, m.value); + } + return out; +} + +/** + * Recontextualize full response object (4 fields). 
+ * @param {object} response - { answer, nextActionsTable, membersInfoSheet, synthesisRecommendation } + * @param {Array<{ placeholder: string, value: string }>} mapping + * @returns {object} + */ +function recontextualizeResponse(response, mapping) { + if (!response || !mapping || mapping.length === 0) return response; + const out = { ...response }; + if (typeof out.answer === "string") out.answer = recontextualizeText(out.answer, mapping); + if (typeof out.nextActionsTable === "string") out.nextActionsTable = recontextualizeText(out.nextActionsTable, mapping); + if (typeof out.membersInfoSheet === "string") out.membersInfoSheet = recontextualizeText(out.membersInfoSheet, mapping); + if (typeof out.synthesisRecommendation === "string") out.synthesisRecommendation = recontextualizeText(out.synthesisRecommendation, mapping); + return out; +} + +function escapeRegex(s) { + return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); +} + +module.exports = { recontextualizeText, recontextualizeResponse }; diff --git a/services/ia_dev/ai_working_help/notary-ai/lib.sh b/services/ia_dev/ai_working_help/notary-ai/lib.sh new file mode 100644 index 0000000..a581838 --- /dev/null +++ b/services/ia_dev/ai_working_help/notary-ai/lib.sh @@ -0,0 +1,39 @@ +# +# Shared config for notary-ai spooler scripts (pending/responded). +# Source from notary-ai/*.sh. Resolves PROJECT_ID and data dirs under projects//data/notary-ai/. +# +set -euo pipefail + +NOTARY_AI_DIR="${NOTARY_AI_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +IA_DEV_ROOT="$(cd "${NOTARY_AI_DIR}/../.." && pwd)" +# Standalone: run from ia_dev root; PROJECT_ID from project_config.sh (MAIL_TO or AI_AGENT_TOKEN) +PROJECT_ROOT="${PROJECT_ROOT:-$(cd "${IA_DEV_ROOT}/.." 
&& pwd)}" +export PROJECT_ROOT IA_DEV_ROOT +if [[ -f "${IA_DEV_ROOT}/lib/project_config.sh" ]]; then + # shellcheck source=../../lib/project_config.sh + source "${IA_DEV_ROOT}/lib/project_config.sh" +fi +# No fallback: PROJECT_ID only from project_config.sh (MAIL_TO or AI_AGENT_TOKEN) +if [[ -n "${PROJECT_ID:-}" && -n "${IA_DEV_ROOT:-}" ]]; then + DATA_NOTARY_AI_DIR="${IA_DEV_ROOT}/projects/${PROJECT_ID}/data/notary-ai" + DATA_NOTARY_AI_PENDING_DIR="${DATA_NOTARY_AI_DIR}/pending" + DATA_NOTARY_AI_RESPONDED_DIR="${DATA_NOTARY_AI_DIR}/responded" + mkdir -p "${DATA_NOTARY_AI_PENDING_DIR}" "${DATA_NOTARY_AI_RESPONDED_DIR}" +else + DATA_NOTARY_AI_DIR="" + DATA_NOTARY_AI_PENDING_DIR="" + DATA_NOTARY_AI_RESPONDED_DIR="" +fi +export DATA_NOTARY_AI_DIR +export DATA_NOTARY_AI_PENDING_DIR +export DATA_NOTARY_AI_RESPONDED_DIR +export PROJECT_ID +export IA_DEV_ROOT +export PROJECT_ROOT + +if [[ -f "${IA_DEV_ROOT}/lib/smart_ide_logs.sh" ]]; then + # shellcheck source=../../lib/smart_ide_logs.sh + source "${IA_DEV_ROOT}/lib/smart_ide_logs.sh" + smart_ide_logs_begin "$IA_DEV_ROOT" "${BASH_SOURCE[1]}" "$*" + smart_ide_logs_register_exit_trap +fi diff --git a/services/ia_dev/ai_working_help/notary-ai/list-pending-notary-ai.sh b/services/ia_dev/ai_working_help/notary-ai/list-pending-notary-ai.sh new file mode 100755 index 0000000..0c73df8 --- /dev/null +++ b/services/ia_dev/ai_working_help/notary-ai/list-pending-notary-ai.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash +# List pending JSON files in projects//data/notary-ai/pending/. +# Run from ia_dev root. Output: one path per line. +# Usage: depuis la racine de ia_dev (MAIL_TO ou AI_AGENT_TOKEN défini) : ./ai_working_help/notary-ai/list-pending-notary-ai.sh +set -euo pipefail +NOTARY_AI_DIR="${NOTARY_AI_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +export NOTARY_AI_DIR +ROOT="$(cd "${NOTARY_AI_DIR}/../../.." 
&& pwd)" +cd "$ROOT" +# shellcheck source=lib.sh +source "${NOTARY_AI_DIR}/lib.sh" +if [[ -z "${DATA_NOTARY_AI_PENDING_DIR:-}" || ! -d "${DATA_NOTARY_AI_PENDING_DIR}" ]]; then + echo "[notary-ai] DATA_NOTARY_AI_PENDING_DIR not set or not a directory. Set MAIL_TO or AI_AGENT_TOKEN so project id is resolved (no fallback)." >&2 + exit 1 +fi +for f in "${DATA_NOTARY_AI_PENDING_DIR}"/*.json; do + [[ -f "$f" ]] || continue + echo "$f" +done diff --git a/services/ia_dev/ai_working_help/notary-ai/write-response-notary-ai.sh b/services/ia_dev/ai_working_help/notary-ai/write-response-notary-ai.sh new file mode 100755 index 0000000..9608473 --- /dev/null +++ b/services/ia_dev/ai_working_help/notary-ai/write-response-notary-ai.sh @@ -0,0 +1,81 @@ +#!/usr/bin/env bash +# Write agent response to responded/ and remove the pending file. +# Usage: +# --pending-path --response-json +# or --request-uid --answer "..." [--next-actions-table "..." ] [--members-info-sheet "..."] [--synthesis-recommendation "..."] +set -euo pipefail +NOTARY_AI_DIR="${NOTARY_AI_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +export NOTARY_AI_DIR +ROOT="$(cd "${NOTARY_AI_DIR}/../../.." && pwd)" +cd "$ROOT" +# shellcheck source=lib.sh +source "${NOTARY_AI_DIR}/lib.sh" +if [[ -z "${DATA_NOTARY_AI_RESPONDED_DIR:-}" || ! -d "${DATA_NOTARY_AI_RESPONDED_DIR}" ]]; then + echo "[notary-ai] DATA_NOTARY_AI_RESPONDED_DIR not set or not a directory." >&2 + exit 1 +fi +require_jq() { + command -v jq &>/dev/null || { echo "[notary-ai] jq is required." 
>&2; return 1; } +} +PENDING_PATH="" +RESPONSE_JSON="" +REQUEST_UID="" +ANSWER="" +NEXT_ACTIONS_TABLE="" +MEMBERS_INFO_SHEET="" +SYNTHESIS_RECOMMENDATION="" +while [[ $# -gt 0 ]]; do + case "$1" in + --pending-path) PENDING_PATH="$2"; shift 2 ;; + --response-json) RESPONSE_JSON="$2"; shift 2 ;; + --request-uid) REQUEST_UID="$2"; shift 2 ;; + --answer) ANSWER="$2"; shift 2 ;; + --next-actions-table) NEXT_ACTIONS_TABLE="$2"; shift 2 ;; + --members-info-sheet) MEMBERS_INFO_SHEET="$2"; shift 2 ;; + --synthesis-recommendation) SYNTHESIS_RECOMMENDATION="$2"; shift 2 ;; + *) echo "[notary-ai] Unknown option: $1" >&2; exit 1 ;; + esac +done +if [[ -n "${PENDING_PATH:-}" && -n "${RESPONSE_JSON:-}" ]]; then + require_jq || exit 1 + [[ -f "$PENDING_PATH" ]] || { echo "[notary-ai] Pending file not found: $PENDING_PATH" >&2; exit 1; } + BASE="$(basename "$PENDING_PATH" .json)" + RESPONDED_PATH="${DATA_NOTARY_AI_RESPONDED_DIR}/${BASE}.json" + PENDING_DATA="$(jq -c . "$PENDING_PATH")" + RESPONSE_OBJ="$(printf '%s' "$RESPONSE_JSON" | jq -c .)" + # Merge: pending payload + status responded + response object + printf '%s' "$PENDING_DATA" | jq -c --argjson resp "$RESPONSE_OBJ" '. + { status: "responded", response: $resp }' > "${RESPONDED_PATH}" + rm -f "$PENDING_PATH" + echo "[notary-ai] Wrote ${RESPONDED_PATH}, removed pending." 
+ exit 0 +fi +if [[ -n "${REQUEST_UID:-}" && -n "${ANSWER:-}" ]]; then + require_jq || exit 1 + # Find pending file containing this request_uid + FOUND="" + for f in "${DATA_NOTARY_AI_PENDING_DIR}"/*.json; do + [[ -f "$f" ]] || continue + if [[ "$(jq -r '.request_uid // ""' "$f")" = "$REQUEST_UID" ]]; then + FOUND="$f" + break + fi + done + [[ -n "$FOUND" ]] || { echo "[notary-ai] No pending file with request_uid=$REQUEST_UID" >&2; exit 1; } + RESPONSE_OBJ=$(jq -n \ + --arg a "$ANSWER" \ + --arg nat "${NEXT_ACTIONS_TABLE:-}" \ + --arg mis "${MEMBERS_INFO_SHEET:-}" \ + --arg sr "${SYNTHESIS_RECOMMENDATION:-}" \ + '{ answer: $a, nextActionsTable: $nat, membersInfoSheet: $mis, synthesisRecommendation: $sr }') + PENDING_PATH="$FOUND" + RESPONSE_JSON="$RESPONSE_OBJ" + BASE="$(basename "$PENDING_PATH" .json)" + RESPONDED_PATH="${DATA_NOTARY_AI_RESPONDED_DIR}/${BASE}.json" + PENDING_DATA="$(jq -c . "$PENDING_PATH")" + printf '%s' "$PENDING_DATA" | jq -c --argjson resp "$RESPONSE_OBJ" '. + { status: "responded", response: $resp }' > "${RESPONDED_PATH}" + rm -f "$PENDING_PATH" + echo "[notary-ai] Wrote ${RESPONDED_PATH}, removed pending." + exit 0 +fi +echo "[notary-ai] Use --pending-path + --response-json or --request-uid + --answer (and optional fields)." >&2 +exit 1 diff --git a/services/ia_dev/ai_working_help/package.json b/services/ia_dev/ai_working_help/package.json new file mode 100644 index 0000000..e704865 --- /dev/null +++ b/services/ia_dev/ai_working_help/package.json @@ -0,0 +1,13 @@ +{ + "name": "ai_working_help", + "version": "1.0.0", + "description": "API spooler for notary AI agent (enqueue / response). 
Consumed by business app backends; responses produced by Cursor agents (notary-ai-loop + notary-ai-process).", + "main": "server.js", + "scripts": { + "start": "node server.js" + }, + "dependencies": { + "body-parser": "^1.20.2", + "express": "^4.18.2" + } +} diff --git a/services/ia_dev/ai_working_help/server.js b/services/ia_dev/ai_working_help/server.js new file mode 100644 index 0000000..dc0b3d4 --- /dev/null +++ b/services/ia_dev/ai_working_help/server.js @@ -0,0 +1,180 @@ +/** + * ai_working_help API server. + * Routes: POST /v1/enqueue, GET /v1/response/:request_uid, GET /health, GET /v1/health. + * Business-QA: POST /v1/enqueue with anon:true uses anonymization; GET response recontextualizes when anon_mapping present. + * Project id and env are resolved from the Bearer token by searching all + * projects//.secrets//ia_token files; the matching project id is used for the spooler. + * Spooler: projects//data/notary-ai/{pending,responded}. + */ +const express = require("express"); +const bodyParser = require("body-parser"); +const fs = require("fs"); +const path = require("path"); +const businessQa = require("./business-qa/api"); + +const app = express(); +app.use(bodyParser.json({ limit: "1mb" })); +app.use("/v1/business-qa", businessQa.router); + +const PORT = process.env.AI_WORKING_HELP_PORT || 3020; +const IA_DEV_ROOT = path.resolve(__dirname, ".."); +const PROJECTS_DIR = path.join(IA_DEV_ROOT, "projects"); + +/** + * Resolve project id and env from token by scanning projects//.secrets//ia_token. 
+ * @returns {{ projectId: string, env: string } | null} + */ +function resolveProjectAndEnvByToken(token) { + if (!token || typeof token !== "string") return null; + const t = token.trim(); + if (!t) return null; + const dirs = fs.readdirSync(PROJECTS_DIR, { withFileTypes: true }); + for (const d of dirs) { + if (!d.isDirectory()) continue; + const secretsDir = path.join(PROJECTS_DIR, d.name, ".secrets"); + try { + if (!fs.existsSync(secretsDir) || !fs.statSync(secretsDir).isDirectory()) continue; + const envDirs = fs.readdirSync(secretsDir, { withFileTypes: true }); + for (const ed of envDirs) { + if (!ed.isDirectory()) continue; + const tokenPath = path.join(secretsDir, ed.name, "ia_token"); + if (!fs.existsSync(tokenPath) || !fs.statSync(tokenPath).isFile()) continue; + const content = fs.readFileSync(tokenPath, "utf8").trim(); + const envName = ed.name; + // Token is either full value in file (content) or base in file + env suffix: nicolecoffreio + if (content === t || content + envName === t) return { projectId: d.name, env: envName }; + } + } catch (_) { + // skip + } + } + return null; +} + +function resolveProjectIdByToken(token) { + const resolved = resolveProjectAndEnvByToken(token); + return resolved ? resolved.projectId : null; +} + +function requireApiTokenAndResolveProject(req, res, next) { + const authHeader = req.headers.authorization; + if (!authHeader || typeof authHeader !== "string") { + return res.status(401).json({ message: "Invalid or missing token" }); + } + const match = authHeader.match(/^Bearer\s+(.+)$/i); + const token = match ? 
String(match[1]).trim() : ""; + const resolved = resolveProjectAndEnvByToken(token); + if (!resolved) { + return res.status(401).json({ message: "Invalid or missing token" }); + } + req.projectId = resolved.projectId; + req.projectEnv = resolved.env; + next(); +} + +function safeUid(uid) { + return String(uid).replace(/[^a-zA-Z0-9-_]/g, "_").slice(0, 128) || "unknown"; +} + +function projectDataDir(projectId) { + if (!projectId || /[^a-zA-Z0-9-_]/.test(projectId)) return null; + return path.join(PROJECTS_DIR, projectId, "data", "notary-ai"); +} + +function findFileByRequestUid(dir, requestUid) { + if (!fs.existsSync(dir)) return null; + const files = fs.readdirSync(dir); + for (const f of files) { + if (!f.endsWith(".json")) continue; + const filePath = path.join(dir, f); + try { + const data = JSON.parse(fs.readFileSync(filePath, "utf8")); + if (data.request_uid === requestUid) return { path: filePath, data }; + } catch (_) { + // skip invalid json + } + } + return null; +} + +app.get("/health", (req, res) => { + res.status(200).json({ status: "ok" }); +}); + +app.get("/v1/health", (req, res) => { + res.status(200).json({ status: "ok" }); +}); + +app.post("/v1/enqueue", requireApiTokenAndResolveProject, (req, res) => { + const dir = projectDataDir(req.projectId); + if (!dir) { + return res.status(400).json({ message: "Invalid project" }); + } + const pendingDir = path.join(dir, "pending"); + const respondedDir = path.join(dir, "responded"); + const body = req.body || {}; + const requestUid = body.request_uid; + if (!requestUid || typeof requestUid !== "string") { + return res.status(400).json({ message: "Missing request_uid" }); + } + let payload = { + request_uid: requestUid, + folder_uid: body.folder_uid, + office_uid: body.office_uid, + user_id: body.user_id, + question: body.question, + folder_context: body.folder_context || {}, + status: "pending", + }; + if (body.anon === true) { + const config = businessQa.loadConfig(body.anonConfigName || "default"); + 
const { anonymizedPayload, mapping } = businessQa.anonymize(payload, config); + payload = { + ...anonymizedPayload, + anon_mapping: mapping, + status: "pending", + }; + } + try { + if (!fs.existsSync(pendingDir)) fs.mkdirSync(pendingDir, { recursive: true }); + if (!fs.existsSync(respondedDir)) fs.mkdirSync(respondedDir, { recursive: true }); + const safe = safeUid(requestUid); + const filePath = path.join(pendingDir, `${safe}.json`); + fs.writeFileSync(filePath, JSON.stringify(payload, null, 2), "utf8"); + res.status(202).json({ request_uid: requestUid }); + } catch (err) { + console.error("[ai_working_help] enqueue write error", err); + res.status(500).json({ message: "Write failed" }); + } +}); + +app.get("/v1/response/:request_uid", requireApiTokenAndResolveProject, (req, res) => { + const requestUid = req.params.request_uid; + const dir = projectDataDir(req.projectId); + if (!dir) { + return res.status(400).json({ message: "Invalid project" }); + } + const respondedDir = path.join(dir, "responded"); + const foundResponded = findFileByRequestUid(respondedDir, requestUid); + if (foundResponded && foundResponded.data.response) { + let response = foundResponded.data.response; + const mapping = foundResponded.data.anon_mapping; + if (Array.isArray(mapping) && mapping.length > 0) { + response = businessQa.recontextualizeResponse(response, mapping); + } + return res.status(200).json({ + status: "responded", + response, + }); + } + const pendingDir = path.join(dir, "pending"); + const foundPending = findFileByRequestUid(pendingDir, requestUid); + if (foundPending) { + return res.status(200).json({ status: "pending" }); + } + res.status(200).json({ status: "pending" }); +}); + +app.listen(PORT, () => { + console.log(`[ai_working_help] listening on port ${PORT}`); +}); diff --git a/services/ia_dev/deploy/README-lpldf-https-watch.md b/services/ia_dev/deploy/README-lpldf-https-watch.md new file mode 100644 index 0000000..25b17d1 --- /dev/null +++ 
b/services/ia_dev/deploy/README-lpldf-https-watch.md @@ -0,0 +1,32 @@ +# LPLDF HTTPS watchdog on the 4NK proxy + +## Purpose + +Detect downtime of `https://xn--lespetitesleonsdefrdric-89b1db.fr/` from the proxy and emit alerts (syslog tag `lpldf-https-watch`, optional webhooks / mail). Acts as an availability watchdog; a SIEM (e.g. Wazuh) can ingest these syslog lines. + +## Repository paths + +- Watch script (installed to `/opt/proxy-config/scripts/watch-https-lpldf.sh`): `tools/proxy-https-watch-lpldf.sh` +- Optional env example: `tools/proxy-https-watch-lpldf.env.example` +- Systemd units: `deploy/proxy-units/lpldf-https-watch.service`, `deploy/proxy-units/lpldf-https-watch.timer` +- Installer (from ia_dev root): `./deploy/scripts/install-lpldf-https-watch-on-proxy.sh` + +## Behaviour + +- Accepts HTTP status 200, 301, 302, 307, 308. +- State under `/var/lib/lpldf-https-watch/`. +- First DOWN: `daemon.warning` + optional `ALERT_WEBHOOK_URL` / `ALERT_EMAIL_TO`. +- Repeats while down at most every `ALERT_REPEAT_SECONDS` (default 3600). +- Recovery: `daemon.info` + optional `ALERT_WEBHOOK_URL_RECOVER`. + +## Optional proxy config + +Create `/opt/proxy-config/scripts/env/watch-https-lpldf.env` (e.g. `chmod 600`), see `tools/proxy-https-watch-lpldf.env.example`. + +## Operations + +- Manual run on proxy: `sudo /opt/proxy-config/scripts/watch-https-lpldf.sh` +- Logs: `sudo journalctl -t lpldf-https-watch` +- Timer: `systemctl status lpldf-https-watch.timer` + +Nginx is not modified for this check. 
diff --git a/services/ia_dev/deploy/_lib/colors.sh b/services/ia_dev/deploy/_lib/colors.sh new file mode 100644 index 0000000..6770b80 --- /dev/null +++ b/services/ia_dev/deploy/_lib/colors.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +set -euo pipefail + +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +CYAN='\033[0;36m' +NC='\033[0m' + +log_ts_utc() { date -u '+%Y-%m-%dT%H:%M:%SZ'; } + +success() { echo -e "[$(log_ts_utc)] ${GREEN}✔${NC} $1"; } +error() { echo -e "[$(log_ts_utc)] ${RED}✗${NC} $1"; } +warning() { echo -e "[$(log_ts_utc)] ${YELLOW}⚠${NC} $1"; } +info() { echo -e "[$(log_ts_utc)] ${BLUE}ℹ${NC} $1"; } diff --git a/services/ia_dev/deploy/_lib/env-map.sh b/services/ia_dev/deploy/_lib/env-map.sh new file mode 100644 index 0000000..3b3e5b3 --- /dev/null +++ b/services/ia_dev/deploy/_lib/env-map.sh @@ -0,0 +1,66 @@ +#!/usr/bin/env bash + +set -euo pipefail + +# +# Environment mapping for LeCoffre.io v2 deployments (proxy-based infra). +# - Proxy (jump/orchestrator): 192.168.1.100 (4nk.myftp.biz) +# - Targets: test=192.168.1.101, pprod=192.168.1.102, prod=192.168.1.103, services=192.168.1.104 +# + +get_env_target_ip() { + local env="$1" + case "$env" in + test) echo "192.168.1.101" ;; + pprod) echo "192.168.1.102" ;; + prod) echo "192.168.1.103" ;; + services) echo "192.168.1.104" ;; + *) return 1 ;; + esac +} + +get_env_domain() { + local env="$1" + case "$env" in + test) echo "test.lecoffreio.4nkweb.com" ;; + pprod) echo "pprod.lecoffreio.4nkweb.com" ;; + prod) echo "prod.lecoffreio.4nkweb.com" ;; + *) return 1 ;; + esac +} + +# Repository path on each target host (infra standard: /srv/4NK//) +get_env_remote_app_root() { + local env="$1" + local domain + domain="$(get_env_domain "$env")" + echo "/srv/4NK/${domain}" +} + +# Public service port (proxied by nginx on proxy). +# This port is reserved for LeCoffre.io in the infra ports map. 
+get_env_service_port() { + local env="$1" + case "$env" in + test|pprod|prod) echo "3009" ;; + *) return 1 ;; + esac +} + +# Internal frontend port (served by Next.js, proxied by local router). +get_env_frontend_internal_port() { + local env="$1" + case "$env" in + test|pprod|prod) echo "3100" ;; + *) return 1 ;; + esac +} + +# Internal backend port (served by Express, proxied by local router). +get_env_backend_internal_port() { + local env="$1" + case "$env" in + test|pprod|prod) echo "3101" ;; + *) return 1 ;; + esac +} diff --git a/services/ia_dev/deploy/_lib/git-flow.sh b/services/ia_dev/deploy/_lib/git-flow.sh new file mode 100644 index 0000000..436e810 --- /dev/null +++ b/services/ia_dev/deploy/_lib/git-flow.sh @@ -0,0 +1,293 @@ +#!/usr/bin/env bash +# Git flow functions for automatic branch promotion and verification +# +# Prerequisites: This file must be sourced after env-map.sh and ssh.sh +# Functions used: get_env_target_ip, get_env_service_port, get_env_backend_internal_port, ssh_run +# Variables used: DEPLOY_SSH_KEY, DEPLOY_SSH_USER + +# Vérifie le succès d'un déploiement + +verify_deployment_success() { + local env="$1" + local domain="$2" + local ssh_key="${DEPLOY_SSH_KEY:-$HOME/.ssh/id_ed25519}" + local ssh_user="${DEPLOY_SSH_USER:-ncantu}" + local target_ip + local service_port + local backend_internal_port + + # These functions should be available from env-map.sh (sourced before this file) + target_ip="$(get_env_target_ip "$env")" + service_port="$(get_env_service_port "$env")" + backend_internal_port="$(get_env_backend_internal_port "$env")" + + # 1. Attendre quelques secondes pour que les services démarrent + info "[verify] Waiting for services to start (15 seconds)..." + sleep 15 + + # 2. Health check HTTP avec retries + info "[verify] Checking health endpoint via router (port ${service_port})..." 
+ local health_status + local max_retries=3 + local retry_count=0 + + # Vérifier via le router depuis le serveur distant (via SSH) + # Le router route /api/ vers le backend + while [[ $retry_count -lt $max_retries ]]; do + health_status=$(ssh_run "$ssh_key" "$ssh_user" "$target_ip" \ + "curl -s -o /dev/null -w '%{http_code}' --max-time 10 --connect-timeout 5 'http://localhost:${service_port}/api/v1/public/health' 2>/dev/null || echo '000'") + + if [[ "$health_status" == "200" ]]; then + info "[verify] Health check passed via router (HTTP $health_status)" + break + fi + + retry_count=$((retry_count + 1)) + if [[ $retry_count -lt $max_retries ]]; then + info "[verify] Health check attempt $retry_count failed (HTTP $health_status), retrying in 5 seconds..." + sleep 5 + fi + done + + if [[ "$health_status" != "200" ]]; then + # Essayer directement le backend en fallback + info "[verify] Router check failed (HTTP $health_status), trying backend directly (port ${backend_internal_port})..." + health_status=$(ssh_run "$ssh_key" "$ssh_user" "$target_ip" \ + "curl -s -o /dev/null -w '%{http_code}' --max-time 10 --connect-timeout 5 'http://localhost:${backend_internal_port}/api/v1/public/health' 2>/dev/null || echo '000'") + + # If 404, backend may mount API at root (API_ROOT_URL=/); try path without /api prefix + if [[ "$health_status" == "404" ]]; then + info "[verify] Backend returned 404 for /api/v1/public/health, trying /v1/public/health..." 
+ health_status=$(ssh_run "$ssh_key" "$ssh_user" "$target_ip" \ + "curl -s -o /dev/null -w '%{http_code}' --max-time 10 --connect-timeout 5 'http://localhost:${backend_internal_port}/v1/public/health' 2>/dev/null || echo '000'") + fi + + if [[ "$health_status" != "200" ]]; then + error "[verify] Health check failed: HTTP $health_status" + + # Afficher les logs du backend pour diagnostic + info "[verify] Backend logs (last 50 lines):" + ssh_run "$ssh_key" "$ssh_user" "$target_ip" \ + "journalctl -u lecoffreio-backend@${domain}.service --no-pager -n 50 2>/dev/null || true" | sed 's/^/ /' + + # Afficher l'état des services + info "[verify] Service status:" + ssh_run "$ssh_key" "$ssh_user" "$target_ip" \ + "systemctl status lecoffreio-backend@${domain}.service lecoffreio-router@${domain}.service --no-pager -l 2>/dev/null || true" | sed 's/^/ /' + + # Vérifier si le port est en écoute + info "[verify] Checking if backend port ${backend_internal_port} is listening:" + ssh_run "$ssh_key" "$ssh_user" "$target_ip" \ + "ss -tlnp | grep ':${backend_internal_port}' || echo ' Port ${backend_internal_port} is not listening'" | sed 's/^/ /' + + error "[verify] Backend may not be fully started yet. Check logs: journalctl -u lecoffreio-backend@${domain}.service -n 50" + error "[verify] Router status: systemctl status lecoffreio-router@${domain}.service" + return 1 + fi + info "[verify] Health check passed via direct backend (HTTP $health_status)" + fi + + # 3. Vérification des services systemd avec retries (frontend peut prendre plus de temps) + info "[verify] Checking systemd services..." 
+ local services_status + local max_service_retries=10 + local service_retry_count=0 + local all_active=false + + while [[ $service_retry_count -lt $max_service_retries && "$all_active" != "true" ]]; do + services_status=$(ssh_run "$ssh_key" "$ssh_user" "$target_ip" \ + "systemctl is-active lecoffreio-backend@${domain}.service lecoffreio-frontend@${domain}.service lecoffreio-router@${domain}.service 2>/dev/null | grep -vE '^(active|activating)$' || true") + + if [[ -z "$services_status" ]]; then + # Vérifier que tous les services sont vraiment "active" (pas "activating") + local all_status + all_status=$(ssh_run "$ssh_key" "$ssh_user" "$target_ip" \ + "systemctl is-active lecoffreio-backend@${domain}.service lecoffreio-frontend@${domain}.service lecoffreio-router@${domain}.service 2>/dev/null") + + # Vérifier s'il y a des erreurs dans les logs du frontend (si en "activating") + if echo "$all_status" | grep -q "activating"; then + # Vérifier les logs du frontend pour voir s'il y a une erreur + local frontend_errors + frontend_errors=$(ssh_run "$ssh_key" "$ssh_user" "$target_ip" \ + "journalctl -u lecoffreio-frontend@${domain}.service --since '2 minutes ago' --no-pager 2>/dev/null | { grep -iE '(error|fatal|failed)' || true; } | tail -5") + + if [[ -n "$frontend_errors" ]]; then + error "[verify] Frontend errors detected while activating:" + echo "$frontend_errors" | sed 's/^/ /' + error "[verify] Check frontend logs: journalctl -u lecoffreio-frontend@${domain}.service -n 50" + return 1 + fi + + service_retry_count=$((service_retry_count + 1)) + if [[ $service_retry_count -lt $max_service_retries ]]; then + info "[verify] Some services still activating, waiting 10 seconds (attempt $service_retry_count/$max_service_retries)..." 
+ sleep 10 + fi + else + all_active=true + fi + else + service_retry_count=$((service_retry_count + 1)) + if [[ $service_retry_count -lt $max_service_retries ]]; then + info "[verify] Some services not active, waiting 10 seconds (attempt $service_retry_count/$max_service_retries)..." + echo "$services_status" | sed 's/^/ /' + sleep 10 + fi + fi + done + + if [[ "$all_active" != "true" ]]; then + # Dernière vérification pour afficher l'état final et les logs + services_status=$(ssh_run "$ssh_key" "$ssh_user" "$target_ip" \ + "systemctl is-active lecoffreio-backend@${domain}.service lecoffreio-frontend@${domain}.service lecoffreio-router@${domain}.service 2>/dev/null || echo 'unknown'") + error "[verify] Some services are not active after $max_service_retries attempts:" + echo "$services_status" | sed 's/^/ /' + + # Afficher les logs du frontend si toujours en activating + if echo "$services_status" | grep -q "activating.*frontend"; then + info "[verify] Frontend logs (last 30 lines):" + ssh_run "$ssh_key" "$ssh_user" "$target_ip" \ + "journalctl -u lecoffreio-frontend@${domain}.service --no-pager -n 30 2>/dev/null || true" | sed 's/^/ /' + fi + + error "[verify] Check service status: systemctl status lecoffreio-backend@${domain}.service lecoffreio-frontend@${domain}.service lecoffreio-router@${domain}.service" + return 1 + fi + + info "[verify] All systemd services are active" + + # 3. Vérification des logs (erreurs critiques récentes) + info "[verify] Checking for critical errors in logs..." 
# Returns the next environment in the promotion chain (dev -> test -> pprod -> prod).
# Prints the next env name on stdout, or an empty string when there is no next env
# (already at prod, or unknown input).
get_next_env() {
  local current_env="$1"
  case "$current_env" in
    dev) echo "test" ;;
    test) echo "pprod" ;;
    pprod) echo "prod" ;;
    prod) echo "" ;;
    *) echo "" ;;
  esac
}

# Automatically promote the current work to the next environment branch.
# Args:
#   $1 current_env        - environment just deployed (dev|test|pprod|prod)
#   $2 current_branch     - branch currently checked out; promotion only runs from "dev"
#   $3 project_root       - git worktree root of the managed project
#   $4 deploy_git_remote  - remote to push to (default: lecoffre_ng)
# Behavior: checkout (or create) the target branch, merge dev into it, push, then
# return to dev. Returns 1 on merge conflict (after switching back to dev), 0 otherwise.
# NOTE(review): assumes info/error/success logging helpers are in scope — defined
# earlier in this library; confirm when sourcing standalone.
auto_promote_to_next_env() {
  local current_env="$1"
  local current_branch="$2"
  local project_root="$3"
  local deploy_git_remote="${4:-lecoffre_ng}"
  local next_env
  local next_branch

  # Promotion is only triggered from the dev branch; anything else is a no-op.
  if [[ "$current_branch" != "dev" ]]; then
    return 0
  fi

  next_env=$(get_next_env "$current_env")
  if [[ -z "$next_env" ]]; then
    info "[promote] No next environment (already at prod)"
    return 0
  fi

  # Map next environment to its target branch (1:1 naming).
  case "$next_env" in
    test) next_branch="test" ;;
    pprod) next_branch="pprod" ;;
    prod) next_branch="prod" ;;
    *) return 0 ;;
  esac

  info "[promote] Auto-promoting dev → $next_branch for $next_env environment..."

  # 1. Fetch the target branch (best-effort: remote branch may not exist yet).
  git -C "$project_root" fetch "$deploy_git_remote" "$next_branch" || true

  # 2. Checkout the target branch, creating it from the remote ref or from
  #    scratch when it does not exist locally/remotely.
  git -C "$project_root" checkout "$next_branch" || {
    # Branch doesn't exist locally, create it from remote
    git -C "$project_root" checkout -b "$next_branch" "${deploy_git_remote}/${next_branch}" 2>/dev/null || {
      # Remote branch doesn't exist, create new branch
      git -C "$project_root" checkout -b "$next_branch"
    }
  }

  # 3. Merge dev into target branch; on conflict, abort promotion and go back to dev.
  if ! git -C "$project_root" merge dev --allow-unrelated-histories --no-edit; then
    error "[promote] Merge dev → $next_branch failed. Resolve conflicts manually."
    git -C "$project_root" checkout dev
    return 1
  fi

  # 4. Push the promoted branch.
  info "[promote] Pushing $next_branch..."
  git -C "$project_root" push "$deploy_git_remote" "$next_branch"

  # 5. Return to dev so the working tree state is predictable for the caller.
  info "[promote] Returning to dev branch..."
  git -C "$project_root" checkout dev

  success "[promote] Successfully promoted dev → $next_branch"
  info "[promote] Next step: deploy to $next_env with: ./deploy/scripts_v2/deploy.sh $next_env"

  return 0
}
# Stage all changes, commit with message, and push current branch
# Usage: git_add_commit_push <project_root> <message> [remote]
# Example: git_add_commit_push /path/to/repo "fix: something"
# Returns 1 on missing message, detached HEAD, or any failing git step; 0 on success.
# Lint failures are intentionally non-blocking (warnings only).
git_add_commit_push() {
  local project_root="${1:-.}"
  local commit_message="$2"
  local deploy_git_remote="${3:-lecoffre_ng}"
  local current_branch

  if [[ -z "$commit_message" ]]; then
    error "[git] Commit message required"
    return 1
  fi

  # Lint --fix on all projects before staging (resources, backend, frontend). Non-blocking.
  info "[lint] Running lint --fix on lecoffre-ressources-dev, lecoffre-back-main, lecoffre-front-main..."
  (cd "${project_root}/lecoffre-ressources-dev" && npm run lint:fix) || warning "[lint] lecoffre-ressources-dev lint:fix failed (non-blocking)"
  (cd "${project_root}/lecoffre-back-main" && npm run lint:fix) || warning "[lint] lecoffre-back-main lint:fix failed (non-blocking)"
  (cd "${project_root}/lecoffre-front-main" && npm run lint:fix) || warning "[lint] lecoffre-front-main lint:fix failed (non-blocking)"
  info "[lint] Lint:fix step done"

  info "[git] Staging all changes (add -A)..."
  git -C "$project_root" add -A || {
    error "[git] git add -A failed"
    return 1
  }

  info "[git] Committing..."
  git -C "$project_root" commit -m "$commit_message" || {
    error "[git] commit failed"
    return 1
  }

  current_branch=$(git -C "$project_root" branch --show-current)
  # Guard: --show-current prints an empty string on a detached HEAD, which would
  # otherwise produce a confusing `git push <remote> ''` failure after the commit
  # has already been created.
  if [[ -z "$current_branch" ]]; then
    error "[git] Detached HEAD: cannot determine branch to push (commit was created)"
    return 1
  fi
  info "[git] Pushing to $deploy_git_remote $current_branch..."
  git -C "$project_root" push "$deploy_git_remote" "$current_branch" || {
    error "[git] push failed"
    return 1
  }

  success "[git] add -A, commit, push done"
  return 0
}
+ # - Increased timeouts and keepalive settings to handle network instability + # - Compression disabled to reduce overhead and potential connection issues + echo \ + -o BatchMode=yes \ + -o StrictHostKeyChecking=accept-new \ + -o ConnectTimeout=30 \ + -o ServerAliveInterval=10 \ + -o ServerAliveCountMax=6 \ + -o TCPKeepAlive=yes \ + -o Compression=no +} + +ssh_run() { + local ssh_key="$1" + local ssh_user="$2" + local ssh_host="$3" + shift 3 + + require_ssh_key "$ssh_key" + + local proxy_host="${DEPLOY_SSH_PROXY_HOST:-}" + local proxy_user="${DEPLOY_SSH_PROXY_USER:-$ssh_user}" + + local proxy_args=() + if [[ -n "$proxy_host" ]]; then + proxy_args=(-J "$proxy_user@$proxy_host") + fi + + # shellcheck disable=SC2207 + local common_opts=($(ssh_common_opts "$ssh_user" "$ssh_host")) + + ssh -i "$ssh_key" \ + "${common_opts[@]}" \ + "${proxy_args[@]}" \ + "$ssh_user@$ssh_host" "$@" +} + +scp_copy() { + local ssh_key="$1" + local src="$2" + local ssh_user="$3" + local ssh_host="$4" + local dst="$5" + local recursive="${6:-false}" + + require_ssh_key "$ssh_key" + + local proxy_host="${DEPLOY_SSH_PROXY_HOST:-}" + local proxy_user="${DEPLOY_SSH_PROXY_USER:-$ssh_user}" + + local proxy_args=() + if [[ -n "$proxy_host" ]]; then + proxy_args=(-o "ProxyJump=$proxy_user@$proxy_host") + fi + + # shellcheck disable=SC2207 + local common_opts=($(ssh_common_opts "$ssh_user" "$ssh_host")) + + local scp_opts=() + # Add -r for recursive copy if requested or if source is a directory + if [[ "$recursive" == "true" ]] || [[ -d "$src" ]]; then + scp_opts=(-r) + fi + + scp -i "$ssh_key" \ + "${scp_opts[@]}" \ + "${common_opts[@]}" \ + "${proxy_args[@]}" \ + "$src" "$ssh_user@$ssh_host:$dst" +} diff --git a/services/ia_dev/deploy/branch-align.sh b/services/ia_dev/deploy/branch-align.sh new file mode 100755 index 0000000..b0e9d14 --- /dev/null +++ b/services/ia_dev/deploy/branch-align.sh @@ -0,0 +1,135 @@ +#!/usr/bin/env bash +# Aligns only origin/test, origin/pprod, origin/prod to current 
# Aligns only origin/test, origin/pprod, origin/prod to the current branch SHA. main is not aligned.
# Usage: ./deploy/branch-align.sh [project_id] <env_branch: main|test|pprod|prod>
set -euo pipefail

# Resolve the real script path (follows symlinks) so re-exec works from anywhere.
SCRIPT_REAL="$(readlink -f "${BASH_SOURCE[0]:-$0}" 2>/dev/null || realpath "${BASH_SOURCE[0]:-$0}" 2>/dev/null || echo "${BASH_SOURCE[0]:-$0}")"
DEPLOY_DIR="$(cd "$(dirname "$SCRIPT_REAL")" && pwd)"
IA_DEV_ROOT="$(cd "$DEPLOY_DIR/.." && pwd)"

# shellcheck source=../lib/smart_ide_logs.sh
source "${IA_DEV_ROOT}/lib/smart_ide_logs.sh"
smart_ide_logs_begin "$IA_DEV_ROOT" "$0" "$*"

# Optional first arg: project id (must exist as projects/<id>/conf.json and must not
# collide with an env branch name); then re-exec this script from the project root.
if [[ -n "${1:-}" && -f "${IA_DEV_ROOT}/projects/${1}/conf.json" && ! "$1" =~ ^(main|test|pprod|prod)$ ]]; then
  export IA_PROJECT_ID="$1"
  shift
  # shellcheck source=../lib/project_config.sh
  source "${IA_DEV_ROOT}/lib/project_config.sh"
  [[ -n "${PROJECT_ID:-}" ]] && export IA_PROJECT_ID="$PROJECT_ID"
  # shellcheck source=../lib/project_git_root_from_conf.sh
  source "${IA_DEV_ROOT}/lib/project_git_root_from_conf.sh"
  ia_dev_resolve_project_git_root
  PROJECT_ROOT="${IA_PROJECT_GIT_ROOT:-}"
  if [[ -z "$PROJECT_ROOT" || ! -d "$PROJECT_ROOT" ]]; then
    echo "[branch-align][ERROR] Could not resolve project root for project_id ${IA_PROJECT_ID}" >&2
    exit 1
  fi
  cd "$PROJECT_ROOT" && exec "${DEPLOY_DIR}/$(basename "${BASH_SOURCE[0]:-$0}")" "$@"
fi

if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
  echo "[branch-align][ERROR] Not in a git repository" >&2
  exit 1
fi

# Always operate from the repository toplevel; re-exec if invoked elsewhere.
PROJECT_ROOT="$(git rev-parse --show-toplevel)"
if [[ "$(pwd)" != "$PROJECT_ROOT" ]]; then
  cd "$PROJECT_ROOT" && exec "${DEPLOY_DIR}/$(basename "${BASH_SOURCE[0]:-$0}")" "$@"
fi

env_branch="${1:-}"
if [[ -z "$env_branch" ]]; then
  echo "[branch-align][ERROR] Missing argument (expected: main|test|pprod|prod)" >&2
  echo "Usage: ./deploy/branch-align.sh [project_id] " >&2
  exit 1
fi
if [[ ! "$env_branch" =~ ^(main|test|pprod|prod)$ ]]; then
  echo "[branch-align][ERROR] Invalid : must be main, test, pprod or prod (got: '${env_branch}')" >&2
  echo "Usage: ./deploy/branch-align.sh [project_id] " >&2
  exit 1
fi

# The requested env branch must be the one currently checked out.
current_branch="$(git rev-parse --abbrev-ref HEAD)"
if [[ "$current_branch" != "$env_branch" ]]; then
  echo "[branch-align][ERROR] Must be on branch '${env_branch}' (current: '${current_branch}')" >&2
  exit 1
fi

# Fetch latest refs

git fetch origin

target_sha="$(git rev-parse "$env_branch")"
origin_env_sha="$(git rev-parse "origin/${env_branch}")"

# Local env branch wins over its remote counterpart ("env priority"):
# force-push (with lease) the local SHA when they diverge.
if [[ "$target_sha" != "$origin_env_sha" ]]; then
  echo "[branch-align] origin/${env_branch} differs from local ${env_branch}. Updating remote to local (env priority)."
  git push --force-with-lease origin "${target_sha}:${env_branch}"
  git fetch origin
fi

# Align all three branches to env SHA
for br in test pprod prod; do
  if [[ "$br" == "$env_branch" ]]; then
    # Ensure tracking exists
    git branch --set-upstream-to="origin/${br}" "$br" >/dev/null 2>&1 || true
    continue
  fi

  git branch -f "$br" "$target_sha"
  git push --force-with-lease origin "${target_sha}:${br}"
  git branch --set-upstream-to="origin/${br}" "$br" >/dev/null 2>&1 || true

done

# Also ensure env branch tracks its remote

git branch --set-upstream-to="origin/${env_branch}" "$env_branch" >/dev/null 2>&1 || true

# Verify last 30 commits are identical

tmp1="$(mktemp -t branch-align-test.XXXXXX)"
tmp2="$(mktemp -t branch-align-pprod.XXXXXX)"
tmp3="$(mktemp -t branch-align-prod.XXXXXX)"
# EXIT trap: record the script exit status in smart-ide logs, then remove temp files.
cleanup() {
  local ec=$?
  smart_ide_log_end_with_status "$ec"
  rm -f "$tmp1" "$tmp2" "$tmp3"
}
trap cleanup EXIT

git log -30 --format=%H origin/test > "$tmp1"
git log -30 --format=%H origin/pprod > "$tmp2"
git log -30 --format=%H origin/prod > "$tmp3"

if ! diff -u "$tmp1" "$tmp2" >/dev/null; then
  echo "[branch-align][ERROR] Last 30 commits differ: origin/test vs origin/pprod" >&2
  exit 1
fi
if ! diff -u "$tmp1" "$tmp3" >/dev/null; then
  echo "[branch-align][ERROR] Last 30 commits differ: origin/test vs origin/prod" >&2
  exit 1
fi

# Final assertions

if [[ "$(git rev-parse --abbrev-ref HEAD)" != "$env_branch" ]]; then
  echo "[branch-align][ERROR] Branch changed unexpectedly" >&2
  exit 1
fi

sha_test="$(git rev-parse origin/test)"
sha_pprod="$(git rev-parse origin/pprod)"
sha_prod="$(git rev-parse origin/prod)"

if [[ "$sha_test" != "$sha_pprod" ]] || [[ "$sha_test" != "$sha_prod" ]]; then
  echo "[branch-align][ERROR] Remote branches are not aligned" >&2
  echo "origin/test=$sha_test" >&2
  echo "origin/pprod=$sha_pprod" >&2
  echo "origin/prod=$sha_prod" >&2
  exit 1
fi

echo "[branch-align] OK: origin/test, origin/pprod, origin/prod aligned to ${sha_test}"
#!/usr/bin/env bash
set -euo pipefail

# Bump version and optional package.json files from project config (projects/<id>/conf.json).
# Usage: ./bump-version.sh [project_id] <X.Y.Z> [message_court]
# Requires: run from repo root; project id from param, or MAIL_TO or AI_AGENT_TOKEN; jq if using version.package_json_paths.
SCRIPT_REAL="$(readlink -f "${BASH_SOURCE[0]:-$0}" 2>/dev/null || realpath "${BASH_SOURCE[0]:-$0}" 2>/dev/null || echo "${BASH_SOURCE[0]:-$0}")"
DEPLOY_DIR="$(cd "$(dirname "$SCRIPT_REAL")" && pwd)"
IA_DEV_ROOT="$(cd "$DEPLOY_DIR/.." && pwd)"

# shellcheck source=../lib/smart_ide_logs.sh
source "${IA_DEV_ROOT}/lib/smart_ide_logs.sh"
smart_ide_logs_begin "$IA_DEV_ROOT" "$0" "$*"
smart_ide_logs_register_exit_trap

# Optional first arg: project id (must exist as projects/<id>/conf.json; a dot in the
# arg means it is a version number, not an id)
if [[ -n "${1:-}" && -f "${IA_DEV_ROOT}/projects/${1}/conf.json" && "$1" != *.* ]]; then
  export IA_PROJECT_ID="$1"
  shift
fi

VERSION="${1:-}"
SHORT_MSG="${2:-Nouvelles fonctionnalités et améliorations}"

if [[ -z "$VERSION" ]]; then
  echo "❌ Usage: ./bump-version.sh [project_id] [message_court]"
  echo " Exemple: ./bump-version.sh lecoffreio 2.1.0 'Nouveaux filtres'"
  exit 1
fi

# Strict semver-ish X.Y.Z validation.
if ! [[ "$VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
  echo "❌ Version invalide. Format attendu: X.Y.Z (ex: 2.1.0)"
  exit 1
fi

if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
  echo "❌ Not in a git repository" >&2
  exit 1
fi

PROJECT_ROOT="$(git rev-parse --show-toplevel)"
# NOTE(review): SCRIPT_REAL/DEPLOY_DIR/IA_DEV_ROOT are recomputed here — redundant
# with the top of the script, but kept for the re-exec path below.
SCRIPT_REAL="$(readlink -f "${BASH_SOURCE[0]:-$0}" 2>/dev/null || realpath "${BASH_SOURCE[0]:-$0}" 2>/dev/null || echo "${BASH_SOURCE[0]:-$0}")"
DEPLOY_DIR="$(cd "$(dirname "$SCRIPT_REAL")" && pwd)"
IA_DEV_ROOT="$(cd "$DEPLOY_DIR/.." && pwd)"
if [[ "$(pwd)" != "$PROJECT_ROOT" ]]; then
  SCRIPT_ABS="${DEPLOY_DIR}/$(basename "${BASH_SOURCE[0]:-$0}")"
  [[ -n "${IA_PROJECT_ID:-}" ]] && export IA_PROJECT_ID
  cd "$PROJECT_ROOT" && exec "$SCRIPT_ABS" "$@"
fi

# shellcheck source=../lib/project_config.sh
source "${IA_DEV_ROOT}/lib/project_config.sh"

echo "🔄 Mise à jour vers v${VERSION}..."

echo "$VERSION" > "$PROJECT_ROOT/VERSION"
echo "✅ VERSION → ${VERSION}"

# Collect optional package.json paths and splash app name from the project conf (requires jq).
package_paths=()
splash_name="Application"
if [[ -n "${PROJECT_CONFIG_PATH:-}" && -f "$PROJECT_CONFIG_PATH" ]] && command -v jq >/dev/null 2>&1; then
  while IFS= read -r p; do
    [[ -n "$p" ]] && package_paths+=( "$p" )
  done < <(jq -r '.version.package_json_paths[]? // empty' "$PROJECT_CONFIG_PATH" 2>/dev/null)
  splash_name="$(jq -r '.version.splash_app_name // "Application"' "$PROJECT_CONFIG_PATH" 2>/dev/null)"
fi

# NOTE(review): this sed replaces EVERY `"version": "..."` line in the file, not
# only the top-level package version — confirm the listed package.json files have
# no nested "version" fields. Also: iterating an empty array under `set -u`
# requires bash >= 4.4 — verify target shells.
for p in "${package_paths[@]}"; do
  if [[ "$p" = /* ]]; then
    abs_p="$p"
  else
    abs_p="$PROJECT_ROOT/$p"
  fi
  if [[ -f "$abs_p" ]]; then
    sed -i "s/\"version\": \".*\"/\"version\": \"${VERSION}\"/" "$abs_p"
    echo "✅ $p → ${VERSION}"
  else
    echo "⚠️ $p not found, skipped"
  fi
done

if [[ ! -f "$PROJECT_ROOT/CHANGELOG.md" ]]; then
  echo "⚠️ CHANGELOG.md non trouvé. Créez-le manuellement avec les détails de cette version."
fi

echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "📋 PROCHAINES ÉTAPES"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
echo "1. Éditer CHANGELOG.md pour documenter les changements de v${VERSION}"
echo ""
echo "2. Mettre à jour le .env distant avec le message splash (si applicable) :"
echo ""
cat << EOF
NEXT_PUBLIC_SPLASH_MESSAGE="🎉 ${splash_name} v${VERSION} est disponible !

✨ ${SHORT_MSG}

📖 Consultez CHANGELOG.md pour tous les détails"

NEXT_PUBLIC_SPLASH_MAX_DISPLAYS=10
NEXT_PUBLIC_SPLASH_ID="splash_v${VERSION}"
EOF
echo ""
echo "3. Rebuild, redémarrer et déployer selon le workflow du projet."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
echo "✅ Bump terminé. Éditer CHANGELOG.md puis lancer le déploiement selon le projet."

exit 0
# Usage: ./deploy/change-to-all-branches.sh [project_id]
set -euo pipefail

SCRIPT_REAL="$(readlink -f "${BASH_SOURCE[0]:-$0}" 2>/dev/null || realpath "${BASH_SOURCE[0]:-$0}" 2>/dev/null || echo "${BASH_SOURCE[0]:-$0}")"
DEPLOY_DIR="$(cd "$(dirname "$SCRIPT_REAL")" && pwd)"
IA_DEV_ROOT="$(cd "$DEPLOY_DIR/.." && pwd)"

# shellcheck source=../lib/smart_ide_logs.sh
source "${IA_DEV_ROOT}/lib/smart_ide_logs.sh"
smart_ide_logs_begin "$IA_DEV_ROOT" "$0" "$*"
smart_ide_logs_register_exit_trap

# Optional first arg: project id (must exist as projects/<id>/conf.json); then re-exec from project root
if [[ -n "${1:-}" && -f "${IA_DEV_ROOT}/projects/${1}/conf.json" ]]; then
  export IA_PROJECT_ID="$1"
  shift
  # shellcheck source=../lib/project_config.sh
  source "${IA_DEV_ROOT}/lib/project_config.sh"
  [[ -n "${PROJECT_ID:-}" ]] && export IA_PROJECT_ID="$PROJECT_ID"
  # shellcheck source=../lib/project_git_root_from_conf.sh
  source "${IA_DEV_ROOT}/lib/project_git_root_from_conf.sh"
  ia_dev_resolve_project_git_root
  PROJECT_ROOT="${IA_PROJECT_GIT_ROOT:-}"
  if [[ -z "$PROJECT_ROOT" || ! -d "$PROJECT_ROOT" ]]; then
    echo "[change-to-all-branches][ERROR] Could not resolve project root for project_id ${IA_PROJECT_ID}" >&2
    exit 1
  fi
  cd "$PROJECT_ROOT" && exec "${DEPLOY_DIR}/$(basename "${BASH_SOURCE[0]:-$0}")" "$@"
fi

if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
  echo "[change-to-all-branches][ERROR] Not in a git repository" >&2
  exit 1
fi

# Always run from the repository toplevel (re-exec when invoked elsewhere).
PROJECT_ROOT="$(git rev-parse --show-toplevel)"
if [[ "$(pwd)" != "$PROJECT_ROOT" ]]; then
  cd "$PROJECT_ROOT" && exec "${DEPLOY_DIR}/$(basename "${BASH_SOURCE[0]:-$0}")" "$@"
fi

# This workflow is only valid from the 'test' branch.
current="$(git rev-parse --abbrev-ref HEAD)"
if [[ "$current" != "test" ]]; then
  echo "[change-to-all-branches][ERROR] Must be on branch 'test' (current: '${current}')" >&2
  exit 1
fi

echo "[change-to-all-branches] Aligning branches..."
"$DEPLOY_DIR/branch-align.sh" test

# scripts_v2 lives in the host project's deploy/ (not necessarily under ia_dev)
DEPLOY_SCRIPTS_V2="${PROJECT_ROOT}/deploy/scripts_v2"
echo "[change-to-all-branches] Deploying test (--no-sync-origin; business flags from deploy.conf only)..."
# Prefer the ia_dev orchestrator when a project id is set and the orchestrator exists;
# fall back to the project's own scripts_v2 deploy entry point otherwise.
if [[ -n "${IA_PROJECT_ID:-}" && -x "${DEPLOY_DIR}/orchestrator.sh" ]]; then
  "${DEPLOY_DIR}/orchestrator.sh" test --no-sync-origin
else
  "${DEPLOY_SCRIPTS_V2}/deploy.sh" test --no-sync-origin
fi

echo "[change-to-all-branches] OK"
# Optional first arg: project id (must exist as projects/<id>/conf.json and must not be
# an env branch name)
if [[ -n "${1:-}" && ! "$1" =~ ^(pprod|prod)$ && -f "${IA_DEV_ROOT}/projects/${1}/conf.json" ]]; then
  export IA_PROJECT_ID="$1"
  shift
fi
# shellcheck source=../lib/project_config.sh
source "${IA_DEV_ROOT}/lib/project_config.sh"
# shellcheck source=../lib/project_git_root_from_conf.sh
source "${IA_DEV_ROOT}/lib/project_git_root_from_conf.sh"
ia_dev_resolve_project_git_root
PROJECT_ROOT="${IA_PROJECT_GIT_ROOT:-}"
# Fallback: assume ia_dev is vendored two levels under the project root.
if [[ -z "$PROJECT_ROOT" || ! -d "$PROJECT_ROOT" ]]; then
  PROJECT_ROOT="$(cd "$DEPLOY_IA/../.." && pwd)"
fi
# Export SECRETS_BASE/LECOFFRE_SECRETS_BASE from deploy.secrets_path when it resolves
# to an existing directory (requires jq).
if [[ -n "${PROJECT_CONFIG_PATH:-}" && -f "${PROJECT_CONFIG_PATH:-}" ]] && command -v jq >/dev/null 2>&1; then
  _sp="$(jq -r '.deploy.secrets_path // empty' "$PROJECT_CONFIG_PATH" 2>/dev/null)"
  _sp="$(ia_dev_resolve_path_from_conf "$PROJECT_CONFIG_PATH" "$_sp")"
  if [[ -n "$_sp" && "$_sp" != "null" && -d "$_sp" ]]; then
    export SECRETS_BASE="$_sp"
    export LECOFFRE_SECRETS_BASE="$_sp"
  fi
fi
if [[ "$(pwd)" != "$PROJECT_ROOT" ]]; then
  # Preserve project id for re-exec so config is still resolved in child
  [[ -n "${PROJECT_ID:-}" ]] && export IA_PROJECT_ID="$PROJECT_ID"
  cd "$PROJECT_ROOT" && exec "$SCRIPT_REAL" "$@"
fi
if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
  echo "[deploy-by-script-to][ERROR] Not in a git repository (PROJECT_ROOT=${PROJECT_ROOT})" >&2
  exit 1
fi

# Target branch is mandatory and restricted to pprod | prod.
TARGET_BRANCH="${1:-}"
if [[ -z "$TARGET_BRANCH" ]]; then
  echo "[deploy-by-script-to][ERROR] Missing argument (expected: pprod | prod)" >&2
  echo "Usage: ./deploy/deploy-by-script-to.sh [project_id] (from ia_dev; pprod or prod only)" >&2
  exit 1
fi
if [[ ! "$TARGET_BRANCH" =~ ^(pprod|prod)$ ]]; then
  echo "[deploy-by-script-to][ERROR] Invalid target branch: must be pprod or prod (got: '${TARGET_BRANCH}')" >&2
  echo "Usage: ./deploy/deploy-by-script-to.sh [project_id] " >&2
  exit 1
fi

# Deployment must start from 'test' (the aligned integration branch).
current="$(git rev-parse --abbrev-ref HEAD)"
if [[ "$current" != "test" ]]; then
  echo "[deploy-by-script-to][ERROR] Must be on branch 'test' (current: '${current}'). Run /change-to-all-branches first." >&2
  exit 1
fi

# deploy.host_stays_on_test=true means: never checkout/reset the target branch locally;
# deploy.sh is responsible for aligning remotes / worktree.
HOST_STAYS_ON_TEST=false
if [[ -n "${PROJECT_CONFIG_PATH:-}" && -f "${PROJECT_CONFIG_PATH}" ]] && command -v jq >/dev/null 2>&1; then
  _ht="$(jq -r '.deploy.host_stays_on_test // false' "$PROJECT_CONFIG_PATH" 2>/dev/null || echo false)"
  if [[ "$_ht" == "true" ]]; then
    HOST_STAYS_ON_TEST=true
  fi
fi

if [[ "$HOST_STAYS_ON_TEST" != "true" ]]; then
  echo "[deploy-by-script-to] Step 1/5: checkout ${TARGET_BRANCH}..."
  if [[ "$(git rev-parse --abbrev-ref HEAD)" != "$TARGET_BRANCH" ]]; then
    git checkout "$TARGET_BRANCH"
  fi
fi

# The per-environment secrets directory must exist before deploying.
SECRETS_PARENT="${SECRETS_BASE:-${LECOFFRE_SECRETS_BASE:-$PROJECT_ROOT/.secrets}}"
SECRETS_DIR="${SECRETS_PARENT}/${TARGET_BRANCH}"
if [[ ! -d "$SECRETS_DIR" ]]; then
  echo "[deploy-by-script-to][ERROR] secrets env dir missing: ${SECRETS_DIR} (set SECRETS_BASE or deploy.secrets_path in conf)" >&2
  exit 1
fi
if [[ "$HOST_STAYS_ON_TEST" == "true" ]]; then
  echo "[deploy-by-script-to] Step 2: secrets/${TARGET_BRANCH} OK (${SECRETS_DIR}) ; host stays on test (deploy.host_stays_on_test)"
else
  echo "[deploy-by-script-to] Step 2/5: secrets/${TARGET_BRANCH} OK (${SECRETS_DIR})"
fi

# Destructive sync: local target branch is forced to origin's state.
if [[ "$HOST_STAYS_ON_TEST" != "true" ]]; then
  echo "[deploy-by-script-to] Step 3/5: force sync local branch with origin/${TARGET_BRANCH}..."
  git fetch origin
  git reset --hard "origin/${TARGET_BRANCH}"
fi

if [[ "$HOST_STAYS_ON_TEST" == "true" ]]; then
  echo "[deploy-by-script-to] Step 3: deploy ${TARGET_BRANCH} from branch test (business flags from deploy.conf only)..."
else
  echo "[deploy-by-script-to] Step 4/5: deploy ${TARGET_BRANCH} (business flags from deploy.conf only)..."
fi
# Prefer ia_dev orchestrator; otherwise resolve the project deploy script
# (deploy.deploy_script_path from conf, falling back to deploy/scripts_v2/deploy.sh).
if [[ -n "${IA_PROJECT_ID:-}" && -x "${DEPLOY_IA}/orchestrator.sh" ]]; then
  "${DEPLOY_IA}/orchestrator.sh" "$TARGET_BRANCH"
else
  deploy_script="$PROJECT_ROOT/deploy/scripts_v2/deploy.sh"
  if [[ -n "${PROJECT_CONFIG_PATH:-}" && -f "${PROJECT_CONFIG_PATH:-}" ]] && command -v jq >/dev/null 2>&1; then
    _cfg_script="$(jq -r '.deploy.deploy_script_path // ""' "$PROJECT_CONFIG_PATH" 2>/dev/null)"
    _cfg_script="$(ia_dev_resolve_path_from_conf "$PROJECT_CONFIG_PATH" "$_cfg_script")"
    [[ -n "$_cfg_script" && -x "$_cfg_script" ]] && deploy_script="$_cfg_script"
  fi
  "$deploy_script" "$TARGET_BRANCH"
fi

if [[ "$HOST_STAYS_ON_TEST" == "true" ]]; then
  echo "[deploy-by-script-to] OK: deployed to ${TARGET_BRANCH}, still on branch test"
else
  echo "[deploy-by-script-to] Step 5/5: checkout test..."
  git checkout test
  echo "[deploy-by-script-to] OK: aligned, synced, deployed to ${TARGET_BRANCH}, back on test"
fi
# Generic deploy entry from ia_dev: shared methodology (envs, contract), then
# ia_dev orchestrator -> project orchestrator.
# Usage (from ia_dev root): ./deploy/deploy.sh <project_id> <env> [options…]
# Example: ./deploy/deploy.sh lecoffreio test --no-sync-origin

# shellcheck source=../lib/smart_ide_logs.sh
source "${IA_DEV_ROOT}/lib/smart_ide_logs.sh"
smart_ide_logs_begin "$IA_DEV_ROOT" "$0" "$*"

# shellcheck source=lib/deploy-methodology.sh
source "${DEPLOY_DIR}/lib/deploy-methodology.sh"

# Two positional args are mandatory: project id and environment.
if [[ $# -lt 2 ]]; then
  echo "[deploy][ERROR] Missing arguments" >&2
  echo "Usage: $0 [options passed to project orchestrator]" >&2
  echo "Example: $0 lecoffreio test --no-sync-origin" >&2
  exit 1
fi

# The project must have a conf.json under projects/<id>/.
CONF="${IA_DEV_ROOT}/projects/${1}/conf.json"
if [[ ! -f "$CONF" ]]; then
  echo "[deploy][ERROR] No conf for project '${1}': ${CONF}" >&2
  exit 1
fi

# Env must be one of the methodology's allowed literals (test|pprod|prod).
ia_dev_deploy_assert_env_literal "${2}" || exit 1

# Hand off to the orchestrator with the project id pinned in the environment;
# the env literal and remaining options are forwarded as-is.
export IA_PROJECT_ID="$1"
shift
exec "$DEPLOY_DIR/orchestrator.sh" "$@"
# ia_dev_deploy_require_jq — hard-exit (1) when jq is not installed.
# $1: optional log tag used in the error message (e.g. "[orchestrator]").
ia_dev_deploy_require_jq() {
  local tag="${1:-[ia_dev][deploy]}"
  if ! command -v jq >/dev/null 2>&1; then
    echo "${tag}[ERROR] jq is required to read deploy.* from conf.json" >&2
    exit 1
  fi
}

# ia_dev_deploy_secrets_export_from_conf — export SECRETS_BASE + LECOFFRE_SECRETS_BASE
# when deploy.secrets_path in the given conf.json resolves to an existing directory.
# $1: path to conf.json (required). Silently does nothing otherwise.
ia_dev_deploy_secrets_export_from_conf() {
  local conf="${1:?}"
  local secrets_path
  secrets_path="$(jq -r '.deploy.secrets_path // empty' "$conf")"
  secrets_path="$(ia_dev_resolve_path_from_conf "$conf" "$secrets_path")"
  if [[ -n "$secrets_path" && "$secrets_path" != "null" && -d "$secrets_path" ]]; then
    export SECRETS_BASE="$secrets_path"
    export LECOFFRE_SECRETS_BASE="$secrets_path"
  fi
}

# ia_dev_deploy_export_runtime_context — required handoff to the project orchestrator;
# exports the repo root and env so the child process can assert them (see below).
# $1: repo root (required)  $2: env (required)
ia_dev_deploy_export_runtime_context() {
  local repo="${1:?}"
  local env="${2:?}"
  export IA_DEV_DEPLOY_REPO_ROOT="$repo"
  export IA_DEV_DEPLOY_ENV="$env"
}

# ia_dev_deploy_assert_handoff_context — hard-exit (1) if the exported handoff
# variables are missing or inconsistent with the expected repo/env (no fallback).
# $1: expected repo root  $2: expected env  $3: optional log tag
ia_dev_deploy_assert_handoff_context() {
  local repo="${1:?}"
  local env="${2:?}"
  local tag="${3:-[orchestrator]}"
  if [[ -z "$repo" || ! -d "$repo" ]]; then
    echo "${tag}[ERROR] Handoff: repository root invalid or not a directory: ${repo}" >&2
    exit 1
  fi
  if [[ -z "$env" ]]; then
    echo "${tag}[ERROR] Handoff: env is empty" >&2
    exit 1
  fi
  if [[ "${IA_DEV_DEPLOY_REPO_ROOT:-}" != "$repo" ]]; then
    echo "${tag}[ERROR] Handoff: IA_DEV_DEPLOY_REPO_ROOT mismatch or unset (expected ${repo})" >&2
    exit 1
  fi
  if [[ "${IA_DEV_DEPLOY_ENV:-}" != "$env" ]]; then
    echo "${tag}[ERROR] Handoff: IA_DEV_DEPLOY_ENV mismatch or unset (expected ${env}, got ${IA_DEV_DEPLOY_ENV:-})" >&2
    exit 1
  fi
}
&& pwd)" + if [[ -f "${_IA_DEV_DEPLOY_LOG}/lib/smart_ide_logs.sh" ]]; then + # shellcheck source=../../lib/smart_ide_logs.sh + SMART_IDE_LOG_IA_DEV_ROOT="$_IA_DEV_DEPLOY_LOG" source "${_IA_DEV_DEPLOY_LOG}/lib/smart_ide_logs.sh" + smart_ide_log_event "deploy_script_tee_log project_root=${project_root} log_file=${log_file}" + fi + exec > >(tee -a "$log_file") + info "[deploy] Teeing output to ${log_file}" +} diff --git a/services/ia_dev/deploy/lib/deploy-methodology.sh b/services/ia_dev/deploy/lib/deploy-methodology.sh new file mode 100644 index 0000000..f46889f --- /dev/null +++ b/services/ia_dev/deploy/lib/deploy-methodology.sh @@ -0,0 +1,49 @@ +#!/usr/bin/env bash +# Shared deploy methodology for all ia_dev–managed projects: environments, quality bar, ordering contract. +# Sourced by ia_dev/deploy/deploy.sh and ia_dev/deploy/orchestrator.sh — no project-specific paths here. +# +# Boundary (approved workflow): +# - Everything strictly identical across projects (allowed envs, shared tooling checks, shared conf.json +# field handling) lives in deploy-methodology.sh and siblings under deploy/lib/ (e.g. deploy-conf-handling.sh). +# - The repository’s project orchestrator (deploy.project_orchestrator_path) only sequences project-specific +# work (e.g. LeCoffre: _lib/deploy-phase-*.sh, remote layout, Prisma, systemd unit names). +# When a step is candidate for hoisting, move it here or into a small deploy/lib/*.sh peer; shrink the project script. + +# Environments are fixed across projects; extend only with an explicit decision and conf migration. 
+IA_DEV_DEPLOY_ENVS=(test pprod prod) + +# ia_dev_deploy_env_is_allowed — exit 0 if allowed +ia_dev_deploy_env_is_allowed() { + local e="${1:-}" + local x + for x in "${IA_DEV_DEPLOY_ENVS[@]}"; do + if [[ "$e" == "$x" ]]; then + return 0 + fi + done + return 1 +} + +# ia_dev_deploy_assert_first_arg_env "$@" — first positional must be test|pprod|prod; stderr + exit 1 otherwise +ia_dev_deploy_assert_first_arg_env() { + if [[ $# -lt 1 ]]; then + echo "[ia_dev][deploy][ERROR] Missing (expected: test | pprod | prod)" >&2 + return 1 + fi + local env_arg="$1" + ia_dev_deploy_assert_env_literal "$env_arg" +} + +# ia_dev_deploy_assert_env_literal — validate a single env token +ia_dev_deploy_assert_env_literal() { + local env_arg="${1:-}" + if [[ -z "$env_arg" ]]; then + echo "[ia_dev][deploy][ERROR] Missing (expected: test | pprod | prod)" >&2 + return 1 + fi + if ! ia_dev_deploy_env_is_allowed "$env_arg"; then + echo "[ia_dev][deploy][ERROR] Invalid env '${env_arg}' (allowed: ${IA_DEV_DEPLOY_ENVS[*]})" >&2 + return 1 + fi + return 0 +} diff --git a/services/ia_dev/deploy/lib/ssh.sh b/services/ia_dev/deploy/lib/ssh.sh new file mode 100644 index 0000000..6368933 --- /dev/null +++ b/services/ia_dev/deploy/lib/ssh.sh @@ -0,0 +1,89 @@ +#!/usr/bin/env bash +# Shared SSH/SCP helpers for deploy scripts (ProxyJump, BatchMode, keepalive). +# Sourced by project deploy/_lib/ssh.sh when ia_dev is present under the repo root (or sibling layout). + +set -euo pipefail + +require_ssh_key() { + local key_path="$1" + if [[ -z "$key_path" ]]; then + echo "SSH key path is required" >&2 + return 1 + fi + if [[ ! 
-f "$key_path" ]]; then + echo "SSH key not found: $key_path" >&2 + return 1 + fi +} + +ssh_common_opts() { + local ssh_user="$1" + local ssh_host="$2" + + echo \ + -o BatchMode=yes \ + -o StrictHostKeyChecking=accept-new \ + -o ConnectTimeout=30 \ + -o ServerAliveInterval=10 \ + -o ServerAliveCountMax=6 \ + -o TCPKeepAlive=yes \ + -o Compression=no +} + +ssh_run() { + local ssh_key="$1" + local ssh_user="$2" + local ssh_host="$3" + shift 3 + + require_ssh_key "$ssh_key" + + local proxy_host="${DEPLOY_SSH_PROXY_HOST:-}" + local proxy_user="${DEPLOY_SSH_PROXY_USER:-$ssh_user}" + + local proxy_args=() + if [[ -n "$proxy_host" ]]; then + proxy_args=(-J "$proxy_user@$proxy_host") + fi + + # shellcheck disable=SC2207 + local common_opts=($(ssh_common_opts "$ssh_user" "$ssh_host")) + + ssh -i "$ssh_key" \ + "${common_opts[@]}" \ + "${proxy_args[@]}" \ + "$ssh_user@$ssh_host" "$@" +} + +scp_copy() { + local ssh_key="$1" + local src="$2" + local ssh_user="$3" + local ssh_host="$4" + local dst="$5" + local recursive="${6:-false}" + + require_ssh_key "$ssh_key" + + local proxy_host="${DEPLOY_SSH_PROXY_HOST:-}" + local proxy_user="${DEPLOY_SSH_PROXY_USER:-$ssh_user}" + + local proxy_args=() + if [[ -n "$proxy_host" ]]; then + proxy_args=(-o "ProxyJump=$proxy_user@$proxy_host") + fi + + # shellcheck disable=SC2207 + local common_opts=($(ssh_common_opts "$ssh_user" "$ssh_host")) + + local scp_opts=() + if [[ "$recursive" == "true" ]] || [[ -d "$src" ]]; then + scp_opts=(-r) + fi + + scp -i "$ssh_key" \ + "${scp_opts[@]}" \ + "${common_opts[@]}" \ + "${proxy_args[@]}" \ + "$src" "$ssh_user@$ssh_host:$dst" +} diff --git a/services/ia_dev/deploy/orchestrator.sh b/services/ia_dev/deploy/orchestrator.sh new file mode 100755 index 0000000..244d383 --- /dev/null +++ b/services/ia_dev/deploy/orchestrator.sh @@ -0,0 +1,101 @@ +#!/usr/bin/env bash +# ia_dev orchestrator: applies shared methodology, then invokes exactly one project orchestrator +# (deploy.project_orchestrator_path 
relative to repository_root), which sequences project-specific scripts. +# Legacy: if project_orchestrator_path is absent, falls back to deploy.hooks.phases or deploy.deploy_script_path. +# Usage: orchestrator.sh [options…] — passed unchanged to the project orchestrator. +# Requires: IA_PROJECT_ID, IA_DEV_ROOT (or callers like change-to-all-branches). +set -euo pipefail + +SCRIPT_REAL="$(readlink -f "${BASH_SOURCE[0]:-$0}" 2>/dev/null || realpath "${BASH_SOURCE[0]:-$0}" 2>/dev/null || echo "${BASH_SOURCE[0]:-$0}")" +DEPLOY_DIR="$(cd "$(dirname "$SCRIPT_REAL")" && pwd)" +IA_DEV_ROOT="$(cd "$DEPLOY_DIR/.." && pwd)" + +# shellcheck source=../lib/smart_ide_logs.sh +source "${IA_DEV_ROOT}/lib/smart_ide_logs.sh" +smart_ide_logs_begin "$IA_DEV_ROOT" "$0" "$*" +smart_ide_logs_register_exit_trap + +_ORCH_TAG="[orchestrator]" + +# shellcheck source=lib/deploy-methodology.sh +source "${DEPLOY_DIR}/lib/deploy-methodology.sh" +# shellcheck source=lib/deploy-conf-handling.sh +source "${DEPLOY_DIR}/lib/deploy-conf-handling.sh" + +if [[ -z "${IA_PROJECT_ID:-}" ]]; then + echo "${_ORCH_TAG}[ERROR] IA_PROJECT_ID is not set" >&2 + exit 1 +fi + +ia_dev_deploy_assert_first_arg_env "$@" || exit 1 + +# shellcheck source=../lib/project_config.sh +source "${IA_DEV_ROOT}/lib/project_config.sh" +# shellcheck source=../lib/project_git_root_from_conf.sh +source "${IA_DEV_ROOT}/lib/project_git_root_from_conf.sh" +ia_dev_resolve_project_git_root +REPO_ROOT="${IA_PROJECT_GIT_ROOT:-}" +if [[ -z "$REPO_ROOT" || ! -d "$REPO_ROOT" ]]; then + echo "${_ORCH_TAG}[ERROR] Could not resolve repository root for project ${IA_PROJECT_ID}" >&2 + exit 1 +fi + +CONF="${PROJECT_CONFIG_PATH:-}" +if [[ -z "$CONF" || ! 
-f "$CONF" ]]; then + echo "${_ORCH_TAG}[ERROR] Missing conf: ${CONF:-}" >&2 + exit 1 +fi + +ia_dev_deploy_require_jq "${_ORCH_TAG}" +ia_dev_deploy_secrets_export_from_conf "$CONF" +ia_dev_deploy_export_runtime_context "$REPO_ROOT" "${1:-}" +ia_dev_deploy_assert_handoff_context "$REPO_ROOT" "${1:-}" "${_ORCH_TAG}" + +DEPLOY_SCRIPT_PATH="$(jq -r '.deploy.deploy_script_path // empty' "$CONF")" +DEPLOY_SCRIPT_PATH="$(ia_dev_resolve_path_from_conf "$CONF" "$DEPLOY_SCRIPT_PATH")" +PROJECT_ORCH_REL="$(jq -r '.deploy.project_orchestrator_path // empty' "$CONF")" + +if [[ -n "$PROJECT_ORCH_REL" && "$PROJECT_ORCH_REL" != "null" ]]; then + PROJECT_ORCH_ABS="${REPO_ROOT}/${PROJECT_ORCH_REL}" + if [[ ! -f "$PROJECT_ORCH_ABS" ]]; then + echo "${_ORCH_TAG}[ERROR] deploy.project_orchestrator_path not a file: ${PROJECT_ORCH_ABS}" >&2 + exit 1 + fi + PHASE_NON_EMPTY="$(jq '.deploy.hooks.phases // [] | length' "$CONF")" + if [[ "$PHASE_NON_EMPTY" != "0" ]]; then + echo "${_ORCH_TAG}[WARN] deploy.hooks.phases is non-empty but project_orchestrator_path takes precedence; phases are ignored." >&2 + fi + echo "${_ORCH_TAG} Project orchestrator: ${PROJECT_ORCH_REL} $*" + smart_ide_log_event "HANDOFF project_orchestrator ${PROJECT_ORCH_ABS} $*" + exec bash "$PROJECT_ORCH_ABS" "$@" +fi + +if [[ -z "$DEPLOY_SCRIPT_PATH" || ! -f "$DEPLOY_SCRIPT_PATH" ]]; then + echo "${_ORCH_TAG}[ERROR] deploy.deploy_script_path missing or not a file: ${DEPLOY_SCRIPT_PATH:-}" >&2 + exit 1 +fi + +PHASE_COUNT="$(jq '.deploy.hooks.phases // [] | length' "$CONF")" +if [[ "$PHASE_COUNT" == "0" ]]; then + echo "${_ORCH_TAG}[WARN] deploy.project_orchestrator_path unset; falling back to deploy_script_path only." >&2 + smart_ide_log_event "HANDOFF deploy_script_path ${DEPLOY_SCRIPT_PATH} $*" + exec bash "$DEPLOY_SCRIPT_PATH" "$@" +fi + +mapfile -t PHASE_SCRIPTS < <(jq -r '.deploy.hooks.phases[]? | if type == "string" then . 
elif type == "object" and (.run | type == "string") then .run else empty end' "$CONF") + +if [[ ${#PHASE_SCRIPTS[@]} -eq 0 ]]; then + smart_ide_log_event "HANDOFF deploy_script_path ${DEPLOY_SCRIPT_PATH} $*" + exec bash "$DEPLOY_SCRIPT_PATH" "$@" +fi + +for rel in "${PHASE_SCRIPTS[@]}"; do + [[ -z "$rel" ]] && continue + phase_path="${REPO_ROOT}/${rel}" + if [[ ! -f "$phase_path" ]]; then + echo "${_ORCH_TAG}[ERROR] Phase script not found: ${phase_path}" >&2 + exit 1 + fi + echo "${_ORCH_TAG} Running (legacy phases): ${rel} $*" + bash "$phase_path" "$@" +done diff --git a/services/ia_dev/deploy/pousse.sh b/services/ia_dev/deploy/pousse.sh new file mode 100755 index 0000000..5c76595 --- /dev/null +++ b/services/ia_dev/deploy/pousse.sh @@ -0,0 +1,225 @@ +#!/usr/bin/env bash +set -euo pipefail + +if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then + echo "[pousse][ERROR] Not in a git repository" >&2 + exit 1 +fi + +PROJECT_ROOT="$(git rev-parse --show-toplevel)" +SCRIPT_REAL="$(readlink -f "${BASH_SOURCE[0]:-$0}" 2>/dev/null || realpath "${BASH_SOURCE[0]:-$0}" 2>/dev/null || echo "${BASH_SOURCE[0]:-$0}")" +DEPLOY_DIR="$(cd "$(dirname "$SCRIPT_REAL")" && pwd)" +IA_DEV_ROOT="$(cd "$DEPLOY_DIR/.." 
&& pwd)" + +# shellcheck source=../lib/smart_ide_logs.sh +source "${IA_DEV_ROOT}/lib/smart_ide_logs.sh" +smart_ide_logs_begin "$IA_DEV_ROOT" "$0" "$*" + +# Optional project id: --project or first positional if projects//conf.json exists +while [[ $# -gt 0 ]]; do + if [[ "$1" == --project ]]; then + [[ -n "${2:-}" ]] || { echo "[pousse][ERROR] --project requires " >&2; exit 1; } + export IA_PROJECT_ID="$2" + shift 2 + elif [[ "$1" != --* && -n "${1:-}" && -f "${IA_DEV_ROOT}/projects/${1}/conf.json" ]]; then + export IA_PROJECT_ID="$1" + shift + else + break + fi +done + +# Resolve project id and config path: param (IA_PROJECT_ID), MAIL_TO or AI_AGENT_TOKEN → projects//conf.json +# shellcheck source=../lib/project_config.sh +source "${IA_DEV_ROOT}/lib/project_config.sh" +[[ -n "${PROJECT_ID:-}" ]] && export IA_PROJECT_ID="$PROJECT_ID" + +remote="origin" +bump_version=false + +usage() { + cat <<'EOF' +Usage: + ./deploy/pousse.sh [project_id|--project ] [--remote ] [--bump-version] + + project_id Optional. Id from projects//conf.json (e.g. lecoffreio). Else from MAIL_TO or AI_AGENT_TOKEN. + --project Same as positional project_id. + --bump-version Increment patch (third component) in VERSION before staging. + +Reads a full multi-line commit message from STDIN, then: +- if not in repo root: re-exec from repo root (standardized execution) +- build check (npm run build in each directory listed in projects//conf.json build_dirs, if any; exit on failure) +- git add -A +- git commit -F +- git push -u HEAD + +The current branch must already exist on the remote (e.g. origin/); otherwise the script refuses to push. + +Example: + ./deploy/pousse.sh <<'MSG' + Title + + **Motivations:** + - ... + + **Root causes:** + - ... + + **Correctifs:** + - ... + + **Evolutions:** + - ... + + **Pages affectées:** + - ... 
+ MSG +EOF +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --remote) + remote="$2" + shift 2 + ;; + --bump-version) + bump_version=true + shift + ;; + -h|--help) + usage + exit 0 + ;; + *) + echo "[pousse][ERROR] Unknown arg: $1" >&2 + usage >&2 + exit 1 + ;; + esac +done + +repo_root="$(git rev-parse --show-toplevel)" +# When run from ia_dev root, use configured project repo for git operations (MAIL_TO or AI_AGENT_TOKEN must be set) +git_work_root="$repo_root" +if [[ -n "${PROJECT_CONFIG_PATH:-}" && -f "$PROJECT_CONFIG_PATH" ]] && command -v jq >/dev/null 2>&1 && [[ "$repo_root" == "$IA_DEV_ROOT" ]]; then + # shellcheck source=../lib/project_git_root_from_conf.sh + source "${IA_DEV_ROOT}/lib/project_git_root_from_conf.sh" + ia_dev_resolve_project_git_root + if [[ -n "${IA_PROJECT_GIT_ROOT:-}" && -d "$IA_PROJECT_GIT_ROOT" ]]; then + git_work_root="$IA_PROJECT_GIT_ROOT" + fi +fi +if [[ "$(pwd)" != "$git_work_root" ]]; then + cd "$git_work_root" || { echo "[pousse][ERROR] Cannot cd to project root ${git_work_root}" >&2; exit 1; } +fi +branch="$(git rev-parse --abbrev-ref HEAD)" +if [[ -z "$branch" || "$branch" == "HEAD" ]]; then + echo "[pousse][ERROR] Detached HEAD is not supported" >&2 + exit 1 +fi +author_name="$(git config user.name || true)" +if [[ "$author_name" != "4NK" && "$author_name" != "Nicolas Cantu" ]]; then + echo "[pousse][ERROR] Refusing to commit: git user.name must be '4NK' or 'Nicolas Cantu' (got: '${author_name}')" >&2 + exit 1 +fi +# Build dirs from project config (projects//conf.json); skip if no config or no build_dirs +build_dirs=() +if [[ -n "${PROJECT_CONFIG_PATH:-}" && -f "$PROJECT_CONFIG_PATH" ]] && command -v jq >/dev/null 2>&1; then + while IFS= read -r d; do + [[ -n "$d" ]] && build_dirs+=( "$d" ) + done < <(jq -r '.build_dirs[]? 
// empty' "$PROJECT_CONFIG_PATH" 2>/dev/null) +fi +if [[ ${#build_dirs[@]} -gt 0 ]]; then + # shellcheck source=../lib/conf_path_resolve.sh + source "${IA_DEV_ROOT}/lib/conf_path_resolve.sh" + echo "[pousse] Build check (${#build_dirs[@]} dirs from project config)..." + for dir in "${build_dirs[@]}"; do + if [[ "$dir" = /* ]]; then + abs_dir="$dir" + elif [[ "$dir" == ../* || "$dir" == ".." ]]; then + abs_dir="$(ia_dev_resolve_path_from_conf "$PROJECT_CONFIG_PATH" "$dir")" + else + abs_dir="${git_work_root}/${dir}" + fi + if [[ ! -d "$abs_dir" ]]; then + echo "[pousse][WARN] Skipping build ${dir} (directory not found)" >&2 + continue + fi + echo "[pousse] Building ${dir}..." + (cd "$abs_dir" && npm run build) || { + echo "[pousse][ERROR] Build failed in ${dir}" >&2 + exit 1 + } + done + echo "[pousse] Build check OK" +else + echo "[pousse] No build_dirs in project config (or no projects//conf.json / jq); skipping build check" +fi + +msg_file="$(mktemp -t pousse-commit-msg.XXXXXX)" +cleanup() { + local ec=$? + smart_ide_log_end_with_status "$ec" + rm -f "$msg_file" +} +trap cleanup EXIT + +cat >"$msg_file" || true +if [[ ! -s "$msg_file" ]]; then + echo "[pousse][ERROR] Empty commit message on STDIN" >&2 + exit 1 +fi + +if [[ "$bump_version" == "true" ]]; then + version_file="${git_work_root}/VERSION" + if [[ ! -f "$version_file" ]]; then + echo "[pousse][ERROR] VERSION not found at ${version_file}" >&2 + exit 1 + fi + current="$(cat "$version_file" | sed 's/[[:space:]]//g')" + if ! 
[[ "$current" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + echo "[pousse][ERROR] VERSION format must be X.Y.Z (got: '${current}')" >&2 + exit 1 + fi + maj="${current%%.*}" + min="${current#*.}" + min="${min%%.*}" + patch="${current##*.}" + patch=$((patch + 1)) + new_version="${maj}.${min}.${patch}" + echo "$new_version" > "$version_file" + echo "[pousse] Bumped VERSION: ${current} -> ${new_version}" +fi + +# Stage all changes + +git add -A + +git_status_short="$(git status -sb)" +echo "$git_status_short" + +# Prevent committing potentially sensitive files +staged_files="$(git diff --cached --name-only || true)" +if [[ -n "$staged_files" ]]; then + if echo "$staged_files" | grep -Eiq '^(\.secrets/|\.env($|\.)|\.env\.|.*\.(key|pem|p12)$|.*credentials.*)'; then + echo "[pousse][ERROR] Refusing to commit: staged files look sensitive:" >&2 + echo "$staged_files" | grep -Ei '^(\.secrets/|\.env($|\.)|\.env\.|.*\.(key|pem|p12)$|.*credentials.*)' >&2 + exit 1 + fi +fi + +if git diff --cached --quiet; then + echo "[pousse] No staged changes to commit" >&2 + exit 0 +fi + +echo "[pousse] Staged changes:" +git diff --cached --stat + +git commit -F "$msg_file" +if ! git rev-parse "${remote}/${branch}" >/dev/null 2>&1; then + echo "[pousse][ERROR] Branch '${branch}' does not exist on remote '${remote}'. Refusing to push (would create a new remote branch). Create the branch on the remote first or push manually." 
>&2 + exit 1 +fi +git push -u "$remote" HEAD diff --git a/services/ia_dev/deploy/proxy-units/lpldf-https-watch.service b/services/ia_dev/deploy/proxy-units/lpldf-https-watch.service new file mode 100644 index 0000000..ce48cc9 --- /dev/null +++ b/services/ia_dev/deploy/proxy-units/lpldf-https-watch.service @@ -0,0 +1,8 @@ +[Unit] +Description=HTTPS availability check for LPLDF (Les Petites Lecons) +After=network-online.target +Wants=network-online.target + +[Service] +Type=oneshot +ExecStart=/opt/proxy-config/scripts/watch-https-lpldf.sh diff --git a/services/ia_dev/deploy/proxy-units/lpldf-https-watch.timer b/services/ia_dev/deploy/proxy-units/lpldf-https-watch.timer new file mode 100644 index 0000000..f611722 --- /dev/null +++ b/services/ia_dev/deploy/proxy-units/lpldf-https-watch.timer @@ -0,0 +1,10 @@ +[Unit] +Description=Run LPLDF HTTPS watch every 5 minutes + +[Timer] +OnBootSec=3min +OnUnitActiveSec=5min +Persistent=true + +[Install] +WantedBy=timers.target diff --git a/services/ia_dev/deploy/run-project-hooks.sh b/services/ia_dev/deploy/run-project-hooks.sh new file mode 100755 index 0000000..634daf0 --- /dev/null +++ b/services/ia_dev/deploy/run-project-hooks.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash +# Backward-compatible alias: delegates to orchestrator.sh (generic deploy orchestrator). +# Usage: run-project-hooks.sh [options passed to each phase / fallback script] +# Requires: IA_PROJECT_ID (set by caller or by deploy.sh). +set -euo pipefail + +SCRIPT_REAL="$(readlink -f "${BASH_SOURCE[0]:-$0}" 2>/dev/null || realpath "${BASH_SOURCE[0]:-$0}" 2>/dev/null || echo "${BASH_SOURCE[0]:-$0}")" +DEPLOY_DIR="$(cd "$(dirname "$SCRIPT_REAL")" && pwd)" +IA_DEV_ROOT="$(cd "$DEPLOY_DIR/.." 
&& pwd)" +# shellcheck source=../lib/smart_ide_logs.sh +source "${IA_DEV_ROOT}/lib/smart_ide_logs.sh" +smart_ide_logs_begin "$IA_DEV_ROOT" "$0" "$*" +exec "$DEPLOY_DIR/orchestrator.sh" "$@" diff --git a/services/ia_dev/deploy/scripts/add-ssh-key-anthony.sh b/services/ia_dev/deploy/scripts/add-ssh-key-anthony.sh new file mode 100755 index 0000000..51086da --- /dev/null +++ b/services/ia_dev/deploy/scripts/add-ssh-key-anthony.sh @@ -0,0 +1,52 @@ +#!/usr/bin/env bash +# Add SSH public key for anthony@4nk to proxy and services (192.168.1.104). +# Run from ia_dev root. Uses DEPLOY_SSH_KEY or ~/.ssh/id_ed25519 to authenticate. + +set -euo pipefail + +SCRIPT_DIR="${BASH_SOURCE%/*}" +[[ -d "$SCRIPT_DIR" ]] || SCRIPT_DIR=. +IA_DEV_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)" +if [[ -f "${IA_DEV_ROOT}/lib/smart_ide_logs.sh" ]]; then + # shellcheck source=../../lib/smart_ide_logs.sh + source "${IA_DEV_ROOT}/lib/smart_ide_logs.sh" + smart_ide_logs_begin "$IA_DEV_ROOT" "$0" "$*" + smart_ide_logs_register_exit_trap +fi +LIB_DIR="$(cd "$SCRIPT_DIR/../_lib" && pwd)" +# shellcheck source=../_lib/ssh.sh +source "$LIB_DIR/ssh.sh" + +SSH_KEY="${DEPLOY_SSH_KEY:-$HOME/.ssh/id_ed25519}" +SSH_USER="${DEPLOY_SSH_USER:-ncantu}" +PROXY_HOST="${DEPLOY_SSH_PROXY_HOST:-4nk.myftp.biz}" +SERVICES_IP="192.168.1.104" + +KEY_LINE="ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIM7df89eMnNBc85o+hijYTnZALlTZssIZSYlN+hMW3c anthony@4nk" +printf -v KEY_ESC '%q' "$KEY_LINE" + +require_ssh_key "$SSH_KEY" + +add_key_remote() { + local target_host="$1" + local use_proxy="${2:-false}" + local proxy_args=() + if [[ "$use_proxy" == "true" ]]; then + proxy_args=(-o "ProxyJump=$SSH_USER@$PROXY_HOST") + fi + # shellcheck disable=SC2207 + local common_opts=($(ssh_common_opts "$SSH_USER" "$target_host")) + ssh -i "$SSH_KEY" \ + "${common_opts[@]}" \ + "${proxy_args[@]}" \ + "$SSH_USER@$target_host" \ + "mkdir -p ~/.ssh && (grep -qF $KEY_ESC ~/.ssh/authorized_keys 2>/dev/null || echo $KEY_ESC >> ~/.ssh/authorized_keys)" +} + +echo 
"Adding key to proxy ($PROXY_HOST)..." +add_key_remote "$PROXY_HOST" false + +echo "Adding key to services ($SERVICES_IP) via proxy..." +add_key_remote "$SERVICES_IP" true + +echo "Done. Key anthony@4nk added on proxy and services." diff --git a/services/ia_dev/deploy/scripts/install-lpldf-https-watch-on-proxy.sh b/services/ia_dev/deploy/scripts/install-lpldf-https-watch-on-proxy.sh new file mode 100755 index 0000000..3e46329 --- /dev/null +++ b/services/ia_dev/deploy/scripts/install-lpldf-https-watch-on-proxy.sh @@ -0,0 +1,45 @@ +#!/usr/bin/env bash +# Install HTTPS watchdog + systemd timer on the 4NK proxy (192.168.1.100). +# Does not modify Nginx. Run from ia_dev root: ./deploy/scripts/install-lpldf-https-watch-on-proxy.sh +set -euo pipefail + +IA_DEV_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)" +if [[ -f "${IA_DEV_ROOT}/lib/smart_ide_logs.sh" ]]; then + # shellcheck source=../../lib/smart_ide_logs.sh + source "${IA_DEV_ROOT}/lib/smart_ide_logs.sh" + smart_ide_logs_begin "$IA_DEV_ROOT" "$0" "$*" + smart_ide_logs_register_exit_trap +fi +# shellcheck source=deploy/_lib/ssh.sh +source "${IA_DEV_ROOT}/deploy/_lib/ssh.sh" + +readonly SSH_KEY="${DEPLOY_SSH_KEY:-${HOME}/.ssh/id_ed25519}" +readonly SSH_USER="${DEPLOY_SSH_USER:-ncantu}" +readonly PROXY_HOST="${DEPLOY_PROXY_HOST:-192.168.1.100}" +export DEPLOY_SSH_PROXY_HOST="${DEPLOY_SSH_PROXY_HOST:-4nk.myftp.biz}" + +readonly REMOTE_SCRIPT="/opt/proxy-config/scripts/watch-https-lpldf.sh" +readonly REMOTE_ENV_DIR="/opt/proxy-config/scripts/env" +readonly REMOTE_ENV="${REMOTE_ENV_DIR}/watch-https-lpldf.env" +readonly SERVICE_NAME="lpldf-https-watch" + +scp_copy "$SSH_KEY" "${IA_DEV_ROOT}/tools/proxy-https-watch-lpldf.sh" "$SSH_USER" "$PROXY_HOST" "/tmp/watch-https-lpldf.sh" "false" +scp_copy "$SSH_KEY" "${IA_DEV_ROOT}/deploy/proxy-units/${SERVICE_NAME}.service" "$SSH_USER" "$PROXY_HOST" "/tmp/${SERVICE_NAME}.service" "false" +scp_copy "$SSH_KEY" "${IA_DEV_ROOT}/deploy/proxy-units/${SERVICE_NAME}.timer" 
"$SSH_USER" "$PROXY_HOST" "/tmp/${SERVICE_NAME}.timer" "false" + +ssh_run "$SSH_KEY" "$SSH_USER" "$PROXY_HOST" \ + "sudo install -d -m 755 /opt/proxy-config/scripts && \ + sudo install -d -m 700 ${REMOTE_ENV_DIR} && \ + sudo install -m 755 /tmp/watch-https-lpldf.sh ${REMOTE_SCRIPT} && \ + sudo rm -f /tmp/watch-https-lpldf.sh && \ + sudo install -d -m 755 /var/lib/lpldf-https-watch && \ + sudo install -m 644 /tmp/${SERVICE_NAME}.service /etc/systemd/system/${SERVICE_NAME}.service && \ + sudo install -m 644 /tmp/${SERVICE_NAME}.timer /etc/systemd/system/${SERVICE_NAME}.timer && \ + sudo rm -f /tmp/${SERVICE_NAME}.service /tmp/${SERVICE_NAME}.timer && \ + sudo systemctl daemon-reload && \ + sudo systemctl enable --now ${SERVICE_NAME}.timer && \ + sudo systemctl start ${SERVICE_NAME}.service || true && \ + systemctl --no-pager --full status ${SERVICE_NAME}.timer" + +echo "[install-lpldf-https-watch] Installed ${REMOTE_SCRIPT} and ${SERVICE_NAME}.timer on ${PROXY_HOST}" +echo "[install-lpldf-https-watch] Optional: create ${REMOTE_ENV} (chmod 600); see tools/proxy-https-watch-lpldf.env.example" diff --git a/services/ia_dev/git-issues/agent-loop-chat-iterations.sh b/services/ia_dev/git-issues/agent-loop-chat-iterations.sh new file mode 100755 index 0000000..98304c0 --- /dev/null +++ b/services/ia_dev/git-issues/agent-loop-chat-iterations.sh @@ -0,0 +1,82 @@ +#!/usr/bin/env bash +# Bounded loop for chat: run mail retrieval N times, 1 minute between each. +# Use from Cursor chat when asked to "lance la boucle récupération emails puis attend 1 min et relance". +# Runs in foreground (no background); chat can run it for a few iterations to avoid timeout. +# +# Usage: +# ./git-issues/agent-loop-chat-iterations.sh [N] [--repeat] +# N = number of iterations (default 3). Each iteration: mail-list-unread.sh then sleep 60. +# --repeat = after N iterations, relaunch (infinite loop of N-by-N runs). 
+# Output and mail list (expéditeur, sujet) are appended to projects//logs/git-issues/agent-loop-chat-iterations.log. +# +set -euo pipefail +if [ -n "${HOME:-}" ] && [ -r "$HOME/.bashrc" ]; then + set +u + # shellcheck source=/dev/null + source "$HOME/.bashrc" 2>/dev/null || true + set -u +fi +[ -n "${HOME:-}" ] && [ -d "$HOME/.local/bin" ] && export PATH="$HOME/.local/bin:$PATH" + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +ROOT="$(cd "${GIT_ISSUES_DIR}/../.." && pwd)" +export GIT_ISSUES_DIR +export REPO_ROOT="${ROOT}" +cd "$ROOT" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" 2>/dev/null || true +LOGS_GIT_ISSUES="${PROJECT_LOGS_DIR:-$ROOT/logs}/git-issues" + +REPEAT="" +if [ "${1:-}" = "--repeat" ]; then + REPEAT=1 + N="${2:-3}" +else + N="${1:-3}" +fi +if ! [[ "$N" =~ ^[0-9]+$ ]] || [ "$N" -lt 1 ]; then + echo "Usage: $0 [N] [--repeat] — N positive integer (default 3). --repeat = relancer à la fin." >&2 + exit 1 +fi + +LOG_DIR="${LOGS_GIT_ISSUES}" +LOG_FILE="${LOG_DIR}/agent-loop-chat-iterations.log" +mkdir -p "$LOG_DIR" +# Ensure absolute path so logs are always in the same place +[[ "$LOG_FILE" != /* ]] && LOG_FILE="$(cd "$LOG_DIR" && pwd)/agent-loop-chat-iterations.log" + +log_and_echo() { + echo "$1" | tee -a "$LOG_FILE" +} + +# Log path and start so logs are never "empty" from path confusion +log_and_echo "[agent-loop-chat] $(date -Iseconds) — log file: $LOG_FILE" +# Test send at launch: one test email to nicolas.cantu@pm.me +log_and_echo "[agent-loop-chat] $(date -Iseconds) — test d'envoi vers nicolas.cantu@pm.me" +"${GIT_ISSUES_DIR}/mail-send-reply.sh" --to "nicolas.cantu@pm.me" --subject "Test envoi - agent-loop-chat $(date +%Y-%m-%dT%H:%M:%S)" --body "Mail de test envoyé au lancement de agent-loop-chat-iterations.sh." 
2>&1 | tee -a "$LOG_FILE" +if [ "${PIPESTATUS[0]:-0}" -eq 0 ]; then + log_and_echo "[agent-loop-chat] $(date -Iseconds) — test d'envoi OK" +else + log_and_echo "[agent-loop-chat] $(date -Iseconds) — test d'envoi échoué" + exit 1 +fi + +run_iterations() { + for i in $(seq 1 "$N"); do + log_and_echo "[agent-loop-chat] $(date -Iseconds) — iteration $i/$N" + "${GIT_ISSUES_DIR}/mail-list-unread.sh" 2>&1 | tee -a "$LOG_FILE" || true + if [ "$i" -lt "$N" ]; then + log_and_echo "[agent-loop-chat] $(date -Iseconds) — attente 60 s avant prochaine itération" + sleep 60 + fi + done + log_and_echo "[agent-loop-chat] $(date -Iseconds) — $N itérations terminées" +} + +while true; do + run_iterations + if [ -z "${REPEAT:-}" ]; then + break + fi + log_and_echo "[agent-loop-chat] $(date -Iseconds) — relance" +done diff --git a/services/ia_dev/git-issues/agent-loop-lock-acquire.sh b/services/ia_dev/git-issues/agent-loop-lock-acquire.sh new file mode 100755 index 0000000..94da81e --- /dev/null +++ b/services/ia_dev/git-issues/agent-loop-lock-acquire.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash +# Try to acquire agent-loop.lock. Exit 0 and create lock if none or stale (>24h). Exit 1 if lock is recent. +# Run from repo root. Used by agent-loop agent before starting x cycles (section 2). +# +# Usage: +# Depuis la racine de ia_dev : ./git-issues/agent-loop-lock-acquire.sh +# +set -euo pipefail +if [ -n "${HOME:-}" ] && [ -r "$HOME/.bashrc" ]; then + set +u + # shellcheck source=/dev/null + source "$HOME/.bashrc" 2>/dev/null || true + set -u +fi + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +ROOT="$(cd "${GIT_ISSUES_DIR}/../.." 
&& pwd)" +export GIT_ISSUES_DIR +export REPO_ROOT="${ROOT}" +cd "$ROOT" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" 2>/dev/null || true +LOGS_GIT_ISSUES="${PROJECT_LOGS_DIR:-$ROOT/logs}/git-issues" + +LOCK_FILE="${AGENT_LOOP_LOCK_FILE:-$LOGS_GIT_ISSUES/agent-loop.lock}" +STALE_SEC=$((24 * 3600)) # 24 hours +mkdir -p "$(dirname "$LOCK_FILE")" + +if [ -f "$LOCK_FILE" ]; then + mtime=$(stat -c %Y "$LOCK_FILE" 2>/dev/null) || mtime=0 + now=$(date +%s) + if [ $((now - mtime)) -lt "$STALE_SEC" ]; then + echo "[agent-loop-lock-acquire] $(date -Iseconds) — Lock actif: $LOCK_FILE (mtime < 24 h). Ne pas lancer une deuxième instance." >&2 + exit 1 + fi +fi + +printf "%s\n%s\n" "$$" "$(date -Iseconds)" > "$LOCK_FILE" +echo "[agent-loop-lock-acquire] $(date -Iseconds) — Lock acquis: $LOCK_FILE" diff --git a/services/ia_dev/git-issues/agent-loop-lock-release.sh b/services/ia_dev/git-issues/agent-loop-lock-release.sh new file mode 100755 index 0000000..00184d7 --- /dev/null +++ b/services/ia_dev/git-issues/agent-loop-lock-release.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +# Remove agent-loop.lock and optionally agent-loop.stop. Run at end of x cycles (normal or stop). +# Run from repo root. +# +# Usage: +# Depuis la racine de ia_dev : ./git-issues/agent-loop-lock-release.sh +# +set -euo pipefail +if [ -n "${HOME:-}" ] && [ -r "$HOME/.bashrc" ]; then + set +u + # shellcheck source=/dev/null + source "$HOME/.bashrc" 2>/dev/null || true + set -u +fi + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +ROOT="$(cd "${GIT_ISSUES_DIR}/../.." 
&& pwd)" +export GIT_ISSUES_DIR +export REPO_ROOT="${ROOT}" +cd "$ROOT" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" 2>/dev/null || true +LOGS_GIT_ISSUES="${PROJECT_LOGS_DIR:-$ROOT/logs}/git-issues" + +LOCK_FILE="${AGENT_LOOP_LOCK_FILE:-$LOGS_GIT_ISSUES/agent-loop.lock}" +STOP_FILE="${AGENT_LOOP_STOP_FILE:-$LOGS_GIT_ISSUES/agent-loop.stop}" + +if [ -f "$LOCK_FILE" ]; then + rm -f "$LOCK_FILE" + echo "[agent-loop-lock-release] $(date -Iseconds) — Lock supprimé: $LOCK_FILE" +fi +if [ -f "$STOP_FILE" ]; then + rm -f "$STOP_FILE" + echo "[agent-loop-lock-release] $(date -Iseconds) — Fichier stop supprimé: $STOP_FILE" +fi diff --git a/services/ia_dev/git-issues/agent-loop-n-cycles.sh b/services/ia_dev/git-issues/agent-loop-n-cycles.sh new file mode 100755 index 0000000..789ed39 --- /dev/null +++ b/services/ia_dev/git-issues/agent-loop-n-cycles.sh @@ -0,0 +1,72 @@ +#!/usr/bin/env bash +# Run N cycles of (retrieval + treatment via Cursor Agent CLI + sleep 60s) in foreground. +# Uses same lock as section 2. When AGENT_LOOP_RUN_AGENT=1 and "agent" in PATH, runs +# git-issues-process workflow via CLI each cycle (same as agent-loop.sh). No contournement: +# /agent-loop 600 = agent runs this script with N=600 and timeout ~11h; script does 600 full cycles. +# +# Usage: from ia_dev root: ./git-issues/agent-loop-n-cycles.sh [N] +# N = number of cycles (default 600). Each cycle = retrieval + agent CLI (if configured) + 60s sleep. +# +set -euo pipefail +if [ -n "${HOME:-}" ] && [ -r "$HOME/.bashrc" ]; then + set +u + # shellcheck source=/dev/null + source "$HOME/.bashrc" 2>/dev/null || true + set -u +fi +[ -n "${HOME:-}" ] && [ -d "$HOME/.local/bin" ] && export PATH="$HOME/.local/bin:$PATH" + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +ROOT="$(cd "${GIT_ISSUES_DIR}/../.." && pwd)" +IA_DEV_ROOT="${GIT_ISSUES_DIR}/.." 
+export GIT_ISSUES_DIR +export REPO_ROOT="${ROOT}" +cd "$ROOT" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" 2>/dev/null || true + +AGENT_LOOP_ENV="${IA_DEV_ROOT}/.secrets/git-issues/agent-loop.env" +if [ -r "$AGENT_LOOP_ENV" ]; then + set +u + # shellcheck source=/dev/null + source "$AGENT_LOOP_ENV" + set -u +fi + +N="${1:-600}" +if ! [[ "$N" =~ ^[0-9]+$ ]] || [ "$N" -lt 1 ]; then + echo "[agent-loop-n-cycles] Usage: $0 [N] (N positive integer, default 600)" >&2 + exit 1 +fi + +"${GIT_ISSUES_DIR}/agent-loop-lock-acquire.sh" || exit 1 + +AGENT_PROMPT="Exécute le workflow mails du spooler (agent git-issues-process). 1) Lister les mails à traiter avec ./git-issues/list-pending-spooler.sh (ou utiliser les chemins dans agent-loop.pending). 2) Pour chaque fichier .pending : lire le JSON (from, to, subject, body, message_id, base). Répondre à l'expéditeur (--to du JSON), pas à une adresse fixe ; le « to » du mail reçu a déjà déterminé le projet. Rédiger une réponse pertinente (uniquement ton texte, pas de citation ; mail-send-reply.sh refuse si le body contient From:, Message-ID, wrote:, etc.). Envoyer avec ./git-issues/mail-send-reply.sh --to --subject \"Re: ...\" --body \"\" --in-reply-to \"\". Après envoi réussi : ./git-issues/write-response-spooler.sh --base --to --subject \"Re: ...\" --body \"\" --in-reply-to \"\". Ne pas appeler mail-mark-read.sh (spooler)." + +i=1 +while [ "$i" -le "$N" ]; do + if "${GIT_ISSUES_DIR}/agent-loop-stop-requested.sh" 2>/dev/null; then + echo "[agent-loop-n-cycles] $(date -Iseconds) — Arrêt demandé (cycle $i/$N). Libération du lock." 
+ "${GIT_ISSUES_DIR}/agent-loop-lock-release.sh" + exit 0 + fi + echo "[agent-loop-n-cycles] $(date -Iseconds) — Cycle $i/$N" + "${GIT_ISSUES_DIR}/agent-loop-retrieval-once.sh" || true + + if [ "${AGENT_LOOP_RUN_AGENT:-0}" = "1" ] && command -v agent >/dev/null 2>&1; then + AGENT_MODEL="${AGENT_LOOP_MODEL:-sonnet-4.6}" + if agent -p "$AGENT_PROMPT" -f --model "$AGENT_MODEL" 2>&1; then + : # agent done + else + echo "[agent-loop-n-cycles] $(date -Iseconds) — Agent terminé avec erreur ou interruption (cycle $i/$N)" + fi + fi + + if [ "$i" -lt "$N" ]; then + sleep 60 + fi + i=$((i + 1)) +done + +"${GIT_ISSUES_DIR}/agent-loop-lock-release.sh" +echo "[agent-loop-n-cycles] $(date -Iseconds) — $N cycles terminés." diff --git a/services/ia_dev/git-issues/agent-loop-retrieval-once.sh b/services/ia_dev/git-issues/agent-loop-retrieval-once.sh new file mode 100755 index 0000000..38f4d17 --- /dev/null +++ b/services/ia_dev/git-issues/agent-loop-retrieval-once.sh @@ -0,0 +1,46 @@ +#!/usr/bin/env bash +# One-shot retrieval: run tickets-fetch-inbox (spooler from/to) then list-pending-spooler; write paths to agent-loop.pending and status file. +# Run from repo root. Used by agent-loop agent for "x times" cycles. Criterion: from/to in conf.json, not IMAP unread. +# +# Usage: +# Depuis la racine de ia_dev : ./git-issues/agent-loop-retrieval-once.sh +# +set -euo pipefail +if [ -n "${HOME:-}" ] && [ -r "$HOME/.bashrc" ]; then + set +u + # shellcheck source=/dev/null + source "$HOME/.bashrc" 2>/dev/null || true + set -u +fi + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +ROOT="$(cd "${GIT_ISSUES_DIR}/../.." 
&& pwd)" +export GIT_ISSUES_DIR +export REPO_ROOT="${ROOT}" +cd "$ROOT" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" 2>/dev/null || true +# When no PROJECT_ID (no MAIL_TO/AI_AGENT_TOKEN), use generic ROOT/logs for status/pending file +LOGS_GIT_ISSUES="${PROJECT_LOGS_DIR:-$ROOT/logs}/git-issues" + +STATUS_FILE="${AGENT_LOOP_STATUS_FILE:-$LOGS_GIT_ISSUES/agent-loop.status}" +PENDING_FILE="${AGENT_LOOP_PENDING_FILE:-$LOGS_GIT_ISSUES/agent-loop.pending}" +mkdir -p "$(dirname "$STATUS_FILE")" + +write_status() { + printf "%s\n%s\n%s\n" "$(date -Iseconds)" "$1" "${2:-}" > "$STATUS_FILE" +} + +"${GIT_ISSUES_DIR}/tickets-fetch-inbox.sh" 2>&1 || true +pending_out="" +pending_out=$("${GIT_ISSUES_DIR}/list-pending-spooler.sh" 2>&1) || true +if [ -n "$pending_out" ] && echo "$pending_out" | grep -q "\.pending"; then + write_status "mails_pending" "One-shot: mails en attente dans le spooler (from/to)." + printf "%s\n%s\n%s\n%s\n" "$(date -Iseconds)" "mails_pending" "---" "$pending_out" > "$PENDING_FILE" + n=$(echo "$pending_out" | grep -c "\.pending" || true) + echo "[agent-loop-retrieval-once] $(date -Iseconds) — $n mail(s) en attente dans le spooler (critère from/to). Écrit dans agent-loop.pending" +else + write_status "idle" "One-shot: aucun mail en attente dans le spooler (critère: from/to dans conf.json)." + [ -f "$PENDING_FILE" ] && : > "$PENDING_FILE" + echo "[agent-loop-retrieval-once] $(date -Iseconds) — Aucun mail en attente dans le spooler (critère: from/to dans conf.json)" +fi diff --git a/services/ia_dev/git-issues/agent-loop-stop-requested.sh b/services/ia_dev/git-issues/agent-loop-stop-requested.sh new file mode 100755 index 0000000..2727fba --- /dev/null +++ b/services/ia_dev/git-issues/agent-loop-stop-requested.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash +# Exit 0 if agent-loop.stop exists (stop requested), 1 otherwise. Used at start of each cycle. +# Run from repo root. 
+# +# Usage: +# Depuis la racine de ia_dev : ./git-issues/agent-loop-stop-requested.sh +# +set -euo pipefail +if [ -n "${HOME:-}" ] && [ -r "$HOME/.bashrc" ]; then + set +u + # shellcheck source=/dev/null + source "$HOME/.bashrc" 2>/dev/null || true + set -u +fi + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +ROOT="$(cd "${GIT_ISSUES_DIR}/../.." && pwd)" +export GIT_ISSUES_DIR +export REPO_ROOT="${ROOT}" +cd "$ROOT" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" 2>/dev/null || true +LOGS_GIT_ISSUES="${PROJECT_LOGS_DIR:-$ROOT/logs}/git-issues" + +STOP_FILE="${AGENT_LOOP_STOP_FILE:-$LOGS_GIT_ISSUES/agent-loop.stop}" +[ -f "$STOP_FILE" ] +# exit 0 if file exists, 1 otherwise diff --git a/services/ia_dev/git-issues/agent-loop-stop.sh b/services/ia_dev/git-issues/agent-loop-stop.sh new file mode 100755 index 0000000..233ceed --- /dev/null +++ b/services/ia_dev/git-issues/agent-loop-stop.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash +# Create agent-loop.stop so the running agent-loop (section 2) stops at the next cycle start. +# Run from repo root. Same paths as agent-loop-retrieval-once.sh (projects//logs/git-issues). +# +# Usage: +# Depuis la racine de ia_dev : ./git-issues/agent-loop-stop.sh +# +set -euo pipefail +if [ -n "${HOME:-}" ] && [ -r "$HOME/.bashrc" ]; then + set +u + # shellcheck source=/dev/null + source "$HOME/.bashrc" 2>/dev/null || true + set -u +fi + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +ROOT="$(cd "${GIT_ISSUES_DIR}/../.." && pwd)" +export GIT_ISSUES_DIR +export REPO_ROOT="${ROOT}" +cd "$ROOT" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" 2>/dev/null || true +LOGS_GIT_ISSUES="${PROJECT_LOGS_DIR:-$ROOT/logs}/git-issues" + +STOP_FILE="${AGENT_LOOP_STOP_FILE:-$LOGS_GIT_ISSUES/agent-loop.stop}" +mkdir -p "$(dirname "$STOP_FILE")" +touch "$STOP_FILE" +echo "[agent-loop-stop] $(date -Iseconds) — $STOP_FILE créé. 
La boucle en cours s'arrêtera au début du prochain cycle." diff --git a/services/ia_dev/git-issues/agent-loop-treatment.sh b/services/ia_dev/git-issues/agent-loop-treatment.sh new file mode 100755 index 0000000..ad1d208 --- /dev/null +++ b/services/ia_dev/git-issues/agent-loop-treatment.sh @@ -0,0 +1,52 @@ +#!/usr/bin/env bash +# Treatment loop: periodically check agent-loop.pending and run Cursor Agent CLI (git-issues-process workflow) when non-empty. +# Run from repo root. No timeout; runs forever. Do NOT start this script from the agent-loop agent (use bounded runs only). +# +# Usage (manual only; agent must not launch with nohup/&): +# Depuis la racine de ia_dev : ./git-issues/agent-loop-treatment.sh +# +set -euo pipefail +if [ -n "${HOME:-}" ] && [ -r "$HOME/.bashrc" ]; then + set +u + # shellcheck source=/dev/null + source "$HOME/.bashrc" 2>/dev/null || true + set -u +fi +[ -n "${HOME:-}" ] && [ -d "$HOME/.local/bin" ] && export PATH="$HOME/.local/bin:$PATH" + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +ROOT="$(git rev-parse --show-toplevel 2>/dev/null)" || ROOT="$(cd "${GIT_ISSUES_DIR}/../.." && pwd)" +export GIT_ISSUES_DIR +export REPO_ROOT="${ROOT}" +cd "$ROOT" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" 2>/dev/null || true +LOGS_GIT_ISSUES="${PROJECT_LOGS_DIR:-$ROOT/logs}/git-issues" + +PENDING_FILE="${AGENT_LOOP_PENDING_FILE:-$LOGS_GIT_ISSUES/agent-loop.pending}" +LOG_DIR="$(dirname "$PENDING_FILE")" +mkdir -p "$LOG_DIR" + +AGENT_LOOP_ENV="${GIT_ISSUES_DIR}/../.secrets/git-issues/agent-loop.env" +if [ -r "$AGENT_LOOP_ENV" ]; then + set +u + # shellcheck source=/dev/null + source "$AGENT_LOOP_ENV" + set -u +fi +INTERVAL="${AGENT_LOOP_TREATMENT_INTERVAL_SEC:-60}" +AGENT_MODEL="${AGENT_LOOP_MODEL:-sonnet-4.6}" + +PROMPT="Exécute le workflow mails du spooler (agent git-issues-process). 
Les chemins des fichiers .pending sont dans projects//logs/git-issues/agent-loop.pending (ou exécuter ./git-issues/list-pending-spooler.sh). Pour chaque fichier .pending : lire le JSON (from, to, subject, body, message_id, base). Répondre à l'expéditeur (--to du JSON), pas à une adresse fixe ; le « to » du mail reçu a déjà déterminé le projet. Rédiger une réponse pertinente (uniquement ton texte ; pas de citation — mail-send-reply.sh refuse si le body contient From:, Message-ID, wrote:, etc.). Envoyer avec ./git-issues/mail-send-reply.sh --to --subject \"Re: ...\" --body \"\" --in-reply-to \"\". Après envoi réussi : ./git-issues/write-response-spooler.sh --base --to --subject \"Re: ...\" --body \"\" --in-reply-to \"\". Ne pas appeler mail-mark-read.sh (spooler)." + +while true; do + if [ -s "$PENDING_FILE" ] && command -v agent >/dev/null 2>&1; then + echo "[agent-loop-treatment] $(date -Iseconds) — Pending non vide, lancement de l'agent Cursor." + if agent -p "$PROMPT" -f --model "$AGENT_MODEL" 2>&1; then + echo "[agent-loop-treatment] $(date -Iseconds) — Agent terminé." + else + echo "[agent-loop-treatment] $(date -Iseconds) — Agent terminé avec erreur." + fi + fi + sleep "$INTERVAL" +done diff --git a/services/ia_dev/git-issues/agent-loop.env.example b/services/ia_dev/git-issues/agent-loop.env.example new file mode 100644 index 0000000..9b367fe --- /dev/null +++ b/services/ia_dev/git-issues/agent-loop.env.example @@ -0,0 +1,19 @@ +# Agent-loop parameters (Cursor Agent CLI, model, interval). +# Copy to .secrets/git-issues/agent-loop.env and set as needed. +# Do not commit .secrets/git-issues/agent-loop.env (directory is gitignored). 
+# +# Run Cursor Agent when unread mails are detected (0 or 1) +# AGENT_LOOP_RUN_AGENT=1 +# +# Model used by the CLI (default: sonnet-4.6 to avoid Opus usage limits) +# List: agent models +# AGENT_LOOP_MODEL=sonnet-4.6 +# +# Polling interval in seconds (default: 60) +# AGENT_LOOP_INTERVAL_SEC=60 +# +# Optional: custom paths for status, pending, lock and stop files +# AGENT_LOOP_STATUS_FILE=ia_dev/projects//logs/git-issues/agent-loop.status +# AGENT_LOOP_PENDING_FILE=ia_dev/projects//logs/git-issues/agent-loop.pending +# AGENT_LOOP_LOCK_FILE=ia_dev/projects//logs/git-issues/agent-loop.lock +# AGENT_LOOP_STOP_FILE=ia_dev/projects//logs/git-issues/agent-loop.stop diff --git a/services/ia_dev/git-issues/agent-loop.sh b/services/ia_dev/git-issues/agent-loop.sh new file mode 100755 index 0000000..88fc9ab --- /dev/null +++ b/services/ia_dev/git-issues/agent-loop.sh @@ -0,0 +1,91 @@ +#!/usr/bin/env bash +# Agent loop: poll for mails to treat (spooler criterion: from/to in conf.json, not IMAP unread). +# Run from repo root. Runs forever. Do NOT start from the agent-loop agent (use bounded runs only, e.g. agent-loop-chat-iterations.sh N). +# +# Usage (manual only; agent must not launch with nohup/&): +# ./git-issues/agent-loop.sh [interval_seconds] +# AGENT_LOOP_INTERVAL_SEC=120 ./git-issues/agent-loop.sh +# +# Witness file: projects//logs/git-issues/agent-loop.status (or AGENT_LOOP_STATUS_FILE) +# State file (not a log): updated every iteration. If mtime is older than 2*interval, loop is considered stopped. +# Pending file: projects//logs/git-issues/agent-loop.pending (or AGENT_LOOP_PENDING_FILE) +# Written when there are .pending mails in the spooler (no matching .response); contains paths to treat. +# +# Optional: set AGENT_LOOP_RUN_AGENT=1 to run the Cursor Agent CLI when mails are detected. +# Requires Cursor Agent CLI (https://cursor.com/docs/cli/using). If "agent" is not in PATH, the loop only updates status/pending. 
+# +# Optional: AGENT_LOOP_MODEL= to force the model (e.g. sonnet-4.6, gpt-5.4-low). Default: sonnet-4.6 to avoid Opus usage limits when running unattended. +# +set -euo pipefail +# Source user env so PATH includes ~/.local/bin (Cursor Agent CLI, etc.) +if [ -n "${HOME:-}" ] && [ -r "$HOME/.bashrc" ]; then + set +u + # shellcheck source=/dev/null + source "$HOME/.bashrc" 2>/dev/null || true + set -u +fi +[ -n "${HOME:-}" ] && [ -d "$HOME/.local/bin" ] && export PATH="$HOME/.local/bin:$PATH" + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +ROOT="$(cd "${GIT_ISSUES_DIR}/../.." && pwd)" +export GIT_ISSUES_DIR +export REPO_ROOT="${ROOT}" +cd "$ROOT" +# Per-project logs under projects//logs (lib.sh sets PROJECT_LOGS_DIR) +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" 2>/dev/null || true +LOGS_GIT_ISSUES="${PROJECT_LOGS_DIR:-$ROOT/logs}/git-issues" + +# Load agent-loop parameters from .secrets (optional; .secrets under ia_dev) +AGENT_LOOP_ENV="${GIT_ISSUES_DIR}/../.secrets/git-issues/agent-loop.env" +if [ -r "$AGENT_LOOP_ENV" ]; then + set +u + # shellcheck source=/dev/null + source "$AGENT_LOOP_ENV" + set -u +fi + +INTERVAL="${1:-${AGENT_LOOP_INTERVAL_SEC:-60}}" +# STATUS_FILE: state/witness file (not a log) — indicates if the loop is active; under projects//logs/git-issues/ for per-project state. +STATUS_FILE="${AGENT_LOOP_STATUS_FILE:-$LOGS_GIT_ISSUES/agent-loop.status}" +PENDING_FILE="${AGENT_LOOP_PENDING_FILE:-$LOGS_GIT_ISSUES/agent-loop.pending}" +mkdir -p "$(dirname "$STATUS_FILE")" + +write_status() { + local status="$1" + local detail="${2:-}" + printf "%s\n%s\n%s\n" "$(date -Iseconds)" "$status" "$detail" > "$STATUS_FILE" +} + +while true; do + write_status "running" "interval=${INTERVAL}s" + # Spooler flow: fetch by from/to (conf.json), then list .pending without .response. No IMAP unread criterion. 
+ "${GIT_ISSUES_DIR}/tickets-fetch-inbox.sh" 2>&1 || true + pending_out="" + pending_out=$("${GIT_ISSUES_DIR}/list-pending-spooler.sh" 2>&1) || true + if [ -n "$pending_out" ] && echo "$pending_out" | grep -q "\.pending"; then + write_status "mails_pending" "Mails en attente dans le spooler (from/to). Lancer l'agent git-issues-process dans Cursor." + printf "%s\n%s\n%s\n%s\n" "$(date -Iseconds)" "mails_pending" "---" "$pending_out" > "$PENDING_FILE" + n=$(echo "$pending_out" | grep -c "\.pending" || true) + echo "[agent-loop] $(date -Iseconds) — $n mail(s) en attente dans le spooler (critère from/to). Lancer l'agent git-issues-process dans Cursor." + if [ "${AGENT_LOOP_RUN_AGENT:-0}" = "1" ] && command -v agent >/dev/null 2>&1; then + write_status "running_agent" "Lancement de l'agent Cursor pour traiter les mails du spooler." + echo "[agent-loop] $(date -Iseconds) — Lancement de l'agent Cursor (workflow git-issues-process spooler)." + AGENT_MODEL="${AGENT_LOOP_MODEL:-sonnet-4.6}" + echo "[agent-loop] $(date -Iseconds) — Modèle: $AGENT_MODEL" + AGENT_OPTS=(-p "Exécute le workflow mails du spooler (agent git-issues-process). 1) Lister les mails à traiter avec ./git-issues/list-pending-spooler.sh (ou utiliser les chemins dans agent-loop.pending). 2) Pour chaque fichier .pending : lire le JSON (from, to, subject, body, message_id, base). Répondre à l'expéditeur (--to du JSON), pas à une adresse fixe ; le « to » du mail reçu a déjà déterminé le projet. Rédiger une réponse pertinente (uniquement ton texte, pas de citation ; mail-send-reply.sh refuse si le body contient From:, Message-ID, wrote:, etc.). Envoyer avec ./git-issues/mail-send-reply.sh --to --subject \"Re: ...\" --body \"\" --in-reply-to \"\". Après envoi réussi : ./git-issues/write-response-spooler.sh --base --to --subject \"Re: ...\" --body \"\" --in-reply-to \"\". Ne pas appeler mail-mark-read.sh (spooler)." 
-f --model "$AGENT_MODEL") + if agent "${AGENT_OPTS[@]}" 2>&1; then + write_status "agent_done" "Agent terminé." + else + write_status "mails_pending" "Agent terminé avec erreur ou interruption. Relancer l'agent manuellement si besoin." + fi + fi + else + write_status "idle" "Aucun mail en attente dans le spooler (critère: from/to dans conf.json)." + if [ -f "$PENDING_FILE" ]; then + : > "$PENDING_FILE" + fi + echo "[agent-loop] $(date -Iseconds) — Aucun mail en attente dans le spooler (critère: from/to dans conf.json)." + fi + sleep "$INTERVAL" +done diff --git a/services/ia_dev/git-issues/comment-issue.sh b/services/ia_dev/git-issues/comment-issue.sh new file mode 100755 index 0000000..f8289a3 --- /dev/null +++ b/services/ia_dev/git-issues/comment-issue.sh @@ -0,0 +1,50 @@ +#!/usr/bin/env bash +# +# Add a comment to an issue via Gitea API. The comment body is automatically +# signed with: Support IA du projet Lecoffre.io / ai.support.lecoffreio@4nkweb.com +# Usage: ./comment-issue.sh +# Or: echo "message" | ./comment-issue.sh - +# +set -euo pipefail + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" + +# Signature appended to every comment (same as mail replies) +COMMENT_SIGNATURE=$'\n\n--\nSupport IA du projet Lecoffre.io\nai.support.lecoffreio@4nkweb.com' +if [[ -n "${GITEA_COMMENT_SIGNATURE:-}" ]]; then + COMMENT_SIGNATURE="${GITEA_COMMENT_SIGNATURE}" +fi + +require_jq || exit 1 + +if [[ $# -lt 1 ]]; then + log_err "Usage: $0 " + log_err " Or: $0 - (read message from stdin)" + exit 1 +fi + +ISSUE_NUM="$1" +if [[ "${2:-}" == "-" ]]; then + BODY="$(cat)" +else + BODY="${2:-}" +fi + +if [[ -z "$BODY" ]]; then + log_err "Comment body is empty." + exit 1 +fi + +BODY="${BODY}${COMMENT_SIGNATURE}" + +# Escape for JSON: jq -Rs . 
handles newlines and quotes +BODY_JSON="$(echo "$BODY" | jq -Rs .)" +RESPONSE="$(gitea_api_post "/repos/${GITEA_REPO_OWNER}/${GITEA_REPO_NAME}/issues/${ISSUE_NUM}/comments" "{\"body\":${BODY_JSON}}")" +if ! echo "$RESPONSE" | jq -e . &>/dev/null; then + log_err "API error posting comment: ${RESPONSE:0:200}" + exit 1 +fi + +log_info "Comment added to issue #${ISSUE_NUM}." diff --git a/services/ia_dev/git-issues/create-branch-for-issue.sh b/services/ia_dev/git-issues/create-branch-for-issue.sh new file mode 100755 index 0000000..dc3eb94 --- /dev/null +++ b/services/ia_dev/git-issues/create-branch-for-issue.sh @@ -0,0 +1,45 @@ +#!/usr/bin/env bash +# +# Create a local branch for an issue. Branch name: issue/ (safe, short). +# Base branch defaults to "test"; ensure it is up to date (fetch + reset to origin/base). +# Usage: ./create-branch-for-issue.sh [base_branch] +# +set -euo pipefail + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" + +if [[ $# -lt 1 ]]; then + log_err "Usage: $0 [base_branch]" + exit 1 +fi + +ISSUE_NUM="$1" +BASE="${2:-test}" + +require_git_root || exit 1 + +if git show-ref --quiet "refs/heads/issue/${ISSUE_NUM}"; then + log_info "Branch issue/${ISSUE_NUM} already exists. Checking it out." + git checkout "issue/${ISSUE_NUM}" + echo "issue/${ISSUE_NUM}" + exit 0 +fi + +if ! git show-ref --quiet "refs/heads/${BASE}"; then + log_err "Base branch ${BASE} does not exist locally." + exit 1 +fi + +git fetch origin +if git show-ref --quiet "refs/remotes/origin/${BASE}"; then + git checkout "${BASE}" + git reset --hard "origin/${BASE}" +else + git checkout "${BASE}" +fi + +git checkout -b "issue/${ISSUE_NUM}" +log_info "Created and checked out branch issue/${ISSUE_NUM} from ${BASE}." 
+echo "issue/${ISSUE_NUM}" diff --git a/services/ia_dev/git-issues/get-issue.sh b/services/ia_dev/git-issues/get-issue.sh new file mode 100755 index 0000000..615a65c --- /dev/null +++ b/services/ia_dev/git-issues/get-issue.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash +# +# Get one issue by number. Output: JSON (default) or plain text summary (--summary). +# Usage: ./get-issue.sh [--summary] +# +set -euo pipefail + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" + +require_jq || exit 1 + +if [[ $# -lt 1 ]]; then + log_err "Usage: $0 [--summary]" + exit 1 +fi + +ISSUE_NUM="$1" +SUMMARY=false +[[ "${2:-}" == "--summary" ]] && SUMMARY=true + +RESPONSE="$(gitea_api_get "/repos/${GITEA_REPO_OWNER}/${GITEA_REPO_NAME}/issues/${ISSUE_NUM}")" +if ! echo "$RESPONSE" | jq -e . &>/dev/null; then + log_err "API error or invalid JSON (issue ${ISSUE_NUM}): ${RESPONSE:0:200}" + exit 1 +fi + +if [[ "$SUMMARY" == true ]]; then + echo "--- Issue #${ISSUE_NUM} ---" + echo "Title: $(echo "$RESPONSE" | jq -r '.title')" + echo "State: $(echo "$RESPONSE" | jq -r '.state')" + echo "Labels: $(echo "$RESPONSE" | jq -r '[.labels[].name] | join(", ")')" + echo "Body:" + echo "$RESPONSE" | jq -r '.body // "(empty)"' + echo "---" +else + echo "$RESPONSE" +fi diff --git a/services/ia_dev/git-issues/imap-bridge.env.example b/services/ia_dev/git-issues/imap-bridge.env.example new file mode 100644 index 0000000..a99a25e --- /dev/null +++ b/services/ia_dev/git-issues/imap-bridge.env.example @@ -0,0 +1,43 @@ +# IMAP config for mail-to-issue (e.g. Proton Mail Bridge). +# Copy to .secrets/git-issues/imap-bridge.env and set real values. +# Do not commit .secrets/git-issues/imap-bridge.env (directory is gitignored). 
+# +# IMAP (read) +# IMAP_HOST=127.0.0.1 +# IMAP_PORT=1143 +# IMAP_USER=your-address@pm.me +# IMAP_PASSWORD=your-bridge-password +# IMAP_USE_STARTTLS=true +# For local Proton Bridge with self-signed cert, set to false to skip SSL verification (localhost only). +# IMAP_SSL_VERIFY=false +# +# SMTP (send replies; same Bridge account) +# SMTP_HOST=127.0.0.1 +# SMTP_PORT=1025 +# SMTP_USER=your-address@pm.me +# SMTP_PASSWORD=your-bridge-password +# SMTP_USE_STARTTLS=true +# +# Restrict listed mails to those sent to this address (default: ai.support.lecoffreio@4nkweb.com) +# MAIL_FILTER_TO=ai.support.lecoffreio@4nkweb.com +# +# Only fetch/list mails on or after this date (IMAP format DD-Mon-YYYY). Default: 10-Mar-2026. +# MAIL_SINCE_DATE=10-Mar-2026 +# +# Signature appended to every reply (default: Support IA du projet Lecoffre.io + ai.support.lecoffreio@4nkweb.com) +# MAIL_REPLY_SIGNATURE=--\\nSupport IA du projet Lecoffre.io\\nai.support.lecoffreio@4nkweb.com +# +# Signature for Gitea issue comments (optional; comment-issue.sh uses same default as mail signature) +# GITEA_COMMENT_SIGNATURE= + +IMAP_HOST=127.0.0.1 +IMAP_PORT=1143 +IMAP_USER= +IMAP_PASSWORD= +IMAP_USE_STARTTLS=true + +SMTP_HOST=127.0.0.1 +SMTP_PORT=1025 +SMTP_USER= +SMTP_PASSWORD= +SMTP_USE_STARTTLS=true diff --git a/services/ia_dev/git-issues/lib.sh b/services/ia_dev/git-issues/lib.sh new file mode 100755 index 0000000..e819230 --- /dev/null +++ b/services/ia_dev/git-issues/lib.sh @@ -0,0 +1,134 @@ +#!/usr/bin/env bash +# +# Shared config and helpers for Gitea issues scripts. +# Source from git-issues/*.sh. Standalone: run from ia_dev root. 
+# +set -euo pipefail + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +GITEA_API_URL="${GITEA_API_URL:-https://git.4nkweb.com/api/v1}" +GITEA_REPO_OWNER="${GITEA_REPO_OWNER:-4nk}" +GITEA_REPO_NAME="${GITEA_REPO_NAME:-lecoffre_ng}" + +# Optional: load project config from ia_dev (projects//conf.json); logs and data per project under projects// +PROJECT_CONFIG_PATH="" +PROJECT_LOGS_DIR="" +DATA_ISSUES_DIR="" +if [[ -f "${GIT_ISSUES_DIR}/../lib/project_config.sh" ]]; then + PROJECT_ROOT="$(git rev-parse --show-toplevel 2>/dev/null)" || true + if [[ -z "${PROJECT_ROOT:-}" || "$(basename "$PROJECT_ROOT")" = "ia_dev" ]]; then + PROJECT_ROOT="$(cd "${GIT_ISSUES_DIR}/../.." && pwd)" + fi + IA_DEV_ROOT="$(cd "$GIT_ISSUES_DIR/.." && pwd)" + if [[ -n "${PROJECT_ROOT:-}" ]]; then + # shellcheck source=../lib/project_config.sh + source "${GIT_ISSUES_DIR}/../lib/project_config.sh" + if [[ -n "${PROJECT_ID:-}" && -n "${IA_DEV_ROOT:-}" ]]; then + PROJECT_LOGS_DIR="${IA_DEV_ROOT}/projects/${PROJECT_ID}/logs" + DATA_ISSUES_DIR="${IA_DEV_ROOT}/projects/${PROJECT_ID}/data/issues" + mkdir -p "${PROJECT_LOGS_DIR}" "${DATA_ISSUES_DIR}" + fi + fi +fi +export PROJECT_LOGS_DIR +export DATA_ISSUES_DIR + +if [[ -f "${GIT_ISSUES_DIR}/../lib/smart_ide_logs.sh" ]]; then + # shellcheck source=../lib/smart_ide_logs.sh + source "${GIT_ISSUES_DIR}/../lib/smart_ide_logs.sh" + _IA_DEV_ROOT_FOR_LOG="$(cd "${GIT_ISSUES_DIR}/.." && pwd)" + smart_ide_logs_begin "$_IA_DEV_ROOT_FOR_LOG" "${BASH_SOURCE[1]}" "$*" + smart_ide_logs_register_exit_trap +fi + +# Load token: GITEA_TOKEN env, then project config git.token_file (path relative to ia_dev root), then default +load_gitea_token() { + if [[ -n "${GITEA_TOKEN:-}" ]]; then + return 0 + fi + local token_file="" + local ia_dev_root="${GIT_ISSUES_DIR}/.." 
+ if [[ -n "${PROJECT_CONFIG_PATH:-}" && -f "$PROJECT_CONFIG_PATH" ]] && command -v jq >/dev/null 2>&1; then + local rel_path + rel_path="$(jq -r '.git.token_file // empty' "$PROJECT_CONFIG_PATH" 2>/dev/null)" + if [[ -n "$rel_path" ]]; then + # token_file in conf is relative to ia_dev root (this project) + if [[ -f "${ia_dev_root}/${rel_path}" ]]; then + token_file="${ia_dev_root}/${rel_path}" + fi + fi + fi + if [[ -z "$token_file" ]]; then + token_file="${ia_dev_root}/.secrets/git-issues/token" + fi + if [[ -f "$token_file" ]]; then + GITEA_TOKEN="$(cat "$token_file")" + return 0 + fi + echo "[git-issues] ERROR: GITEA_TOKEN not set and ${token_file} not found" >&2 + echo "[git-issues] Set GITEA_TOKEN or create the token file with a Gitea Personal Access Token." >&2 + return 1 +} + +# curl wrapper for Gitea API (GET). Usage: gitea_api_get "/repos/owner/repo/issues" +gitea_api_get() { + local path="$1" + load_gitea_token || return 1 + curl -sS -H "Accept: application/json" \ + -H "Authorization: token ${GITEA_TOKEN}" \ + "${GITEA_API_URL}${path}" +} + +# curl wrapper for Gitea API (POST). Usage: gitea_api_post "/repos/owner/repo/issues/123/comments" '{"body":"..."}' +gitea_api_post() { + local path="$1" + local data="${2:-}" + load_gitea_token || return 1 + curl -sS -X POST -H "Accept: application/json" -H "Content-Type: application/json" \ + -H "Authorization: token ${GITEA_TOKEN}" \ + -d "$data" \ + "${GITEA_API_URL}${path}" +} + +# curl wrapper for Gitea API (PATCH). Usage: gitea_api_patch "/repos/owner/repo/wiki/page/Foo" '{"content_base64":"..."}' +gitea_api_patch() { + local path="$1" + local data="${2:-}" + load_gitea_token || return 1 + curl -sS -X PATCH -H "Accept: application/json" -H "Content-Type: application/json" \ + -H "Authorization: token ${GITEA_TOKEN}" \ + -d "$data" \ + "${GITEA_API_URL}${path}" +} + +# curl wrapper for Gitea API (DELETE). 
Usage: gitea_api_delete "/repos/owner/repo/wiki/page/Foo" +gitea_api_delete() { + local path="$1" + load_gitea_token || return 1 + curl -sS -X DELETE -H "Accept: application/json" \ + -H "Authorization: token ${GITEA_TOKEN}" \ + "${GITEA_API_URL}${path}" +} + +log_ts() { date -u '+%Y-%m-%dT%H:%M:%SZ'; } +log_info() { echo "[$(log_ts)] [git-issues] $*"; } +log_err() { echo "[$(log_ts)] [git-issues] $*" >&2; } + +# Require jq for JSON output +require_jq() { + if ! command -v jq &>/dev/null; then + log_err "jq is required. Install with: apt install jq / brew install jq" + return 1 + fi +} + +# Ensure we are in the git repo root (for create-branch, etc.) +require_git_root() { + local root + root="$(git rev-parse --show-toplevel 2>/dev/null)" || true + if [[ -z "$root" ]]; then + log_err "Not inside a git repository." + return 1 + fi + cd "$root" +} diff --git a/services/ia_dev/git-issues/list-open-issues.sh b/services/ia_dev/git-issues/list-open-issues.sh new file mode 100755 index 0000000..4cd6b22 --- /dev/null +++ b/services/ia_dev/git-issues/list-open-issues.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +# +# List open issues for the configured Gitea repo. +# Output: JSON array (default) or one line per issue "number|title|state" with --lines. +# Usage: ./list-open-issues.sh [--lines] [--limit N] +# +set -euo pipefail + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" + +require_jq || exit 1 + +LINES=false +LIMIT=50 +while [[ $# -gt 0 ]]; do + case "$1" in + --lines) LINES=true; shift ;; + --limit) LIMIT="$2"; shift 2 ;; + *) log_err "Unknown option: $1"; exit 1 ;; + esac +done + +RESPONSE="$(gitea_api_get "/repos/${GITEA_REPO_OWNER}/${GITEA_REPO_NAME}/issues?state=open&page=1&limit=${LIMIT}")" +if ! echo "$RESPONSE" | jq -e . 
&>/dev/null; then
+  log_err "API error or invalid JSON: ${RESPONSE:0:200}"
+  exit 1
+fi
+
+if [[ "$LINES" == true ]]; then
+  echo "$RESPONSE" | jq -r '.[] | "\(.number)|\(.title)|\(.state)"'
+else
+  echo "$RESPONSE"
+fi
diff --git a/services/ia_dev/git-issues/list-pending-spooler.sh b/services/ia_dev/git-issues/list-pending-spooler.sh
new file mode 100755
index 0000000..086d516
--- /dev/null
+++ b/services/ia_dev/git-issues/list-pending-spooler.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+# List .pending files in projects//data/issues/ with status "pending" (one file per message; status updated in place).
+# Run from ia_dev root. If PROJECT_ID is set (from MAIL_TO or AI_AGENT_TOKEN), list that project only; else list all projects.
+# Output: one path per line.
+# Usage: depuis la racine de ia_dev : ./git-issues/list-pending-spooler.sh
+set -euo pipefail
+GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}"
+ROOT="$(cd "${GIT_ISSUES_DIR}/../.." && pwd)"
+export GIT_ISSUES_DIR
+cd "$ROOT"
+# shellcheck source=lib.sh
+source "${GIT_ISSUES_DIR}/lib.sh" 2>/dev/null || true
+if [[ -n "${DATA_ISSUES_DIR:-}" && -d "${DATA_ISSUES_DIR}" ]]; then
+  SPOOLS=("${DATA_ISSUES_DIR}")
+else
+  # No project from MAIL_TO/AI_AGENT_TOKEN: list pending from all projects
+  SPOOLS=()
+  for spool in "${GIT_ISSUES_DIR}/../projects/"*/data/issues; do
+    [[ -d "$spool" ]] || continue
+    SPOOLS+=("$spool")
+  done
+fi
+for SPOOL in ${SPOOLS[@]+"${SPOOLS[@]}"}; do
+  for f in "${SPOOL}"/*.pending; do
+    [[ -f "$f" ]] || continue
+    status="$(jq -r '.status // "pending"' "$f" 2>/dev/null)"
+    if [[ "$status" != "responded" ]]; then
+      echo "$f"
+    fi
+  done
+done
diff --git a/services/ia_dev/git-issues/mail-create-issue-from-email.py b/services/ia_dev/git-issues/mail-create-issue-from-email.py
new file mode 100755
index 0000000..9091e09
--- /dev/null
+++ b/services/ia_dev/git-issues/mail-create-issue-from-email.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+"""
+Create one Gitea issue from one
email (by UID), then mark the email as read. +If --title and/or --body are provided (formalized by agent), use them; else use subject and body from the email. +Usage: ./git-issues/mail-create-issue-from-email.sh --uid [--title "..." ] [--body "..." ] +""" + +from __future__ import annotations + +import argparse +import email +import imaplib +import ssl +import sys +from email.header import decode_header +from pathlib import Path + +sys.path.insert(0, str(Path(__file__).resolve().parent)) +from mail_common import ( + create_gitea_issue, + load_gitea_config, + load_imap_config, + repo_root, + sanitize_title, +) + + +def decode_header_value(header: str | None) -> str: + if not header: + return "" + from email.header import decode_header as dh + parts = dh(header) + result = [] + for part, charset in parts: + if isinstance(part, bytes): + result.append(part.decode(charset or "utf-8", errors="replace")) + else: + result.append(part) + return "".join(result) + + +def get_text_body(msg: email.message.Message) -> str: + if msg.is_multipart(): + for part in msg.walk(): + if part.get_content_type() == "text/plain": + payload = part.get_payload(decode=True) + if payload: + return payload.decode(part.get_content_charset() or "utf-8", errors="replace") + return "" + payload = msg.get_payload(decode=True) + if not payload: + return "" + return payload.decode(msg.get_content_charset() or "utf-8", errors="replace") + + +def main() -> None: + ap = argparse.ArgumentParser(description="Create one Gitea issue from one email by UID") + ap.add_argument("--uid", required=True, help="IMAP message UID") + ap.add_argument("--title", default="", help="Formalized issue title (else use subject)") + ap.add_argument("--body", default="", help="Formalized issue body (else use email body + From)") + args = ap.parse_args() + + cfg = load_imap_config() + if not cfg["user"] or not cfg["password"]: + root = repo_root() + env_path = root / ".secrets" / "git-issues" / "imap-bridge.env" + 
print("[git-issues] ERROR: IMAP_USER and IMAP_PASSWORD required.", file=sys.stderr) + sys.exit(1) + + gitea = load_gitea_config() + if not gitea["token"]: + print("[git-issues] ERROR: GITEA_TOKEN not set.", file=sys.stderr) + sys.exit(1) + + mail = imaplib.IMAP4(cfg["host"], int(cfg["port"])) + if cfg["use_starttls"]: + mail.starttls(ssl.create_default_context()) + mail.login(cfg["user"], cfg["password"]) + mail.select("INBOX") + _, data = mail.fetch(args.uid, "(RFC822)") + if not data or not data[0]: + print("[git-issues] ERROR: Message UID not found.", file=sys.stderr) + mail.logout() + sys.exit(1) + + msg = email.message_from_bytes(data[0][1]) + from_ = decode_header_value(msg.get("From")) + subject = decode_header_value(msg.get("Subject")) + body_text = get_text_body(msg) + body_for_issue = f"**From:** {from_}\n\n{body_text}".strip() + + title = args.title.strip() if args.title else sanitize_title(subject) + body = args.body.strip() if args.body else body_for_issue + + issue = create_gitea_issue(title, body) + if not issue: + print("[git-issues] ERROR: Failed to create issue.", file=sys.stderr) + mail.logout() + sys.exit(1) + + mail.store(args.uid, "+FLAGS", "\\Seen") + mail.logout() + + num = issue.get("number", "?") + print(f"[git-issues] Created issue #{num}: {title[:60]}") + print(f"ISSUE_NUMBER={num}") + + +if __name__ == "__main__": + main() diff --git a/services/ia_dev/git-issues/mail-create-issue-from-email.sh b/services/ia_dev/git-issues/mail-create-issue-from-email.sh new file mode 100755 index 0000000..9bb6d46 --- /dev/null +++ b/services/ia_dev/git-issues/mail-create-issue-from-email.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +# Create one Gitea issue from one email (by UID), mark email read. Run from repo root. +# Usage: ./git-issues/mail-create-issue-from-email.sh --uid [--title "..." ] [--body "..." 
] +set -euo pipefail +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +export GIT_ISSUES_DIR +export REPO_ROOT="${GIT_ISSUES_DIR}/.." +_IA_FOR_LOG="$(cd "${GIT_ISSUES_DIR}/.." && pwd)" +if [[ -f "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" ]]; then + # shellcheck source=../lib/smart_ide_logs.sh + source "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" + smart_ide_logs_begin "$_IA_FOR_LOG" "$0" "$*" +fi +exec python3 "${GIT_ISSUES_DIR}/mail-create-issue-from-email.py" "$@" diff --git a/services/ia_dev/git-issues/mail-get-thread.py b/services/ia_dev/git-issues/mail-get-thread.py new file mode 100644 index 0000000..afd1152 --- /dev/null +++ b/services/ia_dev/git-issues/mail-get-thread.py @@ -0,0 +1,208 @@ +#!/usr/bin/env python3 +""" +Fetch the full email thread (conversation) for a given message UID. +Uses Message-ID, References and In-Reply-To to find all messages in the thread. +Output format: same as mail-list-unread (--- MAIL UID=... --- ... --- END MAIL ---), chronological order. 
+Usage: mail-get-thread.py + or: ./git-issues/mail-get-thread.sh +""" + +from __future__ import annotations + +import email +import imaplib +import re +import sys +from email.header import decode_header +from pathlib import Path + +sys.path.insert(0, str(Path(__file__).resolve().parent)) +from mail_common import imap_since_date, load_imap_config, repo_root, imap_ssl_context + + +def decode_header_value(header: str | None) -> str: + if not header: + return "" + parts = decode_header(header) + result = [] + for part, charset in parts: + if isinstance(part, bytes): + result.append(part.decode(charset or "utf-8", errors="replace")) + else: + result.append(part) + return "".join(result) + + +def get_text_body(msg: email.message.Message) -> str: + if msg.is_multipart(): + for part in msg.walk(): + if part.get_content_type() == "text/plain": + payload = part.get_payload(decode=True) + if payload: + return payload.decode( + part.get_content_charset() or "utf-8", errors="replace" + ) + return "" + payload = msg.get_payload(decode=True) + if not payload: + return "" + return payload.decode( + msg.get_content_charset() or "utf-8", errors="replace" + ) + + +def parse_message_ids(refs: str | None, in_reply_to: str | None) -> set[str]: + """Extract Message-ID values from References and In-Reply-To headers.""" + ids: set[str] = set() + for raw in (refs or "", in_reply_to or ""): + for part in re.split(r"\s+", raw.strip()): + part = part.strip() + if part.startswith("<") and ">" in part: + ids.add(part) + elif part and "@" in part and part not in ("<", ">"): + ids.add(part if part.startswith("<") else f"<{part}>") + return ids + + +def find_message_ids_from_msg(msg: email.message.Message) -> set[str]: + mid = (msg.get("Message-ID") or "").strip() + refs = (msg.get("References") or "").strip() + in_reply = (msg.get("In-Reply-To") or "").strip() + ids = {mid} if mid else set() + ids |= parse_message_ids(refs, in_reply) + return ids + + +def search_by_message_id(mail: imaplib.IMAP4, 
msg_id: str) -> list[str]: + """Return list of UIDs (as strings) for messages with given Message-ID, on or after MAIL_SINCE_DATE.""" + if not msg_id: + return [] + if not msg_id.startswith("<"): + msg_id = f"<{msg_id}>" + if not msg_id.endswith(">"): + msg_id = msg_id + ">" + since = imap_since_date() + criterion = f'(HEADER Message-ID "{msg_id}" SINCE {since})' + try: + _, data = mail.search(None, criterion) + except Exception: + return [] + if not data or not data[0]: + return [] + return [u.decode("ascii") for u in data[0].split() if u] + + +def fetch_message_by_uid( + mail: imaplib.IMAP4, uid: str +) -> email.message.Message | None: + """Fetch a single message by UID. Returns parsed email or None.""" + try: + _, data = mail.fetch(uid.encode("ascii"), "(RFC822)") + except Exception: + return None + if not data or not data[0] or len(data[0]) < 2: + return None + raw = data[0][1] + if isinstance(raw, bytes): + return email.message_from_bytes(raw) + return None + + +def format_message(uid: str, msg: email.message.Message) -> str: + mid = (msg.get("Message-ID") or "").strip() + from_ = decode_header_value(msg.get("From")) + to_ = decode_header_value(msg.get("To")) + subj = decode_header_value(msg.get("Subject")) + date_h = decode_header_value(msg.get("Date")) + body = get_text_body(msg) + lines = [ + "--- MAIL", + f"UID={uid}", + "---", + "Message-ID: " + (mid or "(none)"), + "From: " + from_, + "To: " + (to_ or ""), + "Subject: " + subj, + "Date: " + (date_h or ""), + "Body:", + body or "(empty)", + "--- END MAIL ---", + ] + return "\n".join(lines) + + +def main() -> int: + if len(sys.argv) < 2: + print("Usage: mail-get-thread.py ", file=sys.stderr) + return 1 + uid0 = sys.argv[1].strip() + if not uid0: + print("[git-issues] ERROR: UID required.", file=sys.stderr) + return 1 + + cfg = load_imap_config() + if not cfg["user"] or not cfg["password"]: + root = repo_root() + env_path = root / ".secrets" / "git-issues" / "imap-bridge.env" + print( + "[git-issues] ERROR: 
IMAP_USER and IMAP_PASSWORD required.", + file=sys.stderr, + ) + print(f"[git-issues] Set env or create {env_path}", file=sys.stderr) + return 1 + + mail = imaplib.IMAP4(cfg["host"], int(cfg["port"])) + if cfg["use_starttls"]: + mail.starttls(imap_ssl_context(cfg.get("ssl_verify", True))) + mail.login(cfg["user"], cfg["password"]) + mail.select("INBOX") + + msg0 = fetch_message_by_uid(mail, uid0) + if not msg0: + print(f"[git-issues] No message found for UID={uid0}.", file=sys.stderr) + mail.logout() + return 1 + + to_fetch: set[str] = find_message_ids_from_msg(msg0) + seen_ids: set[str] = set() + uids_by_mid: dict[str, str] = {} + + while to_fetch: + mid = to_fetch.pop() + if not mid or mid in seen_ids: + continue + seen_ids.add(mid) + uids = search_by_message_id(mail, mid) + if uids: + uids_by_mid[mid] = uids[0] + msg = fetch_message_by_uid(mail, uids[0]) + if msg: + to_fetch |= find_message_ids_from_msg(msg) + + mid0 = (msg0.get("Message-ID") or "").strip() + if mid0 and mid0 not in uids_by_mid: + uids_by_mid[mid0] = uid0 + + collected: list[tuple[str, str, email.message.Message]] = [] + for _mid, uid in uids_by_mid.items(): + msg = fetch_message_by_uid(mail, uid) + if not msg: + continue + date_h = (msg.get("Date") or "").strip() + collected.append((date_h, uid, msg)) + + if uid0 not in uids_by_mid.values(): + date0 = (msg0.get("Date") or "").strip() + collected.append((date0, uid0, msg0)) + + collected.sort(key=lambda x: x[0]) + + for _date, uid, msg in collected: + print(format_message(uid, msg)) + + mail.logout() + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/services/ia_dev/git-issues/mail-get-thread.sh b/services/ia_dev/git-issues/mail-get-thread.sh new file mode 100755 index 0000000..6597ff7 --- /dev/null +++ b/services/ia_dev/git-issues/mail-get-thread.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +# Fetch full email thread for a given UID. Run from repo root. 
+# Usage: ./git-issues/mail-get-thread.sh +set -euo pipefail +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +export GIT_ISSUES_DIR +export REPO_ROOT="${GIT_ISSUES_DIR}/.." +if [ $# -lt 1 ]; then + echo "Usage: $0 " >&2 + exit 1 +fi +_IA_FOR_LOG="$(cd "${GIT_ISSUES_DIR}/.." && pwd)" +if [[ -f "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" ]]; then + # shellcheck source=../lib/smart_ide_logs.sh + source "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" + smart_ide_logs_begin "$_IA_FOR_LOG" "$0" "$*" +fi +exec python3 "${GIT_ISSUES_DIR}/mail-get-thread.py" "$1" diff --git a/services/ia_dev/git-issues/mail-list-unread.py b/services/ia_dev/git-issues/mail-list-unread.py new file mode 100755 index 0000000..3a5a124 --- /dev/null +++ b/services/ia_dev/git-issues/mail-list-unread.py @@ -0,0 +1,118 @@ +#!/usr/bin/env python3 +""" +List unread emails via IMAP (e.g. Proton Mail Bridge). Read-only; does not mark as read. +Only lists messages sent to the configured alias (MAIL_FILTER_TO, default ai.support.lecoffreio@4nkweb.com). +Output is for the agent: each mail with UID, Message-ID, From, To, Subject, Date, body. 
+Usage: ./git-issues/mail-list-unread.sh +""" + +from __future__ import annotations + +import email +import imaplib +import sys +from email.header import decode_header +from pathlib import Path + +# Add git-issues to path for mail_common +sys.path.insert(0, str(Path(__file__).resolve().parent)) +from mail_common import imap_search_criterion_unseen, load_imap_config, repo_root, imap_ssl_context + + +def decode_header_value(header: str | None) -> str: + if not header: + return "" + from email.header import decode_header as dh + parts = dh(header) + result = [] + for part, charset in parts: + if isinstance(part, bytes): + result.append(part.decode(charset or "utf-8", errors="replace")) + else: + result.append(part) + return "".join(result) + + +def get_text_body(msg: email.message.Message) -> str: + if msg.is_multipart(): + for part in msg.walk(): + if part.get_content_type() == "text/plain": + payload = part.get_payload(decode=True) + if payload: + return payload.decode(part.get_content_charset() or "utf-8", errors="replace") + return "" + payload = msg.get_payload(decode=True) + if not payload: + return "" + return payload.decode(msg.get_content_charset() or "utf-8", errors="replace") + + +def is_sent_to_alias(msg: email.message.Message, filter_to: str) -> bool: + """True if any To/Delivered-To/X-Original-To/Cc header contains the filter address.""" + if not filter_to: + return True + headers_to_check = ("To", "Delivered-To", "X-Original-To", "Cc", "Envelope-To") + for name in headers_to_check: + value = msg.get(name) + if value: + decoded = decode_header_value(value).lower() + if filter_to in decoded: + return True + return False + + +def main() -> None: + cfg = load_imap_config() + if not cfg["user"] or not cfg["password"]: + root = repo_root() + env_path = root / ".secrets" / "git-issues" / "imap-bridge.env" + print("[git-issues] ERROR: IMAP_USER and IMAP_PASSWORD required.", file=sys.stderr) + print(f"[git-issues] Set env or create {env_path}", file=sys.stderr) 
+ sys.exit(1) + + mail = imaplib.IMAP4(cfg["host"], int(cfg["port"])) + if cfg["use_starttls"]: + mail.starttls(imap_ssl_context(cfg.get("ssl_verify", True))) + mail.login(cfg["user"], cfg["password"]) + mail.select("INBOX") + criterion = imap_search_criterion_unseen() + _, nums = mail.search(None, criterion) + ids = nums[0].split() + if not ids: + print("[git-issues] No unread messages (IMAP UNSEEN, on or after MAIL_SINCE_DATE). For spooler criterion (from/to), use tickets-fetch-inbox.sh and list-pending-spooler.sh.") + mail.logout() + return + + shown = 0 + for uid in ids: + uid_s = uid.decode("ascii") + _, data = mail.fetch(uid, "(RFC822)") + if not data or not data[0]: + continue + msg = email.message_from_bytes(data[0][1]) + if not is_sent_to_alias(msg, cfg.get("filter_to", "")): + continue + mid = msg.get("Message-ID", "").strip() + from_ = decode_header_value(msg.get("From")) + to_ = decode_header_value(msg.get("To")) + subj = decode_header_value(msg.get("Subject")) + date_h = decode_header_value(msg.get("Date")) + body = get_text_body(msg) + print("--- MAIL", f"UID={uid_s}", "---") + print("Message-ID:", mid or "(none)") + print("From:", from_) + print("To:", to_ or "") + print("Subject:", subj) + print("Date:", date_h or "") + print("Body:") + print(body or "(empty)") + print("--- END MAIL ---") + shown += 1 + + if shown == 0: + print("[git-issues] No unread messages sent to the configured alias (MAIL_FILTER_TO). For spooler (from/to in conf.json), use tickets-fetch-inbox.sh and list-pending-spooler.sh.") + mail.logout() + + +if __name__ == "__main__": + main() diff --git a/services/ia_dev/git-issues/mail-list-unread.sh b/services/ia_dev/git-issues/mail-list-unread.sh new file mode 100755 index 0000000..5ba8826 --- /dev/null +++ b/services/ia_dev/git-issues/mail-list-unread.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +# List unread emails (read-only). Run from repo root. 
+set -euo pipefail +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +# REPO_ROOT = ia_dev so mail_common.repo_root() finds .secrets under ia_dev +export GIT_ISSUES_DIR +export REPO_ROOT="${GIT_ISSUES_DIR}/.." +_IA_FOR_LOG="$(cd "${GIT_ISSUES_DIR}/.." && pwd)" +if [[ -f "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" ]]; then + # shellcheck source=../lib/smart_ide_logs.sh + source "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" + smart_ide_logs_begin "$_IA_FOR_LOG" "$0" "$*" +fi +exec python3 "${GIT_ISSUES_DIR}/mail-list-unread.py" diff --git a/services/ia_dev/git-issues/mail-mark-read.py b/services/ia_dev/git-issues/mail-mark-read.py new file mode 100755 index 0000000..36bc8d0 --- /dev/null +++ b/services/ia_dev/git-issues/mail-mark-read.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python3 +""" +Mark one email as read by UID (e.g. after replying without creating an issue). +Usage: ./git-issues/mail-mark-read.sh +""" + +from __future__ import annotations + +import imaplib +import sys +from pathlib import Path + +sys.path.insert(0, str(Path(__file__).resolve().parent)) +from mail_common import load_imap_config, repo_root, imap_ssl_context + + +def main() -> None: + if len(sys.argv) < 2: + print("[git-issues] Usage: mail-mark-read.sh ", file=sys.stderr) + sys.exit(1) + uid = sys.argv[1].strip() + + cfg = load_imap_config() + if not cfg["user"] or not cfg["password"]: + root = repo_root() + env_path = root / ".secrets" / "git-issues" / "imap-bridge.env" + print("[git-issues] ERROR: IMAP_USER and IMAP_PASSWORD required.", file=sys.stderr) + sys.exit(1) + + mail = imaplib.IMAP4(cfg["host"], int(cfg["port"])) + if cfg["use_starttls"]: + mail.starttls(imap_ssl_context(cfg.get("ssl_verify", True))) + mail.login(cfg["user"], cfg["password"]) + mail.select("INBOX") + mail.store(uid, "+FLAGS", "\\Seen") + mail.logout() + print("[git-issues] Marked as read.") + + +if __name__ == "__main__": + main() diff --git a/services/ia_dev/git-issues/mail-mark-read.sh 
b/services/ia_dev/git-issues/mail-mark-read.sh new file mode 100755 index 0000000..7834b50 --- /dev/null +++ b/services/ia_dev/git-issues/mail-mark-read.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +# Mark one email as read by UID. Run from repo root. +# Usage: ./git-issues/mail-mark-read.sh +set -euo pipefail +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +export GIT_ISSUES_DIR +export REPO_ROOT="${GIT_ISSUES_DIR}/.." +_IA_FOR_LOG="$(cd "${GIT_ISSUES_DIR}/.." && pwd)" +if [[ -f "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" ]]; then + # shellcheck source=../lib/smart_ide_logs.sh + source "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" + smart_ide_logs_begin "$_IA_FOR_LOG" "$0" "$*" +fi +exec python3 "${GIT_ISSUES_DIR}/mail-mark-read.py" "$@" diff --git a/services/ia_dev/git-issues/mail-send-reply.py b/services/ia_dev/git-issues/mail-send-reply.py new file mode 100755 index 0000000..9d5d1a4 --- /dev/null +++ b/services/ia_dev/git-issues/mail-send-reply.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python3 +""" +Send a reply email via SMTP (e.g. Proton Mail Bridge). +Usage: ./git-issues/mail-send-reply.sh --to addr@example.com --subject "..." --body "..." [--in-reply-to "" [--references ""]] + Or: echo "body" | ./git-issues/mail-send-reply.sh --to addr@example.com --subject "..." [--in-reply-to ""] +""" + +from __future__ import annotations + +import argparse +import os +import smtplib +import sys +from email.mime.text import MIMEText +from pathlib import Path + +sys.path.insert(0, str(Path(__file__).resolve().parent)) +from mail_common import load_smtp_config, repo_root, imap_ssl_context + +DEFAULT_SIGNATURE = """-- +Support IA du projet Lecoffre.io +ai.support.lecoffreio@4nkweb.com""" + +# Patterns that indicate the body contains a citation of the received message (forbidden). 
+CITATION_PATTERNS = ( + "From:", + "Message-ID:", + "Message-ID :", + " wrote:", + " a écrit", +) +CITATION_LINE_START = (">",) # Quoted line start + + +def body_contains_citation(body: str) -> bool: + """Return True if body looks like it contains the received message (citation).""" + if not body or not body.strip(): + return False + lines = body.strip().splitlines() + for line in lines: + stripped = line.strip() + if not stripped: + continue + for pat in CITATION_PATTERNS: + if pat in line: + return True + for start in CITATION_LINE_START: + if stripped.startswith(start): + return True + return False + + +def get_reply_signature() -> str: + sig = os.environ.get("MAIL_REPLY_SIGNATURE", "").strip() + if sig: + return "\n\n" + sig.replace("\\n", "\n") + return "\n\n" + DEFAULT_SIGNATURE + + +def main() -> None: + ap = argparse.ArgumentParser(description="Send reply email via Bridge SMTP") + ap.add_argument("--to", required=True, help="To address") + ap.add_argument("--subject", required=True, help="Subject") + ap.add_argument("--body", default="", help="Body (or use stdin)") + ap.add_argument("--in-reply-to", default="", help="Message-ID of the message we reply to") + ap.add_argument("--references", default="", help="References header for threading") + args = ap.parse_args() + + cfg = load_smtp_config() + if not cfg["user"] or not cfg["password"]: + root = repo_root() + env_path = root / ".secrets" / "git-issues" / "imap-bridge.env" + print("[git-issues] ERROR: SMTP_USER and SMTP_PASSWORD required.", file=sys.stderr) + print(f"[git-issues] Set env or create {env_path}", file=sys.stderr) + sys.exit(1) + + body = args.body + if not body and not sys.stdin.isatty(): + body = sys.stdin.read() + body = body.rstrip() + if body_contains_citation(body): + print( + "[git-issues] ERROR: Body must not contain the received message (no citation, no From:, Message-ID, wrote:, etc.). 
Send only your reply text.", + file=sys.stderr, + ) + sys.exit(1) + body = (body + get_reply_signature()).strip() + + msg = MIMEText(body, "plain", "utf-8") + msg["Subject"] = args.subject + msg["From"] = cfg["user"] + msg["To"] = args.to + if args.in_reply_to: + msg["In-Reply-To"] = args.in_reply_to + if args.references: + msg["References"] = args.references + + with smtplib.SMTP(cfg["host"], int(cfg["port"])) as smtp: + if cfg["use_starttls"]: + smtp.starttls(context=imap_ssl_context(cfg.get("ssl_verify", True))) + smtp.login(cfg["user"], cfg["password"]) + smtp.sendmail(cfg["user"], [args.to], msg.as_string()) + + print("[git-issues] Reply sent.") + + +if __name__ == "__main__": + main() diff --git a/services/ia_dev/git-issues/mail-send-reply.sh b/services/ia_dev/git-issues/mail-send-reply.sh new file mode 100755 index 0000000..458495d --- /dev/null +++ b/services/ia_dev/git-issues/mail-send-reply.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +# Send reply email via Bridge SMTP. Run from repo root. +# Usage: ./git-issues/mail-send-reply.sh --to addr --subject "..." [--body "..." | stdin] [--in-reply-to "" [--references "..." ]] +set -euo pipefail +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +export GIT_ISSUES_DIR +export REPO_ROOT="${GIT_ISSUES_DIR}/.." +_IA_FOR_LOG="$(cd "${GIT_ISSUES_DIR}/.." && pwd)" +if [[ -f "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" ]]; then + # shellcheck source=../lib/smart_ide_logs.sh + source "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" + smart_ide_logs_begin "$_IA_FOR_LOG" "$0" "$*" +fi +exec python3 "${GIT_ISSUES_DIR}/mail-send-reply.py" "$@" diff --git a/services/ia_dev/git-issues/mail-thread-log.py b/services/ia_dev/git-issues/mail-thread-log.py new file mode 100644 index 0000000..8043f26 --- /dev/null +++ b/services/ia_dev/git-issues/mail-thread-log.py @@ -0,0 +1,333 @@ +#!/usr/bin/env python3 +""" +Thread log: one file per email thread under projects//logs/git-issues/threads/. 
+Content: exchanges (received + sent), tickets (issues), commits. +Usage: + mail-thread-log.py get-id --uid # print THREAD_ID=... + mail-thread-log.py init --uid # create/update log from thread + mail-thread-log.py append-sent --thread-id --to --subject "..." [--body "..."] [--date "..."] + mail-thread-log.py append-issue --thread-id --issue [--title "..."] + mail-thread-log.py append-commit --thread-id --hash --message "..." [--branch "..."] +""" + +from __future__ import annotations + +import argparse +import re +import subprocess +import sys +from datetime import datetime, timezone +from pathlib import Path + +sys.path.insert(0, str(Path(__file__).resolve().parent)) +from mail_common import load_gitea_config, load_imap_config, repo_root +from project_config import project_logs_dir + + +def threads_dir() -> Path: + """Thread log directory: projects//logs/git-issues/threads/ or repo logs fallback.""" + d = project_logs_dir() / "git-issues" / "threads" + d.mkdir(parents=True, exist_ok=True) + return d + + +def sanitize_thread_id(raw: str, max_len: int = 80) -> str: + s = re.sub(r"[^a-zA-Z0-9._-]", "_", raw).strip("_") + return s[:max_len] if s else "thread_unknown" + + +def get_thread_output(uid: str) -> str: + gitea_dir = Path(__file__).resolve().parent + root = gitea_dir.parent + env = {"GIT_ISSUES_DIR": str(gitea_dir)} + result = subprocess.run( + [sys.executable, str(gitea_dir / "mail-get-thread.py"), uid], + cwd=str(root), + capture_output=True, + text=True, + env={**__import__("os").environ, **env}, + timeout=60, + ) + if result.returncode != 0: + raise RuntimeError( + f"mail-get-thread failed: {result.stderr or result.stdout or 'unknown'}" + ) + return result.stdout + + +def parse_thread_blocks(text: str) -> list[dict[str, str]]: + """Parse --- MAIL UID=... --- ... --- END MAIL --- blocks.""" + blocks: list[dict[str, str]] = [] + pattern = re.compile( + r"--- MAIL\s+UID=(\S+)\s+---\s*\n" + r"(?:Message-ID:\s*(.*?)\n)?" 
+ r"From:\s*(.*?)\n" + r"To:\s*(.*?)\n" + r"Subject:\s*(.*?)\n" + r"Date:\s*(.*?)\n" + r"Body:\s*\n(.*?)--- END MAIL ---", + re.DOTALL, + ) + for m in pattern.finditer(text): + blocks.append({ + "uid": m.group(1).strip(), + "message_id": (m.group(2) or "").strip(), + "from": (m.group(3) or "").strip(), + "to": (m.group(4) or "").strip(), + "subject": (m.group(5) or "").strip(), + "date": (m.group(6) or "").strip(), + "body": (m.group(7) or "").strip(), + }) + return blocks + + +def get_thread_id_from_uid(uid: str) -> str: + out = get_thread_output(uid) + blocks = parse_thread_blocks(out) + if not blocks: + return sanitize_thread_id(f"thread_uid_{uid}") + first_msg_id = (blocks[0].get("message_id") or "").strip() or blocks[0].get("uid", "") + return sanitize_thread_id(first_msg_id) + + +def format_exchange_received(block: dict[str, str]) -> str: + return ( + f"### {block.get('date', '')} — Reçu\n" + f"- **De:** {block.get('from', '')}\n" + f"- **À:** {block.get('to', '')}\n" + f"- **Sujet:** {block.get('subject', '')}\n\n" + f"{block.get('body', '')}\n\n" + ) + + +def format_exchange_sent(block: dict[str, str]) -> str: + return ( + f"### {block.get('date', '')} — Envoyé\n" + f"- **À:** {block.get('to', '')}\n" + f"- **Sujet:** {block.get('subject', '')}\n\n" + f"{block.get('body', '')}\n\n" + ) + + +def init_log(uid: str) -> str: + cfg = load_imap_config() + our_address = (cfg.get("filter_to") or "").strip().lower() + if not our_address: + our_address = (cfg.get("user") or "").strip().lower() + + out = get_thread_output(uid) + blocks = parse_thread_blocks(out) + thread_id = get_thread_id_from_uid(uid) + log_path = threads_dir() / f"{thread_id}.md" + + received_blocks: list[dict[str, str]] = [] + sent_blocks: list[dict[str, str]] = [] + for b in blocks: + from_ = (b.get("from") or "").lower() + if our_address and our_address in from_: + sent_blocks.append(b) + else: + received_blocks.append(b) + + existing_tickets = "" + existing_commits = "" + if log_path.exists(): 
+ content = log_path.read_text(encoding="utf-8") + if "## Tickets (issues)" in content: + idx = content.index("## Tickets (issues)") + end = content.find("\n## ", idx + 1) + if end == -1: + end = len(content) + existing_tickets = content[idx:end].strip() + if "## Commits" in content: + idx = content.index("## Commits") + end = content.find("\n## ", idx + 1) + if end == -1: + end = len(content) + existing_commits = content[idx:end].strip() + + lines = [ + f"# Fil — {thread_id}", + "", + "## Échanges reçus", + "", + ] + for b in received_blocks: + lines.append(format_exchange_received(b)) + lines.append("## Échanges envoyés") + lines.append("") + for b in sent_blocks: + lines.append(format_exchange_sent(b)) + if existing_tickets: + lines.append(existing_tickets) + lines.append("") + else: + lines.append("## Tickets (issues)") + lines.append("") + lines.append("(aucun)") + lines.append("") + if existing_commits: + lines.append(existing_commits) + lines.append("") + else: + lines.append("## Commits") + lines.append("") + lines.append("(aucun)") + lines.append("") + + log_path.write_text("\n".join(lines), encoding="utf-8") + return thread_id + + +def append_sent( + thread_id: str, + to_addr: str, + subject: str, + body: str = "", + date_str: str | None = None, +) -> None: + if not date_str: + date_str = datetime.now(timezone.utc).strftime("%a, %d %b %Y %H:%M:%S +0000") + log_path = threads_dir() / f"{sanitize_thread_id(thread_id)}.md" + block = { + "date": date_str, + "to": to_addr, + "subject": subject, + "body": body, + } + section = format_exchange_sent(block) + if not log_path.exists(): + log_path.write_text( + f"# Fil — {thread_id}\n\n## Échanges reçus\n\n(aucun)\n\n" + "## Échanges envoyés\n\n" + section + "\n## Tickets (issues)\n\n(aucun)\n\n## Commits\n\n(aucun)\n", + encoding="utf-8", + ) + return + content = log_path.read_text(encoding="utf-8") + insert_marker = "## Échanges envoyés" + idx = content.find(insert_marker) + if idx == -1: + content += "\n\n## 
Échanges envoyés\n\n" + section + else: + next_section = content.find("\n## ", idx + 1) + if next_section == -1: + content = content.rstrip() + "\n\n" + section + else: + content = ( + content[:next_section].rstrip() + "\n\n" + section + content[next_section:] + ) + log_path.write_text(content, encoding="utf-8") + + +def append_issue(thread_id: str, issue_num: str, title: str = "") -> None: + gitea = load_gitea_config() + base = f"{gitea['api_url'].replace('/api/v1', '')}/{gitea['owner']}/{gitea['repo']}/issues/{issue_num}" + line = f"- #{issue_num}" + (f" — {title}" if title else "") + f" — <{base}>\n" + log_path = threads_dir() / f"{sanitize_thread_id(thread_id)}.md" + if not log_path.exists(): + log_path.write_text( + f"# Fil — {thread_id}\n\n## Échanges reçus\n\n(aucun)\n\n" + "## Échanges envoyés\n\n(aucun)\n\n## Tickets (issues)\n\n" + line + "\n## Commits\n\n(aucun)\n", + encoding="utf-8", + ) + return + content = log_path.read_text(encoding="utf-8") + marker = "## Tickets (issues)" + idx = content.find(marker) + if idx == -1: + content += "\n\n" + marker + "\n\n" + line + else: + end = idx + len(marker) + rest = content[end:] + if "(aucun)" in rest.split("\n## ")[0]: + content = content[:end] + "\n\n" + line + rest.replace("(aucun)\n", "", 1) + else: + content = content[:end] + "\n\n" + line + content[end:] + log_path.write_text(content, encoding="utf-8") + + +def append_commit( + thread_id: str, + commit_hash: str, + message: str, + branch: str = "", +) -> None: + line = f"- `{commit_hash[:12]}`" + if branch: + line += f" ({branch})" + line += f" — {message.strip()}\n" + log_path = threads_dir() / f"{sanitize_thread_id(thread_id)}.md" + if not log_path.exists(): + log_path.write_text( + f"# Fil — {thread_id}\n\n## Échanges reçus\n\n(aucun)\n\n" + "## Échanges envoyés\n\n(aucun)\n\n## Tickets (issues)\n\n(aucun)\n\n## Commits\n\n" + line, + encoding="utf-8", + ) + return + content = log_path.read_text(encoding="utf-8") + marker = "## Commits" + idx = 
content.find(marker) + if idx == -1: + content += "\n\n" + marker + "\n\n" + line + else: + end = idx + len(marker) + rest = content[end:] + if "(aucun)" in rest.split("\n## ")[0]: + content = content[:end] + "\n\n" + line + rest.replace("(aucun)\n", "", 1) + else: + content = content[:end] + "\n\n" + line + content[end:] + log_path.write_text(content, encoding="utf-8") + + +def main() -> int: + ap = argparse.ArgumentParser(prog="mail-thread-log.py") + sub = ap.add_subparsers(dest="cmd", required=True) + p_get = sub.add_parser("get-id") + p_get.add_argument("--uid", required=True, help="Mail UID") + p_init = sub.add_parser("init") + p_init.add_argument("--uid", required=True, help="Mail UID") + p_sent = sub.add_parser("append-sent") + p_sent.add_argument("--thread-id", required=True) + p_sent.add_argument("--to", required=True, dest="to_addr") + p_sent.add_argument("--subject", required=True) + p_sent.add_argument("--body", default="") + p_sent.add_argument("--date", default=None) + p_issue = sub.add_parser("append-issue") + p_issue.add_argument("--thread-id", required=True) + p_issue.add_argument("--issue", required=True) + p_issue.add_argument("--title", default="") + p_commit = sub.add_parser("append-commit") + p_commit.add_argument("--thread-id", required=True) + p_commit.add_argument("--hash", required=True) + p_commit.add_argument("--message", required=True) + p_commit.add_argument("--branch", default="") + + args = ap.parse_args() + if args.cmd == "get-id": + tid = get_thread_id_from_uid(args.uid) + print(f"THREAD_ID={tid}") + return 0 + if args.cmd == "init": + tid = init_log(args.uid) + print(f"THREAD_ID={tid}") + return 0 + if args.cmd == "append-sent": + append_sent( + args.thread_id, + args.to_addr, + args.subject, + args.body, + args.date, + ) + return 0 + if args.cmd == "append-issue": + append_issue(args.thread_id, args.issue, args.title) + return 0 + if args.cmd == "append-commit": + append_commit(args.thread_id, args.hash, args.message, 
args.branch) + return 0 + return 1 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/services/ia_dev/git-issues/mail-thread-log.sh b/services/ia_dev/git-issues/mail-thread-log.sh new file mode 100755 index 0000000..fb4b15f --- /dev/null +++ b/services/ia_dev/git-issues/mail-thread-log.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash +# Thread log: one file per thread under projects//logs/git-issues/threads/. Run from repo root. +# Usage: +# ./git-issues/mail-thread-log.sh get-id --uid +# ./git-issues/mail-thread-log.sh init --uid +# ./git-issues/mail-thread-log.sh append-sent --thread-id --to --subject "..." [--body "..."] [--date "..."] +# ./git-issues/mail-thread-log.sh append-issue --thread-id --issue [--title "..."] +# ./git-issues/mail-thread-log.sh append-commit --thread-id --hash --message "..." [--branch "..."] +set -euo pipefail +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +export GIT_ISSUES_DIR +export REPO_ROOT="${GIT_ISSUES_DIR}/.." +_IA_FOR_LOG="$(cd "${GIT_ISSUES_DIR}/.." && pwd)" +if [[ -f "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" ]]; then + # shellcheck source=../lib/smart_ide_logs.sh + source "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" + smart_ide_logs_begin "$_IA_FOR_LOG" "$0" "$*" +fi +exec python3 "${GIT_ISSUES_DIR}/mail-thread-log.py" "$@" diff --git a/services/ia_dev/git-issues/mail-to-issue.py b/services/ia_dev/git-issues/mail-to-issue.py new file mode 100755 index 0000000..35d8d89 --- /dev/null +++ b/services/ia_dev/git-issues/mail-to-issue.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python3 +""" +Create Gitea issues from unread emails via IMAP (e.g. Proton Mail Bridge). + +**Preferred flow (agent-driven):** do not chain directly. 
Use mail-list-unread.sh +to list unread emails, then for each: formalize the issue or send a reply (mail-send-reply.sh); +only when a correction/evolution is ready, create the issue (mail-create-issue-from-email.sh +with optional formalized title/body), treat it (fix/evol), then comment on the issue and +reply to the email via the Bridge. + +This script (mail-to-issue) is a **batch** fallback: it creates one issue per unread +message with title=subject and body=text+From, then marks messages as read. Use only +when the agent-driven flow is not used. + +Reads IMAP config from .secrets/git-issues/imap-bridge.env (or env vars). +Reads Gitea token from GITEA_TOKEN or .secrets/git-issues/token. +""" + +from __future__ import annotations + +import email +import imaplib +import ssl +import sys +from email.header import decode_header +from pathlib import Path + +sys.path.insert(0, str(Path(__file__).resolve().parent)) +from mail_common import ( + create_gitea_issue, + imap_search_criterion_unseen, + load_gitea_config, + load_imap_config, + repo_root, + sanitize_title, +) + + +def _decode_header_value(header: str | None) -> str: + if not header: + return "" + parts = decode_header(header) + result = [] + for part, charset in parts: + if isinstance(part, bytes): + result.append(part.decode(charset or "utf-8", errors="replace")) + else: + result.append(part) + return "".join(result) + + +def _get_text_body(msg: email.message.Message) -> str: + if msg.is_multipart(): + for part in msg.walk(): + if part.get_content_type() == "text/plain": + payload = part.get_payload(decode=True) + if payload: + return payload.decode(part.get_content_charset() or "utf-8", errors="replace") + return "" + payload = msg.get_payload(decode=True) + if not payload: + return "" + return payload.decode(msg.get_content_charset() or "utf-8", errors="replace") + + +def main() -> None: + imap_cfg = load_imap_config() + if not imap_cfg["user"] or not imap_cfg["password"]: + root = repo_root() + env_path = 
root / ".secrets" / "git-issues" / "imap-bridge.env" + print("[git-issues] ERROR: IMAP_USER and IMAP_PASSWORD required.", file=sys.stderr) + sys.exit(1) + gitea_cfg = load_gitea_config() + if not gitea_cfg["token"]: + print("[git-issues] ERROR: GITEA_TOKEN not set.", file=sys.stderr) + sys.exit(1) + + mail = imaplib.IMAP4(imap_cfg["host"], int(imap_cfg["port"])) + if imap_cfg["use_starttls"]: + mail.starttls(ssl.create_default_context()) + mail.login(imap_cfg["user"], imap_cfg["password"]) + mail.select("INBOX") + criterion = imap_search_criterion_unseen() + _, nums = mail.search(None, criterion) + ids = nums[0].split() + if not ids: + print("[git-issues] No unread messages.") + mail.logout() + return + + created = 0 + for uid in ids: + uid_s = uid.decode("ascii") + _, data = mail.fetch(uid, "(RFC822)") + if not data or not data[0]: + continue + msg = email.message_from_bytes(data[0][1]) + subject = _decode_header_value(msg.get("Subject")) + from_ = _decode_header_value(msg.get("From")) + body_text = _get_text_body(msg) + body_for_issue = f"**From:** {from_}\n\n{body_text}".strip() + title = sanitize_title(subject) + issue = create_gitea_issue(title, body_for_issue) + if issue: + created += 1 + print(f"[git-issues] Created issue #{issue.get('number', '?')}: {title[:60]}") + mail.store(uid_s, "+FLAGS", "\\Seen") + else: + print(f"[git-issues] Skipped (API failed): {title[:60]}", file=sys.stderr) + + mail.logout() + print(f"[git-issues] Done. Created {created} issue(s).") + + +if __name__ == "__main__": + main() diff --git a/services/ia_dev/git-issues/mail-to-issue.sh b/services/ia_dev/git-issues/mail-to-issue.sh new file mode 100755 index 0000000..2a1aeed --- /dev/null +++ b/services/ia_dev/git-issues/mail-to-issue.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +# +# Create Gitea issues from unread emails (IMAP). Requires Proton Mail Bridge +# or any IMAP server. Config: .secrets/git-issues/imap-bridge.env and token. 
+# Usage: ./git-issues/mail-to-issue.sh +# +set -euo pipefail + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +export GIT_ISSUES_DIR +export REPO_ROOT="${GIT_ISSUES_DIR}/.." +_IA_FOR_LOG="$(cd "${GIT_ISSUES_DIR}/.." && pwd)" +if [[ -f "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" ]]; then + # shellcheck source=../lib/smart_ide_logs.sh + source "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" + smart_ide_logs_begin "$_IA_FOR_LOG" "$0" "$*" +fi +exec python3 "${GIT_ISSUES_DIR}/mail-to-issue.py" diff --git a/services/ia_dev/git-issues/mail_common.py b/services/ia_dev/git-issues/mail_common.py new file mode 100644 index 0000000..61d32cb --- /dev/null +++ b/services/ia_dev/git-issues/mail_common.py @@ -0,0 +1,144 @@ +# Shared config and helpers for git-issues mail scripts (IMAP/SMTP, Gitea). +# Used by mail-list-unread, mail-send-reply, mail-create-issue-from-email, mail-mark-read. + +from __future__ import annotations + +import json +import os +import re +import ssl +from pathlib import Path +from urllib.error import HTTPError, URLError +from urllib.request import Request, urlopen + +# Only consider messages on or after this date (IMAP format DD-Mon-YYYY). Override with env MAIL_SINCE_DATE. +MAIL_SINCE_DATE_DEFAULT = "10-Mar-2026" + + +def imap_since_date() -> str: + """Return IMAP SINCE date (DD-Mon-YYYY). Messages before this date are ignored by fetch/list scripts.""" + return os.environ.get("MAIL_SINCE_DATE", MAIL_SINCE_DATE_DEFAULT).strip() or MAIL_SINCE_DATE_DEFAULT + + +def imap_search_criterion_all() -> str: + """IMAP search: all messages on or after MAIL_SINCE_DATE.""" + return f"SINCE {imap_since_date()}" + + +def imap_search_criterion_unseen() -> str: + """IMAP search: unread messages on or after MAIL_SINCE_DATE.""" + return f"(UNSEEN SINCE {imap_since_date()})" + + +def repo_root() -> Path: + # Root = directory containing git-issues (ia_dev). 
.secrets and logs live under ia_dev (./.secrets, ./logs) + # so they do not depend on a specific project; same ia_dev works for any project (PROJECT_ID from MAIL_TO or AI_AGENT_TOKEN). + env_root = os.environ.get("REPO_ROOT") + if env_root: + return Path(env_root).resolve() + issues_dir = os.environ.get("GIT_ISSUES_DIR") + if issues_dir: + return Path(issues_dir).resolve().parent + return Path(__file__).resolve().parent.parent + + +def load_env_file(path: Path) -> None: + if not path.is_file(): + return + with open(path, encoding="utf-8") as f: + for line in f: + line = line.strip() + if not line or line.startswith("#"): + continue + if "=" in line: + key, _, value = line.partition("=") + key = key.strip() + value = value.strip().strip("'\"") + if key and key not in os.environ: + os.environ[key] = value + + +def load_imap_config() -> dict[str, str]: + root = repo_root() + env_path = root / ".secrets" / "git-issues" / "imap-bridge.env" + load_env_file(env_path) + ssl_verify_raw = os.environ.get("IMAP_SSL_VERIFY", "true").lower() + ssl_verify = ssl_verify_raw not in ("0", "false", "no") + return { + "host": os.environ.get("IMAP_HOST", "127.0.0.1"), + "port": os.environ.get("IMAP_PORT", "1143"), + "user": os.environ.get("IMAP_USER", ""), + "password": os.environ.get("IMAP_PASSWORD", ""), + "use_starttls": os.environ.get("IMAP_USE_STARTTLS", "true").lower() in ("1", "true", "yes"), + "ssl_verify": ssl_verify, + "filter_to": os.environ.get("MAIL_FILTER_TO", "ai.support.lecoffreio@4nkweb.com").strip().lower(), + } + + +def imap_ssl_context(ssl_verify: bool = True) -> ssl.SSLContext: + """Return SSL context for IMAP STARTTLS. 
Use ssl_verify=False only for local Bridge with self-signed cert.""" + if ssl_verify: + return ssl.create_default_context() + ctx = ssl.create_default_context() + ctx.check_hostname = False + ctx.verify_mode = ssl.CERT_NONE + return ctx + + +def load_smtp_config() -> dict[str, str]: + root = repo_root() + env_path = root / ".secrets" / "git-issues" / "imap-bridge.env" + load_env_file(env_path) + ssl_verify_raw = os.environ.get("IMAP_SSL_VERIFY", os.environ.get("SMTP_SSL_VERIFY", "true")).lower() + ssl_verify = ssl_verify_raw not in ("0", "false", "no") + return { + "host": os.environ.get("SMTP_HOST", os.environ.get("IMAP_HOST", "127.0.0.1")), + "port": os.environ.get("SMTP_PORT", "1025"), + "user": os.environ.get("SMTP_USER", os.environ.get("IMAP_USER", "")), + "password": os.environ.get("SMTP_PASSWORD", os.environ.get("IMAP_PASSWORD", "")), + "use_starttls": os.environ.get("SMTP_USE_STARTTLS", "true").lower() in ("1", "true", "yes"), + "ssl_verify": ssl_verify, + } + + +def load_gitea_config() -> dict[str, str]: + root = repo_root() + token = os.environ.get("GITEA_TOKEN") + if not token: + token_path = root / ".secrets" / "git-issues" / "token" + if token_path.is_file(): + token = token_path.read_text(encoding="utf-8").strip() + return { + "api_url": os.environ.get("GITEA_API_URL", "https://git.4nkweb.com/api/v1").rstrip("/"), + "owner": os.environ.get("GITEA_REPO_OWNER", "4nk"), + "repo": os.environ.get("GITEA_REPO_NAME", "lecoffre_ng"), + "token": token or "", + } + + +def sanitize_title(raw: str, max_len: int = 200) -> str: + one_line = re.sub(r"\s+", " ", raw).strip() + return one_line[:max_len] if one_line else "(no subject)" + + +def create_gitea_issue(title: str, body: str) -> dict | None: + gitea = load_gitea_config() + if not gitea["token"]: + return None + url = f"{gitea['api_url']}/repos/{gitea['owner']}/{gitea['repo']}/issues" + payload = json.dumps({"title": title, "body": body}).encode("utf-8") + req = Request( + url, + data=payload, + method="POST", 
+ headers={ + "Accept": "application/json", + "Content-Type": "application/json", + "Authorization": f"token {gitea['token']}", + }, + ) + try: + with urlopen(req, timeout=30) as resp: + return json.loads(resp.read().decode("utf-8")) + except (HTTPError, URLError): + return None diff --git a/services/ia_dev/git-issues/print-issue-prompt.sh b/services/ia_dev/git-issues/print-issue-prompt.sh new file mode 100755 index 0000000..a327922 --- /dev/null +++ b/services/ia_dev/git-issues/print-issue-prompt.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +# +# Print issue number, title and body in a single block for agent consumption. +# Used by the git-issues-process agent to get the ticket content before calling /fix or /evol. +# Usage: ./print-issue-prompt.sh <issue_number> +# +set -euo pipefail + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" + +require_jq || exit 1 + +if [[ $# -lt 1 ]]; then + log_err "Usage: $0 <issue_number>" + exit 1 +fi + +ISSUE_NUM="$1" +RESPONSE="$(gitea_api_get "/repos/${GITEA_REPO_OWNER}/${GITEA_REPO_NAME}/issues/${ISSUE_NUM}")" +if ! echo "$RESPONSE" | jq -e . &>/dev/null; then + log_err "API error (issue ${ISSUE_NUM}): ${RESPONSE:0:200}" + exit 1 +fi + +TITLE="$(echo "$RESPONSE" | jq -r '.title')" +BODY="$(echo "$RESPONSE" | jq -r '.body // "(no description)"')" +LABELS="$(echo "$RESPONSE" | jq -r '[.labels[].name] | join(", ")')" + +echo "Issue #${ISSUE_NUM}" +echo "Title: ${TITLE}" +echo "Labels: ${LABELS}" +echo "" +echo "${BODY}" diff --git a/services/ia_dev/git-issues/project_config.py b/services/ia_dev/git-issues/project_config.py new file mode 100644 index 0000000..5f444af --- /dev/null +++ b/services/ia_dev/git-issues/project_config.py @@ -0,0 +1,186 @@ +# Load project config (projects/<project_id>/conf.json) for tickets spooler and authorized_emails. 
+# Project id comes from PROJECT_ID (set by shell project_config.sh from MAIL_TO or AI_AGENT_TOKEN) or from per-message resolution (resolve_project_id_by_email_to). + +from __future__ import annotations + +import json +import os +from pathlib import Path + + +def project_root() -> Path: + """Project repo root when applicable. Derived from PROJECT_ROOT, REPO_ROOT or GIT_ISSUES_DIR.""" + env_root = os.environ.get("PROJECT_ROOT") + if env_root: + return Path(env_root).resolve() + env_repo = os.environ.get("REPO_ROOT") + if env_repo: + root = Path(env_repo).resolve() + # If REPO_ROOT is ia_dev (has git-issues), use it as repo root + if (root / "git-issues").is_dir(): + return root + return root + issues_dir = os.environ.get("GIT_ISSUES_DIR") + if issues_dir: + return Path(issues_dir).resolve().parent.parent + return Path(__file__).resolve().parent.parent.parent + + +def ia_dev_root() -> Path: + """Directory containing git-issues (ia_dev).""" + issues_dir = os.environ.get("GIT_ISSUES_DIR") + if issues_dir: + return Path(issues_dir).resolve().parent + return Path(__file__).resolve().parent.parent + + +def load_project_config() -> dict | None: + """Load projects//conf.json. Uses PROJECT_ID from env (set by shell from MAIL_TO or AI_AGENT_TOKEN). Returns None if not found or PROJECT_ID missing.""" + project_id = os.environ.get("PROJECT_ID", "").strip() + if not project_id: + return None + return load_project_config_by_id(project_id) + + +def project_dir() -> Path | None: + """Path to projects// (under ia_dev). Uses PROJECT_ID from env. 
None if PROJECT_ID not set.""" + project_id = os.environ.get("PROJECT_ID", "").strip() + if not project_id: + return None + return ia_dev_root() / "projects" / project_id + + +def data_issues_dir() -> Path: + """Path to data/issues/ spooler under projects// (ia_dev/projects//data/issues).""" + pd = project_dir() + if pd is not None: + return pd / "data" / "issues" + return project_root() / "data" / "issues" + + +def data_issues_dir_for_project(project_id: str) -> Path: + """Path to data/issues/ for a given project id (ia_dev/projects//data/issues).""" + ia_dev = ia_dev_root() + return ia_dev / "projects" / project_id / "data" / "issues" + + +def project_logs_dir() -> Path: + """Path to logs/ under projects// (ia_dev/projects//logs).""" + pd = project_dir() + if pd is not None: + return pd / "logs" + return project_root() / "logs" + + +def authorized_emails() -> dict[str, str | list[str]]: + """Return tickets.authorized_emails (to, from list). Empty dict if missing.""" + conf = load_project_config() + if not conf: + return {} + tickets = conf.get("tickets") or {} + return tickets.get("authorized_emails") or {} + + +def list_project_ids() -> list[str]: + """List all project ids (directory names under projects/).""" + ia_dev = ia_dev_root() + projects_dir = ia_dev / "projects" + if not projects_dir.is_dir(): + return [] + return [d.name for d in projects_dir.iterdir() if d.is_dir() and (d / "conf.json").is_file()] + + +def _normalize_conf_to_addresses(auth_to: object) -> set[str]: + """Return set of normalized (lowercase) email addresses from authorized_emails.to. + Supports: str (single address), list of str, or list of dict with env keys (e.g. test, pprod, prod). 
+ Address pattern AI..@4nkweb.com; project_id and env may be uppercase.""" + out: set[str] = set() + if not auth_to: + return out + if isinstance(auth_to, str): + a = auth_to.strip().lower() + if a: + out.add(a) + return out + if isinstance(auth_to, list): + for item in auth_to: + if isinstance(item, str): + a = item.strip().lower() + if a: + out.add(a) + elif isinstance(item, dict): + for v in item.values(): + if isinstance(v, str): + a = v.strip().lower() + if a: + out.add(a) + return out + + +def resolve_project_id_by_email_to(to_address: str) -> str | None: + """Find project id whose tickets.authorized_emails.to matches the given address (case-insensitive). + authorized_emails.to may be a single string or a list of objects { test, pprod, prod } with addresses + AI..@4nkweb.com (project_id and env may be uppercase).""" + if not to_address or not to_address.strip(): + return None + to_normalized = to_address.strip().lower() + for pid in list_project_ids(): + conf_path = ia_dev_root() / "projects" / pid / "conf.json" + try: + with open(conf_path, encoding="utf-8") as f: + conf = json.load(f) + except (OSError, json.JSONDecodeError): + continue + tickets = conf.get("tickets") or {} + auth = tickets.get("authorized_emails") or {} + conf_to_set = _normalize_conf_to_addresses(auth.get("to")) + if to_normalized in conf_to_set: + return pid + return None + + +def _list_project_envs(project_id: str) -> list[str]: + """List env names (subdirs of .secrets) for a project that contain ia_token.""" + ia_dev = ia_dev_root() + secrets_dir = ia_dev / "projects" / project_id / ".secrets" + if not secrets_dir.is_dir(): + return [] + return [ + d.name + for d in secrets_dir.iterdir() + if d.is_dir() and (d / "ia_token").is_file() + ] + + +def resolve_project_and_env_by_token(token: str) -> tuple[str, str] | None: + """Find (project_id, env) by scanning projects//.secrets//ia_token. 
Returns first match.""" + if not token or not token.strip(): + return None + token_stripped = token.strip() + for pid in list_project_ids(): + for env in _list_project_envs(pid): + token_path = ia_dev_root() / "projects" / pid / ".secrets" / env / "ia_token" + try: + content = token_path.read_text(encoding="utf-8").strip() + # Token is either full value in file or base + env (e.g. nicolecoffreio) + if content == token_stripped or (content + env) == token_stripped: + return (pid, env) + except (OSError, UnicodeDecodeError): + continue + return None + + +def resolve_project_id_by_token(token: str) -> str | None: + """Find project id whose .secrets//ia_token matches the given token.""" + resolved = resolve_project_and_env_by_token(token) + return resolved[0] if resolved else None + + +def load_project_config_by_id(project_id: str) -> dict | None: + """Load conf.json for a given project id. Returns None if not found.""" + ia_dev = ia_dev_root() + conf_path = ia_dev / "projects" / project_id / "conf.json" + if not conf_path.is_file(): + return None + with open(conf_path, encoding="utf-8") as f: + return json.load(f) diff --git a/services/ia_dev/git-issues/tickets-fetch-inbox.py b/services/ia_dev/git-issues/tickets-fetch-inbox.py new file mode 100644 index 0000000..1dba2eb --- /dev/null +++ b/services/ia_dev/git-issues/tickets-fetch-inbox.py @@ -0,0 +1,321 @@ +#!/usr/bin/env python3 +""" +Fetch inbox emails and route each message to the project whose tickets.authorized_emails.to matches the message To. + +Project is resolved per message: To/Delivered-To/X-Original-To are compared to tickets.authorized_emails.to in each +projects//conf.json; the first matching project id is used. Only messages from authorized_emails.from are kept. +Messages on or after MAIL_SINCE_DATE are considered. Does not use UNSEEN; does not mark as read (BODY.PEEK[]). +Writes to projects//data/issues/ as JSON ...pending. One file per message. 
+ +State: we skip creating .pending if .pending exists or .response exists for that base. +Usage: run with GIT_ISSUES_DIR set (e.g. via tickets-fetch-inbox.sh). MAIL_SINCE_DATE overrides date (DD-Mon-YYYY). +""" + +from __future__ import annotations + +import email +import hashlib +import imaplib +import json +import os +import re +import sys +from datetime import datetime, timezone +from email.header import decode_header +from email.utils import parsedate_to_datetime +from pathlib import Path + +sys.path.insert(0, str(Path(__file__).resolve().parent)) +from mail_common import imap_search_criterion_all, load_imap_config, imap_ssl_context +from project_config import ( + data_issues_dir_for_project, + ia_dev_root, + load_project_config_by_id, + project_root, + resolve_project_id_by_email_to, +) + + +def decode_header_value(header: str | None) -> str: + if not header: + return "" + parts = decode_header(header) + result = [] + for part, charset in parts: + if isinstance(part, bytes): + result.append(part.decode(charset or "utf-8", errors="replace")) + else: + result.append(part) + return "".join(result) + + +def parse_from_address(from_header: str) -> str: + """Extract email address from From header (e.g. 
'Name ' -> user@host).""" + if not from_header: + return "" + match = re.search(r"<([^>]+)>", from_header) + if match: + return match.group(1).strip().lower() + return from_header.strip().lower() + + +def get_text_body(msg: email.message.Message) -> str: + if msg.is_multipart(): + for part in msg.walk(): + if part.get_content_type() == "text/plain": + payload = part.get_payload(decode=True) + if payload: + return payload.decode( + part.get_content_charset() or "utf-8", errors="replace" + ) + return "" + payload = msg.get_payload(decode=True) + if not payload: + return "" + return payload.decode( + msg.get_content_charset() or "utf-8", errors="replace" + ) + + +def _extract_addresses(header_value: str) -> set[str]: + """Extract email addresses from a header value (e.g. 'Name , other@host').""" + if not header_value or not header_value.strip(): + return set() + decoded = decode_header_value(header_value).strip() + # Angle-bracket: <...@...> + in_angle = re.findall(r"<([^>]+)>", decoded) + # Standalone addr-spec (simplified: local@domain) + plain = re.findall(r"[a-zA-Z0-9_.+-]+@[a-zA-Z0-9][a-zA-Z0-9.-]*[a-zA-Z0-9]", decoded) + out: set[str] = set() + for a in in_angle: + out.add(a.strip().lower()) + for a in plain: + out.add(a.strip().lower()) + return out + + +def get_message_to_addresses(msg: email.message.Message) -> list[str]: + """Return ordered list of recipient addresses (To, Delivered-To, X-Original-To, etc.) for project resolution.""" + order = ("To", "Delivered-To", "X-Original-To", "X-Delivered-To", "X-Envelope-To", "Envelope-To") + seen: set[str] = set() + result: list[str] = [] + for name in order: + value = msg.get(name) + if not value: + continue + addrs = _extract_addresses(decode_header_value(value)) + for a in addrs: + if a not in seen: + seen.add(a) + result.append(a) + return result + + +def sanitize_from_for_filename(email_addr: str) -> str: + """Filesystem-safe string from email (e.g. 
user@example.com -> user_example.com).""" + return re.sub(r"[^a-zA-Z0-9._-]", "_", email_addr.replace("@", "_")) + + +def generate_message_id(mid: str | None, uid_s: str, date_str: str, from_addr: str) -> str: + """Deterministic 8-char id so the same message always gets the same base filename.""" + raw = mid or f"{uid_s}_{date_str}_{from_addr}" + return hashlib.sha256(raw.encode("utf-8")).hexdigest()[:8] + + +def sanitize_attachment_filename(name: str) -> str: + """Safe filename for attachment (no path, no dangerous chars).""" + if not name or not name.strip(): + return "attachment" + base = Path(name).name + return re.sub(r"[^a-zA-Z0-9._-]", "_", base)[:200] or "attachment" + + +def get_attachments(msg: email.message.Message) -> list[tuple[str, bytes, str]]: + """Return list of (filename, payload_bytes, content_type) for each attachment.""" + result: list[tuple[str, bytes, str]] = [] + for part in msg.walk(): + content_type = (part.get_content_type() or "").lower() + if content_type.startswith("multipart/"): + continue + filename = part.get_filename() + if not filename: + # Optional: treat inline images etc. 
with Content-Disposition attachment + disp = part.get("Content-Disposition") or "" + if "attachment" in disp.lower(): + ext = "" + if "image" in content_type: + ext = ".bin" if "octet-stream" in content_type else ".img" + filename = f"attachment{ext}" + else: + continue + filename = decode_header_value(filename).strip() + if not filename: + continue + payload = part.get_payload(decode=True) + if payload is None: + continue + result.append((filename, payload, content_type)) + return result + + +def parse_references(refs: str | None) -> list[str]: + if not refs: + return [] + return [x.strip() for x in re.split(r"\s+", refs) if x.strip()] + + +def main() -> int: + cfg = load_imap_config() + if not cfg["user"] or not cfg["password"]: + print("[tickets-fetch-inbox] IMAP_USER and IMAP_PASSWORD required.", file=sys.stderr) + return 1 + + # Spool is per-project; each message is routed by its To address to projects//data/issues/ + print("[tickets-fetch-inbox] Project resolved per message from To/Delivered-To/X-Original-To (tickets.authorized_emails.to).") + + mail = imaplib.IMAP4(cfg["host"], int(cfg["port"])) + if cfg["use_starttls"]: + mail.starttls(imap_ssl_context(cfg.get("ssl_verify", True))) + mail.login(cfg["user"], cfg["password"]) + mail.select("INBOX") + # Do not use UNSEEN; fetch messages on or after MAIL_SINCE_DATE (default 10-Mar-2026). Filter by authorized senders only. + # Use BODY.PEEK[] instead of RFC822 so the server does not set \Seen (emails stay "unread"). 
+ since_criterion = imap_search_criterion_all() + _, nums = mail.search(None, since_criterion) + ids = nums[0].split() + written = 0 + skipped_fetch = 0 + skipped_no_project = 0 + skipped_from = 0 + skipped_pending = 0 + skipped_response = 0 + for uid in ids: + uid_s = uid.decode("ascii") + _, data = mail.fetch(uid, "(BODY.PEEK[])") + if not data or not data[0]: + skipped_fetch += 1 + continue + raw = data[0] + raw_bytes = None + if isinstance(raw, tuple): + if len(raw) >= 2 and isinstance(raw[1], bytes): + raw_bytes = raw[1] + elif len(raw) >= 2 and isinstance(raw[1], str): + raw_bytes = raw[1].encode("utf-8", errors="replace") + elif isinstance(raw, bytes): + raw_bytes = raw + if not raw_bytes: + skipped_fetch += 1 + continue + try: + msg = email.message_from_bytes(raw_bytes) + except Exception: + skipped_fetch += 1 + continue + to_addresses = get_message_to_addresses(msg) + project_id: str | None = None + for addr in to_addresses: + project_id = resolve_project_id_by_email_to(addr) + if project_id: + break + if not project_id: + skipped_no_project += 1 + continue + conf = load_project_config_by_id(project_id) + if not conf: + skipped_no_project += 1 + continue + auth = (conf.get("tickets") or {}).get("authorized_emails") or {} + from_list = auth.get("from") + if isinstance(from_list, list): + allowed_from = {str(a).strip().lower() for a in from_list if a} + elif isinstance(from_list, str): + allowed_from = {a.strip().lower() for a in re.split(r"[,;]", from_list) if a.strip()} + else: + allowed_from = set() + from_raw = decode_header_value(msg.get("From")) + from_addr = parse_from_address(from_raw) + if from_addr not in allowed_from: + skipped_from += 1 + continue + spool = data_issues_dir_for_project(project_id) + spool.mkdir(parents=True, exist_ok=True) + mid = (msg.get("Message-ID") or "").strip() + to_raw = decode_header_value(msg.get("To")) + to_addrs = [a.strip() for a in re.split(r"[,;]", to_raw) if a.strip()] + subj = 
decode_header_value(msg.get("Subject")) + date_h = decode_header_value(msg.get("Date")) + refs = parse_references(msg.get("References")) + in_reply_to = (msg.get("In-Reply-To") or "").strip() or None + body = get_text_body(msg) + try: + if date_h: + dt = parsedate_to_datetime(date_h) + date_str = dt.strftime("%Y-%m-%dT%H%M%S") + else: + date_str = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H%M%S") + except Exception: + date_str = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H%M%S") + from_safe = sanitize_from_for_filename(from_addr) + msg_id_short = generate_message_id(mid, uid_s, date_str, from_addr) + base = f"{date_str}.{msg_id_short}.{from_safe}" + path = spool / f"{base}.pending" + if path.exists(): + skipped_pending += 1 + continue + # Already treated: .response exists (we don't keep .pending after replying). + if (spool / f"{base}.response").exists(): + skipped_response += 1 + continue + created_at = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ") + attachments_meta: list[dict[str, str | int]] = [] + attachment_parts = get_attachments(msg) + if attachment_parts: + att_dir = spool / f"{base}.d" + att_dir.mkdir(parents=True, exist_ok=True) + for idx, (orig_name, payload_bytes, content_type) in enumerate(attachment_parts): + safe_name = sanitize_attachment_filename(orig_name) + stored_name = f"{idx}_{safe_name}" + stored_path = att_dir / stored_name + stored_path.write_bytes(payload_bytes) + rel_path = f"{base}.d/{stored_name}" + attachments_meta.append({ + "filename": orig_name, + "path": rel_path, + "content_type": content_type, + "size": len(payload_bytes), + }) + payload = { + "version": 1, + "type": "incoming", + "id": msg_id_short, + "message_id": mid or "", + "from": from_addr, + "to": to_addrs, + "subject": subj, + "date": date_h or "", + "body": body or "", + "references": refs, + "in_reply_to": in_reply_to, + "uid": uid_s, + "created_at": created_at, + "issue_number": None, + "status": "pending", + "attachments": attachments_meta, + } 
+ path.write_text(json.dumps(payload, ensure_ascii=False, indent=2), encoding="utf-8") + written += 1 + print(f"[tickets-fetch-inbox] Wrote {path.name}") + + mail.logout() + print(f"[tickets-fetch-inbox] Done. Wrote {written} new message(s).") + if skipped_fetch or skipped_no_project or skipped_from or skipped_pending or skipped_response: + print( + f"[tickets-fetch-inbox] Skipped: fetch/parse={skipped_fetch}, no_project_for_to={skipped_no_project}, " + f"from_not_allowed={skipped_from}, pending_exists={skipped_pending}, response_exists={skipped_response}." + ) + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/services/ia_dev/git-issues/tickets-fetch-inbox.sh b/services/ia_dev/git-issues/tickets-fetch-inbox.sh new file mode 100755 index 0000000..c059552 --- /dev/null +++ b/services/ia_dev/git-issues/tickets-fetch-inbox.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash +# Fetch inbox messages filtered by tickets.authorized_emails (conf.json). No UNSEEN; no mark read. +# Writes new messages to projects/<project_id>/data/issues/ as JSON (<base>.pending). +# Usage: from the ia_dev root (with MAIL_TO or AI_AGENT_TOKEN set): ./git-issues/tickets-fetch-inbox.sh +set -euo pipefail + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +export GIT_ISSUES_DIR +# Use same project root and env as list-pending-spooler (lib.sh) so spool path is identical. +ROOT="$(cd "${GIT_ISSUES_DIR}/../.." && pwd)" +export REPO_ROOT="${GIT_ISSUES_DIR}/.." 
+cd "$ROOT" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" 2>/dev/null || true +export PROJECT_ROOT="${PROJECT_ROOT:-$ROOT}" +cd "$PROJECT_ROOT" +exec python3 "${GIT_ISSUES_DIR}/tickets-fetch-inbox.py" "$@" diff --git a/services/ia_dev/git-issues/wiki-api-test.sh b/services/ia_dev/git-issues/wiki-api-test.sh new file mode 100755 index 0000000..dcf3874 --- /dev/null +++ b/services/ia_dev/git-issues/wiki-api-test.sh @@ -0,0 +1,88 @@ +#!/usr/bin/env bash +# +# Test Gitea Wiki API for repo 4nk/lecoffre_ng. +# Requires GITEA_TOKEN or .secrets/git-issues/token (same as issues scripts). +# Usage: ./wiki-api-test.sh [--create] +# --create: create a test page then delete it (checks write access). +# +set -euo pipefail + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" + +REPO_PATH="/repos/${GITEA_REPO_OWNER}/${GITEA_REPO_NAME}" +# Branch ref for wiki (default branch of wiki repo; use master when wiki is configured on master) +GITEA_WIKI_REF="${GITEA_WIKI_REF:-master}" +WIKI_PAGES="${REPO_PATH}/wiki/pages?ref=${GITEA_WIKI_REF}" +WIKI_PAGE="${REPO_PATH}/wiki/page" +WIKI_NEW="${REPO_PATH}/wiki/new" + +do_create=false +while [[ $# -gt 0 ]]; do + case "$1" in + --create) do_create=true; shift ;; + *) log_err "Unknown option: $1"; exit 1 ;; + esac +done + +if ! load_gitea_token 2>/dev/null; then + log_err "No GITEA_TOKEN and no .secrets/git-issues/token. Set token to run wiki API tests." + exit 1 +fi + +require_jq || exit 1 + +echo "=== 1. GET ${WIKI_PAGES} (list wiki pages) ===" +RESPONSE="$(gitea_api_get "${WIKI_PAGES}")" +if echo "$RESPONSE" | jq -e . &>/dev/null; then + if echo "$RESPONSE" | jq -e 'type == "array"' &>/dev/null; then + COUNT="$(echo "$RESPONSE" | jq 'length')" + log_info "List OK: ${COUNT} page(s)" + echo "$RESPONSE" | jq -r '.[] | " - \(.title)"' 2>/dev/null || echo "$RESPONSE" | jq . + else + log_info "Response: $(echo "$RESPONSE" | jq -c . 
2>/dev/null || echo "$RESPONSE")" + fi +else + log_err "Response (first 300 chars): ${RESPONSE:0:300}" +fi + +echo "" +echo "=== 2. GET ${WIKI_PAGE}/Home (get one page, ref=${GITEA_WIKI_REF}) ===" +RESPONSE="$(gitea_api_get "${WIKI_PAGE}/Home?ref=${GITEA_WIKI_REF}")" +if echo "$RESPONSE" | jq -e .title &>/dev/null; then + log_info "Page OK: title=$(echo "$RESPONSE" | jq -r .title)" + echo "$RESPONSE" | jq '{ title, html_url, commit_count }' +else + log_info "Response: $(echo "$RESPONSE" | jq -c . 2>/dev/null || echo "${RESPONSE:0:200}")" +fi + +if [[ "$do_create" != true ]]; then + log_info "Done. Use --create to test POST wiki page and DELETE." + exit 0 +fi + +echo "" +echo "=== 3. POST ${WIKI_NEW} (create test page) ===" +TEST_TITLE="Api-test-$(date +%s)" +CONTENT="# Test\nCreated by wiki-api-test.sh. Safe to delete." +CONTENT_B64="$(echo -n "$CONTENT" | base64 -w 0)" +BODY="$(jq -n --arg title "$TEST_TITLE" --arg content "$CONTENT_B64" --arg msg "wiki-api-test.sh" \ + '{ title: $title, content_base64: $content, message: $msg }')" +RESPONSE="$(gitea_api_post "${WIKI_NEW}" "$BODY")" +if echo "$RESPONSE" | jq -e .title &>/dev/null; then + log_info "Create OK: $(echo "$RESPONSE" | jq -r .title)" + CREATED_TITLE="$TEST_TITLE" +else + log_err "Create failed: ${RESPONSE:0:300}" + exit 1 +fi + +echo "" +echo "=== 4. DELETE ${WIKI_PAGE}/${CREATED_TITLE} (remove test page) ===" +RESPONSE="$(gitea_api_delete "${WIKI_PAGE}/${CREATED_TITLE}")" +# DELETE often returns 204 No Content +log_info "Delete sent (204 or empty body = success)." + +echo "" +log_info "All wiki API tests completed." diff --git a/services/ia_dev/git-issues/wiki-get-page.sh b/services/ia_dev/git-issues/wiki-get-page.sh new file mode 100755 index 0000000..2ce65be --- /dev/null +++ b/services/ia_dev/git-issues/wiki-get-page.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash +# +# Output the raw markdown of a wiki page (for agents or scripts). 
+# Usage: ./wiki-get-page.sh +# Example: ./wiki-get-page.sh Home +# Requires GITEA_TOKEN or .secrets/git-issues/token. +# +set -euo pipefail + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" + +REPO_PATH="/repos/${GITEA_REPO_OWNER}/${GITEA_REPO_NAME}" +GITEA_WIKI_REF="${GITEA_WIKI_REF:-master}" + +if [[ $# -lt 1 ]]; then + log_err "Usage: $0 " + exit 1 +fi + +PAGE_NAME="$1" +load_gitea_token || exit 1 +require_jq || exit 1 + +resp="$(gitea_api_get "${REPO_PATH}/wiki/page/${PAGE_NAME}?ref=${GITEA_WIKI_REF}")" +if ! echo "$resp" | jq -e .content_base64 &>/dev/null; then + log_err "Page not found or error: ${PAGE_NAME}" + echo "$resp" | jq . 2>/dev/null || echo "$resp" + exit 1 +fi + +echo "$resp" | jq -r '.content_base64' | base64 -d +echo diff --git a/services/ia_dev/git-issues/wiki-migrate-docs.sh b/services/ia_dev/git-issues/wiki-migrate-docs.sh new file mode 100755 index 0000000..c1b64af --- /dev/null +++ b/services/ia_dev/git-issues/wiki-migrate-docs.sh @@ -0,0 +1,100 @@ +#!/usr/bin/env bash +# +# Migrate all docs/*.md (repo root) to Gitea wiki as pages. +# Mapping: docs/FILE.md → page "File" (stem with _ → -, first letter upper per segment). +# Requires GITEA_TOKEN or .secrets/git-issues/token. +# Usage: ./wiki-migrate-docs.sh [--dry-run] [file.md ...] +# --dry-run: print mapping and skip API calls. +# If file(s) given: migrate only those; else migrate all docs/*.md. +# +set -euo pipefail + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +REPO_ROOT="${GIT_ISSUES_DIR}/.." 
+DOCS_DIR="${DOCS_DIR:-${REPO_ROOT}/docs}" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" + +REPO_PATH="/repos/${GITEA_REPO_OWNER}/${GITEA_REPO_NAME}" +GITEA_WIKI_REF="${GITEA_WIKI_REF:-master}" +WIKI_PAGE="${REPO_PATH}/wiki/page" +WIKI_NEW="${REPO_PATH}/wiki/new" + +# docs/FILE.md → page name for wiki (stem: _ → -, title-case: First-Letter-Of-Each-Segment) +file_to_page_name() { + local base="$1" + local stem="${base%.md}" + echo "$stem" | tr '_' '-' | awk -F- '{ + for(i=1;i<=NF;i++) { + s = $i; l = length(s) + if (l > 0) $i = toupper(substr(s,1,1)) tolower(substr(s,2)) + } + }1' OFS='-' +} + +dry_run=false +files=() +while [[ $# -gt 0 ]]; do + case "$1" in + --dry-run) dry_run=true; shift ;; + *.md) files+=("$1"); shift ;; + *) log_err "Unknown option or not .md: $1"; exit 1 ;; + esac +done + +if [[ ${#files[@]} -eq 0 ]]; then + while IFS= read -r -d '' f; do + files+=("$f") + done < <(find "$DOCS_DIR" -maxdepth 1 -name '*.md' -print0 | sort -z) +else + # Resolve args to full paths under DOCS_DIR + for i in "${!files[@]}"; do + u="${files[$i]}" + if [[ "$u" != */* ]] && [[ -f "${DOCS_DIR}/${u}" ]]; then + files[$i]="${DOCS_DIR}/${u}" + fi + done +fi + +if [[ ${#files[@]} -eq 0 ]]; then + log_err "No .md files found in ${DOCS_DIR}" + exit 1 +fi + +if [[ "$dry_run" == true ]]; then + log_info "Dry run: would migrate ${#files[@]} file(s)" + for f in "${files[@]}"; do + base="$(basename "$f")" + page="$(file_to_page_name "$base")" + echo " $f → $page" + done + exit 0 +fi + +load_gitea_token || exit 1 +require_jq || exit 1 + +for f in "${files[@]}"; do + base="$(basename "$f")" + page="$(file_to_page_name "$base")" + if [[ ! 
-f "$f" ]]; then + log_err "Skip (not a file): $f" + continue + fi + content="$(cat "$f")" + content_b64="$(echo -n "$content" | base64 -w 0)" + body="$(jq -n --arg title "$page" --arg content "$content_b64" --arg msg "Migrate from docs/$base" \ + '{ title: $title, content_base64: $content, message: $msg }')" + # Check if page exists (GET); if 200 use PATCH else POST + resp="$(gitea_api_get "${REPO_PATH}/wiki/page/${page}?ref=${GITEA_WIKI_REF}")" + if echo "$resp" | jq -e .title &>/dev/null; then + log_info "Update: $base → $page" + patch_body="$(jq -n --arg content "$content_b64" --arg msg "Update from docs/$base" '{ content_base64: $content, message: $msg }')" + gitea_api_patch "${WIKI_PAGE}/${page}?ref=${GITEA_WIKI_REF}" "$patch_body" >/dev/null || true + else + log_info "Create: $base → $page" + gitea_api_post "${WIKI_NEW}" "$body" >/dev/null || true + fi +done + +log_info "Migration done: ${#files[@]} file(s)." diff --git a/services/ia_dev/git-issues/wiki-put-page.sh b/services/ia_dev/git-issues/wiki-put-page.sh new file mode 100755 index 0000000..e189ec7 --- /dev/null +++ b/services/ia_dev/git-issues/wiki-put-page.sh @@ -0,0 +1,46 @@ +#!/usr/bin/env bash +# +# Update a single wiki page from a local file. +# Usage: ./wiki-put-page.sh +# Example: ./wiki-put-page.sh Home docs/README.md +# Requires GITEA_TOKEN or .secrets/git-issues/token. 
+# +set -euo pipefail + +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +# shellcheck source=lib.sh +source "${GIT_ISSUES_DIR}/lib.sh" + +REPO_PATH="/repos/${GITEA_REPO_OWNER}/${GITEA_REPO_NAME}" +GITEA_WIKI_REF="${GITEA_WIKI_REF:-master}" +WIKI_PAGE="${REPO_PATH}/wiki/page" +WIKI_NEW="${REPO_PATH}/wiki/new" + +if [[ $# -lt 2 ]]; then + log_err "Usage: $0 <page-name> <file>" + exit 1 +fi + +PAGE_NAME="$1" +FILE_PATH="$2" +[[ -f "$FILE_PATH" ]] || { log_err "File not found: $FILE_PATH"; exit 1; } + +load_gitea_token || exit 1 +require_jq || exit 1 + +content="$(cat "$FILE_PATH")" +content_b64="$(echo -n "$content" | base64 -w 0)" +msg="Update from $FILE_PATH" + +resp="$(gitea_api_get "${REPO_PATH}/wiki/page/${PAGE_NAME}?ref=${GITEA_WIKI_REF}")" +if echo "$resp" | jq -e .title &>/dev/null; then + log_info "PATCH ${PAGE_NAME}" + body="$(jq -n --arg title "$PAGE_NAME" --arg content "$content_b64" --arg msg "$msg" '{ title: $title, content_base64: $content, message: $msg }')" + gitea_api_patch "${WIKI_PAGE}/${PAGE_NAME}?ref=${GITEA_WIKI_REF}" "$body" +else + log_info "POST ${PAGE_NAME}" + body="$(jq -n --arg title "$PAGE_NAME" --arg content "$content_b64" --arg msg "$msg" '{ title: $title, content_base64: $content, message: $msg }')" + gitea_api_post "${WIKI_NEW}" "$body" +fi + +log_info "Done: ${PAGE_NAME}" diff --git a/services/ia_dev/git-issues/write-response-spooler.py b/services/ia_dev/git-issues/write-response-spooler.py new file mode 100644 index 0000000..4c64e0d --- /dev/null +++ b/services/ia_dev/git-issues/write-response-spooler.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python3 +""" +Update the single spooler file for a message after sending a reply. +One file per message: read the .pending file (same base), add response data and set status to responded, write back. +No separate .response file; no file deletion. +Usage: ./git-issues/write-response-spooler.sh --base <base> --to <to> --subject "..." --body "..."
[--in-reply-to ""] + base = filename base without extension (e.g. 2026-03-14T094530.a1b2c3d4.user_example.com). +""" +from __future__ import annotations + +import argparse +import json +import sys +from datetime import datetime, timezone +from pathlib import Path + +sys.path.insert(0, str(Path(__file__).resolve().parent)) +from project_config import data_issues_dir, load_project_config + + +def main() -> int: + conf = load_project_config() + if not conf: + print("[write-response-spooler] No project config.", file=sys.stderr) + return 1 + spool = data_issues_dir() + spool.mkdir(parents=True, exist_ok=True) + + ap = argparse.ArgumentParser(description="Update spooler file in place after sending reply") + ap.add_argument("--base", required=True, help="Base name (e.g. 2026-03-14T094530.a1b2c3d4.user_example.com)") + ap.add_argument("--to", required=True, help="Recipient address") + ap.add_argument("--subject", required=True, help="Subject sent") + ap.add_argument("--body", required=True, help="Body sent") + ap.add_argument("--in-reply-to", default="", help="Message-ID we replied to") + args = ap.parse_args() + + base = args.base.strip() + if not base or "/" in base or ".." 
in base: + print("[write-response-spooler] Invalid --base.", file=sys.stderr) + return 1 + path = spool / f"{base}.pending" + if not path.exists(): + print(f"[write-response-spooler] No such file: {path.name}", file=sys.stderr) + return 1 + try: + data = json.loads(path.read_text(encoding="utf-8")) + except (json.JSONDecodeError, OSError) as e: + print(f"[write-response-spooler] Read error: {e}", file=sys.stderr) + return 1 + + now = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ") + data["status"] = "responded" + data["response"] = { + "in_reply_to_message_id": args.in_reply_to or "", + "to": args.to.strip(), + "subject": args.subject.strip(), + "body": args.body, + "sent_at": now, + } + path.write_text(json.dumps(data, ensure_ascii=False, indent=2), encoding="utf-8") + print(f"[write-response-spooler] Updated {path.name} (status=responded)") + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/services/ia_dev/git-issues/write-response-spooler.sh b/services/ia_dev/git-issues/write-response-spooler.sh new file mode 100755 index 0000000..3c40c2a --- /dev/null +++ b/services/ia_dev/git-issues/write-response-spooler.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +# Update the single spooler file (.pending) after sending a reply via mail-send-reply.sh. One file per message. +# Usage: depuis la racine de ia_dev : ./git-issues/write-response-spooler.sh --base --to --subject "..." --body "..." [--in-reply-to ""] +set -euo pipefail +GIT_ISSUES_DIR="${GIT_ISSUES_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}" +export GIT_ISSUES_DIR +export REPO_ROOT="${GIT_ISSUES_DIR}/.." +_IA_FOR_LOG="$(cd "${GIT_ISSUES_DIR}/.." 
&& pwd)" +if [[ -f "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" ]]; then + # shellcheck source=../lib/smart_ide_logs.sh + source "${_IA_FOR_LOG}/lib/smart_ide_logs.sh" + smart_ide_logs_begin "$_IA_FOR_LOG" "$0" "$*" +fi +exec python3 "${GIT_ISSUES_DIR}/write-response-spooler.py" "$@" diff --git a/services/ia_dev/lib/README.md b/services/ia_dev/lib/README.md new file mode 100644 index 0000000..0e82128 --- /dev/null +++ b/services/ia_dev/lib/README.md @@ -0,0 +1,3 @@ +# ia_dev shared lib + +**[docs/repo/ia-dev-shared-lib.md](../../docs/repo/ia-dev-shared-lib.md)** diff --git a/services/ia_dev/lib/conf_path_resolve.sh b/services/ia_dev/lib/conf_path_resolve.sh new file mode 100644 index 0000000..e8150f2 --- /dev/null +++ b/services/ia_dev/lib/conf_path_resolve.sh @@ -0,0 +1,48 @@ +#!/usr/bin/env bash +# Resolve paths in projects//conf.json: absolute paths unchanged; others relative to smart_ide monorepo root. +# Monorepo root is the directory that contains ./projects/ (conf at .../projects//conf.json) or, when the +# file lives under .../ia_dev/projects//conf.json, the parent of ./ia_dev/. + +# ia_dev_smart_ide_monorepo_root_from_conf +ia_dev_smart_ide_monorepo_root_from_conf() { + local conf="${1:?}" + local c="$conf" + if command -v realpath >/dev/null 2>&1; then + c="$(realpath "$conf" 2>/dev/null)" || c="$conf" + else + c="$(readlink -f "$conf" 2>/dev/null)" || c="$conf" + fi + local d + d="$(dirname "$c")" + if [[ "$c" == */ia_dev/projects/*/conf.json ]]; then + ( cd "$d/../../.." && pwd ) + return + fi + if [[ "$c" == */projects/*/conf.json ]]; then + ( cd "$d/../.." && pwd ) + return + fi + ( cd "$d/../.." 
&& pwd ) +} + +# ia_dev_resolve_path_from_conf +ia_dev_resolve_path_from_conf() { + local conf="${1:?}" + local p="${2:-}" + p="${p//$'\r'/}" + if [[ -z "$p" || "$p" == "null" ]]; then + printf '%s\n' "" + return 0 + fi + if [[ "$p" = /* ]]; then + printf '%s\n' "$p" + return 0 + fi + local root + root="$(ia_dev_smart_ide_monorepo_root_from_conf "$conf")" + if ( cd "$root" && realpath -m "$p" >/dev/null 2>&1 ); then + ( cd "$root" && realpath -m "$p" ) + else + printf '%s\n' "$root/$p" + fi +} diff --git a/services/ia_dev/lib/project_config.sh b/services/ia_dev/lib/project_config.sh new file mode 100644 index 0000000..4882eef --- /dev/null +++ b/services/ia_dev/lib/project_config.sh @@ -0,0 +1,73 @@ +# +# Project config resolution for ia_dev scripts. +# Standalone: run from ia_dev root. Source after IA_DEV_ROOT is set (or it is derived from script location). +# Resolves PROJECT_ID and PROJECT_CONFIG_PATH (projects//conf.json). +# +# Project id is resolved by (first match wins): +# 1. IA_PROJECT_ID (env): explicit project id; set by scripts when passed as parameter (e.g. ./pousse.sh lecoffreio). +# 2. MAIL_TO (env): search all projects for tickets.authorized_emails.to matching this address (config may have a string or a list of env-keyed objects). +# 3. AI_AGENT_TOKEN (env): search all projects/.secrets//ia_token for matching token; sets PROJECT_ID and PROJECT_ENV (project and environment). +# +# No fallback: no IA_PROJECT, no ai_project_id, no .ia_project. +# +# Config file: projects//conf.json. 
+# +set -euo pipefail + +PROJECT_ID="" +if [[ -n "${IA_PROJECT_ID:-}" && -n "${IA_DEV_ROOT:-}" ]]; then + _conf="${IA_DEV_ROOT}/projects/${IA_PROJECT_ID}/conf.json" + if [[ -f "$_conf" ]]; then + PROJECT_ID="${IA_PROJECT_ID}" + fi +fi +if [[ -z "$PROJECT_ID" && -n "${MAIL_TO:-}" && -n "${IA_DEV_ROOT:-}" ]]; then + _to="$(echo "${MAIL_TO}" | sed 's/[[:space:]]//g' | tr '[:upper:]' '[:lower:]')" + for conf in "${IA_DEV_ROOT}/projects/"*/conf.json; do + [[ -f "$conf" ]] || continue + # Extract all "to" addresses: string, or array of strings, or array of objects with env values + while IFS= read -r _to_conf; do + _to_conf="$(echo "$_to_conf" | tr '[:upper:]' '[:lower:]' | sed 's/[[:space:]]//g')" + if [[ -n "$_to_conf" && "$_to_conf" = "$_to" ]]; then + PROJECT_ID="$(basename "$(dirname "$conf")")" + break 2 + fi + done < <(jq -r ' + .tickets.authorized_emails.to + | if type == "string" then . + elif type == "array" then (.[] | if type == "string" then . elif type == "object" then .[] else empty end) + else empty end + ' "$conf" 2>/dev/null || true) + done +fi +PROJECT_ENV="" +if [[ -z "$PROJECT_ID" && -n "${AI_AGENT_TOKEN:-}" && -n "${IA_DEV_ROOT:-}" ]]; then + _token="$(echo "${AI_AGENT_TOKEN}" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')" + for _pdir in "${IA_DEV_ROOT}/projects/"*/; do + [[ -d "${_pdir}.secrets" ]] || continue + _project="$(basename "$_pdir")" + for _envdir in "${_pdir}.secrets/"*/; do + [[ -f "${_envdir}ia_token" ]] || continue + _env="$(basename "$_envdir")" + _tok_conf="$(cat "${_envdir}ia_token" 2>/dev/null | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')" + # Token is either full value in file or base + env (e.g. 
nicolecoffreio) + if [[ -n "$_tok_conf" && ( "$_tok_conf" = "$_token" || "${_tok_conf}${_env}" = "$_token" ) ]]; then + PROJECT_ID="$_project" + PROJECT_ENV="$_env" + break 2 + fi + done + done +fi +export PROJECT_ENV + +PROJECT_CONFIG_PATH="" +if [[ -n "$PROJECT_ID" && -n "${IA_DEV_ROOT:-}" ]]; then + PROJECT_CONFIG_PATH="${IA_DEV_ROOT}/projects/${PROJECT_ID}/conf.json" + if [[ ! -f "$PROJECT_CONFIG_PATH" ]]; then + PROJECT_CONFIG_PATH="" + fi +fi + +export PROJECT_ID +export PROJECT_CONFIG_PATH diff --git a/services/ia_dev/lib/project_git_root_from_conf.sh b/services/ia_dev/lib/project_git_root_from_conf.sh new file mode 100644 index 0000000..3979ff9 --- /dev/null +++ b/services/ia_dev/lib/project_git_root_from_conf.sh @@ -0,0 +1,43 @@ +# +# Resolve the application git repository root from projects//conf.json. +# Used by ia_dev deploy wrappers (pousse, branch-align, change-to-all-branches, deploy-by-script-to). +# +# Priority: +# 1. deploy.repository_root (preferred) +# 2. deploy.git_work_tree (alias) +# 3. dirname(deploy.secrets_path) — legacy; breaks if secrets live outside the repo tree +# +# Paths may be absolute or relative to the smart_ide monorepo root (see conf_path_resolve.sh). +# +# Preconditions: PROJECT_CONFIG_PATH set and jq available. +# Sets: IA_PROJECT_GIT_ROOT (exported), or empty if unresolved. +# +# shellcheck source=conf_path_resolve.sh +source "$(dirname "${BASH_SOURCE[0]}")/conf_path_resolve.sh" + +ia_dev_resolve_project_git_root() { + IA_PROJECT_GIT_ROOT="" + export IA_PROJECT_GIT_ROOT + if [[ -z "${PROJECT_CONFIG_PATH:-}" || ! -f "$PROJECT_CONFIG_PATH" ]]; then + return 0 + fi + if ! 
command -v jq >/dev/null 2>&1; then + return 0 + fi + local r sp r_abs sp_abs + r="$(jq -r '.deploy.repository_root // .deploy.git_work_tree // empty' "$PROJECT_CONFIG_PATH" 2>/dev/null || true)" + r="${r//$'\r'/}" + r_abs="$(ia_dev_resolve_path_from_conf "$PROJECT_CONFIG_PATH" "$r")" + if [[ -n "$r_abs" && -d "$r_abs" ]]; then + IA_PROJECT_GIT_ROOT="$r_abs" + export IA_PROJECT_GIT_ROOT + return 0 + fi + sp="$(jq -r '.deploy.secrets_path // empty' "$PROJECT_CONFIG_PATH" 2>/dev/null || true)" + sp="${sp//$'\r'/}" + sp_abs="$(ia_dev_resolve_path_from_conf "$PROJECT_CONFIG_PATH" "$sp")" + if [[ -n "$sp_abs" && "$sp_abs" != "null" ]]; then + IA_PROJECT_GIT_ROOT="$(dirname "$sp_abs")" + export IA_PROJECT_GIT_ROOT + fi +} diff --git a/services/ia_dev/lib/smart_ide_logs.sh b/services/ia_dev/lib/smart_ide_logs.sh new file mode 100644 index 0000000..29038e4 --- /dev/null +++ b/services/ia_dev/lib/smart_ide_logs.sh @@ -0,0 +1,58 @@ +#!/usr/bin/env bash +# Append one line per event to /logs/ia_dev.log when ia_dev lives inside smart_ide. +# Usage: export SMART_IDE_LOG_IA_DEV_ROOT after cd to ia_dev root, then smart_ide_log_event / smart_ide_logs_begin. + +smart_ide_logs_resolve_root_for_ia_dev() { + local ia_root="${1:?}" + local parent base_name + base_name="$(basename "$ia_root")" + if [[ "$base_name" != "ia_dev" ]]; then + return 1 + fi + parent="$(cd "$ia_root/.." && pwd)" + local r="$parent" + for _ in 1 2 3 4 5; do + if [[ -f "${r}/cron/git-pull-wrapper.sh" ]] || [[ -d "${r}/projects/smart_ide" ]]; then + printf '%s\n' "$r" + return 0 + fi + if [[ "$r" == "/" ]]; then + break + fi + r="$(cd "$r/.." 
&& pwd)" + done + return 1 +} + +smart_ide_log_event() { + local ia_root="${SMART_IDE_LOG_IA_DEV_ROOT:-}" + if [[ -z "$ia_root" ]]; then + return 0 + fi + local smart_root logf + smart_root="$(smart_ide_logs_resolve_root_for_ia_dev "$ia_root" 2>/dev/null)" || return 0 + logf="${smart_root}/logs/ia_dev.log" + mkdir -p "$(dirname "$logf")" + printf '%s %s\n' "$(date -Iseconds)" "$*" >>"$logf" +} + +smart_ide_logs_begin() { + export SMART_IDE_LOG_IA_DEV_ROOT="${1:?}" + export SMART_IDE_LOG_SCRIPT_PATH="${2:?}" + shift 2 + smart_ide_log_event "START ${SMART_IDE_LOG_SCRIPT_PATH} $*" +} + +smart_ide_logs_register_exit_trap() { + trap 'smart_ide_logs_on_exit_trap' EXIT +} + +smart_ide_logs_on_exit_trap() { + local ec=$? + smart_ide_log_event "END ${SMART_IDE_LOG_SCRIPT_PATH:-?} exit=${ec}" +} + +smart_ide_log_end_with_status() { + local ec="${1:?}" + smart_ide_log_event "END ${SMART_IDE_LOG_SCRIPT_PATH:-${0:-?}} exit=${ec}" +} diff --git a/services/ia_dev/projects/builazoo b/services/ia_dev/projects/builazoo new file mode 120000 index 0000000..7871040 --- /dev/null +++ b/services/ia_dev/projects/builazoo @@ -0,0 +1 @@ +../../../projects/builazoo \ No newline at end of file diff --git a/services/ia_dev/projects/enso b/services/ia_dev/projects/enso new file mode 120000 index 0000000..5535461 --- /dev/null +++ b/services/ia_dev/projects/enso @@ -0,0 +1 @@ +../../../projects/enso \ No newline at end of file diff --git a/services/ia_dev/projects/smart_ide b/services/ia_dev/projects/smart_ide new file mode 120000 index 0000000..6986d9a --- /dev/null +++ b/services/ia_dev/projects/smart_ide @@ -0,0 +1 @@ +../../../projects/smart_ide \ No newline at end of file diff --git a/services/ia_dev/tools/proxy-https-watch-lpldf.env.example b/services/ia_dev/tools/proxy-https-watch-lpldf.env.example new file mode 100644 index 0000000..c3df7fd --- /dev/null +++ b/services/ia_dev/tools/proxy-https-watch-lpldf.env.example @@ -0,0 +1,11 @@ +# Copy to 
/opt/proxy-config/scripts/env/watch-https-lpldf.env on the proxy (chmod 600). +# Optional: webhook for down alerts (plain text POST body). +# ALERT_WEBHOOK_URL=https://example.com/hook +# Optional: separate webhook when the site recovers. +# ALERT_WEBHOOK_URL_RECOVER=https://example.com/hook-recover +# Optional: repeat down notifications at most every N seconds while still failing. +# ALERT_REPEAT_SECONDS=3600 +# Optional: if `mail` is configured on the host. +# ALERT_EMAIL_TO=ops@example.com +# Override URL if needed (default is punycode LPLDF front URL). +# WATCH_URL=https://xn--lespetitesleonsdefrdric-89b1db.fr/ diff --git a/services/ia_dev/tools/proxy-https-watch-lpldf.sh b/services/ia_dev/tools/proxy-https-watch-lpldf.sh new file mode 100755 index 0000000..02feebd --- /dev/null +++ b/services/ia_dev/tools/proxy-https-watch-lpldf.sh @@ -0,0 +1,106 @@ +#!/usr/bin/env bash +# HTTPS availability watchdog for Les Petites Leçons de Frédéric (punycode host). +# Intended path on proxy: /opt/proxy-config/scripts/watch-https-lpldf.sh +# Optional env file (root-only recommended): /opt/proxy-config/scripts/env/watch-https-lpldf.env +# Syslog identifier: lpldf-https-watch (for SIEM / Wazuh-style log collection). +set -euo pipefail + +_IA_DEV_TOOLS="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." 
&& pwd)" +if [[ -f "${_IA_DEV_TOOLS}/lib/smart_ide_logs.sh" ]]; then + # shellcheck source=../lib/smart_ide_logs.sh + source "${_IA_DEV_TOOLS}/lib/smart_ide_logs.sh" + smart_ide_logs_begin "$_IA_DEV_TOOLS" "$0" "$*" + smart_ide_logs_register_exit_trap +fi + +readonly WATCH_URL="${WATCH_URL:-https://xn--lespetitesleonsdefrdric-89b1db.fr/}" +readonly STATE_DIR="${STATE_DIR:-/var/lib/lpldf-https-watch}" +readonly STATE_FILE="${STATE_DIR}/last_state" +readonly LAST_ALERT_FILE="${STATE_DIR}/last_alert_epoch" +readonly LOG_TAG="lpldf-https-watch" + +ENV_FILE="${ENV_FILE:-/opt/proxy-config/scripts/env/watch-https-lpldf.env}" +if [[ -f "$ENV_FILE" ]]; then + set -a + # shellcheck disable=SC1090 + source "$ENV_FILE" + set +a +fi + +readonly ALERT_REPEAT_SECONDS="${ALERT_REPEAT_SECONDS:-3600}" + +mkdir -p "$STATE_DIR" + +last="OK" +if [[ -f "$STATE_FILE" ]]; then + last="$(tr -d '\n' <"$STATE_FILE" || true)" +fi +[[ -z "$last" ]] && last="OK" + +http_code="000" +if ! http_code="$(curl -sS -o /dev/null -w '%{http_code}' --max-time 25 "$WATCH_URL" 2>/dev/null)"; then + http_code="000" +fi +[[ -z "$http_code" ]] && http_code="000" + +is_ok="false" +if [[ "$http_code" =~ ^(200|301|302|307|308)$ ]]; then + is_ok="true" +fi + +send_notifications() { + local message="$1" + logger -t "$LOG_TAG" -p daemon.warning "$message" + if [[ -n "${ALERT_WEBHOOK_URL:-}" ]]; then + curl -sS -X POST "${ALERT_WEBHOOK_URL}" \ + -H "Content-Type: text/plain; charset=utf-8" \ + --data-binary "$message" \ + --max-time 15 || logger -t "$LOG_TAG" -p daemon.err "ALERT_WEBHOOK_URL post failed" + fi + if [[ -n "${ALERT_EMAIL_TO:-}" ]] && command -v mail >/dev/null 2>&1; then + printf '%s\n' "$message" | mail -s "LPLDF HTTPS down" "$ALERT_EMAIL_TO" || logger -t "$LOG_TAG" -p daemon.err "mail alert failed" + fi +} + +notify_recovery() { + local message="$1" + logger -t "$LOG_TAG" -p daemon.info "$message" + if [[ -n "${ALERT_WEBHOOK_URL_RECOVER:-}" ]]; then + curl -sS -X POST "${ALERT_WEBHOOK_URL_RECOVER}" \ 
+ -H "Content-Type: text/plain; charset=utf-8" \ + --data-binary "$message" \ + --max-time 15 || true + fi +} + +now_epoch="$(date +%s)" + +if [[ "$is_ok" == "true" ]]; then + if [[ "$last" == "DOWN" ]]; then + notify_recovery "LPLDF HTTPS recovered: ${WATCH_URL} HTTP ${http_code}" + fi + printf '%s\n' "OK" >"$STATE_FILE" + exit 0 +fi + +msg="LPLDF HTTPS check FAIL: ${WATCH_URL} HTTP=${http_code}" + +if [[ "$last" != "DOWN" ]]; then + printf '%s\n' "DOWN" >"$STATE_FILE" + printf '%s\n' "$now_epoch" >"$LAST_ALERT_FILE" + send_notifications "$msg" + exit 1 +fi + +last_alert="0" +if [[ -f "$LAST_ALERT_FILE" ]]; then + last_alert="$(tr -d '\n' <"$LAST_ALERT_FILE" || echo 0)" +fi +[[ -z "$last_alert" ]] && last_alert="0" + +if [[ "$((now_epoch - last_alert))" -ge "$ALERT_REPEAT_SECONDS" ]]; then + printf '%s\n' "$now_epoch" >"$LAST_ALERT_FILE" + send_notifications "$msg (still down, repeat every ${ALERT_REPEAT_SECONDS}s)" +fi + +exit 1 diff --git a/services/ia_dev/tools/site-generate.sh b/services/ia_dev/tools/site-generate.sh new file mode 100755 index 0000000..8a07217 --- /dev/null +++ b/services/ia_dev/tools/site-generate.sh @@ -0,0 +1,699 @@ +#!/usr/bin/env bash +# Scaffold a "lovable-style" front-end website: +# - Vite + React + TypeScript +# - OIDC login (PKCE) with oidc-client-ts (sessionStorage) +# - Chat UI that calls smart-ide-sso-gateway -> proxy/orchestrator -> intents (rag.query, chat.local) +# +# This script creates files and installs npm dependencies. It does not deploy. +set -euo pipefail + +usage() { + cat <<'EOF' +Usage: + ./tools/site-generate.sh --dir [--name ] [--skip-install] + +Examples: + ./tools/site-generate.sh --dir ../sites/my-site --name my-site + +Notes: + - Requires: node (>=20), npm, git (optional). + - Output is intentionally verbose (no filtering). 
+EOF +} + +DIR="" +NAME="" +SKIP_INSTALL="false" + +while [[ $# -gt 0 ]]; do + case "$1" in + -h|--help) + usage + exit 0 + ;; + --dir) + DIR="${2:-}" + shift 2 + ;; + --name) + NAME="${2:-}" + shift 2 + ;; + --skip-install) + SKIP_INSTALL="true" + shift 1 + ;; + *) + echo "[site-generate][ERROR] Unknown arg: $1" >&2 + usage >&2 + exit 2 + ;; + esac +done + +command -v node >/dev/null 2>&1 || { echo "[site-generate][ERROR] Missing dependency: node" >&2; exit 1; } +command -v npm >/dev/null 2>&1 || { echo "[site-generate][ERROR] Missing dependency: npm" >&2; exit 1; } + +if [[ -z "${DIR}" ]]; then + echo "[site-generate][ERROR] Missing --dir " >&2 + usage >&2 + exit 2 +fi + +TARGET="${DIR}" +mkdir -p "$(dirname "${TARGET}")" +if [[ -e "${TARGET}" && ! -d "${TARGET}" ]]; then + echo "[site-generate][ERROR] Target exists and is not a directory: ${TARGET}" >&2 + exit 1 +fi +mkdir -p "${TARGET}" +if [[ ! -r "${TARGET}" || ! -x "${TARGET}" ]]; then + echo "[site-generate][ERROR] Target directory is not accessible: ${TARGET}" >&2 + exit 1 +fi +shopt -s nullglob dotglob +entries=( "${TARGET}"/* ) +shopt -u nullglob dotglob +if (( ${#entries[@]} > 0 )); then + echo "[site-generate][ERROR] Target directory is not empty: ${TARGET}" >&2 + exit 1 +fi + +if [[ -z "${NAME}" ]]; then + NAME="$(basename "${TARGET}")" +fi + +echo "[site-generate] target=${TARGET}" +echo "[site-generate] name=${NAME}" + +( + cd "${TARGET}" + + npm init -y + + if [[ "${SKIP_INSTALL}" != "true" ]]; then + # Runtime + npm install react react-dom oidc-client-ts + + # Tooling + npm install -D vite typescript @types/react @types/react-dom @vitejs/plugin-react + else + echo "[site-generate] --skip-install: dependencies will not be installed." 
+ fi + + cat > vite.config.ts <<'EOF' +import { defineConfig } from "vite"; +import react from "@vitejs/plugin-react"; + +export default defineConfig({ + plugins: [react()], + server: { port: 5173, strictPort: true }, +}); +EOF + + cat > tsconfig.json <<'EOF' +{ + "compilerOptions": { + "target": "ES2022", + "useDefineForClassFields": true, + "lib": ["ES2022", "DOM", "DOM.Iterable"], + "module": "ESNext", + "skipLibCheck": true, + "moduleResolution": "Bundler", + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + "jsx": "react-jsx", + "strict": true, + "types": ["vite/client"] + }, + "include": ["src"] +} +EOF + + cat > index.html <<'EOF' + + + + + + Smart IDE — Site + + +
+ + + +EOF + + mkdir -p src/auth src/api src/components + + cat > src/i18n.ts <<'EOF' +type Dict = Record; + +const fr: Dict = { + "app.title": "Smart IDE — Site", + "auth.signIn": "Se connecter", + "auth.signOut": "Se déconnecter", + "auth.status.signedOut": "Non connecté", + "auth.status.signedInAs": "Connecté en tant que {email}", + "auth.error.title": "Connexion", + "auth.error.message": "Un problème empêche la connexion. Réessayez ou contactez le support.", + "auth.error.details": "Détails techniques", + "chat.title": "Chat", + "chat.mode.rag": "Contexte du projet", + "chat.mode.local": "Assistant local", + "chat.mode.aria": "Mode de chat", + "chat.role.you": "Vous", + "chat.role.assistant": "Assistant", + "chat.input.placeholder": "Écrivez votre message…", + "chat.send": "Envoyer", + "chat.error.unauthorized": "Session expirée. Reconnectez-vous.", + "chat.error.network": "Le service est indisponible. Réessayez plus tard.", +}; + +export type I18nKey = keyof typeof fr; + +export const t = (key: I18nKey, vars?: Record): string => { + const raw = fr[key] ?? String(key); + if (!vars) return raw; + return raw.replace(/\{(\w+)\}/g, (_, k) => vars[k] ?? 
""); +}; +EOF + + cat > src/auth/oidc.ts <<'EOF' +import { UserManager, WebStorageStateStore, type User } from "oidc-client-ts"; + +const requiredEnv = (k: string): string => { + const v = (import.meta as any).env?.[k] as string | undefined; + if (!v || v.trim().length === 0) { + throw new Error(`Missing ${k}`); + } + return v.trim(); +}; + +export const userManager = new UserManager({ + authority: requiredEnv("VITE_OIDC_ISSUER"), + client_id: requiredEnv("VITE_OIDC_CLIENT_ID"), + redirect_uri: requiredEnv("VITE_OIDC_REDIRECT_URI"), + post_logout_redirect_uri: requiredEnv("VITE_OIDC_POST_LOGOUT_REDIRECT_URI"), + response_type: "code", + scope: "openid profile email", + userStore: new WebStorageStateStore({ store: window.sessionStorage }), +}); + +export const signin = async (): Promise => { + await userManager.signinRedirect(); +}; + +export const signout = async (): Promise => { + await userManager.signoutRedirect(); +}; + +export const completeSignin = async (): Promise => { + return await userManager.signinRedirectCallback(); +}; + +export const getUser = async (): Promise => { + return await userManager.getUser(); +}; +EOF + + cat > src/api/orchestrator.ts <<'EOF' +export type OrchestratorExecuteBody = { + intent: string; + projectId?: string; + context?: Record; +}; + +const requiredEnv = (k: string): string => { + const v = (import.meta as any).env?.[k] as string | undefined; + if (!v || v.trim().length === 0) { + throw new Error(`Missing ${k}`); + } + return v.trim().replace(/\/+$/, ""); +}; + +const ssoBase = (): string => requiredEnv("VITE_SSO_GATEWAY_BASE_URL"); + +export const orchestratorExecute = async (accessToken: string, body: OrchestratorExecuteBody): Promise => { + const url = `${ssoBase()}/proxy/orchestrator/v1/execute`; + const res = await fetch(url, { + method: "POST", + headers: { + Authorization: `Bearer ${accessToken}`, + "Content-Type": "application/json", + Accept: "application/json", + }, + body: JSON.stringify(body), + }); + const text = 
await res.text(); + let parsed: unknown = undefined; + try { + parsed = JSON.parse(text); + } catch { + parsed = text; + } + if (!res.ok) { + const err = new Error(`HTTP ${res.status}`); + (err as any).response = parsed; + throw err; + } + return parsed; +}; +EOF + + cat > src/components/App.tsx <<'EOF' +import { useEffect, useMemo, useState } from "react"; +import type { User } from "oidc-client-ts"; +import { completeSignin, getUser, signin, signout, userManager } from "../auth/oidc"; +import { t } from "../i18n"; +import { ChatPanel } from "./ChatPanel"; +import "./styles.css"; + +export const App = () => { + const [user, setUser] = useState(null); + const [authError, setAuthError] = useState(""); + + const email = useMemo(() => (user?.profile?.email as string | undefined) ?? "", [user]); + + useEffect(() => { + const run = async () => { + try { + if (window.location.pathname.startsWith("/auth/callback")) { + await completeSignin(); + window.history.replaceState({}, "", "/"); + } + const u = await getUser(); + setUser(u); + } catch (e) { + const msg = e instanceof Error ? e.message : String(e); + setAuthError(msg); + } + }; + void run(); + + const onUserLoaded = (u: User) => setUser(u); + const onUserUnloaded = () => setUser(null); + userManager.events.addUserLoaded(onUserLoaded); + userManager.events.addUserUnloaded(onUserUnloaded); + return () => { + userManager.events.removeUserLoaded(onUserLoaded); + userManager.events.removeUserUnloaded(onUserUnloaded); + }; + }, []); + + return ( +
+
+
{t("app.title")}
+
+ {user ? ( + <> +
+ {t("auth.status.signedInAs", { email: email || user.profile.sub })} +
+ + + ) : ( + <> +
+ {t("auth.status.signedOut")} +
+ + + )} +
+
+ + {authError ? ( +
+
+
{t("auth.error.title")}
+
{t("auth.error.message")}
+ {import.meta.env.DEV ? ( +
+ {t("auth.error.details")} +
{authError}
+
+ ) : null} +
+
+ ) : ( +
+ +
+ )} +
+ ); +}; +EOF + + cat > src/components/ChatPanel.tsx <<'EOF' +import { useMemo, useState } from "react"; +import type { User } from "oidc-client-ts"; +import { orchestratorExecute } from "../api/orchestrator"; +import { t } from "../i18n"; + +type ChatMode = "rag" | "local"; + +const requiredEnv = (k: string): string => { + const v = (import.meta as any).env?.[k] as string | undefined; + if (!v || v.trim().length === 0) { + throw new Error(`Missing ${k}`); + } + return v.trim(); +}; + +export const ChatPanel = ({ user }: { user: User | null }) => { + const [mode, setMode] = useState("rag"); + const [input, setInput] = useState(""); + const [busy, setBusy] = useState(false); + const [error, setError] = useState(""); + const [messages, setMessages] = useState<{ role: "user" | "assistant"; content: string }[]>([]); + + const accessToken = useMemo(() => user?.access_token ?? "", [user]); + + const send = async () => { + if (!accessToken) { + setError(t("chat.error.unauthorized")); + return; + } + const content = input.trim(); + if (!content) return; + setInput(""); + setError(""); + setMessages((m) => [...m, { role: "user", content }]); + setBusy(true); + try { + if (mode === "local") { + const model = requiredEnv("VITE_OLLAMA_MODEL"); + const r = await orchestratorExecute(accessToken, { + intent: "chat.local", + context: { model, prompt: content, stream: false }, + }); + const text = (r as any)?.response?.response ?? (r as any)?.response?.message ?? JSON.stringify(r); + setMessages((m) => [...m, { role: "assistant", content: String(text) }]); + } else { + const workspace = requiredEnv("VITE_ANYTHINGLLM_WORKSPACE_SLUG"); + const r = await orchestratorExecute(accessToken, { + intent: "rag.query", + context: { + model: workspace, + messages: [ + { + role: "system", + content: + "Vous êtes un assistant utile. 
Répondez en utilisant le contexte du projet quand c'est pertinent.", + }, + ...messages.map((m) => ({ role: m.role, content: m.content })), + { role: "user", content }, + ], + }, + }); + const text = + (r as any)?.response?.choices?.[0]?.message?.content ?? + (r as any)?.response?.choices?.[0]?.text ?? + JSON.stringify(r); + setMessages((m) => [...m, { role: "assistant", content: String(text) }]); + } + } catch (e) { + const anyE = e as any; + if (anyE?.message === "HTTP 401") { + setError(t("chat.error.unauthorized")); + } else { + setError(t("chat.error.network")); + } + } finally { + setBusy(false); + } + }; + + return ( +
+
+
{t("chat.title")}
+
+ + +
+
+ +
+ {messages.map((m, idx) => ( +
+
{m.role === "user" ? t("chat.role.you") : t("chat.role.assistant")}
+
{m.content}
+
+ ))} +
+ + {error ? ( +
+ {error} +
+ ) : null} + +
+ + setInput(e.target.value)} + onKeyDown={(e) => { + if (e.key === "Enter" && !e.shiftKey) { + e.preventDefault(); + if (!busy) void send(); + } + }} + disabled={busy} + /> + +
+
+ ); +}; +EOF + + cat > src/components/styles.css <<'EOF' +:root { + --bg: #0b0f17; + --panel: #101826; + --text: #e6eaf2; + --muted: #aab4c5; + --border: #1b2940; + --accent: #60a5fa; + --danger: #f87171; +} + +* { box-sizing: border-box; } +html, body { height: 100%; } +body { + margin: 0; + background: radial-gradient(1200px 900px at 20% 10%, #13213b, var(--bg)); + color: var(--text); + font-family: ui-sans-serif, system-ui, -apple-system, Segoe UI, Roboto, sans-serif; +} + +.srOnly { + position: absolute; + width: 1px; + height: 1px; + padding: 0; + margin: -1px; + overflow: hidden; + clip: rect(0, 0, 0, 0); + white-space: nowrap; + border: 0; +} + +.page { min-height: 100%; display: flex; flex-direction: column; } +.header { + display: flex; + align-items: center; + justify-content: space-between; + gap: 12px; + padding: 16px 18px; + border-bottom: 1px solid var(--border); + background: rgba(16, 24, 38, 0.7); + backdrop-filter: blur(8px); +} +.title { font-weight: 700; letter-spacing: 0.2px; } +.auth { display: flex; align-items: center; gap: 10px; } +.authStatus { color: var(--muted); font-size: 13px; } + +.main { padding: 18px; display: flex; justify-content: center; } +.panel { + width: min(980px, 100%); + border: 1px solid var(--border); + background: rgba(16, 24, 38, 0.9); + border-radius: 14px; + overflow: hidden; + box-shadow: 0 18px 60px rgba(0,0,0,0.35); +} +.panelHeader { padding: 14px 14px 10px; display: flex; align-items: center; justify-content: space-between; gap: 12px; } +.panelTitle { font-weight: 700; } +.panel[role="alert"] { padding: 14px; } +.muted { color: var(--muted); margin-top: 8px; } +.details { margin-top: 10px; } +.details summary { cursor: pointer; color: var(--muted); } +.details summary:focus-visible { outline: 2px solid rgba(96, 165, 250, 0.8); outline-offset: 2px; border-radius: 6px; } + +.tabs { display: inline-flex; gap: 6px; } +.tab { + border: 1px solid var(--border); + background: transparent; + color: var(--muted); + 
padding: 6px 10px; + border-radius: 10px; + cursor: pointer; +} +.tabActive { border-color: rgba(96, 165, 250, 0.35); color: var(--text); } + +.chatLog { padding: 14px; display: flex; flex-direction: column; gap: 10px; max-height: 60vh; overflow: auto; } +.msg { border: 1px solid var(--border); border-radius: 12px; padding: 10px 12px; } +.msgUser { background: rgba(96, 165, 250, 0.07); } +.msgAssistant { background: rgba(170, 180, 197, 0.06); } +.msgRole { font-size: 12px; color: var(--muted); margin-bottom: 6px; } +.msgText { white-space: pre-wrap; line-height: 1.4; } + +.error { padding: 10px 14px; border-top: 1px solid var(--border); color: var(--danger); } + +.chatComposer { + display: flex; + gap: 10px; + padding: 14px; + border-top: 1px solid var(--border); +} +.input { + flex: 1; + border: 1px solid var(--border); + background: rgba(11, 15, 23, 0.6); + color: var(--text); + border-radius: 12px; + padding: 10px 12px; + outline: none; +} +.input:focus { border-color: rgba(96, 165, 250, 0.6); } +.input:focus-visible { outline: 2px solid rgba(96, 165, 250, 0.8); outline-offset: 2px; } +.button { + border: 1px solid rgba(96, 165, 250, 0.45); + background: rgba(96, 165, 250, 0.18); + color: var(--text); + border-radius: 12px; + padding: 10px 12px; + cursor: pointer; +} +.button:focus-visible { outline: 2px solid rgba(96, 165, 250, 0.8); outline-offset: 2px; } +.tab:focus-visible { outline: 2px solid rgba(96, 165, 250, 0.8); outline-offset: 2px; } +.button:disabled { opacity: 0.6; cursor: not-allowed; } +.mono { font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, monospace; font-size: 12px; } +EOF + + cat > src/main.tsx <<'EOF' +import React from "react"; +import ReactDOM from "react-dom/client"; +import { App } from "./components/App"; + +ReactDOM.createRoot(document.getElementById("root")!).render( + + + , +); +EOF + + cat > .env.example <<'EOF' +# Public app config (Vite) +VITE_SSO_GATEWAY_BASE_URL=https://smart-ide-sso.example.test + +# OIDC 
PKCE client config +VITE_OIDC_ISSUER=https://docv.example.test +VITE_OIDC_CLIENT_ID=smart-ide-site +VITE_OIDC_REDIRECT_URI=http://localhost:5173/auth/callback +VITE_OIDC_POST_LOGOUT_REDIRECT_URI=http://localhost:5173/ + +# Chat modes +VITE_ANYTHINGLLM_WORKSPACE_SLUG=enso-test +VITE_OLLAMA_MODEL=llama3.1 +EOF + + # Update package.json + APP_NAME="${NAME}" node - <<'EOF' +const fs = require("fs"); +const p = JSON.parse(fs.readFileSync("package.json", "utf8")); +p.name = process.env.APP_NAME || p.name; +p.private = true; +p.type = "module"; +p.scripts = { + dev: "vite", + build: "tsc -p tsconfig.json && vite build", + preview: "vite preview --strictPort --port 4173", +}; +fs.writeFileSync("package.json", JSON.stringify(p, null, 2) + "\n", "utf8"); +EOF + + cat > README.md <<'EOF' +## Smart IDE — Lovable-style site scaffold + +This scaffold is a front-end web app that: + +- logs users in via **OIDC (PKCE)** +- calls `smart-ide-sso-gateway` which proxies to the internal services (including the orchestrator) +- exposes two chat modes: + - **RAG** (`rag.query`) via AnythingLLM OpenAI-compatible endpoint + - **Local chat** (`chat.local`) via Ollama + +### Setup + +1. Copy `.env.example` to `.env.local` and set: + - `VITE_SSO_GATEWAY_BASE_URL` + - OIDC settings (`VITE_OIDC_*`) + - chat settings (`VITE_ANYTHINGLLM_WORKSPACE_SLUG`, `VITE_OLLAMA_MODEL`) + +2. Run: + +```bash +npm install +npm run dev +``` + +### Runtime notes + +- Tokens are stored in **sessionStorage** (not localStorage). +- The app does not embed service tokens; it only uses the user OIDC access token. +EOF + + echo "[site-generate] Files created." 
+ echo "[site-generate] Next steps:" + echo " - cd ${TARGET}" + echo " - cp .env.example .env.local" + echo " - npm run dev" +) + diff --git a/services/ia_dev/tree.txt b/services/ia_dev/tree.txt new file mode 100644 index 0000000..272c632 --- /dev/null +++ b/services/ia_dev/tree.txt @@ -0,0 +1,62 @@ +projects +├── algo +│   └── conf.json +├── enso +│   ├── conf.json +│   └── docs +├── ia_dev +│   ├── conf.json +│   └── docs +│   ├── agents-scripts-split.md +│   ├── GIT_ISSUES_SCRIPTS_AGENTS.md +│   ├── README.md +│   ├── TICKETS_SPOOL_FORMAT.md +│   └── WORKFLOWS_AND_COMPONENTS.md +├── lecoffreio +│   ├── conf.json +│   ├── data +│   │   ├── issues +│   │   │   ├── 2026-03-14T134128.af28dfa2.nicolas.cantu_pm.me.d +│   │   │   │   └── 0_publickey_-_nicolas.cantu_pm.me_-___0xAFF1ECF4.asc +│   │   │   └── 2026-03-14T134128.af28dfa2.nicolas.cantu_pm.me.pending +│   │   └── notary-ai +│   │   ├── pending +│   │   └── responded +│   ├── docs +│   │   ├── agents-scripts-split.md +│   │   ├── ANCRAGE_COMPLETE.md +│   │   ├── API.md +│   │   ├── ARCHITECTURE.md +│   │   ├── CODE_STANDARDS.md +│   │   ├── DATABASE_COMPLETE.md +│   │   ├── DEPLOYMENT.md +│   │   ├── FRONTEND.md +│   │   ├── IMPORT_V1_DEPENDENCIES.md +│   │   ├── import-v1-schema-and-scripts-analysis.md +│   │   ├── MAILCHIMP_TEMPLATES.md +│   │   ├── MIGRATION.md +│   │   ├── OPERATIONS.md +│   │   ├── README.md +│   │   ├── SCRIPTS.md +│   │   ├── sources +│   │   │   ├── API Annuaire - Hi├®rarchies des entit├®s dans le notariat - API Annuaire.pdf +│   │   │   ├── API Annuaire - Migration de l'APIv1 vers l'APIv2.pdf +│   │   │   ├── API Annuaire - Pr├®sentation et guide d'int├®gration.pdf +│   │   │   ├── API Annuaire - V2 - Documentation Utilisateur.pdf +│   │   │   ├── Documentation API 1.21 (1).pdf +│   │   │   ├── ID.NOT - Document d'int├®gration OpenIDConnect.pdf +│   │   │   ├── ID.NOT - Pr├®sentation et guide d'int├®gration.pdf +│   │   │   └── Portail des raccordements - Guide de d├®marrage.pdf +│   
│   ├── SYNC_V1_TO_V2_AT_LOGIN_PLAN.md +│   │   ├── v1-schema.sql +│   │   └── WORKFLOWS_AND_COMPONENTS.md +│   └── logs +│   └── git-issues +│   ├── agent-loop-600-cycles.log +│   ├── agent-loop-chat-iterations.log +│   ├── agent-loop.lock +│   ├── agent-loop.pending +│   └── agent-loop.status +└── README.md + +17 directories, 43 files diff --git a/services/smart-ide-global-api/src/server.ts b/services/smart-ide-global-api/src/server.ts index 57e5469..a221f63 100644 --- a/services/smart-ide-global-api/src/server.ts +++ b/services/smart-ide-global-api/src/server.ts @@ -23,6 +23,33 @@ const readBearer = (req: http.IncomingMessage): string | null => { return m?.[1]?.trim() ?? null; }; +const isSafeProxyPath = (p: string): boolean => { + if (!p.startsWith("/")) { + return false; + } + for (const rawSeg of p.split("/")) { + if (rawSeg.length === 0) { + continue; + } + if (rawSeg === "." || rawSeg === "..") { + return false; + } + let seg: string; + try { + seg = decodeURIComponent(rawSeg); + } catch { + return false; + } + if (seg === "." || seg === "..") { + return false; + } + if (seg.includes("/") || seg.includes("\\")) { + return false; + } + } + return true; +}; + const readBodyBuffer = async (req: http.IncomingMessage): Promise => { const chunks: Buffer[] = []; let total = 0; @@ -154,6 +181,11 @@ const main = (): void => { } upstreamKey = proxyMatch[1]; const rest = proxyMatch[2] ?? 
"/"; + if (!isSafeProxyPath(rest)) { + status = 400; + json(res, status, { error: "Invalid proxy path" }); + return; + } logPath = `/proxy/${upstreamKey}${rest}`; const upstream = resolveUpstream(upstreamKey); if (!upstream) { @@ -161,6 +193,13 @@ const main = (): void => { json(res, status, { error: `Unknown upstream: ${upstreamKey}` }); return; } + if (upstream.auth.kind === "bearer" && !upstream.auth.token) { + status = 503; + json(res, status, { + error: `Token not configured for upstream ${upstreamKey}`, + }); + return; + } if (upstream.auth.kind === "header" && !upstream.auth.value) { status = 503; json(res, status, { @@ -175,10 +214,10 @@ const main = (): void => { status = await proxyToUpstream(res, targetUrl, headers, body, method); } catch (e) { const msg = e instanceof Error ? e.message : String(e); + console.error(`smart-ide-global-api: request failed: ${msg}`); status = status || 400; if (!res.headersSent) { - json(res, 400, { error: msg }); - status = 400; + json(res, status, { error: "Request failed" }); } } finally { const skipLog = diff --git a/services/smart-ide-orchestrator/.env.example b/services/smart-ide-orchestrator/.env.example index f63d7fe..7215c71 100644 --- a/services/smart-ide-orchestrator/.env.example +++ b/services/smart-ide-orchestrator/.env.example @@ -1,12 +1,21 @@ ORCHESTRATOR_HOST=127.0.0.1 ORCHESTRATOR_PORT=37145 ORCHESTRATOR_TOKEN= +ORCHESTRATOR_UPSTREAM_TIMEOUT_MS=300000 OLLAMA_URL=http://127.0.0.1:11434 ANYTHINGLLM_BASE_URL= +ANYTHINGLLM_API_KEY= REPOS_DEVTOOLS_URL=http://127.0.0.1:37140 +REPOS_DEVTOOLS_TOKEN= REGEX_SEARCH_URL=http://127.0.0.1:37143 +REGEX_SEARCH_TOKEN= LANGEXTRACT_URL=http://127.0.0.1:37141 +LANGEXTRACT_SERVICE_TOKEN= LOCAL_OFFICE_URL=http://127.0.0.1:8000 +# Must be one of services/local-office API_KEYS entries +LOCAL_OFFICE_API_KEY= IA_DEV_GATEWAY_URL=http://127.0.0.1:37144 +IA_DEV_GATEWAY_TOKEN= TOOLS_BRIDGE_URL=http://127.0.0.1:37147 +TOOLS_BRIDGE_TOKEN= diff --git 
a/services/smart-ide-orchestrator/src/server.ts b/services/smart-ide-orchestrator/src/server.ts index eaf2cf6..9a04e55 100644 --- a/services/smart-ide-orchestrator/src/server.ts +++ b/services/smart-ide-orchestrator/src/server.ts @@ -1,4 +1,5 @@ import * as http from "node:http"; +import * as crypto from "node:crypto"; import { readExpectedToken, requireBearer } from "./auth.js"; import { readJsonBody } from "./httpUtil.js"; @@ -39,6 +40,86 @@ const iaDevGatewayUrl = (): string => const toolsBridgeUrl = (): string => (process.env.TOOLS_BRIDGE_URL ?? "http://127.0.0.1:37147").replace(/\/+$/, ""); +const requiredEnv = (name: string): string => { + const v = process.env[name]?.trim() ?? ""; + if (v.length === 0) { + throw new Error(`Missing required env: ${name}`); + } + return v; +}; + +const headerValue = (h: string | string[] | undefined): string => { + if (typeof h === "string") { + return h; + } + if (Array.isArray(h)) { + return h[0] ?? ""; + } + return ""; +}; + +const userOidcSub = (req: http.IncomingMessage): string => + headerValue(req.headers["x-oidc-sub"]).trim(); + +const isUserRequest = (req: http.IncomingMessage): boolean => + userOidcSub(req).length > 0; + +const USER_ALLOWED_INTENTS = new Set(["chat.local", "rag.query"]); + +const upstreamAuthHeaders = (upstreamUrl: string): Record => { + const headers: Record = {}; + + const reposBase = reposDevtoolsUrl(); + if (upstreamUrl.startsWith(reposBase)) { + headers.Authorization = `Bearer ${requiredEnv("REPOS_DEVTOOLS_TOKEN")}`; + return headers; + } + + const regexBase = regexSearchUrl(); + if (upstreamUrl.startsWith(regexBase)) { + headers.Authorization = `Bearer ${requiredEnv("REGEX_SEARCH_TOKEN")}`; + return headers; + } + + const langextractBase = langextractUrl(); + if (upstreamUrl.startsWith(langextractBase)) { + headers.Authorization = `Bearer ${requiredEnv("LANGEXTRACT_SERVICE_TOKEN")}`; + return headers; + } + + const iaBase = iaDevGatewayUrl(); + if (upstreamUrl.startsWith(iaBase)) { + 
headers.Authorization = `Bearer ${requiredEnv("IA_DEV_GATEWAY_TOKEN")}`; + return headers; + } + + const toolsBase = toolsBridgeUrl(); + if (upstreamUrl.startsWith(toolsBase)) { + headers.Authorization = `Bearer ${requiredEnv("TOOLS_BRIDGE_TOKEN")}`; + return headers; + } + + const localOfficeBase = localOfficeUrl(); + if (upstreamUrl.startsWith(localOfficeBase)) { + headers["X-API-Key"] = requiredEnv("LOCAL_OFFICE_API_KEY"); + return headers; + } + + const allmBase = anythingLlmBase(); + if (allmBase.length > 0 && upstreamUrl.startsWith(allmBase)) { + headers.Authorization = `Bearer ${requiredEnv("ANYTHINGLLM_API_KEY")}`; + return headers; + } + + return headers; +}; + +const upstreamTimeoutMs = (): number => { + const raw = process.env.ORCHESTRATOR_UPSTREAM_TIMEOUT_MS ?? "300000"; + const n = Number(raw); + return Number.isFinite(n) && n > 0 ? Math.floor(n) : 300000; +}; + const resolveIntent = (intent: string): Resolution => { switch (intent) { case "code.complete": @@ -50,15 +131,16 @@ const resolveIntent = (intent: string): Resolution => { upstream: { method: "POST", url: `${ollamaBase()}/api/generate`, headersHint: ["Content-Type"] }, }; case "rag.query": + if (!anythingLlmBase()) { + return { resolved: false, reason: "Missing ANYTHINGLLM_BASE_URL" }; + } return { resolved: true, target: "anythingllm", - action: "workspace_chat", + action: "openai_chat_completions", upstream: { method: "POST", - url: anythingLlmBase() - ? 
`${anythingLlmBase()}/api/v1/workspace/...` - : "configure ANYTHINGLLM_BASE_URL", + url: `${anythingLlmBase()}/api/v1/openai/chat/completions`, headersHint: ["Authorization", "Content-Type"], }, }; @@ -178,6 +260,10 @@ const main = (): void => { if (!requireBearer(req, res, token)) { return; } + if (isUserRequest(req)) { + json(res, 403, { error: "Timeline is not available for user requests" }); + return; + } json(res, 200, { items: timeline.slice(-100) }); return; } @@ -191,6 +277,14 @@ const main = (): void => { json(res, 422, { error: "Missing intent (string)" }); return; } + if (isUserRequest(req) && !USER_ALLOWED_INTENTS.has(body.intent)) { + json(res, 403, { + error: "Intent not allowed for user requests", + intent: body.intent, + allowedIntents: Array.from(USER_ALLOWED_INTENTS.values()), + }); + return; + } const dryRun = body.dryRun === true; const r = resolveIntent(body.intent); if (!r.resolved) { @@ -225,23 +319,137 @@ const main = (): void => { json(res, 422, { error: "Missing intent (string)" }); return; } + if (isUserRequest(req) && !USER_ALLOWED_INTENTS.has(body.intent)) { + json(res, 403, { + error: "Intent not allowed for user requests", + intent: body.intent, + allowedIntents: Array.from(USER_ALLOWED_INTENTS.values()), + }); + return; + } const r = resolveIntent(body.intent); if (!r.resolved) { json(res, 422, { error: r.reason }); return; } + const runId = crypto.randomUUID(); + const projectId = typeof body.projectId === "string" ? body.projectId : undefined; timeline.push({ at: new Date().toISOString(), type: "execute", summary: `${body.intent} -> ${r.target}/${r.action}`, - projectId: typeof body.projectId === "string" ? 
body.projectId : undefined, + runId, + projectId, }); + const dryRun = body.dryRun === true; + if (dryRun) { + json(res, 200, { + ok: true, + forwarded: false, + dryRun: true, + runId, + resolution: r, + }); + return; + } + + if (!r.upstream) { + json(res, 500, { error: "Missing upstream in resolution", runId, resolution: r }); + return; + } + + const context = isRecord(body.context) ? body.context : {}; + + let upstreamStatus = 0; + let upstreamText = ""; + let upstreamJson: unknown = undefined; + let upstreamTruncated = false; + try { + const headers: Record = { + Accept: "application/json", + ...upstreamAuthHeaders(r.upstream.url), + }; + + let upstreamBody: BodyInit | undefined = undefined; + + if (r.upstream.method !== "GET") { + if (body.intent === "doc.office.upload") { + const filename = typeof context.filename === "string" ? context.filename.trim() : ""; + const mimeType = typeof context.mimeType === "string" ? context.mimeType.trim() : ""; + const base64 = typeof context.base64 === "string" ? context.base64.trim() : ""; + if (!filename || !mimeType || !base64) { + json(res, 422, { + error: + "doc.office.upload requires context: { filename (string), mimeType (string), base64 (string) }", + runId, + }); + return; + } + const bytes = Buffer.from(base64, "base64"); + const form = new FormData(); + form.append("file", new Blob([bytes], { type: mimeType }), filename); + upstreamBody = form; + } else { + headers["Content-Type"] = "application/json"; + upstreamBody = JSON.stringify(context); + } + } + + const response = await fetch(r.upstream.url, { + method: r.upstream.method, + headers, + body: upstreamBody, + signal: AbortSignal.timeout(upstreamTimeoutMs()), + }); + upstreamStatus = response.status; + upstreamText = (await response.text()) ?? 
""; + if (upstreamText.length > 1_000_000) { + upstreamText = upstreamText.slice(0, 1_000_000); + upstreamTruncated = true; + } + try { + upstreamJson = JSON.parse(upstreamText) as unknown; + } catch { + upstreamJson = undefined; + } + } catch (e) { + const msg = e instanceof Error ? e.message : String(e); + console.error(`smart-ide-orchestrator: upstream request failed: ${msg}`); + timeline.push({ + at: new Date().toISOString(), + type: "forward_error", + summary: `${body.intent} -> ${r.upstream.url} (error)`, + runId, + projectId, + }); + json(res, 502, { + ok: false, + forwarded: true, + runId, + error: "Upstream request failed", + resolution: r, + }); + return; + } + + timeline.push({ + at: new Date().toISOString(), + type: "forward", + summary: `${body.intent} -> ${r.upstream.url} (HTTP ${upstreamStatus})`, + runId, + projectId, + }); + json(res, 200, { - ok: true, - forwarded: false, - message: - "Stub: call upstream yourself or extend orchestrator with fetch() and service tokens.", + ok: upstreamStatus >= 200 && upstreamStatus < 300, + forwarded: true, + runId, resolution: r, + upstream: { + status: upstreamStatus, + truncated: upstreamTruncated, + }, + response: upstreamJson ?? upstreamText, }); return; } @@ -249,7 +457,8 @@ const main = (): void => { json(res, 404, { error: "Not found" }); } catch (e) { const msg = e instanceof Error ? e.message : String(e); - json(res, 400, { error: msg }); + console.error(`smart-ide-orchestrator: request failed: ${msg}`); + json(res, 400, { error: "Request failed" }); } })(); }); diff --git a/services/smart-ide-sso-gateway/.env.example b/services/smart-ide-sso-gateway/.env.example index 1250bf9..a9e2e80 100644 --- a/services/smart-ide-sso-gateway/.env.example +++ b/services/smart-ide-sso-gateway/.env.example @@ -7,6 +7,10 @@ SSO_GATEWAY_PORT=37148 # SSO_CORS_ORIGIN=https://app.example.test # SSO_GATEWAY_MAX_BODY_BYTES=33554432 +# Upstream allowlist (comma-separated). Default is "orchestrator". 
+# Use "*" or "all" to allow every upstream key. +# SSO_ALLOWED_UPSTREAMS=orchestrator + # Required: docv / Enso OpenID issuer URL (JWKS discovery) OIDC_ISSUER=https://docv.example.test # Optional: validate access_token audience diff --git a/services/smart-ide-sso-gateway/src/server.ts b/services/smart-ide-sso-gateway/src/server.ts index 5949ff1..1e370c2 100644 --- a/services/smart-ide-sso-gateway/src/server.ts +++ b/services/smart-ide-sso-gateway/src/server.ts @@ -14,6 +14,37 @@ const globalApiBase = (): string => trimSlash(process.env.GLOBAL_API_URL ?? "http://127.0.0.1:37149"); const globalApiToken = (): string => process.env.GLOBAL_API_INTERNAL_TOKEN?.trim() ?? ""; +const parseAllowedUpstreams = (): string[] => { + const raw = process.env.SSO_ALLOWED_UPSTREAMS?.trim() ?? ""; + if (raw.length === 0) { + return ["orchestrator"]; + } + if (raw === "*" || raw.toLowerCase() === "all") { + return listUpstreamKeys(); + } + return raw + .split(",") + .map((s) => s.trim()) + .filter((s) => s.length > 0); +}; + +const allowedUpstreamKeys = (): string[] => { + const known = new Set(listUpstreamKeys()); + const wanted = parseAllowedUpstreams(); + const unknown = wanted.filter((k) => !known.has(k)); + if (unknown.length > 0) { + console.error( + `smart-ide-sso-gateway: unknown upstream keys in SSO_ALLOWED_UPSTREAMS: ${unknown.join( + ", ", + )}`, + ); + process.exit(1); + } + return wanted; +}; + +const ALLOWED_UPSTREAMS = new Set(allowedUpstreamKeys()); + const corsHeaders = (): Record => { if (!CORS_ORIGIN) { return {}; @@ -23,6 +54,7 @@ const corsHeaders = (): Record => { "Access-Control-Allow-Methods": "GET, POST, PUT, PATCH, DELETE, OPTIONS", "Access-Control-Allow-Headers": "Authorization, Content-Type", "Access-Control-Max-Age": "86400", + Vary: "Origin", }; }; @@ -45,6 +77,33 @@ const readBearer = (req: http.IncomingMessage): string | null => { return m?.[1]?.trim() ?? 
null; }; +const isSafeProxyPath = (p: string): boolean => { + if (!p.startsWith("/")) { + return false; + } + for (const rawSeg of p.split("/")) { + if (rawSeg.length === 0) { + continue; + } + if (rawSeg === "." || rawSeg === "..") { + return false; + } + let seg: string; + try { + seg = decodeURIComponent(rawSeg); + } catch { + return false; + } + if (seg === "." || seg === "..") { + return false; + } + if (seg.includes("/") || seg.includes("\\")) { + return false; + } + } + return true; +}; + const readBodyBuffer = async (req: http.IncomingMessage): Promise => { const chunks: Buffer[] = []; let total = 0; @@ -220,7 +279,7 @@ const main = async (): Promise => { if (method === "GET" && pathname === "/v1/upstreams") { status = 200; - json(res, status, { upstreams: listUpstreamKeys() }); + json(res, status, { upstreams: Array.from(ALLOWED_UPSTREAMS.values()) }); return; } @@ -231,7 +290,21 @@ const main = async (): Promise => { return; } upstreamKey = proxyMatch[1]; + if (!ALLOWED_UPSTREAMS.has(upstreamKey)) { + status = 403; + json(res, status, { + error: "Upstream not allowed for user requests", + upstream: upstreamKey, + allowedUpstreams: Array.from(ALLOWED_UPSTREAMS.values()), + }); + return; + } const rest = proxyMatch[2] ?? "/"; + if (!isSafeProxyPath(rest)) { + status = 400; + json(res, status, { error: "Invalid proxy path" }); + return; + } logPath = pathname; const targetUrl = `${globalApiBase()}/v1/upstream/${upstreamKey}${rest}${url.search}`; const body = await readBodyBuffer(req); @@ -239,9 +312,10 @@ const main = async (): Promise => { status = await proxyToGlobalApi(req, res, targetUrl, headers, body); } catch (e) { const msg = e instanceof Error ? e.message : String(e); + console.error(`smart-ide-sso-gateway: request failed: ${msg}`); if (!res.headersSent) { status = 400; - json(res, status, { error: msg }); + json(res, status, { error: "Request failed" }); } else if (status === 0) { status = 500; }