chore: remove gitmodules, add docv workspace crates, update systemd README

- Drop .gitmodules (ia_dev tracked as submodule pointer without file)
- Add services/docv Cargo workspace: docv-back, docv-shared, migrations, sources
- Refresh systemd/README.md
This commit is contained in:
Nicolas Cantu 2026-04-03 17:55:50 +02:00
parent 0f9a69e368
commit 01860b7af7
40 changed files with 10799 additions and 3 deletions

3
.gitmodules vendored
View File

@ -1,3 +0,0 @@
[submodule "ia_dev"]
path = ia_dev
url = https://git.4nkweb.com/4nk/ia_dev.git

2419
services/docv/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

14
services/docv/Cargo.toml Normal file
View File

@ -0,0 +1,14 @@
[workspace]
resolver = "2"
members = ["docv-shared", "docv-back"]
# Workspace-wide clippy levels, inherited by member crates via `[lints] workspace = true`.
[workspace.lints.clippy]
# Align with fix-lint / docs/REGLES_CODING_PROJET.md: no unwrap in production, typed errors, propagation via ?
unwrap_used = "warn"
expect_used = "warn"
# Encourage explicit types and avoid unclear code
type_complexity = "warn"
# Avoid redundant clones
redundant_clone = "warn"
# Prefer ? over match on Option/Result where clear
question_mark = "warn"

View File

@ -0,0 +1,46 @@
# docv-back configuration. Copy to .env and set values.
# Database (docv BDD)
DATABASE_URL=postgres://user:password@localhost:5432/docv
# Auth
JWT_SECRET=your-secret-min-32-chars
# Durée (secondes) pour : jeton Bearer OAuth, JWT du cookie docv_oauth_session, Max-Age du cookie après POST /oauth/sign-in. Défaut 900 (15 min). Plage 60–86400.
# DOCV_OAUTH_ACCESS_TOKEN_TTL_SEC=900
# Emails séparés par des virgules : au démarrage, rattachement au premier office (ancienneté) comme rôle client si l'utilisateur n'a aucune société. Laisser vide pour désactiver.
# DOCV_DEMO_MEMBER_EMAILS=client@example.com
# Test uniquement : rattacher tous les utilisateurs sans office_members au plus ancien office (évite GET /api/v1/offices vide). Voir docs/docv/AUTH_SESSION.md
# DOCV_LINK_ORPHAN_USERS_TO_FIRST_OFFICE=1
# OAuth2 (authorization_code) — comma-separated client ids if several apps share this secret
OAUTH_CLIENT_ID=enso-web
OAUTH_CLIENT_SECRET=change-me
OAUTH_REDIRECT_URIS=https://localhost:3032/auth/docv-callback
# Prefix browsers use before /oauth/... (reverse proxy, e.g. /docv-api). Empty only if OAuth is at site root.
OAUTH_BROWSER_PATH_PREFIX=/docv-api
# Primary key column on table users: `id` (shipped migration) or `uid` (legacy / IMPL-style schema).
# DOCV_USERS_PK_COLUMN=uid
# Optional: branding for HTML sign-in (defaults to tenants.default.json in repo)
# OAUTH_TENANTS_PATH=/path/to/tenants.json
# OAUTH_TENANTS_JSON={"default":{"heading":"Connexion"},"clients":{}}
# External APIs (backend only): anchoring (services), IA (submodule ai)
ANCHORING_URL=http://localhost:3016
IA_API_URL=http://localhost:3022
# Server (aligned with nginx proxy → docv-back and /docv-api/; see docs/PORTS_ENSO.md)
HOST=0.0.0.0
PORT=3038
# Optional: directory for binary document uploads (POST /api/v1/folders/:uid/documents/binary).
# When unset, the front falls back to JSON-only metadata (no downloadable file).
# DOCV_FILE_STORAGE_DIR=/var/lib/docv/uploads
# Max body size for binary upload in bytes (default 10485760).
# DOCV_UPLOAD_MAX_BYTES=10485760
# Optional: mirror uploads under data/dossiers-permanents/ and git commit/push (see docs/features/DOSSIERS_PERMANENTS_DATA_GIT.md).
# DOCV_DP_GIT_SYNC=1
# DOCV_DP_GIT_REPO_ROOT=/path/to/enso
# DOCV_DP_GIT_DATA_SUBPATH=data/dossiers-permanents
# DOCV_DP_GIT_REMOTE=origin
# DOCV_DP_GIT_BRANCH=test

View File

@ -0,0 +1,34 @@
[package]
name = "docv-back"
version = "0.1.0"
edition = "2021"
description = "docv backend: HTTP API (Rust), PostgreSQL, auth, OAuth2 for enso-front"
# Inherit clippy lint levels from the workspace `[workspace.lints.clippy]` table.
[lints]
workspace = true
[[bin]]
name = "docv-back"
path = "src/main.rs"
[dependencies]
# Shared types between docv crates.
docv-shared = { path = "../docv-shared" }
# Password hashing for sign-in verification.
bcrypt = "0.13"
# PostgreSQL connection pool over tokio-postgres.
deadpool-postgres = "0.10"
# HTTP server/client (0.14 line: pre-1.0 hyper API).
hyper = { version = "0.14", features = ["full"] }
jsonwebtoken = "8"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
serde_urlencoded = "0.7"
tokio = { version = "1", features = ["full"] }
tokio-postgres = { version = "0.7.8", features = ["with-uuid-1"] }
postgres-types = { version = "0.2.6", features = ["with-uuid-1"] }
urlencoding = "2"
html-escape = "0.2"
uuid = { version = "1", features = ["v4", "serde"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
# rustls-tls instead of native-tls: no OpenSSL system dependency.
reqwest = { version = "0.11", default-features = false, features = ["json", "rustls-tls"] }
[dev-dependencies]
tempfile = "3"

View File

@ -0,0 +1,14 @@
-- Minimal users table for docv authentication (zone 1, IMPL_01).
-- Compatible with PostgreSQL 13+ (gen_random_uuid).
CREATE TABLE IF NOT EXISTS users (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    -- Login identifier; the UNIQUE constraint also backs the seed's ON CONFLICT (email).
    email TEXT NOT NULL UNIQUE,
    password_hash TEXT NOT NULL,
    name TEXT,
    phone TEXT,
    preferences JSONB,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
-- NOTE(review): likely redundant — the UNIQUE constraint above already creates an index on email.
CREATE INDEX IF NOT EXISTS idx_users_email ON users (email);

View File

@ -0,0 +1,9 @@
-- Demo user aligned with Lovable mock (password: demo). Safe on repeated apply.
INSERT INTO users (email, password_hash, name)
VALUES (
    'client@example.com',
    -- bcrypt hash (cost 10) of the demo password.
    '$2b$10$mUXYmHFzQhhB3e7OiRR4JOhLwIrQUhXVLM8b2mFBwjeJHFjUywrUq',
    'Jean Dupont'
)
-- Idempotent: re-running the migration leaves an existing row untouched.
ON CONFLICT (email) DO NOTHING;

View File

@ -0,0 +1,30 @@
-- Minimal offices / memberships / folders for docv API (zones 5 & 2 subset).
-- user_uid matches users primary key value (UUID as text from JWT sub) without DB FK (supports id or uid PK on users).
CREATE TABLE IF NOT EXISTS offices (
    uid UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    name TEXT NOT NULL,
    siren TEXT,
    address TEXT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
-- Join table: one row per (office, user); role is a free-text label defaulting to 'member'.
CREATE TABLE IF NOT EXISTS office_members (
    office_uid UUID NOT NULL REFERENCES offices(uid) ON DELETE CASCADE,
    user_uid UUID NOT NULL,
    role TEXT NOT NULL DEFAULT 'member',
    PRIMARY KEY (office_uid, user_uid)
);
-- Supports "offices for this user" lookups (the PK only covers office-first access).
CREATE INDEX IF NOT EXISTS idx_office_members_user ON office_members(user_uid);
CREATE TABLE IF NOT EXISTS folders (
    uid UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    office_uid UUID NOT NULL REFERENCES offices(uid) ON DELETE CASCADE,
    title TEXT NOT NULL,
    status TEXT NOT NULL DEFAULT 'open',
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE INDEX IF NOT EXISTS idx_folders_office ON folders(office_uid);

View File

@ -0,0 +1,3 @@
-- Align legacy offices table (created before siren/address) with current API queries.
-- Both columns stay nullable on purpose: legacy rows have no value to backfill.
ALTER TABLE offices ADD COLUMN IF NOT EXISTS siren TEXT;
ALTER TABLE offices ADD COLUMN IF NOT EXISTS address TEXT;

View File

@ -0,0 +1,2 @@
-- Align legacy office_members (created before membership role) with API JOIN on m.role.
-- NOT NULL + DEFAULT backfills existing rows with 'member' in the same statement.
ALTER TABLE office_members ADD COLUMN IF NOT EXISTS role TEXT NOT NULL DEFAULT 'member';

View File

@ -0,0 +1,55 @@
-- Dashboard lists: notifications, pending documents, conversations (per user + office scope).
-- No FK to users (same pattern as office_members.user_uid) for legacy PK compatibility.
CREATE TABLE IF NOT EXISTS user_notifications (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    user_uid UUID NOT NULL,
    office_uid UUID NOT NULL REFERENCES offices(uid) ON DELETE CASCADE,
    notif_type TEXT NOT NULL CHECK (notif_type IN ('new_document', 'request_document', 'case_update')),
    message TEXT NOT NULL,
    -- A deleted folder keeps the notification but clears the link.
    case_uid UUID REFERENCES folders(uid) ON DELETE SET NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    is_read BOOLEAN NOT NULL DEFAULT false
);
-- Composite index matches the "latest notifications for a user" listing order.
CREATE INDEX IF NOT EXISTS idx_user_notifications_user ON user_notifications(user_uid, created_at DESC);
CREATE TABLE IF NOT EXISTS user_pending_documents (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    user_uid UUID NOT NULL,
    office_uid UUID NOT NULL REFERENCES offices(uid) ON DELETE CASCADE,
    name TEXT NOT NULL,
    description TEXT NOT NULL DEFAULT '',
    -- Unlike notifications, a pending document is deleted with its folder.
    case_uid UUID NOT NULL REFERENCES folders(uid) ON DELETE CASCADE,
    case_name TEXT NOT NULL DEFAULT '',
    requested_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    due_date DATE
);
CREATE INDEX IF NOT EXISTS idx_user_pending_user ON user_pending_documents(user_uid, requested_at DESC);
CREATE TABLE IF NOT EXISTS user_conversations (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    user_uid UUID NOT NULL,
    office_uid UUID NOT NULL REFERENCES offices(uid) ON DELETE CASCADE,
    contact_name TEXT NOT NULL,
    contact_role TEXT NOT NULL DEFAULT '',
    -- Denormalized preview fields for the conversation list view.
    last_message TEXT NOT NULL DEFAULT '',
    last_message_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    unread_count INT NOT NULL DEFAULT 0
);
CREATE INDEX IF NOT EXISTS idx_user_conversations_user ON user_conversations(user_uid, last_message_at DESC);
CREATE TABLE IF NOT EXISTS conversation_messages (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    conversation_id UUID NOT NULL REFERENCES user_conversations(id) ON DELETE CASCADE,
    sender_id TEXT NOT NULL DEFAULT '',
    sender_name TEXT NOT NULL,
    sender_role TEXT NOT NULL CHECK (sender_role IN ('client', 'cabinet')),
    content TEXT NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    is_read BOOLEAN NOT NULL DEFAULT false
);
-- Ascending created_at: messages are read oldest-first within a conversation.
CREATE INDEX IF NOT EXISTS idx_conversation_messages_conv ON conversation_messages(conversation_id, created_at);

View File

@ -0,0 +1,22 @@
-- Documents attached to a folder (read on the folder detail page; CRUD via the API).
CREATE TABLE IF NOT EXISTS folder_documents (
    uid UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    folder_uid UUID NOT NULL REFERENCES folders(uid) ON DELETE CASCADE,
    name TEXT NOT NULL,
    doc_type TEXT NOT NULL DEFAULT 'autre',
    category TEXT NOT NULL DEFAULT 'dossier',
    -- Which side uploaded the document; constrained to the two known origins.
    uploaded_by TEXT NOT NULL CHECK (uploaded_by IN ('cabinet', 'client')),
    -- Human-readable size (e.g. "1.2 MB"); display-only.
    size_label TEXT NOT NULL DEFAULT '',
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    CONSTRAINT folder_documents_doc_type_chk CHECK (
        doc_type IN (
            'kbis', 'statuts', 'pv_ag', 'pacte_associes', 'contrat', 'facture', 'autre'
        )
    ),
    CONSTRAINT folder_documents_category_chk CHECK (
        category IN ('permanent', 'dossier')
    )
);
CREATE INDEX IF NOT EXISTS idx_folder_documents_folder ON folder_documents(folder_uid);

View File

@ -0,0 +1,5 @@
-- File metadata (storage URL / MIME type) for future upload integration.
ALTER TABLE folder_documents ADD COLUMN IF NOT EXISTS storage_url TEXT;
ALTER TABLE folder_documents ADD COLUMN IF NOT EXISTS mime_type TEXT;
-- Also adds the office-scoped index on user_pending_documents used by office-wide listings.
CREATE INDEX IF NOT EXISTS idx_user_pending_office ON user_pending_documents(office_uid);

View File

@ -0,0 +1,22 @@
-- Stub seed and API expect folders.title. Some setups only had folders.name.
DO $$
BEGIN
    IF EXISTS (
        SELECT 1 FROM information_schema.tables
        WHERE table_schema = 'public' AND table_name = 'folders'
    ) THEN
        IF EXISTS (
            SELECT 1 FROM information_schema.columns
            WHERE table_schema = 'public' AND table_name = 'folders' AND column_name = 'title'
        ) THEN
            -- Already migrated: nothing to do.
            NULL;
        ELSIF EXISTS (
            SELECT 1 FROM information_schema.columns
            WHERE table_schema = 'public' AND table_name = 'folders' AND column_name = 'name'
        ) THEN
            -- Legacy schema: keep existing data, just rename the column.
            ALTER TABLE folders RENAME COLUMN name TO title;
        ELSE
            -- Neither column present: add title; the DEFAULT backfills NOT NULL for existing rows.
            ALTER TABLE folders ADD COLUMN title TEXT NOT NULL DEFAULT '';
        END IF;
    END IF;
END $$;

View File

@ -0,0 +1,2 @@
-- Earlier (IMPL) schemas: the folders table had no `status` column.
-- The docv-back API and the demo seed rely on it.
ALTER TABLE folders ADD COLUMN IF NOT EXISTS status TEXT NOT NULL DEFAULT 'open';

View File

@ -0,0 +1,9 @@
-- Typed permanent records: link DB rows to the data/dossiers-permanents tree.
ALTER TABLE folders ADD COLUMN IF NOT EXISTS dp_archetype TEXT;
ALTER TABLE folders ADD COLUMN IF NOT EXISTS dp_layout_root TEXT;
ALTER TABLE folder_documents ADD COLUMN IF NOT EXISTS dp_mirror_path TEXT;
-- Partial index: only folders actually mapped to a layout root are indexed.
CREATE INDEX IF NOT EXISTS idx_folders_dp_layout_root ON folders(dp_layout_root)
WHERE dp_layout_root IS NOT NULL;

View File

@ -0,0 +1,10 @@
-- Per-office roles: required when office_members references roles (role_uid IMPL / extensions).
-- If the table already exists with another schema, this statement is a no-op (IF NOT EXISTS).
CREATE TABLE IF NOT EXISTS roles (
    uid UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    -- Nullable: NOTE(review): presumably NULL means a global (non-office) role — confirm.
    office_uid UUID REFERENCES offices(uid) ON DELETE CASCADE,
    name TEXT NOT NULL DEFAULT 'Member',
    created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE INDEX IF NOT EXISTS idx_roles_office_uid ON roles(office_uid);

View File

@ -0,0 +1,19 @@
-- Office (company): hierarchy + archiving; folder: extends the permanent record; office comments (SPEC_18 base).
-- ON DELETE SET NULL: deleting a parent detaches children instead of cascading.
ALTER TABLE offices ADD COLUMN IF NOT EXISTS parent_office_uid UUID REFERENCES offices(uid) ON DELETE SET NULL;
ALTER TABLE offices ADD COLUMN IF NOT EXISTS archived_at TIMESTAMPTZ;
CREATE INDEX IF NOT EXISTS idx_offices_parent ON offices(parent_office_uid) WHERE parent_office_uid IS NOT NULL;
ALTER TABLE folders ADD COLUMN IF NOT EXISTS extends_permanent_record BOOLEAN NOT NULL DEFAULT false;
CREATE TABLE IF NOT EXISTS office_comments (
    uid UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    office_uid UUID NOT NULL REFERENCES offices(uid) ON DELETE CASCADE,
    -- No FK to users, same legacy-PK pattern as office_members.user_uid.
    user_uid UUID NOT NULL,
    content TEXT NOT NULL,
    access_level TEXT NOT NULL DEFAULT 'internal',
    created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE INDEX IF NOT EXISTS idx_office_comments_office ON office_comments(office_uid);

View File

@ -0,0 +1,61 @@
-- Sources (links to other documents), folder notes, tasks, document workflow state.
ALTER TABLE folder_documents
ADD COLUMN IF NOT EXISTS workflow_state TEXT NOT NULL DEFAULT 'draft';
-- Drop-then-add keeps the CHECK in sync when re-running after the allowed list changed.
ALTER TABLE folder_documents
DROP CONSTRAINT IF EXISTS folder_documents_workflow_state_chk;
ALTER TABLE folder_documents
ADD CONSTRAINT folder_documents_workflow_state_chk CHECK (
    workflow_state IN (
        'draft',
        'requested',
        'submitted',
        'validated',
        'rejected',
        'archived'
    )
);
CREATE TABLE IF NOT EXISTS folder_document_sources (
    uid UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    folder_uid UUID NOT NULL REFERENCES folders(uid) ON DELETE CASCADE,
    label TEXT,
    target_document_uid UUID NOT NULL REFERENCES folder_documents(uid) ON DELETE CASCADE,
    -- RESTRICT: a user who created source links cannot be deleted while they exist.
    created_by UUID NOT NULL REFERENCES users(id) ON DELETE RESTRICT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    -- At most one link per (folder, target document) pair.
    CONSTRAINT folder_document_sources_unique_target UNIQUE (folder_uid, target_document_uid)
);
CREATE INDEX IF NOT EXISTS idx_folder_document_sources_folder ON folder_document_sources(folder_uid);
CREATE INDEX IF NOT EXISTS idx_folder_document_sources_target ON folder_document_sources(target_document_uid);
CREATE TABLE IF NOT EXISTS folder_notes (
    uid UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    folder_uid UUID NOT NULL REFERENCES folders(uid) ON DELETE CASCADE,
    content TEXT NOT NULL,
    author_user_uid UUID NOT NULL REFERENCES users(id) ON DELETE RESTRICT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE INDEX IF NOT EXISTS idx_folder_notes_folder ON folder_notes(folder_uid);
CREATE TABLE IF NOT EXISTS tasks (
    uid UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    office_uid UUID NOT NULL REFERENCES offices(uid) ON DELETE CASCADE,
    -- Nullable: a task may be office-wide rather than tied to one folder.
    folder_uid UUID REFERENCES folders(uid) ON DELETE CASCADE,
    title TEXT NOT NULL,
    description TEXT,
    status TEXT NOT NULL DEFAULT 'open',
    assignee_user_uid UUID REFERENCES users(id) ON DELETE SET NULL,
    due_at TIMESTAMPTZ,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    CONSTRAINT tasks_status_chk CHECK (status IN ('open', 'in_progress', 'done', 'cancelled'))
);
CREATE INDEX IF NOT EXISTS idx_tasks_office ON tasks(office_uid);
CREATE INDEX IF NOT EXISTS idx_tasks_folder ON tasks(folder_uid);
CREATE INDEX IF NOT EXISTS idx_tasks_status ON tasks(status);

View File

@ -0,0 +1,18 @@
-- Folders: distinguish client operations from demo-type (DP) structures; business operation type.
ALTER TABLE folders ADD COLUMN IF NOT EXISTS folder_purpose TEXT NOT NULL DEFAULT 'client_operation';
ALTER TABLE folders ADD COLUMN IF NOT EXISTS operation_type TEXT;
-- Backfill: demo layout folders seeded under instances/ are reclassified.
UPDATE folders
SET folder_purpose = 'dp_structure_demo'
WHERE title LIKE 'Jeu type %'
AND dp_layout_root IS NOT NULL
AND dp_layout_root LIKE 'instances/%';
-- Rename the legacy demo office (case-insensitive, whitespace-trimmed match).
UPDATE offices
SET name = 'Entreprise démo (fictive)'
WHERE lower(btrim(name)) = lower(btrim('Cabinet démo'));
-- Drop-then-add keeps the CHECK current when re-running with a changed list.
ALTER TABLE folders DROP CONSTRAINT IF EXISTS folders_folder_purpose_check;
ALTER TABLE folders ADD CONSTRAINT folders_folder_purpose_check
CHECK (folder_purpose IN ('client_operation', 'dp_structure_demo'));

View File

@ -0,0 +1,34 @@
//! Bearer JWT validation for `/api/v1/*` (same secret and claims shape as OAuth access tokens).
use hyper::header::HeaderMap;
use jsonwebtoken::{decode, DecodingKey, Validation};
use serde::{Deserialize, Serialize};
/// Claims embedded in docv access tokens (same shape as the OAuth access
/// tokens, per the module contract above).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AccessClaims {
    /// Subject: the user identifier as text.
    pub sub: String,
    pub email: String,
    pub name: Option<String>,
    /// Expiry, seconds since the Unix epoch.
    pub exp: usize,
    /// Issuer string — NOTE(review): not matched against an expected value on decode.
    pub iss: String,
}
/// Extracts the token from an `Authorization: Bearer <token>` header.
///
/// Returns `None` when the header is absent, not valid UTF-8, uses a
/// different scheme, or carries an empty token. The scheme name is matched
/// case-insensitively, as RFC 7235 requires (`bearer x` was previously
/// rejected by the exact `"Bearer "` prefix match).
pub fn bearer_token(headers: &HeaderMap) -> Option<String> {
    let hv = headers.get(hyper::header::AUTHORIZATION)?.to_str().ok()?;
    let (scheme, rest) = hv.split_once(' ')?;
    if !scheme.eq_ignore_ascii_case("Bearer") {
        return None;
    }
    let t = rest.trim();
    if t.is_empty() {
        return None;
    }
    Some(t.to_string())
}
/// Verifies `token` against `secret` using the library-default validation
/// and returns the embedded claims. All failure details are collapsed into
/// `Err(())`: callers only learn "valid or not".
pub fn decode_access_token(secret: &str, token: &str) -> Result<AccessClaims, ()> {
    let key = DecodingKey::from_secret(secret.as_bytes());
    let validation = Validation::default();
    match decode::<AccessClaims>(token, &key, &validation) {
        Ok(data) => Ok(data.claims),
        Err(_) => Err(()),
    }
}

View File

@ -0,0 +1,243 @@
//! Per-OAuth-client branding for the sign-in HTML (wording + CSS colors).
use html_escape::encode_text;
use serde::Deserialize;
use std::collections::HashMap;
/// Compiled-in fallback branding (tenants.default.json beside the crate manifest).
const FALLBACK_JSON: &str = include_str!("../tenants.default.json");

/// One tenant entry as written in the JSON config: every field optional so
/// a client entry only overrides what it sets.
#[derive(Debug, Deserialize, Clone, Default)]
struct BrandingPartial {
    page_title: Option<String>,
    heading: Option<String>,
    subtitle: Option<String>,
    primary_color: Option<String>,
    accent_color: Option<String>,
    surface_color: Option<String>,
    text_color: Option<String>,
    submit_label: Option<String>,
    font_family: Option<String>,
}

/// Fully resolved branding: every field populated (override → default →
/// hard-coded fallback) and colors sanitized to hex values.
#[derive(Debug, Clone)]
pub struct BrandingResolved {
    pub page_title: String,
    pub heading: String,
    pub subtitle: String,
    pub primary_color: String,
    pub accent_color: String,
    pub surface_color: String,
    pub text_color: String,
    pub submit_label: String,
    pub font_family: String,
}

/// Top-level shape of the tenants JSON: a "default" entry plus per-client overrides.
#[derive(Debug, Deserialize)]
struct TenantsFile {
    #[serde(rename = "default")]
    fallback: BrandingPartial,
    #[serde(default)]
    clients: HashMap<String, BrandingPartial>,
}
/// Extracts `client_id` from a return URL pointing at `/oauth/authorize?...`.
/// Extracts `client_id` from a return URL pointing at `/oauth/authorize?...`.
///
/// Only the query string (after the first `?`) is inspected, so any path or
/// reverse-proxy prefix is irrelevant. Returns `None` when there is no query
/// string, it fails to parse, or `client_id` is missing/blank.
pub fn client_id_from_authorize_return_url(return_url: &str) -> Option<String> {
    let (_, query) = return_url.split_once('?')?;
    let params: HashMap<String, String> = serde_urlencoded::from_str(query).ok()?;
    params
        .get("client_id")
        .map(|v| v.trim())
        .filter(|v| !v.is_empty())
        .map(str::to_string)
}
/// Validates a CSS hex color and returns it, or `fallback` for anything
/// else, so untrusted branding config cannot smuggle arbitrary tokens into
/// the generated stylesheet.
///
/// Accepts exactly the four CSS forms: `#rgb`, `#rgba`, `#rrggbb`,
/// `#rrggbbaa`. The previous check accepted any all-hex string of length
/// >= 4 (e.g. the invalid `#abcde`).
fn sanitize_hex_color(input: &str, fallback: &str) -> String {
    let t = input.trim();
    let Some(digits) = t.strip_prefix('#') else {
        return fallback.to_string();
    };
    // Only 3, 4, 6 or 8 hex digits form a valid CSS hex color.
    if !matches!(digits.len(), 3 | 4 | 6 | 8) {
        return fallback.to_string();
    }
    if !digits.chars().all(|c| c.is_ascii_hexdigit()) {
        return fallback.to_string();
    }
    t.to_string()
}
/// Picks the first non-blank value among the client override, the tenant
/// default, and finally the hard-coded fallback.
///
/// Fixes a fall-through bug: previously a present-but-blank client value
/// won the `Option::or` and the blank filter then skipped straight to the
/// hard fallback, ignoring a perfectly usable tenant default.
fn merge_field(opt_client: Option<&String>, opt_def: Option<&String>, hard: &str) -> String {
    opt_client
        .into_iter()
        .chain(opt_def)
        .find(|s| !s.trim().is_empty())
        .cloned()
        .unwrap_or_else(|| hard.to_string())
}
/// Overlays the client override on the tenant default, falling back to the
/// hard-coded wording, and sanitizes every color so only hex values reach
/// the generated CSS.
fn resolve_partial(def: &BrandingPartial, ovr: &BrandingPartial) -> BrandingResolved {
    // Plain text fields: override → default → hard fallback.
    let text = |o: Option<&String>, d: Option<&String>, hard: &str| merge_field(o, d, hard);
    // Color fields: merged value must survive hex validation, else the hard
    // fallback (which is always a valid hex literal) is used.
    let color = |o: Option<&String>, d: Option<&String>, hard: &str| {
        sanitize_hex_color(merge_field(o, d, hard).as_str(), hard)
    };
    BrandingResolved {
        page_title: text(ovr.page_title.as_ref(), def.page_title.as_ref(), "Connexion"),
        heading: text(ovr.heading.as_ref(), def.heading.as_ref(), "Connexion"),
        subtitle: text(
            ovr.subtitle.as_ref(),
            def.subtitle.as_ref(),
            "Saisissez vos identifiants pour continuer.",
        ),
        primary_color: color(
            ovr.primary_color.as_ref(),
            def.primary_color.as_ref(),
            "#1e3a5f",
        ),
        accent_color: color(
            ovr.accent_color.as_ref(),
            def.accent_color.as_ref(),
            "#b45309",
        ),
        surface_color: color(
            ovr.surface_color.as_ref(),
            def.surface_color.as_ref(),
            "#f8fafc",
        ),
        text_color: color(ovr.text_color.as_ref(), def.text_color.as_ref(), "#0f172a"),
        submit_label: text(
            ovr.submit_label.as_ref(),
            def.submit_label.as_ref(),
            "Continuer",
        ),
        font_family: text(
            ovr.font_family.as_ref(),
            def.font_family.as_ref(),
            "system-ui, sans-serif",
        ),
    }
}
/// Parses `OAUTH_TENANTS_JSON` (or built-in default). Invalid JSON falls back to embedded default.
fn parse_tenants_json(raw: &str) -> TenantsFile {
let use_raw = if raw.trim().is_empty() {
FALLBACK_JSON
} else {
raw
};
serde_json::from_str::<TenantsFile>(use_raw).unwrap_or_else(|_| {
serde_json::from_str(FALLBACK_JSON).expect("tenants.default.json valid")
})
}
/// Resolves branding for `client_id` from the tenants JSON: an unknown or
/// absent client id yields the "default" tenant with no overrides applied.
pub fn resolve_branding(tenants_json: &str, client_id: Option<&str>) -> BrandingResolved {
    let file = parse_tenants_json(tenants_json);
    let no_override = BrandingPartial::default();
    let ovr = client_id
        .and_then(|id| file.clients.get(id))
        .unwrap_or(&no_override);
    resolve_partial(&file.fallback, ovr)
}
/// Strips characters that could terminate or escape the CSS declaration the
/// font family is interpolated into (`<`, `>`, `{`, `}`, `;`, newlines).
fn sanitize_font_css(input: &str) -> String {
    let mut cleaned = String::with_capacity(input.len());
    for c in input.chars() {
        match c {
            '<' | '>' | '{' | '}' | ';' | '\n' | '\r' => {}
            _ => cleaned.push(c),
        }
    }
    cleaned
}
/// `form_action_attr_escaped` — full POST target e.g. `/docv-api/oauth/sign-in` (attribute-encoded).
/// Renders the standalone sign-in page used by the OAuth authorize flow.
///
/// `return_url_attr_escaped` — attribute-encoded URL echoed back through a
/// hidden field so the sign-in POST can resume the original authorize request.
/// `form_action_attr_escaped` — full POST target e.g. `/docv-api/oauth/sign-in` (attribute-encoded).
/// Both `*_attr_escaped` parameters must already be attribute-encoded by the
/// caller; they are interpolated into HTML attributes verbatim.
pub fn sign_in_page_html(
    return_url_attr_escaped: &str,
    b: &BrandingResolved,
    form_action_attr_escaped: &str,
) -> String {
    // Branding text is HTML-escaped here; colors and font family are used
    // raw and rely on sanitize_hex_color / sanitize_font_css upstream.
    let title = encode_text(&b.page_title);
    let heading = encode_text(&b.heading);
    let subtitle = encode_text(&b.subtitle);
    let submit = encode_text(&b.submit_label);
    let font = sanitize_font_css(&b.font_family);
    let p = &b.primary_color;
    let a = &b.accent_color;
    let s = &b.surface_color;
    let t = &b.text_color;
    // Raw string: `{{`/`}}` are literal CSS braces, `{name}` are format captures.
    format!(
        r#"<!DOCTYPE html><html lang="fr"><head><meta charset="utf-8"/><title>{title}</title>
<style>
:root {{ --primary:{p}; --accent:{a}; --surface:{s}; --text:{t}; }}
* {{ box-sizing:border-box; }}
body {{ font-family:{font}; margin:0; min-height:100vh; display:flex; align-items:center; justify-content:center; background:var(--surface); color:var(--text); padding:1.5rem; }}
.card {{ width:100%; max-width:28rem; background:#fff; padding:2rem; border-radius:12px; box-shadow:0 4px 24px rgba(0,0,0,.08); border:1px solid rgba(0,0,0,.06); }}
h1 {{ margin:0 0 .5rem; font-size:1.35rem; color:var(--primary); }}
p.sub {{ margin:0 0 1.25rem; font-size:.95rem; opacity:.9; line-height:1.45; }}
label {{ display:block; margin:.65rem 0 .25rem; font-size:.85rem; font-weight:500; }}
input {{ width:100%; padding:.55rem .65rem; border:1px solid #cbd5e1; border-radius:8px; font:inherit; }}
input:focus {{ outline:2px solid var(--accent); outline-offset:1px; border-color:var(--primary); }}
button {{ margin-top:1.15rem; width:100%; padding:.7rem 1rem; border:none; border-radius:8px; font:inherit; font-weight:600; cursor:pointer; background:var(--accent); color:#fff; }}
button:hover {{ filter:brightness(1.05); }}
button:active {{ transform:translateY(1px); }}
</style></head>
<body><div class="card"><h1>{heading}</h1><p class="sub">{subtitle}</p>
<form method="post" action="{form_action_attr_escaped}">
<input type="hidden" name="return_url" value="{return_url_attr_escaped}"/>
<label>Email</label><input type="email" name="email" required autocomplete="username"/>
<label>Mot de passe</label><input type="password" name="password" required autocomplete="current-password"/>
<button type="submit">{submit}</button>
</form></div></body></html>"#,
    )
}
#[cfg(test)]
mod tests {
    use super::*;
    // Relative authorize URL (no scheme/host): only the query string matters.
    #[test]
    fn parses_client_id_from_relative_authorize_url() {
        let u = "/oauth/authorize?response_type=code&client_id=enso-web&redirect_uri=http%3A%2F%2Flocal";
        assert_eq!(
            client_id_from_authorize_return_url(u).as_deref(),
            Some("enso-web")
        );
    }
    // The parser keys off '?', so a reverse-proxy path prefix is irrelevant.
    #[test]
    fn parses_client_id_when_path_has_reverse_proxy_prefix() {
        let u = "/docv-api/oauth/authorize?response_type=code&client_id=enso-web&redirect_uri=x";
        assert_eq!(
            client_id_from_authorize_return_url(u).as_deref(),
            Some("enso-web")
        );
    }
    // A per-client entry must override the "default" tenant's value.
    #[test]
    fn resolves_client_override_subtitle() {
        let raw =
            r#"{"default":{"subtitle":"Def"},"clients":{"x":{"subtitle":"Custom"}}}"#;
        let b = resolve_branding(raw, Some("x"));
        assert_eq!(b.subtitle, "Custom");
    }
}

View File

@ -0,0 +1,219 @@
//! Configuration from environment.
use std::env;
use std::fs;
use std::path::PathBuf;
/// Optional Git sync for `data/dossiers-permanents` after mirror uploads.
#[derive(Clone)]
pub struct DpGitSyncConfig {
    // Master switch (`DOCV_DP_GIT_SYNC`); the other fields are ignored when false.
    pub enabled: bool,
    // Repository to run git in; sync warns and aborts when enabled without it.
    pub repo_root: Option<PathBuf>,
    /// Relative to repo root (e.g. `data/dossiers-permanents`).
    pub data_subpath: PathBuf,
    // Git remote name used by `git push` (default "origin").
    pub remote: String,
    // When set, push targets `HEAD:refs/heads/<branch>`; otherwise the default upstream.
    pub branch: Option<String>,
}
#[derive(Clone)]
pub struct Config {
    pub database_url: String,
    // Secret for signing/validating JWTs (shared with OAuth access tokens).
    pub jwt_secret: String,
    // Bind address and port for the HTTP server (defaults 0.0.0.0:3038).
    pub host: String,
    pub port: u16,
    /// Registered OAuth2 client ids (comma-separated in `OAUTH_CLIENT_ID`).
    pub oauth_client_ids: Vec<String>,
    pub oauth_client_secret: String,
    pub oauth_redirect_uris: Vec<String>,
    /// JSON branding config (`OAUTH_TENANTS_JSON`, or file `OAUTH_TENANTS_PATH`, or built-in default).
    pub tenants_json: String,
    /// Path prefix browsers use to reach this service (e.g. `/docv-api`). Empty if OAuth is at site root.
    pub browser_oauth_prefix: String,
    /// Primary key column on `users` for OAuth sign-in lookup: `id` (default migration) or `uid` (legacy / IMPL_01 style).
    pub users_pk_column: String,
    /// When set, `POST .../folders/:uid/documents/binary` stores bytes on disk and sets `storage_url` for download (`GET .../files/:docUid`).
    pub file_storage_dir: Option<PathBuf>,
    /// Max body size for binary document upload (bytes).
    pub upload_max_bytes: usize,
    pub dp_git_sync: DpGitSyncConfig,
    /// OAuth2 access token (`Bearer`) lifetime in seconds (JWT `exp`, `expires_in`).
    pub oauth_access_token_ttl_secs: u64,
    /// Optional upstream IA HTTP endpoint (`POST` JSON in, JSON out). If unset, `POST /api/v1/ai/*` returns 503.
    pub ai_service_url: Option<String>,
    /// Optional `Authorization: Bearer …` sent to the IA service.
    pub ai_api_key: Option<String>,
    // Request timeout toward the IA service, seconds (bounded, default 60).
    pub ai_timeout_secs: u64,
    /// Max total characters for forwarded chat / assist prompts (guardrail).
    pub ai_max_input_chars: usize,
}
impl Config {
    /// Builds the configuration from process environment variables.
    ///
    /// Never fails: every variable has a development default, and malformed
    /// or out-of-range numeric values silently fall back to their defaults.
    pub fn load_from_env() -> Self {
        // Comma-separated redirect-URI allow-list; blank entries are dropped.
        let redirect = env::var("OAUTH_REDIRECT_URIS").unwrap_or_default();
        let oauth_redirect_uris: Vec<String> = redirect
            .split(',')
            .map(|s| s.trim().to_owned())
            .filter(|s| !s.is_empty())
            .collect();
        let raw_ids = env::var("OAUTH_CLIENT_ID").unwrap_or_else(|_| "enso-web".into());
        let mut oauth_client_ids: Vec<String> = raw_ids
            .split(',')
            .map(|s| s.trim().to_owned())
            .filter(|s| !s.is_empty())
            .collect();
        // Guard against OAUTH_CLIENT_ID set but containing only blanks/commas.
        if oauth_client_ids.is_empty() {
            oauth_client_ids.push("enso-web".into());
        }
        let tenants_json = Self::load_tenants_json();
        let browser_oauth_prefix = Self::normalize_browser_prefix(
            &env::var("OAUTH_BROWSER_PATH_PREFIX").unwrap_or_else(|_| "/docv-api".into()),
        );
        let users_pk_column = Self::normalize_users_pk_column(
            &env::var("DOCV_USERS_PK_COLUMN").unwrap_or_else(|_| "id".into()),
        );
        // Unset or blank disables on-disk storage for binary uploads.
        let file_storage_dir = env::var("DOCV_FILE_STORAGE_DIR").ok().map(|s| s.trim().to_owned()).filter(|s| !s.is_empty()).map(PathBuf::from);
        // Upload cap in bytes; zero or unparsable falls back to 10 MiB.
        let upload_max_bytes = env::var("DOCV_UPLOAD_MAX_BYTES")
            .ok()
            .and_then(|s| s.parse().ok())
            .filter(|&n| n > 0)
            .unwrap_or(10_485_760);
        // Truthy spellings accepted: "1", "true", "yes" (case-insensitive).
        let dp_git_enabled = env::var("DOCV_DP_GIT_SYNC")
            .ok()
            .map(|s| s.trim().to_ascii_lowercase())
            .map(|s| s == "1" || s == "true" || s == "yes")
            .unwrap_or(false);
        let dp_git_repo_root = env::var("DOCV_DP_GIT_REPO_ROOT")
            .ok()
            .map(|s| s.trim().to_owned())
            .filter(|s| !s.is_empty())
            .map(PathBuf::from);
        let dp_sub = env::var("DOCV_DP_GIT_DATA_SUBPATH")
            .ok()
            .map(|s| s.trim().to_owned())
            .filter(|s| !s.is_empty())
            .unwrap_or_else(|| "data/dossiers-permanents".into());
        let dp_remote = env::var("DOCV_DP_GIT_REMOTE")
            .ok()
            .map(|s| s.trim().to_owned())
            .filter(|s| !s.is_empty())
            .unwrap_or_else(|| "origin".into());
        let dp_branch = env::var("DOCV_DP_GIT_BRANCH")
            .ok()
            .map(|s| s.trim().to_owned())
            .filter(|s| !s.is_empty());
        let dp_git_sync = DpGitSyncConfig {
            enabled: dp_git_enabled,
            repo_root: dp_git_repo_root,
            data_subpath: PathBuf::from(dp_sub),
            remote: dp_remote,
            branch: dp_branch,
        };
        // Token TTL accepted only within [60, 86400] seconds; otherwise 900 (15 min).
        let oauth_access_token_ttl_secs = env::var("DOCV_OAUTH_ACCESS_TOKEN_TTL_SEC")
            .ok()
            .and_then(|s| s.parse::<u64>().ok())
            .filter(|&n| (60..=86400).contains(&n))
            .unwrap_or(900);
        let ai_service_url = env::var("DOCV_AI_SERVICE_URL")
            .ok()
            .map(|s| s.trim().to_owned())
            .filter(|s| !s.is_empty());
        let ai_api_key = env::var("DOCV_AI_API_KEY")
            .ok()
            .map(|s| s.trim().to_owned())
            .filter(|s| !s.is_empty());
        // IA timeout bounded to (0, 300] seconds; default 60.
        let ai_timeout_secs = env::var("DOCV_AI_TIMEOUT_SECS")
            .ok()
            .and_then(|s| s.parse::<u64>().ok())
            .filter(|&n| n > 0 && n <= 300)
            .unwrap_or(60);
        // Prompt-size guardrail bounded to [512, 500_000] characters; default 32_000.
        let ai_max_input_chars = env::var("DOCV_AI_MAX_INPUT_CHARS")
            .ok()
            .and_then(|s| s.parse::<usize>().ok())
            .filter(|&n| n >= 512 && n <= 500_000)
            .unwrap_or(32_000);
        Self {
            database_url: env::var("DATABASE_URL")
                .unwrap_or_else(|_| "postgres://localhost/docv".into()),
            jwt_secret: env::var("JWT_SECRET")
                .unwrap_or_else(|_| "dev-secret-change-in-production".into()),
            host: env::var("HOST").unwrap_or_else(|_| "0.0.0.0".into()),
            port: env::var("PORT")
                .unwrap_or_else(|_| "3038".into())
                .parse()
                .unwrap_or(3038),
            oauth_client_ids,
            oauth_client_secret: env::var("OAUTH_CLIENT_SECRET")
                .unwrap_or_else(|_| "dev-oauth-secret-change-me".into()),
            oauth_redirect_uris,
            tenants_json,
            browser_oauth_prefix,
            users_pk_column,
            file_storage_dir,
            upload_max_bytes,
            dp_git_sync,
            oauth_access_token_ttl_secs,
            ai_service_url,
            ai_api_key,
            ai_timeout_secs,
            ai_max_input_chars,
        }
    }
    /// Restricts the configured users PK column to the two supported
    /// identifiers; anything unexpected becomes `id` so the value is safe to
    /// splice into SQL as a column name.
    fn normalize_users_pk_column(raw: &str) -> String {
        match raw.trim().to_ascii_lowercase().as_str() {
            "uid" => "uid".to_string(),
            _ => "id".to_string(),
        }
    }
    /// Canonicalizes the browser prefix to "" or "/segment[/…]" with no
    /// trailing slash, regardless of how many slashes the env value carried.
    fn normalize_browser_prefix(raw: &str) -> String {
        let t = raw.trim();
        if t.is_empty() {
            return String::new();
        }
        let inner = t.trim_matches('/').trim();
        if inner.is_empty() {
            return String::new();
        }
        format!("/{}", inner)
    }
    /// Prepends [`Self::browser_oauth_prefix`] to a path starting with `/oauth/...` for `Location` and forms.
    pub fn browser_oauth_path(&self, path_from_oauth_mount: &str) -> String {
        // Non-rooted inputs (e.g. absolute URLs) pass through untouched.
        if !path_from_oauth_mount.starts_with('/') {
            return path_from_oauth_mount.to_string();
        }
        let p = self.browser_oauth_prefix.trim();
        if p.is_empty() {
            return path_from_oauth_mount.to_string();
        }
        format!("{}{}", p, path_from_oauth_mount)
    }
    /// Branding JSON resolution order: `OAUTH_TENANTS_PATH` file (if
    /// readable and non-blank), then `OAUTH_TENANTS_JSON`, then the
    /// compiled-in tenants.default.json.
    fn load_tenants_json() -> String {
        const DEFAULT: &str = include_str!("../../tenants.default.json");
        if let Ok(path) = env::var("OAUTH_TENANTS_PATH") {
            if let Ok(s) = fs::read_to_string(path.trim()) {
                if !s.trim().is_empty() {
                    return s;
                }
            }
        }
        env::var("OAUTH_TENANTS_JSON").unwrap_or_else(|_| DEFAULT.to_string())
    }
    /// Exact-match check against the configured redirect-URI allow-list.
    pub fn redirect_uri_allowed(&self, uri: &str) -> bool {
        self.oauth_redirect_uris.iter().any(|a| a == uri)
    }
    /// Exact-match check against the registered OAuth client ids.
    pub fn oauth_client_id_allowed(&self, id: &str) -> bool {
        self.oauth_client_ids.iter().any(|c| c == id)
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,109 @@
//! Optional Git commit/push after dossier-permanent mirror uploads (see docs/features/DOSSIERS_PERMANENTS_DATA_GIT.md).
use crate::config::DpGitSyncConfig;
use std::path::{Component, Path, PathBuf};
use std::process::Command;
use tracing::{info, warn};
/// Rejects absolute paths and `..` segments.
/// Parses `path` as a path strictly relative to the repository root:
/// rejects empty input, absolute paths, Windows drive prefixes and any `..`
/// segment. Returns the trimmed `PathBuf` otherwise (`.` segments are
/// tolerated; they cannot escape the root).
///
/// The `Component::Prefix` arm is new: on Windows, `C:foo` is not
/// `is_absolute()` yet still escapes the repo, so it must be refused too
/// (no-op on Unix, where `Prefix` never occurs).
pub fn safe_relative_under_repo(path: &str) -> Option<PathBuf> {
    let trimmed = path.trim();
    if trimmed.is_empty() {
        return None;
    }
    let p = Path::new(trimmed);
    if p.is_absolute() {
        return None;
    }
    for c in p.components() {
        match c {
            Component::ParentDir | Component::RootDir | Component::Prefix(_) => return None,
            Component::CurDir | Component::Normal(_) => {}
        }
    }
    Some(p.to_path_buf())
}
/// Returns `true` when `git diff --cached --name-only` prints at least one
/// path, i.e. the index has staged changes worth committing. Spawn failures
/// are logged and treated as "nothing staged".
fn git_staged_files_nonempty(repo_root: &Path) -> bool {
    let result = Command::new("git")
        .current_dir(repo_root)
        .args(["diff", "--cached", "--name-only"])
        .output();
    match result {
        Ok(out) => !out.stdout.is_empty(),
        Err(e) => {
            warn!(?e, "git diff --cached");
            false
        }
    }
}
/// Runs `git add`, `git commit` (if needed), `git push` for paths relative to repo root.
/// Runs `git add`, `git commit` (if needed), `git push` for paths relative to repo root.
///
/// Best-effort: every failure is logged via `tracing` and the function
/// returns; callers are never given an error. No-op unless the sync is
/// enabled, a repo root is configured, and at least one path was given.
pub fn sync_data_paths(cfg: &DpGitSyncConfig, paths_relative_to_repo: &[PathBuf]) {
    if !cfg.enabled {
        return;
    }
    let Some(ref repo_root) = cfg.repo_root else {
        warn!("DOCV_DP_GIT_SYNC enabled but DOCV_DP_GIT_REPO_ROOT unset");
        return;
    };
    if paths_relative_to_repo.is_empty() {
        return;
    }
    // Stage every path in a single `git add -- <paths…>` invocation; `--`
    // keeps path arguments from being parsed as options.
    let mut add_cmd = Command::new("git");
    add_cmd.current_dir(repo_root);
    add_cmd.arg("add").arg("--");
    for p in paths_relative_to_repo {
        add_cmd.arg(p);
    }
    match add_cmd.status() {
        Ok(s) if s.success() => {}
        Ok(s) => {
            warn!(?s, "git add failed for DP sync");
            return;
        }
        Err(e) => {
            warn!(?e, "git add spawn failed");
            return;
        }
    }
    // Skip the commit when the add turned out to be a no-op (files unchanged).
    if !git_staged_files_nonempty(repo_root) {
        info!("DP git sync: nothing staged after add; skipping commit");
        return;
    }
    let msg = "docv: sync dossiers-permanents mirror upload";
    let commit = Command::new("git")
        .current_dir(repo_root)
        .args(["commit", "-m", msg])
        .status();
    match commit {
        Ok(s) if s.success() => info!("DP git sync: committed"),
        // A non-zero commit status is tolerated (e.g. hook failure); the
        // push below is still attempted. Only a spawn failure aborts.
        Ok(s) => warn!(?s, "git commit (may be empty or hook failure)"),
        Err(e) => {
            warn!(?e, "git commit spawn failed");
            return;
        }
    }
    // Push to the configured branch explicitly, or to the default upstream.
    let push = if let Some(ref b) = cfg.branch {
        Command::new("git")
            .current_dir(repo_root)
            .args([
                "push",
                cfg.remote.as_str(),
                &format!("HEAD:refs/heads/{b}"),
            ])
            .status()
    } else {
        Command::new("git")
            .current_dir(repo_root)
            .args(["push", cfg.remote.as_str()])
            .status()
    };
    match push {
        Ok(s) if s.success() => info!("DP git sync: push OK"),
        Ok(s) => warn!(?s, "git push failed — check remote and branch"),
        Err(e) => warn!(?e, "git push spawn failed"),
    }
}

View File

@ -0,0 +1,362 @@
//! Lecture disque sous `data/dossiers-permanents` pour les routes DP layout (instances/, operations/).
use serde::Serialize;
use std::path::PathBuf;
/// One directory entry returned by the DP layout listing routes.
#[derive(Serialize)]
pub struct DpLayoutEntryJson {
    /// Entry name (lossy UTF-8 conversion of the OS file name).
    pub name: String,
    /// `"dir"` or `"file"`; symlinks are resolved to their target kind.
    pub entry_type: String,
}
/// Canonical directory `repo_root` / `data_subpath` / each segment of
/// `dp_layout_root`.
///
/// `dp_layout_root` is trimmed then split on `/` (empty segments ignored), so
/// a stored value like `instances/demo` appends two components. Fails with
/// `instance_root_missing:<err>` when the directory cannot be canonicalized.
pub fn dp_layout_base_canonical(
    repo_root: PathBuf,
    data_subpath: PathBuf,
    dp_layout_root: &str,
) -> Result<PathBuf, String> {
    let mut base = repo_root;
    base.push(&data_subpath);
    dp_layout_root
        .trim()
        .split('/')
        .filter(|seg| !seg.is_empty())
        .for_each(|seg| base.push(seg));
    base.canonicalize()
        .map_err(|e| format!("instance_root_missing:{e}"))
}
/// Lists one directory level under the DP layout base.
///
/// `rel` (possibly empty) is joined onto the canonical base; a canonicalized
/// target escaping the base is rejected (`invalid_path`), a missing target
/// maps to `not_found`. Dotfiles are skipped. Symlinks are followed, so a
/// link to a directory lists as `"dir"`. Entries sort directories first, then
/// by case-insensitive name.
pub fn read_dp_layout_dir_entries(
    repo_root: PathBuf,
    data_subpath: PathBuf,
    dp_layout_root: String,
    rel: PathBuf,
) -> Result<Vec<DpLayoutEntryJson>, String> {
    let base = dp_layout_base_canonical(repo_root, data_subpath, &dp_layout_root)?;
    let target = if rel.as_os_str().is_empty() {
        base.clone()
    } else {
        base.join(&rel)
            .canonicalize()
            .map_err(|_| "not_found".to_string())?
    };
    if !target.starts_with(&base) {
        return Err("invalid_path".into());
    }
    let mut entries: Vec<DpLayoutEntryJson> = Vec::new();
    for item in std::fs::read_dir(&target).map_err(|e| format!("read_dir:{e}"))? {
        let item = item.map_err(|e| format!("dir_entry:{e}"))?;
        let name = item.file_name().to_string_lossy().into_owned();
        if name.starts_with('.') {
            continue;
        }
        // `Path::is_dir`/`is_file` follow symlinks, unlike `DirEntry::file_type`;
        // a symlink to a directory must classify as "dir".
        let full = item.path();
        let kind = if full.is_dir() {
            "dir"
        } else if full.is_file() {
            "file"
        } else {
            // Broken symlink, socket, etc. — not listable.
            continue;
        };
        entries.push(DpLayoutEntryJson {
            name,
            entry_type: kind.into(),
        });
    }
    entries.sort_by(|lhs, rhs| {
        lhs.entry_type
            .cmp(&rhs.entry_type)
            .then_with(|| lhs.name.to_lowercase().cmp(&rhs.name.to_lowercase()))
    });
    Ok(entries)
}
/// Extensions readable via the DP layout text routes (case-insensitive):
/// `.md`, `.txt`, `.markdown`.
pub fn dp_layout_text_file_name_allowed(file_name: &str) -> bool {
    let lower = file_name.to_lowercase();
    [".md", ".txt", ".markdown"]
        .iter()
        .any(|ext| lower.ends_with(ext))
}
/// Versioned templates under `data/dossiers-permanents/`: one `__GABARIT__.md`
/// per folder plus `*.__TEMPLATE__.md` files (both matched case-insensitively).
pub fn dp_layout_gabarit_file_name_allowed(file_name: &str) -> bool {
    let lower = file_name.to_lowercase();
    lower == "__gabarit__.md" || lower.ends_with(".__template__.md")
}
/// Overwrites an existing UTF-8 gabarit file (same path rules as [`read_dp_layout_text_file`]).
///
/// Errors: `file_too_large` (payload > 512 KiB), `not_found` (path does not
/// canonicalize, i.e. creation is not supported), `invalid_path` (escapes the
/// base), `not_a_file`, `not_a_gabarit_file`, and `metadata:`/`write:` I/O
/// failures.
pub fn write_dp_layout_gabarit_text_file(
    repo_root: PathBuf,
    data_subpath: PathBuf,
    dp_layout_root: String,
    rel_file: PathBuf,
    text: &str,
) -> Result<(), String> {
    const MAX_BYTES: u64 = 512 * 1024;
    if text.len() as u64 > MAX_BYTES {
        return Err("file_too_large".into());
    }
    let base = dp_layout_base_canonical(repo_root, data_subpath, &dp_layout_root)?;
    let target = base
        .join(&rel_file)
        .canonicalize()
        .map_err(|_| "not_found".to_string())?;
    if !target.starts_with(&base) {
        return Err("invalid_path".into());
    }
    if !std::fs::metadata(&target)
        .map_err(|e| format!("metadata:{e}"))?
        .is_file()
    {
        return Err("not_a_file".into());
    }
    // Non-UTF-8 file names yield "" here and are rejected below.
    let file_name = target.file_name().and_then(|n| n.to_str()).unwrap_or("");
    if !dp_layout_gabarit_file_name_allowed(file_name) {
        return Err("not_a_gabarit_file".into());
    }
    std::fs::write(&target, text).map_err(|e| format!("write:{e}"))
}
/// Reads a small UTF-8 text file under `dp_layout_root` (same base as directory listing).
///
/// Errors: `not_found`, `invalid_path` (escapes the base), `not_a_file`,
/// `file_too_large` (> 512 KiB on disk), `file_type_not_allowed` (extension
/// outside [`dp_layout_text_file_name_allowed`]), plus `metadata:`/`read:`
/// I/O failures.
pub fn read_dp_layout_text_file(
    repo_root: PathBuf,
    data_subpath: PathBuf,
    dp_layout_root: String,
    rel_file: PathBuf,
) -> Result<String, String> {
    const MAX_BYTES: u64 = 512 * 1024;
    let base = dp_layout_base_canonical(repo_root, data_subpath, &dp_layout_root)?;
    let target = base
        .join(&rel_file)
        .canonicalize()
        .map_err(|_| "not_found".to_string())?;
    if !target.starts_with(&base) {
        return Err("invalid_path".into());
    }
    let meta = std::fs::metadata(&target).map_err(|e| format!("metadata:{e}"))?;
    if !meta.is_file() {
        return Err("not_a_file".into());
    }
    // Size check from metadata before reading, so oversized files never load.
    if meta.len() > MAX_BYTES {
        return Err("file_too_large".into());
    }
    let file_name = target.file_name().and_then(|n| n.to_str()).unwrap_or("");
    if !dp_layout_text_file_name_allowed(file_name) {
        return Err("file_type_not_allowed".into());
    }
    std::fs::read_to_string(&target).map_err(|e| format!("read:{e}"))
}
#[cfg(test)]
mod tests {
    //! Tempdir-backed tests for the DP layout directory/file helpers.
    use super::*;
    use std::fs;
    use tempfile::tempdir;

    /// Builds `data/dossiers-permanents/instances/demo_case` with two
    /// directories and two text files; returns (tempdir guard, repo root,
    /// dp_layout_root). The guard must stay alive or the tree is deleted.
    fn write_layout_tree() -> (tempfile::TempDir, PathBuf, String) {
        let repo = tempdir().expect("tempdir");
        let repo_root = repo.path().to_path_buf();
        let data = repo_root.join("data/dossiers-permanents");
        let root = data.join("instances/demo_case");
        fs::create_dir_all(root.join("Z_folder")).expect("mkdir");
        fs::create_dir_all(root.join("a_folder")).expect("mkdir");
        fs::write(root.join("readme.md"), "# hi").expect("write md");
        fs::write(root.join("note.txt"), "plain").expect("write txt");
        fs::write(root.join("a_folder/inner.txt"), "inner").expect("write nested");
        (repo, repo_root, "instances/demo_case".to_string())
    }

    // Directories sort before files, and names compare case-insensitively.
    #[test]
    fn dir_entries_sorts_dirs_before_files_then_name() {
        let (_tmp, repo_root, dp_root) = write_layout_tree();
        let entries = read_dp_layout_dir_entries(
            repo_root,
            PathBuf::from("data/dossiers-permanents"),
            dp_root,
            PathBuf::new(),
        )
        .expect("ok");
        let names: Vec<&str> = entries.iter().map(|e| e.name.as_str()).collect();
        assert_eq!(
            names,
            vec!["a_folder", "Z_folder", "note.txt", "readme.md"]
        );
        assert!(entries.iter().any(|e| e.name == "a_folder" && e.entry_type == "dir"));
        assert!(entries.iter().any(|e| e.name == "readme.md" && e.entry_type == "file"));
    }

    // A non-empty `rel` lists the nested directory's own contents.
    #[test]
    fn dir_entries_nested_path() {
        let (_tmp, repo_root, dp_root) = write_layout_tree();
        let entries = read_dp_layout_dir_entries(
            repo_root,
            PathBuf::from("data/dossiers-permanents"),
            dp_root,
            PathBuf::from("a_folder"),
        )
        .expect("ok");
        assert_eq!(entries.len(), 1);
        assert_eq!(entries[0].name, "inner.txt");
        assert_eq!(entries[0].entry_type, "file");
    }

    // Symlinks are followed: a link to a directory lists (and descends) as "dir".
    #[cfg(unix)]
    #[test]
    fn dir_entries_symlink_to_dir_is_classified_as_dir() {
        use std::os::unix::fs::symlink;
        let (_tmp, repo_root, dp_root) = write_layout_tree();
        let base = dp_layout_base_canonical(
            repo_root.clone(),
            PathBuf::from("data/dossiers-permanents"),
            &dp_root,
        )
        .expect("base");
        let target_dir = base.join("a_folder");
        let link_path = base.join("link_to_a");
        symlink(&target_dir, &link_path).expect("symlink");
        let entries = read_dp_layout_dir_entries(
            repo_root.clone(),
            PathBuf::from("data/dossiers-permanents"),
            dp_root.clone(),
            PathBuf::new(),
        )
        .expect("ok");
        let link = entries
            .iter()
            .find(|e| e.name == "link_to_a")
            .expect("symlink entry");
        assert_eq!(link.entry_type, "dir");
        // Listing through the symlink reaches the target directory's contents.
        let nested = read_dp_layout_dir_entries(
            repo_root,
            PathBuf::from("data/dossiers-permanents"),
            dp_root,
            PathBuf::from("link_to_a"),
        )
        .expect("nested");
        assert_eq!(nested.len(), 1);
        assert_eq!(nested[0].name, "inner.txt");
    }

    #[test]
    fn read_text_file_ok() {
        let (_tmp, repo_root, dp_root) = write_layout_tree();
        let text = read_dp_layout_text_file(
            repo_root,
            PathBuf::from("data/dossiers-permanents"),
            dp_root,
            PathBuf::from("a_folder/inner.txt"),
        )
        .expect("read");
        assert_eq!(text, "inner");
    }

    // Only .md/.txt/.markdown are readable; anything else is rejected by name.
    #[test]
    fn read_text_file_rejects_wrong_extension() {
        let repo = tempdir().expect("tempdir");
        let data = repo.path().join("data/dossiers-permanents");
        let root = data.join("instances/x");
        fs::create_dir_all(&root).expect("mkdir");
        fs::write(root.join("bad.bin"), "x").expect("write");
        let err = read_dp_layout_text_file(
            repo.path().to_path_buf(),
            PathBuf::from("data/dossiers-permanents"),
            "instances/x".into(),
            PathBuf::from("bad.bin"),
        )
        .expect_err("type");
        assert_eq!(err, "file_type_not_allowed");
    }

    // The canonical base and the listing agree on the same resolved root.
    #[test]
    fn base_canonical_matches_joined_roots() {
        let (_tmp, repo_root, dp_root) = write_layout_tree();
        let base = dp_layout_base_canonical(
            repo_root.clone(),
            PathBuf::from("data/dossiers-permanents"),
            &dp_root,
        )
        .expect("canon");
        assert!(base.ends_with("demo_case"));
        let entries = read_dp_layout_dir_entries(
            repo_root,
            PathBuf::from("data/dossiers-permanents"),
            dp_root,
            PathBuf::new(),
        )
        .expect("entries");
        assert!(!entries.is_empty());
    }

    // `..` traversal out of the layout base must be refused.
    #[test]
    fn path_escape_rejected() {
        let (_tmp, repo_root, dp_root) = write_layout_tree();
        let res = read_dp_layout_dir_entries(
            repo_root,
            PathBuf::from("data/dossiers-permanents"),
            dp_root,
            PathBuf::from("../.."),
        );
        assert!(res.is_err());
    }

    #[test]
    fn gabarit_name_allowed() {
        assert!(dp_layout_gabarit_file_name_allowed("__GABARIT__.md"));
        assert!(dp_layout_gabarit_file_name_allowed("IM.07.__TEMPLATE__.md"));
        assert!(!dp_layout_gabarit_file_name_allowed("readme.md"));
        assert!(!dp_layout_gabarit_file_name_allowed("note.txt"));
    }

    // Writing a gabarit then reading it back returns the new content.
    #[test]
    fn write_gabarit_roundtrip() {
        let repo = tempdir().expect("tempdir");
        let data = repo.path().join("data/dossiers-permanents");
        let root = data.join("instances/x");
        fs::create_dir_all(root.join("sub")).expect("mkdir");
        fs::write(root.join("sub/__GABARIT__.md"), "old").expect("write");
        let repo_root = repo.path().to_path_buf();
        let dp = "instances/x".to_string();
        write_dp_layout_gabarit_text_file(
            repo_root.clone(),
            PathBuf::from("data/dossiers-permanents"),
            dp.clone(),
            PathBuf::from("sub/__GABARIT__.md"),
            "new content",
        )
        .expect("write ok");
        let text = read_dp_layout_text_file(
            repo_root,
            PathBuf::from("data/dossiers-permanents"),
            dp,
            PathBuf::from("sub/__GABARIT__.md"),
        )
        .expect("read");
        assert_eq!(text, "new content");
    }

    // Ordinary markdown files are not writable through the gabarit endpoint.
    #[test]
    fn write_gabarit_rejects_plain_md() {
        let repo = tempdir().expect("tempdir");
        let data = repo.path().join("data/dossiers-permanents");
        let root = data.join("instances/x");
        fs::create_dir_all(&root).expect("mkdir");
        fs::write(root.join("readme.md"), "x").expect("write");
        let err = write_dp_layout_gabarit_text_file(
            repo.path().to_path_buf(),
            PathBuf::from("data/dossiers-permanents"),
            "instances/x".into(),
            PathBuf::from("readme.md"),
            "y",
        )
        .expect_err("not gabarit");
        assert_eq!(err, "not_a_gabarit_file");
    }
}

View File

@ -0,0 +1,64 @@
//! Résolution des chemins miroir sous `data/dossiers-permanents/` (upload binaire, sync Git).
use crate::config::Config;
use hyper::header::HeaderMap;
use std::path::PathBuf;
use uuid::Uuid;
use crate::dp_git_sync;
/// Makes an upload name safe for the mirror tree: path separators, `:`, NUL
/// and control characters become `-`; a blank result falls back to
/// `"document"`; output is capped at 240 characters.
pub fn sanitize_dp_mirror_filename(name: &str) -> String {
    let replaced: String = name
        .chars()
        .map(|ch| {
            if matches!(ch, '/' | '\\' | ':' | '\0') || ch.is_control() {
                '-'
            } else {
                ch
            }
        })
        .collect();
    let trimmed = replaced.trim();
    if trimmed.is_empty() {
        "document".into()
    } else {
        trimmed.chars().take(240).collect()
    }
}
/// Trimmed `x-enso-dp-mirror-relative-path` header value; `None` when the
/// header is absent, not representable as a string, or blank after trim.
pub fn header_dp_mirror_relative_path(hdr: &HeaderMap) -> Option<String> {
    let raw = hdr.get("x-enso-dp-mirror-relative-path")?.to_str().ok()?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Relative path under `DOCV_DP_GIT_DATA_SUBPATH` for an uploaded file.
///
/// Priority: a valid `x-enso-dp-mirror-relative-path` header (a trailing `/`
/// means "directory — append the sanitized file name"); otherwise, when Git
/// sync is enabled, the default `_uploads/<folder_uid>/<name>`; else `None`.
pub fn resolve_dp_mirror_relative(
    cfg: &Config,
    hdr: &HeaderMap,
    folder_uid: Uuid,
    name: &str,
) -> Option<PathBuf> {
    let safe = sanitize_dp_mirror_filename(name);
    if let Some(raw) = header_dp_mirror_relative_path(hdr) {
        // `raw` is already trimmed and non-empty (header_dp_mirror_relative_path).
        let wants_dir = raw.ends_with('/');
        let cleaned = raw.trim_end_matches('/').trim();
        if let Some(mut rel) = dp_git_sync::safe_relative_under_repo(cleaned) {
            if wants_dir {
                rel.push(safe.clone());
            }
            return Some(rel);
        }
    }
    cfg.dp_git_sync.enabled.then(|| {
        PathBuf::from("_uploads")
            .join(folder_uid.to_string())
            .join(safe)
    })
}

View File

@ -0,0 +1,74 @@
//! docv-back: HTTP API, PostgreSQL, OAuth2 authorization server for enso-front.
mod api_auth;
mod branding;
mod config;
mod db;
mod dp_layout_fs;
mod dp_git_sync;
mod dp_mirror;
mod server;
use crate::config::Config;
use tracing_subscriber::EnvFilter;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // Log filter from RUST_LOG, with a default of `info`.
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::from_default_env().add_directive("info".parse()?))
        .init();
    let config = Config::load_from_env();
    // Startup summary: only lengths of secrets/URLs are logged, never values.
    tracing::info!(
        host = %config.host,
        port = config.port,
        db_len = config.database_url.len(),
        jwt_len = config.jwt_secret.len(),
        oauth_redirects = config.oauth_redirect_uris.len(),
        browser_oauth_prefix = %config.browser_oauth_prefix,
        users_pk_column = %config.users_pk_column,
        file_storage = config.file_storage_dir.as_ref().map(|p| p.display().to_string()).unwrap_or_else(|| "disabled".into()),
        upload_max_bytes = config.upload_max_bytes,
        dp_git_sync = config.dp_git_sync.enabled,
        dp_git_repo = config.dp_git_sync.repo_root.as_ref().map(|p| p.display().to_string()).unwrap_or_else(|| "unset".into()),
        oauth_access_token_ttl_secs = config.oauth_access_token_ttl_secs,
        "docv-back starting"
    );
    let pool = db::create_pool()?;
    db::ensure_users_table(&pool).await;
    // Best-effort demo user seed: a failure is logged and startup continues.
    if let Ok(client) = pool.get().await {
        let seed = include_str!("../migrations/20260330120001_seed_demo_user.sql");
        if let Err(e) = client.batch_execute(seed).await {
            tracing::warn!(?e, "seed demo user skipped or failed");
        }
    }
    // Ordered, idempotent schema/seed steps (each `ensure_*` is a no-op once applied).
    db::ensure_offices_folders_schema(&pool).await;
    db::ensure_folders_title_legacy_compat(&pool).await;
    db::ensure_offices_extended_columns(&pool).await;
    db::ensure_office_members_role_column(&pool).await;
    db::ensure_roles_minimal_table(&pool).await;
    db::ensure_user_stub_lists_schema(&pool).await;
    db::ensure_folder_documents_schema(&pool).await;
    db::ensure_folder_documents_storage_columns(&pool).await;
    db::ensure_folders_dp_layout_columns(&pool).await;
    db::ensure_folders_status_column(&pool).await;
    db::ensure_folders_purpose_operation_type(&pool).await;
    db::ensure_office_society_extensions(&pool).await;
    db::ensure_folder_sources_notes_tasks_workflow(&pool).await;
    // NOTE(review): the two ensure_impl_* calls run both before and after
    // seed_demo_office_if_needed — presumably so a freshly seeded office also
    // gets its rows; confirm the first pair is still required.
    db::ensure_impl_role_rows_for_all_offices(&pool).await?;
    db::ensure_impl_folder_type_rows_for_all_offices(&pool).await?;
    db::seed_demo_office_if_needed(&pool, &config.users_pk_column).await;
    db::ensure_impl_role_rows_for_all_offices(&pool).await?;
    db::ensure_impl_folder_type_rows_for_all_offices(&pool).await?;
    db::seed_demo_dp_folders_for_cabinet_demo_if_needed(&pool).await;
    db::seed_demo_cession_operation_folder_if_needed(&pool).await;
    db::link_listed_users_to_first_office_if_configured(&pool, &config.users_pk_column).await;
    db::link_orphan_users_to_first_office_if_configured(&pool, &config.users_pk_column).await;
    db::repair_seed_demo_user_office_memberships_if_needed(&pool, &config.users_pk_column).await;
    db::remove_stale_legacy_demo_migration_placeholder_office_if_needed(&pool).await;
    db::seed_stub_lists_demo_if_needed(&pool, &config.users_pk_column).await;
    // Blocks until the HTTP server exits.
    server::serve(config, pool).await?;
    Ok(())
}

View File

@ -0,0 +1,45 @@
//! Forward JSON to an optional upstream IA HTTP service (server-side only).
use crate::config::Config;
use serde_json::Value;
use std::time::Duration;
/// Failure modes when forwarding JSON to the upstream IA service.
#[derive(Debug)]
pub enum AiForwardError {
    /// No `ai_service_url` configured — the feature is off.
    NotConfigured,
    /// Client build, network, or body-read error (stringified cause).
    Request(String),
    /// Upstream replied with a non-2xx HTTP status (carried here).
    UpstreamStatus(u16),
}
/// POSTs `body` as JSON to the configured IA service and returns its response.
///
/// Adds `Authorization: Bearer <key>` when `ai_api_key` is set. A non-2xx
/// status maps to [`AiForwardError::UpstreamStatus`]; a successful response
/// whose body is not valid JSON is wrapped as `{ "text": <trimmed body> }`
/// instead of failing.
///
/// NOTE(review): a fresh `reqwest::Client` is built per call; reusing one
/// would enable connection pooling — confirm whether call volume warrants it.
pub async fn post_ai_json(config: &Config, body: &Value) -> Result<Value, AiForwardError> {
    let Some(url) = config.ai_service_url.as_deref() else {
        return Err(AiForwardError::NotConfigured);
    };
    // Whole-request timeout from config (seconds).
    let client = reqwest::Client::builder()
        .timeout(Duration::from_secs(config.ai_timeout_secs))
        .build()
        .map_err(|e| AiForwardError::Request(e.to_string()))?;
    let mut req = client.post(url).json(body);
    if let Some(ref key) = config.ai_api_key {
        req = req.header(
            reqwest::header::AUTHORIZATION,
            format!("Bearer {}", key.trim()),
        );
    }
    let res = req
        .send()
        .await
        .map_err(|e| AiForwardError::Request(e.to_string()))?;
    let status = res.status();
    // Read the body before the status check so error text is not lost mid-stream.
    let text = res
        .text()
        .await
        .map_err(|e| AiForwardError::Request(e.to_string()))?;
    if !status.is_success() {
        return Err(AiForwardError::UpstreamStatus(status.as_u16()));
    }
    match serde_json::from_str::<Value>(&text) {
        Ok(v) => Ok(v),
        Err(_) => Ok(serde_json::json!({ "text": text.trim() })),
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,568 @@
//! HTTP (hyper 0.14): health, OAuth2 authorization_code, docv sign-in (users en BDD docv).
mod ai_forward;
mod api_v1;
mod v1_route;
use crate::api_auth::AccessClaims;
use crate::config::Config;
use crate::db::DbPool;
use hyper::body::to_bytes;
use hyper::service::{make_service_fn, service_fn};
use hyper::{Body, Method, Request, Response, Server, StatusCode};
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::convert::Infallible;
use std::net::SocketAddr;
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};
use tokio::sync::Mutex as AsyncMutex;
use tokio_postgres::Row;
use tracing::{error, warn};
use uuid::Uuid;
/// Name of the HttpOnly session cookie set by `POST /oauth/sign-in`.
const COOKIE_SESSION: &str = "docv_oauth_session";
/// Lifetime of an issued authorization code before it is purged.
const CODE_TTL: Duration = Duration::from_secs(300);

/// Shared application state, cloned into every hyper service.
#[derive(Clone)]
pub struct AppState {
    pub config: Config,
    pub pool: DbPool,
    /// In-memory one-shot authorization codes, keyed by the code string;
    /// expired entries are purged lazily on access.
    pub codes: Arc<Mutex<HashMap<String, OauthCodeEntry>>>,
    /// Serializes disk-mirror writes + `git add/commit/push` to limit
    /// concurrent conflicts.
    pub dp_git_serial: Arc<AsyncMutex<()>>,
}

/// Pending authorization code minted by `/oauth/authorize`.
#[derive(Clone)]
pub struct OauthCodeEntry {
    /// User id — becomes the `sub` claim of the issued access token.
    pub sub: String,
    pub email: String,
    pub name: Option<String>,
    /// Expiry instant (now + CODE_TTL at creation).
    pub expires: Instant,
}

/// Claims carried by the session-cookie JWT.
#[derive(Debug, Serialize, Deserialize)]
struct SessionClaims {
    sub: String,
    email: String,
    name: Option<String>,
    // Unix seconds.
    exp: usize,
}

/// Form body of `POST /oauth/token` (authorization_code grant).
#[derive(Deserialize)]
struct TokenForm {
    grant_type: String,
    code: String,
    redirect_uri: String,
    client_id: String,
    client_secret: String,
}
/// Seconds since the Unix epoch (0 if the system clock reads before 1970).
fn unix_now() -> usize {
    match std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH) {
        Ok(elapsed) => elapsed.as_secs() as usize,
        Err(_) => 0,
    }
}
/// Signs a `SessionClaims` JWT for the session cookie, expiring `ttl_secs`
/// from now, using the default header (HS256) with `secret`.
fn session_cookie_value(
    secret: &str,
    sub: &str,
    email: &str,
    name: &Option<String>,
    ttl_secs: u64,
) -> Result<String, jsonwebtoken::errors::Error> {
    let claims = SessionClaims {
        sub: sub.to_owned(),
        email: email.to_owned(),
        name: name.clone(),
        exp: unix_now() + ttl_secs as usize,
    };
    let key = EncodingKey::from_secret(secret.as_bytes());
    encode(&Header::default(), &claims, &key)
}
/// Decodes and validates the session cookie JWT; `None` when the header or
/// cookie is absent, or the token is invalid/expired.
fn read_session(secret: &str, cookie_header: Option<&str>) -> Option<SessionClaims> {
    let token = extract_cookie(cookie_header?, COOKIE_SESSION)?;
    let key = DecodingKey::from_secret(secret.as_bytes());
    decode::<SessionClaims>(&token, &key, &Validation::default())
        .map(|data| data.claims)
        .ok()
}
/// Value of the cookie `name` from a raw `Cookie:` header value
/// (`k=v` pairs separated by `;`); `None` when absent.
fn extract_cookie(header_val: &str, name: &str) -> Option<String> {
    header_val.split(';').find_map(|pair| {
        let rest = pair.trim().strip_prefix(name)?;
        let value = rest.strip_prefix('=')?;
        Some(value.trim().to_string())
    })
}
/// 3xx response carrying a `Location` header and an empty body.
fn redirect(status: StatusCode, loc: &str) -> Response<Body> {
    let builder = Response::builder()
        .status(status)
        .header(hyper::header::LOCATION, loc);
    builder.body(Body::empty()).unwrap()
}
/// Origin `scheme://host[:port]` without path, query, or fragment
/// (no dependency on the `url` crate). `None` when `uri` lacks `://`.
///
/// The authority now also ends at `?` or `#`, not only `/`: previously a
/// registered redirect URI like `https://app.example?x=1` produced an
/// "origin" that still carried its query string, breaking origin matching.
fn uri_origin(uri: &str) -> Option<String> {
    let u = uri.trim();
    let pos = u.find("://")?;
    let rest = &u[pos + 3..];
    // End of authority: first '/', '?' or '#', else end of string.
    let end = rest
        .find(|c| c == '/' || c == '?' || c == '#')
        .unwrap_or(rest.len());
    Some(u[..pos + 3 + end].to_string())
}
/// `return_url` must share the same origin as at least one registered OAuth `redirect_uri`.
///
/// Accepts an exact origin match or any URL under `origin + "/"`; blank input
/// is always rejected.
fn sign_out_return_allowed(config: &Config, return_url: &str) -> bool {
    let target = return_url.trim();
    if target.is_empty() {
        return false;
    }
    config
        .oauth_redirect_uris
        .iter()
        .any(|registered| match uri_origin(registered) {
            Some(origin) => target == origin || target.starts_with(&format!("{}/", origin)),
            None => false,
        })
}
/// 302 to `loc` that also expires the session cookie (`Max-Age=0`).
/// Unrepresentable header values fall back to safe static defaults instead of
/// panicking.
fn response_clear_session_redirect(loc: &str) -> Response<Body> {
    let expired_cookie = format!(
        "{}=; Path=/; HttpOnly; SameSite=Lax; Max-Age=0",
        COOKIE_SESSION
    );
    let cookie_header = match hyper::header::HeaderValue::from_str(&expired_cookie) {
        Ok(v) => v,
        Err(_) => hyper::header::HeaderValue::from_static("docv_oauth_session=; Path=/; Max-Age=0"),
    };
    let location = match hyper::header::HeaderValue::from_str(loc) {
        Ok(v) => v,
        Err(_) => hyper::header::HeaderValue::from_static("/"),
    };
    Response::builder()
        .status(StatusCode::FOUND)
        .header(hyper::header::LOCATION, location)
        .header(hyper::header::SET_COOKIE, cookie_header)
        .body(Body::empty())
        .unwrap()
}
/// `GET /oauth/sign-out?return_url=…`: clears the session cookie and 302s to
/// `return_url`, which must share an origin with a registered redirect URI.
async fn handle_sign_out_get(state: Arc<AppState>, req: &Request<Body>) -> Response<Body> {
    let Some(qs) = req.uri().query() else {
        return bad_request();
    };
    let map: HashMap<String, String> = match serde_urlencoded::from_str(qs) {
        Ok(m) => m,
        Err(_) => return bad_request(),
    };
    let return_url = match map.get("return_url") {
        Some(s) if !s.is_empty() => s.as_str(),
        _ => return bad_request(),
    };
    // Open-redirect guard: only origins of registered redirect URIs qualify.
    if !sign_out_return_allowed(&state.config, return_url) {
        warn!(return_url = %return_url, "sign-out return_url rejected");
        return bad_request();
    }
    response_clear_session_redirect(return_url)
}
/// Redirect to `loc` carrying a `Set-Cookie` header; a cookie value that is
/// not a valid header falls back to an "invalid" placeholder rather than
/// panicking.
fn redirect_with_set_cookie(status: StatusCode, loc: &str, set_cookie: &str) -> Response<Body> {
    let cookie = match hyper::header::HeaderValue::from_str(set_cookie) {
        Ok(v) => v,
        Err(_) => hyper::header::HeaderValue::from_static("docv_oauth_session=invalid; Path=/"),
    };
    Response::builder()
        .status(status)
        .header(hyper::header::LOCATION, loc)
        .header(hyper::header::SET_COOKIE, cookie)
        .body(Body::empty())
        .unwrap()
}
/// 400 with a plain-text body.
fn bad_request() -> Response<Body> {
    let builder = Response::builder().status(StatusCode::BAD_REQUEST);
    builder.body(Body::from("bad request")).unwrap()
}

/// 401 with a plain-text body.
fn unauthorized() -> Response<Body> {
    let builder = Response::builder().status(StatusCode::UNAUTHORIZED);
    builder.body(Body::from("unauthorized")).unwrap()
}

/// 200 with a UTF-8 HTML body.
fn html_response(html: String) -> Response<Body> {
    let builder = Response::builder()
        .status(StatusCode::OK)
        .header(hyper::header::CONTENT_TYPE, "text/html; charset=utf-8");
    builder.body(Body::from(html)).unwrap()
}
/// Fetches `id` (PK cast to text), `email`, `password_hash`, `name` for the
/// user whose email matches case-insensitively; `None` when absent.
///
/// The PK column name is whitelisted to `uid`/`id` before interpolation, so
/// the `format!` cannot inject SQL; the email itself is a bound parameter.
async fn row_by_email(
    client: &deadpool_postgres::Client,
    email: &str,
    users_pk_column: &str,
) -> Result<Option<Row>, tokio_postgres::Error> {
    let col = if users_pk_column == "uid" {
        "uid"
    } else {
        "id"
    };
    let sql = format!(
        "SELECT {}::text AS id, email, password_hash, name FROM users WHERE lower(email) = lower($1)",
        col
    );
    client.query_opt(&sql, &[&email]).await
}
/// 500 HTML error page for the sign-in flow; `title` and `detail` are
/// HTML-escaped before being embedded in the page.
fn sign_in_server_error_html(title: &str, detail: &str) -> Response<Body> {
    let title_esc = html_escape::encode_text(title);
    let detail_esc = html_escape::encode_text(detail);
    let html = format!(
        r#"<!DOCTYPE html><html lang="fr"><head><meta charset="utf-8"/><title>{title_esc}</title>
<style>body{{font-family:system-ui;margin:2rem;max-width:32rem}} a{{color:#1e3a5f}}</style></head>
<body><h1>{title_esc}</h1><p>{detail_esc}</p><p><button type="button" onclick="history.back()">Retour</button></p></body></html>"#
    );
    Response::builder()
        .status(StatusCode::INTERNAL_SERVER_ERROR)
        .header(hyper::header::CONTENT_TYPE, "text/html; charset=utf-8")
        .body(Body::from(html))
        .unwrap()
}
async fn handle_authorize(state: Arc<AppState>, req: &Request<Body>) -> Response<Body> {
let Some(qs) = req.uri().query() else {
return bad_request();
};
let map: HashMap<String, String> = match serde_urlencoded::from_str(qs) {
Ok(m) => m,
Err(_) => return bad_request(),
};
let response_type = match map.get("response_type") {
Some(s) => s.as_str(),
None => return bad_request(),
};
if response_type != "code" {
return bad_request();
}
let client_id = match map.get("client_id") {
Some(s) => s.as_str(),
None => return bad_request(),
};
let redirect_uri = match map.get("redirect_uri") {
Some(s) => s.as_str(),
None => return bad_request(),
};
if !state.config.oauth_client_id_allowed(client_id) {
return bad_request();
}
if !state.config.redirect_uri_allowed(redirect_uri) {
warn!(uri = %redirect_uri, "redirect_uri not allowed");
return bad_request();
}
let cookie_header = req
.headers()
.get(hyper::header::COOKIE)
.and_then(|v| v.to_str().ok());
if read_session(&state.config.jwt_secret, cookie_header).is_none() {
let mut current = format!(
"/oauth/authorize?response_type=code&client_id={}&redirect_uri={}",
urlencoding::encode(client_id),
urlencoding::encode(redirect_uri),
);
if let Some(st) = map.get("state") {
current.push_str(&format!("&state={}", urlencoding::encode(st)));
}
if let Some(sc) = map.get("scope") {
current.push_str(&format!("&scope={}", urlencoding::encode(sc)));
}
let return_for_browser = state.config.browser_oauth_path(&current);
let sign_in = state.config.browser_oauth_path("/oauth/sign-in");
let to = format!(
"{}?return_url={}",
sign_in,
urlencoding::encode(&return_for_browser)
);
return redirect(StatusCode::FOUND, &to);
}
let s = read_session(&state.config.jwt_secret, cookie_header).unwrap();
let code = Uuid::new_v4().to_string();
{
let mut cmap = state.codes.lock().expect("codes");
cmap.retain(|_, v| v.expires > Instant::now());
cmap.insert(
code.clone(),
OauthCodeEntry {
sub: s.sub.clone(),
email: s.email.clone(),
name: s.name.clone(),
expires: Instant::now() + CODE_TTL,
},
);
}
let mut loc = format!(
"{}?code={}",
redirect_uri,
urlencoding::encode(&code)
);
if let Some(st) = map.get("state") {
loc.push_str(&format!("&state={}", urlencoding::encode(st)));
}
redirect(StatusCode::FOUND, &loc)
}
/// `GET /oauth/sign-in?return_url=…`: renders the tenant-branded sign-in
/// page. `return_url` must target `/oauth/authorize`, so the form can only
/// bounce back into the authorization flow.
async fn handle_sign_in_get(state: Arc<AppState>, req: &Request<Body>) -> Response<Body> {
    let Some(qs) = req.uri().query() else {
        return bad_request();
    };
    let map: HashMap<String, String> = match serde_urlencoded::from_str(qs) {
        Ok(m) => m,
        Err(_) => return bad_request(),
    };
    let return_url = match map.get("return_url") {
        Some(s) => s.as_str(),
        None => return bad_request(),
    };
    if return_url.is_empty() || !return_url.contains("/oauth/authorize") {
        return bad_request();
    }
    // Escaped for embedding inside double-quoted HTML attributes.
    let ru = html_escape::encode_double_quoted_attribute(return_url);
    // Branding keyed on the client_id extracted from the authorize return URL.
    let cid = crate::branding::client_id_from_authorize_return_url(return_url);
    let branding = crate::branding::resolve_branding(&state.config.tenants_json, cid.as_deref());
    let form_action = state.config.browser_oauth_path("/oauth/sign-in");
    let form_action_esc = html_escape::encode_double_quoted_attribute(&form_action);
    let html = crate::branding::sign_in_page_html(
        ru.as_ref(),
        &branding,
        form_action_esc.as_ref(),
    );
    html_response(html)
}
/// `POST /oauth/sign-in`: verifies email/password against the `users` table,
/// then sets the session cookie and 302s back into `/oauth/authorize`.
async fn handle_sign_in_post(state: Arc<AppState>, req: Request<Body>) -> Response<Body> {
    let body = match to_bytes(req.into_body()).await {
        Ok(b) => b,
        Err(_) => return bad_request(),
    };
    let map: HashMap<String, String> = match serde_urlencoded::from_bytes(&body) {
        Ok(m) => m,
        Err(_) => return bad_request(),
    };
    let return_url = match map.get("return_url") {
        Some(s) => s.clone(),
        None => return bad_request(),
    };
    // Only accept bounce-backs into the authorize endpoint (no open redirect).
    if return_url.is_empty() || !return_url.contains("/oauth/authorize") {
        return bad_request();
    }
    let email = match map.get("email") {
        Some(s) => s.clone(),
        None => return bad_request(),
    };
    let password = match map.get("password") {
        Some(s) => s.clone(),
        None => return bad_request(),
    };
    let client = match state.pool.get().await {
        Ok(c) => c,
        Err(e) => {
            error!(?e, "pool");
            return sign_in_server_error_html(
                "Connexion indisponible",
                "Impossible de joindre la base de données. Réessayez plus tard.",
            );
        }
    };
    let row = match row_by_email(&client, &email, &state.config.users_pk_column).await {
        Ok(r) => r,
        Err(e) => {
            error!(?e, "query");
            return sign_in_server_error_html(
                "Erreur serveur",
                "Une erreur technique est survenue pendant la connexion.",
            );
        }
    };
    // NOTE(review): unknown emails return before any bcrypt work, so timing
    // differs from the wrong-password path (account-enumeration signal) —
    // confirm this is acceptable for the threat model.
    let Some(row) = row else {
        warn!(email = %email, "unknown user");
        return unauthorized();
    };
    let id: String = row.get("id");
    let hash: String = row.get("password_hash");
    let name: Option<String> = row.get("name");
    // A malformed stored hash verifies as false instead of erroring out.
    let ok = bcrypt::verify(password.as_bytes(), &hash).unwrap_or(false);
    if !ok {
        return unauthorized();
    }
    // Cookie Max-Age mirrors the JWT `exp` so both expire together.
    let ttl = state.config.oauth_access_token_ttl_secs;
    let token = match session_cookie_value(
        &state.config.jwt_secret,
        &id,
        &email,
        &name,
        ttl,
    ) {
        Ok(t) => t,
        Err(_) => {
            return Response::builder()
                .status(StatusCode::INTERNAL_SERVER_ERROR)
                .body(Body::empty())
                .unwrap();
        }
    };
    let cookie = format!(
        "{}={}; Path=/; HttpOnly; SameSite=Lax; Max-Age={}",
        COOKIE_SESSION,
        token,
        ttl
    );
    redirect_with_set_cookie(StatusCode::FOUND, &return_url, &cookie)
}
/// `POST /oauth/token` (authorization_code grant): exchanges a one-shot code
/// for a Bearer JWT. The code is removed from the map on use, so a replayed
/// code fails.
async fn handle_token(state: Arc<AppState>, req: Request<Body>) -> Response<Body> {
    let body = match to_bytes(req.into_body()).await {
        Ok(b) => b,
        Err(_) => return bad_request(),
    };
    let form: TokenForm = match serde_urlencoded::from_bytes(&body) {
        Ok(f) => f,
        Err(_) => return bad_request(),
    };
    if form.grant_type != "authorization_code" {
        return bad_request();
    }
    // NOTE(review): the client_secret is compared with `!=` (not constant
    // time) — confirm that is acceptable for the deployment threat model.
    if !state.config.oauth_client_id_allowed(&form.client_id)
        || form.client_secret != state.config.oauth_client_secret
    {
        return unauthorized();
    }
    if !state.config.redirect_uri_allowed(&form.redirect_uri) {
        return bad_request();
    }
    // Single use: `remove` takes the entry out even if a later step fails.
    let entry = {
        let mut cmap = state.codes.lock().expect("codes");
        cmap.retain(|_, v| v.expires > Instant::now());
        cmap.remove(&form.code)
    };
    let Some(entry) = entry else {
        return bad_request();
    };
    let ttl = state.config.oauth_access_token_ttl_secs as usize;
    let exp = unix_now() + ttl;
    let claims = AccessClaims {
        sub: entry.sub.clone(),
        email: entry.email.clone(),
        name: entry.name.clone(),
        exp,
        iss: "docv-back".to_string(),
    };
    let access_token = match encode(
        &Header::default(),
        &claims,
        &EncodingKey::from_secret(state.config.jwt_secret.as_bytes()),
    ) {
        Ok(t) => t,
        Err(_) => {
            return Response::builder()
                .status(StatusCode::INTERNAL_SERVER_ERROR)
                .body(Body::empty())
                .unwrap();
        }
    };
    let body = serde_json::json!({
        "access_token": access_token,
        "token_type": "Bearer",
        "expires_in": ttl,
    });
    Response::builder()
        .status(StatusCode::OK)
        .header(hyper::header::CONTENT_TYPE, "application/json")
        .body(Body::from(body.to_string()))
        .unwrap()
}
/// Top-level dispatch: `/api/v1/*` is delegated to `api_v1`; the rest covers
/// the OAuth endpoints plus a `GET /` health probe; anything else is 404.
async fn router(state: Arc<AppState>, req: Request<Body>) -> Result<Response<Body>, Infallible> {
    // Owned copy of the path: `req` is moved into some handlers below.
    let path = req.uri().path().to_string();
    if path.starts_with("/api/v1") {
        return Ok(api_v1::handle(state, req, &path).await);
    }
    let method = req.method();
    let res = if path == "/" && method == Method::GET {
        Response::builder()
            .status(StatusCode::OK)
            .body(Body::from("ok"))
            .unwrap()
    } else if path == "/oauth/authorize" && method == Method::GET {
        handle_authorize(state.clone(), &req).await
    } else if path == "/oauth/sign-in" && method == Method::GET {
        handle_sign_in_get(state.clone(), &req).await
    } else if path == "/oauth/sign-in" && method == Method::POST {
        // Body-consuming handlers take `req` by value.
        handle_sign_in_post(state.clone(), req).await
    } else if path == "/oauth/token" && method == Method::POST {
        handle_token(state, req).await
    } else if path == "/oauth/sign-out" && method == Method::GET {
        handle_sign_out_get(state.clone(), &req).await
    } else {
        Response::builder()
            .status(StatusCode::NOT_FOUND)
            .body(Body::from("not found"))
            .unwrap()
    };
    Ok(res)
}
/// Binds `host:port` from config and serves [`router`] until the server exits.
pub async fn serve(config: Config, pool: DbPool) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let state = Arc::new(AppState {
        config: config.clone(),
        pool,
        codes: Arc::new(Mutex::new(HashMap::new())),
        dp_git_serial: Arc::new(AsyncMutex::new(())),
    });
    let addr: SocketAddr = format!("{}:{}", config.host, config.port).parse()?;
    // One service per connection; each request gets a cloned Arc<AppState>.
    let make_svc = make_service_fn(move |_| {
        let st = state.clone();
        async move {
            Ok::<_, Infallible>(service_fn(move |req| {
                let st = st.clone();
                async move { router(st, req).await }
            }))
        }
    });
    let server = Server::bind(&addr).serve(make_svc);
    tracing::info!("docv-back listening http://{}", addr);
    server.await?;
    Ok(())
}

View File

@ -0,0 +1,106 @@
//! Pure routing helpers for `/api/v1/*` (factorisation du dispatch).
use uuid::Uuid;
/// Recognized `POST` bodies (path segmentation after `/api/v1`).
///
/// Route shapes below are the ones accepted by [`match_post`].
#[derive(Debug, Clone)]
pub enum PostAction {
    /// `POST /folders`
    CreateFolder,
    /// `POST /folders/{uid}/documents`
    CreateFolderDocument(Uuid),
    /// `POST /folders/{uid}/documents/binary`.
    /// Raw body = file bytes; headers `X-Enso-*` for metadata.
    UploadFolderDocumentBinary(Uuid),
    /// `POST /pending-documents`
    CreatePendingDocument,
    /// `POST /conversations/{uid}/messages`
    PostConversationMessage(Uuid),
    /// `POST /offices/{uid}/comments`
    CreateOfficeComment(Uuid),
    /// `POST /folders/{uid}/document-sources`
    CreateDocumentSource(Uuid),
    /// `POST /folders/{uid}/notes`
    CreateFolderNote(Uuid),
    /// `POST /tasks`
    CreateTask,
    /// `POST /ai/chat`
    AiChat,
    /// `POST /ai/documents/{uid}/assist`
    AiDocumentAssist(Uuid),
}
/// Map the path segments after `/api/v1` onto a `POST` action.
///
/// Returns `None` when the route shape is unknown or a path id is not a
/// valid UUID.
pub fn match_post(segments: &[&str]) -> Option<PostAction> {
    // Slice patterns encode both the segment count and the literal parts,
    // replacing the explicit length switch.
    match segments {
        ["folders"] => Some(PostAction::CreateFolder),
        ["pending-documents"] => Some(PostAction::CreatePendingDocument),
        ["tasks"] => Some(PostAction::CreateTask),
        ["ai", "chat"] => Some(PostAction::AiChat),
        ["folders", id, "documents", "binary"] => {
            Uuid::parse_str(id).ok().map(PostAction::UploadFolderDocumentBinary)
        }
        ["folders", id, "document-sources"] => {
            Uuid::parse_str(id).ok().map(PostAction::CreateDocumentSource)
        }
        ["folders", id, "notes"] => Uuid::parse_str(id).ok().map(PostAction::CreateFolderNote),
        ["folders", id, "documents"] => {
            Uuid::parse_str(id).ok().map(PostAction::CreateFolderDocument)
        }
        ["conversations", id, "messages"] => {
            Uuid::parse_str(id).ok().map(PostAction::PostConversationMessage)
        }
        ["offices", id, "comments"] => {
            Uuid::parse_str(id).ok().map(PostAction::CreateOfficeComment)
        }
        ["ai", "documents", id, "assist"] => {
            Uuid::parse_str(id).ok().map(PostAction::AiDocumentAssist)
        }
        _ => None,
    }
}
/// Recognized `DELETE` routes (path segmentation after `/api/v1`).
///
/// Route shapes below are the ones accepted by [`match_delete`].
#[derive(Debug, Clone)]
pub enum DeleteAction {
    /// `DELETE /folders/{folder_uid}/documents/{doc_uid}`
    FolderDocument { folder_uid: Uuid, doc_uid: Uuid },
    /// `DELETE /pending-documents/{uid}`
    PendingDocument(Uuid),
    /// `DELETE /folders/{folder_uid}/document-sources/{source_uid}`
    DocumentSource {
        folder_uid: Uuid,
        source_uid: Uuid,
    },
    /// `DELETE /folders/{folder_uid}/notes/{note_uid}`
    FolderNote {
        folder_uid: Uuid,
        note_uid: Uuid,
    },
    /// `DELETE /tasks/{uid}`
    Task(Uuid),
}
/// Map the path segments after `/api/v1` onto a `DELETE` action.
///
/// Returns `None` when the route shape is unknown or a path id is not a
/// valid UUID.
pub fn match_delete(segments: &[&str]) -> Option<DeleteAction> {
    // Slice patterns replace the explicit length switch; `?` short-circuits
    // on any id that does not parse as a UUID.
    match segments {
        ["folders", folder, "documents", doc] => Some(DeleteAction::FolderDocument {
            folder_uid: Uuid::parse_str(folder).ok()?,
            doc_uid: Uuid::parse_str(doc).ok()?,
        }),
        ["folders", folder, "document-sources", source] => Some(DeleteAction::DocumentSource {
            folder_uid: Uuid::parse_str(folder).ok()?,
            source_uid: Uuid::parse_str(source).ok()?,
        }),
        ["folders", folder, "notes", note] => Some(DeleteAction::FolderNote {
            folder_uid: Uuid::parse_str(folder).ok()?,
            note_uid: Uuid::parse_str(note).ok()?,
        }),
        ["pending-documents", id] => {
            Uuid::parse_str(id).ok().map(DeleteAction::PendingDocument)
        }
        ["tasks", id] => Uuid::parse_str(id).ok().map(DeleteAction::Task),
        _ => None,
    }
}

View File

@ -0,0 +1,18 @@
{
"default": {
"page_title": "Connexion",
"heading": "Connexion",
"subtitle": "Saisissez vos identifiants pour continuer.",
"primary_color": "#1e3a5f",
"accent_color": "#b45309",
"surface_color": "#f8fafc",
"text_color": "#0f172a",
"submit_label": "Continuer",
"font_family": "system-ui, -apple-system, \"Segoe UI\", sans-serif"
},
"clients": {
"enso-web": {
"subtitle": "Accédez à votre espace Enso Avocats."
}
}
}

View File

@ -0,0 +1,16 @@
[package]
name = "docv-shared"
version = "0.1.0"
edition = "2021"
description = "Shared crate for docv: validation, format, constants, business rules (natif + WASM)"
[lints]
workspace = true
[lib]
crate-type = ["cdylib", "rlib"]
[dependencies]
[target.'cfg(target_arch = "wasm32")'.dependencies]
# wasm-bindgen is added here once the WASM build target is enabled

View File

@ -0,0 +1,4 @@
//! Shared constants: error codes, limits, business parameters.
/// Maximum accepted byte length for an e-mail address.
pub const MAX_EMAIL_LEN: usize = 255;
/// Minimum accepted password length.
pub const MIN_PASSWORD_LEN: usize = 8;

View File

@ -0,0 +1 @@
//! Formatting helpers (dates, amounts). Used by back and optionally by front via WASM.

View File

@ -0,0 +1,7 @@
//! docv-shared: validation, format, constants, business rules.
//! Consumed by docv-back (natif) and optionally by docv-front via WASM.
pub mod constants; // shared limits and business parameters
pub mod format; // formatting helpers (dates, amounts)
pub mod rules; // pure business rules, no I/O
pub mod validation; // input validation (email, lengths)

View File

@ -0,0 +1 @@
//! Pure business rules (no I/O). Used by back and optionally by front via WASM.

View File

@ -0,0 +1,5 @@
//! Validation helpers (email, password, lengths). Used by back and optionally by front via WASM.
/// Cheap syntactic check that `s` looks like an e-mail address.
///
/// Accepts strings of at most 255 bytes whose first `@` separates a
/// non-empty local part from a non-empty domain. Deliberately permissive
/// beyond that — real validation only happens by delivering mail.
///
/// Fixes the previous check, which accepted degenerate inputs such as
/// `"@"`, `"a@"` and `"@b"` (any non-empty string containing `@`).
pub fn is_valid_email(s: &str) -> bool {
    // Keep in sync with `constants::MAX_EMAIL_LEN` (255).
    if s.len() > 255 {
        return false;
    }
    // Both sides of the first '@' must be non-empty.
    match s.split_once('@') {
        Some((local, domain)) => !local.is_empty() && !domain.is_empty(),
        None => false,
    }
}

View File

@ -37,3 +37,11 @@ journalctl -u anythingllm -f
```
Requires `docker.service`, user `ncantu` in group `docker`, and paths in `/etc/default/anythingllm` for non-default storage.
## Git pull des clones projet (utilisateur, pas root)
Tirage **périodique** des dépôts listés dans `projects/*/conf.json` (voir [`../cron/README.md`](../cron/README.md)) :
- Unités : [`user/git-pull-project-clones.service.in`](./user/git-pull-project-clones.service.in) + [`user/git-pull-project-clones.timer`](./user/git-pull-project-clones.timer)
- Installation : [`../scripts/install-git-pull-systemd-user.sh`](../scripts/install-git-pull-systemd-user.sh) (écrit dans `~/.config/systemd/user/`, **sans** `sudo`)
- Configuration : [`../cron/config.env`](../cron/config.env)