Compare commits
No commits in common. "v0.2.1" and "master" have entirely different histories.
@@ -1,17 +0,0 @@
template:
  repo: nicolas.cantu/4NK_project_template
  host: git.4nkweb.com
sync:
  include:
    - .gitea/
    - .cursor/
    - docs/
    - AGENTS.md
    - CONTRIBUTING.md
    - CODE_OF_CONDUCT.md
    - SECURITY.md
    - CHANGELOG.md
  exclude:
    - target/
    - storage/
    - .git/
@@ -1,7 +0,0 @@
# Project Cursor rules

- Build regularly: `cargo build`.
- Run the tests often: `cargo test`.
- Update the documentation (`docs/`) on every functional change.
- Follow Rust style: `cargo fmt` and `cargo clippy -D warnings`.
- PRs must include tests and docs.
@@ -1,6 +0,0 @@
target
.git
storage
**/*.log
**/*.tmp
**/*.swp
@@ -1,20 +0,0 @@
---
name: Bug report
about: Report a problem
labels: bug
---

## Description

Describe the bug.

## Reproduction

1. Steps
2. Observed result
3. Expected result

## Context
- Version
- OS / Arch
- Relevant logs
@@ -1,21 +0,0 @@
---
name: Feature request
about: Propose an idea
labels: enhancement
---

## Problem / Context

What problem does the feature solve?

## Proposal

Describe the desired solution.

## Alternatives

Alternative solutions considered.

## Impacts

Tests, docs, compatibility.
@@ -1,13 +0,0 @@
# Purpose

Briefly describe the changes.

## Checklist
- [ ] Tests added/updated (`tests/`)
- [ ] Documentation updated (`docs/`)
- [ ] `cargo fmt` OK
- [ ] `cargo clippy` with no warnings
- [ ] `CHANGELOG.md` updated if needed

## Links
- Related issue: #
@@ -1,30 +0,0 @@
name: CI

on:
  push:
  pull_request:

jobs:
  rust:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Rust toolchain
        uses: dtolnay/rust-toolchain@stable
      - name: Cache cargo
        uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            target
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - name: Format check
        run: cargo fmt --all -- --check
      - name: Clippy
        run: cargo clippy --all-targets --all-features -- -D warnings
      - name: Build
        run: cargo build --verbose
      - name: Test
        run: cargo test --all --verbose
@@ -1,34 +0,0 @@
name: Docker Image

on:
  push:
    branches:
      - docker-support
  workflow_dispatch:

jobs:
  docker:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to registry
        uses: docker/login-action@v3
        with:
          registry: ${{ secrets.DOCKER_REGISTRY }}
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          tags: ${{ secrets.DOCKER_REGISTRY }}/sdk_storage:latest
          platforms: linux/amd64,linux/arm64
@@ -1,34 +0,0 @@
name: Release

on:
  push:
    tags:
      - 'v*.*.*'

jobs:
  build-release:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest]
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Rust toolchain
        uses: dtolnay/rust-toolchain@stable
      - name: Build
        run: cargo build --release
      - name: Archive artifact
        shell: bash
        run: |
          mkdir -p dist
          if [[ "$RUNNER_OS" == "Windows" ]]; then
            cp target/release/sdk_storage.exe dist/
          else
            cp target/release/sdk_storage dist/
          fi
      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: sdk_storage-${{ runner.os }}
          path: dist/*
@@ -1,5 +0,0 @@
# Agents & Automations

- Compile regularly: `cargo build`.
- Run the tests: `cargo test`.
- Update the documentation (`docs/`) whenever a feature changes.
CHANGELOG.md · 12
@@ -1,12 +0,0 @@
# Changelog

## 0.2.0
- Added multi-stage Dockerfile and `.dockerignore`
- CI: build/test, release, and Docker build/push workflows
- Extended documentation in `docs/` (architecture, guides, JSON API)
- Strengthened tests: key conflict and deletion of expired entries

## 0.1.0
- Refactor into `src/lib.rs` and the `StorageService` service
- Added `docs/` (README) and `tests/` (service integration test)
- Tide HTTP API kept; periodic 60s TTL cleanup
@@ -1,9 +0,0 @@
# Code of Conduct

We are committed to providing an open, welcoming and respectful community.

- No harassment.
- Respect technical opinions and the people behind them.
- Follow the maintainers' guidelines.

Report any problem via the repository issues.
@@ -1,8 +0,0 @@
# Contributing to sdk_storage

Thank you for opening issues and Pull Requests.

- Discuss major changes in an issue first.
- Work on a `feature/...` branch.
- Always add tests (`tests/`) and update the documentation (`docs/`).
- Make sure `cargo fmt`, `cargo clippy` and `cargo test` pass locally.
Cargo.lock · 51 (generated)
@@ -817,18 +817,6 @@ dependencies = [
 "wasi 0.11.0+wasi-snapshot-preview1",
]

[[package]]
name = "getrandom"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
dependencies = [
 "cfg-if 1.0.0",
 "libc",
 "r-efi",
 "wasi 0.14.2+wasi-0.2.4",
]

[[package]]
name = "ghash"
version = "0.3.1"
@@ -1348,12 +1336,6 @@ dependencies = [
 "proc-macro2",
]

[[package]]
name = "r-efi"
version = "5.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"

[[package]]
name = "rand"
version = "0.7.3"
@@ -1481,7 +1463,6 @@ dependencies = [
 "hex",
 "serde",
 "serde_json",
 "tempfile",
 "tide",
]

@@ -1813,20 +1794,6 @@ dependencies = [
 "syn 2.0.87",
]

[[package]]
name = "tempfile"
version = "3.17.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22e5a0acb1f3f55f65cc4a866c361b2fb2a0ff6366785ae6fbb5f85df07ba230"
dependencies = [
 "cfg-if 1.0.0",
 "fastrand 2.2.0",
 "getrandom 0.3.3",
 "once_cell",
 "rustix 0.38.41",
 "windows-sys 0.59.0",
]

[[package]]
name = "thiserror"
version = "1.0.69"
@@ -2046,15 +2013,6 @@ version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"

[[package]]
name = "wasi"
version = "0.14.2+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3"
dependencies = [
 "wit-bindgen-rt",
]

[[package]]
name = "wasm-bindgen"
version = "0.2.95"
@@ -2313,15 +2271,6 @@ version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"

[[package]]
name = "wit-bindgen-rt"
version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1"
dependencies = [
 "bitflags 2.6.0",
]

[[package]]
name = "write16"
version = "1.0.0"
Cargo.toml · 14
@@ -4,12 +4,8 @@ version = "0.1.0"
edition = "2021"

[dependencies]
tide = "0.16"
async-std = { version = "1", features = ["attributes"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
hex = "0.4"

[dev-dependencies]
tempfile = "3"
surf = { version = "2", default-features = false, features = ["h1-client"] }
tide = "0.16.0"
async-std = { version = "1.8.0", features = ["attributes"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
hex = "0.4.3"
Dockerfile · 19
@@ -1,19 +0,0 @@
# syntax=docker/dockerfile:1

FROM rust:1 as builder
WORKDIR /app
COPY Cargo.toml Cargo.lock ./
COPY src ./src
RUN cargo build --release

FROM debian:stable-slim
RUN useradd -m -u 10001 appuser && \
    apt-get update && apt-get install -y ca-certificates && rm -rf /var/lib/apt/lists/*
WORKDIR /app
COPY --from=builder /app/target/release/sdk_storage /usr/local/bin/sdk_storage
RUN mkdir -p /app/storage && chown -R appuser:appuser /app
USER appuser
EXPOSE 8081
ENV RUST_LOG=info
ENTRYPOINT ["/usr/local/bin/sdk_storage"]
CMD ["--permanent"]
LICENSE · 19
@@ -1,19 +0,0 @@
MIT License

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
README.md · 22
@@ -1,22 +0,0 @@
# sdk_storage

See the detailed documentation in `docs/`.

## Quick start

- Build: `cargo build`
- Run: `cargo run -- --permanent` (a key without TTL is permanent)
- Test: `cargo test`

## API

- POST `/store` { key(hex64), value(hex), ttl? (s) }
- GET `/retrieve/:key` (see the client sketch below)

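To make the two endpoints concrete, here is a minimal client sketch. It assumes a server running locally on port 8081 and uses the `surf` crate from the dev-dependencies; the key and payload are arbitrary examples.

```rust
use serde_json::json;

#[async_std::main]
async fn main() -> surf::Result<()> {
    // 64 hex characters (32 bytes) for the key, hex-encoded bytes for the value.
    let key = "aa".repeat(32);
    let body = json!({
        "key": key,
        "value": hex::encode(b"hello"),
        "ttl": 3600
    });

    // POST /store: 200 on success, 400 on invalid input, 409 on key conflict.
    let mut res = surf::post("http://127.0.0.1:8081/store")
        .body(surf::Body::from_json(&body)?)
        .await?;
    println!("store: {} {}", res.status(), res.body_string().await?);

    // GET /retrieve/:key: returns { key, value } with the value hex-encoded.
    let mut res = surf::get(format!("http://127.0.0.1:8081/retrieve/{}", key)).await?;
    println!("retrieve: {}", res.body_string().await?);
    Ok(())
}
```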
## Contribution

See `CONTRIBUTING.md`, `CODE_OF_CONDUCT.md`, `SECURITY.md`.

## License

See `LICENSE` (MIT).
@@ -1,5 +0,0 @@
# Security Policy

- Do not disclose vulnerabilities publicly.
- Open a private issue if possible, or contact the maintainers.
- Please include reproduction steps and the impact.
@@ -1,37 +0,0 @@
# sdk_storage project documentation

This folder documents the HTTP API, the architecture and the technical decisions.

## API

- POST `/store`: stores a hex value under a 64-character hex key, optional `ttl` (seconds). When `--permanent` is passed to the binary, omitting `ttl` makes the data permanent.
- GET `/retrieve/:key`: returns `{ key, value }` where `value` is hex-encoded.

## Architecture

- The `StorageService` service (see `src/lib.rs`) encapsulates the storage, retrieval and TTL cleanup logic.
- `src/main.rs` starts Tide with the `StorageService` state and a periodic cleanup loop (60s).

See also:
- `architecture.md`
- `configuration.md`
- `guides_principaux.md`
- `guides_techniques.md`
- `guides_test.md`
- `tests_monitoring.md`
- `reseau_de_relais.md`
- `developpement.md`
- `depannage.md`
- `performance.md`
- `api_json_spec.md`
- `api_contrats.md`

## Technical notes (REX)

- Docker
  - Local build: `docker build -t sdk_storage:local .`
  - Run: `docker run --rm -p 8081:8081 -v $PWD/storage:/app/storage sdk_storage:local`
  - `--permanent` is enabled by default via CMD; it can be overridden: `docker run ... sdk_storage -- --permanent`

- Initial refactor of the logic from `main.rs` to `lib.rs` for testability and separation of concerns.
- TTL durations are now validated in the handler, and the expiration is converted to a `SystemTime` before calling the service (see the sketch below).
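As a rough illustration of that last point, the TTL handling boils down to the following sketch. This is a simplification of `handle_store` in `src/lib.rs`, not the code itself; the constants mirror the documented bounds.

```rust
use std::time::{Duration, SystemTime};

const MIN_TTL: u64 = 60;
const MAX_TTL: u64 = 31_536_000;
const DEFAULT_TTL: u64 = 86_400;

/// Validates an optional TTL and converts it into an optional expiration instant.
/// A `None` result means "permanent" (only reachable in --permanent mode).
fn expiration_for(ttl: Option<u64>, no_ttl_permanent: bool) -> Result<Option<SystemTime>, String> {
    let live_for = match ttl {
        Some(t) if t < MIN_TTL => return Err(format!("ttl must be at least {} seconds", MIN_TTL)),
        Some(t) if t > MAX_TTL => return Err(format!("ttl must be at most {} seconds", MAX_TTL)),
        Some(t) => Some(Duration::from_secs(t)),
        None if no_ttl_permanent => None,
        None => Some(Duration::from_secs(DEFAULT_TTL)),
    };
    match live_for {
        None => Ok(None),
        // checked_add guards against an overflowing expiration instant.
        Some(d) => SystemTime::now()
            .checked_add(d)
            .map(Some)
            .ok_or_else(|| "invalid ttl".to_string()),
    }
}
```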
@@ -1,21 +0,0 @@
# API Contracts

## Contract Guarantees
- JSON Content-Type, structured responses.
- Key: 64 hex characters (strict validation), otherwise 400.
- Value: valid hex, otherwise 400.
- Key conflict: 409 if the key already exists.
- TTL: min 60, max 31,536,000; default 86,400 when not `--permanent`.
- Retrieval:
  - 200 with `{ key, value }` if found.
  - 400 if the key is invalid.
  - 404 if absent.

## Test Coverage
- Store and retrieve (success).
- Key conflict.
- Deletion of expired entries via cleanup.
- HTTP `/store`: success, conflict, invalid key, invalid value.
- HTTP `/retrieve`: success, invalid key, missing key.

See `api_json_spec.md` for the detailed schemas and constraints.
@@ -1,109 +0,0 @@
# JSON API Specification

## General
- Content-Type: `application/json; charset=utf-8`
- Value encoding: lowercase hexadecimal strings (0-9a-f).
- Error model: body `{ "message": string }` with the appropriate HTTP status code.

## POST /store
- Request object: `StoreRequest`
- Response object (success/error): `ApiResponse`

### JSON Schema (StoreRequest)
```json
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "StoreRequest",
  "type": "object",
  "additionalProperties": false,
  "required": ["key", "value"],
  "properties": {
    "key": {
      "type": "string",
      "description": "64-character hexadecimal key (32 bytes).",
      "pattern": "^[0-9a-fA-F]{64}$"
    },
    "value": {
      "type": "string",
      "description": "Hex-encoded value.",
      "pattern": "^[0-9a-fA-F]+$"
    },
    "ttl": {
      "type": "integer",
      "minimum": 60,
      "maximum": 31536000,
      "description": "Time to live in seconds. If absent: default 86400 unless in --permanent mode (no expiration)."
    }
  }
}
```

### Validation rules and semantics
- `key`: exactly 64 hex characters (32 bytes).
- `value`: valid hex string, even length recommended (byte representation).
- `ttl`:
  - min: 60, max: 31,536,000.
  - if absent and the binary runs without `--permanent`: defaults to 86,400.
  - if absent and the binary runs with `--permanent`: no expiration (a request sketch follows below).
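For illustration, a body that satisfies this schema can be built and parsed as follows (a sketch; the key and payload are arbitrary, and `ttl` may simply be omitted to get the default behaviour described above):

```rust
use sdk_storage::StoreRequest; // request type from src/lib.rs
use serde_json::json;

fn main() {
    // 64 hex characters (32 bytes) for the key, hex-encoded bytes for the value.
    let body = json!({
        "key": "ab".repeat(32),
        "value": hex::encode("hello world"),
        "ttl": 3600
    });
    // The /store handler deserializes exactly this shape into StoreRequest.
    let parsed: StoreRequest = serde_json::from_value(body).expect("valid StoreRequest");
    assert_eq!(parsed.ttl, Some(3600));
}
```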
### Responses
- 200 OK: `ApiResponse` (success message)
- 400 Bad Request: `ApiResponse` (invalid key/ttl/value)
- 409 Conflict: `ApiResponse` (key already exists)
- 500 Internal Server Error: `ApiResponse`

### JSON Schema (ApiResponse)
```json
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "ApiResponse",
  "type": "object",
  "additionalProperties": false,
  "required": ["message"],
  "properties": {
    "message": { "type": "string" }
  }
}
```

## GET /retrieve/:key
- Path parameter: `key` (64-char hex).
- Response object (success): `RetrieveResponse`
- Response object (error): `ApiResponse`

### Constraints
- `key` must match `^[0-9a-fA-F]{64}$`.

### Responses
- 200 OK: `RetrieveResponse`
- 400 Bad Request: `ApiResponse` (invalid key)
- 404 Not Found: `ApiResponse` (unknown key)
- 500 Internal Server Error: `ApiResponse`

### JSON Schema (RetrieveResponse)
```json
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "RetrieveResponse",
  "type": "object",
  "additionalProperties": false,
  "required": ["key", "value"],
  "properties": {
    "key": {
      "type": "string",
      "description": "64-character hexadecimal key.",
      "pattern": "^[0-9a-fA-F]{64}$"
    },
    "value": {
      "type": "string",
      "description": "Hex-encoded value.",
      "pattern": "^[0-9a-fA-F]+$"
    }
  }
}
```

## Status codes and messages
- Error messages are informative but do not disclose sensitive information.
- The `message` fields are meant for humans; do not parse them client-side.
@@ -1,13 +0,0 @@
# Architecture

## Data Flow

- Inputs: HTTP requests `/store`, `/retrieve/:key`.
- Processing: key/TTL validation, hex encoding, hierarchical filesystem storage, TTL metadata.
- Outputs: normalized JSON responses.

## Components

- `StorageService` service (disk I/O, TTL, cleanup).
- Tide HTTP server (routes, shared state).
- Periodic cleanup (60s) based on `.meta` files.
@@ -1,8 +0,0 @@
# Configuration

## Available Services
- HTTP on port 8081

## Environment Variables
- `RUST_LOG` (optional): log level.
- For Docker, mount `/app/storage` if persistence is desired.
@@ -1,15 +0,0 @@
# Quick Start

## Prerequisites
- Stable Rust and Cargo
- Optional: Docker

## Installation
- `cargo build`

## Running
- `cargo run -- --permanent`

## Docker (optional)
- Build: `docker build -t sdk_storage:local .`
- Run: `docker run --rm -p 8081:8081 -v $PWD/storage:/app/storage sdk_storage:local`
@@ -1,12 +0,0 @@
# Troubleshooting

## Common Problems
1. Ports already in use: change the published Docker port.
2. Storage permissions: check the UID/GID and the volume permissions.
3. Invalid keys: make sure the key is a 64-character hex string.

## Detailed logs
- Run with `RUST_LOG=info`.

## Healthchecks
- Add a `/health` route (a possible evolution, sketched below) or ping `/retrieve` with a known key.
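A minimal sketch of such a route, assuming the `create_app` builder from `src/lib.rs` is reused; the `/health` endpoint itself does not exist in the current code:

```rust
use sdk_storage::create_app;

#[async_std::main]
async fn main() -> tide::Result<()> {
    // Build the regular app, then add a trivial liveness endpoint on top of it.
    let mut app = create_app(true, "./storage");
    app.at("/health").get(|_| async { Ok("ok") });
    app.listen("0.0.0.0:8081").await?;
    Ok(())
}
```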
@@ -1,13 +0,0 @@
# Development

## Project Structure
- `src/lib.rs`: business service
- `src/main.rs`: Tide HTTP server
- `tests/`: integration scenarios

## Adding a New Service
- Create a dedicated abstraction in `src/lib.rs` or a separate module.
- Wire it up in `main.rs` via `tide::with_state` if needed.

## Changing the Configuration
- Update `docs/configuration.md` and the CI/CD secrets.
@@ -1,5 +0,0 @@
# Main Guides

- Core concepts: 64-character hex keys, hex values, TTL in seconds.
- API: `/store` (POST), `/retrieve/:key` (GET).
- Persistence: filesystem, subdirectories by key prefix (see the sketch after this list).
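A small sketch of the on-disk layout rule, mirroring `get_file_path` in `src/lib.rs`: the first two hex characters of the key become a subdirectory and the remaining 62 the file name.

```rust
// Path layout sketch: "aabbcc…" is stored as <storage_dir>/aa/bbcc…
fn path_for(storage_dir: &str, key: &str) -> String {
    format!("{}/{}/{}", storage_dir, &key[..2], &key[2..])
}

fn main() {
    let key = "ab".repeat(32); // 64 hex characters
    assert_eq!(
        path_for("./storage", &key),
        format!("./storage/ab/{}", &key[2..])
    );
}
```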
@@ -1,6 +0,0 @@
# Technical Guides

- `StorageService`: abstraction over the storage operations.
- TTL: serialized into `*.meta` (UNIX timestamp, seconds); see the sketch below.
- Cleanup: directory walk, deletion of expired data.
- Logging: standard output, possible integration with a supervisor.
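For reference, the `*.meta` sidecar is simply the `Metadata` struct from `src/lib.rs` serialized as JSON; a sketch of what ends up on disk:

```rust
use sdk_storage::{system_time_to_unix, Metadata};
use std::time::SystemTime;

fn main() {
    // An entry expiring in one hour: the sidecar stores an absolute UNIX timestamp.
    let meta = Metadata {
        expires_at: Some(system_time_to_unix(SystemTime::now()) + 3600),
    };
    // Written next to the data file as "<key-suffix>.meta", e.g. {"expires_at":1700003600}
    println!("{}", serde_json::to_string(&meta).unwrap());

    // A permanent entry (server started with --permanent, no ttl): {"expires_at":null}
    let permanent = Metadata { expires_at: None };
    println!("{}", serde_json::to_string(&permanent).unwrap());
}
```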
@@ -1,5 +0,0 @@
# Testing Guides

- Unit tests are recommended on `StorageService` using temporary directories.
- Optional HTTP integration tests via an HTTP client.
- Strategies: min/max TTL cases, invalid keys, key conflicts.
@@ -1,9 +0,0 @@
# Performance

## Recommended Resources
- Fast disk for a high write volume.
- Enough memory for I/O buffers.

## Optimizations
- Tune block sizes and the fsync strategy to your constraints.
- Avoid key collisions; monitor the TTL cleanup.
@@ -1,10 +0,0 @@
# Relay Network

## Mesh Architecture
- To be defined according to the deployment.

## Adding External Nodes
- Procedure to be documented if needed.

## External Configuration
- Ports, security, endpoints to expose.
@@ -1,8 +0,0 @@
# Tests and Monitoring

## Tests
- Unit and integration tests via `cargo test`.

## Monitoring
- Expose metrics through a reverse proxy/sidecar if needed.
- Add HTTP healthchecks at the orchestrator level.
src/lib.rs · 252
@@ -1,252 +0,0 @@
use async_std::fs::{create_dir_all, read_dir, read_to_string, remove_file, File};
use async_std::io::WriteExt;
use async_std::path::Path;
use async_std::stream::StreamExt;
use serde::{Deserialize, Serialize};
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use tide::{StatusCode, Request, Response};

#[derive(Clone, Debug)]
pub struct StorageService {
    storage_dir: String,
}

impl StorageService {
    pub fn new<S: Into<String>>(storage_dir: S) -> Self {
        Self {
            storage_dir: storage_dir.into(),
        }
    }

    fn get_file_path(&self, key: &str) -> String {
        let dir_name = format!("{}/{}", self.storage_dir, &key[..2]);
        let file_path = format!("{}/{}", dir_name, &key[2..]);
        file_path
    }

    pub async fn store_data(
        &self,
        key: &str,
        value: &[u8],
        expires_at: Option<SystemTime>,
    ) -> Result<(), tide::Error> {
        let file_name = self.get_file_path(key);
        let file_path = Path::new(&file_name);

        if file_path.exists().await {
            return Err(tide::Error::from_str(StatusCode::Conflict, "Key already exists"));
        }

        create_dir_all(file_path.parent().ok_or(tide::Error::from_str(
            StatusCode::InternalServerError,
            "File path doesn't have parent",
        ))?)
        .await
        .map_err(|e| tide::Error::new(StatusCode::InternalServerError, e))?;

        let metadata_path = format!("{}.meta", file_name);

        let mut file = File::create(&file_path)
            .await
            .map_err(|e| tide::Error::new(StatusCode::InternalServerError, e))?;
        file.write_all(value)
            .await
            .map_err(|e| tide::Error::new(StatusCode::InternalServerError, e))?;

        let metadata = Metadata {
            expires_at: expires_at.map(system_time_to_unix),
        };

        let metadata_json = serde_json::to_string(&metadata)
            .map_err(|e| tide::Error::new(StatusCode::InternalServerError, e))?;
        let mut meta_file = File::create(&metadata_path)
            .await
            .map_err(|e| tide::Error::new(StatusCode::InternalServerError, e))?;
        meta_file
            .write_all(metadata_json.as_bytes())
            .await
            .map_err(|e| tide::Error::new(StatusCode::InternalServerError, e))?;

        Ok(())
    }

    pub async fn retrieve_data(&self, key: &str) -> Result<Vec<u8>, String> {
        let file_path = format!("{}/{}/{}", self.storage_dir, &key[..2], &key[2..]);

        let mut file = File::open(&file_path)
            .await
            .map_err(|_| "Key not found.".to_string())?;
        let mut buffer = Vec::new();
        async_std::io::ReadExt::read_to_end(&mut file, &mut buffer)
            .await
            .map_err(|e| e.to_string())?;
        Ok(buffer)
    }

    pub async fn cleanup_expired_files_once(&self) -> Result<(), String> {
        let mut entries = read_dir(&self.storage_dir)
            .await
            .map_err(|e| format!("Failed to read storage dir: {}", e))?;
        let now = system_time_to_unix(SystemTime::now());
        while let Some(entry) = entries.next().await {
            let e = entry.map_err(|e| format!("entry returned error: {}", e))?;
            let path = e.path();
            if path.is_dir().await {
                if let Ok(mut sub_entries) = read_dir(&path).await {
                    while let Some(sub_entry) = sub_entries.next().await {
                        if let Ok(sub_entry) = sub_entry {
                            let file_path = sub_entry.path();
                            if file_path.extension() == Some("meta".as_ref()) {
                                self.handle_file_cleanup(now, &file_path).await?;
                            }
                        }
                    }
                }
            }
        }
        Ok(())
    }

    async fn handle_file_cleanup(&self, now: u64, meta_path: &Path) -> Result<(), String> {
        let meta_content = read_to_string(meta_path)
            .await
            .map_err(|e| format!("Failed to read metadata: {}", e.to_string()))?;
        let metadata: Metadata = serde_json::from_str(&meta_content)
            .map_err(|e| format!("Failed to parse metadata: {}", e.to_string()))?;

        if metadata.expires_at.is_some() && metadata.expires_at.unwrap() < now {
            let data_file_path = meta_path.with_extension("");
            remove_file(&data_file_path)
                .await
                .map_err(|e| format!("Failed to remove data file: {}", e.to_string()))?;
            remove_file(meta_path)
                .await
                .map_err(|e| format!("Failed to remove metadata file: {}", e.to_string()))?;
        }
        Ok(())
    }
}

#[derive(Debug, Deserialize, Serialize)]
pub struct Metadata {
    pub expires_at: Option<u64>,
}

pub fn system_time_to_unix(system_time: SystemTime) -> u64 {
    system_time
        .duration_since(UNIX_EPOCH)
        .expect("SystemTime before UNIX_EPOCH!")
        .as_secs()
}

pub fn unix_to_system_time(unix_timestamp: u64) -> SystemTime {
    UNIX_EPOCH + Duration::from_secs(unix_timestamp)
}

#[derive(Deserialize)]
pub struct StoreRequest {
    pub key: String,
    pub value: String,
    pub ttl: Option<u64>,
}

#[derive(Serialize)]
pub struct ApiResponse { pub message: String }

#[derive(Serialize)]
pub struct RetrieveResponse { pub key: String, pub value: String }

pub async fn handle_store(mut req: Request<StorageService>, no_ttl_permanent: bool) -> tide::Result<Response> {
    let data: StoreRequest = match req.body_json().await {
        Ok(data) => data,
        Err(e) => {
            return Ok(Response::builder(StatusCode::BadRequest)
                .body(format!("Invalid request: {}", e))
                .build());
        }
    };

    if data.key.len() != 64 || !data.key.chars().all(|c| c.is_ascii_hexdigit()) {
        return Ok(Response::builder(StatusCode::BadRequest)
            .body("Invalid key: must be a 32 bytes hex string.".to_string())
            .build());
    }

    let live_for: Option<Duration> = if let Some(ttl) = data.ttl {
        if ttl < 60 {
            return Ok(Response::builder(StatusCode::BadRequest)
                .body(format!("Invalid ttl: must be at least {} seconds.", 60))
                .build());
        } else if ttl > 31_536_000 {
            return Ok(Response::builder(StatusCode::BadRequest)
                .body(format!("Invalid ttl: must be at most {} seconds.", 31_536_000))
                .build());
        }
        Some(Duration::from_secs(ttl))
    } else if no_ttl_permanent {
        None
    } else {
        Some(Duration::from_secs(86_400))
    };

    let expires_at: Option<SystemTime> = match live_for {
        Some(lf) => Some(
            SystemTime::now()
                .checked_add(lf)
                .ok_or(tide::Error::from_str(StatusCode::BadRequest, "Invalid ttl"))?,
        ),
        None => None,
    };

    let value_bytes = match hex::decode(&data.value) {
        Ok(value) => value,
        Err(e) => {
            return Ok(Response::builder(StatusCode::BadRequest)
                .body(format!("Invalid request: {}", e))
                .build());
        }
    };

    let svc = req.state();
    match svc.store_data(&data.key, &value_bytes, expires_at).await {
        Ok(()) => Ok(Response::builder(StatusCode::Ok)
            .body(serde_json::to_value(&ApiResponse {
                message: "Data stored successfully.".to_string(),
            })?)
            .build()),
        Err(e) => Ok(Response::builder(e.status())
            .body(serde_json::to_value(&ApiResponse {
                message: e.to_string(),
            })?)
            .build()),
    }
}

pub async fn handle_retrieve(req: Request<StorageService>) -> tide::Result<Response> {
    let key: String = req.param("key")?.to_string();

    if key.len() != 64 || !key.chars().all(|c| c.is_ascii_hexdigit()) {
        return Ok(Response::builder(StatusCode::BadRequest)
            .body("Invalid key: must be a 32 bytes hex string.".to_string())
            .build());
    }

    let svc = req.state();
    match svc.retrieve_data(&key).await {
        Ok(value) => {
            let encoded_value = hex::encode(value);
            Ok(Response::builder(StatusCode::Ok)
                .body(serde_json::to_value(&RetrieveResponse { key, value: encoded_value })?)
                .build())
        }
        Err(e) => Ok(Response::builder(StatusCode::NotFound).body(e).build()),
    }
}

pub fn create_app(no_ttl_permanent: bool, storage_dir: impl Into<String>) -> tide::Server<StorageService> {
    let svc = StorageService::new(storage_dir);
    let mut app = tide::with_state(svc);
    app.at("/store").post(move |req| handle_store(req, no_ttl_permanent));
    app.at("/retrieve/:key").get(handle_retrieve);
    app
}
src/main.rs · 297
@@ -1,12 +1,283 @@
use async_std::fs::{create_dir_all, read_dir, read_to_string, remove_file, File};
use serde::{Deserialize, Serialize};
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use std::env;

use async_std::io::WriteExt;
use async_std::path::Path;
use async_std::stream::StreamExt;
use async_std::task;
use async_std::fs::create_dir_all;
use sdk_storage::{StorageService, create_app};
use tide::{Request, Response, StatusCode};

const STORAGE_DIR: &str = "./storage";
const PORT: u16 = 8081;
const DEFAULT_TTL: u64 = 86400;
const MIN_TTL: u64 = 60; // 1 minute
const DEFAULT_TTL: u64 = 86400; // 1 day
const MAX_TTL: u64 = 31_536_000; // 1 year, to be discussed

/// Scans storage and removes expired files
async fn cleanup_expired_files() {
    loop {
        // Traverse storage directory
        let mut entries = match read_dir(STORAGE_DIR).await {
            Ok(entry) => entry,
            Err(e) => {
                eprintln!("Failed to read storage dir: {}", e);
                task::sleep(Duration::from_secs(60)).await;
                continue;
            }
        };

        let now = system_time_to_unix(SystemTime::now());
        while let Some(entry) = entries.next().await {
            let e = match entry {
                Ok(e) => e,
                Err(e) => {
                    eprintln!("entry returned error: {}", e);
                    continue;
                }
            };
            let path = e.path();
            if path.is_dir().await {
                if let Ok(mut sub_entries) = read_dir(&path).await {
                    while let Some(sub_entry) = sub_entries.next().await {
                        if let Ok(sub_entry) = sub_entry {
                            let file_path = sub_entry.path();
                            if file_path.extension() == Some("meta".as_ref()) {
                                if let Err(err) = handle_file_cleanup(now, &file_path).await {
                                    eprintln!("Error cleaning file {:?}: {}", file_path, err);
                                }
                            }
                        }
                    }
                }
            }
        }
        // Sleep for 1 minute before next cleanup
        task::sleep(Duration::from_secs(60)).await;
    }
}

#[derive(Debug, Deserialize, Serialize)]
struct Metadata {
    expires_at: Option<u64>,
}

/// Converts a `SystemTime` to a UNIX timestamp (seconds since UNIX epoch).
fn system_time_to_unix(system_time: SystemTime) -> u64 {
    system_time
        .duration_since(UNIX_EPOCH)
        .expect("SystemTime before UNIX_EPOCH!")
        .as_secs()
}

/// Converts a UNIX timestamp (seconds since UNIX epoch) back to `SystemTime`.
fn unix_to_system_time(unix_timestamp: u64) -> SystemTime {
    UNIX_EPOCH + Duration::from_secs(unix_timestamp)
}

#[derive(Deserialize)]
struct StoreRequest {
    key: String,
    value: String,
    ttl: Option<u64>,
}

#[derive(Serialize)]
struct ApiResponse {
    message: String,
}

#[derive(Serialize)]
struct RetrieveResponse {
    key: String,
    value: String,
}

async fn get_file_path(key: &str) -> String {
    let dir_name = format!("{}/{}", STORAGE_DIR, &key[..2]);
    let file_path = format!("{}/{}", dir_name, &key[2..]);

    file_path
}

/// Store data on the filesystem
async fn store_data(key: &str, value: &[u8], expires_at: Option<SystemTime>) -> Result<(), tide::Error> {
    let file_name = get_file_path(key).await;
    let file_path = Path::new(&file_name);

    // Check if key exists
    if file_path.exists().await {
        return Err(tide::Error::from_str(
            StatusCode::Conflict,
            "Key already exists",
        ));
    }

    create_dir_all(file_path.parent().ok_or(tide::Error::from_str(
        StatusCode::InternalServerError,
        "File path doesn't have parent",
    ))?)
    .await
    .map_err(|e| tide::Error::new(StatusCode::InternalServerError, e))?;

    let metadata_path = format!("{}.meta", file_name);

    let mut file = File::create(&file_path)
        .await
        .map_err(|e| tide::Error::new(StatusCode::InternalServerError, e))?;
    file.write_all(value)
        .await
        .map_err(|e| tide::Error::new(StatusCode::InternalServerError, e))?;

    let metadata = Metadata {
        expires_at: expires_at.map(|e| system_time_to_unix(e)),
    };

    let metadata_json = serde_json::to_string(&metadata)
        .map_err(|e| tide::Error::new(StatusCode::InternalServerError, e))?;
    let mut meta_file = File::create(&metadata_path)
        .await
        .map_err(|e| tide::Error::new(StatusCode::InternalServerError, e))?;
    meta_file
        .write_all(metadata_json.as_bytes())
        .await
        .map_err(|e| tide::Error::new(StatusCode::InternalServerError, e))?;

    Ok(())
}

async fn retrieve_data(key: &str) -> Result<Vec<u8>, String> {
    let file_path = format!("{}/{}/{}", STORAGE_DIR, &key[..2], &key[2..]);

    let mut file = File::open(&file_path)
        .await
        .map_err(|_| "Key not found.".to_string())?;
    let mut buffer = Vec::new();
    async_std::io::ReadExt::read_to_end(&mut file, &mut buffer)
        .await
        .map_err(|e| e.to_string())?;
    Ok(buffer)
}

/// Handler for the /store endpoint
async fn handle_store(mut req: Request<()>, no_ttl_permanent: bool) -> tide::Result<Response> {
    // Parse the JSON body
    let data: StoreRequest = match req.body_json().await {
        Ok(data) => data,
        Err(e) => {
            return Ok(Response::builder(StatusCode::BadRequest)
                .body(format!("Invalid request: {}", e))
                .build());
        }
    };

    // Validate the key
    if data.key.len() != 64 || !data.key.chars().all(|c| c.is_ascii_hexdigit()) {
        return Ok(Response::builder(StatusCode::BadRequest)
            .body("Invalid key: must be a 32 bytes hex string.".to_string())
            .build());
    }

    // Validate the ttl
    let live_for: Option<Duration> = if let Some(ttl) = data.ttl {
        if ttl < MIN_TTL {
            return Ok(Response::builder(StatusCode::BadRequest)
                .body(format!(
                    "Invalid ttl: must be at least {} seconds.",
                    MIN_TTL
                ))
                .build());
        } else if ttl > MAX_TTL {
            return Ok(Response::builder(StatusCode::BadRequest)
                .body(format!("Invalid ttl: must be at most {} seconds.", MAX_TTL))
                .build());
        }
        Some(Duration::from_secs(ttl))
    } else if no_ttl_permanent {
        // When no_ttl_permanent is true, requests without TTL are permanent
        None
    } else {
        Some(Duration::from_secs(DEFAULT_TTL))
    };

    let expires_at: Option<SystemTime> = if let Some(live_for) = live_for {
        let now = SystemTime::now();
        Some(now
            .checked_add(live_for)
            .ok_or(tide::Error::from_str(StatusCode::BadRequest, "Invalid ttl"))?)
    } else {
        None
    };

    // Decode the hex-encoded value
    let value_bytes = match hex::decode(&data.value) {
        Ok(value) => value,
        Err(e) => {
            return Ok(Response::builder(StatusCode::BadRequest)
                .body(format!("Invalid request: {}", e))
                .build());
        }
    };

    // Store the data
    match store_data(&data.key, &value_bytes, expires_at).await {
        Ok(()) => Ok(Response::builder(StatusCode::Ok)
            .body(serde_json::to_value(&ApiResponse {
                message: "Data stored successfully.".to_string(),
            })?)
            .build()),
        Err(e) => Ok(Response::builder(e.status())
            .body(serde_json::to_value(&ApiResponse {
                message: e.to_string(),
            })?)
            .build()),
    }
}

async fn handle_retrieve(req: Request<()>) -> tide::Result<Response> {
    let key: String = req.param("key")?.to_string();

    if key.len() != 64 || !key.chars().all(|c| c.is_ascii_hexdigit()) {
        return Ok(Response::builder(StatusCode::BadRequest)
            .body("Invalid key: must be a 32 bytes hex string.".to_string())
            .build());
    }

    match retrieve_data(&key).await {
        Ok(value) => {
            let encoded_value = hex::encode(value);
            Ok(Response::builder(StatusCode::Ok)
                .body(serde_json::to_value(&RetrieveResponse {
                    key,
                    value: encoded_value,
                })?)
                .build())
        }
        Err(e) => Ok(Response::builder(StatusCode::NotFound).body(e).build()),
    }
}

/// Checks a metadata file and deletes the associated data file if expired
async fn handle_file_cleanup(now: u64, meta_path: &Path) -> Result<(), String> {
    let meta_content = read_to_string(meta_path)
        .await
        .map_err(|e| format!("Failed to read metadata: {}", e.to_string()))?;
    let metadata: Metadata = serde_json::from_str(&meta_content)
        .map_err(|e| format!("Failed to parse metadata: {}", e.to_string()))?;

    if metadata.expires_at.is_some() && metadata.expires_at.unwrap() < now {
        let data_file_path = meta_path.with_extension("");
        remove_file(&data_file_path)
            .await
            .map_err(|e| format!("Failed to remove data file: {}", e.to_string()))?;
        remove_file(meta_path)
            .await
            .map_err(|e| format!("Failed to remove metadata file: {}", e.to_string()))?;
        println!("Removed expired file: {:?}", data_file_path);
    }
    Ok(())
}

#[async_std::main]
async fn main() -> tide::Result<()> {
@@ -20,21 +291,15 @@ async fn main() -> tide::Result<()> {
        println!("No-TTL requests will use default TTL of {} seconds", DEFAULT_TTL);
    }

    let svc = StorageService::new(STORAGE_DIR);
    create_dir_all(STORAGE_DIR).await.expect("Failed to create storage directory.");
    create_dir_all(STORAGE_DIR)
        .await
        .expect("Failed to create storage directory.");

    // background cleanup loop
    let svc_clone = svc.clone();
    task::spawn(async move {
        loop {
            if let Err(e) = svc_clone.cleanup_expired_files_once().await {
                eprintln!("cleanup error: {}", e);
            }
            task::sleep(std::time::Duration::from_secs(60)).await;
        }
    });
    task::spawn(cleanup_expired_files());

    let mut app = create_app(no_ttl_permanent, STORAGE_DIR);
    let mut app = tide::new();
    app.at("/store").post(move |req| handle_store(req, no_ttl_permanent));
    app.at("/retrieve/:key").get(handle_retrieve);
    app.listen(format!("0.0.0.0:{}", PORT)).await?;

    println!("Server running at http://0.0.0.0:{}", PORT);
@@ -1,17 +0,0 @@
use sdk_storage::{StorageService, unix_to_system_time};
use tempfile::TempDir;

#[async_std::test]
async fn store_and_retrieve_hex_in_tempdir() {
    let td = TempDir::new().unwrap();
    let dir_path = td.path().to_string_lossy().to_string();
    let svc = StorageService::new(dir_path);

    let key = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
    let value = b"hello";
    let expires = Some(unix_to_system_time(60 + sdk_storage::system_time_to_unix(std::time::SystemTime::now())));

    svc.store_data(key, value, expires).await.unwrap();
    let got = svc.retrieve_data(key).await.unwrap();
    assert_eq!(got, value);
}