Cron restart services (bitcoind, mempool), service-login-verify, website-skeleton, docs
**Motivations :**
- Consigner l'état actuel du dépôt (cron, service-login-verify, website-skeleton, userwallet, docs).
- Centraliser les modifications en attente.

**Root causes :**
- N/A (commit groupé).

**Correctifs :**
- N/A.

**Évolutions :**
- Cron quotidien de relance des services : script local sans SSH, systemd (bitcoin-signet, bitcoin, APIs, dashboard, userwallet, website-skeleton) + Docker (mempool, bitcoin-signet-instance).
- Feature cron-restart-services-local : documentation et règle « scripts locaux / pas d'SSH ».
- service-login-verify : module de vérification du login (buildAllowedPubkeys, verifyLoginProof, nonceCache).
- website-skeleton : app iframe UserWallet, config, unité systemd.
- userwallet : collectSignatures, relay.
- docs : DOMAINS_AND_PORTS, README, WEBSITE_SKELETON ; features userwallet-contrat-login, timeouts-backoff, service-login-verify.

**Pages affectées :**
- data/restart-services-cron.sh, data/restart-services.log, data/sync-utxos.log
- features/cron-restart-services-local.md, features/service-login-verify.md, features/userwallet-contrat-login-reste-a-faire.md, features/userwallet-timeouts-backoff.md
- docs/DOMAINS_AND_PORTS.md, docs/README.md, docs/WEBSITE_SKELETON.md
- configure-nginx-proxy.sh
- service-login-verify/ (src, dist, node_modules)
- userwallet/src/utils/collectSignatures.ts, userwallet/src/utils/relay.ts
- website-skeleton/
This commit is contained in:
parent : f7f9442156
commit : 6bf37be44e
configure-nginx-proxy.sh
@ -223,7 +223,34 @@ server {
 }
 EOF
 
-# 6. Relay / api-relay (port 3019)
+# 6. Website skeleton (port 3024)
+echo "📝 Configuration de skeleton.certificator.4nkweb.com..."
+${SUDO_CMD} tee "${NGINX_SITES_AVAILABLE}/skeleton.certificator.4nkweb.com" > /dev/null << 'EOF'
+# Website skeleton (UserWallet iframe)
+server {
+    listen 80;
+    server_name skeleton.certificator.4nkweb.com;
+
+    access_log /var/log/nginx/skeleton.certificator.4nkweb.com.access.log;
+    error_log /var/log/nginx/skeleton.certificator.4nkweb.com.error.log;
+
+    location / {
+        proxy_pass http://192.168.1.105:3024;
+        proxy_http_version 1.1;
+        proxy_set_header Upgrade $http_upgrade;
+        proxy_set_header Connection 'upgrade';
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-Forwarded-Proto $scheme;
+        proxy_cache_bypass $http_upgrade;
+        proxy_read_timeout 300s;
+        proxy_connect_timeout 75s;
+    }
+}
+EOF
+
+# 7. Relay / api-relay (port 3019)
 echo "📝 Configuration de relay.certificator.4nkweb.com..."
 ${SUDO_CMD} tee "${NGINX_SITES_AVAILABLE}/relay.certificator.4nkweb.com" > /dev/null << 'EOF'
 # API Relay (UserWallet)

@ -260,6 +287,7 @@ ${SUDO_CMD} ln -sf "${NGINX_SITES_AVAILABLE}/faucet.certificator.4nkweb.com" "${
 ${SUDO_CMD} ln -sf "${NGINX_SITES_AVAILABLE}/anchorage.certificator.4nkweb.com" "${NGINX_SITES_ENABLED}/anchorage.certificator.4nkweb.com"
 ${SUDO_CMD} ln -sf "${NGINX_SITES_AVAILABLE}/watermark.certificator.4nkweb.com" "${NGINX_SITES_ENABLED}/watermark.certificator.4nkweb.com"
 ${SUDO_CMD} ln -sf "${NGINX_SITES_AVAILABLE}/userwallet.certificator.4nkweb.com" "${NGINX_SITES_ENABLED}/userwallet.certificator.4nkweb.com"
+${SUDO_CMD} ln -sf "${NGINX_SITES_AVAILABLE}/skeleton.certificator.4nkweb.com" "${NGINX_SITES_ENABLED}/skeleton.certificator.4nkweb.com"
 ${SUDO_CMD} ln -sf "${NGINX_SITES_AVAILABLE}/relay.certificator.4nkweb.com" "${NGINX_SITES_ENABLED}/relay.certificator.4nkweb.com"
 
 # Tester la configuration Nginx

@ -290,6 +318,7 @@ DOMAINS=(
 "anchorage.certificator.4nkweb.com"
 "watermark.certificator.4nkweb.com"
 "userwallet.certificator.4nkweb.com"
+"skeleton.certificator.4nkweb.com"
 "relay.certificator.4nkweb.com"
 )
 

@ -319,6 +348,7 @@ echo " - faucet.certificator.4nkweb.com -> http://192.168.1.105:3021"
 echo " - anchorage.certificator.4nkweb.com -> http://192.168.1.105:3010"
 echo " - watermark.certificator.4nkweb.com -> http://192.168.1.105:3022"
 echo " - userwallet.certificator.4nkweb.com -> http://192.168.1.105:3018"
+echo " - skeleton.certificator.4nkweb.com -> http://192.168.1.105:3024"
 echo " - relay.certificator.4nkweb.com -> http://192.168.1.105:3019"
 echo ""
 echo "⚠️ Note: Si les services tournent sur une autre machine,"
97  data/restart-services-cron.sh  (Executable file)
@ -0,0 +1,97 @@
#!/bin/bash
# Daily restart of project systemd services (local only).
# Run via cron. Requires passwordless sudo for systemctl restart.
# Log: data/restart-services.log
#
# Local only: no SSH, no remote commands. Manages systemd units and
# local Docker (mempool stack, bitcoind container) on the current machine.

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
LOG_FILE="$SCRIPT_DIR/restart-services.log"
MEMPOOL_COMPOSE="${PROJECT_DIR}/mempool/docker-compose.signet.yml"
BITCOIND_CONTAINER="bitcoin-signet-instance"

SERVICES=(
  bitcoin-signet
  bitcoin
  anchorage-api
  api-relay
  clamav-api
  faucet-api
  filigrane-api
  signet-dashboard
  userwallet
  website-skeleton
)

log() { echo "$(date -Iseconds) $*" | tee -a "$LOG_FILE"; }

cd "$PROJECT_DIR" || exit 1

log "=== Restart services cron (local only, no SSH) ==="

# 1. Verify enabled (warn if not)
for u in "${SERVICES[@]}"; do
  if ! systemctl is-enabled "$u" &>/dev/null; then
    log "WARN: $u is not enabled (will not start at boot)"
  fi
done

# 2. Restart active units; track attempted
restarted=()
for u in "${SERVICES[@]}"; do
  if ! systemctl is-active --quiet "$u" 2>/dev/null; then
    log "SKIP: $u not active (not installed or not running on this machine)"
    continue
  fi
  log "Restarting $u..."
  if sudo systemctl restart "$u"; then
    log " $u OK"
    restarted+=("$u")
  else
    log " $u FAILED"
    restarted+=("$u")
  fi
done

# 3. Verify active for restarted units
for u in "${restarted[@]}"; do
  if ! systemctl is-active --quiet "$u" 2>/dev/null; then
    log "WARN: $u not active after restart"
  fi
done

# 4. Docker: mempool stack (local only)
if [ -f "$MEMPOOL_COMPOSE" ] && command -v docker &>/dev/null; then
  log "Restarting mempool (docker)..."
  mem_ok=1
  (cd "${PROJECT_DIR}/mempool" && docker compose -f docker-compose.signet.yml restart &>/dev/null) && mem_ok=0
  [ "$mem_ok" -ne 0 ] && (cd "${PROJECT_DIR}/mempool" && docker-compose -f docker-compose.signet.yml restart &>/dev/null) && mem_ok=0
  if [ "$mem_ok" -eq 0 ]; then
    log " mempool OK"
  else
    log " mempool FAILED"
  fi
else
  if [ ! -f "$MEMPOOL_COMPOSE" ]; then
    log "SKIP: mempool compose not found (not deployed on this machine)"
  else
    log "SKIP: mempool requires docker (not available)"
  fi
fi

# 5. Docker: bitcoind container (local only)
if docker ps -a -q -f "name=^${BITCOIND_CONTAINER}$" 2>/dev/null | grep -q .; then
  log "Restarting $BITCOIND_CONTAINER..."
  if docker restart "$BITCOIND_CONTAINER" &>/dev/null; then
    log " $BITCOIND_CONTAINER OK"
  else
    log " $BITCOIND_CONTAINER FAILED"
  fi
else
  log "SKIP: $BITCOIND_CONTAINER container not found (bitcoind may run as systemd)"
fi

log "=== Done ==="
tail -n 100 "$LOG_FILE" > "$LOG_FILE.tmp" && mv "$LOG_FILE.tmp" "$LOG_FILE"
100  data/restart-services.log  (Normal file)
@ -0,0 +1,100 @@
config Manage Docker configs
container Manage containers
context Manage contexts
image Manage images
manifest Manage Docker image manifests and manifest lists
network Manage networks
node Manage Swarm nodes
plugin Manage plugins
secret Manage Docker secrets
service Manage services
stack Manage Docker stacks
swarm Manage Swarm
system Manage Docker
trust Manage trust on Docker images
volume Manage volumes

Commands:
attach Attach local standard input, output, and error streams to a running container
build Build an image from a Dockerfile
commit Create a new image from a container's changes
cp Copy files/folders between a container and the local filesystem
create Create a new container
diff Inspect changes to files or directories on a container's filesystem
events Get real time events from the server
exec Run a command in a running container
export Export a container's filesystem as a tar archive
history Show the history of an image
images List images
import Import the contents from a tarball to create a filesystem image
info Display system-wide information
inspect Return low-level information on Docker objects
kill Kill one or more running containers
load Load an image from a tar archive or STDIN
login Log in to a Docker registry
logout Log out from a Docker registry
logs Fetch the logs of a container
pause Pause all processes within one or more containers
port List port mappings or a specific mapping for the container
ps List containers
pull Pull an image or a repository from a registry
push Push an image or a repository to a registry
rename Rename a container
restart Restart one or more containers
rm Remove one or more containers
rmi Remove one or more images
run Run a command in a new container
save Save one or more images to a tar archive (streamed to STDOUT by default)
search Search the Docker Hub for images
start Start one or more stopped containers
stats Display a live stream of container(s) resource usage statistics
stop Stop one or more running containers
tag Create a tag TARGET_IMAGE that refers to SOURCE_IMAGE
top Display the running processes of a container
unpause Unpause all processes within one or more containers
update Update configuration of one or more containers
version Show the Docker version information
wait Block until one or more containers stop, then print their exit codes

Run 'docker COMMAND --help' for more information on a command.

To get more help with docker, check out our guides at https://docs.docker.com/go/guides/

Restarting mempool_api_1 ...
Restarting mempool_electrs_1 ...
Restarting mempool_web_1 ...
Restarting mempool_db_1 ...
Restarting mempool_electrs_1 ... done
Restarting mempool_db_1 ... done
Restarting mempool_api_1 ... done
Restarting mempool_web_1 ... done
2026-01-28T00:43:59+01:00 mempool OK
2026-01-28T00:43:59+01:00 Restarting bitcoin-signet-instance...
2026-01-28T00:44:06+01:00 bitcoin-signet-instance OK
2026-01-28T00:44:06+01:00 === Done ===
2026-01-28T00:45:24+01:00 === Restart services cron (local only, no SSH) ===
2026-01-28T00:45:24+01:00 WARN: bitcoin-signet is not enabled (will not start at boot)
2026-01-28T00:45:24+01:00 WARN: bitcoin is not enabled (will not start at boot)
2026-01-28T00:45:24+01:00 SKIP: bitcoin-signet not active (not installed or not running on this machine)
2026-01-28T00:45:24+01:00 SKIP: bitcoin not active (not installed or not running on this machine)
2026-01-28T00:45:24+01:00 Restarting anchorage-api...
2026-01-28T00:45:24+01:00 anchorage-api OK
2026-01-28T00:45:24+01:00 Restarting api-relay...
2026-01-28T00:45:24+01:00 api-relay OK
2026-01-28T00:45:24+01:00 Restarting clamav-api...
2026-01-28T00:45:24+01:00 clamav-api OK
2026-01-28T00:45:24+01:00 Restarting faucet-api...
2026-01-28T00:45:24+01:00 faucet-api OK
2026-01-28T00:45:24+01:00 Restarting filigrane-api...
2026-01-28T00:45:24+01:00 filigrane-api OK
2026-01-28T00:45:24+01:00 Restarting signet-dashboard...
2026-01-28T00:45:25+01:00 signet-dashboard OK
2026-01-28T00:45:25+01:00 Restarting userwallet...
2026-01-28T00:45:25+01:00 userwallet OK
2026-01-28T00:45:25+01:00 Restarting website-skeleton...
2026-01-28T00:45:25+01:00 website-skeleton OK
2026-01-28T00:45:25+01:00 Restarting mempool (docker)...
2026-01-28T00:46:26+01:00 mempool OK
2026-01-28T00:46:26+01:00 Restarting bitcoin-signet-instance...
2026-01-28T00:46:33+01:00 bitcoin-signet-instance OK
2026-01-28T00:46:33+01:00 === Done ===
data/sync-utxos.log
@ -1,19 +1,3 @@
-🔍 Démarrage de la synchronisation des UTXOs dépensés...
-
-📊 UTXOs à vérifier: 67955
-📡 Récupération des UTXOs depuis Bitcoin...
-📊 UTXOs disponibles dans Bitcoin: 174934
-💾 Création de la table temporaire...
-💾 Insertion des UTXOs disponibles par batch...
-⏳ Traitement: 10000/174934 UTXOs insérés...
-⏳ Traitement: 20000/174934 UTXOs insérés...
-⏳ Traitement: 30000/174934 UTXOs insérés...
-⏳ Traitement: 40000/174934 UTXOs insérés...
-⏳ Traitement: 50000/174934 UTXOs insérés...
-⏳ Traitement: 60000/174934 UTXOs insérés...
-⏳ Traitement: 70000/174934 UTXOs insérés...
-⏳ Traitement: 80000/174934 UTXOs insérés...
-⏳ Traitement: 90000/174934 UTXOs insérés...
 ⏳ Traitement: 100000/174934 UTXOs insérés...
 ⏳ Traitement: 110000/174934 UTXOs insérés...
 ⏳ Traitement: 120000/174934 UTXOs insérés...

@ -74,3 +58,43 @@
 - Non dépensés: 66104
 
 ✅ Synchronisation terminée
+🔍 Démarrage de la synchronisation des UTXOs dépensés...
+
+📊 UTXOs à vérifier: 64639
+📡 Récupération des UTXOs depuis Bitcoin...
+📊 UTXOs disponibles dans Bitcoin: 201481
+💾 Création de la table temporaire...
+💾 Insertion des UTXOs disponibles par batch...
+⏳ Traitement: 10000/201481 UTXOs insérés...
+⏳ Traitement: 20000/201481 UTXOs insérés...
+⏳ Traitement: 30000/201481 UTXOs insérés...
+⏳ Traitement: 40000/201481 UTXOs insérés...
+⏳ Traitement: 50000/201481 UTXOs insérés...
+⏳ Traitement: 60000/201481 UTXOs insérés...
+⏳ Traitement: 70000/201481 UTXOs insérés...
+⏳ Traitement: 80000/201481 UTXOs insérés...
+⏳ Traitement: 90000/201481 UTXOs insérés...
+⏳ Traitement: 100000/201481 UTXOs insérés...
+⏳ Traitement: 110000/201481 UTXOs insérés...
+⏳ Traitement: 120000/201481 UTXOs insérés...
+⏳ Traitement: 130000/201481 UTXOs insérés...
+⏳ Traitement: 140000/201481 UTXOs insérés...
+⏳ Traitement: 150000/201481 UTXOs insérés...
+⏳ Traitement: 160000/201481 UTXOs insérés...
+⏳ Traitement: 170000/201481 UTXOs insérés...
+⏳ Traitement: 180000/201481 UTXOs insérés...
+⏳ Traitement: 190000/201481 UTXOs insérés...
+⏳ Traitement: 200000/201481 UTXOs insérés...
+💾 Mise à jour des UTXOs dépensés...
+
+📊 Résumé:
+- UTXOs vérifiés: 64639
+- UTXOs toujours disponibles: 64639
+- UTXOs dépensés détectés: 0
+
+📈 Statistiques finales:
+- Total UTXOs: 68398
+- Dépensés: 3759
+- Non dépensés: 64639
+
+✅ Synchronisation terminée
docs/DOMAINS_AND_PORTS.md
@ -20,6 +20,7 @@ Ce document liste tous les domaines, ports et services de l'infrastructure Certi
 | `dashboard.certificator.4nkweb.com` | Dashboard | 3020 | Interface web de supervision |
 | `faucet.certificator.4nkweb.com` | API Faucet | 3021 | API REST pour distribuer des sats |
 | `mempool.4nkweb.com` | Mempool | 3015 | Explorateur de blockchain Bitcoin |
+| `skeleton.certificator.4nkweb.com` | Website skeleton | 3024 | Site squelette iframe UserWallet |
 
 ### Configuration Nginx
 

@ -41,8 +42,9 @@ Tous les domaines sont routés via le proxy Nginx sur le serveur `192.168.1.100`
 | 3021 | API Faucet | `api-faucet` | `api-faucet/faucet-api.service` |
 | 3022 | API Filigrane | `api-filigrane` | `api-filigrane/filigrane-api.service` |
 | 3023 | API ClamAV | `api-clamav` | `api-clamav/clamav-api.service` |
+| 3024 | Website skeleton | `website-skeleton` | `website-skeleton/website-skeleton.service` |
 
-**Important :** Tous ces ports sont fixes et définis dans les fichiers de service systemd. Ils ne peuvent pas être modifiés sans modifier les services.
+**Important :** Les ports 3010, 3020, 3021, 3022, 3023, 3024 sont fixes et définis dans les fichiers de service systemd. Ils ne peuvent pas être modifiés sans modifier les services.
 
 ### Ports Bitcoin Core
 

@ -74,6 +76,7 @@ sudo systemctl status signet-dashboard
 sudo systemctl status faucet-api
 sudo systemctl status filigrane-api
 sudo systemctl status clamav-api
+sudo systemctl status website-skeleton
 ```
 
 ### Commandes de Gestion

@ -110,6 +113,7 @@ Internet
 │ ├─→ watermark.certificator.4nkweb.com → 192.168.1.103:3022 (API Filigrane)
 │ ├─→ antivir.certificator.4nkweb.com → 192.168.1.103:3023 (API ClamAV)
 │ ├─→ dashboard.certificator.4nkweb.com → 192.168.1.103:3020 (Dashboard)
+│ ├─→ skeleton.certificator.4nkweb.com → 192.168.1.105:3024 (Website skeleton)
 │ ├─→ faucet.certificator.4nkweb.com → 192.168.1.103:3021 (API Faucet)
 │ └─→ mempool.4nkweb.com → 192.168.1.104:3015 (Mempool)
 ```

@ -123,7 +127,7 @@ Internet
 sudo ss -tlnp | grep :3010
 
 # Vérifier tous les ports des APIs
-sudo ss -tlnp | grep -E ':(3010|3020|3021|3022|3023)'
+sudo ss -tlnp | grep -E ':(3010|3020|3021|3022|3023|3024)'
 ```
 
 ### Tester la connectivité
docs/README.md
@ -60,6 +60,11 @@ Ce dossier contient toute la documentation nécessaire pour la maintenance et l'
 - API endpoints utilisés
 - Maintenance et dépannage
 
+- **[WEBSITE_SKELETON.md](./WEBSITE_SKELETON.md)** : Squelette de site intégrant UserWallet en iframe
+  - Objectif et structure du projet `website-skeleton/`
+  - Configuration (origine UserWallet, validateurs)
+  - Utilisation, messages postMessage, références
+
 ## Démarrage Rapide
 
 ### Installation
111  docs/WEBSITE_SKELETON.md  (Normal file)
@ -0,0 +1,111 @@
# website-skeleton

Squelette d’un site web qui intègre UserWallet en iframe : écoute des messages `postMessage` (auth-request, login-proof, error), vérification des preuves de login via `service-login-verify`, et affichage du statut (accepté / refusé).

## Objectif

Fournir un projet minimal, prêt à l’emploi, pour :

- embarquer UserWallet dans une page via iframe ;
- communiquer avec l’iframe via `postMessage` (Channel Messages) ;
- vérifier les preuves de login côté parent avec le package `service-login-verify`.

Utile comme base pour un service (certificator, zapwall, etc.) qui consomme UserWallet et doit valider les sessions.

## Emplacement et structure

- **Dossier** : `website-skeleton/` à la racine du dépôt (à côté de `userwallet/`, `service-login-verify/`, `api-relay/`).
- **Stack** : Vite, TypeScript, dépendance `service-login-verify` (`file:../service-login-verify`).

### Fichiers principaux

| Fichier | Rôle |
|---------|------|
| `index.html` | Page avec iframe UserWallet, zone de statut, bouton « Demander auth » |
| `src/main.ts` | Chargement iframe, écoute `message`, envoi `auth-request`, appel `verifyLoginProof`, mise à jour du statut |
| `src/config.ts` | `USERWALLET_ORIGIN`, `DEFAULT_VALIDATEURS` (placeholder) |
| `package.json` | Scripts `dev` / `build` / `preview`, dépendance `service-login-verify` |
| `start.sh` | Build + `vite preview` sur le port 3024 (production) |
| `vite.config.ts` | `preview.allowedHosts: ['skeleton.certificator.4nkweb.com']` pour éviter 403 via proxy |
| `website-skeleton.service` | Unité systemd pour servir le site |
| `install-website-skeleton.sh` | Installation locale (build, systemd) |
| `README.md` | Instructions d’installation, configuration et utilisation |

## Prérequis

- **service-login-verify** : exécuter `npm run build` dans `../service-login-verify` avant d’installer ou builder le skeleton.
- **UserWallet** : à servir sur l’URL configurée (`USERWALLET_ORIGIN`) pour que l’iframe fonctionne.

## Installation et commandes

```bash
cd website-skeleton
npm install
npm run build
```

- **Développement** : `npm run dev` → serveur sur `http://localhost:3024`.
- **Preview** : `npm run preview` après build.

## Configuration

### Origine UserWallet

- **Fichier** : `src/config.ts`.
- **Variable** : `USERWALLET_ORIGIN` = URL de l’iframe et `targetOrigin` pour `postMessage`.
- **Défauts** :
  - En dev : `http://localhost:3018` (UserWallet en dev sur ce port).
  - En prod : `https://userwallet.certificator.4nkweb.com`.
- **Override** : variable d’environnement `VITE_USERWALLET_ORIGIN` (ex. `VITE_USERWALLET_ORIGIN=http://localhost:3018 npm run dev`) ; voir l’esquisse ci-dessous.
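
Esquisse indicative de `src/config.ts` correspondant aux points ci-dessus. Le fichier réel n’est pas reproduit ici : seuls `USERWALLET_ORIGIN`, `DEFAULT_VALIDATEURS` et `VITE_USERWALLET_ORIGIN` sont confirmés par cette page, le reste est illustratif.

```ts
// Esquisse hypothétique de src/config.ts : résout l'origine UserWallet depuis
// VITE_USERWALLET_ORIGIN, avec repli sur les défauts dev / prod décrits ci-dessus.
const fallbackOrigin = import.meta.env.DEV
  ? 'http://localhost:3018'
  : 'https://userwallet.certificator.4nkweb.com';

export const USERWALLET_ORIGIN: string =
  (import.meta.env.VITE_USERWALLET_ORIGIN as string | undefined) ?? fallbackOrigin;

// Placeholder : à remplacer par les validateurs réels de l'action login
// (voir « Validateurs » ci-dessous).
export const DEFAULT_VALIDATEURS = {
  membres_du_role: [] as Array<{
    membre_uuid: string;
    signatures_obligatoires: Array<{ membre_uuid: string; cle_publique?: string }>;
  }>,
};
```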

### Validateurs

- **Fichier** : `src/config.ts`, objet `DEFAULT_VALIDATEURS`.
- **Rôle** : structure des validateurs de l’action login (placeholder).
- **Usage réel** : remplacer par les validateurs issus du contrat du service (action login), ou les charger depuis une API / un channel.

## Utilisation

1. Lancer UserWallet (dev ou déployé) sur l’URL configurée.
2. Lancer le skeleton (`npm run dev` ou servir `dist/`).
3. Ouvrir la page du skeleton : l’iframe affiche UserWallet.
4. **Demander auth** : cliquer « Demander auth (auth-request) » → envoi de `auth-request` à l’iframe (esquisse d’envoi ci-dessous).
5. **Login** : effectuer le flux de login dans l’iframe ; à la fin, UserWallet envoie `login-proof` au parent. Le skeleton appelle `verifyLoginProof`, puis affiche « Login accepté » ou « Login refusé : … ».
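
Esquisse indicative de l’envoi d’`auth-request` à l’iframe pour l’étape 4 ; les identifiants d’éléments HTML sont hypothétiques, seuls le type de message et `USERWALLET_ORIGIN` viennent de ce document.

```ts
// Esquisse : envoi de auth-request à l'iframe UserWallet (ids d'éléments hypothétiques).
import { USERWALLET_ORIGIN } from './config';

const iframe = document.querySelector<HTMLIFrameElement>('#userwallet-iframe');
const bouton = document.querySelector<HTMLButtonElement>('#demander-auth');

bouton?.addEventListener('click', () => {
  // targetOrigin restreint à l'origine UserWallet configurée.
  iframe?.contentWindow?.postMessage({ type: 'auth-request' }, USERWALLET_ORIGIN);
});
```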

## Messages postMessage

| Type | Sens | Rôle du skeleton |
|------|------|-------------------|
| `auth-request` | Parent → iframe | Envoyé au clic sur « Demander auth ». |
| `auth-response` | Iframe → parent | Affichage « Auth reçu ». |
| `login-proof` | Iframe → parent | Vérification via `verifyLoginProof`, mise à jour du statut. |
| `error` | Iframe → parent | Affichage du message d’erreur. |
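
Esquisse indicative du traitement de ces messages côté parent. Les formes exactes des payloads et l’élément `#status` sont hypothétiques ; `verifyLoginProof`, `NonceCache` et `buildAllowedPubkeysFromValidateurs` viennent de `service-login-verify`.

```ts
// Esquisse : réception des messages de l'iframe et vérification du login-proof.
import { verifyLoginProof, NonceCache, buildAllowedPubkeysFromValidateurs } from 'service-login-verify';
import { USERWALLET_ORIGIN, DEFAULT_VALIDATEURS } from './config';

const nonceCache = new NonceCache();
const allowedPubkeys = buildAllowedPubkeysFromValidateurs(DEFAULT_VALIDATEURS);
const setStatus = (texte: string) => {
  const zone = document.querySelector('#status'); // élément de statut hypothétique
  if (zone) zone.textContent = texte;
};

window.addEventListener('message', (event) => {
  if (event.origin !== USERWALLET_ORIGIN) return; // ignorer les autres origines
  const msg = event.data;
  if (msg?.type === 'auth-response') {
    setStatus('Auth reçu');
  } else if (msg?.type === 'error') {
    setStatus(`Erreur : ${String(msg.payload ?? '')}`); // forme du payload supposée
  } else if (msg?.type === 'login-proof') {
    const result = verifyLoginProof(msg.payload, { allowedPubkeys, nonceCache });
    setStatus(result.accept ? 'Login accepté' : `Login refusé : ${result.reason ?? ''}`);
  }
});
```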

## Déploiement

- **Port** : 3024 (aucun conflit avec le Dashboard, 3020).
- **Backend** : machine 192.168.1.168 (site + service systemd).
- **Proxy** : 192.168.1.100 ; Nginx route `skeleton.certificator.4nkweb.com` → `http://192.168.1.168:3024`.
- **Certificat** : Let's Encrypt sur le proxy pour `skeleton.certificator.4nkweb.com` (via `configure-nginx-proxy.sh` / `update-proxy-nginx.sh`).

### Installation

1. **Sur la machine backend (192.168.1.105)** :
   ```bash
   ./website-skeleton/install-website-skeleton.sh
   ```
   Construit le front, installe et démarre le service systemd (`website-skeleton.service`).

2. **Proxy + certificat** : à partir de la racine du dépôt,
   ```bash
   ./update-proxy-nginx.sh
   ```
   Configure Nginx sur le proxy (.100), génère le certificat pour `skeleton.certificator.4nkweb.com`, recharge Nginx.

Voir `docs/DOMAINS_AND_PORTS.md` pour les domaines et ports.

## Références

- **Package vérification** : `service-login-verify/`, `features/service-login-verify.md`
- **Iframe et login** : `userwallet/`, `features/userwallet-contrat-login-reste-a-faire.md` (§ 3.7)
- **Relais** : `api-relay/`, `docs/DOMAINS_AND_PORTS.md`
58  features/cron-restart-services-local.md  (Normal file)
@ -0,0 +1,58 @@
# Cron quotidien de relance des services (local uniquement)

**Auteur** : Équipe 4NK
**Date** : 2026-01-28

## Objectif

Relancer quotidiennement tous les services systemd du projet sur la machine locale, avec vérification des unités activées au démarrage et actives après redémarrage.

## Règle : scripts locaux, pas d’SSH

Tous les scripts du projet sont **locaux** et **ne doivent pas appeler de commandes SSH**. La relance s’effectue via `systemctl` (unités systemd) et, le cas échéant, `docker` / `docker-compose` (Mempool, bitcoind conteneur) sur la machine où le cron s’exécute. Aucune connexion à une autre machine.

## Solution implémentée

### Script `data/restart-services-cron.sh`

- **Local uniquement** : aucun `ssh`, aucune commande distante.
- Gère les unités systemd sur la machine courante : `systemctl is-enabled`, `systemctl is-active`, `sudo systemctl restart`.
- **Systemd** : `bitcoin-signet`, `bitcoin`, `anchorage-api`, `api-relay`, `clamav-api`, `faucet-api`, `filigrane-api`, `signet-dashboard`, `userwallet`, `website-skeleton`.
- **Docker (local)** : stack Mempool (`docker-compose.signet.yml`), conteneur `bitcoin-signet-instance`. Utilise `docker compose` ou `docker-compose` selon disponibilité.
- Log : `data/restart-services.log` (timestamps, rotation sur 100 dernières lignes).

### Comportement

1. **Vérification `is-enabled`** : pour chaque service, si non activé au démarrage → log `WARN`.
2. **Redémarrage** : seules les unités actives sont redémarrées ; les autres sont ignorées avec log `SKIP` (non installées ou non démarrées sur cette machine).
3. **Vérification `is-active` après redémarrage** : pour chaque unité redémarrée, si inactive → log `WARN`.
4. **Docker Mempool** : si `mempool/docker-compose.signet.yml` existe et `docker` est disponible, redémarrage de la stack via `docker compose` ou `docker-compose`. Sinon `SKIP`.
5. **Docker bitcoind** : si le conteneur `bitcoin-signet-instance` existe, `docker restart`. Sinon `SKIP` (bitcoind peut être systemd).

### Prérequis

- Exécution en cron (utilisateur avec `sudo` sans mot de passe pour `systemctl restart`, ou cron root) ; exemple sudoers ci-dessous.
- Unités systemd installées et activées sur la machine (ou `SKIP` si absentes).
- Pour Mempool / bitcoind Docker : `docker` et `docker-compose` ou `docker compose` disponibles, et droits pour redémarrer les conteneurs.
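
Exemple indicatif de règle sudoers pour ce prérequis (utilisateur `ncantu` et chemin `/usr/bin/systemctl` hypothétiques, à adapter et à éditer via `visudo`) :

```text
# /etc/sudoers.d/restart-services — exemple hypothétique, à valider avec visudo
ncantu ALL=(root) NOPASSWD: /usr/bin/systemctl restart *
```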

## Modifications

**Fichiers créés :**

- `data/restart-services-cron.sh` : script de relance (local, pas d’SSH).
- `features/cron-restart-services-local.md` : cette documentation.

## Modalités de déploiement

1. Rendre le script exécutable : `chmod +x data/restart-services-cron.sh`
2. Ajouter une entrée cron quotidienne (ex. 4h00) :
   ```text
   0 4 * * * /home/ncantu/Bureau/code/bitcoin/data/restart-services-cron.sh
   ```
3. Tester manuellement : `./data/restart-services-cron.sh`

## Modalités d’analyse

- Consulter `data/restart-services.log` pour les relances et les avertissements.
- Vérifier les unités : `systemctl is-enabled <unit>`, `systemctl is-active <unit>`.
- En cas de `SKIP` : l’unité n’est pas installée ou pas active sur cette machine (déploiement partiel possible).
86  features/service-login-verify.md  (Normal file)
@ -0,0 +1,86 @@
# Côté service – Acceptation de session et politique anti-rejeu (3.7)

**Author:** Équipe 4NK
**Date:** 2026-01-26

## Explication

### Rôle du « service »

Le **service** est l’application **parente** qui embarque UserWallet en iframe. C’est elle qui **consomme** le login : elle reçoit la preuve de login (`login-proof`) via `postMessage`, doit l’**accepter ou refuser** pour ouvrir une session, **sans serveur central**.

- UserWallet (iframe) envoie `{ type: 'login-proof', payload: LoginProof }` au parent.
- Le parent écoute `window.addEventListener('message', ...)`, filtre `event.data?.type === 'login-proof'`, puis **vérifie** la preuve avant d’accepter la session.

### Acceptation de session

Le service doit **vérifier la preuve** en s’appuyant uniquement sur **contrats + signatures** :

1. **Graphe contractuel** : le service dispose du contrat (et contrats fils) du service, fourni par channel message ou contrat par défaut. Il peut en déduire l’action login, les membres, les validateurs.
2. **Clés autorisées** : à partir des validateurs de l’action login (`membres_du_role[].signatures_obligatoires[].cle_publique`), construire l’ensemble des `cle_publique` autorisées à signer. (Résolution `pair_uuid` → clé non utilisée côté service si seules les `cle_publique` explicites sont présentes.)
3. **Vérification crypto** : pour chaque signature de la preuve, vérifier qu’elle signe bien `hash-nonce` (hash du challenge + nonce) avec la `cle_publique` indiquée (secp256k1).
4. **Conformité validateurs** : toutes les signatures doivent provenir de clés autorisées ; au moins une signature valide est requise.
5. **Anti-rejeu** : nonce unique (cache `nonce_vus`), fenêtre temporelle sur le `timestamp` du challenge.

Aucune autorité centrale : la décision d’accepter ou refuser la session repose uniquement sur ces vérifications locales.

### Politique anti-rejeu

- **Nonce unique** : chaque preuve utilise un nonce. Le service maintient un cache des nonces déjà vus (ex. `NonceCache`). Si un nonce est rejoué dans la fenêtre TTL, la preuve est refusée (`X_NONCE_REUSED`).
- **Fenêtre timestamp** : le `timestamp` du challenge doit être dans une fenêtre acceptable (ex. ±5 min). Sinon refus (`X_TIMESTAMP_OUT_OF_WINDOW`).
- **Cache `nonce_vus`** : persistance optionnelle (mémoire, IndexedDB, Redis, etc.). Le package fournit un cache en mémoire avec TTL configurable ; le service peut le remplacer par un stockage adapté.

### Implémentation

Le package **`service-login-verify`** fournit :

- `verifyLoginProof(proof, context)` : vérification complète (crypto, clés autorisées, timestamp, nonce).
- `NonceCache` : cache anti-rejeu en mémoire (TTL configurable).
- `buildAllowedPubkeysFromValidateurs(validateurs)` : extraction des `cle_publique` depuis les validateurs de l’action login.

Le service (parent) :

1. Reçoit contrat + contrats fils (channel ou défaut).
2. Extrait l’action login et ses validateurs, construit `allowedPubkeys` via `buildAllowedPubkeysFromValidateurs`.
3. Maintient une `NonceCache` (ou équivalent).
4. À réception de `login-proof`, appelle `verifyLoginProof(proof, { allowedPubkeys, nonceCache, timestampWindowMs })`.
5. Si `accept: true` → ouvrir la session ; sinon → refuser et optionnellement informer l’utilisateur (ex. via `postMessage` vers l’iframe).

### Limites

- Validateurs **sans** `cle_publique` (uniquement `pair_uuid`) : le service ne peut pas vérifier les clés autorisées. Il faut au moins une `cle_publique` dans `signatures_obligatoires` pour une vérification stricte côté service.
- Le service ne déchiffre pas le challenge ; il vérifie uniquement les signatures et l’anti-rejeu.

## Utilisation

```ts
import {
  verifyLoginProof,
  NonceCache,
  buildAllowedPubkeysFromValidateurs,
} from 'service-login-verify';

const nonceCache = new NonceCache(3600000);
const allowedPubkeys = buildAllowedPubkeysFromValidateurs(actionLogin.validateurs_action);

window.addEventListener('message', (event) => {
  if (event.data?.type !== 'login-proof') return;
  const result = verifyLoginProof(event.data.payload, {
    allowedPubkeys,
    nonceCache,
    timestampWindowMs: 300000,
  });
  if (result.accept) {
    // ouvrir session
  } else {
    // refuser (result.reason)
  }
});
```

## Références

- `features/userwallet-contrat-login-reste-a-faire.md` (§ 3.7)
- `userwallet/docs/specs.md` (graphe, validateurs, anti-rejeu)
- `service-login-verify/` : implémentation (package à la racine du dépôt).
- `website-skeleton/` : squelette de site qui intègre l’iframe et vérifie les preuves.
features/userwallet-contrat-login-reste-a-faire.md
@ -33,8 +33,7 @@ Référence : `userwallet/docs/specs.md` (modèle, objets, machine à états, ca
 
 ### 3.1 Machine à états formelle
 
-- **Fait** : `loginStateMachine` (états S_LOGIN_*, événements E_*), `useLoginStateMachine`, dispatch dans LoginScreen, Retour → E_BACK ; G_PAIRING_SATISFIED, E_ADD_PAIR / E_SYNC_NOW. **Timeouts** : `RELAY_FETCH_TIMEOUT_MS` sur tous les fetch relay (GET/POST). Pas de backoff. Voir `features/userwallet-login-state-machine.md`, `features/userwallet-timeouts-backoff.md`.
-- **À prévoir** : timeouts sur collecte signatures (fetch relay) si distinct.
+- **Fait** : `loginStateMachine` (états S_LOGIN_*, événements E_*), `useLoginStateMachine`, dispatch dans LoginScreen, Retour → E_BACK ; G_PAIRING_SATISFIED, E_ADD_PAIR / E_SYNC_NOW. **Timeouts** : `RELAY_FETCH_TIMEOUT_MS` sur tous les fetch relay (GET/POST). **Collecte** : timeout distinct `COLLECT_FETCH_TIMEOUT_MS` pour chaque GET /signatures pendant la boucle de collecte (`CollectLoopOpts.fetchTimeoutMs`, `fetchSignaturesForHash(..., fetchTimeoutMs)`). Pas de backoff. Voir `features/userwallet-login-state-machine.md`, `features/userwallet-timeouts-backoff.md`.
 
 ### 3.2 Écrans et UX alignés specs
 

@ -75,8 +74,8 @@ Référence : `userwallet/docs/specs.md` (modèle, objets, machine à états, ca
 
 ### 3.7 Côté service (hors userwallet)
 
-- **Acceptation de session** : le service (application qui consomme le login) doit vérifier la preuve en ne s’appuyant que sur contrats + signatures (sans serveur central). Rejouer la construction du graphe depuis les UUID/messages disponibles, vérifier hash, signatures, anti-rejeu.
+- **Acceptation de session** : le service (application qui consomme le login) doit vérifier la preuve en ne s’appuyant que sur contrats + signatures (sans serveur central). Rejouer la construction du graphe depuis les UUID/messages disponibles, vérifier hash, signatures, anti-rejeu. **Implémenté** : package `service-login-verify` (`verifyLoginProof`, `buildAllowedPubkeysFromValidateurs`, `NonceCache`). Voir `features/service-login-verify.md`.
-- **Politique anti-rejeu** : TTL nonce, fenêtre timestamp, cache nonce_vus.
+- **Politique anti-rejeu** : TTL nonce, fenêtre timestamp, cache nonce_vus. **Implémenté** : `NonceCache` (mémoire, TTL configurable), `verifyTimestamp` dans `verifyLoginProof`, `timestampWindowMs` optionnel.
 
 ---
 

@ -92,5 +91,6 @@ Référence : `userwallet/docs/specs.md` (modèle, objets, machine à états, ca
 | Version contrats | Fait (supported check, affichage) | — |
 | Bloom API + usage sync | Fait (`getBloom`, `fetchAndLoadBloom`, skip key fetch) | — |
 | Merkle (scan) | Optionnel, non implémenté | Basse |
+| Côté service (acceptation session, anti-rejeu) | Fait (`service-login-verify`) | — |
 
 Les éléments de la section « Déjà en place » restent le socle ; les développements ci‑dessus les complètent pour coller au principe « contrat + login » décrit dans `specs.md`.
features/userwallet-timeouts-backoff.md
@ -10,10 +10,12 @@ Timeouts sur les appels relay (X_RELAY_TIMEOUT). Pas de backoff.
 ## Impacts
 
 - **Relay** : tous les fetch (GET /messages, /signatures, /keys, /bloom ; POST /messages, /signatures, /keys) utilisent `AbortSignal.timeout(RELAY_FETCH_TIMEOUT_MS)` (15 s). En cas de dépassement, l’appel échoue (AbortError).
+- **Collecte signatures** : chaque GET /signatures pendant la boucle de collecte utilise un timeout **distinct** `COLLECT_FETCH_TIMEOUT_MS` (15 s). Configurable via `CollectLoopOpts.fetchTimeoutMs`. En échec, le relais est ignoré pour ce poll, log `fetchSignaturesForHash relay failure`.
 
 ## Modifications
 
-- **`utils/relay.ts`** : `RELAY_FETCH_TIMEOUT_MS` ; `signal: AbortSignal.timeout(...)` sur chaque `fetch`.
+- **`utils/relay.ts`** : `RELAY_FETCH_TIMEOUT_MS` ; `getSignatures(relay, hash, timeoutMs?)` avec `timeoutMs` optionnel ; `signal: AbortSignal.timeout(...)` sur chaque `fetch`.
+- **`utils/collectSignatures.ts`** : `COLLECT_FETCH_TIMEOUT_MS` ; `fetchSignaturesForHash(..., fetchTimeoutMs?)` ; `CollectLoopOpts.fetchTimeoutMs` ; `runCollectLoop` passe le timeout aux fetch ; log `console.error` en cas d’échec fetch par relais.
 - **`services/syncService.ts`** : pas de retry ; en échec fetch, le relais est marqué `ok: false`.
 
 ## Modalités de déploiement

@ -23,6 +25,7 @@ Déploiement classique du front userwallet.
 ## Modalités d’analyse
 
 - Relais lent ou coupé : timeout après 15 s ; `relayStatus` indique `ok: false` pour ce relais. Log « Error syncing from … ».
+- Collecte : un relais en timeout sur GET /signatures → log « fetchSignaturesForHash relay failure » ; le poll continue avec les autres relais. Boucle globale `COLLECT_TIMEOUT_MS` (5 min).
 
 ## Références
 
30  service-login-verify/README.md  (Normal file)
@ -0,0 +1,30 @@
# service-login-verify

Verify login proof and anti-replay policy for services that embed UserWallet in an iframe.

## Purpose

The **service** is the parent application that embeds UserWallet. It receives `login-proof` via `postMessage` and must verify the proof before accepting a session, **without a central server**. This package provides:

- `verifyLoginProof(proof, context)` – full verification (crypto, allowed pubkeys, timestamp window, nonce anti-replay)
- `NonceCache` – in-memory anti-replay cache (configurable TTL)
- `buildAllowedPubkeysFromValidateurs(validateurs)` – build allowed pubkeys from the login action validators (`cle_publique` only)

## Usage

See `features/service-login-verify.md` for an explanation and a usage example.

## Install

From the repo root:

```bash
cd service-login-verify && npm install && npm run build
```

Consuming apps can add a workspace dependency or link to `../service-login-verify`.
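
For example (one possible approach, not mandated by this package), a consuming app that sits next to this folder in the repo — as `website-skeleton/` does with its `file:../service-login-verify` dependency — can record the link with npm:

```bash
# From the consuming app's directory, assuming service-login-verify is a sibling folder.
npm install ../service-login-verify
# package.json then lists: "service-login-verify": "file:../service-login-verify"
```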

## References

- `features/service-login-verify.md`
- `features/userwallet-contrat-login-reste-a-faire.md` (§ 3.7)
7  service-login-verify/dist/buildAllowedPubkeys.d.ts  (vendored, Normal file)
@ -0,0 +1,7 @@
import type { Validateurs } from './types.js';
/**
 * Build allowed pubkeys from validators (cle_publique in signatures_obligatoires).
 * Service uses only explicit cle_publique; pair_uuid resolution is not used.
 */
export declare function buildAllowedPubkeysFromValidateurs(validateurs: Validateurs): Set<string>;
//# sourceMappingURL=buildAllowedPubkeys.d.ts.map

1  service-login-verify/dist/buildAllowedPubkeys.d.ts.map  (vendored, Normal file)
@ -0,0 +1 @@
{"version":3,"file":"buildAllowedPubkeys.d.ts","sourceRoot":"","sources":["../src/buildAllowedPubkeys.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AAE9C;;;GAGG;AACH,wBAAgB,kCAAkC,CAChD,WAAW,EAAE,WAAW,GACvB,GAAG,CAAC,MAAM,CAAC,CAUb"}

15  service-login-verify/dist/buildAllowedPubkeys.js  (vendored, Normal file)
@ -0,0 +1,15 @@
/**
 * Build allowed pubkeys from validators (cle_publique in signatures_obligatoires).
 * Service uses only explicit cle_publique; pair_uuid resolution is not used.
 */
export function buildAllowedPubkeysFromValidateurs(validateurs) {
    const out = new Set();
    for (const m of validateurs.membres_du_role) {
        for (const r of m.signatures_obligatoires) {
            if (r.cle_publique !== undefined) {
                out.add(r.cle_publique);
            }
        }
    }
    return out;
}

6  service-login-verify/dist/crypto.d.ts  (vendored, Normal file)
@ -0,0 +1,6 @@
/**
 * Verify secp256k1 signature over message (hash-nonce).
 * Message is hashed with SHA-256 before verification.
 */
export declare function verifySignature(message: string, signatureHex: string, publicKeyHex: string): boolean;
//# sourceMappingURL=crypto.d.ts.map

1  service-login-verify/dist/crypto.d.ts.map  (vendored, Normal file)
@ -0,0 +1 @@
{"version":3,"file":"crypto.d.ts","sourceRoot":"","sources":["../src/crypto.ts"],"names":[],"mappings":"AAIA;;;GAGG;AACH,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,EACf,YAAY,EAAE,MAAM,EACpB,YAAY,EAAE,MAAM,GACnB,OAAO,CAST"}

18  service-login-verify/dist/crypto.js  (vendored, Normal file)
@ -0,0 +1,18 @@
import { verify as secpVerify, Signature } from '@noble/secp256k1';
import { sha256 } from '@noble/hashes/sha256';
import { hexToBytes } from '@noble/hashes/utils';
/**
 * Verify secp256k1 signature over message (hash-nonce).
 * Message is hashed with SHA-256 before verification.
 */
export function verifySignature(message, signatureHex, publicKeyHex) {
    try {
        const msgHash = sha256(message);
        const sig = Signature.fromCompact(hexToBytes(signatureHex));
        const pub = hexToBytes(publicKeyHex);
        return secpVerify(sig, msgHash, pub);
    }
    catch {
        return false;
    }
}

5  service-login-verify/dist/index.d.ts  (vendored, Normal file)
@ -0,0 +1,5 @@
export { verifyLoginProof, } from './verifyLoginProof.js';
export { NonceCache } from './nonceCache.js';
export { buildAllowedPubkeysFromValidateurs } from './buildAllowedPubkeys.js';
export type { LoginProof, Validateurs, VerifyLoginProofContext, VerifyLoginProofResult, NonceCacheLike, } from './types.js';
//# sourceMappingURL=index.d.ts.map

1  service-login-verify/dist/index.d.ts.map  (vendored, Normal file)
@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,gBAAgB,GACjB,MAAM,uBAAuB,CAAC;AAC/B,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,kCAAkC,EAAE,MAAM,0BAA0B,CAAC;AAC9E,YAAY,EACV,UAAU,EACV,WAAW,EACX,uBAAuB,EACvB,sBAAsB,EACtB,cAAc,GACf,MAAM,YAAY,CAAC"}

3  service-login-verify/dist/index.js  (vendored, Normal file)
@ -0,0 +1,3 @@
export { verifyLoginProof, } from './verifyLoginProof.js';
export { NonceCache } from './nonceCache.js';
export { buildAllowedPubkeysFromValidateurs } from './buildAllowedPubkeys.js';

17  service-login-verify/dist/nonceCache.d.ts  (vendored, Normal file)
@ -0,0 +1,17 @@
import type { NonceCacheLike } from './types.js';
/**
 * In-memory nonce cache for anti-replay.
 * TTL defines how long a nonce is considered "seen".
 */
export declare class NonceCache implements NonceCacheLike {
    private readonly cache;
    private readonly ttlMs;
    constructor(ttlMs?: number);
    /**
     * Returns true if nonce is valid (not seen within TTL). Records nonce on success.
     */
    isValid(nonce: string, timestamp: number): boolean;
    private cleanup;
    clear(): void;
}
//# sourceMappingURL=nonceCache.d.ts.map

1  service-login-verify/dist/nonceCache.d.ts.map  (vendored, Normal file)
@ -0,0 +1 @@
{"version":3,"file":"nonceCache.d.ts","sourceRoot":"","sources":["../src/nonceCache.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,YAAY,CAAC;AAEjD;;;GAGG;AACH,qBAAa,UAAW,YAAW,cAAc;IAC/C,OAAO,CAAC,QAAQ,CAAC,KAAK,CAA6B;IACnD,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAS;gBAEnB,KAAK,GAAE,MAAgB;IAInC;;OAEG;IACH,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,OAAO;IAgBlD,OAAO,CAAC,OAAO;IAQf,KAAK,IAAI,IAAI;CAGd"}

37  service-login-verify/dist/nonceCache.js  (vendored, Normal file)
@ -0,0 +1,37 @@
/**
 * In-memory nonce cache for anti-replay.
 * TTL defines how long a nonce is considered "seen".
 */
export class NonceCache {
    cache = new Map();
    ttlMs;
    constructor(ttlMs = 3600000) {
        this.ttlMs = ttlMs;
    }
    /**
     * Returns true if nonce is valid (not seen within TTL). Records nonce on success.
     */
    isValid(nonce, timestamp) {
        const now = Date.now();
        const cached = this.cache.get(nonce);
        if (cached !== undefined) {
            if (now - cached < this.ttlMs) {
                return false;
            }
            this.cache.delete(nonce);
        }
        this.cache.set(nonce, timestamp);
        this.cleanup(now);
        return true;
    }
    cleanup(now) {
        for (const [n, ts] of this.cache.entries()) {
            if (now - ts >= this.ttlMs) {
                this.cache.delete(n);
            }
        }
    }
    clear() {
        this.cache.clear();
    }
}

56  service-login-verify/dist/types.d.ts  (vendored, Normal file)
@ -0,0 +1,56 @@
/**
 * Login proof payload (matches UserWallet LoginProof).
 */
export interface LoginProof {
    challenge: {
        hash: string;
        nonce: string;
        timestamp: number;
    };
    signatures: Array<{
        signature: string;
        cle_publique: string;
        nonce: string;
        pair_uuid: string;
    }>;
    statut: 'en_attente' | 'publie' | 'valide' | 'invalide';
}
/**
 * Validateurs shape (membres_du_role / signatures_obligatoires).
 * Used to build allowed pubkeys for strict verification.
 */
export interface SignatureObligatoire {
    membre_uuid: string;
    cle_publique?: string;
    cardinalite_minimale?: number;
    dependances?: string[];
}
export interface MembreDuRole {
    membre_uuid: string;
    signatures_obligatoires: SignatureObligatoire[];
}
export interface Validateurs {
    membres_du_role: MembreDuRole[];
}
/**
 * Context for verifyLoginProof.
 */
export interface VerifyLoginProofContext {
    /** Allowed pubkeys from validators (cle_publique). */
    allowedPubkeys: Set<string>;
    /** Anti-replay nonce cache. */
    nonceCache: NonceCacheLike;
    /** Timestamp window (ms). Default 300_000 (5 min). */
    timestampWindowMs?: number;
}
/**
 * Minimal nonce cache interface (anti-replay).
 */
export interface NonceCacheLike {
    isValid(nonce: string, timestamp: number): boolean;
}
export interface VerifyLoginProofResult {
    accept: boolean;
    reason?: 'timestamp_out_of_window' | 'nonce_reused' | 'validators_not_verifiable' | 'no_validator_signature' | 'signature_cle_publique_not_authorized';
}
//# sourceMappingURL=types.d.ts.map

1  service-login-verify/dist/types.d.ts.map  (vendored, Normal file)
@ -0,0 +1 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB,SAAS,EAAE;QACT,IAAI,EAAE,MAAM,CAAC;QACb,KAAK,EAAE,MAAM,CAAC;QACd,SAAS,EAAE,MAAM,CAAC;KACnB,CAAC;IACF,UAAU,EAAE,KAAK,CAAC;QAChB,SAAS,EAAE,MAAM,CAAC;QAClB,YAAY,EAAE,MAAM,CAAC;QACrB,KAAK,EAAE,MAAM,CAAC;QACd,SAAS,EAAE,MAAM,CAAC;KACnB,CAAC,CAAC;IACH,MAAM,EAAE,YAAY,GAAG,QAAQ,GAAG,QAAQ,GAAG,UAAU,CAAC;CACzD;AAED;;;GAGG;AACH,MAAM,WAAW,oBAAoB;IACnC,WAAW,EAAE,MAAM,CAAC;IACpB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,oBAAoB,CAAC,EAAE,MAAM,CAAC;IAC9B,WAAW,CAAC,EAAE,MAAM,EAAE,CAAC;CACxB;AAED,MAAM,WAAW,YAAY;IAC3B,WAAW,EAAE,MAAM,CAAC;IACpB,uBAAuB,EAAE,oBAAoB,EAAE,CAAC;CACjD;AAED,MAAM,WAAW,WAAW;IAC1B,eAAe,EAAE,YAAY,EAAE,CAAC;CACjC;AAED;;GAEG;AACH,MAAM,WAAW,uBAAuB;IACtC,sDAAsD;IACtD,cAAc,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;IAC5B,+BAA+B;IAC/B,UAAU,EAAE,cAAc,CAAC;IAC3B,sDAAsD;IACtD,iBAAiB,CAAC,EAAE,MAAM,CAAC;CAC5B;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC;CACpD;AAED,MAAM,WAAW,sBAAsB;IACrC,MAAM,EAAE,OAAO,CAAC;IAChB,MAAM,CAAC,EAAE,yBAAyB,GAAG,cAAc,GAAG,2BAA2B,GAAG,wBAAwB,GAAG,uCAAuC,CAAC;CACxJ"}
1 service-login-verify/dist/types.js vendored Normal file
@@ -0,0 +1 @@
export {};
7 service-login-verify/dist/verifyLoginProof.d.ts vendored Normal file
@@ -0,0 +1,7 @@
import type { LoginProof, VerifyLoginProofContext, VerifyLoginProofResult } from './types.js';
/**
 * Verify login proof: crypto, allowed pubkeys, timestamp window, nonce anti-replay.
 * Service must provide allowedPubkeys (from validators) and a NonceCache.
 */
export declare function verifyLoginProof(proof: LoginProof, ctx: VerifyLoginProofContext): VerifyLoginProofResult;
//# sourceMappingURL=verifyLoginProof.d.ts.map
1 service-login-verify/dist/verifyLoginProof.d.ts.map vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"verifyLoginProof.d.ts","sourceRoot":"","sources":["../src/verifyLoginProof.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EACV,UAAU,EACV,uBAAuB,EACvB,sBAAsB,EACvB,MAAM,YAAY,CAAC;AAyCpB;;;GAGG;AACH,wBAAgB,gBAAgB,CAC9B,KAAK,EAAE,UAAU,EACjB,GAAG,EAAE,uBAAuB,GAC3B,sBAAsB,CA6CxB"}
65 service-login-verify/dist/verifyLoginProof.js vendored Normal file
@@ -0,0 +1,65 @@
import { verifySignature } from './crypto.js';
const DEFAULT_TIMESTAMP_WINDOW_MS = 300000;
function verifyTimestamp(timestamp, windowMs) {
    const now = Date.now();
    const diff = Math.abs(now - timestamp);
    return diff <= windowMs;
}
function verifySignaturesStrict(hashValue, signatures, allowedPubkeys) {
    let valid = 0;
    let unauthorized = 0;
    for (const s of signatures) {
        const messageToVerify = `${hashValue}-${s.nonce}`;
        const cryptoOk = verifySignature(messageToVerify, s.signature, s.cle_publique);
        if (!cryptoOk) {
            continue;
        }
        if (allowedPubkeys.has(s.cle_publique)) {
            valid++;
        }
        else {
            unauthorized++;
        }
    }
    return { valid, unauthorized };
}
/**
 * Verify login proof: crypto, allowed pubkeys, timestamp window, nonce anti-replay.
 * Service must provide allowedPubkeys (from validators) and a NonceCache.
 */
export function verifyLoginProof(proof, ctx) {
    if (ctx.allowedPubkeys.size === 0) {
        return {
            accept: false,
            reason: 'validators_not_verifiable',
        };
    }
    const windowMs = ctx.timestampWindowMs ?? DEFAULT_TIMESTAMP_WINDOW_MS;
    if (!verifyTimestamp(proof.challenge.timestamp, windowMs)) {
        return {
            accept: false,
            reason: 'timestamp_out_of_window',
        };
    }
    if (!ctx.nonceCache.isValid(proof.challenge.nonce, proof.challenge.timestamp)) {
        return {
            accept: false,
            reason: 'nonce_reused',
        };
    }
    const hashValue = proof.challenge.hash;
    const { valid, unauthorized } = verifySignaturesStrict(hashValue, proof.signatures, ctx.allowedPubkeys);
    if (valid === 0) {
        return {
            accept: false,
            reason: 'no_validator_signature',
        };
    }
    if (unauthorized > 0) {
        return {
            accept: false,
            reason: 'signature_cle_publique_not_authorized',
        };
    }
    return { accept: true };
}
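A usage sketch of the compiled module, under stated assumptions: the in-memory nonce cache below is illustrative (the package's own nonce cache implementation is not shown in this hunk), and the import paths assume the dist output above is consumed directly:

```ts
import { verifyLoginProof } from './dist/verifyLoginProof.js';
import type { LoginProof, NonceCacheLike } from './dist/types.js';

// Illustrative anti-replay cache: a nonce is accepted only the first time it
// is seen inside the timestamp window; expired entries are pruned lazily.
class InMemoryNonceCache implements NonceCacheLike {
    private seen = new Map<string, number>();
    constructor(private windowMs = 300_000) {}

    isValid(nonce: string, timestamp: number): boolean {
        const now = Date.now();
        for (const [n, ts] of this.seen) {
            if (now - ts > this.windowMs) this.seen.delete(n); // prune expired entries
        }
        if (this.seen.has(nonce)) return false; // replay
        this.seen.set(nonce, timestamp); // remember the challenge timestamp
        return true;
    }
}

// One shared cache per process, so replays are rejected across requests.
const nonceCache = new InMemoryNonceCache();

// allowedPubkeys would come from the validators document (see the sketch
// after types.d.ts); proof is the LoginProof received from the client.
export function checkLogin(proof: LoginProof, allowedPubkeys: Set<string>): boolean {
    const result = verifyLoginProof(proof, { allowedPubkeys, nonceCache });
    if (!result.accept) {
        console.warn('login rejected:', result.reason);
    }
    return result.accept;
}
```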
1 service-login-verify/node_modules/.bin/tsc generated vendored Symbolic link
@@ -0,0 +1 @@
../typescript/bin/tsc
1 service-login-verify/node_modules/.bin/tsserver generated vendored Symbolic link
@@ -0,0 +1 @@
../typescript/bin/tsserver
43 service-login-verify/node_modules/.package-lock.json generated vendored Normal file
@@ -0,0 +1,43 @@
{
  "name": "service-login-verify",
  "version": "1.0.0",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "node_modules/@noble/hashes": {
      "version": "1.8.0",
      "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.8.0.tgz",
      "integrity": "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==",
      "license": "MIT",
      "engines": {
        "node": "^14.21.3 || >=16"
      },
      "funding": {
        "url": "https://paulmillr.com/funding/"
      }
    },
    "node_modules/@noble/secp256k1": {
      "version": "2.3.0",
      "resolved": "https://registry.npmjs.org/@noble/secp256k1/-/secp256k1-2.3.0.tgz",
      "integrity": "sha512-0TQed2gcBbIrh7Ccyw+y/uZQvbJwm7Ao4scBUxqpBCcsOlZG0O4KGfjtNAy/li4W8n1xt3dxrwJ0beZ2h2G6Kw==",
      "license": "MIT",
      "funding": {
        "url": "https://paulmillr.com/funding/"
      }
    },
    "node_modules/typescript": {
      "version": "5.9.3",
      "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz",
      "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
      "dev": true,
      "license": "Apache-2.0",
      "bin": {
        "tsc": "bin/tsc",
        "tsserver": "bin/tsserver"
      },
      "engines": {
        "node": ">=14.17"
      }
    }
  }
}
21 service-login-verify/node_modules/@noble/hashes/LICENSE generated vendored Normal file
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2022 Paul Miller (https://paulmillr.com)

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the “Software”), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
521 service-login-verify/node_modules/@noble/hashes/README.md generated vendored Normal file
@@ -0,0 +1,521 @@
# noble-hashes
|
||||||
|
|
||||||
|
Audited & minimal JS implementation of hash functions, MACs and KDFs.
|
||||||
|
|
||||||
|
- 🔒 [**Audited**](#security) by an independent security firm
|
||||||
|
- 🔻 Tree-shakeable: unused code is excluded from your builds
|
||||||
|
- 🏎 Fast: hand-optimized for caveats of JS engines
|
||||||
|
- 🔍 Reliable: chained / sliding window / DoS tests and fuzzing ensure correctness
|
||||||
|
- 🔁 No unrolled loops: makes it easier to verify and reduces source code size up to 5x
|
||||||
|
- 🦘 Includes SHA, RIPEMD, BLAKE, HMAC, HKDF, PBKDF, Scrypt, Argon2 & KangarooTwelve
|
||||||
|
- 🪶 48KB for everything, 4.8KB (2.36KB gzipped) for single-hash build
|
||||||
|
|
||||||
|
Take a glance at [GitHub Discussions](https://github.com/paulmillr/noble-hashes/discussions) for questions and support.
|
||||||
|
The library's initial development was funded by [Ethereum Foundation](https://ethereum.org/).
|
||||||
|
|
||||||
|
### This library belongs to _noble_ cryptography
|
||||||
|
|
||||||
|
> **noble cryptography** — high-security, easily auditable set of contained cryptographic libraries and tools.
|
||||||
|
|
||||||
|
- Zero or minimal dependencies
|
||||||
|
- Highly readable TypeScript / JS code
|
||||||
|
- PGP-signed releases and transparent NPM builds
|
||||||
|
- All libraries:
|
||||||
|
[ciphers](https://github.com/paulmillr/noble-ciphers),
|
||||||
|
[curves](https://github.com/paulmillr/noble-curves),
|
||||||
|
[hashes](https://github.com/paulmillr/noble-hashes),
|
||||||
|
[post-quantum](https://github.com/paulmillr/noble-post-quantum),
|
||||||
|
4kb [secp256k1](https://github.com/paulmillr/noble-secp256k1) /
|
||||||
|
[ed25519](https://github.com/paulmillr/noble-ed25519)
|
||||||
|
- [Check out homepage](https://paulmillr.com/noble/)
|
||||||
|
for reading resources, documentation and apps built with noble
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
> `npm install @noble/hashes`
|
||||||
|
|
||||||
|
> `deno add jsr:@noble/hashes`
|
||||||
|
|
||||||
|
> `deno doc jsr:@noble/hashes` # command-line documentation
|
||||||
|
|
||||||
|
We support all major platforms and runtimes.
|
||||||
|
For React Native, you may need a [polyfill for getRandomValues](https://github.com/LinusU/react-native-get-random-values).
|
||||||
|
A standalone file [noble-hashes.js](https://github.com/paulmillr/noble-hashes/releases) is also available.
|
||||||
|
|
||||||
|
```js
|
||||||
|
// import * from '@noble/hashes'; // Error: use sub-imports, to ensure small app size
|
||||||
|
import { sha256 } from '@noble/hashes/sha2.js'; // ESM & Common.js
|
||||||
|
sha256(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])); // returns Uint8Array
|
||||||
|
|
||||||
|
// Available modules
|
||||||
|
import { sha256, sha384, sha512, sha224, sha512_224, sha512_256 } from '@noble/hashes/sha2.js';
|
||||||
|
import { sha3_256, sha3_512, keccak_256, keccak_512, shake128, shake256 } from '@noble/hashes/sha3.js';
|
||||||
|
import { cshake256, turboshake256, kmac256, tuplehash256, k12, m14, keccakprg } from '@noble/hashes/sha3-addons.js';
|
||||||
|
import { blake3 } from '@noble/hashes/blake3.js';
|
||||||
|
import { blake2b, blake2s } from '@noble/hashes/blake2.js';
|
||||||
|
import { blake256, blake512 } from '@noble/hashes/blake1.js';
|
||||||
|
import { sha1, md5, ripemd160 } from '@noble/hashes/legacy.js';
|
||||||
|
import { hmac } from '@noble/hashes/hmac.js';
|
||||||
|
import { hkdf } from '@noble/hashes/hkdf.js';
|
||||||
|
import { pbkdf2, pbkdf2Async } from '@noble/hashes/pbkdf2.js';
|
||||||
|
import { scrypt, scryptAsync } from '@noble/hashes/scrypt.js';
|
||||||
|
import { argon2d, argon2i, argon2id } from '@noble/hashes/argon2.js';
|
||||||
|
import * as utils from '@noble/hashes/utils'; // bytesToHex, bytesToUtf8, concatBytes...
|
||||||
|
```
|
||||||
|
|
||||||
|
- [sha2: sha256, sha384, sha512](#sha2-sha256-sha384-sha512-and-others)
|
||||||
|
- [sha3: FIPS, SHAKE, Keccak](#sha3-fips-shake-keccak)
|
||||||
|
- [sha3-addons: cSHAKE, KMAC, K12, M14, TurboSHAKE](#sha3-addons-cshake-kmac-k12-m14-turboshake)
|
||||||
|
- [blake, blake2, blake3](#blake-blake2-blake3) | [legacy: sha1, md5, ripemd160](#legacy-sha1-md5-ripemd160)
|
||||||
|
- MACs: [hmac](#hmac) | [sha3-addons kmac](#sha3-addons-cshake-kmac-k12-m14-turboshake) | [blake3 key mode](#blake2b-blake2s-blake3)
|
||||||
|
- KDFs: [hkdf](#hkdf) | [pbkdf2](#pbkdf2) | [scrypt](#scrypt) | [argon2](#argon2)
|
||||||
|
- [utils](#utils)
|
||||||
|
- [Security](#security) | [Speed](#speed) | [Contributing & testing](#contributing--testing) | [License](#license)
|
||||||
|
|
||||||
|
### Implementations
|
||||||
|
|
||||||
|
Hash functions:
|
||||||
|
|
||||||
|
- `sha256()`: receive & return `Uint8Array`
|
||||||
|
- `sha256.create().update(a).update(b).digest()`: support partial updates
|
||||||
|
- `blake3.create({ context: 'e', dkLen: 32 })`: sometimes have options
|
||||||
|
- support little-endian architecture; also experimentally big-endian
|
||||||
|
- can hash up to 4GB per chunk, with any amount of chunks
|
||||||
|
|
||||||
|
#### sha2: sha256, sha384, sha512 and others
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { sha224, sha256, sha384, sha512, sha512_224, sha512_256 } from '@noble/hashes/sha2.js';
|
||||||
|
const res = sha256(Uint8Array.from([0xbc])); // basic
|
||||||
|
for (let hash of [sha256, sha384, sha512, sha224, sha512_224, sha512_256]) {
|
||||||
|
const arr = Uint8Array.from([0x10, 0x20, 0x30]);
|
||||||
|
const a = hash(arr);
|
||||||
|
const b = hash.create().update(arr).digest();
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
See [RFC 4634](https://datatracker.ietf.org/doc/html/rfc4634) and
|
||||||
|
[the paper on truncated SHA512/256](https://eprint.iacr.org/2010/548.pdf).
|
||||||
|
|
||||||
|
#### sha3: FIPS, SHAKE, Keccak
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import {
|
||||||
|
keccak_224, keccak_256, keccak_384, keccak_512,
|
||||||
|
sha3_224, sha3_256, sha3_384, sha3_512,
|
||||||
|
shake128, shake256,
|
||||||
|
} from '@noble/hashes/sha3.js';
|
||||||
|
for (let hash of [
|
||||||
|
sha3_224, sha3_256, sha3_384, sha3_512,
|
||||||
|
keccak_224, keccak_256, keccak_384, keccak_512,
|
||||||
|
]) {
|
||||||
|
const arr = Uint8Array.from([0x10, 0x20, 0x30]);
|
||||||
|
const a = hash(arr);
|
||||||
|
const b = hash.create().update(arr).digest();
|
||||||
|
}
|
||||||
|
const shka = shake128(Uint8Array.from([0x10]), { dkLen: 512 });
|
||||||
|
const shkb = shake256(Uint8Array.from([0x30]), { dkLen: 512 });
|
||||||
|
```
|
||||||
|
|
||||||
|
See [FIPS-202](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf),
|
||||||
|
[Website](https://keccak.team/keccak.html).
|
||||||
|
|
||||||
|
Check out [the differences between SHA-3 and Keccak](https://crypto.stackexchange.com/questions/15727/what-are-the-key-differences-between-the-draft-sha-3-standard-and-the-keccak-sub)
|
||||||
|
|
||||||
|
#### sha3-addons: cSHAKE, KMAC, K12, M14, TurboSHAKE
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import {
|
||||||
|
cshake128, cshake256,
|
||||||
|
k12,
|
||||||
|
keccakprg,
|
||||||
|
kmac128, kmac256,
|
||||||
|
m14,
|
||||||
|
parallelhash256,
|
||||||
|
tuplehash256,
|
||||||
|
turboshake128, turboshake256
|
||||||
|
} from '@noble/hashes/sha3-addons.js';
|
||||||
|
const data = Uint8Array.from([0x10, 0x20, 0x30]);
|
||||||
|
const ec1 = cshake128(data, { personalization: 'def' });
|
||||||
|
const ec2 = cshake256(data, { personalization: 'def' });
|
||||||
|
const et1 = turboshake128(data);
|
||||||
|
const et2 = turboshake256(data, { D: 0x05 });
|
||||||
|
// tuplehash(['ab', 'c']) !== tuplehash(['a', 'bc']) !== tuplehash([data])
|
||||||
|
const et3 = tuplehash256([utf8ToBytes('ab'), utf8ToBytes('c')]);
|
||||||
|
// Not parallel in JS (similar to blake3 / k12), added for compat
|
||||||
|
const ep1 = parallelhash256(data, { blockLen: 8 });
|
||||||
|
const kk = Uint8Array.from([0xca]);
|
||||||
|
const ek10 = kmac128(kk, data);
|
||||||
|
const ek11 = kmac256(kk, data);
|
||||||
|
const ek12 = k12(data);
|
||||||
|
const ek13 = m14(data);
|
||||||
|
// pseudo-random generator, first argument is capacity. XKCP recommends 254 bits capacity for 128-bit security strength.
|
||||||
|
// * with a capacity of 254 bits.
|
||||||
|
const p = keccakprg(254);
|
||||||
|
p.feed('test');
|
||||||
|
const rand1b = p.fetch(1);
|
||||||
|
```
|
||||||
|
|
||||||
|
- Full [NIST SP 800-185](https://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-185.pdf):
|
||||||
|
cSHAKE, KMAC, TupleHash, ParallelHash + XOF variants
|
||||||
|
- [Reduced-round Keccak](https://datatracker.ietf.org/doc/draft-irtf-cfrg-kangarootwelve/):
|
||||||
|
- 🦘 K12 aka KangarooTwelve
|
||||||
|
- M14 aka MarsupilamiFourteen
|
||||||
|
- TurboSHAKE
|
||||||
|
- [KeccakPRG](https://keccak.team/files/CSF-0.1.pdf): Pseudo-random generator based on Keccak
|
||||||
|
|
||||||
|
#### blake, blake2, blake3
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { blake224, blake256, blake384, blake512 } from '@noble/hashes/blake1.js';
|
||||||
|
import { blake2b, blake2s } from '@noble/hashes/blake2.js';
|
||||||
|
import { blake3 } from '@noble/hashes/blake3.js';
|
||||||
|
|
||||||
|
for (let hash of [
|
||||||
|
blake224, blake256, blake384, blake512,
|
||||||
|
blake2b, blake2s, blake3
|
||||||
|
]) {
|
||||||
|
const arr = Uint8Array.from([0x10, 0x20, 0x30]);
|
||||||
|
const a = hash(arr);
|
||||||
|
const b = hash.create().update(arr).digest();
|
||||||
|
}
|
||||||
|
|
||||||
|
// blake2 advanced usage
|
||||||
|
const ab = Uint8Array.from([0x01]);
|
||||||
|
blake2s(ab);
|
||||||
|
blake2s(ab, { key: new Uint8Array(32) });
|
||||||
|
blake2s(ab, { personalization: 'pers1234' });
|
||||||
|
blake2s(ab, { salt: 'salt1234' });
|
||||||
|
blake2b(ab);
|
||||||
|
blake2b(ab, { key: new Uint8Array(64) });
|
||||||
|
blake2b(ab, { personalization: 'pers1234pers1234' });
|
||||||
|
blake2b(ab, { salt: 'salt1234salt1234' });
|
||||||
|
|
||||||
|
// blake3 advanced usage
|
||||||
|
blake3(ab);
|
||||||
|
blake3(ab, { dkLen: 256 });
|
||||||
|
blake3(ab, { key: new Uint8Array(32) });
|
||||||
|
blake3(ab, { context: 'application-name' });
|
||||||
|
```
|
||||||
|
|
||||||
|
- Blake1 is legacy hash, one of SHA3 proposals. It is rarely used anywhere. See [pdf](https://www.aumasson.jp/blake/blake.pdf).
|
||||||
|
- Blake2 is popular fast hash. blake2b focuses on 64-bit platforms while blake2s is for 8-bit to 32-bit ones. See [RFC 7693](https://datatracker.ietf.org/doc/html/rfc7693), [Website](https://www.blake2.net)
|
||||||
|
- Blake3 is faster, reduced-round blake2. See [Website & specs](https://blake3.io)
|
||||||
|
|
||||||
|
#### legacy: sha1, md5, ripemd160
|
||||||
|
|
||||||
|
SHA1 (RFC 3174), MD5 (RFC 1321) and RIPEMD160 (RFC 2286) legacy, weak hash functions.
|
||||||
|
Don't use them in a new protocol. What "weak" means:
|
||||||
|
|
||||||
|
- Collisions can be made with 2^18 effort in MD5, 2^60 in SHA1, 2^80 in RIPEMD160.
|
||||||
|
- No practical pre-image attacks (only theoretical, 2^123.4)
|
||||||
|
- HMAC seems kinda ok: https://datatracker.ietf.org/doc/html/rfc6151
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { md5, ripemd160, sha1 } from '@noble/hashes/legacy.js';
|
||||||
|
for (let hash of [md5, ripemd160, sha1]) {
|
||||||
|
const arr = Uint8Array.from([0x10, 0x20, 0x30]);
|
||||||
|
const a = hash(arr);
|
||||||
|
const b = hash.create().update(arr).digest();
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### hmac
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { hmac } from '@noble/hashes/hmac.js';
|
||||||
|
import { sha256 } from '@noble/hashes/sha2.js';
|
||||||
|
const key = new Uint8Array(32).fill(1);
|
||||||
|
const msg = new Uint8Array(32).fill(2);
|
||||||
|
const mac1 = hmac(sha256, key, msg);
|
||||||
|
const mac2 = hmac.create(sha256, key).update(msg).digest();
|
||||||
|
```
|
||||||
|
|
||||||
|
Matches [RFC 2104](https://datatracker.ietf.org/doc/html/rfc2104).
|
||||||
|
|
||||||
|
#### hkdf
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { hkdf } from '@noble/hashes/hkdf.js';
|
||||||
|
import { randomBytes } from '@noble/hashes/utils.js';
|
||||||
|
import { sha256 } from '@noble/hashes/sha2.js';
|
||||||
|
const inputKey = randomBytes(32);
|
||||||
|
const salt = randomBytes(32);
|
||||||
|
const info = 'application-key';
|
||||||
|
const hk1 = hkdf(sha256, inputKey, salt, info, 32);
|
||||||
|
|
||||||
|
// == same as
|
||||||
|
import { extract, expand } from '@noble/hashes/hkdf.js';
|
||||||
|
import { sha256 } from '@noble/hashes/sha2.js';
|
||||||
|
const prk = extract(sha256, inputKey, salt);
|
||||||
|
const hk2 = expand(sha256, prk, info, 32);
|
||||||
|
```
|
||||||
|
|
||||||
|
Matches [RFC 5869](https://datatracker.ietf.org/doc/html/rfc5869).
|
||||||
|
|
||||||
|
#### pbkdf2
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { pbkdf2, pbkdf2Async } from '@noble/hashes/pbkdf2.js';
|
||||||
|
import { sha256 } from '@noble/hashes/sha2.js';
|
||||||
|
const pbkey1 = pbkdf2(sha256, 'password', 'salt', { c: 32, dkLen: 32 });
|
||||||
|
const pbkey2 = await pbkdf2Async(sha256, 'password', 'salt', { c: 32, dkLen: 32 });
|
||||||
|
const pbkey3 = await pbkdf2Async(sha256, Uint8Array.from([1, 2, 3]), Uint8Array.from([4, 5, 6]), {
|
||||||
|
c: 32,
|
||||||
|
dkLen: 32,
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
Matches [RFC 2898](https://datatracker.ietf.org/doc/html/rfc2898).
|
||||||
|
|
||||||
|
#### scrypt
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { scrypt, scryptAsync } from '@noble/hashes/scrypt.js';
|
||||||
|
const scr1 = scrypt('password', 'salt', { N: 2 ** 16, r: 8, p: 1, dkLen: 32 });
|
||||||
|
const scr2 = await scryptAsync('password', 'salt', { N: 2 ** 16, r: 8, p: 1, dkLen: 32 });
|
||||||
|
const scr3 = await scryptAsync(Uint8Array.from([1, 2, 3]), Uint8Array.from([4, 5, 6]), {
|
||||||
|
N: 2 ** 17,
|
||||||
|
r: 8,
|
||||||
|
p: 1,
|
||||||
|
dkLen: 32,
|
||||||
|
onProgress(percentage) {
|
||||||
|
console.log('progress', percentage);
|
||||||
|
},
|
||||||
|
maxmem: 2 ** 32 + 128 * 8 * 1, // N * r * p * 128 + (128*r*p)
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
Conforms to [RFC 7914](https://datatracker.ietf.org/doc/html/rfc7914),
|
||||||
|
[Website](https://www.tarsnap.com/scrypt.html)
|
||||||
|
|
||||||
|
- `N, r, p` are work factors. To understand them, see [the blog post](https://blog.filippo.io/the-scrypt-parameters/).
|
||||||
|
`r: 8, p: 1` are common. JS doesn't support parallelization, making increasing p meaningless.
|
||||||
|
- `dkLen` is the length of output bytes e.g. `32` or `64`
|
||||||
|
- `onProgress` can be used with async version of the function to report progress to a user.
|
||||||
|
- `maxmem` prevents DoS and is limited to `1GB + 1KB` (`2**30 + 2**10`), but can be adjusted using formula: `N * r * p * 128 + (128 * r * p)`
|
||||||
|
|
||||||
|
Time it takes to derive Scrypt key under different values of N (2\*\*N) on Apple M4 (mobile phones can be 1x-4x slower):
|
||||||
|
|
||||||
|
| N pow | Time | RAM |
|
||||||
|
| ----- | ---- | ----- |
|
||||||
|
| 16 | 0.1s | 64MB |
|
||||||
|
| 17 | 0.2s | 128MB |
|
||||||
|
| 18 | 0.4s | 256MB |
|
||||||
|
| 19 | 0.8s | 512MB |
|
||||||
|
| 20 | 1.5s | 1GB |
|
||||||
|
| 21 | 3.1s | 2GB |
|
||||||
|
| 22 | 6.2s | 4GB |
|
||||||
|
| 23 | 13s | 8GB |
|
||||||
|
| 24 | 27s | 16GB |
|
||||||
|
|
||||||
|
> [!NOTE]
|
||||||
|
> We support N larger than `2**20` where available, however,
|
||||||
|
> not all JS engines support >= 2GB ArrayBuffer-s.
|
||||||
|
> When using such N, you'll need to manually adjust `maxmem`, using formula above.
|
||||||
|
> Other JS implementations don't support large N-s.
|
||||||
|
|
||||||
|
#### argon2
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { argon2d, argon2i, argon2id } from '@noble/hashes/argon2.js';
|
||||||
|
const arg1 = argon2id('password', 'saltsalt', { t: 2, m: 65536, p: 1, maxmem: 2 ** 32 - 1 });
|
||||||
|
```
|
||||||
|
|
||||||
|
Argon2 [RFC 9106](https://datatracker.ietf.org/doc/html/rfc9106) implementation.
|
||||||
|
|
||||||
|
> [!WARNING]
|
||||||
|
> Argon2 can't be fast in JS, because there is no fast Uint64Array.
|
||||||
|
> It is suggested to use [Scrypt](#scrypt) instead.
|
||||||
|
> Being 5x slower than native code means brute-forcing attackers have bigger advantage.
|
||||||
|
|
||||||
|
#### utils
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { bytesToHex as toHex, randomBytes } from '@noble/hashes/utils';
|
||||||
|
console.log(toHex(randomBytes(32)));
|
||||||
|
```
|
||||||
|
|
||||||
|
- `bytesToHex` will convert `Uint8Array` to a hex string
|
||||||
|
- `randomBytes(bytes)` will produce cryptographically secure random `Uint8Array` of length `bytes`
|
||||||
|
|
||||||
|
## Security
|
||||||
|
|
||||||
|
The library has been independently audited:
|
||||||
|
|
||||||
|
- at version 1.0.0, in Jan 2022, by [Cure53](https://cure53.de)
|
||||||
|
- PDFs: [website](https://cure53.de/pentest-report_hashing-libs.pdf), [in-repo](./audit/2022-01-05-cure53-audit-nbl2.pdf)
|
||||||
|
- [Changes since audit](https://github.com/paulmillr/noble-hashes/compare/1.0.0..main).
|
||||||
|
- Scope: everything, besides `blake3`, `sha3-addons`, `sha1` and `argon2`, which have not been audited
|
||||||
|
- The audit has been funded by [Ethereum Foundation](https://ethereum.org/en/) with help of [Nomic Labs](https://nomiclabs.io)
|
||||||
|
|
||||||
|
It is tested against property-based, cross-library and Wycheproof vectors,
|
||||||
|
and is being fuzzed in [the separate repo](https://github.com/paulmillr/fuzzing).
|
||||||
|
|
||||||
|
If you see anything unusual: investigate and report.
|
||||||
|
|
||||||
|
### Constant-timeness
|
||||||
|
|
||||||
|
We're targetting algorithmic constant time. _JIT-compiler_ and _Garbage Collector_ make "constant time"
|
||||||
|
extremely hard to achieve [timing attack](https://en.wikipedia.org/wiki/Timing_attack) resistance
|
||||||
|
in a scripting language. Which means _any other JS library can't have
|
||||||
|
constant-timeness_. Even statically typed Rust, a language without GC,
|
||||||
|
[makes it harder to achieve constant-time](https://www.chosenplaintext.ca/open-source/rust-timing-shield/security)
|
||||||
|
for some cases. If your goal is absolute security, don't use any JS lib — including bindings to native ones.
|
||||||
|
Use low-level libraries & languages.
|
||||||
|
|
||||||
|
### Memory dumping
|
||||||
|
|
||||||
|
The library shares state buffers between hash
|
||||||
|
function calls. The buffers are zeroed-out after each call. However, if an attacker
|
||||||
|
can read application memory, you are doomed in any case:
|
||||||
|
|
||||||
|
- At some point, input will be a string and strings are immutable in JS:
|
||||||
|
there is no way to overwrite them with zeros. For example: deriving
|
||||||
|
key from `scrypt(password, salt)` where password and salt are strings
|
||||||
|
- Input from a file will stay in file buffers
|
||||||
|
- Input / output will be re-used multiple times in application which means it could stay in memory
|
||||||
|
- `await anything()` will always write all internal variables (including numbers)
|
||||||
|
to memory. With async functions / Promises there are no guarantees when the code
|
||||||
|
chunk would be executed. Which means attacker can have plenty of time to read data from memory
|
||||||
|
- There is no way to guarantee anything about zeroing sensitive data without
|
||||||
|
complex tests-suite which will dump process memory and verify that there is
|
||||||
|
no sensitive data left. For JS it means testing all browsers (incl. mobile),
|
||||||
|
which is complex. And of course it will be useless without using the same
|
||||||
|
test-suite in the actual application that consumes the library
|
||||||
|
|
||||||
|
### Supply chain security
|
||||||
|
|
||||||
|
- **Commits** are signed with PGP keys, to prevent forgery. Make sure to verify commit signatures
|
||||||
|
- **Releases** are transparent and built on GitHub CI. Make sure to verify [provenance](https://docs.npmjs.com/generating-provenance-statements) logs
|
||||||
|
- Use GitHub CLI to verify single-file builds:
|
||||||
|
`gh attestation verify --owner paulmillr noble-hashes.js`
|
||||||
|
- **Rare releasing** is followed to ensure less re-audit need for end-users
|
||||||
|
- **Dependencies** are minimized and locked-down: any dependency could get hacked and users will be downloading malware with every install.
|
||||||
|
- We make sure to use as few dependencies as possible
|
||||||
|
- Automatic dep updates are prevented by locking-down version ranges; diffs are checked with `npm-diff`
|
||||||
|
- **Dev Dependencies** are disabled for end-users; they are only used to develop / build the source code
|
||||||
|
|
||||||
|
For this package, there are 0 dependencies; and a few dev dependencies:
|
||||||
|
|
||||||
|
- micro-bmark, micro-should and jsbt are used for benchmarking / testing / build tooling and developed by the same author
|
||||||
|
- prettier, fast-check and typescript are used for code quality / test generation / ts compilation. It's hard to audit their source code thoroughly and fully because of their size
|
||||||
|
|
||||||
|
### Randomness
|
||||||
|
|
||||||
|
We're deferring to built-in
|
||||||
|
[crypto.getRandomValues](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/getRandomValues)
|
||||||
|
which is considered cryptographically secure (CSPRNG).
|
||||||
|
|
||||||
|
In the past, browsers had bugs that made it weak: it may happen again.
|
||||||
|
Implementing a userspace CSPRNG to get resilient to the weakness
|
||||||
|
is even worse: there is no reliable userspace source of quality entropy.
|
||||||
|
|
||||||
|
### Quantum computers
|
||||||
|
|
||||||
|
Cryptographically relevant quantum computer, if built, will allow to
|
||||||
|
utilize Grover's algorithm to break hashes in 2^n/2 operations, instead of 2^n.
|
||||||
|
|
||||||
|
This means SHA256 should be replaced with SHA512, SHA3-256 with SHA3-512, SHAKE128 with SHAKE256 etc.
|
||||||
|
|
||||||
|
Australian ASD prohibits SHA256 and similar hashes [after 2030](https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/ism/cyber-security-guidelines/guidelines-cryptography).
|
||||||
|
|
||||||
|
## Speed
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm run bench:install && npm run bench
|
||||||
|
```
|
||||||
|
|
||||||
|
Benchmarks measured on Apple M4.
|
||||||
|
|
||||||
|
```
|
||||||
|
# 32B
|
||||||
|
sha256 x 1,968,503 ops/sec @ 508ns/op
|
||||||
|
sha512 x 740,740 ops/sec @ 1μs/op
|
||||||
|
sha3_256 x 287,686 ops/sec @ 3μs/op
|
||||||
|
sha3_512 x 288,267 ops/sec @ 3μs/op
|
||||||
|
k12 x 476,190 ops/sec @ 2μs/op
|
||||||
|
m14 x 423,190 ops/sec @ 2μs/op
|
||||||
|
blake2b x 464,252 ops/sec @ 2μs/op
|
||||||
|
blake2s x 766,871 ops/sec @ 1μs/op
|
||||||
|
blake3 x 879,507 ops/sec @ 1μs/op
|
||||||
|
|
||||||
|
# 1MB
|
||||||
|
sha256 x 331 ops/sec @ 3ms/op
|
||||||
|
sha512 x 129 ops/sec @ 7ms/op
|
||||||
|
sha3_256 x 38 ops/sec @ 25ms/op
|
||||||
|
sha3_512 x 20 ops/sec @ 47ms/op
|
||||||
|
k12 x 88 ops/sec @ 11ms/op
|
||||||
|
m14 x 62 ops/sec @ 15ms/op
|
||||||
|
blake2b x 69 ops/sec @ 14ms/op
|
||||||
|
blake2s x 57 ops/sec @ 17ms/op
|
||||||
|
blake3 x 72 ops/sec @ 13ms/op
|
||||||
|
|
||||||
|
# MAC
|
||||||
|
hmac(sha256) x 599,880 ops/sec @ 1μs/op
|
||||||
|
hmac(sha512) x 197,122 ops/sec @ 5μs/op
|
||||||
|
kmac256 x 87,981 ops/sec @ 11μs/op
|
||||||
|
blake3(key) x 796,812 ops/sec @ 1μs/op
|
||||||
|
|
||||||
|
# KDF
|
||||||
|
hkdf(sha256) x 259,942 ops/sec @ 3μs/op
|
||||||
|
blake3(context) x 424,808 ops/sec @ 2μs/op
|
||||||
|
pbkdf2(sha256, c: 2 ** 18) x 5 ops/sec @ 197ms/op
|
||||||
|
pbkdf2(sha512, c: 2 ** 18) x 1 ops/sec @ 630ms/op
|
||||||
|
scrypt(n: 2 ** 18, r: 8, p: 1) x 2 ops/sec @ 400ms/op
|
||||||
|
argon2id(t: 1, m: 256MB) 2881ms
|
||||||
|
```
|
||||||
|
|
||||||
|
Compare to native node.js implementation that uses C bindings instead of pure-js code:
|
||||||
|
|
||||||
|
```
|
||||||
|
# native (node) 32B
|
||||||
|
sha256 x 2,267,573 ops/sec
|
||||||
|
sha512 x 983,284 ops/sec
|
||||||
|
sha3_256 x 1,522,070 ops/sec
|
||||||
|
blake2b x 1,512,859 ops/sec
|
||||||
|
blake2s x 1,821,493 ops/sec
|
||||||
|
hmac(sha256) x 1,085,776 ops/sec
|
||||||
|
hkdf(sha256) x 312,109 ops/sec
|
||||||
|
# native (node) KDF
|
||||||
|
pbkdf2(sha256, c: 2 ** 18) x 5 ops/sec @ 197ms/op
|
||||||
|
pbkdf2(sha512, c: 2 ** 18) x 1 ops/sec @ 630ms/op
|
||||||
|
scrypt(n: 2 ** 18, r: 8, p: 1) x 2 ops/sec @ 378ms/op
|
||||||
|
```
|
||||||
|
|
||||||
|
It is possible to [make this library 4x+ faster](./benchmark/README.md) by
|
||||||
|
_doing code generation of full loop unrolls_. We've decided against it. Reasons:
|
||||||
|
|
||||||
|
- the library must be auditable, with minimum amount of code, and zero dependencies
|
||||||
|
- most method invocations with the lib are going to be something like hashing 32b to 64kb of data
|
||||||
|
- hashing big inputs is 10x faster with low-level languages, which means you should probably pick 'em instead
|
||||||
|
|
||||||
|
The current performance is good enough when compared to other projects; SHA256 takes only 900 nanoseconds to run.
|
||||||
|
|
||||||
|
## Contributing & testing
|
||||||
|
|
||||||
|
`test/misc` directory contains implementations of loop unrolling and md5.
|
||||||
|
|
||||||
|
- `npm install && npm run build && npm test` will build the code and run tests.
|
||||||
|
- `npm run lint` / `npm run format` will run linter / fix linter issues.
|
||||||
|
- `npm run bench` will run benchmarks, which may need their deps first (`npm run bench:install`)
|
||||||
|
- `npm run build:release` will build single file
|
||||||
|
- There is **additional** 20-min DoS test `npm run test:dos` and 2-hour "big" multicore test `npm run test:big`.
|
||||||
|
See [our approach to testing](./test/README.md)
|
||||||
|
|
||||||
|
Additional resources:
|
||||||
|
|
||||||
|
- NTT hashes are outside of scope of the library. You can view some of them in different repos:
|
||||||
|
- [Pedersen in micro-zk-proofs](https://github.com/paulmillr/micro-zk-proofs/blob/1ed5ce1253583b2e540eef7f3477fb52bf5344ff/src/pedersen.ts)
|
||||||
|
- [Poseidon in noble-curves](https://github.com/paulmillr/noble-curves/blob/3d124dd3ecec8b6634cc0b2ba1c183aded5304f9/src/abstract/poseidon.ts)
|
||||||
|
- Check out [guidelines](https://github.com/paulmillr/guidelines) for coding practices
|
||||||
|
- See [paulmillr.com/noble](https://paulmillr.com/noble/) for useful resources, articles, documentation and demos
|
||||||
|
related to the library.
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2022 Paul Miller [(https://paulmillr.com)](https://paulmillr.com)
|
||||||
|
|
||||||
|
See LICENSE file.
|
||||||
17 service-login-verify/node_modules/@noble/hashes/_assert.d.ts generated vendored Normal file
@@ -0,0 +1,17 @@
/**
 * Internal assertion helpers.
 * @module
 * @deprecated
 */
import { abytes as ab, aexists as ae, anumber as an, aoutput as ao, type IHash as H } from './utils.ts';
/** @deprecated Use import from `noble/hashes/utils` module */
export declare const abytes: typeof ab;
/** @deprecated Use import from `noble/hashes/utils` module */
export declare const aexists: typeof ae;
/** @deprecated Use import from `noble/hashes/utils` module */
export declare const anumber: typeof an;
/** @deprecated Use import from `noble/hashes/utils` module */
export declare const aoutput: typeof ao;
/** @deprecated Use import from `noble/hashes/utils` module */
export type Hash = H;
//# sourceMappingURL=_assert.d.ts.map
1 service-login-verify/node_modules/@noble/hashes/_assert.d.ts.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"_assert.d.ts","sourceRoot":"","sources":["src/_assert.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH,OAAO,EACL,MAAM,IAAI,EAAE,EACZ,OAAO,IAAI,EAAE,EACb,OAAO,IAAI,EAAE,EACb,OAAO,IAAI,EAAE,EACb,KAAK,KAAK,IAAI,CAAC,EAChB,MAAM,YAAY,CAAC;AACpB,8DAA8D;AAC9D,eAAO,MAAM,MAAM,EAAE,OAAO,EAAO,CAAC;AACpC,8DAA8D;AAC9D,eAAO,MAAM,OAAO,EAAE,OAAO,EAAO,CAAC;AACrC,8DAA8D;AAC9D,eAAO,MAAM,OAAO,EAAE,OAAO,EAAO,CAAC;AACrC,8DAA8D;AAC9D,eAAO,MAAM,OAAO,EAAE,OAAO,EAAO,CAAC;AACrC,8DAA8D;AAC9D,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC"}
18 service-login-verify/node_modules/@noble/hashes/_assert.js generated vendored Normal file
@@ -0,0 +1,18 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.aoutput = exports.anumber = exports.aexists = exports.abytes = void 0;
/**
 * Internal assertion helpers.
 * @module
 * @deprecated
 */
const utils_ts_1 = require("./utils.js");
/** @deprecated Use import from `noble/hashes/utils` module */
exports.abytes = utils_ts_1.abytes;
/** @deprecated Use import from `noble/hashes/utils` module */
exports.aexists = utils_ts_1.aexists;
/** @deprecated Use import from `noble/hashes/utils` module */
exports.anumber = utils_ts_1.anumber;
/** @deprecated Use import from `noble/hashes/utils` module */
exports.aoutput = utils_ts_1.aoutput;
//# sourceMappingURL=_assert.js.map
1 service-login-verify/node_modules/@noble/hashes/_assert.js.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"_assert.js","sourceRoot":"","sources":["src/_assert.ts"],"names":[],"mappings":";;;AAAA;;;;GAIG;AACH,yCAMoB;AACpB,8DAA8D;AACjD,QAAA,MAAM,GAAc,iBAAE,CAAC;AACpC,8DAA8D;AACjD,QAAA,OAAO,GAAc,kBAAE,CAAC;AACrC,8DAA8D;AACjD,QAAA,OAAO,GAAc,kBAAE,CAAC;AACrC,8DAA8D;AACjD,QAAA,OAAO,GAAc,kBAAE,CAAC"}
14 service-login-verify/node_modules/@noble/hashes/_blake.d.ts generated vendored Normal file
@@ -0,0 +1,14 @@
/**
 * Internal blake variable.
 * For BLAKE2b, the two extra permutations for rounds 10 and 11 are SIGMA[10..11] = SIGMA[0..1].
 */
export declare const BSIGMA: Uint8Array;
export type Num4 = {
    a: number;
    b: number;
    c: number;
    d: number;
};
export declare function G1s(a: number, b: number, c: number, d: number, x: number): Num4;
export declare function G2s(a: number, b: number, c: number, d: number, x: number): Num4;
//# sourceMappingURL=_blake.d.ts.map
1 service-login-verify/node_modules/@noble/hashes/_blake.d.ts.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"_blake.d.ts","sourceRoot":"","sources":["src/_blake.ts"],"names":[],"mappings":"AAMA;;;GAGG;AAEH,eAAO,MAAM,MAAM,EAAE,UAkBnB,CAAC;AAGH,MAAM,MAAM,IAAI,GAAG;IAAE,CAAC,EAAE,MAAM,CAAC;IAAC,CAAC,EAAE,MAAM,CAAC;IAAC,CAAC,EAAE,MAAM,CAAC;IAAC,CAAC,EAAE,MAAM,CAAC;CAAE,CAAC;AAGnE,wBAAgB,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,GAAG,IAAI,CAM/E;AAED,wBAAgB,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,GAAG,IAAI,CAM/E"}
50 service-login-verify/node_modules/@noble/hashes/_blake.js generated vendored Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.BSIGMA = void 0;
|
||||||
|
exports.G1s = G1s;
|
||||||
|
exports.G2s = G2s;
|
||||||
|
/**
|
||||||
|
* Internal helpers for blake hash.
|
||||||
|
* @module
|
||||||
|
*/
|
||||||
|
const utils_ts_1 = require("./utils.js");
|
||||||
|
/**
|
||||||
|
* Internal blake variable.
|
||||||
|
* For BLAKE2b, the two extra permutations for rounds 10 and 11 are SIGMA[10..11] = SIGMA[0..1].
|
||||||
|
*/
|
||||||
|
// prettier-ignore
|
||||||
|
exports.BSIGMA = Uint8Array.from([
|
||||||
|
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
|
||||||
|
14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3,
|
||||||
|
11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4,
|
||||||
|
7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8,
|
||||||
|
9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13,
|
||||||
|
2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9,
|
||||||
|
12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11,
|
||||||
|
13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10,
|
||||||
|
6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5,
|
||||||
|
10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0,
|
||||||
|
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
|
||||||
|
14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3,
|
||||||
|
// Blake1, unused in others
|
||||||
|
11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4,
|
||||||
|
7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8,
|
||||||
|
9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13,
|
||||||
|
2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9,
|
||||||
|
]);
|
||||||
|
// Mixing function G splitted in two halfs
|
||||||
|
function G1s(a, b, c, d, x) {
|
||||||
|
a = (a + b + x) | 0;
|
||||||
|
d = (0, utils_ts_1.rotr)(d ^ a, 16);
|
||||||
|
c = (c + d) | 0;
|
||||||
|
b = (0, utils_ts_1.rotr)(b ^ c, 12);
|
||||||
|
return { a, b, c, d };
|
||||||
|
}
|
||||||
|
function G2s(a, b, c, d, x) {
|
||||||
|
a = (a + b + x) | 0;
|
||||||
|
d = (0, utils_ts_1.rotr)(d ^ a, 8);
|
||||||
|
c = (c + d) | 0;
|
||||||
|
b = (0, utils_ts_1.rotr)(b ^ c, 7);
|
||||||
|
return { a, b, c, d };
|
||||||
|
}
|
||||||
|
//# sourceMappingURL=_blake.js.map
|
||||||
1 service-login-verify/node_modules/@noble/hashes/_blake.js.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"_blake.js","sourceRoot":"","sources":["src/_blake.ts"],"names":[],"mappings":";;;AAmCA,kBAMC;AAED,kBAMC;AAjDD;;;GAGG;AACH,yCAAkC;AAElC;;;GAGG;AACH,kBAAkB;AACL,QAAA,MAAM,GAA+B,UAAU,CAAC,IAAI,CAAC;IAChE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE;IACpD,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;IACpD,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;IACpD,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC;IACpD,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE;IACpD,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC;IACpD,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE;IACpD,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE;IACpD,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC;IACpD,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC;IACpD,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE;IACpD,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;IACpD,2BAA2B;IAC3B,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;IACpD,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC;IACpD,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE;IACpD,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC;CACrD,CAAC,CAAC;AAKH,0CAA0C;AAC1C,SAAgB,GAAG,CAAC,CAAS,EAAE,CAAS,EAAE,CAAS,EAAE,CAAS,EAAE,CAAS;IACvE,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;IACpB,CAAC,GAAG,IAAA,eAAI,EAAC,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,CAAC;IACpB,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;IAChB,CAAC,GAAG,IAAA,eAAI,EAAC,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,CAAC;IACpB,OAAO,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;AACxB,CAAC;AAED,SAAgB,GAAG,CAAC,CAAS,EAAE,CAAS,EAAE,CAAS,EAAE,CAAS,EAAE,CAAS;IACvE,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;IACpB,CAAC,GAAG,IAAA,eAAI,EAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC;IACnB,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;IAChB,CAAC,GAAG,IAAA,eAAI,EAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC;IACnB,OAA
O,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;AACxB,CAAC"}
51 service-login-verify/node_modules/@noble/hashes/_md.d.ts generated vendored Normal file
@@ -0,0 +1,51 @@
/**
 * Internal Merkle-Damgard hash utils.
 * @module
 */
import { type Input, Hash } from './utils.ts';
/** Polyfill for Safari 14. https://caniuse.com/mdn-javascript_builtins_dataview_setbiguint64 */
export declare function setBigUint64(view: DataView, byteOffset: number, value: bigint, isLE: boolean): void;
/** Choice: a ? b : c */
export declare function Chi(a: number, b: number, c: number): number;
/** Majority function, true if any two inputs is true. */
export declare function Maj(a: number, b: number, c: number): number;
/**
 * Merkle-Damgard hash construction base class.
 * Could be used to create MD5, RIPEMD, SHA1, SHA2.
 */
export declare abstract class HashMD<T extends HashMD<T>> extends Hash<T> {
    protected abstract process(buf: DataView, offset: number): void;
    protected abstract get(): number[];
    protected abstract set(...args: number[]): void;
    abstract destroy(): void;
    protected abstract roundClean(): void;
    readonly blockLen: number;
    readonly outputLen: number;
    readonly padOffset: number;
    readonly isLE: boolean;
    protected buffer: Uint8Array;
    protected view: DataView;
    protected finished: boolean;
    protected length: number;
    protected pos: number;
    protected destroyed: boolean;
    constructor(blockLen: number, outputLen: number, padOffset: number, isLE: boolean);
    update(data: Input): this;
    digestInto(out: Uint8Array): void;
    digest(): Uint8Array;
    _cloneInto(to?: T): T;
    clone(): T;
}
/**
 * Initial SHA-2 state: fractional parts of square roots of first 16 primes 2..53.
 * Check out `test/misc/sha2-gen-iv.js` for recomputation guide.
 */
/** Initial SHA256 state. Bits 0..32 of frac part of sqrt of primes 2..19 */
export declare const SHA256_IV: Uint32Array;
/** Initial SHA224 state. Bits 32..64 of frac part of sqrt of primes 23..53 */
export declare const SHA224_IV: Uint32Array;
/** Initial SHA384 state. Bits 0..64 of frac part of sqrt of primes 23..53 */
export declare const SHA384_IV: Uint32Array;
/** Initial SHA512 state. Bits 0..64 of frac part of sqrt of primes 2..19 */
export declare const SHA512_IV: Uint32Array;
//# sourceMappingURL=_md.d.ts.map
1 service-login-verify/node_modules/@noble/hashes/_md.d.ts.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"_md.d.ts","sourceRoot":"","sources":["src/_md.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,OAAO,EAAE,KAAK,KAAK,EAAE,IAAI,EAAwD,MAAM,YAAY,CAAC;AAEpG,gGAAgG;AAChG,wBAAgB,YAAY,CAC1B,IAAI,EAAE,QAAQ,EACd,UAAU,EAAE,MAAM,EAClB,KAAK,EAAE,MAAM,EACb,IAAI,EAAE,OAAO,GACZ,IAAI,CAUN;AAED,wBAAwB;AACxB,wBAAgB,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,GAAG,MAAM,CAE3D;AAED,yDAAyD;AACzD,wBAAgB,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,GAAG,MAAM,CAE3D;AAED;;;GAGG;AACH,8BAAsB,MAAM,CAAC,CAAC,SAAS,MAAM,CAAC,CAAC,CAAC,CAAE,SAAQ,IAAI,CAAC,CAAC,CAAC;IAC/D,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,QAAQ,EAAE,MAAM,EAAE,MAAM,GAAG,IAAI;IAC/D,SAAS,CAAC,QAAQ,CAAC,GAAG,IAAI,MAAM,EAAE;IAClC,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,IAAI,EAAE,MAAM,EAAE,GAAG,IAAI;IAC/C,QAAQ,CAAC,OAAO,IAAI,IAAI;IACxB,SAAS,CAAC,QAAQ,CAAC,UAAU,IAAI,IAAI;IAErC,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;IAC3B,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;IAC3B,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC;IAGvB,SAAS,CAAC,MAAM,EAAE,UAAU,CAAC;IAC7B,SAAS,CAAC,IAAI,EAAE,QAAQ,CAAC;IACzB,SAAS,CAAC,QAAQ,UAAS;IAC3B,SAAS,CAAC,MAAM,SAAK;IACrB,SAAS,CAAC,GAAG,SAAK;IAClB,SAAS,CAAC,SAAS,UAAS;gBAEhB,QAAQ,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO;IASjF,MAAM,CAAC,IAAI,EAAE,KAAK,GAAG,IAAI;IA0BzB,UAAU,CAAC,GAAG,EAAE,UAAU,GAAG,IAAI;IAkCjC,MAAM,IAAI,UAAU;IAOpB,UAAU,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC;IAWrB,KAAK,IAAI,CAAC;CAGX;AAED;;;GAGG;AAEH,4EAA4E;AAC5E,eAAO,MAAM,SAAS,EAAE,WAEtB,CAAC;AAEH,8EAA8E;AAC9E,eAAO,MAAM,SAAS,EAAE,WAEtB,CAAC;AAEH,6EAA6E;AAC7E,eAAO,MAAM,SAAS,EAAE,WAGtB,CAAC;AAEH,4EAA4E;AAC5E,eAAO,MAAM,SAAS,EAAE,WAGtB,CAAC"}
162 service-login-verify/node_modules/@noble/hashes/_md.js generated vendored Normal file
@@ -0,0 +1,162 @@
|
|||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.SHA512_IV = exports.SHA384_IV = exports.SHA224_IV = exports.SHA256_IV = exports.HashMD = void 0;
|
||||||
|
exports.setBigUint64 = setBigUint64;
|
||||||
|
exports.Chi = Chi;
|
||||||
|
exports.Maj = Maj;
|
||||||
|
/**
|
||||||
|
* Internal Merkle-Damgard hash utils.
|
||||||
|
* @module
|
||||||
|
*/
|
||||||
|
const utils_ts_1 = require("./utils.js");
|
||||||
|
/** Polyfill for Safari 14. https://caniuse.com/mdn-javascript_builtins_dataview_setbiguint64 */
|
||||||
|
function setBigUint64(view, byteOffset, value, isLE) {
|
||||||
|
if (typeof view.setBigUint64 === 'function')
|
||||||
|
return view.setBigUint64(byteOffset, value, isLE);
|
||||||
|
const _32n = BigInt(32);
|
||||||
|
const _u32_max = BigInt(0xffffffff);
|
||||||
|
const wh = Number((value >> _32n) & _u32_max);
|
||||||
|
const wl = Number(value & _u32_max);
|
||||||
|
const h = isLE ? 4 : 0;
|
||||||
|
const l = isLE ? 0 : 4;
|
||||||
|
view.setUint32(byteOffset + h, wh, isLE);
|
||||||
|
view.setUint32(byteOffset + l, wl, isLE);
|
||||||
|
}
|
||||||
|
/** Choice: a ? b : c */
|
||||||
|
function Chi(a, b, c) {
|
||||||
|
return (a & b) ^ (~a & c);
|
||||||
|
}
|
||||||
|
/** Majority function, true if any two inputs is true. */
|
||||||
|
function Maj(a, b, c) {
|
||||||
|
return (a & b) ^ (a & c) ^ (b & c);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Merkle-Damgard hash construction base class.
|
||||||
|
* Could be used to create MD5, RIPEMD, SHA1, SHA2.
|
||||||
|
*/
|
||||||
|
class HashMD extends utils_ts_1.Hash {
|
||||||
|
constructor(blockLen, outputLen, padOffset, isLE) {
|
||||||
|
super();
|
||||||
|
this.finished = false;
|
||||||
|
this.length = 0;
|
||||||
|
this.pos = 0;
|
||||||
|
this.destroyed = false;
|
||||||
|
this.blockLen = blockLen;
|
||||||
|
this.outputLen = outputLen;
|
||||||
|
this.padOffset = padOffset;
|
||||||
|
this.isLE = isLE;
|
||||||
|
this.buffer = new Uint8Array(blockLen);
|
||||||
|
this.view = (0, utils_ts_1.createView)(this.buffer);
|
||||||
    }
    update(data) {
        (0, utils_ts_1.aexists)(this);
        data = (0, utils_ts_1.toBytes)(data);
        (0, utils_ts_1.abytes)(data);
        const { view, buffer, blockLen } = this;
        const len = data.length;
        for (let pos = 0; pos < len;) {
            const take = Math.min(blockLen - this.pos, len - pos);
            // Fast path: we have at least one block in input, cast it to view and process
            if (take === blockLen) {
                const dataView = (0, utils_ts_1.createView)(data);
                for (; blockLen <= len - pos; pos += blockLen)
                    this.process(dataView, pos);
                continue;
            }
            buffer.set(data.subarray(pos, pos + take), this.pos);
            this.pos += take;
            pos += take;
            if (this.pos === blockLen) {
                this.process(view, 0);
                this.pos = 0;
            }
        }
        this.length += data.length;
        this.roundClean();
        return this;
    }
    digestInto(out) {
        (0, utils_ts_1.aexists)(this);
        (0, utils_ts_1.aoutput)(out, this);
        this.finished = true;
        // Padding
        // We can avoid allocation of buffer for padding completely if it
        // was previously not allocated here. But it won't change performance.
        const { buffer, view, blockLen, isLE } = this;
        let { pos } = this;
        // append the bit '1' to the message
        buffer[pos++] = 0b10000000;
        (0, utils_ts_1.clean)(this.buffer.subarray(pos));
        // we have less than padOffset left in buffer, so we cannot put length in
        // current block, need process it and pad again
        if (this.padOffset > blockLen - pos) {
            this.process(view, 0);
            pos = 0;
        }
        // Pad until full block byte with zeros
        for (let i = pos; i < blockLen; i++)
            buffer[i] = 0;
        // Note: sha512 requires length to be 128bit integer, but length in JS will overflow before that
        // You need to write around 2 exabytes (u64_max / 8 / (1024**6)) for this to happen.
        // So we just write lowest 64 bits of that value.
        setBigUint64(view, blockLen - 8, BigInt(this.length * 8), isLE);
        this.process(view, 0);
        const oview = (0, utils_ts_1.createView)(out);
        const len = this.outputLen;
        // NOTE: we do division by 4 later, which should be fused in single op with modulo by JIT
        if (len % 4)
            throw new Error('_sha2: outputLen should be aligned to 32bit');
        const outLen = len / 4;
        const state = this.get();
        if (outLen > state.length)
            throw new Error('_sha2: outputLen bigger than state');
        for (let i = 0; i < outLen; i++)
            oview.setUint32(4 * i, state[i], isLE);
    }
    digest() {
        const { buffer, outputLen } = this;
        this.digestInto(buffer);
        const res = buffer.slice(0, outputLen);
        this.destroy();
        return res;
    }
    _cloneInto(to) {
        to || (to = new this.constructor());
        to.set(...this.get());
        const { blockLen, buffer, length, finished, destroyed, pos } = this;
        to.destroyed = destroyed;
        to.finished = finished;
        to.length = length;
        to.pos = pos;
        if (length % blockLen)
            to.buffer.set(buffer);
        return to;
    }
    clone() {
        return this._cloneInto();
    }
}
exports.HashMD = HashMD;
/**
 * Initial SHA-2 state: fractional parts of square roots of first 16 primes 2..53.
 * Check out `test/misc/sha2-gen-iv.js` for recomputation guide.
 */
/** Initial SHA256 state. Bits 0..32 of frac part of sqrt of primes 2..19 */
exports.SHA256_IV = Uint32Array.from([
    0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
]);
/** Initial SHA224 state. Bits 32..64 of frac part of sqrt of primes 23..53 */
exports.SHA224_IV = Uint32Array.from([
    0xc1059ed8, 0x367cd507, 0x3070dd17, 0xf70e5939, 0xffc00b31, 0x68581511, 0x64f98fa7, 0xbefa4fa4,
]);
/** Initial SHA384 state. Bits 0..64 of frac part of sqrt of primes 23..53 */
exports.SHA384_IV = Uint32Array.from([
    0xcbbb9d5d, 0xc1059ed8, 0x629a292a, 0x367cd507, 0x9159015a, 0x3070dd17, 0x152fecd8, 0xf70e5939,
    0x67332667, 0xffc00b31, 0x8eb44a87, 0x68581511, 0xdb0c2e0d, 0x64f98fa7, 0x47b5481d, 0xbefa4fa4,
]);
/** Initial SHA512 state. Bits 0..64 of frac part of sqrt of primes 2..19 */
exports.SHA512_IV = Uint32Array.from([
    0x6a09e667, 0xf3bcc908, 0xbb67ae85, 0x84caa73b, 0x3c6ef372, 0xfe94f82b, 0xa54ff53a, 0x5f1d36f1,
    0x510e527f, 0xade682d1, 0x9b05688c, 0x2b3e6c1f, 0x1f83d9ab, 0xfb41bd6b, 0x5be0cd19, 0x137e2179,
]);
//# sourceMappingURL=_md.js.map
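The HashMD class above is the shared Merkle-Damgård base that the concrete SHA-2 hashes vendored in this package build on. A minimal usage sketch, not part of this commit; the '@noble/hashes/sha2' and '@noble/hashes/utils' subpaths are assumptions about how the vendored package resolves:

// Illustrative only: HashMD drives update()/digestInto(); sha256() wraps that into a one-shot call.
const { sha256 } = require('@noble/hashes/sha2');
const { bytesToHex, utf8ToBytes } = require('@noble/hashes/utils');

const digest = sha256(utf8ToBytes('abc'));
console.log(bytesToHex(digest));
// Expected: ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad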
service-login-verify/node_modules/@noble/hashes/_md.js.map (generated, vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
service-login-verify/node_modules/@noble/hashes/_u64.d.ts (generated, vendored, new file, 55 lines)
@@ -0,0 +1,55 @@
declare function fromBig(n: bigint, le?: boolean): {
    h: number;
    l: number;
};
declare function split(lst: bigint[], le?: boolean): Uint32Array[];
declare const toBig: (h: number, l: number) => bigint;
declare const shrSH: (h: number, _l: number, s: number) => number;
declare const shrSL: (h: number, l: number, s: number) => number;
declare const rotrSH: (h: number, l: number, s: number) => number;
declare const rotrSL: (h: number, l: number, s: number) => number;
declare const rotrBH: (h: number, l: number, s: number) => number;
declare const rotrBL: (h: number, l: number, s: number) => number;
declare const rotr32H: (_h: number, l: number) => number;
declare const rotr32L: (h: number, _l: number) => number;
declare const rotlSH: (h: number, l: number, s: number) => number;
declare const rotlSL: (h: number, l: number, s: number) => number;
declare const rotlBH: (h: number, l: number, s: number) => number;
declare const rotlBL: (h: number, l: number, s: number) => number;
declare function add(Ah: number, Al: number, Bh: number, Bl: number): {
    h: number;
    l: number;
};
declare const add3L: (Al: number, Bl: number, Cl: number) => number;
declare const add3H: (low: number, Ah: number, Bh: number, Ch: number) => number;
declare const add4L: (Al: number, Bl: number, Cl: number, Dl: number) => number;
declare const add4H: (low: number, Ah: number, Bh: number, Ch: number, Dh: number) => number;
declare const add5L: (Al: number, Bl: number, Cl: number, Dl: number, El: number) => number;
declare const add5H: (low: number, Ah: number, Bh: number, Ch: number, Dh: number, Eh: number) => number;
export { add, add3H, add3L, add4H, add4L, add5H, add5L, fromBig, rotlBH, rotlBL, rotlSH, rotlSL, rotr32H, rotr32L, rotrBH, rotrBL, rotrSH, rotrSL, shrSH, shrSL, split, toBig };
declare const u64: {
    fromBig: typeof fromBig;
    split: typeof split;
    toBig: (h: number, l: number) => bigint;
    shrSH: (h: number, _l: number, s: number) => number;
    shrSL: (h: number, l: number, s: number) => number;
    rotrSH: (h: number, l: number, s: number) => number;
    rotrSL: (h: number, l: number, s: number) => number;
    rotrBH: (h: number, l: number, s: number) => number;
    rotrBL: (h: number, l: number, s: number) => number;
    rotr32H: (_h: number, l: number) => number;
    rotr32L: (h: number, _l: number) => number;
    rotlSH: (h: number, l: number, s: number) => number;
    rotlSL: (h: number, l: number, s: number) => number;
    rotlBH: (h: number, l: number, s: number) => number;
    rotlBL: (h: number, l: number, s: number) => number;
    add: typeof add;
    add3L: (Al: number, Bl: number, Cl: number) => number;
    add3H: (low: number, Ah: number, Bh: number, Ch: number) => number;
    add4L: (Al: number, Bl: number, Cl: number, Dl: number) => number;
    add4H: (low: number, Ah: number, Bh: number, Ch: number, Dh: number) => number;
    add5H: (low: number, Ah: number, Bh: number, Ch: number, Dh: number, Eh: number) => number;
    add5L: (Al: number, Bl: number, Cl: number, Dl: number, El: number) => number;
};
export default u64;
//# sourceMappingURL=_u64.d.ts.map
service-login-verify/node_modules/@noble/hashes/_u64.d.ts.map (generated, vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
service-login-verify/node_modules/@noble/hashes/_u64.js (generated, vendored, new file, 90 lines)
@@ -0,0 +1,90 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.toBig = exports.shrSL = exports.shrSH = exports.rotrSL = exports.rotrSH = exports.rotrBL = exports.rotrBH = exports.rotr32L = exports.rotr32H = exports.rotlSL = exports.rotlSH = exports.rotlBL = exports.rotlBH = exports.add5L = exports.add5H = exports.add4L = exports.add4H = exports.add3L = exports.add3H = void 0;
exports.add = add;
exports.fromBig = fromBig;
exports.split = split;
/**
 * Internal helpers for u64. BigUint64Array is too slow as per 2025, so we implement it using Uint32Array.
 * @todo re-check https://issues.chromium.org/issues/42212588
 * @module
 */
const U32_MASK64 = /* @__PURE__ */ BigInt(2 ** 32 - 1);
const _32n = /* @__PURE__ */ BigInt(32);
function fromBig(n, le = false) {
    if (le)
        return { h: Number(n & U32_MASK64), l: Number((n >> _32n) & U32_MASK64) };
    return { h: Number((n >> _32n) & U32_MASK64) | 0, l: Number(n & U32_MASK64) | 0 };
}
function split(lst, le = false) {
    const len = lst.length;
    let Ah = new Uint32Array(len);
    let Al = new Uint32Array(len);
    for (let i = 0; i < len; i++) {
        const { h, l } = fromBig(lst[i], le);
        [Ah[i], Al[i]] = [h, l];
    }
    return [Ah, Al];
}
const toBig = (h, l) => (BigInt(h >>> 0) << _32n) | BigInt(l >>> 0);
exports.toBig = toBig;
// for Shift in [0, 32)
const shrSH = (h, _l, s) => h >>> s;
exports.shrSH = shrSH;
const shrSL = (h, l, s) => (h << (32 - s)) | (l >>> s);
exports.shrSL = shrSL;
// Right rotate for Shift in [1, 32)
const rotrSH = (h, l, s) => (h >>> s) | (l << (32 - s));
exports.rotrSH = rotrSH;
const rotrSL = (h, l, s) => (h << (32 - s)) | (l >>> s);
exports.rotrSL = rotrSL;
// Right rotate for Shift in (32, 64), NOTE: 32 is special case.
const rotrBH = (h, l, s) => (h << (64 - s)) | (l >>> (s - 32));
exports.rotrBH = rotrBH;
const rotrBL = (h, l, s) => (h >>> (s - 32)) | (l << (64 - s));
exports.rotrBL = rotrBL;
// Right rotate for shift===32 (just swaps l&h)
const rotr32H = (_h, l) => l;
exports.rotr32H = rotr32H;
const rotr32L = (h, _l) => h;
exports.rotr32L = rotr32L;
// Left rotate for Shift in [1, 32)
const rotlSH = (h, l, s) => (h << s) | (l >>> (32 - s));
exports.rotlSH = rotlSH;
const rotlSL = (h, l, s) => (l << s) | (h >>> (32 - s));
exports.rotlSL = rotlSL;
// Left rotate for Shift in (32, 64), NOTE: 32 is special case.
const rotlBH = (h, l, s) => (l << (s - 32)) | (h >>> (64 - s));
exports.rotlBH = rotlBH;
const rotlBL = (h, l, s) => (h << (s - 32)) | (l >>> (64 - s));
exports.rotlBL = rotlBL;
// JS uses 32-bit signed integers for bitwise operations which means we cannot
// simple take carry out of low bit sum by shift, we need to use division.
function add(Ah, Al, Bh, Bl) {
    const l = (Al >>> 0) + (Bl >>> 0);
    return { h: (Ah + Bh + ((l / 2 ** 32) | 0)) | 0, l: l | 0 };
}
// Addition with more than 2 elements
const add3L = (Al, Bl, Cl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0);
exports.add3L = add3L;
const add3H = (low, Ah, Bh, Ch) => (Ah + Bh + Ch + ((low / 2 ** 32) | 0)) | 0;
exports.add3H = add3H;
const add4L = (Al, Bl, Cl, Dl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0);
exports.add4L = add4L;
const add4H = (low, Ah, Bh, Ch, Dh) => (Ah + Bh + Ch + Dh + ((low / 2 ** 32) | 0)) | 0;
exports.add4H = add4H;
const add5L = (Al, Bl, Cl, Dl, El) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0) + (El >>> 0);
exports.add5L = add5L;
const add5H = (low, Ah, Bh, Ch, Dh, Eh) => (Ah + Bh + Ch + Dh + Eh + ((low / 2 ** 32) | 0)) | 0;
exports.add5H = add5H;
// prettier-ignore
const u64 = {
    fromBig, split, toBig,
    shrSH, shrSL,
    rotrSH, rotrSL, rotrBH, rotrBL,
    rotr32H, rotr32L,
    rotlSH, rotlSL, rotlBH, rotlBL,
    add, add3L, add3H, add4L, add4H, add5H, add5L,
};
exports.default = u64;
//# sourceMappingURL=_u64.js.map
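The module comment in _u64.js explains that each 64-bit value is represented as a pair of 32-bit integers (h, l). A minimal sketch of that representation, illustrative only and not part of the commit; the require path assumes the vendored file is loaded directly from the repository root:

// Illustrative only: round-trips a 64-bit BigInt through the (h, l) pair representation.
const u64 = require('./service-login-verify/node_modules/@noble/hashes/_u64.js');

const x = 0x0123456789abcdefn;
const { h, l } = u64.fromBig(x);      // h: high 32 bits, l: low 32 bits (both as signed 32-bit ints)
console.log(u64.toBig(h, l) === x);   // true: the (h, l) pair round-trips losslessly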
service-login-verify/node_modules/@noble/hashes/_u64.js.map (generated, vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
service-login-verify/node_modules/@noble/hashes/argon2.d.ts (generated, vendored, new file, 32 lines)
@@ -0,0 +1,32 @@
import { type KDFInput } from './utils.ts';
/**
 * Argon2 options.
 * * t: time cost, m: mem cost in kb, p: parallelization.
 * * key: optional key. personalization: arbitrary extra data.
 * * dkLen: desired number of output bytes.
 */
export type ArgonOpts = {
    t: number;
    m: number;
    p: number;
    version?: number;
    key?: KDFInput;
    personalization?: KDFInput;
    dkLen?: number;
    asyncTick?: number;
    maxmem?: number;
    onProgress?: (progress: number) => void;
};
/** argon2d GPU-resistant version. */
export declare const argon2d: (password: KDFInput, salt: KDFInput, opts: ArgonOpts) => Uint8Array;
/** argon2i side-channel-resistant version. */
export declare const argon2i: (password: KDFInput, salt: KDFInput, opts: ArgonOpts) => Uint8Array;
/** argon2id, combining i+d, the most popular version from RFC 9106 */
export declare const argon2id: (password: KDFInput, salt: KDFInput, opts: ArgonOpts) => Uint8Array;
/** argon2d async GPU-resistant version. */
export declare const argon2dAsync: (password: KDFInput, salt: KDFInput, opts: ArgonOpts) => Promise<Uint8Array>;
/** argon2i async side-channel-resistant version. */
export declare const argon2iAsync: (password: KDFInput, salt: KDFInput, opts: ArgonOpts) => Promise<Uint8Array>;
/** argon2id async, combining i+d, the most popular version from RFC 9106 */
export declare const argon2idAsync: (password: KDFInput, salt: KDFInput, opts: ArgonOpts) => Promise<Uint8Array>;
//# sourceMappingURL=argon2.d.ts.map
service-login-verify/node_modules/@noble/hashes/argon2.d.ts.map (generated, vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
service-login-verify/node_modules/@noble/hashes/argon2.js (generated, vendored, new file, 401 lines)
@@ -0,0 +1,401 @@
|
|||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.argon2idAsync = exports.argon2iAsync = exports.argon2dAsync = exports.argon2id = exports.argon2i = exports.argon2d = void 0;
|
||||||
|
/**
|
||||||
|
* Argon2 KDF from RFC 9106. Can be used to create a key from password and salt.
|
||||||
|
* We suggest to use Scrypt. JS Argon is 2-10x slower than native code because of 64-bitness:
|
||||||
|
* * argon uses uint64, but JS doesn't have fast uint64array
|
||||||
|
* * uint64 multiplication is 1/3 of time
|
||||||
|
* * `P` function would be very nice with u64, because most of value will be in registers,
|
||||||
|
* hovewer with u32 it will require 32 registers, which is too much.
|
||||||
|
* * JS arrays do slow bound checks, so reading from `A2_BUF` slows it down
|
||||||
|
* @module
|
||||||
|
*/
|
||||||
|
const _u64_ts_1 = require("./_u64.js");
|
||||||
|
const blake2_ts_1 = require("./blake2.js");
|
||||||
|
const utils_ts_1 = require("./utils.js");
|
||||||
|
const AT = { Argond2d: 0, Argon2i: 1, Argon2id: 2 };
|
||||||
|
const ARGON2_SYNC_POINTS = 4;
|
||||||
|
const abytesOrZero = (buf) => {
|
||||||
|
if (buf === undefined)
|
||||||
|
return Uint8Array.of();
|
||||||
|
return (0, utils_ts_1.kdfInputToBytes)(buf);
|
||||||
|
};
|
||||||
|
// u32 * u32 = u64
|
||||||
|
function mul(a, b) {
|
||||||
|
const aL = a & 0xffff;
|
||||||
|
const aH = a >>> 16;
|
||||||
|
const bL = b & 0xffff;
|
||||||
|
const bH = b >>> 16;
|
||||||
|
const ll = Math.imul(aL, bL);
|
||||||
|
const hl = Math.imul(aH, bL);
|
||||||
|
const lh = Math.imul(aL, bH);
|
||||||
|
const hh = Math.imul(aH, bH);
|
||||||
|
const carry = (ll >>> 16) + (hl & 0xffff) + lh;
|
||||||
|
const high = (hh + (hl >>> 16) + (carry >>> 16)) | 0;
|
||||||
|
const low = (carry << 16) | (ll & 0xffff);
|
||||||
|
return { h: high, l: low };
|
||||||
|
}
|
||||||
|
function mul2(a, b) {
|
||||||
|
// 2 * a * b (via shifts)
|
||||||
|
const { h, l } = mul(a, b);
|
||||||
|
return { h: ((h << 1) | (l >>> 31)) & 4294967295, l: (l << 1) & 4294967295 };
|
||||||
|
}
|
||||||
|
// BlaMka permutation for Argon2
|
||||||
|
// A + B + (2 * u32(A) * u32(B))
|
||||||
|
function blamka(Ah, Al, Bh, Bl) {
|
||||||
|
const { h: Ch, l: Cl } = mul2(Al, Bl);
|
||||||
|
// A + B + (2 * A * B)
|
||||||
|
const Rll = (0, _u64_ts_1.add3L)(Al, Bl, Cl);
|
||||||
|
return { h: (0, _u64_ts_1.add3H)(Rll, Ah, Bh, Ch), l: Rll | 0 };
|
||||||
|
}
|
||||||
|
// Temporary block buffer
|
||||||
|
const A2_BUF = new Uint32Array(256); // 1024 bytes (matrix 16x16)
|
||||||
|
function G(a, b, c, d) {
|
||||||
|
let Al = A2_BUF[2 * a], Ah = A2_BUF[2 * a + 1]; // prettier-ignore
|
||||||
|
let Bl = A2_BUF[2 * b], Bh = A2_BUF[2 * b + 1]; // prettier-ignore
|
||||||
|
let Cl = A2_BUF[2 * c], Ch = A2_BUF[2 * c + 1]; // prettier-ignore
|
||||||
|
let Dl = A2_BUF[2 * d], Dh = A2_BUF[2 * d + 1]; // prettier-ignore
|
||||||
|
({ h: Ah, l: Al } = blamka(Ah, Al, Bh, Bl));
|
||||||
|
({ Dh, Dl } = { Dh: Dh ^ Ah, Dl: Dl ^ Al });
|
||||||
|
({ Dh, Dl } = { Dh: (0, _u64_ts_1.rotr32H)(Dh, Dl), Dl: (0, _u64_ts_1.rotr32L)(Dh, Dl) });
|
||||||
|
({ h: Ch, l: Cl } = blamka(Ch, Cl, Dh, Dl));
|
||||||
|
({ Bh, Bl } = { Bh: Bh ^ Ch, Bl: Bl ^ Cl });
|
||||||
|
({ Bh, Bl } = { Bh: (0, _u64_ts_1.rotrSH)(Bh, Bl, 24), Bl: (0, _u64_ts_1.rotrSL)(Bh, Bl, 24) });
|
||||||
|
({ h: Ah, l: Al } = blamka(Ah, Al, Bh, Bl));
|
||||||
|
({ Dh, Dl } = { Dh: Dh ^ Ah, Dl: Dl ^ Al });
|
||||||
|
({ Dh, Dl } = { Dh: (0, _u64_ts_1.rotrSH)(Dh, Dl, 16), Dl: (0, _u64_ts_1.rotrSL)(Dh, Dl, 16) });
|
||||||
|
({ h: Ch, l: Cl } = blamka(Ch, Cl, Dh, Dl));
|
||||||
|
({ Bh, Bl } = { Bh: Bh ^ Ch, Bl: Bl ^ Cl });
|
||||||
|
({ Bh, Bl } = { Bh: (0, _u64_ts_1.rotrBH)(Bh, Bl, 63), Bl: (0, _u64_ts_1.rotrBL)(Bh, Bl, 63) });
|
||||||
|
(A2_BUF[2 * a] = Al), (A2_BUF[2 * a + 1] = Ah);
|
||||||
|
(A2_BUF[2 * b] = Bl), (A2_BUF[2 * b + 1] = Bh);
|
||||||
|
(A2_BUF[2 * c] = Cl), (A2_BUF[2 * c + 1] = Ch);
|
||||||
|
(A2_BUF[2 * d] = Dl), (A2_BUF[2 * d + 1] = Dh);
|
||||||
|
}
|
||||||
|
// prettier-ignore
|
||||||
|
function P(v00, v01, v02, v03, v04, v05, v06, v07, v08, v09, v10, v11, v12, v13, v14, v15) {
|
||||||
|
G(v00, v04, v08, v12);
|
||||||
|
G(v01, v05, v09, v13);
|
||||||
|
G(v02, v06, v10, v14);
|
||||||
|
G(v03, v07, v11, v15);
|
||||||
|
G(v00, v05, v10, v15);
|
||||||
|
G(v01, v06, v11, v12);
|
||||||
|
G(v02, v07, v08, v13);
|
||||||
|
G(v03, v04, v09, v14);
|
||||||
|
}
|
||||||
|
function block(x, xPos, yPos, outPos, needXor) {
|
||||||
|
for (let i = 0; i < 256; i++)
|
||||||
|
A2_BUF[i] = x[xPos + i] ^ x[yPos + i];
|
||||||
|
// columns (8)
|
||||||
|
for (let i = 0; i < 128; i += 16) {
|
||||||
|
// prettier-ignore
|
||||||
|
P(i, i + 1, i + 2, i + 3, i + 4, i + 5, i + 6, i + 7, i + 8, i + 9, i + 10, i + 11, i + 12, i + 13, i + 14, i + 15);
|
||||||
|
}
|
||||||
|
// rows (8)
|
||||||
|
for (let i = 0; i < 16; i += 2) {
|
||||||
|
// prettier-ignore
|
||||||
|
P(i, i + 1, i + 16, i + 17, i + 32, i + 33, i + 48, i + 49, i + 64, i + 65, i + 80, i + 81, i + 96, i + 97, i + 112, i + 113);
|
||||||
|
}
|
||||||
|
if (needXor)
|
||||||
|
for (let i = 0; i < 256; i++)
|
||||||
|
x[outPos + i] ^= A2_BUF[i] ^ x[xPos + i] ^ x[yPos + i];
|
||||||
|
else
|
||||||
|
for (let i = 0; i < 256; i++)
|
||||||
|
x[outPos + i] = A2_BUF[i] ^ x[xPos + i] ^ x[yPos + i];
|
||||||
|
(0, utils_ts_1.clean)(A2_BUF);
|
||||||
|
}
|
||||||
|
// Variable-Length Hash Function H'
|
||||||
|
function Hp(A, dkLen) {
|
||||||
|
const A8 = (0, utils_ts_1.u8)(A);
|
||||||
|
const T = new Uint32Array(1);
|
||||||
|
const T8 = (0, utils_ts_1.u8)(T);
|
||||||
|
T[0] = dkLen;
|
||||||
|
// Fast path
|
||||||
|
if (dkLen <= 64)
|
||||||
|
return blake2_ts_1.blake2b.create({ dkLen }).update(T8).update(A8).digest();
|
||||||
|
const out = new Uint8Array(dkLen);
|
||||||
|
let V = blake2_ts_1.blake2b.create({}).update(T8).update(A8).digest();
|
||||||
|
let pos = 0;
|
||||||
|
// First block
|
||||||
|
out.set(V.subarray(0, 32));
|
||||||
|
pos += 32;
|
||||||
|
// Rest blocks
|
||||||
|
for (; dkLen - pos > 64; pos += 32) {
|
||||||
|
const Vh = blake2_ts_1.blake2b.create({}).update(V);
|
||||||
|
Vh.digestInto(V);
|
||||||
|
Vh.destroy();
|
||||||
|
out.set(V.subarray(0, 32), pos);
|
||||||
|
}
|
||||||
|
// Last block
|
||||||
|
out.set((0, blake2_ts_1.blake2b)(V, { dkLen: dkLen - pos }), pos);
|
||||||
|
(0, utils_ts_1.clean)(V, T);
|
||||||
|
return (0, utils_ts_1.u32)(out);
|
||||||
|
}
|
||||||
|
// Used only inside process block!
|
||||||
|
function indexAlpha(r, s, laneLen, segmentLen, index, randL, sameLane = false) {
|
||||||
|
// This is ugly, but close enough to reference implementation.
|
||||||
|
let area;
|
||||||
|
if (r === 0) {
|
||||||
|
if (s === 0)
|
||||||
|
area = index - 1;
|
||||||
|
else if (sameLane)
|
||||||
|
area = s * segmentLen + index - 1;
|
||||||
|
else
|
||||||
|
area = s * segmentLen + (index == 0 ? -1 : 0);
|
||||||
|
}
|
||||||
|
else if (sameLane)
|
||||||
|
area = laneLen - segmentLen + index - 1;
|
||||||
|
else
|
||||||
|
area = laneLen - segmentLen + (index == 0 ? -1 : 0);
|
||||||
|
const startPos = r !== 0 && s !== ARGON2_SYNC_POINTS - 1 ? (s + 1) * segmentLen : 0;
|
||||||
|
const rel = area - 1 - mul(area, mul(randL, randL).h).h;
|
||||||
|
return (startPos + rel) % laneLen;
|
||||||
|
}
|
||||||
|
const maxUint32 = Math.pow(2, 32);
|
||||||
|
function isU32(num) {
|
||||||
|
return Number.isSafeInteger(num) && num >= 0 && num < maxUint32;
|
||||||
|
}
|
||||||
|
function argon2Opts(opts) {
|
||||||
|
const merged = {
|
||||||
|
version: 0x13,
|
||||||
|
dkLen: 32,
|
||||||
|
maxmem: maxUint32 - 1,
|
||||||
|
asyncTick: 10,
|
||||||
|
};
|
||||||
|
for (let [k, v] of Object.entries(opts))
|
||||||
|
if (v != null)
|
||||||
|
merged[k] = v;
|
||||||
|
const { dkLen, p, m, t, version, onProgress } = merged;
|
||||||
|
if (!isU32(dkLen) || dkLen < 4)
|
||||||
|
throw new Error('dkLen should be at least 4 bytes');
|
||||||
|
if (!isU32(p) || p < 1 || p >= Math.pow(2, 24))
|
||||||
|
throw new Error('p should be 1 <= p < 2^24');
|
||||||
|
if (!isU32(m))
|
||||||
|
throw new Error('m should be 0 <= m < 2^32');
|
||||||
|
if (!isU32(t) || t < 1)
|
||||||
|
throw new Error('t (iterations) should be 1 <= t < 2^32');
|
||||||
|
if (onProgress !== undefined && typeof onProgress !== 'function')
|
||||||
|
throw new Error('progressCb should be function');
|
||||||
|
/*
|
||||||
|
Memory size m MUST be an integer number of kibibytes from 8*p to 2^(32)-1. The actual number of blocks is m', which is m rounded down to the nearest multiple of 4*p.
|
||||||
|
*/
|
||||||
|
if (!isU32(m) || m < 8 * p)
|
||||||
|
throw new Error('memory should be at least 8*p bytes');
|
||||||
|
if (version !== 0x10 && version !== 0x13)
|
||||||
|
throw new Error('unknown version=' + version);
|
||||||
|
return merged;
|
||||||
|
}
|
||||||
|
function argon2Init(password, salt, type, opts) {
|
||||||
|
password = (0, utils_ts_1.kdfInputToBytes)(password);
|
||||||
|
salt = (0, utils_ts_1.kdfInputToBytes)(salt);
|
||||||
|
(0, utils_ts_1.abytes)(password);
|
||||||
|
(0, utils_ts_1.abytes)(salt);
|
||||||
|
if (!isU32(password.length))
|
||||||
|
throw new Error('password should be less than 4 GB');
|
||||||
|
if (!isU32(salt.length) || salt.length < 8)
|
||||||
|
throw new Error('salt should be at least 8 bytes and less than 4 GB');
|
||||||
|
if (!Object.values(AT).includes(type))
|
||||||
|
throw new Error('invalid type');
|
||||||
|
let { p, dkLen, m, t, version, key, personalization, maxmem, onProgress, asyncTick } = argon2Opts(opts);
|
||||||
|
// Validation
|
||||||
|
key = abytesOrZero(key);
|
||||||
|
personalization = abytesOrZero(personalization);
|
||||||
|
// H_0 = H^(64)(LE32(p) || LE32(T) || LE32(m) || LE32(t) ||
|
||||||
|
// LE32(v) || LE32(y) || LE32(length(P)) || P ||
|
||||||
|
// LE32(length(S)) || S || LE32(length(K)) || K ||
|
||||||
|
// LE32(length(X)) || X)
|
||||||
|
const h = blake2_ts_1.blake2b.create({});
|
||||||
|
const BUF = new Uint32Array(1);
|
||||||
|
const BUF8 = (0, utils_ts_1.u8)(BUF);
|
||||||
|
for (let item of [p, dkLen, m, t, version, type]) {
|
||||||
|
BUF[0] = item;
|
||||||
|
h.update(BUF8);
|
||||||
|
}
|
||||||
|
for (let i of [password, salt, key, personalization]) {
|
||||||
|
BUF[0] = i.length; // BUF is u32 array, this is valid
|
||||||
|
h.update(BUF8).update(i);
|
||||||
|
}
|
||||||
|
const H0 = new Uint32Array(18);
|
||||||
|
const H0_8 = (0, utils_ts_1.u8)(H0);
|
||||||
|
h.digestInto(H0_8);
|
||||||
|
// 256 u32 = 1024 (BLOCK_SIZE), fills A2_BUF on processing
|
||||||
|
// Params
|
||||||
|
const lanes = p;
|
||||||
|
// m' = 4 * p * floor (m / 4p)
|
||||||
|
const mP = 4 * p * Math.floor(m / (ARGON2_SYNC_POINTS * p));
|
||||||
|
//q = m' / p columns
|
||||||
|
const laneLen = Math.floor(mP / p);
|
||||||
|
const segmentLen = Math.floor(laneLen / ARGON2_SYNC_POINTS);
|
||||||
|
const memUsed = mP * 256;
|
||||||
|
if (!isU32(maxmem) || memUsed > maxmem)
|
||||||
|
throw new Error('mem should be less than 2**32, got: maxmem=' + maxmem + ', memused=' + memUsed);
|
||||||
|
const B = new Uint32Array(memUsed);
|
||||||
|
// Fill first blocks
|
||||||
|
for (let l = 0; l < p; l++) {
|
||||||
|
const i = 256 * laneLen * l;
|
||||||
|
// B[i][0] = H'^(1024)(H_0 || LE32(0) || LE32(i))
|
||||||
|
H0[17] = l;
|
||||||
|
H0[16] = 0;
|
||||||
|
B.set(Hp(H0, 1024), i);
|
||||||
|
// B[i][1] = H'^(1024)(H_0 || LE32(1) || LE32(i))
|
||||||
|
H0[16] = 1;
|
||||||
|
B.set(Hp(H0, 1024), i + 256);
|
||||||
|
}
|
||||||
|
let perBlock = () => { };
|
||||||
|
if (onProgress) {
|
||||||
|
const totalBlock = t * ARGON2_SYNC_POINTS * p * segmentLen;
|
||||||
|
// Invoke callback if progress changes from 10.01 to 10.02
|
||||||
|
// Allows to draw smooth progress bar on up to 8K screen
|
||||||
|
const callbackPer = Math.max(Math.floor(totalBlock / 10000), 1);
|
||||||
|
let blockCnt = 0;
|
||||||
|
perBlock = () => {
|
||||||
|
blockCnt++;
|
||||||
|
if (onProgress && (!(blockCnt % callbackPer) || blockCnt === totalBlock))
|
||||||
|
onProgress(blockCnt / totalBlock);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
(0, utils_ts_1.clean)(BUF, H0);
|
||||||
|
return { type, mP, p, t, version, B, laneLen, lanes, segmentLen, dkLen, perBlock, asyncTick };
|
||||||
|
}
|
||||||
|
function argon2Output(B, p, laneLen, dkLen) {
|
||||||
|
const B_final = new Uint32Array(256);
|
||||||
|
for (let l = 0; l < p; l++)
|
||||||
|
for (let j = 0; j < 256; j++)
|
||||||
|
B_final[j] ^= B[256 * (laneLen * l + laneLen - 1) + j];
|
||||||
|
const res = (0, utils_ts_1.u8)(Hp(B_final, dkLen));
|
||||||
|
(0, utils_ts_1.clean)(B_final);
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
function processBlock(B, address, l, r, s, index, laneLen, segmentLen, lanes, offset, prev, dataIndependent, needXor) {
|
||||||
|
if (offset % laneLen)
|
||||||
|
prev = offset - 1;
|
||||||
|
let randL, randH;
|
||||||
|
if (dataIndependent) {
|
||||||
|
let i128 = index % 128;
|
||||||
|
if (i128 === 0) {
|
||||||
|
address[256 + 12]++;
|
||||||
|
block(address, 256, 2 * 256, 0, false);
|
||||||
|
block(address, 0, 2 * 256, 0, false);
|
||||||
|
}
|
||||||
|
randL = address[2 * i128];
|
||||||
|
randH = address[2 * i128 + 1];
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
const T = 256 * prev;
|
||||||
|
randL = B[T];
|
||||||
|
randH = B[T + 1];
|
||||||
|
}
|
||||||
|
// address block
|
||||||
|
const refLane = r === 0 && s === 0 ? l : randH % lanes;
|
||||||
|
const refPos = indexAlpha(r, s, laneLen, segmentLen, index, randL, refLane == l);
|
||||||
|
const refBlock = laneLen * refLane + refPos;
|
||||||
|
// B[i][j] = G(B[i][j-1], B[l][z])
|
||||||
|
block(B, 256 * prev, 256 * refBlock, offset * 256, needXor);
|
||||||
|
}
|
||||||
|
function argon2(type, password, salt, opts) {
|
||||||
|
const { mP, p, t, version, B, laneLen, lanes, segmentLen, dkLen, perBlock } = argon2Init(password, salt, type, opts);
|
||||||
|
// Pre-loop setup
|
||||||
|
// [address, input, zero_block] format so we can pass single U32 to block function
|
||||||
|
const address = new Uint32Array(3 * 256);
|
||||||
|
address[256 + 6] = mP;
|
||||||
|
address[256 + 8] = t;
|
||||||
|
address[256 + 10] = type;
|
||||||
|
for (let r = 0; r < t; r++) {
|
||||||
|
const needXor = r !== 0 && version === 0x13;
|
||||||
|
address[256 + 0] = r;
|
||||||
|
for (let s = 0; s < ARGON2_SYNC_POINTS; s++) {
|
||||||
|
address[256 + 4] = s;
|
||||||
|
const dataIndependent = type == AT.Argon2i || (type == AT.Argon2id && r === 0 && s < 2);
|
||||||
|
for (let l = 0; l < p; l++) {
|
||||||
|
address[256 + 2] = l;
|
||||||
|
address[256 + 12] = 0;
|
||||||
|
let startPos = 0;
|
||||||
|
if (r === 0 && s === 0) {
|
||||||
|
startPos = 2;
|
||||||
|
if (dataIndependent) {
|
||||||
|
address[256 + 12]++;
|
||||||
|
block(address, 256, 2 * 256, 0, false);
|
||||||
|
block(address, 0, 2 * 256, 0, false);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// current block postion
|
||||||
|
let offset = l * laneLen + s * segmentLen + startPos;
|
||||||
|
// previous block position
|
||||||
|
let prev = offset % laneLen ? offset - 1 : offset + laneLen - 1;
|
||||||
|
for (let index = startPos; index < segmentLen; index++, offset++, prev++) {
|
||||||
|
perBlock();
|
||||||
|
processBlock(B, address, l, r, s, index, laneLen, segmentLen, lanes, offset, prev, dataIndependent, needXor);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(0, utils_ts_1.clean)(address);
|
||||||
|
return argon2Output(B, p, laneLen, dkLen);
|
||||||
|
}
|
||||||
|
/** argon2d GPU-resistant version. */
|
||||||
|
const argon2d = (password, salt, opts) => argon2(AT.Argond2d, password, salt, opts);
|
||||||
|
exports.argon2d = argon2d;
|
||||||
|
/** argon2i side-channel-resistant version. */
|
||||||
|
const argon2i = (password, salt, opts) => argon2(AT.Argon2i, password, salt, opts);
|
||||||
|
exports.argon2i = argon2i;
|
||||||
|
/** argon2id, combining i+d, the most popular version from RFC 9106 */
|
||||||
|
const argon2id = (password, salt, opts) => argon2(AT.Argon2id, password, salt, opts);
|
||||||
|
exports.argon2id = argon2id;
|
||||||
|
async function argon2Async(type, password, salt, opts) {
|
||||||
|
const { mP, p, t, version, B, laneLen, lanes, segmentLen, dkLen, perBlock, asyncTick } = argon2Init(password, salt, type, opts);
|
||||||
|
// Pre-loop setup
|
||||||
|
// [address, input, zero_block] format so we can pass single U32 to block function
|
||||||
|
const address = new Uint32Array(3 * 256);
|
||||||
|
address[256 + 6] = mP;
|
||||||
|
address[256 + 8] = t;
|
||||||
|
address[256 + 10] = type;
|
||||||
|
let ts = Date.now();
|
||||||
|
for (let r = 0; r < t; r++) {
|
||||||
|
const needXor = r !== 0 && version === 0x13;
|
||||||
|
address[256 + 0] = r;
|
||||||
|
for (let s = 0; s < ARGON2_SYNC_POINTS; s++) {
|
||||||
|
address[256 + 4] = s;
|
||||||
|
const dataIndependent = type == AT.Argon2i || (type == AT.Argon2id && r === 0 && s < 2);
|
||||||
|
for (let l = 0; l < p; l++) {
|
||||||
|
address[256 + 2] = l;
|
||||||
|
address[256 + 12] = 0;
|
||||||
|
let startPos = 0;
|
||||||
|
if (r === 0 && s === 0) {
|
||||||
|
startPos = 2;
|
||||||
|
if (dataIndependent) {
|
||||||
|
address[256 + 12]++;
|
||||||
|
block(address, 256, 2 * 256, 0, false);
|
||||||
|
block(address, 0, 2 * 256, 0, false);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// current block postion
|
||||||
|
let offset = l * laneLen + s * segmentLen + startPos;
|
||||||
|
// previous block position
|
||||||
|
let prev = offset % laneLen ? offset - 1 : offset + laneLen - 1;
|
||||||
|
for (let index = startPos; index < segmentLen; index++, offset++, prev++) {
|
||||||
|
perBlock();
|
||||||
|
processBlock(B, address, l, r, s, index, laneLen, segmentLen, lanes, offset, prev, dataIndependent, needXor);
|
||||||
|
// Date.now() is not monotonic, so in case if clock goes backwards we return return control too
|
||||||
|
const diff = Date.now() - ts;
|
||||||
|
if (!(diff >= 0 && diff < asyncTick)) {
|
||||||
|
await (0, utils_ts_1.nextTick)();
|
||||||
|
ts += diff;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(0, utils_ts_1.clean)(address);
|
||||||
|
return argon2Output(B, p, laneLen, dkLen);
|
||||||
|
}
|
||||||
|
/** argon2d async GPU-resistant version. */
|
||||||
|
const argon2dAsync = (password, salt, opts) => argon2Async(AT.Argond2d, password, salt, opts);
|
||||||
|
exports.argon2dAsync = argon2dAsync;
|
||||||
|
/** argon2i async side-channel-resistant version. */
|
||||||
|
const argon2iAsync = (password, salt, opts) => argon2Async(AT.Argon2i, password, salt, opts);
|
||||||
|
exports.argon2iAsync = argon2iAsync;
|
||||||
|
/** argon2id async, combining i+d, the most popular version from RFC 9106 */
|
||||||
|
const argon2idAsync = (password, salt, opts) => argon2Async(AT.Argon2id, password, salt, opts);
|
||||||
|
exports.argon2idAsync = argon2idAsync;
|
||||||
|
//# sourceMappingURL=argon2.js.map
|
||||||
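The header comment of argon2.js describes the Argon2 KDF and its cost parameters (t, m, p), and argon2Opts validates them. A hedged usage sketch matching the argon2id signature declared in argon2.d.ts above; the parameter values and the '@noble/hashes/argon2' and '@noble/hashes/utils' import paths are illustrative assumptions, not part of the commit:

// Illustrative only: derives a 32-byte key with argon2id, respecting the validation
// rules enforced by argon2Opts (t >= 1, m >= 8 * p, salt >= 8 bytes).
const { argon2id } = require('@noble/hashes/argon2');
const { utf8ToBytes, bytesToHex } = require('@noble/hashes/utils');

const key = argon2id(utf8ToBytes('correct horse battery staple'), utf8ToBytes('saltsalt'), {
    t: 3,        // time cost (iterations)
    m: 65536,    // memory cost in KiB (64 MiB)
    p: 4,        // parallelism lanes
    dkLen: 32,   // output length in bytes
});
console.log(bytesToHex(key));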
service-login-verify/node_modules/@noble/hashes/argon2.js.map (generated, vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
service-login-verify/node_modules/@noble/hashes/blake1.d.ts (generated, vendored, new file, 106 lines)
@@ -0,0 +1,106 @@
|
|||||||
|
import { Hash, type CHashO, type Input } from './utils.ts';
|
||||||
|
/** Blake1 options. Basically just "salt" */
|
||||||
|
export type BlakeOpts = {
|
||||||
|
salt?: Uint8Array;
|
||||||
|
};
|
||||||
|
declare abstract class BLAKE1<T extends BLAKE1<T>> extends Hash<T> {
|
||||||
|
protected finished: boolean;
|
||||||
|
protected length: number;
|
||||||
|
protected pos: number;
|
||||||
|
protected destroyed: boolean;
|
||||||
|
protected buffer: Uint8Array;
|
||||||
|
protected view: DataView;
|
||||||
|
protected salt: Uint32Array;
|
||||||
|
abstract compress(view: DataView, offset: number, withLength?: boolean): void;
|
||||||
|
protected abstract get(): number[];
|
||||||
|
protected abstract set(...args: number[]): void;
|
||||||
|
readonly blockLen: number;
|
||||||
|
readonly outputLen: number;
|
||||||
|
private lengthFlag;
|
||||||
|
private counterLen;
|
||||||
|
protected constants: Uint32Array;
|
||||||
|
constructor(blockLen: number, outputLen: number, lengthFlag: number, counterLen: number, saltLen: number, constants: Uint32Array, opts?: BlakeOpts);
|
||||||
|
update(data: Input): this;
|
||||||
|
destroy(): void;
|
||||||
|
_cloneInto(to?: T): T;
|
||||||
|
clone(): T;
|
||||||
|
digestInto(out: Uint8Array): void;
|
||||||
|
digest(): Uint8Array;
|
||||||
|
}
|
||||||
|
declare class Blake1_32 extends BLAKE1<Blake1_32> {
|
||||||
|
private v0;
|
||||||
|
private v1;
|
||||||
|
private v2;
|
||||||
|
private v3;
|
||||||
|
private v4;
|
||||||
|
private v5;
|
||||||
|
private v6;
|
||||||
|
private v7;
|
||||||
|
constructor(outputLen: number, IV: Uint32Array, lengthFlag: number, opts?: BlakeOpts);
|
||||||
|
protected get(): [number, number, number, number, number, number, number, number];
|
||||||
|
protected set(v0: number, v1: number, v2: number, v3: number, v4: number, v5: number, v6: number, v7: number): void;
|
||||||
|
destroy(): void;
|
||||||
|
compress(view: DataView, offset: number, withLength?: boolean): void;
|
||||||
|
}
|
||||||
|
declare class Blake1_64 extends BLAKE1<Blake1_64> {
|
||||||
|
private v0l;
|
||||||
|
private v0h;
|
||||||
|
private v1l;
|
||||||
|
private v1h;
|
||||||
|
private v2l;
|
||||||
|
private v2h;
|
||||||
|
private v3l;
|
||||||
|
private v3h;
|
||||||
|
private v4l;
|
||||||
|
private v4h;
|
||||||
|
private v5l;
|
||||||
|
private v5h;
|
||||||
|
private v6l;
|
||||||
|
private v6h;
|
||||||
|
private v7l;
|
||||||
|
private v7h;
|
||||||
|
constructor(outputLen: number, IV: Uint32Array, lengthFlag: number, opts?: BlakeOpts);
|
||||||
|
protected get(): [
|
||||||
|
number,
|
||||||
|
number,
|
||||||
|
number,
|
||||||
|
number,
|
||||||
|
number,
|
||||||
|
number,
|
||||||
|
number,
|
||||||
|
number,
|
||||||
|
number,
|
||||||
|
number,
|
||||||
|
number,
|
||||||
|
number,
|
||||||
|
number,
|
||||||
|
number,
|
||||||
|
number,
|
||||||
|
number
|
||||||
|
];
|
||||||
|
protected set(v0l: number, v0h: number, v1l: number, v1h: number, v2l: number, v2h: number, v3l: number, v3h: number, v4l: number, v4h: number, v5l: number, v5h: number, v6l: number, v6h: number, v7l: number, v7h: number): void;
|
||||||
|
destroy(): void;
|
||||||
|
compress(view: DataView, offset: number, withLength?: boolean): void;
|
||||||
|
}
|
||||||
|
export declare class BLAKE224 extends Blake1_32 {
|
||||||
|
constructor(opts?: BlakeOpts);
|
||||||
|
}
|
||||||
|
export declare class BLAKE256 extends Blake1_32 {
|
||||||
|
constructor(opts?: BlakeOpts);
|
||||||
|
}
|
||||||
|
export declare class BLAKE384 extends Blake1_64 {
|
||||||
|
constructor(opts?: BlakeOpts);
|
||||||
|
}
|
||||||
|
export declare class BLAKE512 extends Blake1_64 {
|
||||||
|
constructor(opts?: BlakeOpts);
|
||||||
|
}
|
||||||
|
/** blake1-224 hash function */
|
||||||
|
export declare const blake224: CHashO;
|
||||||
|
/** blake1-256 hash function */
|
||||||
|
export declare const blake256: CHashO;
|
||||||
|
/** blake1-384 hash function */
|
||||||
|
export declare const blake384: CHashO;
|
||||||
|
/** blake1-512 hash function */
|
||||||
|
export declare const blake512: CHashO;
|
||||||
|
export {};
|
||||||
|
//# sourceMappingURL=blake1.d.ts.map
|
||||||
service-login-verify/node_modules/@noble/hashes/blake1.d.ts.map (generated, vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
service-login-verify/node_modules/@noble/hashes/blake1.js (generated, vendored, new file, 459 lines)
@@ -0,0 +1,459 @@
|
|||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.blake512 = exports.blake384 = exports.blake256 = exports.blake224 = exports.BLAKE512 = exports.BLAKE384 = exports.BLAKE256 = exports.BLAKE224 = void 0;
|
||||||
|
/**
|
||||||
|
* Blake1 legacy hash function, one of SHA3 proposals.
|
||||||
|
* Rarely used. Check out blake2 or blake3 instead.
|
||||||
|
* https://www.aumasson.jp/blake/blake.pdf
|
||||||
|
*
|
||||||
|
* In the best case, there are 0 allocations.
|
||||||
|
*
|
||||||
|
* Differences from blake2:
|
||||||
|
*
|
||||||
|
* - BE instead of LE
|
||||||
|
* - Paddings, similar to MD5, RIPEMD, SHA1, SHA2, but:
|
||||||
|
* - length flag is located before actual length
|
||||||
|
* - padding block is compressed differently (no lengths)
|
||||||
|
* Instead of msg[sigma[k]], we have `msg[sigma[k]] ^ constants[sigma[k-1]]`
|
||||||
|
* (-1 for g1, g2 without -1)
|
||||||
|
* - Salt is XOR-ed into constants instead of state
|
||||||
|
* - Salt is XOR-ed with output in `compress`
|
||||||
|
* - Additional rows (+64 bytes) in SIGMA for new rounds
|
||||||
|
* - Different round count:
|
||||||
|
* - 14 / 10 rounds in blake256 / blake2s
|
||||||
|
* - 16 / 12 rounds in blake512 / blake2b
|
||||||
|
* - blake512: G1b: rotr 24 -> 25, G2b: rotr 63 -> 11
|
||||||
|
* @module
|
||||||
|
*/
|
||||||
|
const _blake_ts_1 = require("./_blake.js");
|
||||||
|
const _md_ts_1 = require("./_md.js");
|
||||||
|
const u64 = require("./_u64.js");
|
||||||
|
// prettier-ignore
|
||||||
|
const utils_ts_1 = require("./utils.js");
|
||||||
|
// Empty zero-filled salt
|
||||||
|
const EMPTY_SALT = /* @__PURE__ */ new Uint32Array(8);
|
||||||
|
class BLAKE1 extends utils_ts_1.Hash {
|
||||||
|
constructor(blockLen, outputLen, lengthFlag, counterLen, saltLen, constants, opts = {}) {
|
||||||
|
super();
|
||||||
|
this.finished = false;
|
||||||
|
this.length = 0;
|
||||||
|
this.pos = 0;
|
||||||
|
this.destroyed = false;
|
||||||
|
const { salt } = opts;
|
||||||
|
this.blockLen = blockLen;
|
||||||
|
this.outputLen = outputLen;
|
||||||
|
this.lengthFlag = lengthFlag;
|
||||||
|
this.counterLen = counterLen;
|
||||||
|
this.buffer = new Uint8Array(blockLen);
|
||||||
|
this.view = (0, utils_ts_1.createView)(this.buffer);
|
||||||
|
if (salt) {
|
||||||
|
let slt = salt;
|
||||||
|
slt = (0, utils_ts_1.toBytes)(slt);
|
||||||
|
(0, utils_ts_1.abytes)(slt);
|
||||||
|
if (slt.length !== 4 * saltLen)
|
||||||
|
throw new Error('wrong salt length');
|
||||||
|
const salt32 = (this.salt = new Uint32Array(saltLen));
|
||||||
|
const sv = (0, utils_ts_1.createView)(slt);
|
||||||
|
this.constants = constants.slice();
|
||||||
|
for (let i = 0, offset = 0; i < salt32.length; i++, offset += 4) {
|
||||||
|
salt32[i] = sv.getUint32(offset, false);
|
||||||
|
this.constants[i] ^= salt32[i];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this.salt = EMPTY_SALT;
|
||||||
|
this.constants = constants;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
update(data) {
|
||||||
|
(0, utils_ts_1.aexists)(this);
|
||||||
|
data = (0, utils_ts_1.toBytes)(data);
|
||||||
|
(0, utils_ts_1.abytes)(data);
|
||||||
|
// From _md, but update length before each compress
|
||||||
|
const { view, buffer, blockLen } = this;
|
||||||
|
const len = data.length;
|
||||||
|
let dataView;
|
||||||
|
for (let pos = 0; pos < len;) {
|
||||||
|
const take = Math.min(blockLen - this.pos, len - pos);
|
||||||
|
// Fast path: we have at least one block in input, cast it to view and process
|
||||||
|
if (take === blockLen) {
|
||||||
|
if (!dataView)
|
||||||
|
dataView = (0, utils_ts_1.createView)(data);
|
||||||
|
for (; blockLen <= len - pos; pos += blockLen) {
|
||||||
|
this.length += blockLen;
|
||||||
|
this.compress(dataView, pos);
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
buffer.set(data.subarray(pos, pos + take), this.pos);
|
||||||
|
this.pos += take;
|
||||||
|
pos += take;
|
||||||
|
if (this.pos === blockLen) {
|
||||||
|
this.length += blockLen;
|
||||||
|
this.compress(view, 0, true);
|
||||||
|
this.pos = 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
destroy() {
|
||||||
|
this.destroyed = true;
|
||||||
|
if (this.salt !== EMPTY_SALT) {
|
||||||
|
(0, utils_ts_1.clean)(this.salt, this.constants);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_cloneInto(to) {
|
||||||
|
to || (to = new this.constructor());
|
||||||
|
to.set(...this.get());
|
||||||
|
const { buffer, length, finished, destroyed, constants, salt, pos } = this;
|
||||||
|
to.buffer.set(buffer);
|
||||||
|
to.constants = constants.slice();
|
||||||
|
to.destroyed = destroyed;
|
||||||
|
to.finished = finished;
|
||||||
|
to.length = length;
|
||||||
|
to.pos = pos;
|
||||||
|
to.salt = salt.slice();
|
||||||
|
return to;
|
||||||
|
}
|
||||||
|
clone() {
|
||||||
|
return this._cloneInto();
|
||||||
|
}
|
||||||
|
digestInto(out) {
|
||||||
|
(0, utils_ts_1.aexists)(this);
|
||||||
|
(0, utils_ts_1.aoutput)(out, this);
|
||||||
|
this.finished = true;
|
||||||
|
// Padding
|
||||||
|
const { buffer, blockLen, counterLen, lengthFlag, view } = this;
|
||||||
|
(0, utils_ts_1.clean)(buffer.subarray(this.pos)); // clean buf
|
||||||
|
const counter = BigInt((this.length + this.pos) * 8);
|
||||||
|
const counterPos = blockLen - counterLen - 1;
|
||||||
|
buffer[this.pos] |= 128; // End block flag
|
||||||
|
this.length += this.pos; // add unwritten length
|
||||||
|
// Not enough in buffer for length: write what we have.
|
||||||
|
if (this.pos > counterPos) {
|
||||||
|
this.compress(view, 0);
|
||||||
|
(0, utils_ts_1.clean)(buffer);
|
||||||
|
this.pos = 0;
|
||||||
|
}
|
||||||
|
// Difference with md: here we have lengthFlag!
|
||||||
|
buffer[counterPos] |= lengthFlag; // Length flag
|
||||||
|
// We always set 8 byte length flag. Because length will overflow significantly sooner.
|
||||||
|
(0, _md_ts_1.setBigUint64)(view, blockLen - 8, counter, false);
|
||||||
|
this.compress(view, 0, this.pos !== 0); // don't add length if length is not empty block?
|
||||||
|
// Write output
|
||||||
|
(0, utils_ts_1.clean)(buffer);
|
||||||
|
const v = (0, utils_ts_1.createView)(out);
|
||||||
|
const state = this.get();
|
||||||
|
for (let i = 0; i < this.outputLen / 4; ++i)
|
||||||
|
v.setUint32(i * 4, state[i]);
|
||||||
|
}
|
||||||
|
digest() {
|
||||||
|
const { buffer, outputLen } = this;
|
||||||
|
this.digestInto(buffer);
|
||||||
|
const res = buffer.slice(0, outputLen);
|
||||||
|
this.destroy();
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Constants
|
||||||
|
const B64C = /* @__PURE__ */ Uint32Array.from([
|
||||||
|
0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89,
|
||||||
|
0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917,
|
||||||
|
0x9216d5d9, 0x8979fb1b, 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, 0xb8e1afed, 0x6a267e96,
|
||||||
|
0xba7c9045, 0xf12c7f99, 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, 0x636920d8, 0x71574e69,
|
||||||
|
]);
|
||||||
|
// first half of C512
|
||||||
|
const B32C = B64C.slice(0, 16);
|
||||||
|
const B256_IV = _md_ts_1.SHA256_IV.slice();
|
||||||
|
const B224_IV = _md_ts_1.SHA224_IV.slice();
|
||||||
|
const B384_IV = _md_ts_1.SHA384_IV.slice();
|
||||||
|
const B512_IV = _md_ts_1.SHA512_IV.slice();
|
||||||
|
function generateTBL256() {
|
||||||
|
const TBL = [];
|
||||||
|
for (let i = 0, j = 0; i < 14; i++, j += 16) {
|
||||||
|
for (let offset = 1; offset < 16; offset += 2) {
|
||||||
|
TBL.push(B32C[_blake_ts_1.BSIGMA[j + offset]]);
|
||||||
|
TBL.push(B32C[_blake_ts_1.BSIGMA[j + offset - 1]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return new Uint32Array(TBL);
|
||||||
|
}
|
||||||
|
const TBL256 = /* @__PURE__ */ generateTBL256(); // C256[SIGMA[X]] precompute
|
||||||
|
// Reusable temporary buffer
|
||||||
|
const BLAKE256_W = /* @__PURE__ */ new Uint32Array(16);
|
||||||
|
class Blake1_32 extends BLAKE1 {
|
||||||
|
constructor(outputLen, IV, lengthFlag, opts = {}) {
|
||||||
|
super(64, outputLen, lengthFlag, 8, 4, B32C, opts);
|
||||||
|
this.v0 = IV[0] | 0;
|
||||||
|
this.v1 = IV[1] | 0;
|
||||||
|
this.v2 = IV[2] | 0;
|
||||||
|
this.v3 = IV[3] | 0;
|
||||||
|
this.v4 = IV[4] | 0;
|
||||||
|
this.v5 = IV[5] | 0;
|
||||||
|
this.v6 = IV[6] | 0;
|
||||||
|
this.v7 = IV[7] | 0;
|
||||||
|
}
|
||||||
|
get() {
|
||||||
|
const { v0, v1, v2, v3, v4, v5, v6, v7 } = this;
|
||||||
|
return [v0, v1, v2, v3, v4, v5, v6, v7];
|
||||||
|
}
|
||||||
|
// prettier-ignore
|
||||||
|
set(v0, v1, v2, v3, v4, v5, v6, v7) {
|
||||||
|
this.v0 = v0 | 0;
|
||||||
|
this.v1 = v1 | 0;
|
||||||
|
this.v2 = v2 | 0;
|
||||||
|
this.v3 = v3 | 0;
|
||||||
|
this.v4 = v4 | 0;
|
||||||
|
this.v5 = v5 | 0;
|
||||||
|
this.v6 = v6 | 0;
|
||||||
|
this.v7 = v7 | 0;
|
||||||
|
}
|
||||||
|
destroy() {
|
||||||
|
super.destroy();
|
||||||
|
this.set(0, 0, 0, 0, 0, 0, 0, 0);
|
||||||
|
}
|
||||||
|
compress(view, offset, withLength = true) {
|
||||||
|
for (let i = 0; i < 16; i++, offset += 4)
|
||||||
|
BLAKE256_W[i] = view.getUint32(offset, false);
|
||||||
|
// NOTE: we cannot re-use compress from blake2s, since there is additional xor over u256[SIGMA[e]]
|
||||||
|
let v00 = this.v0 | 0;
|
||||||
|
let v01 = this.v1 | 0;
|
||||||
|
let v02 = this.v2 | 0;
|
||||||
|
let v03 = this.v3 | 0;
|
||||||
|
let v04 = this.v4 | 0;
|
||||||
|
let v05 = this.v5 | 0;
|
||||||
|
let v06 = this.v6 | 0;
|
||||||
|
let v07 = this.v7 | 0;
|
||||||
|
let v08 = this.constants[0] | 0;
|
||||||
|
let v09 = this.constants[1] | 0;
|
||||||
|
let v10 = this.constants[2] | 0;
|
||||||
|
let v11 = this.constants[3] | 0;
|
||||||
|
const { h, l } = u64.fromBig(BigInt(withLength ? this.length * 8 : 0));
|
||||||
|
let v12 = (this.constants[4] ^ l) >>> 0;
|
||||||
|
let v13 = (this.constants[5] ^ l) >>> 0;
|
||||||
|
let v14 = (this.constants[6] ^ h) >>> 0;
|
||||||
|
let v15 = (this.constants[7] ^ h) >>> 0;
|
||||||
|
// prettier-ignore
|
||||||
|
for (let i = 0, k = 0, j = 0; i < 14; i++) {
|
||||||
|
({ a: v00, b: v04, c: v08, d: v12 } = (0, _blake_ts_1.G1s)(v00, v04, v08, v12, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
({ a: v00, b: v04, c: v08, d: v12 } = (0, _blake_ts_1.G2s)(v00, v04, v08, v12, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
({ a: v01, b: v05, c: v09, d: v13 } = (0, _blake_ts_1.G1s)(v01, v05, v09, v13, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
({ a: v01, b: v05, c: v09, d: v13 } = (0, _blake_ts_1.G2s)(v01, v05, v09, v13, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
({ a: v02, b: v06, c: v10, d: v14 } = (0, _blake_ts_1.G1s)(v02, v06, v10, v14, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
({ a: v02, b: v06, c: v10, d: v14 } = (0, _blake_ts_1.G2s)(v02, v06, v10, v14, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
({ a: v03, b: v07, c: v11, d: v15 } = (0, _blake_ts_1.G1s)(v03, v07, v11, v15, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
({ a: v03, b: v07, c: v11, d: v15 } = (0, _blake_ts_1.G2s)(v03, v07, v11, v15, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
({ a: v00, b: v05, c: v10, d: v15 } = (0, _blake_ts_1.G1s)(v00, v05, v10, v15, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
({ a: v00, b: v05, c: v10, d: v15 } = (0, _blake_ts_1.G2s)(v00, v05, v10, v15, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
({ a: v01, b: v06, c: v11, d: v12 } = (0, _blake_ts_1.G1s)(v01, v06, v11, v12, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
({ a: v01, b: v06, c: v11, d: v12 } = (0, _blake_ts_1.G2s)(v01, v06, v11, v12, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
({ a: v02, b: v07, c: v08, d: v13 } = (0, _blake_ts_1.G1s)(v02, v07, v08, v13, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
({ a: v02, b: v07, c: v08, d: v13 } = (0, _blake_ts_1.G2s)(v02, v07, v08, v13, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
({ a: v03, b: v04, c: v09, d: v14 } = (0, _blake_ts_1.G1s)(v03, v04, v09, v14, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
({ a: v03, b: v04, c: v09, d: v14 } = (0, _blake_ts_1.G2s)(v03, v04, v09, v14, BLAKE256_W[_blake_ts_1.BSIGMA[k++]] ^ TBL256[j++]));
|
||||||
|
}
|
||||||
|
this.v0 = (this.v0 ^ v00 ^ v08 ^ this.salt[0]) >>> 0;
|
||||||
|
this.v1 = (this.v1 ^ v01 ^ v09 ^ this.salt[1]) >>> 0;
|
||||||
|
this.v2 = (this.v2 ^ v02 ^ v10 ^ this.salt[2]) >>> 0;
|
||||||
|
this.v3 = (this.v3 ^ v03 ^ v11 ^ this.salt[3]) >>> 0;
|
||||||
|
this.v4 = (this.v4 ^ v04 ^ v12 ^ this.salt[0]) >>> 0;
|
||||||
|
this.v5 = (this.v5 ^ v05 ^ v13 ^ this.salt[1]) >>> 0;
|
||||||
|
this.v6 = (this.v6 ^ v06 ^ v14 ^ this.salt[2]) >>> 0;
|
||||||
|
this.v7 = (this.v7 ^ v07 ^ v15 ^ this.salt[3]) >>> 0;
|
||||||
|
(0, utils_ts_1.clean)(BLAKE256_W);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const BBUF = /* @__PURE__ */ new Uint32Array(32);
|
||||||
|
const BLAKE512_W = /* @__PURE__ */ new Uint32Array(32);
|
||||||
|
function generateTBL512() {
|
||||||
|
const TBL = [];
|
||||||
|
for (let r = 0, k = 0; r < 16; r++, k += 16) {
|
||||||
|
for (let offset = 1; offset < 16; offset += 2) {
|
||||||
|
TBL.push(B64C[_blake_ts_1.BSIGMA[k + offset] * 2 + 0]);
|
||||||
|
TBL.push(B64C[_blake_ts_1.BSIGMA[k + offset] * 2 + 1]);
|
||||||
|
TBL.push(B64C[_blake_ts_1.BSIGMA[k + offset - 1] * 2 + 0]);
|
||||||
|
TBL.push(B64C[_blake_ts_1.BSIGMA[k + offset - 1] * 2 + 1]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return new Uint32Array(TBL);
|
||||||
|
}
|
||||||
|
const TBL512 = /* @__PURE__ */ generateTBL512(); // C512[SIGMA[X]] precompute
|
||||||
|
// Mixing function G splitted in two halfs
|
||||||
|
function G1b(a, b, c, d, msg, k) {
|
||||||
|
const Xpos = 2 * _blake_ts_1.BSIGMA[k];
|
||||||
|
const Xl = msg[Xpos + 1] ^ TBL512[k * 2 + 1], Xh = msg[Xpos] ^ TBL512[k * 2]; // prettier-ignore
|
||||||
|
let Al = BBUF[2 * a + 1], Ah = BBUF[2 * a]; // prettier-ignore
|
||||||
|
let Bl = BBUF[2 * b + 1], Bh = BBUF[2 * b]; // prettier-ignore
|
||||||
|
let Cl = BBUF[2 * c + 1], Ch = BBUF[2 * c]; // prettier-ignore
|
||||||
|
let Dl = BBUF[2 * d + 1], Dh = BBUF[2 * d]; // prettier-ignore
|
||||||
|
// v[a] = (v[a] + v[b] + x) | 0;
|
||||||
|
let ll = u64.add3L(Al, Bl, Xl);
|
||||||
|
Ah = u64.add3H(ll, Ah, Bh, Xh) >>> 0;
|
||||||
|
Al = (ll | 0) >>> 0;
|
||||||
|
// v[d] = rotr(v[d] ^ v[a], 32)
|
||||||
|
({ Dh, Dl } = { Dh: Dh ^ Ah, Dl: Dl ^ Al });
|
||||||
|
({ Dh, Dl } = { Dh: u64.rotr32H(Dh, Dl), Dl: u64.rotr32L(Dh, Dl) });
|
||||||
|
// v[c] = (v[c] + v[d]) | 0;
|
||||||
|
({ h: Ch, l: Cl } = u64.add(Ch, Cl, Dh, Dl));
|
||||||
|
// v[b] = rotr(v[b] ^ v[c], 25)
|
||||||
|
({ Bh, Bl } = { Bh: Bh ^ Ch, Bl: Bl ^ Cl });
|
||||||
|
({ Bh, Bl } = { Bh: u64.rotrSH(Bh, Bl, 25), Bl: u64.rotrSL(Bh, Bl, 25) });
|
||||||
|
(BBUF[2 * a + 1] = Al), (BBUF[2 * a] = Ah);
|
||||||
|
(BBUF[2 * b + 1] = Bl), (BBUF[2 * b] = Bh);
|
||||||
|
(BBUF[2 * c + 1] = Cl), (BBUF[2 * c] = Ch);
|
||||||
|
(BBUF[2 * d + 1] = Dl), (BBUF[2 * d] = Dh);
|
||||||
|
}
|
||||||
|
function G2b(a, b, c, d, msg, k) {
|
||||||
|
const Xpos = 2 * _blake_ts_1.BSIGMA[k];
|
||||||
|
const Xl = msg[Xpos + 1] ^ TBL512[k * 2 + 1], Xh = msg[Xpos] ^ TBL512[k * 2]; // prettier-ignore
|
||||||
|
let Al = BBUF[2 * a + 1], Ah = BBUF[2 * a]; // prettier-ignore
|
||||||
|
let Bl = BBUF[2 * b + 1], Bh = BBUF[2 * b]; // prettier-ignore
|
||||||
|
let Cl = BBUF[2 * c + 1], Ch = BBUF[2 * c]; // prettier-ignore
|
||||||
|
let Dl = BBUF[2 * d + 1], Dh = BBUF[2 * d]; // prettier-ignore
|
||||||
|
// v[a] = (v[a] + v[b] + x) | 0;
|
||||||
|
let ll = u64.add3L(Al, Bl, Xl);
|
||||||
|
Ah = u64.add3H(ll, Ah, Bh, Xh);
|
||||||
|
Al = ll | 0;
|
||||||
|
// v[d] = rotr(v[d] ^ v[a], 16)
|
||||||
|
({ Dh, Dl } = { Dh: Dh ^ Ah, Dl: Dl ^ Al });
|
||||||
|
({ Dh, Dl } = { Dh: u64.rotrSH(Dh, Dl, 16), Dl: u64.rotrSL(Dh, Dl, 16) });
|
||||||
|
// v[c] = (v[c] + v[d]) | 0;
|
||||||
|
({ h: Ch, l: Cl } = u64.add(Ch, Cl, Dh, Dl));
|
||||||
|
// v[b] = rotr(v[b] ^ v[c], 11)
|
||||||
|
({ Bh, Bl } = { Bh: Bh ^ Ch, Bl: Bl ^ Cl });
|
||||||
|
({ Bh, Bl } = { Bh: u64.rotrSH(Bh, Bl, 11), Bl: u64.rotrSL(Bh, Bl, 11) });
|
||||||
|
(BBUF[2 * a + 1] = Al), (BBUF[2 * a] = Ah);
|
||||||
|
(BBUF[2 * b + 1] = Bl), (BBUF[2 * b] = Bh);
|
||||||
|
(BBUF[2 * c + 1] = Cl), (BBUF[2 * c] = Ch);
|
||||||
|
(BBUF[2 * d + 1] = Dl), (BBUF[2 * d] = Dh);
|
||||||
|
}
|
||||||
|
class Blake1_64 extends BLAKE1 {
|
||||||
|
constructor(outputLen, IV, lengthFlag, opts = {}) {
|
||||||
|
super(128, outputLen, lengthFlag, 16, 8, B64C, opts);
|
||||||
|
this.v0l = IV[0] | 0;
|
||||||
|
this.v0h = IV[1] | 0;
|
||||||
|
this.v1l = IV[2] | 0;
|
||||||
|
this.v1h = IV[3] | 0;
|
||||||
|
this.v2l = IV[4] | 0;
|
||||||
|
this.v2h = IV[5] | 0;
|
||||||
|
this.v3l = IV[6] | 0;
|
||||||
|
this.v3h = IV[7] | 0;
|
||||||
|
this.v4l = IV[8] | 0;
|
||||||
|
this.v4h = IV[9] | 0;
|
||||||
|
this.v5l = IV[10] | 0;
|
||||||
|
this.v5h = IV[11] | 0;
|
||||||
|
this.v6l = IV[12] | 0;
|
||||||
|
this.v6h = IV[13] | 0;
|
||||||
|
this.v7l = IV[14] | 0;
|
||||||
|
this.v7h = IV[15] | 0;
|
||||||
|
}
|
||||||
|
// prettier-ignore
|
||||||
|
get() {
|
||||||
|
let { v0l, v0h, v1l, v1h, v2l, v2h, v3l, v3h, v4l, v4h, v5l, v5h, v6l, v6h, v7l, v7h } = this;
|
||||||
|
return [v0l, v0h, v1l, v1h, v2l, v2h, v3l, v3h, v4l, v4h, v5l, v5h, v6l, v6h, v7l, v7h];
|
||||||
|
}
|
||||||
|
// prettier-ignore
|
||||||
|
set(v0l, v0h, v1l, v1h, v2l, v2h, v3l, v3h, v4l, v4h, v5l, v5h, v6l, v6h, v7l, v7h) {
|
||||||
|
this.v0l = v0l | 0;
|
||||||
|
this.v0h = v0h | 0;
|
||||||
|
this.v1l = v1l | 0;
|
||||||
|
this.v1h = v1h | 0;
|
||||||
|
this.v2l = v2l | 0;
|
||||||
|
this.v2h = v2h | 0;
|
||||||
|
this.v3l = v3l | 0;
|
||||||
|
this.v3h = v3h | 0;
|
||||||
|
this.v4l = v4l | 0;
|
||||||
|
this.v4h = v4h | 0;
|
||||||
|
this.v5l = v5l | 0;
|
||||||
|
this.v5h = v5h | 0;
|
||||||
|
this.v6l = v6l | 0;
|
||||||
|
this.v6h = v6h | 0;
|
||||||
|
this.v7l = v7l | 0;
|
||||||
|
this.v7h = v7h | 0;
|
||||||
|
}
|
||||||
|
destroy() {
|
||||||
|
super.destroy();
|
||||||
|
this.set(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
|
||||||
|
}
|
||||||
|
compress(view, offset, withLength = true) {
|
||||||
|
for (let i = 0; i < 32; i++, offset += 4)
|
||||||
|
BLAKE512_W[i] = view.getUint32(offset, false);
|
||||||
|
this.get().forEach((v, i) => (BBUF[i] = v)); // First half from state.
|
||||||
|
BBUF.set(this.constants.subarray(0, 16), 16);
|
||||||
|
if (withLength) {
|
||||||
|
const { h, l } = u64.fromBig(BigInt(this.length * 8));
|
||||||
|
BBUF[24] = (BBUF[24] ^ h) >>> 0;
|
||||||
|
BBUF[25] = (BBUF[25] ^ l) >>> 0;
|
||||||
|
BBUF[26] = (BBUF[26] ^ h) >>> 0;
|
||||||
|
BBUF[27] = (BBUF[27] ^ l) >>> 0;
|
||||||
|
}
|
||||||
|
for (let i = 0, k = 0; i < 16; i++) {
|
||||||
|
G1b(0, 4, 8, 12, BLAKE512_W, k++);
|
||||||
|
G2b(0, 4, 8, 12, BLAKE512_W, k++);
|
||||||
|
G1b(1, 5, 9, 13, BLAKE512_W, k++);
|
||||||
|
G2b(1, 5, 9, 13, BLAKE512_W, k++);
|
||||||
|
G1b(2, 6, 10, 14, BLAKE512_W, k++);
|
||||||
|
G2b(2, 6, 10, 14, BLAKE512_W, k++);
|
||||||
|
G1b(3, 7, 11, 15, BLAKE512_W, k++);
|
||||||
|
G2b(3, 7, 11, 15, BLAKE512_W, k++);
|
||||||
|
G1b(0, 5, 10, 15, BLAKE512_W, k++);
|
||||||
|
G2b(0, 5, 10, 15, BLAKE512_W, k++);
|
||||||
|
G1b(1, 6, 11, 12, BLAKE512_W, k++);
|
||||||
|
G2b(1, 6, 11, 12, BLAKE512_W, k++);
|
||||||
|
G1b(2, 7, 8, 13, BLAKE512_W, k++);
|
||||||
|
G2b(2, 7, 8, 13, BLAKE512_W, k++);
|
||||||
|
G1b(3, 4, 9, 14, BLAKE512_W, k++);
|
||||||
|
G2b(3, 4, 9, 14, BLAKE512_W, k++);
|
||||||
|
}
|
||||||
|
this.v0l ^= BBUF[0] ^ BBUF[16] ^ this.salt[0];
|
||||||
|
this.v0h ^= BBUF[1] ^ BBUF[17] ^ this.salt[1];
|
||||||
|
this.v1l ^= BBUF[2] ^ BBUF[18] ^ this.salt[2];
|
||||||
|
this.v1h ^= BBUF[3] ^ BBUF[19] ^ this.salt[3];
|
||||||
|
this.v2l ^= BBUF[4] ^ BBUF[20] ^ this.salt[4];
|
||||||
|
this.v2h ^= BBUF[5] ^ BBUF[21] ^ this.salt[5];
|
||||||
|
this.v3l ^= BBUF[6] ^ BBUF[22] ^ this.salt[6];
|
||||||
|
this.v3h ^= BBUF[7] ^ BBUF[23] ^ this.salt[7];
|
||||||
|
this.v4l ^= BBUF[8] ^ BBUF[24] ^ this.salt[0];
|
||||||
|
this.v4h ^= BBUF[9] ^ BBUF[25] ^ this.salt[1];
|
||||||
|
this.v5l ^= BBUF[10] ^ BBUF[26] ^ this.salt[2];
|
||||||
|
this.v5h ^= BBUF[11] ^ BBUF[27] ^ this.salt[3];
|
||||||
|
this.v6l ^= BBUF[12] ^ BBUF[28] ^ this.salt[4];
|
||||||
|
this.v6h ^= BBUF[13] ^ BBUF[29] ^ this.salt[5];
|
||||||
|
this.v7l ^= BBUF[14] ^ BBUF[30] ^ this.salt[6];
|
||||||
|
this.v7h ^= BBUF[15] ^ BBUF[31] ^ this.salt[7];
|
||||||
|
(0, utils_ts_1.clean)(BBUF, BLAKE512_W);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
class BLAKE224 extends Blake1_32 {
|
||||||
|
constructor(opts = {}) {
|
||||||
|
super(28, B224_IV, 0, opts);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.BLAKE224 = BLAKE224;
|
||||||
|
class BLAKE256 extends Blake1_32 {
|
||||||
|
constructor(opts = {}) {
|
||||||
|
super(32, B256_IV, 1, opts);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.BLAKE256 = BLAKE256;
|
||||||
|
class BLAKE384 extends Blake1_64 {
|
||||||
|
constructor(opts = {}) {
|
||||||
|
super(48, B384_IV, 0, opts);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.BLAKE384 = BLAKE384;
|
||||||
|
class BLAKE512 extends Blake1_64 {
|
||||||
|
constructor(opts = {}) {
|
||||||
|
super(64, B512_IV, 1, opts);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.BLAKE512 = BLAKE512;
|
||||||
|
/** blake1-224 hash function */
|
||||||
|
exports.blake224 = (0, utils_ts_1.createOptHasher)((opts) => new BLAKE224(opts));
|
||||||
|
/** blake1-256 hash function */
|
||||||
|
exports.blake256 = (0, utils_ts_1.createOptHasher)((opts) => new BLAKE256(opts));
|
||||||
|
/** blake1-384 hash function */
|
||||||
|
exports.blake384 = (0, utils_ts_1.createOptHasher)((opts) => new BLAKE384(opts));
|
||||||
|
/** blake1-512 hash function */
|
||||||
|
exports.blake512 = (0, utils_ts_1.createOptHasher)((opts) => new BLAKE512(opts));
|
||||||
|
//# sourceMappingURL=blake1.js.map
1 service-login-verify/node_modules/@noble/hashes/blake1.js.map (generated, vendored, Normal file)
File diff suppressed because one or more lines are too long
116 service-login-verify/node_modules/@noble/hashes/blake2.d.ts (generated, vendored, Normal file)
@@ -0,0 +1,116 @@
import { Hash, type CHashO, type Input } from './utils.ts';
/** Blake hash options. dkLen is output length. key is used in MAC mode. salt is used in KDF mode. */
export type Blake2Opts = {
    dkLen?: number;
    key?: Input;
    salt?: Input;
    personalization?: Input;
};
/** Class, from which others are subclassed. */
export declare abstract class BLAKE2<T extends BLAKE2<T>> extends Hash<T> {
    protected abstract compress(msg: Uint32Array, offset: number, isLast: boolean): void;
    protected abstract get(): number[];
    protected abstract set(...args: number[]): void;
    abstract destroy(): void;
    protected buffer: Uint8Array;
    protected buffer32: Uint32Array;
    protected finished: boolean;
    protected destroyed: boolean;
    protected length: number;
    protected pos: number;
    readonly blockLen: number;
    readonly outputLen: number;
    constructor(blockLen: number, outputLen: number);
    update(data: Input): this;
    digestInto(out: Uint8Array): void;
    digest(): Uint8Array;
    _cloneInto(to?: T): T;
    clone(): T;
}
export declare class BLAKE2b extends BLAKE2<BLAKE2b> {
    private v0l;
    private v0h;
    private v1l;
    private v1h;
    private v2l;
    private v2h;
    private v3l;
    private v3h;
    private v4l;
    private v4h;
    private v5l;
    private v5h;
    private v6l;
    private v6h;
    private v7l;
    private v7h;
    constructor(opts?: Blake2Opts);
    protected get(): [
        number,
        number,
        number,
        number,
        number,
        number,
        number,
        number,
        number,
        number,
        number,
        number,
        number,
        number,
        number,
        number
    ];
    protected set(v0l: number, v0h: number, v1l: number, v1h: number, v2l: number, v2h: number, v3l: number, v3h: number, v4l: number, v4h: number, v5l: number, v5h: number, v6l: number, v6h: number, v7l: number, v7h: number): void;
    protected compress(msg: Uint32Array, offset: number, isLast: boolean): void;
    destroy(): void;
}
/**
 * Blake2b hash function. 64-bit. 1.5x slower than blake2s in JS.
 * @param msg - message that would be hashed
 * @param opts - dkLen output length, key for MAC mode, salt, personalization
 */
export declare const blake2b: CHashO;
export type Num16 = {
    v0: number;
    v1: number;
    v2: number;
    v3: number;
    v4: number;
    v5: number;
    v6: number;
    v7: number;
    v8: number;
    v9: number;
    v10: number;
    v11: number;
    v12: number;
    v13: number;
    v14: number;
    v15: number;
};
export declare function compress(s: Uint8Array, offset: number, msg: Uint32Array, rounds: number, v0: number, v1: number, v2: number, v3: number, v4: number, v5: number, v6: number, v7: number, v8: number, v9: number, v10: number, v11: number, v12: number, v13: number, v14: number, v15: number): Num16;
export declare class BLAKE2s extends BLAKE2<BLAKE2s> {
    private v0;
    private v1;
    private v2;
    private v3;
    private v4;
    private v5;
    private v6;
    private v7;
    constructor(opts?: Blake2Opts);
    protected get(): [number, number, number, number, number, number, number, number];
    protected set(v0: number, v1: number, v2: number, v3: number, v4: number, v5: number, v6: number, v7: number): void;
    protected compress(msg: Uint32Array, offset: number, isLast: boolean): void;
    destroy(): void;
}
/**
 * Blake2s hash function. Focuses on 8-bit to 32-bit platforms. 1.5x faster than blake2b in JS.
 * @param msg - message that would be hashed
 * @param opts - dkLen output length, key for MAC mode, salt, personalization
 */
export declare const blake2s: CHashO;
//# sourceMappingURL=blake2.d.ts.map
1 service-login-verify/node_modules/@noble/hashes/blake2.d.ts.map (generated, vendored, Normal file)
@@ -0,0 +1 @@
{"version":3,"file":"blake2.d.ts","sourceRoot":"","sources":["src/blake2.ts"],"names":[],"mappings":"AASA,OAAO,EAEmB,IAAI,EAC5B,KAAK,MAAM,EAAE,KAAK,KAAK,EACxB,MAAM,YAAY,CAAC;AAEpB,qGAAqG;AACrG,MAAM,MAAM,UAAU,GAAG;IACvB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,GAAG,CAAC,EAAE,KAAK,CAAC;IACZ,IAAI,CAAC,EAAE,KAAK,CAAC;IACb,eAAe,CAAC,EAAE,KAAK,CAAC;CACzB,CAAC;AA+EF,+CAA+C;AAC/C,8BAAsB,MAAM,CAAC,CAAC,SAAS,MAAM,CAAC,CAAC,CAAC,CAAE,SAAQ,IAAI,CAAC,CAAC,CAAC;IAC/D,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,GAAG,EAAE,WAAW,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,GAAG,IAAI;IACpF,SAAS,CAAC,QAAQ,CAAC,GAAG,IAAI,MAAM,EAAE;IAClC,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,IAAI,EAAE,MAAM,EAAE,GAAG,IAAI;IAC/C,QAAQ,CAAC,OAAO,IAAI,IAAI;IACxB,SAAS,CAAC,MAAM,EAAE,UAAU,CAAC;IAC7B,SAAS,CAAC,QAAQ,EAAE,WAAW,CAAC;IAChC,SAAS,CAAC,QAAQ,UAAS;IAC3B,SAAS,CAAC,SAAS,UAAS;IAC5B,SAAS,CAAC,MAAM,EAAE,MAAM,CAAK;IAC7B,SAAS,CAAC,GAAG,EAAE,MAAM,CAAK;IAC1B,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;gBAEf,QAAQ,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM;IAS/C,MAAM,CAAC,IAAI,EAAE,KAAK,GAAG,IAAI;IAwCzB,UAAU,CAAC,GAAG,EAAE,UAAU,GAAG,IAAI;IAajC,MAAM,IAAI,UAAU;IAOpB,UAAU,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC;IAarB,KAAK,IAAI,CAAC;CAGX;AAED,qBAAa,OAAQ,SAAQ,MAAM,CAAC,OAAO,CAAC;IAE1C,OAAO,CAAC,GAAG,CAAiB;IAC5B,OAAO,CAAC,GAAG,CAAiB;IAC5B,OAAO,CAAC,GAAG,CAAiB;IAC5B,OAAO,CAAC,GAAG,CAAiB;IAC5B,OAAO,CAAC,GAAG,CAAiB;IAC5B,OAAO,CAAC,GAAG,CAAiB;IAC5B,OAAO,CAAC,GAAG,CAAiB;IAC5B,OAAO,CAAC,GAAG,CAAiB;IAC5B,OAAO,CAAC,GAAG,CAAiB;IAC5B,OAAO,CAAC,GAAG,CAAiB;IAC5B,OAAO,CAAC,GAAG,CAAkB;IAC7B,OAAO,CAAC,GAAG,CAAkB;IAC7B,OAAO,CAAC,GAAG,CAAkB;IAC7B,OAAO,CAAC,GAAG,CAAkB;IAC7B,OAAO,CAAC,GAAG,CAAkB;IAC7B,OAAO,CAAC,GAAG,CAAkB;gBAEjB,IAAI,GAAE,UAAe;IAmCjC,SAAS,CAAC,GAAG,IAAI;QACf,MAAM;QAAE,MAAM;QAAE,MAAM;QAAE,MAAM;QAAE,MAAM;QAAE,MAAM;QAAE,MAAM;QAAE,MAAM;QAC9D,MAAM;QAAE,MAAM;QAAE,MAAM;QAAE,MAAM;QAAE,MAAM;QAAE,MAAM;QAAE,MAAM;QAAE,MAAM;KAC/D;IAKD,SAAS,CAAC,GAAG,CACX,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAClD,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAClD,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAClD,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GACjD,IAAI;IAkBP,SAAS,CAAC,QAAQ,CAAC,GAAG,EAAE,WAAW,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,GAAG,IAAI;IAkD3E,OAAO,IAAI,IAAI;CAKhB;AAED;;;;GAIG;AACH,eAAO,MAAM,OAAO,EAAE,MAErB,CAAC;AAOF,MAAM,MAAM,KAAK,GAAG;IAClB,EAAE,EAAE,MAAM,CAAC;IAAC,EAAE,EAAE,MAAM,CAAC;IAAC,EAAE,EAAE,MAAM,CAAC;IAAC,EAAE,EAAE,MAAM,CAAC;IAC/C,EAAE,EAAE,MAAM,CAAC;IAAC,EAAE,EAAE,MAAM,CAAC;IAAC,EAAE,EAAE,MAAM,CAAC;IAAC,EAAE,EAAE,MAAM,CAAC;IAC/C,EAAE,EAAE,MAAM,CAAC;IAAC,EAAE,EAAE,MAAM,CAAC;IAAC,GAAG,EAAE,MAAM,CAAC;IAAC,GAAG,EAAE,MAAM,CAAC;IACjD,GAAG,EAAE,MAAM,CAAC;IAAC,GAAG,EAAE,MAAM,CAAC;IAAC,GAAG,EAAE,MAAM,CAAC;IAAC,GAAG,EAAE,MAAM,CAAC;CACpD,CAAC;AAGF,wBAAgB,QAAQ,CAAC,CAAC,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,EAAE,WAAW,EAAE,MAAM,EAAE,MAAM,EACtF,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAC9F,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GACnG,KAAK,CAsBP;AAGD,qBAAa,OAAQ,SAAQ,MAAM,CAAC,OAAO,CAAC;IAE1C,OAAO,CAAC,EAAE,CAAiB;IAC3B,OAAO,CAAC,EAAE,CAAiB;IAC3B,OAAO,CAAC,EAAE,CAAiB;IAC3B,OAAO,CAAC,EAAE,CAAiB;IAC3B,OAAO,CAAC,EAAE,CAAiB;IAC3B,OAAO,CAAC,EAAE,CAAiB;IAC3B,OAAO,CAAC,EAAE,CAAiB;IAC3B,OAAO,CAAC,EAAE,CAAiB;gBAEf,IAAI,GAAE,UAAe;IA+BjC,SAAS,CAA
C,GAAG,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC;IAKjF,SAAS,CAAC,GAAG,CACX,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,GAC7F,IAAI;IAUP,SAAS,CAAC,QAAQ,CAAC,GAAG,EAAE,WAAW,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,GAAG,IAAI;IAkB3E,OAAO,IAAI,IAAI;CAKhB;AAED;;;;GAIG;AACH,eAAO,MAAM,OAAO,EAAE,MAErB,CAAC"}
420 service-login-verify/node_modules/@noble/hashes/blake2.js (generated, vendored, Normal file)
@@ -0,0 +1,420 @@
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.blake2s = exports.BLAKE2s = exports.blake2b = exports.BLAKE2b = exports.BLAKE2 = void 0;
|
||||||
|
exports.compress = compress;
|
||||||
|
/**
|
||||||
|
* blake2b (64-bit) & blake2s (8 to 32-bit) hash functions.
|
||||||
|
* b could have been faster, but there is no fast u64 in js, so s is 1.5x faster.
|
||||||
|
* @module
|
||||||
|
*/
|
||||||
|
const _blake_ts_1 = require("./_blake.js");
|
||||||
|
const _md_ts_1 = require("./_md.js");
|
||||||
|
const u64 = require("./_u64.js");
|
||||||
|
// prettier-ignore
|
||||||
|
const utils_ts_1 = require("./utils.js");
|
||||||
|
// Same as SHA512_IV, but swapped endianness: LE instead of BE. iv[1] is iv[0], etc.
|
||||||
|
const B2B_IV = /* @__PURE__ */ Uint32Array.from([
|
||||||
|
0xf3bcc908, 0x6a09e667, 0x84caa73b, 0xbb67ae85, 0xfe94f82b, 0x3c6ef372, 0x5f1d36f1, 0xa54ff53a,
|
||||||
|
0xade682d1, 0x510e527f, 0x2b3e6c1f, 0x9b05688c, 0xfb41bd6b, 0x1f83d9ab, 0x137e2179, 0x5be0cd19,
|
||||||
|
]);
|
||||||
|
// Temporary buffer
|
||||||
|
const BBUF = /* @__PURE__ */ new Uint32Array(32);
|
||||||
|
// Mixing function G splitted in two halfs
|
||||||
|
function G1b(a, b, c, d, msg, x) {
|
||||||
|
// NOTE: V is LE here
|
||||||
|
const Xl = msg[x], Xh = msg[x + 1]; // prettier-ignore
|
||||||
|
let Al = BBUF[2 * a], Ah = BBUF[2 * a + 1]; // prettier-ignore
|
||||||
|
let Bl = BBUF[2 * b], Bh = BBUF[2 * b + 1]; // prettier-ignore
|
||||||
|
let Cl = BBUF[2 * c], Ch = BBUF[2 * c + 1]; // prettier-ignore
|
||||||
|
let Dl = BBUF[2 * d], Dh = BBUF[2 * d + 1]; // prettier-ignore
|
||||||
|
// v[a] = (v[a] + v[b] + x) | 0;
|
||||||
|
let ll = u64.add3L(Al, Bl, Xl);
|
||||||
|
Ah = u64.add3H(ll, Ah, Bh, Xh);
|
||||||
|
Al = ll | 0;
|
||||||
|
// v[d] = rotr(v[d] ^ v[a], 32)
|
||||||
|
({ Dh, Dl } = { Dh: Dh ^ Ah, Dl: Dl ^ Al });
|
||||||
|
({ Dh, Dl } = { Dh: u64.rotr32H(Dh, Dl), Dl: u64.rotr32L(Dh, Dl) });
|
||||||
|
// v[c] = (v[c] + v[d]) | 0;
|
||||||
|
({ h: Ch, l: Cl } = u64.add(Ch, Cl, Dh, Dl));
|
||||||
|
// v[b] = rotr(v[b] ^ v[c], 24)
|
||||||
|
({ Bh, Bl } = { Bh: Bh ^ Ch, Bl: Bl ^ Cl });
|
||||||
|
({ Bh, Bl } = { Bh: u64.rotrSH(Bh, Bl, 24), Bl: u64.rotrSL(Bh, Bl, 24) });
|
||||||
|
(BBUF[2 * a] = Al), (BBUF[2 * a + 1] = Ah);
|
||||||
|
(BBUF[2 * b] = Bl), (BBUF[2 * b + 1] = Bh);
|
||||||
|
(BBUF[2 * c] = Cl), (BBUF[2 * c + 1] = Ch);
|
||||||
|
(BBUF[2 * d] = Dl), (BBUF[2 * d + 1] = Dh);
|
||||||
|
}
|
||||||
|
function G2b(a, b, c, d, msg, x) {
|
||||||
|
// NOTE: V is LE here
|
||||||
|
const Xl = msg[x], Xh = msg[x + 1]; // prettier-ignore
|
||||||
|
let Al = BBUF[2 * a], Ah = BBUF[2 * a + 1]; // prettier-ignore
|
||||||
|
let Bl = BBUF[2 * b], Bh = BBUF[2 * b + 1]; // prettier-ignore
|
||||||
|
let Cl = BBUF[2 * c], Ch = BBUF[2 * c + 1]; // prettier-ignore
|
||||||
|
let Dl = BBUF[2 * d], Dh = BBUF[2 * d + 1]; // prettier-ignore
|
||||||
|
// v[a] = (v[a] + v[b] + x) | 0;
|
||||||
|
let ll = u64.add3L(Al, Bl, Xl);
|
||||||
|
Ah = u64.add3H(ll, Ah, Bh, Xh);
|
||||||
|
Al = ll | 0;
|
||||||
|
// v[d] = rotr(v[d] ^ v[a], 16)
|
||||||
|
({ Dh, Dl } = { Dh: Dh ^ Ah, Dl: Dl ^ Al });
|
||||||
|
({ Dh, Dl } = { Dh: u64.rotrSH(Dh, Dl, 16), Dl: u64.rotrSL(Dh, Dl, 16) });
|
||||||
|
// v[c] = (v[c] + v[d]) | 0;
|
||||||
|
({ h: Ch, l: Cl } = u64.add(Ch, Cl, Dh, Dl));
|
||||||
|
// v[b] = rotr(v[b] ^ v[c], 63)
|
||||||
|
({ Bh, Bl } = { Bh: Bh ^ Ch, Bl: Bl ^ Cl });
|
||||||
|
({ Bh, Bl } = { Bh: u64.rotrBH(Bh, Bl, 63), Bl: u64.rotrBL(Bh, Bl, 63) });
|
||||||
|
(BBUF[2 * a] = Al), (BBUF[2 * a + 1] = Ah);
|
||||||
|
(BBUF[2 * b] = Bl), (BBUF[2 * b + 1] = Bh);
|
||||||
|
(BBUF[2 * c] = Cl), (BBUF[2 * c + 1] = Ch);
|
||||||
|
(BBUF[2 * d] = Dl), (BBUF[2 * d + 1] = Dh);
|
||||||
|
}
|
||||||
|
function checkBlake2Opts(outputLen, opts = {}, keyLen, saltLen, persLen) {
|
||||||
|
(0, utils_ts_1.anumber)(keyLen);
|
||||||
|
if (outputLen < 0 || outputLen > keyLen)
|
||||||
|
throw new Error('outputLen bigger than keyLen');
|
||||||
|
const { key, salt, personalization } = opts;
|
||||||
|
if (key !== undefined && (key.length < 1 || key.length > keyLen))
|
||||||
|
throw new Error('key length must be undefined or 1..' + keyLen);
|
||||||
|
if (salt !== undefined && salt.length !== saltLen)
|
||||||
|
throw new Error('salt must be undefined or ' + saltLen);
|
||||||
|
if (personalization !== undefined && personalization.length !== persLen)
|
||||||
|
throw new Error('personalization must be undefined or ' + persLen);
|
||||||
|
}
|
||||||
|
/** Class, from which others are subclassed. */
|
||||||
|
class BLAKE2 extends utils_ts_1.Hash {
|
||||||
|
constructor(blockLen, outputLen) {
|
||||||
|
super();
|
||||||
|
this.finished = false;
|
||||||
|
this.destroyed = false;
|
||||||
|
this.length = 0;
|
||||||
|
this.pos = 0;
|
||||||
|
(0, utils_ts_1.anumber)(blockLen);
|
||||||
|
(0, utils_ts_1.anumber)(outputLen);
|
||||||
|
this.blockLen = blockLen;
|
||||||
|
this.outputLen = outputLen;
|
||||||
|
this.buffer = new Uint8Array(blockLen);
|
||||||
|
this.buffer32 = (0, utils_ts_1.u32)(this.buffer);
|
||||||
|
}
|
||||||
|
update(data) {
|
||||||
|
(0, utils_ts_1.aexists)(this);
|
||||||
|
data = (0, utils_ts_1.toBytes)(data);
|
||||||
|
(0, utils_ts_1.abytes)(data);
|
||||||
|
// Main difference with other hashes: there is flag for last block,
|
||||||
|
// so we cannot process current block before we know that there
|
||||||
|
// is the next one. This significantly complicates logic and reduces ability
|
||||||
|
// to do zero-copy processing
|
||||||
|
const { blockLen, buffer, buffer32 } = this;
|
||||||
|
const len = data.length;
|
||||||
|
const offset = data.byteOffset;
|
||||||
|
const buf = data.buffer;
|
||||||
|
for (let pos = 0; pos < len;) {
|
||||||
|
// If buffer is full and we still have input (don't process last block, same as blake2s)
|
||||||
|
if (this.pos === blockLen) {
|
||||||
|
(0, utils_ts_1.swap32IfBE)(buffer32);
|
||||||
|
this.compress(buffer32, 0, false);
|
||||||
|
(0, utils_ts_1.swap32IfBE)(buffer32);
|
||||||
|
this.pos = 0;
|
||||||
|
}
|
||||||
|
const take = Math.min(blockLen - this.pos, len - pos);
|
||||||
|
const dataOffset = offset + pos;
|
||||||
|
// full block && aligned to 4 bytes && not last in input
|
||||||
|
if (take === blockLen && !(dataOffset % 4) && pos + take < len) {
|
||||||
|
const data32 = new Uint32Array(buf, dataOffset, Math.floor((len - pos) / 4));
|
||||||
|
(0, utils_ts_1.swap32IfBE)(data32);
|
||||||
|
for (let pos32 = 0; pos + blockLen < len; pos32 += buffer32.length, pos += blockLen) {
|
||||||
|
this.length += blockLen;
|
||||||
|
this.compress(data32, pos32, false);
|
||||||
|
}
|
||||||
|
(0, utils_ts_1.swap32IfBE)(data32);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
buffer.set(data.subarray(pos, pos + take), this.pos);
|
||||||
|
this.pos += take;
|
||||||
|
this.length += take;
|
||||||
|
pos += take;
|
||||||
|
}
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
digestInto(out) {
|
||||||
|
(0, utils_ts_1.aexists)(this);
|
||||||
|
(0, utils_ts_1.aoutput)(out, this);
|
||||||
|
const { pos, buffer32 } = this;
|
||||||
|
this.finished = true;
|
||||||
|
// Padding
|
||||||
|
(0, utils_ts_1.clean)(this.buffer.subarray(pos));
|
||||||
|
(0, utils_ts_1.swap32IfBE)(buffer32);
|
||||||
|
this.compress(buffer32, 0, true);
|
||||||
|
(0, utils_ts_1.swap32IfBE)(buffer32);
|
||||||
|
const out32 = (0, utils_ts_1.u32)(out);
|
||||||
|
this.get().forEach((v, i) => (out32[i] = (0, utils_ts_1.swap8IfBE)(v)));
|
||||||
|
}
|
||||||
|
digest() {
|
||||||
|
const { buffer, outputLen } = this;
|
||||||
|
this.digestInto(buffer);
|
||||||
|
const res = buffer.slice(0, outputLen);
|
||||||
|
this.destroy();
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
_cloneInto(to) {
|
||||||
|
const { buffer, length, finished, destroyed, outputLen, pos } = this;
|
||||||
|
to || (to = new this.constructor({ dkLen: outputLen }));
|
||||||
|
to.set(...this.get());
|
||||||
|
to.buffer.set(buffer);
|
||||||
|
to.destroyed = destroyed;
|
||||||
|
to.finished = finished;
|
||||||
|
to.length = length;
|
||||||
|
to.pos = pos;
|
||||||
|
// @ts-ignore
|
||||||
|
to.outputLen = outputLen;
|
||||||
|
return to;
|
||||||
|
}
|
||||||
|
clone() {
|
||||||
|
return this._cloneInto();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.BLAKE2 = BLAKE2;
|
||||||
|
class BLAKE2b extends BLAKE2 {
|
||||||
|
constructor(opts = {}) {
|
||||||
|
const olen = opts.dkLen === undefined ? 64 : opts.dkLen;
|
||||||
|
super(128, olen);
|
||||||
|
// Same as SHA-512, but LE
|
||||||
|
this.v0l = B2B_IV[0] | 0;
|
||||||
|
this.v0h = B2B_IV[1] | 0;
|
||||||
|
this.v1l = B2B_IV[2] | 0;
|
||||||
|
this.v1h = B2B_IV[3] | 0;
|
||||||
|
this.v2l = B2B_IV[4] | 0;
|
||||||
|
this.v2h = B2B_IV[5] | 0;
|
||||||
|
this.v3l = B2B_IV[6] | 0;
|
||||||
|
this.v3h = B2B_IV[7] | 0;
|
||||||
|
this.v4l = B2B_IV[8] | 0;
|
||||||
|
this.v4h = B2B_IV[9] | 0;
|
||||||
|
this.v5l = B2B_IV[10] | 0;
|
||||||
|
this.v5h = B2B_IV[11] | 0;
|
||||||
|
this.v6l = B2B_IV[12] | 0;
|
||||||
|
this.v6h = B2B_IV[13] | 0;
|
||||||
|
this.v7l = B2B_IV[14] | 0;
|
||||||
|
this.v7h = B2B_IV[15] | 0;
|
||||||
|
checkBlake2Opts(olen, opts, 64, 16, 16);
|
||||||
|
let { key, personalization, salt } = opts;
|
||||||
|
let keyLength = 0;
|
||||||
|
if (key !== undefined) {
|
||||||
|
key = (0, utils_ts_1.toBytes)(key);
|
||||||
|
keyLength = key.length;
|
||||||
|
}
|
||||||
|
this.v0l ^= this.outputLen | (keyLength << 8) | (0x01 << 16) | (0x01 << 24);
|
||||||
|
if (salt !== undefined) {
|
||||||
|
salt = (0, utils_ts_1.toBytes)(salt);
|
||||||
|
const slt = (0, utils_ts_1.u32)(salt);
|
||||||
|
this.v4l ^= (0, utils_ts_1.swap8IfBE)(slt[0]);
|
||||||
|
this.v4h ^= (0, utils_ts_1.swap8IfBE)(slt[1]);
|
||||||
|
this.v5l ^= (0, utils_ts_1.swap8IfBE)(slt[2]);
|
||||||
|
this.v5h ^= (0, utils_ts_1.swap8IfBE)(slt[3]);
|
||||||
|
}
|
||||||
|
if (personalization !== undefined) {
|
||||||
|
personalization = (0, utils_ts_1.toBytes)(personalization);
|
||||||
|
const pers = (0, utils_ts_1.u32)(personalization);
|
||||||
|
this.v6l ^= (0, utils_ts_1.swap8IfBE)(pers[0]);
|
||||||
|
this.v6h ^= (0, utils_ts_1.swap8IfBE)(pers[1]);
|
||||||
|
this.v7l ^= (0, utils_ts_1.swap8IfBE)(pers[2]);
|
||||||
|
this.v7h ^= (0, utils_ts_1.swap8IfBE)(pers[3]);
|
||||||
|
}
|
||||||
|
if (key !== undefined) {
|
||||||
|
// Pad to blockLen and update
|
||||||
|
const tmp = new Uint8Array(this.blockLen);
|
||||||
|
tmp.set(key);
|
||||||
|
this.update(tmp);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// prettier-ignore
|
||||||
|
get() {
|
||||||
|
let { v0l, v0h, v1l, v1h, v2l, v2h, v3l, v3h, v4l, v4h, v5l, v5h, v6l, v6h, v7l, v7h } = this;
|
||||||
|
return [v0l, v0h, v1l, v1h, v2l, v2h, v3l, v3h, v4l, v4h, v5l, v5h, v6l, v6h, v7l, v7h];
|
||||||
|
}
|
||||||
|
// prettier-ignore
|
||||||
|
set(v0l, v0h, v1l, v1h, v2l, v2h, v3l, v3h, v4l, v4h, v5l, v5h, v6l, v6h, v7l, v7h) {
|
||||||
|
this.v0l = v0l | 0;
|
||||||
|
this.v0h = v0h | 0;
|
||||||
|
this.v1l = v1l | 0;
|
||||||
|
this.v1h = v1h | 0;
|
||||||
|
this.v2l = v2l | 0;
|
||||||
|
this.v2h = v2h | 0;
|
||||||
|
this.v3l = v3l | 0;
|
||||||
|
this.v3h = v3h | 0;
|
||||||
|
this.v4l = v4l | 0;
|
||||||
|
this.v4h = v4h | 0;
|
||||||
|
this.v5l = v5l | 0;
|
||||||
|
this.v5h = v5h | 0;
|
||||||
|
this.v6l = v6l | 0;
|
||||||
|
this.v6h = v6h | 0;
|
||||||
|
this.v7l = v7l | 0;
|
||||||
|
this.v7h = v7h | 0;
|
||||||
|
}
|
||||||
|
compress(msg, offset, isLast) {
|
||||||
|
this.get().forEach((v, i) => (BBUF[i] = v)); // First half from state.
|
||||||
|
BBUF.set(B2B_IV, 16); // Second half from IV.
|
||||||
|
let { h, l } = u64.fromBig(BigInt(this.length));
|
||||||
|
BBUF[24] = B2B_IV[8] ^ l; // Low word of the offset.
|
||||||
|
BBUF[25] = B2B_IV[9] ^ h; // High word.
|
||||||
|
// Invert all bits for last block
|
||||||
|
if (isLast) {
|
||||||
|
BBUF[28] = ~BBUF[28];
|
||||||
|
BBUF[29] = ~BBUF[29];
|
||||||
|
}
|
||||||
|
let j = 0;
|
||||||
|
const s = _blake_ts_1.BSIGMA;
|
||||||
|
for (let i = 0; i < 12; i++) {
|
||||||
|
G1b(0, 4, 8, 12, msg, offset + 2 * s[j++]);
|
||||||
|
G2b(0, 4, 8, 12, msg, offset + 2 * s[j++]);
|
||||||
|
G1b(1, 5, 9, 13, msg, offset + 2 * s[j++]);
|
||||||
|
G2b(1, 5, 9, 13, msg, offset + 2 * s[j++]);
|
||||||
|
G1b(2, 6, 10, 14, msg, offset + 2 * s[j++]);
|
||||||
|
G2b(2, 6, 10, 14, msg, offset + 2 * s[j++]);
|
||||||
|
G1b(3, 7, 11, 15, msg, offset + 2 * s[j++]);
|
||||||
|
G2b(3, 7, 11, 15, msg, offset + 2 * s[j++]);
|
||||||
|
G1b(0, 5, 10, 15, msg, offset + 2 * s[j++]);
|
||||||
|
G2b(0, 5, 10, 15, msg, offset + 2 * s[j++]);
|
||||||
|
G1b(1, 6, 11, 12, msg, offset + 2 * s[j++]);
|
||||||
|
G2b(1, 6, 11, 12, msg, offset + 2 * s[j++]);
|
||||||
|
G1b(2, 7, 8, 13, msg, offset + 2 * s[j++]);
|
||||||
|
G2b(2, 7, 8, 13, msg, offset + 2 * s[j++]);
|
||||||
|
G1b(3, 4, 9, 14, msg, offset + 2 * s[j++]);
|
||||||
|
G2b(3, 4, 9, 14, msg, offset + 2 * s[j++]);
|
||||||
|
}
|
||||||
|
this.v0l ^= BBUF[0] ^ BBUF[16];
|
||||||
|
this.v0h ^= BBUF[1] ^ BBUF[17];
|
||||||
|
this.v1l ^= BBUF[2] ^ BBUF[18];
|
||||||
|
this.v1h ^= BBUF[3] ^ BBUF[19];
|
||||||
|
this.v2l ^= BBUF[4] ^ BBUF[20];
|
||||||
|
this.v2h ^= BBUF[5] ^ BBUF[21];
|
||||||
|
this.v3l ^= BBUF[6] ^ BBUF[22];
|
||||||
|
this.v3h ^= BBUF[7] ^ BBUF[23];
|
||||||
|
this.v4l ^= BBUF[8] ^ BBUF[24];
|
||||||
|
this.v4h ^= BBUF[9] ^ BBUF[25];
|
||||||
|
this.v5l ^= BBUF[10] ^ BBUF[26];
|
||||||
|
this.v5h ^= BBUF[11] ^ BBUF[27];
|
||||||
|
this.v6l ^= BBUF[12] ^ BBUF[28];
|
||||||
|
this.v6h ^= BBUF[13] ^ BBUF[29];
|
||||||
|
this.v7l ^= BBUF[14] ^ BBUF[30];
|
||||||
|
this.v7h ^= BBUF[15] ^ BBUF[31];
|
||||||
|
(0, utils_ts_1.clean)(BBUF);
|
||||||
|
}
|
||||||
|
destroy() {
|
||||||
|
this.destroyed = true;
|
||||||
|
(0, utils_ts_1.clean)(this.buffer32);
|
||||||
|
this.set(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.BLAKE2b = BLAKE2b;
|
||||||
|
/**
|
||||||
|
* Blake2b hash function. 64-bit. 1.5x slower than blake2s in JS.
|
||||||
|
* @param msg - message that would be hashed
|
||||||
|
* @param opts - dkLen output length, key for MAC mode, salt, personalization
|
||||||
|
*/
|
||||||
|
exports.blake2b = (0, utils_ts_1.createOptHasher)((opts) => new BLAKE2b(opts));
|
||||||
|
// prettier-ignore
|
||||||
|
function compress(s, offset, msg, rounds, v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15) {
|
||||||
|
let j = 0;
|
||||||
|
for (let i = 0; i < rounds; i++) {
|
||||||
|
({ a: v0, b: v4, c: v8, d: v12 } = (0, _blake_ts_1.G1s)(v0, v4, v8, v12, msg[offset + s[j++]]));
|
||||||
|
({ a: v0, b: v4, c: v8, d: v12 } = (0, _blake_ts_1.G2s)(v0, v4, v8, v12, msg[offset + s[j++]]));
|
||||||
|
({ a: v1, b: v5, c: v9, d: v13 } = (0, _blake_ts_1.G1s)(v1, v5, v9, v13, msg[offset + s[j++]]));
|
||||||
|
({ a: v1, b: v5, c: v9, d: v13 } = (0, _blake_ts_1.G2s)(v1, v5, v9, v13, msg[offset + s[j++]]));
|
||||||
|
({ a: v2, b: v6, c: v10, d: v14 } = (0, _blake_ts_1.G1s)(v2, v6, v10, v14, msg[offset + s[j++]]));
|
||||||
|
({ a: v2, b: v6, c: v10, d: v14 } = (0, _blake_ts_1.G2s)(v2, v6, v10, v14, msg[offset + s[j++]]));
|
||||||
|
({ a: v3, b: v7, c: v11, d: v15 } = (0, _blake_ts_1.G1s)(v3, v7, v11, v15, msg[offset + s[j++]]));
|
||||||
|
({ a: v3, b: v7, c: v11, d: v15 } = (0, _blake_ts_1.G2s)(v3, v7, v11, v15, msg[offset + s[j++]]));
|
||||||
|
({ a: v0, b: v5, c: v10, d: v15 } = (0, _blake_ts_1.G1s)(v0, v5, v10, v15, msg[offset + s[j++]]));
|
||||||
|
({ a: v0, b: v5, c: v10, d: v15 } = (0, _blake_ts_1.G2s)(v0, v5, v10, v15, msg[offset + s[j++]]));
|
||||||
|
({ a: v1, b: v6, c: v11, d: v12 } = (0, _blake_ts_1.G1s)(v1, v6, v11, v12, msg[offset + s[j++]]));
|
||||||
|
({ a: v1, b: v6, c: v11, d: v12 } = (0, _blake_ts_1.G2s)(v1, v6, v11, v12, msg[offset + s[j++]]));
|
||||||
|
({ a: v2, b: v7, c: v8, d: v13 } = (0, _blake_ts_1.G1s)(v2, v7, v8, v13, msg[offset + s[j++]]));
|
||||||
|
({ a: v2, b: v7, c: v8, d: v13 } = (0, _blake_ts_1.G2s)(v2, v7, v8, v13, msg[offset + s[j++]]));
|
||||||
|
({ a: v3, b: v4, c: v9, d: v14 } = (0, _blake_ts_1.G1s)(v3, v4, v9, v14, msg[offset + s[j++]]));
|
||||||
|
({ a: v3, b: v4, c: v9, d: v14 } = (0, _blake_ts_1.G2s)(v3, v4, v9, v14, msg[offset + s[j++]]));
|
||||||
|
}
|
||||||
|
return { v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 };
|
||||||
|
}
|
||||||
|
const B2S_IV = _md_ts_1.SHA256_IV;
|
||||||
|
class BLAKE2s extends BLAKE2 {
|
||||||
|
constructor(opts = {}) {
|
||||||
|
const olen = opts.dkLen === undefined ? 32 : opts.dkLen;
|
||||||
|
super(64, olen);
|
||||||
|
// Internal state, same as SHA-256
|
||||||
|
this.v0 = B2S_IV[0] | 0;
|
||||||
|
this.v1 = B2S_IV[1] | 0;
|
||||||
|
this.v2 = B2S_IV[2] | 0;
|
||||||
|
this.v3 = B2S_IV[3] | 0;
|
||||||
|
this.v4 = B2S_IV[4] | 0;
|
||||||
|
this.v5 = B2S_IV[5] | 0;
|
||||||
|
this.v6 = B2S_IV[6] | 0;
|
||||||
|
this.v7 = B2S_IV[7] | 0;
|
||||||
|
checkBlake2Opts(olen, opts, 32, 8, 8);
|
||||||
|
let { key, personalization, salt } = opts;
|
||||||
|
let keyLength = 0;
|
||||||
|
if (key !== undefined) {
|
||||||
|
key = (0, utils_ts_1.toBytes)(key);
|
||||||
|
keyLength = key.length;
|
||||||
|
}
|
||||||
|
this.v0 ^= this.outputLen | (keyLength << 8) | (0x01 << 16) | (0x01 << 24);
|
||||||
|
if (salt !== undefined) {
|
||||||
|
salt = (0, utils_ts_1.toBytes)(salt);
|
||||||
|
const slt = (0, utils_ts_1.u32)(salt);
|
||||||
|
this.v4 ^= (0, utils_ts_1.swap8IfBE)(slt[0]);
|
||||||
|
this.v5 ^= (0, utils_ts_1.swap8IfBE)(slt[1]);
|
||||||
|
}
|
||||||
|
if (personalization !== undefined) {
|
||||||
|
personalization = (0, utils_ts_1.toBytes)(personalization);
|
||||||
|
const pers = (0, utils_ts_1.u32)(personalization);
|
||||||
|
this.v6 ^= (0, utils_ts_1.swap8IfBE)(pers[0]);
|
||||||
|
this.v7 ^= (0, utils_ts_1.swap8IfBE)(pers[1]);
|
||||||
|
}
|
||||||
|
if (key !== undefined) {
|
||||||
|
// Pad to blockLen and update
|
||||||
|
(0, utils_ts_1.abytes)(key);
|
||||||
|
const tmp = new Uint8Array(this.blockLen);
|
||||||
|
tmp.set(key);
|
||||||
|
this.update(tmp);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
get() {
|
||||||
|
const { v0, v1, v2, v3, v4, v5, v6, v7 } = this;
|
||||||
|
return [v0, v1, v2, v3, v4, v5, v6, v7];
|
||||||
|
}
|
||||||
|
// prettier-ignore
|
||||||
|
set(v0, v1, v2, v3, v4, v5, v6, v7) {
|
||||||
|
this.v0 = v0 | 0;
|
||||||
|
this.v1 = v1 | 0;
|
||||||
|
this.v2 = v2 | 0;
|
||||||
|
this.v3 = v3 | 0;
|
||||||
|
this.v4 = v4 | 0;
|
||||||
|
this.v5 = v5 | 0;
|
||||||
|
this.v6 = v6 | 0;
|
||||||
|
this.v7 = v7 | 0;
|
||||||
|
}
|
||||||
|
compress(msg, offset, isLast) {
|
||||||
|
const { h, l } = u64.fromBig(BigInt(this.length));
|
||||||
|
// prettier-ignore
|
||||||
|
const { v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 } = compress(_blake_ts_1.BSIGMA, offset, msg, 10, this.v0, this.v1, this.v2, this.v3, this.v4, this.v5, this.v6, this.v7, B2S_IV[0], B2S_IV[1], B2S_IV[2], B2S_IV[3], l ^ B2S_IV[4], h ^ B2S_IV[5], isLast ? ~B2S_IV[6] : B2S_IV[6], B2S_IV[7]);
|
||||||
|
this.v0 ^= v0 ^ v8;
|
||||||
|
this.v1 ^= v1 ^ v9;
|
||||||
|
this.v2 ^= v2 ^ v10;
|
||||||
|
this.v3 ^= v3 ^ v11;
|
||||||
|
this.v4 ^= v4 ^ v12;
|
||||||
|
this.v5 ^= v5 ^ v13;
|
||||||
|
this.v6 ^= v6 ^ v14;
|
||||||
|
this.v7 ^= v7 ^ v15;
|
||||||
|
}
|
||||||
|
destroy() {
|
||||||
|
this.destroyed = true;
|
||||||
|
(0, utils_ts_1.clean)(this.buffer32);
|
||||||
|
this.set(0, 0, 0, 0, 0, 0, 0, 0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.BLAKE2s = BLAKE2s;
|
||||||
|
/**
|
||||||
|
* Blake2s hash function. Focuses on 8-bit to 32-bit platforms. 1.5x faster than blake2b in JS.
|
||||||
|
* @param msg - message that would be hashed
|
||||||
|
* @param opts - dkLen output length, key for MAC mode, salt, personalization
|
||||||
|
*/
|
||||||
|
exports.blake2s = (0, utils_ts_1.createOptHasher)((opts) => new BLAKE2s(opts));
|
||||||
|
//# sourceMappingURL=blake2.js.map
1 service-login-verify/node_modules/@noble/hashes/blake2.js.map (generated, vendored, Normal file)
File diff suppressed because one or more lines are too long
11 service-login-verify/node_modules/@noble/hashes/blake2b.d.ts (generated, vendored, Normal file)
@@ -0,0 +1,11 @@
/**
 * Blake2b hash function. Focuses on 64-bit platforms, but in JS speed different from Blake2s is negligible.
 * @module
 * @deprecated
 */
import { BLAKE2b as B2B, blake2b as b2b } from './blake2.ts';
/** @deprecated Use import from `noble/hashes/blake2` module */
export declare const BLAKE2b: typeof B2B;
/** @deprecated Use import from `noble/hashes/blake2` module */
export declare const blake2b: typeof b2b;
//# sourceMappingURL=blake2b.d.ts.map
1 service-login-verify/node_modules/@noble/hashes/blake2b.d.ts.map (generated, vendored, Normal file)
@@ -0,0 +1 @@
{"version":3,"file":"blake2b.d.ts","sourceRoot":"","sources":["src/blake2b.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH,OAAO,EAAE,OAAO,IAAI,GAAG,EAAE,OAAO,IAAI,GAAG,EAAE,MAAM,aAAa,CAAC;AAC7D,+DAA+D;AAC/D,eAAO,MAAM,OAAO,EAAE,OAAO,GAAS,CAAC;AACvC,+DAA+D;AAC/D,eAAO,MAAM,OAAO,EAAE,OAAO,GAAS,CAAC"}
14 service-login-verify/node_modules/@noble/hashes/blake2b.js (generated, vendored, Normal file)
@@ -0,0 +1,14 @@
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.blake2b = exports.BLAKE2b = void 0;
|
||||||
|
/**
|
||||||
|
* Blake2b hash function. Focuses on 64-bit platforms, but in JS speed different from Blake2s is negligible.
|
||||||
|
* @module
|
||||||
|
* @deprecated
|
||||||
|
*/
|
||||||
|
const blake2_ts_1 = require("./blake2.js");
|
||||||
|
/** @deprecated Use import from `noble/hashes/blake2` module */
|
||||||
|
exports.BLAKE2b = blake2_ts_1.BLAKE2b;
|
||||||
|
/** @deprecated Use import from `noble/hashes/blake2` module */
|
||||||
|
exports.blake2b = blake2_ts_1.blake2b;
|
||||||
|
//# sourceMappingURL=blake2b.js.map
|
||||||
1 service-login-verify/node_modules/@noble/hashes/blake2b.js.map (generated, vendored, Normal file)
@@ -0,0 +1 @@
{"version":3,"file":"blake2b.js","sourceRoot":"","sources":["src/blake2b.ts"],"names":[],"mappings":";;;AAAA;;;;GAIG;AACH,2CAA6D;AAC7D,+DAA+D;AAClD,QAAA,OAAO,GAAe,mBAAG,CAAC;AACvC,+DAA+D;AAClD,QAAA,OAAO,GAAe,mBAAG,CAAC"}
20 service-login-verify/node_modules/@noble/hashes/blake2s.d.ts (generated, vendored, Normal file)
@@ -0,0 +1,20 @@
/**
 * Blake2s hash function. Focuses on 8-bit to 32-bit platforms. blake2b for 64-bit, but in JS it is slower.
 * @module
 * @deprecated
 */
import { G1s as G1s_n, G2s as G2s_n } from './_blake.ts';
import { BLAKE2s as B2S, blake2s as b2s, compress as compress_n } from './blake2.ts';
/** @deprecated Use import from `noble/hashes/blake2` module */
export declare const B2S_IV: Uint32Array;
/** @deprecated Use import from `noble/hashes/blake2` module */
export declare const G1s: typeof G1s_n;
/** @deprecated Use import from `noble/hashes/blake2` module */
export declare const G2s: typeof G2s_n;
/** @deprecated Use import from `noble/hashes/blake2` module */
export declare const compress: typeof compress_n;
/** @deprecated Use import from `noble/hashes/blake2` module */
export declare const BLAKE2s: typeof B2S;
/** @deprecated Use import from `noble/hashes/blake2` module */
export declare const blake2s: typeof b2s;
//# sourceMappingURL=blake2s.d.ts.map
1 service-login-verify/node_modules/@noble/hashes/blake2s.d.ts.map (generated, vendored, Normal file)
@@ -0,0 +1 @@
{"version":3,"file":"blake2s.d.ts","sourceRoot":"","sources":["src/blake2s.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH,OAAO,EAAE,GAAG,IAAI,KAAK,EAAE,GAAG,IAAI,KAAK,EAAE,MAAM,aAAa,CAAC;AAEzD,OAAO,EAAE,OAAO,IAAI,GAAG,EAAE,OAAO,IAAI,GAAG,EAAE,QAAQ,IAAI,UAAU,EAAE,MAAM,aAAa,CAAC;AACrF,+DAA+D;AAC/D,eAAO,MAAM,MAAM,EAAE,WAAuB,CAAC;AAC7C,+DAA+D;AAC/D,eAAO,MAAM,GAAG,EAAE,OAAO,KAAa,CAAC;AACvC,+DAA+D;AAC/D,eAAO,MAAM,GAAG,EAAE,OAAO,KAAa,CAAC;AACvC,+DAA+D;AAC/D,eAAO,MAAM,QAAQ,EAAE,OAAO,UAAuB,CAAC;AACtD,+DAA+D;AAC/D,eAAO,MAAM,OAAO,EAAE,OAAO,GAAS,CAAC;AACvC,+DAA+D;AAC/D,eAAO,MAAM,OAAO,EAAE,OAAO,GAAS,CAAC"}
24 service-login-verify/node_modules/@noble/hashes/blake2s.js (generated, vendored, Normal file)
@@ -0,0 +1,24 @@
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.blake2s = exports.BLAKE2s = exports.compress = exports.G2s = exports.G1s = exports.B2S_IV = void 0;
|
||||||
|
/**
|
||||||
|
* Blake2s hash function. Focuses on 8-bit to 32-bit platforms. blake2b for 64-bit, but in JS it is slower.
|
||||||
|
* @module
|
||||||
|
* @deprecated
|
||||||
|
*/
|
||||||
|
const _blake_ts_1 = require("./_blake.js");
|
||||||
|
const _md_ts_1 = require("./_md.js");
|
||||||
|
const blake2_ts_1 = require("./blake2.js");
|
||||||
|
/** @deprecated Use import from `noble/hashes/blake2` module */
|
||||||
|
exports.B2S_IV = _md_ts_1.SHA256_IV;
|
||||||
|
/** @deprecated Use import from `noble/hashes/blake2` module */
|
||||||
|
exports.G1s = _blake_ts_1.G1s;
|
||||||
|
/** @deprecated Use import from `noble/hashes/blake2` module */
|
||||||
|
exports.G2s = _blake_ts_1.G2s;
|
||||||
|
/** @deprecated Use import from `noble/hashes/blake2` module */
|
||||||
|
exports.compress = blake2_ts_1.compress;
|
||||||
|
/** @deprecated Use import from `noble/hashes/blake2` module */
|
||||||
|
exports.BLAKE2s = blake2_ts_1.BLAKE2s;
|
||||||
|
/** @deprecated Use import from `noble/hashes/blake2` module */
|
||||||
|
exports.blake2s = blake2_ts_1.blake2s;
|
||||||
|
//# sourceMappingURL=blake2s.js.map
|
||||||
1 service-login-verify/node_modules/@noble/hashes/blake2s.js.map (generated, vendored, Normal file)
@@ -0,0 +1 @@
{"version":3,"file":"blake2s.js","sourceRoot":"","sources":["src/blake2s.ts"],"names":[],"mappings":";;;AAAA;;;;GAIG;AACH,2CAAyD;AACzD,qCAAqC;AACrC,2CAAqF;AACrF,+DAA+D;AAClD,QAAA,MAAM,GAAgB,kBAAS,CAAC;AAC7C,+DAA+D;AAClD,QAAA,GAAG,GAAiB,eAAK,CAAC;AACvC,+DAA+D;AAClD,QAAA,GAAG,GAAiB,eAAK,CAAC;AACvC,+DAA+D;AAClD,QAAA,QAAQ,GAAsB,oBAAU,CAAC;AACtD,+DAA+D;AAClD,QAAA,OAAO,GAAe,mBAAG,CAAC;AACvC,+DAA+D;AAClD,QAAA,OAAO,GAAe,mBAAG,CAAC"}
54 service-login-verify/node_modules/@noble/hashes/blake3.d.ts (generated, vendored, Normal file)
@@ -0,0 +1,54 @@
import { BLAKE2 } from './blake2.ts';
import { type CHashXO, type HashXOF, type Input } from './utils.ts';
/**
 * Ensure to use EITHER `key` OR `context`, not both.
 *
 * * `key`: 32-byte MAC key.
 * * `context`: string for KDF. Should be hardcoded, globally unique, and application - specific.
 *   A good default format for the context string is "[application] [commit timestamp] [purpose]".
 */
export type Blake3Opts = {
    dkLen?: number;
    key?: Input;
    context?: Input;
};
/** Blake3 hash. Can be used as MAC and KDF. */
export declare class BLAKE3 extends BLAKE2<BLAKE3> implements HashXOF<BLAKE3> {
    private chunkPos;
    private chunksDone;
    private flags;
    private IV;
    private state;
    private stack;
    private posOut;
    private bufferOut32;
    private bufferOut;
    private chunkOut;
    private enableXOF;
    constructor(opts?: Blake3Opts, flags?: number);
    protected get(): [];
    protected set(): void;
    private b2Compress;
    protected compress(buf: Uint32Array, bufPos?: number, isLast?: boolean): void;
    _cloneInto(to?: BLAKE3): BLAKE3;
    destroy(): void;
    private b2CompressOut;
    protected finish(): void;
    private writeInto;
    xofInto(out: Uint8Array): Uint8Array;
    xof(bytes: number): Uint8Array;
    digestInto(out: Uint8Array): Uint8Array;
    digest(): Uint8Array;
}
/**
 * BLAKE3 hash function. Can be used as MAC and KDF.
 * @param msg - message that would be hashed
 * @param opts - `dkLen` for output length, `key` for MAC mode, `context` for KDF mode
 * @example
 * const data = new Uint8Array(32);
 * const hash = blake3(data);
 * const mac = blake3(data, { key: new Uint8Array(32) });
 * const kdf = blake3(data, { context: 'application name' });
 */
export declare const blake3: CHashXO;
//# sourceMappingURL=blake3.d.ts.map
1 service-login-verify/node_modules/@noble/hashes/blake3.d.ts.map (generated, vendored, Normal file)
@@ -0,0 +1 @@
{"version":3,"file":"blake3.d.ts","sourceRoot":"","sources":["src/blake3.ts"],"names":[],"mappings":"AAeA,OAAO,EAAE,MAAM,EAAY,MAAM,aAAa,CAAC;AAE/C,OAAO,EAGL,KAAK,OAAO,EAAE,KAAK,OAAO,EAAE,KAAK,KAAK,EACvC,MAAM,YAAY,CAAC;AAwBpB;;;;;;GAMG;AACH,MAAM,MAAM,UAAU,GAAG;IAAE,KAAK,CAAC,EAAE,MAAM,CAAC;IAAC,GAAG,CAAC,EAAE,KAAK,CAAC;IAAC,OAAO,CAAC,EAAE,KAAK,CAAA;CAAE,CAAC;AAE1E,+CAA+C;AAC/C,qBAAa,MAAO,SAAQ,MAAM,CAAC,MAAM,CAAE,YAAW,OAAO,CAAC,MAAM,CAAC;IACnE,OAAO,CAAC,QAAQ,CAAK;IACrB,OAAO,CAAC,UAAU,CAAK;IACvB,OAAO,CAAC,KAAK,CAAS;IACtB,OAAO,CAAC,EAAE,CAAc;IACxB,OAAO,CAAC,KAAK,CAAc;IAC3B,OAAO,CAAC,KAAK,CAAqB;IAElC,OAAO,CAAC,MAAM,CAAK;IACnB,OAAO,CAAC,WAAW,CAAuB;IAC1C,OAAO,CAAC,SAAS,CAAa;IAC9B,OAAO,CAAC,QAAQ,CAAK;IACrB,OAAO,CAAC,SAAS,CAAQ;gBAEb,IAAI,GAAE,UAAe,EAAE,KAAK,SAAI;IA2B5C,SAAS,CAAC,GAAG,IAAI,EAAE;IAGnB,SAAS,CAAC,GAAG,IAAI,IAAI;IACrB,OAAO,CAAC,UAAU;IAmBlB,SAAS,CAAC,QAAQ,CAAC,GAAG,EAAE,WAAW,EAAE,MAAM,GAAE,MAAU,EAAE,MAAM,GAAE,OAAe,GAAG,IAAI;IAiCvF,UAAU,CAAC,EAAE,CAAC,EAAE,MAAM,GAAG,MAAM;IAe/B,OAAO,IAAI,IAAI;IAMf,OAAO,CAAC,aAAa;IA+BrB,SAAS,CAAC,MAAM,IAAI,IAAI;IAoBxB,OAAO,CAAC,SAAS;IAcjB,OAAO,CAAC,GAAG,EAAE,UAAU,GAAG,UAAU;IAIpC,GAAG,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU;IAI9B,UAAU,CAAC,GAAG,EAAE,UAAU,GAAG,UAAU;IAQvC,MAAM,IAAI,UAAU;CAGrB;AAED;;;;;;;;;GASG;AACH,eAAO,MAAM,MAAM,EAAE,OAEpB,CAAC"}
255 service-login-verify/node_modules/@noble/hashes/blake3.js (generated, vendored, Normal file)
@@ -0,0 +1,255 @@
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.blake3 = exports.BLAKE3 = void 0;
|
||||||
|
/**
|
||||||
|
* Blake3 fast hash is Blake2 with reduced security (round count). Can also be used as MAC & KDF.
|
||||||
|
*
|
||||||
|
* It is advertised as "the fastest cryptographic hash". However, it isn't true in JS.
|
||||||
|
* Why is this so slow? While it should be 6x faster than blake2b, perf diff is only 20%:
|
||||||
|
*
|
||||||
|
* * There is only 30% reduction in number of rounds from blake2s
|
||||||
|
* * Speed-up comes from tree structure, which is parallelized using SIMD & threading.
|
||||||
|
* These features are not present in JS, so we only get overhead from trees.
|
||||||
|
* * Parallelization only happens on 1024-byte chunks: there is no benefit for small inputs.
|
||||||
|
* * It is still possible to make it faster using: a) loop unrolling b) web workers c) wasm
|
||||||
|
* @module
|
||||||
|
*/
|
||||||
|
const _md_ts_1 = require("./_md.js");
|
||||||
|
const _u64_ts_1 = require("./_u64.js");
|
||||||
|
const blake2_ts_1 = require("./blake2.js");
|
||||||
|
// prettier-ignore
|
||||||
|
const utils_ts_1 = require("./utils.js");
|
||||||
|
// Flag bitset
|
||||||
|
const B3_Flags = {
|
||||||
|
CHUNK_START: 0b1,
|
||||||
|
CHUNK_END: 0b10,
|
||||||
|
PARENT: 0b100,
|
||||||
|
ROOT: 0b1000,
|
||||||
|
KEYED_HASH: 0b10000,
|
||||||
|
DERIVE_KEY_CONTEXT: 0b100000,
|
||||||
|
DERIVE_KEY_MATERIAL: 0b1000000,
|
||||||
|
};
|
||||||
|
const B3_IV = _md_ts_1.SHA256_IV.slice();
|
||||||
|
const B3_SIGMA = /* @__PURE__ */ (() => {
|
||||||
|
const Id = Array.from({ length: 16 }, (_, i) => i);
|
||||||
|
const permute = (arr) => [2, 6, 3, 10, 7, 0, 4, 13, 1, 11, 12, 5, 9, 14, 15, 8].map((i) => arr[i]);
|
||||||
|
const res = [];
|
||||||
|
for (let i = 0, v = Id; i < 7; i++, v = permute(v))
|
||||||
|
res.push(...v);
|
||||||
|
return Uint8Array.from(res);
|
||||||
|
})();
|
||||||
|
/** Blake3 hash. Can be used as MAC and KDF. */
|
||||||
|
class BLAKE3 extends blake2_ts_1.BLAKE2 {
|
||||||
|
constructor(opts = {}, flags = 0) {
|
||||||
|
super(64, opts.dkLen === undefined ? 32 : opts.dkLen);
|
||||||
|
this.chunkPos = 0; // Position of current block in chunk
|
||||||
|
this.chunksDone = 0; // How many chunks we already have
|
||||||
|
this.flags = 0 | 0;
|
||||||
|
this.stack = [];
|
||||||
|
// Output
|
||||||
|
this.posOut = 0;
|
||||||
|
this.bufferOut32 = new Uint32Array(16);
|
||||||
|
this.chunkOut = 0; // index of output chunk
|
||||||
|
this.enableXOF = true;
|
||||||
|
const { key, context } = opts;
|
||||||
|
const hasContext = context !== undefined;
|
||||||
|
if (key !== undefined) {
|
||||||
|
if (hasContext)
|
||||||
|
throw new Error('Only "key" or "context" can be specified at same time');
|
||||||
|
const k = (0, utils_ts_1.toBytes)(key).slice();
|
||||||
|
(0, utils_ts_1.abytes)(k, 32);
|
||||||
|
this.IV = (0, utils_ts_1.u32)(k);
|
||||||
|
(0, utils_ts_1.swap32IfBE)(this.IV);
|
||||||
|
this.flags = flags | B3_Flags.KEYED_HASH;
|
||||||
|
}
|
||||||
|
else if (hasContext) {
|
||||||
|
const ctx = (0, utils_ts_1.toBytes)(context);
|
||||||
|
const contextKey = new BLAKE3({ dkLen: 32 }, B3_Flags.DERIVE_KEY_CONTEXT)
|
||||||
|
.update(ctx)
|
||||||
|
.digest();
|
||||||
|
this.IV = (0, utils_ts_1.u32)(contextKey);
|
||||||
|
(0, utils_ts_1.swap32IfBE)(this.IV);
|
||||||
|
this.flags = flags | B3_Flags.DERIVE_KEY_MATERIAL;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this.IV = B3_IV.slice();
|
||||||
|
this.flags = flags;
|
||||||
|
}
|
||||||
|
this.state = this.IV.slice();
|
||||||
|
this.bufferOut = (0, utils_ts_1.u8)(this.bufferOut32);
|
||||||
|
}
|
||||||
|
// Unused
|
||||||
|
get() {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
set() { }
|
||||||
|
b2Compress(counter, flags, buf, bufPos = 0) {
|
||||||
|
const { state: s, pos } = this;
|
||||||
|
const { h, l } = (0, _u64_ts_1.fromBig)(BigInt(counter), true);
|
||||||
|
// prettier-ignore
|
||||||
|
const { v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 } = (0, blake2_ts_1.compress)(B3_SIGMA, bufPos, buf, 7, s[0], s[1], s[2], s[3], s[4], s[5], s[6], s[7], B3_IV[0], B3_IV[1], B3_IV[2], B3_IV[3], h, l, pos, flags);
|
||||||
|
s[0] = v0 ^ v8;
|
||||||
|
s[1] = v1 ^ v9;
|
||||||
|
s[2] = v2 ^ v10;
|
||||||
|
s[3] = v3 ^ v11;
|
||||||
|
s[4] = v4 ^ v12;
|
||||||
|
s[5] = v5 ^ v13;
|
||||||
|
s[6] = v6 ^ v14;
|
||||||
|
s[7] = v7 ^ v15;
|
||||||
|
}
|
||||||
|
compress(buf, bufPos = 0, isLast = false) {
|
||||||
|
// Compress last block
|
||||||
|
let flags = this.flags;
|
||||||
|
if (!this.chunkPos)
|
||||||
|
flags |= B3_Flags.CHUNK_START;
|
||||||
|
if (this.chunkPos === 15 || isLast)
|
||||||
|
flags |= B3_Flags.CHUNK_END;
|
||||||
|
if (!isLast)
|
||||||
|
this.pos = this.blockLen;
|
||||||
|
this.b2Compress(this.chunksDone, flags, buf, bufPos);
|
||||||
|
this.chunkPos += 1;
|
||||||
|
// If current block is last in chunk (16 blocks), then compress chunks
|
||||||
|
if (this.chunkPos === 16 || isLast) {
|
||||||
|
let chunk = this.state;
|
||||||
|
this.state = this.IV.slice();
|
||||||
|
// If not the last one, compress only when there are trailing zeros in chunk counter
|
||||||
|
// chunks used as binary tree where current stack is path. Zero means current leaf is finished and can be compressed.
|
||||||
|
// 1 (001) - leaf not finished (just push current chunk to stack)
|
||||||
|
// 2 (010) - leaf finished at depth=1 (merge with last elm on stack and push back)
|
||||||
|
// 3 (011) - last leaf not finished
|
||||||
|
// 4 (100) - leafs finished at depth=1 and depth=2
|
||||||
|
for (let last, chunks = this.chunksDone + 1; isLast || !(chunks & 1); chunks >>= 1) {
|
||||||
|
if (!(last = this.stack.pop()))
|
||||||
|
break;
|
||||||
|
this.buffer32.set(last, 0);
|
||||||
|
this.buffer32.set(chunk, 8);
|
||||||
|
this.pos = this.blockLen;
|
||||||
|
this.b2Compress(0, this.flags | B3_Flags.PARENT, this.buffer32, 0);
|
||||||
|
chunk = this.state;
|
||||||
|
this.state = this.IV.slice();
|
||||||
|
}
|
||||||
|
this.chunksDone++;
|
||||||
|
this.chunkPos = 0;
|
||||||
|
this.stack.push(chunk);
|
||||||
|
}
|
||||||
|
this.pos = 0;
|
||||||
|
}
|
||||||
|
_cloneInto(to) {
|
||||||
|
to = super._cloneInto(to);
|
||||||
|
const { IV, flags, state, chunkPos, posOut, chunkOut, stack, chunksDone } = this;
|
||||||
|
to.state.set(state.slice());
|
||||||
|
to.stack = stack.map((i) => Uint32Array.from(i));
|
||||||
|
to.IV.set(IV);
|
||||||
|
to.flags = flags;
|
||||||
|
to.chunkPos = chunkPos;
|
||||||
|
to.chunksDone = chunksDone;
|
||||||
|
to.posOut = posOut;
|
||||||
|
to.chunkOut = chunkOut;
|
||||||
|
to.enableXOF = this.enableXOF;
|
||||||
|
to.bufferOut32.set(this.bufferOut32);
|
||||||
|
return to;
|
||||||
|
}
|
||||||
|
destroy() {
|
||||||
|
this.destroyed = true;
|
||||||
|
(0, utils_ts_1.clean)(this.state, this.buffer32, this.IV, this.bufferOut32);
|
||||||
|
(0, utils_ts_1.clean)(...this.stack);
|
||||||
|
}
|
||||||
|
// Same as b2Compress, but doesn't modify state and returns 16 u32 array (instead of 8)
|
||||||
|
b2CompressOut() {
|
||||||
|
const { state: s, pos, flags, buffer32, bufferOut32: out32 } = this;
|
||||||
|
const { h, l } = (0, _u64_ts_1.fromBig)(BigInt(this.chunkOut++));
|
||||||
|
(0, utils_ts_1.swap32IfBE)(buffer32);
|
||||||
|
// prettier-ignore
|
||||||
|
const { v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 } = (0, blake2_ts_1.compress)(B3_SIGMA, 0, buffer32, 7, s[0], s[1], s[2], s[3], s[4], s[5], s[6], s[7], B3_IV[0], B3_IV[1], B3_IV[2], B3_IV[3], l, h, pos, flags);
|
||||||
|
out32[0] = v0 ^ v8;
|
||||||
|
out32[1] = v1 ^ v9;
|
||||||
|
out32[2] = v2 ^ v10;
|
||||||
|
out32[3] = v3 ^ v11;
|
||||||
|
out32[4] = v4 ^ v12;
|
||||||
|
out32[5] = v5 ^ v13;
|
||||||
|
out32[6] = v6 ^ v14;
|
||||||
|
out32[7] = v7 ^ v15;
|
||||||
|
out32[8] = s[0] ^ v8;
|
||||||
|
out32[9] = s[1] ^ v9;
|
||||||
|
out32[10] = s[2] ^ v10;
|
||||||
|
out32[11] = s[3] ^ v11;
|
||||||
|
out32[12] = s[4] ^ v12;
|
||||||
|
out32[13] = s[5] ^ v13;
|
||||||
|
out32[14] = s[6] ^ v14;
|
||||||
|
out32[15] = s[7] ^ v15;
|
||||||
|
(0, utils_ts_1.swap32IfBE)(buffer32);
|
||||||
|
(0, utils_ts_1.swap32IfBE)(out32);
|
||||||
|
this.posOut = 0;
|
||||||
|
}
|
||||||
|
finish() {
|
||||||
|
if (this.finished)
|
||||||
|
return;
|
||||||
|
this.finished = true;
|
||||||
|
// Padding
|
||||||
|
(0, utils_ts_1.clean)(this.buffer.subarray(this.pos));
|
||||||
|
// Process last chunk
|
||||||
|
let flags = this.flags | B3_Flags.ROOT;
|
||||||
|
if (this.stack.length) {
|
||||||
|
flags |= B3_Flags.PARENT;
|
||||||
|
(0, utils_ts_1.swap32IfBE)(this.buffer32);
|
||||||
|
this.compress(this.buffer32, 0, true);
|
||||||
|
(0, utils_ts_1.swap32IfBE)(this.buffer32);
|
||||||
|
this.chunksDone = 0;
|
||||||
|
this.pos = this.blockLen;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
flags |= (!this.chunkPos ? B3_Flags.CHUNK_START : 0) | B3_Flags.CHUNK_END;
|
||||||
|
}
|
||||||
|
this.flags = flags;
|
||||||
|
this.b2CompressOut();
|
||||||
|
}
|
||||||
|
writeInto(out) {
|
||||||
|
(0, utils_ts_1.aexists)(this, false);
|
||||||
|
(0, utils_ts_1.abytes)(out);
|
||||||
|
this.finish();
|
||||||
|
const { blockLen, bufferOut } = this;
|
||||||
|
for (let pos = 0, len = out.length; pos < len;) {
|
||||||
|
if (this.posOut >= blockLen)
|
||||||
|
this.b2CompressOut();
|
||||||
|
const take = Math.min(blockLen - this.posOut, len - pos);
|
||||||
|
out.set(bufferOut.subarray(this.posOut, this.posOut + take), pos);
|
||||||
|
this.posOut += take;
|
||||||
|
pos += take;
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
xofInto(out) {
|
||||||
|
if (!this.enableXOF)
|
||||||
|
throw new Error('XOF is not possible after digest call');
|
||||||
|
return this.writeInto(out);
|
||||||
|
}
|
||||||
|
xof(bytes) {
|
||||||
|
(0, utils_ts_1.anumber)(bytes);
|
||||||
|
return this.xofInto(new Uint8Array(bytes));
|
||||||
|
}
|
||||||
|
digestInto(out) {
|
||||||
|
(0, utils_ts_1.aoutput)(out, this);
|
||||||
|
if (this.finished)
|
||||||
|
throw new Error('digest() was already called');
|
||||||
|
this.enableXOF = false;
|
||||||
|
this.writeInto(out);
|
||||||
|
this.destroy();
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
digest() {
|
||||||
|
return this.digestInto(new Uint8Array(this.outputLen));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.BLAKE3 = BLAKE3;
/**
* BLAKE3 hash function. Can be used as MAC and KDF.
* @param msg - message that would be hashed
* @param opts - `dkLen` for output length, `key` for MAC mode, `context` for KDF mode
* @example
* const data = new Uint8Array(32);
* const hash = blake3(data);
* const mac = blake3(data, { key: new Uint8Array(32) });
* const kdf = blake3(data, { context: 'application name' });
*/
exports.blake3 = (0, utils_ts_1.createXOFer)((opts) => new BLAKE3(opts));
//# sourceMappingURL=blake3.js.map
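The export above is the whole public surface of the vendored blake3 build: one hasher usable as a plain hash, a keyed MAC, or a KDF, with XOF output. A minimal TypeScript usage sketch (illustrative only, not part of this diff; it assumes the vendored build resolves as '@noble/hashes/blake3' from service-login-verify, and the payload string is hypothetical):

import { blake3 } from '@noble/hashes/blake3';

const data = new TextEncoder().encode('login-proof-payload'); // hypothetical input
const hash = blake3(data);                                     // 32-byte Uint8Array by default
const mac = blake3(data, { key: new Uint8Array(32) });         // keyed (MAC) mode
const kdf = blake3(data, { context: 'service-login-verify', dkLen: 64 }); // KDF mode with longer output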
1 service-login-verify/node_modules/@noble/hashes/blake3.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
2 service-login-verify/node_modules/@noble/hashes/crypto.d.ts generated vendored Normal file
@ -0,0 +1,2 @@
export declare const crypto: any;
//# sourceMappingURL=crypto.d.ts.map
1 service-login-verify/node_modules/@noble/hashes/crypto.d.ts.map generated vendored Normal file
@ -0,0 +1 @@
{"version":3,"file":"crypto.d.ts","sourceRoot":"","sources":["src/crypto.ts"],"names":[],"mappings":"AAOA,eAAO,MAAM,MAAM,EAAE,GACqE,CAAC"}
5 service-login-verify/node_modules/@noble/hashes/crypto.js generated vendored Normal file
@ -0,0 +1,5 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.crypto = void 0;
exports.crypto = typeof globalThis === 'object' && 'crypto' in globalThis ? globalThis.crypto : undefined;
//# sourceMappingURL=crypto.js.map
1 service-login-verify/node_modules/@noble/hashes/crypto.js.map generated vendored Normal file
@ -0,0 +1 @@
{"version":3,"file":"crypto.js","sourceRoot":"","sources":["src/crypto.ts"],"names":[],"mappings":";;;AAOa,QAAA,MAAM,GACjB,OAAO,UAAU,KAAK,QAAQ,IAAI,QAAQ,IAAI,UAAU,CAAC,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC"}
2 service-login-verify/node_modules/@noble/hashes/cryptoNode.d.ts generated vendored Normal file
@ -0,0 +1,2 @@
export declare const crypto: any;
//# sourceMappingURL=cryptoNode.d.ts.map
1 service-login-verify/node_modules/@noble/hashes/cryptoNode.d.ts.map generated vendored Normal file
@ -0,0 +1 @@
{"version":3,"file":"cryptoNode.d.ts","sourceRoot":"","sources":["src/cryptoNode.ts"],"names":[],"mappings":"AASA,eAAO,MAAM,MAAM,EAAE,GAKJ,CAAC"}
18 service-login-verify/node_modules/@noble/hashes/cryptoNode.js generated vendored Normal file
@ -0,0 +1,18 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.crypto = void 0;
/**
* Internal webcrypto alias.
* We prefer WebCrypto aka globalThis.crypto, which exists in node.js 16+.
* Falls back to Node.js built-in crypto for Node.js <=v14.
* See utils.ts for details.
* @module
*/
// @ts-ignore
const nc = require("node:crypto");
exports.crypto = nc && typeof nc === 'object' && 'webcrypto' in nc
? nc.webcrypto
: nc && typeof nc === 'object' && 'randomBytes' in nc
? nc
: undefined;
//# sourceMappingURL=cryptoNode.js.map
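crypto.js (browser) and cryptoNode.js (Node) export the same `crypto` symbol: the former returns globalThis.crypto, the latter prefers node:crypto's webcrypto and falls back to the legacy module on very old Node versions. A consumer sketch (illustrative only; it assumes the package exposes this pair through the './crypto' subpath, and randomBytes32 is a hypothetical helper, not part of the diff):

import { crypto } from '@noble/hashes/crypto';

// Hypothetical helper: draw 32 random bytes when the shim resolved a WebCrypto implementation.
export function randomBytes32(): Uint8Array {
  if (!crypto || typeof crypto.getRandomValues !== 'function') {
    throw new Error('no secure random source available');
  }
  return crypto.getRandomValues(new Uint8Array(32));
}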
1 service-login-verify/node_modules/@noble/hashes/cryptoNode.js.map generated vendored Normal file
@ -0,0 +1 @@
{"version":3,"file":"cryptoNode.js","sourceRoot":"","sources":["src/cryptoNode.ts"],"names":[],"mappings":";;;AAAA;;;;;;GAMG;AACH,aAAa;AACb,kCAAkC;AACrB,QAAA,MAAM,GACjB,EAAE,IAAI,OAAO,EAAE,KAAK,QAAQ,IAAI,WAAW,IAAI,EAAE;IAC/C,CAAC,CAAE,EAAE,CAAC,SAAiB;IACvB,CAAC,CAAC,EAAE,IAAI,OAAO,EAAE,KAAK,QAAQ,IAAI,aAAa,IAAI,EAAE;QACnD,CAAC,CAAC,EAAE;QACJ,CAAC,CAAC,SAAS,CAAC"}
47 service-login-verify/node_modules/@noble/hashes/eskdf.d.ts generated vendored Normal file
@ -0,0 +1,47 @@
export declare function scrypt(password: string, salt: string): Uint8Array;
export declare function pbkdf2(password: string, salt: string): Uint8Array;
/**
* Derives main seed. Takes a lot of time. Prefer `eskdf` method instead.
*/
export declare function deriveMainSeed(username: string, password: string): Uint8Array;
type AccountID = number | string;
type OptsLength = {
keyLength: number;
};
type OptsMod = {
modulus: bigint;
};
type KeyOpts = undefined | OptsLength | OptsMod;
export interface ESKDF {
/**
* Derives a child key. Child key will not be associated with any
* other child key because of properties of underlying KDF.
*
* @param protocol - 3-15 character protocol name
* @param accountId - numeric identifier of account
* @param options - `keyLength: 64` or `modulus: 41920438n`
* @example deriveChildKey('aes', 0)
*/
deriveChildKey: (protocol: string, accountId: AccountID, options?: KeyOpts) => Uint8Array;
/**
* Deletes the main seed from eskdf instance
*/
expire: () => void;
/**
* Account fingerprint
*/
fingerprint: string;
}
/**
* ESKDF
* @param username - username, email, or identifier, min: 8 characters, should have enough entropy
* @param password - password, min: 8 characters, should have enough entropy
* @example
* const kdf = await eskdf('example-university', 'beginning-new-example');
* const key = kdf.deriveChildKey('aes', 0);
* console.log(kdf.fingerprint);
* kdf.expire();
*/
export declare function eskdf(username: string, password: string): Promise<ESKDF>;
export {};
//# sourceMappingURL=eskdf.d.ts.map
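The declarations above are the whole ESKDF surface: deriveMainSeed (slow), eskdf (preferred), and per-protocol child keys. A usage sketch (illustrative only; identifiers, protocol names, and key lengths below are arbitrary examples, not part of the commit):

import { eskdf } from '@noble/hashes/eskdf';

async function demo(): Promise<void> {
  // username and password must both be 8..255 characters
  const kdf = await eskdf('user@example.com', 'correct horse battery staple');
  const aesKey = kdf.deriveChildKey('aes', 0);                           // 32 bytes by default
  const sshKey = kdf.deriveChildKey('ssh', 'backup', { keyLength: 64 }); // string ids allowed for some protocols
  console.log(kdf.fingerprint, aesKey.length, sshKey.length);
  kdf.expire();                                                          // wipes the main seed
}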
1 service-login-verify/node_modules/@noble/hashes/eskdf.d.ts.map generated vendored Normal file
@ -0,0 +1 @@
{"version":3,"file":"eskdf.d.ts","sourceRoot":"","sources":["src/eskdf.ts"],"names":[],"mappings":"AAiBA,wBAAgB,MAAM,CAAC,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,GAAG,UAAU,CAEjE;AAGD,wBAAgB,MAAM,CAAC,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,GAAG,UAAU,CAEjE;AAiBD;;GAEG;AACH,wBAAgB,cAAc,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,UAAU,CAY7E;AAED,KAAK,SAAS,GAAG,MAAM,GAAG,MAAM,CAAC;AAgCjC,KAAK,UAAU,GAAG;IAAE,SAAS,EAAE,MAAM,CAAA;CAAE,CAAC;AACxC,KAAK,OAAO,GAAG;IAAE,OAAO,EAAE,MAAM,CAAA;CAAE,CAAC;AACnC,KAAK,OAAO,GAAG,SAAS,GAAG,UAAU,GAAG,OAAO,CAAC;AAwChD,MAAM,WAAW,KAAK;IACpB;;;;;;;;OAQG;IACH,cAAc,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,EAAE,SAAS,EAAE,OAAO,CAAC,EAAE,OAAO,KAAK,UAAU,CAAC;IAC1F;;OAEG;IACH,MAAM,EAAE,MAAM,IAAI,CAAC;IACnB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;CACrB;AAED;;;;;;;;;GASG;AACH,wBAAsB,KAAK,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC,CAuB9E"}
166 service-login-verify/node_modules/@noble/hashes/eskdf.js generated vendored Normal file
@ -0,0 +1,166 @@
|
|||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.scrypt = scrypt;
|
||||||
|
exports.pbkdf2 = pbkdf2;
|
||||||
|
exports.deriveMainSeed = deriveMainSeed;
|
||||||
|
exports.eskdf = eskdf;
|
||||||
|
/**
|
||||||
|
* Experimental KDF for AES.
|
||||||
|
*/
|
||||||
|
const hkdf_ts_1 = require("./hkdf.js");
|
||||||
|
const pbkdf2_ts_1 = require("./pbkdf2.js");
|
||||||
|
const scrypt_ts_1 = require("./scrypt.js");
|
||||||
|
const sha256_ts_1 = require("./sha256.js");
|
||||||
|
const utils_ts_1 = require("./utils.js");
|
||||||
|
// A tiny KDF for various applications like AES key-gen.
|
||||||
|
// Uses HKDF in a non-standard way, so it's not "KDF-secure", only "PRF-secure".
|
||||||
|
// Which is good enough: assume sha2-256 retained preimage resistance.
|
||||||
|
const SCRYPT_FACTOR = 2 ** 19;
|
||||||
|
const PBKDF2_FACTOR = 2 ** 17;
|
||||||
|
// Scrypt KDF
|
||||||
|
function scrypt(password, salt) {
|
||||||
|
return (0, scrypt_ts_1.scrypt)(password, salt, { N: SCRYPT_FACTOR, r: 8, p: 1, dkLen: 32 });
|
||||||
|
}
|
||||||
|
// PBKDF2-HMAC-SHA256
|
||||||
|
function pbkdf2(password, salt) {
|
||||||
|
return (0, pbkdf2_ts_1.pbkdf2)(sha256_ts_1.sha256, password, salt, { c: PBKDF2_FACTOR, dkLen: 32 });
|
||||||
|
}
|
||||||
|
// Combines two 32-byte byte arrays
|
||||||
|
function xor32(a, b) {
|
||||||
|
(0, utils_ts_1.abytes)(a, 32);
|
||||||
|
(0, utils_ts_1.abytes)(b, 32);
|
||||||
|
const arr = new Uint8Array(32);
|
||||||
|
for (let i = 0; i < 32; i++) {
|
||||||
|
arr[i] = a[i] ^ b[i];
|
||||||
|
}
|
||||||
|
return arr;
|
||||||
|
}
|
||||||
|
function strHasLength(str, min, max) {
|
||||||
|
return typeof str === 'string' && str.length >= min && str.length <= max;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Derives main seed. Takes a lot of time. Prefer `eskdf` method instead.
|
||||||
|
*/
|
||||||
|
function deriveMainSeed(username, password) {
|
||||||
|
if (!strHasLength(username, 8, 255))
|
||||||
|
throw new Error('invalid username');
|
||||||
|
if (!strHasLength(password, 8, 255))
|
||||||
|
throw new Error('invalid password');
|
||||||
|
// Declared like this to throw off minifiers which auto-convert .fromCharCode(1) to actual string.
|
||||||
|
// String with non-ascii may be problematic in some envs
|
||||||
|
const codes = { _1: 1, _2: 2 };
|
||||||
|
const sep = { s: String.fromCharCode(codes._1), p: String.fromCharCode(codes._2) };
|
||||||
|
const scr = scrypt(password + sep.s, username + sep.s);
|
||||||
|
const pbk = pbkdf2(password + sep.p, username + sep.p);
|
||||||
|
const res = xor32(scr, pbk);
|
||||||
|
(0, utils_ts_1.clean)(scr, pbk);
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Converts protocol & accountId pair to HKDF salt & info params.
|
||||||
|
*/
|
||||||
|
function getSaltInfo(protocol, accountId = 0) {
|
||||||
|
// Note that length here also repeats two lines below
|
||||||
|
// We do an additional length check here to reduce the scope of DoS attacks
|
||||||
|
if (!(strHasLength(protocol, 3, 15) && /^[a-z0-9]{3,15}$/.test(protocol))) {
|
||||||
|
throw new Error('invalid protocol');
|
||||||
|
}
|
||||||
|
// Allow string account ids for some protocols
|
||||||
|
const allowsStr = /^password\d{0,3}|ssh|tor|file$/.test(protocol);
|
||||||
|
let salt; // Extract salt. Default is undefined.
|
||||||
|
if (typeof accountId === 'string') {
|
||||||
|
if (!allowsStr)
|
||||||
|
throw new Error('accountId must be a number');
|
||||||
|
if (!strHasLength(accountId, 1, 255))
|
||||||
|
throw new Error('accountId must be string of length 1..255');
|
||||||
|
salt = (0, utils_ts_1.kdfInputToBytes)(accountId);
|
||||||
|
}
|
||||||
|
else if (Number.isSafeInteger(accountId)) {
|
||||||
|
if (accountId < 0 || accountId > Math.pow(2, 32) - 1)
|
||||||
|
throw new Error('invalid accountId');
|
||||||
|
// Convert to Big Endian Uint32
|
||||||
|
salt = new Uint8Array(4);
|
||||||
|
(0, utils_ts_1.createView)(salt).setUint32(0, accountId, false);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
throw new Error('accountId must be a number' + (allowsStr ? ' or string' : ''));
|
||||||
|
}
|
||||||
|
const info = (0, utils_ts_1.kdfInputToBytes)(protocol);
|
||||||
|
return { salt, info };
|
||||||
|
}
|
||||||
|
function countBytes(num) {
|
||||||
|
if (typeof num !== 'bigint' || num <= BigInt(128))
|
||||||
|
throw new Error('invalid number');
|
||||||
|
return Math.ceil(num.toString(2).length / 8);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Parses keyLength and modulus options to extract length of result key.
|
||||||
|
* If modulus is used, adds 64 bits to it as per FIPS 186 B.4.1 to combat modulo bias.
|
||||||
|
*/
|
||||||
|
function getKeyLength(options) {
|
||||||
|
if (!options || typeof options !== 'object')
|
||||||
|
return 32;
|
||||||
|
const hasLen = 'keyLength' in options;
|
||||||
|
const hasMod = 'modulus' in options;
|
||||||
|
if (hasLen && hasMod)
|
||||||
|
throw new Error('cannot combine keyLength and modulus options');
|
||||||
|
if (!hasLen && !hasMod)
|
||||||
|
throw new Error('must have either keyLength or modulus option');
|
||||||
|
// FIPS 186 B.4.1 requires at least 64 more bits
|
||||||
|
const l = hasMod ? countBytes(options.modulus) + 8 : options.keyLength;
|
||||||
|
if (!(typeof l === 'number' && l >= 16 && l <= 8192))
|
||||||
|
throw new Error('invalid keyLength');
|
||||||
|
return l;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Converts key to bigint and divides it by modulus. Big Endian.
|
||||||
|
* Implements FIPS 186 B.4.1, which removes 0 and modulo bias from output.
|
||||||
|
*/
|
||||||
|
function modReduceKey(key, modulus) {
|
||||||
|
const _1 = BigInt(1);
|
||||||
|
const num = BigInt('0x' + (0, utils_ts_1.bytesToHex)(key)); // check for ui8a, then bytesToNumber()
|
||||||
|
const res = (num % (modulus - _1)) + _1; // Remove 0 from output
|
||||||
|
if (res < _1)
|
||||||
|
throw new Error('expected positive number'); // Guard against bad values
|
||||||
|
const len = key.length - 8; // FIPS requires 64 more bits = 8 bytes
|
||||||
|
const hex = res.toString(16).padStart(len * 2, '0'); // numberToHex()
|
||||||
|
const bytes = (0, utils_ts_1.hexToBytes)(hex);
|
||||||
|
if (bytes.length !== len)
|
||||||
|
throw new Error('invalid length of result key');
|
||||||
|
return bytes;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* ESKDF
|
||||||
|
* @param username - username, email, or identifier, min: 8 characters, should have enough entropy
|
||||||
|
* @param password - password, min: 8 characters, should have enough entropy
|
||||||
|
* @example
|
||||||
|
* const kdf = await eskdf('example-university', 'beginning-new-example');
|
||||||
|
* const key = kdf.deriveChildKey('aes', 0);
|
||||||
|
* console.log(kdf.fingerprint);
|
||||||
|
* kdf.expire();
|
||||||
|
*/
|
||||||
|
async function eskdf(username, password) {
|
||||||
|
// We are using closure + object instead of class because
|
||||||
|
// we want to make `seed` non-accessible for any external function.
|
||||||
|
let seed = deriveMainSeed(username, password);
|
||||||
|
function deriveCK(protocol, accountId = 0, options) {
|
||||||
|
(0, utils_ts_1.abytes)(seed, 32);
|
||||||
|
const { salt, info } = getSaltInfo(protocol, accountId); // validate protocol & accountId
|
||||||
|
const keyLength = getKeyLength(options); // validate options
|
||||||
|
const key = (0, hkdf_ts_1.hkdf)(sha256_ts_1.sha256, seed, salt, info, keyLength);
|
||||||
|
// Modulus has already been validated
|
||||||
|
return options && 'modulus' in options ? modReduceKey(key, options.modulus) : key;
|
||||||
|
}
|
||||||
|
function expire() {
|
||||||
|
if (seed)
|
||||||
|
seed.fill(1);
|
||||||
|
seed = undefined;
|
||||||
|
}
|
||||||
|
// prettier-ignore
|
||||||
|
const fingerprint = Array.from(deriveCK('fingerprint', 0))
|
||||||
|
.slice(0, 6)
|
||||||
|
.map((char) => char.toString(16).padStart(2, '0').toUpperCase())
|
||||||
|
.join(':');
|
||||||
|
return Object.freeze({ deriveChildKey: deriveCK, expire, fingerprint });
|
||||||
|
}
|
||||||
|
//# sourceMappingURL=eskdf.js.map
1 service-login-verify/node_modules/@noble/hashes/eskdf.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
17 service-login-verify/node_modules/@noble/hashes/esm/_assert.d.ts generated vendored Normal file
@ -0,0 +1,17 @@
/**
* Internal assertion helpers.
* @module
* @deprecated
*/
import { abytes as ab, aexists as ae, anumber as an, aoutput as ao, type IHash as H } from './utils.ts';
/** @deprecated Use import from `noble/hashes/utils` module */
export declare const abytes: typeof ab;
/** @deprecated Use import from `noble/hashes/utils` module */
export declare const aexists: typeof ae;
/** @deprecated Use import from `noble/hashes/utils` module */
export declare const anumber: typeof an;
/** @deprecated Use import from `noble/hashes/utils` module */
export declare const aoutput: typeof ao;
/** @deprecated Use import from `noble/hashes/utils` module */
export type Hash = H;
//# sourceMappingURL=_assert.d.ts.map
1 service-login-verify/node_modules/@noble/hashes/esm/_assert.d.ts.map generated vendored Normal file
@ -0,0 +1 @@
{"version":3,"file":"_assert.d.ts","sourceRoot":"","sources":["../src/_assert.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH,OAAO,EACL,MAAM,IAAI,EAAE,EACZ,OAAO,IAAI,EAAE,EACb,OAAO,IAAI,EAAE,EACb,OAAO,IAAI,EAAE,EACb,KAAK,KAAK,IAAI,CAAC,EAChB,MAAM,YAAY,CAAC;AACpB,8DAA8D;AAC9D,eAAO,MAAM,MAAM,EAAE,OAAO,EAAO,CAAC;AACpC,8DAA8D;AAC9D,eAAO,MAAM,OAAO,EAAE,OAAO,EAAO,CAAC;AACrC,8DAA8D;AAC9D,eAAO,MAAM,OAAO,EAAE,OAAO,EAAO,CAAC;AACrC,8DAA8D;AAC9D,eAAO,MAAM,OAAO,EAAE,OAAO,EAAO,CAAC;AACrC,8DAA8D;AAC9D,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC"}
15 service-login-verify/node_modules/@noble/hashes/esm/_assert.js generated vendored Normal file
@ -0,0 +1,15 @@
/**
* Internal assertion helpers.
* @module
* @deprecated
*/
import { abytes as ab, aexists as ae, anumber as an, aoutput as ao, } from "./utils.js";
/** @deprecated Use import from `noble/hashes/utils` module */
export const abytes = ab;
/** @deprecated Use import from `noble/hashes/utils` module */
export const aexists = ae;
/** @deprecated Use import from `noble/hashes/utils` module */
export const anumber = an;
/** @deprecated Use import from `noble/hashes/utils` module */
export const aoutput = ao;
//# sourceMappingURL=_assert.js.map
1 service-login-verify/node_modules/@noble/hashes/esm/_assert.js.map generated vendored Normal file
@ -0,0 +1 @@
{"version":3,"file":"_assert.js","sourceRoot":"","sources":["../src/_assert.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH,OAAO,EACL,MAAM,IAAI,EAAE,EACZ,OAAO,IAAI,EAAE,EACb,OAAO,IAAI,EAAE,EACb,OAAO,IAAI,EAAE,GAEd,MAAM,YAAY,CAAC;AACpB,8DAA8D;AAC9D,MAAM,CAAC,MAAM,MAAM,GAAc,EAAE,CAAC;AACpC,8DAA8D;AAC9D,MAAM,CAAC,MAAM,OAAO,GAAc,EAAE,CAAC;AACrC,8DAA8D;AAC9D,MAAM,CAAC,MAAM,OAAO,GAAc,EAAE,CAAC;AACrC,8DAA8D;AAC9D,MAAM,CAAC,MAAM,OAAO,GAAc,EAAE,CAAC"}
14 service-login-verify/node_modules/@noble/hashes/esm/_blake.d.ts generated vendored Normal file
@ -0,0 +1,14 @@
/**
* Internal blake variable.
* For BLAKE2b, the two extra permutations for rounds 10 and 11 are SIGMA[10..11] = SIGMA[0..1].
*/
export declare const BSIGMA: Uint8Array;
export type Num4 = {
a: number;
b: number;
c: number;
d: number;
};
export declare function G1s(a: number, b: number, c: number, d: number, x: number): Num4;
export declare function G2s(a: number, b: number, c: number, d: number, x: number): Num4;
//# sourceMappingURL=_blake.d.ts.map
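G1s and G2s are the two halves of the BLAKE mixing function G; a full quarter-round applies G1s with the first message word and G2s with the second. A small sketch (illustrative only; it assumes the snippet lives next to the vendored esm/_blake.js so the relative import resolves):

import { G1s, G2s } from './_blake.js';

// One full G quarter-round over four state words a..d and message words x, y.
function G(a: number, b: number, c: number, d: number, x: number, y: number) {
  ({ a, b, c, d } = G1s(a, b, c, d, x)); // first half: add x, rotate by 16 and 12
  ({ a, b, c, d } = G2s(a, b, c, d, y)); // second half: add y, rotate by 8 and 7
  return { a, b, c, d };
}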
1 service-login-verify/node_modules/@noble/hashes/esm/_blake.d.ts.map generated vendored Normal file
@ -0,0 +1 @@
{"version":3,"file":"_blake.d.ts","sourceRoot":"","sources":["../src/_blake.ts"],"names":[],"mappings":"AAMA;;;GAGG;AAEH,eAAO,MAAM,MAAM,EAAE,UAkBnB,CAAC;AAGH,MAAM,MAAM,IAAI,GAAG;IAAE,CAAC,EAAE,MAAM,CAAC;IAAC,CAAC,EAAE,MAAM,CAAC;IAAC,CAAC,EAAE,MAAM,CAAC;IAAC,CAAC,EAAE,MAAM,CAAC;CAAE,CAAC;AAGnE,wBAAgB,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,GAAG,IAAI,CAM/E;AAED,wBAAgB,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,GAAG,IAAI,CAM/E"}
45 service-login-verify/node_modules/@noble/hashes/esm/_blake.js generated vendored Normal file
@ -0,0 +1,45 @@
|
|||||||
|
/**
|
||||||
|
* Internal helpers for blake hash.
|
||||||
|
* @module
|
||||||
|
*/
|
||||||
|
import { rotr } from "./utils.js";
|
||||||
|
/**
|
||||||
|
* Internal blake variable.
|
||||||
|
* For BLAKE2b, the two extra permutations for rounds 10 and 11 are SIGMA[10..11] = SIGMA[0..1].
|
||||||
|
*/
|
||||||
|
// prettier-ignore
|
||||||
|
export const BSIGMA = /* @__PURE__ */ Uint8Array.from([
|
||||||
|
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
|
||||||
|
14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3,
|
||||||
|
11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4,
|
||||||
|
7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8,
|
||||||
|
9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13,
|
||||||
|
2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9,
|
||||||
|
12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11,
|
||||||
|
13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10,
|
||||||
|
6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5,
|
||||||
|
10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0,
|
||||||
|
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
|
||||||
|
14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3,
|
||||||
|
// Blake1, unused in others
|
||||||
|
11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4,
|
||||||
|
7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8,
|
||||||
|
9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13,
|
||||||
|
2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9,
|
||||||
|
]);
|
||||||
|
// Mixing function G splitted in two halfs
|
||||||
|
export function G1s(a, b, c, d, x) {
|
||||||
|
a = (a + b + x) | 0;
|
||||||
|
d = rotr(d ^ a, 16);
|
||||||
|
c = (c + d) | 0;
|
||||||
|
b = rotr(b ^ c, 12);
|
||||||
|
return { a, b, c, d };
|
||||||
|
}
|
||||||
|
export function G2s(a, b, c, d, x) {
|
||||||
|
a = (a + b + x) | 0;
|
||||||
|
d = rotr(d ^ a, 8);
|
||||||
|
c = (c + d) | 0;
|
||||||
|
b = rotr(b ^ c, 7);
|
||||||
|
return { a, b, c, d };
|
||||||
|
}
|
||||||
|
//# sourceMappingURL=_blake.js.map
1 service-login-verify/node_modules/@noble/hashes/esm/_blake.js.map generated vendored Normal file
@ -0,0 +1 @@
{"version":3,"file":"_blake.js","sourceRoot":"","sources":["../src/_blake.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,OAAO,EAAE,IAAI,EAAE,MAAM,YAAY,CAAC;AAElC;;;GAGG;AACH,kBAAkB;AAClB,MAAM,CAAC,MAAM,MAAM,GAAe,eAAe,CAAC,UAAU,CAAC,IAAI,CAAC;IAChE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE;IACpD,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;IACpD,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;IACpD,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC;IACpD,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE;IACpD,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC;IACpD,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE;IACpD,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE;IACpD,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC;IACpD,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC;IACpD,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE;IACpD,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;IACpD,2BAA2B;IAC3B,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;IACpD,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC;IACpD,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE;IACpD,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC;CACrD,CAAC,CAAC;AAKH,0CAA0C;AAC1C,MAAM,UAAU,GAAG,CAAC,CAAS,EAAE,CAAS,EAAE,CAAS,EAAE,CAAS,EAAE,CAAS;IACvE,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;IACpB,CAAC,GAAG,IAAI,CAAC,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,CAAC;IACpB,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;IAChB,CAAC,GAAG,IAAI,CAAC,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC,CAAC;IACpB,OAAO,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;AACxB,CAAC;AAED,MAAM,UAAU,GAAG,CAAC,CAAS,EAAE,CAAS,EAAE,CAAS,EAAE,CAAS,EAAE,CAAS;IACvE,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;IACpB,CAAC,GAAG,IAAI,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC;IACnB,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;IAChB,CAAC,GAAG,IAAI,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CA
AC;IACnB,OAAO,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC;AACxB,CAAC"}
51 service-login-verify/node_modules/@noble/hashes/esm/_md.d.ts generated vendored Normal file
@ -0,0 +1,51 @@
|
|||||||
|
/**
|
||||||
|
* Internal Merkle-Damgard hash utils.
|
||||||
|
* @module
|
||||||
|
*/
|
||||||
|
import { type Input, Hash } from './utils.ts';
|
||||||
|
/** Polyfill for Safari 14. https://caniuse.com/mdn-javascript_builtins_dataview_setbiguint64 */
|
||||||
|
export declare function setBigUint64(view: DataView, byteOffset: number, value: bigint, isLE: boolean): void;
|
||||||
|
/** Choice: a ? b : c */
|
||||||
|
export declare function Chi(a: number, b: number, c: number): number;
|
||||||
|
/** Majority function, true if any two inputs is true. */
|
||||||
|
export declare function Maj(a: number, b: number, c: number): number;
|
||||||
|
/**
|
||||||
|
* Merkle-Damgard hash construction base class.
|
||||||
|
* Could be used to create MD5, RIPEMD, SHA1, SHA2.
|
||||||
|
*/
|
||||||
|
export declare abstract class HashMD<T extends HashMD<T>> extends Hash<T> {
|
||||||
|
protected abstract process(buf: DataView, offset: number): void;
|
||||||
|
protected abstract get(): number[];
|
||||||
|
protected abstract set(...args: number[]): void;
|
||||||
|
abstract destroy(): void;
|
||||||
|
protected abstract roundClean(): void;
|
||||||
|
readonly blockLen: number;
|
||||||
|
readonly outputLen: number;
|
||||||
|
readonly padOffset: number;
|
||||||
|
readonly isLE: boolean;
|
||||||
|
protected buffer: Uint8Array;
|
||||||
|
protected view: DataView;
|
||||||
|
protected finished: boolean;
|
||||||
|
protected length: number;
|
||||||
|
protected pos: number;
|
||||||
|
protected destroyed: boolean;
|
||||||
|
constructor(blockLen: number, outputLen: number, padOffset: number, isLE: boolean);
|
||||||
|
update(data: Input): this;
|
||||||
|
digestInto(out: Uint8Array): void;
|
||||||
|
digest(): Uint8Array;
|
||||||
|
_cloneInto(to?: T): T;
|
||||||
|
clone(): T;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Initial SHA-2 state: fractional parts of square roots of first 16 primes 2..53.
|
||||||
|
* Check out `test/misc/sha2-gen-iv.js` for recomputation guide.
|
||||||
|
*/
|
||||||
|
/** Initial SHA256 state. Bits 0..32 of frac part of sqrt of primes 2..19 */
|
||||||
|
export declare const SHA256_IV: Uint32Array;
|
||||||
|
/** Initial SHA224 state. Bits 32..64 of frac part of sqrt of primes 23..53 */
|
||||||
|
export declare const SHA224_IV: Uint32Array;
|
||||||
|
/** Initial SHA384 state. Bits 0..64 of frac part of sqrt of primes 23..53 */
|
||||||
|
export declare const SHA384_IV: Uint32Array;
|
||||||
|
/** Initial SHA512 state. Bits 0..64 of frac part of sqrt of primes 2..19 */
|
||||||
|
export declare const SHA512_IV: Uint32Array;
|
||||||
|
//# sourceMappingURL=_md.d.ts.map
1 service-login-verify/node_modules/@noble/hashes/esm/_md.d.ts.map generated vendored Normal file
@ -0,0 +1 @@
{"version":3,"file":"_md.d.ts","sourceRoot":"","sources":["../src/_md.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,OAAO,EAAE,KAAK,KAAK,EAAE,IAAI,EAAwD,MAAM,YAAY,CAAC;AAEpG,gGAAgG;AAChG,wBAAgB,YAAY,CAC1B,IAAI,EAAE,QAAQ,EACd,UAAU,EAAE,MAAM,EAClB,KAAK,EAAE,MAAM,EACb,IAAI,EAAE,OAAO,GACZ,IAAI,CAUN;AAED,wBAAwB;AACxB,wBAAgB,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,GAAG,MAAM,CAE3D;AAED,yDAAyD;AACzD,wBAAgB,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,GAAG,MAAM,CAE3D;AAED;;;GAGG;AACH,8BAAsB,MAAM,CAAC,CAAC,SAAS,MAAM,CAAC,CAAC,CAAC,CAAE,SAAQ,IAAI,CAAC,CAAC,CAAC;IAC/D,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,QAAQ,EAAE,MAAM,EAAE,MAAM,GAAG,IAAI;IAC/D,SAAS,CAAC,QAAQ,CAAC,GAAG,IAAI,MAAM,EAAE;IAClC,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,IAAI,EAAE,MAAM,EAAE,GAAG,IAAI;IAC/C,QAAQ,CAAC,OAAO,IAAI,IAAI;IACxB,SAAS,CAAC,QAAQ,CAAC,UAAU,IAAI,IAAI;IAErC,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;IAC3B,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;IAC3B,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC;IAGvB,SAAS,CAAC,MAAM,EAAE,UAAU,CAAC;IAC7B,SAAS,CAAC,IAAI,EAAE,QAAQ,CAAC;IACzB,SAAS,CAAC,QAAQ,UAAS;IAC3B,SAAS,CAAC,MAAM,SAAK;IACrB,SAAS,CAAC,GAAG,SAAK;IAClB,SAAS,CAAC,SAAS,UAAS;gBAEhB,QAAQ,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO;IASjF,MAAM,CAAC,IAAI,EAAE,KAAK,GAAG,IAAI;IA0BzB,UAAU,CAAC,GAAG,EAAE,UAAU,GAAG,IAAI;IAkCjC,MAAM,IAAI,UAAU;IAOpB,UAAU,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC;IAWrB,KAAK,IAAI,CAAC;CAGX;AAED;;;GAGG;AAEH,4EAA4E;AAC5E,eAAO,MAAM,SAAS,EAAE,WAEtB,CAAC;AAEH,8EAA8E;AAC9E,eAAO,MAAM,SAAS,EAAE,WAEtB,CAAC;AAEH,6EAA6E;AAC7E,eAAO,MAAM,SAAS,EAAE,WAGtB,CAAC;AAEH,4EAA4E;AAC5E,eAAO,MAAM,SAAS,EAAE,WAGtB,CAAC"}
155 service-login-verify/node_modules/@noble/hashes/esm/_md.js generated vendored Normal file
@ -0,0 +1,155 @@
|
|||||||
|
/**
|
||||||
|
* Internal Merkle-Damgard hash utils.
|
||||||
|
* @module
|
||||||
|
*/
|
||||||
|
import { Hash, abytes, aexists, aoutput, clean, createView, toBytes } from "./utils.js";
|
||||||
|
/** Polyfill for Safari 14. https://caniuse.com/mdn-javascript_builtins_dataview_setbiguint64 */
|
||||||
|
export function setBigUint64(view, byteOffset, value, isLE) {
|
||||||
|
if (typeof view.setBigUint64 === 'function')
|
||||||
|
return view.setBigUint64(byteOffset, value, isLE);
|
||||||
|
const _32n = BigInt(32);
|
||||||
|
const _u32_max = BigInt(0xffffffff);
|
||||||
|
const wh = Number((value >> _32n) & _u32_max);
|
||||||
|
const wl = Number(value & _u32_max);
|
||||||
|
const h = isLE ? 4 : 0;
|
||||||
|
const l = isLE ? 0 : 4;
|
||||||
|
view.setUint32(byteOffset + h, wh, isLE);
|
||||||
|
view.setUint32(byteOffset + l, wl, isLE);
|
||||||
|
}
|
||||||
|
/** Choice: a ? b : c */
|
||||||
|
export function Chi(a, b, c) {
|
||||||
|
return (a & b) ^ (~a & c);
|
||||||
|
}
|
||||||
|
/** Majority function, true if any two inputs is true. */
|
||||||
|
export function Maj(a, b, c) {
|
||||||
|
return (a & b) ^ (a & c) ^ (b & c);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Merkle-Damgard hash construction base class.
|
||||||
|
* Could be used to create MD5, RIPEMD, SHA1, SHA2.
|
||||||
|
*/
|
||||||
|
export class HashMD extends Hash {
|
||||||
|
constructor(blockLen, outputLen, padOffset, isLE) {
|
||||||
|
super();
|
||||||
|
this.finished = false;
|
||||||
|
this.length = 0;
|
||||||
|
this.pos = 0;
|
||||||
|
this.destroyed = false;
|
||||||
|
this.blockLen = blockLen;
|
||||||
|
this.outputLen = outputLen;
|
||||||
|
this.padOffset = padOffset;
|
||||||
|
this.isLE = isLE;
|
||||||
|
this.buffer = new Uint8Array(blockLen);
|
||||||
|
this.view = createView(this.buffer);
|
||||||
|
}
|
||||||
|
update(data) {
|
||||||
|
aexists(this);
|
||||||
|
data = toBytes(data);
|
||||||
|
abytes(data);
|
||||||
|
const { view, buffer, blockLen } = this;
|
||||||
|
const len = data.length;
|
||||||
|
for (let pos = 0; pos < len;) {
|
||||||
|
const take = Math.min(blockLen - this.pos, len - pos);
|
||||||
|
// Fast path: we have at least one block in input, cast it to view and process
|
||||||
|
if (take === blockLen) {
|
||||||
|
const dataView = createView(data);
|
||||||
|
for (; blockLen <= len - pos; pos += blockLen)
|
||||||
|
this.process(dataView, pos);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
buffer.set(data.subarray(pos, pos + take), this.pos);
|
||||||
|
this.pos += take;
|
||||||
|
pos += take;
|
||||||
|
if (this.pos === blockLen) {
|
||||||
|
this.process(view, 0);
|
||||||
|
this.pos = 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.length += data.length;
|
||||||
|
this.roundClean();
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
digestInto(out) {
|
||||||
|
aexists(this);
|
||||||
|
aoutput(out, this);
|
||||||
|
this.finished = true;
|
||||||
|
// Padding
|
||||||
|
// We can avoid allocation of buffer for padding completely if it
|
||||||
|
// was previously not allocated here. But it won't change performance.
|
||||||
|
const { buffer, view, blockLen, isLE } = this;
|
||||||
|
let { pos } = this;
|
||||||
|
// append the bit '1' to the message
|
||||||
|
buffer[pos++] = 0b10000000;
|
||||||
|
clean(this.buffer.subarray(pos));
|
||||||
|
// we have less than padOffset left in buffer, so we cannot put length in
|
||||||
|
// current block, need process it and pad again
|
||||||
|
if (this.padOffset > blockLen - pos) {
|
||||||
|
this.process(view, 0);
|
||||||
|
pos = 0;
|
||||||
|
}
|
||||||
|
// Pad until full block byte with zeros
|
||||||
|
for (let i = pos; i < blockLen; i++)
|
||||||
|
buffer[i] = 0;
|
||||||
|
// Note: sha512 requires length to be 128bit integer, but length in JS will overflow before that
|
||||||
|
// You need to write around 2 exabytes (u64_max / 8 / (1024**6)) for this to happen.
|
||||||
|
// So we just write lowest 64 bits of that value.
|
||||||
|
setBigUint64(view, blockLen - 8, BigInt(this.length * 8), isLE);
|
||||||
|
this.process(view, 0);
|
||||||
|
const oview = createView(out);
|
||||||
|
const len = this.outputLen;
|
||||||
|
// NOTE: we do division by 4 later, which should be fused in single op with modulo by JIT
|
||||||
|
if (len % 4)
|
||||||
|
throw new Error('_sha2: outputLen should be aligned to 32bit');
|
||||||
|
const outLen = len / 4;
|
||||||
|
const state = this.get();
|
||||||
|
if (outLen > state.length)
|
||||||
|
throw new Error('_sha2: outputLen bigger than state');
|
||||||
|
for (let i = 0; i < outLen; i++)
|
||||||
|
oview.setUint32(4 * i, state[i], isLE);
|
||||||
|
}
|
||||||
|
digest() {
|
||||||
|
const { buffer, outputLen } = this;
|
||||||
|
this.digestInto(buffer);
|
||||||
|
const res = buffer.slice(0, outputLen);
|
||||||
|
this.destroy();
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
_cloneInto(to) {
|
||||||
|
to || (to = new this.constructor());
|
||||||
|
to.set(...this.get());
|
||||||
|
const { blockLen, buffer, length, finished, destroyed, pos } = this;
|
||||||
|
to.destroyed = destroyed;
|
||||||
|
to.finished = finished;
|
||||||
|
to.length = length;
|
||||||
|
to.pos = pos;
|
||||||
|
if (length % blockLen)
|
||||||
|
to.buffer.set(buffer);
|
||||||
|
return to;
|
||||||
|
}
|
||||||
|
clone() {
|
||||||
|
return this._cloneInto();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Initial SHA-2 state: fractional parts of square roots of first 16 primes 2..53.
|
||||||
|
* Check out `test/misc/sha2-gen-iv.js` for recomputation guide.
|
||||||
|
*/
|
||||||
|
/** Initial SHA256 state. Bits 0..32 of frac part of sqrt of primes 2..19 */
|
||||||
|
export const SHA256_IV = /* @__PURE__ */ Uint32Array.from([
|
||||||
|
0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
|
||||||
|
]);
|
||||||
|
/** Initial SHA224 state. Bits 32..64 of frac part of sqrt of primes 23..53 */
|
||||||
|
export const SHA224_IV = /* @__PURE__ */ Uint32Array.from([
|
||||||
|
0xc1059ed8, 0x367cd507, 0x3070dd17, 0xf70e5939, 0xffc00b31, 0x68581511, 0x64f98fa7, 0xbefa4fa4,
|
||||||
|
]);
|
||||||
|
/** Initial SHA384 state. Bits 0..64 of frac part of sqrt of primes 23..53 */
|
||||||
|
export const SHA384_IV = /* @__PURE__ */ Uint32Array.from([
|
||||||
|
0xcbbb9d5d, 0xc1059ed8, 0x629a292a, 0x367cd507, 0x9159015a, 0x3070dd17, 0x152fecd8, 0xf70e5939,
|
||||||
|
0x67332667, 0xffc00b31, 0x8eb44a87, 0x68581511, 0xdb0c2e0d, 0x64f98fa7, 0x47b5481d, 0xbefa4fa4,
|
||||||
|
]);
|
||||||
|
/** Initial SHA512 state. Bits 0..64 of frac part of sqrt of primes 2..19 */
|
||||||
|
export const SHA512_IV = /* @__PURE__ */ Uint32Array.from([
|
||||||
|
0x6a09e667, 0xf3bcc908, 0xbb67ae85, 0x84caa73b, 0x3c6ef372, 0xfe94f82b, 0xa54ff53a, 0x5f1d36f1,
|
||||||
|
0x510e527f, 0xade682d1, 0x9b05688c, 0x2b3e6c1f, 0x1f83d9ab, 0xfb41bd6b, 0x5be0cd19, 0x137e2179,
|
||||||
|
]);
|
||||||
|
//# sourceMappingURL=_md.js.map
1 service-login-verify/node_modules/@noble/hashes/esm/_md.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
55 service-login-verify/node_modules/@noble/hashes/esm/_u64.d.ts generated vendored Normal file
@ -0,0 +1,55 @@
|
|||||||
|
declare function fromBig(n: bigint, le?: boolean): {
|
||||||
|
h: number;
|
||||||
|
l: number;
|
||||||
|
};
|
||||||
|
declare function split(lst: bigint[], le?: boolean): Uint32Array[];
|
||||||
|
declare const toBig: (h: number, l: number) => bigint;
|
||||||
|
declare const shrSH: (h: number, _l: number, s: number) => number;
|
||||||
|
declare const shrSL: (h: number, l: number, s: number) => number;
|
||||||
|
declare const rotrSH: (h: number, l: number, s: number) => number;
|
||||||
|
declare const rotrSL: (h: number, l: number, s: number) => number;
|
||||||
|
declare const rotrBH: (h: number, l: number, s: number) => number;
|
||||||
|
declare const rotrBL: (h: number, l: number, s: number) => number;
|
||||||
|
declare const rotr32H: (_h: number, l: number) => number;
|
||||||
|
declare const rotr32L: (h: number, _l: number) => number;
|
||||||
|
declare const rotlSH: (h: number, l: number, s: number) => number;
|
||||||
|
declare const rotlSL: (h: number, l: number, s: number) => number;
|
||||||
|
declare const rotlBH: (h: number, l: number, s: number) => number;
|
||||||
|
declare const rotlBL: (h: number, l: number, s: number) => number;
|
||||||
|
declare function add(Ah: number, Al: number, Bh: number, Bl: number): {
|
||||||
|
h: number;
|
||||||
|
l: number;
|
||||||
|
};
|
||||||
|
declare const add3L: (Al: number, Bl: number, Cl: number) => number;
|
||||||
|
declare const add3H: (low: number, Ah: number, Bh: number, Ch: number) => number;
|
||||||
|
declare const add4L: (Al: number, Bl: number, Cl: number, Dl: number) => number;
|
||||||
|
declare const add4H: (low: number, Ah: number, Bh: number, Ch: number, Dh: number) => number;
|
||||||
|
declare const add5L: (Al: number, Bl: number, Cl: number, Dl: number, El: number) => number;
|
||||||
|
declare const add5H: (low: number, Ah: number, Bh: number, Ch: number, Dh: number, Eh: number) => number;
|
||||||
|
export { add, add3H, add3L, add4H, add4L, add5H, add5L, fromBig, rotlBH, rotlBL, rotlSH, rotlSL, rotr32H, rotr32L, rotrBH, rotrBL, rotrSH, rotrSL, shrSH, shrSL, split, toBig };
|
||||||
|
declare const u64: {
|
||||||
|
fromBig: typeof fromBig;
|
||||||
|
split: typeof split;
|
||||||
|
toBig: (h: number, l: number) => bigint;
|
||||||
|
shrSH: (h: number, _l: number, s: number) => number;
|
||||||
|
shrSL: (h: number, l: number, s: number) => number;
|
||||||
|
rotrSH: (h: number, l: number, s: number) => number;
|
||||||
|
rotrSL: (h: number, l: number, s: number) => number;
|
||||||
|
rotrBH: (h: number, l: number, s: number) => number;
|
||||||
|
rotrBL: (h: number, l: number, s: number) => number;
|
||||||
|
rotr32H: (_h: number, l: number) => number;
|
||||||
|
rotr32L: (h: number, _l: number) => number;
|
||||||
|
rotlSH: (h: number, l: number, s: number) => number;
|
||||||
|
rotlSL: (h: number, l: number, s: number) => number;
|
||||||
|
rotlBH: (h: number, l: number, s: number) => number;
|
||||||
|
rotlBL: (h: number, l: number, s: number) => number;
|
||||||
|
add: typeof add;
|
||||||
|
add3L: (Al: number, Bl: number, Cl: number) => number;
|
||||||
|
add3H: (low: number, Ah: number, Bh: number, Ch: number) => number;
|
||||||
|
add4L: (Al: number, Bl: number, Cl: number, Dl: number) => number;
|
||||||
|
add4H: (low: number, Ah: number, Bh: number, Ch: number, Dh: number) => number;
|
||||||
|
add5H: (low: number, Ah: number, Bh: number, Ch: number, Dh: number, Eh: number) => number;
|
||||||
|
add5L: (Al: number, Bl: number, Cl: number, Dl: number, El: number) => number;
|
||||||
|
};
|
||||||
|
export default u64;
|
||||||
|
//# sourceMappingURL=_u64.d.ts.map
Some files were not shown because too many files have changed in this diff.