ci: docker_tag=dev-test - Synchronisation confs et nettoyage; ajout scripts centralisés
This commit is contained in:
parent
7e7f5c9900
commit
6ffe3e49cc
@ -1 +0,0 @@
|
||||
./
|
@ -1 +0,0 @@
|
||||
./
|
@ -1 +0,0 @@
|
||||
./
|
@ -1 +0,0 @@
|
||||
./
|
@ -1 +0,0 @@
|
||||
./
|
@ -1 +0,0 @@
|
||||
./
|
@ -1 +0,0 @@
|
||||
./
|
@ -1 +0,0 @@
|
||||
./
|
@ -1,11 +0,0 @@
|
||||
{
|
||||
"folders": [
|
||||
{
|
||||
"path": "../../.."
|
||||
},
|
||||
{
|
||||
"path": "../../../../../../etc/nginx"
|
||||
}
|
||||
],
|
||||
"settings": {}
|
||||
}
|
@ -1 +0,0 @@
|
||||
./
|
@ -1,107 +0,0 @@
|
||||
server:
|
||||
http_listen_port: 8090
|
||||
grpc_listen_port: 0
|
||||
|
||||
positions:
|
||||
filename: /tmp/positions.yaml
|
||||
|
||||
clients:
|
||||
- url: http://loki:3100/loki/api/v1/push
|
||||
|
||||
scrape_configs:
|
||||
# Bitcoin Signet Logs
|
||||
- job_name: bitcoin
|
||||
static_configs:
|
||||
- targets:
|
||||
- localhost
|
||||
labels:
|
||||
job: bitcoin
|
||||
service: bitcoin-signet
|
||||
__path__: /home/debian/4NK_env/lecoffre_node/logs/bitcoin/*.log
|
||||
|
||||
# Blindbit Oracle Logs
|
||||
- job_name: blindbit
|
||||
static_configs:
|
||||
- targets:
|
||||
- localhost
|
||||
labels:
|
||||
job: blindbit
|
||||
service: blindbit-oracle
|
||||
__path__: /home/debian/4NK_env/lecoffre_node/logs/blindbit-oracle/*.log
|
||||
|
||||
# SDK Relay Logs
|
||||
- job_name: sdk_relay
|
||||
static_configs:
|
||||
- targets:
|
||||
- localhost
|
||||
labels:
|
||||
job: sdk_relay
|
||||
service: sdk_relay
|
||||
__path__: /home/debian/4NK_env/lecoffre_node/logs/sdk_relay/*.log
|
||||
|
||||
# SDK Storage Logs
|
||||
- job_name: sdk_storage
|
||||
static_configs:
|
||||
- targets:
|
||||
- localhost
|
||||
labels:
|
||||
job: sdk_storage
|
||||
service: sdk_storage
|
||||
__path__: /home/debian/4NK_env/lecoffre_node/logs/sdk_storage/*.log
|
||||
|
||||
# LeCoffre Frontend Logs
|
||||
- job_name: lecoffre-front
|
||||
static_configs:
|
||||
- targets:
|
||||
- localhost
|
||||
labels:
|
||||
job: lecoffre-front
|
||||
service: lecoffre-front
|
||||
__path__: /home/debian/4NK_env/lecoffre_node/logs/lecoffre-front/*.log
|
||||
|
||||
# IHM Client Logs
|
||||
- job_name: ihm_client
|
||||
static_configs:
|
||||
- targets:
|
||||
- localhost
|
||||
labels:
|
||||
job: ihm_client
|
||||
service: ihm_client
|
||||
__path__: /home/debian/4NK_env/lecoffre_node/logs/ihm_client/*.log
|
||||
|
||||
# Miner Logs
|
||||
- job_name: miner
|
||||
static_configs:
|
||||
- targets:
|
||||
- localhost
|
||||
labels:
|
||||
job: miner
|
||||
service: signet_miner
|
||||
__path__: /home/debian/4NK_env/lecoffre_node/logs/miner/*.log
|
||||
|
||||
# Tor Logs
|
||||
- job_name: tor
|
||||
static_configs:
|
||||
- targets:
|
||||
- localhost
|
||||
labels:
|
||||
job: tor
|
||||
service: tor-proxy
|
||||
__path__: /home/debian/4NK_env/lecoffre_node/logs/tor/*.log
|
||||
|
||||
# Docker Container Logs
|
||||
- job_name: docker
|
||||
docker_sd_configs:
|
||||
- host: unix:///var/run/docker.sock
|
||||
refresh_interval: 5s
|
||||
filters:
|
||||
- name: label
|
||||
values: ["com.centurylinklabs.watchtower.enable=true"]
|
||||
relabel_configs:
|
||||
- source_labels: ['__meta_docker_container_name']
|
||||
regex: '/?(.*)'
|
||||
target_label: 'container_name'
|
||||
- source_labels: ['__meta_docker_container_log_stream']
|
||||
target_label: 'logstream'
|
||||
- source_labels: ['__meta_docker_container_label_logging_job_name']
|
||||
target_label: 'job'
|
@ -1 +0,0 @@
|
||||
./
|
@ -1 +0,0 @@
|
||||
./
|
@ -1 +0,0 @@
|
||||
./
|
@ -1 +0,0 @@
|
||||
./
|
@ -1 +0,0 @@
|
||||
./
|
@ -1 +0,0 @@
|
||||
./
|
@ -1,21 +0,0 @@
|
||||
# Configuration Tor pour LeCoffre Node
|
||||
# Écoute sur 127.0.0.1 pour la sécurité
|
||||
|
||||
# Port SOCKS pour les connexions sortantes
|
||||
SOCKSPort 127.0.0.1:9050
|
||||
|
||||
# Port de contrôle (désactivé pour la sécurité)
|
||||
# ControlPort 127.0.0.1:9050
|
||||
|
||||
# Configuration de base
|
||||
Log notice file /home/debian/4NK_env/lecoffre_node/logs/tor/tor.log
|
||||
DataDirectory /home/debian/4NK_env/lecoffre_node/data/tor
|
||||
|
||||
# Configuration réseau
|
||||
ClientOnly 1
|
||||
SafeLogging 1
|
||||
WarnUnsafeSocks 1
|
||||
|
||||
# Désactiver les services cachés
|
||||
HiddenServiceDir /home/debian/4NK_env/lecoffre_node/data/tor/hidden_service/
|
||||
HiddenServicePort 80 127.0.0.1:80
|
@ -1,49 +0,0 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
FROM debian:bookworm-slim
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Installation des dépendances de base
|
||||
RUN apt-get update && apt-get upgrade -y && \
|
||||
apt-get install -y --fix-missing \
|
||||
ca-certificates curl jq git python3 python3-pip && \
|
||||
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
||||
# Création d'un utilisateur non-root
|
||||
RUN useradd -m -u 1000 appuser && \
|
||||
mkdir -p /app && chown -R appuser:appuser /app
|
||||
|
||||
# Installer bitcoin-cli (binaire officiel)
|
||||
RUN curl -L -o /tmp/bitcoin-cli.tar.gz https://bitcoincore.org/bin/bitcoin-core-26.2/bitcoin-26.2-x86_64-linux-gnu.tar.gz \
|
||||
&& mkdir -p /tmp/bitcoin-cli \
|
||||
&& tar -xzf /tmp/bitcoin-cli.tar.gz -C /tmp/bitcoin-cli --strip-components=2 bitcoin-26.2/bin/bitcoin-cli \
|
||||
&& mv /tmp/bitcoin-cli/bitcoin-cli /usr/local/bin/bitcoin-cli \
|
||||
&& chmod +x /usr/local/bin/bitcoin-cli \
|
||||
&& rm -rf /tmp/bitcoin-cli /tmp/bitcoin-cli.tar.gz
|
||||
|
||||
COPY requirements.txt ./
|
||||
RUN pip3 install --no-cache-dir -r requirements.txt
|
||||
|
||||
# Vendoriser test_framework depuis Bitcoin Core (pour le script signet/miner)
|
||||
RUN curl -L -o /tmp/bitcoin-core.tar.gz https://github.com/bitcoin/bitcoin/archive/refs/tags/v26.2.tar.gz \
|
||||
&& mkdir -p /tmp/bitcoin-core \
|
||||
&& tar -xzf /tmp/bitcoin-core.tar.gz -C /tmp/bitcoin-core --strip-components=1 \
|
||||
&& mkdir -p /app/test/functional \
|
||||
&& cp -r /tmp/bitcoin-core/test/functional/test_framework /app/test/functional/test_framework \
|
||||
&& rm -rf /tmp/bitcoin-core /tmp/bitcoin-core.tar.gz
|
||||
|
||||
COPY entrypoint.sh ./
|
||||
COPY signet_miner.py ./
|
||||
COPY signet/ ./signet/
|
||||
|
||||
RUN chmod +x /app/entrypoint.sh && \
|
||||
chown -R appuser:appuser /app
|
||||
|
||||
USER appuser
|
||||
|
||||
VOLUME ["/bitcoin"]
|
||||
|
||||
ENTRYPOINT ["/app/entrypoint.sh"]
|
@ -1,56 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
BITCOIN_DIR=${BITCOIN_DIR:-/bitcoin}
|
||||
COOKIE_FILE=${COOKIE_FILE:-$BITCOIN_DIR/signet/.cookie}
|
||||
RPC_HOST=${RPC_HOST:-bitcoin}
|
||||
RPC_PORT=${RPC_PORT:-38332}
|
||||
POLL_INTERVAL=${POLL_INTERVAL:-5}
|
||||
WATCHONLY_WALLET=${WATCHONLY_WALLET:-watchonly}
|
||||
MINING_WALLET=${MINING_WALLET:-mining_mnemonic}
|
||||
MINER_TAG=${MINER_TAG:-lecoffre}
|
||||
|
||||
# Ajouter test_framework au PYTHONPATH
|
||||
export PYTHONPATH="/app/test/functional:${PYTHONPATH:-}"
|
||||
|
||||
if [ ! -f "$COOKIE_FILE" ]; then
|
||||
echo "Cookie introuvable: $COOKIE_FILE" >&2
|
||||
ls -la "$BITCOIN_DIR" || true
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Variables attendues via miner/.env
|
||||
# COINBASE_ADDRESS est optionnel - si non défini, une adresse sera générée automatiquement
|
||||
|
||||
# Adresse du relay pour partager les rewards (optionnel)
|
||||
RELAY_ADDRESS="${RELAY_ADDRESS:-}"
|
||||
REWARD_SPLIT_RATIO="${REWARD_SPLIT_RATIO:-0.5}"
|
||||
|
||||
# Lancer le miner (les options globales doivent précéder la sous-commande)
|
||||
MINER_CMD=(
|
||||
python /app/signet/miner \
|
||||
--cli "bitcoin-cli -datadir=$BITCOIN_DIR -rpcconnect=$RPC_HOST -rpcport=$RPC_PORT -rpccookiefile=$COOKIE_FILE" \
|
||||
generate \
|
||||
--ongoing \
|
||||
--min-nbits \
|
||||
--WATCHONLY_WALLET "$WATCHONLY_WALLET" \
|
||||
--MINING_WALLET "$MINING_WALLET" \
|
||||
--MINER_TAG "$MINER_TAG"
|
||||
)
|
||||
|
||||
if [ -n "${COINBASE_ADDRESS:-}" ]; then
|
||||
MINER_CMD+=( --address "$COINBASE_ADDRESS" )
|
||||
elif [ -n "${COINBASE_DESCRIPTOR:-}" ]; then
|
||||
MINER_CMD+=( --descriptor "$COINBASE_DESCRIPTOR" )
|
||||
else
|
||||
# Générer automatiquement une adresse
|
||||
MINER_CMD+=( --address "auto" )
|
||||
fi
|
||||
|
||||
if [ -n "${RELAY_ADDRESS:-}" ]; then
|
||||
MINER_CMD+=( --relay-address "$RELAY_ADDRESS" )
|
||||
fi
|
||||
|
||||
MINER_CMD+=( --reward-split-ratio "$REWARD_SPLIT_RATIO" )
|
||||
|
||||
exec "${MINER_CMD[@]}"
|
@ -1,3 +0,0 @@
|
||||
requests==2.32.3
|
||||
python-bitcointx==1.1.2
|
||||
websockets==12.0
|
@ -1,83 +0,0 @@
|
||||
Contents
|
||||
========
|
||||
This directory contains tools related to Signet, both for running a Signet yourself and for using one.
|
||||
|
||||
getcoins.py
|
||||
===========
|
||||
|
||||
A script to call a faucet to get Signet coins.
|
||||
|
||||
Syntax: `getcoins.py [-h|--help] [-c|--cmd=<bitcoin-cli path>] [-f|--faucet=<faucet URL>] [-a|--addr=<signet bech32 address>] [-p|--password=<faucet password>] [--] [<bitcoin-cli args>]`
|
||||
|
||||
* `--cmd` lets you customize the bitcoin-cli path. By default it will look for it in the PATH
|
||||
* `--faucet` lets you specify which faucet to use; the faucet is assumed to be compatible with https://github.com/kallewoof/bitcoin-faucet
|
||||
* `--addr` lets you specify a Signet address; by default, the address must be a bech32 address. This and `--cmd` above complement each other (i.e. you do not need `bitcoin-cli` if you use `--addr`)
|
||||
* `--password` lets you specify a faucet password; this is handy if you are in a classroom and set up your own faucet for your students; (above faucet does not limit by IP when password is enabled)
|
||||
|
||||
If using the default network, invoking the script with no arguments should be sufficient under normal
|
||||
circumstances, but if multiple people are behind the same IP address, the faucet will by default only
|
||||
accept one claim per day. See `--password` above.
|
||||
|
||||
miner
|
||||
=====
|
||||
|
||||
You will first need to pick a difficulty target. Since signet chains are primarily protected by a signature rather than proof of work, there is no need to spend as much energy as possible mining, however you may wish to choose to spend more time than the absolute minimum. The calibrate subcommand can be used to pick a target appropriate for your hardware, eg:
|
||||
|
||||
cd src/
|
||||
MINER="../contrib/signet/miner"
|
||||
GRIND="./bitcoin-util grind"
|
||||
$MINER calibrate --grind-cmd="$GRIND"
|
||||
nbits=1e00f403 for 25s average mining time
|
||||
|
||||
It defaults to estimating an nbits value resulting in 25s average time to find a block, but the --seconds parameter can be used to pick a different target, or the --nbits parameter can be used to estimate how long it will take for a given difficulty.
|
||||
|
||||
To mine the first block in your custom chain, you can run:
|
||||
|
||||
CLI="./bitcoin-cli -conf=mysignet.conf"
|
||||
ADDR=$($CLI -signet getnewaddress)
|
||||
NBITS=1e00f403
|
||||
$MINER --cli="$CLI" generate --grind-cmd="$GRIND" --address="$ADDR" --nbits=$NBITS
|
||||
|
||||
This will mine a single block with a backdated timestamp designed to allow 100 blocks to be mined as quickly as possible, so that it is possible to do transactions.
|
||||
|
||||
Adding the --ongoing parameter will then cause the signet miner to create blocks indefinitely. It will pick the time between blocks so that difficulty is adjusted to match the provided --nbits value.
|
||||
|
||||
$MINER --cli="$CLI" generate --grind-cmd="$GRIND" --address="$ADDR" --nbits=$NBITS --ongoing
|
||||
|
||||
Other options
|
||||
-------------
|
||||
|
||||
The --debug and --quiet options are available to control how noisy the signet miner's output is. Note that the --debug, --quiet and --cli parameters must all appear before the subcommand (generate, calibrate, etc) if used.
|
||||
|
||||
Instead of specifying --ongoing, you can specify --max-blocks=N to mine N blocks and stop.
|
||||
|
||||
The --set-block-time option is available to manually move timestamps forward or backward (subject to the rules that blocktime must be greater than mediantime, and dates can't be more than two hours in the future). It can only be used when mining a single block (ie, not when using --ongoing or --max-blocks greater than 1).
|
||||
|
||||
Instead of using a single address, a ranged descriptor may be provided via the --descriptor parameter, with the reward for the block at height H being sent to the H'th address generated from the descriptor.
|
||||
|
||||
Instead of calculating a specific nbits value, --min-nbits can be specified instead, in which case the minimum signet difficulty will be targeted. Signet's minimum difficulty corresponds to --nbits=1e0377ae.
|
||||
|
||||
By default, the signet miner mines blocks at fixed intervals with minimal variation. If you want blocks to appear more randomly, as they do in mainnet, specify the --poisson option.
|
||||
|
||||
Using the --multiminer parameter allows mining to be distributed amongst multiple miners. For example, if you have 3 miners and want to share blocks between them, specify --multiminer=1/3 on one, --multiminer=2/3 on another, and --multiminer=3/3 on the last one. If you want one to do 10% of blocks and two others to do 45% each, --multiminer=1-10/100 on the first, and --multiminer=11-55 and --multiminer=56-100 on the others. Note that which miner mines which block is determined by the previous block hash, so occasional runs of one miner doing many blocks in a row is to be expected.
|
||||
|
||||
When --multiminer is used, if a miner is down and does not mine a block within five minutes of when it is due, the other miners will automatically act as redundant backups ensuring the chain does not halt. The --backup-delay parameter can be used to change how long a given miner waits, allowing one to be the primary backup (after five minutes) and another to be the secondary backup (after six minutes, eg).
|
||||
|
||||
The --standby-delay parameter can be used to make a backup miner that only mines if a block doesn't arrive on time. This can be combined with --multiminer if desired. Setting --standby-delay also prevents the first block from being mined immediately.
|
||||
|
||||
Advanced usage
|
||||
--------------
|
||||
|
||||
The process generate follows internally is to get a block template, convert that into a PSBT, sign the PSBT, move the signature from the signed PSBT into the block template's coinbase, grind proof of work for the block, and then submit the block to the network.
|
||||
|
||||
These steps can instead be done explicitly:
|
||||
|
||||
$CLI -signet getblocktemplate '{"rules": ["signet","segwit"]}' |
|
||||
$MINER --cli="$CLI" genpsbt --address="$ADDR" |
|
||||
$CLI -signet -stdin walletprocesspsbt |
|
||||
jq -r .psbt |
|
||||
$MINER --cli="$CLI" solvepsbt --grind-cmd="$GRIND" |
|
||||
$CLI -signet -stdin submitblock
|
||||
|
||||
This is intended to allow you to replace part of the pipeline for further experimentation (eg, to sign the block with a hardware wallet).
|
||||
|
@ -1,158 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2020-2021 The Bitcoin Core developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
import argparse
|
||||
import io
|
||||
import requests
|
||||
import subprocess
|
||||
import sys
|
||||
import xml.etree.ElementTree
|
||||
|
||||
DEFAULT_GLOBAL_FAUCET = 'https://signetfaucet.com/claim'
|
||||
DEFAULT_GLOBAL_CAPTCHA = 'https://signetfaucet.com/captcha'
|
||||
GLOBAL_FIRST_BLOCK_HASH = '00000086d6b2636cb2a392d45edc4ec544a10024d30141c9adf4bfd9de533b53'
|
||||
|
||||
# braille unicode block
|
||||
BASE = 0x2800
|
||||
BIT_PER_PIXEL = [
|
||||
[0x01, 0x08],
|
||||
[0x02, 0x10],
|
||||
[0x04, 0x20],
|
||||
[0x40, 0x80],
|
||||
]
|
||||
BW = 2
|
||||
BH = 4
|
||||
|
||||
# imagemagick or compatible fork (used for converting SVG)
|
||||
CONVERT = 'convert'
|
||||
|
||||
class PPMImage:
|
||||
'''
|
||||
Load a PPM image (Pillow-ish API).
|
||||
'''
|
||||
def __init__(self, f):
|
||||
if f.readline() != b'P6\n':
|
||||
raise ValueError('Invalid ppm format: header')
|
||||
line = f.readline()
|
||||
(width, height) = (int(x) for x in line.rstrip().split(b' '))
|
||||
if f.readline() != b'255\n':
|
||||
raise ValueError('Invalid ppm format: color depth')
|
||||
data = f.read(width * height * 3)
|
||||
stride = width * 3
|
||||
self.size = (width, height)
|
||||
self._grid = [[tuple(data[stride * y + 3 * x:stride * y + 3 * (x + 1)]) for x in range(width)] for y in range(height)]
|
||||
|
||||
def getpixel(self, pos):
|
||||
return self._grid[pos[1]][pos[0]]
|
||||
|
||||
def print_image(img, threshold=128):
|
||||
'''Print black-and-white image to terminal in braille unicode characters.'''
|
||||
x_blocks = (img.size[0] + BW - 1) // BW
|
||||
y_blocks = (img.size[1] + BH - 1) // BH
|
||||
|
||||
for yb in range(y_blocks):
|
||||
line = []
|
||||
for xb in range(x_blocks):
|
||||
ch = BASE
|
||||
for y in range(BH):
|
||||
for x in range(BW):
|
||||
try:
|
||||
val = img.getpixel((xb * BW + x, yb * BH + y))
|
||||
except IndexError:
|
||||
pass
|
||||
else:
|
||||
if val[0] < threshold:
|
||||
ch |= BIT_PER_PIXEL[y][x]
|
||||
line.append(chr(ch))
|
||||
print(''.join(line))
|
||||
|
||||
parser = argparse.ArgumentParser(description='Script to get coins from a faucet.', epilog='You may need to start with double-dash (--) when providing bitcoin-cli arguments.')
|
||||
parser.add_argument('-c', '--cmd', dest='cmd', default='bitcoin-cli', help='bitcoin-cli command to use')
|
||||
parser.add_argument('-f', '--faucet', dest='faucet', default=DEFAULT_GLOBAL_FAUCET, help='URL of the faucet')
|
||||
parser.add_argument('-g', '--captcha', dest='captcha', default=DEFAULT_GLOBAL_CAPTCHA, help='URL of the faucet captcha, or empty if no captcha is needed')
|
||||
parser.add_argument('-a', '--addr', dest='addr', default='', help='Bitcoin address to which the faucet should send')
|
||||
parser.add_argument('-p', '--password', dest='password', default='', help='Faucet password, if any')
|
||||
parser.add_argument('-n', '--amount', dest='amount', default='0.001', help='Amount to request (0.001-0.1, default is 0.001)')
|
||||
parser.add_argument('-i', '--imagemagick', dest='imagemagick', default=CONVERT, help='Path to imagemagick convert utility')
|
||||
parser.add_argument('bitcoin_cli_args', nargs='*', help='Arguments to pass on to bitcoin-cli (default: -signet)')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.bitcoin_cli_args == []:
|
||||
args.bitcoin_cli_args = ['-signet']
|
||||
|
||||
|
||||
def bitcoin_cli(rpc_command_and_params):
|
||||
argv = [args.cmd] + args.bitcoin_cli_args + rpc_command_and_params
|
||||
try:
|
||||
return subprocess.check_output(argv).strip().decode()
|
||||
except FileNotFoundError:
|
||||
raise SystemExit(f"The binary {args.cmd} could not be found")
|
||||
except subprocess.CalledProcessError:
|
||||
cmdline = ' '.join(argv)
|
||||
raise SystemExit(f"-----\nError while calling {cmdline} (see output above).")
|
||||
|
||||
|
||||
if args.faucet.lower() == DEFAULT_GLOBAL_FAUCET:
|
||||
# Get the hash of the block at height 1 of the currently active signet chain
|
||||
curr_signet_hash = bitcoin_cli(['getblockhash', '1'])
|
||||
if curr_signet_hash != GLOBAL_FIRST_BLOCK_HASH:
|
||||
raise SystemExit('The global faucet cannot be used with a custom Signet network. Please use the global signet or setup your custom faucet to use this functionality.\n')
|
||||
else:
|
||||
# For custom faucets, don't request captcha by default.
|
||||
if args.captcha == DEFAULT_GLOBAL_CAPTCHA:
|
||||
args.captcha = ''
|
||||
|
||||
if args.addr == '':
|
||||
# get address for receiving coins
|
||||
args.addr = bitcoin_cli(['getnewaddress', 'faucet', 'bech32'])
|
||||
|
||||
data = {'address': args.addr, 'password': args.password, 'amount': args.amount}
|
||||
|
||||
# Store cookies
|
||||
# for debugging: print(session.cookies.get_dict())
|
||||
session = requests.Session()
|
||||
|
||||
if args.captcha != '': # Retrieve a captcha
|
||||
try:
|
||||
res = session.get(args.captcha)
|
||||
res.raise_for_status()
|
||||
except requests.exceptions.RequestException as e:
|
||||
raise SystemExit(f"Unexpected error when contacting faucet: {e}")
|
||||
|
||||
# Size limitation
|
||||
svg = xml.etree.ElementTree.fromstring(res.content)
|
||||
if svg.attrib.get('width') != '150' or svg.attrib.get('height') != '50':
|
||||
raise SystemExit("Captcha size doesn't match expected dimensions 150x50")
|
||||
|
||||
# Convert SVG image to PPM, and load it
|
||||
try:
|
||||
rv = subprocess.run([args.imagemagick, 'svg:-', '-depth', '8', 'ppm:-'], input=res.content, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
except FileNotFoundError:
|
||||
raise SystemExit(f"The binary {args.imagemagick} could not be found. Please make sure ImageMagick (or a compatible fork) is installed and that the correct path is specified.")
|
||||
|
||||
img = PPMImage(io.BytesIO(rv.stdout))
|
||||
|
||||
# Terminal interaction
|
||||
print_image(img)
|
||||
print(f"Captcha from URL {args.captcha}")
|
||||
data['captcha'] = input('Enter captcha: ')
|
||||
|
||||
try:
|
||||
res = session.post(args.faucet, data=data)
|
||||
except:
|
||||
raise SystemExit(f"Unexpected error when contacting faucet: {sys.exc_info()[0]}")
|
||||
|
||||
# Display the output as per the returned status code
|
||||
if res:
|
||||
# When the return code is in between 200 and 400 i.e. successful
|
||||
print(res.text)
|
||||
elif res.status_code == 404:
|
||||
print('The specified faucet URL does not exist. Please check for any server issues/typo.')
|
||||
elif res.status_code == 429:
|
||||
print('The script does not allow for repeated transactions as the global faucet is rate-limitied to 1 request/IP/day. You can access the faucet website to get more coins manually')
|
||||
else:
|
||||
print(f'Returned Error Code {res.status_code}\n{res.text}\n')
|
||||
print('Please check the provided arguments for their validity and/or any possible typo.')
|
@ -1,989 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2020 The Bitcoin Core developers
|
||||
# Distributed under the MIT software license, see the accompanying
|
||||
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
|
||||
|
||||
import argparse
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
import math
|
||||
import os
|
||||
import re
|
||||
import struct
|
||||
import sys
|
||||
import time
|
||||
import subprocess
|
||||
import asyncio, websockets
|
||||
from hashlib import sha256
|
||||
|
||||
from io import BytesIO
|
||||
from random import uniform
|
||||
from decimal import Decimal
|
||||
|
||||
PATH_BASE_CONTRIB_SIGNET = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
|
||||
PATH_BASE_TEST_FUNCTIONAL = os.path.abspath(os.path.join(PATH_BASE_CONTRIB_SIGNET, "..", "..", "test", "functional"))
|
||||
sys.path.insert(0, PATH_BASE_TEST_FUNCTIONAL)
|
||||
|
||||
|
||||
from test_framework.blocktools import WITNESS_COMMITMENT_HEADER, script_BIP34_coinbase_height # noqa: E402
|
||||
from test_framework.messages import CBlock, CBlockHeader, COutPoint, CTransaction, CTxIn, CTxInWitness, CTxOut, from_hex, deser_string, hash256, ser_compact_size, ser_string, ser_uint256, tx_from_hex, uint256_from_str # noqa: E402
|
||||
from test_framework.script import CScript, CScriptOp # noqa: E402
|
||||
|
||||
logging.basicConfig(
|
||||
format='%(asctime)s %(levelname)s %(message)s',
|
||||
level=logging.INFO,
|
||||
datefmt='%Y-%m-%d %H:%M:%S')
|
||||
|
||||
SIGNET_HEADER = b"\xec\xc7\xda\xa2"
|
||||
PSBT_SIGNET_BLOCK = b"\xfc\x06signetb" # proprietary PSBT global field holding the block being signed
|
||||
RE_MULTIMINER = re.compile("^(\d+)(-(\d+))?/(\d+)$")
|
||||
CC_URL = 'ws://localhost:9823/websocket'
|
||||
|
||||
|
||||
def json_dumps(obj, **k):
|
||||
def hook(dd):
|
||||
if isinstance(dd, Decimal):
|
||||
return float(dd)
|
||||
if hasattr(dd, 'strftime'):
|
||||
return str(dd) # isoformat
|
||||
logging.error("Unhandled JSON type: %r" % dd)
|
||||
raise TypeError
|
||||
|
||||
k['default'] = hook
|
||||
|
||||
return json.dumps(obj, **k)
|
||||
|
||||
def message(action, *args):
|
||||
return json_dumps(dict(action=action, args=args))
|
||||
|
||||
POLICY = {
|
||||
'never_log': False,
|
||||
'must_log': False,
|
||||
'priv_over_ux': True,
|
||||
'boot_to_hsm': "123456",
|
||||
'period': None,
|
||||
'set_sl': None,
|
||||
'allow_sl': None,
|
||||
'rules': [
|
||||
{
|
||||
"whitelist": "",
|
||||
"per_period": None,
|
||||
"max_amount": None,
|
||||
"users": "",
|
||||
"min_users": "all",
|
||||
"local_conf": False,
|
||||
"wallet": None
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
# #### some helpers that could go into test_framework
|
||||
|
||||
# like from_hex, but without the hex part
|
||||
def FromBinary(cls, stream):
|
||||
"""deserialize a binary stream (or bytes object) into an object"""
|
||||
# handle bytes object by turning it into a stream
|
||||
was_bytes = isinstance(stream, bytes)
|
||||
if was_bytes:
|
||||
stream = BytesIO(stream)
|
||||
obj = cls()
|
||||
obj.deserialize(stream)
|
||||
if was_bytes:
|
||||
assert len(stream.read()) == 0
|
||||
return obj
|
||||
|
||||
class PSBTMap:
|
||||
"""Class for serializing and deserializing PSBT maps"""
|
||||
|
||||
def __init__(self, map=None):
|
||||
self.map = map if map is not None else {}
|
||||
|
||||
def deserialize(self, f):
|
||||
m = {}
|
||||
while True:
|
||||
k = deser_string(f)
|
||||
if len(k) == 0:
|
||||
break
|
||||
v = deser_string(f)
|
||||
if len(k) == 1:
|
||||
k = k[0]
|
||||
assert k not in m
|
||||
m[k] = v
|
||||
self.map = m
|
||||
|
||||
def serialize(self):
|
||||
m = b""
|
||||
for k,v in self.map.items():
|
||||
if isinstance(k, int) and 0 <= k and k <= 255:
|
||||
k = bytes([k])
|
||||
m += ser_compact_size(len(k)) + k
|
||||
m += ser_compact_size(len(v)) + v
|
||||
m += b"\x00"
|
||||
return m
|
||||
|
||||
class PSBT:
|
||||
"""Class for serializing and deserializing PSBTs"""
|
||||
|
||||
def __init__(self):
|
||||
self.g = PSBTMap()
|
||||
self.i = []
|
||||
self.o = []
|
||||
self.tx = None
|
||||
|
||||
def deserialize(self, f):
|
||||
assert f.read(5) == b"psbt\xff"
|
||||
self.g = FromBinary(PSBTMap, f)
|
||||
assert 0 in self.g.map
|
||||
self.tx = FromBinary(CTransaction, self.g.map[0])
|
||||
self.i = [FromBinary(PSBTMap, f) for _ in self.tx.vin]
|
||||
self.o = [FromBinary(PSBTMap, f) for _ in self.tx.vout]
|
||||
return self
|
||||
|
||||
def serialize(self):
|
||||
assert isinstance(self.g, PSBTMap)
|
||||
assert isinstance(self.i, list) and all(isinstance(x, PSBTMap) for x in self.i)
|
||||
assert isinstance(self.o, list) and all(isinstance(x, PSBTMap) for x in self.o)
|
||||
assert 0 in self.g.map
|
||||
tx = FromBinary(CTransaction, self.g.map[0])
|
||||
assert len(tx.vin) == len(self.i)
|
||||
assert len(tx.vout) == len(self.o)
|
||||
|
||||
psbt = [x.serialize() for x in [self.g] + self.i + self.o]
|
||||
return b"psbt\xff" + b"".join(psbt)
|
||||
|
||||
def to_base64(self):
|
||||
return base64.b64encode(self.serialize()).decode("utf8")
|
||||
|
||||
@classmethod
|
||||
def from_base64(cls, b64psbt):
|
||||
return FromBinary(cls, base64.b64decode(b64psbt))
|
||||
|
||||
# #####
|
||||
|
||||
def create_coinbase(height, value, spk, miner_tag=''):
|
||||
cb = CTransaction()
|
||||
scriptsig = bytes(script_BIP34_coinbase_height(height))
|
||||
if miner_tag is not None:
|
||||
scriptsig = CScript(scriptsig + CScriptOp.encode_op_pushdata(miner_tag.encode()))
|
||||
else:
|
||||
scriptsig = CScript(scriptsig)
|
||||
cb.vin = [CTxIn(COutPoint(0, 0xffffffff), scriptsig, 0xffffffff)]
|
||||
|
||||
# Utiliser une seule sortie pour le miner (le relay recevra des fonds via une transaction normale)
|
||||
cb.vout = [CTxOut(value, spk)]
|
||||
logging.info(f"Coinbase reward: {value} sat to miner")
|
||||
|
||||
return cb
|
||||
|
||||
def get_witness_script(witness_root, witness_nonce):
|
||||
commitment = uint256_from_str(hash256(ser_uint256(witness_root) + ser_uint256(witness_nonce)))
|
||||
return b"\x6a" + CScriptOp.encode_op_pushdata(WITNESS_COMMITMENT_HEADER + ser_uint256(commitment))
|
||||
|
||||
def signet_txs(block, challenge):
|
||||
# assumes signet solution has not been added yet so does not need
|
||||
# to be removed
|
||||
|
||||
txs = block.vtx[:]
|
||||
txs[0] = CTransaction(txs[0])
|
||||
txs[0].vout[-1].scriptPubKey += CScriptOp.encode_op_pushdata(SIGNET_HEADER)
|
||||
hashes = []
|
||||
for tx in txs:
|
||||
tx.rehash()
|
||||
hashes.append(ser_uint256(tx.sha256))
|
||||
mroot = block.get_merkle_root(hashes)
|
||||
|
||||
sd = b""
|
||||
sd += struct.pack("<i", block.nVersion)
|
||||
sd += ser_uint256(block.hashPrevBlock)
|
||||
sd += ser_uint256(mroot)
|
||||
sd += struct.pack("<I", block.nTime)
|
||||
|
||||
to_spend = CTransaction()
|
||||
to_spend.nVersion = 0
|
||||
to_spend.nLockTime = 0
|
||||
to_spend.vin = [CTxIn(COutPoint(0, 0xFFFFFFFF), b"\x00" + CScriptOp.encode_op_pushdata(sd), 0)]
|
||||
to_spend.vout = [CTxOut(0, challenge)]
|
||||
to_spend.rehash()
|
||||
|
||||
spend = CTransaction()
|
||||
spend.nVersion = 0
|
||||
spend.nLockTime = 0
|
||||
spend.vin = [CTxIn(COutPoint(to_spend.sha256, 0), b"", 0)]
|
||||
spend.vout = [CTxOut(0, b"\x6a")]
|
||||
|
||||
return spend, to_spend
|
||||
|
||||
def do_createpsbt(block, signme, spendme):
|
||||
psbt = PSBT()
|
||||
psbt.g = PSBTMap( {0: signme.serialize(),
|
||||
PSBT_SIGNET_BLOCK: block.serialize()
|
||||
} )
|
||||
psbt.i = [ PSBTMap( {0: spendme.serialize(),
|
||||
3: bytes([1,0,0,0])})
|
||||
]
|
||||
psbt.o = [ PSBTMap() ]
|
||||
return psbt.to_base64()
|
||||
|
||||
def do_decode_psbt(b64psbt):
|
||||
psbt = PSBT.from_base64(b64psbt)
|
||||
|
||||
assert len(psbt.tx.vin) == 1
|
||||
assert len(psbt.tx.vout) == 1
|
||||
assert PSBT_SIGNET_BLOCK in psbt.g.map
|
||||
|
||||
scriptSig = psbt.i[0].map.get(7, b"")
|
||||
scriptWitness = psbt.i[0].map.get(8, b"\x00")
|
||||
|
||||
return FromBinary(CBlock, psbt.g.map[PSBT_SIGNET_BLOCK]), ser_string(scriptSig) + scriptWitness
|
||||
|
||||
def finish_block(block, signet_solution, grind_cmd):
|
||||
block.vtx[0].vout[-1].scriptPubKey += CScriptOp.encode_op_pushdata(SIGNET_HEADER + signet_solution)
|
||||
block.vtx[0].rehash()
|
||||
block.hashMerkleRoot = block.calc_merkle_root()
|
||||
if grind_cmd is None:
|
||||
block.solve()
|
||||
else:
|
||||
headhex = CBlockHeader.serialize(block).hex()
|
||||
cmd = grind_cmd.split(" ") + [headhex]
|
||||
newheadhex = subprocess.run(cmd, stdout=subprocess.PIPE, input=b"", check=True).stdout.strip()
|
||||
newhead = from_hex(CBlockHeader(), newheadhex.decode('utf8'))
|
||||
block.nNonce = newhead.nNonce
|
||||
block.rehash()
|
||||
return block
|
||||
|
||||
def generate_psbt(tmpl, reward_spk, *, blocktime=None, miner_tag='', relay_spk=None, reward_split_ratio=0.5):
|
||||
signet_spk = tmpl["signet_challenge"]
|
||||
signet_spk_bin = bytes.fromhex(signet_spk)
|
||||
|
||||
cbtx = create_coinbase(height=tmpl["height"], value=tmpl["coinbasevalue"], spk=reward_spk, miner_tag=miner_tag)
|
||||
cbtx.vin[0].nSequence = 2**32-2
|
||||
cbtx.rehash()
|
||||
|
||||
block = CBlock()
|
||||
block.nVersion = tmpl["version"]
|
||||
block.hashPrevBlock = int(tmpl["previousblockhash"], 16)
|
||||
block.nTime = tmpl["curtime"] if blocktime is None else blocktime
|
||||
if block.nTime < tmpl["mintime"]:
|
||||
block.nTime = tmpl["mintime"]
|
||||
block.nBits = int(tmpl["bits"], 16)
|
||||
block.nNonce = 0
|
||||
block.vtx = [cbtx] + [tx_from_hex(t["data"]) for t in tmpl["transactions"]]
|
||||
|
||||
witnonce = 0
|
||||
witroot = block.calc_witness_merkle_root()
|
||||
cbwit = CTxInWitness()
|
||||
cbwit.scriptWitness.stack = [ser_uint256(witnonce)]
|
||||
block.vtx[0].wit.vtxinwit = [cbwit]
|
||||
block.vtx[0].vout.append(CTxOut(0, get_witness_script(witroot, witnonce)))
|
||||
|
||||
signme, spendme = signet_txs(block, signet_spk_bin)
|
||||
|
||||
return do_createpsbt(block, signme, spendme)
|
||||
|
||||
def get_reward_address(args, height):
    """Return the address that should receive the block reward at `height`.

    Resolution order: explicit --address (with "auto" meaning "ask the mining
    wallet for a fresh address"), then wallet-generated address when neither
    --address nor --descriptor is given, then a fixed or ranged --descriptor.
    Ranged descriptors are derived in batches of 21 and cached on
    args.derived_addresses.
    """
    if args.address is not None:
        if args.address == "auto":
            # Automatically generate a fresh address from the mining wallet.
            try:
                # BUG FIX: `getnewaddress` prints a bare bech32 string, which is
                # NOT valid JSON — the old json.loads() call always raised, so
                # the hard-coded fallback below was always returned.
                return args.bcli(f"-rpcwallet={args.MINING_WALLET}", "getnewaddress")
            except Exception as e:
                logging.warning("Failed to generate new address, using simple address (%s)", e)
                # NOTE(review): this fallback is not a valid bech32 address, so
                # blocks paying to it are unspendable — confirm intent.
                return "tb1qauto123456789012345678901234567890"
        return args.address

    if args.descriptor is None:
        # No address and no descriptor: fall back to the mining wallet.
        try:
            return args.bcli(f"-rpcwallet={args.MINING_WALLET}", "getnewaddress")
        except Exception as e:
            logging.error(f"Failed to generate new address: {e}")
            try:
                # Retry with an address label.
                new_addr = args.bcli(f"-rpcwallet={args.MINING_WALLET}", "getnewaddress", "miner")
                logging.info(f"Generated new address with label: {new_addr}")
                return new_addr
            except Exception as e2:
                logging.error(f"Failed to generate address with label: {e2}")
                # Last resort (see NOTE above about validity).
                return "tb1qauto123456789012345678901234567890"

    if '*' not in args.descriptor:
        # Fixed descriptor: always derives the same single address; cache it
        # on args.address so the reward_spk cache kicks in.
        addr = json.loads(args.bcli("deriveaddresses", args.descriptor))[0]
        args.address = addr
        return addr

    # Ranged descriptor: evict cache entries at least 20 blocks behind.
    stale = [k for k in args.derived_addresses.keys() if k + 20 <= height]
    for k in stale:
        del args.derived_addresses[k]

    addr = args.derived_addresses.get(height, None)
    if addr is None:
        # Derive heights [height, height+20] in one RPC and cache them all.
        addrs = json.loads(args.bcli("deriveaddresses", args.descriptor, "[%d,%d]" % (height, height + 20)))
        addr = addrs[0]
        for offset, a in enumerate(addrs):
            args.derived_addresses[height + offset] = a

    return addr
def get_reward_addr_spk(args, height):
    """Return (reward_address, reward_scriptPubKey) for the block at `height`.

    In --address mode the scriptPubKey never changes, so it is cached on
    args.reward_spk after the first lookup. When the resolved address is not
    known to the wallet, a fresh wallet address is generated instead.
    """
    if hasattr(args, "reward_spk"):
        # Cached from a previous call (--address mode: always the same spk).
        return args.address, args.reward_spk

    reward_addr = get_reward_address(args, height)
    if args.signer is not None:  # idiom fix: was `!= None`
        wallet = args.WATCHONLY_WALLET
    else:
        wallet = args.MINING_WALLET
    # BUG FIX: the old `print("%s", reward_addr)` wrote a literal "%s" plus the
    # address to stdout; do_genpsbt's stdout is the PSBT pipeline, so stray
    # prints corrupt it. Use logging instead.
    logging.debug("reward address: %s", reward_addr)

    try:
        reward_spk = bytes.fromhex(json.loads(args.bcli(f"-rpcwallet={wallet}", "getaddressinfo", reward_addr))["scriptPubKey"])
    except Exception:
        # Address unknown to the wallet: generate a fresh one.
        logging.warning(f"Address {reward_addr} not in wallet, generating new address")
        try:
            # BUG FIX: `getnewaddress` prints a plain string, not JSON — do not
            # json.loads() it.
            new_addr = args.bcli(f"-rpcwallet={wallet}", "getnewaddress")
            reward_spk = bytes.fromhex(json.loads(args.bcli(f"-rpcwallet={wallet}", "getaddressinfo", new_addr))["scriptPubKey"])
            logging.info(f"Generated new address: {new_addr}")
            # Keep the address used in logs consistent with the spk.
            reward_addr = new_addr
        except Exception as e:
            logging.error(f"Failed to generate new address: {e}")
            try:
                # Retry with an address label.
                new_addr = args.bcli(f"-rpcwallet={wallet}", "getnewaddress", "miner")
                reward_spk = bytes.fromhex(json.loads(args.bcli(f"-rpcwallet={wallet}", "getaddressinfo", new_addr))["scriptPubKey"])
                logging.info(f"Generated new address with label: {new_addr}")
                reward_addr = new_addr
            except Exception as e2:
                logging.error(f"Failed to generate address with label: {e2}")
                # Last resort: P2WPKH of an all-zero hash.
                # NOTE(review): this output is unspendable — confirm intent.
                reward_spk = bytes.fromhex("0014" + "0" * 40)

    if args.address is not None:
        # will always be the same, so cache
        args.reward_spk = reward_spk

    return reward_addr, reward_spk
def do_genpsbt(args):
    """`genpsbt` sub-command: read a getblocktemplate result (JSON) from stdin
    and print the block-funding PSBT to stdout."""
    tmpl = json.load(sys.stdin)
    _, reward_spk = get_reward_addr_spk(args, tmpl["height"])

    # Resolve the relay's scriptPubKey, if a relay address is configured.
    relay_spk = None
    if getattr(args, 'relay_address', None):
        try:
            relay_spk = bytes.fromhex(json.loads(args.bcli(f"-rpcwallet={args.WATCHONLY_WALLET}", "getaddressinfo", args.relay_address))["scriptPubKey"])
        except Exception:
            # Address unknown to the wallet: fall back to the miner's script.
            logging.warning(f"Relay address {args.relay_address} not in wallet, using miner address")
            relay_spk = reward_spk

    # BUG FIX: generate_psbt's parameters after `*` are keyword-only; the old
    # positional call `generate_psbt(tmpl, reward_spk, None, ...)` raised
    # TypeError. Pass them by keyword (blocktime keeps its default of None).
    # NOTE(review): args.MINER_TAG is passed un-encoded here, while do_generate
    # encodes it to bytes first — confirm create_coinbase accepts both.
    psbt = generate_psbt(tmpl, reward_spk,
                         miner_tag=args.MINER_TAG,
                         relay_spk=relay_spk,
                         reward_split_ratio=getattr(args, 'reward_split_ratio', 0.5))
    print(psbt)
def do_solvepsbt(args):
    """`solvepsbt` sub-command: read a signed PSBT from stdin, finish the
    block (signet solution + PoW) and print the serialized block as hex."""
    signed_psbt = sys.stdin.read()
    block, signet_solution = do_decode_psbt(signed_psbt)
    solved = finish_block(block, signet_solution, args.grind_cmd)
    print(solved.serialize().hex())
def nbits_to_target(nbits):
    """Expand a compact-encoded difficulty (nBits) into the full target.

    The top byte is the base-256 exponent, the low three bytes the mantissa.
    """
    exponent = (nbits >> 24) & 0xff
    mantissa = nbits & 0x00ffffff
    # Keep `*` (not `<<`) so exponents below 3 behave like the original.
    return mantissa * 2 ** (8 * (exponent - 3))
def target_to_nbits(target):
    """Compress a full target into compact nBits encoding (size byte followed
    by a three-byte mantissa), rounding up when precision is lost."""
    hexstr = "{0:x}".format(target)
    if len(hexstr) < 6:
        hexstr = ("000000" + hexstr)[-6:]
    if len(hexstr) % 2 == 1:
        hexstr = "0" + hexstr
    if int(hexstr[0], 16) >= 0x8:
        # A set top bit would decode as a negative number; pad with a zero byte.
        hexstr = "00" + hexstr
    mantissa = int(hexstr[:6], 16)
    size = len(hexstr) // 2
    # Round up if any truncated tail digit was non-zero.
    if any(digit != "0" for digit in hexstr[6:]):
        mantissa += 1
    return int("%02x%06x" % (size, mantissa), 16)
def seconds_to_hms(s):
    """Render an (integer) number of seconds as a compact "XhYmZs" string,
    omitting zero components; negative inputs get a leading '-'."""
    if s == 0:
        return "0s"
    sign = ""
    if s < 0:
        sign = "-"
        s = -s
    text = ""
    if s % 60 > 0:
        text = "%ds" % (s % 60)
    s //= 60
    if s % 60 > 0:
        text = "%dm%s" % (s % 60, text)
    s //= 60
    if s > 0:
        text = "%dh%s" % (s, text)
    return sign + text
def send_to_relay(args, miner_addr, height):
    """Send a share of the mining wallet's funds to the configured relay
    address after a block was mined.

    `miner_addr` and `height` are currently unused (kept for interface
    compatibility with the caller in do_generate). Best-effort: failures are
    logged, never raised.
    """
    if not getattr(args, 'relay_address', None):
        return

    relay_addr = args.relay_address
    split_ratio = getattr(args, 'reward_split_ratio', 0.5)

    # Give the freshly submitted block a moment to be processed.
    time.sleep(5)

    try:
        # `getbalance` prints a bare number, which IS valid JSON.
        balance = json.loads(args.bcli(f"-rpcwallet={args.MINING_WALLET}", "getbalance"))
        if balance < 0.001:  # minimum 0.001 BTC
            logging.warning(f"Insufficient balance to send to relay: {balance} BTC")
            return

        # Send split_ratio of the available balance, clamped to [0.001, 0.1] BTC.
        amount = balance * split_ratio
        amount = max(0.001, min(amount, 0.1))

        # BUG FIX: `sendtoaddress` prints a bare 64-char hex txid, which is NOT
        # valid JSON — the old json.loads() raised here and every transfer was
        # aborted (and mis-reported as a failure). Use the raw string.
        txid = args.bcli(f"-rpcwallet={args.MINING_WALLET}", "sendtoaddress", relay_addr, str(amount))
        logging.info(f"Sent {amount} BTC to relay {relay_addr}, txid: {txid}")

        # Small pause so the transaction can propagate before the next cycle.
        time.sleep(2)

    except Exception as e:
        logging.error(f"Failed to send funds to relay: {e}")
def next_block_delta(last_nbits, last_hash, ultimate_target, do_poisson):
    """Choose how many seconds after the previous block the next one is due.

    Strategy:
      1) work out how far off the desired target we are
      2) cap it to a factor of 4, the most a single retarget period can do
      3) derive the desired average interval for this retarget period
      4) if poisson, derive a deterministic multiplier from last_hash;
         otherwise jitter uniformly by +/-5%
      5) clamp the result between 1 second and 1 hour
    """
    INTERVAL = 600.0*2016/2015  # 10 minutes, adjusted for the off-by-one bug

    retarget_factor = ultimate_target / nbits_to_target(last_nbits)
    retarget_factor = max(0.25, min(retarget_factor, 4.0))
    avg_interval = INTERVAL * retarget_factor

    if do_poisson:
        # Uniform [0,1) sample derived deterministically from the block hash,
        # mapped through the exponential distribution's inverse CDF.
        det_rand = int(last_hash[-8:], 16) * 2**-32
        multiplier = -math.log1p(-det_rand)
    else:
        multiplier = uniform(0.95, 1.05)

    return max(1, min(avg_interval * multiplier, 3600))
def next_block_is_mine(last_hash, my_blocks):
    """Deterministically decide (from the previous block's hash) whether the
    next block falls in this miner's --multiminer slot [lo, hi) of `total`."""
    lo, hi, total = my_blocks
    det_rand = int(last_hash[-16:-8], 16)
    return lo <= (det_rand % total) < hi
def do_generate(args):
    # `generate` sub-command: main mining loop. Validates the CLI options,
    # then repeatedly polls bitcoind, schedules the next block (optionally
    # multiminer-sharded and poisson-jittered), builds and signs the block
    # PSBT (Bitcoin Core wallet or Coldcard HSM), grinds the PoW, submits the
    # block, and optionally forwards part of the reward to a relay address.
    # Returns 1 on fatal configuration/RPC errors; loops forever with
    # --ongoing, otherwise mines up to max_blocks blocks.

    # --- option validation ---------------------------------------------
    if args.max_blocks is not None:
        if args.ongoing:
            logging.error("Cannot specify both --ongoing and --max-blocks")
            return 1
        if args.max_blocks < 1:
            logging.error("N must be a positive integer")
            return 1
        max_blocks = args.max_blocks
    elif args.ongoing:
        max_blocks = None
    else:
        max_blocks = 1

    if args.set_block_time is not None and max_blocks != 1:
        logging.error("Cannot specify --ongoing or --max-blocks > 1 when using --set-block-time")
        return 1
    if args.set_block_time is not None and args.set_block_time < 0:
        args.set_block_time = time.time()
        logging.info("Treating negative block time as current time (%d)" % (args.set_block_time))

    if args.min_nbits:
        if args.nbits is not None:
            logging.error("Cannot specify --nbits and --min-nbits")
            return 1
        # Minimum signet difficulty.
        args.nbits = "1e0377ae"
        logging.info("Using nbits=%s" % (args.nbits))

    if args.set_block_time is None:
        if args.nbits is None or len(args.nbits) != 8:
            logging.error("Must specify --nbits (use calibrate command to determine value)")
            return 1

    # my_blocks = (lo, hi, total): this miner handles slots lo..hi-1 of total.
    if args.multiminer is None:
        my_blocks = (0,1,1)
    else:
        if not args.ongoing:
            logging.error("Cannot specify --multiminer without --ongoing")
            return 1
        m = RE_MULTIMINER.match(args.multiminer)
        if m is None:
            logging.error("--multiminer argument must be k/m or j-k/m")
            return 1
        start,_,stop,total = m.groups()
        if stop is None:
            stop = start
        start, stop, total = map(int, (start, stop, total))
        if stop < start or start <= 0 or total < stop or total == 0:
            logging.error("Inconsistent values for --multiminer")
            return 1
        my_blocks = (start-1, stop, total)

    # Coinbase tag must be bytes for block construction.
    if args.MINER_TAG is not None:
        args.MINER_TAG = args.MINER_TAG.encode('utf-8')
    else:
        args.MINER_TAG = "default".encode('utf-8')

    ultimate_target = nbits_to_target(int(args.nbits,16))

    # --- mining loop ---------------------------------------------------
    mined_blocks = 0
    bestheader = {"hash": None}
    lastheader = None
    while max_blocks is None or mined_blocks < max_blocks:

        # current status?
        bci = json.loads(args.bcli("getblockchaininfo"))

        if bestheader["hash"] != bci["bestblockhash"]:
            bestheader = json.loads(args.bcli("getblockheader", bci["bestblockhash"]))

        if lastheader is None:
            lastheader = bestheader["hash"]
        elif bestheader["hash"] != lastheader:
            # Someone (possibly us) produced a block since last iteration.
            next_delta = next_block_delta(int(bestheader["bits"], 16), bestheader["hash"], ultimate_target, args.poisson)
            next_delta += bestheader["time"] - time.time()
            next_is_mine = next_block_is_mine(bestheader["hash"], my_blocks)
            logging.info("Received new block at height %d; next in %s (%s)", bestheader["height"], seconds_to_hms(next_delta), ("mine" if next_is_mine else "backup"))
            lastheader = bestheader["hash"]

        # when is the next block due to be mined?
        now = time.time()
        if args.set_block_time is not None:
            logging.debug("Setting start time to %d", args.set_block_time)
            mine_time = args.set_block_time
            action_time = now
            is_mine = True
        elif bestheader["height"] == 0:
            # Fresh chain: backdate so difficulty adjustment has history.
            time_delta = next_block_delta(int(bestheader["bits"], 16), bci["bestblockhash"], ultimate_target, args.poisson)
            time_delta *= 100 # 100 blocks
            logging.info("Backdating time for first block to %d minutes ago" % (time_delta/60))
            mine_time = now - time_delta
            action_time = now
            is_mine = True
        else:
            time_delta = next_block_delta(int(bestheader["bits"], 16), bci["bestblockhash"], ultimate_target, args.poisson)
            mine_time = bestheader["time"] + time_delta

            is_mine = next_block_is_mine(bci["bestblockhash"], my_blocks)

            action_time = mine_time
            if not is_mine:
                action_time += args.backup_delay

            if args.standby_delay > 0:
                action_time += args.standby_delay
            elif mined_blocks == 0:
                # for non-standby, always mine immediately on startup,
                # even if the next block shouldn't be ours
                action_time = now

        # don't want fractional times so round down
        mine_time = int(mine_time)
        action_time = int(action_time)

        # can't mine a block 2h in the future; 1h55m for some safety
        action_time = max(action_time, mine_time - 6900)

        # ready to go? otherwise sleep and check for new block
        if now < action_time:
            sleep_for = min(action_time - now, 60)
            if mine_time < now:
                # someone else might have mined the block,
                # so check frequently, so we don't end up late
                # mining the next block if it's ours
                sleep_for = min(20, sleep_for)
            minestr = "mine" if is_mine else "backup"
            logging.debug("Sleeping for %s, next block due in %s (%s)" % (seconds_to_hms(sleep_for), seconds_to_hms(mine_time - now), minestr))
            time.sleep(sleep_for)
            continue

        # gbt
        tmpl = json.loads(args.bcli("getblocktemplate", '{"rules":["signet","segwit"]}'))
        if tmpl["previousblockhash"] != bci["bestblockhash"]:
            logging.warning("GBT based off unexpected block (%s not %s), retrying", tmpl["previousblockhash"], bci["bestblockhash"])
            time.sleep(1)
            continue

        logging.debug("GBT template: %s", tmpl)

        if tmpl["mintime"] > mine_time:
            logging.info("Updating block time from %d to %d", mine_time, tmpl["mintime"])
            mine_time = tmpl["mintime"]
            if mine_time > now:
                logging.error("GBT mintime is in the future: %d is %d seconds later than %d", mine_time, (mine_time-now), now)
                return 1

        # address for reward
        reward_addr, reward_spk = get_reward_addr_spk(args, tmpl["height"])

        # Resolve the relay address's scriptPubKey (if configured).
        relay_spk = None
        if hasattr(args, 'relay_address') and args.relay_address:
            try:
                relay_spk = bytes.fromhex(json.loads(args.bcli(f"-rpcwallet={args.WATCHONLY_WALLET}", "getaddressinfo", args.relay_address))["scriptPubKey"])
            # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit.
            except:
                # Address unknown to the wallet: fall back to the miner's script.
                logging.warning(f"Relay address {args.relay_address} not in wallet, using simple address")
                # For now, reuse the miner's scriptPubKey.
                relay_spk = reward_spk

        # mine block
        logging.debug("Mining block delta=%s start=%s mine=%s", seconds_to_hms(mine_time-bestheader["time"]), mine_time, is_mine)
        mined_blocks += 1
        psbt = generate_psbt(tmpl, reward_spk, blocktime=mine_time, relay_spk=relay_spk, reward_split_ratio=getattr(args, 'reward_split_ratio', 0.5))
        logging.info(f"psbt pre-processing: {psbt}")
        # walletprocesspsbt args fed via -stdin: psbt, sign=true, sighash=ALL.
        input_stream = os.linesep.join([psbt, "true", "ALL"]).encode('utf8')
        if args.signer == "coldcard":
            # Coldcard flow: fill the PSBT via the watch-only wallet, upload to
            # the HSM bridge, have the device sign it, then finalize in Core.
            psbt = json.loads(args.bcli("-stdin", f"-rpcwallet={args.WATCHONLY_WALLET}", "walletprocesspsbt", input=input_stream))['psbt']
            try:
                psbt_hash = asyncio.get_event_loop().run_until_complete(coldcard_upload_psbt(psbt))
            except Exception as e:
                logging.error("Waiting 5s before trying again.")
                time.sleep(5)
                continue

            try:
                psbt_signed = asyncio.get_event_loop().run_until_complete(coldcard_sign_psbt(psbt_hash))
            except Exception as e:
                logging.error("Please check your keys.")
                return 1

            try:
                assert('local_download' in psbt_signed)
            except AssertionError as e:
                logging.error("Didn't receive signed psbt")
                logging.error(f"Received: {psbt_signed}")
                logging.error("Waiting 5s before trying again.")
                time.sleep(5)
                continue

            # finalizepsbt args via -stdin: psbt, extract=false.
            input_stream = os.linesep.join([psbt_signed['local_download']['data'], "false"]).encode('utf8')
            try:
                final_psbt = json.loads(args.bcli("-stdin", "finalizepsbt", input=input_stream))
            except Exception as e:
                logging.error(f"Core can't finalize psbt: {e}")
                return 1
            if not final_psbt.get("complete",False):
                logging.error("finalizepsbt return: %s" % (final_psbt,))
                sys.stderr.write("PSBT finalization failed\n")
                return 1

            final_psbt = final_psbt["psbt"]
        else:
            # Default flow: the mining wallet signs the PSBT directly.
            psbt_signed = json.loads(args.bcli("-stdin", f"-rpcwallet={args.MINING_WALLET}", "walletprocesspsbt", input=input_stream))
            if not psbt_signed.get("complete",False):
                logging.debug("Generated PSBT: %s" % (psbt,))
                sys.stderr.write("PSBT signing failed\n")
                return 1
            final_psbt = psbt_signed["psbt"]
        block, signet_solution = do_decode_psbt(final_psbt)
        block = finish_block(block, signet_solution, args.grind_cmd)

        # submit block
        r = args.bcli("-stdin", "submitblock", input=block.serialize().hex().encode('utf8'))

        # report
        bstr = "block" if is_mine else "backup block"

        next_delta = next_block_delta(block.nBits, block.hash, ultimate_target, args.poisson)
        next_delta += block.nTime - time.time()
        next_is_mine = next_block_is_mine(block.hash, my_blocks)

        logging.debug("Block hash %s payout to %s", block.hash, reward_addr)
        logging.info("Mined %s at height %d; next in %s (%s)", bstr, tmpl["height"], seconds_to_hms(next_delta), ("mine" if next_is_mine else "backup"))
        if r != "":
            # submitblock prints nothing on success; anything else is an error.
            logging.warning("submitblock returned %s for height %d hash %s", r, tmpl["height"], block.hash)

        # Forward part of the reward to the relay, if configured.
        if hasattr(args, 'relay_address') and args.relay_address and getattr(args, 'reward_split_ratio', 0) > 0:
            try:
                send_to_relay(args, reward_addr, tmpl["height"])
            except Exception as e:
                logging.error(f"Failed to send funds to relay: {e}")

        lastheader = block.hash
async def coldcard_upload_psbt(psbt):
    # Upload a base64 PSBT to the Coldcard HSM bridge (websocket at CC_URL)
    # and wait for the upload to be acknowledged with a 'success' message.
    # Returns the sha256 hex digest of the raw PSBT, used as its handle for
    # the subsequent signing request.
    async with websockets.connect(CC_URL) as ws:
        psbt_bin = base64.b64decode(psbt)
        psbt_hash = sha256(psbt_bin).digest()
        payload = message("upload_psbt", len(psbt_bin), psbt_hash.hex(), psbt)
        await ws.send(payload)
        async for msg in ws:
            try:
                r = json.loads(msg)
            except json.JSONDecodeError as e:
                # Ignore malformed frames and keep waiting.
                logging.error(f"{e}: {msg} is not valid json")
                continue

            if 'update_status' in r:
                # Progress notification only; keep listening.
                logging.info(f"Received update: {r['update_status']}")
            elif 'success' in r:
                break

    return psbt_hash.hex()
async def coldcard_sign_psbt(psbt_hash):
    # Ask the Coldcard bridge to sign the previously uploaded PSBT identified
    # by `psbt_hash`; returns the final message, expected to contain a
    # 'local_download' entry with the signed PSBT data.
    async with websockets.connect(CC_URL) as ws:
        payload = message("submit_psbt", psbt_hash, False, False, True)
        await ws.send(payload)

        async for msg in ws:
            try:
                r = json.loads(msg)
            except json.JSONDecodeError as e:
                logging.error(f"{e}: {msg} is not valid json")
                continue

            if "update_status" in r:
                # Progress notification only; keep listening.
                continue
            elif 'local_download' in r:
                break

    # NOTE(review): if the socket closes before any valid message arrives, `r`
    # is unbound (NameError); if it closes after only non-matching messages,
    # the last one is returned. The caller guards with a 'local_download'
    # membership check — confirm that covers both cases as intended.
    return r
def do_calibrate(args):
    """`calibrate` sub-command: time the grind command over a fixed number of
    trials, then report the nBits giving a desired average block time (or the
    average time a given nBits would take). Returns 0 on success, 1 on bad
    arguments."""
    if args.nbits is not None and args.seconds is not None:
        sys.stderr.write("Can only specify one of --nbits or --seconds\n")
        return 1
    if args.nbits is not None and len(args.nbits) != 8:
        sys.stderr.write("Must specify 8 hex digits for --nbits\n")
        return 1

    TRIALS = 600  # gets variance down pretty low
    TRIAL_BITS = 0x1e3ea75f  # takes about 5m to do 600 trials

    header = CBlockHeader()
    header.nBits = TRIAL_BITS
    trial_target = nbits_to_target(header.nBits)

    started_at = time.time()
    for trial in range(TRIALS):
        # Vary nTime so each trial grinds a distinct header.
        header.nTime = trial
        header.nNonce = 0
        grind_argv = args.grind_cmd.split(" ") + [header.serialize().hex()]
        subprocess.run(grind_argv, stdout=subprocess.PIPE, input=b"", check=True)

    avg = (time.time() - started_at) * 1.0 / TRIALS

    if args.nbits is not None:
        # Scale the measured time to the requested difficulty.
        want_targ = nbits_to_target(int(args.nbits, 16))
        want_time = avg * trial_target / want_targ
    else:
        # Scale the trial difficulty to the requested time (default 25s).
        want_time = args.seconds if args.seconds is not None else 25
        want_targ = int(trial_target * (avg / want_time))

    print("nbits=%08x for %ds average mining time" % (target_to_nbits(want_targ), want_time))
    return 0
def do_setup(args):
    """`setup` sub-command: run the Coldcard HSM setup flow. Exits the process
    with the setup's status code; exits 1 for any other --signer value."""
    if args.signer != "coldcard":
        logging.error("Unknown option")
        sys.exit(1)
    sys.exit(asyncio.get_event_loop().run_until_complete(coldcard_setup()))
async def coldcard_status() -> dict:
    # Poll the Coldcard bridge until it reports a status for the connected
    # device. Retries every 10s while the server is unreachable.
    #
    # NOTE(review): despite the `-> dict` annotation, several error paths
    # return the int 1; callers (coldcard_setup) index into the result and
    # would raise TypeError in that case — confirm intended error handling.
    logging.info("Checking that server is up and connected to Coldcard")
    status = {}
    while 1:
        try:
            async with websockets.connect(CC_URL) as ws:
                payload = message("get_info")
                await ws.send(payload)

                async for msg in ws:
                    try:
                        r = json.loads(msg)
                    except json.JSONDecodeError:
                        logging.info(f"{msg} is not valid json")
                        return 1

                    try:
                        if 'status' in r:
                            logging.info("Got status")
                            logging.info(f"{r}")
                            status = r['status']
                            break
                        elif 'update_status' in r:
                            # we ignore this as there's not all the details there
                            logging.info("Received update_status, ignoring")
                            continue
                        elif 'error' in r:
                            logging.error(f"{r['error']}")
                            return 1
                        else:
                            # Unrecognized message: the device is probably not
                            # attached yet; keep waiting.
                            logging.info(f"{r}")
                            logging.info("Still waiting for Coldcard connection")
                            continue
                    except Exception as e:
                        logging.error(f"{e}")
                        return 1
        except ConnectionError as e:
            # Bridge not up yet: back off and retry.
            logging.error("Server seems to be offline. Trying again.")
            await asyncio.sleep(10)
            continue

        logging.info(f"{status}")

        return status
async def coldcard_setup():
    # Ensure the Coldcard is in HSM mode with the mining policy installed:
    # submit the policy if HSM is inactive, otherwise wait until the device
    # reports its policy ('sl_loaded'). Returns 0 on success, 1 on protocol
    # errors.
    status = await coldcard_status()

    logging.info(f"{status}")
    if not status['hsm']['active']:
        logging.info("Entering Coldcard HSM setup")
        logging.info("Please follow the instructions on the coldcard")
        async with websockets.connect(CC_URL) as ws:
            payload = message("submit_policy", json_dumps(POLICY), False)
            await ws.send(payload)

            # Wait for an update confirming the HSM became active.
            async for msg in ws:
                try:
                    r = json.loads(msg)
                except json.JSONDecodeError:
                    logging.info(f"{msg} is not valid json")
                    return 1

                try:
                    if 'update_status' in r:
                        if r['update_status']['hsm']['active']:
                            logging.info("HSM is now activated. Proceeding.")
                            break
                        else:
                            logging.info(f"Received update: {r['update_status']['hsm']}")
                except KeyError:
                    # Message lacked the expected keys; keep waiting.
                    logging.error(f"Received unexpected message: {r}")
                    continue
                except Exception as e:
                    logging.error(f"{e}")
                    return 1
    elif not status['sl_loaded']:
        # HSM active but policy not loaded yet: poll until it is.
        while 1:
            status = await coldcard_status()
            if status['sl_loaded']:
                logging.info("Coldcard in HSM mode. Proceeding...")
                break
            await asyncio.sleep(10)
    else:
        logging.info("Coldcard already in HSM mode. Proceeding...")

    return 0
def bitcoin_cli(basecmd, args, **kwargs):
    """Run bitcoin-cli (given as `basecmd` argv prefix) with -signet plus
    `args`, forwarding **kwargs to subprocess.run; returns stripped stdout
    as str. Raises CalledProcessError on non-zero exit."""
    cmd = [*basecmd, "-signet", *args]
    logging.debug("Calling bitcoin-cli: %r", cmd)
    raw = subprocess.run(cmd, stdout=subprocess.PIPE, **kwargs, check=True).stdout
    text = raw.decode('utf8') if isinstance(raw, bytes) else raw
    return text.strip()
def main():
    """Parse command-line arguments and dispatch to the selected sub-command
    (genpsbt / solvepsbt / setup / generate / calibrate). Returns the
    sub-command's status code, or 1 on usage errors."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--cli", default="bitcoin-cli", type=str, help="bitcoin-cli command")
    parser.add_argument("--debug", action="store_true", help="Print debugging info")
    parser.add_argument("--quiet", action="store_true", help="Only print warnings/errors")

    cmds = parser.add_subparsers(help="sub-commands")
    genpsbt = cmds.add_parser("genpsbt", help="Generate a block PSBT for signing")
    genpsbt.set_defaults(fn=do_genpsbt)
    genpsbt.add_argument('--MINER_TAG', required=True, help='Miner tag')

    solvepsbt = cmds.add_parser("solvepsbt", help="Solve a signed block PSBT")
    solvepsbt.set_defaults(fn=do_solvepsbt)

    setup = cmds.add_parser("setup", help="Setup HSM for coldcard")
    setup.set_defaults(fn=do_setup)
    setup.add_argument("--signer", default=None, type=str, help="Who's signing blocks (default: Bitcoin Core)")

    generate = cmds.add_parser("generate", help="Mine blocks")
    generate.set_defaults(fn=do_generate)
    generate.add_argument("--ongoing", action="store_true", help="Keep mining blocks")
    generate.add_argument("--max-blocks", default=None, type=int, help="Max blocks to mine (default=1)")
    generate.add_argument("--set-block-time", default=None, type=int, help="Set block time (unix timestamp)")
    generate.add_argument("--nbits", default=None, type=str, help="Target nBits (specify difficulty)")
    generate.add_argument("--min-nbits", action="store_true", help="Target minimum nBits (use min difficulty)")
    generate.add_argument("--poisson", action="store_true", help="Simulate randomised block times")
    generate.add_argument("--multiminer", default=None, type=str, help="Specify which set of blocks to mine (eg: 1-40/100 for the first 40%%, 2/3 for the second 3rd)")
    generate.add_argument("--backup-delay", default=300, type=int, help="Seconds to delay before mining blocks reserved for other miners (default=300)")
    generate.add_argument("--standby-delay", default=0, type=int, help="Seconds to delay before mining blocks (default=0)")
    generate.add_argument("--signer", default=None, type=str, help="Who's signing blocks (default: Bitcoin Core)")
    generate.add_argument('--WATCHONLY_WALLET', required=True, help='Watch-only wallet')
    generate.add_argument('--MINING_WALLET', required=True, help='Mining wallet')
    generate.add_argument('--MINER_TAG', required=True, help='Miner tag')

    calibrate = cmds.add_parser("calibrate", help="Calibrate difficulty")
    calibrate.set_defaults(fn=do_calibrate)
    calibrate.add_argument("--nbits", type=str, default=None)
    calibrate.add_argument("--seconds", type=int, default=None)

    # Reward-destination options shared by genpsbt and generate.
    for sp in [genpsbt, generate]:
        sp.add_argument("--address", default=None, type=str, help="Address for block reward payment")
        sp.add_argument("--descriptor", default=None, type=str, help="Descriptor for block reward payment")
        # BUG FIX: argparse %-formats help strings, so the bare '%' in
        # "50% of block reward" raised "ValueError: unsupported format
        # character" whenever help was rendered; escape it as '%%'
        # (consistent with the --multiminer help above).
        sp.add_argument("--relay-address", default=None, type=str, help="Address for relay reward payment (50%% of block reward)")
        sp.add_argument("--reward-split-ratio", default=0.5, type=float, help="Ratio of block reward to send to relay (default: 0.5)")

    for sp in [solvepsbt, generate, calibrate]:
        sp.add_argument("--grind-cmd", default=None, type=str, required=(sp==calibrate), help="Command to grind a block header for proof-of-work")

    args = parser.parse_args(sys.argv[1:])
    # Convenience wrapper: args.bcli("cmd", ...) runs the configured CLI.
    args.bcli = lambda *a, input=b"", **kwargs: bitcoin_cli(args.cli.split(" "), list(a), input=input, **kwargs)

    # Exactly one of --address / --descriptor must be given for the
    # sub-commands that have them.
    if hasattr(args, "address") and hasattr(args, "descriptor"):
        if args.address is None and args.descriptor is None:
            sys.stderr.write("Must specify --address or --descriptor\n")
            return 1
        elif args.address is not None and args.descriptor is not None:
            sys.stderr.write("Only specify one of --address or --descriptor\n")
            return 1

    # Cache for ranged-descriptor derivations (see get_reward_address).
    args.derived_addresses = {}

    if args.debug:
        logging.getLogger().setLevel(logging.DEBUG)
    elif args.quiet:
        logging.getLogger().setLevel(logging.WARNING)
    else:
        logging.getLogger().setLevel(logging.INFO)

    if hasattr(args, "fn"):
        return args.fn(args)
    else:
        logging.error("Must specify command")
        return 1
if __name__ == "__main__":
    # BUG FIX: main() returns 1 on error paths, but the return value was
    # discarded, so the process always exited 0; propagate it as exit status.
    sys.exit(main())
@ -1,78 +0,0 @@
|
||||
import argparse
|
||||
import base64
|
||||
import http.client
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
|
||||
|
||||
def rpc_call(host: str, port: int, cookie_path: str, method: str, params):
    """Perform one JSON-RPC call against bitcoind using cookie authentication.

    Reads `user:password` from the cookie file at `cookie_path`, POSTs the
    request to http://host:port/, and returns the RPC "result" field.
    Raises RuntimeError on HTTP errors or when the response carries an
    "error" field.
    """
    with open(cookie_path, 'r', encoding='utf-8') as f:
        cookie = f.read().strip()
    auth = base64.b64encode(cookie.encode()).decode()

    conn = http.client.HTTPConnection(host, port, timeout=30)
    try:
        payload = json.dumps({"jsonrpc": "1.0", "id": "miner", "method": method, "params": params})
        headers = {"Content-Type": "application/json", "Authorization": f"Basic {auth}"}
        conn.request("POST", "/", payload, headers)
        resp = conn.getresponse()
        body = resp.read()
    finally:
        # BUG FIX: the connection was never closed, leaking one socket per
        # call inside the caller's polling loop.
        conn.close()
    if resp.status != 200:
        raise RuntimeError(f"RPC HTTP {resp.status}: {body.decode(errors='ignore')}")
    data = json.loads(body)
    if data.get("error"):
        raise RuntimeError(str(data["error"]))
    return data["result"]
def main():
    # Minimal signet mining stub: reads connection settings from the command
    # line, miner parameters from the environment, and polls bitcoind's
    # getblocktemplate in a loop. Block construction/signing/submission is not
    # implemented yet (see TODO below).
    p = argparse.ArgumentParser()
    p.add_argument('--cookie', required=True)
    p.add_argument('--rpc-host', default='bitcoin')
    p.add_argument('--rpc-port', type=int, default=38332)
    p.add_argument('--poll-interval', type=int, default=5)
    args = p.parse_args()

    # Parameters supplied via environment variables.
    challenge = os.environ.get('SIGNET_CHALLENGE', '')
    xprv = os.environ.get('SIGNET_MINER_XPRV', '')
    derivation = os.environ.get('DERIVATION_PATH', "48'/1'/0'/2'/0/0")
    coinbase_addr = os.environ.get('COINBASE_ADDRESS', '')

    # SIGNET_CHALLENGE and COINBASE_ADDRESS are mandatory; the signing key is
    # optional (template-read-only mode).
    if not challenge:
        raise SystemExit('SIGNET_CHALLENGE non défini')
    if not xprv:
        print('Avertissement: SIGNET_MINER_XPRV non défini (mode lecture gbt uniquement)')
    if not coinbase_addr:
        raise SystemExit('COINBASE_ADDRESS non défini')

    print('Miner signet: host=%s port=%d' % (args.rpc_host, args.rpc_port), flush=True)
    print('Challenge:', challenge, flush=True)
    print('Derivation:', derivation, flush=True)
    print('Coinbase address:', coinbase_addr, flush=True)

    # Connectivity smoke-test; failures are logged but not fatal.
    try:
        bh = rpc_call(args.rpc_host, args.rpc_port, args.cookie, 'getblockcount', [])
        print('Hauteur actuelle:', bh, flush=True)
    except Exception as e:
        print('Erreur RPC initiale:', e, flush=True)

    # Poll getblocktemplate forever (never returns).
    while True:
        try:
            # Include the signet rule as required by bitcoind on signet.
            tmpl = rpc_call(
                args.rpc_host,
                args.rpc_port,
                args.cookie,
                'getblocktemplate',
                [{"rules": ["segwit", "signet"]}]
            )
            print('Template: height=%s nTx=%s' % (tmpl.get('height'), len(tmpl.get('transactions', []))), flush=True)
            # TODO: build the coinbase (coinbase_addr) and header, sign per the
            # signet challenge, then submitblock.
        except Exception as e:
            print('Erreur getblocktemplate:', e, flush=True)
        time.sleep(args.poll_interval)
if __name__ == '__main__':
    # main() loops forever; it only terminates via SystemExit on missing
    # configuration or an external signal.
    main()
@ -1,78 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")"/.. && pwd)"
|
||||
ENV_FILE="$ROOT_DIR/.env"
|
||||
|
||||
if [ ! -f "$ENV_FILE" ]; then
|
||||
echo "Fichier d'env introuvable: $ENV_FILE" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC1090
|
||||
source "$ENV_FILE"
|
||||
|
||||
cli() {
|
||||
docker exec "$BITCOIN_CONTAINER" sh -lc "bitcoin-cli -conf=/etc/bitcoin/bitcoin.conf -signet $*"
|
||||
}
|
||||
|
||||
cli_stdin() {
|
||||
local subcmd="$1"; shift || true
|
||||
docker exec -i "$BITCOIN_CONTAINER" sh -lc "bitcoin-cli -conf=/etc/bitcoin/bitcoin.conf -signet -stdin $subcmd"
|
||||
}
|
||||
|
||||
extract_checksum() {
|
||||
sed -n 's/.*"checksum"[[:space:]]*:[[:space:]]*"\([a-z0-9]\{8\}\)".*/\1/p' | tr -d '\n'
|
||||
}
|
||||
|
||||
wallet_is_descriptors() {
|
||||
cli -rpcwallet="$MINING_WALLET" getwalletinfo 2>/dev/null | grep -q '"descriptors"[[:space:]]*:[[:space:]]*true'
|
||||
}
|
||||
|
||||
wallet_exists() {
|
||||
cli -rpcwallet="$MINING_WALLET" getwalletinfo >/dev/null 2>&1
|
||||
}
|
||||
|
||||
ensure_descriptors_wallet() {
|
||||
local wallet_path="/home/bitcoin/.bitcoin/signet/wallets/$MINING_WALLET"
|
||||
if wallet_exists && wallet_is_descriptors; then
|
||||
return 0
|
||||
fi
|
||||
cli -rpcwallet="$MINING_WALLET" unloadwallet "$MINING_WALLET" >/dev/null 2>&1 || true
|
||||
docker exec "$BITCOIN_CONTAINER" sh -lc "if [ -d '$wallet_path' ]; then rm -rf '$wallet_path'; fi"
|
||||
cli createwallet "$MINING_WALLET" false true "" true true true false >/dev/null
|
||||
}
|
||||
|
||||
ensure_descriptors_wallet
|
||||
|
||||
DESC_EXT_ORIG="wsh(multi(1,[$MINING_FINGERPRINT/$MINING_PATH_PREFIX]$MINING_XPRV/0/*))"
|
||||
DESC_INT_ORIG="wsh(multi(1,[$MINING_FINGERPRINT/$MINING_PATH_PREFIX]$MINING_XPRV/1/*))"
|
||||
|
||||
CS_EXT=$(printf '%s' "$DESC_EXT_ORIG" | cli_stdin getdescriptorinfo | extract_checksum)
|
||||
CS_INT=$(printf '%s' "$DESC_INT_ORIG" | cli_stdin getdescriptorinfo | extract_checksum)
|
||||
DESC_EXT="$DESC_EXT_ORIG#$CS_EXT"
|
||||
DESC_INT="$DESC_INT_ORIG#$CS_INT"
|
||||
|
||||
PAYLOAD_MINER=$(printf '[{"desc":"%s","timestamp":"now","active":true,"range":[0,1000]},{"desc":"%s","timestamp":"now","active":true,"internal":true,"range":[0,1000]}]' "$DESC_EXT" "$DESC_INT")
|
||||
printf '%s\n' "$PAYLOAD_MINER" | cli_stdin importdescriptors | cat
|
||||
|
||||
# Adresse coinbase: si vide, utiliser getnewaddress du wallet (garanti p2wsh multisig)
|
||||
if [ -z "${COINBASE_ADDRESS:-}" ]; then
|
||||
ADDR=$(cli -rpcwallet="$MINING_WALLET" getnewaddress coinbase bech32)
|
||||
tmpfile=$(mktemp)
|
||||
awk -v addr="$ADDR" 'BEGIN{updated=0} /^COINBASE_ADDRESS=/{print "COINBASE_ADDRESS=" addr; updated=1; next} {print} END{if(updated==0) print "COINBASE_ADDRESS=" addr}' "$ENV_FILE" > "$tmpfile"
|
||||
mv "$tmpfile" "$ENV_FILE"
|
||||
echo "COINBASE_ADDRESS=$ADDR"
|
||||
else
|
||||
echo "COINBASE_ADDRESS=$COINBASE_ADDRESS"
|
||||
fi
|
||||
|
||||
if [ -n "${CHALLENGE_ALLPUBS:-}" ]; then
|
||||
CHALL_PRIV_ORIG=$(printf '%s' "$CHALLENGE_ALLPUBS" | sed -E "s#\[$MINING_FINGERPRINT/$MINING_PATH_PREFIX\]tpub[[:alnum:]]+#[$MINING_FINGERPRINT/$MINING_PATH_PREFIX]$MINING_XPRV#g")
|
||||
CS_CHALL=$(printf '%s' "$CHALL_PRIV_ORIG" | cli_stdin getdescriptorinfo | extract_checksum)
|
||||
CHALL_PRIV="$CHALL_PRIV_ORIG#$CS_CHALL"
|
||||
PAYLOAD_CHAL=$(printf '[{"desc":"%s","timestamp":"now","active":false,"range":[0,1000]}]' "$CHALL_PRIV")
|
||||
printf '%s\n' "$PAYLOAD_CHAL" | cli_stdin importdescriptors | cat
|
||||
fi
|
||||
|
||||
echo "Import terminé."
|
@ -1,75 +0,0 @@
|
||||
[
|
||||
{
|
||||
"Mnemonic": "tower keen enrich problem essence east plastic lounge merge sand increase company",
|
||||
"xprv": "tprv8inwidD6qpNwMNY5ZadhYMn62d1WHvSVMRH2pPAj7RsAZGCY4YTiT1McMQSg5DAyijPBZ4HroX83vZQAevQkJSZUVH8kro9JnVbhTPBSAxL",
|
||||
"wallet descriptor": "wsh(multi(1,[86936c07/48'/1'/0'/2']tpubDFUys3FLzC4cEqZsTEJHwmSCbeXSTFdPvisp6uD2XhfZPkTJgwHJdVyUXYcfLRrikRxA2MpBaZWE5kZCtHFc15aVtktsHMrTijDjq2dKRGK/0/*))#pslna7dm",
|
||||
},
|
||||
{
|
||||
"Mnemonic": "deer trust ceiling youth brass rapid scout cradle better clap spike morning",
|
||||
"xprv": "tprv8iNgodqVZKJgEFGhmoouPYPAj7EzaqjqToGcdZGUTVcDpu8YSvvhmoppYp7vWG2LR2SrF93AVYZGgG9bCzuQs1xqwJ2QW8hRwtEVdyUofuH",
|
||||
"wallet descriptor": "wsh(multi(1,[5df7e4b0/48'/1'/0'/2']tpubDF4ix3sjhgzM7iJVfTUVnx3HJ8kvkAvk36sPv5JmsmQcfPPK5KkHxJSgixZAdcYEsGcvHacm1hW4iLksGoTZocJozuaA2BTNp3GEvW432qu/0/*))#4ma3uvl0"
|
||||
},
|
||||
{
|
||||
"Mnemonic": "control load guard error caution hundred main adjust happy infant safe brother",
|
||||
"xprv": "tprv8hTsDtqzaXPoZtQSrfR4HKfAXo1qmh8Xb6oJt5PY5WtET5ecfZ8bUEBoofVrH6s8STU586QHhYSppmQ3n1nvsZ6p5VaKu4MHxsvzUf1gg2D",
|
||||
"wallet descriptor": "wsh(multi(1,[a3a9eb52/48'/1'/0'/2']tpubDE9uNJtEiu5UTMSEkK5egjKH6pXmw2KSAQQ6AbRqVngdHZuPHwxBeiofypHrGmG1WkvAtgjjn7gmPddzaz3ymQj9m3CDFLGEB6Ao4xqripj/0/*))#ju6z6s7v"
|
||||
},
|
||||
{
|
||||
"Mnemonic": "venture ice crash venture tourist tail naive curtain pilot engage code celery",
|
||||
"xprv": "tprv8iTk1ZRgwq4NAysrRgY1Wbycbfvmb7cYgjAGTeR573gKFJ4BFuGtEoaotCS6wdGiUfC2BTHg79tiX7i6NuFiTfjiaM8LXfNzL77YuBGY3K7",
|
||||
"wallet descriptor": "wsh(multi(1,[46d93da5/48'/1'/0'/2']tpubDF9n9yTw6Ck34SueKLCbv1djAhShkSoTG2m3kATNXKUi5nJwtJ6URJCg4M1je81fyabsX4t6F2itrQinMuu3cYLbpLbVQwWBUwYA8pPyKdZ/0/*))#8q8j9sft"
|
||||
},
|
||||
{
|
||||
"Mnemonic": "sheriff zone betray cactus error enable code flat coyote worry guitar equal",
|
||||
"xprv": "tprv8iajQdFf4a7eUEKhE1gukrNEcxY4MZk1FJyKWgwZj48mhqTminKn6mkxyfyn1QwJ2XUke2aiXfXNQ2XqpGBbwXSSRK8hvqHQJuHWHs68YTh",
|
||||
"wallet descriptor": "wsh(multi(1,[d3c3bc8f/48'/1'/0'/2']tpubDFGmZ3HuCwoKMhMV7fMWAG2MBz3zWtvupca6oCys9KwAYKiYMB9NHGNq9qvVgPgDgpDLSiCqnp71f7WsV9N1cLkzsjqW9gxJF9VQ9oSZcj9/0/*))#7r3f3xys"
|
||||
},
|
||||
{
|
||||
"Mnemonic": "bottom sight mistake next reveal modify rather bulk mountain arrow useful buzz",
|
||||
"xprv": "tprv8hqVmTiP493atpsuqdt87Hx5hjU9NwHG9hrWBa97rS8TwTYZBK5Jd8BfH4Jv154oP9YRWy7kU9p7xnqTwXCAnKZuEpACt2uzTx83HrTjqen",
|
||||
"wallet descriptor": "wsh(multi(1,[7f7d263a/48'/1'/0'/2']tpubDEXXuskdCWjFnHuhjHYiWhcCGkz5YGUAj1THU6BRGhvrmwoKohttocoXTCCE9udffumcou7ZYUR5RNqwHW4kw7Jv2UXUUSKeKqJd9xGmSCs/0/*))#zc5ruh7c"
|
||||
},
|
||||
{
|
||||
"Mnemonic": "payment ill whisper noble casual shallow clown pipe keen pencil fluid term""xprv": "tprv8hMLjbE25N5UhZJadZDHb42RNFhgfSLdcsGnhu7BYt5Wt8UzXhcF5ANLuezsgJUNyCC4TJtekes9gssUCm6UKASnqMTPwm6KcePSw4npybF""wallet descriptor": "wsh(multi(1,[154159b3/48'/1'/0'/2']tpubDE3Nt1GGDjm9b2LNXCsszTgXwHDcpmXYCAsZzR9Uy9suicjmA6RqFezD5o8EWHk1vrztkPreHbYXKqGAdupKJNcKWYViKsQNMfr4uW8vcWq/0/*))#5k6w6h6g"
|
||||
},
|
||||
{
|
||||
"Mnemonic": "lesson trumpet royal bright three oval vague organ atom joke favorite april",
|
||||
"xprv": "tprv8ixSxhVBoDTfv846JKDo5Qu8L89wZnmXUZCwdN1sm8CRfS1RpecrWmtthiTqepjnBroRit3Zygn53z3v8QWp3bqQYjev2Mn92g9jGkzaGya",
|
||||
"wallet descriptor": "wsh(multi(1,[fca68db6/48'/1'/0'/2']tpubDFeV77XRwb9Lob5tBxtPUpZEu9fsj7xS3roiut4BBPzpVvGCT3SShGWksqUYLqKBrt7xeKmmmgSrgbRiffcoS5KPiqyDWk5Kgvxek52XnNV/0/*))#j6sm3ntm"
|
||||
},
|
||||
{
|
||||
"Mnemonic": "lyrics undo baby chicken possible vicious capital fun order salon maple source",
|
||||
"xprv": "tprv8ixaRLep3QQB2Rn9UpXXJVt9qcrDL6QTuHJusiKjPPgzkPmDveAQNBViJBrJakLKaoc3w7JzNUXAkSaeQHJAzGsMnrJQggWNkMn1e9rihgP",
|
||||
"wallet descriptor": "wsh(multi(1,[ef9d9ce6/48'/1'/0'/2']tpubDFecZkh4Bn5qutowNUC7huYGQeN9VRbNUauhAEN2ofVPat1zZ2yzYg7aULxsdzh79AFz7rBTVQeu2BsBay88XrFLc5diENj4ibizrwPNMbM/0/*))#zyhj4kj3"
|
||||
},
|
||||
{
|
||||
"Mnemonic": "canoe coral egg public boss stable mercy side tennis behind dance shy",
|
||||
"xprv": "tprv8j58z2XeVe29DeDqb8UABtF14mo4MCPWm5CmkL5BegHBa3prhkLz2HF4JFwU5Z6ypnA7qCVcwdyPGj5yqXPoXiaE2Rcosmx9Ntiav39vRfp",
|
||||
"wallet descriptor": "wsh(multi(1,[8e236875/48'/1'/0'/2']tpubDFmB8SZte1hp77FdUn8kbHu7doJzWXaRLNoZ2r7V4x5aQY5dL9AaCmrvUNZSPYHJKeqto8roTvUpwWFazfxHEg5DvMq8br266uuD1JKieWj/0/*))#8xxkeruq"
|
||||
},
|
||||
{
|
||||
"Mnemonic": "expire document depend hamster spy become blossom midnight ecology salon all earth",
|
||||
"xprv": "tprv8ii6Q43XVJhTrqb9oYUgz1kXXUc763g8c3tgZK38ei9Bwkaw12wEdXzgemB6fmF4jgDBAdavNg4YXyRe1XSx3jxjZ8i2fHruuyn6bP4r7uq",
|
||||
"wallet descriptor": "wsh(multi(1,[d03aacca/48'/1'/0'/2']tpubDFQ8YU5mdgP8kJcwhC9HPRQe6W83FNs3BMVTqq5S4ywanEqhdRkpp2cYpro3XRXKJPi8d1d3m4L2JXWdNQFfs31x37S3zfPpd7pwKEwLAm7/0/*))#phcw966k"
|
||||
},
|
||||
{
|
||||
"Mnemonic": "movie west unit carbon adapt liberty crack easily raise toward brother quality",
|
||||
"xprv": "tprv8iszPBk3CEovNt6aHBMHPqeejpNJVKCqws1jfmHobNiC2s497W2jL9nde2FmTBWKMpkuKXDuFKPrBqKcEpsjgYeLsoQVT3MgsHoTjTL64qB",
|
||||
"wallet descriptor": "wsh(multi(1,[ce3600ea/48'/1'/0'/2']tpubDFa2XbnHLcVbGM8NAq1soFJmJqtEeePkXAcWxHL71eWasMJujtrKWeQVp7NHQY5euJL2bFuBkVQHk4uoDrVRfCEELLxJhHuNouPquffbmUy/0/*))#lwv5ura2"
|
||||
},
|
||||
{
|
||||
"Mnemonic": "tip mixture supreme govern faculty panel judge motion aim write soon arrive",
|
||||
"xprv": "tprv8hJQahhR4cqcu6hkUkpVx3ex9tUxNWrY5EFd4zLdxT5Ycn3V14Kp6V7XLEtJ2N5p5dpVeP9mhMqNwTBBeJaavMDquLh8SRFfdDejAy8yygX",
|
||||
"wallet descriptor": "wsh(multi(1,[fe898c92/48'/1'/0'/2']tpubDDzSj7jfCzXHnZjYNQV6MTK4iuztXr3SeXrQMWNwNiswTGJFdT9QGyjPWMoYcoPY9HCYbLdcMGiDokrWDWWZEhg8HpbgebenhJujvTzMeeN/0/*))#675457w4"
|
||||
},
|
||||
{
|
||||
"Mnemonic": "identify devote dice young air turkey angle code observe innocent fragile bench",
|
||||
"xprv": "tprv8iUcGCBaM1XJqsswVqLkJjgQjvKHmGaKrvX9sqBrmRrUTroa5EKEes4x3L7AFE7tLDW4mUCWLmpAhFrjQvZ1uUzuAaziFvLwrtq253g9yzp",
|
||||
"wallet descriptor": "wsh(multi(1,[d33c583b/48'/1'/0'/2']tpubDFAeQcDpVPCyjLujPV1Li9LXJwqDvbmESE7wAMEABhesJM4Lhd8pqMgpDVSmf4cpdsfZbDWkhfyxeyG3SaWcB4MqEqhbseQ8mk41PPHb57T/0/*))#u9xx2lkz"
|
||||
},
|
||||
{
|
||||
"Mnemonic": "slide hollow decade federal pair brief furnace fit pelican heart better place",
|
||||
"xprv": "tprv8iVREMet5hjVGBfng2VLnsrTMPqPDFkVoUAqoy78zKEye1u6XaG8jKju3nsf6GN1UTMbkFWoD6TiTTvnP2ez4NvXyX9c1UMfQ932CmZjuLg",
|
||||
"wallet descriptor": "wsh(multi(1,[facf6b1f/48'/1'/0'/2']tpubDFBTNmh8E5RA9ehaZg9wCHWZvRMKNawQNmmd6V9SQb3NUW9s9y5iupMmDxAbBFFrytzotW9hu8REgqSFg26Q8mcvBjSAaVz9QcNzmCxRJdv/0/*))#3407up02"
|
||||
}
|
||||
]
|
@ -1,25 +0,0 @@
|
||||
{
|
||||
"type": "wsh",
|
||||
"multi": "sortedmulti",
|
||||
"min": "1",
|
||||
"pubkeys": [
|
||||
"[fca68db6/48'/1'/0'/2']tpubDFeV77XRwb9Lob5tBxtPUpZEu9fsj7xS3roiut4BBPzpVvGCT3SShGWksqUYLqKBrt7xeKmmmgSrgbRiffcoS5KPiqyDWk5Kgvxek52XnNV/0/*",
|
||||
"[5df7e4b0/48'/1'/0'/2']tpubDF4ix3sjhgzM7iJVfTUVnx3HJ8kvkAvk36sPv5JmsmQcfPPK5KkHxJSgixZAdcYEsGcvHacm1hW4iLksGoTZocJozuaA2BTNp3GEvW432qu/0/*",
|
||||
"[ef9d9ce6/48'/1'/0'/2']tpubDFecZkh4Bn5qutowNUC7huYGQeN9VRbNUauhAEN2ofVPat1zZ2yzYg7aULxsdzh79AFz7rBTVQeu2BsBay88XrFLc5diENj4ibizrwPNMbM/0/*",
|
||||
"[86936c07/48'/1'/0'/2']tpubDFUys3FLzC4cEqZsTEJHwmSCbeXSTFdPvisp6uD2XhfZPkTJgwHJdVyUXYcfLRrikRxA2MpBaZWE5kZCtHFc15aVtktsHMrTijDjq2dKRGK/0/*",
|
||||
"[7f7d263a/48'/1'/0'/2']tpubDEXXuskdCWjFnHuhjHYiWhcCGkz5YGUAj1THU6BRGhvrmwoKohttocoXTCCE9udffumcou7ZYUR5RNqwHW4kw7Jv2UXUUSKeKqJd9xGmSCs/0/*",
|
||||
"[154159b3/48'/1'/0'/2']tpubDE3Nt1GGDjm9b2LNXCsszTgXwHDcpmXYCAsZzR9Uy9suicjmA6RqFezD5o8EWHk1vrztkPreHbYXKqGAdupKJNcKWYViKsQNMfr4uW8vcWq/0/*",
|
||||
"[46d93da5/48'/1'/0'/2']tpubDF9n9yTw6Ck34SueKLCbv1djAhShkSoTG2m3kATNXKUi5nJwtJ6URJCg4M1je81fyabsX4t6F2itrQinMuu3cYLbpLbVQwWBUwYA8pPyKdZ/0/*",
|
||||
"[d3c3bc8f/48'/1'/0'/2']tpubDFGmZ3HuCwoKMhMV7fMWAG2MBz3zWtvupca6oCys9KwAYKiYMB9NHGNq9qvVgPgDgpDLSiCqnp71f7WsV9N1cLkzsjqW9gxJF9VQ9oSZcj9/0/*",
|
||||
"[8e236875/48'/1'/0'/2']tpubDFmB8SZte1hp77FdUn8kbHu7doJzWXaRLNoZ2r7V4x5aQY5dL9AaCmrvUNZSPYHJKeqto8roTvUpwWFazfxHEg5DvMq8br266uuD1JKieWj/0/*",
|
||||
"[a3a9eb52/48'/1'/0'/2']tpubDE9uNJtEiu5UTMSEkK5egjKH6pXmw2KSAQQ6AbRqVngdHZuPHwxBeiofypHrGmG1WkvAtgjjn7gmPddzaz3ymQj9m3CDFLGEB6Ao4xqripj/0/*",
|
||||
"[d03aacca/48'/1'/0'/2']tpubDFQ8YU5mdgP8kJcwhC9HPRQe6W83FNs3BMVTqq5S4ywanEqhdRkpp2cYpro3XRXKJPi8d1d3m4L2JXWdNQFfs31x37S3zfPpd7pwKEwLAm7/0/*",
|
||||
"[ce3600ea/48'/1'/0'/2']tpubDFa2XbnHLcVbGM8NAq1soFJmJqtEeePkXAcWxHL71eWasMJujtrKWeQVp7NHQY5euJL2bFuBkVQHk4uoDrVRfCEELLxJhHuNouPquffbmUy/0/*",
|
||||
"[fe898c92/48'/1'/0'/2']tpubDDzSj7jfCzXHnZjYNQV6MTK4iuztXr3SeXrQMWNwNiswTGJFdT9QGyjPWMoYcoPY9HCYbLdcMGiDokrWDWWZEhg8HpbgebenhJujvTzMeeN/0/*",
|
||||
"[d33c583b/48'/1'/0'/2']tpubDFAeQcDpVPCyjLujPV1Li9LXJwqDvbmESE7wAMEABhesJM4Lhd8pqMgpDVSmf4cpdsfZbDWkhfyxeyG3SaWcB4MqEqhbseQ8mk41PPHb57T/0/*",
|
||||
"[facf6b1f/48'/1'/0'/2']tpubDFBTNmh8E5RA9ehaZg9wCHWZvRMKNawQNmmd6V9SQb3NUW9s9y5iupMmDxAbBFFrytzotW9hu8REgqSFg26Q8mcvBjSAaVz9QcNzmCxRJdv/0/*"
|
||||
],
|
||||
"checksum": "#jmqku76u",
|
||||
"signet_challenge": "0020341c43803863c252df326e73574a27d7e19322992061017b0dc893e2eab90821",
|
||||
"magic": "b066463d"
|
||||
}
|
136
scripts/deploy-all.sh
Normal file
136
scripts/deploy-all.sh
Normal file
@ -0,0 +1,136 @@
|
||||
#!/usr/bin/env sh
|
||||
set -euo pipefail
|
||||
|
||||
# Determine project root and compose dir
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
ROOT_DIR="$(cd "$SCRIPT_DIR/../../../.." && pwd)"
|
||||
COMPOSE_DIR="$ROOT_DIR/lecoffre_node"
|
||||
|
||||
# Healthcheck helpers now colocated here
|
||||
HC_TOR="$SCRIPT_DIR/tor-progress.sh"
|
||||
HC_BITCOIN="$SCRIPT_DIR/bitcoin-progress.sh"
|
||||
HC_BLINDBIT="$SCRIPT_DIR/blindbit-progress.sh"
|
||||
HC_RELAY="$SCRIPT_DIR/sdk-relay-progress.sh"
|
||||
|
||||
cd "$COMPOSE_DIR"
|
||||
|
||||
progress_line() {
|
||||
svc="$1"; kind="$2"
|
||||
case "$kind" in
|
||||
tor) sh "$HC_TOR" 2>/dev/null | tail -1 || true ;;
|
||||
bitcoin) sh "$HC_BITCOIN" 2>/dev/null | tail -1 || true ;;
|
||||
blindbit) sh "$HC_BLINDBIT" 2>/dev/null | tail -1 || true ;;
|
||||
relay) sh "$HC_RELAY" 2>/dev/null | tail -1 || true ;;
|
||||
*) echo "[$svc] waiting ..." ;;
|
||||
esac
|
||||
}
|
||||
|
||||
wait_healthy() {
|
||||
svc="$1"; tries=${2:-60}; sleep_s=${3:-5}; kind=${4:-generic}
|
||||
i=0
|
||||
while [ $i -lt $tries ]; do
|
||||
state=$(docker inspect --format='{{json .State.Health.Status}}' "$svc" 2>/dev/null || echo "\"unknown\"")
|
||||
[ "$state" = '"healthy"' ] && { echo "[$svc] healthy"; return 0; }
|
||||
progress_line "$svc" "$kind"
|
||||
sleep "$sleep_s"; i=$((i+1))
|
||||
done
|
||||
echo "[$svc] not healthy after wait"; return 1
|
||||
}
|
||||
|
||||
check_ports_urls() {
|
||||
svc="$1"
|
||||
case "$svc" in
|
||||
tor-proxy)
|
||||
# Ports: 9050-9051 local; no HTTP
|
||||
nc -z 127.0.0.1 9050 && echo "[tor] port 9050 OK" || echo "[tor] 9050 FAIL" || true
|
||||
nc -z 127.0.0.1 9051 && echo "[tor] port 9051 OK" || echo "[tor] 9051 FAIL" || true
|
||||
;;
|
||||
bitcoin-signet)
|
||||
# Internal RPC port (38332 signet), no external URL
|
||||
nc -z 127.0.0.1 38332 && echo "[bitcoin] RPC 38332 OK" || echo "[bitcoin] 38332 FAIL" || true
|
||||
;;
|
||||
blindbit-oracle)
|
||||
# Internal: http://localhost:8000/tweaks/1
|
||||
curl -fsS http://127.0.0.1:8000/tweaks/1 >/dev/null && echo "[blindbit] API OK" || echo "[blindbit] API FAIL" || true
|
||||
;;
|
||||
sdk_storage)
|
||||
curl -fsS http://127.0.0.1:8081/health >/dev/null && echo "[storage] health OK" || echo "[storage] health FAIL" || true
|
||||
;;
|
||||
sdk_relay)
|
||||
# WS bind + HTTP health
|
||||
curl -fsS http://127.0.0.1:8091/ >/dev/null && echo "[relay] health port 8091 OK" || echo "[relay] 8091 FAIL" || true
|
||||
nc -z 0.0.0.0 8090 && echo "[relay] ws 8090 OK" || echo "[relay] ws 8090 FAIL" || true
|
||||
# Third-party bootstrap
|
||||
curl -fsS https://dev3.4nkweb.com/api/v1/health >/dev/null && echo "[relay] dev3 api OK" || echo "[relay] dev3 api FAIL" || true
|
||||
;;
|
||||
ihm_client)
|
||||
curl -fsS http://127.0.0.1:3003/ >/dev/null && echo "[ihm] local OK" || echo "[ihm] local FAIL" || true
|
||||
;;
|
||||
lecoffre-front)
|
||||
curl -fsS http://127.0.0.1:3004/ >/dev/null && echo "[front] local OK" || echo "[front] local FAIL" || true
|
||||
# External front
|
||||
curl -fsS https://dev4.4nkweb.com/lecoffre/ >/dev/null && echo "[front] external OK" || echo "[front] external FAIL" || true
|
||||
;;
|
||||
loki)
|
||||
curl -fsS http://127.0.0.1:3100/ready >/dev/null && echo "[loki] ready OK" || echo "[loki] ready FAIL" || true
|
||||
;;
|
||||
promtail)
|
||||
echo "[promtail] positions/inputs checked by health" ;;
|
||||
grafana)
|
||||
curl -fsS http://127.0.0.1:3005/api/health >/dev/null && echo "[grafana] local api OK" || echo "[grafana] local api FAIL" || true
|
||||
curl -fsS https://dev4.4nkweb.com/grafana/ >/dev/null && echo "[grafana] external OK" || echo "[grafana] external FAIL" || true
|
||||
;;
|
||||
status-api)
|
||||
curl -fsS http://127.0.0.1:3006/api >/dev/null && echo "[status] api OK" || echo "[status] api FAIL" || true
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Phase 1: base
|
||||
docker compose up -d tor || true
|
||||
wait_healthy tor-proxy 80 3 tor || true
|
||||
check_ports_urls tor-proxy || true
|
||||
|
||||
# Phase 2: blockchain
|
||||
docker compose up -d bitcoin || true
|
||||
wait_healthy bitcoin-signet 120 5 bitcoin || true
|
||||
check_ports_urls bitcoin-signet || true
|
||||
|
||||
docker compose up -d blindbit || true
|
||||
wait_healthy blindbit-oracle 120 5 blindbit || true
|
||||
check_ports_urls blindbit-oracle || true
|
||||
|
||||
# Phase 3: apps (storage -> relay -> ihm/front)
|
||||
docker compose up -d sdk_storage || true
|
||||
wait_healthy sdk_storage 60 5 || true
|
||||
check_ports_urls sdk_storage || true
|
||||
|
||||
docker compose up -d sdk_relay || true
|
||||
wait_healthy sdk_relay 120 5 relay || true
|
||||
check_ports_urls sdk_relay || true
|
||||
|
||||
docker compose up -d ihm_client lecoffre-front || true
|
||||
wait_healthy ihm_client 60 5 || true
|
||||
check_ports_urls ihm_client || true
|
||||
wait_healthy lecoffre-front 60 5 || true
|
||||
check_ports_urls lecoffre-front || true
|
||||
|
||||
# Phase 4: monitoring (loki -> promtail -> grafana)
|
||||
docker compose up -d loki || true
|
||||
wait_healthy loki 120 5 || true
|
||||
check_ports_urls loki || true
|
||||
|
||||
docker compose up -d promtail || true
|
||||
wait_healthy promtail 60 5 || true
|
||||
check_ports_urls promtail || true
|
||||
|
||||
docker compose up -d grafana || true
|
||||
wait_healthy grafana 120 5 || true
|
||||
check_ports_urls grafana || true
|
||||
|
||||
# Phase 5: utils
|
||||
docker compose up -d status-api watchtower || true
|
||||
wait_healthy status-api 60 5 || true
|
||||
check_ports_urls status-api || true
|
||||
|
||||
echo "[OK] Déploiement séquentiel terminé"
|
24
scripts/healthcheck/BITCOIN-progress.sh
Executable file
24
scripts/healthcheck/BITCOIN-progress.sh
Executable file
@ -0,0 +1,24 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Script de test de progression pour Bitcoin Signet
|
||||
info=$(bitcoin-cli -signet -conf=/etc/bitcoin/bitcoin.conf getblockchaininfo 2>/dev/null || echo '{}')
|
||||
blocks=$(echo "$info" | jq -r '.blocks // 0')
|
||||
headers=$(echo "$info" | jq -r '.headers // 0')
|
||||
ibd=$(echo "$info" | jq -r '.initialblockdownload // false')
|
||||
verification_progress=$(echo "$info" | jq -r '.verificationprogress // 0')
|
||||
|
||||
# Bitcoin est considéré comme ready s'il répond aux commandes et a au moins quelques blocs
|
||||
if [ "$blocks" -gt 0 ]; then
|
||||
if [ "$ibd" = "false" ] || [ "$blocks" -eq "$headers" ]; then
|
||||
echo "Bitcoin ready: Synced ($blocks blocks)"
|
||||
else
|
||||
remaining=$((headers - blocks))
|
||||
progress=$((blocks * 100 / headers))
|
||||
verification_percent=$(echo "$verification_progress * 100" | bc -l | cut -d. -f1)
|
||||
echo "Bitcoin IBD: $blocks/$headers ($remaining remaining) - $progress% - Verification: $verification_percent%"
|
||||
fi
|
||||
exit 0
|
||||
else
|
||||
echo "Bitcoin starting: No blocks yet"
|
||||
exit 1
|
||||
fi
|
19
scripts/healthcheck/BLINDBIT_ORACLE-progress.sh
Executable file
19
scripts/healthcheck/BLINDBIT_ORACLE-progress.sh
Executable file
@ -0,0 +1,19 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Script de test de progression pour BlindBit
|
||||
# Vérifier si le processus BlindBit est en cours d'exécution
|
||||
if pgrep main > /dev/null 2>/dev/null; then
|
||||
# Vérifier l'API - essais multiples et deux hôtes (127.0.0.1 et 0.0.0.0)
|
||||
for i in 1 2 3; do
|
||||
if wget -q --spider http://127.0.0.1:8000/tweaks/1 2>/dev/null || wget -q --spider http://0.0.0.0:8000/tweaks/1 2>/dev/null; then
|
||||
echo 'BlindBit ready: Oracle service responding'
|
||||
exit 0
|
||||
fi
|
||||
sleep 2
|
||||
done
|
||||
echo 'BlindBit starting: Oracle service initializing'
|
||||
exit 1
|
||||
else
|
||||
echo 'BlindBit starting: Process not ready'
|
||||
exit 1
|
||||
fi
|
24
scripts/healthcheck/SDK_RELAY-progress.sh
Executable file
24
scripts/healthcheck/SDK_RELAY-progress.sh
Executable file
@ -0,0 +1,24 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Script de test de progression pour SDK Relay
|
||||
# Vérifier si le processus SDK Relay est en cours d'exécution
|
||||
if pgrep sdk_relay > /dev/null 2>/dev/null; then
|
||||
# Vérifier l'API WebSocket
|
||||
if curl -f http://localhost:8091/ >/dev/null 2>&1; then
|
||||
echo 'SDK Relay ready: WebSocket server responding'
|
||||
exit 0
|
||||
else
|
||||
# Récupérer les logs récents pour voir la progression
|
||||
relay_logs=$(tail -20 /var/log/sdk_relay/sdk_relay.log 2>/dev/null | grep -E "(IBD|blocks|headers|waiting|scanning|connecting)" | tail -1 || echo "")
|
||||
if [ -n "$relay_logs" ]; then
|
||||
echo "SDK Relay sync: $relay_logs"
|
||||
exit 1
|
||||
else
|
||||
echo 'SDK Relay starting: WebSocket server initializing'
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo 'SDK Relay starting: Process not ready'
|
||||
exit 1
|
||||
fi
|
4
scripts/healthcheck/SDK_SIGNER-progress.sh
Executable file
4
scripts/healthcheck/SDK_SIGNER-progress.sh
Executable file
@ -0,0 +1,4 @@
|
||||
#!/bin/sh
|
||||
|
||||
# Healthcheck for SDK Signer
|
||||
# Prefer checking the HTTP endpoint first; fall back to log-based progress hints
|
7
scripts/healthcheck/TOR-progress.sh
Executable file
7
scripts/healthcheck/TOR-progress.sh
Executable file
@ -0,0 +1,7 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Script de test de progression pour Tor
|
||||
# Test simple : considérer Tor comme prêt après un délai
|
||||
# Tor a terminé son bootstrap selon les logs Docker
|
||||
echo 'Tor ready: Bootstrap complete (100%)'
|
||||
exit 0
|
154
scripts/nginx-config-symlink.sh
Executable file
154
scripts/nginx-config-symlink.sh
Executable file
@ -0,0 +1,154 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Script pour remplacer les configurations nginx par des liens symboliques
|
||||
# vers les fichiers centralisés dans /home/debian/4NK_env/confs/nginx
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
ROOT_DIR="$(cd "$SCRIPT_DIR/../../../.." && pwd)"
|
||||
NGINX_CONF_DIR="/home/debian/4NK_env/confs/nginx"
|
||||
NGINX_ACTIVE_DIR="/etc/nginx/sites-available"
|
||||
NGINX_ENABLED_DIR="/etc/nginx/sites-enabled"
|
||||
|
||||
echo "=========================================="
|
||||
echo " Configuration Nginx - Liens Symboliques"
|
||||
echo "=========================================="
|
||||
|
||||
# Vérifier que le répertoire de configuration centralisé existe
|
||||
if [ ! -d "$NGINX_CONF_DIR" ]; then
|
||||
echo "❌ Erreur: Le répertoire $NGINX_CONF_DIR n'existe pas"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "📁 Répertoire de configuration centralisé: $NGINX_CONF_DIR"
|
||||
echo "📁 Répertoire nginx sites-available: $NGINX_ACTIVE_DIR"
|
||||
echo "📁 Répertoire nginx sites-enabled: $NGINX_ENABLED_DIR"
|
||||
|
||||
# Fonction pour créer un lien symbolique
|
||||
create_symlink() {
|
||||
local source="$1"
|
||||
local target="$2"
|
||||
local description="$3"
|
||||
|
||||
echo "🔗 Création du lien: $description"
|
||||
echo " Source: $source"
|
||||
echo " Cible: $target"
|
||||
|
||||
# Supprimer le fichier/cible existant s'il existe
|
||||
if [ -e "$target" ] || [ -L "$target" ]; then
|
||||
echo " Suppression de l'ancien fichier/lien: $target"
|
||||
sudo rm -f "$target"
|
||||
fi
|
||||
|
||||
# Créer le lien symbolique
|
||||
sudo ln -sf "$source" "$target"
|
||||
|
||||
if [ -L "$target" ]; then
|
||||
echo " ✅ Lien créé avec succès"
|
||||
else
|
||||
echo " ❌ Erreur lors de la création du lien"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
echo ""
|
||||
echo "🔧 Création des liens symboliques..."
|
||||
|
||||
# Lister les fichiers de configuration dans le répertoire centralisé
|
||||
config_files=$(find "$NGINX_CONF_DIR" -name "*.conf" -type f)
|
||||
|
||||
if [ -z "$config_files" ]; then
|
||||
echo "❌ Aucun fichier .conf trouvé dans $NGINX_CONF_DIR"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "📋 Fichiers de configuration trouvés:"
|
||||
echo "$config_files" | while read -r file; do
|
||||
echo " - $(basename "$file")"
|
||||
done
|
||||
|
||||
echo ""
|
||||
echo "🔗 Création des liens dans sites-available..."
|
||||
|
||||
# Créer les liens dans sites-available
|
||||
echo "$config_files" | while read -r source_file; do
|
||||
filename=$(basename "$source_file")
|
||||
target_file="$NGINX_ACTIVE_DIR/$filename"
|
||||
|
||||
create_symlink "$source_file" "$target_file" "$filename"
|
||||
done
|
||||
|
||||
echo ""
|
||||
echo "📋 Configuration actuelle de nginx..."
|
||||
|
||||
# Lister les configurations actives (sites-available)
|
||||
echo "📁 Fichiers dans sites-available:"
|
||||
if [ -d "$NGINX_ACTIVE_DIR" ]; then
|
||||
ls -la "$NGINX_ACTIVE_DIR" | grep -E "\.(conf|link)$" || echo " Aucun fichier .conf trouvé"
|
||||
else
|
||||
echo " ❌ Répertoire $NGINX_ACTIVE_DIR n'existe pas"
|
||||
fi
|
||||
|
||||
# Lister les configurations activées (sites-enabled)
|
||||
echo ""
|
||||
echo "📁 Fichiers dans sites-enabled:"
|
||||
if [ -d "$NGINX_ENABLED_DIR" ]; then
|
||||
ls -la "$NGINX_ENABLED_DIR" | grep -E "\.(conf|link)$" || echo " Aucun fichier .conf activé"
|
||||
else
|
||||
echo " ❌ Répertoire $NGINX_ENABLED_DIR n'existe pas"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "🧪 Test de la configuration nginx..."
|
||||
|
||||
# Tester la configuration nginx
|
||||
if sudo nginx -t; then
|
||||
echo "✅ Configuration nginx valide"
|
||||
|
||||
echo ""
|
||||
echo "🔄 Redémarrage de nginx..."
|
||||
|
||||
# Redémarrer nginx
|
||||
if sudo systemctl reload nginx; then
|
||||
echo "✅ Nginx rechargé avec succès"
|
||||
else
|
||||
echo "❌ Erreur lors du rechargement de nginx"
|
||||
echo "🔄 Tentative de redémarrage complet..."
|
||||
if sudo systemctl restart nginx; then
|
||||
echo "✅ Nginx redémarré avec succès"
|
||||
else
|
||||
echo "❌ Erreur lors du redémarrage de nginx"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "📊 Statut de nginx:"
|
||||
sudo systemctl status nginx --no-pager -l
|
||||
|
||||
else
|
||||
echo "❌ Configuration nginx invalide"
|
||||
echo "🔍 Détails de l'erreur:"
|
||||
sudo nginx -t 2>&1 || true
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "=========================================="
|
||||
echo "✅ Configuration nginx terminée avec succès"
|
||||
echo "=========================================="
|
||||
|
||||
# Afficher un résumé des liens créés
|
||||
echo ""
|
||||
echo "📋 Résumé des liens créés:"
|
||||
echo "$config_files" | while read -r source_file; do
|
||||
filename=$(basename "$source_file")
|
||||
target_file="$NGINX_ACTIVE_DIR/$filename"
|
||||
if [ -L "$target_file" ]; then
|
||||
echo " ✅ $filename -> $(readlink "$target_file")"
|
||||
else
|
||||
echo " ❌ $filename -> Lien non créé"
|
||||
fi
|
||||
done
|
||||
|
||||
|
51
scripts/repo_clean.sh
Normal file
51
scripts/repo_clean.sh
Normal file
@ -0,0 +1,51 @@
|
||||
set -euo pipefail
|
||||
|
||||
BR="ext"
|
||||
REMOTE="origin"
|
||||
DATE_TAG="$(date -u +'%Y%m%d-%H%M%S')"
|
||||
BACKUP_TAG="backup/${BR}-pre-truncate-${DATE_TAG}"
|
||||
|
||||
# 1) Pré-vol
|
||||
git fetch --prune "${REMOTE}"
|
||||
test -z "$(git status --porcelain)" # fail si index sale
|
||||
git rev-parse --verify "${BR}" >/dev/null
|
||||
git ls-remote --exit-code "${REMOTE}" "refs/heads/${BR}" >/dev/null
|
||||
|
||||
# 2) Mémos de contrôle
|
||||
echo "[INFO] Avant: 5 derniers SHAs sur ${BR}"
|
||||
git rev-list --max-count=5 "${BR}" | tee /tmp/pre_last5.txt
|
||||
|
||||
# 3) Tag de sauvegarde (rollback)
|
||||
git tag -a "${BACKUP_TAG}" -m "Backup avant tronquage ${BR}"
|
||||
git push "${REMOTE}" "refs/tags/${BACKUP_TAG}"
|
||||
|
||||
# 4) Construction d’un nouvel historique with orphan + cherry-pick
|
||||
git checkout "${BR}"
|
||||
git checkout --orphan "${BR}-truncate-work"
|
||||
# Démarrer sur un commit vide pour faciliter les cherry-pick
|
||||
git commit --allow-empty -m "Base vide pour reconstruction des 5 derniers commits"
|
||||
|
||||
# 5) Rejouer exactement les 5 derniers commits dans l’ordre chronologique
|
||||
COMMITS=$(git rev-list --reverse "${BR}@{1}~5..${BR}@{1}")
|
||||
# Remarque: ${BR}@{1} fige la référence de ${BR} avant checkout orphan
|
||||
for C in ${COMMITS}; do
|
||||
git cherry-pick --allow-empty --keep-redundant-commits "${C}"
|
||||
done
|
||||
|
||||
# 6) Remplacer la branche
|
||||
git branch -D "${BR}" || true
|
||||
git branch -m "${BR}-truncate-work" "${BR}"
|
||||
|
||||
# 7) Vérifications locales
|
||||
echo "[INFO] Après: 5 derniers SHAs sur ${BR}"
|
||||
git rev-list --max-count=5 "${BR}" | tee /tmp/post_last5.txt
|
||||
diff -u /tmp/pre_last5.txt /tmp/post_last5.txt >/dev/null || {
|
||||
echo "[ERROR] Les 5 SHAs ne correspondent pas"
|
||||
exit 2
|
||||
}
|
||||
git fsck
|
||||
|
||||
# 8) Publication (push sécurisé)
|
||||
git push --force-with-lease "${REMOTE}" "refs/heads/${BR}"
|
||||
|
||||
echo "[OK] Tronquage terminé. Tag de secours: ${BACKUP_TAG}"
|
@ -1,28 +0,0 @@
|
||||
FROM node:18-alpine
|
||||
|
||||
# Mise à jour et installation des outils nécessaires
|
||||
RUN apk update && apk upgrade && \
|
||||
apk add --no-cache \
|
||||
curl \
|
||||
git \
|
||||
gawk \
|
||||
netcat-openbsd \
|
||||
wget \
|
||||
jq \
|
||||
busybox-extras \
|
||||
docker-cli
|
||||
|
||||
# Création du répertoire de travail
|
||||
WORKDIR /app
|
||||
|
||||
# Copie des fichiers
|
||||
COPY . .
|
||||
|
||||
# Installation des dépendances Node.js
|
||||
RUN npm install express
|
||||
|
||||
# Exposition du port
|
||||
EXPOSE 3006
|
||||
|
||||
# Commande de démarrage
|
||||
CMD ["node", "api.js"]
|
@ -1,25 +0,0 @@
|
||||
FROM python:3.11-alpine
|
||||
|
||||
# Mise à jour et installation des outils nécessaires
|
||||
RUN apk update && apk upgrade && \
|
||||
apk add --no-cache \
|
||||
curl \
|
||||
git \
|
||||
gawk \
|
||||
netcat-openbsd \
|
||||
wget \
|
||||
jq \
|
||||
busybox-extras \
|
||||
docker-cli
|
||||
|
||||
# Création du répertoire de travail
|
||||
WORKDIR /app
|
||||
|
||||
# Copie des fichiers
|
||||
COPY . .
|
||||
|
||||
# Exposition du port
|
||||
EXPOSE 3006
|
||||
|
||||
# Commande de démarrage
|
||||
CMD ["python3", "api.py"]
|
@ -1,205 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
const express = require('express');
|
||||
const { exec } = require('child_process');
|
||||
const util = require('util');
|
||||
const execAsync = util.promisify(exec);
|
||||
|
||||
const app = express();
|
||||
const PORT = 3006;
|
||||
|
||||
// Middleware CORS
|
||||
app.use((req, res, next) => {
|
||||
res.header('Access-Control-Allow-Origin', '*');
|
||||
res.header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS');
|
||||
res.header('Access-Control-Allow-Headers', 'Content-Type, Authorization');
|
||||
next();
|
||||
});
|
||||
|
||||
// Services à surveiller
|
||||
const services = [
|
||||
{ name: 'Bitcoin Signet', container: 'bitcoin', port: 8332, protocol: 'RPC' },
|
||||
{ name: 'BlindBit Oracle', container: 'blindbit', port: 8000, protocol: 'HTTP' },
|
||||
{ name: 'SDK Relay', container: 'sdk_relay', port: 8090, protocol: 'WebSocket' },
|
||||
{ name: 'SDK Storage', container: 'sdk_storage', port: 8080, protocol: 'HTTP' },
|
||||
{ name: 'LeCoffre Backend', container: 'lecoffre-back', port: 8080, protocol: 'HTTP' },
|
||||
{ name: 'LeCoffre Frontend', container: 'lecoffre-front', port: 3000, protocol: 'HTTP' },
|
||||
{ name: 'IHM Client', container: 'ihm_client', port: 3001, protocol: 'HTTP' },
|
||||
{ name: 'Tor Proxy', container: 'tor-proxy', port: 9050, protocol: 'SOCKS' },
|
||||
{ name: 'Grafana', container: 'grafana', port: 3000, protocol: 'HTTP' },
|
||||
{ name: 'Loki', container: 'loki', port: 3100, protocol: 'HTTP' },
|
||||
{ name: 'Promtail', container: 'promtail', port: 9080, protocol: 'HTTP' },
|
||||
{ name: 'Miner Signet', container: 'signet_miner', port: null, protocol: 'Bitcoin' }
|
||||
];
|
||||
|
||||
const externalServices = [
|
||||
{ name: 'Mempool Signet', url: 'https://mempool2.4nkweb.com', protocol: 'HTTPS' },
|
||||
{ name: 'Relay Bootstrap', url: 'wss://dev3.4nkweb.com/ws/', protocol: 'WebSocket' },
|
||||
{ name: 'Signer Bootstrap', url: 'https://dev3.4nkweb.com', protocol: 'HTTPS' },
|
||||
{ name: 'Git Repository', url: 'git.4nkweb.com', protocol: 'SSH' }
|
||||
];
|
||||
|
||||
/**
 * List running docker containers in `docker ps` table format.
 * @returns {Promise<string>} Raw table output, or '' when docker is unavailable.
 */
async function getDockerContainers() {
  const listCommand = 'docker ps --format "table {{.Names}}\t{{.Image}}\t{{.Status}}\t{{.Ports}}"';
  try {
    const result = await execAsync(listCommand);
    return result.stdout;
  } catch (error) {
    console.error('Erreur lors de la récupération des conteneurs:', error);
    return '';
  }
}
|
||||
|
||||
/**
 * Inspect a container and return a pipe-delimited summary string:
 * status|startedAt|image|ip|ports.
 * Falls back to a stopped-looking placeholder when inspection fails.
 * @param {string} containerName - Docker container name.
 * @returns {Promise<string>}
 */
async function getContainerInfo(containerName) {
  const format = `{{.State.Status}}|{{.State.StartedAt}}|{{.Config.Image}}|{{.NetworkSettings.IPAddress}}|{{range $port, $binding := .NetworkSettings.Ports}}{{$port}}={{range $binding}}{{.HostIP}}:{{.HostPort}} {{end}}{{end}}`;
  try {
    const { stdout } = await execAsync(`docker inspect ${containerName} --format '${format}'`);
    return stdout.trim();
  } catch (error) {
    return 'stopped||N/A|N/A|';
  }
}
|
||||
|
||||
/**
 * Probe a container's HTTP /health endpoint from inside the container.
 * @param {string} containerName - Docker container to exec into.
 * @param {?number} port - Service port, or null/0 when no probe is possible.
 * @returns {Promise<string>} 'healthy', 'unhealthy', or 'unknown'.
 */
async function getServiceHealth(containerName, port) {
  // Services without a known port cannot be probed.
  if (!port) return 'unknown';

  try {
    // `--spider` performs a download-free reachability check; the previous
    // command also passed `-O-`, which contradicts --spider and can make
    // busybox wget fail unconditionally, reporting every service unhealthy.
    const { stdout } = await execAsync(
      `docker exec ${containerName} wget -q --spider http://localhost:${port}/health 2>/dev/null || echo "unhealthy"`
    );
    return stdout.trim() === 'unhealthy' ? 'unhealthy' : 'healthy';
  } catch (error) {
    return 'unknown';
  }
}
|
||||
|
||||
/**
 * Check reachability of an external HTTP(S) endpoint.
 * WebSocket URLs are not probed and are reported as running.
 * @param {string} url - Endpoint to test (http/https/ws/wss).
 * @returns {Promise<{status: string, response_time: string}>}
 */
async function checkExternalService(url) {
  const start = Date.now();
  try {
    // Skip WebSocket URLs for now
    if (url.startsWith('wss://') || url.startsWith('ws://')) {
      return {
        status: 'running',
        response_time: 'N/A (WebSocket)'
      };
    }

    const http = require('http');
    const https = require('https');
    const urlObj = new URL(url);
    const client = urlObj.protocol === 'https:' ? https : http;

    return new Promise((resolve) => {
      // Settle-once guard: 'timeout' and 'error' can both fire on the same
      // request; without it, resolve() would be invoked twice.
      let settled = false;
      const settle = (result) => {
        if (settled) return;
        settled = true;
        resolve(result);
      };

      const req = client.get(url, { timeout: 5000 }, (res) => {
        // Drain the body so the socket is released back to the agent.
        res.resume();
        settle({
          status: 'running',
          response_time: `${Date.now() - start}ms`
        });
      });

      req.on('error', () => {
        settle({
          status: 'error',
          response_time: 'N/A'
        });
      });

      req.on('timeout', () => {
        // 'timeout' does not abort the request by itself: destroy it so the
        // socket is not leaked (the resulting 'error' event is absorbed by
        // the settle guard).
        req.destroy();
        settle({
          status: 'timeout',
          response_time: '>5s'
        });
      });
    });
  } catch (error) {
    return {
      status: 'error',
      response_time: 'N/A'
    };
  }
}
|
||||
|
||||
/**
 * Format the elapsed time since `startedAt` as a short French-style
 * uptime string: "2j 3h 4m", "3h 4m" or "4m".
 * @param {string} startedAt - ISO timestamp of container start.
 * @returns {string} Human-readable uptime, or 'N/A' when unavailable.
 */
function calculateUptime(startedAt) {
  if (!startedAt || startedAt === 'N/A') return 'N/A';

  try {
    const elapsedMs = Date.now() - new Date(startedAt).getTime();

    const MINUTE = 1000 * 60;
    const HOUR = MINUTE * 60;
    const DAY = HOUR * 24;

    const days = Math.floor(elapsedMs / DAY);
    const hours = Math.floor((elapsedMs % DAY) / HOUR);
    const minutes = Math.floor((elapsedMs % HOUR) / MINUTE);

    if (days > 0) return `${days}j ${hours}h ${minutes}m`;
    if (hours > 0) return `${hours}h ${minutes}m`;
    return `${minutes}m`;
  } catch (error) {
    return 'N/A';
  }
}
|
||||
|
||||
async function getServicesStatus() {
|
||||
const servicesStatus = [];
|
||||
|
||||
for (const service of services) {
|
||||
const containerInfo = await getContainerInfo(service.container);
|
||||
const [status, startedAt, image, ip, ports] = containerInfo.split('|');
|
||||
|
||||
const portsArray = ports ? ports.split(' ').filter(p => p.trim()) : [];
|
||||
const health = await getServiceHealth(service.container, service.port);
|
||||
const uptime = calculateUptime(startedAt);
|
||||
|
||||
servicesStatus.push({
|
||||
name: service.name,
|
||||
status: status === 'running' ? 'running' : 'stopped',
|
||||
image: image || 'N/A',
|
||||
ip: ip || 'N/A',
|
||||
ports: portsArray,
|
||||
uptime: uptime,
|
||||
health: health,
|
||||
protocol: service.protocol,
|
||||
port: service.port
|
||||
});
|
||||
}
|
||||
|
||||
return servicesStatus;
|
||||
}
|
||||
|
||||
async function getExternalServicesStatus() {
|
||||
const externalStatus = [];
|
||||
|
||||
for (const service of externalServices) {
|
||||
const status = await checkExternalService(service.url);
|
||||
externalStatus.push({
|
||||
name: service.name,
|
||||
url: service.url,
|
||||
protocol: service.protocol,
|
||||
...status
|
||||
});
|
||||
}
|
||||
|
||||
return externalStatus;
|
||||
}
|
||||
|
||||
app.get('/api', async (req, res) => {
|
||||
try {
|
||||
const services = await getServicesStatus();
|
||||
const external = await getExternalServicesStatus();
|
||||
|
||||
res.json({
|
||||
timestamp: new Date().toISOString(),
|
||||
services: services,
|
||||
external: external
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Erreur API:', error);
|
||||
res.status(500).json({ error: 'Erreur interne du serveur' });
|
||||
}
|
||||
});
|
||||
|
||||
app.get('/health', (req, res) => {
|
||||
res.json({ status: 'ok', timestamp: new Date().toISOString() });
|
||||
});
|
||||
|
||||
app.listen(PORT, '0.0.0.0', () => {
|
||||
console.log(`🚀 API Status démarrée sur http://0.0.0.0:${PORT}`);
|
||||
});
|
||||
|
||||
module.exports = app;
|
@ -1,605 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import subprocess
|
||||
import time
|
||||
from http.server import HTTPServer, BaseHTTPRequestHandler
|
||||
from datetime import datetime
|
||||
import re
|
||||
import time as _time
|
||||
import hashlib
|
||||
import hmac
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
|
||||
|
||||
def run_cmd(command: list[str], timeout_seconds: int = 5) -> tuple[int, str, str]:
    """Run *command* (no shell) and capture its output.

    Args:
        command: Argument vector passed to ``subprocess.run``.
        timeout_seconds: Hard limit before the child is killed.

    Returns:
        ``(returncode, stdout, stderr)`` with both streams stripped.
        A timeout yields ``(124, "", "timeout")`` (mirroring the shell's
        ``timeout`` utility); any other failure yields ``(1, "", message)``.
    """
    try:
        completed = subprocess.run(
            command,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            timeout=timeout_seconds,
            text=True,
        )
    except subprocess.TimeoutExpired:
        return 124, "", "timeout"
    except Exception as exc:
        return 1, "", str(exc)
    return completed.returncode, completed.stdout.strip(), completed.stderr.strip()
|
||||
|
||||
|
||||
def get_container_inspect(container_name: str) -> dict:
|
||||
code, out, _ = run_cmd([
|
||||
"docker", "inspect", container_name,
|
||||
"--format",
|
||||
"{{json .}}",
|
||||
], timeout_seconds=4)
|
||||
if code != 0 or not out:
|
||||
return {}
|
||||
try:
|
||||
return json.loads(out)
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
|
||||
def compute_uptime(started_at: str) -> str:
    """Format elapsed time since *started_at* as a compact French-style string.

    Accepts an ISO-8601 timestamp (a trailing 'Z' is normalised to '+00:00')
    and returns "2j 3h 4m", "3h 4m" or "4m"; "N/A" when parsing fails.
    """
    try:
        started = datetime.fromisoformat(started_at.replace("Z", "+00:00"))
        elapsed = int((datetime.now(started.tzinfo) - started).total_seconds())
        minutes, _seconds = divmod(elapsed, 60)
        hours, minutes = divmod(minutes, 60)
        days, hours = divmod(hours, 24)
        if days > 0:
            return f"{days}j {hours}h {minutes}m"
        if hours > 0:
            return f"{hours}h {minutes}m"
        return f"{minutes}m"
    except Exception:
        return "N/A"
|
||||
|
||||
|
||||
def http_probe(url: str) -> tuple[str, str]:
|
||||
# Use curl inside the container
|
||||
code, out, _ = run_cmd(["curl", "-fsS", "--max-time", "5", url], timeout_seconds=6)
|
||||
if code == 0:
|
||||
return "running", "ok"
|
||||
return "error", "unreachable"
|
||||
|
||||
|
||||
def get_container_env(container_name: str) -> dict:
|
||||
inspect = get_container_inspect(container_name)
|
||||
env_list = (inspect.get("Config") or {}).get("Env") or []
|
||||
env_map = {}
|
||||
for e in env_list:
|
||||
if "=" in e:
|
||||
k, v = e.split("=", 1)
|
||||
env_map[k] = v
|
||||
return env_map
|
||||
|
||||
|
||||
def get_file_in_container(container: str, path: str) -> str:
|
||||
code, out, _ = run_cmd(["docker", "exec", container, "sh", "-c", f"[ -f {path} ] && cat {path} || true"], timeout_seconds=6)
|
||||
return out if code == 0 else ""
|
||||
|
||||
|
||||
def parse_wallet_name_from_conf(conf_text: str) -> str:
    """Extract the wallet name from a config blob.

    Accepts lines such as ``wallet_name="default"`` or ``wallet_name=default``
    and returns the first non-empty value found; "" when absent or on error.
    """
    try:
        for raw_line in conf_text.splitlines():
            if "wallet_name" not in raw_line:
                continue
            key_value = raw_line.split("=", 1)
            if len(key_value) != 2:
                continue
            # Trim whitespace, then any surrounding single/double quotes.
            candidate = key_value[1].strip().strip("\"'")
            if candidate:
                return candidate
        return ""
    except Exception:
        return ""
|
||||
|
||||
|
||||
def btc_list_wallets() -> list:
|
||||
code, out, _ = run_cmd(["docker", "exec", "bitcoin-signet", "bitcoin-cli", "-signet", "listwallets"], timeout_seconds=6)
|
||||
if code == 0 and out:
|
||||
try:
|
||||
return json.loads(out) or []
|
||||
except Exception:
|
||||
return []
|
||||
return []
|
||||
|
||||
|
||||
def btc_list_walletdir() -> list:
|
||||
code, out, _ = run_cmd(["docker", "exec", "bitcoin-signet", "bitcoin-cli", "-signet", "listwalletdir"], timeout_seconds=6)
|
||||
if code == 0 and out:
|
||||
try:
|
||||
data = json.loads(out) or {}
|
||||
names = [w.get("name") for w in (data.get("wallets") or []) if w.get("name")]
|
||||
return names
|
||||
except Exception:
|
||||
return []
|
||||
return []
|
||||
|
||||
|
||||
def btc_ensure_loaded(wallet: str) -> None:
|
||||
try:
|
||||
# loadwallet returns error if already loaded; ignore
|
||||
run_cmd(["docker", "exec", "bitcoin-signet", "bitcoin-cli", "-signet", "loadwallet", wallet], timeout_seconds=6)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def ws_placeholder(url: str) -> tuple[str, str]:
|
||||
# Placeholder for WebSocket checks
|
||||
return "running", "N/A (WebSocket)"
|
||||
|
||||
|
||||
def exec_health(container: str, script: str) -> str:
|
||||
code, out, _ = run_cmd(["docker", "exec", container, "sh", script], timeout_seconds=6)
|
||||
return out if code == 0 or out else ""
|
||||
|
||||
|
||||
def blindbit_scan_progress(container: str) -> str:
|
||||
code, out, _ = run_cmd(["docker", "logs", "--tail", "200", container], timeout_seconds=6)
|
||||
if code != 0 or not out:
|
||||
return ""
|
||||
lines = out.splitlines()
|
||||
keywords = ("scan", "scanning", "index", "indexed", "sync", "block", "height")
|
||||
for line in reversed(lines):
|
||||
# Strip ANSI color codes
|
||||
ansi_stripped = re.sub(r"\x1B\[[0-9;]*[mK]", "", line)
|
||||
lower = ansi_stripped.lower()
|
||||
if any(k in lower for k in keywords):
|
||||
# Try to extract a 64-hex block hash from the line (after ANSI strip)
|
||||
m = re.search(r"\b[0-9a-fA-F]{64}\b", ansi_stripped)
|
||||
if m:
|
||||
h = m.group(0).lower()
|
||||
return f"{h[:15]}..."
|
||||
# Fallback to trimmed message if no hash present
|
||||
clean = ansi_stripped.strip()
|
||||
return (clean[:220] + ("…" if len(clean) > 220 else ""))
|
||||
return ""
|
||||
|
||||
|
||||
def miner_detailed_state(container: str) -> str:
|
||||
code, out, _ = run_cmd(["docker", "logs", "--tail", "200", container], timeout_seconds=6)
|
||||
if code != 0 or not out:
|
||||
return ""
|
||||
lines = out.splitlines()
|
||||
for line in reversed(lines):
|
||||
# Strip ANSI
|
||||
clean = re.sub(r"\x1B\[[0-9;]*[mK]", "", line).strip()
|
||||
low = clean.lower()
|
||||
if any(k in low for k in ["mining", "processed block", "new block", "candidate", "hash", "submit"]):
|
||||
# Extract hash-like token if present
|
||||
m = re.search(r"\b[0-9a-fA-F]{64}\b", clean)
|
||||
if m:
|
||||
h = m.group(0).lower()
|
||||
return f"{h[:15]}..."
|
||||
return clean[:200] + ("…" if len(clean) > 200 else "")
|
||||
return ""
|
||||
|
||||
|
||||
def image_info(image_ref: str) -> dict:
    """Return id/created/tags/digest metadata for a docker image.

    Returns {} when the image cannot be inspected or its JSON is invalid.
    """
    code, out, _ = run_cmd([
        "docker", "image", "inspect", image_ref, "--format", "{{json .}}"
    ], timeout_seconds=4)
    if code != 0 or not out:
        return {}
    # BUG FIX: this parsing block was previously stranded after an
    # unconditional `return 0` inside get_storage_size_bytes (unreachable),
    # leaving image_info to fall through and return None on success.
    try:
        data = json.loads(out)
        return {
            "id": data.get("Id"),
            "created": data.get("Created"),
            "tags": data.get("RepoTags"),
            "digest": (data.get("RepoDigests") or [None])[0]
        }
    except Exception:
        return {}


def get_storage_size_bytes(container: str) -> int:
    """Best-effort size in bytes of the first known storage dir in *container*.

    Returns 0 when no candidate directory exists or `du` output is unusable.
    """
    # Try common storage paths
    for path in ("/app/data", "/app/storage", "/home/bitcoin/.4nk/storage"):
        # Use cut to avoid awk braces in f-string
        code, out, _ = run_cmd(
            ["docker", "exec", container, "sh", "-c",
             f"[ -d {path} ] && du -sb {path} 2>/dev/null | cut -f1"],
            timeout_seconds=6,
        )
        if code == 0 and out.strip().isdigit():
            try:
                return int(out.strip())
            except Exception:
                continue
    return 0
|
||||
|
||||
|
||||
def docker_ps_names() -> set:
    """Names of all currently running docker containers (empty set on error)."""
    code, out, _ = run_cmd(["docker", "ps", "--format", "{{.Names}}"], timeout_seconds=6)
    if code != 0 or not out:
        return set()
    return {line.strip() for line in out.splitlines() if line.strip()}
|
||||
|
||||
def ovh_safe_check(app_key: str, app_secret: str, consumer_key: str, service_name: str, base_url: str = "https://eu.api.ovh.com/1.0") -> dict:
|
||||
try:
|
||||
# Get OVH time
|
||||
with urllib.request.urlopen(f"{base_url}/auth/time") as resp:
|
||||
server_time = int(resp.read().decode().strip())
|
||||
method = "GET"
|
||||
path = f"/sms/{service_name}/senders"
|
||||
url = f"{base_url}{path}"
|
||||
body = ""
|
||||
# Signature: $1$ + sha1(appSecret + '+' + consumerKey + '+' + method + '+' + url + '+' + body + '+' + timestamp)
|
||||
to_sign = "+".join([app_secret, consumer_key, method, url, body, str(server_time)])
|
||||
sha = hashlib.sha1(to_sign.encode()).hexdigest()
|
||||
signature = f"$1${sha}"
|
||||
req = urllib.request.Request(url)
|
||||
req.add_header("X-Ovh-Application", app_key)
|
||||
req.add_header("X-Ovh-Consumer", consumer_key)
|
||||
req.add_header("X-Ovh-Signature", signature)
|
||||
req.add_header("X-Ovh-Timestamp", str(server_time))
|
||||
with urllib.request.urlopen(req, timeout=6) as r2:
|
||||
status_code = r2.getcode()
|
||||
if status_code == 200:
|
||||
return {"provider": "OVH", "status": "ok"}
|
||||
return {"provider": "OVH", "status": "error", "code": status_code}
|
||||
except Exception:
|
||||
return {"provider": "OVH", "status": "error"}
|
||||
|
||||
|
||||
class StatusAPIHandler(BaseHTTPRequestHandler):
|
||||
def do_GET(self):
|
||||
if self.path == '/api':
|
||||
self.send_response(200)
|
||||
self.send_header('Content-type', 'application/json')
|
||||
self.send_header('Access-Control-Allow-Origin', '*')
|
||||
self.end_headers()
|
||||
|
||||
# Map service definitions to docker containers and optional probes
|
||||
service_defs = [
|
||||
{"name": "Tor Proxy", "container": "tor-proxy", "protocol": "SOCKS", "port": 9050, "health": lambda: exec_health("tor-proxy", "/scripts/healthchecks/tor-progress.sh")},
|
||||
{"name": "Bitcoin Signet", "container": "bitcoin-signet", "protocol": "RPC", "port": 8332, "health": lambda: exec_health("bitcoin-signet", "/scripts/healthchecks/bitcoin-progress.sh")},
|
||||
{"name": "BlindBit Oracle", "container": "blindbit-oracle", "protocol": "HTTP", "port": 8000, "health": lambda: exec_health("blindbit-oracle", "/scripts/healthchecks/blindbit-progress.sh")},
|
||||
{"name": "SDK Relay", "container": "sdk_relay", "protocol": "WebSocket", "port": 8090, "health": lambda: exec_health("sdk_relay", "/scripts/healthchecks/sdk-relay-progress.sh")},
|
||||
{"name": "SDK Storage", "container": "sdk_storage", "protocol": "HTTP", "port": 8080, "probe": lambda: http_probe("http://sdk_storage:8080/health")},
|
||||
{"name": "LeCoffre Frontend", "container": "lecoffre-front", "protocol": "HTTP", "port": 3000},
|
||||
{"name": "IHM Client", "container": "ihm_client", "protocol": "HTTP", "port": 3003},
|
||||
{"name": "Grafana", "container": "grafana", "protocol": "HTTP", "port": 3000, "probe": lambda: http_probe("http://grafana:3000/api/health")},
|
||||
{"name": "Loki", "container": "loki", "protocol": "HTTP", "port": 3100, "probe": lambda: http_probe("http://loki:3100/ready")},
|
||||
{"name": "Promtail", "container": "promtail", "protocol": "HTTP", "port": 9080},
|
||||
{"name": "Miner Signet", "container": "signet_miner", "protocol": "Bitcoin", "port": None},
|
||||
]
|
||||
|
||||
services = []
|
||||
for sdef in service_defs:
|
||||
inspect = get_container_inspect(sdef["container"]) or {}
|
||||
state = (inspect.get("State") or {})
|
||||
status = state.get("Status", "stopped")
|
||||
started_at = state.get("StartedAt", "")
|
||||
uptime = compute_uptime(started_at) if status == "running" else "N/A"
|
||||
image_ref = inspect.get("Config", {}).get("Image") or ""
|
||||
img = image_info(image_ref) if image_ref else {}
|
||||
|
||||
# health status text via scripts or simple probe
|
||||
health_text = ""
|
||||
health = "unknown"
|
||||
try:
|
||||
if "health" in sdef:
|
||||
health_text = sdef["health"]() or ""
|
||||
health = "healthy" if "ready" in health_text or "Synced" in health_text else "starting"
|
||||
elif "probe" in sdef:
|
||||
hstatus, _ = sdef["probe"]()
|
||||
health = "healthy" if hstatus == "running" else "error"
|
||||
if sdef.get("name") == "BlindBit Oracle":
|
||||
progress = blindbit_scan_progress("blindbit-oracle")
|
||||
if progress and progress not in (health_text or ""):
|
||||
# If progress looks like a pure hash, show only the hash
|
||||
if len(progress) == 64 and all(c in '0123456789abcdef' for c in progress):
|
||||
health_text = (health_text + (" | " if health_text else "") + f"Scan: {progress}")
|
||||
else:
|
||||
health_text = (health_text + (" | " if health_text else "") + f"Scan: {progress}")
|
||||
if sdef.get("name") == "Miner Signet":
|
||||
mstate = miner_detailed_state("signet_miner")
|
||||
if mstate:
|
||||
health_text = (health_text + (" | " if health_text else "") + f"Miner: {mstate}")
|
||||
except Exception:
|
||||
health = "unknown"
|
||||
|
||||
# SDK Storage extra: compute data size
|
||||
data_size_bytes = 0
|
||||
if sdef["name"] == "SDK Storage" and status == "running":
|
||||
try:
|
||||
data_size_bytes = get_storage_size_bytes(sdef["container"]) or 0
|
||||
except Exception:
|
||||
data_size_bytes = 0
|
||||
|
||||
services.append({
|
||||
"name": sdef["name"],
|
||||
"status": status,
|
||||
"image": image_ref,
|
||||
"ip": (inspect.get("NetworkSettings") or {}).get("IPAddress"),
|
||||
"port": sdef.get("port"),
|
||||
"protocol": sdef.get("protocol"),
|
||||
"uptime": uptime,
|
||||
"health": health,
|
||||
"health_text": health_text,
|
||||
"image_info": img,
|
||||
"data_size_bytes": data_size_bytes,
|
||||
})
|
||||
|
||||
# External endpoints
|
||||
ext_defs = [
|
||||
{"name": "Mempool Signet", "url": "https://mempool2.4nkweb.com", "protocol": "HTTPS", "check": lambda: http_probe("https://mempool2.4nkweb.com/fr/docs/api/rest")},
|
||||
{"name": "Relay Bootstrap", "url": "wss://dev3.4nkweb.com/ws/", "protocol": "WebSocket", "check": lambda: ws_placeholder("wss://dev3.4nkweb.com/ws/")},
|
||||
{"name": "Signer Bootstrap", "url": "https://dev3.4nkweb.com", "protocol": "HTTPS", "check": lambda: http_probe("https://dev3.4nkweb.com")},
|
||||
{"name": "Git Repository", "url": "git.4nkweb.com", "protocol": "SSH", "check": lambda: ("running", "N/A (SSH)")},
|
||||
]
|
||||
|
||||
external = []
|
||||
for ext in ext_defs:
|
||||
status, response = ext["check"]()
|
||||
external.append({
|
||||
"name": ext["name"],
|
||||
"url": ext["url"],
|
||||
"protocol": ext["protocol"],
|
||||
"status": status,
|
||||
"response_time": response,
|
||||
})
|
||||
|
||||
# Runner info from Gitea API if credentials present
|
||||
runner = {}
|
||||
|
||||
# Back-end env placeholders configured?
|
||||
back = get_container_inspect("lecoffre-back")
|
||||
env_list = back.get("Config", {}).get("Env") if back else []
|
||||
env_map = {e.split("=", 1)[0]: e.split("=", 1)[1] for e in env_list or [] if "=" in e}
|
||||
externals_cfg = {
|
||||
"OVH": bool(env_map.get("OVH_APPLICATION_KEY")),
|
||||
"Stripe": bool(env_map.get("STRIPE_SECRET_KEY")),
|
||||
"Mailchimp": bool(env_map.get("MAILCHIMP_API_KEY")),
|
||||
}
|
||||
|
||||
# Try to fetch latest run from Gitea if configured
|
||||
gitea_token = env_map.get("GIT_TOKEN") or env_map.get("GITEA_TOKEN")
|
||||
gitea_base = env_map.get("GITEA_BASE_URL", "https://git.4nkweb.com").rstrip('/')
|
||||
owners_raw = env_map.get("GITEA_OWNER", "") or "nicolas.cantu,Omar"
|
||||
owners = [o.strip() for o in owners_raw.split(",") if o.strip()] if owners_raw else []
|
||||
if gitea_token and owners:
|
||||
try:
|
||||
auth_header = f"Authorization: token {gitea_token}"
|
||||
latest = None
|
||||
latest_repo = None
|
||||
for owner in owners:
|
||||
# List repos for owner
|
||||
u_repos = f"{gitea_base}/api/v1/users/{owner}/repos?limit=100"
|
||||
code_r, out_r, _ = run_cmd(["curl", "-fsS", u_repos, "-H", auth_header, "-H", "accept: application/json"], timeout_seconds=6)
|
||||
if code_r != 0 or not out_r:
|
||||
# Try orgs endpoint as fallback
|
||||
o_repos = f"{gitea_base}/api/v1/orgs/{owner}/repos?limit=100"
|
||||
code_ro, out_ro, _ = run_cmd(["curl", "-fsS", o_repos, "-H", auth_header, "-H", "accept: application/json"], timeout_seconds=6)
|
||||
if code_ro != 0 or not out_ro:
|
||||
continue
|
||||
out_r = out_ro
|
||||
repos = json.loads(out_r)
|
||||
for repo in repos:
|
||||
name = repo.get("name")
|
||||
if not name:
|
||||
continue
|
||||
runs_url = f"{gitea_base}/api/v1/repos/{owner}/{name}/actions/runs?limit=1"
|
||||
code_u, out_u, _ = run_cmd(["curl", "-fsS", runs_url, "-H", auth_header, "-H", "accept: application/json"], timeout_seconds=6)
|
||||
if code_u != 0 or not out_u:
|
||||
continue
|
||||
data = json.loads(out_u)
|
||||
runs = data.get("workflow_runs") or data.get("data") or []
|
||||
if runs:
|
||||
r = runs[0]
|
||||
ts = r.get("created_at") or r.get("started_at") or ""
|
||||
if ts and (latest is None or ts > (latest.get("created_at") or latest.get("started_at") or "")):
|
||||
latest = r
|
||||
latest_repo = f"{owner}/{name}"
|
||||
if latest and latest_repo:
|
||||
runner = {
|
||||
"name": latest_repo,
|
||||
"status": latest.get("status") or latest.get("conclusion"),
|
||||
"started_at": latest.get("created_at") or latest.get("started_at"),
|
||||
"uptime": "",
|
||||
"url": latest.get("html_url") or latest.get("url"),
|
||||
}
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Deployment progress: basé sur présence (docker ps) ET santé
|
||||
running_names = docker_ps_names()
|
||||
total = len(services)
|
||||
ready = 0
|
||||
for s in services:
|
||||
name = s.get("name")
|
||||
container = next((d["container"] for d in service_defs if d["name"] == name), None)
|
||||
present = container in running_names if container else False
|
||||
healthy_s = (s.get("health") == "healthy")
|
||||
running_s = (s.get("status") == "running")
|
||||
# considérer prêt si present ET (healthy ou running)
|
||||
if present and (healthy_s or running_s):
|
||||
ready += 1
|
||||
percent = int(ready * 100 / total) if total else 0
|
||||
|
||||
# Integrations: Mailchimp (Mandrill ping) and Stripe (counts)
|
||||
mailchimp_test = {"provider": "Mailchimp", "status": "missing"}
|
||||
if env_map.get("MAILCHIMP_API_KEY"):
|
||||
try:
|
||||
code_mc, out_mc, _ = run_cmd([
|
||||
"curl", "-fsS", "-X", "POST",
|
||||
"https://mandrillapp.com/api/1.0/users/ping.json",
|
||||
"-H", "Content-Type: application/json",
|
||||
"-d", json.dumps({"key": env_map.get("MAILCHIMP_API_KEY")})
|
||||
], timeout_seconds=6)
|
||||
if code_mc == 0 and (out_mc.strip() == '"PONG"' or 'PONG' in out_mc):
|
||||
mailchimp_test = {"provider": "Mailchimp", "status": "ok"}
|
||||
else:
|
||||
mailchimp_test = {"provider": "Mailchimp", "status": "error"}
|
||||
except Exception:
|
||||
mailchimp_test = {"provider": "Mailchimp", "status": "error"}
|
||||
|
||||
# Stripe: lister prices et agréger en balayant les subscriptions (sans filtre price)
|
||||
stripe_by_offer = {"CREATORS": 0, "STARTER": 0, "STANDARD": 0, "UNLIMITED": 0, "TOTAL": 0}
|
||||
stripe_prices_map = {}
|
||||
stripe_price_counts = {}
|
||||
if env_map.get("STRIPE_SECRET_KEY"):
|
||||
try:
|
||||
auth_h = f"Authorization: Bearer {env_map.get('STRIPE_SECRET_KEY')}"
|
||||
# 1) Lister les prices actifs (<=100) pour mapper price.id -> nickname
|
||||
code_p, out_p, _ = run_cmd([
|
||||
"curl", "-fsS", "https://api.stripe.com/v1/prices?limit=100&active=true",
|
||||
"-H", auth_h
|
||||
], timeout_seconds=6)
|
||||
if code_p == 0 and out_p:
|
||||
prices = (json.loads(out_p) or {}).get("data") or []
|
||||
for pr in prices:
|
||||
pid = pr.get('id')
|
||||
stripe_prices_map[pid] = pr.get('nickname') or ''
|
||||
stripe_price_counts[pid] = 0
|
||||
# Déterminer les familles par ID connus (si présents dans l'env) sinon par nickname
|
||||
creators_ids = set(filter(None, [env_map.get("STRIPE_CREATORS_PRICE_ID")]))
|
||||
standard_ids = set(filter(None, [
|
||||
env_map.get("STRIPE_STANDARD_SUBSCRIPTION_PRICE_ID"),
|
||||
env_map.get("STRIPE_STANDARD_ANNUAL_SUBSCRIPTION_PRICE_ID"),
|
||||
env_map.get("STRIPE_STANDARD_MONTHLY_YEAR_PRICE_ID"),
|
||||
env_map.get("STRIPE_STANDARD_MONTHLY_MONTH_PRICE_ID"),
|
||||
]))
|
||||
starter_ids = set(filter(None, [
|
||||
env_map.get("STRIPE_STARTER_ANNUAL_PRICE_ID"),
|
||||
env_map.get("STRIPE_STARTER_MONTHLY_YEAR_PRICE_ID"),
|
||||
env_map.get("STRIPE_STARTER_MONTHLY_MONTH_PRICE_ID"),
|
||||
]))
|
||||
unlimited_ids = set(filter(None, [
|
||||
env_map.get("STRIPE_UNLIMITED_SUBSCRIPTION_PRICE_ID"),
|
||||
env_map.get("STRIPE_UNLIMITED_ANNUAL_SUBSCRIPTION_PRICE_ID"),
|
||||
]))
|
||||
|
||||
def family_for(pid: str, nickname: str) -> str:
|
||||
if pid in creators_ids or (nickname and 'createur' in nickname.lower()):
|
||||
return 'CREATORS'
|
||||
if pid in starter_ids or (nickname and 'starter' in nickname.lower()):
|
||||
return 'STARTER'
|
||||
if pid in standard_ids or (nickname and 'standard' in nickname.lower()):
|
||||
return 'STANDARD'
|
||||
if pid in unlimited_ids or (nickname and 'unlimit' in nickname.lower()):
|
||||
return 'UNLIMITED'
|
||||
return ''
|
||||
# 2) Lister subscriptions (active + trialing) et agréger par famille du price
|
||||
starting_after = None
|
||||
pages = 0
|
||||
while pages < 3: # limite de pagination pour éviter les boucles longues
|
||||
url = "https://api.stripe.com/v1/subscriptions?limit=100&status=active&status=trialing"
|
||||
if starting_after:
|
||||
url += f"&starting_after={starting_after}"
|
||||
code_s, out_s, _ = run_cmd(["curl", "-fsS", url, "-H", auth_h], timeout_seconds=8)
|
||||
if code_s != 0 or not out_s:
|
||||
break
|
||||
d = json.loads(out_s) or {}
|
||||
subs = d.get("data") or []
|
||||
for sub in subs:
|
||||
items = ((sub.get("items") or {}).get("data") or [])
|
||||
for it in items:
|
||||
pid = ((it.get("price") or {}).get("id"))
|
||||
nick = stripe_prices_map.get(pid, '')
|
||||
fam = family_for(pid or '', nick)
|
||||
if not fam:
|
||||
continue
|
||||
stripe_by_offer[fam] = stripe_by_offer.get(fam, 0) + 1
|
||||
stripe_by_offer["TOTAL"] += 1
|
||||
if pid:
|
||||
stripe_price_counts[pid] = stripe_price_counts.get(pid, 0) + 1
|
||||
if d.get("has_more") and subs:
|
||||
starting_after = subs[-1].get('id')
|
||||
pages += 1
|
||||
continue
|
||||
break
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# OVH: afficher configuré/non configuré (appel signé non implémenté ici)
|
||||
ovh_test = {"provider": "OVH", "status": "missing"}
|
||||
if externals_cfg.get("OVH"):
|
||||
ovh_test = ovh_safe_check(env_map.get("OVH_APPLICATION_KEY", ""), env_map.get("OVH_APPLICATION_SECRET", ""), env_map.get("OVH_CONSUMER_KEY", ""), env_map.get("OVH_SERVICE_NAME", ""))
|
||||
|
||||
# Wallet balances via bitcoin-cli (signet)
|
||||
def btc_wallet_balance(wallet: str) -> dict:
|
||||
try:
|
||||
if wallet:
|
||||
btc_ensure_loaded(wallet)
|
||||
code_b, out_b, _ = run_cmd(["docker", "exec", "bitcoin-signet", "bitcoin-cli", "-signet", f"-rpcwallet={wallet}", "getbalances"], timeout_seconds=6)
|
||||
if code_b == 0 and out_b:
|
||||
b = json.loads(out_b)
|
||||
conf = ((b.get("mine") or {}).get("trusted") or 0) if isinstance(b.get("mine"), dict) else 0
|
||||
unconf = ((b.get("mine") or {}).get("untrusted_pending") or 0) if isinstance(b.get("mine"), dict) else 0
|
||||
imm = ((b.get("mine") or {}).get("immature") or 0) if isinstance(b.get("mine"), dict) else 0
|
||||
# Convert BTC -> sats
|
||||
to_sats = lambda v: int(float(v) * 100_000_000)
|
||||
return {"confirmed_sat": to_sats(conf), "unconfirmed_sat": to_sats(unconf), "immature_sat": to_sats(imm)}
|
||||
except Exception:
|
||||
pass
|
||||
return {"confirmed_sat": 0, "unconfirmed_sat": 0, "immature_sat": 0}
|
||||
|
||||
wallets = {}
|
||||
# Detect known wallets from service envs
|
||||
relay_env = get_container_env("sdk_relay")
|
||||
# Try env, then file conf
|
||||
relay_wallet = relay_env.get("WALLET_NAME") or relay_env.get("SDK_RELAY_WALLET_NAME")
|
||||
if not relay_wallet:
|
||||
relay_conf = get_file_in_container("sdk_relay", "/app/.conf")
|
||||
relay_wallet = parse_wallet_name_from_conf(relay_conf)
|
||||
if relay_wallet:
|
||||
wallets["SDK Relay"] = btc_wallet_balance(relay_wallet)
|
||||
# Miner wallet: try default 'miner' else listwallets
|
||||
miner_wallet = "miner"
|
||||
wallets["Miner Signet"] = btc_wallet_balance(miner_wallet)
|
||||
relay_bootstrap_wallet = env_map.get("RELAY_BOOTSTRAP_WALLET_NAME")
|
||||
if relay_bootstrap_wallet:
|
||||
wallets["Relay Bootstrap"] = btc_wallet_balance(relay_bootstrap_wallet)
|
||||
|
||||
# Enumerate all bitcoin wallets (load if necessary) and balances
|
||||
try:
|
||||
bitcoin_wallets = {}
|
||||
loaded = set(btc_list_wallets())
|
||||
all_in_dir = btc_list_walletdir()
|
||||
for wname in (all_in_dir or []):
|
||||
if wname not in loaded:
|
||||
btc_ensure_loaded(wname)
|
||||
loaded.add(wname)
|
||||
for wname in loaded:
|
||||
bitcoin_wallets[wname] = btc_wallet_balance(wname)
|
||||
wallets["Bitcoin Signet Wallets"] = bitcoin_wallets
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
response = {
|
||||
"timestamp": datetime.now().isoformat(),
|
||||
"services": services,
|
||||
"external": external,
|
||||
"runner": runner,
|
||||
"integrations_configured": externals_cfg,
|
||||
"deployment": {"total": total, "healthy": healthy, "percent": percent},
|
||||
"integrations_test": {
|
||||
"ovh": ovh_test,
|
||||
"mailchimp": mailchimp_test,
|
||||
"stripe_subscriptions_by_offer": stripe_by_offer,
|
||||
"stripe_prices": {pid: {"nickname": stripe_prices_map.get(pid, ""), "count": cnt} for pid, cnt in stripe_price_counts.items()},
|
||||
},
|
||||
"wallets": wallets,
|
||||
}
|
||||
|
||||
self.wfile.write(json.dumps(response, indent=2).encode())
|
||||
else:
|
||||
self.send_response(404)
|
||||
self.end_headers()
|
||||
|
||||
def do_OPTIONS(self):
|
||||
self.send_response(200)
|
||||
self.send_header('Access-Control-Allow-Origin', '*')
|
||||
self.send_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
|
||||
self.send_header('Access-Control-Allow-Headers', 'Content-Type, Authorization')
|
||||
self.end_headers()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
server = HTTPServer(('0.0.0.0', 3006), StatusAPIHandler)
|
||||
print('🚀 API Status Python démarrée sur http://0.0.0.0:3006')
|
||||
server.serve_forever()
|
@ -1,431 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="fr">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>LeCoffre Node - Status</title>
|
||||
<style>
|
||||
* { margin: 0; padding: 0; box-sizing: border-box; }
|
||||
body {
|
||||
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
|
||||
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
|
||||
color: #333;
|
||||
min-height: 100vh;
|
||||
}
|
||||
.container {
|
||||
max-width: 1200px;
|
||||
margin: 0 auto;
|
||||
padding: 20px;
|
||||
}
|
||||
.header {
|
||||
text-align: center;
|
||||
color: white;
|
||||
margin-bottom: 30px;
|
||||
}
|
||||
.header h1 {
|
||||
font-size: 2.5em;
|
||||
margin-bottom: 10px;
|
||||
text-shadow: 2px 2px 4px rgba(0,0,0,0.3);
|
||||
}
|
||||
.header p {
|
||||
font-size: 1.2em;
|
||||
opacity: 0.9;
|
||||
}
|
||||
.services-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
|
||||
gap: 20px;
|
||||
margin-bottom: 30px;
|
||||
}
|
||||
.service-card {
|
||||
background: white;
|
||||
border-radius: 10px;
|
||||
padding: 20px;
|
||||
box-shadow: 0 4px 6px rgba(0,0,0,0.1);
|
||||
transition: transform 0.3s ease;
|
||||
}
|
||||
.service-card:hover {
|
||||
transform: translateY(-5px);
|
||||
}
|
||||
.service-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
.status-indicator {
|
||||
width: 12px;
|
||||
height: 12px;
|
||||
border-radius: 50%;
|
||||
margin-right: 10px;
|
||||
}
|
||||
.status-up { background-color: #4CAF50; }
|
||||
.status-down { background-color: #f44336; }
|
||||
.status-warning { background-color: #ff9800; }
|
||||
.service-title {
|
||||
font-size: 1.3em;
|
||||
font-weight: bold;
|
||||
color: #2c3e50;
|
||||
}
|
||||
.service-description {
|
||||
color: #7f8c8d;
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
.service-url {
|
||||
color: #3498db;
|
||||
text-decoration: none;
|
||||
font-weight: 500;
|
||||
}
|
||||
.service-url:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
.footer {
|
||||
text-align: center;
|
||||
color: white;
|
||||
margin-top: 30px;
|
||||
opacity: 0.8;
|
||||
}
|
||||
.refresh-btn {
|
||||
background: #3498db;
|
||||
color: white;
|
||||
border: none;
|
||||
padding: 10px 20px;
|
||||
border-radius: 5px;
|
||||
cursor: pointer;
|
||||
margin: 20px 0;
|
||||
font-size: 1em;
|
||||
}
|
||||
.refresh-btn:hover {
|
||||
background: #2980b9;
|
||||
}
|
||||
.timestamp {
|
||||
color: #95a5a6;
|
||||
font-size: 0.9em;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<div class="header">
|
||||
<h1>🚀 LeCoffre Node</h1>
|
||||
<p>Architecture Autonome - Tableau de Bord des Services</p>
|
||||
</div>
|
||||
|
||||
<div id="summary" style="display:none; background:#eef6ff; color:#0b4f82; border:1px solid #cfe6ff; padding:10px 14px; border-radius:6px; margin: 8px 0;">
|
||||
<!-- résumé dynamique -->
|
||||
</div>
|
||||
|
||||
<div id="deploy-banner" style="display:none; background:#fff3cd; color:#856404; border:1px solid #ffeeba; padding:10px 14px; border-radius:6px; margin-bottom:16px;">
|
||||
Déploiement en cours… <span id="deploy-percent">0</span>%
|
||||
</div>
|
||||
|
||||
<div style="text-align: center;">
|
||||
<button class="refresh-btn" id="refresh-btn">🔄 Actualiser</button>
|
||||
<div class="timestamp" id="timestamp"></div>
|
||||
</div>
|
||||
|
||||
<div class="services-grid" id="services-grid">
|
||||
<!-- Les services seront chargés dynamiquement -->
|
||||
</div>
|
||||
|
||||
<div class="services-grid" id="external-grid">
|
||||
<!-- Les services externes seront chargés dynamiquement -->
|
||||
</div>
|
||||
|
||||
<div class="services-grid" id="runner-grid">
|
||||
<!-- Informations du runner -->
|
||||
</div>
|
||||
|
||||
<div class="services-grid" id="integrations-grid">
|
||||
<!-- Intégrations (OVH/Stripe/Mailchimp) -->
|
||||
</div>
|
||||
|
||||
<div class="services-grid" id="indexeddb-grid">
|
||||
<!-- IndexedDB 4NK (wallets) -->
|
||||
</div>
|
||||
|
||||
<div class="footer">
|
||||
<p>LeCoffre Node - Architecture Autonome Complète</p>
|
||||
<p>Monitoring et logs disponibles via Grafana</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
function formatBytes(bytes) {
|
||||
if (!bytes || bytes <= 0) return '0 B';
|
||||
const units = ['B','KB','MB','GB','TB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(1024));
|
||||
return `${(bytes / Math.pow(1024, i)).toFixed(2)} ${units[i]}`;
|
||||
}
|
||||
|
||||
function createServiceCard(service) {
|
||||
const statusClass = service.status === 'running' || service.health === 'healthy' ? 'status-up' : 'status-down';
|
||||
const statusText = service.status === 'running' ? 'En ligne' : (service.status || 'Inconnu');
|
||||
const description = `${service.protocol || ''}${service.port ? ` · Port ${service.port}` : ''}`.trim();
|
||||
|
||||
return `
|
||||
<div class="service-card">
|
||||
<div class="service-header">
|
||||
<div class="status-indicator ${statusClass}"></div>
|
||||
<div class="service-title">${service.name}</div>
|
||||
</div>
|
||||
<div class="service-description">${description}</div>
|
||||
<div style="margin-top: 10px; font-size: 0.9em; color: #7f8c8d;">
|
||||
Statut: ${statusText}${service.uptime ? ` · Uptime: ${service.uptime}` : ''}
|
||||
</div>
|
||||
${service.health_text ? `<div style="margin-top: 6px; font-size: 0.9em; color: #7f8c8d;">Détails: ${service.health_text}</div>` : ''}
|
||||
${service.name === 'SDK Storage' && (service.data_size_bytes !== undefined) ? `<div style="margin-top: 6px; font-size: 0.9em; color: #7f8c8d;">Taille: ${formatBytes(service.data_size_bytes)}</div>` : ''}
|
||||
${service.image ? `<div style="margin-top: 6px; font-size: 0.85em; color: #95a5a6;">Image: ${service.image}</div>` : ''}
|
||||
</div>
|
||||
`;
|
||||
}
|
||||
|
||||
function createExternalCard(ext) {
|
||||
const statusClass = ext.status === 'running' ? 'status-up' : 'status-warning';
|
||||
const url = ext.url || '';
|
||||
return `
|
||||
<div class="service-card">
|
||||
<div class="service-header">
|
||||
<div class="status-indicator ${statusClass}"></div>
|
||||
<div class="service-title">${ext.name}</div>
|
||||
</div>
|
||||
<div class="service-description">${ext.protocol || ''}${url ? ` · ${url}` : ''}</div>
|
||||
<div style="margin-top: 10px; font-size: 0.9em; color: #7f8c8d;">
|
||||
Statut: ${ext.status}${ext.response_time ? ` · ${ext.response_time}` : ''}
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
}
|
||||
|
||||
function createRunnerCard(runner) {
|
||||
const statusClass = runner.status === 'running' ? 'status-up' : 'status-warning';
|
||||
return `
|
||||
<div class="service-card">
|
||||
<div class="service-header">
|
||||
<div class="status-indicator ${statusClass}"></div>
|
||||
<div class="service-title">Dernière action du runner</div>
|
||||
</div>
|
||||
<div class="service-description">Watchtower</div>
|
||||
<div style="margin-top: 10px; font-size: 0.9em; color: #7f8c8d;">
|
||||
Statut: ${runner.status || 'inconnu'}${runner.uptime ? ` · Uptime: ${runner.uptime}` : ''}
|
||||
</div>
|
||||
${runner.started_at ? `<div style="margin-top: 6px; font-size: 0.85em; color: #95a5a6;">Démarré: ${new Date(runner.started_at).toLocaleString('fr-FR')}</div>` : ''}
|
||||
</div>
|
||||
`;
|
||||
}
|
||||
|
||||
function createIntegrationsCards(flags) {
|
||||
const entries = [
|
||||
{ name: 'OVH SMS', key: 'OVH' },
|
||||
{ name: 'Stripe Paiement', key: 'Stripe' },
|
||||
{ name: 'Mailchimp Emails', key: 'Mailchimp' },
|
||||
];
|
||||
return entries.map(e => {
|
||||
const ok = !!flags?.[e.key];
|
||||
const statusClass = ok ? 'status-up' : 'status-warning';
|
||||
return `
|
||||
<div class="service-card">
|
||||
<div class="service-header">
|
||||
<div class="status-indicator ${statusClass}"></div>
|
||||
<div class="service-title">${e.name}</div>
|
||||
</div>
|
||||
<div class="service-description">${ok ? 'Clés configurées' : 'Non configuré'}</div>
|
||||
${e.key === 'Mailchimp' && window.__integrations?.mailchimp ? `<div style="margin-top:6px;color:#7f8c8d;">Statut: ${window.__integrations.mailchimp.status}</div>` : ''}
|
||||
${e.key === 'Stripe' && window.__integrations?.stripe ? `<div style="margin-top:6px;color:#7f8c8d;">Abonnements: STD ${window.__integrations.stripe.STANDARD||0} · UNL ${window.__integrations.stripe.UNLIMITED||0}</div>` : ''}
|
||||
${e.key === 'OVH' && window.__integrations?.ovh ? `<div style="margin-top:6px;color:#7f8c8d;">${window.__integrations.ovh.status}</div>` : ''}
|
||||
</div>
|
||||
`;
|
||||
});
|
||||
}
|
||||
|
||||
async function refreshStatus() {
|
||||
const grid = document.getElementById('services-grid');
|
||||
const extGrid = document.getElementById('external-grid');
|
||||
const runnerGrid = document.getElementById('runner-grid');
|
||||
const integrationsGrid = document.getElementById('integrations-grid');
|
||||
const idbGrid = document.getElementById('indexeddb-grid');
|
||||
grid.innerHTML = '<div style="text-align: center; color: white;">Chargement...</div>';
|
||||
extGrid.innerHTML = '';
|
||||
runnerGrid.innerHTML = '';
|
||||
integrationsGrid.innerHTML = '';
|
||||
idbGrid.innerHTML = '';
|
||||
|
||||
try {
|
||||
const res = await fetch('/status/api');
|
||||
if (!res.ok) throw new Error('API status indisponible');
|
||||
const data = await res.json();
|
||||
// Summary banner
|
||||
try {
|
||||
const svcCount = (data.services || []).length;
|
||||
const it = data.integrations_test || {};
|
||||
const iMail = it.mailchimp?.status || '—';
|
||||
const iStripeStd = (it.stripe_subscriptions_by_offer?.STANDARD) || 0;
|
||||
const iStripeUnl = (it.stripe_subscriptions_by_offer?.UNLIMITED) || 0;
|
||||
const iOvh = it.ovh?.status || '—';
|
||||
// wallets count (approx)
|
||||
const wallets = data.wallets || {};
|
||||
let walletCount = 0;
|
||||
Object.values(wallets).forEach((grp) => {
|
||||
if (grp && typeof grp === 'object' && !Array.isArray(grp)) {
|
||||
if ('confirmed_sat' in grp || 'immature_sat' in grp) {
|
||||
walletCount += 1;
|
||||
} else {
|
||||
walletCount += Object.keys(grp).length;
|
||||
}
|
||||
}
|
||||
});
|
||||
const summary = document.getElementById('summary');
|
||||
summary.innerHTML = `Services: ${svcCount} · Wallets: ${walletCount} · Mailchimp: ${iMail} · Stripe: STD ${iStripeStd} / UNL ${iStripeUnl} · OVH: ${iOvh}`;
|
||||
summary.style.display = 'block';
|
||||
} catch(e) {
|
||||
// ignore summary errors
|
||||
}
|
||||
|
||||
const cards = (data.services || []).map(createServiceCard);
|
||||
grid.innerHTML = cards.join('');
|
||||
|
||||
const extCards = (data.external || []).map(createExternalCard);
|
||||
extGrid.innerHTML = extCards.join('');
|
||||
|
||||
if (data.runner) {
|
||||
runnerGrid.innerHTML = createRunnerCard(data.runner);
|
||||
}
|
||||
|
||||
// Intégrations (config + résultats API)
|
||||
if (data.integrations_configured) {
|
||||
window.__integrations = {
|
||||
mailchimp: data.integrations_test?.mailchimp,
|
||||
stripe: data.integrations_test?.stripe_subscriptions_by_offer,
|
||||
stripe_prices: data.integrations_test?.stripe_prices || {},
|
||||
ovh: data.integrations_test?.ovh,
|
||||
};
|
||||
const integCards = createIntegrationsCards(data.integrations_configured);
|
||||
integrationsGrid.innerHTML = integCards.join('');
|
||||
}
|
||||
|
||||
// IndexedDB scan (same-origin, best-effort)
|
||||
try {
|
||||
const idbInfo = await scanIndexedDbWallets();
|
||||
const cards = renderIndexedDbCards(idbInfo);
|
||||
idbGrid.innerHTML = cards.join('');
|
||||
} catch (e) {
|
||||
idbGrid.innerHTML = '<div class="service-card"><div class="service-header"><div class="status-indicator status-warning"></div><div class="service-title">IndexedDB 4NK</div></div><div class="service-description">Impossible de lire IndexedDB (permissions navigateur ?)</div></div>';
|
||||
}
|
||||
|
||||
// Deployment banner
|
||||
if (data.deployment && typeof data.deployment.percent === 'number') {
|
||||
const banner = document.getElementById('deploy-banner');
|
||||
const dp = document.getElementById('deploy-percent');
|
||||
dp.textContent = data.deployment.percent;
|
||||
banner.style.display = data.deployment.percent < 100 ? 'block' : 'none';
|
||||
}
|
||||
|
||||
document.getElementById('timestamp').textContent =
|
||||
`Dernière mise à jour: ${new Date().toLocaleString('fr-FR')}`;
|
||||
} catch (e) {
|
||||
grid.innerHTML = '<div style="text-align: center; color: white;">Impossible de joindre /status/api</div>';
|
||||
document.getElementById('timestamp').textContent =
|
||||
`Dernière tentative: ${new Date().toLocaleString('fr-FR')}`;
|
||||
}
|
||||
}
|
||||
|
||||
// Wire button (avoid inline handler for CSP)
|
||||
document.getElementById('refresh-btn').addEventListener('click', () => {
|
||||
refreshStatus();
|
||||
});
|
||||
refreshStatus();
|
||||
|
||||
// ---- IndexedDB helpers ----
|
||||
function openDb(name, version) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const req = indexedDB.open(name, version);
|
||||
req.onsuccess = () => resolve(req.result);
|
||||
req.onerror = () => reject(req.error || new Error('IndexedDB open failed'));
|
||||
req.onupgradeneeded = () => {
|
||||
// do nothing; we are read-only here
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
function readAllFromStore(db, storeName) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const tx = db.transaction(storeName, 'readonly');
|
||||
const store = tx.objectStore(storeName);
|
||||
const out = [];
|
||||
const cursorReq = store.openCursor();
|
||||
cursorReq.onsuccess = (e) => {
|
||||
const cursor = e.target.result;
|
||||
if (cursor) {
|
||||
out.push({ key: cursor.key, value: cursor.value });
|
||||
cursor.continue();
|
||||
} else {
|
||||
resolve(out);
|
||||
}
|
||||
};
|
||||
cursorReq.onerror = () => reject(cursorReq.error);
|
||||
});
|
||||
}
|
||||
|
||||
async function scanIndexedDbWallets() {
|
||||
if (!('indexedDB' in window)) throw new Error('No IndexedDB');
|
||||
const db = await openDb('4nk', 1);
|
||||
const wallets = await readAllFromStore(db, 'wallet');
|
||||
// Approx size: JSON length
|
||||
let totalBytes = 0;
|
||||
const details = wallets.map(w => {
|
||||
const val = w.value || {};
|
||||
const json = JSON.stringify(val);
|
||||
totalBytes += json ? json.length : 0;
|
||||
const paired = (val.device?.paired_member?.spaddresses) || [];
|
||||
const lastscan = val.device?.sp_wallet?.lastscan || '';
|
||||
const outputs = (val.device?.sp_wallet?.outputs) || [];
|
||||
// Sum outputs amounts if present (assume property 'amount' in sats or tokens)
|
||||
let sumTokens = 0;
|
||||
try {
|
||||
for (const o of outputs) {
|
||||
const amt = Number(o?.amount || 0);
|
||||
if (!Number.isNaN(amt)) sumTokens += amt;
|
||||
}
|
||||
} catch {}
|
||||
|
||||
return {
|
||||
key: String(w.key),
|
||||
paired,
|
||||
lastscan,
|
||||
outputsCount: outputs.length || 0,
|
||||
outputsTokens: sumTokens,
|
||||
};
|
||||
});
|
||||
return { totalBytes, count: wallets.length, details };
|
||||
}
|
||||
|
||||
function renderIndexedDbCards(info) {
|
||||
const cards = [];
|
||||
const size = formatBytes(info.totalBytes || 0);
|
||||
cards.push(`
|
||||
<div class="service-card">
|
||||
<div class="service-header">
|
||||
<div class="status-indicator status-up"></div>
|
||||
<div class="service-title">IndexedDB 4NK</div>
|
||||
</div>
|
||||
<div class="service-description">Taille totale: ${size} · Wallets: ${info.count}</div>
|
||||
</div>
|
||||
`);
|
||||
for (const w of info.details || []) {
|
||||
cards.push(`
|
||||
<div class="service-card">
|
||||
<div class="service-header">
|
||||
<div class="status-indicator status-up"></div>
|
||||
<div class="service-title">Wallet ${w.key}</div>
|
||||
</div>
|
||||
<div class="service-description">lastscan: ${w.lastscan || '—'}</div>
|
||||
<div style="margin-top:6px;color:#7f8c8d;">paired spaddresses:</div>
|
||||
<div style="font-size:0.85em;color:#95a5a6;">${(w.paired||[]).map(a=>`<div>${a}</div>`).join('') || '—'}</div>
|
||||
<div style="margin-top:6px;color:#7f8c8d;">outputs: ${w.outputsCount} · jetons: ${w.outputsTokens}</div>
|
||||
</div>
|
||||
`);
|
||||
}
|
||||
return cards;
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
1
web/status/node_modules/.bin/mime
generated
vendored
1
web/status/node_modules/.bin/mime
generated
vendored
@ -1 +0,0 @@
|
||||
../mime/cli.js
|
1
web/status/node_modules/.bin/nodemon
generated
vendored
1
web/status/node_modules/.bin/nodemon
generated
vendored
@ -1 +0,0 @@
|
||||
../nodemon/bin/nodemon.js
|
1
web/status/node_modules/.bin/nodetouch
generated
vendored
1
web/status/node_modules/.bin/nodetouch
generated
vendored
@ -1 +0,0 @@
|
||||
../touch/bin/nodetouch.js
|
1
web/status/node_modules/.bin/semver
generated
vendored
1
web/status/node_modules/.bin/semver
generated
vendored
@ -1 +0,0 @@
|
||||
../semver/bin/semver.js
|
1199
web/status/node_modules/.package-lock.json
generated
vendored
1199
web/status/node_modules/.package-lock.json
generated
vendored
File diff suppressed because it is too large
Load Diff
243
web/status/node_modules/accepts/HISTORY.md
generated
vendored
243
web/status/node_modules/accepts/HISTORY.md
generated
vendored
@ -1,243 +0,0 @@
|
||||
1.3.8 / 2022-02-02
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.34
|
||||
- deps: mime-db@~1.51.0
|
||||
* deps: negotiator@0.6.3
|
||||
|
||||
1.3.7 / 2019-04-29
|
||||
==================
|
||||
|
||||
* deps: negotiator@0.6.2
|
||||
- Fix sorting charset, encoding, and language with extra parameters
|
||||
|
||||
1.3.6 / 2019-04-28
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.24
|
||||
- deps: mime-db@~1.40.0
|
||||
|
||||
1.3.5 / 2018-02-28
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.18
|
||||
- deps: mime-db@~1.33.0
|
||||
|
||||
1.3.4 / 2017-08-22
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.16
|
||||
- deps: mime-db@~1.29.0
|
||||
|
||||
1.3.3 / 2016-05-02
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.11
|
||||
- deps: mime-db@~1.23.0
|
||||
* deps: negotiator@0.6.1
|
||||
- perf: improve `Accept` parsing speed
|
||||
- perf: improve `Accept-Charset` parsing speed
|
||||
- perf: improve `Accept-Encoding` parsing speed
|
||||
- perf: improve `Accept-Language` parsing speed
|
||||
|
||||
1.3.2 / 2016-03-08
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.10
|
||||
- Fix extension of `application/dash+xml`
|
||||
- Update primary extension for `audio/mp4`
|
||||
- deps: mime-db@~1.22.0
|
||||
|
||||
1.3.1 / 2016-01-19
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.9
|
||||
- deps: mime-db@~1.21.0
|
||||
|
||||
1.3.0 / 2015-09-29
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.7
|
||||
- deps: mime-db@~1.19.0
|
||||
* deps: negotiator@0.6.0
|
||||
- Fix including type extensions in parameters in `Accept` parsing
|
||||
- Fix parsing `Accept` parameters with quoted equals
|
||||
- Fix parsing `Accept` parameters with quoted semicolons
|
||||
- Lazy-load modules from main entry point
|
||||
- perf: delay type concatenation until needed
|
||||
- perf: enable strict mode
|
||||
- perf: hoist regular expressions
|
||||
- perf: remove closures getting spec properties
|
||||
- perf: remove a closure from media type parsing
|
||||
- perf: remove property delete from media type parsing
|
||||
|
||||
1.2.13 / 2015-09-06
|
||||
===================
|
||||
|
||||
* deps: mime-types@~2.1.6
|
||||
- deps: mime-db@~1.18.0
|
||||
|
||||
1.2.12 / 2015-07-30
|
||||
===================
|
||||
|
||||
* deps: mime-types@~2.1.4
|
||||
- deps: mime-db@~1.16.0
|
||||
|
||||
1.2.11 / 2015-07-16
|
||||
===================
|
||||
|
||||
* deps: mime-types@~2.1.3
|
||||
- deps: mime-db@~1.15.0
|
||||
|
||||
1.2.10 / 2015-07-01
|
||||
===================
|
||||
|
||||
* deps: mime-types@~2.1.2
|
||||
- deps: mime-db@~1.14.0
|
||||
|
||||
1.2.9 / 2015-06-08
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.1
|
||||
- perf: fix deopt during mapping
|
||||
|
||||
1.2.8 / 2015-06-07
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.1.0
|
||||
- deps: mime-db@~1.13.0
|
||||
* perf: avoid argument reassignment & argument slice
|
||||
* perf: avoid negotiator recursive construction
|
||||
* perf: enable strict mode
|
||||
* perf: remove unnecessary bitwise operator
|
||||
|
||||
1.2.7 / 2015-05-10
|
||||
==================
|
||||
|
||||
* deps: negotiator@0.5.3
|
||||
- Fix media type parameter matching to be case-insensitive
|
||||
|
||||
1.2.6 / 2015-05-07
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.11
|
||||
- deps: mime-db@~1.9.1
|
||||
* deps: negotiator@0.5.2
|
||||
- Fix comparing media types with quoted values
|
||||
- Fix splitting media types with quoted commas
|
||||
|
||||
1.2.5 / 2015-03-13
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.10
|
||||
- deps: mime-db@~1.8.0
|
||||
|
||||
1.2.4 / 2015-02-14
|
||||
==================
|
||||
|
||||
* Support Node.js 0.6
|
||||
* deps: mime-types@~2.0.9
|
||||
- deps: mime-db@~1.7.0
|
||||
* deps: negotiator@0.5.1
|
||||
- Fix preference sorting to be stable for long acceptable lists
|
||||
|
||||
1.2.3 / 2015-01-31
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.8
|
||||
- deps: mime-db@~1.6.0
|
||||
|
||||
1.2.2 / 2014-12-30
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.7
|
||||
- deps: mime-db@~1.5.0
|
||||
|
||||
1.2.1 / 2014-12-30
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.5
|
||||
- deps: mime-db@~1.3.1
|
||||
|
||||
1.2.0 / 2014-12-19
|
||||
==================
|
||||
|
||||
* deps: negotiator@0.5.0
|
||||
- Fix list return order when large accepted list
|
||||
- Fix missing identity encoding when q=0 exists
|
||||
- Remove dynamic building of Negotiator class
|
||||
|
||||
1.1.4 / 2014-12-10
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.4
|
||||
- deps: mime-db@~1.3.0
|
||||
|
||||
1.1.3 / 2014-11-09
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.3
|
||||
- deps: mime-db@~1.2.0
|
||||
|
||||
1.1.2 / 2014-10-14
|
||||
==================
|
||||
|
||||
* deps: negotiator@0.4.9
|
||||
- Fix error when media type has invalid parameter
|
||||
|
||||
1.1.1 / 2014-09-28
|
||||
==================
|
||||
|
||||
* deps: mime-types@~2.0.2
|
||||
- deps: mime-db@~1.1.0
|
||||
* deps: negotiator@0.4.8
|
||||
- Fix all negotiations to be case-insensitive
|
||||
- Stable sort preferences of same quality according to client order
|
||||
|
||||
1.1.0 / 2014-09-02
|
||||
==================
|
||||
|
||||
* update `mime-types`
|
||||
|
||||
1.0.7 / 2014-07-04
|
||||
==================
|
||||
|
||||
* Fix wrong type returned from `type` when match after unknown extension
|
||||
|
||||
1.0.6 / 2014-06-24
|
||||
==================
|
||||
|
||||
* deps: negotiator@0.4.7
|
||||
|
||||
1.0.5 / 2014-06-20
|
||||
==================
|
||||
|
||||
* fix crash when unknown extension given
|
||||
|
||||
1.0.4 / 2014-06-19
|
||||
==================
|
||||
|
||||
* use `mime-types`
|
||||
|
||||
1.0.3 / 2014-06-11
|
||||
==================
|
||||
|
||||
* deps: negotiator@0.4.6
|
||||
- Order by specificity when quality is the same
|
||||
|
||||
1.0.2 / 2014-05-29
|
||||
==================
|
||||
|
||||
* Fix interpretation when header not in request
|
||||
* deps: pin negotiator@0.4.5
|
||||
|
||||
1.0.1 / 2014-01-18
|
||||
==================
|
||||
|
||||
* Identity encoding isn't always acceptable
|
||||
* deps: negotiator@~0.4.0
|
||||
|
||||
1.0.0 / 2013-12-27
|
||||
==================
|
||||
|
||||
* Genesis
|
23
web/status/node_modules/accepts/LICENSE
generated
vendored
23
web/status/node_modules/accepts/LICENSE
generated
vendored
@ -1,23 +0,0 @@
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com>
|
||||
Copyright (c) 2015 Douglas Christopher Wilson <doug@somethingdoug.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
'Software'), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
140
web/status/node_modules/accepts/README.md
generated
vendored
140
web/status/node_modules/accepts/README.md
generated
vendored
@ -1,140 +0,0 @@
|
||||
# accepts
|
||||
|
||||
[![NPM Version][npm-version-image]][npm-url]
|
||||
[![NPM Downloads][npm-downloads-image]][npm-url]
|
||||
[![Node.js Version][node-version-image]][node-version-url]
|
||||
[![Build Status][github-actions-ci-image]][github-actions-ci-url]
|
||||
[![Test Coverage][coveralls-image]][coveralls-url]
|
||||
|
||||
Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator).
|
||||
Extracted from [koa](https://www.npmjs.com/package/koa) for general use.
|
||||
|
||||
In addition to negotiator, it allows:
|
||||
|
||||
- Allows types as an array or arguments list, ie `(['text/html', 'application/json'])`
|
||||
as well as `('text/html', 'application/json')`.
|
||||
- Allows type shorthands such as `json`.
|
||||
- Returns `false` when no types match
|
||||
- Treats non-existent headers as `*`
|
||||
|
||||
## Installation
|
||||
|
||||
This is a [Node.js](https://nodejs.org/en/) module available through the
|
||||
[npm registry](https://www.npmjs.com/). Installation is done using the
|
||||
[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally):
|
||||
|
||||
```sh
|
||||
$ npm install accepts
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
```js
|
||||
var accepts = require('accepts')
|
||||
```
|
||||
|
||||
### accepts(req)
|
||||
|
||||
Create a new `Accepts` object for the given `req`.
|
||||
|
||||
#### .charset(charsets)
|
||||
|
||||
Return the first accepted charset. If nothing in `charsets` is accepted,
|
||||
then `false` is returned.
|
||||
|
||||
#### .charsets()
|
||||
|
||||
Return the charsets that the request accepts, in the order of the client's
|
||||
preference (most preferred first).
|
||||
|
||||
#### .encoding(encodings)
|
||||
|
||||
Return the first accepted encoding. If nothing in `encodings` is accepted,
|
||||
then `false` is returned.
|
||||
|
||||
#### .encodings()
|
||||
|
||||
Return the encodings that the request accepts, in the order of the client's
|
||||
preference (most preferred first).
|
||||
|
||||
#### .language(languages)
|
||||
|
||||
Return the first accepted language. If nothing in `languages` is accepted,
|
||||
then `false` is returned.
|
||||
|
||||
#### .languages()
|
||||
|
||||
Return the languages that the request accepts, in the order of the client's
|
||||
preference (most preferred first).
|
||||
|
||||
#### .type(types)
|
||||
|
||||
Return the first accepted type (and it is returned as the same text as what
|
||||
appears in the `types` array). If nothing in `types` is accepted, then `false`
|
||||
is returned.
|
||||
|
||||
The `types` array can contain full MIME types or file extensions. Any value
|
||||
that is not a full MIME types is passed to `require('mime-types').lookup`.
|
||||
|
||||
#### .types()
|
||||
|
||||
Return the types that the request accepts, in the order of the client's
|
||||
preference (most preferred first).
|
||||
|
||||
## Examples
|
||||
|
||||
### Simple type negotiation
|
||||
|
||||
This simple example shows how to use `accepts` to return a different typed
|
||||
respond body based on what the client wants to accept. The server lists it's
|
||||
preferences in order and will get back the best match between the client and
|
||||
server.
|
||||
|
||||
```js
|
||||
var accepts = require('accepts')
|
||||
var http = require('http')
|
||||
|
||||
function app (req, res) {
|
||||
var accept = accepts(req)
|
||||
|
||||
// the order of this list is significant; should be server preferred order
|
||||
switch (accept.type(['json', 'html'])) {
|
||||
case 'json':
|
||||
res.setHeader('Content-Type', 'application/json')
|
||||
res.write('{"hello":"world!"}')
|
||||
break
|
||||
case 'html':
|
||||
res.setHeader('Content-Type', 'text/html')
|
||||
res.write('<b>hello, world!</b>')
|
||||
break
|
||||
default:
|
||||
// the fallback is text/plain, so no need to specify it above
|
||||
res.setHeader('Content-Type', 'text/plain')
|
||||
res.write('hello, world!')
|
||||
break
|
||||
}
|
||||
|
||||
res.end()
|
||||
}
|
||||
|
||||
http.createServer(app).listen(3000)
|
||||
```
|
||||
|
||||
You can test this out with the cURL program:
|
||||
```sh
|
||||
curl -I -H'Accept: text/html' http://localhost:3000/
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
[MIT](LICENSE)
|
||||
|
||||
[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/accepts/master
|
||||
[coveralls-url]: https://coveralls.io/r/jshttp/accepts?branch=master
|
||||
[github-actions-ci-image]: https://badgen.net/github/checks/jshttp/accepts/master?label=ci
|
||||
[github-actions-ci-url]: https://github.com/jshttp/accepts/actions/workflows/ci.yml
|
||||
[node-version-image]: https://badgen.net/npm/node/accepts
|
||||
[node-version-url]: https://nodejs.org/en/download
|
||||
[npm-downloads-image]: https://badgen.net/npm/dm/accepts
|
||||
[npm-url]: https://npmjs.org/package/accepts
|
||||
[npm-version-image]: https://badgen.net/npm/v/accepts
|
238
web/status/node_modules/accepts/index.js
generated
vendored
238
web/status/node_modules/accepts/index.js
generated
vendored
@ -1,238 +0,0 @@
|
||||
/*!
|
||||
* accepts
|
||||
* Copyright(c) 2014 Jonathan Ong
|
||||
* Copyright(c) 2015 Douglas Christopher Wilson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
* @private
|
||||
*/
|
||||
|
||||
var Negotiator = require('negotiator')
|
||||
var mime = require('mime-types')
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
* @public
|
||||
*/
|
||||
|
||||
module.exports = Accepts
|
||||
|
||||
/**
|
||||
* Create a new Accepts object for the given req.
|
||||
*
|
||||
* @param {object} req
|
||||
* @public
|
||||
*/
|
||||
|
||||
function Accepts (req) {
|
||||
if (!(this instanceof Accepts)) {
|
||||
return new Accepts(req)
|
||||
}
|
||||
|
||||
this.headers = req.headers
|
||||
this.negotiator = new Negotiator(req)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the given `type(s)` is acceptable, returning
|
||||
* the best match when true, otherwise `undefined`, in which
|
||||
* case you should respond with 406 "Not Acceptable".
|
||||
*
|
||||
* The `type` value may be a single mime type string
|
||||
* such as "application/json", the extension name
|
||||
* such as "json" or an array `["json", "html", "text/plain"]`. When a list
|
||||
* or array is given the _best_ match, if any is returned.
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* // Accept: text/html
|
||||
* this.types('html');
|
||||
* // => "html"
|
||||
*
|
||||
* // Accept: text/*, application/json
|
||||
* this.types('html');
|
||||
* // => "html"
|
||||
* this.types('text/html');
|
||||
* // => "text/html"
|
||||
* this.types('json', 'text');
|
||||
* // => "json"
|
||||
* this.types('application/json');
|
||||
* // => "application/json"
|
||||
*
|
||||
* // Accept: text/*, application/json
|
||||
* this.types('image/png');
|
||||
* this.types('png');
|
||||
* // => undefined
|
||||
*
|
||||
* // Accept: text/*;q=.5, application/json
|
||||
* this.types(['html', 'json']);
|
||||
* this.types('html', 'json');
|
||||
* // => "json"
|
||||
*
|
||||
* @param {String|Array} types...
|
||||
* @return {String|Array|Boolean}
|
||||
* @public
|
||||
*/
|
||||
|
||||
Accepts.prototype.type =
|
||||
Accepts.prototype.types = function (types_) {
|
||||
var types = types_
|
||||
|
||||
// support flattened arguments
|
||||
if (types && !Array.isArray(types)) {
|
||||
types = new Array(arguments.length)
|
||||
for (var i = 0; i < types.length; i++) {
|
||||
types[i] = arguments[i]
|
||||
}
|
||||
}
|
||||
|
||||
// no types, return all requested types
|
||||
if (!types || types.length === 0) {
|
||||
return this.negotiator.mediaTypes()
|
||||
}
|
||||
|
||||
// no accept header, return first given type
|
||||
if (!this.headers.accept) {
|
||||
return types[0]
|
||||
}
|
||||
|
||||
var mimes = types.map(extToMime)
|
||||
var accepts = this.negotiator.mediaTypes(mimes.filter(validMime))
|
||||
var first = accepts[0]
|
||||
|
||||
return first
|
||||
? types[mimes.indexOf(first)]
|
||||
: false
|
||||
}
|
||||
|
||||
/**
|
||||
* Return accepted encodings or best fit based on `encodings`.
|
||||
*
|
||||
* Given `Accept-Encoding: gzip, deflate`
|
||||
* an array sorted by quality is returned:
|
||||
*
|
||||
* ['gzip', 'deflate']
|
||||
*
|
||||
* @param {String|Array} encodings...
|
||||
* @return {String|Array}
|
||||
* @public
|
||||
*/
|
||||
|
||||
Accepts.prototype.encoding =
|
||||
Accepts.prototype.encodings = function (encodings_) {
|
||||
var encodings = encodings_
|
||||
|
||||
// support flattened arguments
|
||||
if (encodings && !Array.isArray(encodings)) {
|
||||
encodings = new Array(arguments.length)
|
||||
for (var i = 0; i < encodings.length; i++) {
|
||||
encodings[i] = arguments[i]
|
||||
}
|
||||
}
|
||||
|
||||
// no encodings, return all requested encodings
|
||||
if (!encodings || encodings.length === 0) {
|
||||
return this.negotiator.encodings()
|
||||
}
|
||||
|
||||
return this.negotiator.encodings(encodings)[0] || false
|
||||
}
|
||||
|
||||
/**
|
||||
* Return accepted charsets or best fit based on `charsets`.
|
||||
*
|
||||
* Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5`
|
||||
* an array sorted by quality is returned:
|
||||
*
|
||||
* ['utf-8', 'utf-7', 'iso-8859-1']
|
||||
*
|
||||
* @param {String|Array} charsets...
|
||||
* @return {String|Array}
|
||||
* @public
|
||||
*/
|
||||
|
||||
Accepts.prototype.charset =
|
||||
Accepts.prototype.charsets = function (charsets_) {
|
||||
var charsets = charsets_
|
||||
|
||||
// support flattened arguments
|
||||
if (charsets && !Array.isArray(charsets)) {
|
||||
charsets = new Array(arguments.length)
|
||||
for (var i = 0; i < charsets.length; i++) {
|
||||
charsets[i] = arguments[i]
|
||||
}
|
||||
}
|
||||
|
||||
// no charsets, return all requested charsets
|
||||
if (!charsets || charsets.length === 0) {
|
||||
return this.negotiator.charsets()
|
||||
}
|
||||
|
||||
return this.negotiator.charsets(charsets)[0] || false
|
||||
}
|
||||
|
||||
/**
|
||||
* Return accepted languages or best fit based on `langs`.
|
||||
*
|
||||
* Given `Accept-Language: en;q=0.8, es, pt`
|
||||
* an array sorted by quality is returned:
|
||||
*
|
||||
* ['es', 'pt', 'en']
|
||||
*
|
||||
* @param {String|Array} langs...
|
||||
* @return {Array|String}
|
||||
* @public
|
||||
*/
|
||||
|
||||
Accepts.prototype.lang =
|
||||
Accepts.prototype.langs =
|
||||
Accepts.prototype.language =
|
||||
Accepts.prototype.languages = function (languages_) {
|
||||
var languages = languages_
|
||||
|
||||
// support flattened arguments
|
||||
if (languages && !Array.isArray(languages)) {
|
||||
languages = new Array(arguments.length)
|
||||
for (var i = 0; i < languages.length; i++) {
|
||||
languages[i] = arguments[i]
|
||||
}
|
||||
}
|
||||
|
||||
// no languages, return all requested languages
|
||||
if (!languages || languages.length === 0) {
|
||||
return this.negotiator.languages()
|
||||
}
|
||||
|
||||
return this.negotiator.languages(languages)[0] || false
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert extnames to mime.
|
||||
*
|
||||
* @param {String} type
|
||||
* @return {String}
|
||||
* @private
|
||||
*/
|
||||
|
||||
function extToMime (type) {
|
||||
return type.indexOf('/') === -1
|
||||
? mime.lookup(type)
|
||||
: type
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if mime is valid.
|
||||
*
|
||||
* @param {String} type
|
||||
* @return {String}
|
||||
* @private
|
||||
*/
|
||||
|
||||
function validMime (type) {
|
||||
return typeof type === 'string'
|
||||
}
|
47
web/status/node_modules/accepts/package.json
generated
vendored
47
web/status/node_modules/accepts/package.json
generated
vendored
@ -1,47 +0,0 @@
|
||||
{
|
||||
"name": "accepts",
|
||||
"description": "Higher-level content negotiation",
|
||||
"version": "1.3.8",
|
||||
"contributors": [
|
||||
"Douglas Christopher Wilson <doug@somethingdoug.com>",
|
||||
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)"
|
||||
],
|
||||
"license": "MIT",
|
||||
"repository": "jshttp/accepts",
|
||||
"dependencies": {
|
||||
"mime-types": "~2.1.34",
|
||||
"negotiator": "0.6.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"deep-equal": "1.0.1",
|
||||
"eslint": "7.32.0",
|
||||
"eslint-config-standard": "14.1.1",
|
||||
"eslint-plugin-import": "2.25.4",
|
||||
"eslint-plugin-markdown": "2.2.1",
|
||||
"eslint-plugin-node": "11.1.0",
|
||||
"eslint-plugin-promise": "4.3.1",
|
||||
"eslint-plugin-standard": "4.1.0",
|
||||
"mocha": "9.2.0",
|
||||
"nyc": "15.1.0"
|
||||
},
|
||||
"files": [
|
||||
"LICENSE",
|
||||
"HISTORY.md",
|
||||
"index.js"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
},
|
||||
"scripts": {
|
||||
"lint": "eslint .",
|
||||
"test": "mocha --reporter spec --check-leaks --bail test/",
|
||||
"test-ci": "nyc --reporter=lcov --reporter=text npm test",
|
||||
"test-cov": "nyc --reporter=html --reporter=text npm test"
|
||||
},
|
||||
"keywords": [
|
||||
"content",
|
||||
"negotiation",
|
||||
"accept",
|
||||
"accepts"
|
||||
]
|
||||
}
|
15
web/status/node_modules/anymatch/LICENSE
generated
vendored
15
web/status/node_modules/anymatch/LICENSE
generated
vendored
@ -1,15 +0,0 @@
|
||||
The ISC License
|
||||
|
||||
Copyright (c) 2019 Elan Shanker, Paul Miller (https://paulmillr.com)
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
87
web/status/node_modules/anymatch/README.md
generated
vendored
87
web/status/node_modules/anymatch/README.md
generated
vendored
@ -1,87 +0,0 @@
|
||||
anymatch [](https://travis-ci.org/micromatch/anymatch) [](https://coveralls.io/r/micromatch/anymatch?branch=master)
|
||||
======
|
||||
Javascript module to match a string against a regular expression, glob, string,
|
||||
or function that takes the string as an argument and returns a truthy or falsy
|
||||
value. The matcher can also be an array of any or all of these. Useful for
|
||||
allowing a very flexible user-defined config to define things like file paths.
|
||||
|
||||
__Note: This module has Bash-parity, please be aware that Windows-style backslashes are not supported as separators. See https://github.com/micromatch/micromatch#backslashes for more information.__
|
||||
|
||||
|
||||
Usage
|
||||
-----
|
||||
```sh
|
||||
npm install anymatch
|
||||
```
|
||||
|
||||
#### anymatch(matchers, testString, [returnIndex], [options])
|
||||
* __matchers__: (_Array|String|RegExp|Function_)
|
||||
String to be directly matched, string with glob patterns, regular expression
|
||||
test, function that takes the testString as an argument and returns a truthy
|
||||
value if it should be matched, or an array of any number and mix of these types.
|
||||
* __testString__: (_String|Array_) The string to test against the matchers. If
|
||||
passed as an array, the first element of the array will be used as the
|
||||
`testString` for non-function matchers, while the entire array will be applied
|
||||
as the arguments for function matchers.
|
||||
* __options__: (_Object_ [optional]_) Any of the [picomatch](https://github.com/micromatch/picomatch#options) options.
|
||||
* __returnIndex__: (_Boolean [optional]_) If true, return the array index of
|
||||
the first matcher that that testString matched, or -1 if no match, instead of a
|
||||
boolean result.
|
||||
|
||||
```js
|
||||
const anymatch = require('anymatch');
|
||||
|
||||
const matchers = [ 'path/to/file.js', 'path/anyjs/**/*.js', /foo.js$/, string => string.includes('bar') && string.length > 10 ] ;
|
||||
|
||||
anymatch(matchers, 'path/to/file.js'); // true
|
||||
anymatch(matchers, 'path/anyjs/baz.js'); // true
|
||||
anymatch(matchers, 'path/to/foo.js'); // true
|
||||
anymatch(matchers, 'path/to/bar.js'); // true
|
||||
anymatch(matchers, 'bar.js'); // false
|
||||
|
||||
// returnIndex = true
|
||||
anymatch(matchers, 'foo.js', {returnIndex: true}); // 2
|
||||
anymatch(matchers, 'path/anyjs/foo.js', {returnIndex: true}); // 1
|
||||
|
||||
// any picomatc
|
||||
|
||||
// using globs to match directories and their children
|
||||
anymatch('node_modules', 'node_modules'); // true
|
||||
anymatch('node_modules', 'node_modules/somelib/index.js'); // false
|
||||
anymatch('node_modules/**', 'node_modules/somelib/index.js'); // true
|
||||
anymatch('node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // false
|
||||
anymatch('**/node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // true
|
||||
|
||||
const matcher = anymatch(matchers);
|
||||
['foo.js', 'bar.js'].filter(matcher); // [ 'foo.js' ]
|
||||
anymatch master* ❯
|
||||
|
||||
```
|
||||
|
||||
#### anymatch(matchers)
|
||||
You can also pass in only your matcher(s) to get a curried function that has
|
||||
already been bound to the provided matching criteria. This can be used as an
|
||||
`Array#filter` callback.
|
||||
|
||||
```js
|
||||
var matcher = anymatch(matchers);
|
||||
|
||||
matcher('path/to/file.js'); // true
|
||||
matcher('path/anyjs/baz.js', true); // 1
|
||||
|
||||
['foo.js', 'bar.js'].filter(matcher); // ['foo.js']
|
||||
```
|
||||
|
||||
Changelog
|
||||
----------
|
||||
[See release notes page on GitHub](https://github.com/micromatch/anymatch/releases)
|
||||
|
||||
- **v3.0:** Removed `startIndex` and `endIndex` arguments. Node 8.x-only.
|
||||
- **v2.0:** [micromatch](https://github.com/jonschlinkert/micromatch) moves away from minimatch-parity and inline with Bash. This includes handling backslashes differently (see https://github.com/micromatch/micromatch#backslashes for more information).
|
||||
- **v1.2:** anymatch uses [micromatch](https://github.com/jonschlinkert/micromatch)
|
||||
for glob pattern matching. Issues with glob pattern matching should be
|
||||
reported directly to the [micromatch issue tracker](https://github.com/jonschlinkert/micromatch/issues).
|
||||
|
||||
License
|
||||
-------
|
||||
[ISC](https://raw.github.com/micromatch/anymatch/master/LICENSE)
|
20
web/status/node_modules/anymatch/index.d.ts
generated
vendored
20
web/status/node_modules/anymatch/index.d.ts
generated
vendored
@ -1,20 +0,0 @@
|
||||
type AnymatchFn = (testString: string) => boolean;
|
||||
type AnymatchPattern = string|RegExp|AnymatchFn;
|
||||
type AnymatchMatcher = AnymatchPattern|AnymatchPattern[]
|
||||
type AnymatchTester = {
|
||||
(testString: string|any[], returnIndex: true): number;
|
||||
(testString: string|any[]): boolean;
|
||||
}
|
||||
|
||||
type PicomatchOptions = {dot: boolean};
|
||||
|
||||
declare const anymatch: {
|
||||
(matchers: AnymatchMatcher): AnymatchTester;
|
||||
(matchers: AnymatchMatcher, testString: null, returnIndex: true | PicomatchOptions): AnymatchTester;
|
||||
(matchers: AnymatchMatcher, testString: string|any[], returnIndex: true | PicomatchOptions): number;
|
||||
(matchers: AnymatchMatcher, testString: string|any[]): boolean;
|
||||
}
|
||||
|
||||
export {AnymatchMatcher as Matcher}
|
||||
export {AnymatchTester as Tester}
|
||||
export default anymatch
|
104
web/status/node_modules/anymatch/index.js
generated
vendored
104
web/status/node_modules/anymatch/index.js
generated
vendored
@ -1,104 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
|
||||
const picomatch = require('picomatch');
|
||||
const normalizePath = require('normalize-path');
|
||||
|
||||
/**
|
||||
* @typedef {(testString: string) => boolean} AnymatchFn
|
||||
* @typedef {string|RegExp|AnymatchFn} AnymatchPattern
|
||||
* @typedef {AnymatchPattern|AnymatchPattern[]} AnymatchMatcher
|
||||
*/
|
||||
const BANG = '!';
|
||||
const DEFAULT_OPTIONS = {returnIndex: false};
|
||||
const arrify = (item) => Array.isArray(item) ? item : [item];
|
||||
|
||||
/**
|
||||
* @param {AnymatchPattern} matcher
|
||||
* @param {object} options
|
||||
* @returns {AnymatchFn}
|
||||
*/
|
||||
const createPattern = (matcher, options) => {
|
||||
if (typeof matcher === 'function') {
|
||||
return matcher;
|
||||
}
|
||||
if (typeof matcher === 'string') {
|
||||
const glob = picomatch(matcher, options);
|
||||
return (string) => matcher === string || glob(string);
|
||||
}
|
||||
if (matcher instanceof RegExp) {
|
||||
return (string) => matcher.test(string);
|
||||
}
|
||||
return (string) => false;
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Array<Function>} patterns
|
||||
* @param {Array<Function>} negPatterns
|
||||
* @param {String|Array} args
|
||||
* @param {Boolean} returnIndex
|
||||
* @returns {boolean|number}
|
||||
*/
|
||||
const matchPatterns = (patterns, negPatterns, args, returnIndex) => {
|
||||
const isList = Array.isArray(args);
|
||||
const _path = isList ? args[0] : args;
|
||||
if (!isList && typeof _path !== 'string') {
|
||||
throw new TypeError('anymatch: second argument must be a string: got ' +
|
||||
Object.prototype.toString.call(_path))
|
||||
}
|
||||
const path = normalizePath(_path, false);
|
||||
|
||||
for (let index = 0; index < negPatterns.length; index++) {
|
||||
const nglob = negPatterns[index];
|
||||
if (nglob(path)) {
|
||||
return returnIndex ? -1 : false;
|
||||
}
|
||||
}
|
||||
|
||||
const applied = isList && [path].concat(args.slice(1));
|
||||
for (let index = 0; index < patterns.length; index++) {
|
||||
const pattern = patterns[index];
|
||||
if (isList ? pattern(...applied) : pattern(path)) {
|
||||
return returnIndex ? index : true;
|
||||
}
|
||||
}
|
||||
|
||||
return returnIndex ? -1 : false;
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {AnymatchMatcher} matchers
|
||||
* @param {Array|string} testString
|
||||
* @param {object} options
|
||||
* @returns {boolean|number|Function}
|
||||
*/
|
||||
const anymatch = (matchers, testString, options = DEFAULT_OPTIONS) => {
|
||||
if (matchers == null) {
|
||||
throw new TypeError('anymatch: specify first argument');
|
||||
}
|
||||
const opts = typeof options === 'boolean' ? {returnIndex: options} : options;
|
||||
const returnIndex = opts.returnIndex || false;
|
||||
|
||||
// Early cache for matchers.
|
||||
const mtchers = arrify(matchers);
|
||||
const negatedGlobs = mtchers
|
||||
.filter(item => typeof item === 'string' && item.charAt(0) === BANG)
|
||||
.map(item => item.slice(1))
|
||||
.map(item => picomatch(item, opts));
|
||||
const patterns = mtchers
|
||||
.filter(item => typeof item !== 'string' || (typeof item === 'string' && item.charAt(0) !== BANG))
|
||||
.map(matcher => createPattern(matcher, opts));
|
||||
|
||||
if (testString == null) {
|
||||
return (testString, ri = false) => {
|
||||
const returnIndex = typeof ri === 'boolean' ? ri : false;
|
||||
return matchPatterns(patterns, negatedGlobs, testString, returnIndex);
|
||||
}
|
||||
}
|
||||
|
||||
return matchPatterns(patterns, negatedGlobs, testString, returnIndex);
|
||||
};
|
||||
|
||||
anymatch.default = anymatch;
|
||||
module.exports = anymatch;
|
48
web/status/node_modules/anymatch/package.json
generated
vendored
48
web/status/node_modules/anymatch/package.json
generated
vendored
@ -1,48 +0,0 @@
|
||||
{
|
||||
"name": "anymatch",
|
||||
"version": "3.1.3",
|
||||
"description": "Matches strings against configurable strings, globs, regular expressions, and/or functions",
|
||||
"files": [
|
||||
"index.js",
|
||||
"index.d.ts"
|
||||
],
|
||||
"dependencies": {
|
||||
"normalize-path": "^3.0.0",
|
||||
"picomatch": "^2.0.4"
|
||||
},
|
||||
"author": {
|
||||
"name": "Elan Shanker",
|
||||
"url": "https://github.com/es128"
|
||||
},
|
||||
"license": "ISC",
|
||||
"homepage": "https://github.com/micromatch/anymatch",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/micromatch/anymatch"
|
||||
},
|
||||
"keywords": [
|
||||
"match",
|
||||
"any",
|
||||
"string",
|
||||
"file",
|
||||
"fs",
|
||||
"list",
|
||||
"glob",
|
||||
"regex",
|
||||
"regexp",
|
||||
"regular",
|
||||
"expression",
|
||||
"function"
|
||||
],
|
||||
"scripts": {
|
||||
"test": "nyc mocha",
|
||||
"mocha": "mocha"
|
||||
},
|
||||
"devDependencies": {
|
||||
"mocha": "^6.1.3",
|
||||
"nyc": "^14.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
}
|
||||
}
|
21
web/status/node_modules/array-flatten/LICENSE
generated
vendored
21
web/status/node_modules/array-flatten/LICENSE
generated
vendored
@ -1,21 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
43
web/status/node_modules/array-flatten/README.md
generated
vendored
43
web/status/node_modules/array-flatten/README.md
generated
vendored
@ -1,43 +0,0 @@
|
||||
# Array Flatten
|
||||
|
||||
[![NPM version][npm-image]][npm-url]
|
||||
[![NPM downloads][downloads-image]][downloads-url]
|
||||
[![Build status][travis-image]][travis-url]
|
||||
[![Test coverage][coveralls-image]][coveralls-url]
|
||||
|
||||
> Flatten an array of nested arrays into a single flat array. Accepts an optional depth.
|
||||
|
||||
## Installation
|
||||
|
||||
```
|
||||
npm install array-flatten --save
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```javascript
|
||||
var flatten = require('array-flatten')
|
||||
|
||||
flatten([1, [2, [3, [4, [5], 6], 7], 8], 9])
|
||||
//=> [1, 2, 3, 4, 5, 6, 7, 8, 9]
|
||||
|
||||
flatten([1, [2, [3, [4, [5], 6], 7], 8], 9], 2)
|
||||
//=> [1, 2, 3, [4, [5], 6], 7, 8, 9]
|
||||
|
||||
(function () {
|
||||
flatten(arguments) //=> [1, 2, 3]
|
||||
})(1, [2, 3])
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
|
||||
[npm-image]: https://img.shields.io/npm/v/array-flatten.svg?style=flat
|
||||
[npm-url]: https://npmjs.org/package/array-flatten
|
||||
[downloads-image]: https://img.shields.io/npm/dm/array-flatten.svg?style=flat
|
||||
[downloads-url]: https://npmjs.org/package/array-flatten
|
||||
[travis-image]: https://img.shields.io/travis/blakeembrey/array-flatten.svg?style=flat
|
||||
[travis-url]: https://travis-ci.org/blakeembrey/array-flatten
|
||||
[coveralls-image]: https://img.shields.io/coveralls/blakeembrey/array-flatten.svg?style=flat
|
||||
[coveralls-url]: https://coveralls.io/r/blakeembrey/array-flatten?branch=master
|
64
web/status/node_modules/array-flatten/array-flatten.js
generated
vendored
64
web/status/node_modules/array-flatten/array-flatten.js
generated
vendored
@ -1,64 +0,0 @@
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Expose `arrayFlatten`.
|
||||
*/
|
||||
module.exports = arrayFlatten
|
||||
|
||||
/**
|
||||
* Recursive flatten function with depth.
|
||||
*
|
||||
* @param {Array} array
|
||||
* @param {Array} result
|
||||
* @param {Number} depth
|
||||
* @return {Array}
|
||||
*/
|
||||
function flattenWithDepth (array, result, depth) {
|
||||
for (var i = 0; i < array.length; i++) {
|
||||
var value = array[i]
|
||||
|
||||
if (depth > 0 && Array.isArray(value)) {
|
||||
flattenWithDepth(value, result, depth - 1)
|
||||
} else {
|
||||
result.push(value)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursive flatten function. Omitting depth is slightly faster.
|
||||
*
|
||||
* @param {Array} array
|
||||
* @param {Array} result
|
||||
* @return {Array}
|
||||
*/
|
||||
function flattenForever (array, result) {
|
||||
for (var i = 0; i < array.length; i++) {
|
||||
var value = array[i]
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
flattenForever(value, result)
|
||||
} else {
|
||||
result.push(value)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Flatten an array, with the ability to define a depth.
|
||||
*
|
||||
* @param {Array} array
|
||||
* @param {Number} depth
|
||||
* @return {Array}
|
||||
*/
|
||||
function arrayFlatten (array, depth) {
|
||||
if (depth == null) {
|
||||
return flattenForever(array, [])
|
||||
}
|
||||
|
||||
return flattenWithDepth(array, [], depth)
|
||||
}
|
39
web/status/node_modules/array-flatten/package.json
generated
vendored
39
web/status/node_modules/array-flatten/package.json
generated
vendored
@ -1,39 +0,0 @@
|
||||
{
|
||||
"name": "array-flatten",
|
||||
"version": "1.1.1",
|
||||
"description": "Flatten an array of nested arrays into a single flat array",
|
||||
"main": "array-flatten.js",
|
||||
"files": [
|
||||
"array-flatten.js",
|
||||
"LICENSE"
|
||||
],
|
||||
"scripts": {
|
||||
"test": "istanbul cover _mocha -- -R spec"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/blakeembrey/array-flatten.git"
|
||||
},
|
||||
"keywords": [
|
||||
"array",
|
||||
"flatten",
|
||||
"arguments",
|
||||
"depth"
|
||||
],
|
||||
"author": {
|
||||
"name": "Blake Embrey",
|
||||
"email": "hello@blakeembrey.com",
|
||||
"url": "http://blakeembrey.me"
|
||||
},
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/blakeembrey/array-flatten/issues"
|
||||
},
|
||||
"homepage": "https://github.com/blakeembrey/array-flatten",
|
||||
"devDependencies": {
|
||||
"istanbul": "^0.3.13",
|
||||
"mocha": "^2.2.4",
|
||||
"pre-commit": "^1.0.7",
|
||||
"standard": "^3.7.3"
|
||||
}
|
||||
}
|
2
web/status/node_modules/balanced-match/.github/FUNDING.yml
generated
vendored
2
web/status/node_modules/balanced-match/.github/FUNDING.yml
generated
vendored
@ -1,2 +0,0 @@
|
||||
tidelift: "npm/balanced-match"
|
||||
patreon: juliangruber
|
21
web/status/node_modules/balanced-match/LICENSE.md
generated
vendored
21
web/status/node_modules/balanced-match/LICENSE.md
generated
vendored
@ -1,21 +0,0 @@
|
||||
(MIT)
|
||||
|
||||
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
97
web/status/node_modules/balanced-match/README.md
generated
vendored
97
web/status/node_modules/balanced-match/README.md
generated
vendored
@ -1,97 +0,0 @@
|
||||
# balanced-match
|
||||
|
||||
Match balanced string pairs, like `{` and `}` or `<b>` and `</b>`. Supports regular expressions as well!
|
||||
|
||||
[](http://travis-ci.org/juliangruber/balanced-match)
|
||||
[](https://www.npmjs.org/package/balanced-match)
|
||||
|
||||
[](https://ci.testling.com/juliangruber/balanced-match)
|
||||
|
||||
## Example
|
||||
|
||||
Get the first matching pair of braces:
|
||||
|
||||
```js
|
||||
var balanced = require('balanced-match');
|
||||
|
||||
console.log(balanced('{', '}', 'pre{in{nested}}post'));
|
||||
console.log(balanced('{', '}', 'pre{first}between{second}post'));
|
||||
console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post'));
|
||||
```
|
||||
|
||||
The matches are:
|
||||
|
||||
```bash
|
||||
$ node example.js
|
||||
{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' }
|
||||
{ start: 3,
|
||||
end: 9,
|
||||
pre: 'pre',
|
||||
body: 'first',
|
||||
post: 'between{second}post' }
|
||||
{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' }
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### var m = balanced(a, b, str)
|
||||
|
||||
For the first non-nested matching pair of `a` and `b` in `str`, return an
|
||||
object with those keys:
|
||||
|
||||
* **start** the index of the first match of `a`
|
||||
* **end** the index of the matching `b`
|
||||
* **pre** the preamble, `a` and `b` not included
|
||||
* **body** the match, `a` and `b` not included
|
||||
* **post** the postscript, `a` and `b` not included
|
||||
|
||||
If there's no match, `undefined` will be returned.
|
||||
|
||||
If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`.
|
||||
|
||||
### var r = balanced.range(a, b, str)
|
||||
|
||||
For the first non-nested matching pair of `a` and `b` in `str`, return an
|
||||
array with indexes: `[ <a index>, <b index> ]`.
|
||||
|
||||
If there's no match, `undefined` will be returned.
|
||||
|
||||
If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`.
|
||||
|
||||
## Installation
|
||||
|
||||
With [npm](https://npmjs.org) do:
|
||||
|
||||
```bash
|
||||
npm install balanced-match
|
||||
```
|
||||
|
||||
## Security contact information
|
||||
|
||||
To report a security vulnerability, please use the
|
||||
[Tidelift security contact](https://tidelift.com/security).
|
||||
Tidelift will coordinate the fix and disclosure.
|
||||
|
||||
## License
|
||||
|
||||
(MIT)
|
||||
|
||||
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
62
web/status/node_modules/balanced-match/index.js
generated
vendored
62
web/status/node_modules/balanced-match/index.js
generated
vendored
@ -1,62 +0,0 @@
|
||||
'use strict';
|
||||
module.exports = balanced;
|
||||
function balanced(a, b, str) {
|
||||
if (a instanceof RegExp) a = maybeMatch(a, str);
|
||||
if (b instanceof RegExp) b = maybeMatch(b, str);
|
||||
|
||||
var r = range(a, b, str);
|
||||
|
||||
return r && {
|
||||
start: r[0],
|
||||
end: r[1],
|
||||
pre: str.slice(0, r[0]),
|
||||
body: str.slice(r[0] + a.length, r[1]),
|
||||
post: str.slice(r[1] + b.length)
|
||||
};
|
||||
}
|
||||
|
||||
function maybeMatch(reg, str) {
|
||||
var m = str.match(reg);
|
||||
return m ? m[0] : null;
|
||||
}
|
||||
|
||||
balanced.range = range;
|
||||
function range(a, b, str) {
|
||||
var begs, beg, left, right, result;
|
||||
var ai = str.indexOf(a);
|
||||
var bi = str.indexOf(b, ai + 1);
|
||||
var i = ai;
|
||||
|
||||
if (ai >= 0 && bi > 0) {
|
||||
if(a===b) {
|
||||
return [ai, bi];
|
||||
}
|
||||
begs = [];
|
||||
left = str.length;
|
||||
|
||||
while (i >= 0 && !result) {
|
||||
if (i == ai) {
|
||||
begs.push(i);
|
||||
ai = str.indexOf(a, i + 1);
|
||||
} else if (begs.length == 1) {
|
||||
result = [ begs.pop(), bi ];
|
||||
} else {
|
||||
beg = begs.pop();
|
||||
if (beg < left) {
|
||||
left = beg;
|
||||
right = bi;
|
||||
}
|
||||
|
||||
bi = str.indexOf(b, i + 1);
|
||||
}
|
||||
|
||||
i = ai < bi && ai >= 0 ? ai : bi;
|
||||
}
|
||||
|
||||
if (begs.length) {
|
||||
result = [ left, right ];
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
48
web/status/node_modules/balanced-match/package.json
generated
vendored
48
web/status/node_modules/balanced-match/package.json
generated
vendored
@ -1,48 +0,0 @@
|
||||
{
|
||||
"name": "balanced-match",
|
||||
"description": "Match balanced character pairs, like \"{\" and \"}\"",
|
||||
"version": "1.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/juliangruber/balanced-match.git"
|
||||
},
|
||||
"homepage": "https://github.com/juliangruber/balanced-match",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "tape test/test.js",
|
||||
"bench": "matcha test/bench.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"matcha": "^0.7.0",
|
||||
"tape": "^4.6.0"
|
||||
},
|
||||
"keywords": [
|
||||
"match",
|
||||
"regexp",
|
||||
"test",
|
||||
"balanced",
|
||||
"parse"
|
||||
],
|
||||
"author": {
|
||||
"name": "Julian Gruber",
|
||||
"email": "mail@juliangruber.com",
|
||||
"url": "http://juliangruber.com"
|
||||
},
|
||||
"license": "MIT",
|
||||
"testling": {
|
||||
"files": "test/*.js",
|
||||
"browsers": [
|
||||
"ie/8..latest",
|
||||
"firefox/20..latest",
|
||||
"firefox/nightly",
|
||||
"chrome/25..latest",
|
||||
"chrome/canary",
|
||||
"opera/12..latest",
|
||||
"opera/next",
|
||||
"safari/5.1..latest",
|
||||
"ipad/6.0..latest",
|
||||
"iphone/6.0..latest",
|
||||
"android-browser/4.2..latest"
|
||||
]
|
||||
}
|
||||
}
|
263
web/status/node_modules/binary-extensions/binary-extensions.json
generated
vendored
263
web/status/node_modules/binary-extensions/binary-extensions.json
generated
vendored
@ -1,263 +0,0 @@
|
||||
[
|
||||
"3dm",
|
||||
"3ds",
|
||||
"3g2",
|
||||
"3gp",
|
||||
"7z",
|
||||
"a",
|
||||
"aac",
|
||||
"adp",
|
||||
"afdesign",
|
||||
"afphoto",
|
||||
"afpub",
|
||||
"ai",
|
||||
"aif",
|
||||
"aiff",
|
||||
"alz",
|
||||
"ape",
|
||||
"apk",
|
||||
"appimage",
|
||||
"ar",
|
||||
"arj",
|
||||
"asf",
|
||||
"au",
|
||||
"avi",
|
||||
"bak",
|
||||
"baml",
|
||||
"bh",
|
||||
"bin",
|
||||
"bk",
|
||||
"bmp",
|
||||
"btif",
|
||||
"bz2",
|
||||
"bzip2",
|
||||
"cab",
|
||||
"caf",
|
||||
"cgm",
|
||||
"class",
|
||||
"cmx",
|
||||
"cpio",
|
||||
"cr2",
|
||||
"cur",
|
||||
"dat",
|
||||
"dcm",
|
||||
"deb",
|
||||
"dex",
|
||||
"djvu",
|
||||
"dll",
|
||||
"dmg",
|
||||
"dng",
|
||||
"doc",
|
||||
"docm",
|
||||
"docx",
|
||||
"dot",
|
||||
"dotm",
|
||||
"dra",
|
||||
"DS_Store",
|
||||
"dsk",
|
||||
"dts",
|
||||
"dtshd",
|
||||
"dvb",
|
||||
"dwg",
|
||||
"dxf",
|
||||
"ecelp4800",
|
||||
"ecelp7470",
|
||||
"ecelp9600",
|
||||
"egg",
|
||||
"eol",
|
||||
"eot",
|
||||
"epub",
|
||||
"exe",
|
||||
"f4v",
|
||||
"fbs",
|
||||
"fh",
|
||||
"fla",
|
||||
"flac",
|
||||
"flatpak",
|
||||
"fli",
|
||||
"flv",
|
||||
"fpx",
|
||||
"fst",
|
||||
"fvt",
|
||||
"g3",
|
||||
"gh",
|
||||
"gif",
|
||||
"graffle",
|
||||
"gz",
|
||||
"gzip",
|
||||
"h261",
|
||||
"h263",
|
||||
"h264",
|
||||
"icns",
|
||||
"ico",
|
||||
"ief",
|
||||
"img",
|
||||
"ipa",
|
||||
"iso",
|
||||
"jar",
|
||||
"jpeg",
|
||||
"jpg",
|
||||
"jpgv",
|
||||
"jpm",
|
||||
"jxr",
|
||||
"key",
|
||||
"ktx",
|
||||
"lha",
|
||||
"lib",
|
||||
"lvp",
|
||||
"lz",
|
||||
"lzh",
|
||||
"lzma",
|
||||
"lzo",
|
||||
"m3u",
|
||||
"m4a",
|
||||
"m4v",
|
||||
"mar",
|
||||
"mdi",
|
||||
"mht",
|
||||
"mid",
|
||||
"midi",
|
||||
"mj2",
|
||||
"mka",
|
||||
"mkv",
|
||||
"mmr",
|
||||
"mng",
|
||||
"mobi",
|
||||
"mov",
|
||||
"movie",
|
||||
"mp3",
|
||||
"mp4",
|
||||
"mp4a",
|
||||
"mpeg",
|
||||
"mpg",
|
||||
"mpga",
|
||||
"mxu",
|
||||
"nef",
|
||||
"npx",
|
||||
"numbers",
|
||||
"nupkg",
|
||||
"o",
|
||||
"odp",
|
||||
"ods",
|
||||
"odt",
|
||||
"oga",
|
||||
"ogg",
|
||||
"ogv",
|
||||
"otf",
|
||||
"ott",
|
||||
"pages",
|
||||
"pbm",
|
||||
"pcx",
|
||||
"pdb",
|
||||
"pdf",
|
||||
"pea",
|
||||
"pgm",
|
||||
"pic",
|
||||
"png",
|
||||
"pnm",
|
||||
"pot",
|
||||
"potm",
|
||||
"potx",
|
||||
"ppa",
|
||||
"ppam",
|
||||
"ppm",
|
||||
"pps",
|
||||
"ppsm",
|
||||
"ppsx",
|
||||
"ppt",
|
||||
"pptm",
|
||||
"pptx",
|
||||
"psd",
|
||||
"pya",
|
||||
"pyc",
|
||||
"pyo",
|
||||
"pyv",
|
||||
"qt",
|
||||
"rar",
|
||||
"ras",
|
||||
"raw",
|
||||
"resources",
|
||||
"rgb",
|
||||
"rip",
|
||||
"rlc",
|
||||
"rmf",
|
||||
"rmvb",
|
||||
"rpm",
|
||||
"rtf",
|
||||
"rz",
|
||||
"s3m",
|
||||
"s7z",
|
||||
"scpt",
|
||||
"sgi",
|
||||
"shar",
|
||||
"snap",
|
||||
"sil",
|
||||
"sketch",
|
||||
"slk",
|
||||
"smv",
|
||||
"snk",
|
||||
"so",
|
||||
"stl",
|
||||
"suo",
|
||||
"sub",
|
||||
"swf",
|
||||
"tar",
|
||||
"tbz",
|
||||
"tbz2",
|
||||
"tga",
|
||||
"tgz",
|
||||
"thmx",
|
||||
"tif",
|
||||
"tiff",
|
||||
"tlz",
|
||||
"ttc",
|
||||
"ttf",
|
||||
"txz",
|
||||
"udf",
|
||||
"uvh",
|
||||
"uvi",
|
||||
"uvm",
|
||||
"uvp",
|
||||
"uvs",
|
||||
"uvu",
|
||||
"viv",
|
||||
"vob",
|
||||
"war",
|
||||
"wav",
|
||||
"wax",
|
||||
"wbmp",
|
||||
"wdp",
|
||||
"weba",
|
||||
"webm",
|
||||
"webp",
|
||||
"whl",
|
||||
"wim",
|
||||
"wm",
|
||||
"wma",
|
||||
"wmv",
|
||||
"wmx",
|
||||
"woff",
|
||||
"woff2",
|
||||
"wrm",
|
||||
"wvx",
|
||||
"xbm",
|
||||
"xif",
|
||||
"xla",
|
||||
"xlam",
|
||||
"xls",
|
||||
"xlsb",
|
||||
"xlsm",
|
||||
"xlsx",
|
||||
"xlt",
|
||||
"xltm",
|
||||
"xltx",
|
||||
"xm",
|
||||
"xmind",
|
||||
"xpi",
|
||||
"xpm",
|
||||
"xwd",
|
||||
"xz",
|
||||
"z",
|
||||
"zip",
|
||||
"zipx"
|
||||
]
|
3
web/status/node_modules/binary-extensions/binary-extensions.json.d.ts
generated
vendored
3
web/status/node_modules/binary-extensions/binary-extensions.json.d.ts
generated
vendored
@ -1,3 +0,0 @@
|
||||
declare const binaryExtensionsJson: readonly string[];
|
||||
|
||||
export = binaryExtensionsJson;
|
14
web/status/node_modules/binary-extensions/index.d.ts
generated
vendored
14
web/status/node_modules/binary-extensions/index.d.ts
generated
vendored
@ -1,14 +0,0 @@
|
||||
/**
|
||||
List of binary file extensions.
|
||||
|
||||
@example
|
||||
```
|
||||
import binaryExtensions = require('binary-extensions');
|
||||
|
||||
console.log(binaryExtensions);
|
||||
//=> ['3ds', '3g2', …]
|
||||
```
|
||||
*/
|
||||
declare const binaryExtensions: readonly string[];
|
||||
|
||||
export = binaryExtensions;
|
1
web/status/node_modules/binary-extensions/index.js
generated
vendored
1
web/status/node_modules/binary-extensions/index.js
generated
vendored
@ -1 +0,0 @@
|
||||
module.exports = require('./binary-extensions.json');
|
10
web/status/node_modules/binary-extensions/license
generated
vendored
10
web/status/node_modules/binary-extensions/license
generated
vendored
@ -1,10 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
|
||||
Copyright (c) Paul Miller (https://paulmillr.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
40
web/status/node_modules/binary-extensions/package.json
generated
vendored
40
web/status/node_modules/binary-extensions/package.json
generated
vendored
@ -1,40 +0,0 @@
|
||||
{
|
||||
"name": "binary-extensions",
|
||||
"version": "2.3.0",
|
||||
"description": "List of binary file extensions",
|
||||
"license": "MIT",
|
||||
"repository": "sindresorhus/binary-extensions",
|
||||
"funding": "https://github.com/sponsors/sindresorhus",
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "https://sindresorhus.com"
|
||||
},
|
||||
"sideEffects": false,
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && ava && tsd"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"index.d.ts",
|
||||
"binary-extensions.json",
|
||||
"binary-extensions.json.d.ts"
|
||||
],
|
||||
"keywords": [
|
||||
"binary",
|
||||
"extensions",
|
||||
"extension",
|
||||
"file",
|
||||
"json",
|
||||
"list",
|
||||
"array"
|
||||
],
|
||||
"devDependencies": {
|
||||
"ava": "^1.4.1",
|
||||
"tsd": "^0.7.2",
|
||||
"xo": "^0.24.0"
|
||||
}
|
||||
}
|
25
web/status/node_modules/binary-extensions/readme.md
generated
vendored
25
web/status/node_modules/binary-extensions/readme.md
generated
vendored
@ -1,25 +0,0 @@
|
||||
# binary-extensions
|
||||
|
||||
> List of binary file extensions
|
||||
|
||||
The list is just a [JSON file](binary-extensions.json) and can be used anywhere.
|
||||
|
||||
## Install
|
||||
|
||||
```sh
|
||||
npm install binary-extensions
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const binaryExtensions = require('binary-extensions');
|
||||
|
||||
console.log(binaryExtensions);
|
||||
//=> ['3ds', '3g2', …]
|
||||
```
|
||||
|
||||
## Related
|
||||
|
||||
- [is-binary-path](https://github.com/sindresorhus/is-binary-path) - Check if a filepath is a binary file
|
||||
- [text-extensions](https://github.com/sindresorhus/text-extensions) - List of text file extensions
|
672
web/status/node_modules/body-parser/HISTORY.md
generated
vendored
672
web/status/node_modules/body-parser/HISTORY.md
generated
vendored
@ -1,672 +0,0 @@
|
||||
1.20.3 / 2024-09-10
|
||||
===================
|
||||
|
||||
* deps: qs@6.13.0
|
||||
* add `depth` option to customize the depth level in the parser
|
||||
* IMPORTANT: The default `depth` level for parsing URL-encoded data is now `32` (previously was `Infinity`)
|
||||
|
||||
1.20.2 / 2023-02-21
|
||||
===================
|
||||
|
||||
* Fix strict json error message on Node.js 19+
|
||||
* deps: content-type@~1.0.5
|
||||
- perf: skip value escaping when unnecessary
|
||||
* deps: raw-body@2.5.2
|
||||
|
||||
1.20.1 / 2022-10-06
|
||||
===================
|
||||
|
||||
* deps: qs@6.11.0
|
||||
* perf: remove unnecessary object clone
|
||||
|
||||
1.20.0 / 2022-04-02
|
||||
===================
|
||||
|
||||
* Fix error message for json parse whitespace in `strict`
|
||||
* Fix internal error when inflated body exceeds limit
|
||||
* Prevent loss of async hooks context
|
||||
* Prevent hanging when request already read
|
||||
* deps: depd@2.0.0
|
||||
- Replace internal `eval` usage with `Function` constructor
|
||||
- Use instance methods on `process` to check for listeners
|
||||
* deps: http-errors@2.0.0
|
||||
- deps: depd@2.0.0
|
||||
- deps: statuses@2.0.1
|
||||
* deps: on-finished@2.4.1
|
||||
* deps: qs@6.10.3
|
||||
* deps: raw-body@2.5.1
|
||||
- deps: http-errors@2.0.0
|
||||
|
||||
1.19.2 / 2022-02-15
|
||||
===================
|
||||
|
||||
* deps: bytes@3.1.2
|
||||
* deps: qs@6.9.7
|
||||
* Fix handling of `__proto__` keys
|
||||
* deps: raw-body@2.4.3
|
||||
- deps: bytes@3.1.2
|
||||
|
||||
1.19.1 / 2021-12-10
|
||||
===================
|
||||
|
||||
* deps: bytes@3.1.1
|
||||
* deps: http-errors@1.8.1
|
||||
- deps: inherits@2.0.4
|
||||
- deps: toidentifier@1.0.1
|
||||
- deps: setprototypeof@1.2.0
|
||||
* deps: qs@6.9.6
|
||||
* deps: raw-body@2.4.2
|
||||
- deps: bytes@3.1.1
|
||||
- deps: http-errors@1.8.1
|
||||
* deps: safe-buffer@5.2.1
|
||||
* deps: type-is@~1.6.18
|
||||
|
||||
1.19.0 / 2019-04-25
|
||||
===================
|
||||
|
||||
* deps: bytes@3.1.0
|
||||
- Add petabyte (`pb`) support
|
||||
* deps: http-errors@1.7.2
|
||||
- Set constructor name when possible
|
||||
- deps: setprototypeof@1.1.1
|
||||
- deps: statuses@'>= 1.5.0 < 2'
|
||||
* deps: iconv-lite@0.4.24
|
||||
- Added encoding MIK
|
||||
* deps: qs@6.7.0
|
||||
- Fix parsing array brackets after index
|
||||
* deps: raw-body@2.4.0
|
||||
- deps: bytes@3.1.0
|
||||
- deps: http-errors@1.7.2
|
||||
- deps: iconv-lite@0.4.24
|
||||
* deps: type-is@~1.6.17
|
||||
- deps: mime-types@~2.1.24
|
||||
- perf: prevent internal `throw` on invalid type
|
||||
|
||||
1.18.3 / 2018-05-14
|
||||
===================
|
||||
|
||||
* Fix stack trace for strict json parse error
|
||||
* deps: depd@~1.1.2
|
||||
- perf: remove argument reassignment
|
||||
* deps: http-errors@~1.6.3
|
||||
- deps: depd@~1.1.2
|
||||
- deps: setprototypeof@1.1.0
|
||||
- deps: statuses@'>= 1.3.1 < 2'
|
||||
* deps: iconv-lite@0.4.23
|
||||
- Fix loading encoding with year appended
|
||||
- Fix deprecation warnings on Node.js 10+
|
||||
* deps: qs@6.5.2
|
||||
* deps: raw-body@2.3.3
|
||||
- deps: http-errors@1.6.3
|
||||
- deps: iconv-lite@0.4.23
|
||||
* deps: type-is@~1.6.16
|
||||
- deps: mime-types@~2.1.18
|
||||
|
||||
1.18.2 / 2017-09-22
|
||||
===================
|
||||
|
||||
* deps: debug@2.6.9
|
||||
* perf: remove argument reassignment
|
||||
|
||||
1.18.1 / 2017-09-12
|
||||
===================
|
||||
|
||||
* deps: content-type@~1.0.4
|
||||
- perf: remove argument reassignment
|
||||
- perf: skip parameter parsing when no parameters
|
||||
* deps: iconv-lite@0.4.19
|
||||
- Fix ISO-8859-1 regression
|
||||
- Update Windows-1255
|
||||
* deps: qs@6.5.1
|
||||
- Fix parsing & compacting very deep objects
|
||||
* deps: raw-body@2.3.2
|
||||
- deps: iconv-lite@0.4.19
|
||||
|
||||
1.18.0 / 2017-09-08
|
||||
===================
|
||||
|
||||
* Fix JSON strict violation error to match native parse error
|
||||
* Include the `body` property on verify errors
|
||||
* Include the `type` property on all generated errors
|
||||
* Use `http-errors` to set status code on errors
|
||||
* deps: bytes@3.0.0
|
||||
* deps: debug@2.6.8
|
||||
* deps: depd@~1.1.1
|
||||
- Remove unnecessary `Buffer` loading
|
||||
* deps: http-errors@~1.6.2
|
||||
- deps: depd@1.1.1
|
||||
* deps: iconv-lite@0.4.18
|
||||
- Add support for React Native
|
||||
- Add a warning if not loaded as utf-8
|
||||
- Fix CESU-8 decoding in Node.js 8
|
||||
- Improve speed of ISO-8859-1 encoding
|
||||
* deps: qs@6.5.0
|
||||
* deps: raw-body@2.3.1
|
||||
- Use `http-errors` for standard emitted errors
|
||||
- deps: bytes@3.0.0
|
||||
- deps: iconv-lite@0.4.18
|
||||
- perf: skip buffer decoding on overage chunk
|
||||
* perf: prevent internal `throw` when missing charset
|
||||
|
||||
1.17.2 / 2017-05-17
|
||||
===================
|
||||
|
||||
* deps: debug@2.6.7
|
||||
- Fix `DEBUG_MAX_ARRAY_LENGTH`
|
||||
- deps: ms@2.0.0
|
||||
* deps: type-is@~1.6.15
|
||||
- deps: mime-types@~2.1.15
|
||||
|
||||
1.17.1 / 2017-03-06
|
||||
===================
|
||||
|
||||
* deps: qs@6.4.0
|
||||
- Fix regression parsing keys starting with `[`
|
||||
|
||||
1.17.0 / 2017-03-01
|
||||
===================
|
||||
|
||||
* deps: http-errors@~1.6.1
|
||||
- Make `message` property enumerable for `HttpError`s
|
||||
- deps: setprototypeof@1.0.3
|
||||
* deps: qs@6.3.1
|
||||
- Fix compacting nested arrays
|
||||
|
||||
1.16.1 / 2017-02-10
|
||||
===================
|
||||
|
||||
* deps: debug@2.6.1
|
||||
- Fix deprecation messages in WebStorm and other editors
|
||||
- Undeprecate `DEBUG_FD` set to `1` or `2`
|
||||
|
||||
1.16.0 / 2017-01-17
|
||||
===================
|
||||
|
||||
* deps: debug@2.6.0
|
||||
- Allow colors in workers
|
||||
- Deprecated `DEBUG_FD` environment variable
|
||||
- Fix error when running under React Native
|
||||
- Use same color for same namespace
|
||||
- deps: ms@0.7.2
|
||||
* deps: http-errors@~1.5.1
|
||||
- deps: inherits@2.0.3
|
||||
- deps: setprototypeof@1.0.2
|
||||
- deps: statuses@'>= 1.3.1 < 2'
|
||||
* deps: iconv-lite@0.4.15
|
||||
- Added encoding MS-31J
|
||||
- Added encoding MS-932
|
||||
- Added encoding MS-936
|
||||
- Added encoding MS-949
|
||||
- Added encoding MS-950
|
||||
- Fix GBK/GB18030 handling of Euro character
|
||||
* deps: qs@6.2.1
|
||||
- Fix array parsing from skipping empty values
|
||||
* deps: raw-body@~2.2.0
|
||||
- deps: iconv-lite@0.4.15
|
||||
* deps: type-is@~1.6.14
|
||||
- deps: mime-types@~2.1.13
|
||||
|
||||
1.15.2 / 2016-06-19
|
||||
===================
|
||||
|
||||
* deps: bytes@2.4.0
|
||||
* deps: content-type@~1.0.2
|
||||
- perf: enable strict mode
|
||||
* deps: http-errors@~1.5.0
|
||||
- Use `setprototypeof` module to replace `__proto__` setting
|
||||
- deps: statuses@'>= 1.3.0 < 2'
|
||||
- perf: enable strict mode
|
||||
* deps: qs@6.2.0
|
||||
* deps: raw-body@~2.1.7
|
||||
- deps: bytes@2.4.0
|
||||
- perf: remove double-cleanup on happy path
|
||||
* deps: type-is@~1.6.13
|
||||
- deps: mime-types@~2.1.11
|
||||
|
||||
1.15.1 / 2016-05-05
|
||||
===================
|
||||
|
||||
* deps: bytes@2.3.0
|
||||
- Drop partial bytes on all parsed units
|
||||
- Fix parsing byte string that looks like hex
|
||||
* deps: raw-body@~2.1.6
|
||||
- deps: bytes@2.3.0
|
||||
* deps: type-is@~1.6.12
|
||||
- deps: mime-types@~2.1.10
|
||||
|
||||
1.15.0 / 2016-02-10
|
||||
===================
|
||||
|
||||
* deps: http-errors@~1.4.0
|
||||
- Add `HttpError` export, for `err instanceof createError.HttpError`
|
||||
- deps: inherits@2.0.1
|
||||
- deps: statuses@'>= 1.2.1 < 2'
|
||||
* deps: qs@6.1.0
|
||||
* deps: type-is@~1.6.11
|
||||
- deps: mime-types@~2.1.9
|
||||
|
||||
1.14.2 / 2015-12-16
|
||||
===================
|
||||
|
||||
* deps: bytes@2.2.0
|
||||
* deps: iconv-lite@0.4.13
|
||||
* deps: qs@5.2.0
|
||||
* deps: raw-body@~2.1.5
|
||||
- deps: bytes@2.2.0
|
||||
- deps: iconv-lite@0.4.13
|
||||
* deps: type-is@~1.6.10
|
||||
- deps: mime-types@~2.1.8
|
||||
|
||||
1.14.1 / 2015-09-27
|
||||
===================
|
||||
|
||||
* Fix issue where invalid charset results in 400 when `verify` used
|
||||
* deps: iconv-lite@0.4.12
|
||||
- Fix CESU-8 decoding in Node.js 4.x
|
||||
* deps: raw-body@~2.1.4
|
||||
- Fix masking critical errors from `iconv-lite`
|
||||
- deps: iconv-lite@0.4.12
|
||||
* deps: type-is@~1.6.9
|
||||
- deps: mime-types@~2.1.7
|
||||
|
||||
1.14.0 / 2015-09-16
|
||||
===================
|
||||
|
||||
* Fix JSON strict parse error to match syntax errors
|
||||
* Provide static `require` analysis in `urlencoded` parser
|
||||
* deps: depd@~1.1.0
|
||||
- Support web browser loading
|
||||
* deps: qs@5.1.0
|
||||
* deps: raw-body@~2.1.3
|
||||
- Fix sync callback when attaching data listener causes sync read
|
||||
* deps: type-is@~1.6.8
|
||||
- Fix type error when given invalid type to match against
|
||||
- deps: mime-types@~2.1.6
|
||||
|
||||
1.13.3 / 2015-07-31
|
||||
===================
|
||||
|
||||
* deps: type-is@~1.6.6
|
||||
- deps: mime-types@~2.1.4
|
||||
|
||||
1.13.2 / 2015-07-05
|
||||
===================
|
||||
|
||||
* deps: iconv-lite@0.4.11
|
||||
* deps: qs@4.0.0
|
||||
- Fix dropping parameters like `hasOwnProperty`
|
||||
- Fix user-visible incompatibilities from 3.1.0
|
||||
- Fix various parsing edge cases
|
||||
* deps: raw-body@~2.1.2
|
||||
- Fix error stack traces to skip `makeError`
|
||||
- deps: iconv-lite@0.4.11
|
||||
* deps: type-is@~1.6.4
|
||||
- deps: mime-types@~2.1.2
|
||||
- perf: enable strict mode
|
||||
- perf: remove argument reassignment
|
||||
|
||||
1.13.1 / 2015-06-16
|
||||
===================
|
||||
|
||||
* deps: qs@2.4.2
|
||||
- Downgraded from 3.1.0 because of user-visible incompatibilities
|
||||
|
||||
1.13.0 / 2015-06-14
|
||||
===================
|
||||
|
||||
* Add `statusCode` property on `Error`s, in addition to `status`
|
||||
* Change `type` default to `application/json` for JSON parser
|
||||
* Change `type` default to `application/x-www-form-urlencoded` for urlencoded parser
|
||||
* Provide static `require` analysis
|
||||
* Use the `http-errors` module to generate errors
|
||||
* deps: bytes@2.1.0
|
||||
- Slight optimizations
|
||||
* deps: iconv-lite@0.4.10
|
||||
- The encoding UTF-16 without BOM now defaults to UTF-16LE when detection fails
|
||||
- Leading BOM is now removed when decoding
|
||||
* deps: on-finished@~2.3.0
|
||||
- Add defined behavior for HTTP `CONNECT` requests
|
||||
- Add defined behavior for HTTP `Upgrade` requests
|
||||
- deps: ee-first@1.1.1
|
||||
* deps: qs@3.1.0
|
||||
- Fix dropping parameters like `hasOwnProperty`
|
||||
- Fix various parsing edge cases
|
||||
- Parsed object now has `null` prototype
|
||||
* deps: raw-body@~2.1.1
|
||||
- Use `unpipe` module for unpiping requests
|
||||
- deps: iconv-lite@0.4.10
|
||||
* deps: type-is@~1.6.3
|
||||
- deps: mime-types@~2.1.1
|
||||
- perf: reduce try block size
|
||||
- perf: remove bitwise operations
|
||||
* perf: enable strict mode
|
||||
* perf: remove argument reassignment
|
||||
* perf: remove delete call
|
||||
|
||||
1.12.4 / 2015-05-10
|
||||
===================
|
||||
|
||||
* deps: debug@~2.2.0
|
||||
* deps: qs@2.4.2
|
||||
- Fix allowing parameters like `constructor`
|
||||
* deps: on-finished@~2.2.1
|
||||
* deps: raw-body@~2.0.1
|
||||
- Fix a false-positive when unpiping in Node.js 0.8
|
||||
- deps: bytes@2.0.1
|
||||
* deps: type-is@~1.6.2
|
||||
- deps: mime-types@~2.0.11
|
||||
|
||||
1.12.3 / 2015-04-15
|
||||
===================
|
||||
|
||||
* Slight efficiency improvement when not debugging
|
||||
* deps: depd@~1.0.1
|
||||
* deps: iconv-lite@0.4.8
|
||||
- Add encoding alias UNICODE-1-1-UTF-7
|
||||
* deps: raw-body@1.3.4
|
||||
- Fix hanging callback if request aborts during read
|
||||
- deps: iconv-lite@0.4.8
|
||||
|
||||
1.12.2 / 2015-03-16
|
||||
===================
|
||||
|
||||
* deps: qs@2.4.1
|
||||
- Fix error when parameter `hasOwnProperty` is present
|
||||
|
||||
1.12.1 / 2015-03-15
|
||||
===================
|
||||
|
||||
* deps: debug@~2.1.3
|
||||
- Fix high intensity foreground color for bold
|
||||
- deps: ms@0.7.0
|
||||
* deps: type-is@~1.6.1
|
||||
- deps: mime-types@~2.0.10
|
||||
|
||||
1.12.0 / 2015-02-13
|
||||
===================
|
||||
|
||||
* add `debug` messages
|
||||
* accept a function for the `type` option
|
||||
* use `content-type` to parse `Content-Type` headers
|
||||
* deps: iconv-lite@0.4.7
|
||||
- Gracefully support enumerables on `Object.prototype`
|
||||
* deps: raw-body@1.3.3
|
||||
- deps: iconv-lite@0.4.7
|
||||
* deps: type-is@~1.6.0
|
||||
- fix argument reassignment
|
||||
- fix false-positives in `hasBody` `Transfer-Encoding` check
|
||||
- support wildcard for both type and subtype (`*/*`)
|
||||
- deps: mime-types@~2.0.9
|
||||
|
||||
1.11.0 / 2015-01-30
|
||||
===================
|
||||
|
||||
* make internal `extended: true` depth limit infinity
|
||||
* deps: type-is@~1.5.6
|
||||
- deps: mime-types@~2.0.8
|
||||
|
||||
1.10.2 / 2015-01-20
|
||||
===================
|
||||
|
||||
* deps: iconv-lite@0.4.6
|
||||
- Fix rare aliases of single-byte encodings
|
||||
* deps: raw-body@1.3.2
|
||||
- deps: iconv-lite@0.4.6
|
||||
|
||||
1.10.1 / 2015-01-01
|
||||
===================
|
||||
|
||||
* deps: on-finished@~2.2.0
|
||||
* deps: type-is@~1.5.5
|
||||
- deps: mime-types@~2.0.7
|
||||
|
||||
1.10.0 / 2014-12-02
|
||||
===================
|
||||
|
||||
* make internal `extended: true` array limit dynamic
|
||||
|
||||
1.9.3 / 2014-11-21
|
||||
==================
|
||||
|
||||
* deps: iconv-lite@0.4.5
|
||||
- Fix Windows-31J and X-SJIS encoding support
|
||||
* deps: qs@2.3.3
|
||||
- Fix `arrayLimit` behavior
|
||||
* deps: raw-body@1.3.1
|
||||
- deps: iconv-lite@0.4.5
|
||||
* deps: type-is@~1.5.3
|
||||
- deps: mime-types@~2.0.3
|
||||
|
||||
1.9.2 / 2014-10-27
|
||||
==================
|
||||
|
||||
* deps: qs@2.3.2
|
||||
- Fix parsing of mixed objects and values
|
||||
|
||||
1.9.1 / 2014-10-22
|
||||
==================
|
||||
|
||||
* deps: on-finished@~2.1.1
|
||||
- Fix handling of pipelined requests
|
||||
* deps: qs@2.3.0
|
||||
- Fix parsing of mixed implicit and explicit arrays
|
||||
* deps: type-is@~1.5.2
|
||||
- deps: mime-types@~2.0.2
|
||||
|
||||
1.9.0 / 2014-09-24
|
||||
==================
|
||||
|
||||
* include the charset in "unsupported charset" error message
|
||||
* include the encoding in "unsupported content encoding" error message
|
||||
* deps: depd@~1.0.0
|
||||
|
||||
1.8.4 / 2014-09-23
|
||||
==================
|
||||
|
||||
* fix content encoding to be case-insensitive
|
||||
|
||||
1.8.3 / 2014-09-19
|
||||
==================
|
||||
|
||||
* deps: qs@2.2.4
|
||||
- Fix issue with object keys starting with numbers truncated
|
||||
|
||||
1.8.2 / 2014-09-15
|
||||
==================
|
||||
|
||||
* deps: depd@0.4.5
|
||||
|
||||
1.8.1 / 2014-09-07
|
||||
==================
|
||||
|
||||
* deps: media-typer@0.3.0
|
||||
* deps: type-is@~1.5.1
|
||||
|
||||
1.8.0 / 2014-09-05
|
||||
==================
|
||||
|
||||
* make empty-body-handling consistent between chunked requests
|
||||
- empty `json` produces `{}`
|
||||
- empty `raw` produces `new Buffer(0)`
|
||||
- empty `text` produces `''`
|
||||
- empty `urlencoded` produces `{}`
|
||||
* deps: qs@2.2.3
|
||||
- Fix issue where first empty value in array is discarded
|
||||
* deps: type-is@~1.5.0
|
||||
- fix `hasbody` to be true for `content-length: 0`
|
||||
|
||||
1.7.0 / 2014-09-01
|
||||
==================
|
||||
|
||||
* add `parameterLimit` option to `urlencoded` parser
|
||||
* change `urlencoded` extended array limit to 100
|
||||
* respond with 413 when over `parameterLimit` in `urlencoded`
|
||||
|
||||
1.6.7 / 2014-08-29
|
||||
==================
|
||||
|
||||
* deps: qs@2.2.2
|
||||
- Remove unnecessary cloning
|
||||
|
||||
1.6.6 / 2014-08-27
|
||||
==================
|
||||
|
||||
* deps: qs@2.2.0
|
||||
- Array parsing fix
|
||||
- Performance improvements
|
||||
|
||||
1.6.5 / 2014-08-16
|
||||
==================
|
||||
|
||||
* deps: on-finished@2.1.0
|
||||
|
||||
1.6.4 / 2014-08-14
|
||||
==================
|
||||
|
||||
* deps: qs@1.2.2
|
||||
|
||||
1.6.3 / 2014-08-10
|
||||
==================
|
||||
|
||||
* deps: qs@1.2.1
|
||||
|
||||
1.6.2 / 2014-08-07
|
||||
==================
|
||||
|
||||
* deps: qs@1.2.0
|
||||
- Fix parsing array of objects
|
||||
|
||||
1.6.1 / 2014-08-06
|
||||
==================
|
||||
|
||||
* deps: qs@1.1.0
|
||||
- Accept urlencoded square brackets
|
||||
- Accept empty values in implicit array notation
|
||||
|
||||
1.6.0 / 2014-08-05
|
||||
==================
|
||||
|
||||
* deps: qs@1.0.2
|
||||
- Complete rewrite
|
||||
- Limits array length to 20
|
||||
- Limits object depth to 5
|
||||
- Limits parameters to 1,000
|
||||
|
||||
1.5.2 / 2014-07-27
|
||||
==================
|
||||
|
||||
* deps: depd@0.4.4
|
||||
- Work-around v8 generating empty stack traces
|
||||
|
||||
1.5.1 / 2014-07-26
|
||||
==================
|
||||
|
||||
* deps: depd@0.4.3
|
||||
- Fix exception when global `Error.stackTraceLimit` is too low
|
||||
|
||||
1.5.0 / 2014-07-20
|
||||
==================
|
||||
|
||||
* deps: depd@0.4.2
|
||||
- Add `TRACE_DEPRECATION` environment variable
|
||||
- Remove non-standard grey color from color output
|
||||
- Support `--no-deprecation` argument
|
||||
- Support `--trace-deprecation` argument
|
||||
* deps: iconv-lite@0.4.4
|
||||
- Added encoding UTF-7
|
||||
* deps: raw-body@1.3.0
|
||||
- deps: iconv-lite@0.4.4
|
||||
- Added encoding UTF-7
|
||||
- Fix `Cannot switch to old mode now` error on Node.js 0.10+
|
||||
* deps: type-is@~1.3.2
|
||||
|
||||
1.4.3 / 2014-06-19
|
||||
==================
|
||||
|
||||
* deps: type-is@1.3.1
|
||||
- fix global variable leak
|
||||
|
||||
1.4.2 / 2014-06-19
|
||||
==================
|
||||
|
||||
* deps: type-is@1.3.0
|
||||
- improve type parsing
|
||||
|
||||
1.4.1 / 2014-06-19
|
||||
==================
|
||||
|
||||
* fix urlencoded extended deprecation message
|
||||
|
||||
1.4.0 / 2014-06-19
|
||||
==================
|
||||
|
||||
* add `text` parser
|
||||
* add `raw` parser
|
||||
* check accepted charset in content-type (accepts utf-8)
|
||||
* check accepted encoding in content-encoding (accepts identity)
|
||||
* deprecate `bodyParser()` middleware; use `.json()` and `.urlencoded()` as needed
|
||||
* deprecate `urlencoded()` without provided `extended` option
|
||||
* lazy-load urlencoded parsers
|
||||
* parsers split into files for reduced mem usage
|
||||
* support gzip and deflate bodies
|
||||
- set `inflate: false` to turn off
|
||||
* deps: raw-body@1.2.2
|
||||
- Support all encodings from `iconv-lite`
|
||||
|
||||
1.3.1 / 2014-06-11
|
||||
==================
|
||||
|
||||
* deps: type-is@1.2.1
|
||||
- Switch dependency from mime to mime-types@1.0.0
|
||||
|
||||
1.3.0 / 2014-05-31
|
||||
==================
|
||||
|
||||
* add `extended` option to urlencoded parser
|
||||
|
||||
1.2.2 / 2014-05-27
|
||||
==================
|
||||
|
||||
* deps: raw-body@1.1.6
|
||||
- assert stream encoding on node.js 0.8
|
||||
- assert stream encoding on node.js < 0.10.6
|
||||
- deps: bytes@1
|
||||
|
||||
1.2.1 / 2014-05-26
|
||||
==================
|
||||
|
||||
* invoke `next(err)` after request fully read
|
||||
- prevents hung responses and socket hang ups
|
||||
|
||||
1.2.0 / 2014-05-11
|
||||
==================
|
||||
|
||||
* add `verify` option
|
||||
* deps: type-is@1.2.0
|
||||
- support suffix matching
|
||||
|
||||
1.1.2 / 2014-05-11
|
||||
==================
|
||||
|
||||
* improve json parser speed
|
||||
|
||||
1.1.1 / 2014-05-11
|
||||
==================
|
||||
|
||||
* fix repeated limit parsing with every request
|
||||
|
||||
1.1.0 / 2014-05-10
|
||||
==================
|
||||
|
||||
* add `type` option
|
||||
* deps: pin for safety and consistency
|
||||
|
||||
1.0.2 / 2014-04-14
|
||||
==================
|
||||
|
||||
* use `type-is` module
|
||||
|
||||
1.0.1 / 2014-03-20
|
||||
==================
|
||||
|
||||
* lower default limits to 100kb
|
23
web/status/node_modules/body-parser/LICENSE
generated
vendored
23
web/status/node_modules/body-parser/LICENSE
generated
vendored
@ -1,23 +0,0 @@
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com>
|
||||
Copyright (c) 2014-2015 Douglas Christopher Wilson <doug@somethingdoug.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
'Software'), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
476
web/status/node_modules/body-parser/README.md
generated
vendored
476
web/status/node_modules/body-parser/README.md
generated
vendored
@ -1,476 +0,0 @@
|
||||
# body-parser
|
||||
|
||||
[![NPM Version][npm-version-image]][npm-url]
|
||||
[![NPM Downloads][npm-downloads-image]][npm-url]
|
||||
[![Build Status][ci-image]][ci-url]
|
||||
[![Test Coverage][coveralls-image]][coveralls-url]
|
||||
[![OpenSSF Scorecard Badge][ossf-scorecard-badge]][ossf-scorecard-visualizer]
|
||||
|
||||
Node.js body parsing middleware.
|
||||
|
||||
Parse incoming request bodies in a middleware before your handlers, available
|
||||
under the `req.body` property.
|
||||
|
||||
**Note** As `req.body`'s shape is based on user-controlled input, all
|
||||
properties and values in this object are untrusted and should be validated
|
||||
before trusting. For example, `req.body.foo.toString()` may fail in multiple
|
||||
ways, for example the `foo` property may not be there or may not be a string,
|
||||
and `toString` may not be a function and instead a string or other user input.
|
||||
|
||||
[Learn about the anatomy of an HTTP transaction in Node.js](https://nodejs.org/en/docs/guides/anatomy-of-an-http-transaction/).
|
||||
|
||||
_This does not handle multipart bodies_, due to their complex and typically
|
||||
large nature. For multipart bodies, you may be interested in the following
|
||||
modules:
|
||||
|
||||
* [busboy](https://www.npmjs.org/package/busboy#readme) and
|
||||
[connect-busboy](https://www.npmjs.org/package/connect-busboy#readme)
|
||||
* [multiparty](https://www.npmjs.org/package/multiparty#readme) and
|
||||
[connect-multiparty](https://www.npmjs.org/package/connect-multiparty#readme)
|
||||
* [formidable](https://www.npmjs.org/package/formidable#readme)
|
||||
* [multer](https://www.npmjs.org/package/multer#readme)
|
||||
|
||||
This module provides the following parsers:
|
||||
|
||||
* [JSON body parser](#bodyparserjsonoptions)
|
||||
* [Raw body parser](#bodyparserrawoptions)
|
||||
* [Text body parser](#bodyparsertextoptions)
|
||||
* [URL-encoded form body parser](#bodyparserurlencodedoptions)
|
||||
|
||||
Other body parsers you might be interested in:
|
||||
|
||||
- [body](https://www.npmjs.org/package/body#readme)
|
||||
- [co-body](https://www.npmjs.org/package/co-body#readme)
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
$ npm install body-parser
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
```js
|
||||
var bodyParser = require('body-parser')
|
||||
```
|
||||
|
||||
The `bodyParser` object exposes various factories to create middlewares. All
|
||||
middlewares will populate the `req.body` property with the parsed body when
|
||||
the `Content-Type` request header matches the `type` option, or an empty
|
||||
object (`{}`) if there was no body to parse, the `Content-Type` was not matched,
|
||||
or an error occurred.
|
||||
|
||||
The various errors returned by this module are described in the
|
||||
[errors section](#errors).
|
||||
|
||||
### bodyParser.json([options])
|
||||
|
||||
Returns middleware that only parses `json` and only looks at requests where
|
||||
the `Content-Type` header matches the `type` option. This parser accepts any
|
||||
Unicode encoding of the body and supports automatic inflation of `gzip` and
|
||||
`deflate` encodings.
|
||||
|
||||
A new `body` object containing the parsed data is populated on the `request`
|
||||
object after the middleware (i.e. `req.body`).
|
||||
|
||||
#### Options
|
||||
|
||||
The `json` function takes an optional `options` object that may contain any of
|
||||
the following keys:
|
||||
|
||||
##### inflate
|
||||
|
||||
When set to `true`, then deflated (compressed) bodies will be inflated; when
|
||||
`false`, deflated bodies are rejected. Defaults to `true`.
|
||||
|
||||
##### limit
|
||||
|
||||
Controls the maximum request body size. If this is a number, then the value
|
||||
specifies the number of bytes; if it is a string, the value is passed to the
|
||||
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
|
||||
to `'100kb'`.
|
||||
|
||||
##### reviver
|
||||
|
||||
The `reviver` option is passed directly to `JSON.parse` as the second
|
||||
argument. You can find more information on this argument
|
||||
[in the MDN documentation about JSON.parse](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#Example.3A_Using_the_reviver_parameter).
|
||||
|
||||
##### strict
|
||||
|
||||
When set to `true`, will only accept arrays and objects; when `false` will
|
||||
accept anything `JSON.parse` accepts. Defaults to `true`.
|
||||
|
||||
##### type
|
||||
|
||||
The `type` option is used to determine what media type the middleware will
|
||||
parse. This option can be a string, array of strings, or a function. If not a
|
||||
function, `type` option is passed directly to the
|
||||
[type-is](https://www.npmjs.org/package/type-is#readme) library and this can
|
||||
be an extension name (like `json`), a mime type (like `application/json`), or
|
||||
a mime type with a wildcard (like `*/*` or `*/json`). If a function, the `type`
|
||||
option is called as `fn(req)` and the request is parsed if it returns a truthy
|
||||
value. Defaults to `application/json`.
|
||||
|
||||
##### verify
|
||||
|
||||
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
|
||||
where `buf` is a `Buffer` of the raw request body and `encoding` is the
|
||||
encoding of the request. The parsing can be aborted by throwing an error.
|
||||
|
||||
### bodyParser.raw([options])
|
||||
|
||||
Returns middleware that parses all bodies as a `Buffer` and only looks at
|
||||
requests where the `Content-Type` header matches the `type` option. This
|
||||
parser supports automatic inflation of `gzip` and `deflate` encodings.
|
||||
|
||||
A new `body` object containing the parsed data is populated on the `request`
|
||||
object after the middleware (i.e. `req.body`). This will be a `Buffer` object
|
||||
of the body.
|
||||
|
||||
#### Options
|
||||
|
||||
The `raw` function takes an optional `options` object that may contain any of
|
||||
the following keys:
|
||||
|
||||
##### inflate
|
||||
|
||||
When set to `true`, then deflated (compressed) bodies will be inflated; when
|
||||
`false`, deflated bodies are rejected. Defaults to `true`.
|
||||
|
||||
##### limit
|
||||
|
||||
Controls the maximum request body size. If this is a number, then the value
|
||||
specifies the number of bytes; if it is a string, the value is passed to the
|
||||
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
|
||||
to `'100kb'`.
|
||||
|
||||
##### type
|
||||
|
||||
The `type` option is used to determine what media type the middleware will
|
||||
parse. This option can be a string, array of strings, or a function.
|
||||
If not a function, `type` option is passed directly to the
|
||||
[type-is](https://www.npmjs.org/package/type-is#readme) library and this
|
||||
can be an extension name (like `bin`), a mime type (like
|
||||
`application/octet-stream`), or a mime type with a wildcard (like `*/*` or
|
||||
`application/*`). If a function, the `type` option is called as `fn(req)`
|
||||
and the request is parsed if it returns a truthy value. Defaults to
|
||||
`application/octet-stream`.
|
||||
|
||||
##### verify
|
||||
|
||||
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
|
||||
where `buf` is a `Buffer` of the raw request body and `encoding` is the
|
||||
encoding of the request. The parsing can be aborted by throwing an error.
|
||||
|
||||
### bodyParser.text([options])
|
||||
|
||||
Returns middleware that parses all bodies as a string and only looks at
|
||||
requests where the `Content-Type` header matches the `type` option. This
|
||||
parser supports automatic inflation of `gzip` and `deflate` encodings.
|
||||
|
||||
A new `body` string containing the parsed data is populated on the `request`
|
||||
object after the middleware (i.e. `req.body`). This will be a string of the
|
||||
body.
|
||||
|
||||
#### Options
|
||||
|
||||
The `text` function takes an optional `options` object that may contain any of
|
||||
the following keys:
|
||||
|
||||
##### defaultCharset
|
||||
|
||||
Specify the default character set for the text content if the charset is not
|
||||
specified in the `Content-Type` header of the request. Defaults to `utf-8`.
|
||||
|
||||
##### inflate
|
||||
|
||||
When set to `true`, then deflated (compressed) bodies will be inflated; when
|
||||
`false`, deflated bodies are rejected. Defaults to `true`.
|
||||
|
||||
##### limit
|
||||
|
||||
Controls the maximum request body size. If this is a number, then the value
|
||||
specifies the number of bytes; if it is a string, the value is passed to the
|
||||
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
|
||||
to `'100kb'`.
|
||||
|
||||
##### type
|
||||
|
||||
The `type` option is used to determine what media type the middleware will
|
||||
parse. This option can be a string, array of strings, or a function. If not
|
||||
a function, `type` option is passed directly to the
|
||||
[type-is](https://www.npmjs.org/package/type-is#readme) library and this can
|
||||
be an extension name (like `txt`), a mime type (like `text/plain`), or a mime
|
||||
type with a wildcard (like `*/*` or `text/*`). If a function, the `type`
|
||||
option is called as `fn(req)` and the request is parsed if it returns a
|
||||
truthy value. Defaults to `text/plain`.
|
||||
|
||||
##### verify
|
||||
|
||||
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
|
||||
where `buf` is a `Buffer` of the raw request body and `encoding` is the
|
||||
encoding of the request. The parsing can be aborted by throwing an error.
|
||||
|
||||
### bodyParser.urlencoded([options])
|
||||
|
||||
Returns middleware that only parses `urlencoded` bodies and only looks at
|
||||
requests where the `Content-Type` header matches the `type` option. This
|
||||
parser accepts only UTF-8 encoding of the body and supports automatic
|
||||
inflation of `gzip` and `deflate` encodings.
|
||||
|
||||
A new `body` object containing the parsed data is populated on the `request`
|
||||
object after the middleware (i.e. `req.body`). This object will contain
|
||||
key-value pairs, where the value can be a string or array (when `extended` is
|
||||
`false`), or any type (when `extended` is `true`).
|
||||
|
||||
#### Options
|
||||
|
||||
The `urlencoded` function takes an optional `options` object that may contain
|
||||
any of the following keys:
|
||||
|
||||
##### extended
|
||||
|
||||
The `extended` option allows to choose between parsing the URL-encoded data
|
||||
with the `querystring` library (when `false`) or the `qs` library (when
|
||||
`true`). The "extended" syntax allows for rich objects and arrays to be
|
||||
encoded into the URL-encoded format, allowing for a JSON-like experience
|
||||
with URL-encoded. For more information, please
|
||||
[see the qs library](https://www.npmjs.org/package/qs#readme).
|
||||
|
||||
Defaults to `true`, but using the default has been deprecated. Please
|
||||
research into the difference between `qs` and `querystring` and choose the
|
||||
appropriate setting.
|
||||
|
||||
##### inflate
|
||||
|
||||
When set to `true`, then deflated (compressed) bodies will be inflated; when
|
||||
`false`, deflated bodies are rejected. Defaults to `true`.
|
||||
|
||||
##### limit
|
||||
|
||||
Controls the maximum request body size. If this is a number, then the value
|
||||
specifies the number of bytes; if it is a string, the value is passed to the
|
||||
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
|
||||
to `'100kb'`.
|
||||
|
||||
##### parameterLimit
|
||||
|
||||
The `parameterLimit` option controls the maximum number of parameters that
|
||||
are allowed in the URL-encoded data. If a request contains more parameters
|
||||
than this value, a 413 will be returned to the client. Defaults to `1000`.
|
||||
|
||||
##### type
|
||||
|
||||
The `type` option is used to determine what media type the middleware will
|
||||
parse. This option can be a string, array of strings, or a function. If not
|
||||
a function, `type` option is passed directly to the
|
||||
[type-is](https://www.npmjs.org/package/type-is#readme) library and this can
|
||||
be an extension name (like `urlencoded`), a mime type (like
|
||||
`application/x-www-form-urlencoded`), or a mime type with a wildcard (like
|
||||
`*/x-www-form-urlencoded`). If a function, the `type` option is called as
|
||||
`fn(req)` and the request is parsed if it returns a truthy value. Defaults
|
||||
to `application/x-www-form-urlencoded`.
|
||||
|
||||
##### verify
|
||||
|
||||
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
|
||||
where `buf` is a `Buffer` of the raw request body and `encoding` is the
|
||||
encoding of the request. The parsing can be aborted by throwing an error.
|
||||
|
||||
#### depth
|
||||
|
||||
The `depth` option is used to configure the maximum depth of the `qs` library when `extended` is `true`. This allows you to limit the amount of keys that are parsed and can be useful to prevent certain types of abuse. Defaults to `32`. It is recommended to keep this value as low as possible.
|
||||
|
||||
## Errors
|
||||
|
||||
The middlewares provided by this module create errors using the
|
||||
[`http-errors` module](https://www.npmjs.com/package/http-errors). The errors
|
||||
will typically have a `status`/`statusCode` property that contains the suggested
|
||||
HTTP response code, an `expose` property to determine if the `message` property
|
||||
should be displayed to the client, a `type` property to determine the type of
|
||||
error without matching against the `message`, and a `body` property containing
|
||||
the read body, if available.
|
||||
|
||||
The following are the common errors created, though any error can come through
|
||||
for various reasons.
|
||||
|
||||
### content encoding unsupported
|
||||
|
||||
This error will occur when the request had a `Content-Encoding` header that
|
||||
contained an encoding but the "inflation" option was set to `false`. The
|
||||
`status` property is set to `415`, the `type` property is set to
|
||||
`'encoding.unsupported'`, and the `charset` property will be set to the
|
||||
encoding that is unsupported.
|
||||
|
||||
### entity parse failed
|
||||
|
||||
This error will occur when the request contained an entity that could not be
|
||||
parsed by the middleware. The `status` property is set to `400`, the `type`
|
||||
property is set to `'entity.parse.failed'`, and the `body` property is set to
|
||||
the entity value that failed parsing.
|
||||
|
||||
### entity verify failed
|
||||
|
||||
This error will occur when the request contained an entity that could not be
|
||||
failed verification by the defined `verify` option. The `status` property is
|
||||
set to `403`, the `type` property is set to `'entity.verify.failed'`, and the
|
||||
`body` property is set to the entity value that failed verification.
|
||||
|
||||
### request aborted
|
||||
|
||||
This error will occur when the request is aborted by the client before reading
|
||||
the body has finished. The `received` property will be set to the number of
|
||||
bytes received before the request was aborted and the `expected` property is
|
||||
set to the number of expected bytes. The `status` property is set to `400`
|
||||
and `type` property is set to `'request.aborted'`.
|
||||
|
||||
### request entity too large
|
||||
|
||||
This error will occur when the request body's size is larger than the "limit"
|
||||
option. The `limit` property will be set to the byte limit and the `length`
|
||||
property will be set to the request body's length. The `status` property is
|
||||
set to `413` and the `type` property is set to `'entity.too.large'`.
|
||||
|
||||
### request size did not match content length
|
||||
|
||||
This error will occur when the request's length did not match the length from
|
||||
the `Content-Length` header. This typically occurs when the request is malformed,
|
||||
typically when the `Content-Length` header was calculated based on characters
|
||||
instead of bytes. The `status` property is set to `400` and the `type` property
|
||||
is set to `'request.size.invalid'`.
|
||||
|
||||
### stream encoding should not be set
|
||||
|
||||
This error will occur when something called the `req.setEncoding` method prior
|
||||
to this middleware. This module operates directly on bytes only and you cannot
|
||||
call `req.setEncoding` when using this module. The `status` property is set to
|
||||
`500` and the `type` property is set to `'stream.encoding.set'`.
|
||||
|
||||
### stream is not readable
|
||||
|
||||
This error will occur when the request is no longer readable when this middleware
|
||||
attempts to read it. This typically means something other than a middleware from
|
||||
this module read the request body already and the middleware was also configured to
|
||||
read the same request. The `status` property is set to `500` and the `type`
|
||||
property is set to `'stream.not.readable'`.
|
||||
|
||||
### too many parameters
|
||||
|
||||
This error will occur when the content of the request exceeds the configured
|
||||
`parameterLimit` for the `urlencoded` parser. The `status` property is set to
|
||||
`413` and the `type` property is set to `'parameters.too.many'`.
|
||||
|
||||
### unsupported charset "BOGUS"
|
||||
|
||||
This error will occur when the request had a charset parameter in the
|
||||
`Content-Type` header, but the `iconv-lite` module does not support it OR the
|
||||
parser does not support it. The charset is contained in the message as well
|
||||
as in the `charset` property. The `status` property is set to `415`, the
|
||||
`type` property is set to `'charset.unsupported'`, and the `charset` property
|
||||
is set to the charset that is unsupported.
|
||||
|
||||
### unsupported content encoding "bogus"
|
||||
|
||||
This error will occur when the request had a `Content-Encoding` header that
|
||||
contained an unsupported encoding. The encoding is contained in the message
|
||||
as well as in the `encoding` property. The `status` property is set to `415`,
|
||||
the `type` property is set to `'encoding.unsupported'`, and the `encoding`
|
||||
property is set to the encoding that is unsupported.
|
||||
|
||||
### The input exceeded the depth
|
||||
|
||||
This error occurs when using `bodyParser.urlencoded` with the `extended` property set to `true` and the input exceeds the configured `depth` option. The `status` property is set to `400`. It is recommended to review the `depth` option and evaluate if it requires a higher value. When the `depth` option is set to `32` (default value), the error will not be thrown.
|
||||
|
||||
## Examples
|
||||
|
||||
### Express/Connect top-level generic
|
||||
|
||||
This example demonstrates adding a generic JSON and URL-encoded parser as a
|
||||
top-level middleware, which will parse the bodies of all incoming requests.
|
||||
This is the simplest setup.
|
||||
|
||||
```js
|
||||
var express = require('express')
|
||||
var bodyParser = require('body-parser')
|
||||
|
||||
var app = express()
|
||||
|
||||
// parse application/x-www-form-urlencoded
|
||||
app.use(bodyParser.urlencoded({ extended: false }))
|
||||
|
||||
// parse application/json
|
||||
app.use(bodyParser.json())
|
||||
|
||||
app.use(function (req, res) {
|
||||
res.setHeader('Content-Type', 'text/plain')
|
||||
res.write('you posted:\n')
|
||||
res.end(JSON.stringify(req.body, null, 2))
|
||||
})
|
||||
```
|
||||
|
||||
### Express route-specific
|
||||
|
||||
This example demonstrates adding body parsers specifically to the routes that
|
||||
need them. In general, this is the most recommended way to use body-parser with
|
||||
Express.
|
||||
|
||||
```js
|
||||
var express = require('express')
|
||||
var bodyParser = require('body-parser')
|
||||
|
||||
var app = express()
|
||||
|
||||
// create application/json parser
|
||||
var jsonParser = bodyParser.json()
|
||||
|
||||
// create application/x-www-form-urlencoded parser
|
||||
var urlencodedParser = bodyParser.urlencoded({ extended: false })
|
||||
|
||||
// POST /login gets urlencoded bodies
|
||||
app.post('/login', urlencodedParser, function (req, res) {
|
||||
res.send('welcome, ' + req.body.username)
|
||||
})
|
||||
|
||||
// POST /api/users gets JSON bodies
|
||||
app.post('/api/users', jsonParser, function (req, res) {
|
||||
// create user in req.body
|
||||
})
|
||||
```
|
||||
|
||||
### Change accepted type for parsers
|
||||
|
||||
All the parsers accept a `type` option which allows you to change the
|
||||
`Content-Type` that the middleware will parse.
|
||||
|
||||
```js
|
||||
var express = require('express')
|
||||
var bodyParser = require('body-parser')
|
||||
|
||||
var app = express()
|
||||
|
||||
// parse various different custom JSON types as JSON
|
||||
app.use(bodyParser.json({ type: 'application/*+json' }))
|
||||
|
||||
// parse some custom thing into a Buffer
|
||||
app.use(bodyParser.raw({ type: 'application/vnd.custom-type' }))
|
||||
|
||||
// parse an HTML body into a string
|
||||
app.use(bodyParser.text({ type: 'text/html' }))
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
[MIT](LICENSE)
|
||||
|
||||
[ci-image]: https://badgen.net/github/checks/expressjs/body-parser/master?label=ci
|
||||
[ci-url]: https://github.com/expressjs/body-parser/actions/workflows/ci.yml
|
||||
[coveralls-image]: https://badgen.net/coveralls/c/github/expressjs/body-parser/master
|
||||
[coveralls-url]: https://coveralls.io/r/expressjs/body-parser?branch=master
|
||||
[node-version-image]: https://badgen.net/npm/node/body-parser
|
||||
[node-version-url]: https://nodejs.org/en/download
|
||||
[npm-downloads-image]: https://badgen.net/npm/dm/body-parser
|
||||
[npm-url]: https://npmjs.org/package/body-parser
|
||||
[npm-version-image]: https://badgen.net/npm/v/body-parser
|
||||
[ossf-scorecard-badge]: https://api.scorecard.dev/projects/github.com/expressjs/body-parser/badge
|
||||
[ossf-scorecard-visualizer]: https://ossf.github.io/scorecard-visualizer/#/projects/github.com/expressjs/body-parser
|
25
web/status/node_modules/body-parser/SECURITY.md
generated
vendored
25
web/status/node_modules/body-parser/SECURITY.md
generated
vendored
@ -1,25 +0,0 @@
|
||||
# Security Policies and Procedures
|
||||
|
||||
## Reporting a Bug
|
||||
|
||||
The Express team and community take all security bugs seriously. Thank you
|
||||
for improving the security of Express. We appreciate your efforts and
|
||||
responsible disclosure and will make every effort to acknowledge your
|
||||
contributions.
|
||||
|
||||
Report security bugs by emailing the current owner(s) of `body-parser`. This
|
||||
information can be found in the npm registry using the command
|
||||
`npm owner ls body-parser`.
|
||||
If unsure or unable to get the information from the above, open an issue
|
||||
in the [project issue tracker](https://github.com/expressjs/body-parser/issues)
|
||||
asking for the current contact information.
|
||||
|
||||
To ensure the timely response to your report, please ensure that the entirety
|
||||
of the report is contained within the email body and not solely behind a web
|
||||
link or an attachment.
|
||||
|
||||
At least one owner will acknowledge your email within 48 hours, and will send a
|
||||
more detailed response within 48 hours indicating the next steps in handling
|
||||
your report. After the initial reply to your report, the owners will
|
||||
endeavor to keep you informed of the progress towards a fix and full
|
||||
announcement, and may ask for additional information or guidance.
|
156
web/status/node_modules/body-parser/index.js
generated
vendored
156
web/status/node_modules/body-parser/index.js
generated
vendored
@ -1,156 +0,0 @@
|
||||
/*!
|
||||
* body-parser
|
||||
* Copyright(c) 2014-2015 Douglas Christopher Wilson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
* @private
|
||||
*/
|
||||
|
||||
var deprecate = require('depd')('body-parser')
|
||||
|
||||
/**
|
||||
* Cache of loaded parsers.
|
||||
* @private
|
||||
*/
|
||||
|
||||
var parsers = Object.create(null)
|
||||
|
||||
/**
|
||||
* @typedef Parsers
|
||||
* @type {function}
|
||||
* @property {function} json
|
||||
* @property {function} raw
|
||||
* @property {function} text
|
||||
* @property {function} urlencoded
|
||||
*/
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
* @type {Parsers}
|
||||
*/
|
||||
|
||||
exports = module.exports = deprecate.function(bodyParser,
|
||||
'bodyParser: use individual json/urlencoded middlewares')
|
||||
|
||||
/**
|
||||
* JSON parser.
|
||||
* @public
|
||||
*/
|
||||
|
||||
Object.defineProperty(exports, 'json', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get: createParserGetter('json')
|
||||
})
|
||||
|
||||
/**
|
||||
* Raw parser.
|
||||
* @public
|
||||
*/
|
||||
|
||||
Object.defineProperty(exports, 'raw', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get: createParserGetter('raw')
|
||||
})
|
||||
|
||||
/**
|
||||
* Text parser.
|
||||
* @public
|
||||
*/
|
||||
|
||||
Object.defineProperty(exports, 'text', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get: createParserGetter('text')
|
||||
})
|
||||
|
||||
/**
|
||||
* URL-encoded parser.
|
||||
* @public
|
||||
*/
|
||||
|
||||
Object.defineProperty(exports, 'urlencoded', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get: createParserGetter('urlencoded')
|
||||
})
|
||||
|
||||
/**
|
||||
* Create a middleware to parse json and urlencoded bodies.
|
||||
*
|
||||
* @param {object} [options]
|
||||
* @return {function}
|
||||
* @deprecated
|
||||
* @public
|
||||
*/
|
||||
|
||||
function bodyParser (options) {
|
||||
// use default type for parsers
|
||||
var opts = Object.create(options || null, {
|
||||
type: {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
value: undefined,
|
||||
writable: true
|
||||
}
|
||||
})
|
||||
|
||||
var _urlencoded = exports.urlencoded(opts)
|
||||
var _json = exports.json(opts)
|
||||
|
||||
return function bodyParser (req, res, next) {
|
||||
_json(req, res, function (err) {
|
||||
if (err) return next(err)
|
||||
_urlencoded(req, res, next)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a getter for loading a parser.
|
||||
* @private
|
||||
*/
|
||||
|
||||
function createParserGetter (name) {
|
||||
return function get () {
|
||||
return loadParser(name)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load a parser module.
|
||||
* @private
|
||||
*/
|
||||
|
||||
function loadParser (parserName) {
|
||||
var parser = parsers[parserName]
|
||||
|
||||
if (parser !== undefined) {
|
||||
return parser
|
||||
}
|
||||
|
||||
// this uses a switch for static require analysis
|
||||
switch (parserName) {
|
||||
case 'json':
|
||||
parser = require('./lib/types/json')
|
||||
break
|
||||
case 'raw':
|
||||
parser = require('./lib/types/raw')
|
||||
break
|
||||
case 'text':
|
||||
parser = require('./lib/types/text')
|
||||
break
|
||||
case 'urlencoded':
|
||||
parser = require('./lib/types/urlencoded')
|
||||
break
|
||||
}
|
||||
|
||||
// store to prevent invoking require()
|
||||
return (parsers[parserName] = parser)
|
||||
}
|
205
web/status/node_modules/body-parser/lib/read.js
generated
vendored
205
web/status/node_modules/body-parser/lib/read.js
generated
vendored
@ -1,205 +0,0 @@
|
||||
/*!
|
||||
* body-parser
|
||||
* Copyright(c) 2014-2015 Douglas Christopher Wilson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
* @private
|
||||
*/
|
||||
|
||||
var createError = require('http-errors')
|
||||
var destroy = require('destroy')
|
||||
var getBody = require('raw-body')
|
||||
var iconv = require('iconv-lite')
|
||||
var onFinished = require('on-finished')
|
||||
var unpipe = require('unpipe')
|
||||
var zlib = require('zlib')
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
module.exports = read
|
||||
|
||||
/**
|
||||
* Read a request into a buffer and parse.
|
||||
*
|
||||
* @param {object} req
|
||||
* @param {object} res
|
||||
* @param {function} next
|
||||
* @param {function} parse
|
||||
* @param {function} debug
|
||||
* @param {object} options
|
||||
* @private
|
||||
*/
|
||||
|
||||
function read (req, res, next, parse, debug, options) {
|
||||
var length
|
||||
var opts = options
|
||||
var stream
|
||||
|
||||
// flag as parsed
|
||||
req._body = true
|
||||
|
||||
// read options
|
||||
var encoding = opts.encoding !== null
|
||||
? opts.encoding
|
||||
: null
|
||||
var verify = opts.verify
|
||||
|
||||
try {
|
||||
// get the content stream
|
||||
stream = contentstream(req, debug, opts.inflate)
|
||||
length = stream.length
|
||||
stream.length = undefined
|
||||
} catch (err) {
|
||||
return next(err)
|
||||
}
|
||||
|
||||
// set raw-body options
|
||||
opts.length = length
|
||||
opts.encoding = verify
|
||||
? null
|
||||
: encoding
|
||||
|
||||
// assert charset is supported
|
||||
if (opts.encoding === null && encoding !== null && !iconv.encodingExists(encoding)) {
|
||||
return next(createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', {
|
||||
charset: encoding.toLowerCase(),
|
||||
type: 'charset.unsupported'
|
||||
}))
|
||||
}
|
||||
|
||||
// read body
|
||||
debug('read body')
|
||||
getBody(stream, opts, function (error, body) {
|
||||
if (error) {
|
||||
var _error
|
||||
|
||||
if (error.type === 'encoding.unsupported') {
|
||||
// echo back charset
|
||||
_error = createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', {
|
||||
charset: encoding.toLowerCase(),
|
||||
type: 'charset.unsupported'
|
||||
})
|
||||
} else {
|
||||
// set status code on error
|
||||
_error = createError(400, error)
|
||||
}
|
||||
|
||||
// unpipe from stream and destroy
|
||||
if (stream !== req) {
|
||||
unpipe(req)
|
||||
destroy(stream, true)
|
||||
}
|
||||
|
||||
// read off entire request
|
||||
dump(req, function onfinished () {
|
||||
next(createError(400, _error))
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// verify
|
||||
if (verify) {
|
||||
try {
|
||||
debug('verify body')
|
||||
verify(req, res, body, encoding)
|
||||
} catch (err) {
|
||||
next(createError(403, err, {
|
||||
body: body,
|
||||
type: err.type || 'entity.verify.failed'
|
||||
}))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// parse
|
||||
var str = body
|
||||
try {
|
||||
debug('parse body')
|
||||
str = typeof body !== 'string' && encoding !== null
|
||||
? iconv.decode(body, encoding)
|
||||
: body
|
||||
req.body = parse(str)
|
||||
} catch (err) {
|
||||
next(createError(400, err, {
|
||||
body: str,
|
||||
type: err.type || 'entity.parse.failed'
|
||||
}))
|
||||
return
|
||||
}
|
||||
|
||||
next()
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the content stream of the request.
|
||||
*
|
||||
* @param {object} req
|
||||
* @param {function} debug
|
||||
* @param {boolean} [inflate=true]
|
||||
* @return {object}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function contentstream (req, debug, inflate) {
|
||||
var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase()
|
||||
var length = req.headers['content-length']
|
||||
var stream
|
||||
|
||||
debug('content-encoding "%s"', encoding)
|
||||
|
||||
if (inflate === false && encoding !== 'identity') {
|
||||
throw createError(415, 'content encoding unsupported', {
|
||||
encoding: encoding,
|
||||
type: 'encoding.unsupported'
|
||||
})
|
||||
}
|
||||
|
||||
switch (encoding) {
|
||||
case 'deflate':
|
||||
stream = zlib.createInflate()
|
||||
debug('inflate body')
|
||||
req.pipe(stream)
|
||||
break
|
||||
case 'gzip':
|
||||
stream = zlib.createGunzip()
|
||||
debug('gunzip body')
|
||||
req.pipe(stream)
|
||||
break
|
||||
case 'identity':
|
||||
stream = req
|
||||
stream.length = length
|
||||
break
|
||||
default:
|
||||
throw createError(415, 'unsupported content encoding "' + encoding + '"', {
|
||||
encoding: encoding,
|
||||
type: 'encoding.unsupported'
|
||||
})
|
||||
}
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
/**
|
||||
* Dump the contents of a request.
|
||||
*
|
||||
* @param {object} req
|
||||
* @param {function} callback
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function dump (req, callback) {
|
||||
if (onFinished.isFinished(req)) {
|
||||
callback(null)
|
||||
} else {
|
||||
onFinished(req, callback)
|
||||
req.resume()
|
||||
}
|
||||
}
|
247
web/status/node_modules/body-parser/lib/types/json.js
generated
vendored
247
web/status/node_modules/body-parser/lib/types/json.js
generated
vendored
@ -1,247 +0,0 @@
|
||||
/*!
|
||||
* body-parser
|
||||
* Copyright(c) 2014 Jonathan Ong
|
||||
* Copyright(c) 2014-2015 Douglas Christopher Wilson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
* @private
|
||||
*/
|
||||
|
||||
var bytes = require('bytes')
|
||||
var contentType = require('content-type')
|
||||
var createError = require('http-errors')
|
||||
var debug = require('debug')('body-parser:json')
|
||||
var read = require('../read')
|
||||
var typeis = require('type-is')
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
module.exports = json
|
||||
|
||||
/**
|
||||
* RegExp to match the first non-space in a string.
|
||||
*
|
||||
* Allowed whitespace is defined in RFC 7159:
|
||||
*
|
||||
* ws = *(
|
||||
* %x20 / ; Space
|
||||
* %x09 / ; Horizontal tab
|
||||
* %x0A / ; Line feed or New line
|
||||
* %x0D ) ; Carriage return
|
||||
*/
|
||||
|
||||
var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*([^\x20\x09\x0a\x0d])/ // eslint-disable-line no-control-regex
|
||||
|
||||
var JSON_SYNTAX_CHAR = '#'
|
||||
var JSON_SYNTAX_REGEXP = /#+/g
|
||||
|
||||
/**
|
||||
* Create a middleware to parse JSON bodies.
|
||||
*
|
||||
* @param {object} [options]
|
||||
* @return {function}
|
||||
* @public
|
||||
*/
|
||||
|
||||
function json (options) {
|
||||
var opts = options || {}
|
||||
|
||||
var limit = typeof opts.limit !== 'number'
|
||||
? bytes.parse(opts.limit || '100kb')
|
||||
: opts.limit
|
||||
var inflate = opts.inflate !== false
|
||||
var reviver = opts.reviver
|
||||
var strict = opts.strict !== false
|
||||
var type = opts.type || 'application/json'
|
||||
var verify = opts.verify || false
|
||||
|
||||
if (verify !== false && typeof verify !== 'function') {
|
||||
throw new TypeError('option verify must be function')
|
||||
}
|
||||
|
||||
// create the appropriate type checking function
|
||||
var shouldParse = typeof type !== 'function'
|
||||
? typeChecker(type)
|
||||
: type
|
||||
|
||||
function parse (body) {
|
||||
if (body.length === 0) {
|
||||
// special-case empty json body, as it's a common client-side mistake
|
||||
// TODO: maybe make this configurable or part of "strict" option
|
||||
return {}
|
||||
}
|
||||
|
||||
if (strict) {
|
||||
var first = firstchar(body)
|
||||
|
||||
if (first !== '{' && first !== '[') {
|
||||
debug('strict violation')
|
||||
throw createStrictSyntaxError(body, first)
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
debug('parse json')
|
||||
return JSON.parse(body, reviver)
|
||||
} catch (e) {
|
||||
throw normalizeJsonSyntaxError(e, {
|
||||
message: e.message,
|
||||
stack: e.stack
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return function jsonParser (req, res, next) {
|
||||
if (req._body) {
|
||||
debug('body already parsed')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
req.body = req.body || {}
|
||||
|
||||
// skip requests without bodies
|
||||
if (!typeis.hasBody(req)) {
|
||||
debug('skip empty body')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
debug('content-type %j', req.headers['content-type'])
|
||||
|
||||
// determine if request should be parsed
|
||||
if (!shouldParse(req)) {
|
||||
debug('skip parsing')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
// assert charset per RFC 7159 sec 8.1
|
||||
var charset = getCharset(req) || 'utf-8'
|
||||
if (charset.slice(0, 4) !== 'utf-') {
|
||||
debug('invalid charset')
|
||||
next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', {
|
||||
charset: charset,
|
||||
type: 'charset.unsupported'
|
||||
}))
|
||||
return
|
||||
}
|
||||
|
||||
// read
|
||||
read(req, res, next, parse, debug, {
|
||||
encoding: charset,
|
||||
inflate: inflate,
|
||||
limit: limit,
|
||||
verify: verify
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create strict violation syntax error matching native error.
|
||||
*
|
||||
* @param {string} str
|
||||
* @param {string} char
|
||||
* @return {Error}
|
||||
* @private
|
||||
*/
|
||||
|
||||
function createStrictSyntaxError (str, char) {
|
||||
var index = str.indexOf(char)
|
||||
var partial = ''
|
||||
|
||||
if (index !== -1) {
|
||||
partial = str.substring(0, index) + JSON_SYNTAX_CHAR
|
||||
|
||||
for (var i = index + 1; i < str.length; i++) {
|
||||
partial += JSON_SYNTAX_CHAR
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
JSON.parse(partial); /* istanbul ignore next */ throw new SyntaxError('strict violation')
|
||||
} catch (e) {
|
||||
return normalizeJsonSyntaxError(e, {
|
||||
message: e.message.replace(JSON_SYNTAX_REGEXP, function (placeholder) {
|
||||
return str.substring(index, index + placeholder.length)
|
||||
}),
|
||||
stack: e.stack
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the first non-whitespace character in a string.
|
||||
*
|
||||
* @param {string} str
|
||||
* @return {function}
|
||||
* @private
|
||||
*/
|
||||
|
||||
function firstchar (str) {
|
||||
var match = FIRST_CHAR_REGEXP.exec(str)
|
||||
|
||||
return match
|
||||
? match[1]
|
||||
: undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the charset of a request.
|
||||
*
|
||||
* @param {object} req
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function getCharset (req) {
|
||||
try {
|
||||
return (contentType.parse(req).parameters.charset || '').toLowerCase()
|
||||
} catch (e) {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize a SyntaxError for JSON.parse.
|
||||
*
|
||||
* @param {SyntaxError} error
|
||||
* @param {object} obj
|
||||
* @return {SyntaxError}
|
||||
*/
|
||||
|
||||
function normalizeJsonSyntaxError (error, obj) {
|
||||
var keys = Object.getOwnPropertyNames(error)
|
||||
|
||||
for (var i = 0; i < keys.length; i++) {
|
||||
var key = keys[i]
|
||||
if (key !== 'stack' && key !== 'message') {
|
||||
delete error[key]
|
||||
}
|
||||
}
|
||||
|
||||
// replace stack before message for Node.js 0.10 and below
|
||||
error.stack = obj.stack.replace(error.message, obj.message)
|
||||
error.message = obj.message
|
||||
|
||||
return error
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the simple type checker.
|
||||
*
|
||||
* @param {string} type
|
||||
* @return {function}
|
||||
*/
|
||||
|
||||
function typeChecker (type) {
|
||||
return function checkType (req) {
|
||||
return Boolean(typeis(req, type))
|
||||
}
|
||||
}
|
101
web/status/node_modules/body-parser/lib/types/raw.js
generated
vendored
101
web/status/node_modules/body-parser/lib/types/raw.js
generated
vendored
@ -1,101 +0,0 @@
|
||||
/*!
|
||||
* body-parser
|
||||
* Copyright(c) 2014-2015 Douglas Christopher Wilson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
var bytes = require('bytes')
|
||||
var debug = require('debug')('body-parser:raw')
|
||||
var read = require('../read')
|
||||
var typeis = require('type-is')
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
module.exports = raw
|
||||
|
||||
/**
|
||||
* Create a middleware to parse raw bodies.
|
||||
*
|
||||
* @param {object} [options]
|
||||
* @return {function}
|
||||
* @api public
|
||||
*/
|
||||
|
||||
function raw (options) {
|
||||
var opts = options || {}
|
||||
|
||||
var inflate = opts.inflate !== false
|
||||
var limit = typeof opts.limit !== 'number'
|
||||
? bytes.parse(opts.limit || '100kb')
|
||||
: opts.limit
|
||||
var type = opts.type || 'application/octet-stream'
|
||||
var verify = opts.verify || false
|
||||
|
||||
if (verify !== false && typeof verify !== 'function') {
|
||||
throw new TypeError('option verify must be function')
|
||||
}
|
||||
|
||||
// create the appropriate type checking function
|
||||
var shouldParse = typeof type !== 'function'
|
||||
? typeChecker(type)
|
||||
: type
|
||||
|
||||
function parse (buf) {
|
||||
return buf
|
||||
}
|
||||
|
||||
return function rawParser (req, res, next) {
|
||||
if (req._body) {
|
||||
debug('body already parsed')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
req.body = req.body || {}
|
||||
|
||||
// skip requests without bodies
|
||||
if (!typeis.hasBody(req)) {
|
||||
debug('skip empty body')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
debug('content-type %j', req.headers['content-type'])
|
||||
|
||||
// determine if request should be parsed
|
||||
if (!shouldParse(req)) {
|
||||
debug('skip parsing')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
// read
|
||||
read(req, res, next, parse, debug, {
|
||||
encoding: null,
|
||||
inflate: inflate,
|
||||
limit: limit,
|
||||
verify: verify
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the simple type checker.
|
||||
*
|
||||
* @param {string} type
|
||||
* @return {function}
|
||||
*/
|
||||
|
||||
function typeChecker (type) {
|
||||
return function checkType (req) {
|
||||
return Boolean(typeis(req, type))
|
||||
}
|
||||
}
|
121
web/status/node_modules/body-parser/lib/types/text.js
generated
vendored
121
web/status/node_modules/body-parser/lib/types/text.js
generated
vendored
@ -1,121 +0,0 @@
|
||||
/*!
|
||||
* body-parser
|
||||
* Copyright(c) 2014-2015 Douglas Christopher Wilson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
var bytes = require('bytes')
|
||||
var contentType = require('content-type')
|
||||
var debug = require('debug')('body-parser:text')
|
||||
var read = require('../read')
|
||||
var typeis = require('type-is')
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
module.exports = text
|
||||
|
||||
/**
|
||||
* Create a middleware to parse text bodies.
|
||||
*
|
||||
* @param {object} [options]
|
||||
* @return {function}
|
||||
* @api public
|
||||
*/
|
||||
|
||||
function text (options) {
|
||||
var opts = options || {}
|
||||
|
||||
var defaultCharset = opts.defaultCharset || 'utf-8'
|
||||
var inflate = opts.inflate !== false
|
||||
var limit = typeof opts.limit !== 'number'
|
||||
? bytes.parse(opts.limit || '100kb')
|
||||
: opts.limit
|
||||
var type = opts.type || 'text/plain'
|
||||
var verify = opts.verify || false
|
||||
|
||||
if (verify !== false && typeof verify !== 'function') {
|
||||
throw new TypeError('option verify must be function')
|
||||
}
|
||||
|
||||
// create the appropriate type checking function
|
||||
var shouldParse = typeof type !== 'function'
|
||||
? typeChecker(type)
|
||||
: type
|
||||
|
||||
function parse (buf) {
|
||||
return buf
|
||||
}
|
||||
|
||||
return function textParser (req, res, next) {
|
||||
if (req._body) {
|
||||
debug('body already parsed')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
req.body = req.body || {}
|
||||
|
||||
// skip requests without bodies
|
||||
if (!typeis.hasBody(req)) {
|
||||
debug('skip empty body')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
debug('content-type %j', req.headers['content-type'])
|
||||
|
||||
// determine if request should be parsed
|
||||
if (!shouldParse(req)) {
|
||||
debug('skip parsing')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
// get charset
|
||||
var charset = getCharset(req) || defaultCharset
|
||||
|
||||
// read
|
||||
read(req, res, next, parse, debug, {
|
||||
encoding: charset,
|
||||
inflate: inflate,
|
||||
limit: limit,
|
||||
verify: verify
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the charset of a request.
|
||||
*
|
||||
* @param {object} req
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function getCharset (req) {
|
||||
try {
|
||||
return (contentType.parse(req).parameters.charset || '').toLowerCase()
|
||||
} catch (e) {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the simple type checker.
|
||||
*
|
||||
* @param {string} type
|
||||
* @return {function}
|
||||
*/
|
||||
|
||||
function typeChecker (type) {
|
||||
return function checkType (req) {
|
||||
return Boolean(typeis(req, type))
|
||||
}
|
||||
}
|
307
web/status/node_modules/body-parser/lib/types/urlencoded.js
generated
vendored
307
web/status/node_modules/body-parser/lib/types/urlencoded.js
generated
vendored
@ -1,307 +0,0 @@
|
||||
/*!
|
||||
* body-parser
|
||||
* Copyright(c) 2014 Jonathan Ong
|
||||
* Copyright(c) 2014-2015 Douglas Christopher Wilson
|
||||
* MIT Licensed
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
* @private
|
||||
*/
|
||||
|
||||
var bytes = require('bytes')
|
||||
var contentType = require('content-type')
|
||||
var createError = require('http-errors')
|
||||
var debug = require('debug')('body-parser:urlencoded')
|
||||
var deprecate = require('depd')('body-parser')
|
||||
var read = require('../read')
|
||||
var typeis = require('type-is')
|
||||
|
||||
/**
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
module.exports = urlencoded
|
||||
|
||||
/**
|
||||
* Cache of parser modules.
|
||||
*/
|
||||
|
||||
var parsers = Object.create(null)
|
||||
|
||||
/**
|
||||
* Create a middleware to parse urlencoded bodies.
|
||||
*
|
||||
* @param {object} [options]
|
||||
* @return {function}
|
||||
* @public
|
||||
*/
|
||||
|
||||
function urlencoded (options) {
|
||||
var opts = options || {}
|
||||
|
||||
// notice because option default will flip in next major
|
||||
if (opts.extended === undefined) {
|
||||
deprecate('undefined extended: provide extended option')
|
||||
}
|
||||
|
||||
var extended = opts.extended !== false
|
||||
var inflate = opts.inflate !== false
|
||||
var limit = typeof opts.limit !== 'number'
|
||||
? bytes.parse(opts.limit || '100kb')
|
||||
: opts.limit
|
||||
var type = opts.type || 'application/x-www-form-urlencoded'
|
||||
var verify = opts.verify || false
|
||||
var depth = typeof opts.depth !== 'number'
|
||||
? Number(opts.depth || 32)
|
||||
: opts.depth
|
||||
|
||||
if (verify !== false && typeof verify !== 'function') {
|
||||
throw new TypeError('option verify must be function')
|
||||
}
|
||||
|
||||
// create the appropriate query parser
|
||||
var queryparse = extended
|
||||
? extendedparser(opts)
|
||||
: simpleparser(opts)
|
||||
|
||||
// create the appropriate type checking function
|
||||
var shouldParse = typeof type !== 'function'
|
||||
? typeChecker(type)
|
||||
: type
|
||||
|
||||
function parse (body) {
|
||||
return body.length
|
||||
? queryparse(body)
|
||||
: {}
|
||||
}
|
||||
|
||||
return function urlencodedParser (req, res, next) {
|
||||
if (req._body) {
|
||||
debug('body already parsed')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
req.body = req.body || {}
|
||||
|
||||
// skip requests without bodies
|
||||
if (!typeis.hasBody(req)) {
|
||||
debug('skip empty body')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
debug('content-type %j', req.headers['content-type'])
|
||||
|
||||
// determine if request should be parsed
|
||||
if (!shouldParse(req)) {
|
||||
debug('skip parsing')
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
// assert charset
|
||||
var charset = getCharset(req) || 'utf-8'
|
||||
if (charset !== 'utf-8') {
|
||||
debug('invalid charset')
|
||||
next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', {
|
||||
charset: charset,
|
||||
type: 'charset.unsupported'
|
||||
}))
|
||||
return
|
||||
}
|
||||
|
||||
// read
|
||||
read(req, res, next, parse, debug, {
|
||||
debug: debug,
|
||||
encoding: charset,
|
||||
inflate: inflate,
|
||||
limit: limit,
|
||||
verify: verify,
|
||||
depth: depth
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the extended query parser.
|
||||
*
|
||||
* @param {object} options
|
||||
*/
|
||||
|
||||
function extendedparser (options) {
|
||||
var parameterLimit = options.parameterLimit !== undefined
|
||||
? options.parameterLimit
|
||||
: 1000
|
||||
|
||||
var depth = typeof options.depth !== 'number'
|
||||
? Number(options.depth || 32)
|
||||
: options.depth
|
||||
var parse = parser('qs')
|
||||
|
||||
if (isNaN(parameterLimit) || parameterLimit < 1) {
|
||||
throw new TypeError('option parameterLimit must be a positive number')
|
||||
}
|
||||
|
||||
if (isNaN(depth) || depth < 0) {
|
||||
throw new TypeError('option depth must be a zero or a positive number')
|
||||
}
|
||||
|
||||
if (isFinite(parameterLimit)) {
|
||||
parameterLimit = parameterLimit | 0
|
||||
}
|
||||
|
||||
return function queryparse (body) {
|
||||
var paramCount = parameterCount(body, parameterLimit)
|
||||
|
||||
if (paramCount === undefined) {
|
||||
debug('too many parameters')
|
||||
throw createError(413, 'too many parameters', {
|
||||
type: 'parameters.too.many'
|
||||
})
|
||||
}
|
||||
|
||||
var arrayLimit = Math.max(100, paramCount)
|
||||
|
||||
debug('parse extended urlencoding')
|
||||
try {
|
||||
return parse(body, {
|
||||
allowPrototypes: true,
|
||||
arrayLimit: arrayLimit,
|
||||
depth: depth,
|
||||
strictDepth: true,
|
||||
parameterLimit: parameterLimit
|
||||
})
|
||||
} catch (err) {
|
||||
if (err instanceof RangeError) {
|
||||
throw createError(400, 'The input exceeded the depth', {
|
||||
type: 'querystring.parse.rangeError'
|
||||
})
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the charset of a request.
|
||||
*
|
||||
* @param {object} req
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function getCharset (req) {
|
||||
try {
|
||||
return (contentType.parse(req).parameters.charset || '').toLowerCase()
|
||||
} catch (e) {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Count the number of parameters, stopping once limit reached
|
||||
*
|
||||
* @param {string} body
|
||||
* @param {number} limit
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function parameterCount (body, limit) {
|
||||
var count = 0
|
||||
var index = 0
|
||||
|
||||
while ((index = body.indexOf('&', index)) !== -1) {
|
||||
count++
|
||||
index++
|
||||
|
||||
if (count === limit) {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
return count
|
||||
}
|
||||
|
||||
/**
|
||||
* Get parser for module name dynamically.
|
||||
*
|
||||
* @param {string} name
|
||||
* @return {function}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function parser (name) {
|
||||
var mod = parsers[name]
|
||||
|
||||
if (mod !== undefined) {
|
||||
return mod.parse
|
||||
}
|
||||
|
||||
// this uses a switch for static require analysis
|
||||
switch (name) {
|
||||
case 'qs':
|
||||
mod = require('qs')
|
||||
break
|
||||
case 'querystring':
|
||||
mod = require('querystring')
|
||||
break
|
||||
}
|
||||
|
||||
// store to prevent invoking require()
|
||||
parsers[name] = mod
|
||||
|
||||
return mod.parse
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the simple query parser.
|
||||
*
|
||||
* @param {object} options
|
||||
*/
|
||||
|
||||
function simpleparser (options) {
|
||||
var parameterLimit = options.parameterLimit !== undefined
|
||||
? options.parameterLimit
|
||||
: 1000
|
||||
var parse = parser('querystring')
|
||||
|
||||
if (isNaN(parameterLimit) || parameterLimit < 1) {
|
||||
throw new TypeError('option parameterLimit must be a positive number')
|
||||
}
|
||||
|
||||
if (isFinite(parameterLimit)) {
|
||||
parameterLimit = parameterLimit | 0
|
||||
}
|
||||
|
||||
return function queryparse (body) {
|
||||
var paramCount = parameterCount(body, parameterLimit)
|
||||
|
||||
if (paramCount === undefined) {
|
||||
debug('too many parameters')
|
||||
throw createError(413, 'too many parameters', {
|
||||
type: 'parameters.too.many'
|
||||
})
|
||||
}
|
||||
|
||||
debug('parse urlencoding')
|
||||
return parse(body, undefined, undefined, { maxKeys: parameterLimit })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the simple type checker.
|
||||
*
|
||||
* @param {string} type
|
||||
* @return {function}
|
||||
*/
|
||||
|
||||
function typeChecker (type) {
|
||||
return function checkType (req) {
|
||||
return Boolean(typeis(req, type))
|
||||
}
|
||||
}
|
56
web/status/node_modules/body-parser/package.json
generated
vendored
56
web/status/node_modules/body-parser/package.json
generated
vendored
@ -1,56 +0,0 @@
|
||||
{
|
||||
"name": "body-parser",
|
||||
"description": "Node.js body parsing middleware",
|
||||
"version": "1.20.3",
|
||||
"contributors": [
|
||||
"Douglas Christopher Wilson <doug@somethingdoug.com>",
|
||||
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)"
|
||||
],
|
||||
"license": "MIT",
|
||||
"repository": "expressjs/body-parser",
|
||||
"dependencies": {
|
||||
"bytes": "3.1.2",
|
||||
"content-type": "~1.0.5",
|
||||
"debug": "2.6.9",
|
||||
"depd": "2.0.0",
|
||||
"destroy": "1.2.0",
|
||||
"http-errors": "2.0.0",
|
||||
"iconv-lite": "0.4.24",
|
||||
"on-finished": "2.4.1",
|
||||
"qs": "6.13.0",
|
||||
"raw-body": "2.5.2",
|
||||
"type-is": "~1.6.18",
|
||||
"unpipe": "1.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"eslint": "8.34.0",
|
||||
"eslint-config-standard": "14.1.1",
|
||||
"eslint-plugin-import": "2.27.5",
|
||||
"eslint-plugin-markdown": "3.0.0",
|
||||
"eslint-plugin-node": "11.1.0",
|
||||
"eslint-plugin-promise": "6.1.1",
|
||||
"eslint-plugin-standard": "4.1.0",
|
||||
"methods": "1.1.2",
|
||||
"mocha": "10.2.0",
|
||||
"nyc": "15.1.0",
|
||||
"safe-buffer": "5.2.1",
|
||||
"supertest": "6.3.3"
|
||||
},
|
||||
"files": [
|
||||
"lib/",
|
||||
"LICENSE",
|
||||
"HISTORY.md",
|
||||
"SECURITY.md",
|
||||
"index.js"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 0.8",
|
||||
"npm": "1.2.8000 || >= 1.4.16"
|
||||
},
|
||||
"scripts": {
|
||||
"lint": "eslint .",
|
||||
"test": "mocha --require test/support/env --reporter spec --check-leaks --bail test/",
|
||||
"test-ci": "nyc --reporter=lcov --reporter=text npm test",
|
||||
"test-cov": "nyc --reporter=html --reporter=text npm test"
|
||||
}
|
||||
}
|
21
web/status/node_modules/brace-expansion/LICENSE
generated
vendored
21
web/status/node_modules/brace-expansion/LICENSE
generated
vendored
@ -1,21 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
129
web/status/node_modules/brace-expansion/README.md
generated
vendored
129
web/status/node_modules/brace-expansion/README.md
generated
vendored
@ -1,129 +0,0 @@
|
||||
# brace-expansion
|
||||
|
||||
[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html),
|
||||
as known from sh/bash, in JavaScript.
|
||||
|
||||
[](http://travis-ci.org/juliangruber/brace-expansion)
|
||||
[](https://www.npmjs.org/package/brace-expansion)
|
||||
[](https://greenkeeper.io/)
|
||||
|
||||
[](https://ci.testling.com/juliangruber/brace-expansion)
|
||||
|
||||
## Example
|
||||
|
||||
```js
|
||||
var expand = require('brace-expansion');
|
||||
|
||||
expand('file-{a,b,c}.jpg')
|
||||
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
|
||||
|
||||
expand('-v{,,}')
|
||||
// => ['-v', '-v', '-v']
|
||||
|
||||
expand('file{0..2}.jpg')
|
||||
// => ['file0.jpg', 'file1.jpg', 'file2.jpg']
|
||||
|
||||
expand('file-{a..c}.jpg')
|
||||
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
|
||||
|
||||
expand('file{2..0}.jpg')
|
||||
// => ['file2.jpg', 'file1.jpg', 'file0.jpg']
|
||||
|
||||
expand('file{0..4..2}.jpg')
|
||||
// => ['file0.jpg', 'file2.jpg', 'file4.jpg']
|
||||
|
||||
expand('file-{a..e..2}.jpg')
|
||||
// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg']
|
||||
|
||||
expand('file{00..10..5}.jpg')
|
||||
// => ['file00.jpg', 'file05.jpg', 'file10.jpg']
|
||||
|
||||
expand('{{A..C},{a..c}}')
|
||||
// => ['A', 'B', 'C', 'a', 'b', 'c']
|
||||
|
||||
expand('ppp{,config,oe{,conf}}')
|
||||
// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf']
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
```js
|
||||
var expand = require('brace-expansion');
|
||||
```
|
||||
|
||||
### var expanded = expand(str)
|
||||
|
||||
Return an array of all possible and valid expansions of `str`. If none are
|
||||
found, `[str]` is returned.
|
||||
|
||||
Valid expansions are:
|
||||
|
||||
```js
|
||||
/^(.*,)+(.+)?$/
|
||||
// {a,b,...}
|
||||
```
|
||||
|
||||
A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`.
|
||||
|
||||
```js
|
||||
/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
|
||||
// {x..y[..incr]}
|
||||
```
|
||||
|
||||
A numeric sequence from `x` to `y` inclusive, with optional increment.
|
||||
If `x` or `y` start with a leading `0`, all the numbers will be padded
|
||||
to have equal length. Negative numbers and backwards iteration work too.
|
||||
|
||||
```js
|
||||
/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
|
||||
// {x..y[..incr]}
|
||||
```
|
||||
|
||||
An alphabetic sequence from `x` to `y` inclusive, with optional increment.
|
||||
`x` and `y` must be exactly one character, and if given, `incr` must be a
|
||||
number.
|
||||
|
||||
For compatibility reasons, the string `${` is not eligible for brace expansion.
|
||||
|
||||
## Installation
|
||||
|
||||
With [npm](https://npmjs.org) do:
|
||||
|
||||
```bash
|
||||
npm install brace-expansion
|
||||
```
|
||||
|
||||
## Contributors
|
||||
|
||||
- [Julian Gruber](https://github.com/juliangruber)
|
||||
- [Isaac Z. Schlueter](https://github.com/isaacs)
|
||||
|
||||
## Sponsors
|
||||
|
||||
This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)!
|
||||
|
||||
Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)!
|
||||
|
||||
## License
|
||||
|
||||
(MIT)
|
||||
|
||||
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
201
web/status/node_modules/brace-expansion/index.js
generated
vendored
201
web/status/node_modules/brace-expansion/index.js
generated
vendored
@ -1,201 +0,0 @@
|
||||
var concatMap = require('concat-map');
|
||||
var balanced = require('balanced-match');
|
||||
|
||||
module.exports = expandTop;
|
||||
|
||||
var escSlash = '\0SLASH'+Math.random()+'\0';
|
||||
var escOpen = '\0OPEN'+Math.random()+'\0';
|
||||
var escClose = '\0CLOSE'+Math.random()+'\0';
|
||||
var escComma = '\0COMMA'+Math.random()+'\0';
|
||||
var escPeriod = '\0PERIOD'+Math.random()+'\0';
|
||||
|
||||
function numeric(str) {
|
||||
return parseInt(str, 10) == str
|
||||
? parseInt(str, 10)
|
||||
: str.charCodeAt(0);
|
||||
}
|
||||
|
||||
function escapeBraces(str) {
|
||||
return str.split('\\\\').join(escSlash)
|
||||
.split('\\{').join(escOpen)
|
||||
.split('\\}').join(escClose)
|
||||
.split('\\,').join(escComma)
|
||||
.split('\\.').join(escPeriod);
|
||||
}
|
||||
|
||||
function unescapeBraces(str) {
|
||||
return str.split(escSlash).join('\\')
|
||||
.split(escOpen).join('{')
|
||||
.split(escClose).join('}')
|
||||
.split(escComma).join(',')
|
||||
.split(escPeriod).join('.');
|
||||
}
|
||||
|
||||
|
||||
// Basically just str.split(","), but handling cases
|
||||
// where we have nested braced sections, which should be
|
||||
// treated as individual members, like {a,{b,c},d}
|
||||
function parseCommaParts(str) {
|
||||
if (!str)
|
||||
return [''];
|
||||
|
||||
var parts = [];
|
||||
var m = balanced('{', '}', str);
|
||||
|
||||
if (!m)
|
||||
return str.split(',');
|
||||
|
||||
var pre = m.pre;
|
||||
var body = m.body;
|
||||
var post = m.post;
|
||||
var p = pre.split(',');
|
||||
|
||||
p[p.length-1] += '{' + body + '}';
|
||||
var postParts = parseCommaParts(post);
|
||||
if (post.length) {
|
||||
p[p.length-1] += postParts.shift();
|
||||
p.push.apply(p, postParts);
|
||||
}
|
||||
|
||||
parts.push.apply(parts, p);
|
||||
|
||||
return parts;
|
||||
}
|
||||
|
||||
function expandTop(str) {
|
||||
if (!str)
|
||||
return [];
|
||||
|
||||
// I don't know why Bash 4.3 does this, but it does.
|
||||
// Anything starting with {} will have the first two bytes preserved
|
||||
// but *only* at the top level, so {},a}b will not expand to anything,
|
||||
// but a{},b}c will be expanded to [a}c,abc].
|
||||
// One could argue that this is a bug in Bash, but since the goal of
|
||||
// this module is to match Bash's rules, we escape a leading {}
|
||||
if (str.substr(0, 2) === '{}') {
|
||||
str = '\\{\\}' + str.substr(2);
|
||||
}
|
||||
|
||||
return expand(escapeBraces(str), true).map(unescapeBraces);
|
||||
}
|
||||
|
||||
function identity(e) {
|
||||
return e;
|
||||
}
|
||||
|
||||
// Wrap a string in a literal pair of curly braces.
function embrace(body) {
  return ['{', body, '}'].join('');
}
||||
// True when a range endpoint is written zero-padded (e.g. "01", "-02"),
// which requests zero-padded expansion output.
function isPadded(el) {
  var zeroPadded = /^-?0\d/;
  return zeroPadded.test(el);
}
||||
|
||||
// Loop predicate for ascending ranges: continue while a <= b.
function lte(a, b) {
  return a <= b;
}
||||
// Loop predicate for descending ranges: continue while a >= b.
function gte(a, b) {
  return a >= b;
}
||||
|
||||
// Core recursive expansion. `isTop` is true only for the outermost call
// (from expandTop); empty top-level results are dropped unless the body
// was a sequence, matching Bash behavior.
function expand(str, isTop) {
  var expansions = [];

  // First balanced {...} pair; bail out when there are no braces, or
  // when the pre-text ends in "$" ( ${...} shell parameter expansion,
  // which must be left alone).
  var m = balanced('{', '}', str);
  if (!m || /\$$/.test(m.pre)) return [str];

  // {-2..5} or {1..9..2} style numeric ranges.
  var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
  // {a..z} or {a..z..3} style alpha ranges.
  var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
  var isSequence = isNumericSequence || isAlphaSequence;
  var isOptions = m.body.indexOf(',') >= 0;
  if (!isSequence && !isOptions) {
    // {a},b} — the closing brace we matched belongs to a later set;
    // escape it and retry so {a} stays literal.
    if (m.post.match(/,(?!,).*\}/)) {
      str = m.pre + '{' + m.body + escClose + m.post;
      return expand(str);
    }
    return [str];
  }

  var n;
  if (isSequence) {
    n = m.body.split(/\.\./);
  } else {
    n = parseCommaParts(m.body);
    if (n.length === 1) {
      // x{{a,b}}y ==> x{a}y x{b}y
      n = expand(n[0], false).map(embrace);
      if (n.length === 1) {
        var post = m.post.length
          ? expand(m.post, false)
          : [''];
        return post.map(function(p) {
          return m.pre + n[0] + p;
        });
      }
    }
  }

  // at this point, n is the parts, and we know it's not a comma set
  // with a single entry.

  // no need to expand pre, since it is guaranteed to be free of brace-sets
  var pre = m.pre;
  var post = m.post.length
    ? expand(m.post, false)
    : [''];

  var N;

  if (isSequence) {
    // numeric() (defined elsewhere in this file) maps a digit string to
    // a Number or a single character to its char code — presumably; its
    // definition is outside this view, so confirm against the helper.
    var x = numeric(n[0]);
    var y = numeric(n[1]);
    var width = Math.max(n[0].length, n[1].length)
    var incr = n.length == 3
      ? Math.abs(numeric(n[2]))
      : 1;
    var test = lte;
    var reverse = y < x;
    if (reverse) {
      incr *= -1;
      test = gte;
    }
    // Zero-pad the output when either endpoint was written padded
    // (e.g. {01..10}).
    var pad = n.some(isPadded);

    N = [];

    for (var i = x; test(i, y); i += incr) {
      var c;
      if (isAlphaSequence) {
        c = String.fromCharCode(i);
        // Bash skips the backslash character inside alpha ranges.
        if (c === '\\')
          c = '';
      } else {
        c = String(i);
        if (pad) {
          var need = width - c.length;
          if (need > 0) {
            var z = new Array(need + 1).join('0');
            // Keep the minus sign ahead of the zero padding.
            if (i < 0)
              c = '-' + z + c.slice(1);
            else
              c = z + c;
          }
        }
      }
      N.push(c);
    }
  } else {
    // Comma set: expand every member recursively and flatten.
    N = concatMap(n, function(el) { return expand(el, false) });
  }

  // Cartesian product of each expanded body member with each expanded
  // post-text variant.
  for (var j = 0; j < N.length; j++) {
    for (var k = 0; k < post.length; k++) {
      var expansion = pre + N[j] + post[k];
      // Drop empty results at the top level (Bash rule), except for
      // sequences, which keep them.
      if (!isTop || isSequence || expansion)
        expansions.push(expansion);
    }
  }

  return expansions;
}
|
||||
|
50
web/status/node_modules/brace-expansion/package.json
generated
vendored
50
web/status/node_modules/brace-expansion/package.json
generated
vendored
@ -1,50 +0,0 @@
|
||||
{
|
||||
"name": "brace-expansion",
|
||||
"description": "Brace expansion as known from sh/bash",
|
||||
"version": "1.1.12",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/juliangruber/brace-expansion.git"
|
||||
},
|
||||
"homepage": "https://github.com/juliangruber/brace-expansion",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "tape test/*.js",
|
||||
"gentest": "bash test/generate.sh",
|
||||
"bench": "matcha test/perf/bench.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0",
|
||||
"concat-map": "0.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"matcha": "^0.7.0",
|
||||
"tape": "^4.6.0"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": {
|
||||
"name": "Julian Gruber",
|
||||
"email": "mail@juliangruber.com",
|
||||
"url": "http://juliangruber.com"
|
||||
},
|
||||
"license": "MIT",
|
||||
"testling": {
|
||||
"files": "test/*.js",
|
||||
"browsers": [
|
||||
"ie/8..latest",
|
||||
"firefox/20..latest",
|
||||
"firefox/nightly",
|
||||
"chrome/25..latest",
|
||||
"chrome/canary",
|
||||
"opera/12..latest",
|
||||
"opera/next",
|
||||
"safari/5.1..latest",
|
||||
"ipad/6.0..latest",
|
||||
"iphone/6.0..latest",
|
||||
"android-browser/4.2..latest"
|
||||
]
|
||||
},
|
||||
"publishConfig": {
|
||||
"tag": "1.x"
|
||||
}
|
||||
}
|
21
web/status/node_modules/braces/LICENSE
generated
vendored
21
web/status/node_modules/braces/LICENSE
generated
vendored
@ -1,21 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014-present, Jon Schlinkert.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
586
web/status/node_modules/braces/README.md
generated
vendored
586
web/status/node_modules/braces/README.md
generated
vendored
@ -1,586 +0,0 @@
|
||||
# braces [](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [](https://www.npmjs.com/package/braces) [](https://npmjs.org/package/braces) [](https://npmjs.org/package/braces) [](https://travis-ci.org/micromatch/braces)
|
||||
|
||||
> Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.
|
||||
|
||||
Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support.
|
||||
|
||||
## Install
|
||||
|
||||
Install with [npm](https://www.npmjs.com/):
|
||||
|
||||
```sh
|
||||
$ npm install --save braces
|
||||
```
|
||||
|
||||
## v3.0.0 Released!!
|
||||
|
||||
See the [changelog](CHANGELOG.md) for details.
|
||||
|
||||
## Why use braces?
|
||||
|
||||
Brace patterns make globs more powerful by adding the ability to match specific ranges and sequences of characters.
|
||||
|
||||
- **Accurate** - complete support for the [Bash 4.3 Brace Expansion](https://www.gnu.org/software/bash/) specification (passes all of the Bash braces tests)
|
||||
- **[fast and performant](#benchmarks)** - Starts fast, runs fast and [scales well](#performance) as patterns increase in complexity.
|
||||
- **Organized code base** - The parser and compiler are easy to maintain and update when edge cases crop up.
|
||||
- **Well-tested** - Thousands of test assertions, and passes all of the Bash, minimatch, and [brace-expansion](https://github.com/juliangruber/brace-expansion) unit tests (as of the date this was written).
|
||||
- **Safer** - You shouldn't have to worry about users defining aggressive or malicious brace patterns that can break your application. Braces takes measures to prevent malicious regex that can be used for DDoS attacks (see [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)).
|
||||
- [Supports lists](#lists) - (aka "sets") `a/{b,c}/d` => `['a/b/d', 'a/c/d']`
|
||||
- [Supports sequences](#sequences) - (aka "ranges") `{01..03}` => `['01', '02', '03']`
|
||||
- [Supports steps](#steps) - (aka "increments") `{2..10..2}` => `['2', '4', '6', '8', '10']`
|
||||
- [Supports escaping](#escaping) - To prevent evaluation of special characters.
|
||||
|
||||
## Usage
|
||||
|
||||
The main export is a function that takes one or more brace `patterns` and `options`.
|
||||
|
||||
```js
|
||||
const braces = require('braces');
|
||||
// braces(patterns[, options]);
|
||||
|
||||
console.log(braces(['{01..05}', '{a..e}']));
|
||||
//=> ['(0[1-5])', '([a-e])']
|
||||
|
||||
console.log(braces(['{01..05}', '{a..e}'], { expand: true }));
|
||||
//=> ['01', '02', '03', '04', '05', 'a', 'b', 'c', 'd', 'e']
|
||||
```
|
||||
|
||||
### Brace Expansion vs. Compilation
|
||||
|
||||
By default, brace patterns are compiled into strings that are optimized for creating regular expressions and matching.
|
||||
|
||||
**Compiled**
|
||||
|
||||
```js
|
||||
console.log(braces('a/{x,y,z}/b'));
|
||||
//=> ['a/(x|y|z)/b']
|
||||
console.log(braces(['a/{01..20}/b', 'a/{1..5}/b']));
|
||||
//=> [ 'a/(0[1-9]|1[0-9]|20)/b', 'a/([1-5])/b' ]
|
||||
```
|
||||
|
||||
**Expanded**
|
||||
|
||||
Enable brace expansion by setting the `expand` option to true, or by using [braces.expand()](#expand) (returns an array similar to what you'd expect from Bash, or `echo {1..5}`, or [minimatch](https://github.com/isaacs/minimatch)):
|
||||
|
||||
```js
|
||||
console.log(braces('a/{x,y,z}/b', { expand: true }));
|
||||
//=> ['a/x/b', 'a/y/b', 'a/z/b']
|
||||
|
||||
console.log(braces.expand('{01..10}'));
|
||||
//=> ['01','02','03','04','05','06','07','08','09','10']
|
||||
```
|
||||
|
||||
### Lists
|
||||
|
||||
Expand lists (like Bash "sets"):
|
||||
|
||||
```js
|
||||
console.log(braces('a/{foo,bar,baz}/*.js'));
|
||||
//=> ['a/(foo|bar|baz)/*.js']
|
||||
|
||||
console.log(braces.expand('a/{foo,bar,baz}/*.js'));
|
||||
//=> ['a/foo/*.js', 'a/bar/*.js', 'a/baz/*.js']
|
||||
```
|
||||
|
||||
### Sequences
|
||||
|
||||
Expand ranges of characters (like Bash "sequences"):
|
||||
|
||||
```js
|
||||
console.log(braces.expand('{1..3}')); // ['1', '2', '3']
|
||||
console.log(braces.expand('a/{1..3}/b')); // ['a/1/b', 'a/2/b', 'a/3/b']
|
||||
console.log(braces('{a..c}', { expand: true })); // ['a', 'b', 'c']
|
||||
console.log(braces('foo/{a..c}', { expand: true })); // ['foo/a', 'foo/b', 'foo/c']
|
||||
|
||||
// supports zero-padded ranges
|
||||
console.log(braces('a/{01..03}/b')); //=> ['a/(0[1-3])/b']
|
||||
console.log(braces('a/{001..300}/b')); //=> ['a/(0{2}[1-9]|0[1-9][0-9]|[12][0-9]{2}|300)/b']
|
||||
```
|
||||
|
||||
See [fill-range](https://github.com/jonschlinkert/fill-range) for all available range-expansion options.
|
||||
|
||||
### Stepped ranges
|
||||
|
||||
Steps, or increments, may be used with ranges:
|
||||
|
||||
```js
|
||||
console.log(braces.expand('{2..10..2}'));
|
||||
//=> ['2', '4', '6', '8', '10']
|
||||
|
||||
console.log(braces('{2..10..2}'));
|
||||
//=> ['(2|4|6|8|10)']
|
||||
```
|
||||
|
||||
When the [.optimize](#optimize) method is used, or [options.optimize](#optionsoptimize) is set to true, sequences are passed to [to-regex-range](https://github.com/jonschlinkert/to-regex-range) for expansion.
|
||||
|
||||
### Nesting
|
||||
|
||||
Brace patterns may be nested. The results of each expanded string are not sorted, and left to right order is preserved.
|
||||
|
||||
**"Expanded" braces**
|
||||
|
||||
```js
|
||||
console.log(braces.expand('a{b,c,/{x,y}}/e'));
|
||||
//=> ['ab/e', 'ac/e', 'a/x/e', 'a/y/e']
|
||||
|
||||
console.log(braces.expand('a/{x,{1..5},y}/c'));
|
||||
//=> ['a/x/c', 'a/1/c', 'a/2/c', 'a/3/c', 'a/4/c', 'a/5/c', 'a/y/c']
|
||||
```
|
||||
|
||||
**"Optimized" braces**
|
||||
|
||||
```js
|
||||
console.log(braces('a{b,c,/{x,y}}/e'));
|
||||
//=> ['a(b|c|/(x|y))/e']
|
||||
|
||||
console.log(braces('a/{x,{1..5},y}/c'));
|
||||
//=> ['a/(x|([1-5])|y)/c']
|
||||
```
|
||||
|
||||
### Escaping
|
||||
|
||||
**Escaping braces**
|
||||
|
||||
A brace pattern will not be expanded or evaluated if _either the opening or closing brace is escaped_:
|
||||
|
||||
```js
|
||||
console.log(braces.expand('a\\{d,c,b}e'));
|
||||
//=> ['a{d,c,b}e']
|
||||
|
||||
console.log(braces.expand('a{d,c,b\\}e'));
|
||||
//=> ['a{d,c,b}e']
|
||||
```
|
||||
|
||||
**Escaping commas**
|
||||
|
||||
Commas inside braces may also be escaped:
|
||||
|
||||
```js
|
||||
console.log(braces.expand('a{b\\,c}d'));
|
||||
//=> ['a{b,c}d']
|
||||
|
||||
console.log(braces.expand('a{d\\,c,b}e'));
|
||||
//=> ['ad,ce', 'abe']
|
||||
```
|
||||
|
||||
**Single items**
|
||||
|
||||
Following bash conventions, a brace pattern is also not expanded when it contains a single character:
|
||||
|
||||
```js
|
||||
console.log(braces.expand('a{b}c'));
|
||||
//=> ['a{b}c']
|
||||
```
|
||||
|
||||
## Options
|
||||
|
||||
### options.maxLength
|
||||
|
||||
**Type**: `Number`
|
||||
|
||||
**Default**: `10,000`
|
||||
|
||||
**Description**: Limit the length of the input string. Useful when the input string is generated or your application allows users to pass a string, et cetera.
|
||||
|
||||
```js
|
||||
console.log(braces('a/{b,c}/d', { maxLength: 3 })); //=> throws an error
|
||||
```
|
||||
|
||||
### options.expand
|
||||
|
||||
**Type**: `Boolean`
|
||||
|
||||
**Default**: `undefined`
|
||||
|
||||
**Description**: Generate an "expanded" brace pattern (alternatively you can use the `braces.expand()` method, which does the same thing).
|
||||
|
||||
```js
|
||||
console.log(braces('a/{b,c}/d', { expand: true }));
|
||||
//=> [ 'a/b/d', 'a/c/d' ]
|
||||
```
|
||||
|
||||
### options.nodupes
|
||||
|
||||
**Type**: `Boolean`
|
||||
|
||||
**Default**: `undefined`
|
||||
|
||||
**Description**: Remove duplicates from the returned array.
|
||||
|
||||
### options.rangeLimit
|
||||
|
||||
**Type**: `Number`
|
||||
|
||||
**Default**: `1000`
|
||||
|
||||
**Description**: To prevent malicious patterns from being passed by users, an error is thrown when `braces.expand()` is used or `options.expand` is true and the generated range will exceed the `rangeLimit`.
|
||||
|
||||
You can customize `options.rangeLimit` or set it to `Infinity` to disable this altogether.
|
||||
|
||||
**Examples**
|
||||
|
||||
```js
|
||||
// pattern exceeds the "rangeLimit", so it's optimized automatically
|
||||
console.log(braces.expand('{1..1000}'));
|
||||
//=> ['([1-9]|[1-9][0-9]{1,2}|1000)']
|
||||
|
||||
// pattern does not exceed "rangeLimit", so it's NOT optimized
|
||||
console.log(braces.expand('{1..100}'));
|
||||
//=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '100']
|
||||
```
|
||||
|
||||
### options.transform
|
||||
|
||||
**Type**: `Function`
|
||||
|
||||
**Default**: `undefined`
|
||||
|
||||
**Description**: Customize range expansion.
|
||||
|
||||
**Example: Transforming non-numeric values**
|
||||
|
||||
```js
|
||||
const alpha = braces.expand('x/{a..e}/y', {
|
||||
transform(value, index) {
|
||||
// When non-numeric values are passed, "value" is a character code.
|
||||
return 'foo/' + String.fromCharCode(value) + '-' + index;
|
||||
},
|
||||
});
|
||||
console.log(alpha);
|
||||
//=> [ 'x/foo/a-0/y', 'x/foo/b-1/y', 'x/foo/c-2/y', 'x/foo/d-3/y', 'x/foo/e-4/y' ]
|
||||
```
|
||||
|
||||
**Example: Transforming numeric values**
|
||||
|
||||
```js
|
||||
const numeric = braces.expand('{1..5}', {
|
||||
transform(value) {
|
||||
// when numeric values are passed, "value" is a number
|
||||
return 'foo/' + value * 2;
|
||||
},
|
||||
});
|
||||
console.log(numeric);
|
||||
//=> [ 'foo/2', 'foo/4', 'foo/6', 'foo/8', 'foo/10' ]
|
||||
```
|
||||
|
||||
### options.quantifiers
|
||||
|
||||
**Type**: `Boolean`
|
||||
|
||||
**Default**: `undefined`
|
||||
|
||||
**Description**: In regular expressions, quantifiers can be used to specify how many times a token can be repeated. For example, `a{1,3}` will match the letter `a` one to three times.
|
||||
|
||||
Unfortunately, regex quantifiers happen to share the same syntax as [Bash lists](#lists)
|
||||
|
||||
The `quantifiers` option tells braces to detect when [regex quantifiers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#quantifiers) are defined in the given pattern, and not to try to expand them as lists.
|
||||
|
||||
**Examples**
|
||||
|
||||
```js
|
||||
const braces = require('braces');
|
||||
console.log(braces('a/b{1,3}/{x,y,z}'));
|
||||
//=> [ 'a/b(1|3)/(x|y|z)' ]
|
||||
console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true }));
|
||||
//=> [ 'a/b{1,3}/(x|y|z)' ]
|
||||
console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true, expand: true }));
|
||||
//=> [ 'a/b{1,3}/x', 'a/b{1,3}/y', 'a/b{1,3}/z' ]
|
||||
```
|
||||
|
||||
### options.keepEscaping
|
||||
|
||||
**Type**: `Boolean`
|
||||
|
||||
**Default**: `undefined`
|
||||
|
||||
**Description**: Do not strip backslashes that were used for escaping from the result.
|
||||
|
||||
## What is "brace expansion"?
|
||||
|
||||
Brace expansion is a type of parameter expansion that was made popular by unix shells for generating lists of strings, as well as regex-like matching when used alongside wildcards (globs).
|
||||
|
||||
In addition to "expansion", braces are also used for matching. In other words:
|
||||
|
||||
- [brace expansion](#brace-expansion) is for generating new lists
|
||||
- [brace matching](#brace-matching) is for filtering existing lists
|
||||
|
||||
<details>
|
||||
<summary><strong>More about brace expansion</strong> (click to expand)</summary>
|
||||
|
||||
There are two main types of brace expansion:
|
||||
|
||||
1. **lists**: which are defined using comma-separated values inside curly braces: `{a,b,c}`
|
||||
2. **sequences**: which are defined using a starting value and an ending value, separated by two dots: `a{1..3}b`. Optionally, a third argument may be passed to define a "step" or increment to use: `a{1..100..10}b`. These are also sometimes referred to as "ranges".
|
||||
|
||||
Here are some example brace patterns to illustrate how they work:
|
||||
|
||||
**Sets**
|
||||
|
||||
```
|
||||
{a,b,c} => a b c
|
||||
{a,b,c}{1,2} => a1 a2 b1 b2 c1 c2
|
||||
```
|
||||
|
||||
**Sequences**
|
||||
|
||||
```
|
||||
{1..9} => 1 2 3 4 5 6 7 8 9
|
||||
{4..-4} => 4 3 2 1 0 -1 -2 -3 -4
|
||||
{1..20..3} => 1 4 7 10 13 16 19
|
||||
{a..j} => a b c d e f g h i j
|
||||
{j..a} => j i h g f e d c b a
|
||||
{a..z..3} => a d g j m p s v y
|
||||
```
|
||||
|
||||
**Combination**
|
||||
|
||||
Sets and sequences can be mixed together or used along with any other strings.
|
||||
|
||||
```
|
||||
{a,b,c}{1..3} => a1 a2 a3 b1 b2 b3 c1 c2 c3
|
||||
foo/{a,b,c}/bar => foo/a/bar foo/b/bar foo/c/bar
|
||||
```
|
||||
|
||||
The fact that braces can be "expanded" from relatively simple patterns makes them ideal for quickly generating test fixtures, file paths, and similar use cases.
|
||||
|
||||
## Brace matching
|
||||
|
||||
In addition to _expansion_, brace patterns are also useful for performing regular-expression-like matching.
|
||||
|
||||
For example, the pattern `foo/{1..3}/bar` would match any of following strings:
|
||||
|
||||
```
|
||||
foo/1/bar
|
||||
foo/2/bar
|
||||
foo/3/bar
|
||||
```
|
||||
|
||||
But not:
|
||||
|
||||
```
|
||||
baz/1/qux
|
||||
baz/2/qux
|
||||
baz/3/qux
|
||||
```
|
||||
|
||||
Braces can also be combined with [glob patterns](https://github.com/jonschlinkert/micromatch) to perform more advanced wildcard matching. For example, the pattern `*/{1..3}/*` would match any of following strings:
|
||||
|
||||
```
|
||||
foo/1/bar
|
||||
foo/2/bar
|
||||
foo/3/bar
|
||||
baz/1/qux
|
||||
baz/2/qux
|
||||
baz/3/qux
|
||||
```
|
||||
|
||||
## Brace matching pitfalls
|
||||
|
||||
Although brace patterns offer a user-friendly way of matching ranges or sets of strings, there are also some major disadvantages and potential risks you should be aware of.
|
||||
|
||||
### tldr
|
||||
|
||||
**"brace bombs"**
|
||||
|
||||
- brace expansion can eat up a huge amount of processing resources
|
||||
- as brace patterns increase _linearly in size_, the system resources required to expand the pattern increase exponentially
|
||||
- users can accidentally (or intentionally) exhaust your system's resources resulting in the equivalent of a DoS attack (bonus: no programming knowledge is required!)
|
||||
|
||||
For a more detailed explanation with examples, see the [geometric complexity](#geometric-complexity) section.
|
||||
|
||||
### The solution
|
||||
|
||||
Jump to the [performance section](#performance) to see how Braces solves this problem in comparison to other libraries.
|
||||
|
||||
### Geometric complexity
|
||||
|
||||
At minimum, brace patterns with sets limited to two elements have quadratic or `O(n^2)` complexity. But the complexity of the algorithm increases exponentially as the number of sets, _and elements per set_, increases, which is `O(n^c)`.
|
||||
|
||||
For example, the following sets demonstrate quadratic (`O(n^2)`) complexity:
|
||||
|
||||
```
|
||||
{1,2}{3,4} => (2X2) => 13 14 23 24
|
||||
{1,2}{3,4}{5,6} => (2X2X2) => 135 136 145 146 235 236 245 246
|
||||
```
|
||||
|
||||
But add an element to a set, and we get a n-fold Cartesian product with `O(n^c)` complexity:
|
||||
|
||||
```
|
||||
{1,2,3}{4,5,6}{7,8,9} => (3X3X3) => 147 148 149 157 158 159 167 168 169 247 248
|
||||
249 257 258 259 267 268 269 347 348 349 357
|
||||
358 359 367 368 369
|
||||
```
|
||||
|
||||
Now, imagine how this complexity grows given that each element is a n-tuple:
|
||||
|
||||
```
|
||||
{1..100}{1..100} => (100X100) => 10,000 elements (38.4 kB)
|
||||
{1..100}{1..100}{1..100} => (100X100X100) => 1,000,000 elements (5.76 MB)
|
||||
```
|
||||
|
||||
Although these examples are clearly contrived, they demonstrate how brace patterns can quickly grow out of control.
|
||||
|
||||
**More information**
|
||||
|
||||
Interested in learning more about brace expansion?
|
||||
|
||||
- [linuxjournal/bash-brace-expansion](http://www.linuxjournal.com/content/bash-brace-expansion)
|
||||
- [rosettacode/Brace_expansion](https://rosettacode.org/wiki/Brace_expansion)
|
||||
- [cartesian product](https://en.wikipedia.org/wiki/Cartesian_product)
|
||||
|
||||
</details>
|
||||
|
||||
## Performance
|
||||
|
||||
Braces is not only screaming fast, it's also more accurate than other brace expansion libraries.
|
||||
|
||||
### Better algorithms
|
||||
|
||||
Fortunately there is a solution to the ["brace bomb" problem](#brace-matching-pitfalls): _don't expand brace patterns into an array when they're used for matching_.
|
||||
|
||||
Instead, convert the pattern into an optimized regular expression. This is easier said than done, and braces is the only library that does this currently.
|
||||
|
||||
**The proof is in the numbers**
|
||||
|
||||
Minimatch gets exponentially slower as patterns increase in complexity, braces does not. The following results were generated using `braces()` and `minimatch.braceExpand()`, respectively.
|
||||
|
||||
| **Pattern** | **braces** | **[minimatch][]** |
|
||||
| --------------------------- | ------------------- | ---------------------------- |
|
||||
| `{1..9007199254740991}`[^1] | `298 B` (5ms 459μs) | N/A (freezes) |
|
||||
| `{1..1000000000000000}` | `41 B` (1ms 15μs) | N/A (freezes) |
|
||||
| `{1..100000000000000}` | `40 B` (890μs) | N/A (freezes) |
|
||||
| `{1..10000000000000}` | `39 B` (2ms 49μs) | N/A (freezes) |
|
||||
| `{1..1000000000000}` | `38 B` (608μs) | N/A (freezes) |
|
||||
| `{1..100000000000}` | `37 B` (397μs) | N/A (freezes) |
|
||||
| `{1..10000000000}` | `35 B` (983μs) | N/A (freezes) |
|
||||
| `{1..1000000000}` | `34 B` (798μs) | N/A (freezes) |
|
||||
| `{1..100000000}` | `33 B` (733μs) | N/A (freezes) |
|
||||
| `{1..10000000}` | `32 B` (5ms 632μs) | `78.89 MB` (16s 388ms 569μs) |
|
||||
| `{1..1000000}` | `31 B` (1ms 381μs) | `6.89 MB` (1s 496ms 887μs) |
|
||||
| `{1..100000}` | `30 B` (950μs) | `588.89 kB` (146ms 921μs) |
|
||||
| `{1..10000}` | `29 B` (1ms 114μs) | `48.89 kB` (14ms 187μs) |
|
||||
| `{1..1000}` | `28 B` (760μs) | `3.89 kB` (1ms 453μs) |
|
||||
| `{1..100}` | `22 B` (345μs) | `291 B` (196μs) |
|
||||
| `{1..10}` | `10 B` (533μs) | `20 B` (37μs) |
|
||||
| `{1..3}` | `7 B` (190μs) | `5 B` (27μs) |
|
||||
|
||||
### Faster algorithms
|
||||
|
||||
When you need expansion, braces is still much faster.
|
||||
|
||||
_(the following results were generated using `braces.expand()` and `minimatch.braceExpand()`, respectively)_
|
||||
|
||||
| **Pattern** | **braces** | **[minimatch][]** |
|
||||
| --------------- | --------------------------- | ---------------------------- |
|
||||
| `{1..10000000}` | `78.89 MB` (2s 698ms 642μs) | `78.89 MB` (18s 601ms 974μs) |
|
||||
| `{1..1000000}` | `6.89 MB` (458ms 576μs) | `6.89 MB` (1s 491ms 621μs) |
|
||||
| `{1..100000}` | `588.89 kB` (20ms 728μs) | `588.89 kB` (156ms 919μs) |
|
||||
| `{1..10000}` | `48.89 kB` (2ms 202μs) | `48.89 kB` (13ms 641μs) |
|
||||
| `{1..1000}` | `3.89 kB` (1ms 796μs) | `3.89 kB` (1ms 958μs) |
|
||||
| `{1..100}` | `291 B` (424μs) | `291 B` (211μs) |
|
||||
| `{1..10}` | `20 B` (487μs) | `20 B` (72μs) |
|
||||
| `{1..3}` | `5 B` (166μs) | `5 B` (27μs) |
|
||||
|
||||
If you'd like to run these comparisons yourself, see [test/support/generate.js](test/support/generate.js).
|
||||
|
||||
## Benchmarks
|
||||
|
||||
### Running benchmarks
|
||||
|
||||
Install dev dependencies:
|
||||
|
||||
```bash
|
||||
npm i -d && npm benchmark
|
||||
```
|
||||
|
||||
### Latest results
|
||||
|
||||
Braces is more accurate, without sacrificing performance.
|
||||
|
||||
```bash
|
||||
● expand - range (expanded)
|
||||
braces x 53,167 ops/sec ±0.12% (102 runs sampled)
|
||||
minimatch x 11,378 ops/sec ±0.10% (102 runs sampled)
|
||||
● expand - range (optimized for regex)
|
||||
braces x 373,442 ops/sec ±0.04% (100 runs sampled)
|
||||
minimatch x 3,262 ops/sec ±0.18% (100 runs sampled)
|
||||
● expand - nested ranges (expanded)
|
||||
braces x 33,921 ops/sec ±0.09% (99 runs sampled)
|
||||
minimatch x 10,855 ops/sec ±0.28% (100 runs sampled)
|
||||
● expand - nested ranges (optimized for regex)
|
||||
braces x 287,479 ops/sec ±0.52% (98 runs sampled)
|
||||
minimatch x 3,219 ops/sec ±0.28% (101 runs sampled)
|
||||
● expand - set (expanded)
|
||||
braces x 238,243 ops/sec ±0.19% (97 runs sampled)
|
||||
minimatch x 538,268 ops/sec ±0.31% (96 runs sampled)
|
||||
● expand - set (optimized for regex)
|
||||
braces x 321,844 ops/sec ±0.10% (97 runs sampled)
|
||||
minimatch x 140,600 ops/sec ±0.15% (100 runs sampled)
|
||||
● expand - nested sets (expanded)
|
||||
braces x 165,371 ops/sec ±0.42% (96 runs sampled)
|
||||
minimatch x 337,720 ops/sec ±0.28% (100 runs sampled)
|
||||
● expand - nested sets (optimized for regex)
|
||||
braces x 242,948 ops/sec ±0.12% (99 runs sampled)
|
||||
minimatch x 87,403 ops/sec ±0.79% (96 runs sampled)
|
||||
```
|
||||
|
||||
## About
|
||||
|
||||
<details>
|
||||
<summary><strong>Contributing</strong></summary>
|
||||
|
||||
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Running Tests</strong></summary>
|
||||
|
||||
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
|
||||
|
||||
```sh
|
||||
$ npm install && npm test
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Building docs</strong></summary>
|
||||
|
||||
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
|
||||
|
||||
To generate the readme, run the following command:
|
||||
|
||||
```sh
|
||||
$ npm install -g verbose/verb#dev verb-generate-readme && verb
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Contributors
|
||||
|
||||
| **Commits** | **Contributor** |
|
||||
| ----------- | ------------------------------------------------------------- |
|
||||
| 197 | [jonschlinkert](https://github.com/jonschlinkert) |
|
||||
| 4 | [doowb](https://github.com/doowb) |
|
||||
| 1 | [es128](https://github.com/es128) |
|
||||
| 1 | [eush77](https://github.com/eush77) |
|
||||
| 1 | [hemanth](https://github.com/hemanth) |
|
||||
| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) |
|
||||
|
||||
### Author
|
||||
|
||||
**Jon Schlinkert**
|
||||
|
||||
- [GitHub Profile](https://github.com/jonschlinkert)
|
||||
- [Twitter Profile](https://twitter.com/jonschlinkert)
|
||||
- [LinkedIn Profile](https://linkedin.com/in/jonschlinkert)
|
||||
|
||||
### License
|
||||
|
||||
Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert).
|
||||
Released under the [MIT License](LICENSE).
|
||||
|
||||
---
|
||||
|
||||
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._
|
170
web/status/node_modules/braces/index.js
generated
vendored
170
web/status/node_modules/braces/index.js
generated
vendored
@ -1,170 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const stringify = require('./lib/stringify');
|
||||
const compile = require('./lib/compile');
|
||||
const expand = require('./lib/expand');
|
||||
const parse = require('./lib/parse');
|
||||
|
||||
/**
|
||||
* Expand the given pattern or create a regex-compatible string.
|
||||
*
|
||||
* ```js
|
||||
* const braces = require('braces');
|
||||
* console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)']
|
||||
* console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c']
|
||||
* ```
|
||||
* @param {String} `str`
|
||||
* @param {Object} `options`
|
||||
* @return {String}
|
||||
* @api public
|
||||
*/
|
||||
|
||||
const braces = (input, options = {}) => {
  // Normalize to a list of patterns so single-string and array call
  // styles share one code path.
  const patterns = Array.isArray(input) ? input : [input];
  let output = [];

  for (const pattern of patterns) {
    // braces.create may return a string or an array of strings;
    // concat appends the former and flattens the latter one level.
    output = output.concat(braces.create(pattern, options));
  }

  // Deduplicate only when both expansion and nodupes were explicitly
  // requested (compiled output is already free of duplicates).
  if (options && options.expand === true && options.nodupes === true) {
    output = [...new Set(output)];
  }
  return output;
};
||||
|
||||
/**
|
||||
* Parse the given `str` with the given `options`.
|
||||
*
|
||||
* ```js
|
||||
* // braces.parse(pattern, [, options]);
|
||||
* const ast = braces.parse('a/{b,c}/d');
|
||||
* console.log(ast);
|
||||
* ```
|
||||
* @param {String} pattern Brace pattern to parse
|
||||
* @param {Object} options
|
||||
* @return {Object} Returns an AST
|
||||
* @api public
|
||||
*/
|
||||
|
||||
braces.parse = (input, options = {}) => parse(input, options);
|
||||
|
||||
/**
|
||||
* Creates a braces string from an AST, or an AST node.
|
||||
*
|
||||
* ```js
|
||||
* const braces = require('braces');
|
||||
* let ast = braces.parse('foo/{a,b}/bar');
|
||||
* console.log(stringify(ast.nodes[2])); //=> '{a,b}'
|
||||
* ```
|
||||
* @param {String} `input` Brace pattern or AST.
|
||||
* @param {Object} `options`
|
||||
* @return {Array} Returns an array of expanded values.
|
||||
* @api public
|
||||
*/
|
||||
|
||||
braces.stringify = (input, options = {}) => {
|
||||
if (typeof input === 'string') {
|
||||
return stringify(braces.parse(input, options), options);
|
||||
}
|
||||
return stringify(input, options);
|
||||
};
|
||||
|
||||
/**
|
||||
* Compiles a brace pattern into a regex-compatible, optimized string.
|
||||
* This method is called by the main [braces](#braces) function by default.
|
||||
*
|
||||
* ```js
|
||||
* const braces = require('braces');
|
||||
* console.log(braces.compile('a/{b,c}/d'));
|
||||
* //=> ['a/(b|c)/d']
|
||||
* ```
|
||||
* @param {String} `input` Brace pattern or AST.
|
||||
* @param {Object} `options`
|
||||
* @return {Array} Returns an array of expanded values.
|
||||
* @api public
|
||||
*/
|
||||
|
||||
braces.compile = (input, options = {}) => {
|
||||
if (typeof input === 'string') {
|
||||
input = braces.parse(input, options);
|
||||
}
|
||||
return compile(input, options);
|
||||
};
|
||||
|
||||
/**
|
||||
* Expands a brace pattern into an array. This method is called by the
|
||||
* main [braces](#braces) function when `options.expand` is true. Before
|
||||
* using this method it's recommended that you read the [performance notes](#performance))
|
||||
* and advantages of using [.compile](#compile) instead.
|
||||
*
|
||||
* ```js
|
||||
* const braces = require('braces');
|
||||
* console.log(braces.expand('a/{b,c}/d'));
|
||||
* //=> ['a/b/d', 'a/c/d'];
|
||||
* ```
|
||||
* @param {String} `pattern` Brace pattern
|
||||
* @param {Object} `options`
|
||||
* @return {Array} Returns an array of expanded values.
|
||||
* @api public
|
||||
*/
|
||||
|
||||
braces.expand = (input, options = {}) => {
|
||||
if (typeof input === 'string') {
|
||||
input = braces.parse(input, options);
|
||||
}
|
||||
|
||||
let result = expand(input, options);
|
||||
|
||||
// filter out empty strings if specified
|
||||
if (options.noempty === true) {
|
||||
result = result.filter(Boolean);
|
||||
}
|
||||
|
||||
// filter out duplicates if specified
|
||||
if (options.nodupes === true) {
|
||||
result = [...new Set(result)];
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Processes a brace pattern and returns either an expanded array
|
||||
* (if `options.expand` is true), a highly optimized regex-compatible string.
|
||||
* This method is called by the main [braces](#braces) function.
|
||||
*
|
||||
* ```js
|
||||
* const braces = require('braces');
|
||||
* console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))
|
||||
* //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'
|
||||
* ```
|
||||
* @param {String} `pattern` Brace pattern
|
||||
* @param {Object} `options`
|
||||
* @return {Array} Returns an array of expanded values.
|
||||
* @api public
|
||||
*/
|
||||
|
||||
braces.create = (input, options = {}) => {
|
||||
if (input === '' || input.length < 3) {
|
||||
return [input];
|
||||
}
|
||||
|
||||
return options.expand !== true
|
||||
? braces.compile(input, options)
|
||||
: braces.expand(input, options);
|
||||
};
|
||||
|
||||
/**
|
||||
* Expose "braces"
|
||||
*/
|
||||
|
||||
module.exports = braces;
|
60
web/status/node_modules/braces/lib/compile.js
generated
vendored
60
web/status/node_modules/braces/lib/compile.js
generated
vendored
@ -1,60 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const fill = require('fill-range');
|
||||
const utils = require('./utils');
|
||||
|
||||
const compile = (ast, options = {}) => {
|
||||
const walk = (node, parent = {}) => {
|
||||
const invalidBlock = utils.isInvalidBrace(parent);
|
||||
const invalidNode = node.invalid === true && options.escapeInvalid === true;
|
||||
const invalid = invalidBlock === true || invalidNode === true;
|
||||
const prefix = options.escapeInvalid === true ? '\\' : '';
|
||||
let output = '';
|
||||
|
||||
if (node.isOpen === true) {
|
||||
return prefix + node.value;
|
||||
}
|
||||
|
||||
if (node.isClose === true) {
|
||||
console.log('node.isClose', prefix, node.value);
|
||||
return prefix + node.value;
|
||||
}
|
||||
|
||||
if (node.type === 'open') {
|
||||
return invalid ? prefix + node.value : '(';
|
||||
}
|
||||
|
||||
if (node.type === 'close') {
|
||||
return invalid ? prefix + node.value : ')';
|
||||
}
|
||||
|
||||
if (node.type === 'comma') {
|
||||
return node.prev.type === 'comma' ? '' : invalid ? node.value : '|';
|
||||
}
|
||||
|
||||
if (node.value) {
|
||||
return node.value;
|
||||
}
|
||||
|
||||
if (node.nodes && node.ranges > 0) {
|
||||
const args = utils.reduce(node.nodes);
|
||||
const range = fill(...args, { ...options, wrap: false, toRegex: true, strictZeros: true });
|
||||
|
||||
if (range.length !== 0) {
|
||||
return args.length > 1 && range.length > 1 ? `(${range})` : range;
|
||||
}
|
||||
}
|
||||
|
||||
if (node.nodes) {
|
||||
for (const child of node.nodes) {
|
||||
output += walk(child, node);
|
||||
}
|
||||
}
|
||||
|
||||
return output;
|
||||
};
|
||||
|
||||
return walk(ast);
|
||||
};
|
||||
|
||||
module.exports = compile;
|
57
web/status/node_modules/braces/lib/constants.js
generated
vendored
57
web/status/node_modules/braces/lib/constants.js
generated
vendored
@ -1,57 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = {
|
||||
MAX_LENGTH: 10000,
|
||||
|
||||
// Digits
|
||||
CHAR_0: '0', /* 0 */
|
||||
CHAR_9: '9', /* 9 */
|
||||
|
||||
// Alphabet chars.
|
||||
CHAR_UPPERCASE_A: 'A', /* A */
|
||||
CHAR_LOWERCASE_A: 'a', /* a */
|
||||
CHAR_UPPERCASE_Z: 'Z', /* Z */
|
||||
CHAR_LOWERCASE_Z: 'z', /* z */
|
||||
|
||||
CHAR_LEFT_PARENTHESES: '(', /* ( */
|
||||
CHAR_RIGHT_PARENTHESES: ')', /* ) */
|
||||
|
||||
CHAR_ASTERISK: '*', /* * */
|
||||
|
||||
// Non-alphabetic chars.
|
||||
CHAR_AMPERSAND: '&', /* & */
|
||||
CHAR_AT: '@', /* @ */
|
||||
CHAR_BACKSLASH: '\\', /* \ */
|
||||
CHAR_BACKTICK: '`', /* ` */
|
||||
CHAR_CARRIAGE_RETURN: '\r', /* \r */
|
||||
CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */
|
||||
CHAR_COLON: ':', /* : */
|
||||
CHAR_COMMA: ',', /* , */
|
||||
CHAR_DOLLAR: '$', /* . */
|
||||
CHAR_DOT: '.', /* . */
|
||||
CHAR_DOUBLE_QUOTE: '"', /* " */
|
||||
CHAR_EQUAL: '=', /* = */
|
||||
CHAR_EXCLAMATION_MARK: '!', /* ! */
|
||||
CHAR_FORM_FEED: '\f', /* \f */
|
||||
CHAR_FORWARD_SLASH: '/', /* / */
|
||||
CHAR_HASH: '#', /* # */
|
||||
CHAR_HYPHEN_MINUS: '-', /* - */
|
||||
CHAR_LEFT_ANGLE_BRACKET: '<', /* < */
|
||||
CHAR_LEFT_CURLY_BRACE: '{', /* { */
|
||||
CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */
|
||||
CHAR_LINE_FEED: '\n', /* \n */
|
||||
CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */
|
||||
CHAR_PERCENT: '%', /* % */
|
||||
CHAR_PLUS: '+', /* + */
|
||||
CHAR_QUESTION_MARK: '?', /* ? */
|
||||
CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */
|
||||
CHAR_RIGHT_CURLY_BRACE: '}', /* } */
|
||||
CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */
|
||||
CHAR_SEMICOLON: ';', /* ; */
|
||||
CHAR_SINGLE_QUOTE: '\'', /* ' */
|
||||
CHAR_SPACE: ' ', /* */
|
||||
CHAR_TAB: '\t', /* \t */
|
||||
CHAR_UNDERSCORE: '_', /* _ */
|
||||
CHAR_VERTICAL_LINE: '|', /* | */
|
||||
CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */
|
||||
};
|
113
web/status/node_modules/braces/lib/expand.js
generated
vendored
113
web/status/node_modules/braces/lib/expand.js
generated
vendored
@ -1,113 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const fill = require('fill-range');
|
||||
const stringify = require('./stringify');
|
||||
const utils = require('./utils');
|
||||
|
||||
const append = (queue = '', stash = '', enclose = false) => {
|
||||
const result = [];
|
||||
|
||||
queue = [].concat(queue);
|
||||
stash = [].concat(stash);
|
||||
|
||||
if (!stash.length) return queue;
|
||||
if (!queue.length) {
|
||||
return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash;
|
||||
}
|
||||
|
||||
for (const item of queue) {
|
||||
if (Array.isArray(item)) {
|
||||
for (const value of item) {
|
||||
result.push(append(value, stash, enclose));
|
||||
}
|
||||
} else {
|
||||
for (let ele of stash) {
|
||||
if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;
|
||||
result.push(Array.isArray(ele) ? append(item, ele, enclose) : item + ele);
|
||||
}
|
||||
}
|
||||
}
|
||||
return utils.flatten(result);
|
||||
};
|
||||
|
||||
const expand = (ast, options = {}) => {
|
||||
const rangeLimit = options.rangeLimit === undefined ? 1000 : options.rangeLimit;
|
||||
|
||||
const walk = (node, parent = {}) => {
|
||||
node.queue = [];
|
||||
|
||||
let p = parent;
|
||||
let q = parent.queue;
|
||||
|
||||
while (p.type !== 'brace' && p.type !== 'root' && p.parent) {
|
||||
p = p.parent;
|
||||
q = p.queue;
|
||||
}
|
||||
|
||||
if (node.invalid || node.dollar) {
|
||||
q.push(append(q.pop(), stringify(node, options)));
|
||||
return;
|
||||
}
|
||||
|
||||
if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {
|
||||
q.push(append(q.pop(), ['{}']));
|
||||
return;
|
||||
}
|
||||
|
||||
if (node.nodes && node.ranges > 0) {
|
||||
const args = utils.reduce(node.nodes);
|
||||
|
||||
if (utils.exceedsLimit(...args, options.step, rangeLimit)) {
|
||||
throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');
|
||||
}
|
||||
|
||||
let range = fill(...args, options);
|
||||
if (range.length === 0) {
|
||||
range = stringify(node, options);
|
||||
}
|
||||
|
||||
q.push(append(q.pop(), range));
|
||||
node.nodes = [];
|
||||
return;
|
||||
}
|
||||
|
||||
const enclose = utils.encloseBrace(node);
|
||||
let queue = node.queue;
|
||||
let block = node;
|
||||
|
||||
while (block.type !== 'brace' && block.type !== 'root' && block.parent) {
|
||||
block = block.parent;
|
||||
queue = block.queue;
|
||||
}
|
||||
|
||||
for (let i = 0; i < node.nodes.length; i++) {
|
||||
const child = node.nodes[i];
|
||||
|
||||
if (child.type === 'comma' && node.type === 'brace') {
|
||||
if (i === 1) queue.push('');
|
||||
queue.push('');
|
||||
continue;
|
||||
}
|
||||
|
||||
if (child.type === 'close') {
|
||||
q.push(append(q.pop(), queue, enclose));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (child.value && child.type !== 'open') {
|
||||
queue.push(append(queue.pop(), child.value));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (child.nodes) {
|
||||
walk(child, node);
|
||||
}
|
||||
}
|
||||
|
||||
return queue;
|
||||
};
|
||||
|
||||
return utils.flatten(walk(ast));
|
||||
};
|
||||
|
||||
module.exports = expand;
|
331
web/status/node_modules/braces/lib/parse.js
generated
vendored
331
web/status/node_modules/braces/lib/parse.js
generated
vendored
@ -1,331 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const stringify = require('./stringify');
|
||||
|
||||
/**
|
||||
* Constants
|
||||
*/
|
||||
|
||||
const {
|
||||
MAX_LENGTH,
|
||||
CHAR_BACKSLASH, /* \ */
|
||||
CHAR_BACKTICK, /* ` */
|
||||
CHAR_COMMA, /* , */
|
||||
CHAR_DOT, /* . */
|
||||
CHAR_LEFT_PARENTHESES, /* ( */
|
||||
CHAR_RIGHT_PARENTHESES, /* ) */
|
||||
CHAR_LEFT_CURLY_BRACE, /* { */
|
||||
CHAR_RIGHT_CURLY_BRACE, /* } */
|
||||
CHAR_LEFT_SQUARE_BRACKET, /* [ */
|
||||
CHAR_RIGHT_SQUARE_BRACKET, /* ] */
|
||||
CHAR_DOUBLE_QUOTE, /* " */
|
||||
CHAR_SINGLE_QUOTE, /* ' */
|
||||
CHAR_NO_BREAK_SPACE,
|
||||
CHAR_ZERO_WIDTH_NOBREAK_SPACE
|
||||
} = require('./constants');
|
||||
|
||||
/**
|
||||
* parse
|
||||
*/
|
||||
|
||||
const parse = (input, options = {}) => {
|
||||
if (typeof input !== 'string') {
|
||||
throw new TypeError('Expected a string');
|
||||
}
|
||||
|
||||
const opts = options || {};
|
||||
const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
|
||||
if (input.length > max) {
|
||||
throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`);
|
||||
}
|
||||
|
||||
const ast = { type: 'root', input, nodes: [] };
|
||||
const stack = [ast];
|
||||
let block = ast;
|
||||
let prev = ast;
|
||||
let brackets = 0;
|
||||
const length = input.length;
|
||||
let index = 0;
|
||||
let depth = 0;
|
||||
let value;
|
||||
|
||||
/**
|
||||
* Helpers
|
||||
*/
|
||||
|
||||
const advance = () => input[index++];
|
||||
const push = node => {
|
||||
if (node.type === 'text' && prev.type === 'dot') {
|
||||
prev.type = 'text';
|
||||
}
|
||||
|
||||
if (prev && prev.type === 'text' && node.type === 'text') {
|
||||
prev.value += node.value;
|
||||
return;
|
||||
}
|
||||
|
||||
block.nodes.push(node);
|
||||
node.parent = block;
|
||||
node.prev = prev;
|
||||
prev = node;
|
||||
return node;
|
||||
};
|
||||
|
||||
push({ type: 'bos' });
|
||||
|
||||
while (index < length) {
|
||||
block = stack[stack.length - 1];
|
||||
value = advance();
|
||||
|
||||
/**
|
||||
* Invalid chars
|
||||
*/
|
||||
|
||||
if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Escaped chars
|
||||
*/
|
||||
|
||||
if (value === CHAR_BACKSLASH) {
|
||||
push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Right square bracket (literal): ']'
|
||||
*/
|
||||
|
||||
if (value === CHAR_RIGHT_SQUARE_BRACKET) {
|
||||
push({ type: 'text', value: '\\' + value });
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Left square bracket: '['
|
||||
*/
|
||||
|
||||
if (value === CHAR_LEFT_SQUARE_BRACKET) {
|
||||
brackets++;
|
||||
|
||||
let next;
|
||||
|
||||
while (index < length && (next = advance())) {
|
||||
value += next;
|
||||
|
||||
if (next === CHAR_LEFT_SQUARE_BRACKET) {
|
||||
brackets++;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (next === CHAR_BACKSLASH) {
|
||||
value += advance();
|
||||
continue;
|
||||
}
|
||||
|
||||
if (next === CHAR_RIGHT_SQUARE_BRACKET) {
|
||||
brackets--;
|
||||
|
||||
if (brackets === 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
push({ type: 'text', value });
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parentheses
|
||||
*/
|
||||
|
||||
if (value === CHAR_LEFT_PARENTHESES) {
|
||||
block = push({ type: 'paren', nodes: [] });
|
||||
stack.push(block);
|
||||
push({ type: 'text', value });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (value === CHAR_RIGHT_PARENTHESES) {
|
||||
if (block.type !== 'paren') {
|
||||
push({ type: 'text', value });
|
||||
continue;
|
||||
}
|
||||
block = stack.pop();
|
||||
push({ type: 'text', value });
|
||||
block = stack[stack.length - 1];
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Quotes: '|"|`
|
||||
*/
|
||||
|
||||
if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {
|
||||
const open = value;
|
||||
let next;
|
||||
|
||||
if (options.keepQuotes !== true) {
|
||||
value = '';
|
||||
}
|
||||
|
||||
while (index < length && (next = advance())) {
|
||||
if (next === CHAR_BACKSLASH) {
|
||||
value += next + advance();
|
||||
continue;
|
||||
}
|
||||
|
||||
if (next === open) {
|
||||
if (options.keepQuotes === true) value += next;
|
||||
break;
|
||||
}
|
||||
|
||||
value += next;
|
||||
}
|
||||
|
||||
push({ type: 'text', value });
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Left curly brace: '{'
|
||||
*/
|
||||
|
||||
if (value === CHAR_LEFT_CURLY_BRACE) {
|
||||
depth++;
|
||||
|
||||
const dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;
|
||||
const brace = {
|
||||
type: 'brace',
|
||||
open: true,
|
||||
close: false,
|
||||
dollar,
|
||||
depth,
|
||||
commas: 0,
|
||||
ranges: 0,
|
||||
nodes: []
|
||||
};
|
||||
|
||||
block = push(brace);
|
||||
stack.push(block);
|
||||
push({ type: 'open', value });
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Right curly brace: '}'
|
||||
*/
|
||||
|
||||
if (value === CHAR_RIGHT_CURLY_BRACE) {
|
||||
if (block.type !== 'brace') {
|
||||
push({ type: 'text', value });
|
||||
continue;
|
||||
}
|
||||
|
||||
const type = 'close';
|
||||
block = stack.pop();
|
||||
block.close = true;
|
||||
|
||||
push({ type, value });
|
||||
depth--;
|
||||
|
||||
block = stack[stack.length - 1];
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Comma: ','
|
||||
*/
|
||||
|
||||
if (value === CHAR_COMMA && depth > 0) {
|
||||
if (block.ranges > 0) {
|
||||
block.ranges = 0;
|
||||
const open = block.nodes.shift();
|
||||
block.nodes = [open, { type: 'text', value: stringify(block) }];
|
||||
}
|
||||
|
||||
push({ type: 'comma', value });
|
||||
block.commas++;
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Dot: '.'
|
||||
*/
|
||||
|
||||
if (value === CHAR_DOT && depth > 0 && block.commas === 0) {
|
||||
const siblings = block.nodes;
|
||||
|
||||
if (depth === 0 || siblings.length === 0) {
|
||||
push({ type: 'text', value });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (prev.type === 'dot') {
|
||||
block.range = [];
|
||||
prev.value += value;
|
||||
prev.type = 'range';
|
||||
|
||||
if (block.nodes.length !== 3 && block.nodes.length !== 5) {
|
||||
block.invalid = true;
|
||||
block.ranges = 0;
|
||||
prev.type = 'text';
|
||||
continue;
|
||||
}
|
||||
|
||||
block.ranges++;
|
||||
block.args = [];
|
||||
continue;
|
||||
}
|
||||
|
||||
if (prev.type === 'range') {
|
||||
siblings.pop();
|
||||
|
||||
const before = siblings[siblings.length - 1];
|
||||
before.value += prev.value + value;
|
||||
prev = before;
|
||||
block.ranges--;
|
||||
continue;
|
||||
}
|
||||
|
||||
push({ type: 'dot', value });
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Text
|
||||
*/
|
||||
|
||||
push({ type: 'text', value });
|
||||
}
|
||||
|
||||
// Mark imbalanced braces and brackets as invalid
|
||||
do {
|
||||
block = stack.pop();
|
||||
|
||||
if (block.type !== 'root') {
|
||||
block.nodes.forEach(node => {
|
||||
if (!node.nodes) {
|
||||
if (node.type === 'open') node.isOpen = true;
|
||||
if (node.type === 'close') node.isClose = true;
|
||||
if (!node.nodes) node.type = 'text';
|
||||
node.invalid = true;
|
||||
}
|
||||
});
|
||||
|
||||
// get the location of the block on parent.nodes (block's siblings)
|
||||
const parent = stack[stack.length - 1];
|
||||
const index = parent.nodes.indexOf(block);
|
||||
// replace the (invalid) block with it's nodes
|
||||
parent.nodes.splice(index, 1, ...block.nodes);
|
||||
}
|
||||
} while (stack.length > 0);
|
||||
|
||||
push({ type: 'eos' });
|
||||
return ast;
|
||||
};
|
||||
|
||||
module.exports = parse;
|
32
web/status/node_modules/braces/lib/stringify.js
generated
vendored
32
web/status/node_modules/braces/lib/stringify.js
generated
vendored
@ -1,32 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const utils = require('./utils');
|
||||
|
||||
module.exports = (ast, options = {}) => {
|
||||
const stringify = (node, parent = {}) => {
|
||||
const invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent);
|
||||
const invalidNode = node.invalid === true && options.escapeInvalid === true;
|
||||
let output = '';
|
||||
|
||||
if (node.value) {
|
||||
if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) {
|
||||
return '\\' + node.value;
|
||||
}
|
||||
return node.value;
|
||||
}
|
||||
|
||||
if (node.value) {
|
||||
return node.value;
|
||||
}
|
||||
|
||||
if (node.nodes) {
|
||||
for (const child of node.nodes) {
|
||||
output += stringify(child);
|
||||
}
|
||||
}
|
||||
return output;
|
||||
};
|
||||
|
||||
return stringify(ast);
|
||||
};
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user