
Protection against curl HTTP/2 errors (for people who don't keep their machines up to date)

unfr 2025-08-14 19:39:07 +02:00
parent 8abff0fb6c
commit 61ef28108d
2 changed files with 195 additions and 177 deletions
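Context for the change: on systems with an outdated curl, downloads from some HTTPS servers can abort with HTTP/2 stream or framing errors. The shared download() helper introduced in both files works around this by trying HTTP/1.1 over IPv4 first, then retrying with the proxy environment cleared, then without forcing IPv4, and finally falling back to wget. A rough standalone sketch of the same workaround (the URL is just one of the artifacts the installer fetches; the flags assume a curl new enough for --retry-all-errors, 7.71+, and the %{http_version} write-out variable, 7.50+):

# Show which HTTP version this curl negotiates against the server:
curl -sI -o /dev/null -w '%{http_version}\n' https://mediaarea.net/
# First attempt made by the helper: force HTTP/1.1 over IPv4, with retries:
curl --http1.1 -4 --fail --silent --show-error --location \
  --retry 5 --retry-all-errors --retry-delay 2 --connect-timeout 15 \
  -o mediainfo_25.04-1_amd64.Debian_12.deb \
  "https://mediaarea.net/download/binary/mediainfo/25.04/mediainfo_25.04-1_amd64.Debian_12.deb"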

View File

@@ -47,6 +47,29 @@ install_bin() { # install_bin <src> <dst>
install -m 755 "$1" "$2"
}
# ========= Downloader (curl→wget fallback) =========
download() {
local url="$1" out="$2"
[ -z "$url" ] || [ -z "$out" ] && { echo "download: usage: download <url> <outfile>" >&2; return 2; }
mkdir -p -- "$(dirname -- "$out")"
local tmp="${out}.dl.$$"
local curl_opts=(--fail --silent --show-error --location --retry 5 --retry-all-errors --retry-delay 2 --connect-timeout 15)
local wget_opts=(--quiet --https-only --tries=5 --waitretry=2 --retry-connrefused)
if command -v curl >/dev/null 2>&1; then
if curl --http1.1 -4 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
if env -u http_proxy -u https_proxy -u all_proxy \
curl --http1.1 -4 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
if curl --http1.1 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
fi
if command -v wget >/dev/null 2>&1; then
if wget --inet4-only "${wget_opts[@]}" -O "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
if wget "${wget_opts[@]}" -O "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
fi
rm -f -- "$tmp" 2>/dev/null || true
return 1
}
# ========= Start =========
log "Initialisation des dossiers"
ensure_dir "$BIN_DIR"
@@ -70,7 +93,6 @@ MISSING=()
for c in wget curl tar xz; do
has_cmd "$c" || MISSING+=("$c")
done
# unzip est requis plus bas
has_cmd unzip || MISSING+=("unzip")
if [ "${#MISSING[@]}" -gt 0 ]; then
warn "Installation des prérequis manquants: ${MISSING[*]}"
@@ -101,14 +123,18 @@ else
"https://mediaarea.net/download/binary/mediainfo/25.04/mediainfo_25.04-1_amd64.Debian_12.deb"
)
pushd "$TMP_DIR" >/dev/null
for u in "${DEBS[@]}"; do wget -q "$u"; done
for u in "${DEBS[@]}"; do
f="$(basename "$u")"
download "$u" "$f" || die "Téléchargement échoué: $u"
done
run_root dpkg -i ./*.deb || run_root apt-get -f -y install
popd >/dev/null
fi
else
# AppImage fallback
APP="$BIN_DIR/mediainfo"
curl -fsSL -o "$APP" "https://mediaarea.net/download/binary/mediainfo/20.09/mediainfo-20.09.glibc2.3-x86_64.AppImage"
download "https://mediaarea.net/download/binary/mediainfo/20.09/mediainfo-20.09.glibc2.3-x86_64.AppImage" "$APP" \
|| die "Téléchargement mediainfo AppImage"
install -m 755 "$APP" "$APP"
ok "mediainfo AppImage installé dans $BIN_DIR"
fi
@@ -129,9 +155,9 @@ select BDD in "SQLite" "MySQL"; do
else
warn "apt absent → installation binaire sqlite3"
pushd "$TMP_DIR" >/dev/null
curl -fsSL -o sqlite-tools.zip "https://www.sqlite.org/2024/sqlite-tools-linux-x64-3470000.zip"
unzip -q sqlite-tools.zip
# trouve sqlite3
download "https://www.sqlite.org/2024/sqlite-tools-linux-x64-3470000.zip" "$TMP_DIR/sqlite-tools.zip" \
|| die "Téléchargement sqlite-tools"
unzip -q "$TMP_DIR/sqlite-tools.zip"
SQLITE_BIN="$(find . -type f -name sqlite3 -perm -u+x | head -n1)"
[ -n "$SQLITE_BIN" ] || die "sqlite3 introuvable dans larchive"
install_bin "$SQLITE_BIN" "$BIN_DIR/sqlite3"
@@ -153,8 +179,9 @@ select BDD in "SQLite" "MySQL"; do
run_root apt-get install -y sqlite3
else
pushd "$TMP_DIR" >/dev/null
curl -fsSL -o sqlite-tools.zip "https://www.sqlite.org/2024/sqlite-tools-linux-x64-3470000.zip"
unzip -q sqlite-tools.zip
download "https://www.sqlite.org/2024/sqlite-tools-linux-x64-3470000.zip" "$TMP_DIR/sqlite-tools.zip" \
|| die "Téléchargement sqlite-tools"
unzip -q "$TMP_DIR/sqlite-tools.zip"
SQLITE_BIN="$(find . -type f -name sqlite3 -perm -u+x | head -n1)"
[ -n "$SQLITE_BIN" ] || die "sqlite3 introuvable dans larchive"
install_bin "$SQLITE_BIN" "$BIN_DIR/sqlite3"
@@ -176,7 +203,8 @@ else
if has_cmd apt-get; then
run_root apt-get install -y jq
else
curl -fsSL -o "$TMP_DIR/jq" "https://github.com/jqlang/jq/releases/download/jq-1.7.1/jq-linux-amd64"
download "https://github.com/jqlang/jq/releases/download/jq-1.7.1/jq-linux-amd64" "$TMP_DIR/jq" \
|| die "Téléchargement jq"
install_bin "$TMP_DIR/jq" "$BIN_DIR/jq"
fi
fi
@@ -189,9 +217,8 @@ else
log "Installation de 7z (binaire standalone)"
pushd "$TMP_DIR" >/dev/null
Z_URL="https://7-zip.org/a/7z2409-linux-x64.tar.xz"
wget -q -O 7z.tar.xz "$Z_URL"
tar -xJf 7z.tar.xz
# Cherche 7zz* exécutable
download "$Z_URL" "$TMP_DIR/7z.tar.xz" || die "Téléchargement 7z"
tar -xJf "$TMP_DIR/7z.tar.xz"
Z_BIN="$(find . -maxdepth 1 -type f -name '7zz*' -perm -u+x | head -n1)"
[ -n "$Z_BIN" ] || die "binaire 7z introuvable"
install_bin "$Z_BIN" "$BIN_DIR/7z"
@@ -202,14 +229,16 @@ REQUIRED_CMDS+=(7z)
# ========= BDInfo & Substractor =========
log "Installation BDInfo"
pushd "$TMP_DIR" >/dev/null
curl -fsSL -o bdinfo.zip "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfo_linux_v2.0.6.zip"
unzip -q bdinfo.zip
download "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfo_linux_v2.0.6.zip" "$TMP_DIR/bdinfo.zip" \
|| die "Téléchargement BDInfo"
unzip -q "$TMP_DIR/bdinfo.zip"
BDINFO_BIN="$(find . -type f -name BDInfo -perm -u+x | head -n1)"
[ -n "$BDINFO_BIN" ] || die "BDInfo introuvable"
install_bin "$BDINFO_BIN" "$BIN_DIR/BDInfo"
curl -fsSL -o substractor.zip "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfodatasubstractor_linux_v2.0.6.zip"
unzip -q substractor.zip
download "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfodatasubstractor_linux_v2.0.6.zip" "$TMP_DIR/substractor.zip" \
|| die "Téléchargement BDInfoDataSubstractor"
unzip -q "$TMP_DIR/substractor.zip"
SUB_BIN="$(find . -type f -name BDInfoDataSubstractor -perm -u+x | head -n1)"
[ -n "$SUB_BIN" ] || die "BDInfoDataSubstractor introuvable"
install_bin "$SUB_BIN" "$BIN_DIR/BDInfoDataSubstractor"
@@ -220,8 +249,8 @@ REQUIRED_CMDS+=(BDInfo BDInfoDataSubstractor)
log "Installation Nyuu"
pushd "$TMP_DIR" >/dev/null
NYUU_URL="https://github.com/Antidote2151/Nyuu-Obfuscation/releases/download/v0.4.2-Obfuscate1.3/nyuu-v0.4.2-Obfuscate1.3-linux-amd64.tar.xz"
wget -q -O nyuu.tar.xz "$NYUU_URL"
tar -xJf nyuu.tar.xz
download "$NYUU_URL" "$TMP_DIR/nyuu.tar.xz" || die "Téléchargement nyuu"
tar -xJf "$TMP_DIR/nyuu.tar.xz"
NYUU_BIN="$(find . -type f -name nyuu -perm -u+x | head -n1)"
[ -n "$NYUU_BIN" ] || die "nyuu introuvable dans larchive"
install_bin "$NYUU_BIN" "$BIN_DIR/nyuu"
@@ -232,8 +261,8 @@ REQUIRED_CMDS+=(nyuu)
log "Installation ParPar"
pushd "$TMP_DIR" >/dev/null
PARPAR_URL="https://github.com/animetosho/ParPar/releases/download/v0.4.5/parpar-v0.4.5-linux-static-amd64.xz"
wget -q -O parpar.xz "$PARPAR_URL"
xz -d parpar.xz
download "$PARPAR_URL" "$TMP_DIR/parpar.xz" || die "Téléchargement parpar"
xz -d "$TMP_DIR/parpar.xz"
PARPAR_BIN="$(find . -maxdepth 1 -type f -name 'parpar-*' -perm -u+x | head -n1)"
[ -n "$PARPAR_BIN" ] || die "parpar introuvable"
install_bin "$PARPAR_BIN" "$BIN_DIR/parpar"
@@ -242,11 +271,11 @@ REQUIRED_CMDS+=(parpar)
# ========= Téléchargement scripts autopost =========
log "Téléchargement des scripts autopost"
wget -q -O "$AUTOPOST_DIR/analyzer.sh" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/analyzer.sh"
wget -q -O "$AUTOPOST_DIR/common.sh" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/common.sh"
wget -q -O "$AUTOPOST_DIR/posteur.sh" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/posteur.sh"
wget -q -O "$BIN_DIR/postauto" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/bin/postauto"
[ -f "$AUTOPOST_DIR/conf.sh" ] || wget -q -O "$AUTOPOST_DIR/conf.sh" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/conf.sh"
download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/analyzer.sh" "$AUTOPOST_DIR/analyzer.sh" || die "analyzer.sh"
download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/common.sh" "$AUTOPOST_DIR/common.sh" || die "common.sh"
download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/posteur.sh" "$AUTOPOST_DIR/posteur.sh" || die "posteur.sh"
download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/bin/postauto" "$BIN_DIR/postauto" || die "postauto"
[ -f "$AUTOPOST_DIR/conf.sh" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/conf.sh" "$AUTOPOST_DIR/conf.sh"
chmod 755 "$BIN_DIR/postauto"
chmod -R 755 "$AUTOPOST_DIR"
@@ -254,7 +283,7 @@ chmod -R 755 "$AUTOPOST_DIR"
# ========= Bash completion (fichier dédié) =========
COMP_FILE="$BASH_COMPLETION_DIR/postauto"
if [ ! -s "$COMP_FILE" ]; then
cat > "$COMP_FILE" <<'EOF'
cat > "$COMP_FILE" <<'EOF'
# completion postauto
_autopost_completion() {
local cur prev opts
@@ -274,7 +303,6 @@ _autopost_completion() {
complete -F _autopost_completion postauto
EOF
ok "Completion bash installée dans $COMP_FILE"
# source automatique depuis .bashrc si pas déjà présent
if ! grep -q 'bash_completion.d' "$BASHRC_FILE" 2>/dev/null; then
echo '[ -f "$HOME/.bash_completion.d/postauto" ] && . "$HOME/.bash_completion.d/postauto"' >> "$BASHRC_FILE"
fi
@@ -289,7 +317,10 @@ if [ -s "$NVM_DIR/nvm.sh" ]; then
. "$NVM_DIR/nvm.sh"
else
log "Installation de nvm"
curl -fsSL https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh | bash
tmp_nvm="$TMP_DIR/install_nvm.sh"
download "https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh" "$tmp_nvm" \
|| die "Téléchargement nvm install.sh"
bash "$tmp_nvm"
. "$NVM_DIR/nvm.sh"
fi
@@ -328,12 +359,12 @@ popd >/dev/null
# ========= Fichiers Node (server.js, db.js, config.js) =========
log "Vérification fichiers Node"
[ -f "$AUTOPOST_DIR/server.js" ] || wget -q -O "$AUTOPOST_DIR/server.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/server.js"
[ -f "$AUTOPOST_DIR/db.js" ] || wget -q -O "$AUTOPOST_DIR/db.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/db.js"
[ -f "$AUTOPOST_DIR/public/autopost.js" ] || wget -q -O "$AUTOPOST_DIR/public/autopost.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/public/autopost.js"
[ -f "$AUTOPOST_DIR/views/autopost.html" ] || wget -q -O "$AUTOPOST_DIR/views/autopost.html" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/views/autopost.html"
[ -f "$AUTOPOST_DIR/server.js" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/server.js" "$AUTOPOST_DIR/server.js"
[ -f "$AUTOPOST_DIR/db.js" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/db.js" "$AUTOPOST_DIR/db.js"
[ -f "$AUTOPOST_DIR/public/autopost.js" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/public/autopost.js" "$AUTOPOST_DIR/public/autopost.js"
[ -f "$AUTOPOST_DIR/views/autopost.html" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/views/autopost.html" "$AUTOPOST_DIR/views/autopost.html"
if [ ! -f "$AUTOPOST_DIR/config.js" ]; then
wget -q -O "$AUTOPOST_DIR/config.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/config.js"
download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/config.js" "$AUTOPOST_DIR/config.js"
ok "Installation terminée. Configurez $AUTOPOST_DIR/config.js."
fi
@@ -342,7 +373,6 @@ log "Vérification finale des binaires requis"
missing_final=()
for cmd in "${REQUIRED_CMDS[@]}"; do
if ! command -v "$cmd" >/dev/null 2>&1; then
# autoriser chemin absolu
if [[ "$cmd" == */* ]]; then
[ -x "$cmd" ] || missing_final+=("$cmd")
else

View File

@@ -26,6 +26,29 @@ die() { err "$*"; exit 1; }
install_bin(){ install -m 755 "$1" "$2"; }
# ========= Downloader (curl→wget fallback) =========
download() {
local url="$1" out="$2"
[ -z "$url" ] || [ -z "$out" ] && { echo "download: usage: download <url> <outfile>" >&2; return 2; }
mkdir -p -- "$(dirname -- "$out")"
local tmp="${out}.dl.$$"
local curl_opts=(--fail --silent --show-error --location --retry 5 --retry-all-errors --retry-delay 2 --connect-timeout 15)
local wget_opts=(--quiet --https-only --tries=5 --waitretry=2 --retry-connrefused)
if command -v curl >/dev/null 2>&1; then
if curl --http1.1 -4 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
if env -u http_proxy -u https_proxy -u all_proxy \
curl --http1.1 -4 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
if curl --http1.1 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
fi
if command -v wget >/dev/null 2>&1; then
if wget --inet4-only "${wget_opts[@]}" -O "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
if wget "${wget_opts[@]}" -O "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
fi
rm -f -- "$tmp" 2>/dev/null || true
return 1
}
# --- lire une clé JS (ligne "clé: valeur") sans exécuter ---
parse_js_raw() {
local key="$1"
@@ -33,34 +56,17 @@ parse_js_raw() {
| head -n1 | sed -E "s/[[:space:]]*(,)?[[:space:]]*$//"
}
# --- normaliser une valeur JS simple: enlève guillemets, garde nombres, laisse path.join tel quel ---
# --- normaliser une valeur JS simple ---
# - supprime les commentaires inline " // ... "
# - supprime la virgule terminale
# - trim espaces
# - retire guillemets si présents
normalize_js_value() {
local raw="$1"
# retire commentaire inline: seulement si précédé d'un espace (évite "https://")
raw="$(printf '%s' "$raw" | sed -E 's@[[:space:]]//.*$@@')"
# retire virgule en fin de champ et espaces résiduels
raw="$(printf '%s' "$raw" | sed -E 's/,[[:space:]]*$//')"
raw="$(printf '%s' "$raw" | sed -E 's/^[[:space:]]+//; s/[[:space:]]+$//')"
# retire guillemets simples/doubles
if [[ "$raw" =~ ^\"(.*)\"$ ]]; then
printf '%s\n' "${BASH_REMATCH[1]}"; return
fi
if [[ "$raw" =~ ^\'(.*)\'$ ]]; then
printf '%s\n' "${BASH_REMATCH[1]}"; return
fi
raw="$(printf '%s' "$raw" | sed -E 's@[[:space:]]//.*$@@')" # retire commentaire inline
raw="$(printf '%s' "$raw" | sed -E 's/,[[:space:]]*$//')" # retire virgule
raw="$(printf '%s' "$raw" | sed -E 's/^[[:space:]]+//; s/[[:space:]]+$//')" # trim
if [[ "$raw" =~ ^\"(.*)\"$ ]]; then printf '%s\n' "${BASH_REMATCH[1]}"; return; fi
if [[ "$raw" =~ ^\'(.*)\'$ ]]; then printf '%s\n' "${BASH_REMATCH[1]}"; return; fi
printf '%s\n' "$raw"
}
# placeholders à refuser (vides, “Voir…”, “CHANGEME…”, etc.)
is_placeholder() {
local v="$1"
@@ -72,14 +78,12 @@ is_placeholder() {
# entier (>=0)
is_int() { [[ "$1" =~ ^[0-9]+$ ]]; }
# booléen JS (true/false), avec ou sans guillemets
# booléen JS (true/false)
is_bool_literal() {
local v="$(echo "$1" | tr '[:upper:]' '[:lower:]')"
[[ "$v" == "true" || "$v" == "false" ]]
}
# ────────── Paths ──────────
BIN_DIR="$HOME/bin"
AUTOPOST_DIR="$HOME/autopost"
@@ -110,14 +114,14 @@ FILES["$AUTOPOST_DIR/views/autopost.html"]="https://tig.unfr.pw/UNFR/postauto/ra
log "Vérification/MAJ des fichiers…"
for LOCAL in "${!FILES[@]}"; do
URL="${FILES[$LOCAL]}"
TMP="$TMP_DIR/$(basename "$LOCAL").dl"
curl -fsSL "$URL" -o "$TMP" || die "Téléchargement échoué: $URL"
TMPF="$TMP_DIR/$(basename "$LOCAL").dl"
download "$URL" "$TMPF" || die "Téléchargement échoué: $URL"
if [ ! -f "$LOCAL" ] || ! cmp -s "$LOCAL" "$TMP"; then
if [ ! -f "$LOCAL" ] || ! cmp -s "$LOCAL" "$TMPF"; then
cp -f "$LOCAL" "$LOCAL.bak" 2>/dev/null || true
case "$LOCAL" in
*postauto|*.sh) install_bin "$TMP" "$LOCAL" ;;
*) install -m 644 "$TMP" "$LOCAL" ;;
*postauto|*.sh) install_bin "$TMPF" "$LOCAL" ;;
*) install -m 644 "$TMPF" "$LOCAL" ;;
esac
ok "Mise à jour: $LOCAL"
updated=1
@@ -159,7 +163,6 @@ EOF
if [ ! -s "$COMP_FILE" ] || ! cmp -s <(printf "%s" "$COMPLETION_CODE") "$COMP_FILE"; then
printf "%s" "$COMPLETION_CODE" > "$COMP_FILE"
ok "Completion installée: $COMP_FILE"
# hook .bashrc si pas déjà présent
grep -q '\.bash_completion.d/postauto' "$BASHRC_FILE" 2>/dev/null || \
echo '[ -f "$HOME/.bash_completion.d/postauto" ] && . "$HOME/.bash_completion.d/postauto"' >> "$BASHRC_FILE"
updated=1
@@ -171,8 +174,8 @@ ensure_cmd(){ command -v "$1" >/dev/null 2>&1; }
if ! ensure_cmd 7z; then
log "Installation 7z…"
pushd "$TMP_DIR" >/dev/null
wget -q -o /dev/null -O 7z.tar.xz "https://7-zip.org/a/7z2409-linux-x64.tar.xz"
tar -xJf 7z.tar.xz
download "https://7-zip.org/a/7z2409-linux-x64.tar.xz" "$TMP_DIR/7z.tar.xz" || die "Téléchargement 7z"
tar -xJf "$TMP_DIR/7z.tar.xz"
ZBIN="$(find . -maxdepth 1 -type f -name '7zz*' -perm -u+x | head -n1)"
[ -n "$ZBIN" ] || die "Binaire 7z introuvable"
install_bin "$ZBIN" "$BIN_DIR/7z"
@@ -182,8 +185,9 @@ fi
if ! ensure_cmd BDInfo; then
log "Installation BDInfo…"
pushd "$TMP_DIR" >/dev/null
wget -q -o /dev/null -O bdinfo.zip "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfo_linux_v2.0.6.zip"
unzip -q bdinfo.zip
download "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfo_linux_v2.0.6.zip" "$TMP_DIR/bdinfo.zip" \
|| die "Téléchargement BDInfo"
unzip -q "$TMP_DIR/bdinfo.zip"
BDBIN="$(find . -type f -name BDInfo -perm -u+x | head -n1)"
[ -n "$BDBIN" ] || die "BDInfo introuvable"
install_bin "$BDBIN" "$BIN_DIR/BDInfo"
@@ -193,8 +197,9 @@ fi
if ! ensure_cmd BDInfoDataSubstractor; then
log "Installation BDInfoDataSubstractor…"
pushd "$TMP_DIR" >/dev/null
wget -q -o /dev/null -O substractor.zip "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfodatasubstractor_linux_v2.0.6.zip"
unzip -q substractor.zip
download "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfodatasubstractor_linux_v2.0.6.zip" "$TMP_DIR/substractor.zip" \
|| die "Téléchargement BDInfoDataSubstractor"
unzip -q "$TMP_DIR/substractor.zip"
SBBIN="$(find . -type f -name BDInfoDataSubstractor -perm -u+x | head -n1)"
[ -n "$SBBIN" ] || die "BDInfoDataSubstractor introuvable"
install_bin "$SBBIN" "$BIN_DIR/BDInfoDataSubstractor"
@@ -219,14 +224,12 @@ fi
popd >/dev/null
# ────────── VALIDATION conf.sh (sans exécuter) ──────────
# --- conf.sh : validation déclarative ---
check_conf() {
local file="$CONF_SH"
[[ -f "$file" ]] || { err "Manquant: $file"; errors=$((errors+1)); return; }
log "Validation déclarative de $file"
# Parse simple NAME=VALUE (ignore commentaires / 'export')
declare -A V=()
while IFS= read -r line; do
[[ "$line" =~ ^[[:space:]]*# ]] && continue
@@ -241,7 +244,6 @@ check_conf() {
fi
done < "$file"
# Requis généraux (non-placeholder)
for k in URL_API APIKEY DOSSIER_GLOBAL DOSSIER_NFO DOSSIER_LOGS DOSSIER_NZB_ATTENTE DOSSIER_NZB_FINAL MOVE_CMD MYSQL_TABLE dbtype; do
v="${V[$k]:-}"
if is_placeholder "$v"; then
@@ -249,7 +251,6 @@ check_conf() {
fi
done
# MOVE_CMD valeurs autorisées
case "${V[MOVE_CMD]:-}" in
"cp -rl"|"cp -rs"|"ln -s"|"mv"|"cp") : ;;
*)
@@ -258,7 +259,6 @@ check_conf() {
;;
esac
# Fournisseur Usenet : non-vides + numériques où nécessaire
for k in NG_HOST NG_USER NG_PASS; do
if is_placeholder "${V[$k]:-}"; then
err "conf.sh: '$k' non renseigné"; errors=$((errors+1))
@@ -271,7 +271,6 @@ check_conf() {
err "conf.sh: NG_NBR_CONN doit être numérique"; errors=$((errors+1))
fi
# DB : règles conditionnelles (déclarations seulement)
case "${V[dbtype]:-}" in
sqlite)
if is_placeholder "${V[DB_FILE]:-}"; then
@@ -295,16 +294,11 @@ check_conf() {
esac
}
check_conf "$CONF_SH"
# ────────── VALIDATION config.js (avec Node) ──────────
# --- config.js : validation déclarative (sans exécuter du JS) ---
validate_config_js() {
[[ -f "$CFG_JS" ]] || { err "Manquant: $CFG_JS"; errors=$((errors+1)); return; }
log "Validation déclarative de $CFG_JS"
# valeurs principales
local dbtype port name secret table
dbtype="$(normalize_js_value "$(parse_js_raw dbtype)")"
port="$(normalize_js_value "$(parse_js_raw port)")"
@@ -312,7 +306,6 @@ validate_config_js() {
secret="$(normalize_js_value "$(parse_js_raw sessionSecret)")"
table="$(normalize_js_value "$(parse_js_raw DB_TABLE)")"
# checks minimaux
if ! is_int "$port" || (( port < 1 || port > 65535 )); then
err "config.js: 'port' invalide ($port)"; errors=$((errors+1))
fi
@@ -320,7 +313,6 @@ validate_config_js() {
if is_placeholder "$secret"; then err "config.js: 'sessionSecret' non renseigné"; errors=$((errors+1)); fi
if is_placeholder "$table"; then err "config.js: 'DB_TABLE' non renseigné"; errors=$((errors+1)); fi
# dossiers : déclaration non vide (pas de test FS)
for key in finishdirectory logdirectory infodirectory; do
val="$(normalize_js_value "$(parse_js_raw "$key")")"
if is_placeholder "$val"; then
@@ -328,7 +320,6 @@ validate_config_js() {
fi
done
# trustProxy / cookieSecure / sessionStorePath
local tp cs ssp
tp="$(normalize_js_value "$(parse_js_raw trustProxy)")"
cs="$(normalize_js_value "$(parse_js_raw cookieSecure)")"
@@ -341,9 +332,6 @@ validate_config_js() {
if (( tp < 0 )); then
err "config.js: 'trustProxy' doit être >= 0 (valeur=$tp)"; errors=$((errors+1))
fi
else
# chaîne non vide acceptée (ex: "loopback,uniquelocal")
:
fi
fi
@@ -355,7 +343,6 @@ validate_config_js() {
err "config.js: 'sessionStorePath' non renseigné"; errors=$((errors+1))
fi
# règles DB (déclarations uniquement)
case "$dbtype" in
sqlite)
val="$(normalize_js_value "$(parse_js_raw dbFile)")"
@@ -383,6 +370,7 @@ validate_config_js() {
esac
}
check_conf "$CONF_SH"
validate_config_js
# ────────── Résumé & exit codes ──────────
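As a quick manual sanity check, the new helper can be exercised on its own by pasting the download() function into an interactive bash session; the URL below is one the installer already uses, and the output path /tmp/nvm-install.sh is arbitrary:

download "https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh" /tmp/nvm-install.sh \
  && echo "OK: $(wc -c < /tmp/nvm-install.sh) bytes" \
  || echo "download failed" >&2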