From 61ef28108dd92482b35b8c5b2862432bdf4ad61b Mon Sep 17 00:00:00 2001
From: unfr
Date: Thu, 14 Aug 2025 19:39:07 +0200
Subject: [PATCH] =?UTF-8?q?protection=20contre=20les=20erreurs=20curl=20ht?=
 =?UTF-8?q?tp/2=20(pour=20les=20gens=20qui=20ne=20mettent=20pas=20=C3=A0=20?=
 =?UTF-8?q?jour=20leur=20machine)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 install.sh |  98 ++++++++++++-------
 update.sh  | 274 +++++++++++++++++++++++++----------------------------
 2 files changed, 195 insertions(+), 177 deletions(-)

diff --git a/install.sh b/install.sh
index 40d1e49..2ef03fa 100644
--- a/install.sh
+++ b/install.sh
@@ -47,6 +47,29 @@ install_bin() { # install_bin
   install -m 755 "$1" "$2"
 }
 
+# ========= Downloader (curl→wget fallback) =========
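+# Ordre des tentatives (d'après le code ci-dessous) : curl forcé en HTTP/1.1 et en IPv4,
+# puis sans les variables d'environnement proxy, puis sans -4 ; en dernier recours,
+# repli sur wget (IPv4 d'abord). Le téléchargement passe par un fichier temporaire
+# déplacé seulement en cas de succès, pour ne jamais laisser une cible tronquée.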
+download() {
+  local url="$1" out="$2"
+  [ -z "$url" ] || [ -z "$out" ] && { echo "download: usage: download <url> <out>" >&2; return 2; }
+  mkdir -p -- "$(dirname -- "$out")"
+  local tmp="${out}.dl.$$"
+  local curl_opts=(--fail --silent --show-error --location --retry 5 --retry-all-errors --retry-delay 2 --connect-timeout 15)
+  local wget_opts=(--quiet --https-only --tries=5 --waitretry=2 --retry-connrefused)
+
+  if command -v curl >/dev/null 2>&1; then
+    if curl --http1.1 -4 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
+    if env -u http_proxy -u https_proxy -u all_proxy \
+       curl --http1.1 -4 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
+    if curl --http1.1 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
+  fi
+  if command -v wget >/dev/null 2>&1; then
+    if wget --inet4-only "${wget_opts[@]}" -O "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
+    if wget "${wget_opts[@]}" -O "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
+  fi
+  rm -f -- "$tmp" 2>/dev/null || true
+  return 1
+}
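+# Exemple d'appel (même forme que les appels plus bas dans ce script) :
+#   download "$Z_URL" "$TMP_DIR/7z.tar.xz" || die "Téléchargement 7z"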
+
 # ========= Start =========
 log "Initialisation des dossiers"
 ensure_dir "$BIN_DIR"
@@ -70,7 +93,6 @@ MISSING=()
 for c in wget curl tar xz; do
   has_cmd "$c" || MISSING+=("$c")
 done
-# unzip est requis plus bas
 has_cmd unzip || MISSING+=("unzip")
 if [ "${#MISSING[@]}" -gt 0 ]; then
   warn "Installation des prérequis manquants: ${MISSING[*]}"
@@ -101,14 +123,18 @@ else
     "https://mediaarea.net/download/binary/mediainfo/25.04/mediainfo_25.04-1_amd64.Debian_12.deb"
   )
   pushd "$TMP_DIR" >/dev/null
-  for u in "${DEBS[@]}"; do wget -q "$u"; done
+  for u in "${DEBS[@]}"; do
+    f="$(basename "$u")"
+    download "$u" "$f" || die "Téléchargement échoué: $u"
+  done
   run_root dpkg -i ./*.deb || run_root apt-get -f -y install
   popd >/dev/null
 fi
 else
   # AppImage fallback
   APP="$BIN_DIR/mediainfo"
-  curl -fsSL -o "$APP" "https://mediaarea.net/download/binary/mediainfo/20.09/mediainfo-20.09.glibc2.3-x86_64.AppImage"
+  download "https://mediaarea.net/download/binary/mediainfo/20.09/mediainfo-20.09.glibc2.3-x86_64.AppImage" "$APP" \
+    || die "Téléchargement mediainfo AppImage"
   install -m 755 "$APP" "$APP"
   ok "mediainfo AppImage installé dans $BIN_DIR"
 fi
@@ -129,9 +155,9 @@ select BDD in "SQLite" "MySQL"; do
     else
       warn "apt absent → installation binaire sqlite3"
       pushd "$TMP_DIR" >/dev/null
-      curl -fsSL -o sqlite-tools.zip "https://www.sqlite.org/2024/sqlite-tools-linux-x64-3470000.zip"
-      unzip -q sqlite-tools.zip
-      # trouve sqlite3
+      download "https://www.sqlite.org/2024/sqlite-tools-linux-x64-3470000.zip" "$TMP_DIR/sqlite-tools.zip" \
+        || die "Téléchargement sqlite-tools"
+      unzip -q "$TMP_DIR/sqlite-tools.zip"
       SQLITE_BIN="$(find . -type f -name sqlite3 -perm -u+x | head -n1)"
       [ -n "$SQLITE_BIN" ] || die "sqlite3 introuvable dans l’archive"
       install_bin "$SQLITE_BIN" "$BIN_DIR/sqlite3"
@@ -153,8 +179,9 @@ select BDD in "SQLite" "MySQL"; do
       run_root apt-get install -y sqlite3
     else
       pushd "$TMP_DIR" >/dev/null
-      curl -fsSL -o sqlite-tools.zip "https://www.sqlite.org/2024/sqlite-tools-linux-x64-3470000.zip"
-      unzip -q sqlite-tools.zip
+      download "https://www.sqlite.org/2024/sqlite-tools-linux-x64-3470000.zip" "$TMP_DIR/sqlite-tools.zip" \
+        || die "Téléchargement sqlite-tools"
+      unzip -q "$TMP_DIR/sqlite-tools.zip"
       SQLITE_BIN="$(find . -type f -name sqlite3 -perm -u+x | head -n1)"
       [ -n "$SQLITE_BIN" ] || die "sqlite3 introuvable dans l’archive"
       install_bin "$SQLITE_BIN" "$BIN_DIR/sqlite3"
@@ -176,7 +203,8 @@ else
   if has_cmd apt-get; then
     run_root apt-get install -y jq
   else
-    curl -fsSL -o "$TMP_DIR/jq" "https://github.com/jqlang/jq/releases/download/jq-1.7.1/jq-linux-amd64"
+    download "https://github.com/jqlang/jq/releases/download/jq-1.7.1/jq-linux-amd64" "$TMP_DIR/jq" \
+      || die "Téléchargement jq"
     install_bin "$TMP_DIR/jq" "$BIN_DIR/jq"
   fi
 fi
@@ -189,9 +217,8 @@ else
 log "Installation de 7z (binaire standalone)"
 pushd "$TMP_DIR" >/dev/null
 Z_URL="https://7-zip.org/a/7z2409-linux-x64.tar.xz"
-wget -q -O 7z.tar.xz "$Z_URL"
-tar -xJf 7z.tar.xz
-# Cherche 7zz* exécutable
+download "$Z_URL" "$TMP_DIR/7z.tar.xz" || die "Téléchargement 7z"
+tar -xJf "$TMP_DIR/7z.tar.xz"
 Z_BIN="$(find . -maxdepth 1 -type f -name '7zz*' -perm -u+x | head -n1)"
 [ -n "$Z_BIN" ] || die "binaire 7z introuvable"
 install_bin "$Z_BIN" "$BIN_DIR/7z"
@@ -202,14 +229,16 @@ REQUIRED_CMDS+=(7z)
 # ========= BDInfo & Substractor =========
 log "Installation BDInfo"
 pushd "$TMP_DIR" >/dev/null
-curl -fsSL -o bdinfo.zip "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfo_linux_v2.0.6.zip"
-unzip -q bdinfo.zip
+download "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfo_linux_v2.0.6.zip" "$TMP_DIR/bdinfo.zip" \
+  || die "Téléchargement BDInfo"
+unzip -q "$TMP_DIR/bdinfo.zip"
 BDINFO_BIN="$(find . -type f -name BDInfo -perm -u+x | head -n1)"
 [ -n "$BDINFO_BIN" ] || die "BDInfo introuvable"
 install_bin "$BDINFO_BIN" "$BIN_DIR/BDInfo"
 
-curl -fsSL -o substractor.zip "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfodatasubstractor_linux_v2.0.6.zip"
-unzip -q substractor.zip
+download "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfodatasubstractor_linux_v2.0.6.zip" "$TMP_DIR/substractor.zip" \
+  || die "Téléchargement BDInfoDataSubstractor"
+unzip -q "$TMP_DIR/substractor.zip"
 SUB_BIN="$(find . -type f -name BDInfoDataSubstractor -perm -u+x | head -n1)"
 [ -n "$SUB_BIN" ] || die "BDInfoDataSubstractor introuvable"
 install_bin "$SUB_BIN" "$BIN_DIR/BDInfoDataSubstractor"
@@ -220,8 +249,8 @@ REQUIRED_CMDS+=(BDInfo BDInfoDataSubstractor)
 log "Installation Nyuu"
 pushd "$TMP_DIR" >/dev/null
 NYUU_URL="https://github.com/Antidote2151/Nyuu-Obfuscation/releases/download/v0.4.2-Obfuscate1.3/nyuu-v0.4.2-Obfuscate1.3-linux-amd64.tar.xz"
-wget -q -O nyuu.tar.xz "$NYUU_URL"
-tar -xJf nyuu.tar.xz
+download "$NYUU_URL" "$TMP_DIR/nyuu.tar.xz" || die "Téléchargement nyuu"
+tar -xJf "$TMP_DIR/nyuu.tar.xz"
 NYUU_BIN="$(find . -type f -name nyuu -perm -u+x | head -n1)"
 [ -n "$NYUU_BIN" ] || die "nyuu introuvable dans l’archive"
 install_bin "$NYUU_BIN" "$BIN_DIR/nyuu"
@@ -232,8 +261,8 @@ REQUIRED_CMDS+=(nyuu)
 log "Installation ParPar"
 pushd "$TMP_DIR" >/dev/null
 PARPAR_URL="https://github.com/animetosho/ParPar/releases/download/v0.4.5/parpar-v0.4.5-linux-static-amd64.xz"
-wget -q -O parpar.xz "$PARPAR_URL"
-xz -d parpar.xz
+download "$PARPAR_URL" "$TMP_DIR/parpar.xz" || die "Téléchargement parpar"
+xz -d "$TMP_DIR/parpar.xz"
 PARPAR_BIN="$(find . -maxdepth 1 -type f -name 'parpar-*' -perm -u+x | head -n1)"
 [ -n "$PARPAR_BIN" ] || die "parpar introuvable"
 install_bin "$PARPAR_BIN" "$BIN_DIR/parpar"
@@ -242,11 +271,11 @@ REQUIRED_CMDS+=(parpar)
 
 # ========= Téléchargement scripts autopost =========
 log "Téléchargement des scripts autopost"
-wget -q -O "$AUTOPOST_DIR/analyzer.sh" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/analyzer.sh"
-wget -q -O "$AUTOPOST_DIR/common.sh" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/common.sh"
-wget -q -O "$AUTOPOST_DIR/posteur.sh" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/posteur.sh"
-wget -q -O "$BIN_DIR/postauto" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/bin/postauto"
-[ -f "$AUTOPOST_DIR/conf.sh" ] || wget -q -O "$AUTOPOST_DIR/conf.sh" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/conf.sh"
+download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/analyzer.sh" "$AUTOPOST_DIR/analyzer.sh" || die "analyzer.sh"
+download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/common.sh" "$AUTOPOST_DIR/common.sh" || die "common.sh"
+download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/posteur.sh" "$AUTOPOST_DIR/posteur.sh" || die "posteur.sh"
+download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/bin/postauto" "$BIN_DIR/postauto" || die "postauto"
+[ -f "$AUTOPOST_DIR/conf.sh" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/conf.sh" "$AUTOPOST_DIR/conf.sh"
 
 chmod 755 "$BIN_DIR/postauto"
 chmod -R 755 "$AUTOPOST_DIR"
@@ -254,7 +283,7 @@ chmod -R 755 "$AUTOPOST_DIR"
 # ========= Bash completion (fichier dédié) =========
 COMP_FILE="$BASH_COMPLETION_DIR/postauto"
 if [ ! -s "$COMP_FILE" ]; then
-  cat > "$COMP_FILE" <<'EOF'
+cat > "$COMP_FILE" <<'EOF'
 # completion postauto
 _autopost_completion() {
   local cur prev opts
@@ -274,7 +303,6 @@ _autopost_completion() {
 complete -F _autopost_completion postauto
 EOF
   ok "Completion bash installée dans $COMP_FILE"
-  # source automatique depuis .bashrc si pas déjà présent
   if ! grep -q 'bash_completion.d' "$BASHRC_FILE" 2>/dev/null; then
     echo '[ -f "$HOME/.bash_completion.d/postauto" ] && . "$HOME/.bash_completion.d/postauto"' >> "$BASHRC_FILE"
   fi
@@ -289,7 +317,10 @@ if [ -s "$NVM_DIR/nvm.sh" ]; then
   . "$NVM_DIR/nvm.sh"
 else
   log "Installation de nvm"
-  curl -fsSL https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh | bash
+  tmp_nvm="$TMP_DIR/install_nvm.sh"
+  download "https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh" "$tmp_nvm" \
+    || die "Téléchargement nvm install.sh"
+  bash "$tmp_nvm"
   . "$NVM_DIR/nvm.sh"
 fi
 
@@ -328,12 +359,12 @@ popd >/dev/null
 
 # ========= Fichiers Node (server.js, db.js, config.js) =========
 log "Vérification fichiers Node"
-[ -f "$AUTOPOST_DIR/server.js" ] || wget -q -O "$AUTOPOST_DIR/server.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/server.js"
-[ -f "$AUTOPOST_DIR/db.js" ] || wget -q -O "$AUTOPOST_DIR/db.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/db.js"
-[ -f "$AUTOPOST_DIR/public/autopost.js" ] || wget -q -O "$AUTOPOST_DIR/public/autopost.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/public/autopost.js"
-[ -f "$AUTOPOST_DIR/views/autopost.html" ] || wget -q -O "$AUTOPOST_DIR/views/autopost.html" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/views/autopost.html"
+[ -f "$AUTOPOST_DIR/server.js" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/server.js" "$AUTOPOST_DIR/server.js"
+[ -f "$AUTOPOST_DIR/db.js" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/db.js" "$AUTOPOST_DIR/db.js"
+[ -f "$AUTOPOST_DIR/public/autopost.js" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/public/autopost.js" "$AUTOPOST_DIR/public/autopost.js"
+[ -f "$AUTOPOST_DIR/views/autopost.html" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/views/autopost.html" "$AUTOPOST_DIR/views/autopost.html"
 if [ ! -f "$AUTOPOST_DIR/config.js" ]; then
-  wget -q -O "$AUTOPOST_DIR/config.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/config.js"
+  download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/config.js" "$AUTOPOST_DIR/config.js"
   ok "Installation terminée. Configurez $AUTOPOST_DIR/config.js."
 fi
 
@@ -342,7 +373,6 @@ log "Vérification finale des binaires requis"
 missing_final=()
 for cmd in "${REQUIRED_CMDS[@]}"; do
   if ! command -v "$cmd" >/dev/null 2>&1; then
-    # autoriser chemin absolu
     if [[ "$cmd" == */* ]]; then
       [ -x "$cmd" ] || missing_final+=("$cmd")
     else
"$NVM_DIR/nvm.sh" fi @@ -328,12 +359,12 @@ popd >/dev/null # ========= Fichiers Node (server.js, db.js, config.js) ========= log "Vérification fichiers Node" -[ -f "$AUTOPOST_DIR/server.js" ] || wget -q -O "$AUTOPOST_DIR/server.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/server.js" -[ -f "$AUTOPOST_DIR/db.js" ] || wget -q -O "$AUTOPOST_DIR/db.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/db.js" -[ -f "$AUTOPOST_DIR/public/autopost.js" ] || wget -q -O "$AUTOPOST_DIR/public/autopost.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/public/autopost.js" -[ -f "$AUTOPOST_DIR/views/autopost.html" ] || wget -q -O "$AUTOPOST_DIR/views/autopost.html" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/views/autopost.html" +[ -f "$AUTOPOST_DIR/server.js" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/server.js" "$AUTOPOST_DIR/server.js" +[ -f "$AUTOPOST_DIR/db.js" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/db.js" "$AUTOPOST_DIR/db.js" +[ -f "$AUTOPOST_DIR/public/autopost.js" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/public/autopost.js" "$AUTOPOST_DIR/public/autopost.js" +[ -f "$AUTOPOST_DIR/views/autopost.html" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/views/autopost.html" "$AUTOPOST_DIR/views/autopost.html" if [ ! -f "$AUTOPOST_DIR/config.js" ]; then - wget -q -O "$AUTOPOST_DIR/config.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/config.js" + download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/config.js" "$AUTOPOST_DIR/config.js" ok "Installation terminée. Configurez $AUTOPOST_DIR/config.js." fi @@ -342,7 +373,6 @@ log "Vérification finale des binaires requis" missing_final=() for cmd in "${REQUIRED_CMDS[@]}"; do if ! 
command -v "$cmd" >/dev/null 2>&1; then - # autoriser chemin absolu if [[ "$cmd" == */* ]]; then [ -x "$cmd" ] || missing_final+=("$cmd") else diff --git a/update.sh b/update.sh index c3af7aa..a837019 100644 --- a/update.sh +++ b/update.sh @@ -26,41 +26,47 @@ die() { err "$*"; exit 1; } install_bin(){ install -m 755 "$1" "$2"; } +# ========= Downloader (curl→wget fallback) ========= +download() { + local url="$1" out="$2" + [ -z "$url" ] || [ -z "$out" ] && { echo "download: usage: download " >&2; return 2; } + mkdir -p -- "$(dirname -- "$out")" + local tmp="${out}.dl.$$" + local curl_opts=(--fail --silent --show-error --location --retry 5 --retry-all-errors --retry-delay 2 --connect-timeout 15) + local wget_opts=(--quiet --https-only --tries=5 --waitretry=2 --retry-connrefused) + + if command -v curl >/dev/null 2>&1; then + if curl --http1.1 -4 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi + if env -u http_proxy -u https_proxy -u all_proxy \ + curl --http1.1 -4 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi + if curl --http1.1 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi + fi + if command -v wget >/dev/null 2>&1; then + if wget --inet4-only "${wget_opts[@]}" -O "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi + if wget "${wget_opts[@]}" -O "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi + fi + rm -f -- "$tmp" 2>/dev/null || true + return 1 +} + # --- lire une clé JS (ligne "clé: valeur") sans exécuter --- parse_js_raw() { local key="$1" sed -n -E "s/^[[:space:]]*['\"]?${key}['\"]?[[:space:]]*:[[:space:]]*(.*)$/\1/p" "$CFG_JS" \ - | head -n1 | sed -E "s/[[:space:]]*(,)?[[:space:]]*$//" + | head -n1 | sed -E "s/[[:space:]]*(,)?[[:space:]]*$//" } -# --- normaliser une valeur JS simple: enlève guillemets, garde nombres, laisse path.join tel quel --- # --- normaliser une valeur JS simple --- -# - supprime les commentaires inline " // ... " -# - supprime la virgule terminale -# - trim espaces -# - retire guillemets si présents normalize_js_value() { local raw="$1" - - # retire commentaire inline: seulement si précédé d'un espace (évite "https://") - raw="$(printf '%s' "$raw" | sed -E 's@[[:space:]]//.*$@@')" - - # retire virgule en fin de champ et espaces résiduels - raw="$(printf '%s' "$raw" | sed -E 's/,[[:space:]]*$//')" - raw="$(printf '%s' "$raw" | sed -E 's/^[[:space:]]+//; s/[[:space:]]+$//')" - - # retire guillemets simples/doubles - if [[ "$raw" =~ ^\"(.*)\"$ ]]; then - printf '%s\n' "${BASH_REMATCH[1]}"; return - fi - if [[ "$raw" =~ ^\'(.*)\'$ ]]; then - printf '%s\n' "${BASH_REMATCH[1]}"; return - fi - + raw="$(printf '%s' "$raw" | sed -E 's@[[:space:]]//.*$@@')" # retire commentaire inline + raw="$(printf '%s' "$raw" | sed -E 's/,[[:space:]]*$//')" # retire virgule + raw="$(printf '%s' "$raw" | sed -E 's/^[[:space:]]+//; s/[[:space:]]+$//')" # trim + if [[ "$raw" =~ ^\"(.*)\"$ ]]; then printf '%s\n' "${BASH_REMATCH[1]}"; return; fi + if [[ "$raw" =~ ^\'(.*)\'$ ]]; then printf '%s\n' "${BASH_REMATCH[1]}"; return; fi printf '%s\n' "$raw" } - # placeholders à refuser (vides, “Voir…”, “CHANGEME…”, etc.) 
-
 # placeholders à refuser (vides, “Voir…”, “CHANGEME…”, etc.)
 is_placeholder() {
   local v="$1"
@@ -72,14 +78,12 @@ is_placeholder() {
 # entier (>=0)
 is_int() { [[ "$1" =~ ^[0-9]+$ ]]; }
 
-# booléen JS (true/false), avec ou sans guillemets
+# booléen JS (true/false)
 is_bool_literal() {
   local v="$(echo "$1" | tr '[:upper:]' '[:lower:]')"
   [[ "$v" == "true" || "$v" == "false" ]]
 }
 
-
-
 # ────────── Paths ──────────
 BIN_DIR="$HOME/bin"
 AUTOPOST_DIR="$HOME/autopost"
@@ -110,17 +114,17 @@ FILES["$AUTOPOST_DIR/views/autopost.html"]="https://tig.unfr.pw/UNFR/postauto/ra
 log "Vérification/MAJ des fichiers…"
 for LOCAL in "${!FILES[@]}"; do
   URL="${FILES[$LOCAL]}"
-  TMP="$TMP_DIR/$(basename "$LOCAL").dl"
-  curl -fsSL "$URL" -o "$TMP" || die "Téléchargement échoué: $URL"
+  TMPF="$TMP_DIR/$(basename "$LOCAL").dl"
+  download "$URL" "$TMPF" || die "Téléchargement échoué: $URL"
 
-  if [ ! -f "$LOCAL" ] || ! cmp -s "$LOCAL" "$TMP"; then
-      cp -f "$LOCAL" "$LOCAL.bak" 2>/dev/null || true
-      case "$LOCAL" in
-        *postauto|*.sh) install_bin "$TMP" "$LOCAL" ;;
-        *) install -m 644 "$TMP" "$LOCAL" ;;
-      esac
-      ok "Mise à jour: $LOCAL"
-      updated=1
+  if [ ! -f "$LOCAL" ] || ! cmp -s "$LOCAL" "$TMPF"; then
+    cp -f "$LOCAL" "$LOCAL.bak" 2>/dev/null || true
+    case "$LOCAL" in
+      *postauto|*.sh) install_bin "$TMPF" "$LOCAL" ;;
+      *) install -m 644 "$TMPF" "$LOCAL" ;;
+    esac
+    ok "Mise à jour: $LOCAL"
+    updated=1
   fi
 done
 
@@ -146,10 +150,10 @@ _autopost_completion() {
   opts="start stop restart show status createdb add log check update"
 
   if [ $COMP_CWORD -eq 1 ]; then
-      COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ); return 0
+    COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ); return 0
   fi
   if [ $COMP_CWORD -eq 2 ] && [ "${COMP_WORDS[1]}" = "add" ]; then
-      COMPREPLY=( $(compgen -f -- "${cur}") ); return 0
+    COMPREPLY=( $(compgen -f -- "${cur}") ); return 0
   fi
 }
 complete -F _autopost_completion postauto
@@ -159,9 +163,8 @@ EOF
 if [ ! -s "$COMP_FILE" ] || ! cmp -s <(printf "%s" "$COMPLETION_CODE") "$COMP_FILE"; then
   printf "%s" "$COMPLETION_CODE" > "$COMP_FILE"
   ok "Completion installée: $COMP_FILE"
-  # hook .bashrc si pas déjà présent
   grep -q '\.bash_completion.d/postauto' "$BASHRC_FILE" 2>/dev/null || \
-      echo '[ -f "$HOME/.bash_completion.d/postauto" ] && . "$HOME/.bash_completion.d/postauto"' >> "$BASHRC_FILE"
+    echo '[ -f "$HOME/.bash_completion.d/postauto" ] && . "$HOME/.bash_completion.d/postauto"' >> "$BASHRC_FILE"
   updated=1
 fi
 
@@ -171,8 +174,8 @@ ensure_cmd(){ command -v "$1" >/dev/null 2>&1; }
 if ! ensure_cmd 7z; then
   log "Installation 7z…"
   pushd "$TMP_DIR" >/dev/null
-  wget -q -o /dev/null -O 7z.tar.xz "https://7-zip.org/a/7z2409-linux-x64.tar.xz"
-  tar -xJf 7z.tar.xz
+  download "https://7-zip.org/a/7z2409-linux-x64.tar.xz" "$TMP_DIR/7z.tar.xz" || die "Téléchargement 7z"
+  tar -xJf "$TMP_DIR/7z.tar.xz"
   ZBIN="$(find . -maxdepth 1 -type f -name '7zz*' -perm -u+x | head -n1)"
   [ -n "$ZBIN" ] || die "Binaire 7z introuvable"
   install_bin "$ZBIN" "$BIN_DIR/7z"
@@ -182,8 +185,9 @@ fi
 if ! ensure_cmd BDInfo; then
   log "Installation BDInfo…"
   pushd "$TMP_DIR" >/dev/null
-  wget -q -o /dev/null -O bdinfo.zip "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfo_linux_v2.0.6.zip"
-  unzip -q bdinfo.zip
+  download "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfo_linux_v2.0.6.zip" "$TMP_DIR/bdinfo.zip" \
+    || die "Téléchargement BDInfo"
+  unzip -q "$TMP_DIR/bdinfo.zip"
   BDBIN="$(find . -type f -name BDInfo -perm -u+x | head -n1)"
   [ -n "$BDBIN" ] || die "BDInfo introuvable"
   install_bin "$BDBIN" "$BIN_DIR/BDInfo"
@@ -193,8 +197,9 @@ fi
 if ! ensure_cmd BDInfoDataSubstractor; then
   log "Installation BDInfoDataSubstractor…"
   pushd "$TMP_DIR" >/dev/null
-  wget -q -o /dev/null -O substractor.zip "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfodatasubstractor_linux_v2.0.6.zip"
-  unzip -q substractor.zip
+  download "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfodatasubstractor_linux_v2.0.6.zip" "$TMP_DIR/substractor.zip" \
+    || die "Téléchargement BDInfoDataSubstractor"
+  unzip -q "$TMP_DIR/substractor.zip"
   SBBIN="$(find . -type f -name BDInfoDataSubstractor -perm -u+x | head -n1)"
   [ -n "$SBBIN" ] || die "BDInfoDataSubstractor introuvable"
   install_bin "$SBBIN" "$BIN_DIR/BDInfoDataSubstractor"
@@ -219,92 +224,81 @@ fi
 popd >/dev/null
 
 # ────────── VALIDATION conf.sh (sans exécuter) ──────────
-# --- conf.sh : validation déclarative ---
 check_conf() {
   local file="$CONF_SH"
   [[ -f "$file" ]] || { err "Manquant: $file"; errors=$((errors+1)); return; }
   log "Validation déclarative de $file…"
 
-  # Parse simple NAME=VALUE (ignore commentaires / 'export')
   declare -A V=()
   while IFS= read -r line; do
-      [[ "$line" =~ ^[[:space:]]*# ]] && continue
-      [[ "$line" =~ ^[[:space:]]*$ ]] && continue
-      line="${line#export }"
-      if [[ "$line" =~ ^[[:space:]]*([A-Za-z_][A-Za-z0-9_]*)[[:space:]]*=(.*)$ ]]; then
-        name="${BASH_REMATCH[1]}"
-        val="${BASH_REMATCH[2]}"
-        val="${val%%#*}"; val="${val%%;*}"
-        val="$(echo -n "$val" | sed -E "s/^[[:space:]]*['\"]?//; s/['\"]?[[:space:]]*$//")"
-        V["$name"]="$val"
-      fi
+    [[ "$line" =~ ^[[:space:]]*# ]] && continue
+    [[ "$line" =~ ^[[:space:]]*$ ]] && continue
+    line="${line#export }"
+    if [[ "$line" =~ ^[[:space:]]*([A-Za-z_][A-Za-z0-9_]*)[[:space:]]*=(.*)$ ]]; then
+      name="${BASH_REMATCH[1]}"
+      val="${BASH_REMATCH[2]}"
+      val="${val%%#*}"; val="${val%%;*}"
+      val="$(echo -n "$val" | sed -E "s/^[[:space:]]*['\"]?//; s/['\"]?[[:space:]]*$//")"
+      V["$name"]="$val"
+    fi
   done < "$file"
 
-  # Requis généraux (non-placeholder)
   for k in URL_API APIKEY DOSSIER_GLOBAL DOSSIER_NFO DOSSIER_LOGS DOSSIER_NZB_ATTENTE DOSSIER_NZB_FINAL MOVE_CMD MYSQL_TABLE dbtype; do
-      v="${V[$k]:-}"
-      if is_placeholder "$v"; then
-        err "conf.sh: '$k' non renseigné"; errors=$((errors+1))
-      fi
+    v="${V[$k]:-}"
+    if is_placeholder "$v"; then
+      err "conf.sh: '$k' non renseigné"; errors=$((errors+1))
+    fi
  done
 
-  # MOVE_CMD valeurs autorisées
  case "${V[MOVE_CMD]:-}" in
-      "cp -rl"|"cp -rs"|"ln -s"|"mv"|"cp") : ;;
-      *)
-        err "conf.sh: MOVE_CMD invalide ('${V[MOVE_CMD]:-}'), attendus: cp -rl|cp -rs|ln -s|mv|cp"
-        errors=$((errors+1))
-        ;;
+    "cp -rl"|"cp -rs"|"ln -s"|"mv"|"cp") : ;;
+    *)
+      err "conf.sh: MOVE_CMD invalide ('${V[MOVE_CMD]:-}'), attendus: cp -rl|cp -rs|ln -s|mv|cp"
+      errors=$((errors+1))
+      ;;
  esac
 
-  # Fournisseur Usenet : non-vides + numériques où nécessaire
  for k in NG_HOST NG_USER NG_PASS; do
-      if is_placeholder "${V[$k]:-}"; then
-        err "conf.sh: '$k' non renseigné"; errors=$((errors+1))
-      fi
+    if is_placeholder "${V[$k]:-}"; then
+      err "conf.sh: '$k' non renseigné"; errors=$((errors+1))
+    fi
  done
  if ! [[ "${V[NG_PORT]:-}" =~ ^[0-9]+$ ]]; then
-      err "conf.sh: NG_PORT doit être numérique"; errors=$((errors+1))
+    err "conf.sh: NG_PORT doit être numérique"; errors=$((errors+1))
  fi
[[ "${V[NG_NBR_CONN]:-}" =~ ^[0-9]+$ ]]; then - err "conf.sh: NG_NBR_CONN doit être numérique"; errors=$((errors+1)) + err "conf.sh: NG_NBR_CONN doit être numérique"; errors=$((errors+1)) fi - # DB : règles conditionnelles (déclarations seulement) case "${V[dbtype]:-}" in - sqlite) - if is_placeholder "${V[DB_FILE]:-}"; then - err "conf.sh: DB_FILE requis en mode sqlite"; errors=$((errors+1)) - fi - ;; - mysql) - for k in MYSQL_HOST MYSQL_USER MYSQL_PASS MYSQL_DB; do - if is_placeholder "${V[$k]:-}"; then - err "conf.sh: '$k' requis en mode mysql"; errors=$((errors+1)) - fi - done - if ! [[ "${V[MYSQL_PORT]:-}" =~ ^[0-9]+$ ]]; then - err "conf.sh: MYSQL_PORT doit être numérique"; errors=$((errors+1)) - fi - ;; - *) - err "conf.sh: dbtype doit être 'sqlite' ou 'mysql' (actuel='${V[dbtype]:-}')" - errors=$((errors+1)) - ;; + sqlite) + if is_placeholder "${V[DB_FILE]:-}"; then + err "conf.sh: DB_FILE requis en mode sqlite"; errors=$((errors+1)) + fi + ;; + mysql) + for k in MYSQL_HOST MYSQL_USER MYSQL_PASS MYSQL_DB; do + if is_placeholder "${V[$k]:-}"; then + err "conf.sh: '$k' requis en mode mysql"; errors=$((errors+1)) + fi + done + if ! [[ "${V[MYSQL_PORT]:-}" =~ ^[0-9]+$ ]]; then + err "conf.sh: MYSQL_PORT doit être numérique"; errors=$((errors+1)) + fi + ;; + *) + err "conf.sh: dbtype doit être 'sqlite' ou 'mysql' (actuel='${V[dbtype]:-}')" + errors=$((errors+1)) + ;; esac } - -check_conf "$CONF_SH" - # ────────── VALIDATION config.js (avec Node) ────────── -# --- config.js : validation déclarative (sans exécuter du JS) --- validate_config_js() { [[ -f "$CFG_JS" ]] || { err "Manquant: $CFG_JS"; errors=$((errors+1)); return; } log "Validation déclarative de $CFG_JS…" - # valeurs principales local dbtype port name secret table dbtype="$(normalize_js_value "$(parse_js_raw dbtype)")" port="$(normalize_js_value "$(parse_js_raw port)")" @@ -312,77 +306,71 @@ validate_config_js() { secret="$(normalize_js_value "$(parse_js_raw sessionSecret)")" table="$(normalize_js_value "$(parse_js_raw DB_TABLE)")" - # checks minimaux if ! 
is_int "$port" || (( port < 1 || port > 65535 )); then - err "config.js: 'port' invalide ($port)"; errors=$((errors+1)) + err "config.js: 'port' invalide ($port)"; errors=$((errors+1)) fi if is_placeholder "$name"; then err "config.js: 'name' non renseigné"; errors=$((errors+1)); fi if is_placeholder "$secret"; then err "config.js: 'sessionSecret' non renseigné"; errors=$((errors+1)); fi if is_placeholder "$table"; then err "config.js: 'DB_TABLE' non renseigné"; errors=$((errors+1)); fi - # dossiers : déclaration non vide (pas de test FS) for key in finishdirectory logdirectory infodirectory; do - val="$(normalize_js_value "$(parse_js_raw "$key")")" - if is_placeholder "$val"; then - err "config.js: '$key' non renseigné"; errors=$((errors+1)) - fi + val="$(normalize_js_value "$(parse_js_raw "$key")")" + if is_placeholder "$val"; then + err "config.js: '$key' non renseigné"; errors=$((errors+1)) + fi done - # trustProxy / cookieSecure / sessionStorePath local tp cs ssp tp="$(normalize_js_value "$(parse_js_raw trustProxy)")" cs="$(normalize_js_value "$(parse_js_raw cookieSecure)")" ssp="$(normalize_js_value "$(parse_js_raw sessionStorePath)")" if is_placeholder "$tp"; then - err "config.js: 'trustProxy' non renseigné"; errors=$((errors+1)) + err "config.js: 'trustProxy' non renseigné"; errors=$((errors+1)) else - if is_int "$tp"; then - if (( tp < 0 )); then - err "config.js: 'trustProxy' doit être >= 0 (valeur=$tp)"; errors=$((errors+1)) - fi - else - # chaîne non vide acceptée (ex: "loopback,uniquelocal") - : - fi + if is_int "$tp"; then + if (( tp < 0 )); then + err "config.js: 'trustProxy' doit être >= 0 (valeur=$tp)"; errors=$((errors+1)) + fi + fi fi if ! is_bool_literal "$cs"; then - err "config.js: 'cookieSecure' doit être true ou false (valeur='$cs')"; errors=$((errors+1)) + err "config.js: 'cookieSecure' doit être true ou false (valeur='$cs')"; errors=$((errors+1)) fi if is_placeholder "$ssp"; then - err "config.js: 'sessionStorePath' non renseigné"; errors=$((errors+1)) + err "config.js: 'sessionStorePath' non renseigné"; errors=$((errors+1)) fi - # règles DB (déclarations uniquement) case "$dbtype" in - sqlite) - val="$(normalize_js_value "$(parse_js_raw dbFile)")" - if is_placeholder "$val"; then - err "config.js: 'dbFile' requis (sqlite)"; errors=$((errors+1)) - fi - ;; - mysql) - local H P U PW DB - H="$(normalize_js_value "$(parse_js_raw DB_HOST)")" - P="$(normalize_js_value "$(parse_js_raw DB_PORT)")" - U="$(normalize_js_value "$(parse_js_raw DB_USER)")" - PW="$(normalize_js_value "$(parse_js_raw DB_PASSWORD)")" - DB="$(normalize_js_value "$(parse_js_raw DB_DATABASE)")" - if is_placeholder "$H"; then err "config.js: 'DB_HOST' requis (mysql)"; errors=$((errors+1)); fi - if ! 
is_int "$P"; then err "config.js: 'DB_PORT' entier requis (mysql)"; errors=$((errors+1)); fi - if is_placeholder "$U"; then err "config.js: 'DB_USER' requis (mysql)"; errors=$((errors+1)); fi - if is_placeholder "$PW"; then err "config.js: 'DB_PASSWORD' requis (mysql)"; errors=$((errors+1)); fi - if is_placeholder "$DB"; then err "config.js: 'DB_DATABASE' requis (mysql)"; errors=$((errors+1)); fi - ;; - *) - err "config.js: 'dbtype' doit être 'sqlite' ou 'mysql' (actuel='$dbtype')" - errors=$((errors+1)) - ;; + sqlite) + val="$(normalize_js_value "$(parse_js_raw dbFile)")" + if is_placeholder "$val"; then + err "config.js: 'dbFile' requis (sqlite)"; errors=$((errors+1)) + fi + ;; + mysql) + local H P U PW DB + H="$(normalize_js_value "$(parse_js_raw DB_HOST)")" + P="$(normalize_js_value "$(parse_js_raw DB_PORT)")" + U="$(normalize_js_value "$(parse_js_raw DB_USER)")" + PW="$(normalize_js_value "$(parse_js_raw DB_PASSWORD)")" + DB="$(normalize_js_value "$(parse_js_raw DB_DATABASE)")" + if is_placeholder "$H"; then err "config.js: 'DB_HOST' requis (mysql)"; errors=$((errors+1)); fi + if ! is_int "$P"; then err "config.js: 'DB_PORT' entier requis (mysql)"; errors=$((errors+1)); fi + if is_placeholder "$U"; then err "config.js: 'DB_USER' requis (mysql)"; errors=$((errors+1)); fi + if is_placeholder "$PW"; then err "config.js: 'DB_PASSWORD' requis (mysql)"; errors=$((errors+1)); fi + if is_placeholder "$DB"; then err "config.js: 'DB_DATABASE' requis (mysql)"; errors=$((errors+1)); fi + ;; + *) + err "config.js: 'dbtype' doit être 'sqlite' ou 'mysql' (actuel='$dbtype')" + errors=$((errors+1)) + ;; esac } +check_conf "$CONF_SH" validate_config_js # ────────── Résumé & exit codes ──────────