Protection against curl HTTP/2 errors (for people who don't keep their machines up to date)
parent 8abff0fb6c
commit 61ef28108d
install.sh (98 lines changed)
@@ -47,6 +47,29 @@ install_bin() { # install_bin <src> <dst>
   install -m 755 "$1" "$2"
 }
 
+# ========= Downloader (curl→wget fallback) =========
+download() {
+  local url="$1" out="$2"
+  [ -z "$url" ] || [ -z "$out" ] && { echo "download: usage: download <url> <outfile>" >&2; return 2; }
+  mkdir -p -- "$(dirname -- "$out")"
+  local tmp="${out}.dl.$$"
+  local curl_opts=(--fail --silent --show-error --location --retry 5 --retry-all-errors --retry-delay 2 --connect-timeout 15)
+  local wget_opts=(--quiet --https-only --tries=5 --waitretry=2 --retry-connrefused)
+
+  if command -v curl >/dev/null 2>&1; then
+    if curl --http1.1 -4 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
+    if env -u http_proxy -u https_proxy -u all_proxy \
+       curl --http1.1 -4 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
+    if curl --http1.1 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
+  fi
+  if command -v wget >/dev/null 2>&1; then
+    if wget --inet4-only "${wget_opts[@]}" -O "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
+    if wget "${wget_opts[@]}" -O "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
+  fi
+  rm -f -- "$tmp" 2>/dev/null || true
+  return 1
+}
+
 # ========= Start =========
 log "Initialisation des dossiers"
 ensure_dir "$BIN_DIR"
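A quick way to exercise the new helper on its own before running the full installer (illustrative sketch only, not part of the commit; the URL and output path are made-up examples):

    # Assumes the download() function above has been sourced into the current shell.
    if download "https://example.com/somefile.tar.xz" "/tmp/postauto-test/somefile.tar.xz"; then
      echo "OK: fetched via curl (HTTP/1.1, IPv4 first) or one of the fallbacks"
    else
      echo "KO: every curl/wget attempt failed" >&2
    fi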
@@ -70,7 +93,6 @@ MISSING=()
 for c in wget curl tar xz; do
   has_cmd "$c" || MISSING+=("$c")
 done
-# unzip est requis plus bas
 has_cmd unzip || MISSING+=("unzip")
 if [ "${#MISSING[@]}" -gt 0 ]; then
   warn "Installation des prérequis manquants: ${MISSING[*]}"
@@ -101,14 +123,18 @@ else
     "https://mediaarea.net/download/binary/mediainfo/25.04/mediainfo_25.04-1_amd64.Debian_12.deb"
   )
   pushd "$TMP_DIR" >/dev/null
-  for u in "${DEBS[@]}"; do wget -q "$u"; done
+  for u in "${DEBS[@]}"; do
+    f="$(basename "$u")"
+    download "$u" "$f" || die "Téléchargement échoué: $u"
+  done
   run_root dpkg -i ./*.deb || run_root apt-get -f -y install
   popd >/dev/null
   fi
 else
   # AppImage fallback
   APP="$BIN_DIR/mediainfo"
-  curl -fsSL -o "$APP" "https://mediaarea.net/download/binary/mediainfo/20.09/mediainfo-20.09.glibc2.3-x86_64.AppImage"
+  download "https://mediaarea.net/download/binary/mediainfo/20.09/mediainfo-20.09.glibc2.3-x86_64.AppImage" "$APP" \
+    || die "Téléchargement mediainfo AppImage"
   install -m 755 "$APP" "$APP"
   ok "mediainfo AppImage installé dans $BIN_DIR"
 fi
@@ -129,9 +155,9 @@ select BDD in "SQLite" "MySQL"; do
     else
       warn "apt absent → installation binaire sqlite3"
       pushd "$TMP_DIR" >/dev/null
-      curl -fsSL -o sqlite-tools.zip "https://www.sqlite.org/2024/sqlite-tools-linux-x64-3470000.zip"
-      unzip -q sqlite-tools.zip
-      # trouve sqlite3
+      download "https://www.sqlite.org/2024/sqlite-tools-linux-x64-3470000.zip" "$TMP_DIR/sqlite-tools.zip" \
+        || die "Téléchargement sqlite-tools"
+      unzip -q "$TMP_DIR/sqlite-tools.zip"
       SQLITE_BIN="$(find . -type f -name sqlite3 -perm -u+x | head -n1)"
       [ -n "$SQLITE_BIN" ] || die "sqlite3 introuvable dans l’archive"
       install_bin "$SQLITE_BIN" "$BIN_DIR/sqlite3"
@@ -153,8 +179,9 @@ select BDD in "SQLite" "MySQL"; do
       run_root apt-get install -y sqlite3
     else
       pushd "$TMP_DIR" >/dev/null
-      curl -fsSL -o sqlite-tools.zip "https://www.sqlite.org/2024/sqlite-tools-linux-x64-3470000.zip"
-      unzip -q sqlite-tools.zip
+      download "https://www.sqlite.org/2024/sqlite-tools-linux-x64-3470000.zip" "$TMP_DIR/sqlite-tools.zip" \
+        || die "Téléchargement sqlite-tools"
+      unzip -q "$TMP_DIR/sqlite-tools.zip"
       SQLITE_BIN="$(find . -type f -name sqlite3 -perm -u+x | head -n1)"
       [ -n "$SQLITE_BIN" ] || die "sqlite3 introuvable dans l’archive"
       install_bin "$SQLITE_BIN" "$BIN_DIR/sqlite3"
@@ -176,7 +203,8 @@ else
   if has_cmd apt-get; then
     run_root apt-get install -y jq
   else
-    curl -fsSL -o "$TMP_DIR/jq" "https://github.com/jqlang/jq/releases/download/jq-1.7.1/jq-linux-amd64"
+    download "https://github.com/jqlang/jq/releases/download/jq-1.7.1/jq-linux-amd64" "$TMP_DIR/jq" \
+      || die "Téléchargement jq"
     install_bin "$TMP_DIR/jq" "$BIN_DIR/jq"
   fi
 fi
@@ -189,9 +217,8 @@ else
 log "Installation de 7z (binaire standalone)"
 pushd "$TMP_DIR" >/dev/null
 Z_URL="https://7-zip.org/a/7z2409-linux-x64.tar.xz"
-wget -q -O 7z.tar.xz "$Z_URL"
-tar -xJf 7z.tar.xz
-# Cherche 7zz* exécutable
+download "$Z_URL" "$TMP_DIR/7z.tar.xz" || die "Téléchargement 7z"
+tar -xJf "$TMP_DIR/7z.tar.xz"
 Z_BIN="$(find . -maxdepth 1 -type f -name '7zz*' -perm -u+x | head -n1)"
 [ -n "$Z_BIN" ] || die "binaire 7z introuvable"
 install_bin "$Z_BIN" "$BIN_DIR/7z"
@@ -202,14 +229,16 @@ REQUIRED_CMDS+=(7z)
 # ========= BDInfo & Substractor =========
 log "Installation BDInfo"
 pushd "$TMP_DIR" >/dev/null
-curl -fsSL -o bdinfo.zip "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfo_linux_v2.0.6.zip"
-unzip -q bdinfo.zip
+download "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfo_linux_v2.0.6.zip" "$TMP_DIR/bdinfo.zip" \
+  || die "Téléchargement BDInfo"
+unzip -q "$TMP_DIR/bdinfo.zip"
 BDINFO_BIN="$(find . -type f -name BDInfo -perm -u+x | head -n1)"
 [ -n "$BDINFO_BIN" ] || die "BDInfo introuvable"
 install_bin "$BDINFO_BIN" "$BIN_DIR/BDInfo"
 
-curl -fsSL -o substractor.zip "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfodatasubstractor_linux_v2.0.6.zip"
-unzip -q substractor.zip
+download "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfodatasubstractor_linux_v2.0.6.zip" "$TMP_DIR/substractor.zip" \
+  || die "Téléchargement BDInfoDataSubstractor"
+unzip -q "$TMP_DIR/substractor.zip"
 SUB_BIN="$(find . -type f -name BDInfoDataSubstractor -perm -u+x | head -n1)"
 [ -n "$SUB_BIN" ] || die "BDInfoDataSubstractor introuvable"
 install_bin "$SUB_BIN" "$BIN_DIR/BDInfoDataSubstractor"
@@ -220,8 +249,8 @@ REQUIRED_CMDS+=(BDInfo BDInfoDataSubstractor)
 log "Installation Nyuu"
 pushd "$TMP_DIR" >/dev/null
 NYUU_URL="https://github.com/Antidote2151/Nyuu-Obfuscation/releases/download/v0.4.2-Obfuscate1.3/nyuu-v0.4.2-Obfuscate1.3-linux-amd64.tar.xz"
-wget -q -O nyuu.tar.xz "$NYUU_URL"
-tar -xJf nyuu.tar.xz
+download "$NYUU_URL" "$TMP_DIR/nyuu.tar.xz" || die "Téléchargement nyuu"
+tar -xJf "$TMP_DIR/nyuu.tar.xz"
 NYUU_BIN="$(find . -type f -name nyuu -perm -u+x | head -n1)"
 [ -n "$NYUU_BIN" ] || die "nyuu introuvable dans l’archive"
 install_bin "$NYUU_BIN" "$BIN_DIR/nyuu"
@@ -232,8 +261,8 @@ REQUIRED_CMDS+=(nyuu)
 log "Installation ParPar"
 pushd "$TMP_DIR" >/dev/null
 PARPAR_URL="https://github.com/animetosho/ParPar/releases/download/v0.4.5/parpar-v0.4.5-linux-static-amd64.xz"
-wget -q -O parpar.xz "$PARPAR_URL"
-xz -d parpar.xz
+download "$PARPAR_URL" "$TMP_DIR/parpar.xz" || die "Téléchargement parpar"
+xz -d "$TMP_DIR/parpar.xz"
 PARPAR_BIN="$(find . -maxdepth 1 -type f -name 'parpar-*' -perm -u+x | head -n1)"
 [ -n "$PARPAR_BIN" ] || die "parpar introuvable"
 install_bin "$PARPAR_BIN" "$BIN_DIR/parpar"
@@ -242,11 +271,11 @@ REQUIRED_CMDS+=(parpar)
 
 # ========= Téléchargement scripts autopost =========
 log "Téléchargement des scripts autopost"
-wget -q -O "$AUTOPOST_DIR/analyzer.sh" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/analyzer.sh"
-wget -q -O "$AUTOPOST_DIR/common.sh" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/common.sh"
-wget -q -O "$AUTOPOST_DIR/posteur.sh" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/posteur.sh"
-wget -q -O "$BIN_DIR/postauto" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/bin/postauto"
-[ -f "$AUTOPOST_DIR/conf.sh" ] || wget -q -O "$AUTOPOST_DIR/conf.sh" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/conf.sh"
+download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/analyzer.sh" "$AUTOPOST_DIR/analyzer.sh" || die "analyzer.sh"
+download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/common.sh" "$AUTOPOST_DIR/common.sh" || die "common.sh"
+download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/posteur.sh" "$AUTOPOST_DIR/posteur.sh" || die "posteur.sh"
+download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/bin/postauto" "$BIN_DIR/postauto" || die "postauto"
+[ -f "$AUTOPOST_DIR/conf.sh" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/conf.sh" "$AUTOPOST_DIR/conf.sh"
 
 chmod 755 "$BIN_DIR/postauto"
 chmod -R 755 "$AUTOPOST_DIR"
@@ -254,7 +283,7 @@ chmod -R 755 "$AUTOPOST_DIR"
 # ========= Bash completion (fichier dédié) =========
 COMP_FILE="$BASH_COMPLETION_DIR/postauto"
 if [ ! -s "$COMP_FILE" ]; then
   cat > "$COMP_FILE" <<'EOF'
 # completion postauto
 _autopost_completion() {
   local cur prev opts
@@ -274,7 +303,6 @@ _autopost_completion() {
 complete -F _autopost_completion postauto
 EOF
 ok "Completion bash installée dans $COMP_FILE"
-# source automatique depuis .bashrc si pas déjà présent
 if ! grep -q 'bash_completion.d' "$BASHRC_FILE" 2>/dev/null; then
   echo '[ -f "$HOME/.bash_completion.d/postauto" ] && . "$HOME/.bash_completion.d/postauto"' >> "$BASHRC_FILE"
 fi
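To confirm the completion hook by hand without opening a new shell, something like the following can be run (illustrative, not part of install.sh; the path is the one used by the .bashrc hook above):

    # Load the freshly written completion file and check that bash registered it.
    . "$HOME/.bash_completion.d/postauto"
    complete -p postauto   # expected output: complete -F _autopost_completion postauto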
@@ -289,7 +317,10 @@ if [ -s "$NVM_DIR/nvm.sh" ]; then
   . "$NVM_DIR/nvm.sh"
 else
   log "Installation de nvm"
-  curl -fsSL https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh | bash
+  tmp_nvm="$TMP_DIR/install_nvm.sh"
+  download "https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh" "$tmp_nvm" \
+    || die "Téléchargement nvm install.sh"
+  bash "$tmp_nvm"
   . "$NVM_DIR/nvm.sh"
 fi
 
@@ -328,12 +359,12 @@ popd >/dev/null
 
 # ========= Fichiers Node (server.js, db.js, config.js) =========
 log "Vérification fichiers Node"
-[ -f "$AUTOPOST_DIR/server.js" ] || wget -q -O "$AUTOPOST_DIR/server.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/server.js"
-[ -f "$AUTOPOST_DIR/db.js" ] || wget -q -O "$AUTOPOST_DIR/db.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/db.js"
-[ -f "$AUTOPOST_DIR/public/autopost.js" ] || wget -q -O "$AUTOPOST_DIR/public/autopost.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/public/autopost.js"
-[ -f "$AUTOPOST_DIR/views/autopost.html" ] || wget -q -O "$AUTOPOST_DIR/views/autopost.html" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/views/autopost.html"
+[ -f "$AUTOPOST_DIR/server.js" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/server.js" "$AUTOPOST_DIR/server.js"
+[ -f "$AUTOPOST_DIR/db.js" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/db.js" "$AUTOPOST_DIR/db.js"
+[ -f "$AUTOPOST_DIR/public/autopost.js" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/public/autopost.js" "$AUTOPOST_DIR/public/autopost.js"
+[ -f "$AUTOPOST_DIR/views/autopost.html" ] || download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/views/autopost.html" "$AUTOPOST_DIR/views/autopost.html"
 if [ ! -f "$AUTOPOST_DIR/config.js" ]; then
-  wget -q -O "$AUTOPOST_DIR/config.js" "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/config.js"
+  download "https://tig.unfr.pw/UNFR/postauto/raw/branch/main/autopost/config.js" "$AUTOPOST_DIR/config.js"
   ok "Installation terminée. Configurez $AUTOPOST_DIR/config.js."
 fi
 
@@ -342,7 +373,6 @@ log "Vérification finale des binaires requis"
 missing_final=()
 for cmd in "${REQUIRED_CMDS[@]}"; do
   if ! command -v "$cmd" >/dev/null 2>&1; then
-    # autoriser chemin absolu
     if [[ "$cmd" == */* ]]; then
       [ -x "$cmd" ] || missing_final+=("$cmd")
     else
update.sh (274 lines changed)
@@ -26,41 +26,47 @@ die() { err "$*"; exit 1; }
 
 install_bin(){ install -m 755 "$1" "$2"; }
 
+# ========= Downloader (curl→wget fallback) =========
+download() {
+  local url="$1" out="$2"
+  [ -z "$url" ] || [ -z "$out" ] && { echo "download: usage: download <url> <outfile>" >&2; return 2; }
+  mkdir -p -- "$(dirname -- "$out")"
+  local tmp="${out}.dl.$$"
+  local curl_opts=(--fail --silent --show-error --location --retry 5 --retry-all-errors --retry-delay 2 --connect-timeout 15)
+  local wget_opts=(--quiet --https-only --tries=5 --waitretry=2 --retry-connrefused)
+
+  if command -v curl >/dev/null 2>&1; then
+    if curl --http1.1 -4 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
+    if env -u http_proxy -u https_proxy -u all_proxy \
+       curl --http1.1 -4 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
+    if curl --http1.1 "${curl_opts[@]}" -o "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
+  fi
+  if command -v wget >/dev/null 2>&1; then
+    if wget --inet4-only "${wget_opts[@]}" -O "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
+    if wget "${wget_opts[@]}" -O "$tmp" "$url"; then mv -f -- "$tmp" "$out"; return 0; fi
+  fi
+  rm -f -- "$tmp" 2>/dev/null || true
+  return 1
+}
+
 # --- lire une clé JS (ligne "clé: valeur") sans exécuter ---
 parse_js_raw() {
   local key="$1"
   sed -n -E "s/^[[:space:]]*['\"]?${key}['\"]?[[:space:]]*:[[:space:]]*(.*)$/\1/p" "$CFG_JS" \
     | head -n1 | sed -E "s/[[:space:]]*(,)?[[:space:]]*$//"
 }
 
-# --- normaliser une valeur JS simple: enlève guillemets, garde nombres, laisse path.join tel quel ---
 # --- normaliser une valeur JS simple ---
-# - supprime les commentaires inline " // ... "
-# - supprime la virgule terminale
-# - trim espaces
-# - retire guillemets si présents
 normalize_js_value() {
   local raw="$1"
-  # retire commentaire inline: seulement si précédé d'un espace (évite "https://")
-  raw="$(printf '%s' "$raw" | sed -E 's@[[:space:]]//.*$@@')"
-  # retire virgule en fin de champ et espaces résiduels
-  raw="$(printf '%s' "$raw" | sed -E 's/,[[:space:]]*$//')"
-  raw="$(printf '%s' "$raw" | sed -E 's/^[[:space:]]+//; s/[[:space:]]+$//')"
-
-  # retire guillemets simples/doubles
-  if [[ "$raw" =~ ^\"(.*)\"$ ]]; then
-    printf '%s\n' "${BASH_REMATCH[1]}"; return
-  fi
-  if [[ "$raw" =~ ^\'(.*)\'$ ]]; then
-    printf '%s\n' "${BASH_REMATCH[1]}"; return
-  fi
-
+  raw="$(printf '%s' "$raw" | sed -E 's@[[:space:]]//.*$@@')" # retire commentaire inline
+  raw="$(printf '%s' "$raw" | sed -E 's/,[[:space:]]*$//')" # retire virgule
+  raw="$(printf '%s' "$raw" | sed -E 's/^[[:space:]]+//; s/[[:space:]]+$//')" # trim
+  if [[ "$raw" =~ ^\"(.*)\"$ ]]; then printf '%s\n' "${BASH_REMATCH[1]}"; return; fi
+  if [[ "$raw" =~ ^\'(.*)\'$ ]]; then printf '%s\n' "${BASH_REMATCH[1]}"; return; fi
   printf '%s\n' "$raw"
 }
 
 
 # placeholders à refuser (vides, “Voir…”, “CHANGEME…”, etc.)
 is_placeholder() {
   local v="$1"
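For reference, this is what the two helpers do to a typical config.js line (illustrative sketch only, not part of update.sh; the sample key, URL and temp file are invented, and the functions above are assumed to be sourced):

    CFG_JS="$(mktemp)"
    printf '%s\n' 'module.exports = {' '  url: "https://example.org/api", // endpoint' '};' > "$CFG_JS"
    parse_js_raw url
    # -> "https://example.org/api", // endpoint
    normalize_js_value "$(parse_js_raw url)"
    # -> https://example.org/api
    # The inline comment, trailing comma and quotes are stripped, while the "//"
    # inside https:// survives because it is not preceded by a space.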
@@ -72,14 +78,12 @@ is_placeholder() {
 # entier (>=0)
 is_int() { [[ "$1" =~ ^[0-9]+$ ]]; }
 
-# booléen JS (true/false), avec ou sans guillemets
+# booléen JS (true/false)
 is_bool_literal() {
   local v="$(echo "$1" | tr '[:upper:]' '[:lower:]')"
   [[ "$v" == "true" || "$v" == "false" ]]
 }
 
 
 # ────────── Paths ──────────
 BIN_DIR="$HOME/bin"
 AUTOPOST_DIR="$HOME/autopost"
@@ -110,17 +114,17 @@ FILES["$AUTOPOST_DIR/views/autopost.html"]="https://tig.unfr.pw/UNFR/postauto/ra
 log "Vérification/MAJ des fichiers…"
 for LOCAL in "${!FILES[@]}"; do
   URL="${FILES[$LOCAL]}"
-  TMP="$TMP_DIR/$(basename "$LOCAL").dl"
-  curl -fsSL "$URL" -o "$TMP" || die "Téléchargement échoué: $URL"
+  TMPF="$TMP_DIR/$(basename "$LOCAL").dl"
+  download "$URL" "$TMPF" || die "Téléchargement échoué: $URL"
 
-  if [ ! -f "$LOCAL" ] || ! cmp -s "$LOCAL" "$TMP"; then
+  if [ ! -f "$LOCAL" ] || ! cmp -s "$LOCAL" "$TMPF"; then
     cp -f "$LOCAL" "$LOCAL.bak" 2>/dev/null || true
     case "$LOCAL" in
-      *postauto|*.sh) install_bin "$TMP" "$LOCAL" ;;
-      *) install -m 644 "$TMP" "$LOCAL" ;;
+      *postauto|*.sh) install_bin "$TMPF" "$LOCAL" ;;
+      *) install -m 644 "$TMPF" "$LOCAL" ;;
     esac
     ok "Mise à jour: $LOCAL"
     updated=1
   fi
 done
 
@@ -146,10 +150,10 @@ _autopost_completion() {
   opts="start stop restart show status createdb add log check update"
 
   if [ $COMP_CWORD -eq 1 ]; then
     COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ); return 0
   fi
   if [ $COMP_CWORD -eq 2 ] && [ "${COMP_WORDS[1]}" = "add" ]; then
     COMPREPLY=( $(compgen -f -- "${cur}") ); return 0
   fi
 }
 complete -F _autopost_completion postauto
@@ -159,9 +163,8 @@ EOF
 if [ ! -s "$COMP_FILE" ] || ! cmp -s <(printf "%s" "$COMPLETION_CODE") "$COMP_FILE"; then
   printf "%s" "$COMPLETION_CODE" > "$COMP_FILE"
   ok "Completion installée: $COMP_FILE"
-  # hook .bashrc si pas déjà présent
   grep -q '\.bash_completion.d/postauto' "$BASHRC_FILE" 2>/dev/null || \
     echo '[ -f "$HOME/.bash_completion.d/postauto" ] && . "$HOME/.bash_completion.d/postauto"' >> "$BASHRC_FILE"
   updated=1
 fi
 
@@ -171,8 +174,8 @@ ensure_cmd(){ command -v "$1" >/dev/null 2>&1; }
 if ! ensure_cmd 7z; then
   log "Installation 7z…"
   pushd "$TMP_DIR" >/dev/null
-  wget -q -o /dev/null -O 7z.tar.xz "https://7-zip.org/a/7z2409-linux-x64.tar.xz"
-  tar -xJf 7z.tar.xz
+  download "https://7-zip.org/a/7z2409-linux-x64.tar.xz" "$TMP_DIR/7z.tar.xz" || die "Téléchargement 7z"
+  tar -xJf "$TMP_DIR/7z.tar.xz"
   ZBIN="$(find . -maxdepth 1 -type f -name '7zz*' -perm -u+x | head -n1)"
   [ -n "$ZBIN" ] || die "Binaire 7z introuvable"
   install_bin "$ZBIN" "$BIN_DIR/7z"
@@ -182,8 +185,9 @@ fi
 if ! ensure_cmd BDInfo; then
   log "Installation BDInfo…"
   pushd "$TMP_DIR" >/dev/null
-  wget -q -o /dev/null -O bdinfo.zip "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfo_linux_v2.0.6.zip"
-  unzip -q bdinfo.zip
+  download "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfo_linux_v2.0.6.zip" "$TMP_DIR/bdinfo.zip" \
+    || die "Téléchargement BDInfo"
+  unzip -q "$TMP_DIR/bdinfo.zip"
   BDBIN="$(find . -type f -name BDInfo -perm -u+x | head -n1)"
   [ -n "$BDBIN" ] || die "BDInfo introuvable"
   install_bin "$BDBIN" "$BIN_DIR/BDInfo"
@@ -193,8 +197,9 @@ fi
 if ! ensure_cmd BDInfoDataSubstractor; then
   log "Installation BDInfoDataSubstractor…"
   pushd "$TMP_DIR" >/dev/null
-  wget -q -o /dev/null -O substractor.zip "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfodatasubstractor_linux_v2.0.6.zip"
-  unzip -q substractor.zip
+  download "https://github.com/dotnetcorecorner/BDInfo/releases/download/linux-2.0.6/bdinfodatasubstractor_linux_v2.0.6.zip" "$TMP_DIR/substractor.zip" \
+    || die "Téléchargement BDInfoDataSubstractor"
+  unzip -q "$TMP_DIR/substractor.zip"
   SBBIN="$(find . -type f -name BDInfoDataSubstractor -perm -u+x | head -n1)"
   [ -n "$SBBIN" ] || die "BDInfoDataSubstractor introuvable"
   install_bin "$SBBIN" "$BIN_DIR/BDInfoDataSubstractor"
@@ -219,92 +224,81 @@ fi
 popd >/dev/null
 
 # ────────── VALIDATION conf.sh (sans exécuter) ──────────
-# --- conf.sh : validation déclarative ---
 check_conf() {
   local file="$CONF_SH"
   [[ -f "$file" ]] || { err "Manquant: $file"; errors=$((errors+1)); return; }
 
   log "Validation déclarative de $file…"
 
-  # Parse simple NAME=VALUE (ignore commentaires / 'export')
   declare -A V=()
   while IFS= read -r line; do
     [[ "$line" =~ ^[[:space:]]*# ]] && continue
     [[ "$line" =~ ^[[:space:]]*$ ]] && continue
     line="${line#export }"
     if [[ "$line" =~ ^[[:space:]]*([A-Za-z_][A-Za-z0-9_]*)[[:space:]]*=(.*)$ ]]; then
       name="${BASH_REMATCH[1]}"
       val="${BASH_REMATCH[2]}"
       val="${val%%#*}"; val="${val%%;*}"
       val="$(echo -n "$val" | sed -E "s/^[[:space:]]*['\"]?//; s/['\"]?[[:space:]]*$//")"
       V["$name"]="$val"
     fi
   done < "$file"
 
-  # Requis généraux (non-placeholder)
   for k in URL_API APIKEY DOSSIER_GLOBAL DOSSIER_NFO DOSSIER_LOGS DOSSIER_NZB_ATTENTE DOSSIER_NZB_FINAL MOVE_CMD MYSQL_TABLE dbtype; do
     v="${V[$k]:-}"
     if is_placeholder "$v"; then
       err "conf.sh: '$k' non renseigné"; errors=$((errors+1))
     fi
   done
 
-  # MOVE_CMD valeurs autorisées
   case "${V[MOVE_CMD]:-}" in
     "cp -rl"|"cp -rs"|"ln -s"|"mv"|"cp") : ;;
     *)
       err "conf.sh: MOVE_CMD invalide ('${V[MOVE_CMD]:-}'), attendus: cp -rl|cp -rs|ln -s|mv|cp"
       errors=$((errors+1))
       ;;
   esac
 
-  # Fournisseur Usenet : non-vides + numériques où nécessaire
   for k in NG_HOST NG_USER NG_PASS; do
     if is_placeholder "${V[$k]:-}"; then
       err "conf.sh: '$k' non renseigné"; errors=$((errors+1))
     fi
   done
   if ! [[ "${V[NG_PORT]:-}" =~ ^[0-9]+$ ]]; then
     err "conf.sh: NG_PORT doit être numérique"; errors=$((errors+1))
   fi
   if ! [[ "${V[NG_NBR_CONN]:-}" =~ ^[0-9]+$ ]]; then
     err "conf.sh: NG_NBR_CONN doit être numérique"; errors=$((errors+1))
   fi
 
-  # DB : règles conditionnelles (déclarations seulement)
   case "${V[dbtype]:-}" in
     sqlite)
       if is_placeholder "${V[DB_FILE]:-}"; then
         err "conf.sh: DB_FILE requis en mode sqlite"; errors=$((errors+1))
       fi
       ;;
     mysql)
       for k in MYSQL_HOST MYSQL_USER MYSQL_PASS MYSQL_DB; do
         if is_placeholder "${V[$k]:-}"; then
           err "conf.sh: '$k' requis en mode mysql"; errors=$((errors+1))
         fi
       done
       if ! [[ "${V[MYSQL_PORT]:-}" =~ ^[0-9]+$ ]]; then
         err "conf.sh: MYSQL_PORT doit être numérique"; errors=$((errors+1))
       fi
       ;;
     *)
       err "conf.sh: dbtype doit être 'sqlite' ou 'mysql' (actuel='${V[dbtype]:-}')"
       errors=$((errors+1))
       ;;
   esac
 }
 
-check_conf "$CONF_SH"
 
 # ────────── VALIDATION config.js (avec Node) ──────────
-# --- config.js : validation déclarative (sans exécuter du JS) ---
 validate_config_js() {
   [[ -f "$CFG_JS" ]] || { err "Manquant: $CFG_JS"; errors=$((errors+1)); return; }
   log "Validation déclarative de $CFG_JS…"
 
-  # valeurs principales
   local dbtype port name secret table
   dbtype="$(normalize_js_value "$(parse_js_raw dbtype)")"
   port="$(normalize_js_value "$(parse_js_raw port)")"
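For context, a conf.sh fragment along these lines would satisfy the declarative checks above: every required key is non-placeholder, MOVE_CMD is one of the whitelisted commands, the ports and connection count are numeric, and dbtype=sqlite therefore requires DB_FILE. All values here are invented examples, not recommendations:

    URL_API="https://example.org/api"
    APIKEY="0123456789abcdef"
    DOSSIER_GLOBAL="$HOME/autopost/work"
    DOSSIER_NFO="$HOME/autopost/nfo"
    DOSSIER_LOGS="$HOME/autopost/logs"
    DOSSIER_NZB_ATTENTE="$HOME/autopost/nzb_attente"
    DOSSIER_NZB_FINAL="$HOME/autopost/nzb_final"
    MOVE_CMD="cp -rl"        # allowed values: cp -rl | cp -rs | ln -s | mv | cp
    MYSQL_TABLE="autopost"
    dbtype="sqlite"
    DB_FILE="$HOME/autopost/autopost.db"
    NG_HOST="news.example.net"
    NG_USER="user"
    NG_PASS="pass"
    NG_PORT=563
    NG_NBR_CONN=20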
@@ -312,77 +306,71 @@ validate_config_js() {
   secret="$(normalize_js_value "$(parse_js_raw sessionSecret)")"
   table="$(normalize_js_value "$(parse_js_raw DB_TABLE)")"
 
-  # checks minimaux
   if ! is_int "$port" || (( port < 1 || port > 65535 )); then
     err "config.js: 'port' invalide ($port)"; errors=$((errors+1))
   fi
   if is_placeholder "$name"; then err "config.js: 'name' non renseigné"; errors=$((errors+1)); fi
   if is_placeholder "$secret"; then err "config.js: 'sessionSecret' non renseigné"; errors=$((errors+1)); fi
   if is_placeholder "$table"; then err "config.js: 'DB_TABLE' non renseigné"; errors=$((errors+1)); fi
 
-  # dossiers : déclaration non vide (pas de test FS)
   for key in finishdirectory logdirectory infodirectory; do
     val="$(normalize_js_value "$(parse_js_raw "$key")")"
     if is_placeholder "$val"; then
       err "config.js: '$key' non renseigné"; errors=$((errors+1))
     fi
   done
 
-  # trustProxy / cookieSecure / sessionStorePath
   local tp cs ssp
   tp="$(normalize_js_value "$(parse_js_raw trustProxy)")"
   cs="$(normalize_js_value "$(parse_js_raw cookieSecure)")"
   ssp="$(normalize_js_value "$(parse_js_raw sessionStorePath)")"
 
   if is_placeholder "$tp"; then
     err "config.js: 'trustProxy' non renseigné"; errors=$((errors+1))
   else
     if is_int "$tp"; then
       if (( tp < 0 )); then
         err "config.js: 'trustProxy' doit être >= 0 (valeur=$tp)"; errors=$((errors+1))
       fi
-    else
-      # chaîne non vide acceptée (ex: "loopback,uniquelocal")
-      :
     fi
+    fi
   fi
 
   if ! is_bool_literal "$cs"; then
     err "config.js: 'cookieSecure' doit être true ou false (valeur='$cs')"; errors=$((errors+1))
   fi
 
   if is_placeholder "$ssp"; then
     err "config.js: 'sessionStorePath' non renseigné"; errors=$((errors+1))
   fi
 
-  # règles DB (déclarations uniquement)
   case "$dbtype" in
     sqlite)
       val="$(normalize_js_value "$(parse_js_raw dbFile)")"
       if is_placeholder "$val"; then
         err "config.js: 'dbFile' requis (sqlite)"; errors=$((errors+1))
       fi
       ;;
     mysql)
       local H P U PW DB
       H="$(normalize_js_value "$(parse_js_raw DB_HOST)")"
       P="$(normalize_js_value "$(parse_js_raw DB_PORT)")"
       U="$(normalize_js_value "$(parse_js_raw DB_USER)")"
       PW="$(normalize_js_value "$(parse_js_raw DB_PASSWORD)")"
       DB="$(normalize_js_value "$(parse_js_raw DB_DATABASE)")"
       if is_placeholder "$H"; then err "config.js: 'DB_HOST' requis (mysql)"; errors=$((errors+1)); fi
       if ! is_int "$P"; then err "config.js: 'DB_PORT' entier requis (mysql)"; errors=$((errors+1)); fi
       if is_placeholder "$U"; then err "config.js: 'DB_USER' requis (mysql)"; errors=$((errors+1)); fi
       if is_placeholder "$PW"; then err "config.js: 'DB_PASSWORD' requis (mysql)"; errors=$((errors+1)); fi
       if is_placeholder "$DB"; then err "config.js: 'DB_DATABASE' requis (mysql)"; errors=$((errors+1)); fi
       ;;
     *)
       err "config.js: 'dbtype' doit être 'sqlite' ou 'mysql' (actuel='$dbtype')"
       errors=$((errors+1))
       ;;
   esac
 }
 
+check_conf "$CONF_SH"
 validate_config_js
 
 # ────────── Résumé & exit codes ──────────