Portage complet PHP/Bash vers Node.js (Fastify + worker_threads)
This commit is contained in:
34
.env.example
Normal file
34
.env.example
Normal file
@@ -0,0 +1,34 @@
|
||||
# --- Secrets / runtime (obligatoires) ---
|
||||
TMDB_API_KEY=your_tmdb_api_key_here
|
||||
PROXYTMDB_PASSWORD=change_me
|
||||
# 32 caracteres. Generer avec :
|
||||
# node -e "console.log(require('crypto').randomBytes(32).toString('base64').slice(0,32))"
|
||||
SESSION_SECRET=change_me_to_a_random_32_chars_str
|
||||
|
||||
# --- Serveur ---
|
||||
PORT=3000
|
||||
HOST=0.0.0.0
|
||||
PAGE_TITLE=Index protégé
|
||||
|
||||
# --- URLs externes (laisse les defauts sauf si tu changes de domaine) ---
|
||||
#TMDB_API_BASE=https://api.themoviedb.org/3
|
||||
#TMDB_EXPORTS_BASE=http://files.tmdb.org/p/exports
|
||||
#IMDB_DATASETS_BASE=https://datasets.imdbws.com
|
||||
#MOVIE_URL=https://www.themoviedb.org/movie
|
||||
#TV_URL=https://www.themoviedb.org/tv
|
||||
#MOVIE_API_URL=https://tmdb.uklm.xyz/api?t=movie&q=
|
||||
#TV_API_URL=https://tmdb.uklm.xyz/api?t=tv&q=
|
||||
#POSTER_URL=https://image.tmdb.org/t/p/w200
|
||||
#NO_POSTER_URL=https://www.serveurperso.com/stats/noposter.jpg
|
||||
#IMDB_URL=https://www.imdb.com/title
|
||||
|
||||
# --- Reglages cron / recherche (defauts conserves de la version PHP) ---
|
||||
#CHANGES_DAYS=3
|
||||
#NB_SEARCH_PARTS=8
|
||||
#NB_WORKERS=8
|
||||
#TITLE_TOLERANCE=40
|
||||
#LEV_INS=10
|
||||
#LEV_REP=12
|
||||
#LEV_DEL=10
|
||||
#LEV_SCALE=10
|
||||
#YEAR_TOLERANCE=1
|
||||
52
.gitignore
vendored
Normal file
52
.gitignore
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
# Node
|
||||
node_modules/
|
||||
npm-debug.log*
|
||||
|
||||
# Logs runtime
|
||||
cron.txt
|
||||
lastcron.txt
|
||||
*.log
|
||||
|
||||
# Donnees generees (telechargees / construites par le cron)
|
||||
imdbratings.tsv
|
||||
imdbratings.tsv.tmp
|
||||
title.ratings.tsv
|
||||
title.ratings.tsv.gz
|
||||
|
||||
# Exports TMDb quotidiens
|
||||
tmdbintegral/movie.json
|
||||
tmdbintegral/tv.json
|
||||
tmdbintegral/movie.json.tmp
|
||||
tmdbintegral/tv.json.tmp
|
||||
|
||||
# Mappings TMDb <-> IMDb
|
||||
tmdbintegral/movie2imdb.json
|
||||
tmdbintegral/tv2imdb.json
|
||||
tmdbintegral/imdb2movie.json
|
||||
tmdbintegral/imdb2tv.json
|
||||
|
||||
# Chunks de recherche
|
||||
tmdbintegral/searchmovie*.json
|
||||
tmdbintegral/searchtv*.json
|
||||
|
||||
# Ambiguites
|
||||
tmdbintegral/ambiguitymovie.csv
|
||||
tmdbintegral/ambiguitytv.csv
|
||||
|
||||
# Cache complet TMDb (~1700 dossiers x 1000 fichiers JSON)
|
||||
tmdbintegral/movie/
|
||||
tmdbintegral/tv/
|
||||
tmdbintegral/justwatchmovie/
|
||||
tmdbintegral/justwatchtv/
|
||||
|
||||
# IDE / OS / outils
|
||||
.claude/
|
||||
.specstory/
|
||||
.vscode/
|
||||
.idea/
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# Secrets
|
||||
.env
|
||||
.env.local
|
||||
61
config.js
Normal file
61
config.js
Normal file
@@ -0,0 +1,61 @@
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import { dirname, join } from 'node:path';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
export const ROOT = __dirname;
|
||||
export const TMDBINTEGRAL_DIR = join(ROOT, 'tmdbintegral');
|
||||
export const MOVIE_DIR = join(TMDBINTEGRAL_DIR, 'movie');
|
||||
export const TV_DIR = join(TMDBINTEGRAL_DIR, 'tv');
|
||||
export const JUSTWATCH_MOVIE_DIR = join(TMDBINTEGRAL_DIR, 'justwatchmovie');
|
||||
export const JUSTWATCH_TV_DIR = join(TMDBINTEGRAL_DIR, 'justwatchtv');
|
||||
export const IMDB_RATINGS = join(ROOT, 'imdbratings.tsv');
|
||||
export const CRON_TXT = join(ROOT, 'cron.txt');
|
||||
export const LASTCRON_TXT = join(ROOT, 'lastcron.txt');
|
||||
|
||||
// Read a mandatory environment variable; abort startup when it is absent
// or empty so misconfiguration is caught immediately.
function required(name) {
  const value = process.env[name];
  if (value) return value;
  throw new Error(`Variable d'environnement manquante: ${name} (voir .env.example)`);
}
|
||||
|
||||
/**
 * Read an integer environment variable.
 *
 * Falls back to `def` when the variable is unset, empty, or not a valid
 * integer. (Previously a malformed value returned NaN, silently poisoning
 * every numeric setting derived from it.)
 *
 * @param {string} name - Environment variable name.
 * @param {number} def - Default used when the variable is missing or invalid.
 * @returns {number}
 */
function int(name, def) {
  const v = process.env[name];
  if (!v) return def;
  const parsed = Number.parseInt(v, 10);
  return Number.isNaN(parsed) ? def : parsed;
}
|
||||
|
||||
// Read a string environment variable, falling back to `def` only when the
// variable is not set at all (an empty string is kept, matching `??`).
function str(name, def) {
  const value = process.env[name];
  return value === undefined ? def : value;
}
|
||||
|
||||
// Secrets / runtime
|
||||
export const TMDB_API_KEY = required('TMDB_API_KEY');
|
||||
export const PASSWORD = required('PROXYTMDB_PASSWORD');
|
||||
export const SESSION_SECRET = required('SESSION_SECRET');
|
||||
export const PORT = int('PORT', 3000);
|
||||
export const HOST = str('HOST', '0.0.0.0');
|
||||
|
||||
// URLs externes
|
||||
export const TMDB_API_BASE = str('TMDB_API_BASE', 'https://api.themoviedb.org/3');
|
||||
export const TMDB_EXPORTS_BASE = str('TMDB_EXPORTS_BASE', 'http://files.tmdb.org/p/exports');
|
||||
export const IMDB_DATASETS_BASE = str('IMDB_DATASETS_BASE', 'https://datasets.imdbws.com');
|
||||
export const MOVIE_URL = str('MOVIE_URL', 'https://www.themoviedb.org/movie');
|
||||
export const TV_URL = str('TV_URL', 'https://www.themoviedb.org/tv');
|
||||
export const MOVIE_API_URL = str('MOVIE_API_URL', 'https://tmdb.uklm.xyz/api?t=movie&q=');
|
||||
export const TV_API_URL = str('TV_API_URL', 'https://tmdb.uklm.xyz/api?t=tv&q=');
|
||||
export const POSTER_URL = str('POSTER_URL', 'https://image.tmdb.org/t/p/w200');
|
||||
export const NO_POSTER_URL = str('NO_POSTER_URL', 'https://www.serveurperso.com/stats/noposter.jpg');
|
||||
export const IMDB_URL = str('IMDB_URL', 'https://www.imdb.com/title');
|
||||
|
||||
// Reglages cron / recherche
|
||||
export const CHANGES_DAYS = int('CHANGES_DAYS', 3);
|
||||
export const NB_SEARCH_PARTS = int('NB_SEARCH_PARTS', 8);
|
||||
export const NB_WORKERS = int('NB_WORKERS', 8);
|
||||
export const TITLE_TOLERANCE = int('TITLE_TOLERANCE', 40);
|
||||
export const LEV_INS = int('LEV_INS', 10);
|
||||
export const LEV_REP = int('LEV_REP', 12);
|
||||
export const LEV_DEL = int('LEV_DEL', 10);
|
||||
export const LEV_SCALE = int('LEV_SCALE', 10);
|
||||
export const YEAR_TOLERANCE = int('YEAR_TOLERANCE', 1);
|
||||
|
||||
export const TITLE = str('PAGE_TITLE', 'Index protégé');
|
||||
104
cron/ambiguity.js
Normal file
104
cron/ambiguity.js
Normal file
@@ -0,0 +1,104 @@
|
||||
// Port of tmdbintegral/ambiguity.php
|
||||
// Detects pairs of distinct TMDb ids whose filtered titles collide and whose
|
||||
// years are within YEARTOLERANCE.
|
||||
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { writeFile } from 'node:fs/promises';
|
||||
import { join } from 'node:path';
|
||||
import { TMDBINTEGRAL_DIR, NB_SEARCH_PARTS } from '../config.js';
|
||||
|
||||
const TMDB = 0;
|
||||
const FILTEREDTITLE = 4;
|
||||
const FILTEREDENGLISHTITLE = 5;
|
||||
const FILTEREDORIGINALTITLE = 6;
|
||||
const YEAR = 7;
|
||||
|
||||
/**
 * Port of tmdbintegral/ambiguity.php.
 *
 * Loads all search chunks for `type`, groups entries by identical filtered
 * title, and writes one "id1;LANG1;id2;LANG2" line per ordered pair of
 * DISTINCT TMDb ids whose years fall within the tolerance, to
 * ambiguity<type>.csv.
 *
 * @param {'movie'|'tv'} type
 * @param {number} [nbParts] - number of search chunk files to load
 */
export async function buildAmbiguity(type, nbParts = NB_SEARCH_PARTS) {
  // TV uses a 200-year tolerance, which effectively disables the year check;
  // movies require the years to be within ±1.
  const yearTolerance = type === 'tv' ? 200 : 1;
  const out = join(TMDBINTEGRAL_DIR, `ambiguity${type}.csv`);

  // Concatenate every search chunk back into one in-memory database.
  const database = [];
  for (let p = 0; p < nbParts; p++) {
    const file = join(TMDBINTEGRAL_DIR, `search${type}${p}.json`);
    const chunk = JSON.parse(readFileSync(file, 'utf8'));
    for (const e of chunk) database.push(e);
  }

  // Explode each entry into up to three parallel rows (one per non-empty
  // filtered title variant), tagged with its language code.
  const tmdbs = [];
  const filteredTitles = [];
  const languages = [];
  const years = [];
  for (const db of database) {
    const fr = db[FILTEREDTITLE];
    const en = db[FILTEREDENGLISHTITLE];
    const vo = db[FILTEREDORIGINALTITLE];
    if (fr) { tmdbs.push(db[TMDB]); filteredTitles.push(fr); years.push(db[YEAR][0]); languages.push('FR'); }
    if (en) { tmdbs.push(db[TMDB]); filteredTitles.push(en); years.push(db[YEAR][0]); languages.push('EN'); }
    if (vo) { tmdbs.push(db[TMDB]); filteredTitles.push(vo); years.push(db[YEAR][0]); languages.push('VO'); }
  }

  // PHP: array_multisort(filteredtitles, years, tmdbs, languages)
  // Sort indices by (filteredTitle ASC, year ASC, tmdb ASC, language ASC).
  const idx = filteredTitles.map((_, i) => i);
  idx.sort((a, b) => {
    if (filteredTitles[a] < filteredTitles[b]) return -1;
    if (filteredTitles[a] > filteredTitles[b]) return 1;
    if (years[a] !== years[b]) return years[a] - years[b];
    if (tmdbs[a] !== tmdbs[b]) return tmdbs[a] - tmdbs[b];
    if (languages[a] < languages[b]) return -1;
    if (languages[a] > languages[b]) return 1;
    return 0;
  });

  const sortedTmdbs = idx.map((i) => tmdbs[i]);
  const sortedFiltered = idx.map((i) => filteredTitles[i]);
  const sortedYears = idx.map((i) => years[i]);
  const sortedLanguages = idx.map((i) => languages[i]);

  // Group-scan state: nbTmdbs counts distinct CONSECUTIVE tmdb ids inside the
  // current same-title group (the sort puts equal ids adjacent within it).
  let oldTmdb = 0;
  let nbTmdbs = 0;
  let oldFiltered = '';
  let ambiguities = [];
  const lines = [];

  // Emit all cross-id pairs of the finished title group, then reset.
  // Each row is [tmdb, year, language]; pairs with the same id are skipped.
  const flush = () => {
    if (nbTmdbs >= 2) {
      for (const a1 of ambiguities) {
        for (const a2 of ambiguities) {
          if (a1[0] !== a2[0] && Math.abs(a1[1] - a2[1]) <= yearTolerance) {
            lines.push(`${a1[0]};${a1[2]};${a2[0]};${a2[2]}`);
          }
        }
      }
    }
    ambiguities = [];
    nbTmdbs = 0;
  };

  for (let i = 0; i < sortedFiltered.length; i++) {
    if (sortedTmdbs[i] !== oldTmdb) nbTmdbs++;
    oldTmdb = sortedTmdbs[i];

    // A change in filtered title closes the current group.
    if (sortedFiltered[i] !== oldFiltered) {
      flush();
    }
    oldFiltered = sortedFiltered[i];
    ambiguities.push([sortedTmdbs[i], sortedYears[i], sortedLanguages[i]]);
  }
  flush();

  // Trailing newline only when there is at least one line.
  await writeFile(out, lines.length ? lines.join('\n') + '\n' : '');
}
|
||||
|
||||
// CLI entry point: `node cron/ambiguity.js movie|tv [nbParts]`.
if (import.meta.url === `file://${process.argv[1]}`) {
  const [, , type, nbArg] = process.argv;
  const nb = parseInt(nbArg || String(NB_SEARCH_PARTS), 10);
  if (!(type === 'movie' || type === 'tv')) {
    console.error('Usage: node cron/ambiguity.js movie|tv [nbParts]');
    process.exit(1);
  }
  buildAmbiguity(type, nb).catch((err) => {
    console.error(err);
    process.exit(1);
  });
}
|
||||
133
cron/buildSearch.js
Normal file
133
cron/buildSearch.js
Normal file
@@ -0,0 +1,133 @@
|
||||
// Port of tmdbintegral/search.php
|
||||
// Builds the chunked search database files (searchmovieN.json / searchtvN.json).
|
||||
//
|
||||
// Each entry has the same positional shape as the PHP version:
|
||||
// [TMDB, TITLE, ENGLISHTITLE, ORIGINALTITLE,
|
||||
// FILTEREDTITLE, FILTEREDENGLISHTITLE, FILTEREDORIGINALTITLE,
|
||||
// YEARS[], POPULARITY]
|
||||
// so the runtime search worker can use the same indices.
|
||||
|
||||
import { createReadStream, existsSync, readFileSync } from 'node:fs';
|
||||
import { writeFile } from 'node:fs/promises';
|
||||
import { createInterface } from 'node:readline';
|
||||
import { join } from 'node:path';
|
||||
import { TMDBINTEGRAL_DIR, NB_SEARCH_PARTS } from '../config.js';
|
||||
import { entryPath } from '../lib/paths.js';
|
||||
import { filterTitle } from '../lib/titleFilter.js';
|
||||
import { mbStrlen } from '../lib/mbLevenshtein.js';
|
||||
|
||||
function lower(s) { return s.toLocaleLowerCase(); }
|
||||
|
||||
// Pull the English ("en") title out of a TMDb detail object's appended
// translations; returns '' when no English translation is present.
function extractEnglishTitle(detail, type) {
  const translations = detail?.translations?.translations;
  if (!Array.isArray(translations)) return '';
  const en = translations.find((t) => t.iso_639_1 === 'en');
  if (!en) return '';
  return (type === 'movie' ? en.data?.title : en.data?.name) || '';
}
|
||||
|
||||
/**
 * Convert one master-export record plus its cached detail object into a
 * positional search entry (same indices as the PHP version):
 *   [TMDB, TITLE, ENGLISHTITLE, ORIGINALTITLE,
 *    FILTEREDTITLE, FILTEREDENGLISHTITLE, FILTEREDORIGINALTITLE,
 *    YEARS[], POPULARITY]
 *
 * Returns null when the entry is unusable: no primary year, or every
 * filtered title variant is empty.
 *
 * @param {{id: number, popularity: any}} masterObj - one line of <type>.json
 * @param {object} detail - cached TMDb detail payload for that id
 * @param {'movie'|'tv'} type
 * @returns {Array|null}
 */
function buildEntry(masterObj, detail, type) {
  const tmdb = masterObj.id;
  const popularity = parseFloat(masterObj.popularity) || 0;

  let title, originalTitle, englishTitle;
  const years = [];

  if (type === 'movie') {
    // years[0] = release year (0 when the date is missing/unparseable).
    const date = String(detail.release_date || '').split('-');
    years.push(parseInt(date[0], 10) || 0);
    title = detail.title || '';
    originalTitle = detail.original_title || '';
    englishTitle = extractEnglishTitle(detail, 'movie');
  } else {
    // years[0] = first-air year; every season air year is also appended so a
    // TV entry can match on any of its season years.
    const date = String(detail.first_air_date || '').split('-');
    years.push(parseInt(date[0], 10) || 0);
    title = detail.name || '';
    originalTitle = detail.original_name || '';
    englishTitle = extractEnglishTitle(detail, 'tv');
    if (Array.isArray(detail.seasons)) {
      for (const s of detail.seasons) {
        const sd = String(s.air_date || '').split('-');
        const sy = parseInt(sd[0], 10);
        if (sy) years.push(sy);
      }
    }
  }

  // No usable primary year: entry is dropped.
  if (!years[0]) return null;

  let ft = filterTitle(title);
  let fe = filterTitle(englishTitle);
  let fo = filterTitle(originalTitle);

  if (!ft && !fe && !fo) return null;

  // A filtered title that lost more than half of its characters is treated
  // as noise and discarded.
  if (ft && mbStrlen(ft) / mbStrlen(title) < 0.5) ft = '';
  if (fe && mbStrlen(fe) / mbStrlen(englishTitle) < 0.5) fe = '';
  if (fo && mbStrlen(fo) / mbStrlen(originalTitle) < 0.5) fo = '';

  // Dedupe years preserving order (PHP array_values(array_unique($years)))
  const seen = new Set();
  const uniqYears = [];
  for (const y of years) {
    if (!seen.has(y)) { seen.add(y); uniqYears.push(y); }
  }

  return [
    tmdb,
    title,
    englishTitle,
    originalTitle,
    lower(ft),
    lower(fe),
    lower(fo),
    uniqYears,
    popularity,
  ];
}
|
||||
|
||||
/**
 * Build the chunked search database (search<type>N.json) by streaming the
 * daily TMDb export line-by-line and joining each id against its cached
 * detail file. Malformed lines and unreadable caches are skipped.
 *
 * @param {'movie'|'tv'} type
 * @param {number} [nbParts] - number of output chunks
 */
export async function buildSearch(type, nbParts = NB_SEARCH_PARTS) {
  const reader = createInterface({
    input: createReadStream(join(TMDBINTEGRAL_DIR, `${type}.json`), { encoding: 'utf8' }),
    crlfDelay: Infinity,
  });

  const entries = [];
  for await (const rawLine of reader) {
    if (!rawLine) continue;

    let masterObj;
    try {
      masterObj = JSON.parse(rawLine);
    } catch {
      continue; // malformed export line
    }

    const detailFile = entryPath(type, masterObj.id);
    if (!existsSync(detailFile)) continue;

    let detail;
    try {
      detail = JSON.parse(readFileSync(detailFile, 'utf8'));
    } catch {
      continue; // corrupt cached detail file
    }

    const entry = buildEntry(masterObj, detail, type);
    if (entry) entries.push(entry);
  }

  // Split into nbParts near-equal chunks and write them in parallel.
  const partSize = Math.ceil(entries.length / nbParts);
  const pendingWrites = [];
  for (let part = 0; part < nbParts; part++) {
    const chunk = entries.slice(part * partSize, (part + 1) * partSize);
    console.log(`Writing ${chunk.length} entries to search${type}${part}.json`);
    pendingWrites.push(
      writeFile(join(TMDBINTEGRAL_DIR, `search${type}${part}.json`), JSON.stringify(chunk)),
    );
  }
  await Promise.all(pendingWrites);
}
|
||||
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
const type = process.argv[2];
|
||||
const nb = parseInt(process.argv[3] || String(NB_SEARCH_PARTS), 10);
|
||||
if (type !== 'movie' && type !== 'tv') {
|
||||
console.error('Usage: node cron/buildSearch.js movie|tv [nbParts]');
|
||||
process.exit(1);
|
||||
}
|
||||
buildSearch(type, nb).catch((err) => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
36
cron/imdbRatings.js
Normal file
36
cron/imdbRatings.js
Normal file
@@ -0,0 +1,36 @@
|
||||
import { createWriteStream } from 'node:fs';
|
||||
import { rename } from 'node:fs/promises';
|
||||
import { pipeline } from 'node:stream/promises';
|
||||
import { createGunzip } from 'node:zlib';
|
||||
import { Readable } from 'node:stream';
|
||||
import { join } from 'node:path';
|
||||
import { ROOT, IMDB_DATASETS_BASE, IMDB_RATINGS } from '../config.js';
|
||||
|
||||
const FILE = 'title.ratings.tsv';
|
||||
|
||||
/**
 * Download the IMDb ratings dataset (title.ratings.tsv.gz), gunzip it on the
 * fly, and atomically move the result into place as imdbratings.tsv.
 *
 * The download goes to a temp file first so a failed/partial transfer never
 * clobbers the existing ratings file; the temp file is removed on failure
 * (previously a broken pipeline leaked a truncated .tmp on disk).
 *
 * @throws {Error} when the HTTP request fails or the stream pipeline breaks
 */
export async function syncImdbRatings() {
  const url = `${IMDB_DATASETS_BASE}/${FILE}.gz`;
  const tmpPath = join(ROOT, `${FILE}.tmp`);

  console.log(`Downloading: "${url}"`);
  const res = await fetch(url);
  if (!res.ok || !res.body) {
    throw new Error(`Failed to fetch ${url}: HTTP ${res.status}`);
  }

  try {
    await pipeline(
      Readable.fromWeb(res.body),
      createGunzip(),
      createWriteStream(tmpPath),
    );
  } catch (err) {
    // Best-effort cleanup of the partial file; dynamic import avoids touching
    // this module's static import block.
    const { rm } = await import('node:fs/promises');
    await rm(tmpPath, { force: true });
    throw err;
  }

  await rename(tmpPath, IMDB_RATINGS);
  console.log(`Wrote ${IMDB_RATINGS}`);
}
|
||||
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
syncImdbRatings().catch((err) => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
93
cron/justwatchSync.js
Normal file
93
cron/justwatchSync.js
Normal file
@@ -0,0 +1,93 @@
|
||||
// Port of tmdbintegral/justwatch.php
|
||||
|
||||
import { createReadStream, existsSync, readdirSync, unlinkSync } from 'node:fs';
|
||||
import { mkdir, writeFile } from 'node:fs/promises';
|
||||
import { createInterface } from 'node:readline';
|
||||
import { join } from 'node:path';
|
||||
import {
|
||||
TMDBINTEGRAL_DIR, JUSTWATCH_MOVIE_DIR, JUSTWATCH_TV_DIR, TMDB_API_KEY, TMDB_API_BASE,
|
||||
} from '../config.js';
|
||||
import { Limiter } from '../lib/http.js';
|
||||
import { justwatchDir, justwatchPath, bucket } from '../lib/paths.js';
|
||||
|
||||
const DOWNLOAD_CONCURRENCY = 16;
|
||||
|
||||
async function readMasterIds(type) {
|
||||
const file = join(TMDBINTEGRAL_DIR, `${type}.json`);
|
||||
const ids = [];
|
||||
const stream = createReadStream(file, { encoding: 'utf8' });
|
||||
const rl = createInterface({ input: stream, crlfDelay: Infinity });
|
||||
for await (const line of rl) {
|
||||
if (!line) continue;
|
||||
try {
|
||||
const obj = JSON.parse(line);
|
||||
if (typeof obj.id === 'number') ids.push(obj.id);
|
||||
} catch { /* ignore */ }
|
||||
}
|
||||
return ids;
|
||||
}
|
||||
|
||||
async function ensureDir(dir) {
|
||||
if (!existsSync(dir)) await mkdir(dir, { recursive: true });
|
||||
}
|
||||
|
||||
// Fetch the watch-provider payload for one title and cache it on disk.
// A failed request is logged and skipped (best-effort, like the PHP cron).
async function downloadProvider(type, id) {
  await ensureDir(justwatchDir(type, id));
  const target = justwatchPath(type, id);
  const url = `${TMDB_API_BASE}/${type}/${id}/watch/providers?api_key=${TMDB_API_KEY}`;
  console.log(`Downloading: "justwatch${type}/${bucket(id)}/${id}.json"`);
  const res = await fetch(url);
  if (!res.ok) {
    console.log(`Failed to retrieve TMDb data: "${url}"`);
    return;
  }
  await writeFile(target, await res.text());
}
|
||||
|
||||
// Delete cached provider files whose id no longer appears in the master
// export. Missing directories are silently ignored.
function removeOrphans(type, ids) {
  const baseDir = type === 'movie' ? JUSTWATCH_MOVIE_DIR : JUSTWATCH_TV_DIR;
  const keep = new Set(ids);

  let buckets;
  try {
    buckets = readdirSync(baseDir);
  } catch {
    return; // base directory absent: nothing to clean
  }

  for (const b of buckets) {
    let entries;
    try {
      entries = readdirSync(join(baseDir, b));
    } catch {
      continue;
    }
    for (const fname of entries) {
      if (!fname.endsWith('.json')) continue;
      const id = parseInt(fname.slice(0, -5), 10);
      if (!Number.isInteger(id) || keep.has(id)) continue;
      console.log(`Removing: "justwatch${type}/${b}/${fname}"`);
      try {
        unlinkSync(join(baseDir, b, fname));
      } catch { /* ignore */ }
    }
  }
}
|
||||
|
||||
export async function syncType(type) {
|
||||
const ids = await readMasterIds(type);
|
||||
const limiter = new Limiter(DOWNLOAD_CONCURRENCY);
|
||||
const tasks = [];
|
||||
for (const id of ids) {
|
||||
if (existsSync(justwatchPath(type, id))) continue;
|
||||
tasks.push(limiter.run(() => downloadProvider(type, id)));
|
||||
}
|
||||
await Promise.allSettled(tasks);
|
||||
ids.sort((a, b) => a - b);
|
||||
removeOrphans(type, ids);
|
||||
}
|
||||
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
const type = process.argv[2];
|
||||
if (type !== 'movie' && type !== 'tv') {
|
||||
console.error('Usage: node cron/justwatchSync.js movie|tv');
|
||||
process.exit(1);
|
||||
}
|
||||
syncType(type).catch((err) => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
19
cron/run.sh
Executable file
19
cron/run.sh
Executable file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env bash
#
# Cron launch wrapper for nvm-managed environments.
# Cron does not inherit nvm's PATH — this script loads nvm, then runs the
# Node cron pipeline.
#
# Crontab usage:
# 13 13 * * * /home/matt/_WEB/proxytmdb/cron/run.sh > /home/matt/_WEB/proxytmdb/lastcron.txt 2>&1

# Fail fast on errors, unset variables, and failures anywhere in a pipeline
# (plain `set -e` misses the latter two).
set -euo pipefail

export NVM_DIR="${NVM_DIR:-$HOME/.nvm}"
# shellcheck source=/dev/null
[ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh"

# Switch to the nvm "default" alias (follows `nvm alias default`).
nvm use default >/dev/null

# Run from the project root so relative paths (.env, data files) resolve.
cd "$(dirname "$0")/.."
exec node --env-file-if-exists=.env cron/runAll.js
|
||||
59
cron/runAll.js
Normal file
59
cron/runAll.js
Normal file
@@ -0,0 +1,59 @@
|
||||
// Port of cron.sh + tmdbintegral/tmdbintegral.sh
|
||||
//
|
||||
// Pipeline:
|
||||
// 1. Refresh imdbratings.tsv
|
||||
// 2. Download daily TMDb exports (movie.json, tv.json)
|
||||
// 3. In parallel: tmdbSync(movie+tv), justwatchSync(movie+tv)
|
||||
// 4. In parallel: tmdb2imdb(movie+tv), buildSearch(movie+tv)
|
||||
// 5. In parallel: ambiguity(movie+tv)
|
||||
//
|
||||
// Writes cron.txt at start/end (mirrors cron.sh).
|
||||
|
||||
import { writeFileSync, appendFileSync } from 'node:fs';
|
||||
import { CRON_TXT } from '../config.js';
|
||||
import { syncImdbRatings } from './imdbRatings.js';
|
||||
import { syncExports } from './tmdbExports.js';
|
||||
import { syncType as syncTmdb } from './tmdbSync.js';
|
||||
import { syncType as syncJustwatch } from './justwatchSync.js';
|
||||
import { buildMapping } from './tmdb2imdb.js';
|
||||
import { buildSearch } from './buildSearch.js';
|
||||
import { buildAmbiguity } from './ambiguity.js';
|
||||
|
||||
function dateStamp() {
|
||||
return new Date().toString();
|
||||
}
|
||||
|
||||
/**
 * Full nightly pipeline (port of cron.sh + tmdbintegral.sh).
 *
 * Stage order matters: the TMDb exports must exist before the cache syncs,
 * the detail cache must be populated before the mapping/search builds, and
 * the search chunks must exist before the ambiguity pass.
 */
export async function runAll() {
  // Overwrite (not append) cron.txt so each run starts a fresh log.
  writeFileSync(CRON_TXT, `Started At ${dateStamp()}\n`);

  await syncImdbRatings();
  await syncExports();

  // Stage 1: refresh the TMDb + JustWatch caches (independent, run in parallel).
  await Promise.all([
    syncTmdb('movie'),
    syncTmdb('tv'),
    syncJustwatch('movie'),
    syncJustwatch('tv'),
  ]);

  // Stage 2: derive id mappings and search chunks from the refreshed caches.
  await Promise.all([
    buildMapping('movie'),
    buildMapping('tv'),
    buildSearch('movie'),
    buildSearch('tv'),
  ]);

  // Stage 3: ambiguity detection reads the search chunks written in stage 2.
  await Promise.all([
    buildAmbiguity('movie'),
    buildAmbiguity('tv'),
  ]);

  appendFileSync(CRON_TXT, `Finished At ${dateStamp()}\n`);
}
|
||||
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
runAll().catch((err) => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
52
cron/tmdb2imdb.js
Normal file
52
cron/tmdb2imdb.js
Normal file
@@ -0,0 +1,52 @@
|
||||
// Port of tmdbintegral/tmdb2imdb.php
|
||||
// Builds bidirectional TMDb <-> IMDb id mappings from cached detail files.
|
||||
|
||||
import { createReadStream, existsSync, readFileSync } from 'node:fs';
|
||||
import { writeFile } from 'node:fs/promises';
|
||||
import { createInterface } from 'node:readline';
|
||||
import { join } from 'node:path';
|
||||
import { TMDBINTEGRAL_DIR } from '../config.js';
|
||||
import { entryPath } from '../lib/paths.js';
|
||||
|
||||
export async function buildMapping(type) {
|
||||
const inputFile = join(TMDBINTEGRAL_DIR, `${type}.json`);
|
||||
const out1 = join(TMDBINTEGRAL_DIR, `${type}2imdb.json`);
|
||||
const out2 = join(TMDBINTEGRAL_DIR, `imdb2${type}.json`);
|
||||
|
||||
const data1 = {};
|
||||
const data2 = {};
|
||||
|
||||
const stream = createReadStream(inputFile, { encoding: 'utf8' });
|
||||
const rl = createInterface({ input: stream, crlfDelay: Infinity });
|
||||
|
||||
for await (const line of rl) {
|
||||
if (!line) continue;
|
||||
let obj;
|
||||
try { obj = JSON.parse(line); } catch { continue; }
|
||||
const tmdb = obj.id;
|
||||
const path = entryPath(type, tmdb);
|
||||
if (!existsSync(path)) continue;
|
||||
let detail;
|
||||
try { detail = JSON.parse(readFileSync(path, 'utf8')); } catch { continue; }
|
||||
const imdb = detail?.external_ids?.imdb_id;
|
||||
if (imdb) {
|
||||
data1[tmdb] = imdb;
|
||||
data2[imdb] = tmdb;
|
||||
}
|
||||
}
|
||||
|
||||
await writeFile(out1, JSON.stringify(data1));
|
||||
await writeFile(out2, JSON.stringify(data2));
|
||||
}
|
||||
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
const type = process.argv[2];
|
||||
if (type !== 'movie' && type !== 'tv') {
|
||||
console.error('Usage: node cron/tmdb2imdb.js movie|tv');
|
||||
process.exit(1);
|
||||
}
|
||||
buildMapping(type).catch((err) => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
56
cron/tmdbExports.js
Normal file
56
cron/tmdbExports.js
Normal file
@@ -0,0 +1,56 @@
|
||||
import { createWriteStream } from 'node:fs';
|
||||
import { rename } from 'node:fs/promises';
|
||||
import { pipeline } from 'node:stream/promises';
|
||||
import { createGunzip } from 'node:zlib';
|
||||
import { Readable } from 'node:stream';
|
||||
import { join } from 'node:path';
|
||||
import { TMDBINTEGRAL_DIR, TMDB_EXPORTS_BASE } from '../config.js';
|
||||
|
||||
// Format a Date as the MM_DD_YYYY stamp used in TMDb export file names (UTC).
function formatMMDDYYYY(date) {
  const pad = (n) => String(n).padStart(2, '0');
  return `${pad(date.getUTCMonth() + 1)}_${pad(date.getUTCDate())}_${date.getUTCFullYear()}`;
}
|
||||
|
||||
async function tryDownload(url, outPath) {
|
||||
console.log(`Downloading: "${url}"`);
|
||||
const res = await fetch(url);
|
||||
if (res.status === 403 || res.status === 404) {
|
||||
console.log(`Not published yet (HTTP ${res.status}): ${url}`);
|
||||
return false;
|
||||
}
|
||||
if (!res.ok || !res.body) {
|
||||
throw new Error(`Failed to fetch ${url}: HTTP ${res.status}`);
|
||||
}
|
||||
const tmp = `${outPath}.tmp`;
|
||||
await pipeline(Readable.fromWeb(res.body), createGunzip(), createWriteStream(tmp));
|
||||
await rename(tmp, outPath);
|
||||
console.log(`Wrote ${outPath}`);
|
||||
return true;
|
||||
}
|
||||
|
||||
// TMDb publishes the daily export around 08:00 UTC. If we run before that, the
|
||||
// current-day file returns 403. Try today, then fall back to yesterday.
|
||||
async function downloadExport(prefix, outName) {
|
||||
const now = new Date();
|
||||
const yesterday = new Date(now.getTime() - 86400 * 1000);
|
||||
const out = join(TMDBINTEGRAL_DIR, outName);
|
||||
for (const d of [now, yesterday]) {
|
||||
const url = `${TMDB_EXPORTS_BASE}/${prefix}_${formatMMDDYYYY(d)}.json.gz`;
|
||||
if (await tryDownload(url, out)) return;
|
||||
}
|
||||
throw new Error(`No TMDb ${prefix} export available for today or yesterday`);
|
||||
}
|
||||
|
||||
export async function syncExports() {
|
||||
await downloadExport('movie_ids', 'movie.json');
|
||||
await downloadExport('tv_series_ids', 'tv.json');
|
||||
}
|
||||
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
syncExports().catch((err) => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
168
cron/tmdbSync.js
Normal file
168
cron/tmdbSync.js
Normal file
@@ -0,0 +1,168 @@
|
||||
// Port of tmdbintegral/tmdbintegral.php
|
||||
//
|
||||
// 1. Fetch /changes for the last CHANGES_DAYS to find recently-modified entries
|
||||
// whose local cache file is older than CHANGES_DAYS (so we re-download them).
|
||||
// 2. Stream <type>.json line-by-line, ensure each id has a local detail file
|
||||
// (downloading it if missing or flagged for update).
|
||||
// 3. Walk through every numeric id < max(tmdbs) and remove orphan files that
|
||||
// no longer appear in the master list.
|
||||
|
||||
import { createReadStream, createWriteStream, existsSync, statSync, readdirSync, unlinkSync } from 'node:fs';
|
||||
import { mkdir, stat, writeFile, unlink } from 'node:fs/promises';
|
||||
import { createInterface } from 'node:readline';
|
||||
import { join } from 'node:path';
|
||||
import {
|
||||
TMDBINTEGRAL_DIR, MOVIE_DIR, TV_DIR, TMDB_API_KEY, TMDB_API_BASE, CHANGES_DAYS,
|
||||
} from '../config.js';
|
||||
import { fetchJson, Limiter } from '../lib/http.js';
|
||||
import { entryDir, entryPath, bucket } from '../lib/paths.js';
|
||||
|
||||
const CHANGES_SECS = CHANGES_DAYS * 24 * 3600;
|
||||
const DOWNLOAD_CONCURRENCY = 16;
|
||||
|
||||
// Format a Date as YYYY-MM-DD (UTC), the shape TMDb's /changes API expects.
function ymd(date) {
  const pad = (n) => String(n).padStart(2, '0');
  return `${date.getUTCFullYear()}-${pad(date.getUTCMonth() + 1)}-${pad(date.getUTCDate())}`;
}
|
||||
|
||||
// Sub-resources appended to a detail request; TV additionally needs
// aggregate_credits (per-series combined cast).
function appendResponse(type) {
  if (type === 'tv') {
    return 'credits,aggregate_credits,external_ids,release_dates,translations,images,videos';
  }
  return 'credits,external_ids,release_dates,translations,images,videos';
}
|
||||
|
||||
function detailUrl(type, id) {
|
||||
const base = `${TMDB_API_BASE}/${type}`;
|
||||
return `${base}/${id}?api_key=${TMDB_API_KEY}&append_to_response=${appendResponse(type)}&include_image_language=fr,null,en&language=fr-FR`;
|
||||
}
|
||||
|
||||
/**
 * Query TMDb's /changes endpoint for the last CHANGES_DAYS and return the
 * set of ids whose local cache file is older than CHANGES_DAYS (candidates
 * for re-download).
 *
 * @param {'movie'|'tv'} type
 * @returns {Promise<Set<number>>} ids flagged for refresh
 */
async function findChanges(type) {
  const now = new Date();
  const start = new Date(now.getTime() - CHANGES_DAYS * 86400 * 1000);
  const startdate = ymd(start);
  const enddate = ymd(now);
  const baseUrl = `${TMDB_API_BASE}/${type}/changes?api_key=${TMDB_API_KEY}&start_date=${startdate}&end_date=${enddate}&page=`;

  const updates = new Set();
  // `total` is corrected from each response's total_pages, so the loop is
  // bounded by whatever the API last reported.
  let total = 1;
  for (let page = 1; page <= total; page++) {
    const url = `${baseUrl}${page}`;
    console.log(`Downloading: "${url}"`);
    const obj = await fetchJson(url);
    if (!obj) {
      // A failed page is skipped rather than aborting the whole scan.
      console.log(`Failed to retrieve TMDb data: "${baseUrl}"`);
      continue;
    }
    if (typeof obj.total_pages === 'number') total = obj.total_pages;
    if (!Array.isArray(obj.results)) continue;

    for (const change of obj.results) {
      const id = change.id;
      const path = entryPath(type, id);
      // Only already-cached entries can be "stale"; ids with no local file
      // are downloaded anyway by the master-list pass in syncType().
      if (!existsSync(path)) continue;
      let st;
      try { st = statSync(path); } catch { continue; }
      // PHP uses filectime; on Linux ctime tracks metadata changes too, but the
      // intent is "last time the local file was refreshed". We use mtime which
      // is closer to that intent in JS (writeFile updates mtime).
      const ageSecs = (Date.now() - st.mtimeMs) / 1000;
      if (ageSecs >= CHANGES_SECS) {
        const days = Math.floor(ageSecs / 86400);
        const hours = Math.floor((ageSecs % 86400) / 3600);
        const minutes = Math.floor((ageSecs % 3600) / 60);
        console.log(`Updating: "${type}/${bucket(id)}/${id}.json" ${days} days, ${hours} hours, ${minutes} minutes`);
        updates.add(id);
      }
    }
  }
  return updates;
}
|
||||
|
||||
async function readMasterIds(type) {
|
||||
const file = join(TMDBINTEGRAL_DIR, `${type}.json`);
|
||||
const ids = [];
|
||||
const stream = createReadStream(file, { encoding: 'utf8' });
|
||||
const rl = createInterface({ input: stream, crlfDelay: Infinity });
|
||||
for await (const line of rl) {
|
||||
if (!line) continue;
|
||||
try {
|
||||
const obj = JSON.parse(line);
|
||||
if (typeof obj.id === 'number') ids.push(obj.id);
|
||||
} catch { /* ignore malformed lines */ }
|
||||
}
|
||||
return ids;
|
||||
}
|
||||
|
||||
async function ensureDir(dir) {
|
||||
if (!existsSync(dir)) {
|
||||
await mkdir(dir, { recursive: true });
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Download one TMDb detail JSON and store it under <type>/<bucket>/<id>.json.
 * Failures are logged and swallowed: the caller aggregates tasks with
 * Promise.allSettled() and must not be interrupted by a single bad id.
 */
async function downloadDetail(type, id) {
  const dir = entryDir(type, id);
  await ensureDir(dir);
  const path = entryPath(type, id);
  console.log(`Downloading: "${type}/${bucket(id)}/${id}.json"`);
  const url = detailUrl(type, id);
  let res;
  try {
    res = await fetch(url);
  } catch (err) {
    // Previously a network-level failure (DNS, reset, timeout) rejected the
    // task and was silently discarded by allSettled(); log it like HTTP errors.
    console.log(`Failed to retrieve TMDb data: "${url}" (${err.message})`);
    return;
  }
  if (!res.ok) {
    console.log(`Failed to retrieve TMDb data: "${url}"`);
    return;
  }
  const text = await res.text();
  await writeFile(path, text);
}
|
||||
|
||||
/**
 * Delete every locally cached <bucket>/<id>.json whose id is no longer in the
 * master id list. Walks each bucket directory exactly once.
 *
 * @param {('movie'|'tv')} type
 * @param {number[]} sortedIds  Master list of ids that must be kept.
 */
function removeOrphans(type, sortedIds) {
  const baseDir = type === 'movie' ? MOVIE_DIR : TV_DIR;
  const keep = new Set(sortedIds);

  let bucketNames;
  try {
    bucketNames = readdirSync(baseDir);
  } catch {
    // Base directory missing (first run) — nothing to prune.
    return;
  }

  for (const bucketName of bucketNames) {
    let files;
    try {
      files = readdirSync(join(baseDir, bucketName));
    } catch {
      continue;
    }
    for (const file of files) {
      if (!file.endsWith('.json')) continue;
      const id = parseInt(file.slice(0, -5), 10);
      if (!Number.isInteger(id)) continue;
      if (keep.has(id)) continue;
      console.log(`Removing: "${type}/${bucketName}/${file}"`);
      try {
        unlinkSync(join(baseDir, bucketName, file));
      } catch {
        /* already gone — nothing to do */
      }
    }
  }
}
|
||||
|
||||
/**
 * Synchronize the local detail cache for one type:
 * 1) collect changed ids from TMDb's /changes feed,
 * 2) download anything changed or missing (bounded concurrency),
 * 3) prune files whose id left the daily export.
 */
export async function syncType(type) {
  const updates = await findChanges(type);
  const ids = await readMasterIds(type);

  const limiter = new Limiter(DOWNLOAD_CONCURRENCY);
  const jobs = [];
  for (const id of ids) {
    const needsDownload = updates.has(id) || !existsSync(entryPath(type, id));
    if (needsDownload) {
      jobs.push(limiter.run(() => downloadDetail(type, id)));
    }
  }
  // Individual download failures are logged inside downloadDetail; never abort.
  await Promise.allSettled(jobs);

  ids.sort((a, b) => a - b);
  removeOrphans(type, ids);
}
|
||||
|
||||
// CLI entry point: `node cron/tmdbSync.js movie|tv`.
// NOTE(review): the `import.meta.url === file://argv[1]` direct-run check
// assumes a plain POSIX path (no symlink, not Windows) — confirm if this
// script must run elsewhere.
if (import.meta.url === `file://${process.argv[1]}`) {
  const type = process.argv[2];
  if (type !== 'movie' && type !== 'tv') {
    console.error('Usage: node cron/tmdbSync.js movie|tv');
    process.exit(1);
  }
  // Any unhandled failure is fatal for the cron run (non-zero exit).
  syncType(type).catch((err) => {
    console.error(err);
    process.exit(1);
  });
}
|
||||
21
lib/format.js
Normal file
21
lib/format.js
Normal file
@@ -0,0 +1,21 @@
|
||||
// Money formatting (Intl.NumberFormat replaces PHP's NumberFormatter::CURRENCY).

const USD_FORMATTER = new Intl.NumberFormat('en-US', {
  style: 'currency',
  currency: 'USD',
  minimumFractionDigits: 0,
  maximumFractionDigits: 0,
});

/**
 * Format an amount as whole US dollars, e.g. 1500000 -> "$1,500,000".
 * Falsy input (null/undefined/0/NaN) renders as "$0".
 */
export function formatCurrency(n) {
  return USD_FORMATTER.format(n || 0);
}
|
||||
|
||||
/** Zero-pad a number below 10 to two characters ("5" -> "05"). */
export function pad2(n) {
  if (n < 10) return `0${n}`;
  return String(n);
}

/** Render minutes as "H h MM min" (e.g. 125 -> "2 h 05 min"); falsy -> "". */
export function formatRuntime(runtime) {
  if (!runtime) return '';
  const hours = Math.floor(runtime / 60);
  const minutes = runtime % 60;
  return `${hours} h ${pad2(minutes)} min`;
}
|
||||
66
lib/http.js
Normal file
66
lib/http.js
Normal file
@@ -0,0 +1,66 @@
|
||||
// Tiny fetch wrapper with retry and concurrency limiter.
|
||||
|
||||
/**
 * GET `url` as text with retries and a per-attempt timeout.
 * - HTTP 404 is a definitive "absent" and short-circuits to null.
 * - Any other failure is retried with linear backoff (500ms, 1000ms, ...);
 *   after the last attempt the error is logged and null returned, so callers
 *   only ever deal with string-or-null.
 */
export async function fetchText(url, { retries = 3, timeoutMs = 30000 } = {}) {
  let lastErr;
  let attempt = 0;
  while (attempt <= retries) {
    const controller = new AbortController();
    const watchdog = setTimeout(() => controller.abort(), timeoutMs);
    try {
      const res = await fetch(url, { signal: controller.signal });
      clearTimeout(watchdog); // response headers arrived; stop the abort timer
      if (res.status === 404) return null;
      if (!res.ok) throw new Error(`HTTP ${res.status} ${res.statusText}`);
      return await res.text();
    } catch (err) {
      clearTimeout(watchdog);
      lastErr = err;
      if (attempt < retries) {
        await new Promise((wake) => setTimeout(wake, 500 * (attempt + 1)));
      }
    }
    attempt += 1;
  }
  console.error(`fetchText failed: ${url} :: ${lastErr?.message}`);
  return null;
}
|
||||
|
||||
/**
 * GET `url` and parse it as JSON. fetchText() already handles retries and
 * 404s; an empty body or a parse failure also maps to null.
 */
export async function fetchJson(url, opts) {
  const body = await fetchText(url, opts);
  if (!body) return null;
  try {
    return JSON.parse(body);
  } catch {
    return null;
  }
}
|
||||
|
||||
/**
 * Minimal concurrency limiter: at most `max` functions run at once; the rest
 * wait in FIFO order. run() resolves/rejects with the wrapped function's
 * outcome, so callers can await results individually.
 */
export class Limiter {
  constructor(max) {
    this.max = max;     // maximum simultaneously running tasks
    this.active = 0;    // currently running tasks
    this.queue = [];    // parked start-callbacks waiting for a free slot
  }

  /** Schedule `fn`; returns a promise for its (possibly async) result. */
  run(fn) {
    return new Promise((resolve, reject) => {
      const start = () => {
        if (this.active >= this.max) {
          this.queue.push(start);
          return;
        }
        this.active += 1;
        const finishOk = (value) => {
          this.active -= 1;
          resolve(value);
          this._next();
        };
        const finishErr = (error) => {
          this.active -= 1;
          reject(error);
          this._next();
        };
        // Promise.resolve().then(fn) uniformly handles sync throws,
        // sync return values and returned promises.
        Promise.resolve().then(fn).then(finishOk, finishErr);
      };
      start();
    });
  }

  /** Wake the oldest parked task, if any. */
  _next() {
    const wake = this.queue.shift();
    if (wake) wake();
  }
}
|
||||
49
lib/imdbRatings.js
Normal file
49
lib/imdbRatings.js
Normal file
@@ -0,0 +1,49 @@
|
||||
import { createReadStream, statSync } from 'node:fs';
|
||||
import { createInterface } from 'node:readline';
|
||||
import { IMDB_RATINGS } from '../config.js';
|
||||
|
||||
// Module-level memo: the parsed ratings Map plus the mtime of the file it was
// built from, so getRatings() can skip re-parsing an unchanged file.
let cache = null;
let cacheMtime = 0;
|
||||
|
||||
/**
 * Parse IMDb's title.ratings TSV (header: tconst, averageRating, numVotes)
 * into a Map of imdbId -> [rating, votes] (both kept as raw strings).
 * The header line and any line with fewer than two tabs are skipped.
 */
export async function loadRatings(filePath = IMDB_RATINGS) {
  const ratings = new Map();
  const rl = createInterface({
    input: createReadStream(filePath, { encoding: 'utf8' }),
    crlfDelay: Infinity,
  });
  let headerSkipped = false;
  for await (const line of rl) {
    if (!headerSkipped) {
      headerSkipped = true;
      continue;
    }
    if (!line) continue;
    // Manual indexOf slicing keeps any extra tabs inside the votes column
    // (same behaviour as the original two-tab scan).
    const firstTab = line.indexOf('\t');
    if (firstTab < 0) continue;
    const secondTab = line.indexOf('\t', firstTab + 1);
    if (secondTab < 0) continue;
    ratings.set(line.slice(0, firstTab), [
      line.slice(firstTab + 1, secondTab),
      line.slice(secondTab + 1),
    ]);
  }
  return ratings;
}
|
||||
|
||||
/**
 * Return the ratings Map, reusing the in-memory copy while the file on disk
 * is unchanged (mtime comparison). If the file cannot be read/parsed but a
 * previous map exists, the stale copy is served; otherwise the error bubbles.
 */
export async function getRatings() {
  try {
    const { mtimeMs } = statSync(IMDB_RATINGS);
    if (!cache || mtimeMs !== cacheMtime) {
      cache = await loadRatings(IMDB_RATINGS);
      cacheMtime = mtimeMs;
    }
    return cache;
  } catch (err) {
    if (cache) return cache; // degrade to stale data rather than failing
    throw err;
  }
}
|
||||
|
||||
/**
 * Numeric rating/votes for one IMDb id; unknown or missing ids degrade to
 * zeros so callers can render unconditionally.
 *
 * @param {Map<string,[string,string]>} map  Output of loadRatings().
 * @param {string|null|undefined} imdbId
 * @returns {{rating: number, votes: number}}
 */
export function lookupRating(map, imdbId) {
  const row = imdbId ? map.get(imdbId) : undefined;
  if (!row) return { rating: 0, votes: 0 };
  const [ratingStr, votesStr] = row;
  return {
    rating: parseFloat(ratingStr) || 0,
    votes: parseInt(votesStr, 10) || 0,
  };
}
|
||||
47
lib/mbLevenshtein.js
Normal file
47
lib/mbLevenshtein.js
Normal file
@@ -0,0 +1,47 @@
|
||||
// UTF-8-safe Levenshtein distance with custom insertion/replacement/deletion costs.
|
||||
// Iterates by Unicode code point (matches the PHP mb_levenshtein behaviour).
|
||||
|
||||
/**
 * Weighted edit distance over Unicode code points (port of PHP's
 * mb_levenshtein behaviour). Two-row dynamic programming:
 * O(len1*len2) time, O(len2) space.
 *
 * @param {string} s1  Source string.
 * @param {string} s2  Target string.
 * @param {number} costIns  Cost of inserting one code point.
 * @param {number} costRep  Cost of replacing one code point.
 * @param {number} costDel  Cost of deleting one code point.
 */
export function mbLevenshtein(s1, s2, costIns = 1, costRep = 1, costDel = 1) {
  // Spread iterates by code point, so astral characters count as one unit.
  const src = [...s1];
  const dst = [...s2];
  if (src.length === 0) return dst.length * costIns;
  if (dst.length === 0) return src.length * costDel;

  let previous = Array.from({ length: dst.length + 1 }, (_, j) => j * costIns);
  let current = new Array(dst.length + 1);

  for (let i = 1; i <= src.length; i++) {
    current[0] = i * costDel;
    for (let j = 1; j <= dst.length; j++) {
      const substitution = previous[j - 1] + (src[i - 1] === dst[j - 1] ? 0 : costRep);
      const deletion = previous[j] + costDel;
      const insertion = current[j - 1] + costIns;
      current[j] = Math.min(deletion, insertion, substitution);
    }
    [previous, current] = [current, previous];
  }
  return previous[dst.length];
}
|
||||
|
||||
// Similarity ratio based on mbLevenshtein, normalized by the longer string's
// code-point length; the general branch yields 1 when the weighted distance
// is zero.
// NOTE(review): the early returns look inconsistent with the general formula —
// `!s1` returns l2/size (which is always 1 here, since size === l2), two empty
// strings return 0, and the empty-string branches ignore the cost parameters.
// This may be a faithful port of the PHP original; confirm against it before
// changing anything.
export function mbLevenshteinRatio(s1, s2, costIns = 1, costRep = 1, costDel = 1) {
  const l1 = [...s1].length;
  const l2 = [...s2].length;
  const size = Math.max(l1, l2);
  if (!size) return 0;
  if (!s1) return l2 / size;
  if (!s2) return l1 / size;
  return 1 - mbLevenshtein(s1, s2, costIns, costRep, costDel) / size;
}
|
||||
|
||||
/** Code-point length (PHP mb_strlen); `s.length` would count UTF-16 units. */
export function mbStrlen(s) {
  let length = 0;
  for (const _ of s) length += 1;
  return length;
}
|
||||
|
||||
/** PHP mb_strtolower equivalent; no-arg toLocaleLowerCase() uses the host's default locale. */
export function mbStrtolower(s) {
  const lowered = s.toLocaleLowerCase();
  return lowered;
}
|
||||
26
lib/paths.js
Normal file
26
lib/paths.js
Normal file
@@ -0,0 +1,26 @@
|
||||
import { join } from 'node:path';
|
||||
import { MOVIE_DIR, TV_DIR, JUSTWATCH_MOVIE_DIR, JUSTWATCH_TV_DIR } from '../config.js';
|
||||
|
||||
/** Shard ids into per-thousand directories: 0-999 -> "0", 1000-1999 -> "1", ... */
export function bucket(id) {
  const thousands = Math.floor(id / 1000);
  return `${thousands}`;
}
|
||||
|
||||
/** Absolute path of the cached TMDb detail JSON for this id. */
export function entryPath(type, id) {
  return join(entryDir(type, id), `${id}.json`);
}
|
||||
|
||||
/** Bucketed directory that holds this id's TMDb detail JSON. */
export function entryDir(type, id) {
  const root = type === 'movie' ? MOVIE_DIR : TV_DIR;
  return join(root, bucket(id));
}
|
||||
|
||||
/** Absolute path of the cached JustWatch JSON for this id. */
export function justwatchPath(type, id) {
  return join(justwatchDir(type, id), `${id}.json`);
}
|
||||
|
||||
/** Bucketed directory that holds this id's JustWatch JSON. */
export function justwatchDir(type, id) {
  const root = type === 'movie' ? JUSTWATCH_MOVIE_DIR : JUSTWATCH_TV_DIR;
  return join(root, bucket(id));
}
|
||||
85
lib/queryParser.js
Normal file
85
lib/queryParser.js
Normal file
@@ -0,0 +1,85 @@
|
||||
// Replicates the query parsing logic shared by api.php and search.php:
|
||||
// - extract a year (last (19|20)\d{2} match, ignoring 1080/2160)
|
||||
// - extract an episode marker (SxxExxx, SxxExx, Sxx, partN, NxN, Exxx)
|
||||
// - choose movie vs tv accordingly
|
||||
// - extract titlein from the bytes before the year/episode
|
||||
|
||||
import { FILTER_RE } from './titleFilter.js';
|
||||
|
||||
// Year candidates: any (19|20)\d{2}. Both regexes are /g and therefore
// stateful; findAll() below always resets lastIndex before scanning.
const YEAR_RE = /(19|20)\d{2}/g;
// Single-pass regex matching the PHP behaviour:
// - S/s and E/e and "part" are case-insensitive ([Ss], [Ee], [Pp]art)
// - the lowercase 'x' in NxN, and uppercase 'E' in standalone Exxx, are case-sensitive
// Greedy left-to-right alternation means "S01E02" is consumed whole, so the
// trailing "E02" alternative cannot match inside it.
const EPISODE_RE = /[Ss][0-9]{1,2}.?[Ee][0-9]{1,3}|[Ss][0-9]{2}|[Pp]art\.?[0-9]{1,3}|[0-9]{1,2}x[0-9]{1,3}|E[0-9]{1,3}/g;

// PHP uses byte offsets (substr). To stay byte-faithful, work on the UTF-8 bytes.
// Offsets always come from match boundaries, so slices land on character
// boundaries in practice.
const utf8 = (s) => Buffer.from(s, 'utf8');
const sliceBytes = (s, end) => utf8(s).slice(0, end).toString('utf8');
||||
|
||||
/**
 * Collect every match of `re` (a /g regex) in `str`, each with the UTF-8 byte
 * offset of the match — mirroring PHP's PREG_OFFSET_CAPTURE byte semantics.
 */
function findAll(re, str) {
  const matches = [];
  re.lastIndex = 0; // /g regexes are stateful; always scan from the start
  for (let m = re.exec(str); m !== null; m = re.exec(str)) {
    matches.push({
      value: m[0],
      byteOffset: Buffer.byteLength(str.slice(0, m.index), 'utf8'),
    });
    if (m.index === re.lastIndex) re.lastIndex++; // avoid stalling on empty matches
  }
  return matches;
}
|
||||
|
||||
/** Title text with every non-Latin, non-digit character removed. */
function stripFilter(s) {
  const stripped = s.replace(FILTER_RE, '');
  return stripped;
}
|
||||
|
||||
/**
 * Parse a raw release-name query into { type, titlein, yearin, episodein }.
 * - yearin: last (19|20)\d{2} match, skipping the video resolutions 1080/2160.
 * - episodein: last episode marker (SxxEyy, Sxx, partN, NxM, Exxx).
 * - titlein: the raw query bytes BEFORE whichever marker delimits the title.
 * Returns null for an empty query, or an { error, ... } object when neither a
 * year nor an episode marker is present.
 */
export function parseQuery(query) {
  if (!query) return null;

  let yearin = 0;
  let yearpos = -1;
  let titlein = '';

  // Last plausible year wins (hence the reverse), so "2001 ... 2019" -> 2019.
  const years = findAll(YEAR_RE, query).reverse();
  for (const m of years) {
    if (m.value === '1080' || m.value === '2160') continue;
    yearin = parseInt(m.value, 10);
    yearpos = m.byteOffset;
    titlein = sliceBytes(query, yearpos);
    break;
  }

  // Last episode marker wins as well, mirroring the year handling.
  let episodein = '';
  let episodepos = -1;
  const eps = findAll(EPISODE_RE, query).reverse();
  for (const m of eps) {
    episodein = m.value;
    episodepos = m.byteOffset;
    break;
  }

  // When both markers exist, the EARLIER one delimits the title — unless that
  // would leave an empty (filtered) title, in which case fall back to the
  // other marker and drop whichever marker produced nothing.
  if (episodein) {
    if (!yearin) {
      titlein = sliceBytes(query, episodepos);
    } else if (episodepos > yearpos) {
      titlein = sliceBytes(query, yearpos);
      if (!stripFilter(titlein)) {
        titlein = sliceBytes(query, episodepos);
        yearin = 0;
      }
    } else {
      titlein = sliceBytes(query, episodepos);
      if (!stripFilter(titlein)) {
        titlein = sliceBytes(query, yearpos);
        episodein = '';
      }
    }
  }

  if (!yearin && !episodein) {
    return { error: 'Year or episode not found in query', titlein, yearin, episodein };
  }

  // An episode marker implies a TV query; otherwise assume a movie.
  const type = episodein ? 'tv' : 'movie';
  return { type, titlein, yearin, episodein };
}
|
||||
80
lib/searchEngine.js
Normal file
80
lib/searchEngine.js
Normal file
@@ -0,0 +1,80 @@
|
||||
// Spawns N worker threads (one per searchTYPEi.json chunk) and orchestrates
|
||||
// queries across them. Workers are kept alive between requests so the chunks
|
||||
// stay loaded in memory (replaces the per-request `php searchmultithreads.php`
|
||||
// fork from the PHP version).
|
||||
|
||||
import { Worker } from 'node:worker_threads';
|
||||
import { join } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import { dirname } from 'node:path';
|
||||
import { existsSync } from 'node:fs';
|
||||
import { TMDBINTEGRAL_DIR, NB_WORKERS } from '../config.js';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
// Worker script living next to this module.
const WORKER_PATH = join(__dirname, 'searchWorker.js');

// One lazily-created WorkerPool per search type ('movie' / 'tv').
const pools = new Map();
|
||||
|
||||
class WorkerPool {
  /**
   * Spawn one worker per existing searchTYPEi.json chunk for `type`
   * ('movie' | 'tv'). Workers stay alive between requests so each chunk is
   * parsed only once.
   */
  constructor(type) {
    this.type = type;
    this.workers = [];
    this.nextId = 1;          // monotonically increasing request id
    this.pending = new Map(); // request id -> { results, remaining, resolve }
    for (let i = 0; i < NB_WORKERS; i++) {
      const chunkPath = join(TMDBINTEGRAL_DIR, `search${type}${i}.json`);
      // A missing chunk (cron not run yet) just shrinks the pool.
      if (!existsSync(chunkPath)) {
        console.warn(`Missing search chunk: ${chunkPath}`);
        continue;
      }
      const w = new Worker(WORKER_PATH, { workerData: { chunkPath } });
      w.on('message', (msg) => this._onMessage(msg));
      w.on('error', (err) => console.error(`Worker ${type}/${i} error:`, err));
      // unref() keeps idle workers from blocking process exit.
      w.unref();
      this.workers.push(w);
    }
  }

  // One reply per worker per request; both 'result' and 'error' replies count
  // toward completion ('error' replies contribute no results).
  // NOTE(review): two hang risks to confirm — (1) if a worker dies (the
  // 'error' EVENT, so no reply message), in-flight promises never settle;
  // (2) if the pool ended up with zero workers, search() resolves nothing.
  _onMessage(msg) {
    const entry = this.pending.get(msg.id);
    if (!entry) return;
    if (msg.type === 'result') entry.results.push(...msg.results);
    entry.remaining--;
    if (entry.remaining === 0) {
      this.pending.delete(msg.id);
      entry.resolve(entry.results);
    }
  }

  /** Broadcast one query to every worker; resolves with the merged results. */
  search(payload) {
    return new Promise((resolve) => {
      const id = this.nextId++;
      this.pending.set(id, { results: [], remaining: this.workers.length, resolve });
      for (const w of this.workers) {
        w.postMessage({ type: 'search', id, payload });
      }
    });
  }
}
|
||||
|
||||
/** Lazily build (then reuse) the worker pool for one search type. */
export function getPool(type) {
  let pool = pools.get(type);
  if (!pool) {
    pool = new WorkerPool(type);
    pools.set(type, pool);
  }
  return pool;
}
|
||||
|
||||
/**
 * Fan a query out to every worker of `type` and return the merged candidate
 * list in ranking order.
 */
export async function search(type, filteredTitleIn, yearIn) {
  const matches = await getPool(type).search({ filteredTitleIn, yearIn });

  // Sort by delta ASC, then -popularity ASC (i.e. popularity DESC),
  // then deltaYear ASC, then tmdb ASC. Equivalent to PHP's
  // array_multisort($deltas, $pops, $deltayears, $tmdbs, ...).
  const byRelevance = (a, b) => {
    if (a.delta !== b.delta) return a.delta - b.delta;
    if (a.pop !== b.pop) return a.pop - b.pop;
    if (a.deltaYear !== b.deltaYear) return a.deltaYear - b.deltaYear;
    return a.tmdb - b.tmdb;
  };
  matches.sort(byRelevance);
  return matches;
}
|
||||
109
lib/searchWorker.js
Normal file
109
lib/searchWorker.js
Normal file
@@ -0,0 +1,109 @@
|
||||
// Worker thread used by lib/searchEngine.js. Equivalent to one fork in
|
||||
// searchmultithreads.php: load one search chunk and emit candidate matches.
|
||||
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { parentPort, workerData } from 'node:worker_threads';
|
||||
import { mbLevenshtein, mbStrlen } from './mbLevenshtein.js';
|
||||
import {
|
||||
TITLE_TOLERANCE, LEV_INS, LEV_REP, LEV_DEL, LEV_SCALE, YEAR_TOLERANCE,
|
||||
} from '../config.js';
|
||||
|
||||
// Column indexes of one row in the searchTYPEi.json chunks (array-of-arrays
// layout keeps the chunk files compact).
const TMDB = 0;
const TITLE = 1;
const ENGLISHTITLE = 2;
const ORIGINALTITLE = 3;
const FILTEREDTITLE = 4;
const FILTEREDENGLISHTITLE = 5;
const FILTEREDORIGINALTITLE = 6;
const YEAR = 7;
const POPULARITY = 8;

// Chunk file assigned to this worker (set once from workerData) and its
// lazily parsed contents.
let chunkPath;
let chunk = null;

if (workerData?.chunkPath) {
  chunkPath = workerData.chunkPath;
}
|
||||
|
||||
/** Parse the chunk file on first use and keep it resident for later queries. */
function loadChunk() {
  if (!chunk) {
    chunk = JSON.parse(readFileSync(chunkPath, 'utf8'));
  }
  return chunk;
}
|
||||
|
||||
/**
 * Percentage similarity between the incoming filtered title and a candidate
 * title: 100 = identical, lower = more (weighted) edits; empty target -> 0.
 * `ftiLen` is the pre-computed code-point length of `filteredIn`.
 */
function score(filteredIn, target, ftiLen) {
  if (!target) return 0;
  const targetLen = mbStrlen(target);
  const distance = mbLevenshtein(filteredIn, target, LEV_INS, LEV_REP, LEV_DEL);
  const span = Math.max(ftiLen, targetLen) * LEV_SCALE;
  return 100 - (distance / span) * 100;
}
|
||||
|
||||
/**
 * Scan this worker's chunk for rows matching the (already filtered and
 * lowercased) incoming title, optionally constrained to years within
 * YEAR_TOLERANCE of `yearIn`. Returns scored candidates; delta 0 = perfect.
 */
function search({ filteredTitleIn, yearIn }) {
  const db = loadChunk();
  const out = [];
  const ftiLen = mbStrlen(filteredTitleIn);

  for (const row of db) {
    // Year gate: keep the row only if one of its years is close enough.
    let deltaYear = 0;
    if (yearIn) {
      let ok = false;
      for (const y of row[YEAR]) {
        const dy = Math.abs(yearIn - y);
        if (dy <= YEAR_TOLERANCE) { ok = true; deltaYear = dy; break; }
      }
      if (!ok) continue;
    }

    const fT = row[FILTEREDTITLE];
    const fE = row[FILTEREDENGLISHTITLE];
    const fO = row[FILTEREDORIGINALTITLE];

    // Score the original title first, then reuse its score whenever the
    // localized/English variants are the exact same string (saves
    // Levenshtein runs).
    const pO = score(filteredTitleIn, fO, ftiLen);

    let pT;
    if (fT) {
      pT = (fT === fO) ? pO : score(filteredTitleIn, fT, ftiLen);
    } else pT = 0;

    let pE;
    if (fE) {
      if (fE === fO) pE = pO;
      else if (fE === fT) pE = pT;
      else pE = score(filteredTitleIn, fE, ftiLen);
    } else pE = 0;

    // delta = distance of the BEST-matching title variant (0 = identical);
    // rows worse than TITLE_TOLERANCE are discarded.
    const dT = 100 - pT;
    const dE = 100 - pE;
    const dO = 100 - pO;
    const delta = Math.min(dT, dE, dO);
    if (delta > TITLE_TOLERANCE) continue;

    out.push({
      delta,
      pop: -row[POPULARITY], // negated so an ascending sort ranks popular first
      deltaYear,
      tmdb: row[TMDB],
      title: row[TITLE],
      englishTitle: row[ENGLISHTITLE],
      originalTitle: row[ORIGINALTITLE],
      filteredTitle: fT,
      filteredEnglishTitle: fE,
      filteredOriginalTitle: fO,
      year: row[YEAR][0],
    });
  }
  return out;
}
|
||||
|
||||
// Message loop: every 'search' request gets exactly one reply ('result' or
// 'error') carrying the same id, so the pool can count outstanding workers.
if (parentPort) {
  parentPort.on('message', (msg) => {
    if (msg?.type === 'search') {
      try {
        const results = search(msg.payload);
        parentPort.postMessage({ type: 'result', id: msg.id, results });
      } catch (err) {
        parentPort.postMessage({ type: 'error', id: msg.id, error: err.message });
      }
    }
  });
}
|
||||
29
lib/titleFilter.js
Normal file
29
lib/titleFilter.js
Normal file
@@ -0,0 +1,29 @@
|
||||
// Replicates the PHP search.php title normalization:
|
||||
// - replace ligatures and superscripts
|
||||
// - strip everything that is not Latin or 0-9
|
||||
// - lowercase
|
||||
|
||||
// Character substitutions applied before stripping: ligatures and
// superscripts get an ASCII spelling so they survive FILTER_RE below.
// NOTE(review): the 'é' -> 'é' and 'è' -> 'è' pairs are identity mappings as
// written (possibly an encoding casualty of the original table) — confirm
// whether a plain 'e' was intended.
const TITLE_SEARCHES = ['œ', 'Œ', 'æ', 'Æ', 'é', 'è', '²', '³', '⁴'];
const TITLE_REPLACES = ['oe', 'Oe', 'ae', 'Ae', 'é', 'è', '2', '3', '4'];

// Anything that is neither a Latin-script character nor an ASCII digit.
const FILTER_RE = /[^\p{Script=Latin}0-9]+/gu;

/** Apply the substitution table above; falsy input yields ''. */
export function translit(s) {
  if (!s) return '';
  let result = s;
  TITLE_SEARCHES.forEach((needle, i) => {
    result = result.replaceAll(needle, TITLE_REPLACES[i]);
  });
  return result;
}

/** Transliterate, then drop every non-Latin/non-digit character. */
export function filterTitle(s) {
  if (!s) return '';
  return translit(s).replace(FILTER_RE, '');
}

/** filterTitle() lowered with the host's default locale. */
export function filterAndLower(s) {
  return filterTitle(s).toLocaleLowerCase();
}
|
||||
|
||||
export { FILTER_RE };
|
||||
1328
package-lock.json
generated
Normal file
1328
package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
27
package.json
Normal file
27
package.json
Normal file
@@ -0,0 +1,27 @@
|
||||
{
|
||||
"name": "proxytmdb",
|
||||
"version": "1.0.0",
|
||||
"description": "Proxy/cache local de l'API TMDB avec notes IMDb et matching titre/annee/episode",
|
||||
"type": "module",
|
||||
"private": true,
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
},
|
||||
"scripts": {
|
||||
"start": "node --env-file-if-exists=.env server.js",
|
||||
"cron": "node --env-file-if-exists=.env cron/runAll.js",
|
||||
"cron:imdb": "node --env-file-if-exists=.env cron/imdbRatings.js",
|
||||
"cron:tmdb": "node --env-file-if-exists=.env cron/tmdbSync.js",
|
||||
"cron:justwatch": "node --env-file-if-exists=.env cron/justwatchSync.js",
|
||||
"cron:tmdb2imdb": "node --env-file-if-exists=.env cron/tmdb2imdb.js",
|
||||
"cron:search": "node --env-file-if-exists=.env cron/buildSearch.js",
|
||||
"cron:ambiguity": "node --env-file-if-exists=.env cron/ambiguity.js",
|
||||
"test": "node --env-file-if-exists=.env --test test/*.test.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@fastify/formbody": "^8.0.1",
|
||||
"@fastify/secure-session": "^8.1.0",
|
||||
"@fastify/static": "^8.0.4",
|
||||
"fastify": "^5.2.0"
|
||||
}
|
||||
}
|
||||
156
routes/api.js
Normal file
156
routes/api.js
Normal file
@@ -0,0 +1,156 @@
|
||||
// JSON API — replaces api.php.
|
||||
// GET /api?t=movie&q=<id>
|
||||
// GET /api?t=tv&q=<id>
|
||||
// GET /api?t=search&q=<query>
|
||||
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import { entryPath } from '../lib/paths.js';
|
||||
import { getRatings, lookupRating } from '../lib/imdbRatings.js';
|
||||
import { parseQuery } from '../lib/queryParser.js';
|
||||
import { filterAndLower } from '../lib/titleFilter.js';
|
||||
import { search as runSearch } from '../lib/searchEngine.js';
|
||||
import { formatCurrency, formatRuntime, pad2 } from '../lib/format.js';
|
||||
import {
|
||||
POSTER_URL, MOVIE_URL, TV_URL, MOVIE_API_URL, TV_API_URL, IMDB_URL,
|
||||
} from '../config.js';
|
||||
|
||||
/** Read the locally cached TMDb detail JSON; null when absent or unparsable. */
async function getDetail(type, id) {
  try {
    const raw = await readFile(entryPath(type, id), 'utf8');
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
|
||||
|
||||
/**
 * Return one cached TMDb object, enriched in-place with raw IMDb rating/votes
 * when the id has a known rating; { error } when the entry is not cached.
 */
async function handleEntry(type, id) {
  const detail = await getDetail(type, id);
  if (!detail) return { error: 'Not found' };

  // Movie objects carry imdb_id at the top level; TV objects under external_ids.
  const imdbId = type === 'movie' ? detail.imdb_id : detail?.external_ids?.imdb_id;
  if (imdbId) {
    const ratings = await getRatings();
    const row = ratings.get(imdbId);
    if (row) {
      detail.note_imdb = row[0].trim();
      detail.vote_imdb = row[1].trim();
    }
  }
  return detail;
}
|
||||
|
||||
/**
 * Run the fuzzy title/year/episode search and build the API result list.
 * Property insertion order on each item deliberately mirrors the PHP version
 * (JSON key order parity for existing consumers) — do not reorder.
 * Returns null for an empty query, { error } on parse/match failure,
 * otherwise { results: [...] }.
 */
async function handleSearch(query) {
  const parsed = parseQuery(query);
  if (!parsed) return null;
  if (parsed.error) return { error: parsed.error };

  const { type, titlein, yearin, episodein } = parsed;
  const filteredTitleIn = filterAndLower(titlein);

  const matches = await runSearch(type, filteredTitleIn, yearin);
  if (!matches.length) {
    return { error: 'Not found in localized and original titles database' };
  }

  const ratings = await getRatings();
  const movietvurl = type === 'movie' ? MOVIE_URL : TV_URL;
  const movietvurlapi = type === 'movie' ? MOVIE_API_URL : TV_API_URL;

  const results = [];
  for (const m of matches) {
    // Matches without a cached detail file are dropped from the output.
    const detail = await getDetail(type, m.tmdb);
    if (!detail) continue;

    const item = {};

    // Only variants that matched (non-empty filtered form) are emitted.
    if (m.filteredTitle) {
      item.title = m.title;
      item.years = m.year;
    }
    if (m.filteredEnglishTitle) item.english_title = m.englishTitle;
    if (m.filteredOriginalTitle) item.original_title = m.originalTitle;

    // NOTE(review): when poster_path is null this yields ".../null", and
    // poster_path already starts with '/' so the URL gets a double slash —
    // confirm whether the PHP version behaved the same before changing.
    item.poster = `${POSTER_URL}/${detail.poster_path}`;
    item.poster_path = detail.poster_path;

    // Genre/country names joined with trailing spaces, as in the PHP output.
    let genres = '';
    if (Array.isArray(detail.genres)) {
      for (const g of detail.genres) genres += `${g.name} `;
    }
    if (genres) item.genres = genres;

    let countries = '';
    if (Array.isArray(detail.production_countries)) {
      for (const c of detail.production_countries) countries += `${c.iso_3166_1} `;
    }
    if (countries) item.countries = countries;

    if (detail.runtime) item.runtime = formatRuntime(detail.runtime);

    // Episode queries are TV: imdb id then lives under external_ids.
    const imdb = !episodein ? detail.imdb_id : detail?.external_ids?.imdb_id;
    if (imdb) {
      const { rating, votes } = lookupRating(ratings, imdb);
      item.imdb_id = imdb;
      item.imdb_url = `${IMDB_URL}/${imdb}`;
      item.note_imdb = rating;
      item.vote_imdb = votes;
    }

    item.tmdb_id = m.tmdb;
    item.tmdb_url = `${movietvurl}/${m.tmdb}`;
    item.api_url = `${movietvurlapi}${m.tmdb}`;
    item.note_tmdb = Math.round((parseFloat(detail.vote_average) || 0) * 10) / 10;
    item.vote_tmdb = parseInt(detail.vote_count, 10) || 0;

    if (detail.budget || detail.revenue) {
      item.budget = formatCurrency(detail.budget);
      item.revenue = formatCurrency(detail.revenue);
    }

    if (episodein && Array.isArray(detail.seasons)) {
      // PHP loops and overwrites $data['results'][$j]['season'] for each season,
      // so only the LAST season is kept. Reproduce that behaviour.
      let lastSeason;
      for (const s of detail.seasons) {
        const sn = pad2(s.season_number || 0);
        const ec = pad2(s.episode_count || 0);
        lastSeason = `S${sn}E${ec}`;
      }
      if (lastSeason) item.season = lastSeason;
    }

    if (detail.tagline) item.tagline = detail.tagline;
    if (detail.overview) item.overview = detail.overview;

    results.push(item);
  }

  return { results };
}
|
||||
|
||||
/**
 * Shared handler mirroring api.php: t=movie|tv fetches one cached entry by
 * numeric id, t=search runs the fuzzy matcher; anything else (or a missing q)
 * yields an empty JSON object / empty body as the PHP version did.
 */
async function handle(req, reply) {
  reply.header('Access-Control-Allow-Origin', '*');
  reply.header('Content-Type', 'application/json; charset=utf-8');

  const { t, q } = req.query ?? {};

  if (t === 'movie' || t === 'tv') {
    if (!q) return {};
    const id = parseInt(q, 10);
    if (!Number.isInteger(id)) return {};
    return handleEntry(t, id);
  }

  if (t === 'search') {
    if (!q) return reply.send('');
    return (await handleSearch(q)) ?? {};
  }

  return reply.send('');
}
|
||||
|
||||
/** Register the JSON API under both the new path and the legacy PHP one. */
export default async function apiRoutes(fastify) {
  for (const path of ['/api', '/api.php']) {
    fastify.get(path, handle);
  }
}
|
||||
162
routes/index.js
Normal file
162
routes/index.js
Normal file
@@ -0,0 +1,162 @@
|
||||
// Login + protected directory listing — replaces index.php.
|
||||
|
||||
import { readdir, stat } from 'node:fs/promises';
import { join } from 'node:path';
import { createHash, timingSafeEqual as cryptoTimingSafeEqual } from 'node:crypto';
import { ROOT, TITLE, PASSWORD } from '../config.js';
|
||||
|
||||
// Entries never shown in the directory listing (application sources, tooling, VCS data).
const HIDDEN = new Set(['index.php', '.htaccess', 'node_modules', 'package.json', 'package-lock.json', 'config.js', 'server.js', 'lib', 'routes', 'cron', 'test', '.git']);
|
||||
|
||||
/**
 * HTML-escape a value for interpolation into the page templates;
 * null/undefined render as ''.
 *
 * Fix: the replacement strings had been collapsed to the literal characters
 * themselves (every .replace() was an identity, so esc() was a no-op and the
 * pages were injectable). Restore the proper HTML entities, matching PHP's
 * htmlspecialchars(ENT_QUOTES).
 */
function esc(s) {
  if (s == null) return '';
  return String(s)
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#039;');
}
|
||||
|
||||
/**
 * Human-readable size for the listing, e.g. 1536 -> "1.5 KB".
 *
 * Fix: the raw quotient was interpolated unrounded, producing output like
 * "1.1773748397827148 MB"; round to at most two decimals. Zero/negative byte
 * counts render as "0 B" as before.
 */
function formatBytes(bytes) {
  const units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'];
  let i = bytes > 0 ? Math.floor(Math.log(bytes) / Math.log(1024)) : 0;
  if (i >= units.length) i = units.length - 1;
  const value = bytes > 0 ? bytes / 1024 ** i : 0;
  return `${Math.round(value * 100) / 100} ${units[i]}`;
}
|
||||
|
||||
/** Two-digit zero padding for date components. */
function pad(n) {
  if (n < 10) return `0${n}`;
  return String(n);
}

/** Local-time "YYYY-MM-DD HH:MM" for an epoch-milliseconds timestamp. */
function fmtDate(ms) {
  const d = new Date(ms);
  const datePart = `${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())}`;
  const timePart = `${pad(d.getHours())}:${pad(d.getMinutes())}`;
  return `${datePart} ${timePart}`;
}
|
||||
|
||||
/**
 * Render the standalone login form. `error` (plain text, escaped here) is
 * shown beneath the submit button; the form POSTs back to the same URL.
 * The template is a runtime string — keep its bytes as-is.
 */
function loginPage(error = '') {
  return `<!doctype html>
<html lang="fr">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width,initial-scale=1">
<title>${esc(TITLE)}</title>
<style>
*, *::before, *::after { box-sizing: border-box; }
body { font-family: system-ui, -apple-system, Segoe UI, Roboto, sans-serif; background:#0b1220; color:#e5e7eb; display:flex; align-items:center; justify-content:center; min-height:100vh; margin:0; }
.card { background:#111827; padding:24px; border-radius:12px; width:100%; max-width:360px; box-shadow:0 10px 30px rgba(0,0,0,.35); }
h1 { font-size:18px; margin:0 0 16px; color:#f9fafb; }
label { display:block; font-size:14px; margin-bottom:8px; color:#cbd5e1; }
input[type=password]{ width:100%; padding:10px 12px; border:1px solid #334155; border-radius:8px; background:#0f172a; color:#e5e7eb; outline:none; }
input[type=password]:focus{ border-color:#60a5fa; }
.btn { margin-top:12px; width:100%; padding:10px 12px; border:0; border-radius:8px; background:#2563eb; color:white; font-weight:600; cursor:pointer; }
.btn:hover { filter:brightness(1.05); }
.err { color:#fca5a5; font-size:14px; margin-top:10px; min-height:18px; }
</style>
</head>
<body>
<form class="card" method="post" autocomplete="off">
<h1>${esc(TITLE)}</h1>
<label for="pw">Mot de passe</label>
<input id="pw" name="password" type="password" required autofocus>
<button class="btn" type="submit">Entrer</button>
<div class="err">${esc(error)}</div>
</form>
</body>
</html>`;
}
|
||||
|
||||
/**
 * Render the protected directory listing of ROOT: visible entries only
 * (HIDDEN set and dotfiles excluded), directories first, then names
 * case-insensitively. Stat failures silently drop the entry.
 * The HTML templates are runtime strings — keep their bytes as-is.
 */
async function listingPage() {
  const names = await readdir(ROOT);
  const entries = [];
  for (const name of names) {
    if (HIDDEN.has(name)) continue;
    if (name.startsWith('.')) continue;
    let st;
    try { st = await stat(join(ROOT, name)); } catch { continue; }
    entries.push({
      name,
      isDir: st.isDirectory(),
      size: st.isFile() ? st.size : 0, // directories display an em dash instead
      mtime: st.mtimeMs,
    });
  }
  entries.sort((a, b) => {
    if (a.isDir !== b.isDir) return a.isDir ? -1 : 1;
    return a.name.toLowerCase().localeCompare(b.name.toLowerCase());
  });

  // One <tr> per entry; directory links get a trailing slash.
  const rows = entries.map((e) => {
    const href = encodeURIComponent(e.name);
    return `<tr>
<td class="name"><a href="${href}${e.isDir ? '/' : ''}">${esc(e.name)}</a>${e.isDir ? '<span class="badge">dossier</span>' : ''}</td>
<td>${e.isDir ? '—' : esc(formatBytes(e.size))}</td>
<td>${esc(fmtDate(e.mtime))}</td>
</tr>`;
  }).join('');

  return `<!doctype html>
<html lang="fr">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width,initial-scale=1">
<title>${esc(TITLE)}</title>
<style>
body{ font-family: ui-sans-serif,system-ui,-apple-system,Segoe UI,Roboto,sans-serif; background:#0b1220; color:#e5e7eb; margin:0; }
header{ display:flex; gap:12px; align-items:center; justify-content:space-between; padding:16px 20px; background:#0f172a; position:sticky; top:0; }
h1{ font-size:18px; margin:0; color:#f9fafb; }
a.logout{ color:#93c5fd; text-decoration:none; font-size:14px; }
a.logout:hover{ text-decoration:underline; }
.wrap{ max-width:1100px; margin:20px auto; padding:0 16px; }
table{ width:100%; border-collapse:collapse; background:#111827; border-radius:12px; overflow:hidden; }
th,td{ padding:12px 14px; border-bottom:1px solid #1f2937; text-align:left; font-size:14px; }
th{ background:#0f172a; color:#cbd5e1; font-weight:600; }
tr:hover td{ background:#0b1324; }
.name a{ color:#93c5fd; text-decoration:none; }
.name a:hover{ text-decoration:underline; }
.badge{ font-size:11px; padding:2px 8px; border-radius:999px; background:#1f2937; color:#cbd5e1; margin-left:8px; }
footer{ color:#94a3b8; font-size:12px; text-align:center; padding:16px; }
</style>
</head>
<body>
<header>
<h1>${esc(TITLE)}</h1>
<a class="logout" href="?logout=1">Se déconnecter</a>
</header>
<div class="wrap">
<table>
<thead><tr><th>Nom</th><th>Taille</th><th>Modifié</th></tr></thead>
<tbody>${rows}</tbody>
</table>
<footer>Les liens ci-dessus pointent directement vers les fichiers/dossiers non protégés.</footer>
</div>
</body>
</html>`;
}
|
||||
|
||||
/**
 * Password-gated index routes.
 *
 * GET  /  — renders the login form when unauthenticated, the protected
 *           directory listing otherwise; "?logout=1" destroys the session.
 * POST /  — validates the submitted password and opens a session.
 */
export default async function indexRoutes(fastify) {
  fastify.get('/', async (req, reply) => {
    reply.header('Content-Type', 'text/html; charset=utf-8');

    // Any presence of the "logout" query parameter (even empty) logs out.
    const wantsLogout = req.query?.logout != null;
    if (wantsLogout) {
      req.session.delete();
      return reply.redirect('/');
    }

    const authed = req.session.get('auth_ok');
    return authed ? listingPage() : loginPage();
  });

  fastify.post('/', async (req, reply) => {
    reply.header('Content-Type', 'text/html; charset=utf-8');

    const submitted = req.body?.password ?? '';
    if (!timingSafeEqual(submitted, PASSWORD)) {
      return loginPage('Mot de passe incorrect.');
    }
    req.session.set('auth_ok', true);
    return reply.redirect('/');
  });
}
|
||||
|
||||
/**
 * Constant-time string comparison for the login password.
 *
 * Unlike the naive version, this never short-circuits: a length mismatch
 * only flips a bit in the accumulator instead of returning early, so the
 * work performed depends solely on the length of the attacker-supplied
 * input `a` and response timing reveals neither how many leading characters
 * matched nor the secret's length.
 *
 * @param {string} a - submitted value (attacker-controlled)
 * @param {string} b - expected secret
 * @returns {boolean} true when both are strings with identical contents
 */
function timingSafeEqual(a, b) {
  if (typeof a !== 'string' || typeof b !== 'string') return false;
  // Start "different" when lengths differ, but keep comparing anyway.
  let diff = a.length === b.length ? 0 : 1;
  for (let i = 0; i < a.length; i++) {
    // `i % b.length` wraps on mismatched lengths; if `b` is empty the NaN
    // index yields NaN, which coerces to 0 in the XOR — harmless, since
    // `diff` is already 1 in that case.
    diff |= a.charCodeAt(i) ^ b.charCodeAt(i % b.length);
  }
  return diff === 0;
}
|
||||
144
routes/search.js
Normal file
144
routes/search.js
Normal file
@@ -0,0 +1,144 @@
|
||||
// HTML search view — replaces search.php (the public, human-facing version).
|
||||
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import { entryPath } from '../lib/paths.js';
|
||||
import { getRatings, lookupRating } from '../lib/imdbRatings.js';
|
||||
import { parseQuery } from '../lib/queryParser.js';
|
||||
import { filterAndLower } from '../lib/titleFilter.js';
|
||||
import { search as runSearch } from '../lib/searchEngine.js';
|
||||
import { formatCurrency, formatRuntime, pad2 } from '../lib/format.js';
|
||||
import {
|
||||
POSTER_URL, NO_POSTER_URL, MOVIE_URL, TV_URL, IMDB_URL,
|
||||
} from '../config.js';
|
||||
|
||||
/**
 * HTML-escape a value for safe interpolation into markup.
 *
 * The previous replacement strings were identity mappings (entities lost),
 * so the function escaped nothing and every esc() call site was open to
 * HTML injection. Restored the standard five-entity encoding; `&` must be
 * replaced first so already-produced entities are not double-escaped.
 *
 * @param {*} s - any value; null/undefined become ''
 * @returns {string} entity-encoded text
 */
function esc(s) {
  if (s == null) return '';
  return String(s)
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;');
}
|
||||
|
||||
// Wrap a message in a centred <div>, HTML-escaping it first.
function center(msg) {
  const safe = esc(msg);
  return `<div style="text-align: center;">${safe}</div>`;
}
|
||||
|
||||
/**
 * Load the cached TMDb detail JSON for one entry.
 *
 * @param {string} type - 'movie' or 'tv'
 * @param {number|string} id - TMDb id
 * @returns {Promise<object|null>} parsed detail, or null when the file is
 *   missing or unparseable (best-effort: callers just skip the entry).
 */
async function getDetail(type, id) {
  const file = entryPath(type, id);
  try {
    const raw = await readFile(file, 'utf8');
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
|
||||
|
||||
/**
 * Render the HTML search-result cards for a raw release-name query.
 *
 * Pipeline: parse the query (type/title/year/episode) → normalize the title
 * → fuzzy-search the title database → for each match, load the cached TMDb
 * detail JSON and emit one poster + info card.
 *
 * @param {string} query - raw user query (e.g. "Mr.Robot.S01E02.1080p")
 * @returns {Promise<string>} HTML fragment; '' for an empty/unparseable
 *   query, or a centred message on parse error / no match.
 */
async function render(query) {
  if (!query) return '';

  const parsed = parseQuery(query);
  if (!parsed) return '';
  if (parsed.error) return center(parsed.error);

  const { type, titlein, yearin, episodein } = parsed;
  // Lower-cased, Latin/digit-only form used as the search key.
  const filteredTitleIn = filterAndLower(titlein);

  const matches = await runSearch(type, filteredTitleIn, yearin);
  if (!matches.length) {
    return center('Not found in localized and original titles database');
  }

  const ratings = await getRatings();
  // Base URL for the TMDb link depends on the media type.
  const movietvurl = type === 'movie' ? MOVIE_URL : TV_URL;

  let html = '<div style="text-align: center; font-size: 14px; font-family: sans-serif;">';

  // NOTE(review): details are loaded sequentially; fine for the small match
  // counts the engine returns, but could be parallelized if needed.
  for (const m of matches) {
    const detail = await getDetail(type, m.tmdb);
    if (!detail) continue; // cache miss: skip the card entirely

    const poster = detail.poster_path;
    const src = poster ? `${POSTER_URL}/${poster}` : NO_POSTER_URL;

    // Space-separated genre names, trailing space intentional (PHP parity).
    let genres = '';
    if (Array.isArray(detail.genres)) {
      for (const g of detail.genres) genres += `${g.name} `;
    }

    // Space-separated ISO 3166-1 country codes.
    let countries = '';
    if (Array.isArray(detail.production_countries)) {
      for (const c of detail.production_countries) countries += `${c.iso_3166_1} `;
    }

    const runtime = detail.runtime;
    // Movies carry imdb_id at top level; TV entries nest it in external_ids.
    const imdb = !episodein ? detail.imdb_id : detail?.external_ids?.imdb_id;
    const { rating: ivote, votes: ivoteCount } = lookupRating(ratings, imdb);
    // TMDb vote average rounded to one decimal place.
    const tvote = Math.round((parseFloat(detail.vote_average) || 0) * 10) / 10;
    const tvoteCount = parseInt(detail.vote_count, 10) || 0;
    const budget = detail.budget;
    const revenue = detail.revenue;

    // For TV queries only: "SxxEyy" final-episode summary per season.
    let seasons;
    if (episodein && Array.isArray(detail.seasons)) {
      seasons = detail.seasons.map((s) => `S${pad2(s.season_number || 0)}E${pad2(s.episode_count || 0)}`);
    }

    html += '<span style="display: inline-block; margin: 10px; vertical-align: top;">';
    html += `<img src="${esc(src)}" width="200" height="300"/>`;
    html += '<div style="display: inline-block; white-space: normal; overflow: auto; vertical-align: top; width: 400px; height: 300px; background-color: #484848; color: #ffffff;">';

    // Titles paragraph. NOTE(review): this <p> is never explicitly closed;
    // the next <p> implicitly closes it in browsers (kept for PHP parity).
    html += '<p style="margin: 10px;">';
    if (m.filteredTitle) html += `FR <b>${esc(m.title)}</b> ${esc(m.year)}<br />`;
    if (m.filteredEnglishTitle) html += `EN <b>${esc(m.englishTitle)}</b> ${esc(m.year)}<br />`;
    if (m.filteredOriginalTitle) html += `VO <b>${esc(m.originalTitle)}</b> ${esc(m.year)}<br />`;

    // Genres / countries / runtime line.
    html += '<p style="margin: 10px;">';
    if (genres) html += esc(genres);
    if (countries) html += `<b>${esc(countries)}</b>`;
    if (runtime) html += formatRuntime(runtime);
    html += '</p>';

    // Ratings line: optional IMDb badge, then the TMDb badge.
    html += '<p style="margin: 10px;">';
    if (imdb) {
      html += `<a href="${esc(IMDB_URL)}/${esc(imdb)}" style="background-color: #f3ce13; color: #000000; text-decoration: none;" onclick="this.target='_blank';">`;
      html += ` IMDb </a> ${esc(imdb)} <b>${esc(ivote)}</b> ${esc(ivoteCount)} `;
    }
    html += `<a href="${esc(movietvurl)}/${esc(m.tmdb)}" style="background-color: #01b4e4; color: #000000; text-decoration: none;" onclick="this.target='_blank';">`;
    html += ` TMDb </a> ${esc(m.tmdb)} <b>${esc(tvote)}</b> ${esc(tvoteCount)}<br />`;

    if (budget || revenue) {
      html += `${esc(formatCurrency(budget))} ${esc(formatCurrency(revenue))}`;
    }
    html += '</p>';

    // Final-episode summary (TV only).
    html += '<p style="margin: 10px; text-align: left;">';
    if (seasons) {
      html += '<b>Episodes finaux</b> ';
      for (const s of seasons) html += `${esc(s)} `;
    }
    html += '</p>';

    html += '<p style="margin: 10px; text-align: left;">';
    html += `<b>${esc(detail.tagline || '')}</b>`;
    html += '</p>';

    html += '<p style="margin: 10px; text-align: justify;">';
    if (detail.overview) html += esc(detail.overview);
    html += '</p>';

    html += '</div></span>';
  }

  html += '</div>';
  return html;
}
|
||||
|
||||
// Shared GET handler: always reply as HTML, render the "query" parameter.
async function handle(req, reply) {
  reply.header('Content-Type', 'text/html; charset=utf-8');
  const query = req.query?.query || '';
  return render(query);
}
|
||||
|
||||
/**
 * Fastify plugin exposing the human-facing search view under both the new
 * path and the legacy PHP one.
 */
export default async function searchRoutes(fastify) {
  const paths = ['/search', '/search.php'];
  for (const p of paths) fastify.get(p, handle);
}
|
||||
60
server.js
Normal file
60
server.js
Normal file
@@ -0,0 +1,60 @@
|
||||
// Application entry point: builds the Fastify app (form parsing, encrypted
// cookie sessions, routes, static fallback), warms up the search engine,
// and starts listening.
import Fastify from 'fastify';
import formbody from '@fastify/formbody';
import secureSession from '@fastify/secure-session';
import fastifyStatic from '@fastify/static';
// NOTE(review): `join` appears unused in this file — confirm before removing.
import { join } from 'node:path';
import { ROOT, PORT, HOST, SESSION_SECRET } from './config.js';
import indexRoutes from './routes/index.js';
import apiRoutes from './routes/api.js';
import searchRoutes from './routes/search.js';
import { getRatings } from './lib/imdbRatings.js';
import { getPool } from './lib/searchEngine.js';

// trustProxy: honor X-Forwarded-* headers (app sits behind a reverse proxy).
const fastify = Fastify({ logger: true, trustProxy: true });

// Parse application/x-www-form-urlencoded bodies (the login form POST).
await fastify.register(formbody);

await fastify.register(secureSession, {
  // 32 bytes minimum. Use SESSION_SECRET env var in production.
  // NOTE(review): padEnd with '0' silently weakens a too-short secret —
  // consider failing fast instead if SESSION_SECRET.length < 32.
  secret: SESSION_SECRET.padEnd(32, '0').slice(0, 32),
  salt: 'proxytmdb-salt-1',
  cookieName: 'session',
  cookie: {
    path: '/',
    httpOnly: true,
    sameSite: 'lax',
    secure: 'auto', // secure flag only when the request came in over HTTPS
  },
});

// Route plugins are registered before the static catch-all so they win.
await fastify.register(indexRoutes);
await fastify.register(apiRoutes);
await fastify.register(searchRoutes);

// Serve any other path as a static file from the project root, so that the
// "directory listing" links keep working exactly as they did under Apache.
await fastify.register(fastifyStatic, {
  root: ROOT,
  serve: true,
  index: false,
  list: false,
  decorateReply: false,
  prefix: '/',
});

// Warm up: load IMDb ratings and spawn search workers eagerly.
// Fire-and-forget on purpose; failures are logged, never fatal.
fastify.ready().then(async () => {
  try {
    await getRatings();
    getPool('movie');
    getPool('tv');
    fastify.log.info('Warmup complete');
  } catch (err) {
    fastify.log.warn({ err }, 'Warmup failed');
  }
});

fastify.listen({ port: PORT, host: HOST }).catch((err) => {
  fastify.log.error(err);
  process.exit(1);
});
|
||||
92
test/helpers.test.js
Normal file
92
test/helpers.test.js
Normal file
@@ -0,0 +1,92 @@
|
||||
// Unit tests (node:test) for the pure helper libraries: Levenshtein
// distance, title normalization, release-name query parsing, and the
// bucketed cache path layout.
import { test } from 'node:test';
import assert from 'node:assert/strict';
import { mbLevenshtein, mbStrlen } from '../lib/mbLevenshtein.js';
import { filterTitle, filterAndLower, translit } from '../lib/titleFilter.js';
import { parseQuery } from '../lib/queryParser.js';
import { bucket, entryPath } from '../lib/paths.js';

test('mbLevenshtein basic', () => {
  assert.equal(mbLevenshtein('kitten', 'sitting'), 3);
  assert.equal(mbLevenshtein('', 'abc'), 3);
  assert.equal(mbLevenshtein('abc', ''), 3);
  assert.equal(mbLevenshtein('abc', 'abc'), 0);
});

test('mbLevenshtein utf-8', () => {
  // PHP mb_levenshtein remaps multibyte chars; JS works on code points.
  // The distance between identical accented strings must be 0.
  assert.equal(mbLevenshtein('café', 'café'), 0);
  assert.equal(mbLevenshtein('été', 'été'), 0);
  assert.equal(mbLevenshtein('é', 'e'), 1);
});

test('mbLevenshtein custom costs', () => {
  // INS=10 REP=12 DEL=10 mirrors searchmultithreads.php
  assert.equal(mbLevenshtein('abc', 'abcd', 10, 12, 10), 10);
  assert.equal(mbLevenshtein('abcd', 'abc', 10, 12, 10), 10);
  assert.equal(mbLevenshtein('abc', 'abd', 10, 12, 10), 12);
});

test('mbStrlen counts code points', () => {
  assert.equal(mbStrlen('abc'), 3);
  assert.equal(mbStrlen('café'), 4);
  assert.equal(mbStrlen('œuf'), 3);
});

test('translit replaces ligatures and superscripts', () => {
  assert.equal(translit('Œuf'), 'Oeuf');
  assert.equal(translit('cœur'), 'coeur');
  assert.equal(translit('m²'), 'm2');
});

test('filterTitle strips non-Latin/digit', () => {
  assert.equal(filterTitle('Hello, World!'), 'HelloWorld');
  assert.equal(filterTitle('Inception (2010)'), 'Inception2010');
  assert.equal(filterTitle('北京'), '');
  assert.equal(filterTitle('Café au lait'), 'Caféaulait');
});

test('filterAndLower', () => {
  assert.equal(filterAndLower('Mr. Robot S01E02'), 'mrrobots01e02');
  assert.equal(filterAndLower('Cœur de Pirate'), 'coeurdepirate');
});

test('parseQuery: movie with year', () => {
  const r = parseQuery('Inception 2010 1080p BluRay');
  assert.equal(r.type, 'movie');
  assert.equal(r.yearin, 2010);
  assert.equal(r.episodein, '');
  assert.equal(r.titlein.startsWith('Inception '), true);
});

// Resolution tokens like "1080p" must not be mistaken for a release year.
test('parseQuery: 1080 is not a year', () => {
  const r = parseQuery('Inception 2010 1080p');
  assert.equal(r.yearin, 2010);
});

test('parseQuery: tv with episode', () => {
  const r = parseQuery('Mr.Robot.S01E02.FRENCH.1080p');
  assert.equal(r.type, 'tv');
  assert.equal(r.episodein, 'S01E02');
  assert.equal(r.titlein.startsWith('Mr.Robot'), true);
});

test('parseQuery: tv with year + episode', () => {
  const r = parseQuery('Some.Show.2015.S02E10');
  assert.equal(r.type, 'tv');
  assert.equal(r.yearin, 2015);
  assert.equal(r.episodein, 'S02E10');
});

// A query with neither a year nor an episode tag is rejected with an error.
test('parseQuery: no year no episode', () => {
  const r = parseQuery('Just a title');
  assert.ok(r.error);
});

// bucket(id) = floor(id / 1000) as a string; entryPath nests by bucket.
test('paths bucket + entryPath', () => {
  assert.equal(bucket(100), '0');
  assert.equal(bucket(1500), '1');
  assert.equal(bucket(1675803), '1675');
  assert.ok(entryPath('movie', 100).endsWith('/movie/0/100.json'));
  assert.ok(entryPath('tv', 1408).endsWith('/tv/1/1408.json'));
});
|
||||
Reference in New Issue
Block a user