// proxy_tmdb/lib/http.js (JavaScript, 67 lines, 1.6 KiB)
// Tiny fetch wrapper with retry and concurrency limiter.
/**
 * Fetch `url` and return the response body as text.
 *
 * Retries transient failures (network errors, timeouts, 429, 5xx) with a
 * linearly increasing backoff. A 404 is treated as an expected absence and
 * returns null immediately; other 4xx responses are not retried, since a
 * repeat attempt cannot succeed. After exhausting retries the error is
 * logged and null is returned — callers never see an exception.
 *
 * @param {string} url - absolute URL to fetch
 * @param {{ retries?: number, timeoutMs?: number }} [opts]
 * @returns {Promise<string|null>} body text, or null on 404 / failure
 */
export async function fetchText(url, { retries = 3, timeoutMs = 30000 } = {}) {
  let lastErr;
  for (let attempt = 0; attempt <= retries; attempt++) {
    const ac = new AbortController();
    const timer = setTimeout(() => ac.abort(), timeoutMs);
    try {
      const res = await fetch(url, { signal: ac.signal });
      if (!res.ok) {
        if (res.status === 404) return null; // expected absence, not an error
        const err = new Error(`HTTP ${res.status} ${res.statusText}`);
        // Only retry statuses that can plausibly succeed on a second try.
        err.retryable = res.status === 429 || res.status >= 500;
        throw err;
      }
      // Read the body while the timer is still armed so timeoutMs bounds the
      // whole request, including a stalled body stream (previously the timer
      // was cleared before the body read, which could then hang forever).
      return await res.text();
    } catch (err) {
      lastErr = err;
      if (err.retryable === false) break; // non-retryable HTTP error: stop now
      if (attempt < retries) {
        // Linear backoff: 500ms, 1000ms, 1500ms, ...
        await new Promise((wake) => setTimeout(wake, 500 * (attempt + 1)));
      }
    } finally {
      clearTimeout(timer); // single cleanup point for every exit path
    }
  }
  console.error(`fetchText failed: ${url} :: ${lastErr?.message}`);
  return null;
}
/**
 * Fetch `url` and parse the response body as JSON.
 *
 * @param {string} url - absolute URL to fetch
 * @param {{ retries?: number, timeoutMs?: number }} [opts] - forwarded to fetchText
 * @returns {Promise<any|null>} parsed value, or null when the fetch failed,
 *   the body was empty, or the body was not valid JSON
 */
export async function fetchJson(url, opts) {
  const raw = await fetchText(url, opts);
  if (!raw) return null;
  let parsed = null;
  try {
    parsed = JSON.parse(raw);
  } catch {
    // A malformed payload is treated the same as a failed fetch.
  }
  return parsed;
}
/**
 * Simple concurrency limiter: at most `max` tasks run at once; extra tasks
 * wait in FIFO order until a running task finishes and frees a slot.
 */
export class Limiter {
  /** @param {number} max - maximum number of concurrently running tasks */
  constructor(max) {
    this.max = max;
    this.active = 0;
    this.queue = [];
  }

  /**
   * Run `fn` as soon as a slot is available.
   * @param {() => any} fn - task to execute (may be sync or async)
   * @returns {Promise<any>} settles with fn's result or its thrown error
   */
  async run(fn) {
    await this._acquire();
    try {
      return await fn();
    } finally {
      this.active--;
      this._next();
    }
  }

  // Grant a slot immediately when one is free; otherwise park the caller in
  // the FIFO queue until _next() wakes it up.
  _acquire() {
    if (this.active < this.max) {
      this.active++;
      return Promise.resolve();
    }
    return new Promise((grant) => {
      this.queue.push(() => {
        this.active++;
        grant();
      });
    });
  }

  // Wake the oldest waiter, if any.
  _next() {
    const waiter = this.queue.shift();
    if (waiter) waiter();
  }
}