Initial Commit with Working server and metrics

This commit is contained in:
2025-10-11 21:41:03 +02:00
commit 368383ba5c
16 changed files with 2839 additions and 0 deletions

31
.env.sample Normal file
View File

@@ -0,0 +1,31 @@
# Server
PORT=8080
# Epirent
UPSTREAM_BASE=http://127.0.0.1
EPI_NO_SESSION=true
EPI_ACCESS_TOKEN=xxxx
# Cache
TTL_SECONDS=120
STALE_SECONDS=120
# Optional: Pfadspezifische TTL (Regex = Sekunden), mehrere Regeln mit |
# Beispiel: /api\/v1\/search=30|/api\/v1\/reports=1800
ROUTE_TTLS=
# Redis (leer = in-memory LRU)
REDIS_URL=
# CORS
CORS=false
CORS_ORIGIN=*
# Minimal-Rate-Limit (Requests pro Sekunde, 0 = aus)
RATE_LIMIT_RPS=0
# Purge (optional)
PURGE_SECRET=
# --- Metriken (zweiter Port) ---
METRICS_PORT=9090
METRICS_ALLOW_ORIGIN=*

2
.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
.env
node_modules/

9
Dockerfile Normal file
View File

@@ -0,0 +1,9 @@
FROM node:22-alpine
WORKDIR /app
COPY package*.json ./
# Alle Abhängigkeiten installieren: "npm run build" benötigt tsc (devDependency).
# Vorher schlug der Build fehl, weil "npm ci --omit=dev" typescript wegließ.
RUN npm ci
COPY tsconfig.json ./
COPY src ./src
RUN npm run build
# devDependencies erst NACH dem Build entfernen, damit das Image klein bleibt.
RUN npm prune --omit=dev
EXPOSE 8080 9090
CMD ["node","dist/server.js"]

146
README.md Normal file
View File

@@ -0,0 +1,146 @@
# Epirent ReadOnly Cache Proxy (Node/TS)
Reverse-Proxy mit zentralem Cache (TTL + _stale-while-revalidate_) für die Epirent-API.
Alle **GET/HEAD**-Requests werden 1:1 an Epirent weitergereicht, Antworten werden zwischengespeichert.
Die Authentifizierung erfolgt serverseitig via `X-EPI-NO-SESSION: True` und `X-EPI-ACC-TOK: <token>`.
---
## Features
- 100 % dynamischer Read-Only-Proxy
- Zentraler Cache mit **TTL** und **stale-while-revalidate**
- **ETag/Last-Modified**-Revalidation (falls vom Upstream geliefert)
- Schutz vor Thundering Herd durch pro-Key-In-Flight-Lock
- Optional: **Redis** als Shared Cache
- Ausführliches Debug-Logging (`DEBUG=1/2`)
- **Metriken** (Prometheus + kleines Web-Dashboard) auf **zweitem Port** (optional)
---
## Voraussetzungen
- Node.js **20+** (oder Docker)
- Optional: Redis **7+** (für Shared Cache)
---
## Konfiguration
`.env` anlegen (Beispiel in der .env.sample).
> **Hinweis:** `.env` wird über `dotenv` geladen. Alternativ können Variablen vor dem Start im ShellEnvironment gesetzt werden.
---
## Installation (ohne Docker)
```bash
npm ci
npm run build
npm start
```
### Entwicklung
Zwei Debug Level (1 und 2) sind implementiert
- Linux/macOS:
```bash
DEBUG=1 npm run dev
```
- Windows PowerShell:
```powershell
$env:DEBUG="1"; npm run dev
```
---
## Docker
### Build & Run
```bash
docker build -t epirent-cache .
docker run --rm -p 8080:8080 --env-file .env epirent-cache
```
### docker-compose (mit Redis & Metriken)
```yaml
services:
cache-api:
build: .
env_file: .env
ports:
- "8080:8080"
- "9090:9090"
depends_on: [ redis ]
redis:
image: redis:7-alpine
command: ["redis-server","--appendonly","yes"]
volumes:
- ./data/redis:/data
```
Start:
```bash
docker compose up -d
```
---
## Endpunkte
- **Proxy:** `GET /…` → Antwort-Header `X-Cache-Status: HIT|MISS|MISS-EXPIRED|STALE|STALE-ERROR`
- **Health:** `GET /_healthz`
- **Purge:** `POST /_purge?secret=<PURGE_SECRET>&url=<VOLLE_UPSTREAM_URL>`
- **Metriken (zweiter Port):** `GET /metrics` (Prometheus), `GET /` (Dashboard)
---
## Systemd (optional)
```ini
[Unit]
Description=Epirent Read-Only Cache Proxy
After=network.target
[Service]
WorkingDirectory=/opt/epirent-cache
ExecStart=/usr/bin/node dist/server.js
EnvironmentFile=/opt/epirent-cache/.env
Restart=always
RestartSec=3
User=www-data
Group=www-data
[Install]
WantedBy=multi-user.target
```
Aktivieren:
```bash
sudo systemctl daemon-reload
sudo systemctl enable --now epirent-cache
```
---
## Troubleshooting
### 502 Bad Gateway
- `UPSTREAM_BASE` prüfen (Schema/Host/Port)
- Direkt via `curl` testen:
```bash
curl -v "http://epirent.host.local:8080/api/..." -H "X-EPI-NO-SESSION: True" -H "X-EPI-ACC-TOK: DEIN_TOKEN"
```
- `DEBUG=2` aktivieren und Logs prüfen
- Interne Zertifikate? Testweise `TLS_REJECT_UNAUTHORIZED=0` (nicht dauerhaft!)
### Keine Cache-Treffer
- `Accept` unterscheidet sich zwischen Clients → gleiche Werte nutzen oder Vary-Liste erweitern
### Redis wird nicht genutzt
- `REDIS_URL` leer → In-Memory-LRU aktiv. `REDIS_URL` setzen und Service neu starten

36
dist/cache.js vendored Normal file
View File

@@ -0,0 +1,36 @@
import { LRUCache } from "lru-cache";
import config from "./config.js";
// Kompilat von src/cache.ts – Änderungen in der TypeScript-Quelle vornehmen und neu bauen.
// Modulzustand: Redis-Client (wenn REDIS_URL gesetzt), sonst In-Memory-LRU.
let redis = null;
let useRedis = false;
// Verbindet optional Redis; ohne REDIS_URL bleibt der In-Memory-LRU aktiv.
export async function initCache() {
if (config.redisUrl) {
const { createClient } = await import("redis");
redis = createClient({ url: config.redisUrl });
await redis.connect();
useRedis = true;
}
}
// In-Memory-Fallback: max. 1000 Einträge, Verdrängung per LRU.
const lru = new LRUCache({ max: 1000 });
// Liest einen Eintrag; null bei Miss.
// NOTE(review): JSON.parse stellt das Buffer-Feld "body" NICHT als Buffer wieder her
// (Ergebnis: {type:"Buffer",data:[...]}). Über Redis gelesene Antworten werden dadurch
// vermutlich falsch ausgeliefert – gegen den Redis-Pfad verifizieren (vgl. set/JSON.stringify).
export async function get(key) {
if (useRedis) {
const raw = await redis.get(key);
return raw ? JSON.parse(raw) : null;
}
return lru.get(key) ?? null;
}
// Schreibt einen Eintrag.
// NOTE(review): Redis-SET ohne Ablaufzeit (EX) – endgültig abgelaufene Einträge
// bleiben dauerhaft liegen; der Redis-Speicher wächst unbegrenzt.
export async function set(key, val) {
if (useRedis) {
await redis.set(key, JSON.stringify(val));
}
else {
lru.set(key, val);
}
}
// Entfernt einen Eintrag aus dem aktiven Backend.
export async function del(key) {
if (useRedis) {
await redis.del(key);
}
else {
lru.delete(key);
}
}

30
dist/config.js vendored Normal file
View File

@@ -0,0 +1,30 @@
// Zentrale Konfiguration, per .env überschreibbar
// Kompilat von src/config.ts – Änderungen in der TypeScript-Quelle vornehmen und neu bauen.
export default {
port: parseInt(process.env.PORT || "8080", 10),
// Upstream (Epirent)
upstreamBase: process.env.UPSTREAM_BASE || "https://epirent.example.com", // <-- anpassen
epiNoSession: process.env.EPI_NO_SESSION || "true",
epiAccessToken: process.env.EPI_ACCESS_TOKEN || "xxxDerAccessTokenxxx",
// Cache
ttlSeconds: parseInt(process.env.TTL_SECONDS || "600", 10), // frisch
staleSeconds: parseInt(process.env.STALE_SECONDS || "600", 10), // zusätzlich stale
// Optional: Pfad-spezifische TTL-Regeln (Regex-Strings)
// NOTE(review): parseInt(sec, 10) kann NaN liefern (Regel ohne "=ZAHL"); eine
// ungültige Regex wirft hier bereits beim Modul-Import – fehlerhafte ROUTE_TTLS
// crashen damit den Serverstart.
routeTtls: (process.env.ROUTE_TTLS || "")
.split("|")
.map(s => s.trim())
.filter(Boolean)
.map(rule => {
// Format: /regex/i=SECONDS (i optional)
const [pat, sec] = rule.split("=");
const flags = /\/[gimsuy]*$/.test(pat) ? pat.split("/").pop() : "";
const body = pat.replace(/^\/|\/[gimsuy]*$/g, "");
return { re: new RegExp(body, flags || "i"), seconds: parseInt(sec, 10) };
}),
// Redis (leer => In-Memory LRU)
redisUrl: process.env.REDIS_URL || "",
// Sicherheit / CORS
allowCors: (process.env.CORS || "false").toLowerCase() === "true",
corsOrigin: process.env.CORS_ORIGIN || "*",
// Rate limit (sehr simpel, optional)
rateLimitRps: parseInt(process.env.RATE_LIMIT_RPS || "0", 10), // 0 = aus
};

183
dist/server.js vendored Normal file
View File

@@ -0,0 +1,183 @@
import express from "express";
import fetch, { Headers } from "node-fetch";
import crypto from "node:crypto";
import { setInterval as nodeSetInterval } from "node:timers";
import config from "./config.js";
import { get, set, del, initCache } from "./cache.js";
// Kompilat von src/server.ts – Änderungen in der TypeScript-Quelle vornehmen und neu bauen.
// pro-Key In-Flight Lock gegen Thundering Herd
const inflight = new Map();
const app = express();
app.disable("x-powered-by");
// Optional: schlankes CORS
if (config.allowCors) {
app.use((req, res, next) => {
res.setHeader("Access-Control-Allow-Origin", config.corsOrigin);
res.setHeader("Access-Control-Allow-Methods", "GET,HEAD,OPTIONS");
res.setHeader("Access-Control-Allow-Headers", "*");
if (req.method === "OPTIONS")
return res.sendStatus(204);
next();
});
}
// Optional: minimalistisches Rate-Limit (global)
// Token-Bucket: der Vorrat wird jede Sekunde auf RATE_LIMIT_RPS zurückgesetzt.
if (config.rateLimitRps > 0) {
let tokens = config.rateLimitRps;
nodeSetInterval(() => {
tokens = config.rateLimitRps;
}, 1000).unref();
app.use((req, res, next) => {
if (req.method === "GET" || req.method === "HEAD") {
if (tokens > 0) {
tokens--;
return next();
}
res.setHeader("Retry-After", "1");
return res.status(429).send("Too Many Requests");
}
next();
});
}
// Health
app.get("/_healthz", (_req, res) => {
res.json({ ok: true, ts: new Date().toISOString() });
});
// Purge per URL (optional; mit Secret schützen)
// NOTE(review): Ist PURGE_SECRET NICHT gesetzt, sind secret (undefined) und
// process.env.PURGE_SECRET (undefined) gleich – der Endpunkt ist dann für jeden
// Aufrufer offen. In der Quelle zusätzlich prüfen, dass PURGE_SECRET gesetzt ist.
app.post("/_purge", express.json(), async (req, res) => {
const { secret, url } = req.query;
if (secret !== process.env.PURGE_SECRET || !url)
return res.sendStatus(403);
const key = cacheKeyFromUrl(String(url));
await del(key);
res.json({ purged: true, key });
});
// Cache-Key für eine volle Upstream-URL (Purge-Pfad): SHA-1 über "GET|<url>|".
function cacheKeyFromUrl(fullUrl) {
const vary = ""; // Purge-Variante: bewusst ohne Header-Vary
return crypto.createHash("sha1").update(`GET|${fullUrl}|${vary}`).digest("hex");
}
// Cache-Key für eine eingehende Anfrage: Methode + Pfad + Query + "accept"-Header.
function cacheKey(req) {
const varyHeaders = ["accept"];
const h = varyHeaders.map((hn) => `${hn}:${req.header(hn) ?? ""}`).join("|");
const url = new URL(req.originalUrl, "http://x"); // Base egal, wir brauchen nur Pfad/Query
return crypto
.createHash("sha1")
.update(`${req.method}|${url.pathname}|${url.searchParams.toString()}|${h}`)
.digest("hex");
}
// TTL (Sekunden) für einen Pfad: erste passende ROUTE_TTLS-Regel, sonst globaler Default.
function pickTtl(pathname) {
for (const r of config.routeTtls) {
if (r.re.test(pathname))
return r.seconds;
}
return config.ttlSeconds;
}
// Frisch = Eintrag ist jünger als ttl Sekunden (Alter relativ zu entry.ts, in ms).
function isFresh(entry, ttl, now) {
return now - entry.ts < ttl * 1000;
}
// Noch als "stale" auslieferbar = jünger als (ttl + stale) Sekunden.
function isStaleOk(entry, ttl, stale, now) {
return now - entry.ts < (ttl + stale) * 1000;
}
app.all("*", async (req, res) => {
if (!["GET", "HEAD", "OPTIONS"].includes(req.method)) {
return res.status(405).send("Method Not Allowed");
}
if (req.method === "OPTIONS")
return res.sendStatus(204);
const upstreamUrl = new URL(req.originalUrl, config.upstreamBase).toString();
const key = cacheKey(req);
const now = Date.now();
const ttl = pickTtl(new URL(upstreamUrl).pathname);
const entry = await get(key);
if (entry && isFresh(entry, ttl, now)) {
res.set("X-Cache-Status", "HIT");
Object.entries(entry.headers).forEach(([k, v]) => res.set(k, v));
return res.status(entry.status).send(entry.body);
}
if (entry && isStaleOk(entry, ttl, config.staleSeconds, now)) {
res.set("X-Cache-Status", "STALE");
Object.entries(entry.headers).forEach(([k, v]) => res.set(k, v));
res.status(entry.status).send(entry.body);
void backgroundRefresh(upstreamUrl, req, key, entry).catch(() => { });
return;
}
try {
const fresh = await getOrFetch(upstreamUrl, req, key, entry ?? undefined);
res.set("X-Cache-Status", entry ? "MISS-EXPIRED" : "MISS");
Object.entries(fresh.headers).forEach(([k, v]) => res.set(k, v));
res.status(fresh.status).send(fresh.body);
}
catch {
if (entry) {
res.set("X-Cache-Status", "STALE-ERROR");
Object.entries(entry.headers).forEach(([k, v]) => res.set(k, v));
return res.status(entry.status).send(entry.body);
}
res.status(502).send("Bad Gateway");
}
});
// Hintergrund-Refresh nach STALE-Auslieferung; Fehler bewusst verschluckt,
// der Client hat seine (stale) Antwort bereits erhalten.
async function backgroundRefresh(upstreamUrl, req, key, prev) {
try {
await getOrFetch(upstreamUrl, req, key, prev);
}
catch { }
}
// Holt eine Antwort vom Upstream und legt sie in den Cache.
// Pro Cache-Key läuft höchstens ein Fetch gleichzeitig (inflight-Map);
// weitere Aufrufer warten auf dasselbe Promise (Thundering-Herd-Schutz).
async function getOrFetch(upstreamUrl, req, key, prev) {
if (inflight.has(key))
return inflight.get(key);
const p = (async () => {
const headers = new Headers();
// Ausgewählte Client-Header durchreichen
["accept", "user-agent"].forEach((hn) => {
const v = req.header(hn);
if (v)
headers.set(hn, v);
});
// Konditionale Revalidierung, wenn ein alter Eintrag vorliegt
if (prev?.etag)
headers.set("if-none-match", prev.etag);
if (prev?.lastModified)
headers.set("if-modified-since", prev.lastModified);
// Epirent Auth-Header (nur upstream)
headers.set("X-EPI-NO-SESSION", config.epiNoSession);
headers.set("X-EPI-ACC-TOK", config.epiAccessToken);
const resp = await fetch(upstreamUrl, { method: "GET", headers });
// 304: Inhalt unverändert → nur Zeitstempel des Eintrags auffrischen
if (resp.status === 304 && prev) {
const refreshed = { ...prev, ts: Date.now() };
await set(key, refreshed);
return refreshed;
}
const body = Buffer.from(await resp.arrayBuffer());
// Nur eine Whitelist an Antwort-Headern cachen/weitergeben
const outHeaders = {};
let etag;
let lastMod;
for (const [k, v] of resp.headers.entries()) {
const kl = k.toLowerCase();
if (["content-type", "etag", "last-modified", "cache-control", "date"].includes(kl)) {
outHeaders[kl] = v;
}
if (kl === "etag")
etag = v;
if (kl === "last-modified")
lastMod = v;
}
const stored = {
status: resp.status,
headers: outHeaders,
body,
ts: Date.now(),
etag,
lastModified: lastMod
};
await set(key, stored);
return stored;
})();
inflight.set(key, p);
try {
return await p;
}
finally {
inflight.delete(key);
}
}
// Start: erst Cache-Backend initialisieren, dann HTTP-Server binden.
// NOTE(review): Fehler aus initCache() (z. B. Redis nicht erreichbar) werden
// nicht behandelt → unhandled rejection; ggf. .catch mit Exit ergänzen.
initCache().then(() => {
app.listen(config.port, () => {
console.log(`Epirent Read-Only Cache läuft auf :${config.port}`);
});
});

14
docker-compose.yml Normal file
View File

@@ -0,0 +1,14 @@
services:
cache-api:
build: .
env_file: .env
ports:
- "8080:8080" #PROXY
- "9090:9090" #METRIK
depends_on:
- redis
redis:
image: redis:7-alpine
command: ["redis-server","--appendonly","yes"]
volumes:
- ./data/redis:/data

1770
package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

26
package.json Normal file
View File

@@ -0,0 +1,26 @@
{
"name": "epirent-cache-proxy",
"version": "1.0.0",
"type": "module",
"private": true,
"scripts": {
"build": "tsc",
"start": "node dist/server.js",
"dev": "tsx watch src/server.ts"
},
"dependencies": {
"dotenv": "^17.2.3",
"express": "^4.19.2",
"lru-cache": "^10.4.3",
"node-fetch": "^3.3.2",
"prom-client": "^15.1.3",
"redis": "^4.7.0"
},
"devDependencies": {
"@types/express": "^4.17.21",
"@types/node": "^22.7.5",
"tsx": "^4.18.0",
"typescript": "^5.6.3"
}
}

80
src/cache.ts Normal file
View File

@@ -0,0 +1,80 @@
import { LRUCache } from "lru-cache";
import config from "./config.js";
// Gecachte Upstream-Antwort inkl. Metadaten für TTL-Prüfung und Revalidierung.
export type CacheEntry = {
status: number; // HTTP-Status der Upstream-Antwort
headers: Record<string, string>; // Whitelist gecachter Antwort-Header
body: Buffer; // Antwort-Body als Bytes
ts: number; // Zeitpunkt des Cachens (ms seit Epoch) – Basis für TTL/Stale
etag?: string; // für If-None-Match-Revalidierung
lastModified?: string; // für If-Modified-Since-Revalidierung
};
// Debug-Verbosity aus der Umgebung (0 = aus, 1 = normal, 2 = sehr gesprächig).
const DEBUG = parseInt(process.env.DEBUG || "0", 10) || 0;
let redis: any = null;
let useRedis = false;
/** Log auf Debug-Stufe 1+. */
function log(...args: any[]) {
  if (DEBUG < 1) return;
  console.log("[cache]", ...args);
}
/** Log auf Debug-Stufe 2+. */
function log2(...args: any[]) {
  if (DEBUG < 2) return;
  console.log("[cache:vv]", ...args);
}
/**
 * Initialisiert das Cache-Backend: verbindet Redis, wenn REDIS_URL gesetzt ist,
 * andernfalls bleibt der In-Memory-LRU aktiv.
 */
export async function initCache() {
  if (!config.redisUrl) {
    log("backend=in-memory-lru");
    return;
  }
  const { createClient } = await import("redis");
  redis = createClient({ url: config.redisUrl });
  await redis.connect();
  useRedis = true;
  log("backend=redis url=", config.redisUrl);
}
// In-Memory-Fallback: max. 1000 Einträge, Verdrängung per LRU.
const lru = new LRUCache<string, CacheEntry>({ max: 1000 });
/**
 * Liest einen Cache-Eintrag; null bei Miss.
 *
 * Redis-Einträge liegen als JSON vor. JSON.parse liefert für das Buffer-Feld
 * `body` nur {type:"Buffer",data:[...]} – ohne Re-Konstruktion würde der Proxy
 * die JSON-Repräsentation statt der Original-Bytes ausliefern. Daher wird
 * `body` hier wieder in einen echten Buffer verwandelt.
 */
export async function get(key: string): Promise<CacheEntry | null> {
  if (useRedis) {
    const raw = await redis.get(key);
    if (!raw) {
      log2("GET", key, "→ miss(redis)");
      return null;
    }
    try {
      const parsed = JSON.parse(raw) as CacheEntry;
      const b: any = parsed.body;
      if (b && !Buffer.isBuffer(b) && Array.isArray(b.data)) {
        parsed.body = Buffer.from(b.data); // JSON-serialisierten Buffer wiederherstellen
      }
      log2("GET", key, "→ hit(redis)", `size=${raw.length}`);
      return parsed;
    } catch (e) {
      // Unlesbarer Eintrag: verwerfen statt fehlerhaft auszuliefern
      log("GET", key, "→ corrupt(redis) dropping");
      await redis.del(key);
      return null;
    }
  }
  const val = lru.get(key) ?? null;
  log2("GET", key, val ? "→ hit(lru)" : "→ miss(lru)");
  return val;
}
export async function set(key: string, val: CacheEntry): Promise<void> {
if (useRedis) {
const raw = JSON.stringify(val);
await redis.set(key, raw);
log2("SET", key, `redis size=${raw.length}`, `status=${val.status}`);
} else {
lru.set(key, val);
log2("SET", key, `lru`, `status=${val.status}`, `bytes=${val.body.length}`);
}
}
/** Entfernt einen Eintrag aus dem aktiven Backend (Redis oder LRU). */
export async function del(key: string): Promise<void> {
  if (!useRedis) {
    lru.delete(key);
    log("DEL", key, "lru");
    return;
  }
  await redis.del(key);
  log("DEL", key, "redis");
}

37
src/config.ts Normal file
View File

@@ -0,0 +1,37 @@
import "dotenv/config";
/**
 * Parst ROUTE_TTLS: mehrere Regeln, getrennt mit "|", je Regel
 * "/regex/flags=SEKUNDEN" (Flags optional, Default "i").
 * Fehlerhafte Regeln (ungültige Regex, fehlende/negative Sekundenzahl) werden
 * übersprungen – vorher konnte eine kaputte Regel NaN-TTLs erzeugen oder der
 * RegExp-Konstruktor den Serverstart bereits beim Import crashen.
 */
function parseRouteTtls(spec: string): Array<{ re: RegExp; seconds: number }> {
  const rules: Array<{ re: RegExp; seconds: number }> = [];
  for (const rule of spec.split("|").map(s => s.trim()).filter(Boolean)) {
    const eq = rule.lastIndexOf("=");
    if (eq <= 0) continue; // keine Sekunden-Angabe oder leeres Muster
    const pat = rule.slice(0, eq);
    const seconds = parseInt(rule.slice(eq + 1), 10);
    if (!Number.isFinite(seconds) || seconds < 0) continue;
    const flags = /\/[gimsuy]*$/.test(pat) ? pat.split("/").pop() : "";
    const body = pat.replace(/^\/|\/[gimsuy]*$/g, "");
    try {
      rules.push({ re: new RegExp(body, flags || "i"), seconds });
    } catch {
      // ungültige Regex → Regel ignorieren statt Start zu crashen
    }
  }
  return rules;
}

// Zentrale Konfiguration, per .env überschreibbar (dotenv wird beim Import geladen).
const config = {
  port: parseInt(process.env.PORT || "8080", 10),
  // Upstream (Epirent)
  upstreamBase: process.env.UPSTREAM_BASE || "http://127.0.0.1",
  epiNoSession: process.env.EPI_NO_SESSION || "true",
  epiAccessToken: process.env.EPI_ACCESS_TOKEN || "xxxDerAccessTokenxxx",
  // Cache: "frisch" bis TTL_SECONDS, danach bis +STALE_SECONDS noch "stale" auslieferbar
  ttlSeconds: parseInt(process.env.TTL_SECONDS || "600", 10),
  staleSeconds: parseInt(process.env.STALE_SECONDS || "600", 10),
  routeTtls: parseRouteTtls(process.env.ROUTE_TTLS || ""),
  // Redis (leer => In-Memory LRU)
  redisUrl: process.env.REDIS_URL || "",
  allowCors: (process.env.CORS || "false").toLowerCase() === "true",
  corsOrigin: process.env.CORS_ORIGIN || "*",
  rateLimitRps: parseInt(process.env.RATE_LIMIT_RPS || "0", 10), // 0 = aus
};
export default config;

143
src/metrics.ts Normal file
View File

@@ -0,0 +1,143 @@
// src/metrics.ts
import http from "node:http";
import express from "express";
import client from "prom-client";
//Default Prometheus Metrics
// Eigene Registry, damit /metrics genau die hier registrierten Metriken ausliefert.
const registry = new client.Registry();
client.collectDefaultMetrics({ register: registry });
// Eingehende Requests, gelabelt nach HTTP-Methode.
export const requestsTotal = new client.Counter({
name: "proxy_requests_total",
help: "Total number of incoming requests",
labelNames: ["method"] as const,
});
// Cache-Ergebnisse, gelabelt nach Outcome (siehe Typ Outcome unten).
export const cacheEvents = new client.Counter({
name: "proxy_cache_events_total",
help: "Cache outcomes (HIT, MISS, MISS_EXPIRED, STALE, STALE_ERROR)",
labelNames: ["status"] as const,
});
// Upstream-Latenz in Millisekunden (Buckets 25 ms … 6,4 s).
export const upstreamLatency = new client.Histogram({
name: "proxy_upstream_latency_ms",
help: "Upstream fetch latency in milliseconds",
buckets: [25, 50, 100, 200, 400, 800, 1600, 3200, 6400],
});
// Upstream-HTTP-Statuscodes.
export const upstreamStatus = new client.Counter({
name: "proxy_upstream_status_total",
help: "Upstream HTTP status codes",
labelNames: ["code"] as const,
});
// Übertragene Bytes, gelabelt nach Richtung.
export const bytesTransferred = new client.Counter({
name: "proxy_bytes_transferred_total",
help: "Bytes transferred to/from clients",
labelNames: ["direction"] as const, // "out" | "in"
});
// Explizit in der eigenen Registry registrieren (Counter landen sonst nur im globalen Default-Register).
registry.registerMetric(requestsTotal);
registry.registerMetric(cacheEvents);
registry.registerMetric(upstreamLatency);
registry.registerMetric(upstreamStatus);
registry.registerMetric(bytesTransferred);
type Outcome = "HIT" | "MISS" | "MISS-EXPIRED" | "STALE" | "STALE-ERROR";
// Rollfenster fürs Dashboard: alle 5 s ein Sample, max. 120 Samples (~10 Minuten).
const windowStats: Array<{ ts: number; hit: number; miss: number; stale: number }> = [];
let rolling = { hit: 0, miss: 0, stale: 0 };
setInterval(() => {
windowStats.push({ ts: Date.now(), ...rolling });
if (windowStats.length > 120) windowStats.shift(); // ~10 Minuten
rolling = { hit: 0, miss: 0, stale: 0 };
}, 5000).unref();
/** Zählt eine eingehende Anfrage (Label: HTTP-Methode). */
export function onIncomingRequest(method: string) {
  requestsTotal.labels(method).inc();
}
/**
 * Registriert ein Cache-Ergebnis: Prometheus-Counter plus 5-Sekunden-Rollfenster
 * fürs Dashboard (HIT / MISS(-EXPIRED) / STALE(-ERROR)).
 */
export function onCacheEvent(outcome: Outcome) {
  cacheEvents.inc({ status: outcome });
  switch (outcome) {
    case "HIT":
      rolling.hit++;
      break;
    case "MISS":
    case "MISS-EXPIRED":
      rolling.miss++;
      break;
    case "STALE":
    case "STALE-ERROR":
      rolling.stale++;
      break;
  }
}
/** Erfasst Status, Latenz und (falls bekannt) Antwortgröße eines Upstream-Fetches. */
export function onUpstream(statusCode: number, latencyMs: number, bytesOut: number) {
  upstreamStatus.labels(String(statusCode)).inc();
  upstreamLatency.observe(latencyMs);
  if (bytesOut > 0) {
    bytesTransferred.inc({ direction: "out" }, bytesOut);
  }
}
/** Zählt zum/vom Client übertragene Bytes ("out" | "in"); n <= 0 wird ignoriert. */
export function onClientBytes(direction: "out" | "in", n: number) {
  if (n <= 0) return;
  bytesTransferred.inc({ direction }, n);
}
/**
 * Startet den Metrik-Server auf einem eigenen Port:
 *  - GET /metrics : Prometheus-Exposition der eigenen Registry
 *  - GET /        : kleines HTML-Dashboard (Chart.js vom CDN, pollt /stats alle 5 s)
 *  - GET /stats   : letztes 5-Sekunden-Sample des Rollfensters
 * allowOrigin setzt Access-Control-Allow-Origin für /metrics und /stats.
 */
export function startMetricsServer(port: number, allowOrigin = "*") {
const app = express();
app.get("/metrics", async (_req, res) => {
res.set("Content-Type", registry.contentType);
res.set("Access-Control-Allow-Origin", allowOrigin);
res.end(await registry.metrics());
});
// Dashboard-Seite; das Inline-Script zeichnet HIT/MISS/STALE als Liniendiagramm.
app.get("/", (_req, res) => {
res.set("Content-Type", "text/html; charset=utf-8");
res.send(`<!doctype html>
<html>
<head><meta charset="utf-8"><title>Proxy Metrics</title></head>
<body style="font-family:system-ui,Segoe UI,Roboto,Helvetica,Arial,sans-serif;margin:20px;">
<h1>Proxy Metrics</h1>
<p>Prometheus Endpoint: <code>/metrics</code></p>
<canvas id="chart" width="1100" height="340"></canvas>
<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
<script>
const ctx = document.getElementById('chart').getContext('2d');
const data = {
labels: [],
datasets: [
{ label: 'HIT', data: [], borderWidth: 2, tension: .2 },
{ label: 'MISS', data: [], borderWidth: 2, tension: .2 },
{ label: 'STALE',data: [], borderWidth: 2, tension: .2 },
]
};
const chart = new Chart(ctx, {
type: 'line',
data,
options: {
responsive: true,
animation: false,
scales: { x: { title: { display: true, text: 'time' } } }
}
});
async function tick() {
const r = await fetch('/stats');
const j = await r.json();
data.labels.push(new Date(j.ts).toLocaleTimeString());
data.datasets[0].data.push(j.hit);
data.datasets[1].data.push(j.miss);
data.datasets[2].data.push(j.stale);
if (data.labels.length > 120) {
data.labels.shift();
data.datasets.forEach(ds => ds.data.shift());
}
chart.update();
}
setInterval(tick, 5000);
</script>
</body>
</html>`);
});
// Liefert das jüngste Sample; Fallback sind Nullwerte, bevor das erste Intervall abgelaufen ist.
app.get("/stats", (_req, res) => {
const last = windowStats.at(-1) || { ts: Date.now(), hit: 0, miss: 0, stale: 0 };
res.set("Access-Control-Allow-Origin", allowOrigin);
res.json(last);
});
http.createServer(app).listen(port, () => {
console.log("[metrics] listening", { port });
});
}

313
src/server.ts Normal file
View File

@@ -0,0 +1,313 @@
// server.ts
import express, { type Request, type Response, type NextFunction } from "express";
import fetch, { Headers } from "node-fetch";
import crypto from "node:crypto";
import { setInterval as nodeSetInterval } from "node:timers";
import https from "node:https";
import config from "./config.js";
import { get, set, del, initCache, type CacheEntry } from "./cache.js";
import {
startMetricsServer,
onIncomingRequest,
onCacheEvent,
onUpstream,
onClientBytes,
} from "./metrics.js";
// Debug-Verbosity aus der Umgebung (0 = aus, 1 = normal, 2 = sehr gesprächig).
const DEBUG = parseInt(process.env.DEBUG || "0", 10) || 0;
/**
 * Kopiert ein Header-Objekt und maskiert Token-/Auth-Werte fürs Logging:
 * Werte > 8 Zeichen werden auf "xxxx***yyyy" gekürzt, kürzere komplett ersetzt.
 */
function redactHeaders(obj: Record<string, any>) {
  const out: Record<string, any> = { ...obj };
  const sensitive = ["acc-tok", "authorization", "token"];
  for (const key of Object.keys(out)) {
    const lower = key.toLowerCase();
    if (!sensitive.some((s) => lower.includes(s))) continue;
    const value = String(out[key] ?? "");
    out[key] = value.length > 8 ? `${value.slice(0, 4)}***${value.slice(-4)}` : "***";
  }
  return out;
}
/** Wandelt ein (node-fetch-)Headers-Objekt in ein einfaches Key/Value-Objekt um. */
function hToObj(h: Headers) {
  const entries: Record<string, string> = {};
  h.forEach((value, name) => {
    entries[name] = value;
  });
  return entries;
}
/** Log auf Debug-Stufe 1+. */
function log(...args: any[]) {
  if (DEBUG < 1) return;
  console.log("[proxy]", ...args);
}
/** Log auf Debug-Stufe 2+. */
function log2(...args: any[]) {
  if (DEBUG < 2) return;
  console.log("[proxy:vv]", ...args);
}
// Pro-Key-In-Flight-Lock: verhindert, dass derselbe Upstream-Request parallel läuft.
const inflight = new Map<string, Promise<CacheEntry>>();
const app = express();
app.disable("x-powered-by");
// Optional: schlankes CORS (nur wenn CORS=true)
if (config.allowCors) {
app.use((req: Request, res: Response, next: NextFunction) => {
res.setHeader("Access-Control-Allow-Origin", config.corsOrigin);
res.setHeader("Access-Control-Allow-Methods", "GET,HEAD,OPTIONS");
res.setHeader("Access-Control-Allow-Headers", "*");
if (req.method === "OPTIONS") return res.sendStatus(204);
next();
});
}
// Optional: globales Mini-Rate-Limit. Token-Bucket, Vorrat wird 1x pro Sekunde zurückgesetzt.
if (config.rateLimitRps > 0) {
let tokens = config.rateLimitRps;
nodeSetInterval(() => {
tokens = config.rateLimitRps;
}, 1000).unref();
app.use((req: Request, res: Response, next: NextFunction) => {
if (req.method === "GET" || req.method === "HEAD") {
if (tokens > 0) {
tokens--;
return next();
}
res.setHeader("Retry-After", "1");
return res.status(429).send("Too Many Requests");
}
next();
});
}
// Health-Check inkl. konfigurierter Upstream-Basis
app.get("/_healthz", (_req: Request, res: Response) => {
res.json({ ok: true, ts: new Date().toISOString(), upstream: config.upstreamBase });
});
app.post("/_purge", express.json(), async (req: Request, res: Response) => {
const { secret, url } = req.query as any;
if (secret !== process.env.PURGE_SECRET || !url) return res.sendStatus(403);
const key = cacheKeyFromUrl(String(url));
await del(key);
log("PURGE", { key, url });
res.json({ purged: true, key });
});
/**
 * Cache-Key für eine volle Upstream-URL (Purge-Pfad): SHA-1 über "GET|<url>|".
 * Bewusst ohne Header-Vary, damit der Key allein aus der URL rekonstruierbar ist.
 */
function cacheKeyFromUrl(fullUrl: string) {
  const material = `GET|${fullUrl}|`;
  return crypto.createHash("sha1").update(material).digest("hex");
}
/**
 * Cache-Key für eine eingehende Anfrage: SHA-1 über Methode + Pfad + Query +
 * Vary-Header. Aktuell variiert der Cache nur über "accept"; bei Bedarf
 * erweitern (z. B. "accept-language").
 */
function cacheKey(req: Request) {
  const varyHeaders = ["accept"];
  const varyPart = varyHeaders
    .map((name) => `${name}:${req.header(name) ?? ""}`)
    .join("|");
  // Basis-URL ist egal – gebraucht werden nur Pfad und Query.
  const parsed = new URL(req.originalUrl, "http://x");
  const material = `${req.method}|${parsed.pathname}|${parsed.searchParams.toString()}|${varyPart}`;
  return crypto.createHash("sha1").update(material).digest("hex");
}
/** TTL (Sekunden) für einen Pfad: erste passende ROUTE_TTLS-Regel, sonst Default. */
function pickTtl(pathname: string): number {
  const match = config.routeTtls.find((r) => r.re.test(pathname));
  return match ? match.seconds : config.ttlSeconds;
}
/** Frisch = Eintrag ist jünger als ttl Sekunden (Alter relativ zu entry.ts, in ms). */
function isFresh(entry: CacheEntry, ttl: number, now: number) {
  const ageMs = now - entry.ts;
  return ageMs < ttl * 1000;
}
/** Noch als "stale" auslieferbar = Eintrag ist jünger als (ttl + stale) Sekunden. */
function isStaleOk(entry: CacheEntry, ttl: number, stale: number, now: number) {
  const ageMs = now - entry.ts;
  return ageMs < (ttl + stale) * 1000;
}
// Gemeinsamer HTTPS-Agent für Upstream-Fetches.
// TLS_REJECT_UNAUTHORIZED=0 deaktiviert die Zertifikatsprüfung – nur zum
// Debuggen interner/self-signed Zertifikate gedacht, nicht dauerhaft setzen!
const httpsAgent = new https.Agent({
rejectUnauthorized: process.env.TLS_REJECT_UNAUTHORIZED !== "0",
});
app.all("*", async (req: Request, res: Response) => {
if (!["GET", "HEAD", "OPTIONS"].includes(req.method)) {
return res.status(405).send("Method Not Allowed");
}
if (req.method === "OPTIONS") return res.sendStatus(204);
onIncomingRequest(req.method); // metrics
const upstreamUrl = new URL(req.originalUrl, config.upstreamBase).toString();
const key = cacheKey(req);
const now = Date.now();
const ttl = pickTtl(new URL(upstreamUrl).pathname);
log(`${req.method} ${req.originalUrl}${upstreamUrl}`);
log2("req.headers", redactHeaders(req.headers as any));
const entry = await get(key);
// Cache HIT
if (entry && isFresh(entry, ttl, now)) {
res.set("X-Cache-Status", "HIT");
Object.entries(entry.headers).forEach(([k, v]) => res.set(k, v));
onCacheEvent("HIT");
onClientBytes("out", entry.body.length);
log("cache HIT", { key, ttl, ageMs: now - entry.ts, size: entry.body.length });
return res.status(entry.status).send(entry.body);
}
// STALE serve + refresh
if (entry && isStaleOk(entry, ttl, config.staleSeconds, now)) {
res.set("X-Cache-Status", "STALE");
Object.entries(entry.headers).forEach(([k, v]) => res.set(k, v));
onCacheEvent("STALE");
onClientBytes("out", entry.body.length);
log("cache STALE -> serve+refresh", { key, ttl, ageMs: now - entry.ts, size: entry.body.length });
res.status(entry.status).send(entry.body);
void backgroundRefresh(upstreamUrl, req, key, entry).catch((e) => {
log("background refresh ERROR", e?.name, e?.message);
if (DEBUG >= 2) console.error(e?.stack || e);
});
return;
}
// MISS / MISS-EXPIRED
try {
const fresh = await getOrFetch(upstreamUrl, req, key, entry ?? undefined);
res.set("X-Cache-Status", entry ? "MISS-EXPIRED" : "MISS");
Object.entries(fresh.headers).forEach(([k, v]) => res.set(k, v));
onCacheEvent(entry ? "MISS-EXPIRED" : "MISS");
onClientBytes("out", fresh.body.length);
log(entry ? "cache MISS-EXPIRED" : "cache MISS", {
key,
ttl,
status: fresh.status,
size: fresh.body.length,
});
res.status(fresh.status).send(fresh.body);
} catch (e: any) {
log("FETCH ERROR", e?.name, e?.message, e?.code);
if (DEBUG >= 2) console.error(e?.stack || e);
if (entry) {
res.set("X-Cache-Status", "STALE-ERROR");
Object.entries(entry.headers).forEach(([k, v]) => res.set(k, v));
onCacheEvent("STALE-ERROR");
onClientBytes("out", entry.body.length);
log("serve STALE due to error", { key });
return res.status(entry.status).send(entry.body);
}
const msg = DEBUG ? `${e?.name || "Error"}: ${e?.message || ""}` : "Bad Gateway";
return res.status(502).send(msg);
}
});
/**
 * Aktualisiert einen STALE ausgelieferten Eintrag im Hintergrund.
 * Fehler werden bewusst verschluckt – der Client hat seine (stale) Antwort
 * bereits erhalten; der nächste Request versucht es erneut.
 */
async function backgroundRefresh(
  upstreamUrl: string,
  req: Request,
  key: string,
  prev: CacheEntry
) {
  await getOrFetch(upstreamUrl, req, key, prev).catch(() => undefined);
}
/**
 * Holt eine Antwort vom Upstream und legt sie in den Cache.
 * Pro Cache-Key läuft höchstens ein Fetch gleichzeitig (inflight-Map);
 * weitere Aufrufer warten auf dasselbe Promise (Thundering-Herd-Schutz).
 * Bei vorhandenem prev-Eintrag wird konditional revalidiert (ETag/Last-Modified);
 * ein 304 frischt nur den Zeitstempel auf.
 */
async function getOrFetch(
upstreamUrl: string,
req: Request,
key: string,
prev?: CacheEntry
): Promise<CacheEntry> {
if (inflight.has(key)) return inflight.get(key)!;
const p = (async () => {
const headers = new Headers();
// Ausgewählte Client-Header durchreichen
const passHeaders = [
"accept",
"accept-language",
"user-agent",
"if-none-match",
"if-modified-since",
];
for (const hn of passHeaders) {
const v = req.header(hn);
if (v) headers.set(hn, v);
}
// Konditionale Header aus dem Cache-Eintrag nur setzen, wenn der Client keine mitschickt
if (prev?.etag && !headers.has("if-none-match")) headers.set("if-none-match", prev.etag);
if (prev?.lastModified && !headers.has("if-modified-since"))
headers.set("if-modified-since", prev.lastModified);
// Serverseitige Epirent-Authentifizierung (geht nur an den Upstream, nie an Clients)
headers.set("X-EPI-NO-SESSION", config.epiNoSession); // "True"
headers.set("X-EPI-ACC-TOK", config.epiAccessToken); // Token
if (DEBUG >= 2) log2("upstream request headers", redactHeaders(hToObj(headers)));
const fetchOpts: any = { method: "GET", headers };
// HTTPS-Upstream nutzt den gemeinsamen Agent (TLS_REJECT_UNAUTHORIZED-Schalter)
if (upstreamUrl.startsWith("https://")) fetchOpts.agent = httpsAgent;
log("→ fetch", upstreamUrl);
const t0 = Date.now();
const resp = await fetch(upstreamUrl, fetchOpts);
const t1 = Date.now();
// Antwortgröße aus Content-Length (0, wenn der Upstream keinen Header liefert)
const lenHeader = resp.headers.get("content-length");
const respLen = lenHeader ? Number(lenHeader) : 0;
onUpstream(resp.status, t1 - t0, respLen);
log("← fetch", resp.status, resp.statusText, `${t1 - t0}ms`);
if (DEBUG >= 2) log2("upstream response headers", hToObj(resp.headers));
// 304: Inhalt unverändert → nur Zeitstempel des Cache-Eintrags auffrischen
if (resp.status === 304 && prev) {
const refreshed: CacheEntry = { ...prev, ts: Date.now() };
await set(key, refreshed);
log("revalidated 304 → cache ts refreshed", { key });
return refreshed;
}
const arr = await resp.arrayBuffer();
const body = Buffer.from(arr);
// Nur eine Whitelist an Antwort-Headern cachen/weitergeben
const outHeaders: Record<string, string> = {};
let etag: string | undefined;
let lastMod: string | undefined;
for (const [k, v] of resp.headers.entries()) {
const kl = k.toLowerCase();
if (["content-type", "etag", "last-modified", "cache-control", "date"].includes(kl)) {
outHeaders[kl] = v;
}
if (kl === "etag") etag = v;
if (kl === "last-modified") lastMod = v;
}
const stored: CacheEntry = {
status: resp.status,
headers: outHeaders,
body,
ts: Date.now(),
etag,
lastModified: lastMod,
};
await set(key, stored);
log("stored", { key, status: stored.status, bytes: body.length });
return stored;
})();
inflight.set(key, p);
try {
return await p;
} finally {
// Lock immer freigeben, auch im Fehlerfall
inflight.delete(key);
}
}
// Start: Cache-Backend initialisieren, optional Metrik-Server starten, dann Proxy binden.
// NOTE(review): Fehler aus initCache() (z. B. Redis nicht erreichbar) werden nicht
// behandelt → unhandled rejection; ggf. .catch mit Log und Exit ergänzen.
initCache().then(() => {
// Metriken auf zweitem Port starten
const mPort = parseInt(process.env.METRICS_PORT || "0", 10);
if (mPort > 0) {
startMetricsServer(mPort, process.env.METRICS_ALLOW_ORIGIN || "*");
}
app.listen(config.port, () => {
log("listening", {
port: config.port,
upstream: config.upstreamBase,
ttl: config.ttlSeconds,
stale: config.staleSeconds,
});
});
});

1
src/types.d.ts vendored Normal file
View File

@@ -0,0 +1 @@
// Damit node-fetch ESM sauber in TS kompiliert; ansonsten leer ok.

18
tsconfig.json Normal file
View File

@@ -0,0 +1,18 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"outDir": "dist",
"rootDir": "src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"lib": ["ES2022"],
"types": ["node"]
},
"include": ["src"]
}