apiforgejs 1.0.2 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "apiforgejs",
3
- "version": "1.0.2",
3
+ "version": "2.0.0",
4
4
  "description": "API observability & intelligence SDK for Express.js — local-first, privacy-first",
5
5
  "main": "src/index.js",
6
6
  "keywords": [
@@ -18,16 +18,12 @@
18
18
  "author": "APIForge",
19
19
  "license": "MIT",
20
20
  "scripts": {
21
- "test": "node --test test/**/*.test.js",
22
- "test:smoke": "node --test test/smoke.test.js"
21
+ "tests": "node --test tests/**/*.test.js",
22
+ "test:smoke": "node --test tests/smoke.test.js"
23
23
  },
24
24
  "engines": {
25
25
  "node": ">=22.5.0"
26
26
  },
27
- "dependencies": {
28
- "react": "^18.0.0",
29
- "react-dom": "^18.0.0"
30
- },
31
27
  "peerDependencies": {
32
28
  "express": ">=4.0.0"
33
29
  },
package/src/aggregator.js CHANGED
@@ -33,6 +33,7 @@ class Aggregator {
33
33
  env: event.env,
34
34
  release: event.release,
35
35
  durations: [],
36
+ response_sizes: [],
36
37
  status_2xx: 0,
37
38
  status_4xx: 0,
38
39
  status_5xx: 0,
@@ -41,6 +42,7 @@ class Aggregator {
41
42
  }
42
43
 
43
44
  bucket.durations.push(event.duration_ms);
45
+ if (event.response_size != null) bucket.response_sizes.push(event.response_size);
44
46
 
45
47
  const s = event.status;
46
48
  if (s >= 200 && s < 300) bucket.status_2xx++;
@@ -58,6 +60,14 @@ class Aggregator {
58
60
  for (const bucket of this.buffer.values()) {
59
61
  const sorted = bucket.durations.slice().sort((a, b) => a - b);
60
62
  const n = sorted.length;
63
+ const sizes = bucket.response_sizes;
64
+ const bytes_avg = sizes.length > 0
65
+ ? sizes.reduce((a, b) => a + b, 0) / sizes.length
66
+ : null;
67
+
68
+ const lat_avg = n > 0
69
+ ? bucket.durations.reduce((a, b) => a + b, 0) / n
70
+ : null;
61
71
 
62
72
  rows.push({
63
73
  bucket_ts: bucketTs,
@@ -72,8 +82,10 @@ class Aggregator {
72
82
  lat_p50: percentile(sorted, 0.50),
73
83
  lat_p90: percentile(sorted, 0.90),
74
84
  lat_p99: percentile(sorted, 0.99),
85
+ lat_avg,
75
86
  lat_min: sorted[0] ?? 0,
76
87
  lat_max: sorted[n - 1] ?? 0,
88
+ bytes_avg,
77
89
  });
78
90
  }
79
91
 
@@ -0,0 +1,62 @@
1
'use strict';

// Circuit breaker: after FAILURE_THRESHOLD consecutive flush failures,
// stop sending for CIRCUIT_OPEN_MS so a dead backend cannot burden the host app.
const CIRCUIT_OPEN_MS = 60_000;
const FAILURE_THRESHOLD = 5;
// Abort a flush that makes no progress; without this a stalled backend keeps
// the request (and its payload) pending indefinitely and the breaker never trips.
const REQUEST_TIMEOUT_MS = 10_000;

class CloudTransport {
  /**
   * Fire-and-forget HTTP transport that ships aggregated metric rows to the
   * APIForge SaaS ingest endpoint. Delivery is best-effort: failures are
   * absorbed here and never propagate to the caller.
   *
   * @param {string} cloudUrl - Base URL of the SaaS API, e.g. 'https://api.apiforge.fr'
   * @param {string} apiKey - Project API key starting with 'af_'
   * @param {string} service - Service name passed to every metric row
   */
  constructor(cloudUrl, apiKey, service) {
    this._url = `${cloudUrl.replace(/\/$/, '')}/ingest`;
    this._apiKey = apiKey;
    this._service = service;
    this._failures = 0;   // consecutive failed flushes
    this._openUntil = 0;  // epoch ms until which the circuit stays open
  }

  /**
   * Sends one batch of aggregated rows. Rows are dropped, not queued, when
   * the batch is empty or the circuit is open — metrics are best-effort.
   *
   * @param {object[]} rows - Rows produced by the aggregator flush.
   */
  write(rows) {
    if (rows.length === 0) return;
    if (Date.now() < this._openUntil) return; // circuit open: drop silently

    const metrics = rows.map(r => ({
      route: r.route,
      method: r.method,
      service: this._service,
      env: r.env,
      release: r.release_tag ?? null,
      time: new Date(r.bucket_ts * 1000).toISOString(),
      calls_total: r.total_calls,
      calls_2xx: r.status_2xx,
      calls_4xx: r.status_4xx,
      calls_5xx: r.status_5xx,
      lat_p50: r.lat_p50 ?? null,
      lat_p90: r.lat_p90 ?? null,
      lat_p99: r.lat_p99 ?? null,
      lat_avg: r.lat_avg ?? null,
      bytes_avg: r.bytes_avg ?? null,
    }));

    // Deliberately not awaited: metric delivery must never block the app.
    fetch(this._url, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', 'X-API-Key': this._apiKey },
      body: JSON.stringify({ metrics }),
      // Fix: bound the request so a hung connection counts as a failure
      // instead of hanging forever (AbortSignal.timeout — Node >= 17.3,
      // well within the package's "node": ">=22.5.0" engines range).
      signal: AbortSignal.timeout(REQUEST_TIMEOUT_MS),
    })
      .then(res => {
        if (!res.ok) throw new Error(`HTTP ${res.status}`);
        this._failures = 0; // healthy again: reset the breaker
      })
      .catch(err => {
        this._failures++;
        if (this._failures >= FAILURE_THRESHOLD) {
          this._openUntil = Date.now() + CIRCUIT_OPEN_MS;
          this._failures = 0;
          console.warn(`[apiforgejs] Cloud flush failures — pausing for ${CIRCUIT_OPEN_MS / 1000}s. Error: ${err.message}`);
        }
      });
  }
}

module.exports = { CloudTransport };
package/src/database.js CHANGED
@@ -44,19 +44,23 @@ class ApiForgeDatabase {
44
44
  lat_p90 REAL,
45
45
  lat_p99 REAL,
46
46
  lat_min REAL,
47
- lat_max REAL
47
+ lat_max REAL,
48
+ bytes_avg REAL
48
49
  );
49
50
  CREATE INDEX IF NOT EXISTS idx_route_ts ON api_metrics (route, method, bucket_ts);
50
51
  CREATE INDEX IF NOT EXISTS idx_bucket_ts ON api_metrics (bucket_ts);
51
52
  CREATE INDEX IF NOT EXISTS idx_release ON api_metrics (release_tag) WHERE release_tag IS NOT NULL;
52
53
  `);
53
54
 
55
+ // Migration for databases created before bytes_avg was introduced
56
+ try { this.db.exec('ALTER TABLE api_metrics ADD COLUMN bytes_avg REAL'); } catch (_) {}
57
+
54
58
  this._stmtInsert = this.db.prepare(`
55
59
  INSERT INTO api_metrics
56
60
  (bucket_ts, route, method, env, release_tag,
57
61
  status_2xx, status_4xx, status_5xx, total_calls,
58
- lat_p50, lat_p90, lat_p99, lat_min, lat_max)
59
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
62
+ lat_p50, lat_p90, lat_p99, lat_min, lat_max, bytes_avg)
63
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
60
64
  `);
61
65
 
62
66
  this._begin = this.db.prepare('BEGIN');
@@ -71,7 +75,7 @@ class ApiForgeDatabase {
71
75
  this._stmtInsert.run(
72
76
  r.bucket_ts, r.route, r.method, r.env, r.release_tag ?? null,
73
77
  r.status_2xx, r.status_4xx, r.status_5xx, r.total_calls,
74
- r.lat_p50, r.lat_p90, r.lat_p99, r.lat_min, r.lat_max
78
+ r.lat_p50, r.lat_p90, r.lat_p99, r.lat_min, r.lat_max, r.bytes_avg ?? null
75
79
  );
76
80
  }
77
81
  this._commit.run();
@@ -131,7 +135,8 @@ class ApiForgeDatabase {
131
135
  AVG(lat_p50) as p50,
132
136
  AVG(lat_p90) as p90,
133
137
  AVG(lat_p99) as p99,
134
- MAX(lat_max) as lat_max
138
+ MAX(lat_max) as lat_max,
139
+ AVG(bytes_avg) as bytes_avg
135
140
  FROM api_metrics
136
141
  WHERE bucket_ts >= ?
137
142
  GROUP BY route, method
@@ -286,6 +291,21 @@ class ApiForgeDatabase {
286
291
  `).all();
287
292
  }
288
293
 
294
+ // Returns one row per (route, method, day) for the last 30 days, used by drift detection
295
+ getDriftData() {
296
+ const since30d = nowSec() - 30 * 86_400;
297
+ return this.db.prepare(`
298
+ SELECT
299
+ route, method,
300
+ CAST(bucket_ts / 86400 AS INTEGER) as day_bucket,
301
+ AVG(lat_p90) as p90
302
+ FROM api_metrics
303
+ WHERE bucket_ts >= ? AND lat_p90 IS NOT NULL
304
+ GROUP BY route, method, day_bucket
305
+ ORDER BY route, method, day_bucket
306
+ `).all(since30d);
307
+ }
308
+
289
309
  getGlobalTimeSeries(hours = 24) {
290
310
  const since = nowSec() - hours * 3600;
291
311
  return this.db.prepare(`
package/src/index.js CHANGED
@@ -1,49 +1,64 @@
1
1
  'use strict';
2
2
 
3
3
  const { createInterceptor } = require('./interceptor');
4
- const { Aggregator } = require('./aggregator');
5
- const { LocalTransport } = require('./transport');
6
- const { ApiForgeDatabase } = require('./database');
7
- const { startDashboard } = require('./dashboard');
4
+ const { Aggregator } = require('./aggregator');
5
+ const { LocalTransport } = require('./transport');
6
+ const { CloudTransport } = require('./cloud-transport');
7
+ const { ApiForgeDatabase } = require('./database');
8
+ const { startDashboard } = require('./dashboard');
8
9
 
9
10
  /**
10
11
  * APIForge Express middleware factory.
11
12
  *
12
- * @param {object} options
13
- * @param {'local'} options.mode - Storage mode. Only 'local' (SQLite) in v0.x.
14
- * @param {string} [options.dbPath] - SQLite file path. Default: '.apiforge.db'
15
- * @param {number} [options.dashboardPort] - Dashboard port. Default: 4242. Set to 0 to disable.
16
- * @param {number} [options.flushInterval] - Aggregation flush interval in ms. Default: 60000.
17
- * @param {string} [options.env] - Environment label. Default: NODE_ENV or 'production'.
18
- * @param {string} [options.release] - Release/version tag for deployment correlation.
19
- * @param {string} [options.service] - Service name for multi-service setups.
20
- * @param {number} [options.sampling] - Sample rate 0.0–1.0. Default: 1.0.
21
- * @param {string[]}[options.ignorePaths] - Paths to skip. Default: ['/favicon.ico'].
13
+ * @param {object} options
14
+ * @param {string} [options.mode] - 'local' (default) or 'cloud'.
15
+ * @param {string} [options.cloudUrl] - Cloud mode: SaaS API base URL.
16
+ * @param {string} [options.apiKey] - Cloud mode: project API key (af_…).
17
+ * @param {string} [options.dbPath] - Local mode: SQLite path. Default: '.apiforge.db'.
18
+ * @param {number} [options.dashboardPort] - Local mode: dashboard port. Default: 4242. 0 = off.
19
+ * @param {number} [options.flushInterval] - Flush interval in ms. Default: 60000.
20
+ * @param {string} [options.env] - Environment label. Default: NODE_ENV or 'production'.
21
+ * @param {string} [options.release] - Release/version tag.
22
+ * @param {string} [options.service] - Service name. Default: 'default'.
23
+ * @param {number} [options.sampling] - Sample rate 0.0–1.0. Default: 1.0.
24
+ * @param {string[]} [options.ignorePaths] - Paths to skip. Default: ['/favicon.ico'].
22
25
  */
23
26
  function apiforge(options = {}) {
24
- if (options.mode && options.mode !== 'local') {
25
- throw new Error(`[apiforgejs] mode '${options.mode}' is not yet supported. Use 'local'.`);
27
+ const hasCloudUrl = Boolean(options.cloudUrl);
28
+ const hasApiKey = Boolean(options.apiKey);
29
+ const isCloud = options.mode === 'cloud' || (hasCloudUrl && hasApiKey);
30
+
31
+ if ((hasCloudUrl || hasApiKey) && !(hasCloudUrl && hasApiKey)) {
32
+ throw new Error('[apiforgejs] Cloud mode requires both cloudUrl and apiKey options.');
26
33
  }
27
34
 
28
35
  const config = {
29
- mode: 'local',
30
- dbPath: options.dbPath ?? '.apiforge.db',
31
- dashboardPort: options.dashboardPort !== undefined ? options.dashboardPort : 4242,
36
+ mode: isCloud ? 'cloud' : 'local',
37
+ cloudUrl: options.cloudUrl ?? null,
38
+ apiKey: options.apiKey ?? null,
39
+ dbPath: options.dbPath ?? '.apiforge.db',
40
+ dashboardPort: isCloud ? 0 : (options.dashboardPort !== undefined ? options.dashboardPort : 4242),
32
41
  flushInterval: options.flushInterval ?? 60_000,
33
- env: options.env ?? process.env.NODE_ENV ?? 'production',
34
- release: options.release ?? process.env.APP_VERSION ?? null,
35
- service: options.service ?? 'default',
36
- sampling: options.sampling ?? 1.0,
37
- ignorePaths: options.ignorePaths ?? ['/favicon.ico'],
42
+ env: options.env ?? process.env.NODE_ENV ?? 'production',
43
+ release: options.release ?? process.env.APP_VERSION ?? null,
44
+ service: options.service ?? 'default',
45
+ sampling: options.sampling ?? 1.0,
46
+ ignorePaths: options.ignorePaths ?? ['/favicon.ico'],
38
47
  };
39
48
 
40
- const db = new ApiForgeDatabase(config.dbPath);
41
- const transport = new LocalTransport(db);
42
- const aggregator = new Aggregator(transport, config.flushInterval);
49
+ let transport, db;
43
50
 
51
+ if (isCloud) {
52
+ transport = new CloudTransport(config.cloudUrl, config.apiKey, config.service);
53
+ } else {
54
+ db = new ApiForgeDatabase(config.dbPath);
55
+ transport = new LocalTransport(db);
56
+ }
57
+
58
+ const aggregator = new Aggregator(transport, config.flushInterval);
44
59
  aggregator.start();
45
60
 
46
- if (config.dashboardPort) {
61
+ if (!isCloud && config.dashboardPort) {
47
62
  startDashboard(db, config.dashboardPort);
48
63
  }
49
64
 
@@ -51,7 +66,7 @@ function apiforge(options = {}) {
51
66
 
52
67
  middleware.shutdown = () => {
53
68
  aggregator.stop();
54
- db.close();
69
+ if (db) db.close();
55
70
  };
56
71
 
57
72
  return middleware;
package/src/insights.js CHANGED
@@ -1,27 +1,19 @@
1
1
  'use strict';
2
2
 
3
- const DEAD_ENDPOINT_DAYS = 21;
4
- const REGRESSION_THRESHOLD = 0.20; // 20% worse P90 triggers regression insight
5
- const ANOMALY_Z_THRESHOLD = 2.5; // Z-score threshold for latency anomaly
3
+ const DEAD_ENDPOINT_DAYS = 21;
4
+ const REGRESSION_THRESHOLD = 0.20; // 20% worse P90 triggers regression insight
5
+ const ANOMALY_Z_THRESHOLD = 2.5; // Z-score threshold for latency anomaly
6
+ const DRIFT_SLOPE_THRESHOLD = 5; // ms/day above which progressive drift is reported
7
+ const DRIFT_MIN_DAYS = 7; // minimum number of daily data points required
6
8
 
7
9
  function getInsights(db) {
8
10
  const insights = [];
9
11
 
10
- try {
11
- insights.push(...detectLatencyAnomalies(db));
12
- } catch (_) {}
13
-
14
- try {
15
- insights.push(...detectDeadEndpoints(db));
16
- } catch (_) {}
17
-
18
- try {
19
- insights.push(...detectReleaseRegressions(db));
20
- } catch (_) {}
21
-
22
- try {
23
- insights.push(...detectUntrackedRoutes(db));
24
- } catch (_) {}
12
+ try { insights.push(...detectLatencyAnomalies(db)); } catch (_) {}
13
+ try { insights.push(...detectDeadEndpoints(db)); } catch (_) {}
14
+ try { insights.push(...detectReleaseRegressions(db)); } catch (_) {}
15
+ try { insights.push(...detectUntrackedRoutes(db)); } catch (_) {}
16
+ try { insights.push(...detectDrift(db)); } catch (_) {}
25
17
 
26
18
  return insights;
27
19
  }
@@ -33,7 +25,7 @@ function detectUntrackedRoutes(db) {
33
25
  severity: 'info',
34
26
  route: r.route,
35
27
  method: r.method,
36
- message: `\`${r.method} ${r.route}\` est déclaré dans l'application mais n'a reçu aucune requête depuis le début du monitoring.`,
28
+ message: `\`${r.method} ${r.route}\` is declared but has received no requests since monitoring started.`,
37
29
  data: { first_seen_ts: r.first_seen },
38
30
  }));
39
31
  }
@@ -69,7 +61,7 @@ function detectLatencyAnomalies(db) {
69
61
  severity: 'warning',
70
62
  route: r.route,
71
63
  method: r.method,
72
- message: `La latence P99 de \`${r.method} ${r.route}\` est anormalement élevée cette heure (${fmt(r.avg_p99)} vs moyenne ${fmt(mean)} — Z-score ${z.toFixed(1)}).`,
64
+ message: `\`${r.method} ${r.route}\` P99 latency is abnormally high this hour (${fmt(r.avg_p99)} vs baseline ${fmt(mean)} — Z-score ${z.toFixed(1)}).`,
73
65
  data: { current_p99: r.avg_p99, baseline_p99: mean, z_score: z },
74
66
  });
75
67
  }
@@ -87,7 +79,7 @@ function detectDeadEndpoints(db) {
87
79
  severity: 'info',
88
80
  route: row.route,
89
81
  method: row.method,
90
- message: `\`${row.method} ${row.route}\` n'a reçu aucune requête depuis ${daysSince} jours. Candidat à la déprécation.`,
82
+ message: `\`${row.method} ${row.route}\` has received no requests in ${daysSince} days. Consider deprecating this endpoint.`,
91
83
  data: { last_seen_ts: row.last_seen, inactive_days: daysSince },
92
84
  };
93
85
  });
@@ -115,7 +107,7 @@ function detectReleaseRegressions(db) {
115
107
  severity: 'error',
116
108
  route: a.route,
117
109
  method: a.method,
118
- message: `La latence P90 de \`${a.method} ${a.route}\` a augmenté de ${pct(delta)} depuis le déploiement ${release_tag}. Avant : ${fmt(b.avg_p90)} — Après : ${fmt(a.avg_p90)}.`,
110
+ message: `\`${a.method} ${a.route}\` P90 increased by ${pct(delta)} after ${release_tag}. Before: ${fmt(b.avg_p90)} — After: ${fmt(a.avg_p90)}.`,
119
111
  data: {
120
112
  release: release_tag,
121
113
  before_p90: b.avg_p90,
@@ -129,7 +121,7 @@ function detectReleaseRegressions(db) {
129
121
  severity: 'success',
130
122
  route: a.route,
131
123
  method: a.method,
132
- message: `Le déploiement ${release_tag} a amélioré \`${a.method} ${a.route}\` de ${pct(-delta)}. Avant : ${fmt(b.avg_p90)} — Après : ${fmt(a.avg_p90)}.`,
124
+ message: `${release_tag} improved \`${a.method} ${a.route}\` by ${pct(-delta)}. Before: ${fmt(b.avg_p90)} — After: ${fmt(a.avg_p90)}.`,
133
125
  data: {
134
126
  release: release_tag,
135
127
  before_p90: b.avg_p90,
@@ -143,6 +135,53 @@ function detectReleaseRegressions(db) {
143
135
  return insights;
144
136
  }
145
137
 
138
// Detects endpoints whose daily P90 latency is trending steadily upward.
// Fits an ordinary-least-squares line over up to 30 days of daily P90
// averages and reports endpoints whose slope meets DRIFT_SLOPE_THRESHOLD.
function detectDrift(db) {
  const rows = db.getDriftData();
  if (rows.length === 0) return [];

  // Bucket the daily samples per endpoint, keyed "METHOD|route".
  const groups = new Map();
  for (const { method, route, day_bucket, p90 } of rows) {
    const key = `${method}|${route}`;
    let group = groups.get(key);
    if (!group) {
      group = { method, route, points: [] };
      groups.set(key, group);
    }
    group.points.push({ x: day_bucket, y: p90 });
  }

  const insights = [];
  for (const { method, route, points } of groups.values()) {
    // Need enough daily data points for the fit to be meaningful.
    if (points.length < DRIFT_MIN_DAYS) continue;

    // Least-squares fit of p90 against day index, relative to the first day.
    const base = points[0].x;
    let sumX = 0;
    let sumY = 0;
    let sumXY = 0;
    let sumX2 = 0;
    for (const { x, y } of points) {
      const dx = x - base;
      sumX += dx;
      sumY += y;
      sumXY += dx * y;
      sumX2 += dx * dx;
    }
    const n = points.length;
    const denom = n * sumX2 - sumX * sumX;
    if (denom === 0) continue; // all samples fall on the same day

    const slope = (n * sumXY - sumX * sumY) / denom;
    if (slope < DRIFT_SLOPE_THRESHOLD) continue; // not degrading fast enough

    // Span of observed days and a simple 30-day linear projection.
    const observedDays = points[points.length - 1].x - base;
    const projection30 = Math.round(slope * 30);

    insights.push({
      type: 'DRIFT',
      severity: 'warning',
      route,
      method,
      message: `\`${method} ${route}\` has been progressively degrading for ${observedDays} day${observedDays !== 1 ? 's' : ''}: +${slope.toFixed(1)}ms/day. 30-day projection: +${projection30}ms.`,
      data: { slope_ms_per_day: slope, observed_days: observedDays, projection_30d_ms: projection30 },
    });
  }

  return insights;
}
184
+
146
185
  function computeHealthScore(db) {
147
186
  try {
148
187
  const { recent, baseline, activeRoutes, totalRoutes } = db.getSummary();
package/src/ui.html CHANGED
@@ -1241,8 +1241,9 @@ function Insights({ setRoute, setParams }) {
1241
1241
 
1242
1242
  const types = [
1243
1243
  {id:'ALL',label:'All'},{id:'PERF',label:'Performance'},
1244
- {id:'ANOMALY',label:'Anomaly'},{id:'DEAD',label:'Dead'},
1245
- {id:'UNTRACKED',label:'Untracked'},{id:'OK',label:'OK'},
1244
+ {id:'DRIFT',label:'Drift'},{id:'ANOMALY',label:'Anomaly'},
1245
+ {id:'DEAD',label:'Dead'},{id:'UNTRACKED',label:'Untracked'},
1246
+ {id:'OK',label:'OK'},
1246
1247
  ];
1247
1248
  const filtered = INSIGHTS.filter(i =>
1248
1249
  (typeFilter === 'ALL' || i.type === typeFilter) &&