@neat.is/core 0.2.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/compat.json +120 -0
- package/dist/chunk-6JT6L2OV.js +164 -0
- package/dist/chunk-6JT6L2OV.js.map +1 -0
- package/dist/chunk-6SFEITLJ.js +3371 -0
- package/dist/chunk-6SFEITLJ.js.map +1 -0
- package/dist/chunk-I5IMCXRO.js +325 -0
- package/dist/chunk-I5IMCXRO.js.map +1 -0
- package/dist/chunk-T2U4U256.js +462 -0
- package/dist/chunk-T2U4U256.js.map +1 -0
- package/dist/chunk-WX55TLUT.js +184 -0
- package/dist/chunk-WX55TLUT.js.map +1 -0
- package/dist/chunk-XOOCA5T7.js +290 -0
- package/dist/chunk-XOOCA5T7.js.map +1 -0
- package/dist/cli.cjs +5754 -0
- package/dist/cli.cjs.map +1 -0
- package/dist/cli.d.cts +36 -0
- package/dist/cli.d.ts +36 -0
- package/dist/cli.js +1175 -0
- package/dist/cli.js.map +1 -0
- package/dist/index.cjs +4552 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +408 -0
- package/dist/index.d.ts +408 -0
- package/dist/index.js +93 -0
- package/dist/index.js.map +1 -0
- package/dist/neatd.cjs +3070 -0
- package/dist/neatd.cjs.map +1 -0
- package/dist/neatd.d.cts +1 -0
- package/dist/neatd.d.ts +1 -0
- package/dist/neatd.js +114 -0
- package/dist/neatd.js.map +1 -0
- package/dist/otel-grpc-B4XBSI4W.js +9 -0
- package/dist/otel-grpc-B4XBSI4W.js.map +1 -0
- package/dist/server.cjs +4499 -0
- package/dist/server.cjs.map +1 -0
- package/dist/server.d.cts +2 -0
- package/dist/server.d.ts +2 -0
- package/dist/server.js +97 -0
- package/dist/server.js.map +1 -0
- package/package.json +77 -0
- package/proto/opentelemetry/proto/collector/trace/v1/trace_service.proto +31 -0
- package/proto/opentelemetry/proto/common/v1/common.proto +46 -0
- package/proto/opentelemetry/proto/resource/v1/resource.proto +19 -0
- package/proto/opentelemetry/proto/trace/v1/trace.proto +93 -0
|
@@ -0,0 +1,3371 @@
|
|
|
1
|
+
// src/graph.ts
|
|
2
|
+
import GraphDefault from "graphology";
|
|
3
|
+
var MultiDirectedGraph = GraphDefault.MultiDirectedGraph;
var DEFAULT_PROJECT = "default";
// Lazily populated registry of one multigraph per project name.
var graphs = /* @__PURE__ */ new Map();
/** Create an empty directed multigraph (self-loops disallowed). */
function makeGraph() {
  const options = { allowSelfLoops: false };
  return new MultiDirectedGraph(options);
}
/** Fetch the graph for `project`, creating and caching it on first use. */
function getGraph(project = DEFAULT_PROJECT) {
  const existing = graphs.get(project);
  if (existing) return existing;
  const created = makeGraph();
  graphs.set(project, created);
  return created;
}
/** Drop one project's graph, or every cached graph when no project is given. */
function resetGraph(project) {
  if (project !== void 0) {
    graphs.delete(project);
  } else {
    graphs.clear();
  }
}
|
|
24
|
+
|
|
25
|
+
// src/compat.ts
|
|
26
|
+
import { promises as fs } from "fs";
|
|
27
|
+
import os from "os";
|
|
28
|
+
import path from "path";
|
|
29
|
+
import semver from "semver";
|
|
30
|
+
|
|
31
|
+
// compat.json
// Bundled compatibility matrix, inlined at build time. Each rule carries a
// human-readable `reason` that surfaces verbatim in violations/root-causes.
var compat_default = {
  // driver-engine: DB client libraries that are too old for a given
  // database engine version (evaluated by checkCompatibility).
  pairs: [
    {
      kind: "driver-engine",
      driver: "pg",
      engine: "postgresql",
      minDriverVersion: "8.0.0",
      minEngineVersion: "14",
      reason: "PostgreSQL 14+ requires scram-sha-256 auth by default; pg < 8.0.0 only speaks md5."
    },
    {
      kind: "driver-engine",
      driver: "mysql2",
      engine: "mysql",
      minDriverVersion: "3.0.0",
      minEngineVersion: "8",
      reason: "MySQL 8 defaults to caching_sha2_password; mysql2 < 3.0.0 doesn't negotiate it."
    },
    {
      kind: "driver-engine",
      driver: "mongoose",
      engine: "mongodb",
      minDriverVersion: "7.0.0",
      minEngineVersion: "7",
      reason: "MongoDB 7 drops legacy wire-protocol opcodes that mongoose < 7.0.0 still emits."
    },
    {
      kind: "driver-engine",
      driver: "psycopg2",
      engine: "postgresql",
      minDriverVersion: "2.9.0",
      minEngineVersion: "14",
      reason: "PostgreSQL 14+ requires scram-sha-256 auth by default; psycopg2 < 2.9.0 only speaks md5."
    },
    {
      kind: "driver-engine",
      driver: "pymongo",
      engine: "mongodb",
      minDriverVersion: "4.0.0",
      minEngineVersion: "7",
      reason: "MongoDB 7 drops legacy wire-protocol opcodes that pymongo < 4.0.0 still emits."
    },
    {
      kind: "driver-engine",
      driver: "mysql-connector-python",
      engine: "mysql",
      minDriverVersion: "8.0.0",
      minEngineVersion: "8",
      reason: "MySQL 8 defaults to caching_sha2_password; mysql-connector-python < 8.0.0 doesn't negotiate it."
    }
  ],
  // node-engine: npm packages that require a newer Node.js than a service
  // may declare in engines.node (evaluated by checkNodeEngineConstraint).
  nodeEngineConstraints: [
    {
      kind: "node-engine",
      package: "vitest",
      packageMinVersion: "2.0.0",
      minNodeVersion: "18.0.0",
      reason: "vitest >= 2.0 drops Node 16 support; requires Node 18+."
    },
    {
      kind: "node-engine",
      package: "next",
      packageMinVersion: "14.0.0",
      minNodeVersion: "18.17.0",
      reason: "Next 14+ requires Node 18.17+ (uses APIs introduced in that minor)."
    },
    {
      kind: "node-engine",
      package: "@modelcontextprotocol/sdk",
      packageMinVersion: "1.0.0",
      minNodeVersion: "18.0.0",
      reason: "@modelcontextprotocol/sdk >= 1 requires Node 18+ (web-streams polyfill removed)."
    }
  ],
  // package-conflict: peer-dependency minimums between two packages in the
  // same service (evaluated by checkPackageConflict).
  packageConflicts: [
    {
      kind: "package-conflict",
      package: "@tanstack/react-query",
      packageMinVersion: "5.0.0",
      requires: {
        name: "react",
        minVersion: "18.0.0"
      },
      reason: "@tanstack/react-query 5+ uses useSyncExternalStore \u2014 only available in React 18+."
    },
    {
      kind: "package-conflict",
      package: "react-router-dom",
      packageMinVersion: "7.0.0",
      requires: {
        name: "react",
        minVersion: "18.0.0"
      },
      reason: "react-router-dom 7+ requires React 18+."
    },
    {
      kind: "package-conflict",
      package: "next",
      packageMinVersion: "14.0.0",
      requires: {
        name: "react",
        minVersion: "18.2.0"
      },
      reason: "Next.js 14+ requires React 18.2+."
    }
  ],
  // deprecated-api: packages that should not be used at any version at or
  // below packageMaxVersion (evaluated by checkDeprecatedApi).
  deprecatedApis: [
    {
      kind: "deprecated-api",
      package: "request",
      packageMaxVersion: "2.88.2",
      reason: "request is deprecated; use undici, node-fetch, or axios instead."
    },
    {
      kind: "deprecated-api",
      package: "node-uuid",
      reason: "node-uuid is deprecated; use the `uuid` package."
    }
  ]
};
|
|
152
|
+
|
|
153
|
+
// src/compat.ts
// Matrix sources: `bundledMatrix` ships with the package; `mergedMatrix` is
// bundled + optional remote overlay once ensureCompatLoaded() has run.
var bundledMatrix = compat_default;
var mergedMatrix = null;
// Ensures the remote matrix is fetched at most once per process.
var remoteLoadAttempted = false;
// On-disk cache location for the remotely fetched matrix (~/.neat).
var REMOTE_CACHE_DIR = path.join(os.homedir(), ".neat");
var REMOTE_CACHE_PATH = path.join(REMOTE_CACHE_DIR, "compat-cache.json");
// Remote cache entries expire after 24 hours.
var REMOTE_TTL_MS = 24 * 60 * 60 * 1e3;
|
|
160
|
+
/**
 * True when `engineVersion` is at or above `threshold`.
 * Tries a plain leading-integer comparison first (engine versions like
 * "14" or "8.0.32"); falls back to semver coercion, and to `false` when
 * neither side can be interpreted.
 */
function engineMeetsThreshold(engineVersion, threshold) {
  const engineMajor = parseInt(engineVersion, 10);
  const thresholdMajor = parseInt(threshold, 10);
  if (Number.isFinite(engineMajor) && Number.isFinite(thresholdMajor)) {
    return engineMajor >= thresholdMajor;
  }
  const coercedEngine = semver.coerce(engineVersion);
  const coercedThreshold = semver.coerce(threshold);
  if (!coercedEngine || !coercedThreshold) return false;
  return semver.gte(coercedEngine, coercedThreshold);
}
|
|
169
|
+
/**
 * Evaluate a driver/engine combination against the active matrix.
 * Returns { compatible: true } unless a matching pair exists, the engine
 * is at/above the pair's threshold, and the driver's declared version is
 * below minDriverVersion — in which case reason + minDriverVersion are
 * included in the result.
 */
function checkCompatibility(driver, driverVersion, engine, engineVersion) {
  const matrix = currentMatrix();
  const pair = matrix.pairs.find((entry) => entry.driver === driver && entry.engine === engine);
  if (!pair) return { compatible: true };
  // The rule only applies to engines at/above the threshold version.
  if (pair.minEngineVersion && !engineMeetsThreshold(engineVersion, pair.minEngineVersion)) {
    return { compatible: true };
  }
  const parsedDriver = semver.coerce(driverVersion);
  // Unparseable driver versions are given the benefit of the doubt.
  if (!parsedDriver) return { compatible: true };
  if (!semver.lt(parsedDriver, pair.minDriverVersion)) {
    return { compatible: true };
  }
  return {
    compatible: false,
    reason: pair.reason,
    minDriverVersion: pair.minDriverVersion
  };
}
|
|
187
|
+
/**
 * Does the service's declared Node range (e.g. ">=16") stay at or above
 * the required minimum? Uses semver.subset(range, ">=min"); errs on the
 * side of `true` whenever semver cannot decide.
 */
function rangeAdmitsVersion(serviceNodeRange, requiredNodeVersion) {
  try {
    const minimum = semver.coerce(requiredNodeVersion);
    if (!minimum) return true;
    const options = { includePrerelease: false };
    return semver.subset(serviceNodeRange, `>=${minimum.version}`, options);
  } catch {
    // Unparseable range: assume compatible rather than raise noise.
    return true;
  }
}
|
|
198
|
+
/**
 * Check one node-engine rule against a service.
 * The rule fires only when the declared package version is at/above the
 * rule's packageMinVersion AND the service's declared Node range admits
 * versions below the required minimum. Missing data always passes.
 */
function checkNodeEngineConstraint(constraint, declaredPackageVersion, serviceNodeRange) {
  if (constraint.packageMinVersion && declaredPackageVersion) {
    const declared = semver.coerce(declaredPackageVersion);
    // Package versions below the rule's floor predate the constraint.
    if (declared && semver.lt(declared, constraint.packageMinVersion)) {
      return { compatible: true };
    }
  }
  // No declared Node range: nothing to validate against.
  if (!serviceNodeRange) {
    return { compatible: true };
  }
  const rangeOk = rangeAdmitsVersion(serviceNodeRange, constraint.minNodeVersion);
  if (rangeOk) {
    return { compatible: true };
  }
  return {
    compatible: false,
    reason: constraint.reason,
    requiredNodeVersion: constraint.minNodeVersion
  };
}
|
|
217
|
+
/**
 * Check a peer-dependency conflict rule for one service.
 * Fires when the service declares `conflict.package` at/above
 * packageMinVersion while the required peer (`conflict.requires.name`) is
 * either missing entirely or older than the peer's minVersion.
 */
function checkPackageConflict(conflict, declaredPackageVersion, declaredRequiredVersion) {
  if (!declaredPackageVersion) return { compatible: true };
  if (conflict.packageMinVersion) {
    const declared = semver.coerce(declaredPackageVersion);
    // Package versions below the rule's floor predate the conflict.
    if (declared && semver.lt(declared, conflict.packageMinVersion)) {
      return { compatible: true };
    }
  }
  // Peer not declared at all: report the conflict without a found version.
  if (!declaredRequiredVersion) {
    return {
      compatible: false,
      reason: conflict.reason,
      requires: conflict.requires
    };
  }
  const peer = semver.coerce(declaredRequiredVersion);
  // Unparseable peer version: give the benefit of the doubt.
  if (!peer) return { compatible: true };
  if (semver.gte(peer, conflict.requires.minVersion)) {
    return { compatible: true };
  }
  return {
    compatible: false,
    reason: conflict.reason,
    requires: conflict.requires,
    foundVersion: declaredRequiredVersion
  };
}
|
|
244
|
+
/**
 * Flag a declared dependency on a deprecated package.
 * With packageMaxVersion set, versions strictly above it are treated as
 * fixed; otherwise every declared version is flagged.
 */
function checkDeprecatedApi(rule, declaredVersion) {
  if (declaredVersion === void 0) return { compatible: true };
  if (rule.packageMaxVersion) {
    const declared = semver.coerce(declaredVersion);
    const ceiling = semver.coerce(rule.packageMaxVersion);
    const aboveCeiling = declared && ceiling && semver.gt(declared, ceiling);
    if (aboveCeiling) return { compatible: true };
  }
  return { compatible: false, reason: rule.reason };
}
|
|
253
|
+
/**
 * Active matrix: the merged (bundled + remote) set once loaded, otherwise
 * the bundled one.
 */
function currentMatrix() {
  if (mergedMatrix != null) return mergedMatrix;
  return bundledMatrix;
}
|
|
256
|
+
/**
 * Concatenate two compat matrices field by field.
 * Every rule family is optional on BOTH sides: previously `a.pairs` was
 * spread unguarded (unlike every other field), so a matrix without `pairs`
 * threw a TypeError. Missing arrays now merge as empty.
 */
function mergeMatrices(a, b) {
  return {
    pairs: [...a.pairs ?? [], ...b.pairs ?? []],
    nodeEngineConstraints: [
      ...a.nodeEngineConstraints ?? [],
      ...b.nodeEngineConstraints ?? []
    ],
    packageConflicts: [...a.packageConflicts ?? [], ...b.packageConflicts ?? []],
    deprecatedApis: [...a.deprecatedApis ?? [], ...b.deprecatedApis ?? []]
  };
}
|
|
267
|
+
/**
 * Read the on-disk remote-matrix cache.
 * Returns the cached matrix only when the entry was written for the same
 * URL, carries a valid timestamp, and is younger than REMOTE_TTL_MS; any
 * read/parse problem yields null (caller falls back to fetching).
 */
async function readRemoteCache(url) {
  try {
    const raw = await fs.readFile(REMOTE_CACHE_PATH, "utf8");
    const parsed = JSON.parse(raw);
    if (parsed.url !== url) return null;
    const fetchedAt = new Date(parsed.fetchedAt).getTime();
    // A missing/garbled timestamp used to produce a NaN age, which passed
    // the TTL comparison and served a stale entry forever — treat it as
    // expired instead.
    if (!Number.isFinite(fetchedAt)) return null;
    const age = Date.now() - fetchedAt;
    if (age > REMOTE_TTL_MS) return null;
    return parsed.matrix;
  } catch {
    return null;
  }
}
|
|
279
|
+
/**
 * Persist a freshly fetched remote matrix to the ~/.neat cache file.
 * Best-effort: failures are logged as a warning and never propagate.
 */
async function writeRemoteCache(url, matrix) {
  const file = {
    fetchedAt: (/* @__PURE__ */ new Date()).toISOString(),
    url,
    matrix
  };
  try {
    await fs.mkdir(REMOTE_CACHE_DIR, { recursive: true });
    await fs.writeFile(REMOTE_CACHE_PATH, JSON.stringify(file), "utf8");
  } catch (err) {
    // `err` is not guaranteed to be an Error (a thrown string would have
    // logged `undefined` before); normalize it for the warning.
    const detail = err instanceof Error ? err.message : String(err);
    console.warn(`[neat] failed to cache compat matrix: ${detail}`);
  }
}
|
|
292
|
+
/**
 * Resolve the effective compat matrix, attempting the remote overlay at
 * most once per process. Resolution order:
 *   1. Already merged              -> return it.
 *   2. A previous attempt ran      -> bundled only (no retry this process).
 *   3. NEAT_COMPAT_URL unset       -> bundled only.
 *   4. Fresh disk cache for URL    -> bundled + cached overlay.
 *   5. fetch(url) succeeds         -> cache to disk, bundled + remote.
 *   6. fetch fails                 -> warn, bundled only.
 */
async function ensureCompatLoaded() {
  if (mergedMatrix) return mergedMatrix;
  // A previous call already tried (and skipped/failed); don't retry.
  if (remoteLoadAttempted) {
    mergedMatrix = bundledMatrix;
    return mergedMatrix;
  }
  remoteLoadAttempted = true;
  const url = process.env.NEAT_COMPAT_URL;
  if (!url) {
    mergedMatrix = bundledMatrix;
    return mergedMatrix;
  }
  // Prefer the on-disk cache (same URL, within TTL) over a network fetch.
  const cached = await readRemoteCache(url);
  if (cached) {
    mergedMatrix = mergeMatrices(bundledMatrix, cached);
    return mergedMatrix;
  }
  try {
    const res = await fetch(url);
    if (!res.ok) throw new Error(`${res.status} ${res.statusText}`);
    // NOTE(review): the response body is merged as a matrix without schema
    // validation — confirm the remote source is trusted/controlled.
    const remote = await res.json();
    await writeRemoteCache(url, remote);
    mergedMatrix = mergeMatrices(bundledMatrix, remote);
    return mergedMatrix;
  } catch (err) {
    // Network/HTTP failure: degrade to the bundled matrix with a warning.
    console.warn(
      `[neat] NEAT_COMPAT_URL fetch failed (${err.message}); using bundled matrix only`
    );
    mergedMatrix = bundledMatrix;
    return mergedMatrix;
  }
}
|
|
324
|
+
/** Driver/engine compatibility pairs from the active matrix. */
function compatPairs() {
  const matrix = currentMatrix();
  return matrix.pairs;
}
/** Node-engine rules; empty when the active matrix omits them. */
function nodeEngineConstraints() {
  const { nodeEngineConstraints: rules } = currentMatrix();
  return rules ?? [];
}
/** Package-conflict rules; empty when the active matrix omits them. */
function packageConflicts() {
  const { packageConflicts: rules } = currentMatrix();
  return rules ?? [];
}
/** Deprecated-API rules; empty when the active matrix omits them. */
function deprecatedApis() {
  const { deprecatedApis: rules } = currentMatrix();
  return rules ?? [];
}
|
|
336
|
+
|
|
337
|
+
// src/traverse.ts
|
|
338
|
+
import {
|
|
339
|
+
BlastRadiusResultSchema,
|
|
340
|
+
NodeType,
|
|
341
|
+
PROV_RANK,
|
|
342
|
+
Provenance,
|
|
343
|
+
RootCauseResultSchema,
|
|
344
|
+
TransitiveDependenciesResultSchema
|
|
345
|
+
} from "@neat.is/types";
|
|
346
|
+
// Cap on how far getRootCause walks upstream from the failing node.
var ROOT_CAUSE_MAX_DEPTH = 5;
// Default hop limit for getBlastRadius when the caller passes none.
var BLAST_RADIUS_DEFAULT_DEPTH = 10;
|
|
348
|
+
/**
 * For each distinct edge source among `edgeIds`, keep the single edge with
 * the highest-ranked provenance. FRONTIER (speculative) edges are skipped.
 */
function bestEdgeBySource(graph, edgeIds) {
  const bySource = /* @__PURE__ */ new Map();
  for (const edgeId of edgeIds) {
    const attrs = graph.getEdgeAttributes(edgeId);
    if (attrs.provenance === Provenance.FRONTIER) continue;
    const incumbent = bySource.get(attrs.source);
    const better = !incumbent || PROV_RANK[attrs.provenance] > PROV_RANK[incumbent.provenance];
    if (better) {
      bySource.set(attrs.source, attrs);
    }
  }
  return bySource;
}
|
|
360
|
+
/**
 * For each distinct edge target among `edgeIds`, keep the single edge with
 * the highest-ranked provenance. FRONTIER (speculative) edges are skipped.
 */
function bestEdgeByTarget(graph, edgeIds) {
  const byTarget = /* @__PURE__ */ new Map();
  for (const edgeId of edgeIds) {
    const attrs = graph.getEdgeAttributes(edgeId);
    if (attrs.provenance === Provenance.FRONTIER) continue;
    const incumbent = byTarget.get(attrs.target);
    const better = !incumbent || PROV_RANK[attrs.provenance] > PROV_RANK[incumbent.provenance];
    if (better) {
      byTarget.set(attrs.target, attrs);
    }
  }
  return byTarget;
}
|
|
372
|
+
// Upper bound on per-edge confidence by provenance class: directly observed
// telemetry may reach 1.0, while stale/frontier edges are capped at 0.3.
var PROVENANCE_CEILING = {
  OBSERVED: 1,
  INFERRED: 0.7,
  EXTRACTED: 0.5,
  STALE: 0.3,
  FRONTIER: 0.3
};
|
|
379
|
+
/**
 * Confidence factor from traffic volume: 0.5 with no spans, rising
 * logarithmically and saturating at 1 (around 1000 spans).
 */
function volumeWeight(spanCount) {
  if (!spanCount || spanCount <= 0) return 0.5;
  const raw = 0.5 + Math.log10(spanCount + 1) / 3;
  return raw < 1 ? raw : 1;
}
|
|
384
|
+
/**
 * Confidence factor from edge age: 1 within the last hour, decaying
 * linearly to 0.5 at 24h, then a flat floor of 0.3. Unknown age -> 0.8.
 */
function recencyWeight(ageMs) {
  if (ageMs === void 0) return 0.8;
  const HOUR_MS = 60 * 60 * 1e3;
  const DAY_MS = 24 * HOUR_MS;
  if (ageMs <= HOUR_MS) return 1;
  if (ageMs > DAY_MS) return 0.3;
  const fraction = (ageMs - HOUR_MS) / (23 * HOUR_MS);
  return 1 - 0.5 * fraction;
}
|
|
394
|
+
/**
 * Confidence factor from error rate: 1 at or below 1% errors, a 0.3 floor
 * at >= 50%, linear (1 - 1.4 * rate) in between. No traffic -> neutral 1.
 */
function cleanlinessWeight(spanCount, errorCount) {
  if (!spanCount || spanCount <= 0) return 1;
  const errorRate = (errorCount ?? 0) / spanCount;
  if (errorRate <= 0.01) return 1;
  if (errorRate >= 0.5) return 0.3;
  return 1 - errorRate * 1.4;
}
|
|
401
|
+
/**
 * Confidence score in [0, 1] for a single edge: the provenance ceiling
 * scaled by volume, recency, and error-cleanliness factors. Edges with no
 * telemetry at all score the bare ceiling.
 */
function confidenceForEdge(edge, now = Date.now()) {
  const ceiling = PROVENANCE_CEILING[edge.provenance] ?? 0.5;
  const spanCount = edge.signal?.spanCount ?? edge.callCount;
  const ageMs = edge.signal?.lastObservedAgeMs ?? lastObservedAge(edge, now);
  const hasAnySignal = spanCount !== void 0 || ageMs !== void 0 || edge.signal !== void 0;
  if (!hasAnySignal) return ceiling;
  const score = ceiling * volumeWeight(spanCount) * recencyWeight(ageMs) * cleanlinessWeight(spanCount, edge.signal?.errorCount);
  return Math.max(0, Math.min(1, score));
}
|
|
413
|
+
/**
 * Age in ms of an edge's `lastObserved` ISO timestamp relative to `now`,
 * clamped at 0; undefined when the field is absent or unparseable.
 */
function lastObservedAge(edge, now) {
  const stamp = edge.lastObserved;
  if (!stamp) return void 0;
  const observedAt = Date.parse(stamp);
  if (!Number.isFinite(observedAt)) return void 0;
  const age = now - observedAt;
  return age > 0 ? age : 0;
}
|
|
419
|
+
/**
 * Combined confidence of a path: the product of per-edge confidences,
 * clamped to [0, 1]. An empty path is fully trusted (1).
 */
function confidenceFromMix(edges, now = Date.now()) {
  if (edges.length === 0) return 1;
  const product = edges.reduce((acc, edge) => acc * confidenceForEdge(edge, now), 1);
  return Math.max(0, Math.min(1, product));
}
|
|
427
|
+
/**
 * Depth-first search upstream from `start`, following at most one
 * (best-provenance) incoming edge per neighbor, returning the longest
 * simple path found as { path: nodeIds, edges: edgesWalked }.
 * Depth is capped at `maxDepth` hops; FRONTIER edges never appear because
 * bestEdgeBySource filters them out.
 */
function longestIncomingWalk(graph, start, maxDepth) {
  let best = { path: [start], edges: [] };
  // Nodes on the current DFS branch — guards against cycles.
  const visited = /* @__PURE__ */ new Set([start]);
  function step(node, path29, edges) {
    // Record a new best whenever the current branch extends past it.
    if (path29.length > best.path.length) {
      best = { path: [...path29], edges: [...edges] };
    }
    if (path29.length - 1 >= maxDepth) return;
    const incoming = bestEdgeBySource(graph, graph.inboundEdges(node));
    for (const [srcId, edge] of incoming) {
      if (visited.has(srcId)) continue;
      // Standard backtracking: push, recurse, pop — the same arrays are
      // reused across the whole search, hence the copies above.
      visited.add(srcId);
      path29.push(srcId);
      edges.push(edge);
      step(srcId, path29, edges);
      path29.pop();
      edges.pop();
      visited.delete(srcId);
    }
  }
  step(start, [start], []);
  return best;
}
|
|
450
|
+
/**
 * Root-cause shape for database errors: walk the upstream path looking for
 * a service whose declared driver is too old for this database's engine.
 * Returns a match descriptor or null when nothing on the path explains it.
 */
function databaseRootCauseShape(graph, origin, walk) {
  const targetDb = origin;
  const relevantPairs = compatPairs().filter((pair) => pair.engine === targetDb.engine);
  if (relevantPairs.length === 0) return null;
  for (const nodeId of walk.path) {
    const attrs = graph.getNodeAttributes(nodeId);
    if (attrs.type !== NodeType.ServiceNode) continue;
    const svc = attrs;
    const deps = svc.dependencies ?? {};
    for (const pair of relevantPairs) {
      const declaredVersion = deps[pair.driver];
      if (!declaredVersion) continue;
      const verdict = checkCompatibility(
        pair.driver,
        declaredVersion,
        targetDb.engine,
        targetDb.engineVersion
      );
      if (verdict.compatible) continue;
      const match = {
        rootCauseNode: nodeId,
        rootCauseReason: verdict.reason ?? "incompatible driver"
      };
      if (verdict.minDriverVersion) {
        match.fixRecommendation = `Upgrade ${svc.name} ${pair.driver} driver to >= ${verdict.minDriverVersion}`;
      }
      return match;
    }
  }
  return null;
}
|
|
481
|
+
/**
 * Root-cause shape for service errors: scan services on the upstream path
 * for (a) packages whose Node-engine requirement the service's declared
 * range violates, then (b) peer-package conflicts. First hit wins.
 */
function serviceRootCauseShape(graph, _origin, walk) {
  for (const nodeId of walk.path) {
    const attrs = graph.getNodeAttributes(nodeId);
    if (attrs.type !== NodeType.ServiceNode) continue;
    const svc = attrs;
    const deps = svc.dependencies ?? {};
    const serviceNodeEngine = svc.nodeEngine;
    for (const constraint of nodeEngineConstraints()) {
      const declared = deps[constraint.package];
      if (!declared) continue;
      const verdict = checkNodeEngineConstraint(constraint, declared, serviceNodeEngine);
      if (verdict.compatible || !verdict.reason) continue;
      const match = {
        rootCauseNode: nodeId,
        rootCauseReason: verdict.reason
      };
      if (verdict.requiredNodeVersion) {
        match.fixRecommendation = `Bump ${svc.name}'s engines.node to >= ${verdict.requiredNodeVersion}`;
      }
      return match;
    }
    for (const conflict of packageConflicts()) {
      const declared = deps[conflict.package];
      if (!declared) continue;
      const verdict = checkPackageConflict(conflict, declared, deps[conflict.requires.name]);
      if (verdict.compatible || !verdict.reason) continue;
      return {
        rootCauseNode: nodeId,
        rootCauseReason: verdict.reason,
        fixRecommendation: `Upgrade ${svc.name}'s ${conflict.requires.name} to >= ${conflict.requires.minVersion}`
      };
    }
  }
  return null;
}
|
|
518
|
+
// Dispatch table: picks the root-cause heuristic by the error node's type.
// Node types without an entry yield no root cause (getRootCause returns null).
var rootCauseShapes = {
  [NodeType.DatabaseNode]: databaseRootCauseShape,
  [NodeType.ServiceNode]: serviceRootCauseShape
};
|
|
522
|
+
/**
 * Find the most likely root cause for an error observed at `errorNodeId`.
 * Walks upstream (bounded by ROOT_CAUSE_MAX_DEPTH), applies the node-type
 * specific shape, and returns a schema-validated result — or null when the
 * node is unknown, has no shape, or no match is found.
 */
function getRootCause(graph, errorNodeId, errorEvent) {
  if (!graph.hasNode(errorNodeId)) return null;
  const origin = graph.getNodeAttributes(errorNodeId);
  const shape = rootCauseShapes[origin.type];
  if (!shape) return null;
  const walk = longestIncomingWalk(graph, errorNodeId, ROOT_CAUSE_MAX_DEPTH);
  const match = shape(graph, origin, walk);
  if (!match) return null;
  let reason = match.rootCauseReason;
  if (errorEvent) {
    // Append the concrete observed error for context.
    reason = `${match.rootCauseReason} (observed error: ${errorEvent.errorMessage})`;
  }
  return RootCauseResultSchema.parse({
    rootCauseNode: match.rootCauseNode,
    rootCauseReason: reason,
    traversalPath: walk.path,
    edgeProvenances: walk.edges.map((edge) => edge.provenance),
    confidence: confidenceFromMix(walk.edges),
    fixRecommendation: match.fixRecommendation
  });
}
|
|
540
|
+
/**
 * Breadth-first downstream reachability from `nodeId`, up to `maxDepth`
 * hops. For each reachable node it records the first-found (i.e. shortest)
 * path, the provenance of the final edge on that path, and a combined path
 * confidence; the origin itself is excluded. Returns a schema-validated
 * result; an unknown origin yields an empty result rather than an error.
 */
function getBlastRadius(graph, nodeId, maxDepth = BLAST_RADIUS_DEFAULT_DEPTH) {
  if (!graph.hasNode(nodeId)) {
    return BlastRadiusResultSchema.parse({ origin: nodeId, affectedNodes: [], totalAffected: 0 });
  }
  const seen = /* @__PURE__ */ new Map();
  const queue = [{ nodeId, distance: 0, path: [nodeId], pathEdges: [] }];
  // `enqueued` ensures each node is visited once; BFS order makes the
  // recorded distance the shortest hop count.
  const enqueued = /* @__PURE__ */ new Set([nodeId]);
  while (queue.length > 0) {
    const frame = queue.shift();
    // distance > 0 skips the origin frame.
    if (frame.distance > 0 && frame.pathEdges.length > 0) {
      const lastEdge = frame.pathEdges[frame.pathEdges.length - 1];
      seen.set(frame.nodeId, {
        nodeId: frame.nodeId,
        distance: frame.distance,
        edgeProvenance: lastEdge.provenance,
        path: frame.path,
        confidence: confidenceFromMix(frame.pathEdges)
      });
    }
    if (frame.distance >= maxDepth) continue;
    // One best (highest-provenance, non-FRONTIER) edge per downstream node.
    const outgoing = bestEdgeByTarget(graph, graph.outboundEdges(frame.nodeId));
    for (const [tgtId, edge] of outgoing) {
      if (enqueued.has(tgtId)) continue;
      enqueued.add(tgtId);
      queue.push({
        nodeId: tgtId,
        distance: frame.distance + 1,
        path: [...frame.path, tgtId],
        pathEdges: [...frame.pathEdges, edge]
      });
    }
  }
  // Stable ordering: nearest first, ties broken lexicographically by id.
  const affectedNodes = [...seen.values()].sort(
    (a, b) => a.distance - b.distance || a.nodeId.localeCompare(b.nodeId)
  );
  return BlastRadiusResultSchema.parse({
    origin: nodeId,
    affectedNodes,
    totalAffected: affectedNodes.length
  });
}
|
|
581
|
+
// Default traversal depth for getTransitiveDependencies.
var TRANSITIVE_DEPENDENCIES_DEFAULT_DEPTH = 3;
// Intended hard ceiling for the depth parameter — not enforced anywhere in
// this chunk; presumably clamped by callers (TODO confirm).
var TRANSITIVE_DEPENDENCIES_MAX_DEPTH = 10;
|
|
583
|
+
/**
 * Downstream dependency closure of `nodeId` up to `depth` hops (BFS).
 * Each dependency records its hop distance plus the type/provenance of the
 * edge it was discovered through; the origin itself is excluded. Returns a
 * schema-validated result; an unknown origin yields an empty result.
 */
function getTransitiveDependencies(graph, nodeId, depth = TRANSITIVE_DEPENDENCIES_DEFAULT_DEPTH) {
  if (!graph.hasNode(nodeId)) {
    return TransitiveDependenciesResultSchema.parse({
      origin: nodeId,
      depth,
      dependencies: [],
      total: 0
    });
  }
  const seen = /* @__PURE__ */ new Map();
  const queue = [{ nodeId, distance: 0, edge: null }];
  // Visit each node once; BFS order makes `distance` the shortest hop count.
  const enqueued = /* @__PURE__ */ new Set([nodeId]);
  while (queue.length > 0) {
    const frame = queue.shift();
    // distance > 0 skips the origin frame (its edge is null).
    if (frame.distance > 0 && frame.edge) {
      seen.set(frame.nodeId, {
        nodeId: frame.nodeId,
        distance: frame.distance,
        edgeType: frame.edge.type,
        provenance: frame.edge.provenance
      });
    }
    if (frame.distance >= depth) continue;
    // One best (highest-provenance, non-FRONTIER) edge per downstream node.
    const outgoing = bestEdgeByTarget(graph, graph.outboundEdges(frame.nodeId));
    for (const [tgtId, edge] of outgoing) {
      if (enqueued.has(tgtId)) continue;
      enqueued.add(tgtId);
      queue.push({ nodeId: tgtId, distance: frame.distance + 1, edge });
    }
  }
  // Stable ordering: nearest first, ties broken lexicographically by id.
  const dependencies = [...seen.values()].sort(
    (a, b) => a.distance - b.distance || a.nodeId.localeCompare(b.nodeId)
  );
  return TransitiveDependenciesResultSchema.parse({
    origin: nodeId,
    depth,
    dependencies,
    total: dependencies.length
  });
}
|
|
623
|
+
|
|
624
|
+
// src/ingest.ts
|
|
625
|
+
import { promises as fs3 } from "fs";
|
|
626
|
+
import path3 from "path";
|
|
627
|
+
|
|
628
|
+
// src/policy.ts
|
|
629
|
+
import { promises as fs2 } from "fs";
|
|
630
|
+
import path2 from "path";
|
|
631
|
+
import {
|
|
632
|
+
EdgeType,
|
|
633
|
+
NodeType as NodeType2,
|
|
634
|
+
PolicyFileSchema,
|
|
635
|
+
Provenance as Provenance2
|
|
636
|
+
} from "@neat.is/types";
|
|
637
|
+
// Fallback violation action per severity, used when a policy does not set
// `onViolation` explicitly (see resolveOnViolation).
var DEFAULT_ACTION_BY_SEVERITY = {
  info: "log",
  warning: "alert",
  error: "alert",
  critical: "block"
};
|
|
643
|
+
/** Effective action for a policy: explicit onViolation, else the severity default. */
function resolveOnViolation(policy) {
  if (policy.onViolation != null) return policy.onViolation;
  return DEFAULT_ACTION_BY_SEVERITY[policy.severity];
}
/**
 * Build a violation record for `policy`/`rule`.
 * `contextSuffix` disambiguates multiple violations of the same policy,
 * `subject` pins the offending node/edge, and `ctx.now()` supplies the
 * clock so evaluation stays deterministic under test.
 */
function makeViolation(policy, rule, contextSuffix, message, subject, ctx) {
  const observedAt = new Date(ctx.now()).toISOString();
  return {
    id: `${policy.id}:${contextSuffix}`,
    policyId: policy.id,
    policyName: policy.name,
    severity: policy.severity,
    onViolation: resolveOnViolation(policy),
    ruleType: rule.type,
    subject,
    message,
    observedAt
  };
}
|
|
659
|
+
/**
 * Structural rule: every node of `rule.fromNodeType` must have at least one
 * non-FRONTIER edge of `rule.edgeType` pointing at a `rule.toNodeType` node.
 */
var evaluateStructural = ({
  graph,
  policy,
  rule,
  ctx
}) => {
  const violations = [];
  // Does `id` have a qualifying edge to a node of the required type?
  const satisfiesRule = (id) => {
    for (const edgeId of graph.outboundEdges(id)) {
      const edge = graph.getEdgeAttributes(edgeId);
      if (edge.type !== rule.edgeType) continue;
      if (edge.provenance === Provenance2.FRONTIER) continue;
      const target = graph.getNodeAttributes(edge.target);
      if (target.type === rule.toNodeType) return true;
    }
    return false;
  };
  graph.forEachNode((id, attrs) => {
    if (attrs.type !== rule.fromNodeType) return;
    if (satisfiesRule(id)) return;
    const message = `${rule.fromNodeType} ${id} has no ${rule.edgeType} edge to a ${rule.toNodeType}`;
    violations.push(makeViolation(policy, rule, id, message, { nodeId: id }, ctx));
  });
  return violations;
};
|
|
695
|
+
/**
 * Ownership rule: every node of `rule.nodeType` must carry a non-empty
 * string in `rule.field` (e.g. an owner/team annotation).
 */
var evaluateOwnership = ({
  graph,
  policy,
  rule,
  ctx
}) => {
  const violations = [];
  graph.forEachNode((id, attrs) => {
    if (attrs.type !== rule.nodeType) return;
    const fieldValue = attrs[rule.field];
    const present = typeof fieldValue === "string" && fieldValue.length > 0;
    if (present) return;
    const message = `${rule.nodeType} ${id} is missing required field "${rule.field}"`;
    violations.push(makeViolation(policy, rule, id, message, { nodeId: id }, ctx));
  });
  return violations;
};
|
|
721
|
+
/**
 * Provenance rule: every `rule.edgeType` edge (optionally narrowed to one
 * target node) must carry one of the required provenance values.
 */
var evaluateProvenance = ({
  graph,
  policy,
  rule,
  ctx
}) => {
  const allowed = new Set(Array.isArray(rule.required) ? rule.required : [rule.required]);
  const violations = [];
  graph.forEachEdge((edgeId, attrs) => {
    if (attrs.type !== rule.edgeType) return;
    if (rule.targetNodeId && attrs.target !== rule.targetNodeId) return;
    if (allowed.has(attrs.provenance)) return;
    const allowedList = [...allowed].join(" | ");
    const message = `${rule.edgeType} edge ${edgeId} has provenance ${attrs.provenance}; required ${allowedList}`;
    violations.push(makeViolation(policy, rule, edgeId, message, { edgeId }, ctx));
  });
  return violations;
};
|
|
749
|
+
/**
 * Blast-radius rule: flag nodes of `rule.nodeType` whose downstream reach
 * (via getBlastRadius, optionally bounded by rule.depth) exceeds
 * `rule.maxAffected`.
 */
var evaluateBlastRadius = ({
  graph,
  policy,
  rule,
  ctx
}) => {
  const violations = [];
  const depthLimit = rule.depth;
  graph.forEachNode((id, attrs) => {
    if (attrs.type !== rule.nodeType) return;
    // Omitting depth entirely lets getBlastRadius apply its own default.
    const result = depthLimit === void 0 ? getBlastRadius(graph, id) : getBlastRadius(graph, id, depthLimit);
    if (result.totalAffected <= rule.maxAffected) return;
    const message = `${rule.nodeType} ${id} has blast radius ${result.totalAffected} > ${rule.maxAffected}`;
    violations.push(makeViolation(policy, rule, id, message, { nodeId: id, path: [id] }, ctx));
  });
  return violations;
};
|
|
776
|
+
// Policy evaluator: checks every ServiceNode's declared dependencies against
// the compatibility knowledge base. Four check kinds are supported; when
// `rule.kind` is undefined all of them run.
var evaluateCompatibility = ({
  graph,
  policy,
  rule,
  ctx
}) => {
  const violations = [];
  // rule.kind === undefined means "run every check kind".
  const wantsKind = (kind) => rule.kind === void 0 || rule.kind === kind;
  graph.forEachNode((svcId, attrs) => {
    const a = attrs;
    if (a.type !== NodeType2.ServiceNode) return;
    const svc = a;
    const deps = svc.dependencies ?? {};
    // 1) Declared DB driver vs the engine/version of each connected database.
    if (wantsKind("driver-engine")) {
      for (const edgeId of graph.outboundEdges(svcId)) {
        const e = graph.getEdgeAttributes(edgeId);
        if (e.type !== EdgeType.CONNECTS_TO) continue;
        // Frontier edges point at unconfirmed hosts; skip until promoted.
        if (e.provenance === Provenance2.FRONTIER) continue;
        const dbAttrs = graph.getNodeAttributes(e.target);
        if (dbAttrs.type !== NodeType2.DatabaseNode) continue;
        const db = dbAttrs;
        for (const pair of compatPairs()) {
          if (pair.engine !== db.engine) continue;
          const declared = deps[pair.driver];
          if (!declared) continue;
          const result = checkCompatibility(pair.driver, declared, db.engine, db.engineVersion);
          if (!result.compatible && result.reason) {
            violations.push(
              makeViolation(
                policy,
                rule,
                // Subject id encodes the full driver/engine pairing for dedupe.
                `${svcId}:driver-engine:${pair.driver}@${declared}:${db.engine}@${db.engineVersion}`,
                result.reason,
                { nodeId: svcId, edgeId },
                ctx
              )
            );
          }
        }
      }
    }
    // 2) Declared packages vs the service's Node.js engine range.
    if (wantsKind("node-engine")) {
      const serviceNodeRange = svc.nodeEngine;
      for (const constraint of nodeEngineConstraints()) {
        const declared = deps[constraint.package];
        if (!declared) continue;
        const result = checkNodeEngineConstraint(constraint, declared, serviceNodeRange);
        if (!result.compatible && result.reason) {
          violations.push(
            makeViolation(
              policy,
              rule,
              `${svcId}:node-engine:${constraint.package}@${declared}`,
              result.reason,
              { nodeId: svcId },
              ctx
            )
          );
        }
      }
    }
    // 3) Known package-vs-package conflicts within the same dependency set.
    if (wantsKind("package-conflict")) {
      for (const conflict of packageConflicts()) {
        const declared = deps[conflict.package];
        if (!declared) continue;
        const requiredDeclared = deps[conflict.requires.name];
        const result = checkPackageConflict(conflict, declared, requiredDeclared);
        if (!result.compatible && result.reason) {
          violations.push(
            makeViolation(
              policy,
              rule,
              `${svcId}:package-conflict:${conflict.package}@${declared}`,
              result.reason,
              { nodeId: svcId },
              ctx
            )
          );
        }
      }
    }
    // 4) Declared versions of packages with known deprecated/removed APIs.
    if (wantsKind("deprecated-api")) {
      for (const dep of deprecatedApis()) {
        const declared = deps[dep.package];
        if (!declared) continue;
        const result = checkDeprecatedApi(dep, declared);
        if (!result.compatible && result.reason) {
          violations.push(
            makeViolation(
              policy,
              rule,
              `${svcId}:deprecated-api:${dep.package}@${declared}`,
              result.reason,
              { nodeId: svcId },
              ctx
            )
          );
        }
      }
    }
  });
  return violations;
};
|
|
879
|
+
// Dispatch table mapping a policy rule's `type` discriminant to its evaluator.
// Looked up by evaluateAllPolicies; keys must match the rule types accepted by
// the policy file schema.
var policyEvaluators = {
  structural: evaluateStructural,
  ownership: evaluateOwnership,
  provenance: evaluateProvenance,
  "blast-radius": evaluateBlastRadius,
  compatibility: evaluateCompatibility
};
|
|
886
|
+
// Gate for frontier-node promotion: promotion is blocked when any "block"
// violation names the frontier node directly or includes it in its path.
function canPromoteFrontier(graph, frontierId2, policies, ctx) {
  // No policies configured: promotion is unconditionally allowed.
  if (policies.length === 0) {
    return { allowed: true, violations: [] };
  }
  const touchesFrontier = (v) =>
    v.subject.nodeId === frontierId2 || v.subject.path?.includes(frontierId2) === true;
  const blocking = evaluateAllPolicies(graph, policies, ctx).filter(
    (v) => v.onViolation === "block" && touchesFrontier(v)
  );
  return { allowed: blocking.length === 0, violations: blocking };
}
|
|
895
|
+
// Runs every policy against the graph and returns the flattened violation
// list.
// Fix: the original indexed `policyEvaluators[policy.rule.type]` and called
// the result unconditionally — an unrecognized rule type (e.g. from a policy
// file written for a newer version) threw "evaluator is not a function" and
// aborted the whole evaluation. Unknown types are now skipped with a warning
// so the remaining policies still run.
function evaluateAllPolicies(graph, policies, ctx) {
  const out = [];
  for (const policy of policies) {
    const evaluator = policyEvaluators[policy.rule.type];
    if (typeof evaluator !== "function") {
      console.warn(`[neat] unknown policy rule type "${policy.rule.type}"; skipping`);
      continue;
    }
    const violations = evaluator({ graph, policy, rule: policy.rule, ctx });
    for (const v of violations) out.push(v);
  }
  return out;
}
|
|
904
|
+
// Loads and schema-validates the policy file. A missing file means "no
// policies"; unreadable files and schema violations propagate to the caller.
async function loadPolicyFile(policyPath) {
  let contents;
  try {
    contents = await fs2.readFile(policyPath, "utf8");
  } catch (err) {
    // Absent policy file is a supported configuration, not an error.
    if (err.code === "ENOENT") return [];
    throw err;
  }
  const parsed = PolicyFileSchema.parse(JSON.parse(contents));
  return parsed.policies;
}
|
|
916
|
+
// Append-only JSONL log of policy violations, deduplicated by violation id.
var PolicyViolationsLog = class {
  path;
  // Lazily hydrated set of violation ids already on disk; null until first use.
  seen = null;
  constructor(logPath) {
    this.path = logPath;
  }
  // Appends `v` unless its id was already logged; returns whether it was written.
  async append(v) {
    if (this.seen === null) {
      await this.hydrate();
    }
    if (this.seen.has(v.id)) {
      return false;
    }
    this.seen.add(v.id);
    await fs2.mkdir(path2.dirname(this.path), { recursive: true });
    const line = JSON.stringify(v) + "\n";
    await fs2.appendFile(this.path, line, "utf8");
    return true;
  }
  // Reads every logged violation; a missing log file yields an empty list.
  async readAll() {
    let raw;
    try {
      raw = await fs2.readFile(this.path, "utf8");
    } catch (err) {
      if (err.code === "ENOENT") return [];
      throw err;
    }
    const entries = [];
    for (const line of raw.split("\n")) {
      if (line) entries.push(JSON.parse(line));
    }
    return entries;
  }
  // Rebuilds the dedupe set from whatever is already on disk.
  async hydrate() {
    this.seen = /* @__PURE__ */ new Set();
    for (const v of await this.readAll()) {
      this.seen.add(v.id);
    }
  }
};
|
|
945
|
+
|
|
946
|
+
// src/ingest.ts
|
|
947
|
+
import {
|
|
948
|
+
EdgeType as EdgeType2,
|
|
949
|
+
NodeType as NodeType3,
|
|
950
|
+
Provenance as Provenance3,
|
|
951
|
+
databaseId,
|
|
952
|
+
extractedEdgeId,
|
|
953
|
+
frontierEdgeId,
|
|
954
|
+
frontierId,
|
|
955
|
+
inferredEdgeId,
|
|
956
|
+
observedEdgeId,
|
|
957
|
+
serviceId
|
|
958
|
+
} from "@neat.is/types";
|
|
959
|
+
var HOUR_MS = 60 * 60 * 1e3;
var DAY_MS = 24 * HOUR_MS;
// Per-edge-type staleness cutoffs (ms): high-frequency telemetry edges decay
// within hours; declared/config relationships decay on a daily cadence.
var DEFAULT_STALE_THRESHOLDS = {
  CALLS: HOUR_MS,
  CONNECTS_TO: 4 * HOUR_MS,
  PUBLISHES_TO: 4 * HOUR_MS,
  CONSUMES_FROM: 4 * HOUR_MS,
  DEPENDS_ON: DAY_MS,
  CONFIGURED_BY: DAY_MS,
  RUNS_ON: DAY_MS
};
var FALLBACK_STALE_THRESHOLD_MS = DAY_MS;
// Merges JSON overrides from NEAT_STALE_THRESHOLDS over the defaults.
// Only finite, non-negative numeric values are accepted; a malformed env
// value logs a warning and falls back to the defaults.
function loadStaleThresholdsFromEnv() {
  const raw = process.env.NEAT_STALE_THRESHOLDS;
  if (!raw) return DEFAULT_STALE_THRESHOLDS;
  try {
    const overrides = JSON.parse(raw);
    const merged = { ...DEFAULT_STALE_THRESHOLDS };
    for (const [key, value] of Object.entries(overrides)) {
      const usable = typeof value === "number" && Number.isFinite(value) && value >= 0;
      if (usable) merged[key] = value;
    }
    return merged;
  } catch (err) {
    console.warn(
      `[neat] NEAT_STALE_THRESHOLDS could not be parsed (${err.message}); using defaults`
    );
    return DEFAULT_STALE_THRESHOLDS;
  }
}
|
|
988
|
+
// Resolves the staleness cutoff (ms) for one edge type. Uses the supplied
// overrides when given, otherwise the env-derived table, and falls back to
// the day-long default for unknown edge types.
function thresholdForEdgeType(edgeType, overrides) {
  const thresholds = overrides ?? loadStaleThresholdsFromEnv();
  return thresholds[edgeType] ?? FALLBACK_STALE_THRESHOLD_MS;
}
|
|
992
|
+
// Current wall-clock time as an ISO-8601 string, honoring an injectable
// clock (`ctx.now`) for deterministic tests.
function nowIso(ctx) {
  const ms = ctx.now ? ctx.now() : Date.now();
  return new Date(ms).toISOString();
}
|
|
995
|
+
// Returns the first non-empty string value among the given span attribute
// keys, or undefined when none qualifies.
function pickAttr(span, ...keys) {
  return keys
    .map((key) => span.attributes[key])
    .find((value) => typeof value === "string" && value.length > 0);
}
|
|
1002
|
+
// Extracts the hostname from a URL string; undefined for missing or
// unparseable input.
function hostFromUrl(u) {
  if (!u) return void 0;
  let parsed;
  try {
    parsed = new URL(u);
  } catch {
    return void 0;
  }
  return parsed.hostname;
}
|
|
1010
|
+
// Best-effort peer address for a span: prefer explicit server/peer host
// attributes, then fall back to the host embedded in a full-URL attribute.
function pickAddress(span) {
  const direct = pickAttr(span, "server.address", "net.peer.name", "net.host.name");
  return direct ?? hostFromUrl(pickAttr(span, "url.full", "http.url"));
}
|
|
1013
|
+
// Stable id for an OBSERVED (telemetry-confirmed) edge. Note the wrapper takes
// `type` first while the underlying helper takes (source, target, type).
function makeObservedEdgeId(type, source, target) {
  return observedEdgeId(source, target, type);
}
|
|
1016
|
+
// Stable id for an INFERRED (trace-stitched) edge; argument-order adapter
// around the @neat.is/types helper.
function makeInferredEdgeId(type, source, target) {
  return inferredEdgeId(source, target, type);
}
|
|
1019
|
+
// Confidence assigned to INFERRED edges created by trace stitching.
var INFERRED_CONFIDENCE = 0.6;
// Maximum hops stitchTrace walks outward from an erroring service.
var STITCH_MAX_DEPTH = 2;
// Bounded cache mapping "traceId:spanId" -> { service, expiresAt }, used to
// resolve a child span's caller; capped by size and entry TTL.
var PARENT_SPAN_CACHE_SIZE = 1e4;
var PARENT_SPAN_CACHE_TTL_MS = 5 * 60 * 1e3;
var parentSpanCache = /* @__PURE__ */ new Map();
|
|
1024
|
+
// Cache key identifying one span within one trace.
function parentSpanKey(traceId, spanId) {
  return [traceId, spanId].join(":");
}
|
|
1027
|
+
// Records which service emitted a span so its children can resolve their
// caller. Delete-before-set refreshes Map insertion order (LRU-style), and
// the trailing loop evicts the oldest entries past the size cap.
function cacheSpanService(span, now) {
  if (!span.traceId || !span.spanId) return;
  const cacheKey = parentSpanKey(span.traceId, span.spanId);
  parentSpanCache.delete(cacheKey);
  parentSpanCache.set(cacheKey, {
    service: span.service,
    expiresAt: now + PARENT_SPAN_CACHE_TTL_MS
  });
  while (parentSpanCache.size > PARENT_SPAN_CACHE_SIZE) {
    const evictKey = parentSpanCache.keys().next().value;
    if (!evictKey) break;
    parentSpanCache.delete(evictKey);
  }
}
|
|
1038
|
+
// Resolves the service that emitted (traceId, parentSpanId) from the span
// cache, honoring TTL; expired entries are evicted on read.
function lookupParentSpanService(traceId, parentSpanId, now) {
  const key = parentSpanKey(traceId, parentSpanId);
  const entry = parentSpanCache.get(key);
  if (!entry) return null;
  if (entry.expiresAt <= now) {
    parentSpanCache.delete(key);
    return null;
  }
  return entry.service;
}
|
|
1047
|
+
// Maps an observed host to a known ServiceNode id: first via the canonical
// service id, then by scanning services for a matching name or declared alias.
// Returns null when the host matches nothing.
function resolveServiceId(graph, host) {
  const direct = serviceId(host);
  if (graph.hasNode(direct)) return direct;
  let match = null;
  graph.forEachNode((nodeKey, nodeAttrs) => {
    if (match) return;
    const node = nodeAttrs;
    if (node.type !== NodeType3.ServiceNode) return;
    const aliasHit = node.aliases ? node.aliases.includes(host) : false;
    if (node.name === host || aliasHit) {
      match = nodeKey;
    }
  });
  return match;
}
|
|
1065
|
+
// Stable id for a frontier (observed-but-unrecognized) node keyed by host.
function frontierIdFor(host) {
  return frontierId(host);
}
|
|
1068
|
+
// Idempotently adds a ServiceNode for `serviceName` (discovered via OTel
// telemetry, language unknown) and returns its id.
function ensureServiceNode(graph, serviceName) {
  const id = serviceId(serviceName);
  if (!graph.hasNode(id)) {
    graph.addNode(id, {
      id,
      type: NodeType3.ServiceNode,
      name: serviceName,
      language: "unknown",
      discoveredVia: "otel"
    });
  }
  return id;
}
|
|
1081
|
+
// Idempotently adds a DatabaseNode for `host` running `engine` and returns its
// id. Version and driver compatibility start unknown/empty until enriched.
function ensureDatabaseNode(graph, host, engine) {
  const id = databaseId(host);
  if (!graph.hasNode(id)) {
    graph.addNode(id, {
      id,
      type: NodeType3.DatabaseNode,
      name: host,
      engine,
      engineVersion: "unknown",
      compatibleDrivers: [],
      host,
      discoveredVia: "otel"
    });
  }
  return id;
}
|
|
1097
|
+
// Adds (or refreshes) a FrontierNode for an unrecognized host. On revisit only
// `lastObserved` advances; `firstObserved` is preserved.
function ensureFrontierNode(graph, host, ts) {
  const id = frontierIdFor(host);
  if (graph.hasNode(id)) {
    graph.replaceNodeAttributes(id, {
      ...graph.getNodeAttributes(id),
      lastObserved: ts
    });
    return id;
  }
  graph.addNode(id, {
    id,
    type: NodeType3.FrontierNode,
    name: host,
    host,
    firstObserved: ts,
    lastObserved: ts
  });
  return id;
}
|
|
1115
|
+
// Creates or refreshes a FRONTIER edge (a call into an unrecognized host),
// bumping callCount and lastObserved on repeat sightings.
function upsertFrontierEdge(graph, type, source, target, ts) {
  const id = frontierEdgeId(source, target, type);
  if (!graph.hasEdge(id)) {
    graph.addEdgeWithKey(id, source, target, {
      id,
      source,
      target,
      type,
      provenance: Provenance3.FRONTIER,
      confidence: 1,
      lastObserved: ts,
      callCount: 1
    });
    return;
  }
  const prior = graph.getEdgeAttributes(id);
  graph.replaceEdgeAttributes(id, {
    ...prior,
    provenance: Provenance3.FRONTIER,
    lastObserved: ts,
    callCount: (prior.callCount ?? 0) + 1
  });
}
|
|
1140
|
+
// Creates or refreshes an OBSERVED edge between two existing nodes, keeping a
// running span/error tally in `signal`. Returns { edge, created } or null
// when either endpoint is missing from the graph.
function upsertObservedEdge(graph, type, source, target, ts, isError = false) {
  if (!graph.hasNode(source) || !graph.hasNode(target)) return null;
  const id = makeObservedEdgeId(type, source, target);
  if (graph.hasEdge(id)) {
    const existing = graph.getEdgeAttributes(id);
    // Older edges may predate `signal`; fall back to callCount for the tally.
    const newSpanCount = (existing.signal?.spanCount ?? existing.callCount ?? 0) + 1;
    const newErrorCount = (existing.signal?.errorCount ?? 0) + (isError ? 1 : 0);
    const updated = {
      ...existing,
      // Re-observation restores OBSERVED provenance (e.g. after a STALE
      // demotion) at full confidence.
      provenance: Provenance3.OBSERVED,
      lastObserved: ts,
      callCount: newSpanCount,
      signal: {
        spanCount: newSpanCount,
        errorCount: newErrorCount,
        lastObservedAgeMs: 0
      },
      confidence: 1
    };
    graph.replaceEdgeAttributes(id, updated);
    return { edge: updated, created: false };
  }
  const edge = {
    id,
    source,
    target,
    type,
    provenance: Provenance3.OBSERVED,
    confidence: 1,
    lastObserved: ts,
    callCount: 1,
    signal: {
      spanCount: 1,
      errorCount: isError ? 1 : 0,
      lastObservedAgeMs: 0
    }
  };
  graph.addEdgeWithKey(id, source, target, edge);
  return { edge, created: true };
}
|
|
1180
|
+
// On an error span, walks outward from the failing service (BFS, capped at
// STITCH_MAX_DEPTH hops) and materializes INFERRED runtime edges for
// statically EXTRACTED edges that have no OBSERVED counterpart yet.
function stitchTrace(graph, sourceServiceId, ts) {
  if (!graph.hasNode(sourceServiceId)) return;
  const visited = /* @__PURE__ */ new Set([sourceServiceId]);
  const queue = [{ nodeId: sourceServiceId, depth: 0 }];
  while (queue.length > 0) {
    const { nodeId, depth } = queue.shift();
    if (depth >= STITCH_MAX_DEPTH) continue;
    const outbound = graph.outboundEdges(nodeId);
    for (const edgeId of outbound) {
      const edge = graph.getEdgeAttributes(edgeId);
      if (edge.provenance !== Provenance3.EXTRACTED) continue;
      // Skip pairs already confirmed by live telemetry.
      if (graph.hasEdge(observedEdgeId(edge.source, edge.target, edge.type))) continue;
      upsertInferredEdge(graph, edge.type, edge.source, edge.target, ts);
      if (!visited.has(edge.target)) {
        visited.add(edge.target);
        queue.push({ nodeId: edge.target, depth: depth + 1 });
      }
    }
  }
}
|
|
1200
|
+
// Creates (or touches) an INFERRED edge produced by trace stitching. Existing
// inferred edges only get their lastObserved refreshed.
function upsertInferredEdge(graph, type, source, target, ts) {
  const id = makeInferredEdgeId(type, source, target);
  if (graph.hasEdge(id)) {
    graph.replaceEdgeAttributes(id, {
      ...graph.getEdgeAttributes(id),
      lastObserved: ts
    });
    return;
  }
  graph.addEdgeWithKey(id, source, target, {
    id,
    source,
    target,
    type,
    provenance: Provenance3.INFERRED,
    confidence: INFERRED_CONFIDENCE,
    lastObserved: ts
  });
}
|
|
1219
|
+
// Appends one error event to the JSONL error log, creating parent directories
// as needed.
async function appendErrorEvent(ctx, ev) {
  const dir = path3.dirname(ctx.errorsPath);
  await fs3.mkdir(dir, { recursive: true });
  const line = JSON.stringify(ev) + "\n";
  await fs3.appendFile(ctx.errorsPath, line, "utf8");
}
|
|
1223
|
+
// Converts an error span (OTel status code 2 = ERROR) into a persistable
// error event; returns null for non-error spans. Missing start time falls
// back to "now".
function buildErrorEventForReceiver(span) {
  if (span.statusCode !== 2) return null;
  const timestamp = span.startTimeIso ?? new Date().toISOString();
  const event = {
    id: `${span.traceId}:${span.spanId}`,
    timestamp,
    service: span.service,
    traceId: span.traceId,
    spanId: span.spanId,
    // Prefer the richest error description available.
    errorMessage: span.exception?.message ?? span.errorMessage ?? span.name ?? "unknown error"
  };
  // Optional exception details are added in the same key order the original
  // literal produced, so serialized output is unchanged.
  if (span.exception?.type) event.exceptionType = span.exception.type;
  if (span.exception?.stacktrace) event.exceptionStacktrace = span.exception.stacktrace;
  event.affectedNode = serviceId(span.service);
  return event;
}
|
|
1238
|
+
// Returns an async span sink for the OTel receiver: error spans are converted
// to error events and appended as JSONL at `errorsPath`; non-error spans are
// ignored.
function makeErrorSpanWriter(errorsPath) {
  return async (span) => {
    const ev = buildErrorEventForReceiver(span);
    if (!ev) return;
    await fs3.mkdir(path3.dirname(errorsPath), { recursive: true });
    await fs3.appendFile(errorsPath, JSON.stringify(ev) + "\n", "utf8");
  };
}
|
|
1246
|
+
// Core span-ingest pipeline: upserts the nodes/edges implied by one OTel span.
//  - DB client spans (span.dbSystem set) create CONNECTS_TO edges to database
//    nodes.
//  - Other spans with a peer address create CALLS edges to known services, or
//    frontier nodes/edges for unrecognized hosts.
//  - When the address resolves nothing, the cached parent span stitches a
//    caller -> callee CALLS edge instead.
//  - Error spans (statusCode 2) trigger trace stitching and, unless disabled,
//    an inline error event append.
async function handleSpan(ctx, span) {
  const ts = span.startTimeIso ?? nowIso(ctx);
  const nowMs = ctx.now ? ctx.now() : Date.now();
  const sourceId = ensureServiceNode(ctx.graph, span.service);
  const isError = span.statusCode === 2;
  // Remember which service emitted this span so child spans can find their caller.
  cacheSpanService(span, nowMs);
  let affectedNode = sourceId;
  if (span.dbSystem) {
    const host = pickAddress(span);
    if (host) {
      ensureDatabaseNode(ctx.graph, host, span.dbSystem);
      const targetId = databaseId(host);
      const result = upsertObservedEdge(
        ctx.graph,
        EdgeType2.CONNECTS_TO,
        sourceId,
        targetId,
        ts,
        isError
      );
      if (result) affectedNode = targetId;
    }
  } else {
    const host = pickAddress(span);
    let resolvedViaAddress = false;
    if (host && host !== span.service) {
      const targetId = resolveServiceId(ctx.graph, host);
      if (targetId && targetId !== sourceId) {
        // Known service: record an observed CALLS edge.
        upsertObservedEdge(
          ctx.graph,
          EdgeType2.CALLS,
          sourceId,
          targetId,
          ts,
          isError
        );
        affectedNode = targetId;
        resolvedViaAddress = true;
      } else if (!targetId) {
        // Unknown host: track as a frontier node until it can be promoted.
        const frontierId2 = ensureFrontierNode(ctx.graph, host, ts);
        if (ctx.graph.hasNode(sourceId)) {
          upsertFrontierEdge(ctx.graph, EdgeType2.CALLS, sourceId, frontierId2, ts);
        }
        affectedNode = frontierId2;
        resolvedViaAddress = true;
      }
    }
    // Address didn't resolve: infer the caller from the cached parent span.
    if (!resolvedViaAddress && span.parentSpanId) {
      const parentService = lookupParentSpanService(span.traceId, span.parentSpanId, nowMs);
      if (parentService && parentService !== span.service) {
        const parentId = ensureServiceNode(ctx.graph, parentService);
        upsertObservedEdge(
          ctx.graph,
          EdgeType2.CALLS,
          parentId,
          sourceId,
          ts,
          isError
        );
      }
    }
  }
  if (span.statusCode === 2) {
    stitchTrace(ctx.graph, sourceId, ts);
    if (ctx.writeErrorEventInline !== false) {
      const ev = {
        id: `${span.traceId}:${span.spanId}`,
        timestamp: ts,
        service: span.service,
        traceId: span.traceId,
        spanId: span.spanId,
        errorMessage: span.exception?.message ?? span.errorMessage ?? span.name ?? "unknown error",
        ...span.exception?.type ? { exceptionType: span.exception.type } : {},
        ...span.exception?.stacktrace ? { exceptionStacktrace: span.exception.stacktrace } : {},
        affectedNode
      };
      await appendErrorEvent(ctx, ev);
    }
  }
  void affectedNode;
  if (ctx.onPolicyTrigger) await ctx.onPolicyTrigger(ctx.graph);
}
|
|
1328
|
+
// Promotes frontier nodes whose host now matches a known service's name or
// declared alias: rewires their edges onto the service node, then drops the
// frontier node. Promotions can be vetoed by "block" policy violations via
// canPromoteFrontier. Returns the number of nodes promoted.
function promoteFrontierNodes(graph, opts = {}) {
  // Index every service by its canonical name and declared aliases.
  const aliasIndex = /* @__PURE__ */ new Map();
  graph.forEachNode((id, attrs) => {
    const a = attrs;
    if (a.type !== NodeType3.ServiceNode) return;
    aliasIndex.set(a.name, id);
    if (a.aliases) {
      for (const alias of a.aliases) aliasIndex.set(alias, id);
    }
  });
  // Collect candidates first; mutating the graph mid-iteration is unsafe.
  const toPromote = [];
  graph.forEachNode((id, attrs) => {
    const a = attrs;
    if (a.type !== NodeType3.FrontierNode) return;
    const target = aliasIndex.get(a.host);
    if (!target) return;
    if (target === id) return;
    toPromote.push({ frontierId: id, serviceId: target });
  });
  let promoted = 0;
  for (const { frontierId: frontierId2, serviceId: serviceId3 } of toPromote) {
    // Policy gate: skip promotion when a blocking violation touches this node.
    if (opts.policies && opts.policies.length > 0 && opts.policyCtx) {
      const gate = canPromoteFrontier(graph, frontierId2, opts.policies, opts.policyCtx);
      if (!gate.allowed) {
        continue;
      }
    }
    rewireFrontierEdges(graph, frontierId2, serviceId3);
    graph.dropNode(frontierId2);
    promoted++;
  }
  return promoted;
}
|
|
1361
|
+
// Re-points every edge touching a frontier node at the promoted service node.
// Edge lists are snapshotted up front because rebuildEdge mutates the graph
// while we iterate.
function rewireFrontierEdges(graph, frontierId2, serviceId3) {
  const incoming = [...graph.inboundEdges(frontierId2)];
  const outgoing = [...graph.outboundEdges(frontierId2)];
  for (const edgeKey of incoming) {
    const attrs = graph.getEdgeAttributes(edgeKey);
    rebuildEdge(graph, attrs, attrs.source, serviceId3, edgeKey);
  }
  for (const edgeKey of outgoing) {
    const attrs = graph.getEdgeAttributes(edgeKey);
    rebuildEdge(graph, attrs, serviceId3, attrs.target, edgeKey);
  }
}
|
|
1373
|
+
// Re-keys an edge onto (newSource, newTarget). FRONTIER provenance is upgraded
// to OBSERVED, and the new id is derived from the (possibly upgraded)
// provenance, since edge ids are provenance-specific. If an edge with the new
// id already exists, counts and timestamps are merged instead of duplicating.
function rebuildEdge(graph, edge, newSource, newTarget, oldEdgeId) {
  graph.dropEdge(oldEdgeId);
  const promotedProvenance = edge.provenance === Provenance3.FRONTIER ? Provenance3.OBSERVED : edge.provenance;
  // Pick the id builder matching the final provenance.
  const newId = promotedProvenance === Provenance3.OBSERVED ? observedEdgeId(newSource, newTarget, edge.type) : promotedProvenance === Provenance3.INFERRED ? inferredEdgeId(newSource, newTarget, edge.type) : promotedProvenance === Provenance3.EXTRACTED ? extractedEdgeId(newSource, newTarget, edge.type) : frontierEdgeId(newSource, newTarget, edge.type);
  if (graph.hasEdge(newId)) {
    const existing = graph.getEdgeAttributes(newId);
    const merged = {
      ...existing,
      callCount: (existing.callCount ?? 0) + (edge.callCount ?? 0),
      lastObserved: pickLater(existing.lastObserved, edge.lastObserved)
    };
    graph.replaceEdgeAttributes(newId, merged);
    return;
  }
  const rebuilt = {
    ...edge,
    id: newId,
    source: newSource,
    target: newTarget,
    provenance: promotedProvenance
  };
  graph.addEdgeWithKey(newId, newSource, newTarget, rebuilt);
}
|
|
1396
|
+
// Returns the later of two ISO timestamps; a missing side yields the other.
// Ties favor `a`.
function pickLater(a, b) {
  if (!a || !b) {
    return a || b;
  }
  if (new Date(a).getTime() >= new Date(b).getTime()) {
    return a;
  }
  return b;
}
|
|
1401
|
+
// Binds an ingest context into a single-argument span handler suitable for
// wiring into the OTel receiver.
function makeSpanHandler(ctx) {
  return (span) => handleSpan(ctx, span);
}
|
|
1404
|
+
// Sweeps OBSERVED edges and demotes those not re-observed within their
// per-type threshold to STALE provenance at confidence 0.3. Transition events
// are optionally appended to a JSONL log. Returns { count, events }.
async function markStaleEdges(graph, options = {}) {
  const thresholds = options.thresholds ?? loadStaleThresholdsFromEnv();
  const now = options.now ?? Date.now();
  const events = [];
  graph.forEachEdge((id, attrs) => {
    const e = attrs;
    if (e.provenance !== Provenance3.OBSERVED) return;
    // Edges without a timestamp cannot age out.
    if (!e.lastObserved) return;
    const threshold = thresholdForEdgeType(e.type, thresholds);
    const age = now - new Date(e.lastObserved).getTime();
    if (age > threshold) {
      const updated = { ...e, provenance: Provenance3.STALE, confidence: 0.3 };
      graph.replaceEdgeAttributes(id, updated);
      events.push({
        edgeId: id,
        source: e.source,
        target: e.target,
        edgeType: e.type,
        thresholdMs: threshold,
        ageMs: age,
        lastObserved: e.lastObserved,
        transitionedAt: new Date(now).toISOString()
      });
    }
  });
  if (options.staleEventsPath && events.length > 0) {
    await appendStaleEvents(options.staleEventsPath, events);
  }
  return { count: events.length, events };
}
|
|
1434
|
+
// Appends stale-transition events as newline-delimited JSON, creating the
// parent directory when needed.
async function appendStaleEvents(staleEventsPath, events) {
  await fs3.mkdir(path3.dirname(staleEventsPath), { recursive: true });
  const payload = `${events.map((event) => JSON.stringify(event)).join("\n")}\n`;
  await fs3.appendFile(staleEventsPath, payload, "utf8");
}
|
|
1439
|
+
// Reads all stale-transition events from the JSONL log; a missing file means
// no transitions have been recorded yet.
async function readStaleEvents(staleEventsPath) {
  let raw;
  try {
    raw = await fs3.readFile(staleEventsPath, "utf8");
  } catch (err) {
    if (err.code === "ENOENT") return [];
    throw err;
  }
  const events = [];
  for (const line of raw.split("\n")) {
    if (line.length > 0) events.push(JSON.parse(line));
  }
  return events;
}
|
|
1448
|
+
// Starts a periodic staleness sweep over the graph and returns a stop
// function. The interval is unref'd so the loop never keeps the process
// alive on its own; sweep failures are logged, never thrown.
function startStalenessLoop(graph, options = {}) {
  let stopped = false;
  const intervalMs = options.intervalMs ?? 6e4;
  const sweep = async () => {
    try {
      await markStaleEdges(graph, {
        thresholds: options.thresholds,
        staleEventsPath: options.staleEventsPath
      });
      if (options.onPolicyTrigger) await options.onPolicyTrigger(graph);
    } catch (err) {
      console.error("staleness tick failed", err);
    }
  };
  const timer = setInterval(() => {
    if (!stopped) void sweep();
  }, intervalMs);
  if (typeof timer.unref === "function") timer.unref();
  return () => {
    stopped = true;
    clearInterval(timer);
  };
}
|
|
1472
|
+
// Reads all persisted error events from the JSONL log; a missing file yields
// an empty list.
async function readErrorEvents(errorsPath) {
  let raw;
  try {
    raw = await fs3.readFile(errorsPath, "utf8");
  } catch (err) {
    if (err.code === "ENOENT") return [];
    throw err;
  }
  return raw
    .split("\n")
    .filter((line) => line.length > 0)
    .map((line) => JSON.parse(line));
}
|
|
1481
|
+
|
|
1482
|
+
// src/extract/services.ts
|
|
1483
|
+
import { promises as fs6 } from "fs";
|
|
1484
|
+
import path6 from "path";
|
|
1485
|
+
import ignore from "ignore";
|
|
1486
|
+
import { minimatch } from "minimatch";
|
|
1487
|
+
import { NodeType as NodeType4, serviceId as serviceId2 } from "@neat.is/types";
|
|
1488
|
+
|
|
1489
|
+
// src/extract/shared.ts
|
|
1490
|
+
import { promises as fs4 } from "fs";
|
|
1491
|
+
import path4 from "path";
|
|
1492
|
+
import { parse as parseYaml } from "yaml";
|
|
1493
|
+
import { extractedEdgeId as extractedEdgeId2 } from "@neat.is/types";
|
|
1494
|
+
// Source-file extensions that mark a directory as containing service code.
var SERVICE_FILE_EXTENSIONS = /* @__PURE__ */ new Set([".js", ".mjs", ".cjs", ".ts", ".tsx", ".py"]);
// Extensions treated as configuration files.
var CONFIG_FILE_EXTENSIONS = /* @__PURE__ */ new Set([".yaml", ".yml"]);
// Directories never descended into during repository scans.
var IGNORED_DIRS = /* @__PURE__ */ new Set([
  "node_modules",
  ".git",
  ".turbo",
  "dist",
  "build",
  ".next"
]);
// Classifies a filename as a config file. YAML files match by extension and
// dotenv files by name (`.env` or `.env.*`); `fileType` is the extension
// without its leading dot, or "env" for dotenv files, "" on no match.
function isConfigFile(name) {
  const ext = path4.extname(name);
  if (CONFIG_FILE_EXTENSIONS.has(ext)) {
    return { match: true, fileType: ext.slice(1) };
  }
  const isDotenv = name === ".env" || name.startsWith(".env.");
  if (isDotenv) {
    return { match: true, fileType: "env" };
  }
  return { match: false, fileType: "" };
}
|
|
1510
|
+
// Normalizes a semver-ish range to a bare version string: strips leading
// range operators, "v" prefixes, and whitespace. Returns undefined when the
// input is empty/missing or nothing remains after stripping.
function cleanVersion(raw) {
  if (!raw) return void 0;
  const stripped = raw.replace(/^[\^~><=v\s]+/, "").trim();
  return stripped === "" ? void 0 : stripped;
}
|
|
1514
|
+
// Reads and parses a JSON file; throws on a missing file or invalid JSON.
async function readJson(filePath) {
  const contents = await fs4.readFile(filePath, "utf8");
  return JSON.parse(contents);
}
|
|
1518
|
+
// Reads and parses a YAML file; I/O and parse errors propagate to the caller.
async function readYaml(filePath) {
  const raw = await fs4.readFile(filePath, "utf8");
  return parseYaml(raw);
}
|
|
1522
|
+
// True when a path is accessible on disk; any accessibility failure maps to
// false rather than throwing.
async function exists(p) {
  return fs4.access(p).then(
    () => true,
    () => false
  );
}
|
|
1530
|
+
|
|
1531
|
+
// src/extract/python.ts
|
|
1532
|
+
import { promises as fs5 } from "fs";
|
|
1533
|
+
import path5 from "path";
|
|
1534
|
+
import { parse as parseToml } from "smol-toml";
|
|
1535
|
+
// Loosely matches "name[extras] == version" requirement syntax; only `==`
// pins capture a version (group 3).
var REQUIREMENT_LINE = /^\s*([A-Za-z0-9_.-]+)(?:\[[^\]]*\])?\s*(?:(==)\s*([A-Za-z0-9_.+-]+))?/;
// Parses a requirements.txt into { packageName: pinnedVersion } with
// lowercased names ("" when no == pin). Comments, blank lines, and option
// lines (-r, -e, --flag) are skipped.
function parseRequirementsTxt(content) {
  const deps = {};
  for (const rawLine of content.split("\n")) {
    const line = rawLine.split("#")[0]?.trim();
    if (!line || line.startsWith("-")) continue;
    const parsed = REQUIREMENT_LINE.exec(line);
    if (parsed === null) continue;
    deps[parsed[1].toLowerCase()] = parsed[3] ?? "";
  }
  return deps;
}
// Collects dependencies from a parsed pyproject.toml, covering both PEP 621
// ([project].dependencies) and Poetry ([tool.poetry.dependencies]) layouts.
// Poetry's "python" pseudo-dependency is excluded and range operators are
// stripped from Poetry version specs.
function depsFromPyProject(pyproject) {
  const deps = {};
  const pep621 = pyproject.project?.dependencies ?? [];
  for (const entry of pep621) {
    const parsed = REQUIREMENT_LINE.exec(entry);
    if (parsed) deps[parsed[1].toLowerCase()] = parsed[3] ?? "";
  }
  const poetry = pyproject.tool?.poetry?.dependencies ?? {};
  for (const [name, spec] of Object.entries(poetry)) {
    if (name.toLowerCase() === "python") continue;
    const version = typeof spec === "string" ? spec : spec?.version ?? "";
    deps[name.toLowerCase()] = version.replace(/^[\^~><=v\s]+/, "");
  }
  return deps;
}
|
|
1565
|
+
// Detect a Python service rooted at `serviceDir`.
// Returns null unless at least one of pyproject.toml / requirements.txt /
// setup.py exists. Name and version come from pyproject (PEP 621 first,
// then Poetry), falling back to the directory basename / undefined.
// requirements.txt pins are merged last, so they override pyproject entries
// on name collisions.
async function discoverPythonService(serviceDir) {
  const pyprojectPath = path5.join(serviceDir, "pyproject.toml");
  const requirementsPath = path5.join(serviceDir, "requirements.txt");
  const setupPath = path5.join(serviceDir, "setup.py");
  const hasPyproject = await exists(pyprojectPath);
  const hasRequirements = await exists(requirementsPath);
  const hasSetup = await exists(setupPath);
  if (!hasPyproject && !hasRequirements && !hasSetup) return null;
  let name = path5.basename(serviceDir);
  let version;
  const dependencies = {};
  if (hasPyproject) {
    const raw = await fs5.readFile(pyprojectPath, "utf8");
    const pyproject = parseToml(raw);
    name = pyproject.project?.name ?? pyproject.tool?.poetry?.name ?? name;
    version = pyproject.project?.version ?? pyproject.tool?.poetry?.version ?? void 0;
    Object.assign(dependencies, depsFromPyProject(pyproject));
  }
  if (hasRequirements) {
    // Applied after pyproject so explicit requirement pins win.
    const raw = await fs5.readFile(requirementsPath, "utf8");
    Object.assign(dependencies, parseRequirementsTxt(raw));
  }
  return { name, version, dependencies };
}
|
|
1589
|
+
// Project a discovered Python service onto the package-shaped record
// ({ name, version, dependencies }) shared with the Node discovery path.
function pythonToPackage(service) {
  const { name, version, dependencies } = service;
  return { name, version, dependencies };
}
|
|
1596
|
+
|
|
1597
|
+
// src/extract/services.ts
|
|
1598
|
+
// Default maximum directory depth for service scanning.
var DEFAULT_SCAN_DEPTH = 5;

// Resolve the scan depth from NEAT_SCAN_DEPTH, falling back to the default
// when the variable is unset, not a number, or negative.
function parseScanDepth() {
  const raw = process.env.NEAT_SCAN_DEPTH;
  if (!raw) return DEFAULT_SCAN_DEPTH;
  const depth = Number.parseInt(raw, 10);
  if (!Number.isFinite(depth) || depth < 0) return DEFAULT_SCAN_DEPTH;
  return depth;
}
|
|
1605
|
+
// Extract workspace glob patterns from a package.json object.
// Supports both the array form (`"workspaces": [...]`) and the object form
// (`"workspaces": { "packages": [...] }`). Empty or absent lists yield null.
function workspaceGlobs(pkg) {
  const ws = pkg.workspaces;
  if (Array.isArray(ws)) {
    return ws.length === 0 ? null : ws;
  }
  if (ws && Array.isArray(ws.packages) && ws.packages.length > 0) {
    return ws.packages;
  }
  return null;
}
|
|
1612
|
+
// Load `<scanPath>/.gitignore` into an `ignore` matcher, or null when the
// repo has no .gitignore at its root (nested .gitignore files are not read).
async function loadGitignore(scanPath) {
  const gitignorePath = path6.join(scanPath, ".gitignore");
  if (!await exists(gitignorePath)) return null;
  const raw = await fs6.readFile(gitignorePath, "utf8");
  return ignore().add(raw);
}
|
|
1618
|
+
// Recursively visit directories under `start`, up to `options.maxDepth`
// levels deep, calling `visit(childDir)` (awaited) for each directory found.
// Skips names in IGNORED_DIRS and, when `options.ig` is set, anything the
// gitignore matcher rejects (paths are tested POSIX-style relative to
// `scanPath`, with a trailing "/" to match directory rules).
// readdir failures are swallowed and treated as empty directories.
async function walkDirs(start, scanPath, options, visit) {
  async function recurse(current, depth) {
    if (depth > options.maxDepth) return;
    const entries = await fs6.readdir(current, { withFileTypes: true }).catch(() => []);
    for (const entry of entries) {
      if (!entry.isDirectory()) continue;
      if (IGNORED_DIRS.has(entry.name)) continue;
      const child = path6.join(current, entry.name);
      if (options.ig) {
        const rel = path6.relative(scanPath, child).split(path6.sep).join("/");
        if (rel && options.ig.ignores(rel + "/")) continue;
      }
      // Visit before descending so parents are reported ahead of children.
      await visit(child);
      await recurse(child, depth + 1);
    }
  }
  await recurse(start, 0);
}
|
|
1636
|
+
// Expand package.json workspace globs into concrete directories that contain
// a package.json. Non-glob patterns are checked directly; glob patterns walk
// from the deepest static prefix, limiting depth to the pattern's remaining
// segment count (or NEAT_SCAN_DEPTH when the pattern uses `**`), and match
// candidates with minimatch against the POSIX-style relative path.
async function expandWorkspaceGlobs(scanPath, globs) {
  const found = /* @__PURE__ */ new Set();
  const scanDepth = parseScanDepth();
  for (const raw of globs) {
    // Normalize "./packages/*" → "packages/*".
    const pattern = raw.replace(/^\.\//, "");
    if (!pattern.includes("*")) {
      const candidate = path6.join(scanPath, pattern);
      if (await exists(path6.join(candidate, "package.json"))) found.add(candidate);
      continue;
    }
    // Static prefix: everything before the first wildcard segment.
    const segments = pattern.split("/");
    const staticSegments = [];
    for (const seg of segments) {
      if (seg.includes("*")) break;
      staticSegments.push(seg);
    }
    const start = path6.join(scanPath, ...staticSegments);
    if (!await exists(start)) continue;
    const hasDoubleStar = pattern.includes("**");
    const walkDepth = hasDoubleStar ? scanDepth : Math.max(0, segments.length - staticSegments.length - 1);
    await walkDirs(start, scanPath, { maxDepth: walkDepth, ig: null }, async (dir) => {
      const rel = path6.relative(scanPath, dir).split(path6.sep).join("/");
      if (minimatch(rel, pattern) && await exists(path6.join(dir, "package.json"))) {
        found.add(dir);
      }
    });
  }
  return [...found];
}
|
|
1665
|
+
// Build a service record from a directory containing a named package.json.
// Returns null when package.json is absent or has no "name". The node gets
// a stable service id, JS language tag, the path relative to the scan root,
// and — only when declared — the engines.node constraint.
async function discoverNodeService(scanPath, dir) {
  const pkgPath = path6.join(dir, "package.json");
  if (!await exists(pkgPath)) return null;
  const pkg = await readJson(pkgPath);
  if (!pkg.name) return null;
  const node = {
    id: serviceId2(pkg.name),
    type: NodeType4.ServiceNode,
    name: pkg.name,
    language: "javascript",
    version: pkg.version,
    dependencies: pkg.dependencies ?? {},
    repoPath: path6.relative(scanPath, dir),
    // Spread keeps `nodeEngine` absent (not undefined) when not declared.
    ...pkg.engines?.node ? { nodeEngine: pkg.engines.node } : {}
  };
  return { pkg, dir, node };
}
|
|
1682
|
+
// Build a service record for a Python project directory, or null when no
// Python markers are found. Mirrors discoverNodeService's return shape so
// both feed the same downstream pipeline.
async function discoverPyService(scanPath, dir) {
  const py = await discoverPythonService(dir);
  if (!py) return null;
  const pkg = pythonToPackage(py);
  const node = {
    id: serviceId2(py.name),
    type: NodeType4.ServiceNode,
    name: py.name,
    language: "python",
    version: py.version,
    dependencies: py.dependencies,
    repoPath: path6.relative(scanPath, dir)
  };
  return { pkg, dir, node };
}
|
|
1697
|
+
// Discover all services under `scanPath`.
// Strategy: if the root package.json declares workspaces, only those globs
// are expanded (workspace mode). Otherwise the tree is walked up to the
// configured depth (honoring the root .gitignore), collecting directories
// with a package.json or Python markers; a named root package.json is
// included as a candidate itself.
// Candidates are sorted so the lexicographically-first directory wins on
// duplicate service names; later duplicates are warned about and dropped.
async function discoverServices(scanPath) {
  const rootPkgPath = path6.join(scanPath, "package.json");
  const rootPkg = await exists(rootPkgPath) ? await readJson(rootPkgPath) : null;
  const wsGlobs = rootPkg ? workspaceGlobs(rootPkg) : null;
  const candidateDirs = [];
  if (wsGlobs) {
    candidateDirs.push(...await expandWorkspaceGlobs(scanPath, wsGlobs));
  } else {
    if (rootPkg && rootPkg.name) candidateDirs.push(scanPath);
    const ig = await loadGitignore(scanPath);
    await walkDirs(
      scanPath,
      scanPath,
      { maxDepth: parseScanDepth(), ig },
      async (dir) => {
        if (await exists(path6.join(dir, "package.json"))) {
          candidateDirs.push(dir);
        } else if (await exists(path6.join(dir, "pyproject.toml")) || await exists(path6.join(dir, "requirements.txt")) || await exists(path6.join(dir, "setup.py"))) {
          candidateDirs.push(dir);
        }
      }
    );
  }
  // Deterministic order: first (sorted) directory claims a duplicate name.
  candidateDirs.sort();
  const seen = /* @__PURE__ */ new Map();
  const out = [];
  for (const dir of candidateDirs) {
    // Node detection takes precedence over Python for mixed directories.
    const service = await discoverNodeService(scanPath, dir) ?? await discoverPyService(scanPath, dir);
    if (!service) continue;
    const existingDir = seen.get(service.node.name);
    if (existingDir !== void 0) {
      const a = path6.relative(scanPath, existingDir) || ".";
      const b = path6.relative(scanPath, dir) || ".";
      console.warn(
        `[neat] duplicate package name "${service.node.name}" \u2014 keeping ${a}, ignoring ${b}`
      );
      continue;
    }
    seen.set(service.node.name, dir);
    out.push(service);
  }
  return out;
}
|
|
1740
|
+
/**
 * Insert or merge discovered service nodes into the graph.
 * New nodes are tagged discoveredVia: "static"; nodes already present are
 * merged (static attributes win), with discoveredVia promoted to "merged"
 * when the existing node came from OTel. Returns the count of nodes added
 * (merges do not count).
 */
function addServiceNodes(graph, services) {
  let added = 0;
  for (const { node } of services) {
    if (graph.hasNode(node.id)) {
      const existing = graph.getNodeAttributes(node.id);
      const discoveredVia = existing.discoveredVia === "otel" ? "merged" : "static";
      graph.replaceNodeAttributes(node.id, { ...existing, ...node, discoveredVia });
    } else {
      graph.addNode(node.id, { ...node, discoveredVia: "static" });
      added += 1;
    }
  }
  return added;
}
|
|
1758
|
+
|
|
1759
|
+
// src/extract/aliases.ts
|
|
1760
|
+
import path7 from "path";
|
|
1761
|
+
import { promises as fs7 } from "fs";
|
|
1762
|
+
import { parseAllDocuments } from "yaml";
|
|
1763
|
+
import { NodeType as NodeType5 } from "@neat.is/types";
|
|
1764
|
+
// Kubernetes kinds whose manifests are scanned for cluster-DNS hostname
// aliases (Service names resolve directly; workload kinds via their labels).
var K8S_KINDS_WITH_HOSTNAMES = /* @__PURE__ */ new Set([
  "Service",
  "Deployment",
  "StatefulSet",
  "DaemonSet"
]);
|
|
1770
|
+
/**
 * Merge alias candidates into a service node's sorted, de-duplicated alias
 * list. No-ops when the node is missing, is not a ServiceNode, or when the
 * merged set would be empty. Falsy candidates and the node's own name are
 * excluded.
 */
function addAliases(graph, serviceId3, candidates) {
  if (!graph.hasNode(serviceId3)) return;
  const node = graph.getNodeAttributes(serviceId3);
  if (node.type !== NodeType5.ServiceNode) return;
  const aliasSet = new Set(node.aliases ?? []);
  for (const candidate of candidates) {
    if (candidate && candidate !== node.name) aliasSet.add(candidate);
  }
  if (aliasSet.size === 0) return;
  graph.replaceNodeAttributes(serviceId3, { ...node, aliases: [...aliasSet].sort() });
}
|
|
1784
|
+
// Index services by both their declared package name and their directory
// basename, mapping each to the service node id. Later services overwrite
// earlier ones on key collisions.
function indexServicesByName(services) {
  const byName = new Map();
  for (const service of services) {
    byName.set(service.node.name, service.node.id);
    byName.set(path7.basename(service.dir), service.node.id);
  }
  return byName;
}
|
|
1792
|
+
// Read the root docker-compose file (yml preferred over yaml) and, for every
// compose service whose name matches a discovered service, record the compose
// name plus container_name/hostname as aliases. Silently no-ops when no
// compose file or `services:` section exists.
async function collectComposeAliases(graph, scanPath, serviceIndex) {
  let composePath = null;
  for (const name of ["docker-compose.yml", "docker-compose.yaml"]) {
    const abs = path7.join(scanPath, name);
    if (await exists(abs)) {
      composePath = abs;
      break;
    }
  }
  if (!composePath) return;
  const compose = await readYaml(composePath);
  if (!compose?.services) return;
  for (const [composeName, svc] of Object.entries(compose.services)) {
    const serviceId3 = serviceIndex.get(composeName);
    if (!serviceId3) continue;
    const aliases = /* @__PURE__ */ new Set([composeName]);
    if (svc.container_name) aliases.add(svc.container_name);
    if (svc.hostname) aliases.add(svc.hostname);
    addAliases(graph, serviceId3, aliases);
  }
}
|
|
1813
|
+
// Dockerfile LABEL keys whose values are treated as service-name aliases.
var LABEL_KEYS = /* @__PURE__ */ new Set([
  "service",
  "service.name",
  "app",
  "app.name",
  "com.docker.compose.service",
  "org.opencontainers.image.title"
]);

/**
 * Extract alias candidates from Dockerfile LABEL instructions.
 * Scans each line for `LABEL k=v ...` (case-insensitive), parses the
 * key=value pairs (double-quoted, single-quoted, or bare values), and
 * returns the non-empty values whose keys are in LABEL_KEYS.
 * Multi-line (backslash-continued) LABELs are only matched on their
 * first line.
 */
function parseDockerfileLabels(content) {
  const values = [];
  const labelLine = /^\s*label\s+(.+)$/i;
  for (const line of content.split("\n")) {
    const lineMatch = labelLine.exec(line);
    if (lineMatch === null) continue;
    // Fresh /g regex per line: matchAll requires the global flag.
    const pairRegex = /([\w.-]+)\s*=\s*("([^"]*)"|'([^']*)'|([^\s]+))/g;
    for (const pair of lineMatch[1].matchAll(pairRegex)) {
      if (!LABEL_KEYS.has(pair[1].toLowerCase())) continue;
      const value = pair[3] ?? pair[4] ?? pair[5] ?? "";
      if (value) values.push(value);
    }
  }
  return values;
}
|
|
1839
|
+
// For each service with a `Dockerfile` at its root, harvest LABEL-derived
// alias candidates and attach them to the service node.
async function collectDockerfileAliases(graph, services) {
  for (const service of services) {
    const dockerfilePath = path7.join(service.dir, "Dockerfile");
    if (!await exists(dockerfilePath)) continue;
    const content = await fs7.readFile(dockerfilePath, "utf8");
    const aliases = parseDockerfileLabels(content);
    if (aliases.length > 0) addAliases(graph, service.node.id, aliases);
  }
}
|
|
1848
|
+
// Recursively collect config-file paths (extensions in CONFIG_FILE_EXTENSIONS)
// under `start`, up to `max` directory levels, skipping IGNORED_DIRS.
// readdir failures yield an empty listing rather than throwing.
async function walkYamlFiles(start, depth = 0, max = 5) {
  if (depth > max) return [];
  const out = [];
  const entries = await fs7.readdir(start, { withFileTypes: true }).catch(() => []);
  for (const entry of entries) {
    if (entry.isDirectory()) {
      if (IGNORED_DIRS.has(entry.name)) continue;
      out.push(...await walkYamlFiles(path7.join(start, entry.name), depth + 1, max));
    } else if (entry.isFile() && CONFIG_FILE_EXTENSIONS.has(path7.extname(entry.name))) {
      out.push(path7.join(start, entry.name));
    }
  }
  return out;
}
|
|
1862
|
+
// The in-cluster DNS names a Kubernetes Service is reachable by, from the
// bare name up to the fully-qualified `<name>.<ns>.svc.cluster.local`.
// Namespace defaults to "default".
function k8sHostnames(name, namespace) {
  const ns = namespace ?? "default";
  const qualified = [`${name}.${ns}`, `${name}.${ns}.svc`, `${name}.${ns}.svc.cluster.local`];
  return [name, ...qualified];
}
|
|
1871
|
+
/**
 * Resolve a Kubernetes manifest to a known service node id.
 * Tries, in order: spec.selector.app (or matchLabels.app), the
 * metadata.labels.app label, then metadata.name — returning the first
 * candidate present in `byName`, or null when none match.
 */
function k8sServiceTarget(doc, byName) {
  const selector = doc.spec?.selector;
  const candidates = [
    selector?.app ?? selector?.matchLabels?.app,
    doc.metadata?.labels?.app,
    doc.metadata?.name
  ];
  for (const candidate of candidates) {
    if (candidate && byName.has(candidate)) return byName.get(candidate);
  }
  return null;
}
|
|
1881
|
+
// Scan YAML/config files under `scanPath` for Kubernetes manifests and add
// in-cluster DNS hostnames as aliases on the matched service nodes.
// Multi-document files are supported; unparseable files are skipped.
async function collectK8sAliases(graph, scanPath, serviceIndex) {
  const files = await walkYamlFiles(scanPath);
  for (const file of files) {
    const content = await fs7.readFile(file, "utf8");
    let docs;
    try {
      docs = parseAllDocuments(content).map((d) => d.toJSON());
    } catch {
      // Not YAML we can parse — ignore this file entirely.
      continue;
    }
    for (const doc of docs) {
      if (!doc?.kind || !doc.metadata?.name) continue;
      if (!K8S_KINDS_WITH_HOSTNAMES.has(doc.kind)) continue;
      const target = k8sServiceTarget(doc, serviceIndex);
      if (!target) continue;
      addAliases(graph, target, k8sHostnames(doc.metadata.name, doc.metadata.namespace));
    }
  }
}
|
|
1900
|
+
// Enrich service nodes with aliases from every supported source, in order:
// docker-compose service names, Dockerfile LABELs, then Kubernetes manifests.
async function addServiceAliases(graph, scanPath, services) {
  const byName = indexServicesByName(services);
  await collectComposeAliases(graph, scanPath, byName);
  await collectDockerfileAliases(graph, services);
  await collectK8sAliases(graph, scanPath, byName);
}
|
|
1906
|
+
|
|
1907
|
+
// src/extract/databases/index.ts
|
|
1908
|
+
import path15 from "path";
|
|
1909
|
+
import { EdgeType as EdgeType3, NodeType as NodeType6, Provenance as Provenance4, databaseId as databaseId2 } from "@neat.is/types";
|
|
1910
|
+
|
|
1911
|
+
// src/extract/databases/db-config-yaml.ts
|
|
1912
|
+
import path8 from "path";
|
|
1913
|
+
// Parse an explicit `db-config.yaml` at the service root into a single
// database config. engineVersion is stringified (YAML may parse it as a
// number) with "unknown" as the fallback.
// NOTE(review): host/engine are passed through unvalidated — a malformed
// file yields a config with undefined fields; verify downstream tolerance.
async function parse(serviceDir) {
  const yamlPath = path8.join(serviceDir, "db-config.yaml");
  if (!await exists(yamlPath)) return [];
  const raw = await readYaml(yamlPath);
  return [
    {
      host: raw.host,
      port: raw.port,
      database: raw.database,
      engine: raw.engine,
      engineVersion: raw.engineVersion !== void 0 ? String(raw.engineVersion) : "unknown",
      sourceFile: yamlPath
    }
  ];
}
var dbConfigYamlParser = { name: "db-config.yaml", parse };
|
|
1929
|
+
|
|
1930
|
+
// src/extract/databases/dotenv.ts
|
|
1931
|
+
import { promises as fs9 } from "fs";
|
|
1932
|
+
import path10 from "path";
|
|
1933
|
+
|
|
1934
|
+
// src/extract/databases/shared.ts
|
|
1935
|
+
import { promises as fs8 } from "fs";
|
|
1936
|
+
import path9 from "path";
|
|
1937
|
+
/**
 * Map a connection-string scheme or ORM dialect name to a canonical engine id
 * ("postgresql" | "mysql" | "mongodb" | "redis" | "sqlite"), or null when
 * unrecognized. Driver suffixes after "+" (e.g. "postgresql+asyncpg",
 * "mongodb+srv") are stripped before matching, so the previous
 * `case "mongodb+srv"` branch was unreachable and has been removed.
 */
function schemeToEngine(scheme) {
  // Normalize: lowercase and drop any "+driver" suffix.
  const s = scheme.toLowerCase().split("+")[0];
  switch (s) {
    case "postgres":
    case "postgresql":
      return "postgresql";
    case "mysql":
    case "mariadb":
      return "mysql";
    case "mongodb":
      // "mongodb+srv" also lands here after the "+" split above.
      return "mongodb";
    case "redis":
    case "rediss":
      return "redis";
    case "sqlite":
      return "sqlite";
    default:
      return null;
  }
}
|
|
1958
|
+
/**
 * Parse a database connection URL into a config object, or null when the
 * scheme is unrecognized or the URL doesn't match the expected shape.
 * Credentials are discarded; database defaults to ""; port is undefined
 * when absent; engineVersion is always "unknown" (URLs don't carry one).
 */
function parseConnectionString(url) {
  const pattern = /^(?<scheme>[a-z][a-z+]*):\/\/(?:[^@/]+(?::[^@]*)?@)?(?<host>[^:/?]+)(?::(?<port>\d+))?(?:\/(?<db>[^?#]*))?/i;
  const match = pattern.exec(url);
  if (!match?.groups) return null;
  const { scheme, host, port, db } = match.groups;
  const engine = schemeToEngine(scheme);
  if (engine === null) return null;
  return {
    host,
    port: port ? Number(port) : void 0,
    database: db ?? "",
    engine,
    engineVersion: "unknown"
  };
}
|
|
1973
|
+
// Read a UTF-8 file, resolving to null (instead of rejecting) when the file
// is missing or unreadable.
async function readIfExists(filePath) {
  return fs8.readFile(filePath, "utf8").catch(() => null);
}
|
|
1980
|
+
// Return the absolute path of the first candidate (relative to `serviceDir`)
// that exists and is readable, or null when none do.
async function findFirst(serviceDir, candidates) {
  for (const rel of candidates) {
    const abs = path9.join(serviceDir, rel);
    const content = await readIfExists(abs);
    if (content !== null) return abs;
  }
  return null;
}
|
|
1988
|
+
/**
 * Infer a database engine (and version from the tag) from a container image
 * reference, or null for non-database images.
 *
 * Fix: a ":" only separates the tag when it appears after the last "/".
 * Previously `lastIndexOf(":")` treated a registry port as the tag separator,
 * so references like "localhost:5000/postgres" mis-split into
 * repo "localhost" / tag "5000/postgres" and returned null.
 */
function engineFromImage(image) {
  const lower = image.toLowerCase();
  const slash = lower.lastIndexOf("/");
  const colon = lower.lastIndexOf(":");
  // colon > slash also covers the no-slash case (slash === -1).
  const hasTag = colon > slash;
  const repo = hasTag ? lower.slice(0, colon) : lower;
  const tag = hasTag ? lower.slice(colon + 1) : "latest";
  // Match on the final repo path segment ("bitnami/mysql" -> "mysql").
  const last = repo.split("/").pop() ?? repo;
  let engine = null;
  if (last.startsWith("postgres")) engine = "postgresql";
  else if (last.startsWith("mysql") || last.startsWith("mariadb")) engine = "mysql";
  else if (last.startsWith("mongo")) engine = "mongodb";
  else if (last.startsWith("redis")) engine = "redis";
  else if (last.startsWith("sqlite")) engine = "sqlite";
  if (!engine) return null;
  // Take the leading numeric portion of the tag ("8.0.36-debian" -> "8.0.36").
  const versionMatch = tag.match(/^(\d+(?:\.\d+){0,2})/);
  return {
    engine,
    engineVersion: versionMatch ? versionMatch[1] : "unknown"
  };
}
|
|
2007
|
+
|
|
2008
|
+
// src/extract/databases/dotenv.ts
|
|
2009
|
+
// Environment variable names whose values are treated as database connection
// strings when scanning .env files (compared case-insensitively by the caller).
var CONNECTION_KEYS = /* @__PURE__ */ new Set([
  "DATABASE_URL",
  "DB_URL",
  "POSTGRES_URL",
  "POSTGRESQL_URL",
  "MYSQL_URL",
  "MONGODB_URI",
  "MONGO_URL",
  "MONGO_URI",
  "REDIS_URL"
]);
|
|
2020
|
+
/**
 * Parse one dotenv line into { key, value }, or null for blanks, comments,
 * and lines without "=". Surrounding single or double quotes are stripped
 * from the value; key and value are whitespace-trimmed.
 */
function parseDotenvLine(line) {
  const trimmed = line.trim();
  if (trimmed === "" || trimmed.startsWith("#")) return null;
  const eq = trimmed.indexOf("=");
  if (eq < 0) return null;
  const key = trimmed.slice(0, eq).trim();
  let value = trimmed.slice(eq + 1).trim();
  const quoted =
    (value.startsWith('"') && value.endsWith('"')) ||
    (value.startsWith("'") && value.endsWith("'"));
  if (quoted) value = value.slice(1, -1);
  return { key, value };
}
|
|
2032
|
+
// Scan the service directory (non-recursively) for dotenv files, pull out
// known connection-string variables, and parse them into database configs.
// Duplicate engine/host/port/database combinations are emitted only once.
async function parse2(serviceDir) {
  const entries = await fs9.readdir(serviceDir, { withFileTypes: true }).catch(() => []);
  const configs = [];
  const seen = /* @__PURE__ */ new Set();
  for (const entry of entries) {
    if (!entry.isFile()) continue;
    // isConfigFile classifies filenames; only "env"-type files are read.
    const match = isConfigFile(entry.name);
    if (!match.match || match.fileType !== "env") continue;
    const filePath = path10.join(serviceDir, entry.name);
    const content = await fs9.readFile(filePath, "utf8");
    for (const line of content.split("\n")) {
      const parsed = parseDotenvLine(line);
      if (!parsed) continue;
      if (!CONNECTION_KEYS.has(parsed.key.toUpperCase())) continue;
      const config = parseConnectionString(parsed.value);
      if (!config) continue;
      // De-dupe across files by the logical connection identity.
      const key = `${config.engine}://${config.host}:${config.port ?? ""}/${config.database}`;
      if (seen.has(key)) continue;
      seen.add(key);
      configs.push({ ...config, sourceFile: filePath });
    }
  }
  return configs;
}
var dotenvParser = { name: ".env", parse: parse2 };
|
|
2057
|
+
|
|
2058
|
+
// src/extract/databases/prisma.ts
|
|
2059
|
+
import path11 from "path";
|
|
2060
|
+
// Extract the datasource from `prisma/schema.prisma`.
// Prefers a literal url in the datasource block; otherwise falls back to a
// placeholder host ("<engine>-prisma") so the engine is still recorded when
// the url is an env() reference.
async function parse3(serviceDir) {
  const schemaPath = path11.join(serviceDir, "prisma", "schema.prisma");
  const content = await readIfExists(schemaPath);
  if (!content) return [];
  const block = content.match(/datasource\s+\w+\s*\{([^}]*)\}/s);
  if (!block) return [];
  const body = block[1] ?? "";
  const providerMatch = body.match(/provider\s*=\s*"([^"]+)"/);
  if (!providerMatch) return [];
  const engine = schemeToEngine(providerMatch[1]);
  if (!engine) return [];
  const urlMatch = body.match(/url\s*=\s*"([^"]+)"/);
  if (urlMatch) {
    const config = parseConnectionString(urlMatch[1]);
    if (config) return [{ ...config, sourceFile: schemaPath }];
  }
  // url missing or not a parseable literal (e.g. env("DATABASE_URL")).
  return [
    {
      host: `${engine}-prisma`,
      database: "",
      engine,
      engineVersion: "unknown",
      sourceFile: schemaPath
    }
  ];
}
var prismaParser = { name: "prisma", parse: parse3 };
|
|
2087
|
+
|
|
2088
|
+
// src/extract/databases/drizzle.ts
|
|
2089
|
+
// Drizzle `dialect` values mapped to canonical engine ids.
var DIALECT_TO_ENGINE = {
  postgresql: "postgresql",
  postgres: "postgresql",
  pg: "postgresql",
  mysql: "mysql",
  mysql2: "mysql",
  sqlite: "sqlite",
  "better-sqlite": "sqlite"
};
// Extract database config from a drizzle.config.(ts|js|mjs) file using
// regex scanning (the file is never executed). Resolution order:
// literal connection URL, then discrete host/port/database fields, then a
// placeholder host ("<engine>-drizzle") recording only the engine.
async function parse4(serviceDir) {
  const filePath = await findFirst(serviceDir, [
    "drizzle.config.ts",
    "drizzle.config.js",
    "drizzle.config.mjs"
  ]);
  if (!filePath) return [];
  const content = await readIfExists(filePath);
  if (!content) return [];
  const dialectMatch = content.match(/dialect\s*:\s*['"`]([^'"`]+)['"`]/);
  if (!dialectMatch) return [];
  const engine = DIALECT_TO_ENGINE[dialectMatch[1].toLowerCase()] ?? schemeToEngine(dialectMatch[1]);
  if (!engine) return [];
  const urlMatch = content.match(
    /(?:url|connectionString)\s*:\s*['"`]([a-z][a-z+]*:\/\/[^'"`]+)['"`]/i
  );
  if (urlMatch) {
    const config = parseConnectionString(urlMatch[1]);
    if (config) return [{ ...config, sourceFile: filePath }];
  }
  const hostMatch = content.match(/host\s*:\s*['"`]([^'"`]+)['"`]/);
  if (hostMatch) {
    const portMatch = content.match(/port\s*:\s*(\d+)/);
    const dbMatch = content.match(/database\s*:\s*['"`]([^'"`]+)['"`]/);
    return [
      {
        host: hostMatch[1],
        port: portMatch ? Number(portMatch[1]) : void 0,
        database: dbMatch?.[1] ?? "",
        engine,
        engineVersion: "unknown",
        sourceFile: filePath
      }
    ];
  }
  return [
    { host: `${engine}-drizzle`, database: "", engine, engineVersion: "unknown", sourceFile: filePath }
  ];
}
var drizzleParser = { name: "drizzle", parse: parse4 };
|
|
2138
|
+
|
|
2139
|
+
// src/extract/databases/knex.ts
|
|
2140
|
+
// Knex `client` values mapped to canonical engine ids.
var CLIENT_TO_ENGINE = {
  pg: "postgresql",
  postgres: "postgresql",
  postgresql: "postgresql",
  mysql: "mysql",
  mysql2: "mysql",
  sqlite3: "sqlite",
  "better-sqlite3": "sqlite"
};
// Extract database config from a knexfile via regex scanning (the file is
// never executed). Resolution order mirrors the drizzle parser: literal
// connection URL, discrete host/port/database fields, then a placeholder
// host ("<engine>-knex").
async function parse5(serviceDir) {
  const filePath = await findFirst(serviceDir, [
    "knexfile.js",
    "knexfile.ts",
    "knexfile.cjs",
    "knexfile.mjs"
  ]);
  if (!filePath) return [];
  const content = await readIfExists(filePath);
  if (!content) return [];
  const clientMatch = content.match(/client\s*:\s*['"`]([^'"`]+)['"`]/);
  if (!clientMatch) return [];
  const engine = CLIENT_TO_ENGINE[clientMatch[1].toLowerCase()];
  if (!engine) return [];
  const urlMatch = content.match(
    /connection\s*:\s*['"`]([a-z][a-z+]*:\/\/[^'"`]+)['"`]/i
  );
  if (urlMatch) {
    const config = parseConnectionString(urlMatch[1]);
    if (config) return [{ ...config, sourceFile: filePath }];
  }
  const host = content.match(/host\s*:\s*['"`]([^'"`]+)['"`]/)?.[1];
  if (host) {
    const port = content.match(/port\s*:\s*(\d+)/)?.[1];
    const database = content.match(/database\s*:\s*['"`]([^'"`]+)['"`]/)?.[1] ?? "";
    return [
      {
        host,
        port: port ? Number(port) : void 0,
        database,
        engine,
        engineVersion: "unknown",
        sourceFile: filePath
      }
    ];
  }
  return [{ host: `${engine}-knex`, database: "", engine, engineVersion: "unknown", sourceFile: filePath }];
}
var knexParser = { name: "knex", parse: parse5 };
|
|
2188
|
+
|
|
2189
|
+
// src/extract/databases/ormconfig.ts
|
|
2190
|
+
import path12 from "path";
|
|
2191
|
+
// Parse TypeORM ormconfig.(json|yaml|yml) files. The file may contain one
// connection object or an array of them; entries without a recognized type
// and a host are skipped. The first file that yields at least one config
// wins — later candidates are not consulted.
async function parse6(serviceDir) {
  for (const candidate of ["ormconfig.json", "ormconfig.yaml", "ormconfig.yml"]) {
    const abs = path12.join(serviceDir, candidate);
    if (!await exists(abs)) continue;
    const raw = candidate.endsWith(".json") ? await readJson(abs) : await readYaml(abs);
    const entries = Array.isArray(raw) ? raw : [raw];
    const out = [];
    for (const entry of entries) {
      if (!entry?.type || !entry.host) continue;
      const engine = schemeToEngine(entry.type);
      if (!engine) continue;
      out.push({
        host: entry.host,
        port: entry.port,
        database: entry.database ?? "",
        engine,
        engineVersion: "unknown",
        sourceFile: abs
      });
    }
    if (out.length > 0) return out;
  }
  return [];
}
var ormconfigParser = { name: "ormconfig", parse: parse6 };
|
|
2216
|
+
|
|
2217
|
+
// src/extract/databases/typeorm.ts
|
|
2218
|
+
// Extract a connection from a TypeORM data-source file via regex scanning
// (the file is never executed). Fields are read from the first
// `new DataSource({...})` object literal when present, otherwise from the
// whole file; both type and host must be found to produce a config.
async function parse7(serviceDir) {
  const filePath = await findFirst(serviceDir, [
    "data-source.ts",
    "data-source.js",
    "src/data-source.ts",
    "src/data-source.js"
  ]);
  if (!filePath) return [];
  const content = await readIfExists(filePath);
  if (!content) return [];
  const block = content.match(/new\s+DataSource\s*\(\s*\{([\s\S]*?)\}\s*\)/);
  const body = block ? block[1] : content;
  const typeMatch = body.match(/type\s*:\s*['"`]([^'"`]+)['"`]/);
  const host = body.match(/host\s*:\s*['"`]([^'"`]+)['"`]/)?.[1];
  if (!typeMatch || !host) return [];
  const engine = schemeToEngine(typeMatch[1]);
  if (!engine) return [];
  const port = body.match(/port\s*:\s*(\d+)/)?.[1];
  const database = body.match(/database\s*:\s*['"`]([^'"`]+)['"`]/)?.[1] ?? "";
  return [
    {
      host,
      port: port ? Number(port) : void 0,
      database,
      engine,
      engineVersion: "unknown",
      sourceFile: filePath
    }
  ];
}
var typeormParser = { name: "typeorm", parse: parse7 };
|
|
2249
|
+
|
|
2250
|
+
// src/extract/databases/sequelize.ts
|
|
2251
|
+
import path13 from "path";
|
|
2252
|
+
// Parse a Sequelize CLI `config/config.json` (keyed by environment name,
// e.g. development/test/production) into configs, de-duplicating identical
// engine/host/port/database combinations across environments.
async function parse8(serviceDir) {
  const configPath = path13.join(serviceDir, "config", "config.json");
  if (!await exists(configPath)) return [];
  const raw = await readJson(configPath);
  const out = [];
  const seen = /* @__PURE__ */ new Set();
  for (const entry of Object.values(raw)) {
    if (!entry?.dialect || !entry.host) continue;
    const engine = schemeToEngine(entry.dialect);
    if (!engine) continue;
    const key = `${engine}://${entry.host}:${entry.port ?? ""}/${entry.database ?? ""}`;
    if (seen.has(key)) continue;
    seen.add(key);
    out.push({
      host: entry.host,
      port: entry.port,
      database: entry.database ?? "",
      engine,
      engineVersion: "unknown",
      sourceFile: configPath
    });
  }
  return out;
}
var sequelizeParser = { name: "sequelize", parse: parse8 };
|
|
2277
|
+
|
|
2278
|
+
// src/extract/databases/docker-compose.ts
|
|
2279
|
+
import path14 from "path";
|
|
2280
|
+
/**
 * Pick the first usable container-side port from a compose service's
 * `ports` list. Each entry may be a number or a "host:container"-style
 * string; the last ":"-separated segment is taken as the container port.
 * Returns undefined when no entry yields a positive finite number.
 */
function portFromService(svc) {
  for (const entry of svc.ports ?? []) {
    const segments = String(entry).split(":");
    const candidate = Number(segments[segments.length - 1]);
    if (Number.isFinite(candidate) && candidate > 0) return candidate;
  }
  return void 0;
}
|
|
2289
|
+
/**
 * Derive the database name from a compose service's environment, which may
 * be an object map or an array of "KEY=value" strings. Checks POSTGRES_DB,
 * MYSQL_DATABASE, then MONGO_INITDB_DATABASE; returns "" when none is set.
 */
function databaseFromEnv(svc) {
  const env = svc.environment;
  const lookup = (key) => {
    if (!env) return void 0;
    if (Array.isArray(env)) {
      const hit = env.find((line) => line.split("=")[0] === key);
      return hit === void 0 ? void 0 : hit.split("=")[1];
    }
    return env[key];
  };
  return lookup("POSTGRES_DB") ?? lookup("MYSQL_DATABASE") ?? lookup("MONGO_INITDB_DATABASE") ?? "";
}
|
|
2304
|
+
// Discovers database containers declared in a service-local docker-compose
// file. Tries both the .yml and .yaml filenames; for each recognized database
// image, emits a DbConfig whose host is the compose service name.
async function parse9(serviceDir) {
  for (const name of ["docker-compose.yml", "docker-compose.yaml"]) {
    const abs = path14.join(serviceDir, name);
    if (!await exists(abs)) continue;
    const raw = await readYaml(abs);
    // Fix: a compose file without a `services` key previously aborted the
    // whole search (`return []`), so a valid .yaml variant sitting next to a
    // stub .yml was never inspected. Keep looking instead.
    if (!raw?.services) continue;
    const out = [];
    for (const [serviceName, svc] of Object.entries(raw.services)) {
      if (!svc.image) continue;
      const meta = engineFromImage(svc.image);
      if (!meta) continue; // not a known database image
      out.push({
        host: serviceName,
        port: portFromService(svc),
        database: databaseFromEnv(svc),
        engine: meta.engine,
        engineVersion: meta.engineVersion,
        sourceFile: abs
      });
    }
    return out;
  }
  return [];
}
|
|
2328
|
+
// Parser plugin: reads database containers out of docker-compose files (parse9).
var dockerComposeParser = { name: "docker-compose", parse: parse9 };
|
|
2329
|
+
|
|
2330
|
+
// src/extract/databases/index.ts
|
|
2331
|
+
// Ordered list of database-config parsers. Order matters: when several
// parsers report the same host, the earliest wins (first-wins merge in
// addDatabasesAndCompat).
var DB_PARSERS = [
  dbConfigYamlParser,
  dotenvParser,
  prismaParser,
  drizzleParser,
  knexParser,
  ormconfigParser,
  typeormParser,
  sequelizeParser,
  dockerComposeParser
];
|
|
2342
|
+
// Lists the Node drivers known to work with `engine`, each with the minimum
// driver version recorded in the compatibility table.
function compatibleDriversFor(engine) {
  const drivers = [];
  for (const pair of compatPairs()) {
    if (pair.engine !== engine) continue;
    drivers.push({ name: pair.driver, minVersion: pair.minDriverVersion });
  }
  return drivers;
}
|
|
2345
|
+
// Converts a parsed DbConfig into a graph DatabaseNode. The node id is
// derived from the host alone, so the same host reported by several parsers
// collapses into one node; `name` falls back to the host when the config
// carries no database name.
function toDatabaseNode(config) {
  return {
    id: databaseId2(config.host),
    type: NodeType6.DatabaseNode,
    name: config.database || config.host,
    engine: config.engine,
    engineVersion: config.engineVersion,
    compatibleDrivers: compatibleDriversFor(config.engine),
    host: config.host,
    port: config.port
  };
}
|
|
2357
|
+
// Computes compatibility findings for one service and stores them on
// service.node.incompatibilities (only when non-empty). Four rule families
// are evaluated against the service's declared deps (dependencies merged
// with devDependencies; devDependencies win on key collisions):
//   1. driver-engine:     DB driver vs the engines found in `configs`
//   2. node-engine:       package's required Node vs the service's Node engine
//   3. package-conflict:  package requiring a sibling package/version
//   4. deprecated-api:    package versions with known-deprecated APIs
// `seen` dedupes findings across configs/rules via composite string keys.
function attachIncompatibilities(service, configs) {
  const deps = { ...service.pkg.dependencies ?? {}, ...service.pkg.devDependencies ?? {} };
  const incompatibilities = [];
  const seen = /* @__PURE__ */ new Set();
  // 1. Driver/engine pairs from the compat table.
  for (const config of configs) {
    for (const pair of compatPairs()) {
      if (pair.engine !== config.engine) continue;
      const declaredVersion = cleanVersion(deps[pair.driver]);
      if (!declaredVersion) continue; // driver not declared by this service
      const result = checkCompatibility(
        pair.driver,
        declaredVersion,
        config.engine,
        config.engineVersion
      );
      if (!result.compatible && result.reason) {
        const key = `driver-engine|${pair.driver}@${declaredVersion}|${config.engine}@${config.engineVersion}`;
        if (seen.has(key)) continue;
        seen.add(key);
        incompatibilities.push({
          kind: "driver-engine",
          driver: pair.driver,
          driverVersion: declaredVersion,
          engine: config.engine,
          engineVersion: config.engineVersion,
          reason: result.reason
        });
      }
    }
  }
  // 2. Node-engine constraints. The node's own nodeEngine takes precedence
  // over package.json "engines.node".
  const serviceNodeEngine = service.node.nodeEngine ?? service.pkg.engines?.node;
  for (const constraint of nodeEngineConstraints()) {
    const declared = cleanVersion(deps[constraint.package]);
    if (!declared) continue;
    const result = checkNodeEngineConstraint(constraint, declared, serviceNodeEngine);
    if (!result.compatible && result.reason) {
      const key = `node-engine|${constraint.package}@${declared}|${serviceNodeEngine ?? ""}`;
      if (seen.has(key)) continue;
      seen.add(key);
      incompatibilities.push({
        kind: "node-engine",
        package: constraint.package,
        packageVersion: declared,
        requiredNodeVersion: result.requiredNodeVersion ?? constraint.minNodeVersion,
        // declaredNodeEngine only attached when the service declares one.
        ...serviceNodeEngine ? { declaredNodeEngine: serviceNodeEngine } : {},
        reason: result.reason
      });
    }
  }
  // 3. Package-vs-package conflicts (a package requiring a sibling).
  for (const conflict of packageConflicts()) {
    const declared = cleanVersion(deps[conflict.package]);
    if (!declared) continue;
    const requiredVersion = cleanVersion(deps[conflict.requires.name]);
    const result = checkPackageConflict(conflict, declared, requiredVersion);
    if (!result.compatible && result.reason) {
      const key = `package-conflict|${conflict.package}@${declared}|${conflict.requires.name}@${requiredVersion ?? "missing"}`;
      if (seen.has(key)) continue;
      seen.add(key);
      incompatibilities.push({
        kind: "package-conflict",
        package: conflict.package,
        packageVersion: declared,
        requires: conflict.requires,
        ...requiredVersion ? { foundVersion: requiredVersion } : {},
        reason: result.reason
      });
    }
  }
  // 4. Deprecated-API rules.
  for (const rule of deprecatedApis()) {
    const declared = cleanVersion(deps[rule.package]);
    if (declared === void 0) continue;
    const result = checkDeprecatedApi(rule, declared);
    if (!result.compatible && result.reason) {
      const key = `deprecated-api|${rule.package}@${declared}`;
      if (seen.has(key)) continue;
      seen.add(key);
      incompatibilities.push({
        kind: "deprecated-api",
        package: rule.package,
        packageVersion: declared,
        reason: result.reason
      });
    }
  }
  // Mutates the service node in place; the caller later syncs the node's
  // attributes back onto the graph.
  if (incompatibilities.length > 0) service.node.incompatibilities = incompatibilities;
}
|
|
2443
|
+
// Orchestrates database extraction for every service: runs all DB_PARSERS,
// merges configs per host (first parser to report a host wins, following
// DB_PARSERS order), adds DatabaseNodes plus CONNECTS_TO edges, then computes
// compatibility findings and syncs them onto the service's graph node.
// Returns counters for newly added nodes/edges.
async function addDatabasesAndCompat(graph, services, scanPath) {
  let nodesAdded = 0;
  let edgesAdded = 0;
  for (const service of services) {
    const merged = /* @__PURE__ */ new Map();
    for (const parser of DB_PARSERS) {
      let configs;
      try {
        configs = await parser.parse(service.dir);
      } catch (err) {
        // A single broken parser must not abort extraction for the service.
        console.warn(
          `[neat] ${parser.name} parser failed on ${service.node.name}: ${err.message}`
        );
        continue;
      }
      for (const config of configs) {
        if (!config.host) continue;
        // First-wins merge keyed by host.
        if (!merged.has(config.host)) merged.set(config.host, config);
      }
    }
    const allConfigs = [...merged.values()];
    for (const config of allConfigs) {
      const dbNode = toDatabaseNode(config);
      if (!graph.hasNode(dbNode.id)) {
        graph.addNode(dbNode.id, { ...dbNode, discoveredVia: "static" });
        nodesAdded++;
      } else {
        // Node may have been discovered at runtime via OTel first: overlay
        // the static attributes and record that both sources saw it.
        const existing = graph.getNodeAttributes(dbNode.id);
        const mergedDiscoveredVia = existing.discoveredVia === "otel" ? "merged" : "static";
        graph.replaceNodeAttributes(dbNode.id, {
          ...existing,
          ...dbNode,
          discoveredVia: mergedDiscoveredVia
        });
      }
      const edge = {
        id: extractedEdgeId2(service.node.id, dbNode.id, EdgeType3.CONNECTS_TO),
        source: service.node.id,
        target: dbNode.id,
        type: EdgeType3.CONNECTS_TO,
        provenance: Provenance4.EXTRACTED,
        // Evidence (normalized to forward slashes) only when the parser
        // recorded a source file.
        ...config.sourceFile ? {
          evidence: {
            file: path15.relative(scanPath, config.sourceFile).split(path15.sep).join("/")
          }
        } : {}
      };
      if (!graph.hasEdge(edge.id)) {
        graph.addEdgeWithKey(edge.id, edge.source, edge.target, edge);
        edgesAdded++;
      }
    }
    // Mutates service.node.incompatibilities in place.
    attachIncompatibilities(service, allConfigs);
    if (graph.hasNode(service.node.id)) {
      // Sync the (possibly updated) service node back onto the graph,
      // preserving aliases the graph already learned and dropping any stale
      // incompatibilities attribute when none were found this run.
      const current = graph.getNodeAttributes(service.node.id);
      const updated = {
        ...current,
        ...service.node,
        ...current.aliases ? { aliases: current.aliases } : {}
      };
      if (!service.node.incompatibilities || service.node.incompatibilities.length === 0) {
        delete updated.incompatibilities;
      }
      graph.replaceNodeAttributes(service.node.id, updated);
    }
  }
  return { nodesAdded, edgesAdded };
}
|
|
2511
|
+
|
|
2512
|
+
// src/extract/configs.ts
|
|
2513
|
+
import { promises as fs10 } from "fs";
|
|
2514
|
+
import path16 from "path";
|
|
2515
|
+
import { EdgeType as EdgeType4, NodeType as NodeType7, Provenance as Provenance5, configId } from "@neat.is/types";
|
|
2516
|
+
// Recursively collects config-file paths under `dir`, skipping IGNORED_DIRS.
// A directory that cannot be read (permissions, TOCTOU races) is skipped
// instead of rejecting the whole walk — consistent with walkSourceFiles,
// which already guards its readdir the same way.
async function walkConfigFiles(dir) {
  const out = [];
  async function walk(current) {
    const entries = await fs10.readdir(current, { withFileTypes: true }).catch(() => []);
    for (const entry of entries) {
      const full = path16.join(current, entry.name);
      if (entry.isDirectory()) {
        if (!IGNORED_DIRS.has(entry.name)) await walk(full);
      } else if (entry.isFile() && isConfigFile(entry.name).match) {
        out.push(full);
      }
    }
  }
  await walk(dir);
  return out;
}
|
|
2532
|
+
// Adds a ConfigNode for every config file found inside each service directory
// and a CONFIGURED_BY edge from the service to it. Node ids are derived from
// the scan-root-relative path, so the same file is never duplicated.
// Returns counters for newly added nodes/edges.
async function addConfigNodes(graph, services, scanPath) {
  let nodesAdded = 0;
  let edgesAdded = 0;
  for (const service of services) {
    const configFiles = await walkConfigFiles(service.dir);
    for (const file of configFiles) {
      const relPath = path16.relative(scanPath, file);
      const node = {
        id: configId(relPath),
        type: NodeType7.ConfigNode,
        name: path16.basename(file),
        path: relPath,
        fileType: isConfigFile(path16.basename(file)).fileType
      };
      if (!graph.hasNode(node.id)) {
        graph.addNode(node.id, node);
        nodesAdded++;
      }
      const edge = {
        id: extractedEdgeId2(service.node.id, node.id, EdgeType4.CONFIGURED_BY),
        source: service.node.id,
        target: node.id,
        type: EdgeType4.CONFIGURED_BY,
        provenance: Provenance5.EXTRACTED,
        // Evidence path normalized to forward slashes for stable output.
        evidence: { file: relPath.split(path16.sep).join("/") }
      };
      if (!graph.hasEdge(edge.id)) {
        graph.addEdgeWithKey(edge.id, edge.source, edge.target, edge);
        edgesAdded++;
      }
    }
  }
  return { nodesAdded, edgesAdded };
}
|
|
2566
|
+
|
|
2567
|
+
// src/extract/calls/index.ts
|
|
2568
|
+
import { EdgeType as EdgeType6, NodeType as NodeType8, Provenance as Provenance7 } from "@neat.is/types";
|
|
2569
|
+
|
|
2570
|
+
// src/extract/calls/http.ts
|
|
2571
|
+
import path18 from "path";
|
|
2572
|
+
import Parser from "tree-sitter";
|
|
2573
|
+
import JavaScript from "tree-sitter-javascript";
|
|
2574
|
+
import Python from "tree-sitter-python";
|
|
2575
|
+
import { EdgeType as EdgeType5, Provenance as Provenance6 } from "@neat.is/types";
|
|
2576
|
+
|
|
2577
|
+
// src/extract/calls/shared.ts
|
|
2578
|
+
import { promises as fs11 } from "fs";
|
|
2579
|
+
import path17 from "path";
|
|
2580
|
+
// Recursively lists source files (by SERVICE_FILE_EXTENSIONS) under `dir`,
// skipping IGNORED_DIRS. Unreadable directories are silently skipped.
async function walkSourceFiles(dir) {
  const found = [];
  const visit = async (current) => {
    const entries = await fs11.readdir(current, { withFileTypes: true }).catch(() => []);
    for (const entry of entries) {
      const abs = path17.join(current, entry.name);
      if (entry.isDirectory()) {
        if (IGNORED_DIRS.has(entry.name)) continue;
        await visit(abs);
      } else if (entry.isFile() && SERVICE_FILE_EXTENSIONS.has(path17.extname(entry.name))) {
        found.push(abs);
      }
    }
  };
  await visit(dir);
  return found;
}
|
|
2596
|
+
// Reads every source file under `dir` into { path, content } records.
// Files that fail to read (permissions, races) are skipped on purpose.
async function loadSourceFiles(dir) {
  const files = [];
  for (const filePath of await walkSourceFiles(dir)) {
    try {
      const content = await fs11.readFile(filePath, "utf8");
      files.push({ path: filePath, content });
    } catch {
      // best-effort: unreadable file is simply omitted
    }
  }
  return files;
}
|
|
2608
|
+
// Returns the 1-based line number of the first occurrence of `needle` in
// `text`; falls back to 1 when the needle is absent.
function lineOf(text, needle) {
  const at = text.indexOf(needle);
  if (at < 0) return 1;
  let line = 1;
  for (let i = 0; i < at; i += 1) {
    if (text[i] === "\n") line += 1;
  }
  return line;
}
|
|
2613
|
+
// Returns the trimmed content of 1-based line `line` of `text`, or "" when
// the line is out of range.
function snippet(text, line) {
  const row = text.split("\n")[line - 1];
  return row === undefined ? "" : row.trim();
}
|
|
2617
|
+
|
|
2618
|
+
// src/extract/calls/http.ts
|
|
2619
|
+
// tree-sitter node types that hold raw string text: "string_fragment" (JS
// grammar) and "string_content" (Python grammar).
var STRING_LITERAL_NODE_TYPES = /* @__PURE__ */ new Set(["string_fragment", "string_content"]);
|
|
2620
|
+
// Depth-first walk over a tree-sitter syntax tree, appending the text of
// every string-literal node (per STRING_LITERAL_NODE_TYPES) to `out`.
function collectStringLiterals(node, out) {
  if (STRING_LITERAL_NODE_TYPES.has(node.type)) {
    out.push(node.text);
  }
  for (let idx = 0; idx < node.namedChildCount; idx += 1) {
    const next = node.namedChild(idx);
    if (next) collectStringLiterals(next, out);
  }
}
|
|
2627
|
+
// Parses `source` with the given tree-sitter parser and returns the subset
// of `knownHosts` that appear inside any string literal as a URL authority
// ("//host" or "//host:port").
function callsFromSource(source, parser, knownHosts) {
  const literals = [];
  collectStringLiterals(parser.parse(source).rootNode, literals);
  const hits = new Set();
  for (const host of knownHosts) {
    const bare = `//${host}`;
    const withPort = `//${host}:`;
    if (literals.some((lit) => lit.includes(bare) || lit.includes(withPort))) {
      hits.add(host);
    }
  }
  return hits;
}
|
|
2641
|
+
// Builds a tree-sitter parser configured for JavaScript sources.
function makeJsParser() {
  const p = new Parser();
  p.setLanguage(JavaScript);
  return p;
}
|
|
2646
|
+
// Builds a tree-sitter parser configured for Python sources.
function makePyParser() {
  const p = new Parser();
  p.setLanguage(Python);
  return p;
}
|
|
2651
|
+
// Adds service-to-service CALLS edges by scanning each service's sources for
// URL references ("//<host>") to other known services. Hosts are matched by
// either the sibling service's directory basename or its package name.
// Returns the number of edges added.
async function addHttpCallEdges(graph, services) {
  const jsParser = makeJsParser();
  const pyParser = makePyParser();
  // Both lookup tables are keyed by dir basename AND package name.
  const knownHosts = /* @__PURE__ */ new Set();
  const hostToNodeId = /* @__PURE__ */ new Map();
  for (const service of services) {
    knownHosts.add(path18.basename(service.dir));
    knownHosts.add(service.pkg.name);
    hostToNodeId.set(path18.basename(service.dir), service.node.id);
    hostToNodeId.set(service.pkg.name, service.node.id);
  }
  let edgesAdded = 0;
  for (const service of services) {
    const files = await loadSourceFiles(service.dir);
    // target node id -> first file/host that referenced it (edge evidence).
    const seenTargets = /* @__PURE__ */ new Map();
    for (const file of files) {
      const parser = path18.extname(file.path) === ".py" ? pyParser : jsParser;
      const targets = callsFromSource(file.content, parser, knownHosts);
      for (const t of targets) {
        const targetId = hostToNodeId.get(t);
        // Skip unknown hosts and self-references.
        if (!targetId || targetId === service.node.id) continue;
        if (!seenTargets.has(targetId)) {
          seenTargets.set(targetId, { file: file.path, host: t });
        }
      }
    }
    for (const [targetId, evidenceFile] of seenTargets) {
      const fileContent = files.find((f) => f.path === evidenceFile.file)?.content ?? "";
      const line = lineOf(fileContent, `//${evidenceFile.host}`);
      const edge = {
        id: extractedEdgeId2(service.node.id, targetId, EdgeType5.CALLS),
        source: service.node.id,
        target: targetId,
        type: EdgeType5.CALLS,
        provenance: Provenance6.EXTRACTED,
        evidence: {
          file: path18.relative(service.dir, evidenceFile.file),
          line,
          snippet: snippet(fileContent, line)
        }
      };
      if (!graph.hasEdge(edge.id)) {
        graph.addEdgeWithKey(edge.id, edge.source, edge.target, edge);
        edgesAdded++;
      }
    }
  }
  return edgesAdded;
}
|
|
2700
|
+
|
|
2701
|
+
// src/extract/calls/kafka.ts
|
|
2702
|
+
import path19 from "path";
|
|
2703
|
+
import { infraId } from "@neat.is/types";
|
|
2704
|
+
// Heuristic: `producer.send({ ..., topic: 'name' })` within a bounded window
// (40 chars to the call, 200 chars into the options object).
var PRODUCER_TOPIC_RE = /(?:producer|kafkaProducer)[\s\S]{0,40}?\.send\s*\(\s*\{[\s\S]{0,200}?topic\s*:\s*['"`]([^'"`]+)['"`]/g;
// Heuristic: `consumer.subscribe/run({ ..., topic(s): 'name' })`; captures
// only the first topic of an array literal.
var CONSUMER_TOPIC_RE = /(?:consumer|kafkaConsumer)[\s\S]{0,40}?\.(?:subscribe|run)\s*\(\s*\{[\s\S]{0,200}?topic[s]?\s*:\s*(?:\[\s*)?['"`]([^'"`]+)['"`]/g;
|
|
2706
|
+
// Runs a /g regex over `text` and collects every capture-group-1 value plus
// the offset where the match began. lastIndex is reset first so the shared
// module-level regexes stay safe across calls.
function findAll(re, text) {
  re.lastIndex = 0;
  const matches = [];
  for (let m = re.exec(text); m !== null; m = re.exec(text)) {
    matches.push({ topic: m[1], index: m.index });
  }
  return matches;
}
|
|
2715
|
+
// Extracts Kafka topic endpoints from one source file. Producer matches
// become PUBLISHES_TO endpoints, consumer matches CONSUMES_FROM; each
// (edgeType, topic) pair is emitted once with first-occurrence evidence.
function kafkaEndpointsFromFile(file, serviceDir) {
  const out = [];
  const seen = /* @__PURE__ */ new Set();
  const make = (topic, edgeType) => {
    const key = `${edgeType}|${topic}`;
    if (seen.has(key)) return;
    seen.add(key);
    // Evidence line is the first occurrence of the topic string, which may
    // differ from the producer/consumer call site if the name appears earlier.
    const line = lineOf(file.content, topic);
    out.push({
      infraId: infraId("kafka-topic", topic),
      name: topic,
      kind: "kafka-topic",
      edgeType,
      evidence: {
        file: path19.relative(serviceDir, file.path),
        line,
        snippet: snippet(file.content, line)
      }
    });
  };
  for (const { topic } of findAll(PRODUCER_TOPIC_RE, file.content)) make(topic, "PUBLISHES_TO");
  for (const { topic } of findAll(CONSUMER_TOPIC_RE, file.content)) make(topic, "CONSUMES_FROM");
  return out;
}
|
|
2739
|
+
|
|
2740
|
+
// src/extract/calls/redis.ts
|
|
2741
|
+
import path20 from "path";
|
|
2742
|
+
import { infraId as infraId2 } from "@neat.is/types";
|
|
2743
|
+
// Matches redis:// or rediss:// URLs; capture 1 = host, capture 2 = optional
// port. A credentials segment before "@" is skipped, not captured.
var REDIS_URL_RE = /redis(?:s)?:\/\/(?:[^@'"`\s]+@)?([^:/'"`\s]+)(?::(\d+))?/g;
// Extracts one "redis" endpoint per distinct host referenced via a Redis URL
// in the file, with file/line/snippet evidence for its first occurrence.
function redisEndpointsFromFile(file, serviceDir) {
  const out = [];
  const seen = /* @__PURE__ */ new Set();
  REDIS_URL_RE.lastIndex = 0; // shared /g regex: reset before scanning
  let m;
  while ((m = REDIS_URL_RE.exec(file.content)) !== null) {
    const host = m[1];
    if (seen.has(host)) continue;
    seen.add(host);
    const line = lineOf(file.content, host);
    out.push({
      infraId: infraId2("redis", host),
      name: host,
      kind: "redis",
      edgeType: "CALLS",
      evidence: {
        file: path20.relative(serviceDir, file.path),
        line,
        snippet: snippet(file.content, line)
      }
    });
  }
  return out;
}
|
|
2768
|
+
|
|
2769
|
+
// src/extract/calls/aws.ts
|
|
2770
|
+
import path21 from "path";
|
|
2771
|
+
import { infraId as infraId3 } from "@neat.is/types";
|
|
2772
|
+
// `Bucket: '<name>'` — only consulted when AWS S3 SDK markers are present.
var S3_BUCKET_RE = /Bucket\s*:\s*['"`]([^'"`]+)['"`]/g;
// `TableName: '<name>'` — only consulted when DynamoDB SDK markers are present.
var DYNAMO_TABLE_RE = /TableName\s*:\s*['"`]([^'"`]+)['"`]/g;
|
|
2774
|
+
// True when `text` contains at least one of the given marker substrings.
function hasMarker(text, markers) {
  for (const marker of markers) {
    if (text.includes(marker)) return true;
  }
  return false;
}
|
|
2777
|
+
// Like findAll, but the capture is exposed as `name`: returns every
// capture-group-1 value of a /g regex with its match offset, resetting
// lastIndex up front.
function findAll2(re, text) {
  re.lastIndex = 0;
  const matches = [];
  for (let m = re.exec(text); m !== null; m = re.exec(text)) {
    matches.push({ name: m[1], index: m.index });
  }
  return matches;
}
|
|
2786
|
+
// Extracts S3-bucket and DynamoDB-table endpoints from one source file.
// The name regexes only run when the file also contains AWS SDK v3
// client/command markers, to avoid matching unrelated `Bucket:`/`TableName:`
// object keys.
function awsEndpointsFromFile(file, serviceDir) {
  const out = [];
  const seen = /* @__PURE__ */ new Set();
  // Dedupes per (kind, name); evidence points at the name's first occurrence.
  const make = (kind, name) => {
    const key = `${kind}|${name}`;
    if (seen.has(key)) return;
    seen.add(key);
    const line = lineOf(file.content, name);
    out.push({
      infraId: infraId3(kind, name),
      name,
      kind,
      edgeType: "CALLS",
      evidence: {
        file: path21.relative(serviceDir, file.path),
        line,
        snippet: snippet(file.content, line)
      }
    });
  };
  if (hasMarker(file.content, ["S3Client", "PutObjectCommand", "GetObjectCommand", "DeleteObjectCommand"])) {
    for (const { name } of findAll2(S3_BUCKET_RE, file.content)) make("s3-bucket", name);
  }
  if (hasMarker(file.content, [
    "DynamoDBClient",
    "DynamoDBDocumentClient",
    "GetCommand",
    "PutCommand",
    "QueryCommand",
    "UpdateCommand",
    "DeleteCommand"
  ])) {
    for (const { name } of findAll2(DYNAMO_TABLE_RE, file.content)) make("dynamodb-table", name);
  }
  return out;
}
|
|
2822
|
+
|
|
2823
|
+
// src/extract/calls/grpc.ts
|
|
2824
|
+
import path22 from "path";
|
|
2825
|
+
import { infraId as infraId4 } from "@neat.is/types";
|
|
2826
|
+
// Matches `new FooClient(<first-arg>)`: capture 1 = service symbol ("Foo"),
// capture 2 = optional first constructor argument (possibly an address).
var GRPC_CLIENT_RE = /new\s+([A-Z][A-Za-z0-9_]*)Client\s*\(\s*['"`]?([^,'"`)]+)?/g;
|
|
2827
|
+
// Heuristic: a value looks like a network address when it ends in a 2–5
// digit port or contains a dot (hostname / IP).
function isLikelyAddress(value) {
  if (!value) return false;
  if (/:\d{2,5}$/.test(value)) return true;
  return value.includes(".");
}
|
|
2831
|
+
// Finds gRPC client constructions (`new XyzClient(...)`). The endpoint name
// is the first constructor argument when it looks like a host address,
// otherwise the client's type symbol. One endpoint per distinct name.
function grpcEndpointsFromFile(file, serviceDir) {
  const out = [];
  const seen = /* @__PURE__ */ new Set();
  GRPC_CLIENT_RE.lastIndex = 0; // shared /g regex: reset before scanning
  let m;
  while ((m = GRPC_CLIENT_RE.exec(file.content)) !== null) {
    const symbol = m[1];
    const addr = m[2]?.trim();
    const name = isLikelyAddress(addr) ? addr : symbol;
    if (seen.has(name)) continue;
    seen.add(name);
    // Evidence line points at the `new ...Client(` expression itself.
    const line = lineOf(file.content, m[0]);
    out.push({
      infraId: infraId4("grpc-service", name),
      name,
      kind: "grpc-service",
      edgeType: "CALLS",
      evidence: {
        file: path22.relative(serviceDir, file.path),
        line,
        snippet: snippet(file.content, line)
      }
    });
  }
  return out;
}
|
|
2857
|
+
|
|
2858
|
+
// src/extract/calls/index.ts
|
|
2859
|
+
// Maps an endpoint's string edge tag onto the EdgeType enum; anything other
// than the two messaging tags is treated as a plain CALLS edge.
function edgeTypeFromEndpoint(ep) {
  if (ep.edgeType === "PUBLISHES_TO") return EdgeType6.PUBLISHES_TO;
  if (ep.edgeType === "CONSUMES_FROM") return EdgeType6.CONSUMES_FROM;
  return EdgeType6.CALLS;
}
|
|
2869
|
+
// Adds InfraNodes and edges for external endpoints (Kafka topics, Redis
// hosts, S3 buckets, DynamoDB tables, gRPC services) discovered in each
// service's sources. Returns counters for newly added nodes/edges.
async function addExternalEndpointEdges(graph, services) {
  let nodesAdded = 0;
  let edgesAdded = 0;
  for (const service of services) {
    const files = await loadSourceFiles(service.dir);
    const endpoints = [];
    for (const file of files) {
      endpoints.push(...kafkaEndpointsFromFile(file, service.dir));
      endpoints.push(...redisEndpointsFromFile(file, service.dir));
      endpoints.push(...awsEndpointsFromFile(file, service.dir));
      endpoints.push(...grpcEndpointsFromFile(file, service.dir));
    }
    if (endpoints.length === 0) continue;
    // Per-service dedupe: the same endpoint found in several files must only
    // yield one edge (first occurrence's evidence wins).
    const seenEdges = /* @__PURE__ */ new Set();
    for (const ep of endpoints) {
      if (!graph.hasNode(ep.infraId)) {
        const node = {
          id: ep.infraId,
          type: NodeType8.InfraNode,
          name: ep.name,
          // s3-* and dynamodb-* kinds are AWS-managed; everything else is
          // assumed self-hosted.
          provider: ep.kind.startsWith("s3") || ep.kind.startsWith("dynamodb") ? "aws" : "self",
          kind: ep.kind
        };
        graph.addNode(node.id, node);
        nodesAdded++;
      }
      const edgeType = edgeTypeFromEndpoint(ep);
      const edgeId = extractedEdgeId2(service.node.id, ep.infraId, edgeType);
      if (seenEdges.has(edgeId)) continue;
      seenEdges.add(edgeId);
      if (!graph.hasEdge(edgeId)) {
        const edge = {
          id: edgeId,
          source: service.node.id,
          target: ep.infraId,
          type: edgeType,
          provenance: Provenance7.EXTRACTED,
          evidence: ep.evidence
        };
        graph.addEdgeWithKey(edgeId, edge.source, edge.target, edge);
        edgesAdded++;
      }
    }
  }
  return { nodesAdded, edgesAdded };
}
|
|
2915
|
+
// Runs both call-extraction passes (sequentially — both mutate the graph)
// and combines their counters. HTTP extraction only adds edges; external
// endpoints may add nodes too.
async function addCallEdges(graph, services) {
  const edgesFromHttp = await addHttpCallEdges(graph, services);
  const external = await addExternalEndpointEdges(graph, services);
  return {
    nodesAdded: external.nodesAdded,
    edgesAdded: edgesFromHttp + external.edgesAdded
  };
}
|
|
2923
|
+
|
|
2924
|
+
// src/extract/infra/docker-compose.ts
|
|
2925
|
+
import path23 from "path";
|
|
2926
|
+
import { EdgeType as EdgeType7, Provenance as Provenance8 } from "@neat.is/types";
|
|
2927
|
+
|
|
2928
|
+
// src/extract/infra/shared.ts
|
|
2929
|
+
import { NodeType as NodeType9, infraId as infraId5 } from "@neat.is/types";
|
|
2930
|
+
// Builds an InfraNode record. `provider` defaults to "self" (self-hosted);
// a region is attached only when supplied via `extras`.
function makeInfraNode(kind, name, provider = "self", extras) {
  return {
    id: infraId5(kind, name),
    type: NodeType9.InfraNode,
    name,
    provider,
    kind,
    ...extras?.region ? { region: extras.region } : {}
  };
}
|
|
2940
|
+
// Maps a container image reference to a coarse infra kind, matching on the
// final path segment of the repository (tag/digest stripped), case-
// insensitively. Unrecognized images classify as generic "container".
function classifyImage(image) {
  const lower = image.toLowerCase();
  // Take the final path segment FIRST, then strip tag/digest. The original
  // split on ":" before "/", which misread registries with ports
  // ("registry:5000/postgres" classified as "container").
  const segment = lower.split("/").pop() ?? lower;
  const last = segment.split("@")[0].split(":")[0];
  if (last.startsWith("postgres")) return "postgres";
  if (last.startsWith("mysql") || last.startsWith("mariadb")) return "mysql";
  if (last.startsWith("mongo")) return "mongodb";
  if (last.startsWith("redis")) return "redis";
  if (last.startsWith("rabbitmq")) return "rabbitmq";
  // `includes` subsumes the original's redundant startsWith("kafka") check
  // and still catches distro-prefixed images such as "cp-kafka".
  if (last.includes("kafka")) return "kafka";
  if (last.startsWith("memcached")) return "memcached";
  return "container";
}
|
|
2953
|
+
|
|
2954
|
+
// src/extract/infra/docker-compose.ts
|
|
2955
|
+
// Normalizes a compose `depends_on` value (array form or condition-map form)
// into a flat list of service names; absent values yield [].
function dependsOnList(value) {
  if (!value) return [];
  return Array.isArray(value) ? value : Object.keys(value);
}
|
|
2960
|
+
// Resolves a compose service name to a scanned ServiceNode id by matching
// either the node's name or the service directory's basename; null when
// nothing matches.
function serviceNameToServiceNode(name, services) {
  const hit = services.find(
    (s) => s.node.name === name || path23.basename(s.dir) === name
  );
  return hit ? hit.node.id : null;
}
|
|
2966
|
+
// Adds infra nodes and DEPENDS_ON edges from a scan-root docker-compose
// file. Compose services that match a scanned ServiceNode map to that node
// instead of producing a duplicate infra node. Returns node/edge counters.
async function addComposeInfra(graph, scanPath, services) {
  let nodesAdded = 0;
  let edgesAdded = 0;
  // Accept either compose filename; the first one that exists wins.
  let composePath = null;
  for (const name of ["docker-compose.yml", "docker-compose.yaml"]) {
    const abs = path23.join(scanPath, name);
    if (await exists(abs)) {
      composePath = abs;
      break;
    }
  }
  if (!composePath) return { nodesAdded, edgesAdded };
  const compose = await readYaml(composePath);
  if (!compose?.services) return { nodesAdded, edgesAdded };
  // Evidence path normalized to forward slashes for cross-platform output.
  const evidenceFile = path23.relative(scanPath, composePath).split(path23.sep).join("/");
  // Pass 1: resolve every compose service name to a graph node id.
  const composeNameToNodeId = /* @__PURE__ */ new Map();
  for (const [composeName, svc] of Object.entries(compose.services)) {
    const matchedServiceId = serviceNameToServiceNode(composeName, services);
    if (matchedServiceId) {
      composeNameToNodeId.set(composeName, matchedServiceId);
      continue;
    }
    const kind = svc.image ? classifyImage(svc.image) : "container";
    const node = makeInfraNode(kind, composeName);
    if (!graph.hasNode(node.id)) {
      graph.addNode(node.id, node);
      nodesAdded++;
    }
    composeNameToNodeId.set(composeName, node.id);
  }
  // Pass 2: emit DEPENDS_ON edges following depends_on declarations.
  for (const [composeName, svc] of Object.entries(compose.services)) {
    const sourceId = composeNameToNodeId.get(composeName);
    if (!sourceId) continue;
    for (const dep of dependsOnList(svc.depends_on)) {
      const targetId = composeNameToNodeId.get(dep);
      if (!targetId) continue; // depends_on referencing an unknown service
      const edgeId = extractedEdgeId2(sourceId, targetId, EdgeType7.DEPENDS_ON);
      if (graph.hasEdge(edgeId)) continue;
      const edge = {
        id: edgeId,
        source: sourceId,
        target: targetId,
        type: EdgeType7.DEPENDS_ON,
        provenance: Provenance8.EXTRACTED,
        evidence: { file: evidenceFile }
      };
      graph.addEdgeWithKey(edgeId, edge.source, edge.target, edge);
      edgesAdded++;
    }
  }
  return { nodesAdded, edgesAdded };
}
|
|
3018
|
+
|
|
3019
|
+
// src/extract/infra/dockerfile.ts
|
|
3020
|
+
import path24 from "path";
|
|
3021
|
+
import { promises as fs12 } from "fs";
|
|
3022
|
+
import { EdgeType as EdgeType8, Provenance as Provenance9 } from "@neat.is/types";
|
|
3023
|
+
// Returns the base image of a Dockerfile's final build stage, or null when
// none is usable. Multi-stage builds: the last FROM wins; `scratch` is
// ignored. Handles `FROM --platform=... image [AS stage]`, which the
// original misread by taking the flag token as the image name.
function runtimeImage(content) {
  let last = null;
  for (const raw of content.split("\n")) {
    const line = raw.trim();
    if (!line || line.startsWith("#")) continue;
    if (!/^from\s+/i.test(line)) continue;
    const tokens = line.split(/\s+/);
    // Skip the FROM keyword and any --flags (e.g. --platform=linux/amd64).
    const image = tokens.slice(1).find((t) => !t.startsWith("--"));
    if (!image || image.toLowerCase() === "scratch") continue;
    last = image;
  }
  return last;
}
|
|
3037
|
+
// Links each service to the container image of its Dockerfile's final build
// stage via a RUNS_ON edge, creating the container-image InfraNode on
// demand. Services without a Dockerfile (or with only scratch stages) are
// skipped. Returns node/edge counters.
async function addDockerfileRuntimes(graph, services, scanPath) {
  let nodesAdded = 0;
  let edgesAdded = 0;
  for (const service of services) {
    const dockerfilePath = path24.join(service.dir, "Dockerfile");
    if (!await exists(dockerfilePath)) continue;
    const content = await fs12.readFile(dockerfilePath, "utf8");
    const image = runtimeImage(content);
    if (!image) continue; // no usable FROM line
    const node = makeInfraNode("container-image", image);
    if (!graph.hasNode(node.id)) {
      graph.addNode(node.id, node);
      nodesAdded++;
    }
    const edgeId = extractedEdgeId2(service.node.id, node.id, EdgeType8.RUNS_ON);
    if (!graph.hasEdge(edgeId)) {
      const edge = {
        id: edgeId,
        source: service.node.id,
        target: node.id,
        type: EdgeType8.RUNS_ON,
        provenance: Provenance9.EXTRACTED,
        evidence: {
          // Normalized to forward slashes for stable output.
          file: path24.relative(scanPath, dockerfilePath).split(path24.sep).join("/")
        }
      };
      graph.addEdgeWithKey(edgeId, edge.source, edge.target, edge);
      edgesAdded++;
    }
  }
  return { nodesAdded, edgesAdded };
}
|
|
3069
|
+
|
|
3070
|
+
// src/extract/infra/terraform.ts
|
|
3071
|
+
import { promises as fs13 } from "fs";
|
|
3072
|
+
import path25 from "path";
|
|
3073
|
+
// Matches `resource "aws_<type>" "<name>"` declarations in Terraform HCL.
// NOTE: /g regex — callers must reset lastIndex before scanning a new string.
var RESOURCE_RE = /resource\s+"(aws_[A-Za-z0-9_]+)"\s+"([A-Za-z0-9_-]+)"/g;
// Recursively collect *.tf files below `start`, descending at most `max`
// directory levels and skipping ignored directories plus .terraform caches.
// Unreadable directories are treated as empty rather than raising.
async function walkTfFiles(start, depth = 0, max = 5) {
  if (depth > max) return [];
  const entries = await fs13.readdir(start, { withFileTypes: true }).catch(() => []);
  const found = [];
  for (const entry of entries) {
    const full = path25.join(start, entry.name);
    if (entry.isDirectory()) {
      if (!IGNORED_DIRS.has(entry.name) && entry.name !== ".terraform") {
        found.push(...await walkTfFiles(full, depth + 1, max));
      }
    } else if (entry.isFile() && entry.name.endsWith(".tf")) {
      found.push(full);
    }
  }
  return found;
}
|
|
3088
|
+
// Add one infra node (provider "aws") per `resource "aws_*" "name"` block
// found in any .tf file under scanPath. Terraform extraction produces nodes
// only — no edges.
async function addTerraformResources(graph, scanPath) {
  let nodesAdded = 0;
  for (const tfFile of await walkTfFiles(scanPath)) {
    const source = await fs13.readFile(tfFile, "utf8");
    RESOURCE_RE.lastIndex = 0; // shared /g regex: rewind its cursor per file
    for (let match = RESOURCE_RE.exec(source); match !== null; match = RESOURCE_RE.exec(source)) {
      const node = makeInfraNode(match[1], match[2], "aws");
      if (!graph.hasNode(node.id)) {
        graph.addNode(node.id, node);
        nodesAdded++;
      }
    }
  }
  return { nodesAdded, edgesAdded: 0 };
}
|
|
3107
|
+
|
|
3108
|
+
// src/extract/infra/k8s.ts
|
|
3109
|
+
import { promises as fs14 } from "fs";
|
|
3110
|
+
import path26 from "path";
|
|
3111
|
+
import { parseAllDocuments as parseAllDocuments2 } from "yaml";
|
|
3112
|
+
// Maps Kubernetes manifest `kind` values to the internal infra-node kind used
// by makeInfraNode; manifests whose kind is not listed here are skipped by
// addK8sResources.
var K8S_KIND_TO_INFRA_KIND = {
  Service: "k8s-service",
  Deployment: "k8s-deployment",
  StatefulSet: "k8s-statefulset",
  DaemonSet: "k8s-daemonset",
  CronJob: "k8s-cronjob",
  Job: "k8s-job",
  Ingress: "k8s-ingress"
};
|
|
3121
|
+
// Recursively collect files whose extension is in CONFIG_FILE_EXTENSIONS,
// descending at most `max` directory levels below `start` and skipping
// ignored directories. Unreadable directories are treated as empty.
async function walkYamlFiles2(start, depth = 0, max = 5) {
  if (depth > max) return [];
  const entries = await fs14.readdir(start, { withFileTypes: true }).catch(() => []);
  const results = [];
  for (const entry of entries) {
    const full = path26.join(start, entry.name);
    if (entry.isDirectory() && !IGNORED_DIRS.has(entry.name)) {
      results.push(...await walkYamlFiles2(full, depth + 1, max));
    } else if (entry.isFile() && CONFIG_FILE_EXTENSIONS.has(path26.extname(entry.name))) {
      results.push(full);
    }
  }
  return results;
}
|
|
3135
|
+
// Create infra nodes (provider "kubernetes") for recognized Kubernetes
// objects found in YAML/config files under scanPath. Node names are
// namespace-qualified ("ns/name") when the manifest declares a namespace.
// K8s extraction produces nodes only — no edges.
async function addK8sResources(graph, scanPath) {
  let nodesAdded = 0;
  for (const file of await walkYamlFiles2(scanPath)) {
    const text = await fs14.readFile(file, "utf8");
    let docs;
    try {
      docs = parseAllDocuments2(text).map((d) => d.toJSON());
    } catch {
      continue; // unparseable YAML: skip the whole file, best-effort extraction
    }
    for (const doc of docs) {
      const kind = doc?.kind;
      const name = doc?.metadata?.name;
      if (!kind || !name) continue;
      const infraKind = K8S_KIND_TO_INFRA_KIND[kind];
      if (!infraKind) continue;
      const ns = doc.metadata.namespace;
      const node = makeInfraNode(infraKind, ns ? `${ns}/${name}` : name, "kubernetes");
      if (!graph.hasNode(node.id)) {
        graph.addNode(node.id, node);
        nodesAdded++;
      }
    }
  }
  return { nodesAdded, edgesAdded: 0 };
}
|
|
3160
|
+
|
|
3161
|
+
// src/extract/infra/index.ts
|
|
3162
|
+
// Run every infra extractor (compose, Dockerfile, Terraform, K8s) against the
// graph and return the combined node/edge counts. Extractors mutate the shared
// graph, so they run sequentially in a fixed order.
async function addInfra(graph, scanPath, services) {
  const results = [
    await addComposeInfra(graph, scanPath, services),
    await addDockerfileRuntimes(graph, services, scanPath),
    await addTerraformResources(graph, scanPath),
    await addK8sResources(graph, scanPath)
  ];
  let nodesAdded = 0;
  let edgesAdded = 0;
  for (const r of results) {
    nodesAdded += r.nodesAdded;
    edgesAdded += r.edgesAdded;
  }
  return { nodesAdded, edgesAdded };
}
|
|
3172
|
+
|
|
3173
|
+
// src/extract/index.ts
|
|
3174
|
+
// Full extraction pipeline for a source tree: discover services, then layer
// databases/compat, config, call edges and infra onto the graph, promote
// frontier nodes, and finally invoke the optional policy hook. Phases mutate
// the shared graph, so they run strictly in this order.
async function extractFromDirectory(graph, scanPath, opts = {}) {
  await ensureCompatLoaded();
  const services = await discoverServices(scanPath);
  const serviceNodeCount = addServiceNodes(graph, services);
  await addServiceAliases(graph, scanPath, services);
  const databases = await addDatabasesAndCompat(graph, services, scanPath);
  const config = await addConfigNodes(graph, services, scanPath);
  const calls = await addCallEdges(graph, services);
  const infra = await addInfra(graph, scanPath, services);
  const frontiersPromoted = promoteFrontierNodes(graph);
  if (opts.onPolicyTrigger) await opts.onPolicyTrigger(graph);
  let nodesAdded = serviceNodeCount;
  let edgesAdded = 0;
  for (const phase of [databases, config, calls, infra]) {
    nodesAdded += phase.nodesAdded;
    edgesAdded += phase.edgesAdded;
  }
  return { nodesAdded, edgesAdded, frontiersPromoted };
}
|
|
3191
|
+
|
|
3192
|
+
// src/persist.ts
|
|
3193
|
+
import { promises as fs15 } from "fs";
|
|
3194
|
+
import path27 from "path";
|
|
3195
|
+
// Snapshot format version written by saveGraphToDisk and required by
// loadGraphFromDisk.
var SCHEMA_VERSION = 2;
// Upgrade a v1 snapshot: v2 dropped the per-node `pgDriverVersion` attribute.
// Returns a shallow copy stamped with schemaVersion 2; the nested graph
// object (and its nodes) is mutated in place, not copied.
function migrateV1ToV2(payload) {
  const nodeList = payload.graph.nodes;
  if (Array.isArray(nodeList)) {
    for (const entry of nodeList) {
      if (entry.attributes && "pgDriverVersion" in entry.attributes) {
        delete entry.attributes.pgDriverVersion;
      }
    }
  }
  return { ...payload, schemaVersion: 2 };
}
// Create the parent directory of `filePath` if it does not already exist.
async function ensureDir(filePath) {
  const parent = path27.dirname(filePath);
  await fs15.mkdir(parent, { recursive: true });
}
// Atomically persist the graph snapshot: serialize to a sibling `.tmp` file,
// then rename over the destination so readers never observe a partial write.
async function saveGraphToDisk(graph, outPath) {
  await ensureDir(outPath);
  const snapshot = {
    schemaVersion: SCHEMA_VERSION,
    exportedAt: new Date().toISOString(),
    graph: graph.export()
  };
  const tmpPath = `${outPath}.tmp`;
  await fs15.writeFile(tmpPath, JSON.stringify(snapshot), "utf8");
  await fs15.rename(tmpPath, outPath);
}
|
|
3221
|
+
// Restore a snapshot written by saveGraphToDisk into `graph`. A missing file
// is a no-op (first run); v1 snapshots are migrated; any other version
// mismatch throws. On success the graph is cleared before importing.
async function loadGraphFromDisk(graph, outPath) {
  let text;
  try {
    text = await fs15.readFile(outPath, "utf8");
  } catch (err) {
    if (err.code === "ENOENT") return; // nothing persisted yet
    throw err;
  }
  let snapshot = JSON.parse(text);
  if (snapshot.schemaVersion === 1) {
    snapshot = migrateV1ToV2(snapshot);
  }
  if (snapshot.schemaVersion !== SCHEMA_VERSION) {
    throw new Error(
      `persist: unsupported snapshot schemaVersion ${snapshot.schemaVersion} (expected ${SCHEMA_VERSION})`
    );
  }
  graph.clear();
  graph.import(snapshot.graph);
}
|
|
3241
|
+
// Periodically persist `graph` to `outPath` (default every 60s) and flush a
// final snapshot on SIGTERM/SIGINT before exiting the process. Returns a stop
// function that cancels the timer and detaches both signal handlers.
function startPersistLoop(graph, outPath, intervalMs = 6e4) {
  let stopped = false;
  // One save attempt; errors are logged, never rethrown, so the loop survives.
  const tick = async () => {
    if (stopped) return; // guards a tick already scheduled when stop() ran
    try {
      await saveGraphToDisk(graph, outPath);
    } catch (err) {
      console.error("persist: periodic save failed", err);
    }
  };
  const interval = setInterval(() => {
    void tick(); // fire-and-forget; tick handles its own errors
  }, intervalMs);
  // Best-effort final save, then exit. NOTE(review): exits with code 0 even
  // when the final save fails — confirm that is intentional.
  const onSignal = (signal) => {
    void (async () => {
      try {
        await saveGraphToDisk(graph, outPath);
      } catch (err) {
        console.error(`persist: ${signal} save failed`, err);
      } finally {
        process.exit(0);
      }
    })();
  };
  process.on("SIGTERM", onSignal);
  process.on("SIGINT", onSignal);
  // Stop function: idempotent teardown of timer and signal handlers.
  return () => {
    stopped = true;
    clearInterval(interval);
    process.off("SIGTERM", onSignal);
    process.off("SIGINT", onSignal);
  };
}
|
|
3274
|
+
|
|
3275
|
+
// src/projects.ts
|
|
3276
|
+
import path28 from "path";
|
|
3277
|
+
// Resolve the on-disk file set for a project under `baseDir`. The default
// project keeps the historical unsuffixed filenames; any other project gets
// per-project suffixed names so projects never collide.
function pathsForProject(project, baseDir) {
  const resolve = (name) => path28.join(baseDir, name);
  if (project === DEFAULT_PROJECT) {
    return {
      snapshotPath: resolve("graph.json"),
      errorsPath: resolve("errors.ndjson"),
      staleEventsPath: resolve("stale-events.ndjson"),
      embeddingsCachePath: resolve("embeddings.json"),
      policyViolationsPath: resolve("policy-violations.ndjson")
    };
  }
  return {
    snapshotPath: resolve(`${project}.json`),
    errorsPath: resolve(`errors.${project}.ndjson`),
    staleEventsPath: resolve(`stale-events.${project}.ndjson`),
    embeddingsCachePath: resolve(`embeddings.${project}.json`),
    policyViolationsPath: resolve(`policy-violations.${project}.ndjson`)
  };
}
|
|
3295
|
+
// Registry of per-project contexts, keyed by project name.
var Projects = class {
  contexts = /* @__PURE__ */ new Map();
  // Register (or replace) a fully built context under its own name.
  upsert(ctx) {
    this.contexts.set(ctx.name, ctx);
  }
  // Build a context from partial init data and register it. When no graph
  // instance is supplied, fall back to the shared graph registry.
  set(name, init) {
    const ctx = {
      name,
      graph: init.graph ?? getGraph(name),
      scanPath: init.scanPath,
      paths: init.paths,
      searchIndex: init.searchIndex
    };
    this.contexts.set(name, ctx);
    return ctx;
  }
  // Lookup; returns undefined for unknown projects.
  get(name) {
    return this.contexts.get(name);
  }
  has(name) {
    return this.contexts.has(name);
  }
  // All registered project names, sorted lexicographically.
  list() {
    return Array.from(this.contexts.keys()).sort();
  }
  // Attach a search index to an existing project; unknown names are ignored.
  attachSearchIndex(name, index) {
    const ctx = this.contexts.get(name);
    if (ctx) ctx.searchIndex = index;
  }
};
|
|
3325
|
+
// Parse a comma-separated project list (e.g. from an env var): trims each
// entry and drops empty strings plus the default project, which always exists.
function parseExtraProjects(raw) {
  if (!raw) return [];
  const names = raw.split(",").map((entry) => entry.trim());
  return names.filter((entry) => entry.length > 0 && entry !== DEFAULT_PROJECT);
}
|
|
3329
|
+
|
|
3330
|
+
// Public surface of this chunk; re-exported by the package entry points
// (index/server/cli builds).
export {
  DEFAULT_PROJECT,
  getGraph,
  resetGraph,
  checkCompatibility,
  ensureCompatLoaded,
  compatPairs,
  confidenceForEdge,
  getRootCause,
  getBlastRadius,
  TRANSITIVE_DEPENDENCIES_DEFAULT_DEPTH,
  TRANSITIVE_DEPENDENCIES_MAX_DEPTH,
  getTransitiveDependencies,
  evaluateAllPolicies,
  loadPolicyFile,
  PolicyViolationsLog,
  thresholdForEdgeType,
  stitchTrace,
  makeErrorSpanWriter,
  handleSpan,
  promoteFrontierNodes,
  makeSpanHandler,
  markStaleEdges,
  readStaleEvents,
  startStalenessLoop,
  readErrorEvents,
  discoverServices,
  addServiceNodes,
  addServiceAliases,
  addDatabasesAndCompat,
  addConfigNodes,
  addCallEdges,
  addInfra,
  extractFromDirectory,
  saveGraphToDisk,
  loadGraphFromDisk,
  startPersistLoop,
  pathsForProject,
  Projects,
  parseExtraProjects
};
|
|
3371
|
+
//# sourceMappingURL=chunk-6SFEITLJ.js.map
|