@neat.is/core 0.2.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/compat.json +120 -0
- package/dist/chunk-6JT6L2OV.js +164 -0
- package/dist/chunk-6JT6L2OV.js.map +1 -0
- package/dist/chunk-6SFEITLJ.js +3371 -0
- package/dist/chunk-6SFEITLJ.js.map +1 -0
- package/dist/chunk-I5IMCXRO.js +325 -0
- package/dist/chunk-I5IMCXRO.js.map +1 -0
- package/dist/chunk-T2U4U256.js +462 -0
- package/dist/chunk-T2U4U256.js.map +1 -0
- package/dist/chunk-WX55TLUT.js +184 -0
- package/dist/chunk-WX55TLUT.js.map +1 -0
- package/dist/chunk-XOOCA5T7.js +290 -0
- package/dist/chunk-XOOCA5T7.js.map +1 -0
- package/dist/cli.cjs +5754 -0
- package/dist/cli.cjs.map +1 -0
- package/dist/cli.d.cts +36 -0
- package/dist/cli.d.ts +36 -0
- package/dist/cli.js +1175 -0
- package/dist/cli.js.map +1 -0
- package/dist/index.cjs +4552 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +408 -0
- package/dist/index.d.ts +408 -0
- package/dist/index.js +93 -0
- package/dist/index.js.map +1 -0
- package/dist/neatd.cjs +3070 -0
- package/dist/neatd.cjs.map +1 -0
- package/dist/neatd.d.cts +1 -0
- package/dist/neatd.d.ts +1 -0
- package/dist/neatd.js +114 -0
- package/dist/neatd.js.map +1 -0
- package/dist/otel-grpc-B4XBSI4W.js +9 -0
- package/dist/otel-grpc-B4XBSI4W.js.map +1 -0
- package/dist/server.cjs +4499 -0
- package/dist/server.cjs.map +1 -0
- package/dist/server.d.cts +2 -0
- package/dist/server.d.ts +2 -0
- package/dist/server.js +97 -0
- package/dist/server.js.map +1 -0
- package/package.json +77 -0
- package/proto/opentelemetry/proto/collector/trace/v1/trace_service.proto +31 -0
- package/proto/opentelemetry/proto/common/v1/common.proto +46 -0
- package/proto/opentelemetry/proto/resource/v1/resource.proto +19 -0
- package/proto/opentelemetry/proto/trace/v1/trace.proto +93 -0
package/dist/neatd.cjs
ADDED
|
@@ -0,0 +1,3070 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
"use strict";
|
|
3
|
+
var __create = Object.create;
|
|
4
|
+
var __defProp = Object.defineProperty;
|
|
5
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
6
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
7
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
8
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
9
|
+
var __copyProps = (to, from, except, desc) => {
|
|
10
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
11
|
+
for (let key of __getOwnPropNames(from))
|
|
12
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
13
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
14
|
+
}
|
|
15
|
+
return to;
|
|
16
|
+
};
|
|
17
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
18
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
19
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
20
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
21
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
22
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
23
|
+
mod
|
|
24
|
+
));
|
|
25
|
+
|
|
26
|
+
// src/neatd.ts
|
|
27
|
+
var import_node_fs18 = require("fs");
|
|
28
|
+
var import_node_path31 = __toESM(require("path"), 1);
|
|
29
|
+
|
|
30
|
+
// src/daemon.ts
|
|
31
|
+
var import_node_fs17 = require("fs");
|
|
32
|
+
var import_node_path30 = __toESM(require("path"), 1);
|
|
33
|
+
|
|
34
|
+
// src/graph.ts
|
|
35
|
+
var import_graphology = __toESM(require("graphology"), 1);
|
|
36
|
+
var MultiDirectedGraph = import_graphology.default.MultiDirectedGraph;
|
|
37
|
+
var DEFAULT_PROJECT = "default";
|
|
38
|
+
var graphs = /* @__PURE__ */ new Map();
|
|
39
|
+
// Build a fresh, empty dependency graph. Self-loops are disallowed: a node
// depending on itself carries no traversal information for blast-radius walks.
function makeGraph() {
  const options = { allowSelfLoops: false };
  return new MultiDirectedGraph(options);
}
|
|
42
|
+
// Return the graph for a project, lazily creating and registering one on
// first access. Defaults to the shared "default" project.
function getGraph(project = DEFAULT_PROJECT) {
  const existing = graphs.get(project);
  if (existing) return existing;
  const created = makeGraph();
  graphs.set(project, created);
  return created;
}
|
|
50
|
+
// Drop one project's graph, or every graph when no project is given.
// A subsequent getGraph() call recreates state lazily.
function resetGraph(project) {
  if (project !== void 0) {
    graphs.delete(project);
  } else {
    graphs.clear();
  }
}
|
|
57
|
+
|
|
58
|
+
// src/ingest.ts
|
|
59
|
+
var import_node_fs3 = require("fs");
|
|
60
|
+
var import_node_path3 = __toESM(require("path"), 1);
|
|
61
|
+
|
|
62
|
+
// src/policy.ts
|
|
63
|
+
var import_node_fs2 = require("fs");
|
|
64
|
+
var import_node_path2 = __toESM(require("path"), 1);
|
|
65
|
+
var import_types2 = require("@neat.is/types");
|
|
66
|
+
|
|
67
|
+
// src/compat.ts
|
|
68
|
+
var import_node_fs = require("fs");
|
|
69
|
+
var import_node_os = __toESM(require("os"), 1);
|
|
70
|
+
var import_node_path = __toESM(require("path"), 1);
|
|
71
|
+
var import_semver = __toESM(require("semver"), 1);
|
|
72
|
+
|
|
73
|
+
// compat.json
|
|
74
|
+
// Bundled (offline) compatibility matrix, inlined from compat.json at build
// time. Used as the baseline; entries fetched from NEAT_COMPAT_URL are merged
// on top of it (see mergeMatrices / ensureCompatLoaded).
var compat_default = {
  // Database driver <-> engine minimum-version pairs. A pair fires when the
  // engine meets minEngineVersion but the declared driver is below
  // minDriverVersion (see checkCompatibility).
  pairs: [
    {
      kind: "driver-engine",
      driver: "pg",
      engine: "postgresql",
      minDriverVersion: "8.0.0",
      minEngineVersion: "14",
      reason: "PostgreSQL 14+ requires scram-sha-256 auth by default; pg < 8.0.0 only speaks md5."
    },
    {
      kind: "driver-engine",
      driver: "mysql2",
      engine: "mysql",
      minDriverVersion: "3.0.0",
      minEngineVersion: "8",
      reason: "MySQL 8 defaults to caching_sha2_password; mysql2 < 3.0.0 doesn't negotiate it."
    },
    {
      kind: "driver-engine",
      driver: "mongoose",
      engine: "mongodb",
      minDriverVersion: "7.0.0",
      minEngineVersion: "7",
      reason: "MongoDB 7 drops legacy wire-protocol opcodes that mongoose < 7.0.0 still emits."
    },
    {
      kind: "driver-engine",
      driver: "psycopg2",
      engine: "postgresql",
      minDriverVersion: "2.9.0",
      minEngineVersion: "14",
      reason: "PostgreSQL 14+ requires scram-sha-256 auth by default; psycopg2 < 2.9.0 only speaks md5."
    },
    {
      kind: "driver-engine",
      driver: "pymongo",
      engine: "mongodb",
      minDriverVersion: "4.0.0",
      minEngineVersion: "7",
      reason: "MongoDB 7 drops legacy wire-protocol opcodes that pymongo < 4.0.0 still emits."
    },
    {
      kind: "driver-engine",
      driver: "mysql-connector-python",
      engine: "mysql",
      minDriverVersion: "8.0.0",
      minEngineVersion: "8",
      reason: "MySQL 8 defaults to caching_sha2_password; mysql-connector-python < 8.0.0 doesn't negotiate it."
    }
  ],
  // Minimum Node.js runtime required once a package reaches a given version
  // (see checkNodeEngineConstraint).
  nodeEngineConstraints: [
    {
      kind: "node-engine",
      package: "vitest",
      packageMinVersion: "2.0.0",
      minNodeVersion: "18.0.0",
      reason: "vitest >= 2.0 drops Node 16 support; requires Node 18+."
    },
    {
      kind: "node-engine",
      package: "next",
      packageMinVersion: "14.0.0",
      minNodeVersion: "18.17.0",
      reason: "Next 14+ requires Node 18.17+ (uses APIs introduced in that minor)."
    },
    {
      kind: "node-engine",
      package: "@modelcontextprotocol/sdk",
      packageMinVersion: "1.0.0",
      minNodeVersion: "18.0.0",
      reason: "@modelcontextprotocol/sdk >= 1 requires Node 18+ (web-streams polyfill removed)."
    }
  ],
  // Peer-package requirements: `package` at packageMinVersion+ needs
  // `requires.name` at requires.minVersion+ (see checkPackageConflict).
  packageConflicts: [
    {
      kind: "package-conflict",
      package: "@tanstack/react-query",
      packageMinVersion: "5.0.0",
      requires: {
        name: "react",
        minVersion: "18.0.0"
      },
      reason: "@tanstack/react-query 5+ uses useSyncExternalStore \u2014 only available in React 18+."
    },
    {
      kind: "package-conflict",
      package: "react-router-dom",
      packageMinVersion: "7.0.0",
      requires: {
        name: "react",
        minVersion: "18.0.0"
      },
      reason: "react-router-dom 7+ requires React 18+."
    },
    {
      kind: "package-conflict",
      package: "next",
      packageMinVersion: "14.0.0",
      requires: {
        name: "react",
        minVersion: "18.2.0"
      },
      reason: "Next.js 14+ requires React 18.2+."
    }
  ],
  // Packages deprecated outright; packageMaxVersion, when present, bounds the
  // versions the rule applies to (see checkDeprecatedApi).
  deprecatedApis: [
    {
      kind: "deprecated-api",
      package: "request",
      packageMaxVersion: "2.88.2",
      reason: "request is deprecated; use undici, node-fetch, or axios instead."
    },
    {
      kind: "deprecated-api",
      package: "node-uuid",
      reason: "node-uuid is deprecated; use the `uuid` package."
    }
  ]
};
|
|
194
|
+
|
|
195
|
+
// src/compat.ts
|
|
196
|
+
var bundledMatrix = compat_default;
|
|
197
|
+
var mergedMatrix = null;
|
|
198
|
+
var remoteLoadAttempted = false;
|
|
199
|
+
var REMOTE_CACHE_DIR = import_node_path.default.join(import_node_os.default.homedir(), ".neat");
|
|
200
|
+
var REMOTE_CACHE_PATH = import_node_path.default.join(REMOTE_CACHE_DIR, "compat-cache.json");
|
|
201
|
+
var REMOTE_TTL_MS = 24 * 60 * 60 * 1e3;
|
|
202
|
+
// Decide whether a database engine version satisfies a matrix threshold.
// Fast path compares leading integers (major versions, e.g. "14.2" vs "14");
// otherwise falls back to full semver coercion. Unparseable input is treated
// conservatively as "below threshold".
function engineMeetsThreshold(engineVersion, threshold) {
  const engineMajor = parseInt(engineVersion, 10);
  const thresholdMajor = parseInt(threshold, 10);
  if (Number.isFinite(engineMajor) && Number.isFinite(thresholdMajor)) {
    return engineMajor >= thresholdMajor;
  }
  const engineSemver = import_semver.default.coerce(engineVersion);
  const thresholdSemver = import_semver.default.coerce(threshold);
  if (engineSemver && thresholdSemver) {
    return import_semver.default.gte(engineSemver, thresholdSemver);
  }
  return false;
}
|
|
211
|
+
// Check one declared driver against a live engine using the active matrix.
// Returns { compatible: true } for every case except a confirmed violation:
// a matching pair whose engine threshold is met while the declared driver
// version is below the pair's minimum.
function checkCompatibility(driver, driverVersion, engine, engineVersion) {
  const matrix = currentMatrix();
  const pair = matrix.pairs.find((p) => p.driver === driver && p.engine === engine);
  // No rule for this driver/engine combination: nothing to flag.
  if (!pair) return { compatible: true };
  // Rule only applies once the engine has reached minEngineVersion
  // (e.g. md5 auth is fine on PostgreSQL < 14).
  if (pair.minEngineVersion && !engineMeetsThreshold(engineVersion, pair.minEngineVersion)) {
    return { compatible: true };
  }
  const driverCoerced = import_semver.default.coerce(driverVersion);
  // Unparseable driver version (e.g. "workspace:*"): give benefit of the doubt.
  if (!driverCoerced) return { compatible: true };
  if (import_semver.default.lt(driverCoerced, pair.minDriverVersion)) {
    return {
      compatible: false,
      reason: pair.reason,
      minDriverVersion: pair.minDriverVersion
    };
  }
  return { compatible: true };
}
|
|
229
|
+
// True when the service's declared engines.node range is entirely >= the
// required Node version (i.e. every admitted runtime satisfies it).
// Any parse failure errs on the permissive side and returns true.
function rangeAdmitsVersion(serviceNodeRange, requiredNodeVersion) {
  try {
    const required = import_semver.default.coerce(requiredNodeVersion);
    if (!required) return true;
    const floorRange = `>=${required.version}`;
    const opts = { includePrerelease: false };
    return import_semver.default.subset(serviceNodeRange, floorRange, opts);
  } catch {
    // Malformed range: don't fail the check on bad metadata.
    return true;
  }
}
|
|
240
|
+
// Check one node-engine rule against a service: a package at or above
// packageMinVersion demands the service's engines.node range admit only
// Node >= minNodeVersion. Missing data always resolves to compatible.
function checkNodeEngineConstraint(constraint, declaredPackageVersion, serviceNodeRange) {
  // Rule only kicks in at packageMinVersion; older declared versions pass.
  if (constraint.packageMinVersion && declaredPackageVersion) {
    const v = import_semver.default.coerce(declaredPackageVersion);
    if (v && import_semver.default.lt(v, constraint.packageMinVersion)) {
      return { compatible: true };
    }
  }
  // No engines.node declared: nothing to validate against.
  if (!serviceNodeRange) {
    return { compatible: true };
  }
  if (rangeAdmitsVersion(serviceNodeRange, constraint.minNodeVersion)) {
    return { compatible: true };
  }
  return {
    compatible: false,
    reason: constraint.reason,
    requiredNodeVersion: constraint.minNodeVersion
  };
}
|
|
259
|
+
// Check one peer-requirement rule: `conflict.package` (at packageMinVersion+)
// needs `conflict.requires.name` at `conflict.requires.minVersion` or newer.
function checkPackageConflict(conflict, declaredPackageVersion, declaredRequiredVersion) {
  // Package not declared at all: rule doesn't apply.
  if (!declaredPackageVersion) return { compatible: true };
  // Rule only applies once the package reaches packageMinVersion.
  if (conflict.packageMinVersion) {
    const v = import_semver.default.coerce(declaredPackageVersion);
    if (v && import_semver.default.lt(v, conflict.packageMinVersion)) {
      return { compatible: true };
    }
  }
  // Required peer entirely absent counts as a violation (no foundVersion).
  if (!declaredRequiredVersion) {
    return {
      compatible: false,
      reason: conflict.reason,
      requires: conflict.requires
    };
  }
  const requiredCoerced = import_semver.default.coerce(declaredRequiredVersion);
  // Unparseable peer version: give benefit of the doubt.
  if (!requiredCoerced) return { compatible: true };
  if (import_semver.default.lt(requiredCoerced, conflict.requires.minVersion)) {
    return {
      compatible: false,
      reason: conflict.reason,
      requires: conflict.requires,
      foundVersion: declaredRequiredVersion
    };
  }
  return { compatible: true };
}
|
|
286
|
+
// Flag a declared dependency on a deprecated package. When the rule carries
// packageMaxVersion, only versions at or below it are considered deprecated;
// versions strictly above the ceiling pass.
function checkDeprecatedApi(rule, declaredVersion) {
  // Package not declared: nothing to report.
  if (declaredVersion === void 0) return { compatible: true };
  if (rule.packageMaxVersion) {
    const declared = import_semver.default.coerce(declaredVersion);
    const ceiling = import_semver.default.coerce(rule.packageMaxVersion);
    if (declared && ceiling && import_semver.default.gt(declared, ceiling)) {
      return { compatible: true };
    }
  }
  return { compatible: false, reason: rule.reason };
}
|
|
295
|
+
// Active compatibility matrix: the merged (bundled + remote) matrix once
// ensureCompatLoaded() has run, otherwise the bundled baseline.
function currentMatrix() {
  if (mergedMatrix !== null && mergedMatrix !== void 0) {
    return mergedMatrix;
  }
  return bundledMatrix;
}
|
|
298
|
+
// Concatenate two compatibility matrices section by section. `a` (the bundled
// baseline) must have `pairs`; every other section on either side is optional
// and defaults to empty. Remote entries are appended after bundled ones.
function mergeMatrices(a, b) {
  const concat = (first, second) => [...first ?? [], ...second ?? []];
  return {
    pairs: [...a.pairs, ...b.pairs ?? []],
    nodeEngineConstraints: concat(a.nodeEngineConstraints, b.nodeEngineConstraints),
    packageConflicts: concat(a.packageConflicts, b.packageConflicts),
    deprecatedApis: concat(a.deprecatedApis, b.deprecatedApis)
  };
}
|
|
309
|
+
// Read the on-disk cache of a previously fetched remote compat matrix.
// Returns the cached matrix only when the cache was written for the same URL
// and is younger than REMOTE_TTL_MS; any other condition (missing file, bad
// JSON, URL mismatch, stale or unparseable timestamp) yields null.
async function readRemoteCache(url) {
  try {
    const raw = await import_node_fs.promises.readFile(REMOTE_CACHE_PATH, "utf8");
    const parsed = JSON.parse(raw);
    // Cache is keyed by the URL it was fetched from; never reuse across URLs.
    if (parsed.url !== url) return null;
    const age = Date.now() - new Date(parsed.fetchedAt).getTime();
    // Bug fix: a missing/garbled fetchedAt made `age` NaN, and `NaN > TTL`
    // is false, so a corrupt cache was treated as eternally fresh. Treat any
    // non-finite age as stale so the matrix gets re-fetched.
    if (!Number.isFinite(age) || age > REMOTE_TTL_MS) return null;
    return parsed.matrix;
  } catch {
    // Unreadable or unparseable cache file: behave as a plain cache miss.
    return null;
  }
}
|
|
321
|
+
// Persist a freshly fetched remote matrix to the user-level cache file,
// stamped with the source URL and fetch time. Cache writes are best-effort:
// failures are logged and swallowed so they never break matrix loading.
async function writeRemoteCache(url, matrix) {
  const payload = {
    fetchedAt: new Date().toISOString(),
    url,
    matrix
  };
  try {
    await import_node_fs.promises.mkdir(REMOTE_CACHE_DIR, { recursive: true });
    await import_node_fs.promises.writeFile(REMOTE_CACHE_PATH, JSON.stringify(payload), "utf8");
  } catch (err) {
    console.warn(`[neat] failed to cache compat matrix: ${err.message}`);
  }
}
|
|
334
|
+
// Resolve the effective compatibility matrix exactly once per process.
// Resolution order: memoized result -> bundled only (no NEAT_COMPAT_URL) ->
// disk cache -> network fetch (cached on success) -> bundled on any failure.
// The remote fetch is attempted at most once (remoteLoadAttempted flag).
async function ensureCompatLoaded() {
  if (mergedMatrix) return mergedMatrix;
  // A previous attempt already ran (and failed to memoize): don't retry.
  if (remoteLoadAttempted) {
    mergedMatrix = bundledMatrix;
    return mergedMatrix;
  }
  remoteLoadAttempted = true;
  const url = process.env.NEAT_COMPAT_URL;
  // No remote source configured: bundled matrix is the final answer.
  if (!url) {
    mergedMatrix = bundledMatrix;
    return mergedMatrix;
  }
  // Fresh on-disk cache for this URL avoids a network round-trip.
  const cached = await readRemoteCache(url);
  if (cached) {
    mergedMatrix = mergeMatrices(bundledMatrix, cached);
    return mergedMatrix;
  }
  try {
    const res = await fetch(url);
    if (!res.ok) throw new Error(`${res.status} ${res.statusText}`);
    const remote = await res.json();
    // Best-effort cache write; failures are logged inside writeRemoteCache.
    await writeRemoteCache(url, remote);
    mergedMatrix = mergeMatrices(bundledMatrix, remote);
    return mergedMatrix;
  } catch (err) {
    // Network/parse failure: degrade gracefully to the bundled matrix.
    console.warn(
      `[neat] NEAT_COMPAT_URL fetch failed (${err.message}); using bundled matrix only`
    );
    mergedMatrix = bundledMatrix;
    return mergedMatrix;
  }
}
|
|
366
|
+
// Read-only accessors over the active compatibility matrix. Each reflects
// remote additions once ensureCompatLoaded() has populated mergedMatrix.

// Driver/engine compatibility pairs (always present on a matrix).
function compatPairs() {
  const { pairs } = currentMatrix();
  return pairs;
}

// Node-runtime requirements per package; empty list when the section is absent.
function nodeEngineConstraints() {
  const { nodeEngineConstraints: constraints } = currentMatrix();
  return constraints ?? [];
}

// Peer-package version conflicts; empty list when the section is absent.
function packageConflicts() {
  const { packageConflicts: conflicts } = currentMatrix();
  return conflicts ?? [];
}

// Deprecated-package rules; empty list when the section is absent.
function deprecatedApis() {
  const { deprecatedApis: rules } = currentMatrix();
  return rules ?? [];
}
|
|
378
|
+
|
|
379
|
+
// src/traverse.ts
|
|
380
|
+
var import_types = require("@neat.is/types");
|
|
381
|
+
var BLAST_RADIUS_DEFAULT_DEPTH = 10;
|
|
382
|
+
// Collapse a multigraph's parallel edges: for each distinct target node keep
// only the edge with the highest provenance rank (per PROV_RANK). FRONTIER
// edges are excluded entirely — they are speculative and never traversed.
// Returns Map<targetNodeId, edgeAttributes>.
function bestEdgeByTarget(graph, edgeIds) {
  const best = /* @__PURE__ */ new Map();
  for (const id of edgeIds) {
    const e = graph.getEdgeAttributes(id);
    if (e.provenance === import_types.Provenance.FRONTIER) continue;
    const cur = best.get(e.target);
    // First edge to this target wins until a higher-ranked provenance shows up.
    if (!cur || import_types.PROV_RANK[e.provenance] > import_types.PROV_RANK[cur.provenance]) {
      best.set(e.target, e);
    }
  }
  return best;
}
|
|
394
|
+
// Upper bound on edge confidence by provenance: volume/recency/cleanliness
// weights can only scale confidence *down* from this ceiling (see
// confidenceForEdge). Unknown provenance falls back to 0.5 there.
var PROVENANCE_CEILING = {
  OBSERVED: 1,
  INFERRED: 0.7,
  EXTRACTED: 0.5,
  STALE: 0.3,
  FRONTIER: 0.3
};
|
|
401
|
+
// Confidence weight from traffic volume: 0.5 with no span data, climbing
// logarithmically with span count and capped at 1 (reached around 1e3 spans).
function volumeWeight(spanCount) {
  const hasTraffic = Boolean(spanCount) && spanCount > 0;
  if (!hasTraffic) return 0.5;
  const scaled = 0.5 + Math.log10(spanCount + 1) / 3;
  return Math.min(1, scaled);
}
|
|
406
|
+
// Confidence weight from observation age: 1 within the last hour, decaying
// linearly to 0.5 at 24h, then a flat 0.3 beyond a day. Unknown age gets a
// mild 0.8 penalty rather than full trust.
function recencyWeight(ageMs) {
  if (ageMs === void 0) return 0.8;
  const HOUR_MS = 36e5;
  if (ageMs <= HOUR_MS) return 1;
  if (ageMs > 24 * HOUR_MS) return 0.3;
  const fractionOfDay = (ageMs - HOUR_MS) / (23 * HOUR_MS);
  return 1 - 0.5 * fractionOfDay;
}
|
|
416
|
+
// Confidence weight from error rate: 1 at or below a 1% error rate, a floor
// of 0.3 at 50%+, and a linear 1 - rate*1.4 ramp in between. No traffic means
// no evidence of problems, so the weight stays 1.
function cleanlinessWeight(spanCount, errorCount) {
  const noTraffic = !spanCount || spanCount <= 0;
  if (noTraffic) return 1;
  const errorRate = (errorCount ?? 0) / spanCount;
  if (errorRate >= 0.5) return 0.3;
  if (errorRate <= 0.01) return 1;
  return 1 - errorRate * 1.4;
}
|
|
423
|
+
// Confidence score in [0, 1] for a single edge: the provenance ceiling scaled
// by volume, recency, and cleanliness signal weights. With no signal at all,
// the bare ceiling is returned unscaled.
function confidenceForEdge(edge, now = Date.now()) {
  // Unknown provenance defaults to the EXTRACTED-level ceiling of 0.5.
  const ceiling = PROVENANCE_CEILING[edge.provenance] ?? 0.5;
  // Prefer explicit signal fields; fall back to legacy edge-level fields.
  const spanCount = edge.signal?.spanCount ?? edge.callCount;
  const ageMs = edge.signal?.lastObservedAgeMs ?? lastObservedAge(edge, now);
  // No telemetry of any kind: provenance alone decides the score.
  if (spanCount === void 0 && ageMs === void 0 && edge.signal === void 0) {
    return ceiling;
  }
  const v = volumeWeight(spanCount);
  const r = recencyWeight(ageMs);
  const c = cleanlinessWeight(spanCount, edge.signal?.errorCount);
  // Clamp defensively; each factor is already in (0, 1].
  return Math.max(0, Math.min(1, ceiling * v * r * c));
}
|
|
435
|
+
// Milliseconds since the edge's lastObserved timestamp, clamped at zero for
// timestamps in the future. Returns undefined when the field is missing or
// does not parse as a date.
function lastObservedAge(edge, now) {
  const stamp = edge.lastObserved;
  if (!stamp) return void 0;
  const observedAt = Date.parse(stamp);
  if (!Number.isFinite(observedAt)) return void 0;
  const age = now - observedAt;
  return age > 0 ? age : 0;
}
|
|
441
|
+
// Confidence for a multi-hop path: the product of each hop's confidence
// (so longer or weaker paths score lower), clamped to [0, 1]. An empty path
// is the origin itself and scores 1.
function confidenceFromMix(edges, now = Date.now()) {
  if (edges.length === 0) return 1;
  const product = edges.reduce(
    (acc, edge) => acc * confidenceForEdge(edge, now),
    1
  );
  return Math.max(0, Math.min(1, product));
}
|
|
449
|
+
// Root-cause heuristic for walks that originate at a database node: scan the
// services along the walk path for a declared driver that is incompatible
// with the origin database's engine/version. Returns the first offending
// service with a reason and (when a minimum is known) a fix recommendation,
// or null when nothing matches.
function databaseRootCauseShape(graph, origin, walk) {
  const targetDb = origin;
  // Only matrix pairs for this database's engine are relevant.
  const candidatePairs = compatPairs().filter((p) => p.engine === targetDb.engine);
  if (candidatePairs.length === 0) return null;
  for (const id of walk.path) {
    const attrs = graph.getNodeAttributes(id);
    if (attrs.type !== import_types.NodeType.ServiceNode) continue;
    const svc = attrs;
    const deps = svc.dependencies ?? {};
    for (const pair of candidatePairs) {
      const declared = deps[pair.driver];
      if (!declared) continue;
      const result = checkCompatibility(
        pair.driver,
        declared,
        targetDb.engine,
        targetDb.engineVersion
      );
      if (!result.compatible) {
        return {
          rootCauseNode: id,
          rootCauseReason: result.reason ?? "incompatible driver",
          // Recommendation only when the matrix pins a minimum driver version.
          ...result.minDriverVersion ? {
            fixRecommendation: `Upgrade ${svc.name} ${pair.driver} driver to >= ${result.minDriverVersion}`
          } : {}
        };
      }
    }
  }
  return null;
}
|
|
480
|
+
// Root-cause heuristic for walks that originate at a service node: for each
// service along the path, check node-engine constraints first, then peer
// package conflicts. The first violation found wins; null means no match.
function serviceRootCauseShape(graph, _origin, walk) {
  for (const id of walk.path) {
    const attrs = graph.getNodeAttributes(id);
    if (attrs.type !== import_types.NodeType.ServiceNode) continue;
    const svc = attrs;
    const deps = svc.dependencies ?? {};
    const serviceNodeEngine = svc.nodeEngine;
    // Pass 1: Node runtime requirements imposed by declared packages.
    for (const constraint of nodeEngineConstraints()) {
      const declared = deps[constraint.package];
      if (!declared) continue;
      const result = checkNodeEngineConstraint(constraint, declared, serviceNodeEngine);
      if (!result.compatible && result.reason) {
        return {
          rootCauseNode: id,
          rootCauseReason: result.reason,
          ...result.requiredNodeVersion ? {
            fixRecommendation: `Bump ${svc.name}'s engines.node to >= ${result.requiredNodeVersion}`
          } : {}
        };
      }
    }
    // Pass 2: peer-package version conflicts among declared dependencies.
    for (const conflict of packageConflicts()) {
      const declared = deps[conflict.package];
      if (!declared) continue;
      const requiredDeclared = deps[conflict.requires.name];
      const result = checkPackageConflict(conflict, declared, requiredDeclared);
      if (!result.compatible && result.reason) {
        return {
          rootCauseNode: id,
          rootCauseReason: result.reason,
          fixRecommendation: `Upgrade ${svc.name}'s ${conflict.requires.name} to >= ${conflict.requires.minVersion}`
        };
      }
    }
  }
  return null;
}
|
|
517
|
+
// Dispatch table: root-cause heuristic keyed by the walk's origin node type.
// Node types without an entry get no root-cause analysis.
var rootCauseShapes = {
  [import_types.NodeType.DatabaseNode]: databaseRootCauseShape,
  [import_types.NodeType.ServiceNode]: serviceRootCauseShape
};
|
|
521
|
+
// Breadth-first blast-radius walk from `nodeId`, following the best
// (highest-provenance, non-FRONTIER) outgoing edge per target, up to
// `maxDepth` hops. Each affected node records its BFS distance, the
// provenance of the final hop, the full path, and a path confidence score.
// Result is validated against BlastRadiusResultSchema.
function getBlastRadius(graph, nodeId, maxDepth = BLAST_RADIUS_DEFAULT_DEPTH) {
  // Unknown origin: empty but schema-valid result.
  if (!graph.hasNode(nodeId)) {
    return import_types.BlastRadiusResultSchema.parse({ origin: nodeId, affectedNodes: [], totalAffected: 0 });
  }
  const seen = /* @__PURE__ */ new Map();
  const queue = [{ nodeId, distance: 0, path: [nodeId], pathEdges: [] }];
  // Nodes already queued; BFS order guarantees first visit = shortest path.
  const enqueued = /* @__PURE__ */ new Set([nodeId]);
  while (queue.length > 0) {
    const frame = queue.shift();
    // The origin (distance 0) is not part of its own blast radius.
    if (frame.distance > 0 && frame.pathEdges.length > 0) {
      const lastEdge = frame.pathEdges[frame.pathEdges.length - 1];
      seen.set(frame.nodeId, {
        nodeId: frame.nodeId,
        distance: frame.distance,
        edgeProvenance: lastEdge.provenance,
        path: frame.path,
        // Path confidence: product of per-edge confidences along the walk.
        confidence: confidenceFromMix(frame.pathEdges)
      });
    }
    if (frame.distance >= maxDepth) continue;
    // Deduplicate parallel edges, keeping the strongest provenance per target.
    const outgoing = bestEdgeByTarget(graph, graph.outboundEdges(frame.nodeId));
    for (const [tgtId, edge] of outgoing) {
      if (enqueued.has(tgtId)) continue;
      enqueued.add(tgtId);
      queue.push({
        nodeId: tgtId,
        distance: frame.distance + 1,
        path: [...frame.path, tgtId],
        pathEdges: [...frame.pathEdges, edge]
      });
    }
  }
  // Deterministic ordering: nearest first, then lexicographic by node id.
  const affectedNodes = [...seen.values()].sort(
    (a, b) => a.distance - b.distance || a.nodeId.localeCompare(b.nodeId)
  );
  return import_types.BlastRadiusResultSchema.parse({
    origin: nodeId,
    affectedNodes,
    totalAffected: affectedNodes.length
  });
}
|
|
562
|
+
|
|
563
|
+
// src/policy.ts
|
|
564
|
+
// Default enforcement action per policy severity, used when a policy does not
// set an explicit onViolation.
var DEFAULT_ACTION_BY_SEVERITY = {
  info: "log",
  warning: "alert",
  error: "alert",
  critical: "block"
};
// Effective enforcement action for a policy: its explicit onViolation, or the
// severity-based default above.
function resolveOnViolation(policy) {
  return policy.onViolation ?? DEFAULT_ACTION_BY_SEVERITY[policy.severity];
}
|
|
573
|
+
// Assemble a violation record for one policy hit. `contextSuffix` makes the
// id unique per subject (node/edge) within the policy; `ctx.now()` supplies
// the clock so evaluations are deterministic under test.
function makeViolation(policy, rule, contextSuffix, message, subject, ctx) {
  return {
    id: `${policy.id}:${contextSuffix}`,
    policyId: policy.id,
    policyName: policy.name,
    severity: policy.severity,
    onViolation: resolveOnViolation(policy),
    ruleType: rule.type,
    subject,
    message,
    observedAt: new Date(ctx.now()).toISOString()
  };
}
|
|
586
|
+
// Structural policy: every node of `rule.fromNodeType` must have at least one
// non-FRONTIER edge of `rule.edgeType` pointing at a node of
// `rule.toNodeType`. Emits one violation per node that lacks such an edge.
var evaluateStructural = ({
  graph,
  policy,
  rule,
  ctx
}) => {
  const violations = [];
  graph.forEachNode((id, attrs) => {
    const a = attrs;
    if (a.type !== rule.fromNodeType) return;
    let satisfied = false;
    for (const edgeId of graph.outboundEdges(id)) {
      const e = graph.getEdgeAttributes(edgeId);
      if (e.type !== rule.edgeType) continue;
      // Speculative FRONTIER edges cannot satisfy a structural requirement.
      if (e.provenance === import_types2.Provenance.FRONTIER) continue;
      const target = graph.getNodeAttributes(e.target);
      if (target.type === rule.toNodeType) {
        satisfied = true;
        break;
      }
    }
    if (!satisfied) {
      violations.push(
        makeViolation(
          policy,
          rule,
          id,
          `${rule.fromNodeType} ${id} has no ${rule.edgeType} edge to a ${rule.toNodeType}`,
          { nodeId: id },
          ctx
        )
      );
    }
  });
  return violations;
};
|
|
622
|
+
// Ownership policy: every node of `rule.nodeType` must carry a non-empty
// string in `rule.field` (e.g. an owner/team attribute). Emits one violation
// per node where the field is missing, non-string, or empty.
var evaluateOwnership = ({
  graph,
  policy,
  rule,
  ctx
}) => {
  const violations = [];
  graph.forEachNode((id, attrs) => {
    const a = attrs;
    if (a.type !== rule.nodeType) return;
    const value = a[rule.field];
    if (typeof value !== "string" || value.length === 0) {
      violations.push(
        makeViolation(
          policy,
          rule,
          id,
          `${rule.nodeType} ${id} is missing required field "${rule.field}"`,
          { nodeId: id },
          ctx
        )
      );
    }
  });
  return violations;
};
|
|
648
|
+
// Provenance policy: every edge of `rule.edgeType` (optionally restricted to
// edges into `rule.targetNodeId`) must carry one of the allowed provenance
// values in `rule.required` (single value or array).
var evaluateProvenance = ({
  graph,
  policy,
  rule,
  ctx
}) => {
  // Normalize the rule's required provenance(s) into a Set for O(1) lookups.
  const required = Array.isArray(rule.required) ? new Set(rule.required) : /* @__PURE__ */ new Set([rule.required]);
  const violations = [];
  graph.forEachEdge((edgeId, attrs) => {
    const e = attrs;
    if (e.type !== rule.edgeType) return;
    if (rule.targetNodeId && e.target !== rule.targetNodeId) return;
    if (!required.has(e.provenance)) {
      const requiredList = [...required].join(" | ");
      violations.push(
        makeViolation(
          policy,
          rule,
          edgeId,
          `${rule.edgeType} edge ${edgeId} has provenance ${e.provenance}; required ${requiredList}`,
          { edgeId },
          ctx
        )
      );
    }
  });
  return violations;
};
|
|
676
|
+
// Blast-radius policy: every node of `rule.nodeType` may affect at most
// `rule.maxAffected` downstream nodes; `rule.depth`, when set, limits the
// traversal depth (otherwise getBlastRadius's default applies).
var evaluateBlastRadius = ({
  graph,
  policy,
  rule,
  ctx
}) => {
  const violations = [];
  const depth = rule.depth;
  graph.forEachNode((id, attrs) => {
    const a = attrs;
    if (a.type !== rule.nodeType) return;
    // Omit the depth argument entirely when unset so the default kicks in.
    const result = depth !== void 0 ? getBlastRadius(graph, id, depth) : getBlastRadius(graph, id);
    if (result.totalAffected > rule.maxAffected) {
      violations.push(
        makeViolation(
          policy,
          rule,
          id,
          `${rule.nodeType} ${id} has blast radius ${result.totalAffected} > ${rule.maxAffected}`,
          { nodeId: id, path: [id] },
          ctx
        )
      );
    }
  });
  return violations;
};
|
|
703
|
+
// Compatibility policy: run the four compat rule families against every
// service node's declared dependencies. `rule.kind`, when set, restricts
// evaluation to one family ("driver-engine", "node-engine",
// "package-conflict", or "deprecated-api"); unset means all four.
var evaluateCompatibility = ({
  graph,
  policy,
  rule,
  ctx
}) => {
  const violations = [];
  const wantsKind = (kind) => rule.kind === void 0 || rule.kind === kind;
  graph.forEachNode((svcId, attrs) => {
    const a = attrs;
    if (a.type !== import_types2.NodeType.ServiceNode) return;
    const svc = a;
    const deps = svc.dependencies ?? {};
    // Family 1: driver vs database engine, via CONNECTS_TO edges.
    if (wantsKind("driver-engine")) {
      for (const edgeId of graph.outboundEdges(svcId)) {
        const e = graph.getEdgeAttributes(edgeId);
        if (e.type !== import_types2.EdgeType.CONNECTS_TO) continue;
        // Speculative FRONTIER connections are not evaluated.
        if (e.provenance === import_types2.Provenance.FRONTIER) continue;
        const dbAttrs = graph.getNodeAttributes(e.target);
        if (dbAttrs.type !== import_types2.NodeType.DatabaseNode) continue;
        const db = dbAttrs;
        for (const pair of compatPairs()) {
          if (pair.engine !== db.engine) continue;
          const declared = deps[pair.driver];
          if (!declared) continue;
          const result = checkCompatibility(pair.driver, declared, db.engine, db.engineVersion);
          if (!result.compatible && result.reason) {
            violations.push(
              makeViolation(
                policy,
                rule,
                // Suffix encodes driver@version vs engine@version for a unique id.
                `${svcId}:driver-engine:${pair.driver}@${declared}:${db.engine}@${db.engineVersion}`,
                result.reason,
                { nodeId: svcId, edgeId },
                ctx
              )
            );
          }
        }
      }
    }
    // Family 2: Node runtime requirements from declared packages.
    if (wantsKind("node-engine")) {
      const serviceNodeRange = svc.nodeEngine;
      for (const constraint of nodeEngineConstraints()) {
        const declared = deps[constraint.package];
        if (!declared) continue;
        const result = checkNodeEngineConstraint(constraint, declared, serviceNodeRange);
        if (!result.compatible && result.reason) {
          violations.push(
            makeViolation(
              policy,
              rule,
              `${svcId}:node-engine:${constraint.package}@${declared}`,
              result.reason,
              { nodeId: svcId },
              ctx
            )
          );
        }
      }
    }
    // Family 3: peer-package version conflicts.
    if (wantsKind("package-conflict")) {
      for (const conflict of packageConflicts()) {
        const declared = deps[conflict.package];
        if (!declared) continue;
        const requiredDeclared = deps[conflict.requires.name];
        const result = checkPackageConflict(conflict, declared, requiredDeclared);
        if (!result.compatible && result.reason) {
          violations.push(
            makeViolation(
              policy,
              rule,
              `${svcId}:package-conflict:${conflict.package}@${declared}`,
              result.reason,
              { nodeId: svcId },
              ctx
            )
          );
        }
      }
    }
    // Family 4: dependencies on deprecated packages.
    if (wantsKind("deprecated-api")) {
      for (const dep of deprecatedApis()) {
        const declared = deps[dep.package];
        if (!declared) continue;
        const result = checkDeprecatedApi(dep, declared);
        if (!result.compatible && result.reason) {
          violations.push(
            makeViolation(
              policy,
              rule,
              `${svcId}:deprecated-api:${dep.package}@${declared}`,
              result.reason,
              { nodeId: svcId },
              ctx
            )
          );
        }
      }
    }
  });
  return violations;
};
|
|
806
|
+
// Dispatch table mapping a policy rule `type` to its evaluator function.
// Looked up by evaluateAllPolicies; a rule type with no entry here has
// no evaluator.
var policyEvaluators = {
  structural: evaluateStructural,
  ownership: evaluateOwnership,
  provenance: evaluateProvenance,
  "blast-radius": evaluateBlastRadius,
  compatibility: evaluateCompatibility
};
|
|
813
|
+
// Gate a frontier-node promotion behind policy evaluation. Promotion is
// allowed unless some violation with onViolation === "block" names the
// frontier node directly (subject.nodeId) or through its subject path.
// With no policies the gate is trivially open.
function canPromoteFrontier(graph, frontierId2, policies, ctx) {
  if (policies.length === 0) return { allowed: true, violations: [] };
  const evaluated = evaluateAllPolicies(graph, policies, ctx);
  const touchesFrontier = (v) =>
    v.subject.nodeId === frontierId2 || v.subject.path?.includes(frontierId2) === true;
  const blocking = evaluated.filter(
    (v) => v.onViolation === "block" && touchesFrontier(v)
  );
  return { allowed: blocking.length === 0, violations: blocking };
}
|
|
822
|
+
// Run every policy through the evaluator registered for its rule type
// and concatenate all resulting violations in policy order.
function evaluateAllPolicies(graph, policies, ctx) {
  const collected = [];
  for (const policy of policies) {
    const evaluator = policyEvaluators[policy.rule.type];
    collected.push(...evaluator({ graph, policy, rule: policy.rule, ctx }));
  }
  return collected;
}
|
|
831
|
+
|
|
832
|
+
// src/ingest.ts
|
|
833
|
+
var import_types3 = require("@neat.is/types");
|
|
834
|
+
// Millisecond unit helpers.
var HOUR_MS = 60 * 60 * 1e3;
var DAY_MS = 24 * HOUR_MS;
// Default per-edge-type staleness windows (ms): live-traffic CALLS edges
// expire fastest, connection/messaging edges after 4h, and
// configuration/infrastructure edges after a full day.
var DEFAULT_STALE_THRESHOLDS = {
  CALLS: HOUR_MS,
  CONNECTS_TO: 4 * HOUR_MS,
  PUBLISHES_TO: 4 * HOUR_MS,
  CONSUMES_FROM: 4 * HOUR_MS,
  DEPENDS_ON: DAY_MS,
  CONFIGURED_BY: DAY_MS,
  RUNS_ON: DAY_MS
};
// TTL for cached parent-span lookups (5 minutes).
var PARENT_SPAN_CACHE_TTL_MS = 5 * 60 * 1e3;
|
|
846
|
+
// Merge FrontierNodes into the ServiceNodes they resolve to, matching a
// frontier's host against each service's name and aliases. Each promoted
// frontier has its edges rewired onto the service node and is then
// dropped. When opts.policies and opts.policyCtx are provided, each
// promotion is individually gated by canPromoteFrontier. Returns the
// number of frontier nodes promoted.
function promoteFrontierNodes(graph, opts = {}) {
  // Map every service name and alias to its owning node id.
  const nameToService = /* @__PURE__ */ new Map();
  graph.forEachNode((id, attrs) => {
    const node = attrs;
    if (node.type !== import_types3.NodeType.ServiceNode) return;
    nameToService.set(node.name, id);
    for (const alias of node.aliases ?? []) nameToService.set(alias, id);
  });
  // Collect candidates first so the graph is never mutated mid-iteration.
  const candidates = [];
  graph.forEachNode((id, attrs) => {
    const node = attrs;
    if (node.type !== import_types3.NodeType.FrontierNode) return;
    const resolved = nameToService.get(node.host);
    if (!resolved || resolved === id) return;
    candidates.push({ frontierId: id, serviceId: resolved });
  });
  let promoted = 0;
  for (const { frontierId: frontierId2, serviceId: serviceId3 } of candidates) {
    if (opts.policies && opts.policies.length > 0 && opts.policyCtx) {
      const gate = canPromoteFrontier(graph, frontierId2, opts.policies, opts.policyCtx);
      if (!gate.allowed) continue;
    }
    rewireFrontierEdges(graph, frontierId2, serviceId3);
    graph.dropNode(frontierId2);
    promoted++;
  }
  return promoted;
}
|
|
879
|
+
// Re-point every edge that touches the frontier node at the service node:
// inbound edges keep their original source, outbound edges keep their
// original target.
function rewireFrontierEdges(graph, frontierId2, serviceId3) {
  // Snapshot edge ids up front — rebuildEdge drops/adds edges as we go.
  const incoming = [...graph.inboundEdges(frontierId2)];
  const outgoing = [...graph.outboundEdges(frontierId2)];
  for (const edgeId of incoming) {
    const attrs = graph.getEdgeAttributes(edgeId);
    rebuildEdge(graph, attrs, attrs.source, serviceId3, edgeId);
  }
  for (const edgeId of outgoing) {
    const attrs = graph.getEdgeAttributes(edgeId);
    rebuildEdge(graph, attrs, serviceId3, attrs.target, edgeId);
  }
}
|
|
891
|
+
// Drop `oldEdgeId` and re-create it between (newSource, newTarget).
// FRONTIER provenance is upgraded to OBSERVED in the process. When an
// edge with the recomputed id already exists, merge into it instead:
// callCounts are summed and the later lastObserved wins.
function rebuildEdge(graph, edge, newSource, newTarget, oldEdgeId) {
  graph.dropEdge(oldEdgeId);
  const provenance = edge.provenance === import_types3.Provenance.FRONTIER
    ? import_types3.Provenance.OBSERVED
    : edge.provenance;
  // Edge ids are provenance-specific; pick the matching id builder.
  let newId;
  if (provenance === import_types3.Provenance.OBSERVED) {
    newId = (0, import_types3.observedEdgeId)(newSource, newTarget, edge.type);
  } else if (provenance === import_types3.Provenance.INFERRED) {
    newId = (0, import_types3.inferredEdgeId)(newSource, newTarget, edge.type);
  } else if (provenance === import_types3.Provenance.EXTRACTED) {
    newId = (0, import_types3.extractedEdgeId)(newSource, newTarget, edge.type);
  } else {
    newId = (0, import_types3.frontierEdgeId)(newSource, newTarget, edge.type);
  }
  if (graph.hasEdge(newId)) {
    const existing = graph.getEdgeAttributes(newId);
    graph.replaceEdgeAttributes(newId, {
      ...existing,
      callCount: (existing.callCount ?? 0) + (edge.callCount ?? 0),
      lastObserved: pickLater(existing.lastObserved, edge.lastObserved)
    });
    return;
  }
  graph.addEdgeWithKey(newId, newSource, newTarget, {
    ...edge,
    id: newId,
    source: newSource,
    target: newTarget,
    provenance
  });
}
|
|
914
|
+
// Return whichever of two date-like strings parses to the later instant;
// a missing/falsy value loses to a present one. Ties favor the first
// argument.
function pickLater(a, b) {
  if (!a) return b;
  if (!b) return a;
  const first = new Date(a).getTime();
  const second = new Date(b).getTime();
  return first >= second ? a : b;
}
|
|
919
|
+
|
|
920
|
+
// src/extract/services.ts
|
|
921
|
+
var import_node_fs6 = require("fs");
|
|
922
|
+
var import_node_path6 = __toESM(require("path"), 1);
|
|
923
|
+
var import_ignore = __toESM(require("ignore"), 1);
|
|
924
|
+
var import_minimatch = require("minimatch");
|
|
925
|
+
var import_types5 = require("@neat.is/types");
|
|
926
|
+
|
|
927
|
+
// src/extract/shared.ts
|
|
928
|
+
var import_node_fs4 = require("fs");
|
|
929
|
+
var import_node_path4 = __toESM(require("path"), 1);
|
|
930
|
+
var import_yaml = require("yaml");
|
|
931
|
+
var import_types4 = require("@neat.is/types");
|
|
932
|
+
// File extensions treated as service source files during extraction.
var SERVICE_FILE_EXTENSIONS = /* @__PURE__ */ new Set([".js", ".mjs", ".cjs", ".ts", ".tsx", ".py"]);
// File extensions treated as configuration files.
var CONFIG_FILE_EXTENSIONS = /* @__PURE__ */ new Set([".yaml", ".yml"]);
// Directory names that are never descended into while scanning a repo.
var IGNORED_DIRS = /* @__PURE__ */ new Set([
  "node_modules",
  ".git",
  ".turbo",
  "dist",
  "build",
  ".next"
]);
|
|
942
|
+
// Classify a file name as a config file. Returns { match, fileType }:
// fileType is the extension without its dot for YAML files, "env" for
// .env / .env.* files, and "" when there is no match.
function isConfigFile(name) {
  const extension = import_node_path4.default.extname(name);
  if (CONFIG_FILE_EXTENSIONS.has(extension)) {
    return { match: true, fileType: extension.slice(1) };
  }
  const isDotenv = name === ".env" || name.startsWith(".env.");
  return isDotenv ? { match: true, fileType: "env" } : { match: false, fileType: "" };
}
|
|
948
|
+
// Normalize a version/range string by stripping leading range operators
// (^ ~ > < = v) and whitespace. Returns undefined for missing input or
// when nothing remains after cleaning.
function cleanVersion(raw) {
  if (!raw) return void 0;
  const stripped = raw.replace(/^[\^~><=v\s]+/, "").trim();
  return stripped === "" ? void 0 : stripped;
}
|
|
952
|
+
// Read a UTF-8 file and parse it as JSON.
async function readJson(filePath) {
  return JSON.parse(await import_node_fs4.promises.readFile(filePath, "utf8"));
}
|
|
956
|
+
// Read a UTF-8 file and parse it as a single YAML document.
async function readYaml(filePath) {
  const text = await import_node_fs4.promises.readFile(filePath, "utf8");
  return (0, import_yaml.parse)(text);
}
|
|
960
|
+
// True when `p` is accessible on disk; any access error counts as absent.
async function exists(p) {
  return import_node_fs4.promises.access(p).then(
    () => true,
    () => false
  );
}
|
|
968
|
+
|
|
969
|
+
// src/extract/python.ts
|
|
970
|
+
var import_node_fs5 = require("fs");
|
|
971
|
+
var import_node_path5 = __toESM(require("path"), 1);
|
|
972
|
+
var import_smol_toml = require("smol-toml");
|
|
973
|
+
// Matches a single requirement specifier: package name (group 1),
// optional extras ("[...]", discarded), and an optional exact "==" pin
// whose version lands in group 3. Specifiers using other operators still
// match the name but capture no version.
var REQUIREMENT_LINE = /^\s*([A-Za-z0-9_.-]+)(?:\[[^\]]*\])?\s*(?:(==)\s*([A-Za-z0-9_.+-]+))?/;
|
|
974
|
+
// Parse a requirements.txt body into { name -> pinnedVersion }. Names
// are lowercased; only "==" pins produce a version, everything else maps
// to "". Comments, blank lines and option lines ("-r", "-e", ...) are
// skipped.
function parseRequirementsTxt(content) {
  const deps = {};
  for (const rawLine of content.split("\n")) {
    const stripped = rawLine.split("#")[0]?.trim();
    if (!stripped || stripped.startsWith("-")) continue;
    const parsed = REQUIREMENT_LINE.exec(stripped);
    if (parsed === null) continue;
    deps[parsed[1].toLowerCase()] = parsed[3] ?? "";
  }
  return deps;
}
|
|
988
|
+
// Collect dependencies from a parsed pyproject.toml: PEP 621
// `project.dependencies` entries first, then Poetry's
// `tool.poetry.dependencies` table (the "python" interpreter pin is
// skipped; Poetry entries overwrite PEP 621 ones on conflict). Names are
// lowercased; leading range operators are stripped from Poetry versions.
function depsFromPyProject(pyproject) {
  const deps = {};
  for (const spec of pyproject.project?.dependencies ?? []) {
    const parsed = REQUIREMENT_LINE.exec(spec);
    if (parsed) deps[parsed[1].toLowerCase()] = parsed[3] ?? "";
  }
  const poetry = pyproject.tool?.poetry?.dependencies ?? {};
  for (const [pkgName, spec] of Object.entries(poetry)) {
    if (pkgName.toLowerCase() === "python") continue;
    const version = typeof spec === "string" ? spec : spec?.version ?? "";
    deps[pkgName.toLowerCase()] = version.replace(/^[\^~><=v\s]+/, "");
  }
  return deps;
}
|
|
1003
|
+
// Detect a Python service in `serviceDir` by the presence of
// pyproject.toml, requirements.txt or setup.py; returns
// { name, version, dependencies } or null when none of the marker files
// exist. pyproject metadata supplies name/version when available;
// requirements.txt entries overwrite pyproject dependencies on conflict.
async function discoverPythonService(serviceDir) {
  const inDir = (f) => import_node_path5.default.join(serviceDir, f);
  const pyprojectPath = inDir("pyproject.toml");
  const requirementsPath = inDir("requirements.txt");
  const hasPyproject = await exists(pyprojectPath);
  const hasRequirements = await exists(requirementsPath);
  const hasSetup = await exists(inDir("setup.py"));
  if (!hasPyproject && !hasRequirements && !hasSetup) return null;
  let name = import_node_path5.default.basename(serviceDir);
  let version;
  const dependencies = {};
  if (hasPyproject) {
    const text = await import_node_fs5.promises.readFile(pyprojectPath, "utf8");
    const pyproject = (0, import_smol_toml.parse)(text);
    name = pyproject.project?.name ?? pyproject.tool?.poetry?.name ?? name;
    version = pyproject.project?.version ?? pyproject.tool?.poetry?.version ?? void 0;
    Object.assign(dependencies, depsFromPyProject(pyproject));
  }
  if (hasRequirements) {
    const text = await import_node_fs5.promises.readFile(requirementsPath, "utf8");
    Object.assign(dependencies, parseRequirementsTxt(text));
  }
  return { name, version, dependencies };
}
|
|
1027
|
+
// Adapt a discovered Python service to the package.json-like shape the
// rest of the extraction pipeline expects.
function pythonToPackage(service) {
  const { name, version, dependencies } = service;
  return { name, version, dependencies };
}
|
|
1034
|
+
|
|
1035
|
+
// src/extract/services.ts
|
|
1036
|
+
// Default directory depth for repository scans.
var DEFAULT_SCAN_DEPTH = 5;
// Resolve the scan depth from the NEAT_SCAN_DEPTH environment variable,
// falling back to the default when unset, non-numeric or negative.
function parseScanDepth() {
  const configured = process.env.NEAT_SCAN_DEPTH;
  if (!configured) return DEFAULT_SCAN_DEPTH;
  const depth = Number.parseInt(configured, 10);
  if (Number.isFinite(depth) && depth >= 0) return depth;
  return DEFAULT_SCAN_DEPTH;
}
|
|
1043
|
+
// Extract workspace glob patterns from a root package.json. Supports the
// array form ("workspaces": [...]) and the object form
// ("workspaces": { "packages": [...] }). Returns null when absent or empty.
function workspaceGlobs(pkg) {
  const ws = pkg.workspaces;
  if (!ws) return null;
  const globs = Array.isArray(ws) ? ws : Array.isArray(ws.packages) ? ws.packages : null;
  return globs && globs.length > 0 ? globs : null;
}
|
|
1050
|
+
// Load and compile the repo-root .gitignore into an `ignore` matcher,
// or null when the file does not exist.
async function loadGitignore(scanPath) {
  const gitignorePath = import_node_path6.default.join(scanPath, ".gitignore");
  if (!await exists(gitignorePath)) return null;
  const contents = await import_node_fs6.promises.readFile(gitignorePath, "utf8");
  return (0, import_ignore.default)().add(contents);
}
|
|
1056
|
+
// Depth-first directory walk from `start`, invoking `visit` on every
// directory up to options.maxDepth levels below it. IGNORED_DIRS and
// (when options.ig is set) gitignored paths are skipped; unreadable
// directories are silently treated as empty.
async function walkDirs(start, scanPath, options, visit) {
  const descend = async (current, depth) => {
    if (depth > options.maxDepth) return;
    const entries = await import_node_fs6.promises.readdir(current, { withFileTypes: true }).catch(() => []);
    for (const dirent of entries) {
      if (!dirent.isDirectory() || IGNORED_DIRS.has(dirent.name)) continue;
      const child = import_node_path6.default.join(current, dirent.name);
      if (options.ig) {
        const rel = import_node_path6.default.relative(scanPath, child).split(import_node_path6.default.sep).join("/");
        // gitignore directory patterns match with a trailing slash.
        if (rel && options.ig.ignores(rel + "/")) continue;
      }
      await visit(child);
      await descend(child, depth + 1);
    }
  };
  await descend(start, 0);
}
|
|
1074
|
+
// Expand workspace glob patterns into the set of directories that both
// match a pattern and contain a package.json.
//
// Literal patterns (no "*") are probed directly. For wildcard patterns
// the walk starts at the deepest static prefix of the pattern; the walk
// depth is the number of remaining pattern segments minus one (walkDirs
// counts depth from the children of the start dir), or the full
// configured scan depth when the pattern contains "**". Matching is
// delegated to minimatch on POSIX-style relative paths.
async function expandWorkspaceGlobs(scanPath, globs) {
  const found = /* @__PURE__ */ new Set();
  const scanDepth = parseScanDepth();
  for (const raw of globs) {
    // Normalize a leading "./" so relative-path matching lines up.
    const pattern = raw.replace(/^\.\//, "");
    if (!pattern.includes("*")) {
      const candidate = import_node_path6.default.join(scanPath, pattern);
      if (await exists(import_node_path6.default.join(candidate, "package.json"))) found.add(candidate);
      continue;
    }
    const segments = pattern.split("/");
    // Static prefix: pattern segments before the first wildcard segment.
    const staticSegments = [];
    for (const seg of segments) {
      if (seg.includes("*")) break;
      staticSegments.push(seg);
    }
    const start = import_node_path6.default.join(scanPath, ...staticSegments);
    if (!await exists(start)) continue;
    const hasDoubleStar = pattern.includes("**");
    const walkDepth = hasDoubleStar ? scanDepth : Math.max(0, segments.length - staticSegments.length - 1);
    await walkDirs(start, scanPath, { maxDepth: walkDepth, ig: null }, async (dir) => {
      const rel = import_node_path6.default.relative(scanPath, dir).split(import_node_path6.default.sep).join("/");
      if ((0, import_minimatch.minimatch)(rel, pattern) && await exists(import_node_path6.default.join(dir, "package.json"))) {
        found.add(dir);
      }
    });
  }
  return [...found];
}
|
|
1103
|
+
// Build a JavaScript ServiceNode from a directory's package.json.
// Returns null when there is no package.json or it lacks a "name".
// nodeEngine is recorded only when engines.node is declared.
async function discoverNodeService(scanPath, dir) {
  const pkgPath = import_node_path6.default.join(dir, "package.json");
  if (!await exists(pkgPath)) return null;
  const pkg = await readJson(pkgPath);
  if (!pkg.name) return null;
  const node = {
    id: (0, import_types5.serviceId)(pkg.name),
    type: import_types5.NodeType.ServiceNode,
    name: pkg.name,
    language: "javascript",
    version: pkg.version,
    dependencies: pkg.dependencies ?? {},
    repoPath: import_node_path6.default.relative(scanPath, dir)
  };
  if (pkg.engines?.node) node.nodeEngine = pkg.engines.node;
  return { pkg, dir, node };
}
|
|
1120
|
+
// Wrap discoverPythonService output as a { pkg, dir, node } service
// entry carrying a python-language ServiceNode, or null when the
// directory holds no Python service.
async function discoverPyService(scanPath, dir) {
  const py = await discoverPythonService(dir);
  if (py === null) return null;
  const node = {
    id: (0, import_types5.serviceId)(py.name),
    type: import_types5.NodeType.ServiceNode,
    name: py.name,
    language: "python",
    version: py.version,
    dependencies: py.dependencies,
    repoPath: import_node_path6.default.relative(scanPath, dir)
  };
  return { pkg: pythonToPackage(py), dir, node };
}
|
|
1135
|
+
// Discover every Node and Python service under `scanPath`.
//
// When the root package.json declares workspaces, only the workspace
// globs are expanded. Otherwise the tree is walked (honoring .gitignore)
// for directories containing package.json, pyproject.toml,
// requirements.txt or setup.py, and the scan root itself is included
// when it is a named package. Candidate dirs are sorted so duplicate
// package names resolve deterministically: the lexicographically first
// directory wins, later ones are warned about and skipped.
async function discoverServices(scanPath) {
  const rootPkgPath = import_node_path6.default.join(scanPath, "package.json");
  const rootPkg = await exists(rootPkgPath) ? await readJson(rootPkgPath) : null;
  const wsGlobs = rootPkg ? workspaceGlobs(rootPkg) : null;
  const candidateDirs = [];
  if (wsGlobs) {
    candidateDirs.push(...await expandWorkspaceGlobs(scanPath, wsGlobs));
  } else {
    if (rootPkg && rootPkg.name) candidateDirs.push(scanPath);
    const ig = await loadGitignore(scanPath);
    await walkDirs(
      scanPath,
      scanPath,
      { maxDepth: parseScanDepth(), ig },
      async (dir) => {
        if (await exists(import_node_path6.default.join(dir, "package.json"))) {
          candidateDirs.push(dir);
        } else if (await exists(import_node_path6.default.join(dir, "pyproject.toml")) || await exists(import_node_path6.default.join(dir, "requirements.txt")) || await exists(import_node_path6.default.join(dir, "setup.py"))) {
          candidateDirs.push(dir);
        }
      }
    );
  }
  // Deterministic winner for duplicate package names.
  candidateDirs.sort();
  const seen = /* @__PURE__ */ new Map();
  const out = [];
  for (const dir of candidateDirs) {
    // package.json takes precedence over Python markers in the same dir.
    const service = await discoverNodeService(scanPath, dir) ?? await discoverPyService(scanPath, dir);
    if (!service) continue;
    const existingDir = seen.get(service.node.name);
    if (existingDir !== void 0) {
      const a = import_node_path6.default.relative(scanPath, existingDir) || ".";
      const b = import_node_path6.default.relative(scanPath, dir) || ".";
      console.warn(
        `[neat] duplicate package name "${service.node.name}" \u2014 keeping ${a}, ignoring ${b}`
      );
      continue;
    }
    seen.set(service.node.name, dir);
    out.push(service);
  }
  return out;
}
|
|
1178
|
+
// Insert (or merge) discovered ServiceNodes into the graph. New nodes
// are tagged discoveredVia "static"; an existing node discovered via
// otel becomes "merged" (otherwise "static"), with static attributes
// overwriting existing ones. Returns the count of newly added nodes only.
function addServiceNodes(graph, services) {
  let added = 0;
  for (const { node } of services) {
    if (!graph.hasNode(node.id)) {
      graph.addNode(node.id, { ...node, discoveredVia: "static" });
      added++;
      continue;
    }
    const existing = graph.getNodeAttributes(node.id);
    const discoveredVia = existing.discoveredVia === "otel" ? "merged" : "static";
    graph.replaceNodeAttributes(node.id, { ...existing, ...node, discoveredVia });
  }
  return added;
}
|
|
1196
|
+
|
|
1197
|
+
// src/extract/aliases.ts
|
|
1198
|
+
var import_node_path7 = __toESM(require("path"), 1);
|
|
1199
|
+
var import_node_fs7 = require("fs");
|
|
1200
|
+
var import_yaml2 = require("yaml");
|
|
1201
|
+
var import_types6 = require("@neat.is/types");
|
|
1202
|
+
// Kubernetes resource kinds whose metadata names are turned into
// in-cluster DNS hostnames and recorded as service aliases.
var K8S_KINDS_WITH_HOSTNAMES = /* @__PURE__ */ new Set([
  "Service",
  "Deployment",
  "StatefulSet",
  "DaemonSet"
]);
|
|
1208
|
+
// Union `candidates` into the alias set of a ServiceNode, skipping empty
// values and the node's own name. Aliases are stored sorted; the node is
// left untouched when the merge yields no aliases or the id is not a
// ServiceNode in the graph.
function addAliases(graph, serviceId3, candidates) {
  if (!graph.hasNode(serviceId3)) return;
  const node = graph.getNodeAttributes(serviceId3);
  if (node.type !== import_types6.NodeType.ServiceNode) return;
  const merged = new Set(node.aliases ?? []);
  for (const candidate of candidates) {
    if (candidate && candidate !== node.name) merged.add(candidate);
  }
  if (merged.size === 0) return;
  graph.replaceNodeAttributes(serviceId3, { ...node, aliases: [...merged].sort() });
}
|
|
1222
|
+
// Index services by both package name and directory basename so
// compose/k8s references resolve either way. Later services overwrite
// earlier ones on key collision.
function indexServicesByName(services) {
  const index = /* @__PURE__ */ new Map();
  for (const service of services) {
    index.set(service.node.name, service.node.id);
    index.set(import_node_path7.default.basename(service.dir), service.node.id);
  }
  return index;
}
|
|
1230
|
+
// Record docker-compose service names, container_name and hostname as
// aliases of the matching services. Looks for docker-compose.yml /
// docker-compose.yaml at the scan root and does nothing when absent or
// when the file declares no services.
async function collectComposeAliases(graph, scanPath, serviceIndex) {
  let composePath = null;
  for (const fileName of ["docker-compose.yml", "docker-compose.yaml"]) {
    const absPath = import_node_path7.default.join(scanPath, fileName);
    if (await exists(absPath)) {
      composePath = absPath;
      break;
    }
  }
  if (composePath === null) return;
  const compose = await readYaml(composePath);
  if (!compose?.services) return;
  for (const [composeName, svc] of Object.entries(compose.services)) {
    const serviceId3 = serviceIndex.get(composeName);
    if (!serviceId3) continue;
    const aliases = /* @__PURE__ */ new Set([composeName]);
    if (svc.container_name) aliases.add(svc.container_name);
    if (svc.hostname) aliases.add(svc.hostname);
    addAliases(graph, serviceId3, aliases);
  }
}
|
|
1251
|
+
// Dockerfile LABEL keys (compared lowercased) whose values are taken as
// service-name aliases.
var LABEL_KEYS = /* @__PURE__ */ new Set([
  "service",
  "service.name",
  "app",
  "app.name",
  "com.docker.compose.service",
  "org.opencontainers.image.title"
]);
|
|
1259
|
+
// Pull alias candidates out of Dockerfile LABEL instructions: for each
// LABEL line, collect the values of recognized keys (LABEL_KEYS),
// accepting double-quoted, single-quoted and bare values.
function parseDockerfileLabels(content) {
  const values = [];
  const labelLine = /^\s*label\s+(.+)$/i;
  for (const line of content.split("\n")) {
    const labelMatch = labelLine.exec(line);
    if (labelMatch === null) continue;
    // key=value pairs; value may be "double-quoted", 'single-quoted' or bare.
    const pairRegex = /([\w.-]+)\s*=\s*("([^"]*)"|'([^']*)'|([^\s]+))/g;
    for (let pair = pairRegex.exec(labelMatch[1]); pair !== null; pair = pairRegex.exec(labelMatch[1])) {
      if (!LABEL_KEYS.has(pair[1].toLowerCase())) continue;
      const value = pair[3] ?? pair[4] ?? pair[5] ?? "";
      if (value) values.push(value);
    }
  }
  return values;
}
|
|
1277
|
+
// For every service with a Dockerfile in its directory, register the
// recognized label values as aliases on its graph node.
async function collectDockerfileAliases(graph, services) {
  for (const service of services) {
    const dockerfilePath = import_node_path7.default.join(service.dir, "Dockerfile");
    if (!await exists(dockerfilePath)) continue;
    const contents = await import_node_fs7.promises.readFile(dockerfilePath, "utf8");
    const labels = parseDockerfileLabels(contents);
    if (labels.length > 0) addAliases(graph, service.node.id, labels);
  }
}
|
|
1286
|
+
// Recursively list YAML files under `start`, at most `max` directory
// levels deep, skipping IGNORED_DIRS; unreadable directories contribute
// no entries.
async function walkYamlFiles(start, depth = 0, max = 5) {
  if (depth > max) return [];
  const found = [];
  const entries = await import_node_fs7.promises.readdir(start, { withFileTypes: true }).catch(() => []);
  for (const dirent of entries) {
    const full = import_node_path7.default.join(start, dirent.name);
    if (dirent.isDirectory()) {
      if (!IGNORED_DIRS.has(dirent.name)) {
        found.push(...await walkYamlFiles(full, depth + 1, max));
      }
    } else if (dirent.isFile() && CONFIG_FILE_EXTENSIONS.has(import_node_path7.default.extname(dirent.name))) {
      found.push(full);
    }
  }
  return found;
}
|
|
1300
|
+
// DNS names a Kubernetes Service answers to inside a cluster, from the
// short name to the fully qualified form. Namespace defaults to
// "default" when omitted.
function k8sHostnames(name, namespace) {
  const ns = namespace ?? "default";
  const base = `${name}.${ns}`;
  return [name, base, `${base}.svc`, `${base}.svc.cluster.local`];
}
|
|
1309
|
+
// Resolve which known service a Kubernetes manifest refers to. Tries, in
// order: the selector app label (spec.selector.app or
// spec.selector.matchLabels.app), the manifest's own labels.app, then
// metadata.name — each only counting when present in `byName`. Returns
// the service id or null.
function k8sServiceTarget(doc, byName) {
  const selector = doc.spec?.selector;
  const candidates = [
    selector?.app ?? selector?.matchLabels?.app,
    doc.metadata?.labels?.app,
    doc.metadata?.name
  ];
  for (const candidate of candidates) {
    if (candidate && byName.has(candidate)) return byName.get(candidate);
  }
  return null;
}
|
|
1319
|
+
// Scan YAML files under the repo for Kubernetes manifests and record
// their in-cluster DNS names as aliases on the services they target.
// Files that fail YAML parsing are skipped.
async function collectK8sAliases(graph, scanPath, serviceIndex) {
  for (const file of await walkYamlFiles(scanPath)) {
    const text = await import_node_fs7.promises.readFile(file, "utf8");
    let docs;
    try {
      docs = (0, import_yaml2.parseAllDocuments)(text).map((d) => d.toJSON());
    } catch {
      continue;
    }
    for (const doc of docs) {
      if (!doc?.kind || !doc.metadata?.name) continue;
      if (!K8S_KINDS_WITH_HOSTNAMES.has(doc.kind)) continue;
      const target = k8sServiceTarget(doc, serviceIndex);
      if (!target) continue;
      addAliases(graph, target, k8sHostnames(doc.metadata.name, doc.metadata.namespace));
    }
  }
}
|
|
1338
|
+
// Collect service aliases from every supported source, sequentially:
// docker-compose, Dockerfile labels, then Kubernetes manifests. Each
// collector mutates the graph before the next runs.
async function addServiceAliases(graph, scanPath, services) {
  const byName = indexServicesByName(services);
  await collectComposeAliases(graph, scanPath, byName);
  await collectDockerfileAliases(graph, services);
  await collectK8sAliases(graph, scanPath, byName);
}
|
|
1344
|
+
|
|
1345
|
+
// src/extract/databases/index.ts
|
|
1346
|
+
var import_node_path15 = __toESM(require("path"), 1);
|
|
1347
|
+
var import_types7 = require("@neat.is/types");
|
|
1348
|
+
|
|
1349
|
+
// src/extract/databases/db-config-yaml.ts
|
|
1350
|
+
var import_node_path8 = __toESM(require("path"), 1);
|
|
1351
|
+
// Parse a service's db-config.yaml (when present) into a single database
// config entry. engineVersion is stringified; it defaults to "unknown"
// when not declared in the file.
async function parse(serviceDir) {
  const yamlPath = import_node_path8.default.join(serviceDir, "db-config.yaml");
  if (!await exists(yamlPath)) return [];
  const doc = await readYaml(yamlPath);
  const engineVersion = doc.engineVersion !== void 0 ? String(doc.engineVersion) : "unknown";
  return [{
    host: doc.host,
    port: doc.port,
    database: doc.database,
    engine: doc.engine,
    engineVersion,
    sourceFile: yamlPath
  }];
}
|
|
1366
|
+
// Database-config parser backed by a conventional db-config.yaml file.
var dbConfigYamlParser = { name: "db-config.yaml", parse };
|
|
1367
|
+
|
|
1368
|
+
// src/extract/databases/dotenv.ts
|
|
1369
|
+
var import_node_fs9 = require("fs");
|
|
1370
|
+
var import_node_path10 = __toESM(require("path"), 1);
|
|
1371
|
+
|
|
1372
|
+
// src/extract/databases/shared.ts
|
|
1373
|
+
var import_node_fs8 = require("fs");
|
|
1374
|
+
var import_node_path9 = __toESM(require("path"), 1);
|
|
1375
|
+
// Map a connection-URL scheme to a canonical engine name, or null for
// unrecognized schemes. The "+driver" suffix ("postgresql+asyncpg",
// "mongodb+srv", ...) is stripped before matching — which is why the
// switch needs no "+srv"-style cases (the previous `case "mongodb+srv"`
// was unreachable: split("+")[0] can never contain a "+").
function schemeToEngine(scheme) {
  const base = scheme.toLowerCase().split("+")[0];
  switch (base) {
    case "postgres":
    case "postgresql":
      return "postgresql";
    case "mysql":
    case "mariadb":
      return "mysql";
    case "mongodb":
      return "mongodb";
    case "redis":
    case "rediss":
      return "redis";
    case "sqlite":
      return "sqlite";
    default:
      return null;
  }
}
|
|
1396
|
+
// Parse a database connection URL into { host, port, database, engine,
// engineVersion }. Credentials in the URL are ignored; unknown schemes
// and non-URL strings yield null. engineVersion is always "unknown"
// since a URL carries no server version.
function parseConnectionString(url) {
  const match = url.match(
    /^(?<scheme>[a-z][a-z+]*):\/\/(?:[^@/]+(?::[^@]*)?@)?(?<host>[^:/?]+)(?::(?<port>\d+))?(?:\/(?<db>[^?#]*))?/i
  );
  if (!match?.groups) return null;
  const engine = schemeToEngine(match.groups.scheme);
  if (engine === null) return null;
  const { host, port, db } = match.groups;
  return {
    host,
    port: port ? Number(port) : void 0,
    database: db ?? "",
    engine,
    engineVersion: "unknown"
  };
}
|
|
1411
|
+
// Read a file as UTF-8, returning null instead of throwing when it is
// missing or unreadable.
async function readIfExists(filePath) {
  return import_node_fs8.promises.readFile(filePath, "utf8").catch(() => null);
}
|
|
1418
|
+
// Return the absolute path of the first candidate (relative to
// serviceDir) that exists and is readable, or null when none do.
async function findFirst(serviceDir, candidates) {
  for (const candidate of candidates) {
    const absPath = import_node_path9.default.join(serviceDir, candidate);
    if (await readIfExists(absPath) !== null) return absPath;
  }
  return null;
}
|
|
1426
|
+
// Infer a database engine and version from a container image reference
// ("postgres:14-alpine", "library/mysql:8", ...). Returns null for
// non-database images.
//
// Fix: the tag separator is only the ":" that appears AFTER the last
// "/". The previous lastIndexOf(":") logic mis-parsed registry-with-port
// references like "registry:5000/postgres" (repo became "registry",
// tag "5000/postgres", result null).
function engineFromImage(image) {
  const lower = image.toLowerCase();
  const slash = lower.lastIndexOf("/");
  const colon = lower.lastIndexOf(":");
  const hasTag = colon > slash;
  const repo = hasTag ? lower.slice(0, colon) : lower;
  const tag = hasTag ? lower.slice(colon + 1) : "latest";
  const last = repo.split("/").pop() ?? repo;
  let engine = null;
  if (last.startsWith("postgres")) engine = "postgresql";
  else if (last.startsWith("mysql") || last.startsWith("mariadb")) engine = "mysql";
  else if (last.startsWith("mongo")) engine = "mongodb";
  else if (last.startsWith("redis")) engine = "redis";
  else if (last.startsWith("sqlite")) engine = "sqlite";
  if (!engine) return null;
  // Version = leading 1-3 numeric components of the tag, else "unknown".
  const versionMatch = tag.match(/^(\d+(?:\.\d+){0,2})/);
  return {
    engine,
    engineVersion: versionMatch ? versionMatch[1] : "unknown"
  };
}
|
|
1445
|
+
|
|
1446
|
+
// src/extract/databases/dotenv.ts
|
|
1447
|
+
// Environment variable names (compared upper-cased) recognized as
// database connection strings in .env files.
var CONNECTION_KEYS = /* @__PURE__ */ new Set([
  "DATABASE_URL",
  "DB_URL",
  "POSTGRES_URL",
  "POSTGRESQL_URL",
  "MYSQL_URL",
  "MONGODB_URI",
  "MONGO_URL",
  "MONGO_URI",
  "REDIS_URL"
]);
|
|
1458
|
+
// Parse one dotenv line into { key, value }, or null for blank lines,
// comment lines and lines without "=". Surrounding single or double
// quotes on the value are stripped; an inline "#" is NOT treated as a
// comment.
function parseDotenvLine(line) {
  const trimmed = line.trim();
  if (trimmed === "" || trimmed.startsWith("#")) return null;
  const eq = trimmed.indexOf("=");
  if (eq === -1) return null;
  const key = trimmed.slice(0, eq).trim();
  let value = trimmed.slice(eq + 1).trim();
  const doubleQuoted = value.startsWith('"') && value.endsWith('"');
  const singleQuoted = value.startsWith("'") && value.endsWith("'");
  if (doubleQuoted || singleQuoted) value = value.slice(1, -1);
  return { key, value };
}
|
|
1470
|
+
async function parse2(serviceDir) {
|
|
1471
|
+
const entries = await import_node_fs9.promises.readdir(serviceDir, { withFileTypes: true }).catch(() => []);
|
|
1472
|
+
const configs = [];
|
|
1473
|
+
const seen = /* @__PURE__ */ new Set();
|
|
1474
|
+
for (const entry2 of entries) {
|
|
1475
|
+
if (!entry2.isFile()) continue;
|
|
1476
|
+
const match = isConfigFile(entry2.name);
|
|
1477
|
+
if (!match.match || match.fileType !== "env") continue;
|
|
1478
|
+
const filePath = import_node_path10.default.join(serviceDir, entry2.name);
|
|
1479
|
+
const content = await import_node_fs9.promises.readFile(filePath, "utf8");
|
|
1480
|
+
for (const line of content.split("\n")) {
|
|
1481
|
+
const parsed = parseDotenvLine(line);
|
|
1482
|
+
if (!parsed) continue;
|
|
1483
|
+
if (!CONNECTION_KEYS.has(parsed.key.toUpperCase())) continue;
|
|
1484
|
+
const config = parseConnectionString(parsed.value);
|
|
1485
|
+
if (!config) continue;
|
|
1486
|
+
const key = `${config.engine}://${config.host}:${config.port ?? ""}/${config.database}`;
|
|
1487
|
+
if (seen.has(key)) continue;
|
|
1488
|
+
seen.add(key);
|
|
1489
|
+
configs.push({ ...config, sourceFile: filePath });
|
|
1490
|
+
}
|
|
1491
|
+
}
|
|
1492
|
+
return configs;
|
|
1493
|
+
}
|
|
1494
|
+
var dotenvParser = { name: ".env", parse: parse2 };
|
|
1495
|
+
|
|
1496
|
+
// src/extract/databases/prisma.ts
|
|
1497
|
+
var import_node_path11 = __toESM(require("path"), 1);
|
|
1498
|
+
// Extract database configs from a Prisma schema (prisma/schema.prisma).
// The first `datasource` block decides the engine (via its provider) and,
// when the url is an inline string literal, host/port/database are parsed
// from it.
async function parse3(serviceDir) {
  const schemaPath = import_node_path11.default.join(serviceDir, "prisma", "schema.prisma");
  const content = await readIfExists(schemaPath);
  if (!content) return [];
  const datasource = content.match(/datasource\s+\w+\s*\{([^}]*)\}/s);
  if (!datasource) return [];
  const blockBody = datasource[1] ?? "";
  const provider = blockBody.match(/provider\s*=\s*"([^"]+)"/);
  if (!provider) return [];
  const engine = schemeToEngine(provider[1]);
  if (!engine) return [];
  const url = blockBody.match(/url\s*=\s*"([^"]+)"/);
  if (url) {
    const parsed = parseConnectionString(url[1]);
    if (parsed) return [{ ...parsed, sourceFile: schemaPath }];
  }
  // No literal url (e.g. env("DATABASE_URL")): emit a placeholder host so
  // engine/driver compatibility checks can still run.
  return [
    {
      host: `${engine}-prisma`,
      database: "",
      engine,
      engineVersion: "unknown",
      sourceFile: schemaPath
    }
  ];
}
var prismaParser = { name: "prisma", parse: parse3 };
|
|
1525
|
+
|
|
1526
|
+
// src/extract/databases/drizzle.ts
|
|
1527
|
+
// Dialect names accepted in drizzle.config.* mapped to canonical engines.
var DIALECT_TO_ENGINE = {
  postgresql: "postgresql",
  postgres: "postgresql",
  pg: "postgresql",
  mysql: "mysql",
  mysql2: "mysql",
  sqlite: "sqlite",
  "better-sqlite": "sqlite"
};
// Extract database configs from a drizzle config file. Tries, in order: an
// inline connection string, discrete host/port/database fields, and finally
// a placeholder host when only the dialect is known.
async function parse4(serviceDir) {
  const configPath = await findFirst(serviceDir, [
    "drizzle.config.ts",
    "drizzle.config.js",
    "drizzle.config.mjs"
  ]);
  if (!configPath) return [];
  const source = await readIfExists(configPath);
  if (!source) return [];
  const dialect = source.match(/dialect\s*:\s*['"`]([^'"`]+)['"`]/);
  if (!dialect) return [];
  const engine = DIALECT_TO_ENGINE[dialect[1].toLowerCase()] ?? schemeToEngine(dialect[1]);
  if (!engine) return [];
  const url = source.match(/(?:url|connectionString)\s*:\s*['"`]([a-z][a-z+]*:\/\/[^'"`]+)['"`]/i);
  if (url) {
    const parsed = parseConnectionString(url[1]);
    if (parsed) return [{ ...parsed, sourceFile: configPath }];
  }
  const host = source.match(/host\s*:\s*['"`]([^'"`]+)['"`]/);
  if (host) {
    const port = source.match(/port\s*:\s*(\d+)/);
    const db = source.match(/database\s*:\s*['"`]([^'"`]+)['"`]/);
    return [
      {
        host: host[1],
        port: port ? Number(port[1]) : void 0,
        database: db?.[1] ?? "",
        engine,
        engineVersion: "unknown",
        sourceFile: configPath
      }
    ];
  }
  return [
    { host: `${engine}-drizzle`, database: "", engine, engineVersion: "unknown", sourceFile: configPath }
  ];
}
var drizzleParser = { name: "drizzle", parse: parse4 };
|
|
1576
|
+
|
|
1577
|
+
// src/extract/databases/knex.ts
|
|
1578
|
+
// Knex `client` values mapped to canonical engine names.
var CLIENT_TO_ENGINE = {
  pg: "postgresql",
  postgres: "postgresql",
  postgresql: "postgresql",
  mysql: "mysql",
  mysql2: "mysql",
  sqlite3: "sqlite",
  "better-sqlite3": "sqlite"
};
// Extract database configs from a knexfile. Recognizes an inline connection
// string first, then discrete host/port/database fields, then falls back to
// a placeholder host when only the client is known.
async function parse5(serviceDir) {
  const knexfile = await findFirst(serviceDir, [
    "knexfile.js",
    "knexfile.ts",
    "knexfile.cjs",
    "knexfile.mjs"
  ]);
  if (!knexfile) return [];
  const source = await readIfExists(knexfile);
  if (!source) return [];
  const client = source.match(/client\s*:\s*['"`]([^'"`]+)['"`]/);
  if (!client) return [];
  const engine = CLIENT_TO_ENGINE[client[1].toLowerCase()];
  if (!engine) return [];
  const connection = source.match(/connection\s*:\s*['"`]([a-z][a-z+]*:\/\/[^'"`]+)['"`]/i);
  if (connection) {
    const parsed = parseConnectionString(connection[1]);
    if (parsed) return [{ ...parsed, sourceFile: knexfile }];
  }
  const host = source.match(/host\s*:\s*['"`]([^'"`]+)['"`]/)?.[1];
  if (host) {
    const port = source.match(/port\s*:\s*(\d+)/)?.[1];
    const database = source.match(/database\s*:\s*['"`]([^'"`]+)['"`]/)?.[1] ?? "";
    return [
      {
        host,
        port: port ? Number(port) : void 0,
        database,
        engine,
        engineVersion: "unknown",
        sourceFile: knexfile
      }
    ];
  }
  return [{ host: `${engine}-knex`, database: "", engine, engineVersion: "unknown", sourceFile: knexfile }];
}
var knexParser = { name: "knex", parse: parse5 };
|
|
1626
|
+
|
|
1627
|
+
// src/extract/databases/ormconfig.ts
|
|
1628
|
+
var import_node_path12 = __toESM(require("path"), 1);
|
|
1629
|
+
// Extract database configs from TypeORM ormconfig files (JSON or YAML).
// The file may hold a single connection object or an array of them; the
// first candidate file yielding at least one usable connection wins.
async function parse6(serviceDir) {
  for (const candidate of ["ormconfig.json", "ormconfig.yaml", "ormconfig.yml"]) {
    const abs = import_node_path12.default.join(serviceDir, candidate);
    if (!await exists(abs)) continue;
    const raw = candidate.endsWith(".json") ? await readJson(abs) : await readYaml(abs);
    const connections = Array.isArray(raw) ? raw : [raw];
    const found = [];
    for (const connection of connections) {
      if (!connection?.type || !connection.host) continue;
      const engine = schemeToEngine(connection.type);
      if (!engine) continue;
      found.push({
        host: connection.host,
        port: connection.port,
        database: connection.database ?? "",
        engine,
        engineVersion: "unknown",
        sourceFile: abs
      });
    }
    if (found.length > 0) return found;
  }
  return [];
}
var ormconfigParser = { name: "ormconfig", parse: parse6 };
|
|
1654
|
+
|
|
1655
|
+
// src/extract/databases/typeorm.ts
|
|
1656
|
+
// Extract a database config from a TypeORM DataSource definition file.
async function parse7(serviceDir) {
  const sourcePath = await findFirst(serviceDir, [
    "data-source.ts",
    "data-source.js",
    "src/data-source.ts",
    "src/data-source.js"
  ]);
  if (!sourcePath) return [];
  const content = await readIfExists(sourcePath);
  if (!content) return [];
  // NOTE(review): the non-greedy {...} match stops at the first "}", so an
  // options object containing nested objects is truncated; when no block
  // matches at all, the whole file is scanned instead.
  const block = content.match(/new\s+DataSource\s*\(\s*\{([\s\S]*?)\}\s*\)/);
  const body = block ? block[1] : content;
  const typeMatch = body.match(/type\s*:\s*['"`]([^'"`]+)['"`]/);
  const host = body.match(/host\s*:\s*['"`]([^'"`]+)['"`]/)?.[1];
  if (!typeMatch || !host) return [];
  const engine = schemeToEngine(typeMatch[1]);
  if (!engine) return [];
  const port = body.match(/port\s*:\s*(\d+)/)?.[1];
  const database = body.match(/database\s*:\s*['"`]([^'"`]+)['"`]/)?.[1] ?? "";
  return [
    {
      host,
      port: port ? Number(port) : void 0,
      database,
      engine,
      engineVersion: "unknown",
      sourceFile: sourcePath
    }
  ];
}
var typeormParser = { name: "typeorm", parse: parse7 };
|
|
1687
|
+
|
|
1688
|
+
// src/extract/databases/sequelize.ts
|
|
1689
|
+
var import_node_path13 = __toESM(require("path"), 1);
|
|
1690
|
+
// Extract database configs from a sequelize-cli config/config.json, which
// maps environment names (development/test/production/...) to connection
// objects; identical connections across environments are de-duplicated.
async function parse8(serviceDir) {
  const configPath = import_node_path13.default.join(serviceDir, "config", "config.json");
  if (!await exists(configPath)) return [];
  const raw = await readJson(configPath);
  const found = [];
  const seenKeys = new Set();
  for (const envConfig of Object.values(raw)) {
    if (!envConfig?.dialect || !envConfig.host) continue;
    const engine = schemeToEngine(envConfig.dialect);
    if (!engine) continue;
    const dedupeKey = `${engine}://${envConfig.host}:${envConfig.port ?? ""}/${envConfig.database ?? ""}`;
    if (seenKeys.has(dedupeKey)) continue;
    seenKeys.add(dedupeKey);
    found.push({
      host: envConfig.host,
      port: envConfig.port,
      database: envConfig.database ?? "",
      engine,
      engineVersion: "unknown",
      sourceFile: configPath
    });
  }
  return found;
}
var sequelizeParser = { name: "sequelize", parse: parse8 };
|
|
1715
|
+
|
|
1716
|
+
// src/extract/databases/docker-compose.ts
|
|
1717
|
+
var import_node_path14 = __toESM(require("path"), 1);
|
|
1718
|
+
// Pick the first usable container-side port from a compose service.
// Port entries may be numbers or "host:container" strings; the segment
// after the last ":" is taken as the container port.
function portFromService(svc) {
  const declared = svc.ports ?? [];
  for (const entry of declared) {
    const containerPart = String(entry).split(":").pop();
    const parsed = Number(containerPart);
    if (Number.isFinite(parsed) && parsed > 0) {
      return parsed;
    }
  }
  return void 0;
}
|
|
1727
|
+
// Derive the initial database name from well-known container env variables.
// `environment` may be either a key/value mapping or a list of "KEY=value"
// strings (both compose forms).
//
// Fix over the original: list-form entries were split with `line.split("=")`,
// which truncated any value containing "=" ("DB=a=b" yielded "a"). We now
// split on the first "=" only.
function databaseFromEnv(svc) {
  const env = svc.environment;
  const get = (key) => {
    if (!env) return void 0;
    if (Array.isArray(env)) {
      for (const line of env) {
        const eq = line.indexOf("=");
        if (eq < 0) continue;
        if (line.slice(0, eq) === key) return line.slice(eq + 1);
      }
      return void 0;
    }
    return env[key];
  };
  return get("POSTGRES_DB") ?? get("MYSQL_DATABASE") ?? get("MONGO_INITDB_DATABASE") ?? "";
}
|
|
1742
|
+
// Extract database configs from docker-compose files: any service whose
// image matches a known database engine becomes a config, with the compose
// service name as the host.
async function parse9(serviceDir) {
  for (const name of ["docker-compose.yml", "docker-compose.yaml"]) {
    const abs = import_node_path14.default.join(serviceDir, name);
    if (!await exists(abs)) continue;
    const raw = await readYaml(abs);
    // A compose file without a `services` section tells us nothing; try the
    // next filename variant instead of giving up (the original returned []
    // here, skipping docker-compose.yaml whenever the .yml variant existed
    // but was empty or malformed).
    if (!raw?.services) continue;
    const out = [];
    for (const [serviceName, svc] of Object.entries(raw.services)) {
      if (!svc.image) continue;
      const meta = engineFromImage(svc.image);
      if (!meta) continue;
      out.push({
        host: serviceName,
        port: portFromService(svc),
        database: databaseFromEnv(svc),
        engine: meta.engine,
        engineVersion: meta.engineVersion,
        sourceFile: abs
      });
    }
    return out;
  }
  return [];
}
var dockerComposeParser = { name: "docker-compose", parse: parse9 };
|
|
1767
|
+
|
|
1768
|
+
// src/extract/databases/index.ts
|
|
1769
|
+
// All static database-config parsers, in priority order: when several
// parsers report the same host for a service, the first parser's config
// wins (see the first-writer-wins merge in addDatabasesAndCompat).
var DB_PARSERS = [
  dbConfigYamlParser,
  dotenvParser,
  prismaParser,
  drizzleParser,
  knexParser,
  ormconfigParser,
  typeormParser,
  sequelizeParser,
  dockerComposeParser
];
|
|
1780
|
+
// List the known driver packages (with their minimum supported versions)
// for a given database engine, from the compatibility pair table.
function compatibleDriversFor(engine) {
  const matching = compatPairs().filter((pair) => pair.engine === engine);
  return matching.map((pair) => ({ name: pair.driver, minVersion: pair.minDriverVersion }));
}
|
|
1783
|
+
// Build a DatabaseNode attribute object from a parsed database config.
// The display name prefers the database name and falls back to the host.
function toDatabaseNode(config) {
  const { host, port, engine, engineVersion, database } = config;
  return {
    id: (0, import_types7.databaseId)(host),
    type: import_types7.NodeType.DatabaseNode,
    name: database || host,
    engine,
    engineVersion,
    compatibleDrivers: compatibleDriversFor(engine),
    host,
    port
  };
}
|
|
1795
|
+
// Run every compatibility rule set against a service's declared dependencies
// and record failures on service.node.incompatibilities. Four rule families
// are checked: driver-vs-engine, package-vs-node-engine, package-vs-package
// conflicts, and deprecated APIs. A single `seen` set de-duplicates findings
// across all families via kind-prefixed keys.
function attachIncompatibilities(service, configs) {
  // Dev dependencies are merged in; a devDependency entry overrides the
  // regular dependency entry for the same package name.
  const deps = { ...service.pkg.dependencies ?? {}, ...service.pkg.devDependencies ?? {} };
  const incompatibilities = [];
  const seen = /* @__PURE__ */ new Set();
  // 1) Driver package versions vs. the discovered database engine versions.
  for (const config of configs) {
    for (const pair of compatPairs()) {
      if (pair.engine !== config.engine) continue;
      const declaredVersion = cleanVersion(deps[pair.driver]);
      if (!declaredVersion) continue;
      const result = checkCompatibility(
        pair.driver,
        declaredVersion,
        config.engine,
        config.engineVersion
      );
      if (!result.compatible && result.reason) {
        const key = `driver-engine|${pair.driver}@${declaredVersion}|${config.engine}@${config.engineVersion}`;
        if (seen.has(key)) continue;
        seen.add(key);
        incompatibilities.push({
          kind: "driver-engine",
          driver: pair.driver,
          driverVersion: declaredVersion,
          engine: config.engine,
          engineVersion: config.engineVersion,
          reason: result.reason
        });
      }
    }
  }
  // 2) Packages that require a minimum Node version vs. the service's
  // declared node engine (node attribute takes precedence over pkg.engines).
  const serviceNodeEngine = service.node.nodeEngine ?? service.pkg.engines?.node;
  for (const constraint of nodeEngineConstraints()) {
    const declared = cleanVersion(deps[constraint.package]);
    if (!declared) continue;
    const result = checkNodeEngineConstraint(constraint, declared, serviceNodeEngine);
    if (!result.compatible && result.reason) {
      const key = `node-engine|${constraint.package}@${declared}|${serviceNodeEngine ?? ""}`;
      if (seen.has(key)) continue;
      seen.add(key);
      incompatibilities.push({
        kind: "node-engine",
        package: constraint.package,
        packageVersion: declared,
        requiredNodeVersion: result.requiredNodeVersion ?? constraint.minNodeVersion,
        // Only include the declared engine when the service actually has one.
        ...serviceNodeEngine ? { declaredNodeEngine: serviceNodeEngine } : {},
        reason: result.reason
      });
    }
  }
  // 3) Package-vs-package version conflicts within the same dependency set.
  for (const conflict of packageConflicts()) {
    const declared = cleanVersion(deps[conflict.package]);
    if (!declared) continue;
    const requiredVersion = cleanVersion(deps[conflict.requires.name]);
    const result = checkPackageConflict(conflict, declared, requiredVersion);
    if (!result.compatible && result.reason) {
      const key = `package-conflict|${conflict.package}@${declared}|${conflict.requires.name}@${requiredVersion ?? "missing"}`;
      if (seen.has(key)) continue;
      seen.add(key);
      incompatibilities.push({
        kind: "package-conflict",
        package: conflict.package,
        packageVersion: declared,
        requires: conflict.requires,
        ...requiredVersion ? { foundVersion: requiredVersion } : {},
        reason: result.reason
      });
    }
  }
  // 4) Deprecated APIs keyed only on the declared package version.
  for (const rule of deprecatedApis()) {
    const declared = cleanVersion(deps[rule.package]);
    // Note: strict undefined check here (unlike the truthiness checks above),
    // so an empty-string version still reaches checkDeprecatedApi.
    if (declared === void 0) continue;
    const result = checkDeprecatedApi(rule, declared);
    if (!result.compatible && result.reason) {
      const key = `deprecated-api|${rule.package}@${declared}`;
      if (seen.has(key)) continue;
      seen.add(key);
      incompatibilities.push({
        kind: "deprecated-api",
        package: rule.package,
        packageVersion: declared,
        reason: result.reason
      });
    }
  }
  // Only attach the field when something was found; callers treat absence
  // as "no incompatibilities".
  if (incompatibilities.length > 0) service.node.incompatibilities = incompatibilities;
}
|
|
1881
|
+
// Run every DB parser over every service, add the discovered databases as
// graph nodes with CONNECTS_TO edges, and attach compatibility findings to
// each service node. Returns counts of nodes/edges added.
async function addDatabasesAndCompat(graph, services, scanPath) {
  let nodesAdded = 0;
  let edgesAdded = 0;
  for (const service of services) {
    // Merge parser outputs per host; the FIRST parser (DB_PARSERS order) to
    // report a host wins.
    const merged = /* @__PURE__ */ new Map();
    for (const parser of DB_PARSERS) {
      let configs;
      try {
        configs = await parser.parse(service.dir);
      } catch (err) {
        // One failing parser must not stop the others; warn and move on.
        console.warn(
          `[neat] ${parser.name} parser failed on ${service.node.name}: ${err.message}`
        );
        continue;
      }
      for (const config of configs) {
        if (!config.host) continue;
        if (!merged.has(config.host)) merged.set(config.host, config);
      }
    }
    const allConfigs = [...merged.values()];
    for (const config of allConfigs) {
      const dbNode = toDatabaseNode(config);
      if (!graph.hasNode(dbNode.id)) {
        graph.addNode(dbNode.id, { ...dbNode, discoveredVia: "static" });
        nodesAdded++;
      } else {
        // Node already present (possibly from OTel discovery): static
        // attributes overwrite, and the provenance becomes "merged" when
        // the node was previously seen via OTel.
        const existing = graph.getNodeAttributes(dbNode.id);
        const mergedDiscoveredVia = existing.discoveredVia === "otel" ? "merged" : "static";
        graph.replaceNodeAttributes(dbNode.id, {
          ...existing,
          ...dbNode,
          discoveredVia: mergedDiscoveredVia
        });
      }
      const edge = {
        id: (0, import_types4.extractedEdgeId)(service.node.id, dbNode.id, import_types7.EdgeType.CONNECTS_TO),
        source: service.node.id,
        target: dbNode.id,
        type: import_types7.EdgeType.CONNECTS_TO,
        provenance: import_types7.Provenance.EXTRACTED,
        // Evidence paths are normalized to forward slashes for portability.
        ...config.sourceFile ? {
          evidence: {
            file: import_node_path15.default.relative(scanPath, config.sourceFile).split(import_node_path15.default.sep).join("/")
          }
        } : {}
      };
      if (!graph.hasEdge(edge.id)) {
        graph.addEdgeWithKey(edge.id, edge.source, edge.target, edge);
        edgesAdded++;
      }
    }
    // Mutates service.node.incompatibilities in place when findings exist.
    attachIncompatibilities(service, allConfigs);
    if (graph.hasNode(service.node.id)) {
      const current = graph.getNodeAttributes(service.node.id);
      const updated = {
        ...current,
        ...service.node,
        // Preserve aliases accumulated on the graph node from elsewhere.
        ...current.aliases ? { aliases: current.aliases } : {}
      };
      // Drop a stale incompatibilities field when this run found none.
      if (!service.node.incompatibilities || service.node.incompatibilities.length === 0) {
        delete updated.incompatibilities;
      }
      graph.replaceNodeAttributes(service.node.id, updated);
    }
  }
  return { nodesAdded, edgesAdded };
}
|
|
1949
|
+
|
|
1950
|
+
// src/extract/configs.ts
|
|
1951
|
+
var import_node_fs10 = require("fs");
|
|
1952
|
+
var import_node_path16 = __toESM(require("path"), 1);
|
|
1953
|
+
var import_types8 = require("@neat.is/types");
|
|
1954
|
+
// Recursively collect config-file paths under `dir`, skipping IGNORED_DIRS.
// Returns absolute paths of every file matched by isConfigFile.
async function walkConfigFiles(dir) {
  const out = [];
  async function walk(current) {
    // Tolerate unreadable directories (permissions, races) the same way the
    // sibling walkSourceFiles does, instead of letting the whole scan throw
    // (the original readdir here was unguarded).
    const entries = await import_node_fs10.promises.readdir(current, { withFileTypes: true }).catch(() => []);
    for (const entry2 of entries) {
      const full = import_node_path16.default.join(current, entry2.name);
      if (entry2.isDirectory()) {
        if (!IGNORED_DIRS.has(entry2.name)) await walk(full);
      } else if (entry2.isFile() && isConfigFile(entry2.name).match) {
        out.push(full);
      }
    }
  }
  await walk(dir);
  return out;
}
|
|
1970
|
+
// Add a ConfigNode for every config file found under each service directory,
// plus a CONFIGURED_BY edge from the service to the config node. Returns
// counts of nodes/edges added.
async function addConfigNodes(graph, services, scanPath) {
  let nodesAdded = 0;
  let edgesAdded = 0;
  for (const service of services) {
    const configFiles = await walkConfigFiles(service.dir);
    for (const file of configFiles) {
      // Node identity is the scan-relative path, so the same file shared by
      // two services yields one node with two edges.
      const relPath = import_node_path16.default.relative(scanPath, file);
      const node = {
        id: (0, import_types8.configId)(relPath),
        type: import_types8.NodeType.ConfigNode,
        name: import_node_path16.default.basename(file),
        path: relPath,
        fileType: isConfigFile(import_node_path16.default.basename(file)).fileType
      };
      if (!graph.hasNode(node.id)) {
        graph.addNode(node.id, node);
        nodesAdded++;
      }
      const edge = {
        id: (0, import_types4.extractedEdgeId)(service.node.id, node.id, import_types8.EdgeType.CONFIGURED_BY),
        source: service.node.id,
        target: node.id,
        type: import_types8.EdgeType.CONFIGURED_BY,
        provenance: import_types8.Provenance.EXTRACTED,
        // Evidence paths use forward slashes regardless of platform.
        evidence: { file: relPath.split(import_node_path16.default.sep).join("/") }
      };
      if (!graph.hasEdge(edge.id)) {
        graph.addEdgeWithKey(edge.id, edge.source, edge.target, edge);
        edgesAdded++;
      }
    }
  }
  return { nodesAdded, edgesAdded };
}
|
|
2004
|
+
|
|
2005
|
+
// src/extract/calls/index.ts
|
|
2006
|
+
var import_types14 = require("@neat.is/types");
|
|
2007
|
+
|
|
2008
|
+
// src/extract/calls/http.ts
|
|
2009
|
+
var import_node_path18 = __toESM(require("path"), 1);
|
|
2010
|
+
var import_tree_sitter = __toESM(require("tree-sitter"), 1);
|
|
2011
|
+
var import_tree_sitter_javascript = __toESM(require("tree-sitter-javascript"), 1);
|
|
2012
|
+
var import_tree_sitter_python = __toESM(require("tree-sitter-python"), 1);
|
|
2013
|
+
var import_types9 = require("@neat.is/types");
|
|
2014
|
+
|
|
2015
|
+
// src/extract/calls/shared.ts
|
|
2016
|
+
var import_node_fs11 = require("fs");
|
|
2017
|
+
var import_node_path17 = __toESM(require("path"), 1);
|
|
2018
|
+
// Recursively gather source-file paths (by SERVICE_FILE_EXTENSIONS) under
// `dir`, skipping IGNORED_DIRS; unreadable directories are silently skipped.
async function walkSourceFiles(dir) {
  const found = [];
  const visit = async (current) => {
    const entries = await import_node_fs11.promises.readdir(current, { withFileTypes: true }).catch(() => []);
    for (const dirent of entries) {
      const fullPath = import_node_path17.default.join(current, dirent.name);
      if (dirent.isDirectory()) {
        if (IGNORED_DIRS.has(dirent.name)) continue;
        await visit(fullPath);
      } else if (dirent.isFile() && SERVICE_FILE_EXTENSIONS.has(import_node_path17.default.extname(dirent.name))) {
        found.push(fullPath);
      }
    }
  };
  await visit(dir);
  return found;
}
|
|
2034
|
+
// Read every source file under `dir` into { path, content } records.
// Files that fail to read are skipped -- best-effort by design; do not
// turn this into a hard error.
async function loadSourceFiles(dir) {
  const sourceFiles = [];
  for (const filePath of await walkSourceFiles(dir)) {
    try {
      const content = await import_node_fs11.promises.readFile(filePath, "utf8");
      sourceFiles.push({ path: filePath, content });
    } catch {
      // Unreadable file: intentionally ignored.
    }
  }
  return sourceFiles;
}
|
|
2046
|
+
// 1-based line number of the first occurrence of `needle` in `text`;
// returns 1 when the needle is absent.
function lineOf(text, needle) {
  const position = text.indexOf(needle);
  if (position < 0) return 1;
  let line = 1;
  for (let i = 0; i < position; i++) {
    if (text[i] === "\n") line++;
  }
  return line;
}
|
|
2051
|
+
// The trimmed text of the given 1-based line of `text`, or "" when the
// line number is out of range.
function snippet(text, line) {
  const target = text.split("\n")[line - 1];
  return target === undefined ? "" : target.trim();
}
|
|
2055
|
+
|
|
2056
|
+
// src/extract/calls/http.ts
|
|
2057
|
+
// Tree-sitter node types that carry the raw text of a string literal
// ("string_fragment" for JS/TS grammars, "string_content" for Python).
var STRING_LITERAL_NODE_TYPES = /* @__PURE__ */ new Set(["string_fragment", "string_content"]);
// Depth-first walk over named children, pushing the text of every
// string-literal node into `out`.
function collectStringLiterals(node, out) {
  if (STRING_LITERAL_NODE_TYPES.has(node.type)) {
    out.push(node.text);
  }
  for (let index = 0; index < node.namedChildCount; index++) {
    const child = node.namedChild(index);
    if (child) collectStringLiterals(child, out);
  }
}
// Parse `source` with the given tree-sitter parser and return the set of
// known hosts referenced by any string literal as a URL authority
// ("//host" or "//host:port").
function callsFromSource(source, parser, knownHosts) {
  const literals = [];
  collectStringLiterals(parser.parse(source).rootNode, literals);
  const targets = new Set();
  for (const literal of literals) {
    for (const host of knownHosts) {
      if (literal.includes(`//${host}`) || literal.includes(`//${host}:`)) {
        targets.add(host);
      }
    }
  }
  return targets;
}
|
|
2079
|
+
// Build a tree-sitter parser configured with the JavaScript grammar.
function makeJsParser() {
  const p = new import_tree_sitter.default();
  p.setLanguage(import_tree_sitter_javascript.default);
  return p;
}
// Build a tree-sitter parser configured with the Python grammar.
function makePyParser() {
  const p = new import_tree_sitter.default();
  p.setLanguage(import_tree_sitter_python.default);
  return p;
}
|
|
2089
|
+
// Detect service-to-service HTTP calls by scanning each service's source
// files for string literals that reference another service's host (its
// directory name or package name), and add CALLS edges with evidence.
// Returns the number of edges added.
async function addHttpCallEdges(graph, services) {
  const jsParser = makeJsParser();
  const pyParser = makePyParser();
  // Each service is addressable by its directory basename and package name.
  const knownHosts = /* @__PURE__ */ new Set();
  const hostToNodeId = /* @__PURE__ */ new Map();
  for (const service of services) {
    knownHosts.add(import_node_path18.default.basename(service.dir));
    knownHosts.add(service.pkg.name);
    hostToNodeId.set(import_node_path18.default.basename(service.dir), service.node.id);
    hostToNodeId.set(service.pkg.name, service.node.id);
  }
  let edgesAdded = 0;
  for (const service of services) {
    const files = await loadSourceFiles(service.dir);
    // First evidence (file + host) seen per target wins.
    const seenTargets = /* @__PURE__ */ new Map();
    for (const file of files) {
      // Python files use the Python grammar; everything else falls back to JS.
      const parser = import_node_path18.default.extname(file.path) === ".py" ? pyParser : jsParser;
      const targets = callsFromSource(file.content, parser, knownHosts);
      for (const t of targets) {
        const targetId = hostToNodeId.get(t);
        // Skip unknown hosts and self-calls.
        if (!targetId || targetId === service.node.id) continue;
        if (!seenTargets.has(targetId)) {
          seenTargets.set(targetId, { file: file.path, host: t });
        }
      }
    }
    for (const [targetId, evidenceFile] of seenTargets) {
      const fileContent = files.find((f) => f.path === evidenceFile.file)?.content ?? "";
      // Locate the "//host" occurrence for line/snippet evidence.
      const line = lineOf(fileContent, `//${evidenceFile.host}`);
      const edge = {
        id: (0, import_types4.extractedEdgeId)(service.node.id, targetId, import_types9.EdgeType.CALLS),
        source: service.node.id,
        target: targetId,
        type: import_types9.EdgeType.CALLS,
        provenance: import_types9.Provenance.EXTRACTED,
        evidence: {
          file: import_node_path18.default.relative(service.dir, evidenceFile.file),
          line,
          snippet: snippet(fileContent, line)
        }
      };
      if (!graph.hasEdge(edge.id)) {
        graph.addEdgeWithKey(edge.id, edge.source, edge.target, edge);
        edgesAdded++;
      }
    }
  }
  return edgesAdded;
}
|
|
2138
|
+
|
|
2139
|
+
// src/extract/calls/kafka.ts
|
|
2140
|
+
var import_node_path19 = __toESM(require("path"), 1);
|
|
2141
|
+
var import_types10 = require("@neat.is/types");
|
|
2142
|
+
// Matches KafkaJS-style `producer.send({ ... topic: "name" ... })` calls.
var PRODUCER_TOPIC_RE = /(?:producer|kafkaProducer)[\s\S]{0,40}?\.send\s*\(\s*\{[\s\S]{0,200}?topic\s*:\s*['"`]([^'"`]+)['"`]/g;
// Matches consumer.subscribe/run calls and captures the first listed topic.
var CONSUMER_TOPIC_RE = /(?:consumer|kafkaConsumer)[\s\S]{0,40}?\.(?:subscribe|run)\s*\(\s*\{[\s\S]{0,200}?topic[s]?\s*:\s*(?:\[\s*)?['"`]([^'"`]+)['"`]/g;
// Collect every capture of a /g regex with its match offset. lastIndex is
// reset first because global regexes are stateful across calls.
function findAll(re, text) {
  re.lastIndex = 0;
  const matches = [];
  for (let m = re.exec(text); m !== null; m = re.exec(text)) {
    matches.push({ topic: m[1], index: m.index });
  }
  return matches;
}
// Extract kafka-topic endpoints (publish and consume) from one source file,
// de-duplicated per (edge type, topic).
function kafkaEndpointsFromFile(file, serviceDir) {
  const endpoints = [];
  const seenKeys = new Set();
  const record = (topic, edgeType) => {
    const dedupeKey = `${edgeType}|${topic}`;
    if (seenKeys.has(dedupeKey)) return;
    seenKeys.add(dedupeKey);
    const line = lineOf(file.content, topic);
    endpoints.push({
      infraId: (0, import_types10.infraId)("kafka-topic", topic),
      name: topic,
      kind: "kafka-topic",
      edgeType,
      evidence: {
        file: import_node_path19.default.relative(serviceDir, file.path),
        line,
        snippet: snippet(file.content, line)
      }
    });
  };
  for (const { topic } of findAll(PRODUCER_TOPIC_RE, file.content)) record(topic, "PUBLISHES_TO");
  for (const { topic } of findAll(CONSUMER_TOPIC_RE, file.content)) record(topic, "CONSUMES_FROM");
  return endpoints;
}
|
|
2177
|
+
|
|
2178
|
+
// src/extract/calls/redis.ts
|
|
2179
|
+
var import_node_path20 = __toESM(require("path"), 1);
|
|
2180
|
+
var import_types11 = require("@neat.is/types");
|
|
2181
|
+
// Matches redis:// and rediss:// URLs, capturing the host (group 1) and an
// optional port (group 2); a leading user:pass@ credential block is skipped.
var REDIS_URL_RE = /redis(?:s)?:\/\/(?:[^@'"`\s]+@)?([^:/'"`\s]+)(?::(\d+))?/g;
// Extract redis endpoints from one source file, de-duplicated by host.
function redisEndpointsFromFile(file, serviceDir) {
  const endpoints = [];
  const seenHosts = new Set();
  REDIS_URL_RE.lastIndex = 0;
  for (let m = REDIS_URL_RE.exec(file.content); m !== null; m = REDIS_URL_RE.exec(file.content)) {
    const host = m[1];
    if (seenHosts.has(host)) continue;
    seenHosts.add(host);
    // NOTE(review): lineOf locates the first occurrence of the host string
    // anywhere in the file, which may not be this match's actual line.
    const line = lineOf(file.content, host);
    endpoints.push({
      infraId: (0, import_types11.infraId)("redis", host),
      name: host,
      kind: "redis",
      edgeType: "CALLS",
      evidence: {
        file: import_node_path20.default.relative(serviceDir, file.path),
        line,
        snippet: snippet(file.content, line)
      }
    });
  }
  return endpoints;
}
|
|
2206
|
+
|
|
2207
|
+
// src/extract/calls/aws.ts
|
|
2208
|
+
var import_node_path21 = __toESM(require("path"), 1);
|
|
2209
|
+
var import_types12 = require("@neat.is/types");
|
|
2210
|
+
// AWS SDK v3 command-input literals: `Bucket: '...'` and `TableName: '...'`.
// Only consulted after hasMarker() confirms the file references the matching
// SDK client, to avoid false positives from unrelated configuration objects.
var S3_BUCKET_RE = /Bucket\s*:\s*['"`]([^'"`]+)['"`]/g;
var DYNAMO_TABLE_RE = /TableName\s*:\s*['"`]([^'"`]+)['"`]/g;
|
|
2212
|
+
// True when at least one of the marker substrings appears anywhere in `text`.
function hasMarker(text, markers) {
  for (const marker of markers) {
    if (text.includes(marker)) return true;
  }
  return false;
}
|
|
2215
|
+
// Collect every match of a /g regex, returning the first capture group as
// `name` plus the match offset. Resets the regex's stateful lastIndex first.
function findAll2(re, text) {
  re.lastIndex = 0;
  const hits = [];
  for (let m = re.exec(text); m !== null; m = re.exec(text)) {
    hits.push({ name: m[1], index: m.index });
  }
  return hits;
}
|
|
2224
|
+
// Extract S3 bucket and DynamoDB table references from one source file.
// Bucket/TableName literals are only trusted when the file also references
// the corresponding AWS SDK client or command class, which filters out
// unrelated objects that happen to use the same key names.
function awsEndpointsFromFile(file, serviceDir) {
  const endpoints = [];
  const recorded = new Set();
  const record = (kind, name) => {
    const dedupeKey = `${kind}|${name}`;
    if (recorded.has(dedupeKey)) return;
    recorded.add(dedupeKey);
    const line = lineOf(file.content, name);
    endpoints.push({
      infraId: (0, import_types12.infraId)(kind, name),
      name,
      kind,
      edgeType: "CALLS",
      evidence: {
        file: import_node_path21.default.relative(serviceDir, file.path),
        line,
        snippet: snippet(file.content, line)
      }
    });
  };
  const s3Markers = ["S3Client", "PutObjectCommand", "GetObjectCommand", "DeleteObjectCommand"];
  if (hasMarker(file.content, s3Markers)) {
    for (const hit of findAll2(S3_BUCKET_RE, file.content)) record("s3-bucket", hit.name);
  }
  const dynamoMarkers = [
    "DynamoDBClient",
    "DynamoDBDocumentClient",
    "GetCommand",
    "PutCommand",
    "QueryCommand",
    "UpdateCommand",
    "DeleteCommand"
  ];
  if (hasMarker(file.content, dynamoMarkers)) {
    for (const hit of findAll2(DYNAMO_TABLE_RE, file.content)) record("dynamodb-table", hit.name);
  }
  return endpoints;
}
|
|
2260
|
+
|
|
2261
|
+
// src/extract/calls/grpc.ts
|
|
2262
|
+
var import_node_path22 = __toESM(require("path"), 1);
|
|
2263
|
+
var import_types13 = require("@neat.is/types");
|
|
2264
|
+
// Matches `new FooClient(...)` constructions: group 1 is the service stub name
// ("Foo"), group 2 is the (optional) first constructor argument, which may be
// a quoted address literal or a bare identifier.
var GRPC_CLIENT_RE = /new\s+([A-Z][A-Za-z0-9_]*)Client\s*\(\s*['"`]?([^,'"`)]+)?/g;
|
|
2265
|
+
// Heuristic: a string looks like a network address when it ends in a
// 2-5 digit port or contains a dot (hostname / IP). Empty and undefined
// values are never addresses.
function isLikelyAddress(value) {
  if (!value) return false;
  const endsWithPort = /:\d{2,5}$/.test(value);
  return endsWithPort || value.includes(".");
}
|
|
2269
|
+
// Find gRPC client constructions in one source file and return a CALLS
// endpoint per unique target. The endpoint name is the literal address when
// the first constructor argument looks like one, else the stub's service name.
function grpcEndpointsFromFile(file, serviceDir) {
  const endpoints = [];
  const namesSeen = new Set();
  GRPC_CLIENT_RE.lastIndex = 0;
  for (let m = GRPC_CLIENT_RE.exec(file.content); m !== null; m = GRPC_CLIENT_RE.exec(file.content)) {
    const stubName = m[1];
    const target = m[2]?.trim();
    const name = isLikelyAddress(target) ? target : stubName;
    if (namesSeen.has(name)) continue;
    namesSeen.add(name);
    // Anchor the evidence on the whole `new XClient(` expression.
    const line = lineOf(file.content, m[0]);
    endpoints.push({
      infraId: (0, import_types13.infraId)("grpc-service", name),
      name,
      kind: "grpc-service",
      edgeType: "CALLS",
      evidence: {
        file: import_node_path22.default.relative(serviceDir, file.path),
        line,
        snippet: snippet(file.content, line)
      }
    });
  }
  return endpoints;
}
|
|
2295
|
+
|
|
2296
|
+
// src/extract/calls/index.ts
|
|
2297
|
+
// Map an endpoint's string edgeType tag onto the typed EdgeType enum;
// anything that is not a messaging direction is a plain CALLS edge.
function edgeTypeFromEndpoint(ep) {
  if (ep.edgeType === "PUBLISHES_TO") return import_types14.EdgeType.PUBLISHES_TO;
  if (ep.edgeType === "CONSUMES_FROM") return import_types14.EdgeType.CONSUMES_FROM;
  return import_types14.EdgeType.CALLS;
}
|
|
2307
|
+
// For every discovered service, scan its source files for external endpoints
// (Kafka topics, Redis hosts, S3/DynamoDB resources, gRPC targets), add an
// InfraNode per endpoint and an edge from the service to it.
// Returns counts of nodes/edges actually inserted.
async function addExternalEndpointEdges(graph, services) {
  let nodesAdded = 0;
  let edgesAdded = 0;
  for (const service of services) {
    const files = await loadSourceFiles(service.dir);
    const endpoints = [];
    for (const file of files) {
      endpoints.push(...kafkaEndpointsFromFile(file, service.dir));
      endpoints.push(...redisEndpointsFromFile(file, service.dir));
      endpoints.push(...awsEndpointsFromFile(file, service.dir));
      endpoints.push(...grpcEndpointsFromFile(file, service.dir));
    }
    if (endpoints.length === 0) continue;
    // Per-service edge dedupe: the same endpoint may appear in many files.
    const seenEdges = /* @__PURE__ */ new Set();
    for (const ep of endpoints) {
      if (!graph.hasNode(ep.infraId)) {
        const node = {
          id: ep.infraId,
          type: import_types14.NodeType.InfraNode,
          name: ep.name,
          // s3-* / dynamodb-* kinds are AWS-managed; everything else is
          // assumed self-hosted.
          provider: ep.kind.startsWith("s3") || ep.kind.startsWith("dynamodb") ? "aws" : "self",
          kind: ep.kind
        };
        graph.addNode(node.id, node);
        nodesAdded++;
      }
      const edgeType = edgeTypeFromEndpoint(ep);
      const edgeId = (0, import_types4.extractedEdgeId)(service.node.id, ep.infraId, edgeType);
      if (seenEdges.has(edgeId)) continue;
      seenEdges.add(edgeId);
      // Also skip edges already present in the graph (e.g. from a prior run);
      // evidence keeps the first sighting only.
      if (!graph.hasEdge(edgeId)) {
        const edge = {
          id: edgeId,
          source: service.node.id,
          target: ep.infraId,
          type: edgeType,
          provenance: import_types14.Provenance.EXTRACTED,
          evidence: ep.evidence
        };
        graph.addEdgeWithKey(edgeId, edge.source, edge.target, edge);
        edgesAdded++;
      }
    }
  }
  return { nodesAdded, edgesAdded };
}
|
|
2353
|
+
// Run the HTTP-call extractor and then the external-endpoint extractor.
// Sequential on purpose — both mutate the same graph instance.
async function addCallEdges(graph, services) {
  const httpEdgeCount = await addHttpCallEdges(graph, services);
  const external = await addExternalEndpointEdges(graph, services);
  return {
    nodesAdded: external.nodesAdded,
    edgesAdded: httpEdgeCount + external.edgesAdded
  };
}
|
|
2361
|
+
|
|
2362
|
+
// src/extract/infra/docker-compose.ts
|
|
2363
|
+
var import_node_path23 = __toESM(require("path"), 1);
|
|
2364
|
+
var import_types16 = require("@neat.is/types");
|
|
2365
|
+
|
|
2366
|
+
// src/extract/infra/shared.ts
|
|
2367
|
+
var import_types15 = require("@neat.is/types");
|
|
2368
|
+
// Build an InfraNode record; `region` is attached only when provided, so the
// shape of nodes without a region stays minimal.
function makeInfraNode(kind, name, provider = "self", extras) {
  const node = {
    id: (0, import_types15.infraId)(kind, name),
    type: import_types15.NodeType.InfraNode,
    name,
    provider,
    kind
  };
  if (extras?.region) node.region = extras.region;
  return node;
}
|
|
2378
|
+
// Classify a container image reference into a coarse infra kind.
//
// Fixes over the previous version:
// - Tag stripping operated on the full reference (`split(":")[0]`), so images
//   pulled from a registry with a port ("registry:5000/postgres:14") were
//   reduced to the registry host and misclassified as "container". We now
//   isolate the final path segment first, then strip digest and tag.
// - Digest references ("redis@sha256:...") are now handled.
// - Dropped the redundant `startsWith("kafka") ||` clause (subsumed by
//   `includes("kafka")`).
function classifyImage(image) {
  const lower = image.toLowerCase();
  // Final path segment: "bitnami/kafka:3" -> "kafka:3".
  const segment = lower.split("/").pop() ?? lower;
  // Strip "@sha256:..." digest first, then the ":tag" suffix.
  const last = segment.split("@")[0].split(":")[0];
  if (last.startsWith("postgres")) return "postgres";
  if (last.startsWith("mysql") || last.startsWith("mariadb")) return "mysql";
  if (last.startsWith("mongo")) return "mongodb";
  if (last.startsWith("redis")) return "redis";
  if (last.startsWith("rabbitmq")) return "rabbitmq";
  if (last.includes("kafka")) return "kafka";
  if (last.startsWith("memcached")) return "memcached";
  return "container";
}
|
|
2391
|
+
|
|
2392
|
+
// src/extract/infra/docker-compose.ts
|
|
2393
|
+
// Compose's depends_on is either absent, a list of service names, or a map of
// name -> condition object; normalize all three to a plain array of names.
function dependsOnList(value) {
  if (!value) return [];
  return Array.isArray(value) ? value : Object.keys(value);
}
|
|
2398
|
+
// Resolve a compose service name to a discovered service node id, matching on
// the node's name or on the service directory's basename; null when unknown.
function serviceNameToServiceNode(name, services) {
  const match = services.find(
    (s) => s.node.name === name || import_node_path23.default.basename(s.dir) === name
  );
  return match ? match.node.id : null;
}
|
|
2404
|
+
// Read the repo-root docker-compose file (if any) and materialize its
// services and depends_on relationships in the graph. Compose services that
// match a discovered source service reuse that service's node; all others
// become InfraNodes classified by image.
async function addComposeInfra(graph, scanPath, services) {
  let nodesAdded = 0;
  let edgesAdded = 0;
  // .yml wins over .yaml when both exist; only the scan root is checked.
  let composePath = null;
  for (const name of ["docker-compose.yml", "docker-compose.yaml"]) {
    const abs = import_node_path23.default.join(scanPath, name);
    if (await exists(abs)) {
      composePath = abs;
      break;
    }
  }
  if (!composePath) return { nodesAdded, edgesAdded };
  const compose = await readYaml(composePath);
  if (!compose?.services) return { nodesAdded, edgesAdded };
  // Normalize separators so evidence paths are stable across OSes.
  const evidenceFile = import_node_path23.default.relative(scanPath, composePath).split(import_node_path23.default.sep).join("/");
  // Pass 1: map each compose service name to a graph node id.
  const composeNameToNodeId = /* @__PURE__ */ new Map();
  for (const [composeName, svc] of Object.entries(compose.services)) {
    const matchedServiceId = serviceNameToServiceNode(composeName, services);
    if (matchedServiceId) {
      composeNameToNodeId.set(composeName, matchedServiceId);
      continue;
    }
    const kind = svc.image ? classifyImage(svc.image) : "container";
    const node = makeInfraNode(kind, composeName);
    if (!graph.hasNode(node.id)) {
      graph.addNode(node.id, node);
      nodesAdded++;
    }
    composeNameToNodeId.set(composeName, node.id);
  }
  // Pass 2: depends_on entries become DEPENDS_ON edges between mapped nodes.
  // Runs after pass 1 so forward references between compose services resolve.
  for (const [composeName, svc] of Object.entries(compose.services)) {
    const sourceId = composeNameToNodeId.get(composeName);
    if (!sourceId) continue;
    for (const dep of dependsOnList(svc.depends_on)) {
      const targetId = composeNameToNodeId.get(dep);
      if (!targetId) continue;
      const edgeId = (0, import_types4.extractedEdgeId)(sourceId, targetId, import_types16.EdgeType.DEPENDS_ON);
      if (graph.hasEdge(edgeId)) continue;
      const edge = {
        id: edgeId,
        source: sourceId,
        target: targetId,
        type: import_types16.EdgeType.DEPENDS_ON,
        provenance: import_types16.Provenance.EXTRACTED,
        evidence: { file: evidenceFile }
      };
      graph.addEdgeWithKey(edgeId, edge.source, edge.target, edge);
      edgesAdded++;
    }
  }
  return { nodesAdded, edgesAdded };
}
|
|
2456
|
+
|
|
2457
|
+
// src/extract/infra/dockerfile.ts
|
|
2458
|
+
var import_node_path24 = __toESM(require("path"), 1);
|
|
2459
|
+
var import_node_fs12 = require("fs");
|
|
2460
|
+
var import_types17 = require("@neat.is/types");
|
|
2461
|
+
// Return the runtime base image of a Dockerfile: the image of the LAST
// non-scratch FROM line (multi-stage builds run the final stage).
//
// Fixes over the previous version:
// - `FROM --platform=linux/amd64 node:18` no longer returns the flag token;
//   option flags after FROM are skipped.
// - `FROM base AS build` ... `FROM build` no longer returns the internal
//   stage alias; aliases are tracked and resolved to their underlying image.
// Returns null when no usable FROM line exists.
function runtimeImage(content) {
  const stageAliases = new Map();
  let last = null;
  for (const raw of content.split("\n")) {
    const line = raw.trim();
    if (!line || line.startsWith("#")) continue;
    if (!/^from\s+/i.test(line)) continue;
    const tokens = line.split(/\s+/);
    // Skip option flags such as "--platform=...".
    let i = 1;
    while (i < tokens.length && tokens[i].startsWith("--")) i++;
    let image = tokens[i];
    if (!image) continue;
    // Resolve references to earlier build stages.
    const resolved = stageAliases.get(image.toLowerCase());
    if (resolved) image = resolved;
    // Record this stage's alias ("AS name"), mapping it to the real image.
    if (tokens[i + 1]?.toLowerCase() === "as" && tokens[i + 2]) {
      stageAliases.set(tokens[i + 2].toLowerCase(), image);
    }
    if (image.toLowerCase() === "scratch") continue;
    last = image;
  }
  return last;
}
|
|
2475
|
+
// For each service with a Dockerfile, add a container-image InfraNode for its
// runtime base image and a RUNS_ON edge from the service to that image.
// Services without a Dockerfile or without a usable FROM line are skipped.
async function addDockerfileRuntimes(graph, services, scanPath) {
  let nodesAdded = 0;
  let edgesAdded = 0;
  for (const service of services) {
    // Only the conventional "Dockerfile" at the service root is considered.
    const dockerfilePath = import_node_path24.default.join(service.dir, "Dockerfile");
    if (!await exists(dockerfilePath)) continue;
    const content = await import_node_fs12.promises.readFile(dockerfilePath, "utf8");
    const image = runtimeImage(content);
    if (!image) continue;
    const node = makeInfraNode("container-image", image);
    // Image nodes are shared: two services on node:18 point at one node.
    if (!graph.hasNode(node.id)) {
      graph.addNode(node.id, node);
      nodesAdded++;
    }
    const edgeId = (0, import_types4.extractedEdgeId)(service.node.id, node.id, import_types17.EdgeType.RUNS_ON);
    if (!graph.hasEdge(edgeId)) {
      const edge = {
        id: edgeId,
        source: service.node.id,
        target: node.id,
        type: import_types17.EdgeType.RUNS_ON,
        provenance: import_types17.Provenance.EXTRACTED,
        evidence: {
          // Forward slashes keep evidence paths stable across OSes.
          file: import_node_path24.default.relative(scanPath, dockerfilePath).split(import_node_path24.default.sep).join("/")
        }
      };
      graph.addEdgeWithKey(edgeId, edge.source, edge.target, edge);
      edgesAdded++;
    }
  }
  return { nodesAdded, edgesAdded };
}
|
|
2507
|
+
|
|
2508
|
+
// src/extract/infra/terraform.ts
|
|
2509
|
+
var import_node_fs13 = require("fs");
|
|
2510
|
+
var import_node_path25 = __toESM(require("path"), 1);
|
|
2511
|
+
// Matches Terraform HCL resource headers for AWS providers only:
// `resource "aws_xxx" "name"` — group 1 is the resource type, group 2 the name.
var RESOURCE_RE = /resource\s+"(aws_[A-Za-z0-9_]+)"\s+"([A-Za-z0-9_-]+)"/g;
|
|
2512
|
+
// Recursively collect *.tf file paths up to `max` directory levels deep,
// skipping ignored directories and Terraform's own .terraform cache.
// Unreadable directories are silently treated as empty.
async function walkTfFiles(start, depth = 0, max = 5) {
  if (depth > max) return [];
  const entries = await import_node_fs13.promises.readdir(start, { withFileTypes: true }).catch(() => []);
  const found = [];
  for (const dirent of entries) {
    const full = import_node_path25.default.join(start, dirent.name);
    if (dirent.isDirectory()) {
      const skip = IGNORED_DIRS.has(dirent.name) || dirent.name === ".terraform";
      if (!skip) found.push(...await walkTfFiles(full, depth + 1, max));
    } else if (dirent.isFile() && dirent.name.endsWith(".tf")) {
      found.push(full);
    }
  }
  return found;
}
|
|
2526
|
+
// Scan .tf files for aws_* resource declarations and add one AWS InfraNode
// per unique (type, name); Terraform produces nodes only, never edges.
async function addTerraformResources(graph, scanPath) {
  let nodesAdded = 0;
  for (const file of await walkTfFiles(scanPath)) {
    const content = await import_node_fs13.promises.readFile(file, "utf8");
    RESOURCE_RE.lastIndex = 0;
    for (let m = RESOURCE_RE.exec(content); m !== null; m = RESOURCE_RE.exec(content)) {
      const node = makeInfraNode(m[1], m[2], "aws");
      if (graph.hasNode(node.id)) continue;
      graph.addNode(node.id, node);
      nodesAdded++;
    }
  }
  return { nodesAdded, edgesAdded: 0 };
}
|
|
2545
|
+
|
|
2546
|
+
// src/extract/infra/k8s.ts
|
|
2547
|
+
var import_node_fs14 = require("fs");
|
|
2548
|
+
var import_node_path26 = __toESM(require("path"), 1);
|
|
2549
|
+
var import_yaml3 = require("yaml");
|
|
2550
|
+
// Kubernetes manifest `kind` values we recognize, mapped to internal infra
// kinds. Manifests of any other kind (ConfigMap, Secret, CRDs, ...) are ignored.
var K8S_KIND_TO_INFRA_KIND = {
  Service: "k8s-service",
  Deployment: "k8s-deployment",
  StatefulSet: "k8s-statefulset",
  DaemonSet: "k8s-daemonset",
  CronJob: "k8s-cronjob",
  Job: "k8s-job",
  Ingress: "k8s-ingress"
};
|
|
2559
|
+
// Recursively collect YAML/config file paths (by CONFIG_FILE_EXTENSIONS) up
// to `max` directory levels deep, skipping ignored directories. Unreadable
// directories are silently treated as empty.
async function walkYamlFiles2(start, depth = 0, max = 5) {
  if (depth > max) return [];
  const entries = await import_node_fs14.promises.readdir(start, { withFileTypes: true }).catch(() => []);
  const found = [];
  for (const dirent of entries) {
    const full = import_node_path26.default.join(start, dirent.name);
    if (dirent.isDirectory()) {
      if (!IGNORED_DIRS.has(dirent.name)) {
        found.push(...await walkYamlFiles2(full, depth + 1, max));
      }
    } else if (dirent.isFile() && CONFIG_FILE_EXTENSIONS.has(import_node_path26.default.extname(dirent.name))) {
      found.push(full);
    }
  }
  return found;
}
|
|
2573
|
+
// Parse every YAML file under scanPath as (possibly multi-document) Kubernetes
// manifests and add a kubernetes-provider InfraNode per recognized workload,
// named "namespace/name" when a namespace is set. Unparseable files are skipped.
async function addK8sResources(graph, scanPath) {
  let nodesAdded = 0;
  for (const file of await walkYamlFiles2(scanPath)) {
    const content = await import_node_fs14.promises.readFile(file, "utf8");
    let docs;
    try {
      docs = (0, import_yaml3.parseAllDocuments)(content).map((d) => d.toJSON());
    } catch {
      continue;
    }
    for (const doc of docs) {
      if (!doc?.kind || !doc.metadata?.name) continue;
      const infraKind = K8S_KIND_TO_INFRA_KIND[doc.kind];
      if (!infraKind) continue;
      const ns = doc.metadata.namespace;
      const qualifiedName = ns ? `${ns}/${doc.metadata.name}` : doc.metadata.name;
      const node = makeInfraNode(infraKind, qualifiedName, "kubernetes");
      if (graph.hasNode(node.id)) continue;
      graph.addNode(node.id, node);
      nodesAdded++;
    }
  }
  return { nodesAdded, edgesAdded: 0 };
}
|
|
2598
|
+
|
|
2599
|
+
// src/extract/infra/index.ts
|
|
2600
|
+
// Run all infra extractors and sum their node/edge counts.
// Sequential on purpose — every extractor mutates the same graph instance.
async function addInfra(graph, scanPath, services) {
  const results = [
    await addComposeInfra(graph, scanPath, services),
    await addDockerfileRuntimes(graph, services, scanPath),
    await addTerraformResources(graph, scanPath),
    await addK8sResources(graph, scanPath)
  ];
  let nodesAdded = 0;
  let edgesAdded = 0;
  for (const result of results) {
    nodesAdded += result.nodesAdded;
    edgesAdded += result.edgesAdded;
  }
  return { nodesAdded, edgesAdded };
}
|
|
2610
|
+
|
|
2611
|
+
// src/extract/index.ts
|
|
2612
|
+
// Full extraction pipeline over one repository directory. Phases run in a
// fixed order because later phases consume nodes created by earlier ones.
// Returns aggregate counts of nodes/edges added and frontiers promoted.
async function extractFromDirectory(graph, scanPath, opts = {}) {
  await ensureCompatLoaded();
  const services = await discoverServices(scanPath);
  // Phase 1: one node per discovered service (+ alias resolution).
  const phase1Nodes = addServiceNodes(graph, services);
  await addServiceAliases(graph, scanPath, services);
  // Phase 2: databases + driver-compatibility info.
  const phase2 = await addDatabasesAndCompat(graph, services, scanPath);
  // Phase 3: configuration nodes.
  const phase3 = await addConfigNodes(graph, services, scanPath);
  // Phase 4: HTTP / Kafka / Redis / AWS / gRPC call edges.
  const phase4 = await addCallEdges(graph, services);
  // Phase 5: compose / Dockerfile / Terraform / K8s infrastructure.
  const phase5 = await addInfra(graph, scanPath, services);
  const frontiersPromoted = promoteFrontierNodes(graph);
  // Optional hook, awaited so callers can run policies before we return.
  if (opts.onPolicyTrigger) await opts.onPolicyTrigger(graph);
  return {
    nodesAdded: phase1Nodes + phase2.nodesAdded + phase3.nodesAdded + phase4.nodesAdded + phase5.nodesAdded,
    edgesAdded: phase2.edgesAdded + phase3.edgesAdded + phase4.edgesAdded + phase5.edgesAdded,
    frontiersPromoted
  };
}
|
|
2629
|
+
|
|
2630
|
+
// src/persist.ts
|
|
2631
|
+
var import_node_fs15 = require("fs");
|
|
2632
|
+
var import_node_path27 = __toESM(require("path"), 1);
|
|
2633
|
+
// Current on-disk snapshot schema version; v1 snapshots are migrated on load.
var SCHEMA_VERSION = 2;
|
|
2634
|
+
// Migrate a v1 snapshot payload to v2: the only change is dropping the
// retired `pgDriverVersion` node attribute. Returns a shallow-copied payload
// stamped with schemaVersion 2.
//
// Improvement: replaces `delete node.attributes.pgDriverVersion` with a
// rest-destructure onto a fresh attributes object — avoids the `delete`
// hidden-class deoptimization and leaves the original attributes object of a
// shared reference untouched.
function migrateV1ToV2(payload) {
  const nodes = payload.graph.nodes;
  if (Array.isArray(nodes)) {
    for (const node of nodes) {
      if (node.attributes && "pgDriverVersion" in node.attributes) {
        const { pgDriverVersion, ...rest } = node.attributes;
        node.attributes = rest;
      }
    }
  }
  return { ...payload, schemaVersion: 2 };
}
|
|
2645
|
+
// Create the parent directory of `filePath` (and any ancestors) if missing.
async function ensureDir(filePath) {
  const dir = import_node_path27.default.dirname(filePath);
  await import_node_fs15.promises.mkdir(dir, { recursive: true });
}
|
|
2648
|
+
// Serialize the graph to a versioned JSON snapshot at outPath.
// Write-then-rename so readers never observe a half-written file.
async function saveGraphToDisk(graph, outPath) {
  await ensureDir(outPath);
  const snapshot = {
    schemaVersion: SCHEMA_VERSION,
    exportedAt: new Date().toISOString(),
    graph: graph.export()
  };
  const tmpPath = `${outPath}.tmp`;
  await import_node_fs15.promises.writeFile(tmpPath, JSON.stringify(snapshot), "utf8");
  await import_node_fs15.promises.rename(tmpPath, outPath);
}
|
|
2659
|
+
// Load a snapshot from disk into `graph`, replacing its contents.
// A missing file is a first-run no-op; v1 snapshots are migrated to v2;
// any other schema version is a hard error.
async function loadGraphFromDisk(graph, outPath) {
  let raw;
  try {
    raw = await import_node_fs15.promises.readFile(outPath, "utf8");
  } catch (err) {
    // No snapshot yet — leave the in-memory graph untouched.
    if (err.code === "ENOENT") return;
    throw err;
  }
  let payload = JSON.parse(raw);
  if (payload.schemaVersion === 1) {
    payload = migrateV1ToV2(payload);
  }
  if (payload.schemaVersion !== SCHEMA_VERSION) {
    throw new Error(
      `persist: unsupported snapshot schemaVersion ${payload.schemaVersion} (expected ${SCHEMA_VERSION})`
    );
  }
  // Replace, never merge: snapshot is the source of truth.
  graph.clear();
  graph.import(payload.graph);
}
|
|
2679
|
+
// Periodically persist `graph` to disk (default: every 60s) and flush once
// more on SIGTERM/SIGINT before exiting. Returns a stop function that cancels
// the timer and detaches the signal handlers.
// NOTE(review): the stop function does NOT perform a final save — callers who
// need a flush on orderly shutdown must call saveGraphToDisk themselves.
function startPersistLoop(graph, outPath, intervalMs = 6e4) {
  let stopped = false;
  const tick = async () => {
    if (stopped) return;
    try {
      await saveGraphToDisk(graph, outPath);
    } catch (err) {
      // Periodic saves are best-effort; the next tick will retry.
      console.error("persist: periodic save failed", err);
    }
  };
  const interval = setInterval(() => {
    // Fire-and-forget: setInterval cannot await, tick handles its own errors.
    void tick();
  }, intervalMs);
  const onSignal = (signal) => {
    void (async () => {
      try {
        await saveGraphToDisk(graph, outPath);
      } catch (err) {
        console.error(`persist: ${signal} save failed`, err);
      } finally {
        // Exit unconditionally (even on save failure) so the process cannot
        // hang on shutdown. Always exits 0, including on SIGINT/SIGTERM.
        process.exit(0);
      }
    })();
  };
  process.on("SIGTERM", onSignal);
  process.on("SIGINT", onSignal);
  return () => {
    stopped = true;
    clearInterval(interval);
    process.off("SIGTERM", onSignal);
    process.off("SIGINT", onSignal);
  };
}
|
|
2712
|
+
|
|
2713
|
+
// src/projects.ts
|
|
2714
|
+
var import_node_path28 = __toESM(require("path"), 1);
|
|
2715
|
+
// Compute the on-disk file layout for a project's persisted artifacts.
// The default project uses bare filenames; named projects get the project
// name embedded in each filename so they can share one base directory.
function pathsForProject(project, baseDir) {
  const at = (name) => import_node_path28.default.join(baseDir, name);
  if (project === DEFAULT_PROJECT) {
    return {
      snapshotPath: at("graph.json"),
      errorsPath: at("errors.ndjson"),
      staleEventsPath: at("stale-events.ndjson"),
      embeddingsCachePath: at("embeddings.json"),
      policyViolationsPath: at("policy-violations.ndjson")
    };
  }
  return {
    snapshotPath: at(`${project}.json`),
    errorsPath: at(`errors.${project}.ndjson`),
    staleEventsPath: at(`stale-events.${project}.ndjson`),
    embeddingsCachePath: at(`embeddings.${project}.json`),
    policyViolationsPath: at(`policy-violations.${project}.ndjson`)
  };
}
|
|
2733
|
+
|
|
2734
|
+
// src/registry.ts
|
|
2735
|
+
var import_node_fs16 = require("fs");
|
|
2736
|
+
var import_node_os2 = __toESM(require("os"), 1);
|
|
2737
|
+
var import_node_path29 = __toESM(require("path"), 1);
|
|
2738
|
+
var import_types18 = require("@neat.is/types");
|
|
2739
|
+
// Registry file-lock tuning: give up after 5s of waiting, poll every 50ms.
var LOCK_TIMEOUT_MS = 5e3;
var LOCK_RETRY_MS = 50;
|
|
2741
|
+
// Resolve the neat state directory: NEAT_HOME env override when set and
// non-empty, otherwise ~/.neat.
function neatHome() {
  const override = process.env.NEAT_HOME;
  if (override && override.length > 0) {
    return import_node_path29.default.resolve(override);
  }
  return import_node_path29.default.join(import_node_os2.default.homedir(), ".neat");
}
|
|
2746
|
+
// Path of the project registry file inside the neat home directory.
function registryPath() {
  return import_node_path29.default.join(neatHome(), "projects.json");
}
|
|
2749
|
+
// Path of the lock file guarding writes to the project registry.
function registryLockPath() {
  return import_node_path29.default.join(neatHome(), "projects.json.lock");
}
|
|
2752
|
+
// Durably replace `target` with `contents`: write to a uniquely-named temp
// file, fsync it, then rename over the target. Readers see either the old or
// the new file, never a partial write.
async function writeAtomically(target, contents) {
  await import_node_fs16.promises.mkdir(import_node_path29.default.dirname(target), { recursive: true });
  // pid + timestamp + random suffix keeps concurrent writers from colliding
  // on the temp file name.
  const tmp = `${target}.${process.pid}.${Date.now()}.${Math.random().toString(36).slice(2, 8)}.tmp`;
  const fd = await import_node_fs16.promises.open(tmp, "w");
  try {
    await fd.writeFile(contents, "utf8");
    // fsync before rename so the rename can never publish an empty file.
    await fd.sync();
  } finally {
    await fd.close();
  }
  // rename() is atomic on POSIX filesystems within the same volume.
  await import_node_fs16.promises.rename(tmp, target);
}
|
|
2764
|
+
// Acquire an exclusive advisory lock by atomically creating the lock file.
// Polls until the file can be created or the deadline passes, then throws.
// NOTE(review): a lock left behind by a crashed process is never reclaimed
// automatically — the error message tells the operator to remove it by hand.
async function acquireLock(lockPath, timeoutMs = LOCK_TIMEOUT_MS) {
  const deadline = Date.now() + timeoutMs;
  await import_node_fs16.promises.mkdir(import_node_path29.default.dirname(lockPath), { recursive: true });
  while (true) {
    try {
      // "wx" fails with EEXIST when the file already exists — the atomic
      // create-if-absent primitive this lock is built on.
      const fd = await import_node_fs16.promises.open(lockPath, "wx");
      await fd.close();
      return;
    } catch (err) {
      const code = err.code;
      if (code !== "EEXIST") throw err;
      if (Date.now() >= deadline) {
        throw new Error(
          `neat registry: timed out after ${timeoutMs}ms waiting for ${lockPath}. Another neat process is holding the lock; if no such process exists, remove the file by hand.`
        );
      }
      // Holder still active — back off briefly, then retry.
      await new Promise((r) => setTimeout(r, LOCK_RETRY_MS));
    }
  }
}
|
|
2784
|
+
// Release the registry lock. Best-effort: a missing lock file (e.g. removed
// by hand after a timeout) is not an error.
async function releaseLock(lockPath) {
  try {
    await import_node_fs16.promises.unlink(lockPath);
  } catch {
    // Ignore — nothing useful to do if the lock is already gone.
  }
}
|
|
2788
|
+
// Run `fn` while holding the registry lock; the lock is always released,
// even when `fn` throws.
async function withLock(fn) {
  const lockPath = registryLockPath();
  await acquireLock(lockPath);
  try {
    return await fn();
  } finally {
    await releaseLock(lockPath);
  }
}
|
|
2797
|
+
// Read and validate the project registry. A missing file just means nothing
// has been registered yet and yields an empty registry.
async function readRegistry() {
  const file = registryPath();
  let raw;
  try {
    raw = await import_node_fs16.promises.readFile(file, "utf8");
  } catch (err) {
    if (err.code === "ENOENT") {
      return { version: 1, projects: [] };
    }
    throw err;
  }
  // Schema validation rejects hand-edited or corrupted registries early.
  return import_types18.RegistryFileSchema.parse(JSON.parse(raw));
}
|
|
2811
|
+
// Validate, then atomically persist the registry (pretty-printed, trailing
// newline). Validation runs first so a malformed object never reaches disk.
async function writeRegistry(reg) {
  const validated = import_types18.RegistryFileSchema.parse(reg);
  const body = `${JSON.stringify(validated, null, 2)}\n`;
  await writeAtomically(registryPath(), body);
}
|
|
2815
|
+
// All registered projects, in registry order.
async function listProjects() {
  return (await readRegistry()).projects;
}
|
|
2819
|
+
// Update one project's status under the registry lock and return the updated
// entry. Throws when no project with that name is registered.
async function setStatus(name, status) {
  return withLock(async () => {
    const reg = await readRegistry();
    const project = reg.projects.find((p) => p.name === name);
    if (!project) throw new Error(`neat registry: no project named "${name}"`);
    project.status = status;
    await writeRegistry(reg);
    return project;
  });
}
|
|
2829
|
+
// Stamp a project's lastSeenAt under the registry lock. Unknown project
// names are silently ignored (unlike setStatus, this is best-effort).
async function touchLastSeen(name, at = new Date().toISOString()) {
  await withLock(async () => {
    const reg = await readRegistry();
    const project = reg.projects.find((p) => p.name === name);
    if (!project) return;
    project.lastSeenAt = at;
    await writeRegistry(reg);
  });
}
|
|
2838
|
+
|
|
2839
|
+
// src/daemon.ts
|
|
2840
|
+
// Resolve the neat home directory for the daemon.
// Precedence: explicit option > NEAT_HOME env var > <home>/.neat fallback.
function neatHomeFor(opts) {
  for (const candidate of [opts.neatHome, process.env.NEAT_HOME]) {
    if (candidate && candidate.length > 0) {
      return import_node_path30.default.resolve(candidate);
    }
  }
  const home = process.env.HOME ?? process.env.USERPROFILE ?? "";
  return import_node_path30.default.join(home, ".neat");
}
|
|
2847
|
+
// Bring one registered project online: validate its path, restore its
// snapshot, run a fresh extraction, and start the persist loop. On any path
// problem the project is marked broken in the registry and a placeholder
// slot (empty graph, no-op stop) is returned instead of throwing.
async function bootstrapProject(entry2) {
  try {
    const stat = await import_node_fs17.promises.stat(entry2.path);
    if (!stat.isDirectory()) {
      throw new Error(`registered path ${entry2.path} is not a directory`);
    }
  } catch (err) {
    // Best-effort status update — registry write failures must not mask
    // the original bootstrap error.
    await setStatus(entry2.name, "broken").catch(() => {
    });
    return {
      entry: entry2,
      // Empty graph is fine — `slots` keeps the entry visible in `status`
      // output; nothing routes to it because it's not 'active'.
      graph: getGraph(`__broken__:${entry2.name}`),
      outPath: "",
      stopPersist: () => {
      },
      status: "broken",
      errorReason: err.message
    };
  }
  // Start from a clean graph so repeated bootstraps don't accumulate state.
  resetGraph(entry2.name);
  const graph = getGraph(entry2.name);
  // Snapshots live under <project>/neat-out/.
  const outPath = pathsForProject(
    entry2.name,
    import_node_path30.default.join(entry2.path, "neat-out")
  ).snapshotPath;
  // Restore the previous snapshot first, then re-extract on top of it.
  await loadGraphFromDisk(graph, outPath);
  await extractFromDirectory(graph, entry2.path);
  const stopPersist = startPersistLoop(graph, outPath);
  await touchLastSeen(entry2.name).catch(() => {
  });
  return {
    entry: entry2,
    graph,
    outPath,
    stopPersist,
    status: "active"
  };
}
|
|
2887
|
+
// Start the neatd daemon in-process.
//
// Precondition: the project registry file must already exist (created by
// `neat init`); otherwise this throws before touching any state.
// Side effects: writes a PID file under the neat home, bootstraps every
// registered project into an in-memory slot map, and installs a SIGHUP
// handler that re-syncs the slots with the registry.
//
// Returns a handle { slots, reload, stop, pidPath }:
//   slots   - Map of project name -> bootstrapped slot
//   reload  - re-sync slots with the registry (coalesces concurrent calls)
//   stop    - idempotent shutdown: detach SIGHUP, stop persist loops,
//             best-effort PID-file removal
//   pidPath - path of the PID file written for this process
async function startDaemon(opts = {}) {
  const home = neatHomeFor(opts);
  const regPath = registryPath();
  // Fail fast when no registry exists — the daemon has nothing to serve.
  try {
    await import_node_fs17.promises.access(regPath);
  } catch {
    throw new Error(
      `neatd: registry not found at ${regPath}. Run \`neat init <path>\` to register a project before starting the daemon.`
    );
  }
  // Record our PID (with trailing newline) so `neatd stop/reload/status`
  // can locate this process later.
  const pidPath = import_node_path30.default.join(home, "neatd.pid");
  await writeAtomically(pidPath, `${process.pid}
`);
  // One slot per registered project, keyed by project name.
  const slots = /* @__PURE__ */ new Map();
  // Sync `slots` with the registry: bootstrap newly registered projects and
  // tear down slots whose project was removed. Projects already present in
  // `slots` are left untouched.
  async function loadAll() {
    const projects = await listProjects();
    const seen = /* @__PURE__ */ new Set();
    for (const entry2 of projects) {
      seen.add(entry2.name);
      if (slots.has(entry2.name)) continue;
      try {
        const slot = await bootstrapProject(entry2);
        slots.set(entry2.name, slot);
        if (slot.status === "broken") {
          console.warn(`neatd: project "${entry2.name}" broken \u2014 ${slot.errorReason}`);
        } else {
          console.log(`neatd: project "${entry2.name}" active (${entry2.path})`);
        }
      } catch (err) {
        // A bootstrap failure is non-fatal for the daemon: log it and mark
        // the project broken in the registry (best-effort, errors swallowed).
        console.warn(
          `neatd: project "${entry2.name}" failed to bootstrap \u2014 ${err.message}`
        );
        await setStatus(entry2.name, "broken").catch(() => {
        });
      }
    }
    // Stop and drop slots whose project disappeared from the registry.
    // Iterate over a copy because we delete from `slots` while looping.
    for (const [name, slot] of [...slots.entries()]) {
      if (seen.has(name)) continue;
      try {
        slot.stopPersist();
      } catch {
      }
      slots.delete(name);
      console.log(`neatd: project "${name}" removed from registry \u2014 stopped`);
    }
  }
  await loadAll();
  // Coalesce concurrent reloads: while one sync is in flight, later callers
  // share its promise instead of starting another.
  let reloading = null;
  const reload = async () => {
    if (reloading) return reloading;
    reloading = (async () => {
      try {
        await loadAll();
      } finally {
        reloading = null;
      }
    })();
    return reloading;
  };
  // SIGHUP triggers a registry re-sync; failures are logged, not fatal.
  const sighupHandler = () => {
    void reload().catch((err) => {
      console.warn(`neatd: SIGHUP reload failed \u2014 ${err.message}`);
    });
  };
  process.on("SIGHUP", sighupHandler);
  // Idempotent shutdown: subsequent calls are no-ops.
  let stopped = false;
  const stop = async () => {
    if (stopped) return;
    stopped = true;
    process.off("SIGHUP", sighupHandler);
    for (const slot of slots.values()) {
      try {
        slot.stopPersist();
      } catch {
      }
    }
    // Best-effort PID-file cleanup; ignore if it is already gone.
    await import_node_fs17.promises.unlink(pidPath).catch(() => {
    });
  };
  return { slots, reload, stop, pidPath };
}
|
|
2968
|
+
|
|
2969
|
+
// src/neatd.ts
|
|
2970
|
+
// Resolve the neat home directory: an explicit NEAT_HOME override wins,
// otherwise fall back to "<user home>/.neat" (HOME first, then Windows
// USERPROFILE, then the empty string).
function neatHome2() {
  const override = process.env.NEAT_HOME;
  if (override && override.length > 0) {
    return import_node_path31.default.resolve(override);
  }
  const userHome = process.env.HOME ?? process.env.USERPROFILE ?? "";
  return import_node_path31.default.join(userHome, ".neat");
}
|
|
2977
|
+
// Read the daemon PID from "<neat home>/neatd.pid".
// Returns the PID as a number, or null when the file is missing,
// unreadable, or does not contain a finite integer.
async function readPid() {
  try {
    const pidFile = import_node_path31.default.join(neatHome2(), "neatd.pid");
    const contents = await import_node_fs18.promises.readFile(pidFile, "utf8");
    const parsed = Number.parseInt(contents.trim(), 10);
    if (Number.isFinite(parsed)) {
      return parsed;
    }
    return null;
  } catch {
    return null;
  }
}
|
|
2986
|
+
// Print the one-line CLI usage summary for neatd.
function usage() {
  const commands = "<start|stop|reload|status>";
  console.log(`usage: neatd ${commands} [--foreground]`);
}
|
|
2989
|
+
// Run the daemon in the foreground: print a startup banner, wire
// SIGTERM/SIGINT to a one-shot graceful shutdown, then block forever
// (the process only exits via a signal).
async function cmdStart() {
  const handle = await startDaemon();
  console.log(`neatd: started, PID ${process.pid}, ${handle.slots.size} project(s)`);
  console.log(`neatd: registry at ${registryPath()}`);
  console.log("neatd: SIGHUP reloads, SIGTERM/SIGINT stops");
  let shuttingDown = false;
  const shutdown = (signal) => {
    // One-shot guard: a second signal while stopping is ignored.
    if (shuttingDown) return;
    shuttingDown = true;
    console.log(`neatd: ${signal} received, stopping\u2026`);
    void handle
      .stop()
      .catch((err) => console.error(`neatd: shutdown error \u2014 ${err.message}`))
      .finally(() => process.exit(0));
  };
  for (const sig of ["SIGTERM", "SIGINT"]) {
    process.on(sig, shutdown);
  }
  // Park forever; shutdown() is the only way out.
  await new Promise(() => {
  });
}
|
|
3006
|
+
// Stop a running daemon by sending SIGTERM to the PID recorded in the
// PID file. Exits with status 1 when no PID file exists or the signal
// cannot be delivered.
async function cmdStop() {
  const pid = await readPid();
  if (pid === null) {
    console.error("neatd: no running daemon found (no PID file)");
    process.exit(1);
  }
  const deliver = () => {
    process.kill(pid, "SIGTERM");
    console.log(`neatd: SIGTERM sent to PID ${pid}`);
  };
  try {
    deliver();
  } catch (err) {
    console.error(`neatd: failed to signal PID ${pid} \u2014 ${err.message}`);
    process.exit(1);
  }
}
|
|
3020
|
+
// Ask a running daemon to re-sync its projects by sending SIGHUP to the
// PID recorded in the PID file. Exits with status 1 when no PID file
// exists or the signal cannot be delivered.
async function cmdReload() {
  const pid = await readPid();
  if (pid === null) {
    console.error("neatd: no running daemon found (no PID file)");
    process.exit(1);
  }
  const deliver = () => {
    process.kill(pid, "SIGHUP");
    console.log(`neatd: SIGHUP sent to PID ${pid}`);
  };
  try {
    deliver();
  } catch (err) {
    console.error(`neatd: failed to signal PID ${pid} \u2014 ${err.message}`);
    process.exit(1);
  }
}
|
|
3034
|
+
// Print daemon status: recorded PID (or "(not running)"), registry
// location, and one line per registered project. A registry read failure
// is reported the same as an empty registry.
async function cmdStatus() {
  const pid = await readPid();
  console.log(`pid: ${pid ?? "(not running)"}`);
  console.log(`registry: ${registryPath()}`);
  let projects;
  try {
    projects = await listProjects();
  } catch {
    projects = [];
  }
  if (!projects.length) {
    console.log("projects: (none)");
    return;
  }
  console.log("projects:");
  projects.forEach((p) => {
    const seen = p.lastSeenAt ?? "never";
    console.log(`  ${p.name} ${p.status} ${p.path} last-seen=${seen}`);
  });
}
|
|
3049
|
+
// CLI dispatcher: route process.argv[2] to its subcommand.
// No command exits 2; -h/--help exits 0; an unknown command exits 1.
async function main() {
  const cmd = process.argv[2];
  if (!cmd || cmd === "-h" || cmd === "--help") {
    usage();
    process.exit(cmd ? 0 : 2);
  }
  switch (cmd) {
    case "start":
      return cmdStart();
    case "stop":
      return cmdStop();
    case "reload":
      return cmdReload();
    case "status":
      return cmdStatus();
  }
  console.error(`neatd: unknown command "${cmd}"`);
  usage();
  process.exit(1);
}
|
|
3063
|
+
// Entry-point guard: run main() only when this file is executed directly —
// i.e. argv[1] names neatd.cjs / neatd.js (either path separator) or a bin
// shim ending in "/neatd" — never when it is merely required/imported.
var entry = process.argv[1] ?? "";
if (/[\\/]neatd\.(?:cjs|js)$/.test(entry) || entry.endsWith("/neatd")) {
  main().catch((err) => {
    console.error(err);
    process.exit(1);
  });
}
|
|
3070
|
+
//# sourceMappingURL=neatd.cjs.map
|