@snapback/cli 1.6.0 → 3.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +121 -22
- package/dist/SkippedTestDetector-AXTMWWHC.js +5 -0
- package/dist/SkippedTestDetector-QLSQV7K7.js +5 -0
- package/dist/analysis-6WTBZJH3.js +6 -0
- package/dist/analysis-C472LUGW.js +2475 -0
- package/dist/auth-UA7I3YE4.js +1446 -0
- package/dist/auto-provision-organization-6YF463TK.js +161 -0
- package/dist/{chunk-FVIYXFCL.js → chunk-4YTE4JEW.js} +2 -3
- package/dist/chunk-5EOPYJ4Y.js +12 -0
- package/dist/{chunk-ARVV3F4K.js → chunk-5SQA44V7.js} +1085 -18
- package/dist/{chunk-RB7H4UQJ.js → chunk-7ADPL4Q3.js} +10 -3
- package/dist/chunk-BE3HNVSV.js +2300 -0
- package/dist/chunk-BWWPGNZ5.js +5842 -0
- package/dist/chunk-CBGOC6RV.js +293 -0
- package/dist/{chunk-7JX6Y4TL.js → chunk-DPWFZNMY.js} +21 -34
- package/dist/{chunk-R7CUQ7CU.js → chunk-E6V6QKS7.js} +317 -33
- package/dist/chunk-GT4ZUCFR.js +111 -0
- package/dist/chunk-NOWJBG6X.js +3654 -0
- package/dist/chunk-O7HMAZ7L.js +3497 -0
- package/dist/chunk-PL4HF4M2.js +593 -0
- package/dist/chunk-V7B37PPD.js +4075 -0
- package/dist/chunk-YVZXPBSV.js +314 -0
- package/dist/chunk-ZBQDE6WJ.js +108 -0
- package/dist/client-RHDS6NOB.js +8 -0
- package/dist/dist-5LR7APG5.js +5 -0
- package/dist/dist-CUHOKNLS.js +12 -0
- package/dist/dist-RJE4RSZJ.js +9 -0
- package/dist/index.js +60568 -36578
- package/dist/local-service-adapter-AB3UYRUK.js +6 -0
- package/dist/pioneer-oauth-hook-V2JKEXM7.js +12 -0
- package/dist/{secure-credentials-IWQB6KU4.js → secure-credentials-UEPG7GWW.js} +2 -3
- package/dist/snapback-dir-MG7DTRMF.js +6 -0
- package/package.json +12 -11
- package/scripts/postinstall.mjs +2 -3
- package/dist/SkippedTestDetector-5WJZKZQ3.js +0 -5
- package/dist/SkippedTestDetector-5WJZKZQ3.js.map +0 -1
- package/dist/analysis-YI4UNUCM.js +0 -6
- package/dist/analysis-YI4UNUCM.js.map +0 -1
- package/dist/chunk-7JX6Y4TL.js.map +0 -1
- package/dist/chunk-ARVV3F4K.js.map +0 -1
- package/dist/chunk-EU2IZPOK.js +0 -13002
- package/dist/chunk-EU2IZPOK.js.map +0 -1
- package/dist/chunk-FVIYXFCL.js.map +0 -1
- package/dist/chunk-R7CUQ7CU.js.map +0 -1
- package/dist/chunk-RB7H4UQJ.js.map +0 -1
- package/dist/chunk-SOABQWAU.js +0 -385
- package/dist/chunk-SOABQWAU.js.map +0 -1
- package/dist/dist-O6EBXLN6.js +0 -5
- package/dist/dist-O6EBXLN6.js.map +0 -1
- package/dist/dist-PJVBBZTF.js +0 -5
- package/dist/dist-PJVBBZTF.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/learning-pruner-QC4CTJDX.js +0 -5
- package/dist/learning-pruner-QC4CTJDX.js.map +0 -1
- package/dist/secure-credentials-IWQB6KU4.js.map +0 -1
- package/dist/snapback-dir-V6MWXIW4.js +0 -5
- package/dist/snapback-dir-V6MWXIW4.js.map +0 -1
|
@@ -0,0 +1,314 @@
|
|
|
1
|
+
#!/usr/bin/env node --no-warnings=ExperimentalWarning
|
|
2
|
+
import { createLogger, LogLevel } from './chunk-NOWJBG6X.js';
|
|
3
|
+
import { __name } from './chunk-7ADPL4Q3.js';
|
|
4
|
+
import { createClient } from 'redis';
|
|
5
|
+
|
|
6
|
+
// Flag presumably read by downstream modules to detect CLI execution — TODO confirm.
process.env.SNAPBACK_CLI='true';
// NOTE(review): the returned logger is discarded — presumably createLogger
// configures shared/global logging state as a side effect; confirm.
createLogger({
  name: "redis-factory",
  level: LogLevel.INFO
});
|
|
11
|
+
/**
 * Decide whether a disconnect cause represents a socket timeout.
 * Matches either the error's name or a "socket timeout" substring in its
 * message. May return `undefined` (falsy) when the cause has no message.
 * @param {Error|undefined|null} cause - Error passed by the redis client.
 */
function isSocketTimeoutError(cause) {
  if (!cause) {
    return false;
  }
  const hasTimeoutName = cause?.name === "SocketTimeoutError";
  return hasTimeoutName || cause?.message?.includes("socket timeout");
}
|
|
15
|
+
// Bundler (esbuild keep-names) helper: pins the function's .name for stack traces.
__name(isSocketTimeoutError, "isSocketTimeoutError");
|
|
16
|
+
|
|
17
|
+
// ../../packages/platform/dist/cache/redis-metrics.js
// PING latency cutoffs (ms) used by RedisMetricsCollector.collect to bucket
// health: < healthy → "healthy", < degraded → "degraded", else "unhealthy".
var LATENCY_THRESHOLDS = {
  /** Under 100ms is healthy */
  healthy: 100,
  /** Under 500ms is degraded */
  degraded: 500
};
|
|
24
|
+
// Collects connection-health telemetry for one Redis key namespace:
// last success/error timestamps, reconnect count, and PING latency.
var RedisMetricsCollector = class {
  static {
    __name(this, "RedisMetricsCollector");
  }
  reconnectAttempts = 0;
  lastSuccessAt = null;
  lastErrorAt = null;
  lastError = null;
  keyPrefix;
  /**
   * @param {string} [keyPrefix=""] - Key namespace these metrics are scoped to.
   */
  constructor(keyPrefix = "") {
    this.keyPrefix = keyPrefix;
  }
  /**
   * Record a successful operation (timestamped now).
   */
  recordSuccess() {
    this.lastSuccessAt = Date.now();
  }
  /**
   * Record a failed operation, keeping only the most recent error message.
   */
  recordError(error) {
    this.lastErrorAt = Date.now();
    this.lastError = error.message;
  }
  /**
   * Record one reconnection attempt.
   */
  recordReconnect() {
    this.reconnectAttempts += 1;
  }
  /**
   * Clear the reconnect counter (call after a successful connection).
   */
  resetReconnectCount() {
    this.reconnectAttempts = 0;
  }
  /**
   * Round-trip a PING through the client and report the elapsed time in ms.
   * Resolves null when the client is absent, not ready, or the PING throws;
   * success/failure is folded into the collector's counters as a side effect.
   */
  async measureLatency(client) {
    if (!client || !client.isReady) {
      return null;
    }
    try {
      const startedAt = Date.now();
      await client.ping();
      const elapsedMs = Date.now() - startedAt;
      this.recordSuccess();
      return elapsedMs;
    } catch (error) {
      this.recordError(error);
      return null;
    }
  }
  /**
   * Produce a full health snapshot for the given client: connection flags,
   * measured latency, a status/message pair, and the accumulated counters.
   */
  async collect(client) {
    const isConnected = client?.isReady ?? false;
    const isOpen = client?.isOpen ?? false;
    const latency = await this.measureLatency(client);
    const [status, message] = this.#classify(isConnected, latency);
    return {
      isConnected,
      isOpen,
      latency,
      status,
      message,
      lastSuccessAt: this.lastSuccessAt,
      lastErrorAt: this.lastErrorAt,
      lastError: this.lastError,
      reconnectAttempts: this.reconnectAttempts,
      keyPrefix: this.keyPrefix
    };
  }
  // Map connection state + latency onto a [status, message] pair using
  // LATENCY_THRESHOLDS.
  #classify(isConnected, latency) {
    if (!isConnected) {
      return ["unhealthy", "Redis client not connected"];
    }
    if (latency === null) {
      return ["unhealthy", "Redis PING failed"];
    }
    if (latency < LATENCY_THRESHOLDS.healthy) {
      return ["healthy", `Redis latency: ${latency}ms`];
    }
    if (latency < LATENCY_THRESHOLDS.degraded) {
      return ["degraded", `Redis latency elevated: ${latency}ms`];
    }
    return ["unhealthy", `Redis latency too high: ${latency}ms`];
  }
  /**
   * Current reconnection-attempt count.
   */
  getReconnectAttempts() {
    return this.reconnectAttempts;
  }
};
|
|
124
|
+
// One collector per key prefix, created lazily and shared module-wide.
var metricsCollectors = /* @__PURE__ */ new Map();
/**
 * Return the shared RedisMetricsCollector for a key prefix, creating and
 * caching it on first use.
 * @param {string} [keyPrefix=""] - Namespace the collector is scoped to.
 */
function getMetricsCollector(keyPrefix = "") {
  const existing = metricsCollectors.get(keyPrefix);
  if (existing) {
    return existing;
  }
  const created = new RedisMetricsCollector(keyPrefix);
  metricsCollectors.set(keyPrefix, created);
  return created;
}
__name(getMetricsCollector, "getMetricsCollector");
|
|
134
|
+
|
|
135
|
+
// ../../packages/platform/dist/cache/redis-client.js
// Module-level singleton state for the platform cache's Redis connection.
var logger2 = createLogger({
  name: "redis-client",
  level: LogLevel.INFO
});
// Namespace prefix for cache entries; also scopes the shared metrics collector.
var KEY_PREFIX = "cache:";
// Created lazily by initializeRedis(); reset to null by closeRedis().
var redisClient = null;
// Whether the client is believed usable; toggled by connection events below.
var redisAvailable = false;
// Caches the single init attempt so concurrent callers share one promise.
var initializationPromise = null;
var metricsCollector = getMetricsCollector(KEY_PREFIX);
|
|
145
|
+
/**
 * Lazily create and connect the module-level Redis client.
 *
 * Idempotent: the first call starts initialization and caches the promise in
 * `initializationPromise`; every later call returns that same promise. When
 * REDIS_URL is unset the function logs a warning and resolves, leaving
 * `redisAvailable` false so callers use their fallbacks.
 *
 * NOTE(review): a failed initialization also leaves `initializationPromise`
 * resolved, so init is never retried until process restart — confirm intended.
 * @returns {Promise<void>}
 */
async function initializeRedis() {
  if (initializationPromise) {
    return initializationPromise;
  }
  initializationPromise = (async () => {
    const redisUrl = process.env.REDIS_URL;
    if (!redisUrl) {
      logger2.warn("REDIS_URL not configured - using in-memory fallback for caching");
      return;
    }
    try {
      redisClient = createClient({
        url: redisUrl,
        socket: {
          // Connection timeout - how long to wait for initial connection
          connectTimeout: 1e4,
          // TCP keepalive - prevents silent connection drops
          keepAlive: 5e3,
          // Reconnection strategy with exponential backoff + jitter
          reconnectStrategy: /* @__PURE__ */ __name((retries, cause) => {
            // Socket timeouts are treated as non-recoverable here: returning
            // false tells node-redis to stop reconnecting.
            if (isSocketTimeoutError(cause)) {
              logger2.warn("Redis socket timeout - not reconnecting", {
                cause: cause?.message
              });
              return false;
            }
            // Returning an Error aborts reconnection permanently.
            if (retries > 20) {
              logger2.error("Redis max retries exceeded", {
                retries,
                cause: cause?.message
              });
              return new Error("Redis connection failed");
            }
            // 2^retries * 100ms, capped at 30s, plus 0-199ms of jitter.
            const baseDelay = Math.min(2 ** retries * 100, 3e4);
            const jitter = Math.floor(Math.random() * 200);
            return baseDelay + jitter;
          }, "reconnectStrategy")
        },
        // Application-level ping to keep connection alive
        pingInterval: 6e4
      });
      redisClient.on("error", (err) => {
        // Transient network errors are expected during reconnects; log them
        // at debug to avoid noise, everything else at warn.
        if (err.message.includes("ECONNRESET") || err.message.includes("ECONNREFUSED")) {
          logger2.debug("Redis connection error (will reconnect)", {
            error: err.message
          });
        } else {
          logger2.warn("Redis client error", {
            error: err.message
          });
        }
        redisAvailable = false;
        metricsCollector.recordError(err);
      });
      redisClient.on("connect", () => {
        redisAvailable = true;
        metricsCollector.resetReconnectCount();
        if (process.env.NODE_ENV !== "production") {
          logger2.info("Redis connected for platform caching");
        }
      });
      redisClient.on("ready", () => {
        redisAvailable = true;
        logger2.debug("Redis client ready for platform caching");
      });
      redisClient.on("reconnecting", () => {
        metricsCollector.recordReconnect();
        logger2.debug("Redis reconnecting for platform caching");
      });
      await redisClient.connect();
      redisAvailable = true;
      if (process.env.NODE_ENV !== "production") {
        logger2.info("\u2705 Redis client initialized for platform caching with production config");
      }
    } catch (error) {
      // Initialization failure is non-fatal: redisAvailable stays false and
      // callers fall back. The partially-built client object is left assigned
      // but is never used while redisAvailable is false.
      logger2.error("Redis initialization failed", {
        error: error instanceof Error ? error.message : String(error)
      });
      redisAvailable = false;
    }
  })();
  return initializationPromise;
}
__name(initializeRedis, "initializeRedis");
|
|
229
|
+
/**
 * Read and JSON-decode a cached value.
 * Resolves null when Redis is unavailable, the key is missing, or the
 * read/parse fails — failures are logged, never thrown.
 * @param {string} key - Fully qualified cache key.
 * @returns {Promise<unknown|null>}
 */
async function getCache(key) {
  await initializeRedis();
  if (!redisAvailable || !redisClient) {
    return null;
  }
  try {
    const raw = await redisClient.get(key);
    return raw ? JSON.parse(raw) : null;
  } catch (error) {
    logger2.error("Redis GET failed", {
      key,
      error
    });
    return null;
  }
}
__name(getCache, "getCache");
|
|
249
|
+
/**
 * JSON-encode and store a value in Redis.
 * @param {string} key - Fully qualified cache key.
 * @param {unknown} value - Must be JSON-serializable.
 * @param {number} [ttlSeconds] - Expiry in seconds; when omitted (or falsy)
 *   the key is stored without expiry.
 * @returns {Promise<boolean>} true on success; false when Redis is
 *   unavailable or the write fails (errors are logged, never thrown).
 */
async function setCache(key, value, ttlSeconds) {
  await initializeRedis();
  if (!redisAvailable || !redisClient) {
    return false;
  }
  try {
    const payload = JSON.stringify(value);
    if (ttlSeconds) {
      await redisClient.set(key, payload, { EX: ttlSeconds });
    } else {
      await redisClient.set(key, payload);
    }
    return true;
  } catch (error) {
    logger2.error("Redis SET failed", {
      key,
      error
    });
    return false;
  }
}
__name(setCache, "setCache");
|
|
273
|
+
/**
 * Remove a key from the cache.
 * @param {string} key - Fully qualified cache key.
 * @returns {Promise<boolean>} true on success; false when Redis is
 *   unavailable or the delete fails (errors are logged, never thrown).
 */
async function deleteCache(key) {
  await initializeRedis();
  if (!redisAvailable || !redisClient) {
    return false;
  }
  try {
    await redisClient.del(key);
  } catch (error) {
    logger2.error("Redis DEL failed", {
      key,
      error
    });
    return false;
  }
  return true;
}
__name(deleteCache, "deleteCache");
|
|
290
|
+
/**
 * Gracefully close the module's Redis connection (QUIT) and clear the
 * singleton state. No-op when no client exists; close errors are logged
 * but the state is still reset.
 * @returns {Promise<void>}
 */
async function closeRedis() {
  if (!redisClient) {
    return;
  }
  try {
    await redisClient.quit();
    logger2.info("Redis connection closed");
  } catch (error) {
    logger2.error("Error closing Redis connection", {
      error
    });
  }
  redisClient = null;
  redisAvailable = false;
}
__name(closeRedis, "closeRedis");
|
|
305
|
+
/**
 * Whether the module currently believes Redis is usable.
 * Set false on client "error" events and by closeRedis(); set true on
 * "connect"/"ready" and after a successful initial connect.
 * @returns {boolean}
 */
function isRedisAvailable() {
  return redisAvailable;
}
__name(isRedisAvailable, "isRedisAvailable");
/**
 * Direct access to the underlying redis client singleton.
 * @returns {object|null} null until initializeRedis() creates the client,
 *   and again after closeRedis().
 */
function getRedisClient() {
  return redisClient;
}
__name(getRedisClient, "getRedisClient");
|
|
313
|
+
|
|
314
|
+
export { closeRedis, deleteCache, getCache, getRedisClient, initializeRedis, isRedisAvailable, setCache };
|
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
#!/usr/bin/env node --no-warnings=ExperimentalWarning
|
|
2
|
+
import { __name } from './chunk-7ADPL4Q3.js';
|
|
3
|
+
import { parse } from '@babel/parser';
|
|
4
|
+
|
|
5
|
+
// Flag presumably read by downstream modules to detect CLI execution — TODO confirm.
process.env.SNAPBACK_CLI='true';
|
|
6
|
+
/**
 * Parse a test source file and report every `describe.skip` / `it.skip` /
 * `test.skip` call found in it.
 *
 * @param {string} code - Source text; parsed with TypeScript + JSX plugins.
 * @param {string} filePath - Path used to tag the result and each finding.
 * @returns {{file: string, skipped: Array<{type: string, name: (string|undefined), line: number, column: number, file: string}>, parsed: boolean, error?: string}}
 *   `parsed` is false (with `error` set) when Babel cannot parse the file.
 */
function detectSkippedTests(code, filePath) {
  const skipped = [];
  try {
    // Depth-first walk over the Babel AST: recurse into every object or
    // array-element property that looks like a node (has a `type` field).
    let visit2 = function(node) {
      if (node.type === "CallExpression") {
        const callee = node.callee;
        if (callee.type === "MemberExpression" && callee.property.type === "Identifier" && callee.property.name === "skip" && callee.object.type === "Identifier") {
          const testType = callee.object.name;
          if (testType === "describe" || testType === "it" || testType === "test") {
            // Best-effort test name: only plain string literals and
            // single-part template literals are statically resolvable.
            let name;
            if (node.arguments.length > 0) {
              const firstArg = node.arguments[0];
              if (firstArg.type === "StringLiteral") {
                name = firstArg.value;
              } else if (firstArg.type === "TemplateLiteral" && firstArg.quasis.length === 1) {
                name = firstArg.quasis[0].value.raw;
              }
            }
            skipped.push({
              type: testType,
              name,
              line: node.loc?.start.line ?? 0,
              column: node.loc?.start.column ?? 0,
              file: filePath
            });
          }
        }
      }
      for (const key of Object.keys(node)) {
        const value = node[key];
        if (value && typeof value === "object") {
          if (Array.isArray(value)) {
            for (const item of value) {
              if (item && typeof item === "object" && "type" in item) {
                visit2(item);
              }
            }
          } else if ("type" in value) {
            visit2(value);
          }
        }
      }
    };
    __name(visit2, "visit");
    const ast = parse(code, {
      sourceType: "module",
      plugins: [
        "typescript",
        "jsx"
      ],
      // Keep going on recoverable syntax errors so partially-broken test
      // files still yield findings.
      errorRecovery: true
    });
    visit2(ast.program);
    return {
      file: filePath,
      skipped,
      parsed: true
    };
  } catch (error) {
    return {
      file: filePath,
      skipped: [],
      parsed: false,
      error: error instanceof Error ? error.message : String(error)
    };
  }
}
__name(detectSkippedTests, "detectSkippedTests");
|
|
75
|
+
/**
 * Run skipped-test detection over every test file in a set of sources.
 * Only paths containing ".test.", ".spec.", or "__tests__" are analyzed.
 * @param {Iterable<[string, string]>} files - [filePath, content] pairs
 *   (e.g. a Map of path → source text).
 * @returns {Array<object>} One detectSkippedTests result per test file.
 */
function analyzeSkippedTests(files) {
  const results = [];
  const isTestPath = (p) => p.includes(".test.") || p.includes(".spec.") || p.includes("__tests__");
  for (const [filePath, content] of files) {
    if (isTestPath(filePath)) {
      results.push(detectSkippedTests(content, filePath));
    }
  }
  return results;
}
__name(analyzeSkippedTests, "analyzeSkippedTests");
|
|
85
|
+
/**
 * Aggregate per-file detection results into a single summary: the total
 * skipped count, counts per construct (describe/it/test), and the list of
 * files containing at least one skipped test.
 * @param {Array<{file: string, skipped: Array<{type: string}>}>} results
 * @returns {{totalSkipped: number, byType: {describe: number, it: number, test: number}, filesWithSkipped: string[]}}
 */
function getSkippedTestSummary(results) {
  const summary = {
    totalSkipped: 0,
    byType: {
      describe: 0,
      it: 0,
      test: 0
    },
    filesWithSkipped: []
  };
  for (const result of results) {
    if (result.skipped.length === 0) {
      continue;
    }
    summary.filesWithSkipped.push(result.file);
    summary.totalSkipped += result.skipped.length;
    for (const { type } of result.skipped) {
      summary.byType[type] += 1;
    }
  }
  return summary;
}
|
|
106
|
+
// Bundler (esbuild keep-names) helper: pins the function's .name for stack traces.
__name(getSkippedTestSummary, "getSkippedTestSummary");
|
|
107
|
+
|
|
108
|
+
export { analyzeSkippedTests, detectSkippedTests, getSkippedTestSummary };
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
#!/usr/bin/env node --no-warnings=ExperimentalWarning
|
|
2
|
+
export { checkDatabaseConnection, closeDatabaseConnection, combinedSchema, db, pool } from './chunk-BWWPGNZ5.js';
|
|
3
|
+
import './chunk-NOWJBG6X.js';
|
|
4
|
+
import './chunk-5EOPYJ4Y.js';
|
|
5
|
+
import './chunk-CBGOC6RV.js';
|
|
6
|
+
import './chunk-7ADPL4Q3.js';
|
|
7
|
+
|
|
8
|
+
process.env.SNAPBACK_CLI='true';
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
#!/usr/bin/env node --no-warnings=ExperimentalWarning
|
|
2
|
+
export { createManagedMetadata, detectAIClients, detectMCPProcesses, detectWorkspaceConfig, evictCachedPath, getAllCachedPaths, getCachedPath, getClient, getClientConfigPath, getConfiguredClients, getOrCreateIdentity, getServerKey, getSnapbackConfigDir, getSnapbackMCPConfig, injectWorkspacePath, isCommandExecutable2 as isCommandExecutable, isOwnedByThisInstall, isSnapbackMCPRunning, patchApiKeyInClientConfig, readClientConfig, removeSnapbackConfig, repairClientConfig, resetIdentityCache, resolveNodePath, setCachedPath, validateClientConfig, validateConfig, validateWorkspacePath, writeClientConfig } from './chunk-E6V6QKS7.js';
|
|
3
|
+
import './chunk-7ADPL4Q3.js';
|
|
4
|
+
|
|
5
|
+
process.env.SNAPBACK_CLI='true';
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
#!/usr/bin/env node --no-warnings=ExperimentalWarning
|
|
2
|
+
export { AccountSchema, AiChatSchema, AttributionServiceImpl, EntitlementsServiceImpl, InvitationSchema, MCPService, MemberSchema, OrganizationSchema, OrganizationUpdateSchema, PasskeySchema, PurchaseInsertSchema, PurchaseSchema, PurchaseUpdateSchema, SagaOrchestratorImpl, SessionSchema, SnapshotStoreDb, TelemetrySinkDb, TelemetrySinkDbAdapter, UserSchema, UserUpdateSchema, VerificationSchema, anonymizeEmail, anonymizeUserData, anonymizeUserId, appendFalsePositivePatterns, calculateDecayedWeight, cleanupExpiredData, clearCapabilityCache, closeTestDb, countAllOrganizations, countAllUsers, createPurchase, createTestUser, createTierUpgradeSagaWithDeps, createUser, createUserAccount, databaseService, deletePurchaseBySubscriptionId, deleteUserApiKeys, deleteUserData, exportUserData, extensionLinkTokens, extensionSessions as extensionSessionsAuth, findSimilarPatterns, generateOrganizationSlug, getAccountById, getCacheMetrics, getCapabilities, getCapabilityAuditHistory, getInvitationById, getMCPService, getOrganizationById, getOrganizationBySlug, getOrganizationMembership, getOrganizationWithPurchasesAndMembersCount, getOrganizations, getOrganizationsWithMembers, getPendingInvitationByEmail, getPurchaseById, getPurchaseBySubscriptionId, getPurchasesByOrganizationId, getPurchasesByUserId, getTestDb, getUserByEmail, getUserById, getUserPrivacyPreferences, getUsers, getVectorStats, getWorkspaceLinkById, getWorkspaceLinksByUserId, handleTierDowngrade, handleTierUpgrade, healthCheck, incrementDetectionsAnalyzed, insertPatternWithEmbedding, invalidateCapabilityCache, isPgvectorEnabled, linkWorkspace, logAnonymizedEvent, logCapabilityAudit, mergeSignalIntoPattern, recordFalsePositiveSignal, resetCacheMetrics, resetCapabilities, resolveTierByWorkspaceId, sagaPersistence, sanitizeForLogging, searchSimilarPatterns, shouldRetainData, signalToPattern, testInTransaction, truncateAllTables, unlinkAllWorkspacesForUser, unlinkWorkspace, updateCapabilities, updateOrganization, 
updatePatternEmbedding, updatePurchase, updateUser, updateWorkspaceTier } from './chunk-V7B37PPD.js';
|
|
3
|
+
export { TOPUP_PACKS, account, activationCodeRedemptions, activationCodes, adminAuditLog, agentSuggestions, analysisEvents, apiKeyMetadata, apiKeyUsage, apiKeys, apiUsage, apiUsageLogs, burnInviteCode, checkDatabaseConnection, clientTokens, combinedSchema, creditJobTypeEnum, creditTopups, creditTransactionStatusEnum, creditTransactionTypeEnum, creditsLedger, db, deviceTrials, emailDeliveries, emailPreferences, extensionSessions, featureUsage, feedback, fileSnapshotSessions, fingerprints, getTopupPackDetails, inviteCodes, isValidPackSize, loops, mcpActivityEvents, mcpAggregatedLearnings, mcpSessions, member, newsletterSubscribers, orgDailyMetrics, organization, pendingApiKeys, policyEvaluations, pool, postAcceptOutcomes, purchase, rateLimitViolations, responseCache, retentionConfig, rollbackEvents, ruleViolations, postgres_exports as schema, securityEvents, snapbackSchema, snapshotFiles, snapshots, subscriptions, superAdmins, telemetryDailyStats, telemetryEvents, telemetryIdempotencyKeys, telemetryOutbox, topupStatusEnum, usageLimits, user, userProductMetrics, userSafetyProfiles, waitlist, waitlistAuditLogs, waitlistReferrals, waitlistTasks } from './chunk-BWWPGNZ5.js';
|
|
4
|
+
export { closeRedis, getRedisClient, initializeRedis, isRedisAvailable } from './chunk-YVZXPBSV.js';
|
|
5
|
+
import './chunk-BE3HNVSV.js';
|
|
6
|
+
import './chunk-PL4HF4M2.js';
|
|
7
|
+
import './chunk-NOWJBG6X.js';
|
|
8
|
+
import './chunk-5EOPYJ4Y.js';
|
|
9
|
+
import './chunk-CBGOC6RV.js';
|
|
10
|
+
import './chunk-7ADPL4Q3.js';
|
|
11
|
+
|
|
12
|
+
process.env.SNAPBACK_CLI='true';
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
#!/usr/bin/env node --no-warnings=ExperimentalWarning
|
|
2
|
+
export { AnalyticsEvents, CORRELATION_ANALYSES, CORRELATION_COHORTS, KEY_METRIC_ALERTS, OTelInstrumentationProvider, RETENTION_COHORTS, TelemetryClient, addSentryBreadcrumb, captureError, captureMessage, checkDatabaseConnection, checkErrorBudget, checkHttpService, checkRedisConnection, clearSentryUser, createAlert, createCohort, createGracefulShutdown, createHealthCheck, createSentryMiddleware, deleteAlert, deleteCohort, detectSurface, drainAndCloseServer, flushSentry, getAlerts, getAnalyticsEnv, getAnalyticsSuperProperties, getCohort, getCohortMembers, getCohorts, getCorrelationAnalysis, getDeploymentEnv, getEnvironmentInfo, getErrorRate, getMetrics, initSentry, isDevelopment, isProduction, neon_exports as neon, performCorrelationAnalysis, preStopDelay, prometheus_exports as prometheus, recordError, recordSuccess, registerKeyMetricAlerts, resetMetrics, setSentryUser, startSentryTransaction, toggleAlert, updateCohort } from './chunk-BE3HNVSV.js';
|
|
3
|
+
export { cache_exports as cache, logger, makeWatcher, resiliency_exports as resiliency } from './chunk-PL4HF4M2.js';
|
|
4
|
+
export { LogLevel, NoOpInstrumentationProvider } from './chunk-NOWJBG6X.js';
|
|
5
|
+
import './chunk-5EOPYJ4Y.js';
|
|
6
|
+
import './chunk-CBGOC6RV.js';
|
|
7
|
+
import './chunk-7ADPL4Q3.js';
|
|
8
|
+
|
|
9
|
+
process.env.SNAPBACK_CLI='true';
|