@codemieai/cdk 0.1.270

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,3247 @@
1
+ import * as fs from 'fs';
2
+ import * as path from 'path';
3
+ import * as yaml from 'yaml';
4
+ import * as crypto from 'crypto';
5
+ import { CodeMieClient, DataSourceType } from 'codemie-sdk';
6
+ import 'dotenv/config';
7
+ import pLimit from 'p-limit';
8
+
9
+ // src/lib/codemieConfigLoader.ts
10
+ var CodemieConfigLoader = class {
11
+ appConfig;
12
+ constructor(appConfig) {
13
+ this.appConfig = appConfig;
14
+ }
15
+ /**
16
+ * Load and parse the main codemie.yaml configuration file
17
+ */
18
+ loadConfig() {
19
+ const { rootDir, codemieConfig } = this.appConfig;
20
+ const configPath = path.join(rootDir, codemieConfig);
21
+ if (!fs.existsSync(configPath)) {
22
+ throw new Error(`Configuration file not found: ${configPath}`);
23
+ }
24
+ const content = fs.readFileSync(configPath, "utf8");
25
+ const config = yaml.parse(content);
26
+ this.resolveImports(config, rootDir);
27
+ this.substituteEnvVars(config);
28
+ this.applyDatasourceDefaults(config);
29
+ this.resolveReferencesRecursive(config, config);
30
+ this.validateDatasourceIntegrationReferences(config);
31
+ return config;
32
+ }
33
+ /**
34
+ * Load assistant prompt file
35
+ */
36
+ loadPrompt(promptPath) {
37
+ const { rootDir } = this.appConfig;
38
+ const fullPath = path.join(rootDir, promptPath);
39
+ if (!fs.existsSync(fullPath)) {
40
+ throw new Error(`Prompt file not found: ${fullPath}`);
41
+ }
42
+ return fs.readFileSync(fullPath, "utf8");
43
+ }
44
+ /**
45
+ * Load assistant configuration file
46
+ */
47
+ loadAssistantConfig(configPath) {
48
+ const { rootDir } = this.appConfig;
49
+ const fullPath = path.join(rootDir, configPath);
50
+ if (!fs.existsSync(fullPath)) {
51
+ throw new Error(`Config file not found: ${fullPath}`);
52
+ }
53
+ const content = fs.readFileSync(fullPath, "utf8");
54
+ return yaml.parse(content);
55
+ }
56
+ /**
57
+ * Validate that all referenced files exist
58
+ */
59
+ validateFiles(config) {
60
+ const errors = [];
61
+ const { rootDir } = this.appConfig;
62
+ if (config.resources.assistants) {
63
+ for (const assistant of config.resources.assistants) {
64
+ const promptPath = path.join(rootDir, assistant.prompt);
65
+ if (!fs.existsSync(promptPath)) {
66
+ errors.push(`Prompt file not found for ${assistant.name}: ${assistant.prompt}`);
67
+ }
68
+ if (assistant.config) {
69
+ const configPath = path.join(rootDir, assistant.config);
70
+ if (!fs.existsSync(configPath)) {
71
+ errors.push(`Config file not found for ${assistant.name}: ${assistant.config}`);
72
+ }
73
+ }
74
+ if (!assistant.description) {
75
+ errors.push(`Missing 'description' for assistant: ${assistant.name}`);
76
+ }
77
+ if (!assistant.model) {
78
+ errors.push(`Missing 'model' for assistant: ${assistant.name}`);
79
+ }
80
+ }
81
+ }
82
+ return {
83
+ valid: errors.length === 0,
84
+ errors
85
+ };
86
+ }
87
+ /**
88
+ * Resolve $import directives recursively
89
+ * Automatically handles:
90
+ * 1. Single resource import (object)
91
+ * 2. Multiple resources import (array)
92
+ * 3. Mixed inline + imports
93
+ * Includes circular import detection to prevent infinite recursion
94
+ */
95
+ resolveImports(obj, baseDir, visitedFiles = /* @__PURE__ */ new Set()) {
96
+ if (!this.isPlainObject(obj)) {
97
+ return;
98
+ }
99
+ for (const key in obj) {
100
+ const value = obj[key];
101
+ if (this.isPlainObject(value) && "$import" in value) {
102
+ const importDirective = value;
103
+ const importPath = path.join(baseDir, importDirective.$import);
104
+ const normalizedPath = path.resolve(importPath);
105
+ if (visitedFiles.has(normalizedPath)) {
106
+ throw new Error(
107
+ `Circular import detected: ${normalizedPath}
108
+ Import chain: ${[...visitedFiles].join(" \u2192 ")} \u2192 ${normalizedPath}`
109
+ );
110
+ }
111
+ visitedFiles.add(normalizedPath);
112
+ const imported = this.loadYamlFile(importPath);
113
+ obj[key] = imported;
114
+ this.resolveImports(obj[key], path.dirname(importPath), visitedFiles);
115
+ visitedFiles.delete(normalizedPath);
116
+ } else if (Array.isArray(value)) {
117
+ obj[key] = this.resolveArrayImports(value, baseDir, visitedFiles);
118
+ } else if (this.isPlainObject(value)) {
119
+ this.resolveImports(value, baseDir, visitedFiles);
120
+ }
121
+ }
122
+ }
123
+ /**
124
+ * Resolve imports in arrays
125
+ * Intelligently handles:
126
+ * - Inline objects (keep as-is)
127
+ * - $import with single object (add to array)
128
+ * - $import with array (flatten into parent array)
129
+ * Includes circular import detection
130
+ */
131
+ resolveArrayImports(arr, baseDir, visitedFiles = /* @__PURE__ */ new Set()) {
132
+ const result = [];
133
+ for (const item of arr) {
134
+ if (this.isPlainObject(item) && "$import" in item) {
135
+ const importDirective = item;
136
+ const importPath = path.join(baseDir, importDirective.$import);
137
+ const normalizedPath = path.resolve(importPath);
138
+ if (visitedFiles.has(normalizedPath)) {
139
+ throw new Error(
140
+ `Circular import detected: ${normalizedPath}
141
+ Import chain: ${[...visitedFiles].join(" \u2192 ")} \u2192 ${normalizedPath}`
142
+ );
143
+ }
144
+ visitedFiles.add(normalizedPath);
145
+ const imported = this.loadYamlFile(importPath);
146
+ if (Array.isArray(imported)) {
147
+ const resolvedArray = this.resolveArrayImports(imported, path.dirname(importPath), visitedFiles);
148
+ for (const resolvedItem of resolvedArray) {
149
+ result.push(resolvedItem);
150
+ }
151
+ } else if (this.isPlainObject(imported)) {
152
+ result.push(imported);
153
+ this.resolveImports(imported, path.dirname(importPath), visitedFiles);
154
+ } else {
155
+ throw new TypeError(
156
+ `Import file ${importPath} must contain either an object or an array. Got: ${typeof imported}`
157
+ );
158
+ }
159
+ visitedFiles.delete(normalizedPath);
160
+ } else {
161
+ result.push(item);
162
+ if (this.isPlainObject(item)) {
163
+ this.resolveImports(item, baseDir, visitedFiles);
164
+ }
165
+ }
166
+ }
167
+ return result;
168
+ }
169
+ /**
170
+ * Load YAML file with error handling
171
+ */
172
+ loadYamlFile(filePath) {
173
+ if (!fs.existsSync(filePath)) {
174
+ throw new Error(`Import file not found: ${filePath}`);
175
+ }
176
+ try {
177
+ const content = fs.readFileSync(filePath, "utf8");
178
+ const parsed = yaml.parse(content);
179
+ if (parsed === null || parsed === void 0) {
180
+ throw new TypeError(`Import file ${filePath} is empty`);
181
+ }
182
+ return parsed;
183
+ } catch (error) {
184
+ if (error instanceof Error) {
185
+ throw new TypeError(`Failed to parse YAML file ${filePath}: ${error.message}`);
186
+ }
187
+ throw error;
188
+ }
189
+ }
190
+ /**
191
+ * Recursively resolve all $ref in an object/array structure
192
+ * Handles both object references { $ref: "path" } and string references "$ref:path"
193
+ *
194
+ * @param current - Current node being processed (starts with root config, then recurses into children)
195
+ * @param rootConfig - Root config object (constant reference for resolving paths like "imported.integrations.xxx")
196
+ * @param path - Current path in config tree (for error messages, e.g., "resources.assistants[0].toolkits")
197
+ */
198
+ resolveReferencesRecursive(current, rootConfig, path11 = "") {
199
+ if (!current || typeof current !== "object") {
200
+ return;
201
+ }
202
+ if (Array.isArray(current)) {
203
+ this.resolveArrayReferences(current, rootConfig, path11);
204
+ return;
205
+ }
206
+ if ("$ref" in current && typeof current.$ref === "string") {
207
+ this.resolveObjectReference(current, rootConfig, path11);
208
+ return;
209
+ }
210
+ this.resolveObjectProperties(current, rootConfig, path11);
211
+ }
212
+ /**
213
+ * Resolve $ref items in arrays and flatten if they point to arrays
214
+ * Example: [{ $ref: "context_definitions.repos" }] where repos is [item1, item2]
215
+ * becomes [item1, item2]
216
+ */
217
+ resolveArrayReferences(arr, rootConfig, path11) {
218
+ const result = arr.flatMap((item, i) => {
219
+ const contextPath = `${path11}[${i}]`;
220
+ if (!this.isRefObject(item) || item.$ref.startsWith("#")) {
221
+ this.resolveReferencesRecursive(item, rootConfig, contextPath);
222
+ return [item];
223
+ }
224
+ const resolved = this.resolveReference(rootConfig, item.$ref, contextPath);
225
+ if (Array.isArray(resolved)) {
226
+ return structuredClone(resolved);
227
+ }
228
+ this.resolveObjectReference(item, rootConfig, contextPath);
229
+ return [item];
230
+ });
231
+ arr.splice(0, arr.length, ...result);
232
+ }
233
+ /**
234
+ * Resolve object reference: { $ref: "path" }
235
+ * Replaces object with resolved data (in-place mutation)
236
+ */
237
+ resolveObjectReference(current, rootConfig, path11) {
238
+ const refPath = current.$ref;
239
+ const contextPath = path11 || "root";
240
+ if (refPath.startsWith("#")) {
241
+ return;
242
+ }
243
+ const resolved = this.resolveReference(rootConfig, refPath, contextPath);
244
+ const filteredResolved = Object.fromEntries(Object.entries(resolved).filter(([, value]) => value !== void 0));
245
+ for (const key of Object.keys(current)) {
246
+ delete current[key];
247
+ }
248
+ Object.assign(current, filteredResolved);
249
+ this.resolveReferencesRecursive(current, rootConfig, path11);
250
+ }
251
+ /**
252
+ * Resolve object properties recursively
253
+ * Handles both nested objects and "$ref:path" string references
254
+ */
255
+ resolveObjectProperties(current, rootConfig, path11) {
256
+ for (const [key, value] of Object.entries(current)) {
257
+ if (typeof value === "string" && value.startsWith("$ref:")) {
258
+ const refPath = value.slice(5);
259
+ if (refPath.startsWith("#")) {
260
+ continue;
261
+ }
262
+ const contextPath = path11 ? `${path11}.${key}` : key;
263
+ const resolved = this.resolveReference(rootConfig, refPath, contextPath);
264
+ current[key] = resolved;
265
+ } else {
266
+ this.resolveReferencesRecursive(value, rootConfig, path11 ? `${path11}.${key}` : key);
267
+ }
268
+ }
269
+ }
270
+ /**
271
+ * Validate that datasource setting_id fields are strings (not objects)
272
+ * For datasources, setting_id must reference a string UUID, not an integration object
273
+ * Users should use .id suffix: $ref:imported.integrations.git_conn.id
274
+ */
275
+ validateDatasourceIntegrationReferences(config) {
276
+ if (!config.resources.datasources) {
277
+ return;
278
+ }
279
+ for (const datasource of config.resources.datasources) {
280
+ if (datasource.setting_id && typeof datasource.setting_id !== "string") {
281
+ throw new TypeError(
282
+ `Datasource "${datasource.name}" setting_id must point to a string UUID value, but resolved to an object. Add ".id" suffix to reference the integration ID. Example: setting_id: $ref:imported.integrations.git_conn.id`
283
+ );
284
+ }
285
+ }
286
+ }
287
+ /**
288
+ * Resolve a reference path like "imported.integrations.jira_main" or "tool_definitions.jira_tool"
289
+ * Supports nested paths like "imported.integrations.jira_main.id" to access specific fields
290
+ * Returns the referenced object or value from config
291
+ *
292
+ * Special handling for arrays:
293
+ * - imported.integrations (array): searches by 'alias' field
294
+ * - imported.assistants (array): searches by 'name' field
295
+ * - imported.datasources (array): searches by 'name' field
296
+ */
297
+ resolveReference(config, ref, context) {
298
+ const parts = ref.split(".");
299
+ if (parts.length < 2) {
300
+ throw new Error(
301
+ `Invalid $ref format: "${ref}". Expected format: "imported.integrations.name" or "tool_definitions.name". You can also use nested paths like "imported.integrations.name.id". Referenced in ${context}.`
302
+ );
303
+ }
304
+ let current = config;
305
+ const pathParts = [];
306
+ for (let i = 0; i < parts.length; i++) {
307
+ const part = parts[i];
308
+ pathParts.push(part);
309
+ if (!current || typeof current !== "object" || !(part in current)) {
310
+ throw new Error(
311
+ `Reference path "${ref}" not found: "${pathParts.join(".")}" does not exist. Referenced in ${context}.`
312
+ );
313
+ }
314
+ current = current[part];
315
+ const hasMorePathSegments = i + 1 < parts.length;
316
+ if (Array.isArray(current) && hasMorePathSegments) {
317
+ const nextPathSegment = parts[i + 1];
318
+ const searchField = part === "integrations" ? "alias" : "name";
319
+ const foundItem = current.find(
320
+ (item) => typeof item === "object" && item !== null && item[searchField] === nextPathSegment
321
+ );
322
+ if (!foundItem) {
323
+ throw new Error(
324
+ `Reference path "${ref}" not found: no item with ${searchField}="${nextPathSegment}" in "${pathParts.join(".")}". Referenced in ${context}.`
325
+ );
326
+ }
327
+ current = foundItem;
328
+ pathParts.push(nextPathSegment);
329
+ i++;
330
+ }
331
+ }
332
+ return current;
333
+ }
334
+ /**
335
+ * Apply datasource defaults based on $ref or type
336
+ * Priority: $ref (explicit) > type (fallback)
337
+ */
338
+ applyDatasourceDefaults(config) {
339
+ if (!config.datasource_defaults || !config.resources.datasources) {
340
+ return;
341
+ }
342
+ config.resources.datasources = config.resources.datasources.map((datasource) => {
343
+ let defaults = null;
344
+ if (datasource.$ref) {
345
+ const refPath = datasource.$ref.startsWith("$ref:") ? datasource.$ref.slice(5) : datasource.$ref;
346
+ defaults = this.resolveReference(config, refPath, `datasource "${datasource.name}".$ref`);
347
+ if (!defaults || typeof defaults !== "object") {
348
+ throw new Error(
349
+ `Invalid $ref in datasource "${datasource.name}": "${refPath}" does not point to a valid datasource defaults object`
350
+ );
351
+ }
352
+ } else if (datasource.type) {
353
+ defaults = config.datasource_defaults[datasource.type];
354
+ }
355
+ if (!defaults) {
356
+ return datasource;
357
+ }
358
+ const { $ref: _ref, ...datasourceWithoutRef } = datasource;
359
+ return {
360
+ ...defaults,
361
+ ...datasourceWithoutRef
362
+ };
363
+ });
364
+ }
365
+ /**
366
+ * Type guard to check if value is a plain object (not array, not null)
367
+ */
368
+ isPlainObject(value) {
369
+ return typeof value === "object" && value !== null && !Array.isArray(value);
370
+ }
371
+ isRefObject(item) {
372
+ return this.isPlainObject(item) && "$ref" in item && typeof item.$ref === "string";
373
+ }
374
+ /**
375
+ * Substitute environment variables in configuration
376
+ * Supports syntax:
377
+ * - ${VAR_NAME} - required variable (throws if not set)
378
+ * - ${VAR_NAME:-default} - optional with default value (shell-style)
379
+ * - ${VAR_NAME:default} - optional with default value (simplified)
380
+ * - ${VAR_NAME:-} or ${VAR_NAME:} - optional with empty string default
381
+ */
382
+ substituteEnvVars(obj) {
383
+ for (const key in obj) {
384
+ if (typeof obj[key] === "string") {
385
+ obj[key] = obj[key].replaceAll(
386
+ /\$\{([^:}]+)(?::-?([^}]*))?\}/g,
387
+ (match, varName, defaultValue) => {
388
+ const value = process.env[varName];
389
+ if (value === void 0) {
390
+ if (defaultValue !== void 0) {
391
+ return defaultValue;
392
+ }
393
+ throw new Error(`Environment variable ${varName} is not set`);
394
+ }
395
+ return value;
396
+ }
397
+ );
398
+ } else if (this.isPlainObject(obj[key])) {
399
+ this.substituteEnvVars(obj[key]);
400
+ }
401
+ }
402
+ }
403
+ };
404
+
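// Illustrative usage of the loader above (a sketch, not part of the published bundle;
// whether CodemieConfigLoader is re-exported from this entry point is not shown in this excerpt).
// codemie.yaml may mix inline values, ${ENV_VAR} / ${ENV_VAR:-default} placeholders,
// $import file directives and $ref paths such as "imported.integrations.git_conn.id".
const appConfig = {
  rootDir: process.cwd(),              // directory containing codemie.yaml
  codemieConfig: 'codemie.yaml',       // main configuration file
  codemieState: '.codemie/state.json', // example path; used later by StateManager
};
const loader = new CodemieConfigLoader(appConfig);
const config = loader.loadConfig(); // imports, env vars, defaults and $refs resolved here
const { valid, errors } = loader.validateFiles(config);
if (!valid) throw new Error(`Invalid configuration:\n${errors.join('\n')}`);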
405
+ // src/lib/logger.ts
406
+ var LogLevel = /* @__PURE__ */ ((LogLevel2) => {
407
+ LogLevel2[LogLevel2["DEBUG"] = 0] = "DEBUG";
408
+ LogLevel2[LogLevel2["INFO"] = 1] = "INFO";
409
+ LogLevel2[LogLevel2["WARN"] = 2] = "WARN";
410
+ LogLevel2[LogLevel2["ERROR"] = 3] = "ERROR";
411
+ LogLevel2[LogLevel2["SILENT"] = 4] = "SILENT";
412
+ return LogLevel2;
413
+ })(LogLevel || {});
414
+ var Logger = class _Logger {
415
+ static instance;
416
+ level = 1 /* INFO */;
417
+ constructor() {
418
+ }
419
+ static getInstance() {
420
+ if (!_Logger.instance) {
421
+ _Logger.instance = new _Logger();
422
+ }
423
+ return _Logger.instance;
424
+ }
425
+ setLevel(level) {
426
+ this.level = level;
427
+ }
428
+ debug(message, ...args) {
429
+ if (this.level <= 0 /* DEBUG */) {
430
+ console.debug(message, ...args);
431
+ }
432
+ }
433
+ info(message, ...args) {
434
+ if (this.level <= 1 /* INFO */) {
435
+ console.log(message, ...args);
436
+ }
437
+ }
438
+ warn(message, ...args) {
439
+ if (this.level <= 2 /* WARN */) {
440
+ console.warn(message, ...args);
441
+ }
442
+ }
443
+ error(message, ...args) {
444
+ if (this.level <= 3 /* ERROR */) {
445
+ console.error(message, ...args);
446
+ }
447
+ }
448
+ };
449
+ var logger = Logger.getInstance();
450
+
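// Sketch of the logger above: a process-wide singleton gated by a numeric level,
// DEBUG(0) < INFO(1) < WARN(2) < ERROR(3) < SILENT(4). Messages whose severity is
// below the configured level are dropped.
logger.setLevel(LogLevel.DEBUG);  // default is INFO
logger.debug('resolved config from', 'codemie.yaml');
logger.setLevel(LogLevel.SILENT); // mute everything, e.g. in tests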
451
+ // src/lib/checksumUtils.ts
452
+ var DEFAULT_LLM_MODEL = "gpt-4";
453
+ function normalizeIntegrationSettings(settings) {
454
+ if (!settings || typeof settings !== "object") {
455
+ return settings;
456
+ }
457
+ if ("$ref" in settings && typeof settings.$ref === "string") {
458
+ return { $ref: settings.$ref };
459
+ }
460
+ if ("alias" in settings && typeof settings.alias === "string") {
461
+ return { $ref: `imported.integrations.${settings.alias}` };
462
+ }
463
+ return settings;
464
+ }
465
+ function normalizeTool(tool) {
466
+ return {
467
+ name: tool.name,
468
+ label: tool.label,
469
+ settings_config: tool.settings_config,
470
+ user_description: tool.user_description,
471
+ settings: normalizeIntegrationSettings(tool.settings)
472
+ };
473
+ }
474
+ function normalizeToolkits(toolkits) {
475
+ if (!toolkits) {
476
+ return [];
477
+ }
478
+ return toolkits.map(({ toolkit, tools, label, settings_config, is_external, settings }) => ({
479
+ toolkit,
480
+ label,
481
+ settings_config,
482
+ is_external,
483
+ tools: tools?.map((tool) => normalizeTool(tool)),
484
+ settings: normalizeIntegrationSettings(settings)
485
+ }));
486
+ }
487
+ function normalizeMcpServers(mcpServers) {
488
+ if (!mcpServers) {
489
+ return [];
490
+ }
491
+ return mcpServers.map((mcp) => ({
492
+ ...mcp,
493
+ settings: normalizeIntegrationSettings(mcp.settings),
494
+ mcp_connect_auth_token: normalizeIntegrationSettings(mcp.mcp_connect_auth_token)
495
+ }));
496
+ }
497
+ function calculateChecksum(content) {
498
+ if (typeof content !== "string") {
499
+ throw new TypeError(`calculateChecksum expects string, got ${typeof content}`);
500
+ }
501
+ if (content.length === 0) {
502
+ logger.warn("\u26A0\uFE0F Calculating checksum of empty string");
503
+ }
504
+ return crypto.createHash("sha256").update(content, "utf8").digest("hex");
505
+ }
506
+ function normalizeAssistantConfig(assistant, buildConfig = null) {
507
+ return {
508
+ description: assistant.description || "",
509
+ model: assistant.model || DEFAULT_LLM_MODEL,
510
+ temperature: assistant.temperature,
511
+ top_p: assistant.top_p,
512
+ shared: assistant.shared,
513
+ is_react: assistant.is_react,
514
+ is_global: assistant.is_global,
515
+ icon_url: assistant.icon_url,
516
+ toolkits: normalizeToolkits(assistant.toolkits),
517
+ context: assistant.context || [],
518
+ mcp_servers: normalizeMcpServers(assistant.mcp_servers),
519
+ assistant_ids: assistant.assistant_ids || [],
520
+ sub_assistants: assistant.sub_assistants || [],
521
+ conversation_starters: assistant.conversation_starters || [],
522
+ buildConfig
523
+ };
524
+ }
525
+ function calculateAssistantConfigChecksum(assistant, buildConfig = null) {
526
+ const normalized = normalizeAssistantConfig(assistant, buildConfig);
527
+ return calculateChecksum(JSON.stringify(normalized));
528
+ }
529
+
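// Illustrative property of the checksum helpers above: assistants are normalized before
// hashing (toolkit/MCP settings collapse to a stable { $ref } form, the model falls back
// to DEFAULT_LLM_MODEL), so cosmetic differences in how an integration was referenced do
// not change the SHA-256. The objects below are placeholders.
const a = { name: 'helper', description: 'demo', toolkits: [{ toolkit: 'git', tools: [], settings: { alias: 'git_conn' } }] };
const b = { name: 'helper', description: 'demo', toolkits: [{ toolkit: 'git', tools: [], settings: { $ref: 'imported.integrations.git_conn' } }] };
calculateAssistantConfigChecksum(a) === calculateAssistantConfigChecksum(b); // true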
530
+ // src/lib/codemieConfigChecksums.ts
531
+ function createExcludeSet(keys) {
532
+ return new Set(keys);
533
+ }
534
+ var DATASOURCE_EXCLUDED_FIELDS = createExcludeSet(["force_reindex"]);
535
+ var WORKFLOW_EXCLUDED_FIELDS = createExcludeSet(["definition"]);
536
+ function buildChecksumObject(src, excluded) {
537
+ const out = {};
538
+ const keys = Object.keys(src).sort();
539
+ for (const key of keys) {
540
+ if (excluded.has(key)) {
541
+ continue;
542
+ }
543
+ const value = src[key];
544
+ if (value !== void 0) {
545
+ out[key] = value;
546
+ }
547
+ }
548
+ return out;
549
+ }
550
+ function calculateDatasourceConfigChecksum(datasource) {
551
+ const filtered = buildChecksumObject(datasource, DATASOURCE_EXCLUDED_FIELDS);
552
+ return calculateChecksum(JSON.stringify(filtered));
553
+ }
554
+ function calculateWorkflowConfigChecksum(workflow) {
555
+ const filtered = buildChecksumObject(workflow, WORKFLOW_EXCLUDED_FIELDS);
556
+ return calculateChecksum(JSON.stringify(filtered));
557
+ }
558
+
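// Illustrative consequence of the excluded-field sets above: force_reindex is not part of
// the datasource checksum, so toggling it alone is not treated as a config change (deploy
// handles it separately as a full_reindex flag). Placeholder datasource:
const ds = { name: 'docs-repo', type: 'code', link: 'https://example.com/org/docs.git' };
calculateDatasourceConfigChecksum(ds) === calculateDatasourceConfigChecksum({ ...ds, force_reindex: true }); // true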
559
+ // src/lib/stateManager.ts
560
+ var StateManager = class {
561
+ statePath;
562
+ constructor(appConfig) {
563
+ const { rootDir, codemieState } = appConfig;
564
+ this.statePath = path.join(rootDir, codemieState);
565
+ }
566
+ /**
567
+ * Load state file
568
+ */
569
+ loadState() {
570
+ if (!fs.existsSync(this.statePath)) {
571
+ return this.createEmptyState();
572
+ }
573
+ const content = fs.readFileSync(this.statePath, "utf8");
574
+ const state = JSON.parse(content);
575
+ if (!state.resources) {
576
+ state.resources = {
577
+ assistants: {},
578
+ datasources: {},
579
+ workflows: {}
580
+ };
581
+ }
582
+ if (!state.resources.assistants) {
583
+ state.resources.assistants = {};
584
+ }
585
+ if (!state.resources.datasources) {
586
+ state.resources.datasources = {};
587
+ }
588
+ if (!state.resources.workflows) {
589
+ state.resources.workflows = {};
590
+ }
591
+ return state;
592
+ }
593
+ /**
594
+ * Save state file
595
+ */
596
+ saveState(state) {
597
+ const dir = path.dirname(this.statePath);
598
+ if (!fs.existsSync(dir)) {
599
+ fs.mkdirSync(dir, { recursive: true });
600
+ }
601
+ state.lastSync = (/* @__PURE__ */ new Date()).toISOString();
602
+ fs.writeFileSync(this.statePath, JSON.stringify(state, null, 2));
603
+ }
604
+ /**
605
+ * Create empty state structure
606
+ */
607
+ createEmptyState() {
608
+ return {
609
+ version: "1.0",
610
+ project: "",
611
+ lastSync: null,
612
+ resources: {
613
+ assistants: {},
614
+ datasources: {},
615
+ workflows: {}
616
+ }
617
+ };
618
+ }
619
+ /**
620
+ * Update assistant state (keyed by NAME)
621
+ * @param name
622
+ * @param id
623
+ * @param promptContent
624
+ * @param assistantResource - The assistant resource object (used to calculate consistent checksum)
625
+ * @param buildConfig - Optional build-time configuration
626
+ */
627
+ updateAssistantState(name, id, promptContent, assistantResource, buildConfig = null) {
628
+ const state = this.loadState();
629
+ state.resources.assistants[name] = {
630
+ id,
631
+ lastDeployed: (/* @__PURE__ */ new Date()).toISOString(),
632
+ promptChecksum: calculateChecksum(promptContent),
633
+ configChecksum: calculateAssistantConfigChecksum(assistantResource, buildConfig)
634
+ };
635
+ this.saveState(state);
636
+ }
637
+ /**
638
+ * Get assistant state by NAME
639
+ */
640
+ getAssistantState(name) {
641
+ const state = this.loadState();
642
+ return state.resources.assistants[name];
643
+ }
644
+ /**
645
+ * Delete assistant state by NAME
646
+ */
647
+ deleteAssistantState(name) {
648
+ const state = this.loadState();
649
+ delete state.resources.assistants[name];
650
+ this.saveState(state);
651
+ }
652
+ /**
653
+ * Update datasource state (keyed by NAME)
654
+ * @param datasourceResource - The datasource resource object (used to calculate consistent checksum)
655
+ */
656
+ updateDatasourceState(name, id, datasourceResource) {
657
+ const state = this.loadState();
658
+ state.resources.datasources[name] = {
659
+ id,
660
+ lastDeployed: (/* @__PURE__ */ new Date()).toISOString(),
661
+ configChecksum: calculateDatasourceConfigChecksum(datasourceResource)
662
+ };
663
+ this.saveState(state);
664
+ }
665
+ /**
666
+ * Get datasource state by NAME
667
+ */
668
+ getDatasourceState(name) {
669
+ const state = this.loadState();
670
+ return state.resources.datasources[name];
671
+ }
672
+ /**
673
+ * Delete datasource state by NAME
674
+ */
675
+ deleteDatasourceState(name) {
676
+ const state = this.loadState();
677
+ delete state.resources.datasources[name];
678
+ this.saveState(state);
679
+ }
680
+ /**
681
+ * Update workflow state (keyed by NAME)
682
+ */
683
+ updateWorkflowState(name, id, workflowYamlChecksum, configChecksum) {
684
+ const state = this.loadState();
685
+ state.resources.workflows[name] = {
686
+ id,
687
+ lastDeployed: (/* @__PURE__ */ new Date()).toISOString(),
688
+ workflowYamlChecksum,
689
+ configChecksum
690
+ };
691
+ this.saveState(state);
692
+ }
693
+ /**
694
+ * Get workflow state by NAME
695
+ */
696
+ getWorkflowState(name) {
697
+ const state = this.loadState();
698
+ return state.resources.workflows[name];
699
+ }
700
+ /**
701
+ * Delete workflow state by NAME
702
+ */
703
+ deleteWorkflowState(name) {
704
+ const state = this.loadState();
705
+ delete state.resources.workflows[name];
706
+ this.saveState(state);
707
+ }
708
+ /**
709
+ * Get all managed resources (for cleanup/destroy)
710
+ * Returns: { assistants: [name1, name2], datasources: [name1], workflows: [name1] }
711
+ */
712
+ getAllManagedResources() {
713
+ const state = this.loadState();
714
+ return {
715
+ assistants: Object.keys(state.resources.assistants),
716
+ datasources: Object.keys(state.resources.datasources),
717
+ workflows: Object.keys(state.resources.workflows)
718
+ };
719
+ }
720
+ /**
721
+ * Check if a resource is managed by IaC (exists in state.json)
722
+ * @param type Resource type ('assistant', 'datasource', 'workflow')
723
+ * @param name Resource name
724
+ */
725
+ isManagedResource(type, name) {
726
+ const state = this.loadState();
727
+ switch (type) {
728
+ case "assistant": {
729
+ return name in state.resources.assistants;
730
+ }
731
+ case "datasource": {
732
+ return name in state.resources.datasources;
733
+ }
734
+ case "workflow": {
735
+ return name in state.resources.workflows;
736
+ }
737
+ default: {
738
+ return false;
739
+ }
740
+ }
741
+ }
742
+ /**
743
+ * Get ID by name for a specific resource type
744
+ */
745
+ getIdByName(type, name) {
746
+ const state = this.loadState();
747
+ switch (type) {
748
+ case "assistant": {
749
+ return state.resources.assistants[name]?.id;
750
+ }
751
+ case "datasource": {
752
+ return state.resources.datasources[name]?.id;
753
+ }
754
+ case "workflow": {
755
+ return state.resources.workflows[name]?.id;
756
+ }
757
+ default: {
758
+ return void 0;
759
+ }
760
+ }
761
+ }
762
+ };
763
+ async function createClient(config) {
764
+ const client = new CodeMieClient({
765
+ auth_server_url: config.environment.auth_server_url,
766
+ auth_realm_name: config.environment.auth_realm_name,
767
+ codemie_api_domain: config.environment.codemie_api_url,
768
+ auth_client_id: config.environment.client_id,
769
+ auth_client_secret: config.environment.client_secret,
770
+ username: config.environment.username,
771
+ password: config.environment.password,
772
+ verify_ssl: true
773
+ });
774
+ await client.initialize();
775
+ return client;
776
+ }
777
+
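// Shape of the state file managed by StateManager above (the path comes from
// appConfig.codemieState; all values below are placeholders):
//   {
//     "version": "1.0",
//     "project": "",
//     "lastSync": "2024-01-01T00:00:00.000Z",
//     "resources": {
//       "assistants":  { "helper": { "id": "…", "lastDeployed": "…", "promptChecksum": "…", "configChecksum": "…" } },
//       "datasources": {},
//       "workflows":   {}
//     }
//   }
// createClient expects the environment block of the loaded config; the env var names here
// are placeholders for whatever codemie.yaml maps via ${...} substitution:
const client = await createClient({
  environment: {
    auth_server_url: process.env.AUTH_SERVER_URL,
    auth_realm_name: process.env.AUTH_REALM_NAME,
    codemie_api_url: process.env.CODEMIE_API_URL,
    client_id: process.env.CLIENT_ID,
    client_secret: process.env.CLIENT_SECRET,
    username: process.env.CODEMIE_USERNAME,
    password: process.env.CODEMIE_PASSWORD,
  },
});
const stateManager = new StateManager(appConfig); // appConfig as in the loader sketch above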
778
+ // src/lib/cleanupManager.ts
779
+ var CleanupManager = class {
780
+ constructor(client, stateManager) {
781
+ this.client = client;
782
+ this.stateManager = stateManager;
783
+ }
784
+ /**
785
+ * Check if an error indicates that a resource was not found on the platform
786
+ * Handles both proper 404 responses
787
+ */
788
+ isNotFoundError(error) {
789
+ const errorMessage = error instanceof Error ? error.message : String(error);
790
+ if (errorMessage.includes("404") || errorMessage.includes("not found") || errorMessage.includes("Not Found")) {
791
+ return true;
792
+ }
793
+ return false;
794
+ }
795
+ /**
796
+ * Delete a resource with graceful handling for "not found" errors
797
+ * Returns true if deleted or not found, false if other error occurred
798
+ */
799
+ async deleteResourceSafely(resourceType, name, id, deleteFn) {
800
+ try {
801
+ await deleteFn();
802
+ logger.info(` \u{1F5D1}\uFE0F Deleted orphaned ${resourceType}: ${name} (${id.slice(0, 8)}...)`);
803
+ return { success: true, notFound: false };
804
+ } catch (error) {
805
+ if (this.isNotFoundError(error)) {
806
+ logger.info(
807
+ ` \u26A0\uFE0F ${resourceType.charAt(0).toUpperCase() + resourceType.slice(1)} ${name} not found on platform (already deleted) - removing from state`
808
+ );
809
+ return { success: true, notFound: true };
810
+ }
811
+ throw error;
812
+ }
813
+ }
814
+ /**
815
+ * Find orphaned resources (in state but not in config)
816
+ * These are resources that were managed by IaC but removed from codemie.yaml
817
+ * Returns resource NAMES, not IDs
818
+ */
819
+ findOrphanedResources(config) {
820
+ const managedResources = this.stateManager.getAllManagedResources();
821
+ const configAssistantNames = new Set((config.resources.assistants || []).map(({ name }) => name));
822
+ const configDatasourceNames = new Set((config.resources.datasources || []).map(({ name }) => name));
823
+ const configWorkflowNames = new Set((config.resources.workflows || []).map(({ name }) => name));
824
+ return {
825
+ assistants: managedResources.assistants.filter((name) => !configAssistantNames.has(name)),
826
+ datasources: managedResources.datasources.filter((name) => !configDatasourceNames.has(name)),
827
+ workflows: managedResources.workflows.filter((name) => !configWorkflowNames.has(name))
828
+ };
829
+ }
830
+ /**
831
+ * Delete orphaned resources from platform
832
+ * SAFETY: Only deletes resources that are in state.json (managed by IaC)
833
+ * @param orphaned Object with resource NAMES (not IDs)
834
+ */
835
+ // eslint-disable-next-line max-lines-per-function
836
+ async deleteOrphanedResources(orphaned) {
837
+ const result = {
838
+ deleted: {
839
+ assistants: [],
840
+ datasources: [],
841
+ workflows: []
842
+ },
843
+ errors: []
844
+ };
845
+ for (const name of orphaned.assistants) {
846
+ try {
847
+ if (!this.stateManager.isManagedResource("assistant", name)) {
848
+ logger.info(` \u26A0\uFE0F Skipping ${name} - not in state (safety check)`);
849
+ continue;
850
+ }
851
+ const id = this.stateManager.getIdByName("assistant", name);
852
+ if (!id) {
853
+ logger.info(` \u26A0\uFE0F Skipping ${name} - no ID in state`);
854
+ continue;
855
+ }
856
+ await this.deleteResourceSafely("assistant", name, id, () => this.client.assistants.delete(id));
857
+ this.stateManager.deleteAssistantState(name);
858
+ result.deleted.assistants.push(name);
859
+ } catch (error) {
860
+ result.errors.push({
861
+ type: "assistant",
862
+ name,
863
+ error: error instanceof Error ? error.message : String(error)
864
+ });
865
+ logger.error(
866
+ ` \u274C Failed to delete assistant ${name}: ${error instanceof Error ? error.message : String(error)}`
867
+ );
868
+ }
869
+ }
870
+ for (const name of orphaned.datasources) {
871
+ try {
872
+ if (!this.stateManager.isManagedResource("datasource", name)) {
873
+ logger.info(` \u26A0\uFE0F Skipping ${name} - not in state (safety check)`);
874
+ continue;
875
+ }
876
+ const id = this.stateManager.getIdByName("datasource", name);
877
+ if (!id) {
878
+ logger.info(` \u26A0\uFE0F Skipping ${name} - no ID in state`);
879
+ continue;
880
+ }
881
+ await this.deleteResourceSafely("datasource", name, id, () => this.client.datasources.delete(id));
882
+ this.stateManager.deleteDatasourceState(name);
883
+ result.deleted.datasources.push(name);
884
+ } catch (error) {
885
+ result.errors.push({
886
+ type: "datasource",
887
+ name,
888
+ error: error instanceof Error ? error.message : String(error)
889
+ });
890
+ logger.error(
891
+ ` \u274C Failed to delete datasource ${name}: ${error instanceof Error ? error.message : String(error)}`
892
+ );
893
+ }
894
+ }
895
+ for (const name of orphaned.workflows) {
896
+ try {
897
+ if (!this.stateManager.isManagedResource("workflow", name)) {
898
+ logger.info(` \u26A0\uFE0F Skipping ${name} - not in state (safety check)`);
899
+ continue;
900
+ }
901
+ const id = this.stateManager.getIdByName("workflow", name);
902
+ if (!id) {
903
+ logger.info(` \u26A0\uFE0F Skipping ${name} - no ID in state`);
904
+ continue;
905
+ }
906
+ await this.deleteResourceSafely("workflow", name, id, () => this.client.workflows.delete(id));
907
+ this.stateManager.deleteWorkflowState(name);
908
+ result.deleted.workflows.push(name);
909
+ } catch (error) {
910
+ result.errors.push({
911
+ type: "workflow",
912
+ name,
913
+ error: error instanceof Error ? error.message : String(error)
914
+ });
915
+ logger.error(
916
+ ` \u274C Failed to delete workflow ${name}: ${error instanceof Error ? error.message : String(error)}`
917
+ );
918
+ }
919
+ }
920
+ return result;
921
+ }
922
+ /**
923
+ * Get summary of orphaned resources
924
+ */
925
+ getOrphanedSummary(orphaned) {
926
+ const total = orphaned.assistants.length + orphaned.datasources.length + orphaned.workflows.length;
927
+ if (total === 0) {
928
+ return "No orphaned resources found";
929
+ }
930
+ const parts = [];
931
+ if (orphaned.assistants.length > 0) {
932
+ parts.push(`${orphaned.assistants.length} assistant(s)`);
933
+ }
934
+ if (orphaned.datasources.length > 0) {
935
+ parts.push(`${orphaned.datasources.length} datasource(s)`);
936
+ }
937
+ if (orphaned.workflows.length > 0) {
938
+ parts.push(`${orphaned.workflows.length} workflow(s)`);
939
+ }
940
+ return `Found ${total} orphaned resource(s): ${parts.join(", ")}`;
941
+ }
942
+ };
943
+
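// Illustrative cleanup flow for the manager above: diff state.json against the current
// config, log a summary, then delete only orphans that state.json marks as IaC-managed.
// client, stateManager and config are the objects from the sketches above.
const cleanup = new CleanupManager(client, stateManager);
const orphaned = cleanup.findOrphanedResources(config);
logger.info(cleanup.getOrphanedSummary(orphaned));
const { deleted, errors } = await cleanup.deleteOrphanedResources(orphaned);
logger.info(`Deleted ${deleted.assistants.length + deleted.datasources.length + deleted.workflows.length} resource(s), ${errors.length} error(s)`);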
944
+ // src/lib/paginationUtils.ts
945
+ async function findResourceByName(listFn, name, resourceType) {
946
+ let page = 0;
947
+ const perPage = 100;
948
+ let totalChecked = 0;
949
+ while (true) {
950
+ const resources = await listFn({ page, per_page: perPage });
951
+ totalChecked += resources.length;
952
+ const found = resources.find((r) => r.name === name);
953
+ if (found && found.id) {
954
+ return found;
955
+ }
956
+ if (resources.length < perPage) {
957
+ logger.warn(
958
+ ` \u26A0\uFE0F ${resourceType} "${name}" not found after checking ${totalChecked} resources across ${page + 1} page(s)`
959
+ );
960
+ return null;
961
+ }
962
+ page++;
963
+ if (page > 1e3) {
964
+ logger.warn(` \u26A0\uFE0F Stopped pagination after 1000 pages (${totalChecked} resources checked)`);
965
+ return null;
966
+ }
967
+ }
968
+ }
969
+ async function findDatasourceByName(client, name) {
970
+ const datasource = await findResourceByName((params) => client.datasources.list(params), name, "datasource");
971
+ return datasource?.id || null;
972
+ }
973
+ async function findAssistantByName(client, name) {
974
+ const assistant = await findResourceByName((params) => client.assistants.list(params), name, "assistant");
975
+ return assistant?.id || null;
976
+ }
977
+
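// The finders above page through the list endpoints 100 items at a time until the name
// matches, the last (short) page is reached, or the 1000-page safety cap is hit.
// Placeholder names:
const datasourceId = await findDatasourceByName(client, 'docs-repo'); // string id or null
const assistantId = await findAssistantByName(client, 'helper');      // string id or null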
978
+ // src/lib/assistantHelpers.ts
979
+ async function createAssistantAndGetId(client, params, slug) {
980
+ await client.assistants.create(params);
981
+ if (slug) {
982
+ const created = await client.assistants.getBySlug(slug);
983
+ if (!created?.id) {
984
+ throw new Error(`Assistant created but not found by slug: ${slug}`);
985
+ }
986
+ return created.id;
987
+ }
988
+ const assistantId = await findAssistantByName(client, params.name);
989
+ if (!assistantId) {
990
+ throw new Error(`Assistant created but not found by name: ${params.name}`);
991
+ }
992
+ return assistantId;
993
+ }
994
+
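// The helper above avoids relying on create()'s return value: it creates the assistant,
// then resolves the id by slug when one is configured, otherwise by a paged name lookup.
// apiParams is a placeholder for what assistantResourceToCreateParams (below) builds.
const newId = await createAssistantAndGetId(client, apiParams, apiParams.slug);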
995
+ // src/lib/typeGuards.ts
996
+ function hasName(obj) {
997
+ return typeof obj === "object" && obj !== null && "name" in obj;
998
+ }
999
+ function hasSettingId(obj) {
1000
+ return typeof obj === "object" && obj !== null && "setting_id" in obj;
1001
+ }
1002
+ function isResolvedIntegration(obj) {
1003
+ return typeof obj === "object" && obj !== null && "id" in obj && typeof obj.id === "string";
1004
+ }
1005
+
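// The guards above are used when mapping IaC resources to SDK params (see the converters
// below): only settings that already resolved to an object carrying a string id pass
// through; unresolved $ref placeholders are dropped.
isResolvedIntegration({ id: 'abc-123', alias: 'git_conn' });        // true
isResolvedIntegration({ $ref: 'imported.integrations.git_conn' }); // false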
1006
+ // src/lib/converters.ts
1007
+ function validateMcpCommandArgs(s, hasTopLevelCommand) {
1008
+ if (hasTopLevelCommand) {
1009
+ return true;
1010
+ }
1011
+ const config = s.config;
1012
+ const configArgs = config.args;
1013
+ if (!Array.isArray(configArgs)) {
1014
+ return false;
1015
+ }
1016
+ return configArgs.every((arg) => typeof arg === "string");
1017
+ }
1018
+ function isValidMcpServer(server) {
1019
+ if (!server || typeof server !== "object") {
1020
+ return false;
1021
+ }
1022
+ const s = server;
1023
+ if (typeof s.name !== "string" || s.name.length === 0) {
1024
+ return false;
1025
+ }
1026
+ const hasTopLevelCommand = typeof s.command === "string" && s.command.length > 0;
1027
+ const hasConfigCommand = s.config && typeof s.config === "object" && typeof s.config.command === "string" && s.config.command.length > 0;
1028
+ const hasCommand = hasTopLevelCommand || hasConfigCommand;
1029
+ const hasTopLevelUrl = typeof s.mcp_connect_url === "string" && s.mcp_connect_url.length > 0;
1030
+ const hasConfigUrl = s.config && typeof s.config === "object" && typeof s.config.url === "string" && s.config.url.length > 0;
1031
+ const hasUrl = hasTopLevelUrl || hasConfigUrl;
1032
+ const hasSettings = s.settings != null;
1033
+ if (hasCommand && hasUrl) {
1034
+ return false;
1035
+ }
1036
+ if (!hasCommand && !hasUrl && !hasSettings) {
1037
+ return false;
1038
+ }
1039
+ if (hasCommand && !validateMcpCommandArgs(s, hasTopLevelCommand)) {
1040
+ return false;
1041
+ }
1042
+ if (s.description != null && typeof s.description !== "string") {
1043
+ return false;
1044
+ }
1045
+ if (s.enabled !== void 0 && typeof s.enabled !== "boolean") {
1046
+ return false;
1047
+ }
1048
+ return true;
1049
+ }
1050
+ function convertMcpServers(servers) {
1051
+ if (!servers || servers.length === 0) {
1052
+ return void 0;
1053
+ }
1054
+ const validServers = servers.filter((server) => isValidMcpServer(server));
1055
+ if (validServers.length === 0) {
1056
+ return void 0;
1057
+ }
1058
+ return validServers;
1059
+ }
1060
+ function hasValidCodeStructure(code) {
1061
+ if (!code || typeof code !== "object") {
1062
+ return false;
1063
+ }
1064
+ const c = code;
1065
+ return typeof c.link === "string" && c.link.length > 0;
1066
+ }
1067
+ function assistantResponseToResource(assistant) {
1068
+ const slug = assistant.slug || "";
1069
+ const promptFileName = slug || assistant.name.toLowerCase().replaceAll(/\s+/g, "-");
1070
+ const mcpServers = convertMcpServers(assistant.mcp_servers);
1071
+ const nestedAssistants = assistant.nested_assistants;
1072
+ const subAssistants = nestedAssistants?.map((nested) => nested.name);
1073
+ const categoriesRaw = assistant.categories;
1074
+ let categories;
1075
+ if (categoriesRaw && Array.isArray(categoriesRaw)) {
1076
+ if (categoriesRaw.length === 0) {
1077
+ categories = [];
1078
+ } else if (typeof categoriesRaw[0] === "object" && categoriesRaw[0] !== null && "id" in categoriesRaw[0]) {
1079
+ categories = categoriesRaw.map((cat) => cat.id);
1080
+ } else {
1081
+ categories = categoriesRaw;
1082
+ }
1083
+ }
1084
+ return {
1085
+ name: assistant.name,
1086
+ description: assistant.description || "",
1087
+ prompt: `system_prompts/${promptFileName}.prompt.md`,
1088
+ model: assistant.llm_model_type || DEFAULT_LLM_MODEL,
1089
+ ...assistant.temperature !== void 0 && assistant.temperature !== null && { temperature: assistant.temperature },
1090
+ ...assistant.top_p !== void 0 && assistant.top_p !== null && { top_p: assistant.top_p },
1091
+ ...assistant.shared !== void 0 && { shared: assistant.shared },
1092
+ ...assistant.is_react !== void 0 && { is_react: assistant.is_react },
1093
+ ...assistant.is_global !== void 0 && { is_global: assistant.is_global },
1094
+ ...assistant.icon_url && { icon_url: assistant.icon_url },
1095
+ ...assistant.conversation_starters && { conversation_starters: assistant.conversation_starters },
1096
+ ...assistant.toolkits && { toolkits: assistant.toolkits },
1097
+ ...assistant.context && { context: assistant.context },
1098
+ ...mcpServers && { mcp_servers: mcpServers },
1099
+ ...subAssistants && { sub_assistants: subAssistants },
1100
+ ...assistant.prompt_variables && { prompt_variables: assistant.prompt_variables },
1101
+ ...categories && { categories }
1102
+ };
1103
+ }
1104
+ function convertCodeDatasource(datasource, base) {
1105
+ const desc = datasource.description || "";
1106
+ if (hasValidCodeStructure(datasource.code)) {
1107
+ return {
1108
+ ...base,
1109
+ type: "code",
1110
+ description: desc,
1111
+ link: datasource.code.link,
1112
+ branch: datasource.code.branch,
1113
+ index_type: datasource.code.indexType,
1114
+ summarization_model: datasource.code.summarizationModel,
1115
+ files_filter: datasource.code.filesFilter
1116
+ };
1117
+ }
1118
+ return {
1119
+ ...base,
1120
+ type: "code",
1121
+ description: desc,
1122
+ link: void 0
1123
+ };
1124
+ }
1125
+ function datasourceResponseToResource(datasource, integrationAlias) {
1126
+ const settingId = integrationAlias ? `$ref:imported.integrations.${integrationAlias}.id` : datasource.setting_id ?? "";
1127
+ const allowedWithoutSettingId = /* @__PURE__ */ new Set(["knowledge_base_file", "llm_routing_google"]);
1128
+ if (!settingId && !allowedWithoutSettingId.has(datasource.type)) {
1129
+ logger.warn(`\u26A0\uFE0F Datasource "${datasource.name}" is missing setting_id (integration reference)`);
1130
+ }
1131
+ const base = {
1132
+ name: datasource.name,
1133
+ type: datasource.type,
1134
+ embeddings_model: datasource.embeddings_model,
1135
+ setting_id: settingId || void 0,
1136
+ shared_with_project: datasource.shared_with_project
1137
+ };
1138
+ if (datasource.type === DataSourceType.CODE) {
1139
+ return convertCodeDatasource(datasource, base);
1140
+ }
1141
+ if (datasource.type === DataSourceType.CONFLUENCE && datasource.confluence) {
1142
+ return {
1143
+ ...base,
1144
+ type: DataSourceType.CONFLUENCE,
1145
+ description: datasource.description || "",
1146
+ ...datasource.confluence
1147
+ };
1148
+ }
1149
+ if (datasource.type === DataSourceType.JIRA && datasource.jira) {
1150
+ return {
1151
+ ...base,
1152
+ type: DataSourceType.JIRA,
1153
+ description: datasource.description || "",
1154
+ ...datasource.jira
1155
+ };
1156
+ }
1157
+ if (datasource.type === DataSourceType.GOOGLE && datasource.google_doc_link) {
1158
+ return {
1159
+ ...base,
1160
+ type: DataSourceType.GOOGLE,
1161
+ description: datasource.description || "",
1162
+ google_doc: datasource.google_doc_link
1163
+ };
1164
+ }
1165
+ if (datasource.type === DataSourceType.FILE) {
1166
+ return {
1167
+ ...base,
1168
+ type: DataSourceType.FILE,
1169
+ description: datasource.description || ""
1170
+ };
1171
+ }
1172
+ logger.warn(` \u26A0\uFE0F Unknown datasource type '${datasource.type}' - saving with basic fields only`);
1173
+ return {
1174
+ ...base,
1175
+ description: datasource.description || ""
1176
+ };
1177
+ }
1178
+ function workflowResponseToResource(workflow) {
1179
+ return {
1180
+ name: workflow.name,
1181
+ description: workflow.description || "",
1182
+ definition: `workflows/${workflow.name.toLowerCase().replaceAll(/\s+/g, "-")}.yaml`,
1183
+ ...workflow.mode && { mode: workflow.mode },
1184
+ ...workflow.shared !== void 0 && { shared: workflow.shared },
1185
+ ...workflow.icon_url && { icon_url: workflow.icon_url }
1186
+ };
1187
+ }
1188
+ function isValidCodeParams(params) {
1189
+ return typeof params === "object" && params !== null && "link" in params && typeof params.link === "string" && params.link.length > 0;
1190
+ }
1191
+ function datasourceResourceToCreateParams(datasource, projectName) {
1192
+ const {
1193
+ $ref: _ref,
1194
+ force_reindex: _forceReindex,
1195
+ ...sdkFields
1196
+ } = datasource;
1197
+ const params = {
1198
+ ...sdkFields,
1199
+ project_name: projectName,
1200
+ shared_with_project: datasource.shared_with_project ?? true
1201
+ };
1202
+ if (datasource.type === "code" && !isValidCodeParams(params)) {
1203
+ throw new Error(
1204
+ `Invalid code datasource "${datasource.name}": missing required field "link". Please add repository URL to datasource configuration.`
1205
+ );
1206
+ } else if (datasource.type === "knowledge_base_confluence" && (!("cql" in params) || !params.cql)) {
1207
+ throw new Error(
1208
+ `Invalid Confluence datasource "${datasource.name}": missing required field "cql". Please add CQL query to datasource configuration.`
1209
+ );
1210
+ } else if (datasource.type === "knowledge_base_jira" && (!("jql" in params) || !params.jql)) {
1211
+ throw new Error(
1212
+ `Invalid Jira datasource "${datasource.name}": missing required field "jql". Please add JQL query to datasource configuration.`
1213
+ );
1214
+ } else if (datasource.type === "llm_routing_google" && (!("google_doc" in params) || !params.google_doc)) {
1215
+ throw new Error(
1216
+ `Invalid Google Docs datasource "${datasource.name}": missing required field "google_doc". Please add Google Doc ID to datasource configuration.`
1217
+ );
1218
+ }
1219
+ return params;
1220
+ }
1221
+ function iacToolToSdk(tool) {
1222
+ return {
1223
+ ...tool,
1224
+ settings_config: tool.settings_config ?? Boolean(tool.settings),
1225
+ settings: isResolvedIntegration(tool.settings) ? tool.settings : void 0
1226
+ };
1227
+ }
1228
+ function iacToolkitToSdk(toolkit) {
1229
+ return {
1230
+ ...toolkit,
1231
+ settings_config: toolkit.settings_config ?? Boolean(toolkit.settings),
1232
+ is_external: toolkit.is_external ?? false,
1233
+ tools: toolkit.tools.map((tool) => iacToolToSdk(tool)),
1234
+ settings: isResolvedIntegration(toolkit.settings) ? toolkit.settings : void 0
1235
+ };
1236
+ }
1237
+ function iacMcpServerToSdk(mcp) {
1238
+ return {
1239
+ ...mcp,
1240
+ enabled: mcp.enabled ?? true,
1241
+ settings: isResolvedIntegration(mcp.settings) ? mcp.settings : void 0,
1242
+ mcp_connect_auth_token: isResolvedIntegration(mcp.mcp_connect_auth_token) ? mcp.mcp_connect_auth_token : void 0
1243
+ };
1244
+ }
1245
+ function assistantResourceToCreateParams(assistant, projectName, promptContent) {
1246
+ const {
1247
+ prompt: _prompt,
1248
+ config: _config,
1249
+ model,
1250
+ sub_assistants: _subAssistants,
1251
+ datasource_names: _datasourceNames,
1252
+ toolkits,
1253
+ mcp_servers: mcpServers,
1254
+ ...sdkFields
1255
+ } = assistant;
1256
+ return {
1257
+ ...sdkFields,
1258
+ project: projectName,
1259
+ llm_model_type: model,
1260
+ system_prompt: promptContent,
1261
+ name: assistant.name,
1262
+ description: assistant.description,
1263
+ conversation_starters: assistant.conversation_starters || [],
1264
+ toolkits: (toolkits || []).map((toolkit) => iacToolkitToSdk(toolkit)),
1265
+ context: assistant.context || [],
1266
+ mcp_servers: (mcpServers || []).map((mcp) => iacMcpServerToSdk(mcp)),
1267
+ assistant_ids: assistant.assistant_ids || [],
1268
+ shared: assistant.shared ?? true,
1269
+ prompt_variables: assistant.prompt_variables || []
1270
+ };
1271
+ }
1272
+
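// The converters above map in both directions. Illustrative round trip with placeholder
// inputs: a platform response becomes an IaC resource (pointing at a
// system_prompts/<slug>.prompt.md file), and an IaC resource becomes create params.
const resource = assistantResponseToResource(apiAssistant);
const params = assistantResourceToCreateParams(resource, 'my-project', promptContent);
// params.llm_model_type comes from resource.model, params.system_prompt from promptContent;
// toolkits and mcp_servers pass through iacToolkitToSdk / iacMcpServerToSdk.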
1273
+ // src/lib/resourceExistenceChecker.ts
1274
+ async function checkResourceExists(getState, getResource) {
1275
+ const existingState = getState();
1276
+ if (!existingState) {
1277
+ return false;
1278
+ }
1279
+ try {
1280
+ await getResource(existingState.id);
1281
+ return true;
1282
+ } catch {
1283
+ return false;
1284
+ }
1285
+ }
1286
+ function checkAssistantExists(client, name, stateManager) {
1287
+ return checkResourceExists(
1288
+ () => stateManager.getAssistantState(name),
1289
+ (id) => client.assistants.get(id)
1290
+ );
1291
+ }
1292
+ function checkDatasourceExists(client, name, stateManager) {
1293
+ return checkResourceExists(
1294
+ () => stateManager.getDatasourceState(name),
1295
+ (id) => client.datasources.get(id)
1296
+ );
1297
+ }
1298
+ function checkWorkflowExists(client, name, stateManager) {
1299
+ return checkResourceExists(
1300
+ () => stateManager.getWorkflowState(name),
1301
+ (id) => client.workflows.get(id)
1302
+ );
1303
+ }
1304
+
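// The existence checks above combine local state with a live GET: they resolve true only
// when the resource is recorded in state.json AND still retrievable from the platform.
// deploy.ts (below) falls back to "create new" when this is false despite a state entry.
const stillExists = await checkAssistantExists(client, 'helper', stateManager);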
1305
+ // src/deploy.ts
1306
+ function sortAssistantsByDependencies(assistants) {
1307
+ const sorted = [];
1308
+ const visited = /* @__PURE__ */ new Set();
1309
+ const visiting = /* @__PURE__ */ new Set();
1310
+ const assistantMap = /* @__PURE__ */ new Map();
1311
+ for (const assistant of assistants) {
1312
+ assistantMap.set(assistant.name, assistant);
1313
+ }
1314
+ function visit(name) {
1315
+ if (visited.has(name)) {
1316
+ return;
1317
+ }
1318
+ if (visiting.has(name)) {
1319
+ throw new Error(`Circular dependency detected for assistant: ${name}`);
1320
+ }
1321
+ visiting.add(name);
1322
+ const assistant = assistantMap.get(name);
1323
+ if (assistant && assistant.sub_assistants) {
1324
+ for (const subName of assistant.sub_assistants) {
1325
+ if (!assistantMap.has(subName)) {
1326
+ throw new Error(
1327
+ `Sub-assistant "${subName}" referenced by "${name}" not found in config. Ensure all sub-assistants are defined in resources.assistants.`
1328
+ );
1329
+ }
1330
+ visit(subName);
1331
+ }
1332
+ }
1333
+ visiting.delete(name);
1334
+ visited.add(name);
1335
+ if (assistant) {
1336
+ sorted.push(assistant);
1337
+ }
1338
+ }
1339
+ for (const assistant of assistants) {
1340
+ visit(assistant.name);
1341
+ }
1342
+ return sorted;
1343
+ }
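// The DFS above returns assistants in dependency order so that every sub-assistant is
// deployed before its parent (cycles throw). Illustrative ordering, placeholder names:
const ordered = sortAssistantsByDependencies([
  { name: 'parent', sub_assistants: ['child'] },
  { name: 'child' },
]);
// ordered.map((a) => a.name) -> ['child', 'parent']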
1344
+ async function deployAssistants(config, client, loader, stateManager) {
1345
+ const stats = { created: 0, updated: 0, unchanged: 0, failed: 0 };
1346
+ if (!config.resources.assistants) {
1347
+ return stats;
1348
+ }
1349
+ logger.info("\u{1F916} Processing assistants...\n");
1350
+ const sortedAssistants = sortAssistantsByDependencies(config.resources.assistants);
1351
+ logger.info(` Sorted ${sortedAssistants.length} assistant(s) by dependencies
1352
+ `);
1353
+ for (const assistant of sortedAssistants) {
1354
+ try {
1355
+ logger.info(`Processing: ${assistant.name}`);
1356
+ const promptContent = loader.loadPrompt(assistant.prompt);
1357
+ let buildConfig = null;
1358
+ if (assistant.config) {
1359
+ buildConfig = loader.loadAssistantConfig(assistant.config);
1360
+ }
1361
+ const configChecksum = calculateAssistantConfigChecksum(assistant, buildConfig);
1362
+ let resolvedAssistantIds = assistant.assistant_ids || [];
1363
+ if (assistant.sub_assistants && assistant.sub_assistants.length > 0) {
1364
+ logger.info(` Resolving ${assistant.sub_assistants.length} sub-assistant name(s)...`);
1365
+ const resolvedIds = [];
1366
+ for (const name of assistant.sub_assistants) {
1367
+ const subAssistantState = stateManager.getAssistantState(name);
1368
+ if (!subAssistantState) {
1369
+ throw new Error(`Sub-assistant "${name}" not found in state. Ensure the assistant is deployed first.`);
1370
+ }
1371
+ resolvedIds.push(subAssistantState.id);
1372
+ logger.info(` \u2713 Resolved "${name}" \u2192 ${subAssistantState.id}`);
1373
+ }
1374
+ resolvedAssistantIds = resolvedIds;
1375
+ }
1376
+ let resolvedContext = assistant.context || [];
1377
+ if (assistant.datasource_names && assistant.datasource_names.length > 0) {
1378
+ logger.info(` Resolving ${assistant.datasource_names.length} datasource name(s)...`);
1379
+ const datasourceContextEntries = [];
1380
+ const datasourcesInConfig = config.resources.datasources || [];
1381
+ const importedDatasources = config.imported?.datasources || [];
1382
+ const allDatasources = [...datasourcesInConfig, ...importedDatasources];
1383
+ for (const dsName of assistant.datasource_names) {
1384
+ const datasource = allDatasources.find((ds) => ds.name === dsName);
1385
+ if (!datasource) {
1386
+ throw new Error(`Datasource with name "${dsName}" not found in config (resources or imported)`);
1387
+ }
1388
+ const contextType = datasource.type === "code" ? "code" : "knowledge_base";
1389
+ datasourceContextEntries.push({
1390
+ context_type: contextType,
1391
+ name: datasource.name
1392
+ });
1393
+ logger.info(` \u2713 Resolved "${dsName}" \u2192 ${datasource.name} (${contextType})`);
1394
+ }
1395
+ resolvedContext = [...resolvedContext, ...datasourceContextEntries];
1396
+ }
1397
+ const assistantWithResolved = {
1398
+ ...assistant,
1399
+ assistant_ids: resolvedAssistantIds,
1400
+ context: resolvedContext
1401
+ };
1402
+ const apiParams = assistantResourceToCreateParams(assistantWithResolved, config.project.name, promptContent);
1403
+ const existingState = stateManager.getAssistantState(assistant.name);
1404
+ if (existingState) {
1405
+ const existsOnPlatform = await checkAssistantExists(client, assistant.name, stateManager);
1406
+ if (existsOnPlatform) {
1407
+ const hasChanged = existingState.promptChecksum !== calculateChecksum(promptContent) || existingState.configChecksum !== configChecksum;
1408
+ if (hasChanged) {
1409
+ logger.info(` Updating assistant (ID: ${existingState.id})...`);
1410
+ if (process.env.DEBUG_API) {
1411
+ logger.debug("\n=== DEBUG: Update API Params ===");
1412
+ logger.debug(JSON.stringify(apiParams, null, 2));
1413
+ logger.debug("================================\n");
1414
+ }
1415
+ await client.assistants.update(existingState.id, apiParams);
1416
+ logger.info(`\u2713 Updated assistant: ${assistant.name} (${existingState.id})`);
1417
+ stateManager.updateAssistantState(assistant.name, existingState.id, promptContent, assistant, buildConfig);
1418
+ stats.updated++;
1419
+ } else {
1420
+ logger.info(` \u2713 No changes detected (ID: ${existingState.id})`);
1421
+ stats.unchanged++;
1422
+ }
1423
+ } else {
1424
+ logger.info(` \u26A0\uFE0F Assistant ID from state not found on platform, will create new`);
1425
+ logger.info(` Creating new assistant...`);
1426
+ if (process.env.DEBUG_API) {
1427
+ logger.debug("\n=== DEBUG: API Params ===");
1428
+ logger.debug(JSON.stringify(apiParams, null, 2));
1429
+ logger.debug("=========================\n");
1430
+ }
1431
+ const assistantId = await createAssistantAndGetId(client, apiParams, assistant.slug);
1432
+ logger.info(`\u2713 Created assistant: ${assistant.name} (${assistantId})`);
1433
+ stateManager.updateAssistantState(assistant.name, assistantId, promptContent, assistant, buildConfig);
1434
+ stats.created++;
1435
+ }
1436
+ } else {
1437
+ logger.info(` Creating new assistant...`);
1438
+ if (process.env.DEBUG_API) {
1439
+ logger.debug("\n=== DEBUG: API Params ===");
1440
+ logger.debug(JSON.stringify(apiParams, null, 2));
1441
+ logger.debug("=========================\n");
1442
+ }
1443
+ const assistantId = await createAssistantAndGetId(client, apiParams, assistant.slug);
1444
+ logger.info(`\u2713 Created assistant: ${assistant.name} (${assistantId})`);
1445
+ stateManager.updateAssistantState(assistant.name, assistantId, promptContent, assistant, buildConfig);
1446
+ stats.created++;
1447
+ }
1448
+ logger.info("");
1449
+ } catch (error) {
1450
+ logger.error(` \u274C Failed to deploy ${assistant.name}:`);
1451
+ if (error instanceof Error) {
1452
+ logger.error(` ${error.message}`);
1453
+ logger.debug(` Stack:`, error.stack);
1454
+ if ("response" in error) {
1455
+ const axiosError = error;
1456
+ logger.error(` Status: ${axiosError.response?.status}`);
1457
+ logger.error(` Data:`, JSON.stringify(axiosError.response?.data, null, 2));
1458
+ }
1459
+ } else {
1460
+ logger.error(` ${String(error)}`);
1461
+ }
1462
+ logger.info("");
1463
+ stats.failed++;
1464
+ }
1465
+ }
1466
+ return stats;
1467
+ }
1468
+ async function createDatasourceAndGetId(client, createParams, datasourceName) {
1469
+ await client.datasources.create(createParams);
1470
+ logger.info(`\u2713 Created datasource: ${datasourceName}`);
1471
+ logger.info(` Fetching datasource ID...`);
1472
+ const datasourceId = await findDatasourceByName(client, datasourceName);
1473
+ if (!datasourceId) {
1474
+ throw new Error(`Datasource created but not found by name: ${datasourceName}`);
1475
+ }
1476
+ logger.info(` Found ID: ${datasourceId}`);
1477
+ return datasourceId;
1478
+ }
1479
+ async function deployDatasources(config, client, stateManager) {
1480
+ const stats = { created: 0, updated: 0, unchanged: 0, failed: 0 };
1481
+ logger.info("\u{1F4CA} Processing datasources...\n");
1482
+ if (!config.resources.datasources) {
1483
+ return stats;
1484
+ }
1485
+ for (const datasource of config.resources.datasources) {
1486
+ try {
1487
+ logger.info(`Processing: ${datasource.name}`);
1488
+ const configChecksum = calculateDatasourceConfigChecksum(datasource);
1489
+ const createParams = datasourceResourceToCreateParams(datasource, config.project.name);
1490
+ const existingState = stateManager.getDatasourceState(datasource.name);
1491
+ if (existingState) {
1492
+ const existsOnPlatform = await checkDatasourceExists(client, datasource.name, stateManager);
1493
+ if (existsOnPlatform) {
1494
+ const hasChanged = existingState.configChecksum !== configChecksum;
1495
+ if (hasChanged || datasource.force_reindex) {
1496
+ if (datasource.force_reindex && !hasChanged) {
1497
+ logger.info(` Force reindexing datasource (ID: ${existingState.id})...`);
1498
+ } else {
1499
+ logger.info(` Updating datasource (ID: ${existingState.id})...`);
1500
+ }
1501
+ const updateParams = {
1502
+ ...createParams,
1503
+ ...datasource.force_reindex && { full_reindex: true }
1504
+ };
1505
+ await client.datasources.update(updateParams);
1506
+ logger.info(
1507
+ `\u2713 ${datasource.force_reindex && !hasChanged ? "Reindexed" : "Updated"} datasource: ${datasource.name} (${existingState.id})`
1508
+ );
1509
+ stateManager.updateDatasourceState(datasource.name, existingState.id, datasource);
1510
+ stats.updated++;
1511
+ } else {
1512
+ logger.info(` \u2713 No changes detected (ID: ${existingState.id})`);
1513
+ stats.unchanged++;
1514
+ }
1515
+ } else {
1516
+ logger.info(` \u26A0\uFE0F Datasource ID from state not found on platform, will create new`);
1517
+ logger.info(` Creating new datasource...`);
1518
+ const datasourceId = await createDatasourceAndGetId(client, createParams, datasource.name);
1519
+ stateManager.updateDatasourceState(datasource.name, datasourceId, datasource);
1520
+ stats.created++;
1521
+ }
1522
+ } else {
1523
+ logger.info(` Creating new datasource...`);
1524
+ const datasourceId = await createDatasourceAndGetId(client, createParams, datasource.name);
1525
+ stateManager.updateDatasourceState(datasource.name, datasourceId, datasource);
1526
+ stats.created++;
1527
+ }
1528
+ logger.info("");
1529
+ } catch (error) {
1530
+ logger.error(` \u274C Failed to deploy ${datasource.name}:`);
1531
+ if (error instanceof Error) {
1532
+ logger.error(` ${error.message}`);
1533
+ logger.debug(` Stack:`, error.stack);
1534
+ if ("response" in error) {
1535
+ const axiosError = error;
1536
+ logger.error(` Status: ${axiosError.response?.status}`);
1537
+ logger.error(` Data:`, JSON.stringify(axiosError.response?.data, null, 2));
1538
+ }
1539
+ } else {
1540
+ logger.error(` ${String(error)}`);
1541
+ }
1542
+ logger.info("");
1543
+ stats.failed++;
1544
+ }
1545
+ }
1546
+ return stats;
1547
+ }
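// Sketch of the decision applied above for datasources: an entry is only re-sent when its
// config checksum drifts from the recorded state, or when force_reindex is set (which also
// adds full_reindex: true to the update payload). The helper name below is illustrative.
function exampleDatasourceAction(existingState, configChecksum, forceReindex) {
  if (!existingState) return "create";
  if (existingState.configChecksum !== configChecksum || forceReindex) return "update";
  return "unchanged";
}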
1548
+ async function deployWorkflows(config, client, stateManager, rootDir = process.cwd()) {
1549
+ const stats = { created: 0, updated: 0, unchanged: 0, failed: 0 };
1550
+ logger.info("\u{1F504} Processing workflows...\n");
1551
+ if (!config.resources.workflows) {
1552
+ return stats;
1553
+ }
1554
+ for (const workflow of config.resources.workflows) {
1555
+ try {
1556
+ logger.info(`Processing: ${workflow.name}`);
1557
+ const yamlPath = path.join(rootDir, workflow.definition);
1558
+ if (!fs.existsSync(yamlPath)) {
1559
+ throw new Error(`Workflow definition file not found: ${workflow.definition}`);
1560
+ }
1561
+ const yamlConfigContent = fs.readFileSync(yamlPath, "utf8");
1562
+ const workflowYaml = yaml.parse(yamlConfigContent);
1563
+ const referencesToResolve = [];
1564
+ for (const assistant of workflowYaml.assistants) {
1565
+ const isInline = !assistant.assistant_name && assistant.model && assistant.system_prompt;
1566
+ if (isInline) {
1567
+ logger.info(` Skipping inline assistant: ${assistant.id}`);
1568
+ continue;
1569
+ }
1570
+ if (!assistant.assistant_id) {
1571
+ let resolvedId;
1572
+ if (assistant.assistant_name) {
1573
+ const assistantState = stateManager.getAssistantState(assistant.assistant_name);
1574
+ if (assistantState) {
1575
+ resolvedId = assistantState.id;
1576
+ logger.info(` Resolved (name): ${assistant.assistant_name} \u2192 ${resolvedId.slice(0, 8)}...`);
1577
+ } else {
1578
+ const importedAssistant = config.imported?.assistants?.find(
1579
+ (a) => a.name === assistant.assistant_name
1580
+ );
1581
+ if (importedAssistant && importedAssistant.id) {
1582
+ resolvedId = importedAssistant.id;
1583
+ logger.info(` Resolved (imported): ${assistant.assistant_name} \u2192 ${resolvedId.slice(0, 8)}...`);
1584
+ } else {
1585
+ throw new Error(
1586
+ `Assistant "${assistant.assistant_name}" not found in state or imported. Ensure the assistant is deployed first or added to imported.assistants.`
1587
+ );
1588
+ }
1589
+ }
1590
+ } else {
1591
+ throw new Error(`Workflow assistant reference must have 'assistant_name' (local id: ${assistant.id})`);
1592
+ }
1593
+ assistant.assistant_id = resolvedId;
1594
+ const referenceName = assistant.assistant_name;
1595
+ delete assistant.assistant_name;
1596
+ referencesToResolve.push({
1597
+ localId: assistant.id,
1598
+ reference: referenceName || assistant.id,
1599
+ uuid: resolvedId
1600
+ });
1601
+ }
1602
+ }
1603
+ const yamlConfig = yaml.stringify(workflowYaml);
1604
+ const workflowYamlChecksum = calculateChecksum(yamlConfigContent);
1605
+ const configChecksum = calculateWorkflowConfigChecksum(workflow);
1606
+ const existingState = stateManager.getWorkflowState(workflow.name);
1607
+ if (existingState) {
1608
+ const existsOnPlatform = await checkWorkflowExists(client, workflow.name, stateManager);
1609
+ if (existsOnPlatform) {
1610
+ const hasChanged = existingState.workflowYamlChecksum !== workflowYamlChecksum || existingState.configChecksum !== configChecksum;
1611
+ if (hasChanged) {
1612
+ const apiParams = {
1613
+ project: config.project.name,
1614
+ name: workflow.name,
1615
+ description: workflow.description,
1616
+ yaml_config: yamlConfig,
1617
+ mode: workflow.mode,
1618
+ shared: workflow.shared,
1619
+ icon_url: workflow.icon_url
1620
+ };
1621
+ if (process.env.DEBUG_API) {
1622
+ logger.debug("\n=== DEBUG: API Params ===");
1623
+ logger.debug(JSON.stringify(apiParams, null, 2));
1624
+ logger.debug("=========================\n");
1625
+ }
1626
+ logger.info(` Updating workflow (ID: ${existingState.id})...`);
1627
+ await client.workflows.update(existingState.id, apiParams);
1628
+ logger.info(`\u2713 Updated workflow: ${workflow.name} (${existingState.id})`);
1629
+ stateManager.updateWorkflowState(workflow.name, existingState.id, workflowYamlChecksum, configChecksum);
1630
+ stats.updated++;
1631
+ } else {
1632
+ logger.info(` \u2713 No changes detected (ID: ${existingState.id})`);
1633
+ stats.unchanged++;
1634
+ }
1635
+ } else {
1636
+ logger.info(` \u26A0\uFE0F Workflow ID from state not found on platform, will create new`);
1637
+ logger.info(` Creating new workflow...`);
1638
+ const apiParams = {
1639
+ project: config.project.name,
1640
+ name: workflow.name,
1641
+ description: workflow.description,
1642
+ yaml_config: yamlConfig,
1643
+ mode: workflow.mode || "Sequential",
1644
+ shared: workflow.shared ?? true,
1645
+ icon_url: workflow.icon_url
1646
+ };
1647
+ if (process.env.DEBUG_API) {
1648
+ logger.debug("\n=== DEBUG: API Params ===");
1649
+ logger.debug(JSON.stringify(apiParams, null, 2));
1650
+ logger.debug("=========================\n");
1651
+ }
1652
+ const result = await client.workflows.create(apiParams);
1653
+ const workflowId = result.data?.id || result.id;
1654
+ if (!workflowId) {
1655
+ throw new Error("Workflow created but ID not returned");
1656
+ }
1657
+ logger.info(`\u2713 Created workflow: ${workflow.name} (${workflowId})`);
1658
+ stateManager.updateWorkflowState(workflow.name, workflowId, workflowYamlChecksum, configChecksum);
1659
+ stats.created++;
1660
+ }
1661
+ } else {
1662
+ logger.info(` Creating new workflow...`);
1663
+ const apiParams = {
1664
+ project: config.project.name,
1665
+ name: workflow.name,
1666
+ description: workflow.description,
1667
+ yaml_config: yamlConfig,
1668
+ mode: workflow.mode || "Sequential",
1669
+ shared: workflow.shared ?? true,
1670
+ icon_url: workflow.icon_url
1671
+ };
1672
+ if (process.env.DEBUG_API) {
1673
+ logger.debug("\n=== DEBUG: API Params ===");
1674
+ logger.debug(JSON.stringify(apiParams, null, 2));
1675
+ logger.debug("=========================\n");
1676
+ }
1677
+ const result = await client.workflows.create(apiParams);
1678
+ const workflowId = result.data?.id || result.id;
1679
+ if (!workflowId) {
1680
+ throw new Error("Workflow created but ID not returned");
1681
+ }
1682
+ logger.info(`\u2713 Created workflow: ${workflow.name} (${workflowId})`);
1683
+ stateManager.updateWorkflowState(workflow.name, workflowId, workflowYamlChecksum, configChecksum);
1684
+ stats.created++;
1685
+ }
1686
+ logger.info("");
1687
+ } catch (error) {
1688
+ logger.error(` \u274C Failed to deploy ${workflow.name}:`);
1689
+ if (error instanceof Error) {
1690
+ logger.error(` ${error.message}`);
1691
+ logger.debug(` Stack:`, error.stack);
1692
+ if ("response" in error) {
1693
+ const axiosError = error;
1694
+ logger.error(` Status: ${axiosError.response?.status}`);
1695
+ logger.error(` Data:`, JSON.stringify(axiosError.response?.data, null, 2));
1696
+ }
1697
+ } else {
1698
+ logger.error(` ${String(error)}`);
1699
+ }
1700
+ logger.info("");
1701
+ stats.failed++;
1702
+ }
1703
+ }
1704
+ return stats;
1705
+ }
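// Minimal sketch of the name-to-id resolution performed above, using a plain Map in place
// of StateManager; the map contents and the inline YAML snippet are illustrative only.
const exampleDeployedIds = new Map([["code-reviewer", "3f1c2d4e-aaaa-bbbb-cccc-000000000000"]]);
const exampleWorkflowDoc = yaml.parse("assistants:\n  - id: reviewer\n    assistant_name: code-reviewer\n");
for (const entry of exampleWorkflowDoc.assistants) {
  const uuid = exampleDeployedIds.get(entry.assistant_name);
  if (uuid) {
    entry.assistant_id = uuid; // the API expects a concrete assistant_id
    delete entry.assistant_name; // the symbolic reference is dropped before upload
  }
}
// yaml.stringify(exampleWorkflowDoc) is what would be sent as yaml_config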
1706
+ async function deployResources(options) {
1707
+ logger.info("\u{1F680} Deploying resources to Codemie...\n");
1708
+ try {
1709
+ const { appConfig, prune = false } = options;
1710
+ const loader = new CodemieConfigLoader(appConfig);
1711
+ const stateManager = new StateManager(appConfig);
1712
+ logger.info("\u{1F4C4} Loading configuration...");
1713
+ const config = loader.loadConfig();
1714
+ logger.info(`\u2713 Loaded config for project: ${config.project.name}
1715
+ `);
1716
+ logger.info("\u{1F50C} Connecting to Codemie API...");
1717
+ const client = await createClient(config);
1718
+ logger.info("\u2713 Connected to Codemie API\n");
1719
+ logger.info("\u{1F9F9} Checking for orphaned resources...");
1720
+ const cleanupManager = new CleanupManager(client, stateManager);
1721
+ const orphaned = cleanupManager.findOrphanedResources(config);
1722
+ const totalOrphaned = orphaned.assistants.length + orphaned.datasources.length + orphaned.workflows.length;
1723
+ let deleted = 0;
1724
+ if (totalOrphaned > 0) {
1725
+ logger.info(`
1726
+ \u26A0\uFE0F Found ${totalOrphaned} orphaned resource(s):`);
1727
+ if (orphaned.assistants.length > 0) {
1728
+ logger.info(` \u2022 ${orphaned.assistants.length} assistant(s)`);
1729
+ }
1730
+ if (orphaned.datasources.length > 0) {
1731
+ logger.info(` \u2022 ${orphaned.datasources.length} datasource(s)`);
1732
+ }
1733
+ if (orphaned.workflows.length > 0) {
1734
+ logger.info(` \u2022 ${orphaned.workflows.length} workflow(s)`);
1735
+ }
1736
+ if (process.env.SAMPLE_DEPLOY === "1") {
1737
+ logger.info("\n\u{1F50E} SAMPLE_DEPLOY=1 -> Skipping orphan deletion (simulation / partial deploy mode)\n");
1738
+ } else if (prune) {
1739
+ logger.info("\n\u{1F5D1}\uFE0F Deleting orphaned resources (removed from config)...\n");
1740
+ const cleanupResult = await cleanupManager.deleteOrphanedResources(orphaned);
1741
+ deleted = cleanupResult.deleted.assistants.length + cleanupResult.deleted.datasources.length + cleanupResult.deleted.workflows.length;
1742
+ if (deleted > 0) {
1743
+ logger.info(`
1744
+ \u2713 Deleted ${deleted} orphaned resource(s)
1745
+ `);
1746
+ }
1747
+ if (cleanupResult.errors.length > 0) {
1748
+ logger.info(`
1749
+ \u26A0\uFE0F ${cleanupResult.errors.length} error(s) during cleanup
1750
+ `);
1751
+ }
1752
+ } else {
1753
+ logger.info("\n\u26A0\uFE0F Orphaned resources found but not deleted.");
1754
+ logger.info(" Use --prune flag to delete them.\n");
1755
+ }
1756
+ } else {
1757
+ logger.info("\u2713 No orphaned resources found\n");
1758
+ }
1759
+ let created = 0;
1760
+ let updated = 0;
1761
+ let unchanged = 0;
1762
+ let failed = 0;
1763
+ const datasourceStats = await deployDatasources(config, client, stateManager);
1764
+ created += datasourceStats.created;
1765
+ updated += datasourceStats.updated;
1766
+ unchanged += datasourceStats.unchanged;
1767
+ failed += datasourceStats.failed;
1768
+ const assistantStats = await deployAssistants(config, client, loader, stateManager);
1769
+ created += assistantStats.created;
1770
+ updated += assistantStats.updated;
1771
+ unchanged += assistantStats.unchanged;
1772
+ failed += assistantStats.failed;
1773
+ const workflowStats = await deployWorkflows(config, client, stateManager, appConfig.rootDir);
1774
+ created += workflowStats.created;
1775
+ updated += workflowStats.updated;
1776
+ unchanged += workflowStats.unchanged;
1777
+ failed += workflowStats.failed;
1778
+ logger.info("=".repeat(50));
1779
+ logger.info("\u{1F4CA} Deployment Summary:\n");
1780
+ logger.info(` \u2705 Created: ${created}`);
1781
+ logger.info(` \u{1F504} Updated: ${updated}`);
1782
+ logger.info(` \u{1F4CB} Unchanged: ${unchanged}`);
1783
+ if (deleted > 0) {
1784
+ logger.info(` \u{1F5D1}\uFE0F Deleted: ${deleted}`);
1785
+ }
1786
+ if (failed > 0) {
1787
+ logger.info(` \u274C Failed: ${failed}`);
1788
+ }
1789
+ logger.info("");
1790
+ if (failed > 0) {
1791
+ throw new Error("Deployment completed with errors");
1792
+ } else {
1793
+ logger.info("\u2705 Deployment successful!");
1794
+ }
1795
+ } catch (error) {
1796
+ logger.error("\n\u274C Deployment failed:");
1797
+ logger.error(error instanceof Error ? error.message : String(error));
1798
+ throw error;
1799
+ }
1800
+ }
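// How a caller might drive a full deployment. The appConfig fields shown are the ones read
// by CodemieConfigLoader (rootDir, codemieConfig) and the values are illustrative;
// StateManager may read additional appConfig fields not shown here.
async function exampleDeploy() {
  await deployResources({
    appConfig: { rootDir: process.cwd(), codemieConfig: "codemie.yaml" },
    prune: false, // pass true to also delete resources that were removed from the config
  });
}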
1801
+ var DependencyValidator = class {
1802
+ /**
1803
+ * Validate assistant dependencies for cycles and nested sub-assistants
1804
+ */
1805
+ static validateAssistantDependencies(assistants) {
1806
+ const errors = [];
1807
+ const assistantMap = /* @__PURE__ */ new Map();
1808
+ for (const assistant of assistants) {
1809
+ assistantMap.set(assistant.name, assistant);
1810
+ }
1811
+ const cyclicErrors = this.detectCycles(assistants, assistantMap);
1812
+ errors.push(...cyclicErrors);
1813
+ const nestedErrors = this.detectNestedSubAssistants(assistants);
1814
+ errors.push(...nestedErrors);
1815
+ return errors;
1816
+ }
1817
+ /**
1818
+ * Detect cyclic dependencies using DFS (name-based)
1819
+ */
1820
+ static detectCycles(assistants, assistantMap) {
1821
+ const errors = [];
1822
+ const visited = /* @__PURE__ */ new Set();
1823
+ const recursionStack = /* @__PURE__ */ new Set();
1824
+ const dfs = (name, path11) => {
1825
+ if (recursionStack.has(name)) {
1826
+ const cycleStart = path11.indexOf(name);
1827
+ const cycle = [...path11.slice(cycleStart), name];
1828
+ errors.push(`Cyclic dependency detected: ${cycle.join(" \u2192 ")}`);
1829
+ return true;
1830
+ }
1831
+ if (visited.has(name)) {
1832
+ return false;
1833
+ }
1834
+ visited.add(name);
1835
+ recursionStack.add(name);
1836
+ path11.push(name);
1837
+ const assistant = assistantMap.get(name);
1838
+ const subAssistantRefs = assistant?.sub_assistants || [];
1839
+ for (const subRef of subAssistantRefs) {
1840
+ if (!assistantMap.has(subRef)) {
1841
+ continue;
1842
+ }
1843
+ if (dfs(subRef, [...path11])) {
1844
+ return true;
1845
+ }
1846
+ }
1847
+ recursionStack.delete(name);
1848
+ return false;
1849
+ };
1850
+ for (const assistant of assistants) {
1851
+ if (!visited.has(assistant.name)) {
1852
+ dfs(assistant.name, []);
1853
+ }
1854
+ }
1855
+ return errors;
1856
+ }
1857
+ /**
1858
+ * Detect nested sub-assistants (assistant with sub-assistants cannot be a sub-assistant)
1859
+ * Name-based validation
1860
+ */
1861
+ static detectNestedSubAssistants(assistants) {
1862
+ const errors = [];
1863
+ const usedAsSubAssistant = /* @__PURE__ */ new Set();
1864
+ for (const assistant of assistants) {
1865
+ if (assistant.sub_assistants && assistant.sub_assistants.length > 0) {
1866
+ for (const subName of assistant.sub_assistants) {
1867
+ usedAsSubAssistant.add(subName);
1868
+ }
1869
+ }
1870
+ }
1871
+ for (const assistant of assistants) {
1872
+ const hasSubAssistants = assistant.sub_assistants && assistant.sub_assistants.length > 0;
1873
+ if (hasSubAssistants && usedAsSubAssistant.has(assistant.name)) {
1874
+ const parents = [];
1875
+ for (const potentialParent of assistants) {
1876
+ if (potentialParent.sub_assistants?.includes(assistant.name)) {
1877
+ parents.push(potentialParent.name);
1878
+ }
1879
+ }
1880
+ errors.push(
1881
+ `Assistant "${assistant.name}" has sub-assistants but is itself used as a sub-assistant by: ${parents.join(", ")}. Nested sub-assistants are not allowed.`
1882
+ );
1883
+ }
1884
+ }
1885
+ return errors;
1886
+ }
1887
+ /**
1888
+ * Validate workflow assistant references
1889
+ * Check that all assistant_name references in workflows point to existing assistants
1890
+ */
1891
+ static validateWorkflowAssistantReferences(workflows, assistants, importedAssistants) {
1892
+ const errors = [];
1893
+ const availableAssistants = /* @__PURE__ */ new Set();
1894
+ for (const assistant of assistants) {
1895
+ availableAssistants.add(assistant.name);
1896
+ }
1897
+ for (const imported of importedAssistants) {
1898
+ availableAssistants.add(imported.name);
1899
+ }
1900
+ for (const workflow of workflows) {
1901
+ try {
1902
+ const workflowYaml = yaml.parse(workflow.definition);
1903
+ if (!workflowYaml.assistants) {
1904
+ continue;
1905
+ }
1906
+ for (const assistant of workflowYaml.assistants) {
1907
+ if (assistant.model && assistant.system_prompt && !assistant.assistant_name) {
1908
+ continue;
1909
+ }
1910
+ if (assistant.assistant_name && !availableAssistants.has(assistant.assistant_name)) {
1911
+ errors.push(
1912
+ `Workflow "${workflow.name}": Assistant reference "${assistant.assistant_name}" (id: ${assistant.id}) not found in resources.assistants or imported.assistants`
1913
+ );
1914
+ }
1915
+ }
1916
+ } catch (error) {
1917
+ errors.push(
1918
+ `Workflow "${workflow.name}": Failed to parse workflow YAML: ${error instanceof Error ? error.message : String(error)}`
1919
+ );
1920
+ }
1921
+ }
1922
+ return errors;
1923
+ }
1924
+ };
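// Usage sketch for the validator above; the two assistants are illustrative and deliberately
// reference each other to trigger both checks.
const exampleAssistants = [
  { name: "planner", sub_assistants: ["executor"] },
  { name: "executor", sub_assistants: ["planner"] },
];
const exampleDepErrors = DependencyValidator.validateAssistantDependencies(exampleAssistants);
// Reports the planner -> executor -> planner cycle, plus two nested sub-assistant violations
// (each assistant both has sub-assistants and is itself used as one).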
1925
+
1926
+ // src/lib/validationUtils.ts
1927
+ function isAssistantWithSlug(obj) {
1928
+ return typeof obj === "object" && obj !== null && "slug" in obj && "name" in obj && typeof obj.slug === "string" && typeof obj.name === "string";
1929
+ }
1930
+ function isAssistant(obj) {
1931
+ return typeof obj === "object" && obj !== null && "id" in obj && "slug" in obj;
1932
+ }
1933
+ function validateSlugUniqueness(config) {
1934
+ const slugErrors = [];
1935
+ const slugMap = /* @__PURE__ */ new Map();
1936
+ if (config.resources.assistants) {
1937
+ for (const assistant of config.resources.assistants) {
1938
+ if (assistant.slug) {
1939
+ if (!slugMap.has(assistant.slug)) {
1940
+ slugMap.set(assistant.slug, []);
1941
+ }
1942
+ slugMap.get(assistant.slug).push(assistant.name);
1943
+ }
1944
+ }
1945
+ }
1946
+ for (const [slug, names] of slugMap.entries()) {
1947
+ if (names.length > 1) {
1948
+ slugErrors.push(`Duplicate slug "${slug}" for assistants: ${names.join(", ")}`);
1949
+ }
1950
+ }
1951
+ return slugErrors;
1952
+ }
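// Illustrative input: two assistants sharing a slug produce a single error entry.
const exampleSlugErrors = validateSlugUniqueness({
  resources: { assistants: [
    { name: "alpha", slug: "helper" },
    { name: "beta", slug: "helper" },
  ] },
});
// exampleSlugErrors: [ 'Duplicate slug "helper" for assistants: alpha, beta' ]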
1953
+ async function checkApiSlugConflicts({
1954
+ config,
1955
+ client,
1956
+ assistantState
1957
+ }) {
1958
+ const existingAssistants = await client.assistants.list({
1959
+ minimal_response: false,
1960
+ per_page: 100
1961
+ });
1962
+ const existingSlugs = /* @__PURE__ */ new Map();
1963
+ for (const assistant of existingAssistants) {
1964
+ if (isAssistantWithSlug(assistant) && assistant.slug) {
1965
+ existingSlugs.set(assistant.slug, assistant.name);
1966
+ }
1967
+ }
1968
+ const conflictErrors = [];
1969
+ const iacAssistantNames = new Set(Object.keys(assistantState));
1970
+ if (config.resources.assistants) {
1971
+ for (const assistant of config.resources.assistants) {
1972
+ if (!assistant.slug || iacAssistantNames.has(assistant.name)) {
1973
+ continue;
1974
+ }
1975
+ if (existingSlugs.has(assistant.slug)) {
1976
+ const existingName = existingSlugs.get(assistant.slug);
1977
+ const existingAssistant = existingAssistants.find((a) => isAssistant(a) && a.slug === assistant.slug);
1978
+ conflictErrors.push(
1979
+ `\u274C Slug "${assistant.slug}" for assistant "${assistant.name}" already exists in Codemie
1980
+ Existing assistant: "${existingName}" (ID: ${isAssistant(existingAssistant) ? existingAssistant.id : "unknown"})
1981
+ \u{1F4A1} Change slug to a unique value or omit slug (platform will generate)`
1982
+ );
1983
+ }
1984
+ }
1985
+ }
1986
+ return conflictErrors;
1987
+ }
1988
+
1989
+ // src/validate.ts
1990
+ async function validateConfig(options) {
1991
+ const errors = [];
1992
+ const { checkApi = false, appConfig } = options;
1993
+ const loader = new CodemieConfigLoader(appConfig);
1994
+ const config = loader.loadConfig();
1995
+ const fileValidation = loader.validateFiles(config);
1996
+ if (!fileValidation.valid) {
1997
+ errors.push(...fileValidation.errors);
1998
+ }
1999
+ const slugErrors = validateSlugUniqueness(config);
2000
+ errors.push(...slugErrors);
2001
+ if (config.resources.assistants && config.resources.assistants.length > 0) {
2002
+ const dependencyErrors = DependencyValidator.validateAssistantDependencies(config.resources.assistants);
2003
+ errors.push(...dependencyErrors);
2004
+ }
2005
+ if (config.resources.workflows && config.resources.workflows.length > 0) {
2006
+ const workflowsWithContent = config.resources.workflows.map((workflow) => {
2007
+ const workflowPath = path.resolve(workflow.definition);
2008
+ if (!fs.existsSync(workflowPath)) {
2009
+ errors.push(`Workflow definition file not found: ${workflow.definition}`);
2010
+ return { ...workflow, definition: "" };
2011
+ }
2012
+ const definition = fs.readFileSync(workflowPath, "utf8");
2013
+ return { ...workflow, definition };
2014
+ });
2015
+ const workflowErrors = DependencyValidator.validateWorkflowAssistantReferences(
2016
+ workflowsWithContent,
2017
+ config.resources.assistants || [],
2018
+ config.imported?.assistants || []
2019
+ );
2020
+ errors.push(...workflowErrors);
2021
+ }
2022
+ if (checkApi && errors.length === 0) {
2023
+ const client = await createClient(config);
2024
+ const stateManager = new StateManager(appConfig);
2025
+ const state = stateManager.loadState();
2026
+ const conflictErrors = await checkApiSlugConflicts({ config, client, assistantState: state.resources.assistants });
2027
+ errors.push(...conflictErrors);
2028
+ }
2029
+ return { success: errors.length === 0, errors };
2030
+ }
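// Typical call pattern: validate locally first, then re-run with checkApi: true to also
// detect slug collisions with assistants that already exist on the platform.
async function exampleValidate(appConfig) {
  const { success, errors } = await validateConfig({ appConfig, checkApi: false });
  if (!success) {
    for (const message of errors) logger.error(message);
  }
  return success;
}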
2031
+ async function previewResource(resource, resourceType, getState, checkExists, calculateChecksums) {
2032
+ const existingState = getState(resource.name);
2033
+ if (existingState) {
2034
+ const existsOnPlatform = await checkExists();
2035
+ if (existsOnPlatform) {
2036
+ const { hasChanged, updateDetails } = calculateChecksums();
2037
+ return hasChanged ? {
2038
+ type: "update",
2039
+ resourceType,
2040
+ name: resource.name,
2041
+ details: updateDetails
2042
+ } : {
2043
+ type: "no-change",
2044
+ resourceType,
2045
+ name: resource.name
2046
+ };
2047
+ } else {
2048
+ return {
2049
+ type: "create",
2050
+ resourceType,
2051
+ name: resource.name,
2052
+ details: `ID from state not found on platform, will create new`
2053
+ };
2054
+ }
2055
+ } else {
2056
+ const { createDetails } = calculateChecksums();
2057
+ return {
2058
+ type: "create",
2059
+ resourceType,
2060
+ name: resource.name,
2061
+ details: createDetails
2062
+ };
2063
+ }
2064
+ }
2065
+ function previewOrphanedResources(orphaned, stateManager) {
2066
+ const changes = [];
2067
+ for (const name of orphaned.assistants) {
2068
+ const assistantState = stateManager.getAssistantState(name);
2069
+ changes.push({
2070
+ type: "delete",
2071
+ resourceType: "assistant",
2072
+ name,
2073
+ details: assistantState ? `Removed from config (ID: ${assistantState.id})` : "Removed from config"
2074
+ });
2075
+ }
2076
+ for (const name of orphaned.datasources) {
2077
+ const datasourceState = stateManager.getDatasourceState(name);
2078
+ changes.push({
2079
+ type: "delete",
2080
+ resourceType: "datasource",
2081
+ name,
2082
+ details: datasourceState ? `Removed from config (ID: ${datasourceState.id})` : "Removed from config"
2083
+ });
2084
+ }
2085
+ for (const name of orphaned.workflows) {
2086
+ const workflowState = stateManager.getWorkflowState(name);
2087
+ changes.push({
2088
+ type: "delete",
2089
+ resourceType: "workflow",
2090
+ name,
2091
+ details: workflowState ? `Removed from config (ID: ${workflowState.id})` : "Removed from config"
2092
+ });
2093
+ }
2094
+ return changes;
2095
+ }
2096
+ async function previewAssistants(assistants, loader, stateManager, client) {
2097
+ const changes = [];
2098
+ for (const assistant of assistants) {
2099
+ const promptContent = loader.loadPrompt(assistant.prompt);
2100
+ let buildConfig = null;
2101
+ if (assistant.config) {
2102
+ buildConfig = loader.loadAssistantConfig(assistant.config);
2103
+ }
2104
+ const configChecksum = calculateAssistantConfigChecksum(assistant, buildConfig);
2105
+ const change = await previewResource(
2106
+ assistant,
2107
+ "assistant",
2108
+ (name) => stateManager.getAssistantState(name),
2109
+ () => checkAssistantExists(client, assistant.name, stateManager),
2110
+ () => {
2111
+ const existingState = stateManager.getAssistantState(assistant.name);
2112
+ const hasChanged = existingState ? existingState.promptChecksum !== calculateChecksum(promptContent) || existingState.configChecksum !== configChecksum : false;
2113
+ return {
2114
+ hasChanged,
2115
+ createDetails: `Model: ${assistant.model}`,
2116
+ updateDetails: "Prompt or configuration changed"
2117
+ };
2118
+ }
2119
+ );
2120
+ changes.push(change);
2121
+ }
2122
+ return changes;
2123
+ }
2124
+ async function previewDatasources(datasources, stateManager, client) {
2125
+ const changes = [];
2126
+ for (const datasource of datasources) {
2127
+ const change = await previewResource(
2128
+ datasource,
2129
+ "datasource",
2130
+ (name) => stateManager.getDatasourceState(name),
2131
+ () => checkDatasourceExists(client, datasource.name, stateManager),
2132
+ () => {
2133
+ const configChecksum = calculateDatasourceConfigChecksum(datasource);
2134
+ const existingState = stateManager.getDatasourceState(datasource.name);
2135
+ const hasChanged = existingState ? existingState.configChecksum !== configChecksum : false;
2136
+ if (hasChanged || datasource.force_reindex) {
2137
+ return {
2138
+ hasChanged: true,
2139
+ updateDetails: datasource.force_reindex ? "Force reindex" : "Configuration changed"
2140
+ };
2141
+ }
2142
+ return { hasChanged: false };
2143
+ }
2144
+ );
2145
+ if (!change.details && change.type === "create") {
2146
+ change.details = `Type: ${datasource.type}`;
2147
+ }
2148
+ changes.push(change);
2149
+ }
2150
+ return changes;
2151
+ }
2152
+ async function previewWorkflows(workflows, stateManager, client, rootDir = process.cwd()) {
2153
+ const changes = [];
2154
+ for (const workflow of workflows) {
2155
+ const yamlPath = path.join(rootDir, workflow.definition);
2156
+ if (!fs.existsSync(yamlPath)) {
2157
+ throw new Error(`Workflow definition file not found: ${workflow.definition}`);
2158
+ }
2159
+ const yamlConfig = fs.readFileSync(yamlPath, "utf8");
2160
+ const change = await previewResource(
2161
+ workflow,
2162
+ "workflow",
2163
+ (name) => stateManager.getWorkflowState(name),
2164
+ () => checkWorkflowExists(client, workflow.name, stateManager),
2165
+ () => {
2166
+ const workflowYamlChecksum = calculateChecksum(yamlConfig);
2167
+ const configChecksum = calculateWorkflowConfigChecksum(workflow);
2168
+ const existingState = stateManager.getWorkflowState(workflow.name);
2169
+ const hasChanged = !!existingState && (existingState.workflowYamlChecksum !== workflowYamlChecksum || existingState.configChecksum !== configChecksum);
2170
+ return {
2171
+ hasChanged,
2172
+ ...hasChanged ? { updateDetails: "Workflow YAML or configuration changed" } : {}
2173
+ };
2174
+ }
2175
+ );
2176
+ if (!change.details && change.type === "create") {
2177
+ change.details = `Mode: ${workflow.mode}`;
2178
+ }
2179
+ changes.push(change);
2180
+ }
2181
+ return changes;
2182
+ }
2183
+
2184
+ // src/preview.ts
2185
+ async function previewChanges(appConfig, existingClient) {
2186
+ const loader = new CodemieConfigLoader(appConfig);
2187
+ const stateManager = new StateManager(appConfig);
2188
+ const config = loader.loadConfig();
2189
+ const client = existingClient || await createClient(config);
2190
+ const changes = [];
2191
+ const cleanupManager = new CleanupManager(client, stateManager);
2192
+ const orphaned = cleanupManager.findOrphanedResources(config);
2193
+ changes.push(...previewOrphanedResources(orphaned, stateManager));
2194
+ if (config.resources.assistants) {
2195
+ const assistantChanges = await previewAssistants(config.resources.assistants, loader, stateManager, client);
2196
+ changes.push(...assistantChanges);
2197
+ }
2198
+ if (config.resources.datasources) {
2199
+ const datasourceChanges = await previewDatasources(config.resources.datasources, stateManager, client);
2200
+ changes.push(...datasourceChanges);
2201
+ }
2202
+ if (config.resources.workflows) {
2203
+ const workflowChanges = await previewWorkflows(config.resources.workflows, stateManager, client, appConfig.rootDir);
2204
+ changes.push(...workflowChanges);
2205
+ }
2206
+ const toDelete = changes.filter((c) => c.type === "delete");
2207
+ const toCreate = changes.filter((c) => c.type === "create");
2208
+ const toUpdate = changes.filter((c) => c.type === "update");
2209
+ const unchanged = changes.filter((c) => c.type === "no-change");
2210
+ return {
2211
+ changes,
2212
+ summary: {
2213
+ deleted: toDelete.length,
2214
+ created: toCreate.length,
2215
+ updated: toUpdate.length,
2216
+ unchanged: unchanged.length,
2217
+ total: changes.length
2218
+ }
2219
+ };
2220
+ }
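// A caller can render the result as a plan-style summary; the formatting below is illustrative
// (previewChanges creates its own client when existingClient is omitted).
async function examplePlan(appConfig) {
  const { changes, summary } = await previewChanges(appConfig);
  for (const change of changes) {
    logger.info(`${change.type.padEnd(9)} ${change.resourceType}: ${change.name}`);
  }
  logger.info(`create ${summary.created}, update ${summary.updated}, delete ${summary.deleted}, unchanged ${summary.unchanged}`);
  return summary;
}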
2221
+
2222
+ // src/lib/constants.ts
2223
+ var PAGINATION = {
2224
+ DEFAULT_PAGE_SIZE: 100
+ };
2225
+ var TIMEOUTS_MS = {
2226
+ ASSISTANT_FETCH: 3e4,
2227
+ DATASOURCE_FETCH: 3e4,
2228
+ WORKFLOW_FETCH: 3e4,
2229
+ INTEGRATION_FETCH: 3e4
2230
+ };
2231
+ var RATE_LIMITING = {
2232
+ MAX_CONCURRENT_REQUESTS: 5,
2233
+ RETRY_ATTEMPTS: 3,
2234
+ RETRY_DELAY_MS: 1e3
2235
+ };
2236
+ var BACKUP = {
2237
+ TEMP_DIR_PREFIX: ".temp-",
2238
+ TRANSACTION_SAVE_TIMEOUT_MS: 5e3
2239
+ };
2240
+ var BYTES_IN_GB = 1024 ** 3;
2241
+ function sanitizeFileName(name, maxLength = 255) {
2242
+ const nameWithHyphens = name.replaceAll(/[/\\]/g, "-");
2243
+ const sanitized = nameWithHyphens.toLowerCase().replaceAll(/[^a-z0-9]+/g, "-").replaceAll(/^-+|-+$/g, "").slice(0, maxLength);
2244
+ if (!sanitized) {
2245
+ throw new Error(`Sanitized filename is empty for input: "${name}"`);
2246
+ }
2247
+ return sanitized;
2248
+ }
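// The normalization above is a pure function; a couple of illustrative inputs:
const exampleFileNames = ["My Assistant / v2", "API\\Backup: 2024!"].map((n) => sanitizeFileName(n));
// exampleFileNames: ["my-assistant-v2", "api-backup-2024"]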
2249
+ function validateBackupDirectory(backupDir, minSpaceGB = 1) {
2250
+ try {
2251
+ const parentDir = path.dirname(backupDir);
2252
+ if (!fs.existsSync(parentDir)) {
2253
+ fs.mkdirSync(parentDir, { recursive: true });
2254
+ }
2255
+ const testFile = path.join(parentDir, ".write-test");
2256
+ fs.writeFileSync(testFile, "test");
2257
+ fs.unlinkSync(testFile);
2258
+ try {
2259
+ const stats = fs.statfsSync(parentDir);
2260
+ const availableGB = stats.bavail * stats.bsize / BYTES_IN_GB;
2261
+ if (availableGB < minSpaceGB) {
2262
+ throw new Error(`Insufficient disk space: ${availableGB.toFixed(2)}GB available, need ${minSpaceGB}GB`);
2263
+ }
2264
+ } catch (error) {
2265
+ if (error.code !== "ERR_METHOD_NOT_SUPPORTED") {
2266
+ throw error;
2267
+ }
2268
+ }
2269
+ } catch (error) {
2270
+ throw new Error(`Cannot write to backup directory: ${error instanceof Error ? error.message : String(error)}`);
2271
+ }
2272
+ }
2273
+ function moveAtomically(tempPath, finalPath) {
2274
+ try {
2275
+ fs.renameSync(tempPath, finalPath);
2276
+ } catch (error) {
2277
+ const err = error;
2278
+ if (err.code === "EEXIST") {
2279
+ throw new Error(`Destination already exists: ${finalPath}`);
2280
+ }
2281
+ throw error;
2282
+ }
2283
+ }
2284
+ function cleanupDirectory(dirPath) {
2285
+ if (fs.existsSync(dirPath)) {
2286
+ fs.rmSync(dirPath, { recursive: true, force: true });
2287
+ }
2288
+ }
2289
+ function ensureDirectoryExists(filePath) {
2290
+ const dir = path.dirname(filePath);
2291
+ if (!fs.existsSync(dir)) {
2292
+ fs.mkdirSync(dir, { recursive: true });
2293
+ }
2294
+ }
2295
+
2296
+ // src/lib/backupTransaction.ts
2297
+ function busyWaitDelay(ms) {
+ const end = Date.now() + ms;
+ while (Date.now() < end) { /* synchronous busy-wait between transaction-file read retries */ }
2298
+ }
2299
+ var BackupTransaction = class {
2300
+ data;
2301
+ transactionPath;
2302
+ isDirty = false;
2303
+ saveTimer;
2304
+ constructor(backupDir, transactionId) {
2305
+ this.transactionPath = path.join(backupDir, "transaction.json");
2306
+ try {
2307
+ const content = fs.readFileSync(this.transactionPath, "utf8");
2308
+ this.data = JSON.parse(content);
2309
+ logger.info(`\u{1F4C2} Resuming backup transaction ${this.data.id}`);
2310
+ } catch (error) {
2311
+ const err = error;
2312
+ if (err.code === "ENOENT") {
2313
+ this.data = this.createNewTransaction(backupDir, transactionId);
2314
+ } else {
2315
+ throw error;
2316
+ }
2317
+ }
2318
+ }
2319
+ createNewTransaction(backupDir, transactionId) {
2320
+ const newData = {
2321
+ id: transactionId || this.generateTransactionId(),
2322
+ status: "in-progress",
2323
+ startTime: (/* @__PURE__ */ new Date()).toISOString(),
2324
+ backupDir,
2325
+ resources: {
2326
+ assistants: { total: 0, completed: [], failed: [] },
2327
+ datasources: { total: 0, completed: [], failed: [] },
2328
+ workflows: { total: 0, completed: [], failed: [] },
2329
+ integrations: { total: 0, completed: [], failed: [] }
2330
+ }
2331
+ };
2332
+ ensureDirectoryExists(this.transactionPath);
2333
+ this.writeTransactionExclusively(newData);
2334
+ return newData;
2335
+ }
2336
+ writeTransactionExclusively(data) {
2337
+ try {
2338
+ const fd = fs.openSync(this.transactionPath, "wx");
2339
+ fs.writeSync(fd, JSON.stringify(data, null, 2));
2340
+ fs.closeSync(fd);
2341
+ } catch (writeError) {
2342
+ const writeErr = writeError;
2343
+ if (writeErr.code === "EEXIST") {
2344
+ this.readWithRetry();
2345
+ } else {
2346
+ throw writeError;
2347
+ }
2348
+ }
2349
+ }
2350
+ readWithRetry() {
2351
+ let retries = 3;
2352
+ let lastReadError;
2353
+ while (retries > 0) {
2354
+ try {
2355
+ const delayMs = 100 * (4 - retries);
2356
+ if (delayMs > 0) {
2357
+ busyWaitDelay(delayMs);
2358
+ }
2359
+ const content = fs.readFileSync(this.transactionPath, "utf8");
2360
+ this.data = JSON.parse(content);
2361
+ logger.info(`\u{1F4C2} Resuming backup transaction ${this.data.id} (created by concurrent process)`);
2362
+ break;
2363
+ } catch (readError) {
2364
+ lastReadError = readError instanceof Error ? readError : new Error(String(readError));
2365
+ retries--;
2366
+ if (retries === 0) {
2367
+ throw new Error(
2368
+ `Failed to read transaction file after retries: ${lastReadError.message}. File may be corrupted. Delete ${this.transactionPath} and retry.`
2369
+ );
2370
+ }
2371
+ }
2372
+ }
2373
+ }
2374
+ generateTransactionId() {
2375
+ return crypto.randomBytes(8).toString("hex");
2376
+ }
2377
+ /**
2378
+ * Save transaction state to disk (checkpoint) with timeout protection
2379
+ */
2380
+ async save() {
2381
+ ensureDirectoryExists(this.transactionPath);
2382
+ const savePromise = fs.promises.writeFile(this.transactionPath, JSON.stringify(this.data, null, 2), "utf8");
2383
+ let timeoutId;
2384
+ const timeoutPromise = new Promise((_, reject) => {
2385
+ timeoutId = setTimeout(
2386
+ () => reject(new Error(`Transaction save timeout after ${BACKUP.TRANSACTION_SAVE_TIMEOUT_MS}ms`)),
2387
+ BACKUP.TRANSACTION_SAVE_TIMEOUT_MS
2388
+ );
2389
+ timeoutId.unref();
2390
+ });
2391
+ try {
2392
+ await Promise.race([savePromise, timeoutPromise]);
2393
+ this.isDirty = false;
2394
+ } catch (error) {
2395
+ const err = error instanceof Error ? error : new Error(String(error));
2396
+ throw new Error(`Failed to save transaction file: ${err.message}`);
2397
+ } finally {
2398
+ if (timeoutId !== void 0) {
2399
+ clearTimeout(timeoutId);
2400
+ }
2401
+ }
2402
+ }
2403
+ /**
2404
+ * Internal synchronous save for batching
2405
+ */
2406
+ saveSyncInternal() {
2407
+ ensureDirectoryExists(this.transactionPath);
2408
+ fs.writeFileSync(this.transactionPath, JSON.stringify(this.data, null, 2), "utf8");
2409
+ this.isDirty = false;
2410
+ }
2411
+ /**
2412
+ * Schedule batched save (debounced to avoid excessive disk writes)
2413
+ */
2414
+ scheduleSave() {
2415
+ if (this.saveTimer) {
2416
+ return;
2417
+ }
2418
+ this.saveTimer = setTimeout(() => {
2419
+ if (this.isDirty) {
2420
+ this.saveSyncInternal();
2421
+ }
2422
+ this.saveTimer = void 0;
2423
+ }, 1e3);
2424
+ this.saveTimer.unref();
2425
+ }
2426
+ /**
2427
+ * Internal method to finalize transaction with a specific status
2428
+ */
2429
+ end(status) {
2430
+ this.data.status = status;
2431
+ this.data.endTime = (/* @__PURE__ */ new Date()).toISOString();
2432
+ this.flush();
2433
+ }
2434
+ /**
2435
+ * Force immediate save (flush pending changes)
2436
+ */
2437
+ flush() {
2438
+ if (this.saveTimer) {
2439
+ clearTimeout(this.saveTimer);
2440
+ this.saveTimer = void 0;
2441
+ }
2442
+ if (this.isDirty) {
2443
+ this.saveSyncInternal();
2444
+ }
2445
+ }
2446
+ /**
2447
+ * Set total count for a resource type
2448
+ */
2449
+ setTotal(resourceType, total) {
2450
+ this.data.resources[resourceType].total = total;
2451
+ this.isDirty = true;
2452
+ this.scheduleSave();
2453
+ }
2454
+ /**
2455
+ * Mark resource as completed
2456
+ */
2457
+ markCompleted(resourceType, resourceId) {
2458
+ if (!this.data.resources[resourceType].completed.includes(resourceId)) {
2459
+ this.data.resources[resourceType].completed.push(resourceId);
2460
+ this.isDirty = true;
2461
+ this.scheduleSave();
2462
+ }
2463
+ }
2464
+ /**
2465
+ * Mark resource as failed
2466
+ */
2467
+ markFailed(resourceType, resourceId, error) {
2468
+ const failedEntry = { id: resourceId, error };
2469
+ const failedList = this.data.resources[resourceType].failed;
2470
+ if (!failedList.some((f) => f.id === resourceId)) {
2471
+ failedList.push(failedEntry);
2472
+ this.isDirty = true;
2473
+ this.scheduleSave();
2474
+ }
2475
+ }
2476
+ /**
2477
+ * Check if resource was already backed up
2478
+ */
2479
+ isCompleted(resourceType, resourceId) {
2480
+ return this.data.resources[resourceType].completed.includes(resourceId);
2481
+ }
2482
+ /**
2483
+ * Mark transaction as completed
2484
+ */
2485
+ complete() {
2486
+ this.end("completed");
2487
+ }
2488
+ /**
2489
+ * Mark transaction as failed
2490
+ */
2491
+ fail() {
2492
+ this.end("failed");
2493
+ }
2494
+ /**
2495
+ * Get transaction data (returns deep copy to prevent external modifications)
2496
+ */
2497
+ getData() {
2498
+ return structuredClone(this.data);
2499
+ }
2500
+ /**
2501
+ * Get summary of backup progress
2502
+ */
2503
+ getSummary() {
2504
+ const { resources } = this.data;
2505
+ const lines = [];
2506
+ for (const [type, data] of Object.entries(resources)) {
2507
+ const completed = data.completed.length;
2508
+ const failed = data.failed.length;
2509
+ const total = data.total;
2510
+ const percent = total > 0 ? Math.round(completed / total * 100) : 0;
2511
+ lines.push(` ${type}: ${completed}/${total} (${percent}%) ${failed > 0 ? `[${failed} failed]` : ""}`);
2512
+ }
2513
+ return lines.join("\n");
2514
+ }
2515
+ /**
2516
+ * Clean up transaction file after successful completion
2517
+ */
2518
+ cleanup() {
2519
+ if (fs.existsSync(this.transactionPath)) {
2520
+ fs.unlinkSync(this.transactionPath);
2521
+ }
2522
+ }
2523
+ };
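// Checkpointing sketch: the transaction records per-resource progress so an interrupted
// backup can resume by skipping completed IDs; backupDir and the IDs here are illustrative.
function exampleTransactionRun(backupDir) {
  const tx = new BackupTransaction(backupDir);
  tx.setTotal("assistants", 2);
  tx.markCompleted("assistants", "assistant-id-1");
  tx.markFailed("assistants", "assistant-id-2", "fetch timeout");
  logger.info(tx.getSummary()); // e.g. "assistants: 1/2 (50%) [1 failed]"
  tx.complete(); // flushes pending writes and stamps endTime
  return tx.getData();
}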
2524
+ function transformIntegrationSettings(settings, integrationIdToAlias, integrationSpecPaths, contextLabel = "integration") {
2525
+ const integrationId = settings.id;
2526
+ const alias = settings.alias || integrationId && integrationIdToAlias.get(integrationId);
2527
+ if (alias) {
2528
+ return { $ref: `imported.integrations.${alias}` };
2529
+ }
2530
+ if (integrationId) {
2531
+ logger.warn(` \u26A0\uFE0F No alias found for ${contextLabel} ${integrationId}, keeping full settings`);
2532
+ const specPath = integrationSpecPaths?.get(integrationId);
2533
+ if (specPath && settings.credential_values) {
2534
+ return {
2535
+ ...settings,
2536
+ credential_values: settings.credential_values.map(
2537
+ (cred) => cred.key === "openapi_spec" ? { key: "openapi_spec", value: specPath } : cred
2538
+ )
2539
+ };
2540
+ }
2541
+ }
2542
+ return settings;
2543
+ }
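// Effect of the transformation above: a resolved integration collapses to a $ref into
// imported.integrations, so raw credentials never land in the generated YAML. The map
// contents below are illustrative.
const exampleAliasLookup = new Map([["int-123", "gitlab_int12345"]]);
const exampleRef = transformIntegrationSettings({ id: "int-123" }, exampleAliasLookup, new Map());
// exampleRef: { $ref: "imported.integrations.gitlab_int12345" }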
2544
+ function transformTool(tool, integrationIdToAlias, integrationSpecPaths) {
2545
+ const result = {
2546
+ name: tool.name,
2547
+ settings_config: tool.settings_config
2548
+ };
2549
+ if (tool.label !== void 0 && tool.label !== null) {
2550
+ result.label = tool.label;
2551
+ }
2552
+ if (tool.settings) {
2553
+ result.settings = transformIntegrationSettings(tool.settings, integrationIdToAlias, integrationSpecPaths, "tool");
2554
+ }
2555
+ return result;
2556
+ }
2557
+ function transformMcpServer(mcp, integrationIdToAlias) {
2558
+ const result = {
2559
+ name: mcp.name,
2560
+ description: mcp.description,
2561
+ enabled: mcp.enabled,
2562
+ command: mcp.command,
2563
+ arguments: mcp.arguments,
2564
+ config: mcp.config,
2565
+ mcp_connect_url: mcp.mcp_connect_url,
2566
+ mcp_connect_auth_token: mcp.mcp_connect_auth_token,
2567
+ tools_tokens_size_limit: mcp.tools_tokens_size_limit
2568
+ };
2569
+ if (mcp.settings && isResolvedIntegration(mcp.settings)) {
2570
+ const alias = mcp.settings.alias || integrationIdToAlias.get(mcp.settings.id);
2571
+ if (alias) {
2572
+ result.settings = { $ref: `imported.integrations.${alias}` };
2573
+ } else {
2574
+ logger.warn(` \u26A0\uFE0F No alias found for MCP integration ${mcp.settings.id}, keeping full settings`);
2575
+ result.settings = mcp.settings;
2576
+ }
2577
+ } else if (mcp.settings) {
2578
+ result.settings = mcp.settings;
2579
+ }
2580
+ if (result.mcp_connect_auth_token && isResolvedIntegration(result.mcp_connect_auth_token)) {
2581
+ const authAlias = result.mcp_connect_auth_token.alias || integrationIdToAlias.get(result.mcp_connect_auth_token.id);
2582
+ if (authAlias) {
2583
+ result.mcp_connect_auth_token = { $ref: `imported.integrations.${authAlias}` };
2584
+ } else {
2585
+ logger.warn(
2586
+ ` \u26A0\uFE0F No alias found for MCP auth token integration ${result.mcp_connect_auth_token.id}, keeping full object`
2587
+ );
2588
+ }
2589
+ }
2590
+ return result;
2591
+ }
2592
+ function transformToolkits(toolkits, integrationIdToAlias, integrationSpecPaths) {
2593
+ return toolkits?.map(({ toolkit, tools, label, settings_config, is_external, settings }) => ({
2594
+ toolkit,
2595
+ tools: tools.map((tool) => transformTool(tool, integrationIdToAlias, integrationSpecPaths)),
2596
+ label,
2597
+ settings_config,
2598
+ is_external,
2599
+ ...settings ? {
2600
+ settings: transformIntegrationSettings(settings, integrationIdToAlias, integrationSpecPaths, "toolkit")
2601
+ } : {}
2602
+ }));
2603
+ }
2604
+ function prepareAssistantForYaml(assistant, state, integrationIdToAlias, integrationSpecPaths) {
2605
+ const transformedToolkits = transformToolkits(assistant.toolkits, integrationIdToAlias, integrationSpecPaths);
2606
+ const transformedMcpServers = assistant.mcp_servers?.map((mcp) => transformMcpServer(mcp, integrationIdToAlias));
2607
+ const finalAssistant = {
2608
+ ...assistantResponseToResource(assistant),
2609
+ ...transformedToolkits && { toolkits: transformedToolkits },
2610
+ ...transformedMcpServers && { mcp_servers: transformedMcpServers }
2611
+ };
2612
+ state.resources.assistants[assistant.name] = {
2613
+ id: assistant.id,
2614
+ lastDeployed: (/* @__PURE__ */ new Date()).toISOString(),
2615
+ promptChecksum: calculateChecksum(assistant.system_prompt || ""),
2616
+ configChecksum: calculateAssistantConfigChecksum(finalAssistant)
2617
+ };
2618
+ return finalAssistant;
2619
+ }
2620
+ function prepareDatasourceForYaml(datasource, state, integrationIdToAlias) {
2621
+ const settingId = hasSettingId(datasource) ? String(datasource.setting_id || "") : "";
2622
+ const integrationAlias = integrationIdToAlias.get(settingId);
2623
+ const finalDatasource = datasourceResponseToResource(datasource, integrationAlias);
2624
+ state.resources.datasources[datasource.name] = {
2625
+ id: datasource.id,
2626
+ lastDeployed: (/* @__PURE__ */ new Date()).toISOString(),
2627
+ configChecksum: calculateDatasourceConfigChecksum(finalDatasource)
2628
+ };
2629
+ return finalDatasource;
2630
+ }
2631
+ function prepareWorkflowForYaml(workflow, state, assistants, backupDir) {
2632
+ const resource = workflowResponseToResource(workflow);
2633
+ const yamlConfig = workflow.yaml_config;
2634
+ let finalYamlContent = yamlConfig || "";
2635
+ if (yamlConfig) {
2636
+ try {
2637
+ const workflowYaml = yaml.parse(yamlConfig);
2638
+ const transformedAssistants = workflowYaml.assistants?.map((assistant) => {
2639
+ if (assistant.assistant_id && typeof assistant.assistant_id === "string") {
2640
+ const assistantId = assistant.assistant_id;
2641
+ const matchedAssistant = assistants.find(({ id }) => id === assistantId);
2642
+ if (matchedAssistant && hasName(matchedAssistant)) {
2643
+ const { assistant_id: _assistantId, ...rest } = assistant;
2644
+ return { ...rest, assistant_name: matchedAssistant.name };
2645
+ }
2646
+ }
2647
+ return assistant;
2648
+ });
2649
+ if (transformedAssistants) {
2650
+ const transformedYaml = { ...workflowYaml, assistants: transformedAssistants };
2651
+ const fileName = `${sanitizeFileName(workflow.name)}.yaml`;
2652
+ const filePath = path.join(backupDir, "workflows", fileName);
2653
+ ensureDirectoryExists(filePath);
2654
+ finalYamlContent = yaml.stringify(transformedYaml);
2655
+ fs.writeFileSync(filePath, finalYamlContent, "utf8");
2656
+ }
2657
+ } catch (error) {
2658
+ logger.warn(
2659
+ ` \u26A0\uFE0F Failed to transform workflow YAML for ${workflow.name}: ${error instanceof Error ? error.message : String(error)}`
2660
+ );
2661
+ }
2662
+ }
2663
+ state.resources.workflows[workflow.name] = {
2664
+ id: workflow.id,
2665
+ lastDeployed: (/* @__PURE__ */ new Date()).toISOString(),
2666
+ workflowYamlChecksum: calculateChecksum(finalYamlContent),
2667
+ configChecksum: calculateWorkflowConfigChecksum(resource)
2668
+ };
2669
+ return resource;
2670
+ }
2671
+
2672
+ // src/lib/backupYamlGenerator.ts
2673
+ function generateCodemieYaml(backup, projectName, backupDir, integrationSpecPaths = /* @__PURE__ */ new Map()) {
2674
+ const integrationIdToAlias = /* @__PURE__ */ new Map();
2675
+ const integrationArray = [];
2676
+ for (const integration of backup.resources.integrations) {
2677
+ const alias = integration.alias || `${integration.credential_type.toLowerCase()}_${integration.id.slice(0, 8)}`;
2678
+ integrationIdToAlias.set(integration.id, alias);
2679
+ const specPath = integrationSpecPaths.get(integration.id);
2680
+ let credentialValues = integration.credential_values;
2681
+ if (specPath && credentialValues) {
2682
+ credentialValues = credentialValues.map(
2683
+ (cred) => cred.key === "openapi_spec" ? { key: "openapi_spec", value: specPath } : cred
2684
+ );
2685
+ }
2686
+ integrationArray.push({
2687
+ id: integration.id,
2688
+ alias,
2689
+ credential_type: integration.credential_type,
2690
+ project_name: integration.project_name,
2691
+ setting_type: integration.setting_type,
2692
+ ...credentialValues && { credential_values: credentialValues }
2693
+ });
2694
+ }
2695
+ const config = {
2696
+ version: "1",
2697
+ project: {
2698
+ name: projectName,
2699
+ description: `Backup from ${backup.metadata.timestamp}`
2700
+ },
2701
+ environment: {
2702
+ // biome-ignore lint/suspicious/noTemplateCurlyInString: Not a template, but exact formatting
2703
+ codemie_api_url: "${CODEMIE_API_URL}",
2704
+ // biome-ignore lint/suspicious/noTemplateCurlyInString: Not a template, but exact formatting
2705
+ auth_server_url: "${CODEMIE_AUTH_URL}",
2706
+ // biome-ignore lint/suspicious/noTemplateCurlyInString: Not a template, but exact formatting
2707
+ auth_realm_name: "${CODEMIE_REALM:-codemie}",
2708
+ // biome-ignore lint/suspicious/noTemplateCurlyInString: Not a template, but exact formatting
2709
+ client_id: "${CODEMIE_CLIENT_ID:-}",
2710
+ // biome-ignore lint/suspicious/noTemplateCurlyInString: Not a template, but exact formatting
2711
+ client_secret: "${CODEMIE_CLIENT_SECRET:-}",
2712
+ // biome-ignore lint/suspicious/noTemplateCurlyInString: Not a template, but exact formatting
2713
+ username: "${CODEMIE_USERNAME:-}",
2714
+ // biome-ignore lint/suspicious/noTemplateCurlyInString: Not a template, but exact formatting
2715
+ password: "${CODEMIE_PASSWORD:-}"
2716
+ },
2717
+ imported: {
2718
+ assistants: [],
2719
+ datasources: [],
2720
+ integrations: integrationArray
2721
+ },
2722
+ datasource_defaults: {
2723
+ code: { type: "code", index_type: "code", embeddings_model: "ada-002", shared_with_project: true },
2724
+ knowledge_base_confluence: {
2725
+ type: "knowledge_base_confluence",
2726
+ embeddings_model: "ada-002",
2727
+ shared_with_project: true
2728
+ },
2729
+ knowledge_base_jira: { type: "knowledge_base_jira", embeddings_model: "ada-002", shared_with_project: true }
2730
+ },
2731
+ resources: {
2732
+ assistants: backup.resources.assistants.map(
2733
+ (assistant) => prepareAssistantForYaml(assistant, backup.state, integrationIdToAlias, integrationSpecPaths)
2734
+ ),
2735
+ datasources: backup.resources.datasources.map(
2736
+ (datasource) => prepareDatasourceForYaml(datasource, backup.state, integrationIdToAlias)
2737
+ ),
2738
+ workflows: backup.resources.workflows.map(
2739
+ (workflow) => prepareWorkflowForYaml(workflow, backup.state, backup.resources.assistants, backupDir)
2740
+ )
2741
+ }
2742
+ };
2743
+ return yaml.stringify(config);
2744
+ }
2745
+ function saveIntegrationOpenApiSpecs(backupData, backupDir) {
2746
+ const specsDir = path.join(backupDir, "openapi_specs");
2747
+ let specsCount = 0;
2748
+ const integrationSpecPaths = /* @__PURE__ */ new Map();
2749
+ for (const integration of backupData.resources.integrations) {
2750
+ if (integration.credential_type !== "OpenAPI" || !integration.credential_values) {
2751
+ continue;
2752
+ }
2753
+ const specEntry = integration.credential_values.find(
2754
+ (cv) => cv.key === "openapi_spec"
2755
+ );
2756
+ if (specEntry && typeof specEntry.value === "string" && specEntry.value) {
2757
+ const alias = integration.alias || integration.id;
2758
+ const fileName = String(alias).toLowerCase().replaceAll(/[^a-z0-9]+/g, "-");
2759
+ if (!fs.existsSync(specsDir)) {
2760
+ fs.mkdirSync(specsDir, { recursive: true });
2761
+ }
2762
+ const specContent = specEntry.value;
2763
+ let fileExtension = ".yaml";
2764
+ let contentToSave = specContent;
2765
+ try {
2766
+ const parsed = JSON.parse(specContent);
2767
+ fileExtension = ".json";
2768
+ contentToSave = JSON.stringify(parsed, null, 2);
2769
+ } catch {
2770
+ fileExtension = ".yaml";
2771
+ }
2772
+ const specFileName = `${fileName}${fileExtension}`;
2773
+ const specPath = path.join(specsDir, specFileName);
2774
+ fs.writeFileSync(specPath, contentToSave, "utf8");
2775
+ integrationSpecPaths.set(integration.id, `openapi_specs/${specFileName}`);
2776
+ specsCount++;
2777
+ }
2778
+ }
2779
+ if (specsCount > 0) {
2780
+ logger.info(` \u2713 Saved ${specsCount} integration OpenAPI spec(s)`);
2781
+ }
2782
+ return integrationSpecPaths;
2783
+ }
2784
+ function saveBackupFiles(backupData, backupDir, projectName) {
2785
+ logger.info("\u{1F4BE} Writing backup files...\n");
2786
+ const integrationSpecPaths = saveIntegrationOpenApiSpecs(backupData, backupDir);
2787
+ const codemieYamlPath = path.join(backupDir, "codemie.yaml");
2788
+ const codemieYaml = generateCodemieYaml(backupData, projectName, backupDir, integrationSpecPaths);
2789
+ fs.writeFileSync(codemieYamlPath, codemieYaml, "utf8");
2790
+ logger.info(` \u2713 Saved config file: codemie.yaml`);
2791
+ const backupJsonPath = path.join(backupDir, "backup.json");
2792
+ fs.writeFileSync(backupJsonPath, JSON.stringify(backupData, null, 2), "utf8");
2793
+ logger.info(` \u2713 Saved full backup: backup.json`);
2794
+ const statePath = path.join(backupDir, "state.json");
2795
+ fs.writeFileSync(statePath, JSON.stringify(backupData.state, null, 2), "utf8");
2796
+ logger.info(` \u2713 Saved state file: state.json`);
2797
+ }
2798
+ function createConcurrentLimiter(maxConcurrent = RATE_LIMITING.MAX_CONCURRENT_REQUESTS) {
2799
+ return pLimit(maxConcurrent);
2800
+ }
2801
+ async function withRetry(fn, operation, maxAttempts = RATE_LIMITING.RETRY_ATTEMPTS) {
2802
+ let lastError;
2803
+ for (let attempt = 1; attempt <= maxAttempts; attempt++) {
2804
+ try {
2805
+ return await fn();
2806
+ } catch (error) {
2807
+ lastError = error instanceof Error ? error : new Error(String(error));
2808
+ const errorMsg = lastError.message.toLowerCase();
2809
+ const isRetryable = lastError.message.includes("429") || errorMsg.includes("too many requests") || /\b5\d{2}\b/.test(lastError.message) || // 5xx HTTP status codes only
2810
+ errorMsg.includes("timeout") || errorMsg.includes("econnreset") || errorMsg.includes("econnrefused");
2811
+ if (!isRetryable || attempt === maxAttempts) {
2812
+ throw lastError;
2813
+ }
2814
+ const delayMs = RATE_LIMITING.RETRY_DELAY_MS * 2 ** (attempt - 1);
2815
+ logger.warn(` \u26A0\uFE0F Retry ${attempt}/${maxAttempts} for ${operation} after ${delayMs}ms...`);
2816
+ await new Promise((resolve3) => {
2817
+ const timerId = setTimeout(resolve3, delayMs);
2818
+ timerId.unref();
2819
+ });
2820
+ }
2821
+ }
2822
+ throw lastError || new Error(`Failed after ${maxAttempts} attempts: ${operation}`);
2823
+ }
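// Combined pattern used by the backup functions below: a shared limiter caps concurrency at
// MAX_CONCURRENT_REQUESTS while withRetry adds exponential backoff (1 s, then 2 s, across
// RETRY_ATTEMPTS tries) for retryable errors. `fetchOne` is an illustrative callback.
async function exampleFetchAll(ids, fetchOne) {
  const limit = createConcurrentLimiter();
  return Promise.all(ids.map((id) => limit(() => withRetry(() => fetchOne(id), `fetch ${id}`))));
}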
2824
+
2825
+ // src/lib/timeoutUtils.ts
2826
+ async function withTimeout(promise, timeoutMs, operation) {
2827
+ let timeoutId;
2828
+ const timeoutPromise = new Promise((_, reject) => {
2829
+ timeoutId = setTimeout(() => reject(new Error(`Operation timeout: ${operation} (${timeoutMs}ms)`)), timeoutMs);
2830
+ timeoutId.unref();
2831
+ });
2832
+ try {
2833
+ return await Promise.race([promise, timeoutPromise]);
2834
+ } finally {
2835
+ if (timeoutId !== void 0) {
2836
+ clearTimeout(timeoutId);
2837
+ }
2838
+ }
2839
+ }
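// Note: the race above rejects on timeout but does not abort the in-flight request; the
// clearTimeout in the finally block only prevents a dangling timer. Usage sketch mirroring
// the fetch calls further below:
async function exampleGetWithDeadline(client, assistantId) {
  return withTimeout(
    client.assistants.get(assistantId),
    TIMEOUTS_MS.ASSISTANT_FETCH,
    `fetch assistant ${assistantId}`
  );
}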
2840
+
2841
+ // src/backup.ts
2842
+ async function* streamResources(fetchPage, resourceType) {
2843
+ let page = 0;
2844
+ let hasMore = true;
2845
+ let totalFetched = 0;
2846
+ logger.info(` Streaming ${resourceType} (with pagination)...`);
2847
+ while (hasMore) {
2848
+ const resources = await fetchPage({
2849
+ per_page: PAGINATION.DEFAULT_PAGE_SIZE,
2850
+ page
2851
+ });
2852
+ if (resources.length === 0) {
2853
+ break;
2854
+ }
2855
+ for (const resource of resources) {
2856
+ totalFetched++;
2857
+ yield resource;
2858
+ }
2859
+ if (resources.length < PAGINATION.DEFAULT_PAGE_SIZE) {
2860
+ hasMore = false;
2861
+ } else {
2862
+ page++;
2863
+ logger.info(` Streamed ${totalFetched} ${resourceType} so far...`);
2864
+ }
2865
+ }
2866
+ }
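// Pagination sketch: the generator requests DEFAULT_PAGE_SIZE items per page until a short
// page signals the end, so callers can consume results lazily.
async function exampleListWorkflowNames(client) {
  const names = [];
  for await (const wf of streamResources((params) => client.workflows.list(params), "workflows")) {
    names.push(wf.name);
  }
  return names;
}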
2867
+ async function saveAssistantToBackup(assistant, client, backupData, backupDir) {
2868
+ logger.info(` \u2022 ${assistant.name} (${assistant.id})`);
2869
+ const full = await withTimeout(
2870
+ client.assistants.get(assistant.id),
2871
+ TIMEOUTS_MS.ASSISTANT_FETCH,
2872
+ `Timeout fetching assistant ${assistant.id}`
2873
+ );
2874
+ backupData.resources.assistants.push(full);
2875
+ if (full.system_prompt) {
2876
+ const fileName = `${full.slug || sanitizeFileName(full.name)}.prompt.md`;
2877
+ const filePath = path.join(backupDir, "system_prompts", fileName);
2878
+ ensureDirectoryExists(filePath);
2879
+ fs.writeFileSync(filePath, full.system_prompt, "utf8");
2880
+ }
2881
+ }
+ async function backupAssistants(client, backupData, backupDir, transaction) {
+   logger.info("\u{1F916} Fetching assistants...");
+   const allAssistants = [];
+   for await (const assistant of streamResources((params) => client.assistants.list(params), "assistants")) {
+     allAssistants.push(assistant);
+   }
+   logger.info(`  Found ${allAssistants.length} assistant(s)`);
+   transaction.setTotal("assistants", allAssistants.length);
+   const limit = createConcurrentLimiter();
+   for (const assistant of allAssistants) {
+     if (transaction.isCompleted("assistants", assistant.id)) {
+       logger.info(`  \u21B7 Skipping ${assistant.name} (already backed up)`);
+       continue;
+     }
+     try {
+       await limit(
+         () => withRetry(
+           async () => saveAssistantToBackup(assistant, client, backupData, backupDir),
+           `Backup assistant ${assistant.name}`
+         )
+       );
+       transaction.markCompleted("assistants", assistant.id);
+     } catch (error) {
+       const errorDetails = error instanceof Error ? { message: error.message, stack: error.stack, name: error.name } : { message: String(error) };
+       logger.error(`  \u274C Failed to backup ${assistant.name}:`);
+       logger.error(`    ${errorDetails.message}`);
+       if (errorDetails.stack) {
+         logger.error(`    Stack trace: ${errorDetails.stack.split("\n").slice(1, 3).join("\n    ")}`);
+       }
+       transaction.markFailed("assistants", assistant.id, errorDetails.message);
+     }
+   }
+   logger.info(`\u2713 Backed up ${transaction.getData().resources.assistants.completed.length} assistant(s)
+ `);
+ }
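// Resume-behaviour sketch (based only on the calls visible above; ids hypothetical):
// each assistant id is recorded via transaction.markCompleted / markFailed, and a re-run
// skips ids for which transaction.isCompleted("assistants", id) is true, so an
// interrupted backup can presumably pick up where it left off.
//
//   transaction.setTotal("assistants", 3);
//   // run 1: completes "a1", fails "a2", is interrupted before "a3"
//   // run 2: isCompleted("assistants", "a1") === true -> skipped;
//   //        "a2" and "a3" go back through withRetry inside the concurrency limiter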
+ async function processDatasourceBackup(datasource, client, backupData, transaction) {
+   logger.info(`  \u2022 ${datasource.name} (${datasource.id})`);
+   const full = await withTimeout(
+     client.datasources.get(datasource.id),
+     TIMEOUTS_MS.DATASOURCE_FETCH,
+     `Timeout fetching datasource ${datasource.id}`
+   );
+   backupData.resources.datasources.push(full);
+   transaction.markCompleted("datasources", datasource.id);
+ }
+ async function backupDatasources(client, backupData, transaction) {
+   logger.info("\u{1F4CA} Fetching datasources...");
+   const datasources = [];
+   for await (const datasource of streamResources((params) => client.datasources.list(params), "datasources")) {
+     datasources.push(datasource);
+   }
+   logger.info(`  Found ${datasources.length} datasource(s)`);
+   transaction.setTotal("datasources", datasources.length);
+   const limit = createConcurrentLimiter();
+   for (const datasource of datasources) {
+     if (transaction.isCompleted("datasources", datasource.id)) {
+       logger.info(`  \u21B7 Skipping ${datasource.name} (already backed up)`);
+       continue;
+     }
+     try {
+       await limit(
+         () => withRetry(
+           async () => processDatasourceBackup(datasource, client, backupData, transaction),
+           `Backup datasource ${datasource.name}`
+         )
+       );
+     } catch (error) {
+       const errorDetails = error instanceof Error ? { message: error.message, stack: error.stack, name: error.name } : { message: String(error) };
+       logger.error(`  \u274C Failed to backup ${datasource.name}:`);
+       logger.error(`    ${errorDetails.message}`);
+       if (errorDetails.stack) {
+         logger.error(`    Stack trace: ${errorDetails.stack.split("\n").slice(1, 3).join("\n    ")}`);
+       }
+       transaction.markFailed("datasources", datasource.id, errorDetails.message);
+     }
+   }
+   logger.info(`\u2713 Backed up ${transaction.getData().resources.datasources.completed.length} datasource(s)
+ `);
+ }
+ async function backupWorkflows(client, backupData, backupDir, transaction) {
+   logger.info("\u{1F504} Fetching workflows...");
+   const workflows = [];
+   for await (const workflow of streamResources((params) => client.workflows.list(params), "workflows")) {
+     workflows.push(workflow);
+   }
+   logger.info(`  Found ${workflows.length} workflow(s)`);
+   transaction.setTotal("workflows", workflows.length);
+   for (const workflow of workflows) {
+     if (transaction.isCompleted("workflows", workflow.id)) {
+       logger.info(`  \u21B7 Skipping ${workflow.name} (already backed up)`);
+       continue;
+     }
+     try {
+       logger.info(`  \u2022 ${workflow.name} (${workflow.id})`);
+       const full = await withTimeout(
+         client.workflows.get(workflow.id),
+         TIMEOUTS_MS.WORKFLOW_FETCH,
+         `Timeout fetching workflow ${workflow.id}`
+       );
+       backupData.resources.workflows.push(full);
+       if (full.yaml_config) {
+         const fileName = `${sanitizeFileName(full.name)}.yaml`;
+         const filePath = path.join(backupDir, "workflows", fileName);
+         ensureDirectoryExists(filePath);
+         fs.writeFileSync(filePath, full.yaml_config, "utf8");
+       }
+       transaction.markCompleted("workflows", workflow.id);
+     } catch (error) {
+       const errorDetails = error instanceof Error ? { message: error.message, stack: error.stack, name: error.name } : { message: String(error) };
+       logger.error(`  \u274C Failed to backup ${workflow.name}:`);
+       logger.error(`    ${errorDetails.message}`);
+       if (errorDetails.stack) {
+         logger.error(`    Stack trace: ${errorDetails.stack.split("\n").slice(1, 3).join("\n    ")}`);
+       }
+       transaction.markFailed("workflows", workflow.id, errorDetails.message);
+     }
+   }
+   logger.info(`\u2713 Backed up ${transaction.getData().resources.workflows.completed.length} workflow(s)
+ `);
+ }
+ async function backupIntegrations(client, backupData) {
+   logger.info("\u{1F50C} Fetching integrations...");
+   const projectIntegrations = await withTimeout(
+     client.integrations.list({
+       per_page: PAGINATION.DEFAULT_PAGE_SIZE,
+       page: 0,
+       setting_type: "project"
+     }),
+     TIMEOUTS_MS.INTEGRATION_FETCH,
+     "Timeout fetching project integrations"
+   );
+   logger.info(`  Found ${projectIntegrations.length} project integration(s)`);
+   for (const integration of projectIntegrations) {
+     logger.info(`  \u2022 ${integration.alias || integration.credential_type} (${integration.id}) [project]`);
+     backupData.resources.integrations.push(integration);
+   }
+   const userIntegrations = await withTimeout(
+     client.integrations.list({
+       per_page: PAGINATION.DEFAULT_PAGE_SIZE,
+       page: 0,
+       setting_type: "user"
+     }),
+     TIMEOUTS_MS.INTEGRATION_FETCH,
+     "Timeout fetching user integrations"
+   );
+   logger.info(`  Found ${userIntegrations.length} user integration(s)`);
+   for (const integration of userIntegrations) {
+     logger.info(`  \u2022 ${integration.alias || integration.credential_type} (${integration.id}) [user]`);
+     backupData.resources.integrations.push(integration);
+   }
+   logger.info(`\u2713 Backed up ${projectIntegrations.length + userIntegrations.length} integration(s)
+ `);
+ }
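// Note: integrations are fetched in two passes, setting_type: "project" and
// setting_type: "user", each as a single page (page: 0). Call shape, with the page size
// value purely illustrative:
//
//   await client.integrations.list({ per_page: 100, page: 0, setting_type: "project" });
//
// Unlike assistants, datasources and workflows, this list is not streamed across pages
// and is not tracked in the BackupTransaction.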
+ function getUniqueBackupDir(baseDir, timestamp) {
+   let counter = 0;
+   let finalBackupDir = path.join(baseDir, timestamp);
+   while (fs.existsSync(finalBackupDir)) {
+     counter++;
+     finalBackupDir = path.join(baseDir, `${timestamp}-${counter}`);
+   }
+   const dirSuffix = counter > 0 ? `${timestamp}-${counter}` : timestamp;
+   const tempBackupDir = path.join(baseDir, `${BACKUP.TEMP_DIR_PREFIX}${dirSuffix}`);
+   return { finalDir: finalBackupDir, tempDir: tempBackupDir };
+ }
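// Collision-handling sketch (directory names hypothetical; the temp prefix is whatever
// BACKUP.TEMP_DIR_PREFIX is defined as elsewhere in this bundle):
//
//   getUniqueBackupDir("/repo/backups", "2024-01-01T00-00-00")
//   // if "2024-01-01T00-00-00" already exists:
//   //   finalDir -> /repo/backups/2024-01-01T00-00-00-1
//   //   tempDir  -> /repo/backups/<TEMP_DIR_PREFIX>2024-01-01T00-00-00-1
//
// The backup is written into tempDir first and only moved to finalDir once it completes.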
+ function performCleanup(tempBackupDir, transaction) {
+   if (fs.existsSync(tempBackupDir)) {
+     try {
+       logger.info(`\u{1F9F9} Rolling back: cleaning up ${tempBackupDir}...`);
+       cleanupDirectory(tempBackupDir);
+       logger.info("\u2713 Rollback completed");
+     } catch (cleanupError) {
+       logger.error(
+         "\u26A0\uFE0F Failed to cleanup backup directory:",
+         cleanupError instanceof Error ? cleanupError.message : String(cleanupError)
+       );
+     }
+   }
+   if (transaction) {
+     try {
+       transaction.cleanup();
+       logger.info("\u2713 Transaction file cleaned up");
+     } catch (cleanupError) {
+       logger.error(
+         "\u26A0\uFE0F Failed to cleanup transaction file:",
+         cleanupError instanceof Error ? cleanupError.message : String(cleanupError)
+       );
+     }
+   }
+ }
+ async function backupResources(options) {
+   logger.info("\u{1F5C4}\uFE0F Creating backup of all Codemie resources...\n");
+   const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/\.\d{3}Z$/, "").replaceAll(":", "-");
+   const { appConfig } = options;
+   const { rootDir, backupsDirectory } = appConfig;
+   const backupBaseDir = path.join(rootDir, backupsDirectory);
+   const { finalDir: finalBackupDir, tempDir: tempBackupDir } = getUniqueBackupDir(backupBaseDir, timestamp);
+   let transaction = null;
+   try {
+     validateBackupDirectory(backupBaseDir);
+     const loader = new CodemieConfigLoader(appConfig);
+     const config = loader.loadConfig();
+     logger.info("\u{1F50C} Connecting to Codemie API...");
+     const client = await createClient(config);
+     logger.info("\u2713 Connected to Codemie API\n");
+     if (!fs.existsSync(tempBackupDir)) {
+       fs.mkdirSync(tempBackupDir, { recursive: true });
+     }
+     logger.info(`\u{1F4C1} Temporary backup directory: ${tempBackupDir}
+ `);
+     transaction = new BackupTransaction(tempBackupDir);
+     const backupData = {
+       metadata: {
+         timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+         projectName: config.project.name,
+         codemieApiUrl: config.environment.codemie_api_url
+       },
+       resources: {
+         assistants: [],
+         datasources: [],
+         workflows: [],
+         integrations: []
+       },
+       state: {
+         version: "1.0",
+         project: config.project.name,
+         lastSync: (/* @__PURE__ */ new Date()).toISOString(),
+         resources: {
+           assistants: {},
+           datasources: {},
+           workflows: {}
+         }
+       }
+     };
+     await backupAssistants(client, backupData, tempBackupDir, transaction);
+     await backupDatasources(client, backupData, transaction);
+     await backupWorkflows(client, backupData, tempBackupDir, transaction);
+     await backupIntegrations(client, backupData);
+     const stats = transaction.getData();
+     const totalFailed = stats.resources.assistants.failed.length + stats.resources.datasources.failed.length + stats.resources.workflows.failed.length;
+     if (totalFailed > 0) {
+       logger.warn(`
+ \u26A0\uFE0F Backup completed with ${totalFailed} failed resource(s)`);
+       logger.warn("Review transaction.json for details\n");
+       const totalResources = stats.resources.assistants.total + stats.resources.datasources.total + stats.resources.workflows.total;
+       const failureRate = totalResources > 0 ? totalFailed / totalResources * 100 : 0;
+       if (failureRate > 20) {
+         throw new Error(
+           `Too many failures (${totalFailed}/${totalResources} = ${failureRate.toFixed(1)}%), backup aborted`
+         );
+       }
+     }
+     saveBackupFiles(backupData, tempBackupDir, config.project.name);
+     transaction.complete();
+     logger.info(`
+ \u{1F504} Finalizing backup...`);
+     moveAtomically(tempBackupDir, finalBackupDir);
+     logger.info(`\u2713 Backup finalized at ${finalBackupDir}
+ `);
+     const separator = "=".repeat(50);
+     logger.info(`${separator}`);
+     logger.info("\u{1F4CA} Backup Summary:\n");
+     logger.info(`  \u{1F916} Assistants: ${backupData.resources.assistants.length}`);
+     logger.info(`  \u{1F4CA} Datasources: ${backupData.resources.datasources.length}`);
+     logger.info(`  \u{1F504} Workflows: ${backupData.resources.workflows.length}`);
+     logger.info(`  \u{1F50C} Integrations: ${backupData.resources.integrations.length}`);
+     logger.info(`
+ \u{1F4C1} Location: ${finalBackupDir}
+ `);
+     logger.info("\u{1F4C8} Detailed Progress:\n");
+     logger.info(transaction.getSummary());
+     logger.info("");
+     transaction.cleanup();
+     logger.info("\u2705 Backup completed successfully!");
+   } catch (error) {
+     logger.error("\n\u274C Backup failed:");
+     logger.error(error instanceof Error ? error.message : String(error));
+     performCleanup(tempBackupDir, transaction);
+     throw error;
+   }
+ }
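// Invocation sketch (the appConfig shape below is inferred from the fields read above
// and is not a documented contract; values are hypothetical):
//
//   await backupResources({
//     appConfig: {
//       rootDir: process.cwd(),        // base directory of the IaC project
//       backupsDirectory: "backups",   // joined onto rootDir to form the backup base dir
//       // ...plus whatever CodemieConfigLoader needs to locate the main config file
//     }
//   });
//
// On any error the temporary directory and transaction file are rolled back via
// performCleanup and the error is re-thrown.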
+ async function destroyResources(options) {
+   logger.info("\u{1F5D1}\uFE0F Destroy all managed resources\n");
+ logger.info("\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\n");
+   try {
+     const { appConfig, force = false } = options;
+     const loader = new CodemieConfigLoader(appConfig);
+     const stateManager = new StateManager(appConfig);
+     const config = loader.loadConfig();
+     logger.info(`Project: ${config.project.name}
+ `);
+     const managed = stateManager.getAllManagedResources();
+     const total = managed.assistants.length + managed.datasources.length + managed.workflows.length;
+     if (total === 0) {
+       logger.info("\u2713 No managed resources found in state\n");
+ logger.info("\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\n");
+       return;
+     }
+     logger.info(`Found ${total} managed resource(s):
+ `);
+     if (managed.assistants.length > 0) {
+       logger.info(`  \u{1F916} Assistants: ${managed.assistants.length}`);
+       for (const name of managed.assistants) {
+         logger.info(`    \u2022 ${name}`);
+       }
+     }
+     if (managed.datasources.length > 0) {
+       logger.info(`  \u{1F4CA} Datasources: ${managed.datasources.length}`);
+       for (const name of managed.datasources) {
+         logger.info(`    \u2022 ${name}`);
+       }
+     }
+     if (managed.workflows.length > 0) {
+       logger.info(`  \u{1F504} Workflows: ${managed.workflows.length}`);
+       for (const name of managed.workflows) {
+         logger.info(`    \u2022 ${name}`);
+       }
+     }
+     logger.warn("\n\u26A0\uFE0F WARNING: This will DELETE all resources listed above!");
+     logger.warn("\u26A0\uFE0F These resources were created through IaC (in state.json)\n");
+ logger.info("\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\n");
+     if (force) {
+       logger.info("\u{1F680} --force flag detected, skipping confirmation\n");
+     } else {
+       const readline = await import('readline');
+       const rl = readline.createInterface({
+         input: process.stdin,
+         output: process.stdout
+       });
+       const answer = await new Promise((resolve3) => {
+         rl.question('Type "destroy" to confirm deletion: ', resolve3);
+       });
+       rl.close();
+       if (answer.trim().toLowerCase() !== "destroy") {
+         logger.info("\n\u274C Destruction cancelled\n");
+         return;
+       }
+     }
+     logger.info("\n\u{1F5D1}\uFE0F Deleting resources...\n");
+     const client = await createClient(config);
+     const cleanupManager = new CleanupManager(client, stateManager);
+     const result = await cleanupManager.deleteOrphanedResources(managed);
+     const totalDeleted = result.deleted.assistants.length + result.deleted.datasources.length + result.deleted.workflows.length;
+ logger.info("\n\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\n");
+     logger.info("\u{1F4CA} Destruction Summary:\n");
+     logger.info(`  \u2705 Deleted: ${totalDeleted}`);
+     if (result.errors.length > 0) {
+       logger.error(`  \u274C Failed: ${result.errors.length}
+ `);
+       for (const err of result.errors) {
+         logger.error(`    \u2022 ${err.type} ${err.name}: ${err.error}`);
+       }
+     }
+ logger.info("\n\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\n");
+     if (result.errors.length > 0) {
+       throw new Error("Destruction completed with errors");
+     } else {
+       logger.info("\u2705 All managed resources destroyed");
+     }
+   } catch (error) {
+     logger.error("\n\u274C Destruction failed:");
+     logger.error(error instanceof Error ? error.message : String(error));
+     throw error;
+   }
+ }
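// Invocation sketch (options shape inferred from the destructuring above):
//
//   await destroyResources({ appConfig, force: true });  // skips the interactive prompt
//   await destroyResources({ appConfig });                // asks to type "destroy" first
//
// Only resources recorded in state (via StateManager.getAllManagedResources) are
// targeted for deletion.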
+
+ export { CleanupManager, CodemieConfigLoader, LogLevel, StateManager, backupResources, createClient, deployAssistants, deployDatasources, deployResources, deployWorkflows, destroyResources, logger, previewChanges, validateConfig };
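// Consumer sketch (illustrative; assumes the published entry point exposes these symbols
// under the package name shown, and that appConfig is built as sketched earlier):
//
//   import { backupResources, destroyResources } from '@codemieai/cdk';
//
//   await backupResources({ appConfig });
//   await destroyResources({ appConfig, force: true });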